From 74a8af6ca6b1a35e2fd5e99a3a464b7dfb02f897 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Fri, 24 Dec 2021 23:00:59 +0000 Subject: [PATCH 001/730] Run the js_test in CI We add a script for running the js tests in `scripts/ci/js_tests`. This script can also be run locally. We move the `automerge-js` package to below the `automerge-wasm` crate as it is specifically testing the wasm interface. We also add an action to the github actions workflow for CI to run the js tests. --- .github/workflows/ci.yaml | 9 +++++++++ .../automerge-js}/.gitignore | 0 .../automerge-js}/package.json | 2 +- .../automerge-js}/src/columnar.js | 0 .../automerge-js}/src/common.js | 0 .../automerge-js}/src/constants.js | 0 .../automerge-js}/src/counter.js | 0 .../automerge-js}/src/encoding.js | 0 .../automerge-js}/src/index.js | 0 .../automerge-js}/src/numbers.js | 0 .../automerge-js}/src/proxies.js | 0 .../automerge-js}/src/sync.js | 0 .../automerge-js}/src/text.js | 0 .../automerge-js}/src/uuid.js | 0 .../automerge-js}/test/basic_test.js | 0 .../automerge-js}/test/columnar_test.js | 0 .../automerge-js}/test/helpers.js | 0 .../automerge-js}/test/legacy_tests.js | 0 .../automerge-js}/test/sync_test.js | 0 .../automerge-js}/test/text_test.js | 0 .../automerge-js}/test/uuid_test.js | 0 scripts/ci/js_tests | 17 +++++++++++++++++ scripts/ci/run | 1 + 23 files changed, 28 insertions(+), 1 deletion(-) rename {automerge-js => automerge-wasm/automerge-js}/.gitignore (100%) rename {automerge-js => automerge-wasm/automerge-js}/package.json (86%) rename {automerge-js => automerge-wasm/automerge-js}/src/columnar.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/src/common.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/src/constants.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/src/counter.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/src/encoding.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/src/index.js (100%) rename 
{automerge-js => automerge-wasm/automerge-js}/src/numbers.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/src/proxies.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/src/sync.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/src/text.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/src/uuid.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/test/basic_test.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/test/columnar_test.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/test/helpers.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/test/legacy_tests.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/test/sync_test.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/test/text_test.js (100%) rename {automerge-js => automerge-wasm/automerge-js}/test/uuid_test.js (100%) create mode 100755 scripts/ci/js_tests diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 90d81636..5bdb2bed 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -53,6 +53,15 @@ jobs: with: command: check ${{ matrix.checks }} + js_tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Install wasm-pack + run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh + - name: run tests + run: ./scripts/ci/js_tests + linux: runs-on: ubuntu-latest strategy: diff --git a/automerge-js/.gitignore b/automerge-wasm/automerge-js/.gitignore similarity index 100% rename from automerge-js/.gitignore rename to automerge-wasm/automerge-js/.gitignore diff --git a/automerge-js/package.json b/automerge-wasm/automerge-js/package.json similarity index 86% rename from automerge-js/package.json rename to automerge-wasm/automerge-js/package.json index 17018429..f0e65a18 100644 --- a/automerge-js/package.json +++ b/automerge-wasm/automerge-js/package.json @@ -10,7 +10,7 @@ "mocha": "^9.1.1" }, "dependencies": { - 
"automerge-wasm": "file:../automerge-wasm", + "automerge-wasm": "file:../dev", "fast-sha256": "^1.3.0", "pako": "^2.0.4", "uuid": "^8.3" diff --git a/automerge-js/src/columnar.js b/automerge-wasm/automerge-js/src/columnar.js similarity index 100% rename from automerge-js/src/columnar.js rename to automerge-wasm/automerge-js/src/columnar.js diff --git a/automerge-js/src/common.js b/automerge-wasm/automerge-js/src/common.js similarity index 100% rename from automerge-js/src/common.js rename to automerge-wasm/automerge-js/src/common.js diff --git a/automerge-js/src/constants.js b/automerge-wasm/automerge-js/src/constants.js similarity index 100% rename from automerge-js/src/constants.js rename to automerge-wasm/automerge-js/src/constants.js diff --git a/automerge-js/src/counter.js b/automerge-wasm/automerge-js/src/counter.js similarity index 100% rename from automerge-js/src/counter.js rename to automerge-wasm/automerge-js/src/counter.js diff --git a/automerge-js/src/encoding.js b/automerge-wasm/automerge-js/src/encoding.js similarity index 100% rename from automerge-js/src/encoding.js rename to automerge-wasm/automerge-js/src/encoding.js diff --git a/automerge-js/src/index.js b/automerge-wasm/automerge-js/src/index.js similarity index 100% rename from automerge-js/src/index.js rename to automerge-wasm/automerge-js/src/index.js diff --git a/automerge-js/src/numbers.js b/automerge-wasm/automerge-js/src/numbers.js similarity index 100% rename from automerge-js/src/numbers.js rename to automerge-wasm/automerge-js/src/numbers.js diff --git a/automerge-js/src/proxies.js b/automerge-wasm/automerge-js/src/proxies.js similarity index 100% rename from automerge-js/src/proxies.js rename to automerge-wasm/automerge-js/src/proxies.js diff --git a/automerge-js/src/sync.js b/automerge-wasm/automerge-js/src/sync.js similarity index 100% rename from automerge-js/src/sync.js rename to automerge-wasm/automerge-js/src/sync.js diff --git a/automerge-js/src/text.js 
b/automerge-wasm/automerge-js/src/text.js similarity index 100% rename from automerge-js/src/text.js rename to automerge-wasm/automerge-js/src/text.js diff --git a/automerge-js/src/uuid.js b/automerge-wasm/automerge-js/src/uuid.js similarity index 100% rename from automerge-js/src/uuid.js rename to automerge-wasm/automerge-js/src/uuid.js diff --git a/automerge-js/test/basic_test.js b/automerge-wasm/automerge-js/test/basic_test.js similarity index 100% rename from automerge-js/test/basic_test.js rename to automerge-wasm/automerge-js/test/basic_test.js diff --git a/automerge-js/test/columnar_test.js b/automerge-wasm/automerge-js/test/columnar_test.js similarity index 100% rename from automerge-js/test/columnar_test.js rename to automerge-wasm/automerge-js/test/columnar_test.js diff --git a/automerge-js/test/helpers.js b/automerge-wasm/automerge-js/test/helpers.js similarity index 100% rename from automerge-js/test/helpers.js rename to automerge-wasm/automerge-js/test/helpers.js diff --git a/automerge-js/test/legacy_tests.js b/automerge-wasm/automerge-js/test/legacy_tests.js similarity index 100% rename from automerge-js/test/legacy_tests.js rename to automerge-wasm/automerge-js/test/legacy_tests.js diff --git a/automerge-js/test/sync_test.js b/automerge-wasm/automerge-js/test/sync_test.js similarity index 100% rename from automerge-js/test/sync_test.js rename to automerge-wasm/automerge-js/test/sync_test.js diff --git a/automerge-js/test/text_test.js b/automerge-wasm/automerge-js/test/text_test.js similarity index 100% rename from automerge-js/test/text_test.js rename to automerge-wasm/automerge-js/test/text_test.js diff --git a/automerge-js/test/uuid_test.js b/automerge-wasm/automerge-js/test/uuid_test.js similarity index 100% rename from automerge-js/test/uuid_test.js rename to automerge-wasm/automerge-js/test/uuid_test.js diff --git a/scripts/ci/js_tests b/scripts/ci/js_tests new file mode 100755 index 00000000..7d55db77 --- /dev/null +++ b/scripts/ci/js_tests @@ 
-0,0 +1,17 @@ +THIS_SCRIPT=$(dirname "$0"); +WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; +JS_PROJECT=$THIS_SCRIPT/../../automerge-wasm/automerge-js; + +# This will take care of running wasm-pack +yarn --cwd $WASM_PROJECT build; +# If the dependencies are already installed we delete automerge-wasm. This makes +# this script usable for iterative development. +if [ -d $JS_PROJECT/node_modules/automerge-wasm ]; then + rm -rf $JS_PROJECT/node_modules/automerge-wasm +fi +# --check-files forces yarn to check if the local dep has changed +yarn --cwd $JS_PROJECT install --check-files; +yarn --cwd $JS_PROJECT test; + + + diff --git a/scripts/ci/run b/scripts/ci/run index c03f2991..42367e10 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -6,3 +6,4 @@ set -eou pipefail ./scripts/ci/build-test ./scripts/ci/docs ./scripts/ci/advisory +./scripts/ci/js_tests From 29820f9d50c0b5f56fbbf735037e45129938283d Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 27 Dec 2021 12:59:13 +0000 Subject: [PATCH 002/730] wip --- automerge-wasm/src/lib.rs | 37 +-- automerge/src/change.rs | 13 +- automerge/src/clock.rs | 22 +- automerge/src/columnar.rs | 25 +- automerge/src/external_types.rs | 89 ++++++ automerge/src/indexed_cache.rs | 5 + automerge/src/lib.rs | 292 ++++++++++-------- automerge/src/op_set.rs | 58 +++- automerge/src/op_tree.rs | 6 +- automerge/src/query.rs | 2 +- automerge/src/query/len.rs | 2 +- automerge/src/query/list_vals.rs | 4 +- automerge/src/query/prop.rs | 4 +- automerge/src/types.rs | 58 ++-- automerge/src/value.rs | 2 +- automerge/tests/helpers/mod.rs | 201 +++---------- automerge/tests/test.rs | 491 ++++++++++++++++--------------- edit-trace/Cargo.toml | 3 + edit-trace/src/main.rs | 6 +- todo.adoc | 3 + 20 files changed, 703 insertions(+), 620 deletions(-) create mode 100644 automerge/src/external_types.rs create mode 100644 todo.adoc diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 80a3d65f..f3bbda98 100644 --- a/automerge-wasm/src/lib.rs 
+++ b/automerge-wasm/src/lib.rs @@ -8,6 +8,7 @@ use std::collections::{HashMap, HashSet}; use std::convert::TryFrom; use std::convert::TryInto; use std::fmt::Display; +use std::str::FromStr; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; @@ -149,7 +150,7 @@ impl Automerge { } pub fn keys(&mut self, obj: JsValue, heads: JsValue) -> Result { - let obj = self.import(obj)?; + let obj: automerge::ObjId = self.import(obj)?; let result = if let Some(heads) = get_heads(heads) { self.0.keys_at(obj, &heads) } else { @@ -162,7 +163,7 @@ impl Automerge { } pub fn text(&mut self, obj: JsValue, heads: JsValue) -> Result { - let obj = self.import(obj)?; + let obj: automerge::ObjId = self.import(obj)?; if let Some(heads) = get_heads(heads) { self.0.text_at(obj, &heads) } else { @@ -179,7 +180,7 @@ impl Automerge { delete_count: JsValue, text: JsValue, ) -> Result<(), JsValue> { - let obj = self.import(obj)?; + let obj: automerge::ObjId = self.import(obj)?; let start = to_usize(start, "start")?; let delete_count = to_usize(delete_count, "deleteCount")?; let mut vals = vec![]; @@ -214,7 +215,7 @@ impl Automerge { value: JsValue, datatype: JsValue, ) -> Result { - let obj = self.import(obj)?; + let obj: automerge::ObjId = self.import(obj)?; //let key = self.insert_pos_for_index(&obj, prop)?; let index: Result<_, JsValue> = index .as_f64() @@ -235,7 +236,7 @@ impl Automerge { value: JsValue, datatype: JsValue, ) -> Result { - let obj = self.import(obj)?; + let obj: automerge::ObjId = self.import(obj)?; let prop = self.import_prop(prop)?; let value = self.import_value(value, datatype)?; let opid = self.0.set(obj, prop, value).map_err(to_js_err)?; @@ -246,7 +247,7 @@ impl Automerge { } pub fn inc(&mut self, obj: JsValue, prop: JsValue, value: JsValue) -> Result<(), JsValue> { - let obj = self.import(obj)?; + let obj: automerge::ObjId = self.import(obj)?; let prop = self.import_prop(prop)?; let value: f64 = value .as_f64() @@ -257,7 +258,7 @@ impl Automerge { } pub fn value(&mut 
self, obj: JsValue, prop: JsValue, heads: JsValue) -> Result { - let obj = self.import(obj)?; + let obj: automerge::ObjId = self.import(obj)?; let result = Array::new(); let prop = to_prop(prop); let heads = get_heads(heads); @@ -284,7 +285,7 @@ impl Automerge { } pub fn values(&mut self, obj: JsValue, arg: JsValue, heads: JsValue) -> Result { - let obj = self.import(obj)?; + let obj: automerge::ObjId = self.import(obj)?; let result = Array::new(); let prop = to_prop(arg); if let Ok(prop) = prop { @@ -316,7 +317,7 @@ impl Automerge { } pub fn length(&mut self, obj: JsValue, heads: JsValue) -> Result { - let obj = self.import(obj)?; + let obj: automerge::ObjId = self.import(obj)?; if let Some(heads) = get_heads(heads) { Ok((self.0.length_at(obj, &heads) as f64).into()) } else { @@ -325,7 +326,7 @@ impl Automerge { } pub fn del(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> { - let obj = self.import(obj)?; + let obj: automerge::ObjId = self.import(obj)?; let prop = to_prop(prop)?; self.0.del(obj, prop).map_err(to_js_err)?; Ok(()) @@ -442,16 +443,18 @@ impl Automerge { } } - fn export(&self, val: E) -> JsValue { - self.0.export(val).into() + fn export(&self, val: D) -> JsValue { + val.to_string().into() } - fn import(&self, id: JsValue) -> Result { - let id_str = id + fn import(&self, id: JsValue) -> Result + where F::Err: std::fmt::Display + { + id .as_string() - .ok_or("invalid opid/objid/elemid") - .map_err(to_js_err)?; - self.0.import(&id_str).map_err(to_js_err) + .ok_or("invalid opid/objid/elemid")? 
+ .parse::() + .map_err(to_js_err) } fn import_prop(&mut self, prop: JsValue) -> Result { diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 4d3984e5..80fe571c 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -7,8 +7,8 @@ use crate::decoding::{Decodable, InvalidChangeError}; use crate::encoding::{Encodable, DEFLATE_MIN_SIZE}; use crate::legacy as amp; use crate::{ - ActorId, AutomergeError, ElemId, IndexedCache, Key, ObjId, Op, OpId, OpType, Transaction, HEAD, - ROOT, + types::{ObjId, OpId}, + ActorId, AutomergeError, ElemId, IndexedCache, Key, Op, OpType, Transaction, HEAD, }; use core::ops::Range; use flate2::{ @@ -417,10 +417,9 @@ fn increment_range_map(ranges: &mut HashMap>, len: usize) { } fn export_objid(id: &ObjId, actors: &IndexedCache) -> amp::ObjectId { - if id.0 == ROOT { - amp::ObjectId::Root - } else { - export_opid(&id.0, actors).into() + match id { + ObjId::Root => amp::ObjectId::Root, + ObjId::Op(op) => export_opid(op, actors).into() } } @@ -433,7 +432,7 @@ fn export_elemid(id: &ElemId, actors: &IndexedCache) -> amp::ElementId } fn export_opid(id: &OpId, actors: &IndexedCache) -> amp::OpId { - amp::OpId(id.0, actors.get(id.1).clone()) + amp::OpId(id.counter(), actors.get(id.actor()).clone()) } fn export_op(op: &Op, actors: &IndexedCache, props: &IndexedCache) -> amp::Op { diff --git a/automerge/src/clock.rs b/automerge/src/clock.rs index 979885b3..7edab530 100644 --- a/automerge/src/clock.rs +++ b/automerge/src/clock.rs @@ -1,4 +1,4 @@ -use crate::OpId; +use crate::types::OpId; use fxhash::FxBuildHasher; use std::cmp; use std::collections::HashMap; @@ -19,8 +19,8 @@ impl Clock { } pub fn covers(&self, id: &OpId) -> bool { - if let Some(val) = self.0.get(&id.1) { - val >= &id.0 + if let Some(val) = self.0.get(&id.actor()) { + val >= &id.counter() } else { false } @@ -38,15 +38,15 @@ mod tests { clock.include(1, 20); clock.include(2, 10); - assert!(clock.covers(&OpId(10, 1))); - assert!(clock.covers(&OpId(20, 
1))); - assert!(!clock.covers(&OpId(30, 1))); + assert!(clock.covers(&OpId::new(10, 1))); + assert!(clock.covers(&OpId::new(20, 1))); + assert!(!clock.covers(&OpId::new(30, 1))); - assert!(clock.covers(&OpId(5, 2))); - assert!(clock.covers(&OpId(10, 2))); - assert!(!clock.covers(&OpId(15, 2))); + assert!(clock.covers(&OpId::new(5, 2))); + assert!(clock.covers(&OpId::new(10, 2))); + assert!(!clock.covers(&OpId::new(15, 2))); - assert!(!clock.covers(&OpId(1, 3))); - assert!(!clock.covers(&OpId(100, 3))); + assert!(!clock.covers(&OpId::new(1, 3))); + assert!(!clock.covers(&OpId::new(100, 3))); } } diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index 3a1df3cb..3339a68e 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -11,8 +11,7 @@ use std::{ str, }; -use crate::ROOT; -use crate::{ActorId, ElemId, Key, ObjId, ObjType, OpId, OpType, ScalarValue}; +use crate::{ActorId, ElemId, Key, ObjType, OpType, ScalarValue, types::{ObjId, OpId}}; use crate::legacy as amp; use amp::SortedVec; @@ -686,15 +685,15 @@ impl KeyEncoder { self.ctr.append_null(); self.str.append_value(props[i].clone()); } - Key::Seq(ElemId(OpId(0, 0))) => { + Key::Seq(ElemId(opid)) if opid.actor() == 0 && opid.counter() == 0 => { // HEAD self.actor.append_null(); self.ctr.append_value(0); self.str.append_null(); } - Key::Seq(ElemId(OpId(ctr, actor))) => { - self.actor.append_value(actors[actor]); - self.ctr.append_value(ctr); + Key::Seq(ElemId(opid)) => { + self.actor.append_value(actors[opid.actor()]); + self.ctr.append_value(opid.counter()); self.str.append_null(); } } @@ -773,8 +772,8 @@ impl SuccEncoder { fn append(&mut self, succ: &[OpId], actors: &[usize]) { self.num.append_value(succ.len()); for s in succ.iter() { - self.ctr.append_value(s.0); - self.actor.append_value(actors[s.1]); + self.ctr.append_value(s.counter()); + self.actor.append_value(actors[s.actor()]); } } @@ -845,14 +844,14 @@ impl ObjEncoder { } fn append(&mut self, obj: &ObjId, actors: 
&[usize]) { - match obj.0 { - ROOT => { + match obj { + ObjId::Root => { self.actor.append_null(); self.ctr.append_null(); } - OpId(ctr, actor) => { - self.actor.append_value(actors[actor]); - self.ctr.append_value(ctr); + ObjId::Op(opid) => { + self.actor.append_value(actors[opid.actor()]); + self.ctr.append_value(opid.counter()); } } } diff --git a/automerge/src/external_types.rs b/automerge/src/external_types.rs new file mode 100644 index 00000000..4411f3a9 --- /dev/null +++ b/automerge/src/external_types.rs @@ -0,0 +1,89 @@ +use std::{str::FromStr, borrow::Cow, fmt::Display}; + +use crate::{ActorId, types::OpId, op_tree::OpSetMetadata}; + +const ROOT_STR: &str = "_root"; + +#[derive(Copy, Debug, PartialEq, Clone, Hash, Eq)] +pub struct ExternalOpId<'a> { + counter: u64, + actor: Cow<'a, ActorId>, +} + +impl<'a> ExternalOpId<'a> { + pub(crate) fn from_internal(opid: OpId, metadata: &OpSetMetadata) -> Option { + metadata.actors.get_safe(opid.actor()).map(|actor| { + ExternalOpId{ + counter: opid.counter(), + actor: actor.into(), + } + }) + } + + pub(crate) fn into_opid(self, metadata: &mut OpSetMetadata) -> OpId { + let actor = metadata.actors.cache(self.actor); + OpId::new(self.counter, actor) + } +} + +#[derive(Debug, PartialEq, Clone, Hash, Eq)] +pub enum ExternalObjId<'a> { + Root, + Op(ExternalOpId<'a>), +} + +impl<'a> From> for ExternalObjId<'a> { + fn from(op: ExternalOpId) -> Self { + ExternalObjId::Op(op) + } +} + +#[derive(thiserror::Error, Debug)] +pub enum ParseError { + #[error("op IDs should have the format @")] + BadFormat, + #[error("the counter of an opid should be a positive integer")] + InvalidCounter, + #[error("the actor of an opid should be valid hex encoded bytes")] + InvalidActor, +} + +impl FromStr for ExternalOpId<'static> { + type Err = ParseError; + + fn from_str(s: &str) -> Result { + let mut parts = s.split("@"); + let first_part = parts.next().ok_or(ParseError::BadFormat)?; + let second_part = 
parts.next().ok_or(ParseError::BadFormat)?; + let counter: u64 = first_part.parse().map_err(|_| ParseError::InvalidCounter)?; + let actor: ActorId = second_part.parse().map_err(|_| ParseError::InvalidActor)?; + Ok(ExternalOpId{counter, actor}) + } +} + +impl<'a> FromStr for ExternalObjId<'a> { + type Err = ParseError; + + fn from_str(s: &str) -> Result { + if s == ROOT_STR { + Ok(ExternalObjId::Root) + } else { + Ok(s.parse::()?.into()) + } + } +} + +impl Display for ExternalOpId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}@{}", self.counter, self.actor) + } +} + +impl Display for ExternalObjId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Root => write!(f, "{}", ROOT_STR), + Self::Op(op) => write!(f, "{}", op), + } + } +} diff --git a/automerge/src/indexed_cache.rs b/automerge/src/indexed_cache.rs index 21ffd75b..43a0fa2c 100644 --- a/automerge/src/indexed_cache.rs +++ b/automerge/src/indexed_cache.rs @@ -43,6 +43,11 @@ where &self.cache[index] } + // Todo replace all uses of `get` with this + pub fn get_safe(&self, index: usize) -> Option<&T> { + self.cache.get(index) + } + pub fn sorted(&self) -> IndexedCache { let mut sorted = Self::new(); self.cache.iter().sorted().cloned().for_each(|item| { diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index c2595c68..92340fd5 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -45,13 +45,15 @@ mod op_tree; mod query; mod types; mod value; +mod external_types; +pub use external_types::{ExternalOpId as OpId, ExternalObjId as ObjId}; use change::{encode_document, export_change}; use clock::Clock; use indexed_cache::IndexedCache; use op_set::OpSet; use std::collections::{HashMap, HashSet, VecDeque}; -use types::{ElemId, Key, ObjId, Op, HEAD}; +use types::{ElemId, Key, ObjId as InternalObjId, Op, HEAD}; use unicode_segmentation::UnicodeSegmentation; pub use change::{decode_change, Change}; @@ -59,9 +61,9 @@ pub use 
error::AutomergeError; pub use legacy::Change as ExpandedChange; pub use sync::{BloomFilter, SyncHave, SyncMessage, SyncState}; pub use types::{ - ActorId, ChangeHash, Export, Exportable, Importable, ObjType, OpId, OpType, Patch, Peer, Prop, - ROOT, + ActorId, ChangeHash, ObjType, OpType, Patch, Peer, Prop, }; +use types::{OpId as InternalOpId, Export, Exportable, Importable}; pub use value::{ScalarValue, Value}; #[derive(Debug, Clone)] @@ -96,25 +98,25 @@ impl Automerge { pub fn set_actor(&mut self, actor: ActorId) { self.ensure_transaction_closed(); - self.actor = Some(self.ops.m.actors.cache(actor)) + self.actor = Some(self.ops.m.borrow_mut().actors.cache(actor)) } fn random_actor(&mut self) -> ActorId { let actor = ActorId::from(uuid::Uuid::new_v4().as_bytes().to_vec()); - self.actor = Some(self.ops.m.actors.cache(actor.clone())); + self.actor = Some(self.ops.m.borrow_mut().actors.cache(actor.clone())); actor } pub fn get_actor(&mut self) -> ActorId { if let Some(actor) = self.actor { - self.ops.m.actors[actor].clone() + self.ops.m.borrow().actors[actor].clone() } else { self.random_actor() } } pub fn maybe_get_actor(&self) -> Option { - self.actor.map(|i| self.ops.m.actors[i].clone()) + self.actor.map(|i| self.ops.m.borrow().actors[i].clone()) } fn get_actor_index(&mut self) -> usize { @@ -139,7 +141,7 @@ impl Automerge { max_op: 0, transaction: None, }; - am.actor = Some(am.ops.m.actors.cache(actor)); + am.actor = Some(am.ops.m.borrow_mut().actors.cache(actor)); am } @@ -199,7 +201,8 @@ impl Automerge { pub fn ensure_transaction_closed(&mut self) { if let Some(tx) = self.transaction.take() { - self.update_history(export_change(&tx, &self.ops.m.actors, &self.ops.m.props)); + let change = export_change(&tx, &self.ops.m.borrow_mut().actors, &self.ops.m.borrow().props); + self.update_history(change); } } @@ -224,9 +227,9 @@ impl Automerge { } } - fn next_id(&mut self) -> OpId { + fn next_id(&mut self) -> InternalOpId { let tx = self.tx(); - OpId(tx.start_op + 
tx.operations.len() as u64, tx.actor) + InternalOpId::new(tx.start_op + tx.operations.len() as u64, tx.actor) } fn insert_local_op(&mut self, op: Op, pos: usize, succ_pos: &[usize]) { @@ -262,22 +265,26 @@ impl Automerge { // PropAt::() // NthAt::() - pub fn keys(&self, obj: OpId) -> Vec { + pub fn keys>(&self, obj: O) -> Vec { + let obj = self.import_objid(obj.into()); let q = self.ops.search(obj.into(), query::Keys::new()); q.keys.iter().map(|k| self.export(*k)).collect() } - pub fn keys_at(&self, obj: OpId, heads: &[ChangeHash]) -> Vec { + pub fn keys_at>(&mut self, obj: O, heads: &[ChangeHash]) -> Vec { + let obj = self.import_objid(obj.into()); let clock = self.clock_at(heads); let q = self.ops.search(obj.into(), query::KeysAt::new(clock)); q.keys.iter().map(|k| self.export(*k)).collect() } - pub fn length(&self, obj: OpId) -> usize { + pub fn length>(&self, obj: O) -> usize { + let obj = self.import_objid(obj.into()); self.ops.search(obj.into(), query::Len::new(obj.into())).len } - pub fn length_at(&self, obj: OpId, heads: &[ChangeHash]) -> usize { + pub fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { + let obj = self.import_objid(obj.into()); let clock = self.clock_at(heads); self.ops.search(obj.into(), query::LenAt::new(clock)).len } @@ -300,23 +307,24 @@ impl Automerge { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - pub fn set, V: Into>( + pub fn set, P: Into, V: Into>( &mut self, - obj: OpId, + obj: O, prop: P, value: V, ) -> Result, AutomergeError> { let value = value.into(); + let obj = self.import_objid(obj.into()); self.local_op(obj.into(), prop.into(), value.into()) } - pub fn insert>( + pub fn insert, V: Into>( &mut self, - obj: OpId, + obj: O, index: usize, value: V, ) -> Result { - let obj = obj.into(); + let obj = self.import_objid(obj.into()).into(); let id = self.next_id(); let query = self.ops.search(obj, query::InsertNth::new(index)); @@ -338,16 
+346,16 @@ impl Automerge { self.ops.insert(query.pos, op.clone()); self.tx().operations.push(op); - Ok(id) + Ok(self.export_opid(id).unwrap()) } - pub fn inc>( + pub fn inc, P: Into>( &mut self, - obj: OpId, + obj: O, prop: P, value: i64, ) -> Result { - match self.local_op(obj.into(), prop.into(), OpType::Inc(value))? { + match self.local_op(self.import_objid(obj.into()).into(), prop.into(), OpType::Inc(value))? { Some(opid) => Ok(opid), None => { panic!("increment should always create a new op") @@ -355,9 +363,9 @@ impl Automerge { } } - pub fn del>(&mut self, obj: OpId, prop: P) -> Result { + pub fn del, P: Into>(&mut self, obj: O, prop: P) -> Result { // TODO: Should we also no-op multiple delete operations? - match self.local_op(obj.into(), prop.into(), OpType::Del)? { + match self.local_op(self.import_objid(obj.into()).into(), prop.into(), OpType::Del)? { Some(opid) => Ok(opid), None => { panic!("delete should always create a new op") @@ -367,27 +375,28 @@ impl Automerge { /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert /// the new elements - pub fn splice( + pub fn splice>( &mut self, - obj: OpId, + obj: O, mut pos: usize, del: usize, vals: Vec, ) -> Result, AutomergeError> { + let obj = obj.into(); for _ in 0..del { - self.del(obj, pos)?; + self.del(obj.clone(), pos)?; } let mut result = Vec::with_capacity(vals.len()); for v in vals { - result.push(self.insert(obj, pos, v)?); + result.push(self.insert(obj.clone(), pos, v)?); pos += 1; } Ok(result) } - pub fn splice_text( + pub fn splice_text>( &mut self, - obj: OpId, + obj: O, pos: usize, del: usize, text: &str, @@ -399,8 +408,8 @@ impl Automerge { self.splice(obj, pos, del, vals) } - pub fn text(&self, obj: OpId) -> Result { - let obj = obj.into(); + pub fn text>(&self, obj: O) -> Result { + let obj = self.import_objid(obj.into()).into(); let query = self.ops.search(obj, query::ListVals::new(obj)); let mut buffer = String::new(); for q in &query.ops { @@ -411,9 +420,9 @@ impl Automerge { Ok(buffer) } - pub fn text_at(&self, obj: OpId, heads: &[ChangeHash]) -> Result { + pub fn text_at>(&self, obj: O, heads: &[ChangeHash]) -> Result { let clock = self.clock_at(heads); - let obj = obj.into(); + let obj = self.import_objid(obj.into()).into(); let query = self.ops.search(obj, query::ListValsAt::new(clock)); let mut buffer = String::new(); for q in &query.ops { @@ -427,38 +436,38 @@ impl Automerge { // TODO - I need to return these OpId's here **only** to get // the legacy conflicts format of { [opid]: value } // Something better? 
- pub fn value>( + pub fn value, P: Into>( &self, - obj: OpId, + obj: O, prop: P, ) -> Result, AutomergeError> { Ok(self.values(obj, prop.into())?.first().cloned()) } - pub fn value_at>( + pub fn value_at, P: Into>( &self, - obj: OpId, + obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { Ok(self.values_at(obj, prop, heads)?.first().cloned()) } - pub fn values>( + pub fn values, P: Into>( &self, - obj: OpId, + obj: O, prop: P, ) -> Result, AutomergeError> { - let obj = obj.into(); + let obj = self.import_objid(obj.into()).into(); let result = match prop.into() { Prop::Map(p) => { - let prop = self.ops.m.props.lookup(p); + let prop = self.ops.m.borrow().props.lookup(p); if let Some(p) = prop { self.ops .search(obj, query::Prop::new(obj, p)) .ops .into_iter() - .map(|o| o.into()) + .map(|o| self.labelled_value(&o)) .collect() } else { vec![] @@ -469,30 +478,30 @@ impl Automerge { .search(obj, query::Nth::new(n)) .ops .into_iter() - .map(|o| o.into()) + .map(|o| self.labelled_value(&o)) .collect(), }; Ok(result) } - pub fn values_at>( + pub fn values_at, P: Into>( &self, - obj: OpId, + obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { let prop = prop.into(); - let obj = obj.into(); + let obj = self.import_objid(obj.into()).into(); let clock = self.clock_at(heads); let result = match prop { Prop::Map(p) => { - let prop = self.ops.m.props.lookup(p); + let prop = self.ops.m.borrow().props.lookup(p); if let Some(p) = prop { self.ops .search(obj, query::PropAt::new(p, clock)) .ops .into_iter() - .map(|o| o.into()) + .map(|o| self.labelled_value(&o)) .collect() } else { vec![] @@ -503,7 +512,7 @@ impl Automerge { .search(obj, query::NthAt::new(n, clock)) .ops .into_iter() - .map(|o| o.into()) + .map(|o| self.labelled_value(&o)) .collect(), }; Ok(result) @@ -552,7 +561,7 @@ impl Automerge { fn local_op( &mut self, - obj: ObjId, + obj: InternalObjId, prop: Prop, action: OpType, ) -> Result, AutomergeError> { @@ -564,7 +573,7 @@ impl 
Automerge { fn local_map_op( &mut self, - obj: ObjId, + obj: InternalObjId, prop: String, action: OpType, ) -> Result, AutomergeError> { @@ -573,7 +582,7 @@ impl Automerge { } let id = self.next_id(); - let prop = self.ops.m.props.cache(prop); + let prop = self.ops.m.borrow_mut().props.cache(prop); let query = self.ops.search(obj, query::Prop::new(obj, prop)); match (&query.ops[..], &action) { @@ -606,12 +615,12 @@ impl Automerge { self.insert_local_op(op, query.pos, &query.ops_pos); - Ok(Some(id)) + Ok(Some(self.export_opid(id).unwrap())) } fn local_list_op( &mut self, - obj: ObjId, + obj: InternalObjId, index: usize, action: OpType, ) -> Result, AutomergeError> { @@ -649,7 +658,7 @@ impl Automerge { self.insert_local_op(op, query.pos, &query.ops_pos); - Ok(Some(id)) + Ok(Some(self.export_opid(id).unwrap())) } fn is_causally_ready(&self, change: &Change) -> bool { @@ -675,17 +684,17 @@ impl Automerge { .iter_ops() .enumerate() .map(|(i, c)| { - let actor = self.ops.m.actors.cache(change.actor_id().clone()); - let id = OpId(change.start_op + i as u64, actor); + let actor = self.ops.m.borrow_mut().actors.cache(change.actor_id().clone()); + let id = InternalOpId::new(change.start_op + i as u64, actor); // FIXME dont need to_string() - let obj: ObjId = self.import(&c.obj.to_string()).unwrap(); + let obj: InternalObjId = self.import(&c.obj.to_string()).unwrap(); let pred = c .pred .iter() .map(|i| self.import(&i.to_string()).unwrap()) .collect(); let key = match &c.key { - legacy::Key::Map(n) => Key::Map(self.ops.m.props.cache(n.to_string())), + legacy::Key::Map(n) => Key::Map(self.ops.m.borrow_mut().props.cache(n.to_string())), legacy::Key::Seq(legacy::ElementId::Head) => Key::Seq(HEAD), // FIXME dont need to_string() legacy::Key::Seq(legacy::ElementId::Id(i)) => { @@ -727,8 +736,8 @@ impl Automerge { let bytes = encode_document( &c, ops.as_slice(), - &self.ops.m.actors, - &self.ops.m.props.cache, + &self.ops.m.borrow().actors, + &self.ops.m.borrow().props.cache, ); 
if bytes.is_ok() { self.saved = self.get_heads().iter().copied().collect(); @@ -899,7 +908,7 @@ impl Automerge { pub fn get_last_local_change(&mut self) -> Option<&Change> { self.ensure_transaction_closed(); if let Some(actor) = &self.actor { - let actor = &self.ops.m.actors[*actor]; + let actor = &self.ops.m.borrow().actors[*actor]; return self.history.iter().rev().find(|c| c.actor_id() == actor); } None @@ -930,7 +939,7 @@ impl Automerge { to_see.push(*h); } } - let actor = self.ops.m.actors.lookup(c.actor_id().clone()).unwrap(); + let actor = self.ops.m.borrow().actors.lookup(c.actor_id().clone()).unwrap(); clock.include(actor, c.max_op()); seen.insert(hash); } @@ -1006,7 +1015,7 @@ impl Automerge { let history_index = self.history.len(); self.states - .entry(self.ops.m.actors.cache(change.actor_id().clone())) + .entry(self.ops.m.borrow_mut().actors.cache(change.actor_id().clone())) .or_default() .push(history_index); @@ -1023,7 +1032,7 @@ impl Automerge { self.deps.insert(change.hash); } - pub fn import(&self, s: &str) -> Result { + pub(crate) fn import(&self, s: &str) -> Result { if let Some(x) = I::from(s) { Ok(x) } else { @@ -1037,17 +1046,36 @@ impl Automerge { let actor = self .ops .m + .borrow() .actors .lookup(actor) .ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?; - Ok(I::wrap(OpId(counter, actor))) + Ok(I::wrap(InternalOpId::new(counter, actor))) } } - pub fn export(&self, id: E) -> String { + fn import_opid(&self, opid: &OpId) -> InternalOpId { + opid.into_opid(&mut *self.ops.m.borrow_mut()) + } + + fn export_opid(&self, opid: InternalOpId) -> Option { + OpId::from_internal(opid, &self.ops.m.borrow_mut()) + } + + fn import_objid>(&self, objid: O) -> InternalObjId { + match objid.as_ref() { + ObjId::Root => InternalObjId::Root, + ObjId::Op(external_op) => { + let op = self.import_opid(external_op); + InternalObjId::Op(op) + } + } + } + + pub(crate) fn export(&self, id: E) -> String { match id.export() { - Export::Id(id) => format!("{}@{}", 
id.counter(), self.ops.m.actors[id.actor()]), - Export::Prop(index) => self.ops.m.props[index].clone(), + Export::Id(id) => format!("{}@{}", id.counter(), self.ops.m.borrow().actors[id.actor()]), + Export::Prop(index) => self.ops.m.borrow().props[index].clone(), Export::Special(s) => s, } } @@ -1066,7 +1094,7 @@ impl Automerge { let id = self.export(i.id); let obj = self.export(i.obj); let key = match i.key { - Key::Map(n) => self.ops.m.props[n].clone(), + Key::Map(n) => self.ops.m.borrow().props[n].clone(), Key::Seq(n) => self.export(n), }; let value: String = match &i.action { @@ -1093,6 +1121,16 @@ impl Automerge { pub fn visualise_optree(&self) -> String { self.ops.visualise() } + + fn labelled_value(&self, op: &Op) -> (Value, OpId) { + let id = self.export_opid(op.id).unwrap(); + let value = match &op.action { + OpType::Make(obj_type) => Value::Object(*obj_type), + OpType::Set(scalar) => Value::Scalar(scalar.clone()), + _ => panic!("expected a make or set op"), + }; + (value, id) + } } #[derive(Debug, Clone)] @@ -1123,9 +1161,9 @@ mod tests { fn insert_op() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); - doc.set(ROOT, "hello", "world")?; + doc.set(ObjId::Root, "hello", "world")?; assert!(doc.pending_ops() == 1); - doc.value(ROOT, "hello")?; + doc.value(ObjId::Root, "hello")?; Ok(()) } @@ -1133,17 +1171,17 @@ mod tests { fn test_list() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); - let list_id = doc.set(ROOT, "items", Value::list())?.unwrap(); - doc.set(ROOT, "zzz", "zzzval")?; - assert!(doc.value(ROOT, "items")?.unwrap().1 == list_id); + let list_id = doc.set(ObjId::Root, "items", Value::list())?.unwrap().into(); + doc.set(ObjId::Root, "zzz", "zzzval")?; + assert!(doc.value(ObjId::Root, "items")?.unwrap().1 == list_id); doc.insert(list_id, 0, "a")?; doc.insert(list_id, 0, "b")?; doc.insert(list_id, 2, "c")?; doc.insert(list_id, 1, "d")?; - 
assert!(doc.value(list_id, 0)?.unwrap().0 == "b".into()); - assert!(doc.value(list_id, 1)?.unwrap().0 == "d".into()); - assert!(doc.value(list_id, 2)?.unwrap().0 == "a".into()); - assert!(doc.value(list_id, 3)?.unwrap().0 == "c".into()); + assert!(doc.value(list_id.clone(), 0)?.unwrap().0 == "b".into()); + assert!(doc.value(list_id.clone(), 1)?.unwrap().0 == "d".into()); + assert!(doc.value(list_id.clone(), 2)?.unwrap().0 == "a".into()); + assert!(doc.value(list_id.clone(), 3)?.unwrap().0 == "c".into()); assert!(doc.length(list_id) == 4); doc.save()?; Ok(()) @@ -1153,22 +1191,22 @@ mod tests { fn test_del() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); - doc.set(ROOT, "xxx", "xxx")?; - assert!(!doc.values(ROOT, "xxx")?.is_empty()); - doc.del(ROOT, "xxx")?; - assert!(doc.values(ROOT, "xxx")?.is_empty()); + doc.set(ObjId::Root, "xxx", "xxx")?; + assert!(!doc.values(ObjId::Root, "xxx")?.is_empty()); + doc.del(ObjId::Root, "xxx")?; + assert!(doc.values(ObjId::Root, "xxx")?.is_empty()); Ok(()) } #[test] fn test_inc() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); - let id = doc.set(ROOT, "counter", Value::counter(10))?.unwrap(); - assert!(doc.value(ROOT, "counter")? == Some((Value::counter(10), id))); - doc.inc(ROOT, "counter", 10)?; - assert!(doc.value(ROOT, "counter")? == Some((Value::counter(20), id))); - doc.inc(ROOT, "counter", -5)?; - assert!(doc.value(ROOT, "counter")? == Some((Value::counter(15), id))); + let id = doc.set(ObjId::Root, "counter", Value::counter(10))?.unwrap(); + assert!(doc.value(ObjId::Root, "counter")? == Some((Value::counter(10), id))); + doc.inc(ObjId::Root, "counter", 10)?; + assert!(doc.value(ObjId::Root, "counter")? == Some((Value::counter(20), id))); + doc.inc(ObjId::Root, "counter", -5)?; + assert!(doc.value(ObjId::Root, "counter")? 
== Some((Value::counter(15), id))); Ok(()) } @@ -1176,15 +1214,15 @@ mod tests { fn test_save_incremental() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); - doc.set(ROOT, "foo", 1)?; + doc.set(ObjId::Root, "foo", 1)?; let save1 = doc.save().unwrap(); - doc.set(ROOT, "bar", 2)?; + doc.set(ObjId::Root, "bar", 2)?; let save2 = doc.save_incremental(); - doc.set(ROOT, "baz", 3)?; + doc.set(ObjId::Root, "baz", 3)?; let save3 = doc.save_incremental(); @@ -1202,7 +1240,7 @@ mod tests { let mut doc_a = Automerge::load(&save_a)?; let mut doc_b = Automerge::load(&save_b)?; - assert!(doc_a.values(ROOT, "baz")? == doc_b.values(ROOT, "baz")?); + assert!(doc_a.values(ObjId::Root, "baz")? == doc_b.values(ObjId::Root, "baz")?); assert!(doc_a.save().unwrap() == doc_b.save().unwrap()); @@ -1212,7 +1250,7 @@ mod tests { #[test] fn test_save_text() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); - let text = doc.set(ROOT, "text", Value::text())?.unwrap(); + let text = doc.set(ObjId::Root, "text", Value::text())?.unwrap(); let heads1 = doc.commit(None, None); doc.splice_text(text, 0, 0, "hello world")?; let heads2 = doc.commit(None, None); @@ -1231,50 +1269,50 @@ mod tests { fn test_props_vals_at() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); doc.set_actor("aaaa".try_into().unwrap()); - doc.set(ROOT, "prop1", "val1")?; + doc.set(ObjId::Root, "prop1", "val1")?; doc.commit(None, None); let heads1 = doc.get_heads(); - doc.set(ROOT, "prop1", "val2")?; + doc.set(ObjId::Root, "prop1", "val2")?; doc.commit(None, None); let heads2 = doc.get_heads(); - doc.set(ROOT, "prop2", "val3")?; + doc.set(ObjId::Root, "prop2", "val3")?; doc.commit(None, None); let heads3 = doc.get_heads(); - doc.del(ROOT, "prop1")?; + doc.del(ObjId::Root, "prop1")?; doc.commit(None, None); let heads4 = doc.get_heads(); - doc.set(ROOT, "prop3", "val4")?; + doc.set(ObjId::Root, "prop3", "val4")?; doc.commit(None, None); let heads5 = doc.get_heads(); - 
assert!(doc.keys_at(ROOT, &heads1) == vec!["prop1".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); - assert!(doc.value_at(ROOT, "prop2", &heads1)? == None); - assert!(doc.value_at(ROOT, "prop3", &heads1)? == None); + assert!(doc.keys_at(ObjId::Root, &heads1) == vec!["prop1".to_owned()]); + assert!(doc.value_at(ObjId::Root, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); + assert!(doc.value_at(ObjId::Root, "prop2", &heads1)? == None); + assert!(doc.value_at(ObjId::Root, "prop3", &heads1)? == None); - assert!(doc.keys_at(ROOT, &heads2) == vec!["prop1".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); - assert!(doc.value_at(ROOT, "prop2", &heads2)? == None); - assert!(doc.value_at(ROOT, "prop3", &heads2)? == None); + assert!(doc.keys_at(ObjId::Root, &heads2) == vec!["prop1".to_owned()]); + assert!(doc.value_at(ObjId::Root, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); + assert!(doc.value_at(ObjId::Root, "prop2", &heads2)? == None); + assert!(doc.value_at(ObjId::Root, "prop3", &heads2)? == None); - assert!(doc.keys_at(ROOT, &heads3) == vec!["prop1".to_owned(), "prop2".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); - assert!(doc.value_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(ROOT, "prop3", &heads3)? == None); + assert!(doc.keys_at(ObjId::Root, &heads3) == vec!["prop1".to_owned(), "prop2".to_owned()]); + assert!(doc.value_at(ObjId::Root, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); + assert!(doc.value_at(ObjId::Root, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(ObjId::Root, "prop3", &heads3)? == None); - assert!(doc.keys_at(ROOT, &heads4) == vec!["prop2".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads4)? 
== None); - assert!(doc.value_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(ROOT, "prop3", &heads4)? == None); + assert!(doc.keys_at(ObjId::Root, &heads4) == vec!["prop2".to_owned()]); + assert!(doc.value_at(ObjId::Root, "prop1", &heads4)? == None); + assert!(doc.value_at(ObjId::Root, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(ObjId::Root, "prop3", &heads4)? == None); - assert!(doc.keys_at(ROOT, &heads5) == vec!["prop2".to_owned(), "prop3".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads5)? == None); - assert!(doc.value_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); + assert!(doc.keys_at(ObjId::Root, &heads5) == vec!["prop2".to_owned(), "prop3".to_owned()]); + assert!(doc.value_at(ObjId::Root, "prop1", &heads5)? == None); + assert!(doc.value_at(ObjId::Root, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(ObjId::Root, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); - assert!(doc.keys_at(ROOT, &[]).is_empty()); - assert!(doc.value_at(ROOT, "prop1", &[])? == None); - assert!(doc.value_at(ROOT, "prop2", &[])? == None); - assert!(doc.value_at(ROOT, "prop3", &[])? == None); + assert!(doc.keys_at(ObjId::Root, &[]).is_empty()); + assert!(doc.value_at(ObjId::Root, "prop1", &[])? == None); + assert!(doc.value_at(ObjId::Root, "prop2", &[])? == None); + assert!(doc.value_at(ObjId::Root, "prop3", &[])? 
== None); Ok(()) } @@ -1283,7 +1321,7 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor("aaaa".try_into().unwrap()); - let list = doc.set(ROOT, "list", Value::list())?.unwrap(); + let list = doc.set(ObjId::Root, "list", Value::list())?.unwrap(); let heads1 = doc.commit(None, None); doc.insert(list, 0, Value::int(10))?; diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 537cb80f..923b8cad 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -1,9 +1,11 @@ use crate::op_tree::OpTreeInternal; use crate::query::TreeQuery; -use crate::{ActorId, IndexedCache, Key, ObjId, Op, OpId}; +use crate::{ActorId, IndexedCache, Key, types::{ObjId, OpId}, Op}; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::HashMap; +use std::rc::Rc; +use std::cell::RefCell; pub(crate) type OpSet = OpSetInternal<16>; @@ -12,7 +14,7 @@ pub(crate) struct OpSetInternal { trees: HashMap, FxBuildHasher>, objs: Vec, length: usize, - pub m: OpSetMetadata, + pub m: Rc>, } impl OpSetInternal { @@ -21,10 +23,10 @@ impl OpSetInternal { trees: Default::default(), objs: Default::default(), length: 0, - m: OpSetMetadata { + m: Rc::new(RefCell::new(OpSetMetadata { actors: IndexedCache::new(), props: IndexedCache::new(), - }, + })), } } @@ -41,7 +43,7 @@ impl OpSetInternal { Q: TreeQuery, { if let Some(tree) = self.trees.get(&obj) { - tree.search(query, &self.m) + tree.search(query, &*self.m.borrow()) } else { query } @@ -83,7 +85,7 @@ impl OpSetInternal { .entry(element.obj) .or_insert_with(|| { let pos = objs - .binary_search_by(|probe| m.lamport_cmp(probe.0, element.obj.0)) + .binary_search_by(|probe| m.borrow().lamport_cmp(probe, &element.obj)) .unwrap_err(); objs.insert(pos, element.obj); Default::default() @@ -162,14 +164,42 @@ impl OpSetMetadata { } } - pub fn lamport_cmp(&self, left: OpId, right: OpId) -> Ordering { - match (left, right) { - (OpId(0, _), OpId(0, _)) => Ordering::Equal, - (OpId(0, _), OpId(_, _)) => Ordering::Less, - 
(OpId(_, _), OpId(0, _)) => Ordering::Greater, - // FIXME - this one seems backwards to me - why - is values() returning in the wrong order? - (OpId(a, x), OpId(b, y)) if a == b => self.actors[y].cmp(&self.actors[x]), - (OpId(a, _), OpId(b, _)) => a.cmp(&b), + pub fn lamport_cmp(&self, left: S, right: S) -> Ordering { + S::cmp(self, left, right) + } +} + +/// Lamport timestamps which don't contain their actor ID directly and therefore need access to +/// some metadata to compare their actor ID parts +pub(crate) trait SuccinctLamport { + fn cmp(m: &OpSetMetadata, left: Self, right: Self) -> Ordering; +} + +impl SuccinctLamport for OpId { + fn cmp(m: &OpSetMetadata, left: Self, right: Self) -> Ordering { + match (left.counter(), right.counter()) { + (0, 0) => Ordering::Equal, + (0, _) => Ordering::Less, + (_, 0) => Ordering::Greater, + (a, b) if a == b => m.actors[right.actor()].cmp(&m.actors[left.actor()]), + (a, b) => a.cmp(&b), } } } + +impl SuccinctLamport for ObjId { + fn cmp(m: &OpSetMetadata, left: Self, right: Self) -> Ordering { + match (left, right) { + (ObjId::Root, ObjId::Root) => Ordering::Equal, + (ObjId::Root, ObjId::Op(_)) => Ordering::Less, + (ObjId::Op(_), ObjId::Root) => Ordering::Greater, + (ObjId::Op(left_op), ObjId::Op(right_op)) => ::cmp(m, left_op, right_op), + } + } +} + +impl SuccinctLamport for &ObjId { + fn cmp(m: &OpSetMetadata, left: Self, right: Self) -> Ordering { + ::cmp(m, *left, *right) + } +} diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 6142a7bf..a9d72353 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -6,7 +6,7 @@ use std::{ pub(crate) use crate::op_set::OpSetMetadata; use crate::query::{Index, QueryResult, TreeQuery}; -use crate::{Op, OpId}; +use crate::types::{Op, OpId}; use std::collections::HashSet; #[allow(dead_code)] @@ -628,12 +628,12 @@ struct CounterData { #[cfg(test)] mod tests { use crate::legacy as amp; - use crate::{Op, OpId}; + use crate::types::{Op, OpId}; use 
super::*; fn op(n: usize) -> Op { - let zero = OpId(0, 0); + let zero = OpId::new(0, 0); Op { change: n, id: zero, diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 15ac6fd6..662ad8ae 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -1,5 +1,5 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::{Clock, ElemId, Op, OpId, OpType, ScalarValue}; +use crate::{Clock, ElemId, Op, ScalarValue, types::{OpId, OpType}}; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::{HashMap, HashSet}; diff --git a/automerge/src/query/len.rs b/automerge/src/query/len.rs index 494b3515..b73e804a 100644 --- a/automerge/src/query/len.rs +++ b/automerge/src/query/len.rs @@ -1,6 +1,6 @@ use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery}; -use crate::ObjId; +use crate::types::ObjId; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/list_vals.rs b/automerge/src/query/list_vals.rs index c19ac4ad..aec8f878 100644 --- a/automerge/src/query/list_vals.rs +++ b/automerge/src/query/list_vals.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, is_visible, visible_op, QueryResult, TreeQuery}; -use crate::{ElemId, ObjId, Op}; +use crate::{ElemId, types::ObjId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -26,7 +26,7 @@ impl TreeQuery for ListVals { child: &OpTreeNode, m: &OpSetMetadata, ) -> QueryResult { - let start = binary_search_by(child, |op| m.lamport_cmp(op.obj.0, self.obj.0)); + let start = binary_search_by(child, |op| m.lamport_cmp(op.obj, self.obj)); let mut counters = Default::default(); for pos in start..child.len() { let op = child.get(pos).unwrap(); diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs index ac4b2bca..aa125c9d 100644 --- a/automerge/src/query/prop.rs +++ b/automerge/src/query/prop.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use 
crate::query::{binary_search_by, is_visible, visible_op, QueryResult, TreeQuery}; -use crate::{Key, ObjId, Op}; +use crate::{Key, types::ObjId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -31,7 +31,7 @@ impl TreeQuery for Prop { m: &OpSetMetadata, ) -> QueryResult { let start = binary_search_by(child, |op| { - m.lamport_cmp(op.obj.0, self.obj.0) + m.lamport_cmp(op.obj, self.obj) .then_with(|| m.key_cmp(&op.key, &self.key)) }); let mut counters = Default::default(); diff --git a/automerge/src/types.rs b/automerge/src/types.rs index f00beed3..4adcf393 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -10,7 +10,6 @@ use std::str::FromStr; use tinyvec::{ArrayVec, TinyVec}; pub(crate) const HEAD: ElemId = ElemId(OpId(0, 0)); -pub const ROOT: OpId = OpId(0, 0); const ROOT_STR: &str = "_root"; const HEAD_STR: &str = "_head"; @@ -161,17 +160,17 @@ pub enum OpType { } #[derive(Debug)] -pub enum Export { +pub(crate) enum Export { Id(OpId), Special(String), Prop(usize), } -pub trait Exportable { +pub(crate) trait Exportable { fn export(&self) -> Export; } -pub trait Importable { +pub(crate) trait Importable { fn wrap(id: OpId) -> Self; fn from(s: &str) -> Option where @@ -179,33 +178,32 @@ pub trait Importable { } impl OpId { + pub(crate) fn new(counter: u64, actor: usize) -> OpId { + OpId(counter, actor) + } + #[inline] pub fn counter(&self) -> u64 { self.0 } #[inline] - pub fn actor(&self) -> usize { + pub(crate) fn actor(&self) -> usize { self.1 } } impl Exportable for ObjId { fn export(&self) -> Export { - if self.0 == ROOT { - Export::Special(ROOT_STR.to_owned()) - } else { - Export::Id(self.0) + match self { + ObjId::Root => Export::Special(ROOT_STR.to_owned()), + ObjId::Op(o) => Export::Id(*o) } } } impl Exportable for &ObjId { fn export(&self) -> Export { - if self.0 == ROOT { - Export::Special(ROOT_STR.to_owned()) - } else { - Export::Id(self.0) - } + (*self).export() } } @@ -236,11 +234,11 @@ impl Exportable for Key { impl 
Importable for ObjId { fn wrap(id: OpId) -> Self { - ObjId(id) + ObjId::Op(id) } fn from(s: &str) -> Option { if s == ROOT_STR { - Some(ROOT.into()) + Some(ObjId::Root) } else { None } @@ -251,12 +249,8 @@ impl Importable for OpId { fn wrap(id: OpId) -> Self { id } - fn from(s: &str) -> Option { - if s == ROOT_STR { - Some(ROOT) - } else { - None - } + fn from(_s: &str) -> Option { + None } } @@ -275,7 +269,10 @@ impl Importable for ElemId { impl From for ObjId { fn from(o: OpId) -> Self { - ObjId(o) + match (o.counter(), o.actor()) { + (0,0) => ObjId::Root, + (_,_) => ObjId::Op(o), + } } } @@ -352,10 +349,19 @@ impl Key { } #[derive(Debug, Clone, PartialOrd, Ord, Eq, PartialEq, Copy, Hash, Default)] -pub struct OpId(pub u64, pub usize); +pub(crate) struct OpId(u64, usize); -#[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] -pub(crate) struct ObjId(pub OpId); +#[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash)] +pub(crate) enum ObjId{ + Root, + Op(OpId), +} + +impl Default for ObjId { + fn default() -> Self { + Self::Root + } +} #[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] pub(crate) struct ElemId(pub OpId); diff --git a/automerge/src/value.rs b/automerge/src/value.rs index 333c1f53..2859073e 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -1,4 +1,4 @@ -use crate::{error, ObjType, Op, OpId, OpType}; +use crate::{error, ObjType, Op, types::OpId, OpType}; use serde::{Deserialize, Serialize}; use smol_str::SmolStr; use std::convert::TryFrom; diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index d93a211b..40ee7faf 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -1,3 +1,5 @@ +use automerge::ObjId; + use std::{collections::HashMap, convert::TryInto, hash::Hash}; use serde::ser::{SerializeMap, SerializeSeq}; @@ -84,10 +86,9 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { #[macro_export] 
macro_rules! assert_doc { ($doc: expr, $expected: expr) => {{ - use $crate::helpers::{realize, ExportableOpId}; + use $crate::helpers::realize; let realized = realize($doc); - let to_export: RealizedObject> = $expected.into(); - let exported = to_export.export($doc); + let exported: RealizedObject = $expected.into(); if realized != exported { let serde_right = serde_json::to_string_pretty(&realized).unwrap(); let serde_left = serde_json::to_string_pretty(&exported).unwrap(); @@ -105,10 +106,9 @@ macro_rules! assert_doc { #[macro_export] macro_rules! assert_obj { ($doc: expr, $obj_id: expr, $prop: expr, $expected: expr) => {{ - use $crate::helpers::{realize_prop, ExportableOpId}; + use $crate::helpers::realize_prop; let realized = realize_prop($doc, $obj_id, $prop); - let to_export: RealizedObject> = $expected.into(); - let exported = to_export.export($doc); + let exported: RealizedObject = $expected.into(); if realized != exported { let serde_right = serde_json::to_string_pretty(&realized).unwrap(); let serde_left = serde_json::to_string_pretty(&exported).unwrap(); @@ -145,7 +145,7 @@ macro_rules! map { (@inner { $($opid:expr => $value:expr),* }) => { { use std::collections::HashMap; - let mut inner: HashMap, RealizedObject>> = HashMap::new(); + let mut inner: HashMap = HashMap::new(); $( let _ = inner.insert($opid.into(), $value.into()); )* @@ -159,9 +159,8 @@ macro_rules! map { ($($key:expr => $inner:tt),*) => { { use std::collections::HashMap; - use crate::helpers::ExportableOpId; let _cap = map!(@count $($key),*); - let mut _map: HashMap, RealizedObject>>> = ::std::collections::HashMap::with_capacity(_cap); + let mut _map: HashMap> = ::std::collections::HashMap::with_capacity(_cap); $( let inner = map!(@inner $inner); let _ = _map.insert($key.to_string(), inner); @@ -194,7 +193,7 @@ macro_rules! 
list { (@inner { $($opid:expr => $value:expr),* }) => { { use std::collections::HashMap; - let mut inner: HashMap, RealizedObject>> = HashMap::new(); + let mut inner: HashMap = HashMap::new(); $( let _ = inner.insert($opid.into(), $value.into()); )* @@ -204,9 +203,8 @@ macro_rules! list { ($($inner:tt,)+) => { list!($($inner),+) }; ($($inner:tt),*) => { { - use crate::helpers::ExportableOpId; let _cap = list!(@count $($inner),*); - let mut _list: Vec, RealizedObject>>> = Vec::new(); + let mut _list: Vec> = Vec::new(); $( //println!("{}", stringify!($inner)); let inner = list!(@inner $inner); @@ -217,26 +215,6 @@ macro_rules! list { } } -/// Translate an op ID produced by one document to an op ID which can be understood by -/// another -/// -/// The current API of automerge exposes OpIds of the form (u64, usize) where the first component -/// is the counter of an actors lamport timestamp and the second component is the index into an -/// array of actor IDs stored by the document where the opid was generated. Obviously this is not -/// portable between documents as the index of the actor array is unlikely to match between two -/// documents. This function translates between the two representations. -/// -/// At some point we will probably change the API to not be document specific but this function -/// allows us to write tests first. -pub fn translate_obj_id( - from: &automerge::Automerge, - to: &automerge::Automerge, - id: automerge::OpId, -) -> automerge::OpId { - let exported = from.export(id); - to.import(&exported).unwrap() -} - pub fn mk_counter(value: i64) -> automerge::ScalarValue { automerge::ScalarValue::Counter(value) } @@ -253,13 +231,13 @@ impl std::fmt::Display for ExportedOpId { /// A `RealizedObject` is a representation of all the current values in a document - including /// conflicts. 
#[derive(PartialEq, Debug)] -pub enum RealizedObject { - Map(HashMap>>), - Sequence(Vec>>), +pub enum RealizedObject { + Map(HashMap>), + Sequence(Vec>), Value(automerge::ScalarValue), } -impl serde::Serialize for RealizedObject { +impl serde::Serialize for RealizedObject { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -271,7 +249,7 @@ impl serde::Serialize for RealizedObject { let kvs_serded = kvs .iter() .map(|(opid, value)| (opid.to_string(), value)) - .collect::>>(); + .collect::>(); map_ser.serialize_entry(k, &kvs_serded)?; } map_ser.end() @@ -282,7 +260,7 @@ impl serde::Serialize for RealizedObject { let kvs_serded = elem .iter() .map(|(opid, value)| (opid.to_string(), value)) - .collect::>>(); + .collect::>(); list_ser.serialize_element(&kvs_serded)?; } list_ser.end() @@ -292,40 +270,40 @@ impl serde::Serialize for RealizedObject { } } -pub fn realize(doc: &automerge::Automerge) -> RealizedObject { - realize_obj(doc, automerge::ROOT, automerge::ObjType::Map) +pub fn realize(doc: &automerge::Automerge) -> RealizedObject { + realize_obj(doc, ObjId::Root, automerge::ObjType::Map) } pub fn realize_prop>( doc: &automerge::Automerge, - obj_id: automerge::OpId, + obj_id: automerge::ObjId, prop: P, -) -> RealizedObject { +) -> RealizedObject { let (val, obj_id) = doc.value(obj_id, prop).unwrap().unwrap(); match val { - automerge::Value::Object(obj_type) => realize_obj(doc, obj_id, obj_type), + automerge::Value::Object(obj_type) => realize_obj(doc, obj_id.into(), obj_type), automerge::Value::Scalar(v) => RealizedObject::Value(v), } } pub fn realize_obj( doc: &automerge::Automerge, - obj_id: automerge::OpId, + obj_id: automerge::ObjId, objtype: automerge::ObjType, -) -> RealizedObject { +) -> RealizedObject { match objtype { automerge::ObjType::Map | automerge::ObjType::Table => { let mut result = HashMap::new(); - for key in doc.keys(obj_id) { - result.insert(key.clone(), realize_values(doc, obj_id, key)); + for key in 
doc.keys(obj_id.clone()) { + result.insert(key.clone(), realize_values(doc, obj_id.clone(), key)); } RealizedObject::Map(result) } automerge::ObjType::List | automerge::ObjType::Text => { - let length = doc.length(obj_id); + let length = doc.length(obj_id.clone()); let mut result = Vec::with_capacity(length); for i in 0..length { - result.push(realize_values(doc, obj_id, i)); + result.push(realize_values(doc, obj_id.clone(), i)); } RealizedObject::Sequence(result) } @@ -334,55 +312,25 @@ pub fn realize_obj( fn realize_values>( doc: &automerge::Automerge, - obj_id: automerge::OpId, + obj_id: automerge::ObjId, key: K, -) -> HashMap> { - let mut values_by_opid = HashMap::new(); +) -> HashMap { + let mut values_by_objid: HashMap = HashMap::new(); for (value, opid) in doc.values(obj_id, key).unwrap() { let realized = match value { - automerge::Value::Object(objtype) => realize_obj(doc, opid, objtype), + automerge::Value::Object(objtype) => realize_obj(doc, opid.clone().into(), objtype), automerge::Value::Scalar(v) => RealizedObject::Value(v), }; - let exported_opid = ExportedOpId(doc.export(opid)); - values_by_opid.insert(exported_opid, realized); + values_by_objid.insert(opid.into(), realized); } - values_by_opid + values_by_objid } -impl<'a> RealizedObject> { - pub fn export(self, doc: &automerge::Automerge) -> RealizedObject { - match self { - Self::Map(kvs) => RealizedObject::Map( - kvs.into_iter() - .map(|(k, v)| { - ( - k, - v.into_iter() - .map(|(k, v)| (k.export(doc), v.export(doc))) - .collect(), - ) - }) - .collect(), - ), - Self::Sequence(values) => RealizedObject::Sequence( - values - .into_iter() - .map(|v| { - v.into_iter() - .map(|(k, v)| (k.export(doc), v.export(doc))) - .collect() - }) - .collect(), - ), - Self::Value(v) => RealizedObject::Value(v), - } - } -} -impl<'a, O: Into>, I: Into>>> - From>> for RealizedObject> +impl> + From>> for RealizedObject { - fn from(values: HashMap<&str, HashMap>) -> Self { + fn from(values: HashMap<&str, HashMap>) -> 
Self { let intoed = values .into_iter() .map(|(k, v)| { @@ -396,10 +344,10 @@ impl<'a, O: Into>, I: Into> } } -impl<'a, O: Into>, I: Into>>> - From>> for RealizedObject> +impl> + From>> for RealizedObject { - fn from(values: Vec>) -> Self { + fn from(values: Vec>) -> Self { RealizedObject::Sequence( values .into_iter() @@ -409,94 +357,31 @@ impl<'a, O: Into>, I: Into> } } -impl From for RealizedObject> { +impl From for RealizedObject { fn from(b: bool) -> Self { RealizedObject::Value(b.into()) } } -impl From for RealizedObject> { +impl From for RealizedObject { fn from(u: usize) -> Self { let v = u.try_into().unwrap(); RealizedObject::Value(automerge::ScalarValue::Int(v)) } } -impl From for RealizedObject> { +impl From for RealizedObject { fn from(s: automerge::ScalarValue) -> Self { RealizedObject::Value(s) } } -impl From<&str> for RealizedObject> { +impl From<&str> for RealizedObject { fn from(s: &str) -> Self { RealizedObject::Value(automerge::ScalarValue::Str(s.into())) } } -#[derive(Eq, PartialEq, Hash)] -pub enum ExportableOpId<'a> { - Native(automerge::OpId), - Translate(Translate<'a>), -} - -impl<'a> ExportableOpId<'a> { - fn export(self, doc: &automerge::Automerge) -> ExportedOpId { - let oid = match self { - Self::Native(oid) => oid, - Self::Translate(Translate { from, opid }) => translate_obj_id(from, doc, opid), - }; - ExportedOpId(doc.export(oid)) - } -} - -pub struct Translate<'a> { - from: &'a automerge::Automerge, - opid: automerge::OpId, -} - -impl<'a> PartialEq for Translate<'a> { - fn eq(&self, other: &Self) -> bool { - self.from.maybe_get_actor().unwrap() == other.from.maybe_get_actor().unwrap() - && self.opid == other.opid - } -} - -impl<'a> Eq for Translate<'a> {} - -impl<'a> Hash for Translate<'a> { - fn hash(&self, state: &mut H) { - self.from.maybe_get_actor().unwrap().hash(state); - self.opid.hash(state); - } -} - -pub trait OpIdExt { - fn native(self) -> ExportableOpId<'static>; - fn translate(self, doc: &automerge::Automerge) -> 
ExportableOpId<'_>; -} - -impl OpIdExt for automerge::OpId { - /// Use this opid directly when exporting - fn native(self) -> ExportableOpId<'static> { - ExportableOpId::Native(self) - } - - /// Translate this OpID from `doc` when exporting - fn translate(self, doc: &automerge::Automerge) -> ExportableOpId<'_> { - ExportableOpId::Translate(Translate { - from: doc, - opid: self, - }) - } -} - -impl From for ExportableOpId<'_> { - fn from(oid: automerge::OpId) -> Self { - ExportableOpId::Native(oid) - } -} - /// Pretty print the contents of a document #[allow(dead_code)] pub fn pretty_print(doc: &automerge::Automerge) { diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 8dcc51df..9bf8a0ea 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,16 +1,16 @@ -use automerge::Automerge; +use automerge::{Automerge, ObjId}; mod helpers; #[allow(unused_imports)] use helpers::{ mk_counter, new_doc, new_doc_with_actor, pretty_print, realize, realize_obj, sorted_actors, - translate_obj_id, OpIdExt, RealizedObject, + RealizedObject, }; #[test] fn no_conflict_on_repeated_assignment() { let mut doc = Automerge::new(); - doc.set(automerge::ROOT, "foo", 1).unwrap(); - let op = doc.set(automerge::ROOT, "foo", 2).unwrap().unwrap(); + doc.set(ObjId::Root, "foo", 1).unwrap(); + let op = doc.set(ObjId::Root, "foo", 2).unwrap().unwrap(); assert_doc!( &doc, map! 
{ @@ -22,45 +22,45 @@ fn no_conflict_on_repeated_assignment() { #[test] fn no_change_on_repeated_map_set() { let mut doc = new_doc(); - doc.set(automerge::ROOT, "foo", 1).unwrap(); - assert!(doc.set(automerge::ROOT, "foo", 1).unwrap().is_none()); + doc.set(ObjId::Root, "foo", 1).unwrap(); + assert!(doc.set(ObjId::Root, "foo", 1).unwrap().is_none()); } #[test] fn no_change_on_repeated_list_set() { let mut doc = new_doc(); - let list_id = doc - .set(automerge::ROOT, "list", automerge::Value::list()) + let list_id: ObjId = doc + .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() - .unwrap(); - doc.insert(list_id, 0, 1).unwrap(); - doc.set(list_id, 0, 1).unwrap(); + .unwrap().into(); + doc.insert(list_id.clone(), 0, 1).unwrap(); + doc.set(list_id.clone(), 0, 1).unwrap(); assert!(doc.set(list_id, 0, 1).unwrap().is_none()); } #[test] fn no_change_on_list_insert_followed_by_set_of_same_value() { let mut doc = new_doc(); - let list_id = doc - .set(automerge::ROOT, "list", automerge::Value::list()) + let list_id: ObjId = doc + .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() - .unwrap(); - doc.insert(list_id, 0, 1).unwrap(); - assert!(doc.set(list_id, 0, 1).unwrap().is_none()); + .unwrap().into(); + doc.insert(list_id.clone(), 0, 1).unwrap(); + assert!(doc.set(list_id.clone(), 0, 1).unwrap().is_none()); } #[test] fn repeated_map_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - doc1.set(automerge::ROOT, "field", 123).unwrap(); + doc1.set(ObjId::Root, "field", 123).unwrap(); doc2.merge(&mut doc1); - doc2.set(automerge::ROOT, "field", 456).unwrap(); - doc1.set(automerge::ROOT, "field", 789).unwrap(); + doc2.set(ObjId::Root, "field", 456).unwrap(); + doc1.set(ObjId::Root, "field", 789).unwrap(); doc1.merge(&mut doc2); - assert_eq!(doc1.values(automerge::ROOT, "field").unwrap().len(), 2); + assert_eq!(doc1.values(ObjId::Root, "field").unwrap().len(), 2); - let op = doc1.set(automerge::ROOT, "field", 
123).unwrap().unwrap(); + let op = doc1.set(ObjId::Root, "field", 123).unwrap().unwrap(); assert_doc!( &doc1, map! { @@ -75,16 +75,15 @@ fn repeated_map_assignment_which_resolves_conflict_not_ignored() { fn repeated_list_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + let list_id: ObjId = doc1 + .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() - .unwrap(); - doc1.insert(list_id, 0, 123).unwrap(); + .unwrap().into(); + doc1.insert(list_id.clone(), 0, 123).unwrap(); doc2.merge(&mut doc1); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - doc2.set(list_id_in_doc2, 0, 456).unwrap().unwrap(); + doc2.set(list_id.clone(), 0, 456).unwrap().unwrap(); doc1.merge(&mut doc2); - let doc1_op = doc1.set(list_id, 0, 789).unwrap().unwrap(); + let doc1_op = doc1.set(list_id.clone(), 0, 789).unwrap().unwrap(); assert_doc!( &doc1, @@ -101,14 +100,14 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { #[test] fn list_deletion() { let mut doc = new_doc(); - let list_id = doc - .set(automerge::ROOT, "list", automerge::Value::list()) + let list_id: ObjId = doc + .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() - .unwrap(); - let op1 = doc.insert(list_id, 0, 123).unwrap(); - doc.insert(list_id, 1, 456).unwrap(); - let op3 = doc.insert(list_id, 2, 789).unwrap(); - doc.del(list_id, 1).unwrap(); + .unwrap().into(); + let op1 = doc.insert(list_id.clone(), 0, 123).unwrap(); + doc.insert(list_id.clone(), 1, 456).unwrap(); + let op3 = doc.insert(list_id.clone(), 2, 789).unwrap(); + doc.del(list_id.clone(), 1).unwrap(); assert_doc!( &doc, map! 
{ @@ -124,28 +123,28 @@ fn list_deletion() { fn merge_concurrent_map_prop_updates() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let op1 = doc1.set(automerge::ROOT, "foo", "bar").unwrap().unwrap(); - let hello = doc2 - .set(automerge::ROOT, "hello", "world") + let op1 = doc1.set(ObjId::Root, "foo", "bar").unwrap().unwrap(); + let hello: ObjId = doc2 + .set(ObjId::Root, "hello", "world") .unwrap() - .unwrap(); + .unwrap().into(); doc1.merge(&mut doc2); assert_eq!( - doc1.value(automerge::ROOT, "foo").unwrap().unwrap().0, + doc1.value(ObjId::Root, "foo").unwrap().unwrap().0, "bar".into() ); assert_doc!( &doc1, map! { - "foo" => { op1 => "bar" }, - "hello" => { hello.translate(&doc2) => "world" }, + "foo" => { op1.clone() => "bar" }, + "hello" => { hello.clone() => "world" }, } ); doc2.merge(&mut doc1); assert_doc!( &doc2, map! { - "foo" => { op1.translate(&doc1) => "bar" }, + "foo" => { op1 => "bar" }, "hello" => { hello => "world" }, } ); @@ -157,12 +156,12 @@ fn add_concurrent_increments_of_same_property() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let counter_id = doc1 - .set(automerge::ROOT, "counter", mk_counter(0)) + .set(ObjId::Root, "counter", mk_counter(0)) .unwrap() .unwrap(); doc2.merge(&mut doc1); - doc1.inc(automerge::ROOT, "counter", 1).unwrap(); - doc2.inc(automerge::ROOT, "counter", 2).unwrap(); + doc1.inc(ObjId::Root, "counter", 1).unwrap(); + doc2.inc(ObjId::Root, "counter", 2).unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, @@ -181,17 +180,17 @@ fn add_increments_only_to_preceeded_values() { // create a counter in doc1 let doc1_counter_id = doc1 - .set(automerge::ROOT, "counter", mk_counter(0)) + .set(ObjId::Root, "counter", mk_counter(0)) .unwrap() .unwrap(); - doc1.inc(automerge::ROOT, "counter", 1).unwrap(); + doc1.inc(ObjId::Root, "counter", 1).unwrap(); // create a counter in doc2 let doc2_counter_id = doc2 - .set(automerge::ROOT, "counter", mk_counter(0)) + .set(ObjId::Root, "counter", mk_counter(0)) .unwrap() 
.unwrap(); - doc2.inc(automerge::ROOT, "counter", 3).unwrap(); + doc2.inc(ObjId::Root, "counter", 3).unwrap(); // The two values should be conflicting rather than added doc1.merge(&mut doc2); @@ -200,8 +199,8 @@ fn add_increments_only_to_preceeded_values() { &doc1, map! { "counter" => { - doc1_counter_id.native() => mk_counter(1), - doc2_counter_id.translate(&doc2) => mk_counter(3), + doc1_counter_id => mk_counter(1), + doc2_counter_id => mk_counter(3), } } ); @@ -211,8 +210,8 @@ fn add_increments_only_to_preceeded_values() { fn concurrent_updates_of_same_field() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let set_one_opid = doc1.set(automerge::ROOT, "field", "one").unwrap().unwrap(); - let set_two_opid = doc2.set(automerge::ROOT, "field", "two").unwrap().unwrap(); + let set_one_opid = doc1.set(ObjId::Root, "field", "one").unwrap().unwrap(); + let set_two_opid = doc2.set(ObjId::Root, "field", "two").unwrap().unwrap(); doc1.merge(&mut doc2); @@ -220,8 +219,8 @@ fn concurrent_updates_of_same_field() { &doc1, map! 
{ "field" => { - set_one_opid.native() => "one", - set_two_opid.translate(&doc2) => "two", + set_one_opid => "one", + set_two_opid => "two", } } ); @@ -231,15 +230,14 @@ fn concurrent_updates_of_same_field() { fn concurrent_updates_of_same_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + let list_id: ObjId = doc1 + .set(ObjId::Root, "birds", automerge::Value::list()) .unwrap() - .unwrap(); - doc1.insert(list_id, 0, "finch").unwrap(); + .unwrap().into(); + doc1.insert(list_id.clone(), 0, "finch").unwrap(); doc2.merge(&mut doc1); - let set_one_op = doc1.set(list_id, 0, "greenfinch").unwrap().unwrap(); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - let set_op_two = doc2.set(list_id_in_doc2, 0, "goldfinch").unwrap().unwrap(); + let set_one_op = doc1.set(list_id.clone(), 0, "greenfinch").unwrap().unwrap(); + let set_op_two = doc2.set(list_id.clone(), 0, "goldfinch").unwrap().unwrap(); doc1.merge(&mut doc2); @@ -248,8 +246,8 @@ fn concurrent_updates_of_same_list_element() { map! { "birds" => { list_id => list![{ - set_one_op.native() => "greenfinch", - set_op_two.translate(&doc2) => "goldfinch", + set_one_op => "greenfinch", + set_op_two => "goldfinch", }] } } @@ -262,15 +260,15 @@ fn assignment_conflicts_of_different_types() { let mut doc2 = new_doc(); let mut doc3 = new_doc(); let op_one = doc1 - .set(automerge::ROOT, "field", "string") + .set(ObjId::Root, "field", "string") .unwrap() .unwrap(); let op_two = doc2 - .set(automerge::ROOT, "field", automerge::Value::list()) + .set(ObjId::Root, "field", automerge::Value::list()) .unwrap() .unwrap(); let op_three = doc3 - .set(automerge::ROOT, "field", automerge::Value::map()) + .set(ObjId::Root, "field", automerge::Value::map()) .unwrap() .unwrap(); @@ -281,9 +279,9 @@ fn assignment_conflicts_of_different_types() { &doc1, map! 
{ "field" => { - op_one.native() => "string", - op_two.translate(&doc2) => list!{}, - op_three.translate(&doc3) => map!{}, + op_one => "string", + op_two => list!{}, + op_three => map!{}, } } ); @@ -294,24 +292,24 @@ fn changes_within_conflicting_map_field() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let op_one = doc1 - .set(automerge::ROOT, "field", "string") + .set(ObjId::Root, "field", "string") .unwrap() .unwrap(); - let map_id = doc2 - .set(automerge::ROOT, "field", automerge::Value::map()) + let map_id: ObjId = doc2 + .set(ObjId::Root, "field", automerge::Value::map()) .unwrap() - .unwrap(); - let set_in_doc2 = doc2.set(map_id, "innerKey", 42).unwrap().unwrap(); + .unwrap().into(); + let set_in_doc2 = doc2.set(map_id.clone(), "innerKey", 42).unwrap().unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! { "field" => { - op_one.native() => "string", - map_id.translate(&doc2) => map!{ + op_one => "string", + map_id => map!{ "innerKey" => { - set_in_doc2.translate(&doc2) => 42, + set_in_doc2 => 42, } } } @@ -325,27 +323,26 @@ fn changes_within_conflicting_list_element() { let mut doc1 = new_doc_with_actor(actor1); let mut doc2 = new_doc_with_actor(actor2); let list_id = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() .unwrap(); - doc1.insert(list_id, 0, "hello").unwrap(); + doc1.insert(list_id.clone(), 0, "hello").unwrap(); doc2.merge(&mut doc1); let map_in_doc1 = doc1 - .set(list_id, 0, automerge::Value::map()) + .set(list_id.clone(), 0, automerge::Value::map()) .unwrap() .unwrap(); - let set_map1 = doc1.set(map_in_doc1, "map1", true).unwrap().unwrap(); - let set_key1 = doc1.set(map_in_doc1, "key", 1).unwrap().unwrap(); + let set_map1 = doc1.set(map_in_doc1.clone(), "map1", true).unwrap().unwrap(); + let set_key1 = doc1.set(map_in_doc1.clone(), "key", 1).unwrap().unwrap(); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); let map_in_doc2 = doc2 - 
.set(list_id_in_doc2, 0, automerge::Value::map()) + .set(list_id.clone(), 0, automerge::Value::map()) .unwrap() .unwrap(); doc1.merge(&mut doc2); - let set_map2 = doc2.set(map_in_doc2, "map2", true).unwrap().unwrap(); - let set_key2 = doc2.set(map_in_doc2, "key", 2).unwrap().unwrap(); + let set_map2 = doc2.set(map_in_doc2.clone(), "map2", true).unwrap().unwrap(); + let set_key2 = doc2.set(map_in_doc2.clone(), "key", 2).unwrap().unwrap(); doc1.merge(&mut doc2); @@ -355,13 +352,13 @@ fn changes_within_conflicting_list_element() { "list" => { list_id => list![ { - map_in_doc2.translate(&doc2) => map!{ - "map2" => { set_map2.translate(&doc2) => true }, - "key" => { set_key2.translate(&doc2) => 2 }, + map_in_doc2 => map!{ + "map2" => { set_map2 => true }, + "key" => { set_key2 => 2 }, }, - map_in_doc1.native() => map!{ - "key" => { set_key1.native() => 1 }, - "map1" => { set_map1.native() => true }, + map_in_doc1 => map!{ + "key" => { set_key1 => 1 }, + "map1" => { set_map1 => true }, } } ] @@ -376,20 +373,20 @@ fn concurrently_assigned_nested_maps_should_not_merge() { let mut doc2 = new_doc(); let doc1_map_id = doc1 - .set(automerge::ROOT, "config", automerge::Value::map()) + .set(ObjId::Root, "config", automerge::Value::map()) .unwrap() .unwrap(); let doc1_field = doc1 - .set(doc1_map_id, "background", "blue") + .set(doc1_map_id.clone(), "background", "blue") .unwrap() .unwrap(); let doc2_map_id = doc2 - .set(automerge::ROOT, "config", automerge::Value::map()) + .set(ObjId::Root, "config", automerge::Value::map()) .unwrap() .unwrap(); let doc2_field = doc2 - .set(doc2_map_id, "logo_url", "logo.png") + .set(doc2_map_id.clone(), "logo_url", "logo.png") .unwrap() .unwrap(); @@ -399,11 +396,11 @@ fn concurrently_assigned_nested_maps_should_not_merge() { &doc1, map! 
{ "config" => { - doc1_map_id.native() => map!{ - "background" => {doc1_field.native() => "blue"} + doc1_map_id => map!{ + "background" => {doc1_field => "blue"} }, - doc2_map_id.translate(&doc2) => map!{ - "logo_url" => {doc2_field.translate(&doc2) => "logo.png"} + doc2_map_id => map!{ + "logo_url" => {doc2_field => "logo.png"} } } } @@ -418,16 +415,15 @@ fn concurrent_insertions_at_different_list_positions() { assert!(doc1.maybe_get_actor().unwrap() < doc2.maybe_get_actor().unwrap()); let list_id = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() .unwrap(); - let one = doc1.insert(list_id, 0, "one").unwrap(); - let three = doc1.insert(list_id, 1, "three").unwrap(); + let one = doc1.insert(list_id.clone(), 0, "one").unwrap(); + let three = doc1.insert(list_id.clone(), 1, "three").unwrap(); doc2.merge(&mut doc1); - let two = doc1.splice(list_id, 1, 0, vec!["two".into()]).unwrap()[0]; - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - let four = doc2.insert(list_id_in_doc2, 2, "four").unwrap(); + let two = doc1.splice(list_id.clone(), 1, 0, vec!["two".into()]).unwrap()[0].clone(); + let four = doc2.insert(list_id.clone(), 2, "four").unwrap(); doc1.merge(&mut doc2); @@ -436,10 +432,10 @@ fn concurrent_insertions_at_different_list_positions() { map! 
{ "list" => { list_id => list![ - {one.native() => "one"}, - {two.native() => "two"}, - {three.native() => "three"}, - {four.translate(&doc2) => "four"}, + {one => "one"}, + {two => "two"}, + {three => "three"}, + {four => "four"}, ] } } @@ -454,15 +450,14 @@ fn concurrent_insertions_at_same_list_position() { assert!(doc1.maybe_get_actor().unwrap() < doc2.maybe_get_actor().unwrap()); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(ObjId::Root, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let parakeet = doc1.insert(list_id, 0, "parakeet").unwrap(); + let parakeet = doc1.insert(list_id.clone(), 0, "parakeet").unwrap(); doc2.merge(&mut doc1); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - let starling = doc1.insert(list_id, 1, "starling").unwrap(); - let chaffinch = doc2.insert(list_id_in_doc2, 1, "chaffinch").unwrap(); + let starling = doc1.insert(list_id.clone(), 1, "starling").unwrap(); + let chaffinch = doc2.insert(list_id.clone(), 1, "chaffinch").unwrap(); doc1.merge(&mut doc2); assert_doc!( @@ -471,13 +466,13 @@ fn concurrent_insertions_at_same_list_position() { "birds" => { list_id => list![ { - parakeet.native() => "parakeet", + parakeet => "parakeet", }, { - starling.native() => "starling", + starling => "starling", }, { - chaffinch.translate(&doc2) => "chaffinch", + chaffinch => "chaffinch", }, ] }, @@ -489,11 +484,11 @@ fn concurrent_insertions_at_same_list_position() { fn concurrent_assignment_and_deletion_of_a_map_entry() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - doc1.set(automerge::ROOT, "bestBird", "robin").unwrap(); + doc1.set(ObjId::Root, "bestBird", "robin").unwrap(); doc2.merge(&mut doc1); - doc1.del(automerge::ROOT, "bestBird").unwrap(); + doc1.del(ObjId::Root, "bestBird").unwrap(); let set_two = doc2 - .set(automerge::ROOT, "bestBird", "magpie") + .set(ObjId::Root, "bestBird", "magpie") .unwrap() .unwrap(); @@ -503,7 +498,7 @@ fn 
concurrent_assignment_and_deletion_of_a_map_entry() { &doc1, map! { "bestBird" => { - set_two.translate(&doc2) => "magpie", + set_two => "magpie", } } ); @@ -514,25 +509,24 @@ fn concurrent_assignment_and_deletion_of_list_entry() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(ObjId::Root, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let blackbird = doc1.insert(list_id, 0, "blackbird").unwrap(); - doc1.insert(list_id, 1, "thrush").unwrap(); - let goldfinch = doc1.insert(list_id, 2, "goldfinch").unwrap(); + let blackbird = doc1.insert(list_id.clone(), 0, "blackbird").unwrap(); + doc1.insert(list_id.clone(), 1, "thrush").unwrap(); + let goldfinch = doc1.insert(list_id.clone(), 2, "goldfinch").unwrap(); doc2.merge(&mut doc1); - let starling = doc1.set(list_id, 1, "starling").unwrap().unwrap(); + let starling = doc1.set(list_id.clone(), 1, "starling").unwrap().unwrap(); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - doc2.del(list_id_in_doc2, 1).unwrap(); + doc2.del(list_id.clone(), 1).unwrap(); assert_doc!( &doc2, map! { - "birds" => {list_id.translate(&doc1) => list![ - { blackbird.translate(&doc1) => "blackbird"}, - { goldfinch.translate(&doc1) => "goldfinch"}, + "birds" => {list_id.clone() => list![ + { blackbird.clone() => "blackbird"}, + { goldfinch.clone() => "goldfinch"}, ]} } ); @@ -540,10 +534,10 @@ fn concurrent_assignment_and_deletion_of_list_entry() { assert_doc!( &doc1, map! 
{ - "birds" => {list_id => list![ - { blackbird => "blackbird" }, - { starling => "starling" }, - { goldfinch => "goldfinch" }, + "birds" => {list_id.clone() => list![ + { blackbird.clone() => "blackbird" }, + { starling.clone() => "starling" }, + { goldfinch.clone() => "goldfinch" }, ]} } ); @@ -567,31 +561,30 @@ fn insertion_after_a_deleted_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(ObjId::Root, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let blackbird = doc1.insert(list_id, 0, "blackbird").unwrap(); - doc1.insert(list_id, 1, "thrush").unwrap(); - doc1.insert(list_id, 2, "goldfinch").unwrap(); + let blackbird = doc1.insert(list_id.clone(), 0, "blackbird").unwrap(); + doc1.insert(list_id.clone(), 1, "thrush").unwrap(); + doc1.insert(list_id.clone(), 2, "goldfinch").unwrap(); doc2.merge(&mut doc1); - doc1.splice(list_id, 1, 2, Vec::new()).unwrap(); + doc1.splice(list_id.clone(), 1, 2, Vec::new()).unwrap(); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); let starling = doc2 - .splice(list_id_in_doc2, 2, 0, vec!["starling".into()]) - .unwrap()[0]; + .splice(list_id.clone(), 2, 0, vec!["starling".into()]) + .unwrap()[0].clone(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! { - "birds" => {list_id => list![ - { blackbird.native() => "blackbird" }, - { starling.translate(&doc2) => "starling" } + "birds" => {list_id.clone() => list![ + { blackbird.clone() => "blackbird" }, + { starling.clone() => "starling" } ]} } ); @@ -600,9 +593,9 @@ fn insertion_after_a_deleted_list_element() { assert_doc!( &doc2, map! 
{ - "birds" => {list_id.translate(&doc1) => list![ - { blackbird.translate(&doc1) => "blackbird" }, - { starling.native() => "starling" } + "birds" => {list_id => list![ + { blackbird => "blackbird" }, + { starling => "starling" } ]} } ); @@ -613,29 +606,28 @@ fn concurrent_deletion_of_same_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(ObjId::Root, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let albatross = doc1.insert(list_id, 0, "albatross").unwrap(); - doc1.insert(list_id, 1, "buzzard").unwrap(); - let cormorant = doc1.insert(list_id, 2, "cormorant").unwrap(); + let albatross = doc1.insert(list_id.clone(), 0, "albatross").unwrap(); + doc1.insert(list_id.clone(), 1, "buzzard").unwrap(); + let cormorant = doc1.insert(list_id.clone(), 2, "cormorant").unwrap(); doc2.merge(&mut doc1); - doc1.del(list_id, 1).unwrap(); + doc1.del(list_id.clone(), 1).unwrap(); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - doc2.del(list_id_in_doc2, 1).unwrap(); + doc2.del(list_id.clone(), 1).unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! { - "birds" => {list_id => list![ - { albatross => "albatross" }, - { cormorant => "cormorant" } + "birds" => {list_id.clone() => list![ + { albatross.clone() => "albatross" }, + { cormorant.clone() => "cormorant" } ]} } ); @@ -644,9 +636,9 @@ fn concurrent_deletion_of_same_list_element() { assert_doc!( &doc2, map! 
{ - "birds" => {list_id.translate(&doc1) => list![ - { albatross.translate(&doc1) => "albatross" }, - { cormorant.translate(&doc1) => "cormorant" } + "birds" => {list_id => list![ + { albatross => "albatross" }, + { cormorant => "cormorant" } ]} } ); @@ -658,48 +650,47 @@ fn concurrent_updates_at_different_levels() { let mut doc2 = new_doc(); let animals = doc1 - .set(automerge::ROOT, "animals", automerge::Value::map()) + .set(ObjId::Root, "animals", automerge::Value::map()) .unwrap() .unwrap(); let birds = doc1 - .set(animals, "birds", automerge::Value::map()) + .set(animals.clone(), "birds", automerge::Value::map()) .unwrap() .unwrap(); - doc1.set(birds, "pink", "flamingo").unwrap().unwrap(); - doc1.set(birds, "black", "starling").unwrap().unwrap(); + doc1.set(birds.clone(), "pink", "flamingo").unwrap().unwrap(); + doc1.set(birds.clone(), "black", "starling").unwrap().unwrap(); let mammals = doc1 - .set(animals, "mammals", automerge::Value::list()) + .set(animals.clone(), "mammals", automerge::Value::list()) .unwrap() .unwrap(); - let badger = doc1.insert(mammals, 0, "badger").unwrap(); + let badger = doc1.insert(mammals.clone(), 0, "badger").unwrap(); doc2.merge(&mut doc1); doc1.set(birds, "brown", "sparrow").unwrap().unwrap(); - let animals_in_doc2 = translate_obj_id(&doc1, &doc2, animals); - doc2.del(animals_in_doc2, "birds").unwrap(); + doc2.del(animals, "birds").unwrap(); doc1.merge(&mut doc2); assert_obj!( &doc1, - automerge::ROOT, + ObjId::Root, "animals", map! { "mammals" => { - mammals => list![{ badger => "badger" }], + mammals.clone() => list![{ badger.clone() => "badger" }], } } ); assert_obj!( &doc2, - automerge::ROOT, + ObjId::Root, "animals", map! 
{ "mammals" => { - mammals.translate(&doc1) => list![{ badger.translate(&doc1) => "badger" }], + mammals => list![{ badger => "badger" }], } } ); @@ -711,21 +702,20 @@ fn concurrent_updates_of_concurrently_deleted_objects() { let mut doc2 = new_doc(); let birds = doc1 - .set(automerge::ROOT, "birds", automerge::Value::map()) + .set(ObjId::Root, "birds", automerge::Value::map()) .unwrap() .unwrap(); let blackbird = doc1 - .set(birds, "blackbird", automerge::Value::map()) + .set(birds.clone(), "blackbird", automerge::Value::map()) .unwrap() .unwrap(); - doc1.set(blackbird, "feathers", "black").unwrap().unwrap(); + doc1.set(blackbird.clone(), "feathers", "black").unwrap().unwrap(); doc2.merge(&mut doc1); - doc1.del(birds, "blackbird").unwrap(); + doc1.del(birds.clone(), "blackbird").unwrap(); - translate_obj_id(&doc1, &doc2, blackbird); - doc2.set(blackbird, "beak", "orange").unwrap(); + doc2.set(blackbird.clone(), "beak", "orange").unwrap(); doc1.merge(&mut doc2); @@ -746,14 +736,14 @@ fn does_not_interleave_sequence_insertions_at_same_position() { let mut doc2 = new_doc_with_actor(actor2); let wisdom = doc1 - .set(automerge::ROOT, "wisdom", automerge::Value::list()) + .set(ObjId::Root, "wisdom", automerge::Value::list()) .unwrap() .unwrap(); doc2.merge(&mut doc1); let doc1elems = doc1 .splice( - wisdom, + wisdom.clone(), 0, 0, vec![ @@ -766,10 +756,9 @@ fn does_not_interleave_sequence_insertions_at_same_position() { ) .unwrap(); - let wisdom_in_doc2 = translate_obj_id(&doc1, &doc2, wisdom); let doc2elems = doc2 .splice( - wisdom_in_doc2, + wisdom.clone(), 0, 0, vec![ @@ -788,16 +777,16 @@ fn does_not_interleave_sequence_insertions_at_same_position() { &doc1, map! 
{ "wisdom" => {wisdom => list![ - {doc1elems[0].native() => "to"}, - {doc1elems[1].native() => "be"}, - {doc1elems[2].native() => "is"}, - {doc1elems[3].native() => "to"}, - {doc1elems[4].native() => "do"}, - {doc2elems[0].translate(&doc2) => "to"}, - {doc2elems[1].translate(&doc2) => "do"}, - {doc2elems[2].translate(&doc2) => "is"}, - {doc2elems[3].translate(&doc2) => "to"}, - {doc2elems[4].translate(&doc2) => "be"}, + {doc1elems[0].clone() => "to"}, + {doc1elems[1].clone() => "be"}, + {doc1elems[2].clone() => "is"}, + {doc1elems[3].clone() => "to"}, + {doc1elems[4].clone() => "do"}, + {doc2elems[0].clone() => "to"}, + {doc2elems[1].clone() => "do"}, + {doc2elems[2].clone() => "is"}, + {doc2elems[3].clone() => "to"}, + {doc2elems[4].clone() => "be"}, ]} } ); @@ -811,20 +800,19 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id( let mut doc2 = new_doc_with_actor(actor2); let list = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() .unwrap(); - let two = doc1.insert(list, 0, "two").unwrap(); + let two = doc1.insert(list.clone(), 0, "two").unwrap(); doc2.merge(&mut doc1); - let list_in_doc2 = translate_obj_id(&doc1, &doc2, list); - let one = doc2.insert(list_in_doc2, 0, "one").unwrap(); + let one = doc2.insert(list.clone(), 0, "one").unwrap(); assert_doc!( &doc2, map! 
{ - "list" => { list.translate(&doc1) => list![ - { one.native() => "one" }, - { two.translate(&doc1) => "two" }, + "list" => { list => list![ + { one => "one" }, + { two => "two" }, ]} } ); @@ -838,20 +826,19 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() let mut doc2 = new_doc_with_actor(actor2); let list = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() .unwrap(); - let two = doc1.insert(list, 0, "two").unwrap(); + let two = doc1.insert(list.clone(), 0, "two").unwrap(); doc2.merge(&mut doc1); - let list_in_doc2 = translate_obj_id(&doc1, &doc2, list); - let one = doc2.insert(list_in_doc2, 0, "one").unwrap(); + let one = doc2.insert(list.clone(), 0, "one").unwrap(); assert_doc!( &doc2, map! { - "list" => { list.translate(&doc1) => list![ - { one.native() => "one" }, - { two.translate(&doc1) => "two" }, + "list" => { list => list![ + { one => "one" }, + { two => "two" }, ]} } ); @@ -863,31 +850,68 @@ fn insertion_consistent_with_causality() { let mut doc2 = new_doc(); let list = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() .unwrap(); - let four = doc1.insert(list, 0, "four").unwrap(); + let four = doc1.insert(list.clone(), 0, "four").unwrap(); doc2.merge(&mut doc1); - let list_in_doc2 = translate_obj_id(&doc1, &doc2, list); - let three = doc2.insert(list_in_doc2, 0, "three").unwrap(); + let three = doc2.insert(list.clone(), 0, "three").unwrap(); doc1.merge(&mut doc2); - let two = doc1.insert(list, 0, "two").unwrap(); + let two = doc1.insert(list.clone(), 0, "two").unwrap(); doc2.merge(&mut doc1); - let one = doc2.insert(list_in_doc2, 0, "one").unwrap(); + let one = doc2.insert(list.clone(), 0, "one").unwrap(); assert_doc!( &doc2, map! 
{ - "list" => {list.translate(&doc1) => list![ - {one.native() => "one"}, - {two.translate(&doc1) => "two"}, - {three.native() => "three" }, - {four.translate(&doc1) => "four"}, + "list" => {list => list![ + {one => "one"}, + {two => "two"}, + {three => "three" }, + {four => "four"}, ]} } ); } +#[test] +fn should_handle_arbitrary_depth_nesting() { + let mut doc1 = new_doc(); + let a = doc1.set(ObjId::Root, "a", automerge::Value::map()).unwrap().unwrap(); + let b = doc1.set(a.clone(), "b", automerge::Value::map()).unwrap().unwrap(); + let c = doc1.set(b.clone(), "c", automerge::Value::map()).unwrap().unwrap(); + let d = doc1.set(c.clone(), "d", automerge::Value::map()).unwrap().unwrap(); + let e = doc1.set(d.clone(), "e", automerge::Value::map()).unwrap().unwrap(); + let f = doc1.set(e.clone(), "f", automerge::Value::map()).unwrap().unwrap(); + let g = doc1.set(f.clone(), "g", automerge::Value::map()).unwrap().unwrap(); + let h = doc1.set(g.clone(), "h", "h").unwrap().unwrap(); + let j = doc1.set(f.clone(), "i", "j").unwrap().unwrap(); + + assert_doc!( + &doc1, + map!{ + "a" => {a => map!{ + "b" => {b => map!{ + "c" => {c => map!{ + "d" => {d => map!{ + "e" => {e => map!{ + "f" => {f => map!{ + "g" => {g => map!{ + "h" => {h => "h"} + }}, + "i" => {j => "j"}, + }} + }} + }} + }} + }} + }} + } + ); + + Automerge::load(&doc1.save().unwrap()).unwrap(); +} + #[test] fn save_and_restore_empty() { let mut doc = new_doc(); @@ -900,26 +924,25 @@ fn save_and_restore_empty() { fn save_restore_complex() { let mut doc1 = new_doc(); let todos = doc1 - .set(automerge::ROOT, "todos", automerge::Value::list()) + .set(ObjId::Root, "todos", automerge::Value::list()) .unwrap() .unwrap(); - let first_todo = doc1.insert(todos, 0, automerge::Value::map()).unwrap(); - doc1.set(first_todo, "title", "water plants") + let first_todo = doc1.insert(todos.clone(), 0, automerge::Value::map()).unwrap(); + doc1.set(first_todo.clone(), "title", "water plants") .unwrap() .unwrap(); - let first_done 
= doc1.set(first_todo, "done", false).unwrap().unwrap(); + let first_done = doc1.set(first_todo.clone(), "done", false).unwrap().unwrap(); let mut doc2 = new_doc(); doc2.merge(&mut doc1); - let first_todo_in_doc2 = translate_obj_id(&doc1, &doc2, first_todo); let weed_title = doc2 - .set(first_todo_in_doc2, "title", "weed plants") + .set(first_todo.clone(), "title", "weed plants") .unwrap() .unwrap(); let kill_title = doc1 - .set(first_todo, "title", "kill plants") + .set(first_todo.clone(), "title", "kill plants") .unwrap() .unwrap(); doc1.merge(&mut doc2); @@ -929,13 +952,13 @@ fn save_restore_complex() { assert_doc!( &reloaded, map! { - "todos" => {todos.translate(&doc1) => list![ - {first_todo.translate(&doc1) => map!{ + "todos" => {todos => list![ + {first_todo => map!{ "title" => { - weed_title.translate(&doc2) => "weed plants", - kill_title.translate(&doc1) => "kill plants", + weed_title => "weed plants", + kill_title => "kill plants", }, - "done" => {first_done.translate(&doc1) => false}, + "done" => {first_done => false}, }} ]} } diff --git a/edit-trace/Cargo.toml b/edit-trace/Cargo.toml index 68d47433..375c8995 100644 --- a/edit-trace/Cargo.toml +++ b/edit-trace/Cargo.toml @@ -5,6 +5,9 @@ edition = "2018" license = "MIT" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[[bin]] +name = "edit-trace" +bench = false [dependencies] automerge = { path = "../automerge" } diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index 94fde72c..7e2284af 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -1,4 +1,4 @@ -use automerge::{Automerge, AutomergeError, Value, ROOT}; +use automerge::{Automerge, AutomergeError, Value, ObjId}; use std::fs; use std::time::Instant; @@ -19,12 +19,12 @@ fn main() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let now = Instant::now(); - let text = doc.set(ROOT, "text", Value::text()).unwrap().unwrap(); + let text: ObjId = doc.set(ObjId::Root, 
"text", Value::text()).unwrap().unwrap().into(); for (i, (pos, del, vals)) in commands.into_iter().enumerate() { if i % 1000 == 0 { println!("Processed {} edits in {} ms", i, now.elapsed().as_millis()); } - doc.splice(text, pos, del, vals)?; + doc.splice(text.clone(), pos, del, vals)?; } let _ = doc.save(); println!("Done in {} ms", now.elapsed().as_millis()); diff --git a/todo.adoc b/todo.adoc new file mode 100644 index 00000000..8bafc3c4 --- /dev/null +++ b/todo.adoc @@ -0,0 +1,3 @@ +* Add an `ElementId::Head` +* Make all fields of Transaction private +* Remove panics from From implementations in value.rs From 65751aeb45b86bfb4bec464a05acda4a8082c740 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 27 Dec 2021 15:01:01 +0000 Subject: [PATCH 003/730] add external opid --- automerge/src/external_types.rs | 63 ++++++---- automerge/src/lib.rs | 134 ++++++++++---------- automerge/tests/helpers/mod.rs | 57 ++++----- automerge/tests/test.rs | 212 ++++++++++++++++---------------- 4 files changed, 236 insertions(+), 230 deletions(-) diff --git a/automerge/src/external_types.rs b/automerge/src/external_types.rs index 4411f3a9..197a3011 100644 --- a/automerge/src/external_types.rs +++ b/automerge/src/external_types.rs @@ -1,27 +1,28 @@ -use std::{str::FromStr, borrow::Cow, fmt::Display}; +use std::{borrow::Cow, fmt::Display, str::FromStr}; -use crate::{ActorId, types::OpId, op_tree::OpSetMetadata}; +use crate::{op_tree::OpSetMetadata, types::OpId, ActorId}; const ROOT_STR: &str = "_root"; -#[derive(Copy, Debug, PartialEq, Clone, Hash, Eq)] -pub struct ExternalOpId<'a> { +#[derive(Debug, PartialEq, Clone, Hash, Eq)] +pub struct ExternalOpId { counter: u64, - actor: Cow<'a, ActorId>, + actor: ActorId, } -impl<'a> ExternalOpId<'a> { - pub(crate) fn from_internal(opid: OpId, metadata: &OpSetMetadata) -> Option { - metadata.actors.get_safe(opid.actor()).map(|actor| { - ExternalOpId{ +impl ExternalOpId { + pub(crate) fn from_internal(opid: &OpId, metadata: &OpSetMetadata) -> 
Option { + metadata + .actors + .get_safe(opid.actor()) + .map(|actor| ExternalOpId { counter: opid.counter(), - actor: actor.into(), - } - }) + actor: actor.clone(), + }) } - pub(crate) fn into_opid(self, metadata: &mut OpSetMetadata) -> OpId { - let actor = metadata.actors.cache(self.actor); + pub(crate) fn into_opid(&self, metadata: &mut OpSetMetadata) -> OpId { + let actor = metadata.actors.cache(self.actor.clone()); OpId::new(self.counter, actor) } } @@ -29,12 +30,27 @@ impl<'a> ExternalOpId<'a> { #[derive(Debug, PartialEq, Clone, Hash, Eq)] pub enum ExternalObjId<'a> { Root, - Op(ExternalOpId<'a>), + Op(Cow<'a, ExternalOpId>), } -impl<'a> From> for ExternalObjId<'a> { +impl<'a> ExternalObjId<'a> { + pub fn into_owned(self) -> ExternalObjId<'static> { + match self { + Self::Root => ExternalObjId::Root, + Self::Op(cow) => ExternalObjId::Op(Cow::<'static, _>::Owned(cow.into_owned().into())), + } + } +} + +impl<'a> From<&'a ExternalOpId> for ExternalObjId<'a> { + fn from(op: &'a ExternalOpId) -> Self { + ExternalObjId::Op(Cow::Borrowed(op)) + } +} + +impl From for ExternalObjId<'static> { fn from(op: ExternalOpId) -> Self { - ExternalObjId::Op(op) + ExternalObjId::Op(Cow::Owned(op)) } } @@ -48,7 +64,7 @@ pub enum ParseError { InvalidActor, } -impl FromStr for ExternalOpId<'static> { +impl FromStr for ExternalOpId { type Err = ParseError; fn from_str(s: &str) -> Result { @@ -56,19 +72,20 @@ impl FromStr for ExternalOpId<'static> { let first_part = parts.next().ok_or(ParseError::BadFormat)?; let second_part = parts.next().ok_or(ParseError::BadFormat)?; let counter: u64 = first_part.parse().map_err(|_| ParseError::InvalidCounter)?; - let actor: ActorId = second_part.parse().map_err(|_| ParseError::InvalidActor)?; - Ok(ExternalOpId{counter, actor}) + let actor: ActorId = second_part.parse().map_err(|_| ParseError::InvalidActor)?; + Ok(ExternalOpId { counter, actor }) } } -impl<'a> FromStr for ExternalObjId<'a> { +impl FromStr for ExternalObjId<'static> { type Err = 
ParseError; fn from_str(s: &str) -> Result { if s == ROOT_STR { Ok(ExternalObjId::Root) } else { - Ok(s.parse::()?.into()) + let op = s.parse::()?.into(); + Ok(ExternalObjId::Op(Cow::Owned(op))) } } } @@ -79,7 +96,7 @@ impl Display for ExternalOpId { } } -impl Display for ExternalObjId { +impl<'a> Display for ExternalObjId<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Root => write!(f, "{}", ROOT_STR), diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 92340fd5..b7a3714f 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -201,7 +201,7 @@ impl Automerge { pub fn ensure_transaction_closed(&mut self) { if let Some(tx) = self.transaction.take() { - let change = export_change(&tx, &self.ops.m.borrow_mut().actors, &self.ops.m.borrow().props); + let change = export_change(&tx, &self.ops.m.borrow().actors, &self.ops.m.borrow().props); self.update_history(change); } } @@ -265,25 +265,25 @@ impl Automerge { // PropAt::() // NthAt::() - pub fn keys>(&self, obj: O) -> Vec { - let obj = self.import_objid(obj.into()); + pub fn keys<'a, O: Into>>(&self, obj: O) -> Vec { + let obj = self.import_objid(obj); let q = self.ops.search(obj.into(), query::Keys::new()); q.keys.iter().map(|k| self.export(*k)).collect() } - pub fn keys_at>(&mut self, obj: O, heads: &[ChangeHash]) -> Vec { + pub fn keys_at<'a, O: Into>>(&mut self, obj: O, heads: &[ChangeHash]) -> Vec { let obj = self.import_objid(obj.into()); let clock = self.clock_at(heads); let q = self.ops.search(obj.into(), query::KeysAt::new(clock)); q.keys.iter().map(|k| self.export(*k)).collect() } - pub fn length>(&self, obj: O) -> usize { + pub fn length<'a, O: Into>>(&self, obj: O) -> usize { let obj = self.import_objid(obj.into()); self.ops.search(obj.into(), query::Len::new(obj.into())).len } - pub fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { + pub fn length_at<'a, O: Into>>(&self, obj: O, heads: &[ChangeHash]) -> usize { let obj = 
self.import_objid(obj.into()); let clock = self.clock_at(heads); self.ops.search(obj.into(), query::LenAt::new(clock)).len @@ -307,7 +307,7 @@ impl Automerge { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - pub fn set, P: Into, V: Into>( + pub fn set<'a, O: Into>, P: Into, V: Into>( &mut self, obj: O, prop: P, @@ -318,7 +318,7 @@ impl Automerge { self.local_op(obj.into(), prop.into(), value.into()) } - pub fn insert, V: Into>( + pub fn insert<'a, O: Into>, V: Into>( &mut self, obj: O, index: usize, @@ -346,10 +346,10 @@ impl Automerge { self.ops.insert(query.pos, op.clone()); self.tx().operations.push(op); - Ok(self.export_opid(id).unwrap()) + Ok(self.export_opid(&id).unwrap()) } - pub fn inc, P: Into>( + pub fn inc<'a, O: Into>, P: Into>( &mut self, obj: O, prop: P, @@ -363,7 +363,7 @@ impl Automerge { } } - pub fn del, P: Into>(&mut self, obj: O, prop: P) -> Result { + pub fn del<'a, O: Into>, P: Into>(&mut self, obj: O, prop: P) -> Result { // TODO: Should we also no-op multiple delete operations? match self.local_op(self.import_objid(obj.into()).into(), prop.into(), OpType::Del)? { Some(opid) => Ok(opid), @@ -375,7 +375,7 @@ impl Automerge { /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert /// the new elements - pub fn splice>( + pub fn splice<'a, O: Into>>( &mut self, obj: O, mut pos: usize, @@ -394,7 +394,7 @@ impl Automerge { Ok(result) } - pub fn splice_text>( + pub fn splice_text<'a, O: Into>>( &mut self, obj: O, pos: usize, @@ -408,7 +408,7 @@ impl Automerge { self.splice(obj, pos, del, vals) } - pub fn text>(&self, obj: O) -> Result { + pub fn text<'a, O: Into>>(&self, obj: O) -> Result { let obj = self.import_objid(obj.into()).into(); let query = self.ops.search(obj, query::ListVals::new(obj)); let mut buffer = String::new(); @@ -420,7 +420,7 @@ impl Automerge { Ok(buffer) } - pub fn text_at>(&self, obj: O, heads: &[ChangeHash]) -> Result { + pub fn text_at<'a, O: Into>>(&self, obj: O, heads: &[ChangeHash]) -> Result { let clock = self.clock_at(heads); let obj = self.import_objid(obj.into()).into(); let query = self.ops.search(obj, query::ListValsAt::new(clock)); @@ -436,7 +436,7 @@ impl Automerge { // TODO - I need to return these OpId's here **only** to get // the legacy conflicts format of { [opid]: value } // Something better? 
- pub fn value, P: Into>( + pub fn value<'a, O: Into>, P: Into>( &self, obj: O, prop: P, @@ -444,7 +444,7 @@ impl Automerge { Ok(self.values(obj, prop.into())?.first().cloned()) } - pub fn value_at, P: Into>( + pub fn value_at<'a, O: Into>, P: Into>( &self, obj: O, prop: P, @@ -453,7 +453,7 @@ impl Automerge { Ok(self.values_at(obj, prop, heads)?.first().cloned()) } - pub fn values, P: Into>( + pub fn values<'a, O: Into>, P: Into>( &self, obj: O, prop: P, @@ -484,14 +484,14 @@ impl Automerge { Ok(result) } - pub fn values_at, P: Into>( + pub fn values_at<'a, O: Into>, P: Into>( &self, obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { let prop = prop.into(); - let obj = self.import_objid(obj.into()).into(); + let obj = self.import_objid(obj).into(); let clock = self.clock_at(heads); let result = match prop { Prop::Map(p) => { @@ -615,7 +615,7 @@ impl Automerge { self.insert_local_op(op, query.pos, &query.ops_pos); - Ok(Some(self.export_opid(id).unwrap())) + Ok(Some(self.export_opid(&id).unwrap())) } fn local_list_op( @@ -658,7 +658,7 @@ impl Automerge { self.insert_local_op(op, query.pos, &query.ops_pos); - Ok(Some(self.export_opid(id).unwrap())) + Ok(Some(self.export_opid(&id).unwrap())) } fn is_causally_ready(&self, change: &Change) -> bool { @@ -1058,15 +1058,15 @@ impl Automerge { opid.into_opid(&mut *self.ops.m.borrow_mut()) } - fn export_opid(&self, opid: InternalOpId) -> Option { + fn export_opid(&self, opid: &InternalOpId) -> Option { OpId::from_internal(opid, &self.ops.m.borrow_mut()) } - fn import_objid>(&self, objid: O) -> InternalObjId { - match objid.as_ref() { + fn import_objid<'a, A: Into>>(&self, objid: A) -> InternalObjId { + match objid.into() { ObjId::Root => InternalObjId::Root, ObjId::Op(external_op) => { - let op = self.import_opid(external_op); + let op = self.import_opid(&external_op); InternalObjId::Op(op) } } @@ -1123,7 +1123,7 @@ impl Automerge { } fn labelled_value(&self, op: &Op) -> (Value, OpId) { - let id = 
self.export_opid(op.id).unwrap(); + let id = self.export_opid(&op.id).unwrap(); let value = match &op.action { OpType::Make(obj_type) => Value::Object(*obj_type), OpType::Set(scalar) => Value::Scalar(scalar.clone()), @@ -1174,14 +1174,14 @@ mod tests { let list_id = doc.set(ObjId::Root, "items", Value::list())?.unwrap().into(); doc.set(ObjId::Root, "zzz", "zzzval")?; assert!(doc.value(ObjId::Root, "items")?.unwrap().1 == list_id); - doc.insert(list_id, 0, "a")?; - doc.insert(list_id, 0, "b")?; - doc.insert(list_id, 2, "c")?; - doc.insert(list_id, 1, "d")?; - assert!(doc.value(list_id.clone(), 0)?.unwrap().0 == "b".into()); - assert!(doc.value(list_id.clone(), 1)?.unwrap().0 == "d".into()); - assert!(doc.value(list_id.clone(), 2)?.unwrap().0 == "a".into()); - assert!(doc.value(list_id.clone(), 3)?.unwrap().0 == "c".into()); + doc.insert(&list_id, 0, "a")?; + doc.insert(&list_id, 0, "b")?; + doc.insert(&list_id, 2, "c")?; + doc.insert(&list_id, 1, "d")?; + assert!(doc.value(&list_id, 0)?.unwrap().0 == "b".into()); + assert!(doc.value(&list_id, 1)?.unwrap().0 == "d".into()); + assert!(doc.value(&list_id, 2)?.unwrap().0 == "a".into()); + assert!(doc.value(&list_id, 3)?.unwrap().0 == "c".into()); assert!(doc.length(list_id) == 4); doc.save()?; Ok(()) @@ -1202,11 +1202,11 @@ mod tests { fn test_inc() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let id = doc.set(ObjId::Root, "counter", Value::counter(10))?.unwrap(); - assert!(doc.value(ObjId::Root, "counter")? == Some((Value::counter(10), id))); + assert!(doc.value(ObjId::Root, "counter")? == Some((Value::counter(10), id.clone()))); doc.inc(ObjId::Root, "counter", 10)?; - assert!(doc.value(ObjId::Root, "counter")? == Some((Value::counter(20), id))); + assert!(doc.value(ObjId::Root, "counter")? == Some((Value::counter(20), id.clone()))); doc.inc(ObjId::Root, "counter", -5)?; - assert!(doc.value(ObjId::Root, "counter")? == Some((Value::counter(15), id))); + assert!(doc.value(ObjId::Root, "counter")? 
== Some((Value::counter(15), id.clone()))); Ok(()) } @@ -1252,15 +1252,15 @@ mod tests { let mut doc = Automerge::new(); let text = doc.set(ObjId::Root, "text", Value::text())?.unwrap(); let heads1 = doc.commit(None, None); - doc.splice_text(text, 0, 0, "hello world")?; + doc.splice_text(&text, 0, 0, "hello world")?; let heads2 = doc.commit(None, None); - doc.splice_text(text, 6, 0, "big bad ")?; + doc.splice_text(&text, 6, 0, "big bad ")?; let heads3 = doc.commit(None, None); - assert!(&doc.text(text)? == "hello big bad world"); - assert!(&doc.text_at(text, &heads1)?.is_empty()); - assert!(&doc.text_at(text, &heads2)? == "hello world"); - assert!(&doc.text_at(text, &heads3)? == "hello big bad world"); + assert!(&doc.text(&text)? == "hello big bad world"); + assert!(&doc.text_at(&text, &heads1)?.is_empty()); + assert!(&doc.text_at(&text, &heads2)? == "hello world"); + assert!(&doc.text_at(&text, &heads3)? == "hello big bad world"); Ok(()) } @@ -1324,44 +1324,44 @@ mod tests { let list = doc.set(ObjId::Root, "list", Value::list())?.unwrap(); let heads1 = doc.commit(None, None); - doc.insert(list, 0, Value::int(10))?; + doc.insert(&list, 0, Value::int(10))?; let heads2 = doc.commit(None, None); - doc.set(list, 0, Value::int(20))?; - doc.insert(list, 0, Value::int(30))?; + doc.set(&list, 0, Value::int(20))?; + doc.insert(&list, 0, Value::int(30))?; let heads3 = doc.commit(None, None); - doc.set(list, 1, Value::int(40))?; - doc.insert(list, 1, Value::int(50))?; + doc.set(&list, 1, Value::int(40))?; + doc.insert(&list, 1, Value::int(50))?; let heads4 = doc.commit(None, None); - doc.del(list, 2)?; + doc.del(&list, 2)?; let heads5 = doc.commit(None, None); - doc.del(list, 0)?; + doc.del(&list, 0)?; let heads6 = doc.commit(None, None); - assert!(doc.length_at(list, &heads1) == 0); - assert!(doc.value_at(list, 0, &heads1)?.is_none()); + assert!(doc.length_at(&list, &heads1) == 0); + assert!(doc.value_at(&list, 0, &heads1)?.is_none()); - assert!(doc.length_at(list, &heads2) 
== 1); - assert!(doc.value_at(list, 0, &heads2)?.unwrap().0 == Value::int(10)); + assert!(doc.length_at(&list, &heads2) == 1); + assert!(doc.value_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10)); - assert!(doc.length_at(list, &heads3) == 2); - assert!(doc.value_at(list, 0, &heads3)?.unwrap().0 == Value::int(30)); - assert!(doc.value_at(list, 1, &heads3)?.unwrap().0 == Value::int(20)); + assert!(doc.length_at(&list, &heads3) == 2); + assert!(doc.value_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30)); + assert!(doc.value_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20)); - assert!(doc.length_at(list, &heads4) == 3); - assert!(doc.value_at(list, 0, &heads4)?.unwrap().0 == Value::int(30)); - assert!(doc.value_at(list, 1, &heads4)?.unwrap().0 == Value::int(50)); - assert!(doc.value_at(list, 2, &heads4)?.unwrap().0 == Value::int(40)); + assert!(doc.length_at(&list, &heads4) == 3); + assert!(doc.value_at(&list, 0, &heads4)?.unwrap().0 == Value::int(30)); + assert!(doc.value_at(&list, 1, &heads4)?.unwrap().0 == Value::int(50)); + assert!(doc.value_at(&list, 2, &heads4)?.unwrap().0 == Value::int(40)); - assert!(doc.length_at(list, &heads5) == 2); - assert!(doc.value_at(list, 0, &heads5)?.unwrap().0 == Value::int(30)); - assert!(doc.value_at(list, 1, &heads5)?.unwrap().0 == Value::int(50)); + assert!(doc.length_at(&list, &heads5) == 2); + assert!(doc.value_at(&list, 0, &heads5)?.unwrap().0 == Value::int(30)); + assert!(doc.value_at(&list, 1, &heads5)?.unwrap().0 == Value::int(50)); - assert!(doc.length_at(list, &heads6) == 1); - assert!(doc.value_at(list, 0, &heads6)?.unwrap().0 == Value::int(50)); + assert!(doc.length_at(&list, &heads6) == 1); + assert!(doc.value_at(&list, 0, &heads6)?.unwrap().0 == Value::int(50)); Ok(()) } diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index 40ee7faf..bbcad074 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -25,7 +25,7 @@ pub fn sorted_actors() -> 
(automerge::ActorId, automerge::ActorId) { /// This macro makes it easy to make assertions about a document. It is called with two arguments, /// the first is a reference to an `automerge::Automerge`, the second is an instance of -/// `RealizedObject`. +/// `RealizedObject`. /// /// What - I hear you ask - is a `RealizedObject`? It's a fully hydrated version of the contents of /// an automerge document. You don't need to think about this too much though because you can @@ -67,22 +67,11 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// map!{ /// "field" => { /// op1 => "one", -/// op2.translate(&doc2) => "two" +/// op2 => "two" /// } /// } /// ); /// ``` -/// -/// ## Translating OpIds -/// -/// One thing you may have noticed in the example above is the `op2.translate(&doc2)` call. What is -/// that doing there? Well, the problem is that automerge OpIDs (in the current API) are specific -/// to a document. Using an opid from one document in a different document will not work. Therefore -/// this module defines an `OpIdExt` trait with a `translate` method on it. This method takes a -/// document and converts the opid into something which knows how to be compared with opids from -/// another document by using the document you pass to `translate`. Again, all you really need to -/// know is that when constructing a document for comparison you should call `translate(fromdoc)` -/// on opids which come from a document other than the one you pass to `assert_doc`. #[macro_export] macro_rules! assert_doc { ($doc: expr, $expected: expr) => {{ @@ -147,7 +136,7 @@ macro_rules! map { use std::collections::HashMap; let mut inner: HashMap = HashMap::new(); $( - let _ = inner.insert($opid.into(), $value.into()); + let _ = inner.insert(ObjId::from((&$opid)).into_owned(), $value.into()); )* inner } @@ -195,7 +184,7 @@ macro_rules! 
list { use std::collections::HashMap; let mut inner: HashMap = HashMap::new(); $( - let _ = inner.insert($opid.into(), $value.into()); + let _ = inner.insert(ObjId::from(&$opid).into_owned(), $value.into()); )* inner } @@ -231,13 +220,13 @@ impl std::fmt::Display for ExportedOpId { /// A `RealizedObject` is a representation of all the current values in a document - including /// conflicts. #[derive(PartialEq, Debug)] -pub enum RealizedObject { - Map(HashMap>), - Sequence(Vec>), +pub enum RealizedObject<'a> { + Map(HashMap, RealizedObject<'a>>>), + Sequence(Vec, RealizedObject<'a>>>), Value(automerge::ScalarValue), } -impl serde::Serialize for RealizedObject { +impl serde::Serialize for RealizedObject<'static> { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -270,7 +259,7 @@ impl serde::Serialize for RealizedObject { } } -pub fn realize(doc: &automerge::Automerge) -> RealizedObject { +pub fn realize<'a>(doc: &automerge::Automerge) -> RealizedObject<'a> { realize_obj(doc, ObjId::Root, automerge::ObjType::Map) } @@ -278,7 +267,7 @@ pub fn realize_prop>( doc: &automerge::Automerge, obj_id: automerge::ObjId, prop: P, -) -> RealizedObject { +) -> RealizedObject<'static> { let (val, obj_id) = doc.value(obj_id, prop).unwrap().unwrap(); match val { automerge::Value::Object(obj_type) => realize_obj(doc, obj_id.into(), obj_type), @@ -290,7 +279,7 @@ pub fn realize_obj( doc: &automerge::Automerge, obj_id: automerge::ObjId, objtype: automerge::ObjType, -) -> RealizedObject { +) -> RealizedObject<'static> { match objtype { automerge::ObjType::Map | automerge::ObjType::Table => { let mut result = HashMap::new(); @@ -314,7 +303,7 @@ fn realize_values>( doc: &automerge::Automerge, obj_id: automerge::ObjId, key: K, -) -> HashMap { +) -> HashMap, RealizedObject<'static>> { let mut values_by_objid: HashMap = HashMap::new(); for (value, opid) in doc.values(obj_id, key).unwrap() { let realized = match value { @@ -327,10 +316,10 @@ fn realize_values>( } 
-impl> - From>> for RealizedObject +impl<'a, I: Into>> + From, I>>> for RealizedObject<'a> { - fn from(values: HashMap<&str, HashMap>) -> Self { + fn from(values: HashMap<&str, HashMap, I>>) -> Self { let intoed = values .into_iter() .map(|(k, v)| { @@ -344,39 +333,39 @@ impl> } } -impl> - From>> for RealizedObject +impl<'a, I: Into>> + From, I>>> for RealizedObject<'a> { - fn from(values: Vec>) -> Self { + fn from(values: Vec, I>>) -> Self { RealizedObject::Sequence( values .into_iter() - .map(|v| v.into_iter().map(|(k, v)| (k.into(), v.into())).collect()) + .map(|v| v.into_iter().map(|(k, v)| (k, v.into())).collect()) .collect(), ) } } -impl From for RealizedObject { +impl From for RealizedObject<'static> { fn from(b: bool) -> Self { RealizedObject::Value(b.into()) } } -impl From for RealizedObject { +impl From for RealizedObject<'static> { fn from(u: usize) -> Self { let v = u.try_into().unwrap(); RealizedObject::Value(automerge::ScalarValue::Int(v)) } } -impl From for RealizedObject { +impl From for RealizedObject<'static> { fn from(s: automerge::ScalarValue) -> Self { RealizedObject::Value(s) } } -impl From<&str> for RealizedObject { +impl From<&str> for RealizedObject<'static> { fn from(s: &str) -> Self { RealizedObject::Value(automerge::ScalarValue::Str(s.into())) } diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 9bf8a0ea..4be44b97 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -29,24 +29,24 @@ fn no_change_on_repeated_map_set() { #[test] fn no_change_on_repeated_list_set() { let mut doc = new_doc(); - let list_id: ObjId = doc + let list_id = doc .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() .unwrap().into(); - doc.insert(list_id.clone(), 0, 1).unwrap(); - doc.set(list_id.clone(), 0, 1).unwrap(); + doc.insert(&list_id, 0, 1).unwrap(); + doc.set(&list_id, 0, 1).unwrap(); assert!(doc.set(list_id, 0, 1).unwrap().is_none()); } #[test] fn no_change_on_list_insert_followed_by_set_of_same_value() { let mut 
doc = new_doc(); - let list_id: ObjId = doc + let list_id = doc .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() - .unwrap().into(); - doc.insert(list_id.clone(), 0, 1).unwrap(); - assert!(doc.set(list_id.clone(), 0, 1).unwrap().is_none()); + .unwrap(); + doc.insert(&list_id, 0, 1).unwrap(); + assert!(doc.set(&list_id, 0, 1).unwrap().is_none()); } #[test] @@ -75,15 +75,15 @@ fn repeated_map_assignment_which_resolves_conflict_not_ignored() { fn repeated_list_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id: ObjId = doc1 + let list_id = doc1 .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() - .unwrap().into(); - doc1.insert(list_id.clone(), 0, 123).unwrap(); + .unwrap(); + doc1.insert(&list_id, 0, 123).unwrap(); doc2.merge(&mut doc1); - doc2.set(list_id.clone(), 0, 456).unwrap().unwrap(); + doc2.set(&list_id, 0, 456).unwrap().unwrap(); doc1.merge(&mut doc2); - let doc1_op = doc1.set(list_id.clone(), 0, 789).unwrap().unwrap(); + let doc1_op = doc1.set(&list_id, 0, 789).unwrap().unwrap(); assert_doc!( &doc1, @@ -100,14 +100,14 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { #[test] fn list_deletion() { let mut doc = new_doc(); - let list_id: ObjId = doc + let list_id = doc .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() - .unwrap().into(); - let op1 = doc.insert(list_id.clone(), 0, 123).unwrap(); - doc.insert(list_id.clone(), 1, 456).unwrap(); - let op3 = doc.insert(list_id.clone(), 2, 789).unwrap(); - doc.del(list_id.clone(), 1).unwrap(); + .unwrap(); + let op1 = doc.insert(&list_id, 0, 123).unwrap(); + doc.insert(&list_id, 1, 456).unwrap(); + let op3 = doc.insert(&list_id.clone(), 2, 789).unwrap(); + doc.del(&list_id, 1).unwrap(); assert_doc!( &doc, map! 
{ @@ -124,10 +124,10 @@ fn merge_concurrent_map_prop_updates() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let op1 = doc1.set(ObjId::Root, "foo", "bar").unwrap().unwrap(); - let hello: ObjId = doc2 + let hello = doc2 .set(ObjId::Root, "hello", "world") .unwrap() - .unwrap().into(); + .unwrap(); doc1.merge(&mut doc2); assert_eq!( doc1.value(ObjId::Root, "foo").unwrap().unwrap().0, @@ -136,8 +136,8 @@ fn merge_concurrent_map_prop_updates() { assert_doc!( &doc1, map! { - "foo" => { op1.clone() => "bar" }, - "hello" => { hello.clone() => "world" }, + "foo" => { op1 => "bar" }, + "hello" => { hello => "world" }, } ); doc2.merge(&mut doc1); @@ -230,14 +230,14 @@ fn concurrent_updates_of_same_field() { fn concurrent_updates_of_same_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id: ObjId = doc1 + let list_id = doc1 .set(ObjId::Root, "birds", automerge::Value::list()) .unwrap() - .unwrap().into(); + .unwrap(); doc1.insert(list_id.clone(), 0, "finch").unwrap(); doc2.merge(&mut doc1); - let set_one_op = doc1.set(list_id.clone(), 0, "greenfinch").unwrap().unwrap(); - let set_op_two = doc2.set(list_id.clone(), 0, "goldfinch").unwrap().unwrap(); + let set_one_op = doc1.set(&list_id, 0, "greenfinch").unwrap().unwrap(); + let set_op_two = doc2.set(&list_id, 0, "goldfinch").unwrap().unwrap(); doc1.merge(&mut doc2); @@ -295,11 +295,11 @@ fn changes_within_conflicting_map_field() { .set(ObjId::Root, "field", "string") .unwrap() .unwrap(); - let map_id: ObjId = doc2 + let map_id = doc2 .set(ObjId::Root, "field", automerge::Value::map()) .unwrap() - .unwrap().into(); - let set_in_doc2 = doc2.set(map_id.clone(), "innerKey", 42).unwrap().unwrap(); + .unwrap(); + let set_in_doc2 = doc2.set(&map_id, "innerKey", 42).unwrap().unwrap(); doc1.merge(&mut doc2); assert_doc!( @@ -326,23 +326,23 @@ fn changes_within_conflicting_list_element() { .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() .unwrap(); - doc1.insert(list_id.clone(), 0, 
"hello").unwrap(); + doc1.insert(&list_id, 0, "hello").unwrap(); doc2.merge(&mut doc1); let map_in_doc1 = doc1 - .set(list_id.clone(), 0, automerge::Value::map()) + .set(&list_id, 0, automerge::Value::map()) .unwrap() .unwrap(); - let set_map1 = doc1.set(map_in_doc1.clone(), "map1", true).unwrap().unwrap(); - let set_key1 = doc1.set(map_in_doc1.clone(), "key", 1).unwrap().unwrap(); + let set_map1 = doc1.set(&map_in_doc1, "map1", true).unwrap().unwrap(); + let set_key1 = doc1.set(&map_in_doc1, "key", 1).unwrap().unwrap(); let map_in_doc2 = doc2 - .set(list_id.clone(), 0, automerge::Value::map()) + .set(&list_id, 0, automerge::Value::map()) .unwrap() .unwrap(); doc1.merge(&mut doc2); - let set_map2 = doc2.set(map_in_doc2.clone(), "map2", true).unwrap().unwrap(); - let set_key2 = doc2.set(map_in_doc2.clone(), "key", 2).unwrap().unwrap(); + let set_map2 = doc2.set(&map_in_doc2, "map2", true).unwrap().unwrap(); + let set_key2 = doc2.set(&map_in_doc2, "key", 2).unwrap().unwrap(); doc1.merge(&mut doc2); @@ -419,11 +419,11 @@ fn concurrent_insertions_at_different_list_positions() { .unwrap() .unwrap(); - let one = doc1.insert(list_id.clone(), 0, "one").unwrap(); - let three = doc1.insert(list_id.clone(), 1, "three").unwrap(); + let one = doc1.insert(&list_id, 0, "one").unwrap(); + let three = doc1.insert(&list_id, 1, "three").unwrap(); doc2.merge(&mut doc1); - let two = doc1.splice(list_id.clone(), 1, 0, vec!["two".into()]).unwrap()[0].clone(); - let four = doc2.insert(list_id.clone(), 2, "four").unwrap(); + let two = doc1.splice(&list_id, 1, 0, vec!["two".into()]).unwrap()[0].clone(); + let four = doc2.insert(&list_id, 2, "four").unwrap(); doc1.merge(&mut doc2); @@ -453,11 +453,11 @@ fn concurrent_insertions_at_same_list_position() { .set(ObjId::Root, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let parakeet = doc1.insert(list_id.clone(), 0, "parakeet").unwrap(); + let parakeet = doc1.insert(&list_id, 0, "parakeet").unwrap(); doc2.merge(&mut doc1); - let 
starling = doc1.insert(list_id.clone(), 1, "starling").unwrap(); - let chaffinch = doc2.insert(list_id.clone(), 1, "chaffinch").unwrap(); + let starling = doc1.insert(&list_id, 1, "starling").unwrap(); + let chaffinch = doc2.insert(&list_id, 1, "chaffinch").unwrap(); doc1.merge(&mut doc2); assert_doc!( @@ -512,21 +512,21 @@ fn concurrent_assignment_and_deletion_of_list_entry() { .set(ObjId::Root, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let blackbird = doc1.insert(list_id.clone(), 0, "blackbird").unwrap(); - doc1.insert(list_id.clone(), 1, "thrush").unwrap(); - let goldfinch = doc1.insert(list_id.clone(), 2, "goldfinch").unwrap(); + let blackbird = doc1.insert(&list_id, 0, "blackbird").unwrap(); + doc1.insert(&list_id, 1, "thrush").unwrap(); + let goldfinch = doc1.insert(&list_id, 2, "goldfinch").unwrap(); doc2.merge(&mut doc1); - let starling = doc1.set(list_id.clone(), 1, "starling").unwrap().unwrap(); + let starling = doc1.set(&list_id, 1, "starling").unwrap().unwrap(); - doc2.del(list_id.clone(), 1).unwrap(); + doc2.del(&list_id, 1).unwrap(); assert_doc!( &doc2, map! { - "birds" => {list_id.clone() => list![ - { blackbird.clone() => "blackbird"}, - { goldfinch.clone() => "goldfinch"}, + "birds" => {list_id => list![ + { blackbird => "blackbird"}, + { goldfinch => "goldfinch"}, ]} } ); @@ -535,9 +535,9 @@ fn concurrent_assignment_and_deletion_of_list_entry() { &doc1, map! 
{ "birds" => {list_id.clone() => list![ - { blackbird.clone() => "blackbird" }, + { blackbird => "blackbird" }, { starling.clone() => "starling" }, - { goldfinch.clone() => "goldfinch" }, + { goldfinch => "goldfinch" }, ]} } ); @@ -566,15 +566,15 @@ fn insertion_after_a_deleted_list_element() { .unwrap(); let blackbird = doc1.insert(list_id.clone(), 0, "blackbird").unwrap(); - doc1.insert(list_id.clone(), 1, "thrush").unwrap(); - doc1.insert(list_id.clone(), 2, "goldfinch").unwrap(); + doc1.insert(&list_id, 1, "thrush").unwrap(); + doc1.insert(&list_id, 2, "goldfinch").unwrap(); doc2.merge(&mut doc1); - doc1.splice(list_id.clone(), 1, 2, Vec::new()).unwrap(); + doc1.splice(&list_id, 1, 2, Vec::new()).unwrap(); let starling = doc2 - .splice(list_id.clone(), 2, 0, vec!["starling".into()]) + .splice(&list_id, 2, 0, vec!["starling".into()]) .unwrap()[0].clone(); doc1.merge(&mut doc2); @@ -582,9 +582,9 @@ fn insertion_after_a_deleted_list_element() { assert_doc!( &doc1, map! { - "birds" => {list_id.clone() => list![ - { blackbird.clone() => "blackbird" }, - { starling.clone() => "starling" } + "birds" => {list_id => list![ + { blackbird => "blackbird" }, + { starling => "starling" } ]} } ); @@ -611,14 +611,14 @@ fn concurrent_deletion_of_same_list_element() { .unwrap(); let albatross = doc1.insert(list_id.clone(), 0, "albatross").unwrap(); - doc1.insert(list_id.clone(), 1, "buzzard").unwrap(); - let cormorant = doc1.insert(list_id.clone(), 2, "cormorant").unwrap(); + doc1.insert(&list_id, 1, "buzzard").unwrap(); + let cormorant = doc1.insert(&list_id, 2, "cormorant").unwrap(); doc2.merge(&mut doc1); - doc1.del(list_id.clone(), 1).unwrap(); + doc1.del(&list_id, 1).unwrap(); - doc2.del(list_id.clone(), 1).unwrap(); + doc2.del(&list_id, 1).unwrap(); doc1.merge(&mut doc2); @@ -654,23 +654,23 @@ fn concurrent_updates_at_different_levels() { .unwrap() .unwrap(); let birds = doc1 - .set(animals.clone(), "birds", automerge::Value::map()) + .set(&animals, "birds", 
automerge::Value::map()) .unwrap() .unwrap(); - doc1.set(birds.clone(), "pink", "flamingo").unwrap().unwrap(); - doc1.set(birds.clone(), "black", "starling").unwrap().unwrap(); + doc1.set(&birds, "pink", "flamingo").unwrap().unwrap(); + doc1.set(&birds, "black", "starling").unwrap().unwrap(); let mammals = doc1 - .set(animals.clone(), "mammals", automerge::Value::list()) + .set(&animals, "mammals", automerge::Value::list()) .unwrap() .unwrap(); - let badger = doc1.insert(mammals.clone(), 0, "badger").unwrap(); + let badger = doc1.insert(&mammals, 0, "badger").unwrap(); doc2.merge(&mut doc1); - doc1.set(birds, "brown", "sparrow").unwrap().unwrap(); + doc1.set(&birds, "brown", "sparrow").unwrap().unwrap(); - doc2.del(animals, "birds").unwrap(); + doc2.del(&animals, "birds").unwrap(); doc1.merge(&mut doc2); assert_obj!( @@ -679,7 +679,7 @@ fn concurrent_updates_at_different_levels() { "animals", map! { "mammals" => { - mammals.clone() => list![{ badger.clone() => "badger" }], + mammals => list![{ badger => "badger" }], } } ); @@ -706,16 +706,16 @@ fn concurrent_updates_of_concurrently_deleted_objects() { .unwrap() .unwrap(); let blackbird = doc1 - .set(birds.clone(), "blackbird", automerge::Value::map()) + .set(&birds, "blackbird", automerge::Value::map()) .unwrap() .unwrap(); - doc1.set(blackbird.clone(), "feathers", "black").unwrap().unwrap(); + doc1.set(&blackbird, "feathers", "black").unwrap().unwrap(); doc2.merge(&mut doc1); - doc1.del(birds.clone(), "blackbird").unwrap(); + doc1.del(&birds, "blackbird").unwrap(); - doc2.set(blackbird.clone(), "beak", "orange").unwrap(); + doc2.set(&blackbird, "beak", "orange").unwrap(); doc1.merge(&mut doc2); @@ -743,7 +743,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { let doc1elems = doc1 .splice( - wisdom.clone(), + &wisdom, 0, 0, vec![ @@ -758,7 +758,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { let doc2elems = doc2 .splice( - wisdom.clone(), + &wisdom, 0, 0, vec![ @@ -777,16 
+777,16 @@ fn does_not_interleave_sequence_insertions_at_same_position() { &doc1, map! { "wisdom" => {wisdom => list![ - {doc1elems[0].clone() => "to"}, - {doc1elems[1].clone() => "be"}, - {doc1elems[2].clone() => "is"}, - {doc1elems[3].clone() => "to"}, - {doc1elems[4].clone() => "do"}, - {doc2elems[0].clone() => "to"}, - {doc2elems[1].clone() => "do"}, - {doc2elems[2].clone() => "is"}, - {doc2elems[3].clone() => "to"}, - {doc2elems[4].clone() => "be"}, + {doc1elems[0] => "to"}, + {doc1elems[1] => "be"}, + {doc1elems[2] => "is"}, + {doc1elems[3] => "to"}, + {doc1elems[4] => "do"}, + {doc2elems[0] => "to"}, + {doc2elems[1] => "do"}, + {doc2elems[2] => "is"}, + {doc2elems[3] => "to"}, + {doc2elems[4] => "be"}, ]} } ); @@ -803,10 +803,10 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id( .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() .unwrap(); - let two = doc1.insert(list.clone(), 0, "two").unwrap(); + let two = doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1); - let one = doc2.insert(list.clone(), 0, "one").unwrap(); + let one = doc2.insert(&list, 0, "one").unwrap(); assert_doc!( &doc2, map! { @@ -829,10 +829,10 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() .unwrap(); - let two = doc1.insert(list.clone(), 0, "two").unwrap(); + let two = doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1); - let one = doc2.insert(list.clone(), 0, "one").unwrap(); + let one = doc2.insert(&list, 0, "one").unwrap(); assert_doc!( &doc2, map! 
{ @@ -853,13 +853,13 @@ fn insertion_consistent_with_causality() { .set(ObjId::Root, "list", automerge::Value::list()) .unwrap() .unwrap(); - let four = doc1.insert(list.clone(), 0, "four").unwrap(); + let four = doc1.insert(&list, 0, "four").unwrap(); doc2.merge(&mut doc1); - let three = doc2.insert(list.clone(), 0, "three").unwrap(); + let three = doc2.insert(&list, 0, "three").unwrap(); doc1.merge(&mut doc2); - let two = doc1.insert(list.clone(), 0, "two").unwrap(); + let two = doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1); - let one = doc2.insert(list.clone(), 0, "one").unwrap(); + let one = doc2.insert(&list, 0, "one").unwrap(); assert_doc!( &doc2, @@ -878,14 +878,14 @@ fn insertion_consistent_with_causality() { fn should_handle_arbitrary_depth_nesting() { let mut doc1 = new_doc(); let a = doc1.set(ObjId::Root, "a", automerge::Value::map()).unwrap().unwrap(); - let b = doc1.set(a.clone(), "b", automerge::Value::map()).unwrap().unwrap(); - let c = doc1.set(b.clone(), "c", automerge::Value::map()).unwrap().unwrap(); - let d = doc1.set(c.clone(), "d", automerge::Value::map()).unwrap().unwrap(); - let e = doc1.set(d.clone(), "e", automerge::Value::map()).unwrap().unwrap(); - let f = doc1.set(e.clone(), "f", automerge::Value::map()).unwrap().unwrap(); - let g = doc1.set(f.clone(), "g", automerge::Value::map()).unwrap().unwrap(); - let h = doc1.set(g.clone(), "h", "h").unwrap().unwrap(); - let j = doc1.set(f.clone(), "i", "j").unwrap().unwrap(); + let b = doc1.set(&a, "b", automerge::Value::map()).unwrap().unwrap(); + let c = doc1.set(&b, "c", automerge::Value::map()).unwrap().unwrap(); + let d = doc1.set(&c, "d", automerge::Value::map()).unwrap().unwrap(); + let e = doc1.set(&d, "e", automerge::Value::map()).unwrap().unwrap(); + let f = doc1.set(&e, "f", automerge::Value::map()).unwrap().unwrap(); + let g = doc1.set(&f, "g", automerge::Value::map()).unwrap().unwrap(); + let h = doc1.set(&g, "h", "h").unwrap().unwrap(); + let j = doc1.set(&f, "i", 
"j").unwrap().unwrap(); assert_doc!( &doc1, @@ -929,7 +929,7 @@ fn save_restore_complex() { .unwrap(); let first_todo = doc1.insert(todos.clone(), 0, automerge::Value::map()).unwrap(); - doc1.set(first_todo.clone(), "title", "water plants") + doc1.set(&first_todo, "title", "water plants") .unwrap() .unwrap(); let first_done = doc1.set(first_todo.clone(), "done", false).unwrap().unwrap(); @@ -942,7 +942,7 @@ fn save_restore_complex() { .unwrap(); let kill_title = doc1 - .set(first_todo.clone(), "title", "kill plants") + .set(&first_todo, "title", "kill plants") .unwrap() .unwrap(); doc1.merge(&mut doc2); From 1f50c386b8429fb7ff48811358034356ecfb3011 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 27 Dec 2021 16:57:24 +0000 Subject: [PATCH 004/730] Benches --- automerge/src/lib.rs | 122 ++++++++++++++++++++++++++++--------- edit-trace/benches/main.rs | 6 +- edit-trace/src/main.rs | 4 +- 3 files changed, 98 insertions(+), 34 deletions(-) diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index b7a3714f..4e07a02e 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -40,13 +40,13 @@ mod sync; mod visualisation; mod error; +mod external_types; mod op_set; mod op_tree; mod query; mod types; mod value; -mod external_types; -pub use external_types::{ExternalOpId as OpId, ExternalObjId as ObjId}; +pub use external_types::{ExternalObjId as ObjId, ExternalOpId as OpId}; use change::{encode_document, export_change}; use clock::Clock; @@ -60,10 +60,8 @@ pub use change::{decode_change, Change}; pub use error::AutomergeError; pub use legacy::Change as ExpandedChange; pub use sync::{BloomFilter, SyncHave, SyncMessage, SyncState}; -pub use types::{ - ActorId, ChangeHash, ObjType, OpType, Patch, Peer, Prop, -}; -use types::{OpId as InternalOpId, Export, Exportable, Importable}; +pub use types::{ActorId, ChangeHash, ObjType, OpType, Patch, Peer, Prop}; +use types::{Export, Exportable, Importable, OpId as InternalOpId}; pub use value::{ScalarValue, Value}; 
#[derive(Debug, Clone)] @@ -201,7 +199,8 @@ impl Automerge { pub fn ensure_transaction_closed(&mut self) { if let Some(tx) = self.transaction.take() { - let change = export_change(&tx, &self.ops.m.borrow().actors, &self.ops.m.borrow().props); + let change = + export_change(&tx, &self.ops.m.borrow().actors, &self.ops.m.borrow().props); self.update_history(change); } } @@ -316,6 +315,7 @@ impl Automerge { let value = value.into(); let obj = self.import_objid(obj.into()); self.local_op(obj.into(), prop.into(), value.into()) + .map(|o| o.map(|o| self.export_opid(&o).unwrap())) } pub fn insert<'a, O: Into>, V: Into>( @@ -325,6 +325,16 @@ impl Automerge { value: V, ) -> Result { let obj = self.import_objid(obj.into()).into(); + let internal_op = self.insert_internal(obj, index, value)?; + Ok(self.export_opid(&internal_op).unwrap()) + } + + fn insert_internal>( + &mut self, + obj: InternalObjId, + index: usize, + value: V, + ) -> Result { let id = self.next_id(); let query = self.ops.search(obj, query::InsertNth::new(index)); @@ -346,7 +356,7 @@ impl Automerge { self.ops.insert(query.pos, op.clone()); self.tx().operations.push(op); - Ok(self.export_opid(&id).unwrap()) + Ok(id) } pub fn inc<'a, O: Into>, P: Into>( @@ -355,17 +365,35 @@ impl Automerge { prop: P, value: i64, ) -> Result { - match self.local_op(self.import_objid(obj.into()).into(), prop.into(), OpType::Inc(value))? { - Some(opid) => Ok(opid), + match self.local_op( + self.import_objid(obj.into()).into(), + prop.into(), + OpType::Inc(value), + )? 
{ + Some(opid) => Ok(self.export_opid(&opid).unwrap()), None => { panic!("increment should always create a new op") } } } - pub fn del<'a, O: Into>, P: Into>(&mut self, obj: O, prop: P) -> Result { + pub fn del<'a, O: Into>, P: Into>( + &mut self, + obj: O, + prop: P, + ) -> Result { + let obj = self.import_objid(obj); + self.del_internal(obj, prop) + .map(|o| self.export_opid(&o).unwrap()) + } + + fn del_internal>( + &mut self, + obj: InternalObjId, + prop: P, + ) -> Result { // TODO: Should we also no-op multiple delete operations? - match self.local_op(self.import_objid(obj.into()).into(), prop.into(), OpType::Del)? { + match self.local_op(obj, prop.into(), OpType::Del)? { Some(opid) => Ok(opid), None => { panic!("delete should always create a new op") @@ -382,16 +410,20 @@ impl Automerge { del: usize, vals: Vec, ) -> Result, AutomergeError> { - let obj = obj.into(); + let obj = self.import_objid(obj); for _ in 0..del { - self.del(obj.clone(), pos)?; + self.del_internal(obj, pos)?; } let mut result = Vec::with_capacity(vals.len()); for v in vals { - result.push(self.insert(obj.clone(), pos, v)?); + result.push(self.insert_internal(obj, pos, v)?); pos += 1; } - Ok(result) + let exported = result + .into_iter() + .map(|o| self.export_opid(&o).unwrap()) + .collect::>(); + Ok(exported) } pub fn splice_text<'a, O: Into>>( @@ -420,7 +452,11 @@ impl Automerge { Ok(buffer) } - pub fn text_at<'a, O: Into>>(&self, obj: O, heads: &[ChangeHash]) -> Result { + pub fn text_at<'a, O: Into>>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result { let clock = self.clock_at(heads); let obj = self.import_objid(obj.into()).into(); let query = self.ops.search(obj, query::ListValsAt::new(clock)); @@ -564,7 +600,7 @@ impl Automerge { obj: InternalObjId, prop: Prop, action: OpType, - ) -> Result, AutomergeError> { + ) -> Result, AutomergeError> { match prop { Prop::Map(s) => self.local_map_op(obj, s, action), Prop::Seq(n) => self.local_list_op(obj, n, action), @@ -576,7 +612,7 @@ 
impl Automerge { obj: InternalObjId, prop: String, action: OpType, - ) -> Result, AutomergeError> { + ) -> Result, AutomergeError> { if prop.is_empty() { return Err(AutomergeError::EmptyStringKey); } @@ -615,7 +651,7 @@ impl Automerge { self.insert_local_op(op, query.pos, &query.ops_pos); - Ok(Some(self.export_opid(&id).unwrap())) + Ok(Some(id)) } fn local_list_op( @@ -623,7 +659,7 @@ impl Automerge { obj: InternalObjId, index: usize, action: OpType, - ) -> Result, AutomergeError> { + ) -> Result, AutomergeError> { let query = self.ops.search(obj, query::Nth::new(index)); let id = self.next_id(); @@ -658,7 +694,7 @@ impl Automerge { self.insert_local_op(op, query.pos, &query.ops_pos); - Ok(Some(self.export_opid(&id).unwrap())) + Ok(Some(id)) } fn is_causally_ready(&self, change: &Change) -> bool { @@ -684,7 +720,12 @@ impl Automerge { .iter_ops() .enumerate() .map(|(i, c)| { - let actor = self.ops.m.borrow_mut().actors.cache(change.actor_id().clone()); + let actor = self + .ops + .m + .borrow_mut() + .actors + .cache(change.actor_id().clone()); let id = InternalOpId::new(change.start_op + i as u64, actor); // FIXME dont need to_string() let obj: InternalObjId = self.import(&c.obj.to_string()).unwrap(); @@ -694,7 +735,9 @@ impl Automerge { .map(|i| self.import(&i.to_string()).unwrap()) .collect(); let key = match &c.key { - legacy::Key::Map(n) => Key::Map(self.ops.m.borrow_mut().props.cache(n.to_string())), + legacy::Key::Map(n) => { + Key::Map(self.ops.m.borrow_mut().props.cache(n.to_string())) + } legacy::Key::Seq(legacy::ElementId::Head) => Key::Seq(HEAD), // FIXME dont need to_string() legacy::Key::Seq(legacy::ElementId::Id(i)) => { @@ -939,7 +982,13 @@ impl Automerge { to_see.push(*h); } } - let actor = self.ops.m.borrow().actors.lookup(c.actor_id().clone()).unwrap(); + let actor = self + .ops + .m + .borrow() + .actors + .lookup(c.actor_id().clone()) + .unwrap(); clock.include(actor, c.max_op()); seen.insert(hash); } @@ -1015,7 +1064,13 @@ impl Automerge { let 
history_index = self.history.len(); self.states - .entry(self.ops.m.borrow_mut().actors.cache(change.actor_id().clone())) + .entry( + self.ops + .m + .borrow_mut() + .actors + .cache(change.actor_id().clone()), + ) .or_default() .push(history_index); @@ -1074,7 +1129,11 @@ impl Automerge { pub(crate) fn export(&self, id: E) -> String { match id.export() { - Export::Id(id) => format!("{}@{}", id.counter(), self.ops.m.borrow().actors[id.actor()]), + Export::Id(id) => format!( + "{}@{}", + id.counter(), + self.ops.m.borrow().actors[id.actor()] + ), Export::Prop(index) => self.ops.m.borrow().props[index].clone(), Export::Special(s) => s, } @@ -1171,7 +1230,10 @@ mod tests { fn test_list() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); - let list_id = doc.set(ObjId::Root, "items", Value::list())?.unwrap().into(); + let list_id = doc + .set(ObjId::Root, "items", Value::list())? + .unwrap() + .into(); doc.set(ObjId::Root, "zzz", "zzzval")?; assert!(doc.value(ObjId::Root, "items")?.unwrap().1 == list_id); doc.insert(&list_id, 0, "a")?; @@ -1201,7 +1263,9 @@ mod tests { #[test] fn test_inc() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); - let id = doc.set(ObjId::Root, "counter", Value::counter(10))?.unwrap(); + let id = doc + .set(ObjId::Root, "counter", Value::counter(10))? + .unwrap(); assert!(doc.value(ObjId::Root, "counter")? == Some((Value::counter(10), id.clone()))); doc.inc(ObjId::Root, "counter", 10)?; assert!(doc.value(ObjId::Root, "counter")? 
== Some((Value::counter(20), id.clone()))); diff --git a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs index fed72f1e..fb5540f7 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -1,13 +1,13 @@ -use automerge::{Automerge, Value, ROOT}; +use automerge::{Automerge, Value, ObjId}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; use std::fs; fn replay_trace(commands: Vec<(usize, usize, Vec)>) -> Automerge { let mut doc = Automerge::new(); - let text = doc.set(ROOT, "text", Value::text()).unwrap().unwrap(); + let text = doc.set(ObjId::Root, "text", Value::text()).unwrap().unwrap(); for (pos, del, vals) in commands { - doc.splice(text, pos, del, vals).unwrap(); + doc.splice(&text, pos, del, vals).unwrap(); } doc.commit(None, None); doc diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index 7e2284af..85b437b7 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -19,12 +19,12 @@ fn main() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let now = Instant::now(); - let text: ObjId = doc.set(ObjId::Root, "text", Value::text()).unwrap().unwrap().into(); + let text = doc.set(ObjId::Root, "text", Value::text()).unwrap().unwrap(); for (i, (pos, del, vals)) in commands.into_iter().enumerate() { if i % 1000 == 0 { println!("Processed {} edits in {} ms", i, now.elapsed().as_millis()); } - doc.splice(text.clone(), pos, del, vals)?; + doc.splice(&text, pos, del, vals)?; } let _ = doc.save(); println!("Done in {} ms", now.elapsed().as_millis()); From 8441fccea2ffe81faf50fe0d8c8188627458ee15 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 27 Dec 2021 18:13:51 +0000 Subject: [PATCH 005/730] Add LRU cache for external object ID lookup --- automerge/Cargo.toml | 1 + automerge/src/external_types.rs | 9 +++++--- automerge/src/lib.rs | 2 +- automerge/src/op_set.rs | 39 ++++++++++++++++++++++++++++++++- 4 files changed, 46 insertions(+), 5 deletions(-) diff --git 
a/automerge/Cargo.toml b/automerge/Cargo.toml index 6a0f81e7..d05dfe6d 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -26,6 +26,7 @@ tinyvec = { version = "^1.5.1", features = ["alloc"] } unicode-segmentation = "1.7.1" serde = { version = "^1.0", features=["derive"] } dot = { version = "0.1.4", optional = true } +lru = "^0.7.1" [dependencies.web-sys] version = "^0.3.55" diff --git a/automerge/src/external_types.rs b/automerge/src/external_types.rs index 197a3011..edaeef7d 100644 --- a/automerge/src/external_types.rs +++ b/automerge/src/external_types.rs @@ -21,9 +21,12 @@ impl ExternalOpId { }) } - pub(crate) fn into_opid(&self, metadata: &mut OpSetMetadata) -> OpId { - let actor = metadata.actors.cache(self.actor.clone()); - OpId::new(self.counter, actor) + pub(crate) fn counter(&self) -> u64 { + self.counter + } + + pub(crate) fn actor(&self) -> &ActorId { + &self.actor } } diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 4e07a02e..dab21f23 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -1110,7 +1110,7 @@ impl Automerge { } fn import_opid(&self, opid: &OpId) -> InternalOpId { - opid.into_opid(&mut *self.ops.m.borrow_mut()) + self.ops.m.borrow_mut().import_opid(opid) } fn export_opid(&self, opid: &InternalOpId) -> Option { diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 923b8cad..fc6dd82c 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -1,11 +1,15 @@ use crate::op_tree::OpTreeInternal; use crate::query::TreeQuery; use crate::{ActorId, IndexedCache, Key, types::{ObjId, OpId}, Op}; +use crate::external_types::ExternalOpId; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::HashMap; use std::rc::Rc; use std::cell::RefCell; +use std::fmt::Debug; + +const EXTERNAL_OP_CACHE_SIZE: usize = 100; pub(crate) type OpSet = OpSetInternal<16>; @@ -26,6 +30,7 @@ impl OpSetInternal { m: Rc::new(RefCell::new(OpSetMetadata { actors: IndexedCache::new(), props: 
IndexedCache::new(), + external_op_cache: lru::LruCache::with_hasher(EXTERNAL_OP_CACHE_SIZE, FxBuildHasher::default()) })), } } @@ -150,12 +155,33 @@ impl<'a, const B: usize> Iterator for Iter<'a, B> { } } -#[derive(Clone, Debug)] pub(crate) struct OpSetMetadata { pub actors: IndexedCache, pub props: IndexedCache, + external_op_cache: lru::LruCache, } +impl Debug for OpSetMetadata { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("OpSetMetadata") + .field("actors", &self.actors) + .field("props", &self.props) + .field("external_op_cache", &format_args!("LruCache with {} keys", self.external_op_cache.len())) + .finish() + } +} + +impl Clone for OpSetMetadata { + fn clone(&self) -> Self { + OpSetMetadata { + actors: self.actors.clone(), + props: self.props.clone(), + external_op_cache: lru::LruCache::with_hasher(EXTERNAL_OP_CACHE_SIZE, FxBuildHasher::default()), + } + } +} + + impl OpSetMetadata { pub fn key_cmp(&self, left: &Key, right: &Key) -> Ordering { match (left, right) { @@ -167,6 +193,17 @@ impl OpSetMetadata { pub fn lamport_cmp(&self, left: S, right: S) -> Ordering { S::cmp(self, left, right) } + + pub fn import_opid(&mut self, ext_opid: &ExternalOpId) -> OpId { + if let Some(opid) = self.external_op_cache.get(ext_opid) { + *opid + } else { + let actor = self.actors.cache(ext_opid.actor().clone()); + let opid = OpId::new(ext_opid.counter(), actor); + self.external_op_cache.put(ext_opid.clone(), opid); + opid + } + } } /// Lamport timestamps which don't contain their actor ID directly and therefore need access to From b9624f5f65cdfaf7e3ab65a35f9546904331c4ef Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 27 Dec 2021 18:44:30 +0000 Subject: [PATCH 006/730] Cache the last object ID used --- automerge/Cargo.toml | 1 - automerge/src/op_set.rs | 44 ++++++++++++----------------------------- 2 files changed, 13 insertions(+), 32 deletions(-) diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index 
d05dfe6d..6a0f81e7 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -26,7 +26,6 @@ tinyvec = { version = "^1.5.1", features = ["alloc"] } unicode-segmentation = "1.7.1" serde = { version = "^1.0", features=["derive"] } dot = { version = "0.1.4", optional = true } -lru = "^0.7.1" [dependencies.web-sys] version = "^0.3.55" diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index fc6dd82c..c814c069 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -9,8 +9,6 @@ use std::rc::Rc; use std::cell::RefCell; use std::fmt::Debug; -const EXTERNAL_OP_CACHE_SIZE: usize = 100; - pub(crate) type OpSet = OpSetInternal<16>; #[derive(Debug, Clone)] @@ -30,7 +28,7 @@ impl OpSetInternal { m: Rc::new(RefCell::new(OpSetMetadata { actors: IndexedCache::new(), props: IndexedCache::new(), - external_op_cache: lru::LruCache::with_hasher(EXTERNAL_OP_CACHE_SIZE, FxBuildHasher::default()) + last_opid: None, })), } } @@ -155,30 +153,13 @@ impl<'a, const B: usize> Iterator for Iter<'a, B> { } } +#[derive(Debug, Clone)] pub(crate) struct OpSetMetadata { pub actors: IndexedCache, pub props: IndexedCache, - external_op_cache: lru::LruCache, -} - -impl Debug for OpSetMetadata { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("OpSetMetadata") - .field("actors", &self.actors) - .field("props", &self.props) - .field("external_op_cache", &format_args!("LruCache with {} keys", self.external_op_cache.len())) - .finish() - } -} - -impl Clone for OpSetMetadata { - fn clone(&self) -> Self { - OpSetMetadata { - actors: self.actors.clone(), - props: self.props.clone(), - external_op_cache: lru::LruCache::with_hasher(EXTERNAL_OP_CACHE_SIZE, FxBuildHasher::default()), - } - } + // For the common case of many subsequent operations on the same object we cache the last + // object we looked up + last_opid: Option<(ExternalOpId, OpId)>, } @@ -195,14 +176,15 @@ impl OpSetMetadata { } pub fn import_opid(&mut self, ext_opid: 
&ExternalOpId) -> OpId { - if let Some(opid) = self.external_op_cache.get(ext_opid) { - *opid - } else { - let actor = self.actors.cache(ext_opid.actor().clone()); - let opid = OpId::new(ext_opid.counter(), actor); - self.external_op_cache.put(ext_opid.clone(), opid); - opid + if let Some((last_ext, last_int)) = &self.last_opid { + if last_ext == ext_opid { + return *last_int; + } } + let actor = self.actors.cache(ext_opid.actor().clone()); + let opid = OpId::new(ext_opid.counter(), actor); + self.last_opid = Some((ext_opid.clone(), opid)); + opid } } From 6932bdff0885dfa0c770bc6bb8531e8bc90998e7 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 29 Dec 2021 14:16:15 -0500 Subject: [PATCH 007/730] package.json can run on windows now --- automerge-wasm/package.json | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 152bb592..79a0781d 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -17,14 +17,15 @@ ], "main": "./dev/index.js", "scripts": { - "build": "rm -rf dev && wasm-pack build --target nodejs --dev --out-name index -d dev", - "release": "rm -rf dev && wasm-pack build --target nodejs --release --out-name index -d dev && yarn opt", - "prof": "rm -rf dev && wasm-pack build --target nodejs --profiling --out-name index -d dev", + "build": "rimraf ./dev && wasm-pack build --target nodejs --dev --out-name index -d dev", + "release": "rimraf ./dev && wasm-pack build --target nodejs --release --out-name index -d dev && yarn opt", + "prof": "rimraf ./dev && wasm-pack build --target nodejs --profiling --out-name index -d dev", "opt": "wasm-opt -Oz dev/index_bg.wasm -o tmp.wasm && mv tmp.wasm dev/index_bg.wasm", "test": "yarn build && mocha --bail --full-trace" }, "dependencies": {}, "devDependencies": { - "mocha": "^9.1.3" + "mocha": "^9.1.3", + "rimraf": "^3.0.2" } } From de5332af05c09d734eca3574d8e4845e0daea9d3 Mon Sep 17 00:00:00 2001 From: Alex Good 
Date: Fri, 24 Dec 2021 23:00:59 +0000 Subject: [PATCH 008/730] Run the js_test in CI We add a script for running the js tests in `scripts/ci/js_tests`. This script can also be run locally. We move the `automerge-js` package to below the `automerge-wasm` crate as it is specifically testing the wasm interface. We also add an action to the github actions workflow for CI to run the js tests. --- .github/workflows/ci.yaml | 17 ++++++++++++++++- automerge-js/package.json | 2 +- scripts/ci/js_tests | 18 ++++++++++++++++++ scripts/ci/run | 1 + 4 files changed, 36 insertions(+), 2 deletions(-) create mode 100755 scripts/ci/js_tests diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 90d81636..b6e8dc31 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1,5 +1,11 @@ name: ci -on: [push, pull_request] +on: + push: + branches: + - experiment + pull_request: + branches: + - experiment jobs: fmt: runs-on: ubuntu-latest @@ -53,6 +59,15 @@ jobs: with: command: check ${{ matrix.checks }} + js_tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Install wasm-pack + run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh + - name: run tests + run: ./scripts/ci/js_tests + linux: runs-on: ubuntu-latest strategy: diff --git a/automerge-js/package.json b/automerge-js/package.json index 17018429..8742d99a 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -10,7 +10,7 @@ "mocha": "^9.1.1" }, "dependencies": { - "automerge-wasm": "file:../automerge-wasm", + "automerge-wasm": "file:../automerge-wasm/dev", "fast-sha256": "^1.3.0", "pako": "^2.0.4", "uuid": "^8.3" diff --git a/scripts/ci/js_tests b/scripts/ci/js_tests new file mode 100755 index 00000000..9b1d0e77 --- /dev/null +++ b/scripts/ci/js_tests @@ -0,0 +1,18 @@ +THIS_SCRIPT=$(dirname "$0"); +WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; +JS_PROJECT=$THIS_SCRIPT/../../automerge-js; + +yarn --cwd $WASM_PROJECT install; +# 
This will take care of running wasm-pack +yarn --cwd $WASM_PROJECT build; +# If the dependencies are already installed we delete automerge-wasm. This makes +# this script usable for iterative development. +if [ -d $JS_PROJECT/node_modules/automerge-wasm ]; then + rm -rf $JS_PROJECT/node_modules/automerge-wasm +fi +# --check-files forces yarn to check if the local dep has changed +yarn --cwd $JS_PROJECT install --check-files; +yarn --cwd $JS_PROJECT test; + + + diff --git a/scripts/ci/run b/scripts/ci/run index c03f2991..42367e10 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -6,3 +6,4 @@ set -eou pipefail ./scripts/ci/build-test ./scripts/ci/docs ./scripts/ci/advisory +./scripts/ci/js_tests From 3046cbab35faab023358d216ea4467711b30c473 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 31 Dec 2021 12:17:07 -0500 Subject: [PATCH 009/730] Replace the OpID API with an object ID Rather than returning an OpID for every mutation, we now return an `Option<ObjId>`. This is `Some` only when a `make*` operation was applied. This `ObjId` is an opaque type which can be used with any document.
--- automerge-wasm/src/lib.rs | 45 +- automerge/Cargo.toml | 1 + automerge/src/automerge.rs | 1338 +++++++++++++++++ automerge/src/change.rs | 13 +- automerge/src/clock.rs | 2 +- automerge/src/columnar.rs | 9 +- automerge/src/error.rs | 4 +- automerge/src/exid.rs | 33 + automerge/src/indexed_cache.rs | 4 +- automerge/src/legacy/mod.rs | 2 +- .../src/legacy/serde_impls/scalar_value.rs | 2 +- automerge/src/lib.rs | 1295 +--------------- automerge/src/op_set.rs | 3 +- automerge/src/op_tree.rs | 4 +- automerge/src/query.rs | 2 +- automerge/src/query/insert.rs | 3 +- automerge/src/query/keys.rs | 2 +- automerge/src/query/keys_at.rs | 2 +- automerge/src/query/len.rs | 6 +- automerge/src/query/len_at.rs | 2 +- automerge/src/query/list_vals.rs | 2 +- automerge/src/query/list_vals_at.rs | 2 +- automerge/src/query/nth.rs | 3 +- automerge/src/query/nth_at.rs | 2 +- automerge/src/query/prop.rs | 2 +- automerge/src/query/prop_at.rs | 2 +- automerge/src/query/seek_op.rs | 2 +- automerge/src/sync.rs | 3 +- automerge/src/types.rs | 78 +- automerge/src/value.rs | 3 +- automerge/src/visualisation.rs | 10 +- automerge/tests/helpers/mod.rs | 368 ++--- automerge/tests/test.rs | 557 +++---- edit-trace/benches/main.rs | 4 +- edit-trace/src/main.rs | 4 +- 35 files changed, 1872 insertions(+), 1942 deletions(-) create mode 100644 automerge/src/automerge.rs create mode 100644 automerge/src/exid.rs diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 80a3d65f..d0c6232d 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -1,6 +1,6 @@ extern crate web_sys; use automerge as am; -use automerge::{Change, ChangeHash, Prop, Value}; +use automerge::{Change, ChangeHash, ObjId, Prop, Value}; use js_sys::{Array, Object, Reflect, Uint8Array}; use serde::de::DeserializeOwned; use serde::Serialize; @@ -151,9 +151,9 @@ impl Automerge { pub fn keys(&mut self, obj: JsValue, heads: JsValue) -> Result { let obj = self.import(obj)?; let result = if let Some(heads) = 
get_heads(heads) { - self.0.keys_at(obj, &heads) + self.0.keys_at(&obj, &heads) } else { - self.0.keys(obj) + self.0.keys(&obj) } .iter() .map(|s| JsValue::from_str(s)) @@ -164,9 +164,9 @@ impl Automerge { pub fn text(&mut self, obj: JsValue, heads: JsValue) -> Result { let obj = self.import(obj)?; if let Some(heads) = get_heads(heads) { - self.0.text_at(obj, &heads) + self.0.text_at(&obj, &heads) } else { - self.0.text(obj) + self.0.text(&obj) } .map_err(to_js_err) .map(|t| t.into()) @@ -185,7 +185,7 @@ impl Automerge { let mut vals = vec![]; if let Some(t) = text.as_string() { self.0 - .splice_text(obj, start, delete_count, &t) + .splice_text(&obj, start, delete_count, &t) .map_err(to_js_err)?; } else { if let Ok(array) = text.dyn_into::() { @@ -201,7 +201,7 @@ impl Automerge { } } self.0 - .splice(obj, start, delete_count, vals) + .splice(&obj, start, delete_count, vals) .map_err(to_js_err)?; } Ok(()) @@ -223,9 +223,12 @@ impl Automerge { let value = self.import_value(value, datatype)?; let opid = self .0 - .insert(obj, index as usize, value) + .insert(&obj, index as usize, value) .map_err(to_js_err)?; - Ok(self.export(opid)) + match opid { + Some(opid) => Ok(self.export(opid)), + None => Ok(JsValue::null()), + } } pub fn set( @@ -238,7 +241,7 @@ impl Automerge { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; let value = self.import_value(value, datatype)?; - let opid = self.0.set(obj, prop, value).map_err(to_js_err)?; + let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; match opid { Some(opid) => Ok(self.export(opid)), None => Ok(JsValue::null()), @@ -252,7 +255,7 @@ impl Automerge { .as_f64() .ok_or("inc needs a numberic value") .map_err(to_js_err)?; - self.0.inc(obj, prop, value as i64).map_err(to_js_err)?; + self.0.inc(&obj, prop, value as i64).map_err(to_js_err)?; Ok(()) } @@ -263,9 +266,9 @@ impl Automerge { let heads = get_heads(heads); if let Ok(prop) = prop { let value = if let Some(h) = heads { - self.0.value_at(obj, prop, 
&h) + self.0.value_at(&obj, prop, &h) } else { - self.0.value(obj, prop) + self.0.value(&obj, prop) } .map_err(to_js_err)?; match value { @@ -289,9 +292,9 @@ impl Automerge { let prop = to_prop(arg); if let Ok(prop) = prop { let values = if let Some(heads) = get_heads(heads) { - self.0.values_at(obj, prop, &heads) + self.0.values_at(&obj, prop, &heads) } else { - self.0.values(obj, prop) + self.0.values(&obj, prop) } .map_err(to_js_err)?; for value in values { @@ -318,16 +321,16 @@ impl Automerge { pub fn length(&mut self, obj: JsValue, heads: JsValue) -> Result { let obj = self.import(obj)?; if let Some(heads) = get_heads(heads) { - Ok((self.0.length_at(obj, &heads) as f64).into()) + Ok((self.0.length_at(&obj, &heads) as f64).into()) } else { - Ok((self.0.length(obj) as f64).into()) + Ok((self.0.length(&obj) as f64).into()) } } pub fn del(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop = to_prop(prop)?; - self.0.del(obj, prop).map_err(to_js_err)?; + self.0.del(&obj, prop).map_err(to_js_err)?; Ok(()) } @@ -442,11 +445,11 @@ impl Automerge { } } - fn export(&self, val: E) -> JsValue { - self.0.export(val).into() + fn export(&self, val: ObjId) -> JsValue { + val.to_string().into() } - fn import(&self, id: JsValue) -> Result { + fn import(&self, id: JsValue) -> Result { let id_str = id .as_string() .ok_or("invalid opid/objid/elemid") diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index 6a0f81e7..2212cb02 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -36,3 +36,4 @@ pretty_assertions = "1.0.0" proptest = { version = "^1.0.0", default-features = false, features = ["std"] } serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-features=true } maplit = { version = "^1.0" } +decorum = "0.3.1" diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs new file mode 100644 index 00000000..9b70ce9f --- /dev/null +++ b/automerge/src/automerge.rs @@ -0,0 +1,1338 @@ 
+use std::collections::{HashMap, HashSet, VecDeque}; +use unicode_segmentation::UnicodeSegmentation; + +use crate::change::{encode_document, export_change}; +use crate::exid::ExId; +use crate::op_set::OpSet; +use crate::types::{ + ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, + ScalarValue, Value, +}; +use crate::{legacy, query, types}; +use crate::{AutomergeError, Change, Prop}; + +#[derive(Debug, Clone)] +pub struct Automerge { + queue: Vec, + history: Vec, + history_index: HashMap, + states: HashMap>, + deps: HashSet, + saved: Vec, + ops: OpSet, + actor: Option, + max_op: u64, + transaction: Option, +} + +impl Automerge { + pub fn new() -> Self { + Automerge { + queue: vec![], + history: vec![], + history_index: HashMap::new(), + states: HashMap::new(), + ops: Default::default(), + deps: Default::default(), + saved: Default::default(), + actor: None, + max_op: 0, + transaction: None, + } + } + + pub fn set_actor(&mut self, actor: ActorId) { + self.ensure_transaction_closed(); + self.actor = Some(self.ops.m.actors.cache(actor)) + } + + fn random_actor(&mut self) -> ActorId { + let actor = ActorId::from(uuid::Uuid::new_v4().as_bytes().to_vec()); + self.actor = Some(self.ops.m.actors.cache(actor.clone())); + actor + } + + pub fn get_actor(&mut self) -> ActorId { + if let Some(actor) = self.actor { + self.ops.m.actors[actor].clone() + } else { + self.random_actor() + } + } + + pub fn maybe_get_actor(&self) -> Option { + self.actor.map(|i| self.ops.m.actors[i].clone()) + } + + fn get_actor_index(&mut self) -> usize { + if let Some(actor) = self.actor { + actor + } else { + self.random_actor(); + self.actor.unwrap() // random_actor always sets actor to is_some() + } + } + + pub fn new_with_actor_id(actor: ActorId) -> Self { + let mut am = Automerge { + queue: vec![], + history: vec![], + history_index: HashMap::new(), + states: HashMap::new(), + ops: Default::default(), + deps: Default::default(), + saved: 
Default::default(), + actor: None, + max_op: 0, + transaction: None, + }; + am.actor = Some(am.ops.m.actors.cache(actor)); + am + } + + pub fn pending_ops(&self) -> u64 { + self.transaction + .as_ref() + .map(|t| t.operations.len() as u64) + .unwrap_or(0) + } + + fn tx(&mut self) -> &mut Transaction { + if self.transaction.is_none() { + let actor = self.get_actor_index(); + + let seq = self.states.entry(actor).or_default().len() as u64 + 1; + let mut deps = self.get_heads(); + if seq > 1 { + let last_hash = self.get_hash(actor, seq - 1).unwrap(); + if !deps.contains(&last_hash) { + deps.push(last_hash); + } + } + + self.transaction = Some(Transaction { + actor, + seq, + start_op: self.max_op + 1, + time: 0, + message: None, + extra_bytes: Default::default(), + hash: None, + operations: vec![], + deps, + }); + } + + self.transaction.as_mut().unwrap() + } + + pub fn commit(&mut self, message: Option, time: Option) -> Vec { + let tx = self.tx(); + + if message.is_some() { + tx.message = message; + } + + if let Some(t) = time { + tx.time = t; + } + + tx.operations.len(); + + self.ensure_transaction_closed(); + + self.get_heads() + } + + pub fn ensure_transaction_closed(&mut self) { + if let Some(tx) = self.transaction.take() { + self.update_history(export_change(&tx, &self.ops.m.actors, &self.ops.m.props)); + } + } + + pub fn rollback(&mut self) -> usize { + if let Some(tx) = self.transaction.take() { + let num = tx.operations.len(); + for op in &tx.operations { + for pred_id in &op.pred { + // FIXME - use query to make this fast + if let Some(p) = self.ops.iter().position(|o| o.id == *pred_id) { + self.ops + .replace(op.obj, p, |o| o.succ.retain(|i| i != pred_id)); + } + } + if let Some(pos) = self.ops.iter().position(|o| o.id == op.id) { + self.ops.remove(op.obj, pos); + } + } + num + } else { + 0 + } + } + + fn next_id(&mut self) -> OpId { + let tx = self.tx(); + OpId(tx.start_op + tx.operations.len() as u64, tx.actor) + } + + fn insert_local_op(&mut self, op: Op, 
pos: usize, succ_pos: &[usize]) { + for succ in succ_pos { + self.ops.replace(op.obj, *succ, |old_op| { + old_op.succ.push(op.id); + }); + } + + if !op.is_del() { + self.ops.insert(pos, op.clone()); + } + + self.tx().operations.push(op); + } + + fn insert_op(&mut self, op: Op) -> Op { + let q = self.ops.search(op.obj, query::SeekOp::new(&op)); + + for i in q.succ { + self.ops + .replace(op.obj, i, |old_op| old_op.succ.push(op.id)); + } + + if !op.is_del() { + self.ops.insert(q.pos, op.clone()); + } + op + } + + // KeysAt::() + // LenAt::() + // PropAt::() + // NthAt::() + + pub fn keys(&self, obj: &ExId) -> Vec { + if let Ok(obj) = self.exid_to_obj(obj) { + let q = self.ops.search(obj, query::Keys::new()); + q.keys.iter().map(|k| self.to_string(*k)).collect() + } else { + vec![] + } + } + + pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec { + if let Ok(obj) = self.exid_to_obj(obj) { + let clock = self.clock_at(heads); + let q = self.ops.search(obj, query::KeysAt::new(clock)); + q.keys.iter().map(|k| self.to_string(*k)).collect() + } else { + vec![] + } + } + + pub fn length(&self, obj: &ExId) -> usize { + if let Ok(obj) = self.exid_to_obj(obj) { + self.ops.search(obj, query::Len::new()).len + } else { + 0 + } + } + + pub fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize { + if let Ok(obj) = self.exid_to_obj(obj) { + let clock = self.clock_at(heads); + self.ops.search(obj, query::LenAt::new(clock)).len + } else { + 0 + } + } + + // set(obj, prop, value) - value can be scalar or objtype + // del(obj, prop) + // inc(obj, prop, value) + // insert(obj, index, value) + + /// Set the value of property `P` to value `V` in object `obj`. 
+ /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + pub fn set, V: Into>( + &mut self, + obj: &ExId, + prop: P, + value: V, + ) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj)?; + let value = value.into(); + if let Some(id) = self.local_op(obj, prop.into(), value.into())? { + Ok(Some(self.id_to_exid(id))) + } else { + Ok(None) + } + } + + fn exid_to_obj(&self, id: &ExId) -> Result { + match id { + ExId::Root => Ok(ObjId::root()), + ExId::Id(ctr, actor, idx) => { + // do a direct get here b/c this could be foriegn and not be within the array + // bounds + if self.ops.m.actors.cache.get(*idx) == Some(actor) { + Ok(ObjId(OpId(*ctr, *idx))) + } else { + // FIXME - make a real error + let idx = self + .ops + .m + .actors + .lookup(actor) + .ok_or(AutomergeError::Fail)?; + Ok(ObjId(OpId(*ctr, idx))) + } + } + } + } + + fn id_to_exid(&self, id: OpId) -> ExId { + ExId::Id(id.0, self.ops.m.actors.cache[id.1].clone(), id.1) + } + + pub fn insert>( + &mut self, + obj: &ExId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj)?; + if let Some(id) = self.do_insert(obj, index, value)? 
{ + Ok(Some(self.id_to_exid(id))) + } else { + Ok(None) + } + } + + fn do_insert>( + &mut self, + obj: ObjId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + let id = self.next_id(); + + let query = self.ops.search(obj, query::InsertNth::new(index)); + + let key = query.key()?; + let value = value.into(); + let action = value.into(); + let is_make = matches!(&action, OpType::Make(_)); + + let op = Op { + change: self.history.len(), + id, + action, + obj, + key, + succ: Default::default(), + pred: Default::default(), + insert: true, + }; + + self.ops.insert(query.pos, op.clone()); + self.tx().operations.push(op); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + pub fn inc>( + &mut self, + obj: &ExId, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + let obj = self.exid_to_obj(obj)?; + self.local_op(obj, prop.into(), OpType::Inc(value))?; + Ok(()) + } + + pub fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { + let obj = self.exid_to_obj(obj)?; + self.local_op(obj, prop.into(), OpType::Del)?; + Ok(()) + } + + /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert + /// the new elements + pub fn splice( + &mut self, + obj: &ExId, + mut pos: usize, + del: usize, + vals: Vec, + ) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj)?; + for _ in 0..del { + // del() + self.local_op(obj, pos.into(), OpType::Del)?; + } + let mut results = Vec::new(); + for v in vals { + // insert() + let id = self.do_insert(obj, pos, v)?; + if let Some(id) = id { + results.push(self.id_to_exid(id)); + } + pos += 1; + } + Ok(results) + } + + pub fn splice_text( + &mut self, + obj: &ExId, + pos: usize, + del: usize, + text: &str, + ) -> Result, AutomergeError> { + let mut vals = vec![]; + for c in text.to_owned().graphemes(true) { + vals.push(c.into()); + } + self.splice(obj, pos, del, vals) + } + + pub fn text(&self, obj: &ExId) -> Result { + let obj = self.exid_to_obj(obj)?; + let query = self.ops.search(obj, query::ListVals::new(obj)); + let mut buffer = String::new(); + for q in &query.ops { + if let OpType::Set(ScalarValue::Str(s)) = &q.action { + buffer.push_str(s); + } + } + Ok(buffer) + } + + pub fn text_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Result { + let obj = self.exid_to_obj(obj)?; + let clock = self.clock_at(heads); + let query = self.ops.search(obj, query::ListValsAt::new(clock)); + let mut buffer = String::new(); + for q in &query.ops { + if let OpType::Set(ScalarValue::Str(s)) = &q.action { + buffer.push_str(s); + } + } + Ok(buffer) + } + + // TODO - I need to return these OpId's here **only** to get + // the legacy conflicts format of { [opid]: value } + // Something better? 
+ pub fn value>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError> { + Ok(self.values(obj, prop.into())?.first().cloned()) + } + + pub fn value_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + Ok(self.values_at(obj, prop, heads)?.first().cloned()) + } + + pub fn values>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj)?; + let result = match prop.into() { + Prop::Map(p) => { + let prop = self.ops.m.props.lookup(&p); + if let Some(p) = prop { + self.ops + .search(obj, query::Prop::new(obj, p)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect() + } else { + vec![] + } + } + Prop::Seq(n) => self + .ops + .search(obj, query::Nth::new(n)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect(), + }; + Ok(result) + } + + pub fn values_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + let prop = prop.into(); + let obj = self.exid_to_obj(obj)?; + let clock = self.clock_at(heads); + let result = match prop { + Prop::Map(p) => { + let prop = self.ops.m.props.lookup(&p); + if let Some(p) = prop { + self.ops + .search(obj, query::PropAt::new(p, clock)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect() + } else { + vec![] + } + } + Prop::Seq(n) => self + .ops + .search(obj, query::NthAt::new(n, clock)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect(), + }; + Ok(result) + } + + pub fn load(data: &[u8]) -> Result { + let changes = Change::load_document(data)?; + let mut doc = Self::new(); + doc.apply_changes(&changes)?; + Ok(doc) + } + + pub fn load_incremental(&mut self, data: &[u8]) -> Result { + let changes = Change::load_document(data)?; + let start = self.ops.len(); + self.apply_changes(&changes)?; + let delta = self.ops.len() - start; + Ok(delta) + } + + pub fn apply_changes(&mut self, 
changes: &[Change]) -> Result { + self.ensure_transaction_closed(); + for c in changes { + if !self.history_index.contains_key(&c.hash) { + if self.is_causally_ready(c) { + self.apply_change(c.clone()); + } else { + self.queue.push(c.clone()); + while let Some(c) = self.pop_next_causally_ready_change() { + self.apply_change(c); + } + } + } + } + Ok(Patch {}) + } + + pub fn apply_change(&mut self, change: Change) { + self.ensure_transaction_closed(); + let ops = self.import_ops(&change, self.history.len()); + self.update_history(change); + for op in ops { + self.insert_op(op); + } + } + + fn local_op( + &mut self, + obj: ObjId, + prop: Prop, + action: OpType, + ) -> Result, AutomergeError> { + match prop { + Prop::Map(s) => self.local_map_op(obj, s, action), + Prop::Seq(n) => self.local_list_op(obj, n, action), + } + } + + fn local_map_op( + &mut self, + obj: ObjId, + prop: String, + action: OpType, + ) -> Result, AutomergeError> { + if prop.is_empty() { + return Err(AutomergeError::EmptyStringKey); + } + + let id = self.next_id(); + let prop = self.ops.m.props.cache(prop); + let query = self.ops.search(obj, query::Prop::new(obj, prop)); + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } + + let is_make = matches!(&action, OpType::Make(_)); + + let pred = query.ops.iter().map(|op| op.id).collect(); + + let op = Op { + change: self.history.len(), + id, + action, + obj, + key: Key::Map(prop), + succ: Default::default(), + pred, + insert: false, + }; + + self.insert_local_op(op, query.pos, &query.ops_pos); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + fn local_list_op( + &mut self, + obj: ObjId, + index: usize, + action: OpType, + ) -> Result, AutomergeError> { + let query = self.ops.search(obj, query::Nth::new(index)); + + let id = self.next_id(); + let pred = query.ops.iter().map(|op| op.id).collect(); + let key = query.key()?; + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } 
+ + let is_make = matches!(&action, OpType::Make(_)); + + let op = Op { + change: self.history.len(), + id, + action, + obj, + key, + succ: Default::default(), + pred, + insert: false, + }; + + self.insert_local_op(op, query.pos, &query.ops_pos); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + fn is_causally_ready(&self, change: &Change) -> bool { + change + .deps + .iter() + .all(|d| self.history_index.contains_key(d)) + } + + fn pop_next_causally_ready_change(&mut self) -> Option { + let mut index = 0; + while index < self.queue.len() { + if self.is_causally_ready(&self.queue[index]) { + return Some(self.queue.swap_remove(index)); + } + index += 1; + } + None + } + + fn import_ops(&mut self, change: &Change, change_id: usize) -> Vec { + change + .iter_ops() + .enumerate() + .map(|(i, c)| { + let actor = self.ops.m.actors.cache(change.actor_id().clone()); + let id = OpId(change.start_op + i as u64, actor); + let obj = match c.obj { + legacy::ObjectId::Root => ObjId::root(), + legacy::ObjectId::Id(id) => ObjId(OpId(id.0, self.ops.m.actors.cache(id.1))), + }; + let pred = c + .pred + .iter() + .map(|i| OpId(i.0, self.ops.m.actors.cache(i.1.clone()))) + .collect(); + let key = match &c.key { + legacy::Key::Map(n) => Key::Map(self.ops.m.props.cache(n.to_string())), + legacy::Key::Seq(legacy::ElementId::Head) => Key::Seq(types::HEAD), + legacy::Key::Seq(legacy::ElementId::Id(i)) => { + Key::Seq(ElemId(OpId(i.0, self.ops.m.actors.cache(i.1.clone())))) + } + }; + Op { + change: change_id, + id, + action: c.action, + obj, + key, + succ: Default::default(), + pred, + insert: c.insert, + } + }) + .collect() + } + + /// Takes all the changes in `other` which are not in `self` and applies them + pub fn merge(&mut self, other: &mut Self) { + // TODO: Make this fallible and figure out how to do this transactionally + other.ensure_transaction_closed(); + let changes = self + .get_changes_added(other) + .into_iter() + .cloned() + .collect::>(); + 
self.apply_changes(&changes).unwrap(); + } + + pub fn save(&mut self) -> Result, AutomergeError> { + self.ensure_transaction_closed(); + // TODO - would be nice if I could pass an iterator instead of a collection here + let c: Vec<_> = self.history.iter().map(|c| c.decode()).collect(); + let ops: Vec<_> = self.ops.iter().cloned().collect(); + // TODO - can we make encode_document error free + let bytes = encode_document( + &c, + ops.as_slice(), + &self.ops.m.actors, + &self.ops.m.props.cache, + ); + if bytes.is_ok() { + self.saved = self.get_heads().iter().copied().collect(); + } + bytes + } + + // should this return an empty vec instead of None? + pub fn save_incremental(&mut self) -> Vec { + self.ensure_transaction_closed(); + let changes = self._get_changes(self.saved.as_slice()); + let mut bytes = vec![]; + for c in changes { + bytes.extend(c.raw_bytes()); + } + if !bytes.is_empty() { + self.saved = self._get_heads().iter().copied().collect(); + } + bytes + } + + /// Filter the changes down to those that are not transitive dependencies of the heads. + /// + /// Thus a graph with these heads has not seen the remaining changes. + pub(crate) fn filter_changes(&self, heads: &[ChangeHash], changes: &mut HashSet) { + // Reduce the working set to find to those which we may be able to find. + // This filters out those hashes that are successors of or concurrent with all of the + // heads. + // This can help in avoiding traversing the entire graph back to the roots when we try to + // search for a hash we can know won't be found there. 
+ let max_head_index = heads + .iter() + .map(|h| self.history_index.get(h).unwrap_or(&0)) + .max() + .unwrap_or(&0); + let mut may_find: HashSet = changes + .iter() + .filter(|hash| { + let change_index = self.history_index.get(hash).unwrap_or(&0); + change_index <= max_head_index + }) + .copied() + .collect(); + + if may_find.is_empty() { + return; + } + + let mut queue: VecDeque<_> = heads.iter().collect(); + let mut seen = HashSet::new(); + while let Some(hash) = queue.pop_front() { + if seen.contains(hash) { + continue; + } + seen.insert(hash); + + let removed = may_find.remove(hash); + changes.remove(hash); + if may_find.is_empty() { + break; + } + + for dep in self + .history_index + .get(hash) + .and_then(|i| self.history.get(*i)) + .map(|c| c.deps.as_slice()) + .unwrap_or_default() + { + // if we just removed something from our hashes then it is likely there is more + // down here so do a quick inspection on the children. + // When we don't remove anything it is less likely that there is something down + // that chain so delay it. 
+ if removed { + queue.push_front(dep); + } else { + queue.push_back(dep); + } + } + } + } + + pub fn get_missing_deps(&mut self, heads: &[ChangeHash]) -> Vec { + self.ensure_transaction_closed(); + self._get_missing_deps(heads) + } + + pub(crate) fn _get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { + let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash).collect(); + let mut missing = HashSet::new(); + + for head in self.queue.iter().flat_map(|change| &change.deps) { + if !self.history_index.contains_key(head) { + missing.insert(head); + } + } + + for head in heads { + if !self.history_index.contains_key(head) { + missing.insert(head); + } + } + + let mut missing = missing + .into_iter() + .filter(|hash| !in_queue.contains(hash)) + .copied() + .collect::>(); + missing.sort(); + missing + } + + fn get_changes_fast(&self, have_deps: &[ChangeHash]) -> Option> { + if have_deps.is_empty() { + return Some(self.history.iter().collect()); + } + + let lowest_idx = have_deps + .iter() + .filter_map(|h| self.history_index.get(h)) + .min()? + + 1; + + let mut missing_changes = vec![]; + let mut has_seen: HashSet<_> = have_deps.iter().collect(); + for change in &self.history[lowest_idx..] 
{ + let deps_seen = change.deps.iter().filter(|h| has_seen.contains(h)).count(); + if deps_seen > 0 { + if deps_seen != change.deps.len() { + // future change depends on something we haven't seen - fast path cant work + return None; + } + missing_changes.push(change); + has_seen.insert(&change.hash); + } + } + + // if we get to the end and there is a head we haven't seen then fast path cant work + if self._get_heads().iter().all(|h| has_seen.contains(h)) { + Some(missing_changes) + } else { + None + } + } + + fn get_changes_slow(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { + let mut stack: Vec<_> = have_deps.iter().collect(); + let mut has_seen = HashSet::new(); + while let Some(hash) = stack.pop() { + if has_seen.contains(&hash) { + continue; + } + if let Some(change) = self + .history_index + .get(hash) + .and_then(|i| self.history.get(*i)) + { + stack.extend(change.deps.iter()); + } + has_seen.insert(hash); + } + self.history + .iter() + .filter(|change| !has_seen.contains(&change.hash)) + .collect() + } + + pub fn get_last_local_change(&mut self) -> Option<&Change> { + self.ensure_transaction_closed(); + if let Some(actor) = &self.actor { + let actor = &self.ops.m.actors[*actor]; + return self.history.iter().rev().find(|c| c.actor_id() == actor); + } + None + } + + pub fn get_changes(&mut self, have_deps: &[ChangeHash]) -> Vec<&Change> { + self.ensure_transaction_closed(); + self._get_changes(have_deps) + } + + pub(crate) fn _get_changes(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { + if let Some(changes) = self.get_changes_fast(have_deps) { + changes + } else { + self.get_changes_slow(have_deps) + } + } + + fn clock_at(&self, heads: &[ChangeHash]) -> Clock { + let mut clock = Clock::new(); + let mut seen = HashSet::new(); + let mut to_see = heads.to_vec(); + // FIXME - faster + while let Some(hash) = to_see.pop() { + if let Some(c) = self._get_change_by_hash(&hash) { + for h in &c.deps { + if !seen.contains(h) { + to_see.push(*h); + } + } + let 
actor = self.ops.m.actors.lookup(c.actor_id()).unwrap(); + clock.include(actor, c.max_op()); + seen.insert(hash); + } + } + clock + } + + pub fn get_change_by_hash(&mut self, hash: &ChangeHash) -> Option<&Change> { + self.ensure_transaction_closed(); + self._get_change_by_hash(hash) + } + + pub(crate) fn _get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { + self.history_index + .get(hash) + .and_then(|index| self.history.get(*index)) + } + + pub fn get_changes_added<'a>(&mut self, other: &'a Self) -> Vec<&'a Change> { + self.ensure_transaction_closed(); + self._get_changes_added(other) + } + + pub(crate) fn _get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { + // Depth-first traversal from the heads through the dependency graph, + // until we reach a change that is already present in other + let mut stack: Vec<_> = other._get_heads(); + let mut seen_hashes = HashSet::new(); + let mut added_change_hashes = Vec::new(); + while let Some(hash) = stack.pop() { + if !seen_hashes.contains(&hash) && self._get_change_by_hash(&hash).is_none() { + seen_hashes.insert(hash); + added_change_hashes.push(hash); + if let Some(change) = other._get_change_by_hash(&hash) { + stack.extend(&change.deps); + } + } + } + // Return those changes in the reverse of the order in which the depth-first search + // found them. This is not necessarily a topological sort, but should usually be close. 
+ added_change_hashes.reverse(); + added_change_hashes + .into_iter() + .filter_map(|h| other._get_change_by_hash(&h)) + .collect() + } + + pub fn get_heads(&mut self) -> Vec { + self.ensure_transaction_closed(); + self._get_heads() + } + + pub(crate) fn _get_heads(&self) -> Vec { + let mut deps: Vec<_> = self.deps.iter().copied().collect(); + deps.sort_unstable(); + deps + } + + fn get_hash(&mut self, actor: usize, seq: u64) -> Result { + self.states + .get(&actor) + .and_then(|v| v.get(seq as usize - 1)) + .and_then(|&i| self.history.get(i)) + .map(|c| c.hash) + .ok_or(AutomergeError::InvalidSeq(seq)) + } + + fn update_history(&mut self, change: Change) -> usize { + self.max_op = std::cmp::max(self.max_op, change.start_op + change.len() as u64 - 1); + + self.update_deps(&change); + + let history_index = self.history.len(); + + self.states + .entry(self.ops.m.actors.cache(change.actor_id().clone())) + .or_default() + .push(history_index); + + self.history_index.insert(change.hash, history_index); + self.history.push(change); + + history_index + } + + fn update_deps(&mut self, change: &Change) { + for d in &change.deps { + self.deps.remove(d); + } + self.deps.insert(change.hash); + } + + pub fn import(&self, s: &str) -> Result { + if s == "_root" { + Ok(ExId::Root) + } else { + let n = s + .find('@') + .ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?; + let counter = s[0..n] + .parse() + .map_err(|_| AutomergeError::InvalidOpId(s.to_owned()))?; + let actor = ActorId::from(hex::decode(&s[(n + 1)..]).unwrap()); + let actor = self + .ops + .m + .actors + .lookup(&actor) + .ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?; + Ok(ExId::Id( + counter, + self.ops.m.actors.cache[actor].clone(), + actor, + )) + } + } + + fn to_string(&self, id: E) -> String { + match id.export() { + Export::Id(id) => format!("{}@{}", id.counter(), self.ops.m.actors[id.actor()]), + Export::Prop(index) => self.ops.m.props[index].clone(), + Export::Special(s) => s, + } + } + 
+ pub fn dump(&self) { + log!( + " {:12} {:12} {:12} {} {} {}", + "id", + "obj", + "key", + "value", + "pred", + "succ" + ); + for i in self.ops.iter() { + let id = self.to_string(i.id); + let obj = self.to_string(i.obj); + let key = match i.key { + Key::Map(n) => self.ops.m.props[n].clone(), + Key::Seq(n) => self.to_string(n), + }; + let value: String = match &i.action { + OpType::Set(value) => format!("{}", value), + OpType::Make(obj) => format!("make{}", obj), + OpType::Inc(obj) => format!("inc{}", obj), + OpType::Del => format!("del{}", 0), + }; + let pred: Vec<_> = i.pred.iter().map(|id| self.to_string(*id)).collect(); + let succ: Vec<_> = i.succ.iter().map(|id| self.to_string(*id)).collect(); + log!( + " {:12} {:12} {:12} {} {:?} {:?}", + id, + obj, + key, + value, + pred, + succ + ); + } + } + + #[cfg(feature = "optree-visualisation")] + pub fn visualise_optree(&self) -> String { + self.ops.visualise() + } +} + +#[derive(Debug, Clone)] +pub(crate) struct Transaction { + pub actor: usize, + pub seq: u64, + pub start_op: u64, + pub time: i64, + pub message: Option, + pub extra_bytes: Vec, + pub hash: Option, + pub deps: Vec, + pub operations: Vec, +} + +impl Default for Automerge { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::*; + use std::convert::TryInto; + + #[test] + fn insert_op() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor(ActorId::random()); + doc.set(&ROOT, "hello", "world")?; + assert!(doc.pending_ops() == 1); + doc.value(&ROOT, "hello")?; + Ok(()) + } + + #[test] + fn test_list() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor(ActorId::random()); + let list_id = doc.set(&ROOT, "items", Value::list())?.unwrap(); + doc.set(&ROOT, "zzz", "zzzval")?; + assert!(doc.value(&ROOT, "items")?.unwrap().1 == list_id); + doc.insert(&list_id, 0, "a")?; + doc.insert(&list_id, 0, "b")?; + doc.insert(&list_id, 2, "c")?; + 
doc.insert(&list_id, 1, "d")?; + assert!(doc.value(&list_id, 0)?.unwrap().0 == "b".into()); + assert!(doc.value(&list_id, 1)?.unwrap().0 == "d".into()); + assert!(doc.value(&list_id, 2)?.unwrap().0 == "a".into()); + assert!(doc.value(&list_id, 3)?.unwrap().0 == "c".into()); + assert!(doc.length(&list_id) == 4); + doc.save()?; + Ok(()) + } + + #[test] + fn test_del() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor(ActorId::random()); + doc.set(&ROOT, "xxx", "xxx")?; + assert!(!doc.values(&ROOT, "xxx")?.is_empty()); + doc.del(&ROOT, "xxx")?; + assert!(doc.values(&ROOT, "xxx")?.is_empty()); + Ok(()) + } + + #[test] + fn test_inc() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set(&ROOT, "counter", Value::counter(10))?; + assert!(doc.value(&ROOT, "counter")?.unwrap().0 == Value::counter(10)); + doc.inc(&ROOT, "counter", 10)?; + assert!(doc.value(&ROOT, "counter")?.unwrap().0 == Value::counter(20)); + doc.inc(&ROOT, "counter", -5)?; + assert!(doc.value(&ROOT, "counter")?.unwrap().0 == Value::counter(15)); + Ok(()) + } + + #[test] + fn test_save_incremental() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + + doc.set(&ROOT, "foo", 1)?; + + let save1 = doc.save().unwrap(); + + doc.set(&ROOT, "bar", 2)?; + + let save2 = doc.save_incremental(); + + doc.set(&ROOT, "baz", 3)?; + + let save3 = doc.save_incremental(); + + let mut save_a: Vec = vec![]; + save_a.extend(&save1); + save_a.extend(&save2); + save_a.extend(&save3); + + assert!(doc.save_incremental().is_empty()); + + let save_b = doc.save().unwrap(); + + assert!(save_b.len() < save_a.len()); + + let mut doc_a = Automerge::load(&save_a)?; + let mut doc_b = Automerge::load(&save_b)?; + + assert!(doc_a.values(&ROOT, "baz")? 
== doc_b.values(&ROOT, "baz")?); + + assert!(doc_a.save().unwrap() == doc_b.save().unwrap()); + + Ok(()) + } + + #[test] + fn test_save_text() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + let text = doc.set(&ROOT, "text", Value::text())?.unwrap(); + let heads1 = doc.commit(None, None); + doc.splice_text(&text, 0, 0, "hello world")?; + let heads2 = doc.commit(None, None); + doc.splice_text(&text, 6, 0, "big bad ")?; + let heads3 = doc.commit(None, None); + + assert!(&doc.text(&text)? == "hello big bad world"); + assert!(&doc.text_at(&text, &heads1)?.is_empty()); + assert!(&doc.text_at(&text, &heads2)? == "hello world"); + assert!(&doc.text_at(&text, &heads3)? == "hello big bad world"); + + Ok(()) + } + + #[test] + fn test_props_vals_at() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor("aaaa".try_into().unwrap()); + doc.set(&ROOT, "prop1", "val1")?; + doc.commit(None, None); + let heads1 = doc.get_heads(); + doc.set(&ROOT, "prop1", "val2")?; + doc.commit(None, None); + let heads2 = doc.get_heads(); + doc.set(&ROOT, "prop2", "val3")?; + doc.commit(None, None); + let heads3 = doc.get_heads(); + doc.del(&ROOT, "prop1")?; + doc.commit(None, None); + let heads4 = doc.get_heads(); + doc.set(&ROOT, "prop3", "val4")?; + doc.commit(None, None); + let heads5 = doc.get_heads(); + assert!(doc.keys_at(&ROOT, &heads1) == vec!["prop1".to_owned()]); + assert!(doc.value_at(&ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); + assert!(doc.value_at(&ROOT, "prop2", &heads1)? == None); + assert!(doc.value_at(&ROOT, "prop3", &heads1)? == None); + + assert!(doc.keys_at(&ROOT, &heads2) == vec!["prop1".to_owned()]); + assert!(doc.value_at(&ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); + assert!(doc.value_at(&ROOT, "prop2", &heads2)? == None); + assert!(doc.value_at(&ROOT, "prop3", &heads2)? 
== None); + + assert!(doc.keys_at(&ROOT, &heads3) == vec!["prop1".to_owned(), "prop2".to_owned()]); + assert!(doc.value_at(&ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); + assert!(doc.value_at(&ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(&ROOT, "prop3", &heads3)? == None); + + assert!(doc.keys_at(&ROOT, &heads4) == vec!["prop2".to_owned()]); + assert!(doc.value_at(&ROOT, "prop1", &heads4)? == None); + assert!(doc.value_at(&ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(&ROOT, "prop3", &heads4)? == None); + + assert!(doc.keys_at(&ROOT, &heads5) == vec!["prop2".to_owned(), "prop3".to_owned()]); + assert!(doc.value_at(&ROOT, "prop1", &heads5)? == None); + assert!(doc.value_at(&ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(&ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); + + assert!(doc.keys_at(&ROOT, &[]).is_empty()); + assert!(doc.value_at(&ROOT, "prop1", &[])? == None); + assert!(doc.value_at(&ROOT, "prop2", &[])? == None); + assert!(doc.value_at(&ROOT, "prop3", &[])? 
== None); + Ok(()) + } + + #[test] + fn test_len_at() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor("aaaa".try_into().unwrap()); + + let list = doc.set(&ROOT, "list", Value::list())?.unwrap(); + let heads1 = doc.commit(None, None); + + doc.insert(&list, 0, Value::int(10))?; + let heads2 = doc.commit(None, None); + + doc.set(&list, 0, Value::int(20))?; + doc.insert(&list, 0, Value::int(30))?; + let heads3 = doc.commit(None, None); + + doc.set(&list, 1, Value::int(40))?; + doc.insert(&list, 1, Value::int(50))?; + let heads4 = doc.commit(None, None); + + doc.del(&list, 2)?; + let heads5 = doc.commit(None, None); + + doc.del(&list, 0)?; + let heads6 = doc.commit(None, None); + + assert!(doc.length_at(&list, &heads1) == 0); + assert!(doc.value_at(&list, 0, &heads1)?.is_none()); + + assert!(doc.length_at(&list, &heads2) == 1); + assert!(doc.value_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10)); + + assert!(doc.length_at(&list, &heads3) == 2); + assert!(doc.value_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30)); + assert!(doc.value_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20)); + + assert!(doc.length_at(&list, &heads4) == 3); + assert!(doc.value_at(&list, 0, &heads4)?.unwrap().0 == Value::int(30)); + assert!(doc.value_at(&list, 1, &heads4)?.unwrap().0 == Value::int(50)); + assert!(doc.value_at(&list, 2, &heads4)?.unwrap().0 == Value::int(40)); + + assert!(doc.length_at(&list, &heads5) == 2); + assert!(doc.value_at(&list, 0, &heads5)?.unwrap().0 == Value::int(30)); + assert!(doc.value_at(&list, 1, &heads5)?.unwrap().0 == Value::int(50)); + + assert!(doc.length_at(&list, &heads6) == 1); + assert!(doc.value_at(&list, 0, &heads6)?.unwrap().0 == Value::int(50)); + + Ok(()) + } +} diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 4d3984e5..846cc71d 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -1,3 +1,4 @@ +use crate::automerge::Transaction; use crate::columnar::{ ChangeEncoder, 
ChangeIterator, ColumnEncoder, DepsIterator, DocChange, DocOp, DocOpEncoder, DocOpIterator, OperationIterator, COLUMN_TYPE_DEFLATE, @@ -5,11 +6,11 @@ use crate::columnar::{ use crate::decoding; use crate::decoding::{Decodable, InvalidChangeError}; use crate::encoding::{Encodable, DEFLATE_MIN_SIZE}; +use crate::error::AutomergeError; +use crate::indexed_cache::IndexedCache; use crate::legacy as amp; -use crate::{ - ActorId, AutomergeError, ElemId, IndexedCache, Key, ObjId, Op, OpId, OpType, Transaction, HEAD, - ROOT, -}; +use crate::types; +use crate::types::{ActorId, ElemId, Key, ObjId, Op, OpId, OpType}; use core::ops::Range; use flate2::{ bufread::{DeflateDecoder, DeflateEncoder}, @@ -417,7 +418,7 @@ fn increment_range_map(ranges: &mut HashMap>, len: usize) { } fn export_objid(id: &ObjId, actors: &IndexedCache) -> amp::ObjectId { - if id.0 == ROOT { + if id == &ObjId::root() { amp::ObjectId::Root } else { export_opid(&id.0, actors).into() @@ -425,7 +426,7 @@ fn export_objid(id: &ObjId, actors: &IndexedCache) -> amp::ObjectId { } fn export_elemid(id: &ElemId, actors: &IndexedCache) -> amp::ElementId { - if id == &HEAD { + if id == &types::HEAD { amp::ElementId::Head } else { export_opid(&id.0, actors).into() diff --git a/automerge/src/clock.rs b/automerge/src/clock.rs index 979885b3..d01c7748 100644 --- a/automerge/src/clock.rs +++ b/automerge/src/clock.rs @@ -1,4 +1,4 @@ -use crate::OpId; +use crate::types::OpId; use fxhash::FxBuildHasher; use std::cmp; use std::collections::HashMap; diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index 3a1df3cb..c821b9bb 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -11,8 +11,7 @@ use std::{ str, }; -use crate::ROOT; -use crate::{ActorId, ElemId, Key, ObjId, ObjType, OpId, OpType, ScalarValue}; +use crate::types::{ActorId, ElemId, Key, ObjId, ObjType, Op, OpId, OpType, ScalarValue}; use crate::legacy as amp; use amp::SortedVec; @@ -20,10 +19,10 @@ use 
flate2::bufread::DeflateDecoder; use smol_str::SmolStr; use tracing::instrument; +use crate::indexed_cache::IndexedCache; use crate::{ decoding::{BooleanDecoder, Decodable, Decoder, DeltaDecoder, RleDecoder}, encoding::{BooleanEncoder, ColData, DeltaEncoder, Encodable, RleEncoder}, - IndexedCache, Op, }; impl Encodable for Action { @@ -846,7 +845,7 @@ impl ObjEncoder { fn append(&mut self, obj: &ObjId, actors: &[usize]) { match obj.0 { - ROOT => { + OpId(ctr, _) if ctr == 0 => { self.actor.append_null(); self.ctr.append_null(); } @@ -951,7 +950,7 @@ impl ChangeEncoder { index_by_hash.insert(hash, index); } self.actor - .append_value(actors.lookup(change.actor_id.clone()).unwrap()); //actors.iter().position(|a| a == &change.actor_id).unwrap()); + .append_value(actors.lookup(&change.actor_id).unwrap()); //actors.iter().position(|a| a == &change.actor_id).unwrap()); self.seq.append_value(change.seq); // FIXME iterops.count is crazy slow self.max_op diff --git a/automerge/src/error.rs b/automerge/src/error.rs index ddb7092b..32eb9d1d 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -1,6 +1,6 @@ use crate::decoding; +use crate::types::ScalarValue; use crate::value::DataType; -use crate::ScalarValue; use thiserror::Error; #[derive(Error, Debug)] @@ -17,6 +17,8 @@ pub enum AutomergeError { InvalidSeq(u64), #[error("index {0} is out of bounds")] InvalidIndex(usize), + #[error("generic automerge error")] + Fail, } impl From for AutomergeError { diff --git a/automerge/src/exid.rs b/automerge/src/exid.rs new file mode 100644 index 00000000..d79b35ce --- /dev/null +++ b/automerge/src/exid.rs @@ -0,0 +1,33 @@ +use crate::ActorId; +use std::fmt; + +#[derive(Debug, Clone)] +pub enum ExId { + Root, + Id(u64, ActorId, usize), +} + +impl PartialEq for ExId { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (ExId::Root, ExId::Root) => true, + (ExId::Id(ctr1, actor1, _), ExId::Id(ctr2, actor2, _)) + if ctr1 == ctr2 && actor1 == actor2 => + { + true + 
} + _ => false, + } + } +} + +impl Eq for ExId {} + +impl fmt::Display for ExId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ExId::Root => write!(f, "_root"), + ExId::Id(ctr, actor, _) => write!(f, "{}@{}", ctr, actor), + } + } +} diff --git a/automerge/src/indexed_cache.rs b/automerge/src/indexed_cache.rs index 21ffd75b..b11f39ad 100644 --- a/automerge/src/indexed_cache.rs +++ b/automerge/src/indexed_cache.rs @@ -31,8 +31,8 @@ where } } - pub fn lookup(&self, item: T) -> Option { - self.lookup.get(&item).cloned() + pub fn lookup(&self, item: &T) -> Option { + self.lookup.get(item).cloned() } pub fn len(&self) -> usize { diff --git a/automerge/src/legacy/mod.rs b/automerge/src/legacy/mod.rs index 0968d290..91e07298 100644 --- a/automerge/src/legacy/mod.rs +++ b/automerge/src/legacy/mod.rs @@ -2,8 +2,8 @@ mod serde_impls; mod utility_impls; use std::iter::FromIterator; +pub(crate) use crate::types::{ActorId, ChangeHash, ObjType, OpType, ScalarValue}; pub(crate) use crate::value::DataType; -pub(crate) use crate::{ActorId, ChangeHash, ObjType, OpType, ScalarValue}; use serde::{Deserialize, Serialize}; use smol_str::SmolStr; diff --git a/automerge/src/legacy/serde_impls/scalar_value.rs b/automerge/src/legacy/serde_impls/scalar_value.rs index c04d359a..7a08f697 100644 --- a/automerge/src/legacy/serde_impls/scalar_value.rs +++ b/automerge/src/legacy/serde_impls/scalar_value.rs @@ -1,7 +1,7 @@ use serde::{de, Deserialize, Deserializer}; use smol_str::SmolStr; -use crate::ScalarValue; +use crate::types::ScalarValue; impl<'de> Deserialize<'de> for ScalarValue { fn deserialize(deserializer: D) -> Result diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index c2595c68..27de9c39 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -1,7 +1,3 @@ -extern crate hex; -extern crate uuid; -extern crate web_sys; - #[macro_export] macro_rules! log { ( $( $t:tt )* ) => { @@ -28,1303 +24,32 @@ macro_rules! 
__log { } } +mod automerge; mod change; mod clock; mod columnar; mod decoding; mod encoding; +mod error; +mod exid; mod indexed_cache; mod legacy; -mod sync; -#[cfg(feature = "optree-visualisation")] -mod visualisation; - -mod error; mod op_set; mod op_tree; mod query; +mod sync; mod types; mod value; +#[cfg(feature = "optree-visualisation")] +mod visualisation; -use change::{encode_document, export_change}; -use clock::Clock; -use indexed_cache::IndexedCache; -use op_set::OpSet; -use std::collections::{HashMap, HashSet, VecDeque}; -use types::{ElemId, Key, ObjId, Op, HEAD}; -use unicode_segmentation::UnicodeSegmentation; - +pub use crate::automerge::Automerge; pub use change::{decode_change, Change}; pub use error::AutomergeError; +pub use exid::ExId as ObjId; pub use legacy::Change as ExpandedChange; pub use sync::{BloomFilter, SyncHave, SyncMessage, SyncState}; -pub use types::{ - ActorId, ChangeHash, Export, Exportable, Importable, ObjType, OpId, OpType, Patch, Peer, Prop, - ROOT, -}; +pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; pub use value::{ScalarValue, Value}; -#[derive(Debug, Clone)] -pub struct Automerge { - queue: Vec, - history: Vec, - history_index: HashMap, - states: HashMap>, - deps: HashSet, - saved: Vec, - ops: OpSet, - actor: Option, - max_op: u64, - transaction: Option, -} - -impl Automerge { - pub fn new() -> Self { - Automerge { - queue: vec![], - history: vec![], - history_index: HashMap::new(), - states: HashMap::new(), - ops: Default::default(), - deps: Default::default(), - saved: Default::default(), - actor: None, - max_op: 0, - transaction: None, - } - } - - pub fn set_actor(&mut self, actor: ActorId) { - self.ensure_transaction_closed(); - self.actor = Some(self.ops.m.actors.cache(actor)) - } - - fn random_actor(&mut self) -> ActorId { - let actor = ActorId::from(uuid::Uuid::new_v4().as_bytes().to_vec()); - self.actor = Some(self.ops.m.actors.cache(actor.clone())); - actor - } - - pub fn get_actor(&mut self) -> ActorId 
{ - if let Some(actor) = self.actor { - self.ops.m.actors[actor].clone() - } else { - self.random_actor() - } - } - - pub fn maybe_get_actor(&self) -> Option { - self.actor.map(|i| self.ops.m.actors[i].clone()) - } - - fn get_actor_index(&mut self) -> usize { - if let Some(actor) = self.actor { - actor - } else { - self.random_actor(); - self.actor.unwrap() // random_actor always sets actor to is_some() - } - } - - pub fn new_with_actor_id(actor: ActorId) -> Self { - let mut am = Automerge { - queue: vec![], - history: vec![], - history_index: HashMap::new(), - states: HashMap::new(), - ops: Default::default(), - deps: Default::default(), - saved: Default::default(), - actor: None, - max_op: 0, - transaction: None, - }; - am.actor = Some(am.ops.m.actors.cache(actor)); - am - } - - pub fn pending_ops(&self) -> u64 { - self.transaction - .as_ref() - .map(|t| t.operations.len() as u64) - .unwrap_or(0) - } - - fn tx(&mut self) -> &mut Transaction { - if self.transaction.is_none() { - let actor = self.get_actor_index(); - - let seq = self.states.entry(actor).or_default().len() as u64 + 1; - let mut deps = self.get_heads(); - if seq > 1 { - let last_hash = self.get_hash(actor, seq - 1).unwrap(); - if !deps.contains(&last_hash) { - deps.push(last_hash); - } - } - - self.transaction = Some(Transaction { - actor, - seq, - start_op: self.max_op + 1, - time: 0, - message: None, - extra_bytes: Default::default(), - hash: None, - operations: vec![], - deps, - }); - } - - self.transaction.as_mut().unwrap() - } - - pub fn commit(&mut self, message: Option, time: Option) -> Vec { - let tx = self.tx(); - - if message.is_some() { - tx.message = message; - } - - if let Some(t) = time { - tx.time = t; - } - - tx.operations.len(); - - self.ensure_transaction_closed(); - - self.get_heads() - } - - pub fn ensure_transaction_closed(&mut self) { - if let Some(tx) = self.transaction.take() { - self.update_history(export_change(&tx, &self.ops.m.actors, &self.ops.m.props)); - } - } - - pub fn 
rollback(&mut self) -> usize { - if let Some(tx) = self.transaction.take() { - let num = tx.operations.len(); - for op in &tx.operations { - for pred_id in &op.pred { - // FIXME - use query to make this fast - if let Some(p) = self.ops.iter().position(|o| o.id == *pred_id) { - self.ops - .replace(op.obj, p, |o| o.succ.retain(|i| i != pred_id)); - } - } - if let Some(pos) = self.ops.iter().position(|o| o.id == op.id) { - self.ops.remove(op.obj, pos); - } - } - num - } else { - 0 - } - } - - fn next_id(&mut self) -> OpId { - let tx = self.tx(); - OpId(tx.start_op + tx.operations.len() as u64, tx.actor) - } - - fn insert_local_op(&mut self, op: Op, pos: usize, succ_pos: &[usize]) { - for succ in succ_pos { - self.ops.replace(op.obj, *succ, |old_op| { - old_op.succ.push(op.id); - }); - } - - if !op.is_del() { - self.ops.insert(pos, op.clone()); - } - - self.tx().operations.push(op); - } - - fn insert_op(&mut self, op: Op) -> Op { - let q = self.ops.search(op.obj, query::SeekOp::new(&op)); - - for i in q.succ { - self.ops - .replace(op.obj, i, |old_op| old_op.succ.push(op.id)); - } - - if !op.is_del() { - self.ops.insert(q.pos, op.clone()); - } - op - } - - // KeysAt::() - // LenAt::() - // PropAt::() - // NthAt::() - - pub fn keys(&self, obj: OpId) -> Vec { - let q = self.ops.search(obj.into(), query::Keys::new()); - q.keys.iter().map(|k| self.export(*k)).collect() - } - - pub fn keys_at(&self, obj: OpId, heads: &[ChangeHash]) -> Vec { - let clock = self.clock_at(heads); - let q = self.ops.search(obj.into(), query::KeysAt::new(clock)); - q.keys.iter().map(|k| self.export(*k)).collect() - } - - pub fn length(&self, obj: OpId) -> usize { - self.ops.search(obj.into(), query::Len::new(obj.into())).len - } - - pub fn length_at(&self, obj: OpId, heads: &[ChangeHash]) -> usize { - let clock = self.clock_at(heads); - self.ops.search(obj.into(), query::LenAt::new(clock)).len - } - - // set(obj, prop, value) - value can be scalar or objtype - // del(obj, prop) - // inc(obj, 
prop, value) - // insert(obj, index, value) - - /// Set the value of property `P` to value `V` in object `obj`. - /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - pub fn set, V: Into>( - &mut self, - obj: OpId, - prop: P, - value: V, - ) -> Result, AutomergeError> { - let value = value.into(); - self.local_op(obj.into(), prop.into(), value.into()) - } - - pub fn insert>( - &mut self, - obj: OpId, - index: usize, - value: V, - ) -> Result { - let obj = obj.into(); - let id = self.next_id(); - - let query = self.ops.search(obj, query::InsertNth::new(index)); - - let key = query.key()?; - let value = value.into(); - - let op = Op { - change: self.history.len(), - id, - action: value.into(), - obj, - key, - succ: Default::default(), - pred: Default::default(), - insert: true, - }; - - self.ops.insert(query.pos, op.clone()); - self.tx().operations.push(op); - - Ok(id) - } - - pub fn inc>( - &mut self, - obj: OpId, - prop: P, - value: i64, - ) -> Result { - match self.local_op(obj.into(), prop.into(), OpType::Inc(value))? { - Some(opid) => Ok(opid), - None => { - panic!("increment should always create a new op") - } - } - } - - pub fn del>(&mut self, obj: OpId, prop: P) -> Result { - // TODO: Should we also no-op multiple delete operations? - match self.local_op(obj.into(), prop.into(), OpType::Del)? { - Some(opid) => Ok(opid), - None => { - panic!("delete should always create a new op") - } - } - } - - /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert - /// the new elements - pub fn splice( - &mut self, - obj: OpId, - mut pos: usize, - del: usize, - vals: Vec, - ) -> Result, AutomergeError> { - for _ in 0..del { - self.del(obj, pos)?; - } - let mut result = Vec::with_capacity(vals.len()); - for v in vals { - result.push(self.insert(obj, pos, v)?); - pos += 1; - } - Ok(result) - } - - pub fn splice_text( - &mut self, - obj: OpId, - pos: usize, - del: usize, - text: &str, - ) -> Result, AutomergeError> { - let mut vals = vec![]; - for c in text.to_owned().graphemes(true) { - vals.push(c.into()); - } - self.splice(obj, pos, del, vals) - } - - pub fn text(&self, obj: OpId) -> Result { - let obj = obj.into(); - let query = self.ops.search(obj, query::ListVals::new(obj)); - let mut buffer = String::new(); - for q in &query.ops { - if let OpType::Set(ScalarValue::Str(s)) = &q.action { - buffer.push_str(s); - } - } - Ok(buffer) - } - - pub fn text_at(&self, obj: OpId, heads: &[ChangeHash]) -> Result { - let clock = self.clock_at(heads); - let obj = obj.into(); - let query = self.ops.search(obj, query::ListValsAt::new(clock)); - let mut buffer = String::new(); - for q in &query.ops { - if let OpType::Set(ScalarValue::Str(s)) = &q.action { - buffer.push_str(s); - } - } - Ok(buffer) - } - - // TODO - I need to return these OpId's here **only** to get - // the legacy conflicts format of { [opid]: value } - // Something better? 
- pub fn value>( - &self, - obj: OpId, - prop: P, - ) -> Result, AutomergeError> { - Ok(self.values(obj, prop.into())?.first().cloned()) - } - - pub fn value_at>( - &self, - obj: OpId, - prop: P, - heads: &[ChangeHash], - ) -> Result, AutomergeError> { - Ok(self.values_at(obj, prop, heads)?.first().cloned()) - } - - pub fn values>( - &self, - obj: OpId, - prop: P, - ) -> Result, AutomergeError> { - let obj = obj.into(); - let result = match prop.into() { - Prop::Map(p) => { - let prop = self.ops.m.props.lookup(p); - if let Some(p) = prop { - self.ops - .search(obj, query::Prop::new(obj, p)) - .ops - .into_iter() - .map(|o| o.into()) - .collect() - } else { - vec![] - } - } - Prop::Seq(n) => self - .ops - .search(obj, query::Nth::new(n)) - .ops - .into_iter() - .map(|o| o.into()) - .collect(), - }; - Ok(result) - } - - pub fn values_at>( - &self, - obj: OpId, - prop: P, - heads: &[ChangeHash], - ) -> Result, AutomergeError> { - let prop = prop.into(); - let obj = obj.into(); - let clock = self.clock_at(heads); - let result = match prop { - Prop::Map(p) => { - let prop = self.ops.m.props.lookup(p); - if let Some(p) = prop { - self.ops - .search(obj, query::PropAt::new(p, clock)) - .ops - .into_iter() - .map(|o| o.into()) - .collect() - } else { - vec![] - } - } - Prop::Seq(n) => self - .ops - .search(obj, query::NthAt::new(n, clock)) - .ops - .into_iter() - .map(|o| o.into()) - .collect(), - }; - Ok(result) - } - - pub fn load(data: &[u8]) -> Result { - let changes = Change::load_document(data)?; - let mut doc = Self::new(); - doc.apply_changes(&changes)?; - Ok(doc) - } - - pub fn load_incremental(&mut self, data: &[u8]) -> Result { - let changes = Change::load_document(data)?; - let start = self.ops.len(); - self.apply_changes(&changes)?; - let delta = self.ops.len() - start; - Ok(delta) - } - - pub fn apply_changes(&mut self, changes: &[Change]) -> Result { - self.ensure_transaction_closed(); - for c in changes { - if !self.history_index.contains_key(&c.hash) { - 
if self.is_causally_ready(c) { - self.apply_change(c.clone()); - } else { - self.queue.push(c.clone()); - while let Some(c) = self.pop_next_causally_ready_change() { - self.apply_change(c); - } - } - } - } - Ok(Patch {}) - } - - pub fn apply_change(&mut self, change: Change) { - self.ensure_transaction_closed(); - let ops = self.import_ops(&change, self.history.len()); - self.update_history(change); - for op in ops { - self.insert_op(op); - } - } - - fn local_op( - &mut self, - obj: ObjId, - prop: Prop, - action: OpType, - ) -> Result, AutomergeError> { - match prop { - Prop::Map(s) => self.local_map_op(obj, s, action), - Prop::Seq(n) => self.local_list_op(obj, n, action), - } - } - - fn local_map_op( - &mut self, - obj: ObjId, - prop: String, - action: OpType, - ) -> Result, AutomergeError> { - if prop.is_empty() { - return Err(AutomergeError::EmptyStringKey); - } - - let id = self.next_id(); - let prop = self.ops.m.props.cache(prop); - let query = self.ops.search(obj, query::Prop::new(obj, prop)); - - match (&query.ops[..], &action) { - // If there are no conflicts for this value and the old operation and the new operation are - // both setting the same value then we do nothing. - ( - &[Op { - action: OpType::Set(ref old_v), - .. 
- }], - OpType::Set(new_v), - ) if old_v == new_v => { - return Ok(None); - } - _ => {} - } - - let pred = query.ops.iter().map(|op| op.id).collect(); - - let op = Op { - change: self.history.len(), - id, - action, - obj, - key: Key::Map(prop), - succ: Default::default(), - pred, - insert: false, - }; - - self.insert_local_op(op, query.pos, &query.ops_pos); - - Ok(Some(id)) - } - - fn local_list_op( - &mut self, - obj: ObjId, - index: usize, - action: OpType, - ) -> Result, AutomergeError> { - let query = self.ops.search(obj, query::Nth::new(index)); - - let id = self.next_id(); - let pred = query.ops.iter().map(|op| op.id).collect(); - let key = query.key()?; - - match (&query.ops[..], &action) { - // If there are no conflicts for this value and the old operation and the new operation are - // both setting the same value then we do nothing. - ( - &[Op { - action: OpType::Set(ref old_v), - .. - }], - OpType::Set(new_v), - ) if old_v == new_v => { - return Ok(None); - } - _ => {} - } - - let op = Op { - change: self.history.len(), - id, - action, - obj, - key, - succ: Default::default(), - pred, - insert: false, - }; - - self.insert_local_op(op, query.pos, &query.ops_pos); - - Ok(Some(id)) - } - - fn is_causally_ready(&self, change: &Change) -> bool { - change - .deps - .iter() - .all(|d| self.history_index.contains_key(d)) - } - - fn pop_next_causally_ready_change(&mut self) -> Option { - let mut index = 0; - while index < self.queue.len() { - if self.is_causally_ready(&self.queue[index]) { - return Some(self.queue.swap_remove(index)); - } - index += 1; - } - None - } - - fn import_ops(&mut self, change: &Change, change_id: usize) -> Vec { - change - .iter_ops() - .enumerate() - .map(|(i, c)| { - let actor = self.ops.m.actors.cache(change.actor_id().clone()); - let id = OpId(change.start_op + i as u64, actor); - // FIXME dont need to_string() - let obj: ObjId = self.import(&c.obj.to_string()).unwrap(); - let pred = c - .pred - .iter() - .map(|i| 
self.import(&i.to_string()).unwrap()) - .collect(); - let key = match &c.key { - legacy::Key::Map(n) => Key::Map(self.ops.m.props.cache(n.to_string())), - legacy::Key::Seq(legacy::ElementId::Head) => Key::Seq(HEAD), - // FIXME dont need to_string() - legacy::Key::Seq(legacy::ElementId::Id(i)) => { - Key::Seq(self.import(&i.to_string()).unwrap()) - } - }; - Op { - change: change_id, - id, - action: c.action, - obj, - key, - succ: Default::default(), - pred, - insert: c.insert, - } - }) - .collect() - } - - /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge(&mut self, other: &mut Self) { - // TODO: Make this fallible and figure out how to do this transactionally - other.ensure_transaction_closed(); - let changes = self - .get_changes_added(other) - .into_iter() - .cloned() - .collect::>(); - self.apply_changes(&changes).unwrap(); - } - - pub fn save(&mut self) -> Result, AutomergeError> { - self.ensure_transaction_closed(); - // TODO - would be nice if I could pass an iterator instead of a collection here - let c: Vec<_> = self.history.iter().map(|c| c.decode()).collect(); - let ops: Vec<_> = self.ops.iter().cloned().collect(); - // TODO - can we make encode_document error free - let bytes = encode_document( - &c, - ops.as_slice(), - &self.ops.m.actors, - &self.ops.m.props.cache, - ); - if bytes.is_ok() { - self.saved = self.get_heads().iter().copied().collect(); - } - bytes - } - - // should this return an empty vec instead of None? - pub fn save_incremental(&mut self) -> Vec { - self.ensure_transaction_closed(); - let changes = self._get_changes(self.saved.as_slice()); - let mut bytes = vec![]; - for c in changes { - bytes.extend(c.raw_bytes()); - } - if !bytes.is_empty() { - self.saved = self._get_heads().iter().copied().collect(); - } - bytes - } - - /// Filter the changes down to those that are not transitive dependencies of the heads. - /// - /// Thus a graph with these heads has not seen the remaining changes. 
- pub(crate) fn filter_changes(&self, heads: &[ChangeHash], changes: &mut HashSet) { - // Reduce the working set to find to those which we may be able to find. - // This filters out those hashes that are successors of or concurrent with all of the - // heads. - // This can help in avoiding traversing the entire graph back to the roots when we try to - // search for a hash we can know won't be found there. - let max_head_index = heads - .iter() - .map(|h| self.history_index.get(h).unwrap_or(&0)) - .max() - .unwrap_or(&0); - let mut may_find: HashSet = changes - .iter() - .filter(|hash| { - let change_index = self.history_index.get(hash).unwrap_or(&0); - change_index <= max_head_index - }) - .copied() - .collect(); - - if may_find.is_empty() { - return; - } - - let mut queue: VecDeque<_> = heads.iter().collect(); - let mut seen = HashSet::new(); - while let Some(hash) = queue.pop_front() { - if seen.contains(hash) { - continue; - } - seen.insert(hash); - - let removed = may_find.remove(hash); - changes.remove(hash); - if may_find.is_empty() { - break; - } - - for dep in self - .history_index - .get(hash) - .and_then(|i| self.history.get(*i)) - .map(|c| c.deps.as_slice()) - .unwrap_or_default() - { - // if we just removed something from our hashes then it is likely there is more - // down here so do a quick inspection on the children. - // When we don't remove anything it is less likely that there is something down - // that chain so delay it. 
- if removed { - queue.push_front(dep); - } else { - queue.push_back(dep); - } - } - } - } - - pub fn get_missing_deps(&mut self, heads: &[ChangeHash]) -> Vec { - self.ensure_transaction_closed(); - self._get_missing_deps(heads) - } - - fn _get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { - let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash).collect(); - let mut missing = HashSet::new(); - - for head in self.queue.iter().flat_map(|change| &change.deps) { - if !self.history_index.contains_key(head) { - missing.insert(head); - } - } - - for head in heads { - if !self.history_index.contains_key(head) { - missing.insert(head); - } - } - - let mut missing = missing - .into_iter() - .filter(|hash| !in_queue.contains(hash)) - .copied() - .collect::>(); - missing.sort(); - missing - } - - fn get_changes_fast(&self, have_deps: &[ChangeHash]) -> Option> { - if have_deps.is_empty() { - return Some(self.history.iter().collect()); - } - - let lowest_idx = have_deps - .iter() - .filter_map(|h| self.history_index.get(h)) - .min()? - + 1; - - let mut missing_changes = vec![]; - let mut has_seen: HashSet<_> = have_deps.iter().collect(); - for change in &self.history[lowest_idx..] 
{ - let deps_seen = change.deps.iter().filter(|h| has_seen.contains(h)).count(); - if deps_seen > 0 { - if deps_seen != change.deps.len() { - // future change depends on something we haven't seen - fast path cant work - return None; - } - missing_changes.push(change); - has_seen.insert(&change.hash); - } - } - - // if we get to the end and there is a head we haven't seen then fast path cant work - if self._get_heads().iter().all(|h| has_seen.contains(h)) { - Some(missing_changes) - } else { - None - } - } - - fn get_changes_slow(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { - let mut stack: Vec<_> = have_deps.iter().collect(); - let mut has_seen = HashSet::new(); - while let Some(hash) = stack.pop() { - if has_seen.contains(&hash) { - continue; - } - if let Some(change) = self - .history_index - .get(hash) - .and_then(|i| self.history.get(*i)) - { - stack.extend(change.deps.iter()); - } - has_seen.insert(hash); - } - self.history - .iter() - .filter(|change| !has_seen.contains(&change.hash)) - .collect() - } - - pub fn get_last_local_change(&mut self) -> Option<&Change> { - self.ensure_transaction_closed(); - if let Some(actor) = &self.actor { - let actor = &self.ops.m.actors[*actor]; - return self.history.iter().rev().find(|c| c.actor_id() == actor); - } - None - } - - pub fn get_changes(&mut self, have_deps: &[ChangeHash]) -> Vec<&Change> { - self.ensure_transaction_closed(); - self._get_changes(have_deps) - } - - fn _get_changes(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { - if let Some(changes) = self.get_changes_fast(have_deps) { - changes - } else { - self.get_changes_slow(have_deps) - } - } - - fn clock_at(&self, heads: &[ChangeHash]) -> Clock { - let mut clock = Clock::new(); - let mut seen = HashSet::new(); - let mut to_see = heads.to_vec(); - // FIXME - faster - while let Some(hash) = to_see.pop() { - if let Some(c) = self._get_change_by_hash(&hash) { - for h in &c.deps { - if !seen.contains(h) { - to_see.push(*h); - } - } - let actor = 
self.ops.m.actors.lookup(c.actor_id().clone()).unwrap(); - clock.include(actor, c.max_op()); - seen.insert(hash); - } - } - clock - } - - pub fn get_change_by_hash(&mut self, hash: &ChangeHash) -> Option<&Change> { - self.ensure_transaction_closed(); - self._get_change_by_hash(hash) - } - - fn _get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { - self.history_index - .get(hash) - .and_then(|index| self.history.get(*index)) - } - - pub fn get_changes_added<'a>(&mut self, other: &'a Self) -> Vec<&'a Change> { - self.ensure_transaction_closed(); - self._get_changes_added(other) - } - - fn _get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { - // Depth-first traversal from the heads through the dependency graph, - // until we reach a change that is already present in other - let mut stack: Vec<_> = other._get_heads(); - let mut seen_hashes = HashSet::new(); - let mut added_change_hashes = Vec::new(); - while let Some(hash) = stack.pop() { - if !seen_hashes.contains(&hash) && self._get_change_by_hash(&hash).is_none() { - seen_hashes.insert(hash); - added_change_hashes.push(hash); - if let Some(change) = other._get_change_by_hash(&hash) { - stack.extend(&change.deps); - } - } - } - // Return those changes in the reverse of the order in which the depth-first search - // found them. This is not necessarily a topological sort, but should usually be close. 
- added_change_hashes.reverse(); - added_change_hashes - .into_iter() - .filter_map(|h| other._get_change_by_hash(&h)) - .collect() - } - - pub fn get_heads(&mut self) -> Vec { - self.ensure_transaction_closed(); - self._get_heads() - } - - fn _get_heads(&self) -> Vec { - let mut deps: Vec<_> = self.deps.iter().copied().collect(); - deps.sort_unstable(); - deps - } - - fn get_hash(&mut self, actor: usize, seq: u64) -> Result { - self.states - .get(&actor) - .and_then(|v| v.get(seq as usize - 1)) - .and_then(|&i| self.history.get(i)) - .map(|c| c.hash) - .ok_or(AutomergeError::InvalidSeq(seq)) - } - - fn update_history(&mut self, change: Change) -> usize { - self.max_op = std::cmp::max(self.max_op, change.start_op + change.len() as u64 - 1); - - self.update_deps(&change); - - let history_index = self.history.len(); - - self.states - .entry(self.ops.m.actors.cache(change.actor_id().clone())) - .or_default() - .push(history_index); - - self.history_index.insert(change.hash, history_index); - self.history.push(change); - - history_index - } - - fn update_deps(&mut self, change: &Change) { - for d in &change.deps { - self.deps.remove(d); - } - self.deps.insert(change.hash); - } - - pub fn import(&self, s: &str) -> Result { - if let Some(x) = I::from(s) { - Ok(x) - } else { - let n = s - .find('@') - .ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?; - let counter = s[0..n] - .parse() - .map_err(|_| AutomergeError::InvalidOpId(s.to_owned()))?; - let actor = ActorId::from(hex::decode(&s[(n + 1)..]).unwrap()); - let actor = self - .ops - .m - .actors - .lookup(actor) - .ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?; - Ok(I::wrap(OpId(counter, actor))) - } - } - - pub fn export(&self, id: E) -> String { - match id.export() { - Export::Id(id) => format!("{}@{}", id.counter(), self.ops.m.actors[id.actor()]), - Export::Prop(index) => self.ops.m.props[index].clone(), - Export::Special(s) => s, - } - } - - pub fn dump(&self) { - log!( - " {:12} {:12} {:12} 
{} {} {}", - "id", - "obj", - "key", - "value", - "pred", - "succ" - ); - for i in self.ops.iter() { - let id = self.export(i.id); - let obj = self.export(i.obj); - let key = match i.key { - Key::Map(n) => self.ops.m.props[n].clone(), - Key::Seq(n) => self.export(n), - }; - let value: String = match &i.action { - OpType::Set(value) => format!("{}", value), - OpType::Make(obj) => format!("make{}", obj), - OpType::Inc(obj) => format!("inc{}", obj), - OpType::Del => format!("del{}", 0), - }; - let pred: Vec<_> = i.pred.iter().map(|id| self.export(*id)).collect(); - let succ: Vec<_> = i.succ.iter().map(|id| self.export(*id)).collect(); - log!( - " {:12} {:12} {:12} {} {:?} {:?}", - id, - obj, - key, - value, - pred, - succ - ); - } - } - - #[cfg(feature = "optree-visualisation")] - pub fn visualise_optree(&self) -> String { - self.ops.visualise() - } -} - -#[derive(Debug, Clone)] -pub(crate) struct Transaction { - pub actor: usize, - pub seq: u64, - pub start_op: u64, - pub time: i64, - pub message: Option, - pub extra_bytes: Vec, - pub hash: Option, - pub deps: Vec, - pub operations: Vec, -} - -impl Default for Automerge { - fn default() -> Self { - Self::new() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use std::convert::TryInto; - - #[test] - fn insert_op() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor(ActorId::random()); - doc.set(ROOT, "hello", "world")?; - assert!(doc.pending_ops() == 1); - doc.value(ROOT, "hello")?; - Ok(()) - } - - #[test] - fn test_list() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor(ActorId::random()); - let list_id = doc.set(ROOT, "items", Value::list())?.unwrap(); - doc.set(ROOT, "zzz", "zzzval")?; - assert!(doc.value(ROOT, "items")?.unwrap().1 == list_id); - doc.insert(list_id, 0, "a")?; - doc.insert(list_id, 0, "b")?; - doc.insert(list_id, 2, "c")?; - doc.insert(list_id, 1, "d")?; - assert!(doc.value(list_id, 0)?.unwrap().0 == "b".into()); - 
assert!(doc.value(list_id, 1)?.unwrap().0 == "d".into()); - assert!(doc.value(list_id, 2)?.unwrap().0 == "a".into()); - assert!(doc.value(list_id, 3)?.unwrap().0 == "c".into()); - assert!(doc.length(list_id) == 4); - doc.save()?; - Ok(()) - } - - #[test] - fn test_del() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor(ActorId::random()); - doc.set(ROOT, "xxx", "xxx")?; - assert!(!doc.values(ROOT, "xxx")?.is_empty()); - doc.del(ROOT, "xxx")?; - assert!(doc.values(ROOT, "xxx")?.is_empty()); - Ok(()) - } - - #[test] - fn test_inc() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - let id = doc.set(ROOT, "counter", Value::counter(10))?.unwrap(); - assert!(doc.value(ROOT, "counter")? == Some((Value::counter(10), id))); - doc.inc(ROOT, "counter", 10)?; - assert!(doc.value(ROOT, "counter")? == Some((Value::counter(20), id))); - doc.inc(ROOT, "counter", -5)?; - assert!(doc.value(ROOT, "counter")? == Some((Value::counter(15), id))); - Ok(()) - } - - #[test] - fn test_save_incremental() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - - doc.set(ROOT, "foo", 1)?; - - let save1 = doc.save().unwrap(); - - doc.set(ROOT, "bar", 2)?; - - let save2 = doc.save_incremental(); - - doc.set(ROOT, "baz", 3)?; - - let save3 = doc.save_incremental(); - - let mut save_a: Vec = vec![]; - save_a.extend(&save1); - save_a.extend(&save2); - save_a.extend(&save3); - - assert!(doc.save_incremental().is_empty()); - - let save_b = doc.save().unwrap(); - - assert!(save_b.len() < save_a.len()); - - let mut doc_a = Automerge::load(&save_a)?; - let mut doc_b = Automerge::load(&save_b)?; - - assert!(doc_a.values(ROOT, "baz")? 
== doc_b.values(ROOT, "baz")?); - - assert!(doc_a.save().unwrap() == doc_b.save().unwrap()); - - Ok(()) - } - - #[test] - fn test_save_text() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - let text = doc.set(ROOT, "text", Value::text())?.unwrap(); - let heads1 = doc.commit(None, None); - doc.splice_text(text, 0, 0, "hello world")?; - let heads2 = doc.commit(None, None); - doc.splice_text(text, 6, 0, "big bad ")?; - let heads3 = doc.commit(None, None); - - assert!(&doc.text(text)? == "hello big bad world"); - assert!(&doc.text_at(text, &heads1)?.is_empty()); - assert!(&doc.text_at(text, &heads2)? == "hello world"); - assert!(&doc.text_at(text, &heads3)? == "hello big bad world"); - - Ok(()) - } - - #[test] - fn test_props_vals_at() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor("aaaa".try_into().unwrap()); - doc.set(ROOT, "prop1", "val1")?; - doc.commit(None, None); - let heads1 = doc.get_heads(); - doc.set(ROOT, "prop1", "val2")?; - doc.commit(None, None); - let heads2 = doc.get_heads(); - doc.set(ROOT, "prop2", "val3")?; - doc.commit(None, None); - let heads3 = doc.get_heads(); - doc.del(ROOT, "prop1")?; - doc.commit(None, None); - let heads4 = doc.get_heads(); - doc.set(ROOT, "prop3", "val4")?; - doc.commit(None, None); - let heads5 = doc.get_heads(); - assert!(doc.keys_at(ROOT, &heads1) == vec!["prop1".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); - assert!(doc.value_at(ROOT, "prop2", &heads1)? == None); - assert!(doc.value_at(ROOT, "prop3", &heads1)? == None); - - assert!(doc.keys_at(ROOT, &heads2) == vec!["prop1".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); - assert!(doc.value_at(ROOT, "prop2", &heads2)? == None); - assert!(doc.value_at(ROOT, "prop3", &heads2)? 
== None); - - assert!(doc.keys_at(ROOT, &heads3) == vec!["prop1".to_owned(), "prop2".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); - assert!(doc.value_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(ROOT, "prop3", &heads3)? == None); - - assert!(doc.keys_at(ROOT, &heads4) == vec!["prop2".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads4)? == None); - assert!(doc.value_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(ROOT, "prop3", &heads4)? == None); - - assert!(doc.keys_at(ROOT, &heads5) == vec!["prop2".to_owned(), "prop3".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads5)? == None); - assert!(doc.value_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); - - assert!(doc.keys_at(ROOT, &[]).is_empty()); - assert!(doc.value_at(ROOT, "prop1", &[])? == None); - assert!(doc.value_at(ROOT, "prop2", &[])? == None); - assert!(doc.value_at(ROOT, "prop3", &[])? 
== None); - Ok(()) - } - - #[test] - fn test_len_at() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor("aaaa".try_into().unwrap()); - - let list = doc.set(ROOT, "list", Value::list())?.unwrap(); - let heads1 = doc.commit(None, None); - - doc.insert(list, 0, Value::int(10))?; - let heads2 = doc.commit(None, None); - - doc.set(list, 0, Value::int(20))?; - doc.insert(list, 0, Value::int(30))?; - let heads3 = doc.commit(None, None); - - doc.set(list, 1, Value::int(40))?; - doc.insert(list, 1, Value::int(50))?; - let heads4 = doc.commit(None, None); - - doc.del(list, 2)?; - let heads5 = doc.commit(None, None); - - doc.del(list, 0)?; - let heads6 = doc.commit(None, None); - - assert!(doc.length_at(list, &heads1) == 0); - assert!(doc.value_at(list, 0, &heads1)?.is_none()); - - assert!(doc.length_at(list, &heads2) == 1); - assert!(doc.value_at(list, 0, &heads2)?.unwrap().0 == Value::int(10)); - - assert!(doc.length_at(list, &heads3) == 2); - assert!(doc.value_at(list, 0, &heads3)?.unwrap().0 == Value::int(30)); - assert!(doc.value_at(list, 1, &heads3)?.unwrap().0 == Value::int(20)); - - assert!(doc.length_at(list, &heads4) == 3); - assert!(doc.value_at(list, 0, &heads4)?.unwrap().0 == Value::int(30)); - assert!(doc.value_at(list, 1, &heads4)?.unwrap().0 == Value::int(50)); - assert!(doc.value_at(list, 2, &heads4)?.unwrap().0 == Value::int(40)); - - assert!(doc.length_at(list, &heads5) == 2); - assert!(doc.value_at(list, 0, &heads5)?.unwrap().0 == Value::int(30)); - assert!(doc.value_at(list, 1, &heads5)?.unwrap().0 == Value::int(50)); - - assert!(doc.length_at(list, &heads6) == 1); - assert!(doc.value_at(list, 0, &heads6)?.unwrap().0 == Value::int(50)); - - Ok(()) - } -} +pub const ROOT: ObjId = ObjId::Root; diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 537cb80f..79fef3e4 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -1,6 +1,7 @@ +use crate::indexed_cache::IndexedCache; use 
crate::op_tree::OpTreeInternal; use crate::query::TreeQuery; -use crate::{ActorId, IndexedCache, Key, ObjId, Op, OpId}; +use crate::types::{ActorId, Key, ObjId, Op, OpId}; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::HashMap; diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 6142a7bf..c91c150e 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -6,7 +6,7 @@ use std::{ pub(crate) use crate::op_set::OpSetMetadata; use crate::query::{Index, QueryResult, TreeQuery}; -use crate::{Op, OpId}; +use crate::types::{Op, OpId}; use std::collections::HashSet; #[allow(dead_code)] @@ -628,7 +628,7 @@ struct CounterData { #[cfg(test)] mod tests { use crate::legacy as amp; - use crate::{Op, OpId}; + use crate::types::{Op, OpId}; use super::*; diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 15ac6fd6..c062c964 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -1,5 +1,5 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::{Clock, ElemId, Op, OpId, OpType, ScalarValue}; +use crate::types::{Clock, ElemId, Op, OpId, OpType, ScalarValue}; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::{HashMap, HashSet}; diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index 745af80e..b91f9970 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -1,6 +1,7 @@ +use crate::error::AutomergeError; use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::{AutomergeError, ElemId, Key, Op, HEAD}; +use crate::types::{ElemId, Key, Op, HEAD}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/keys.rs b/automerge/src/query/keys.rs index 12cfaaa6..e6f6486f 100644 --- a/automerge/src/query/keys.rs +++ b/automerge/src/query/keys.rs @@ -1,6 +1,6 @@ use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery, VisWindow}; -use 
crate::Key; +use crate::types::Key; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/keys_at.rs b/automerge/src/query/keys_at.rs index cd66b29e..81c8ba86 100644 --- a/automerge/src/query/keys_at.rs +++ b/automerge/src/query/keys_at.rs @@ -1,5 +1,5 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::{Clock, Key, Op}; +use crate::types::{Clock, Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/len.rs b/automerge/src/query/len.rs index 494b3515..f92b8096 100644 --- a/automerge/src/query/len.rs +++ b/automerge/src/query/len.rs @@ -1,17 +1,15 @@ use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery}; -use crate::ObjId; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct Len { - obj: ObjId, pub len: usize, } impl Len { - pub fn new(obj: ObjId) -> Self { - Len { obj, len: 0 } + pub fn new() -> Self { + Len { len: 0 } } } diff --git a/automerge/src/query/len_at.rs b/automerge/src/query/len_at.rs index acf4af84..03187db1 100644 --- a/automerge/src/query/len_at.rs +++ b/automerge/src/query/len_at.rs @@ -1,5 +1,5 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::{Clock, ElemId, Op}; +use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/list_vals.rs b/automerge/src/query/list_vals.rs index c19ac4ad..0d8958fd 100644 --- a/automerge/src/query/list_vals.rs +++ b/automerge/src/query/list_vals.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, is_visible, visible_op, QueryResult, TreeQuery}; -use crate::{ElemId, ObjId, Op}; +use crate::types::{ElemId, ObjId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/list_vals_at.rs b/automerge/src/query/list_vals_at.rs index 3ae19d01..5d720bf6 100644 --- a/automerge/src/query/list_vals_at.rs +++ 
b/automerge/src/query/list_vals_at.rs @@ -1,5 +1,5 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::{Clock, ElemId, Op}; +use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index e76bc385..6000b71a 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -1,6 +1,7 @@ +use crate::error::AutomergeError; use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::{AutomergeError, ElemId, Key, Op}; +use crate::types::{ElemId, Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/nth_at.rs b/automerge/src/query/nth_at.rs index cecf82ac..7a867cad 100644 --- a/automerge/src/query/nth_at.rs +++ b/automerge/src/query/nth_at.rs @@ -1,5 +1,5 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::{Clock, ElemId, Op}; +use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs index ac4b2bca..11d2b0cd 100644 --- a/automerge/src/query/prop.rs +++ b/automerge/src/query/prop.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, is_visible, visible_op, QueryResult, TreeQuery}; -use crate::{Key, ObjId, Op}; +use crate::types::{Key, ObjId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/prop_at.rs b/automerge/src/query/prop_at.rs index 3fcb2c19..a5c02e34 100644 --- a/automerge/src/query/prop_at.rs +++ b/automerge/src/query/prop_at.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, QueryResult, TreeQuery, VisWindow}; -use crate::{Clock, Key, Op}; +use crate::types::{Clock, Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/seek_op.rs 
b/automerge/src/query/seek_op.rs index 5a6b3e24..c30a15f5 100644 --- a/automerge/src/query/seek_op.rs +++ b/automerge/src/query/seek_op.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::{Key, Op, HEAD}; +use crate::types::{Key, Op, HEAD}; use std::cmp::Ordering; use std::fmt::Debug; diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 62ee9935..3d58da70 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -6,9 +6,10 @@ use std::{ io::Write, }; +use crate::types::Patch; use crate::{ decoding, decoding::Decoder, encoding, encoding::Encodable, Automerge, AutomergeError, Change, - ChangeHash, Patch, + ChangeHash, }; mod bloom; diff --git a/automerge/src/types.rs b/automerge/src/types.rs index f00beed3..c8856fe4 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -1,6 +1,5 @@ use crate::error; use crate::legacy as amp; -use crate::ScalarValue; use serde::{Deserialize, Serialize}; use std::cmp::Eq; use std::convert::TryFrom; @@ -9,8 +8,11 @@ use std::fmt; use std::str::FromStr; use tinyvec::{ArrayVec, TinyVec}; +pub(crate) use crate::clock::Clock; +pub(crate) use crate::value::{ScalarValue, Value}; + pub(crate) const HEAD: ElemId = ElemId(OpId(0, 0)); -pub const ROOT: OpId = OpId(0, 0); +pub(crate) const ROOT: OpId = OpId(0, 0); const ROOT_STR: &str = "_root"; const HEAD_STR: &str = "_head"; @@ -161,23 +163,16 @@ pub enum OpType { } #[derive(Debug)] -pub enum Export { +pub(crate) enum Export { Id(OpId), Special(String), Prop(usize), } -pub trait Exportable { +pub(crate) trait Exportable { fn export(&self) -> Export; } -pub trait Importable { - fn wrap(id: OpId) -> Self; - fn from(s: &str) -> Option - where - Self: std::marker::Sized; -} - impl OpId { #[inline] pub fn counter(&self) -> u64 { @@ -234,45 +229,6 @@ impl Exportable for Key { } } -impl Importable for ObjId { - fn wrap(id: OpId) -> Self { - ObjId(id) - } - fn from(s: &str) -> 
Option { - if s == ROOT_STR { - Some(ROOT.into()) - } else { - None - } - } -} - -impl Importable for OpId { - fn wrap(id: OpId) -> Self { - id - } - fn from(s: &str) -> Option { - if s == ROOT_STR { - Some(ROOT) - } else { - None - } - } -} - -impl Importable for ElemId { - fn wrap(id: OpId) -> Self { - ElemId(id) - } - fn from(s: &str) -> Option { - if s == HEAD_STR { - Some(HEAD) - } else { - None - } - } -} - impl From for ObjId { fn from(o: OpId) -> Self { ObjId(o) @@ -352,11 +308,17 @@ impl Key { } #[derive(Debug, Clone, PartialOrd, Ord, Eq, PartialEq, Copy, Hash, Default)] -pub struct OpId(pub u64, pub usize); +pub(crate) struct OpId(pub u64, pub usize); #[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] pub(crate) struct ObjId(pub OpId); +impl ObjId { + pub fn root() -> Self { + ObjId(OpId(0, 0)) + } +} + #[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] pub(crate) struct ElemId(pub OpId); @@ -374,7 +336,11 @@ pub(crate) struct Op { impl Op { pub fn is_del(&self) -> bool { - matches!(self.action, OpType::Del) + matches!(&self.action, OpType::Del) + } + + pub fn is_noop(&self, action: &OpType) -> bool { + matches!((&self.action, action), (OpType::Set(n), OpType::Set(m)) if n == m) } pub fn overwrites(&self, other: &Op) -> bool { @@ -389,6 +355,14 @@ impl Op { } } + pub fn value(&self) -> Value { + match &self.action { + OpType::Make(obj_type) => Value::Object(*obj_type), + OpType::Set(scalar) => Value::Scalar(scalar.clone()), + _ => panic!("cant convert op into a value - {:?}", self), + } + } + #[allow(dead_code)] pub fn dump(&self) -> String { match &self.action { diff --git a/automerge/src/value.rs b/automerge/src/value.rs index 333c1f53..e5af0cb6 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -1,4 +1,5 @@ -use crate::{error, ObjType, Op, OpId, OpType}; +use crate::error; +use crate::types::{ObjType, Op, OpId, OpType}; use serde::{Deserialize, Serialize}; use smol_str::SmolStr; use 
std::convert::TryFrom; diff --git a/automerge/src/visualisation.rs b/automerge/src/visualisation.rs index 11233d50..81f52470 100644 --- a/automerge/src/visualisation.rs +++ b/automerge/src/visualisation.rs @@ -24,7 +24,7 @@ pub(crate) struct Node<'a, const B: usize> { #[derive(Clone)] pub(crate) enum NodeType<'a, const B: usize> { - ObjRoot(crate::ObjId), + ObjRoot(crate::types::ObjId), ObjTreeNode(&'a crate::op_tree::OpTreeNode), } @@ -225,7 +225,7 @@ impl OpTableRow { impl OpTableRow { fn create( - op: &super::Op, + op: &super::types::Op, metadata: &crate::op_set::OpSetMetadata, actor_shorthands: &HashMap, ) -> Self { @@ -236,8 +236,8 @@ impl OpTableRow { crate::OpType::Inc(v) => format!("inc {}", v), }; let prop = match op.key { - crate::Key::Map(k) => metadata.props[k].clone(), - crate::Key::Seq(e) => print_opid(&e.0, actor_shorthands), + crate::types::Key::Map(k) => metadata.props[k].clone(), + crate::types::Key::Seq(e) => print_opid(&e.0, actor_shorthands), }; let succ = op .succ @@ -254,6 +254,6 @@ impl OpTableRow { } } -fn print_opid(opid: &crate::OpId, actor_shorthands: &HashMap) -> String { +fn print_opid(opid: &crate::types::OpId, actor_shorthands: &HashMap) -> String { format!("{}@{}", opid.counter(), actor_shorthands[&opid.actor()]) } diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index d93a211b..ec4beb0f 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -1,4 +1,8 @@ -use std::{collections::HashMap, convert::TryInto, hash::Hash}; +use std::{ + collections::{BTreeMap, BTreeSet}, + convert::TryInto, + hash::Hash, +}; use serde::ser::{SerializeMap, SerializeSeq}; @@ -42,7 +46,7 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// map!{ /// "todos" => { /// todos => list![ -/// { todo => map!{ title = "water plants" } } +/// { map!{ title = "water plants" } } /// ] /// } /// } @@ -50,9 +54,9 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// /// 
``` /// -/// This might look more complicated than you were expecting. Why are there OpIds (`todos`, `todo`, -/// `title`) in there? Well the `RealizedObject` contains all the changes in the document tagged by -/// OpId. This makes it easy to test for conflicts: +/// This might look more complicated than you were expecting. Why is the first element in the list +/// wrapped in braces? Because every property in an automerge document can have multiple +/// conflicting values we must capture all of these. /// /// ```rust /// let mut doc1 = automerge::Automerge::new(); @@ -70,33 +74,20 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// } /// ); /// ``` -/// -/// ## Translating OpIds -/// -/// One thing you may have noticed in the example above is the `op2.translate(&doc2)` call. What is -/// that doing there? Well, the problem is that automerge OpIDs (in the current API) are specific -/// to a document. Using an opid from one document in a different document will not work. Therefore -/// this module defines an `OpIdExt` trait with a `translate` method on it. This method takes a -/// document and converts the opid into something which knows how to be compared with opids from -/// another document by using the document you pass to `translate`. Again, all you really need to -/// know is that when constructing a document for comparison you should call `translate(fromdoc)` -/// on opids which come from a document other than the one you pass to `assert_doc`. #[macro_export] macro_rules! 
assert_doc { ($doc: expr, $expected: expr) => {{ - use $crate::helpers::{realize, ExportableOpId}; + use $crate::helpers::realize; let realized = realize($doc); - let to_export: RealizedObject> = $expected.into(); - let exported = to_export.export($doc); - if realized != exported { + let expected_obj = $expected.into(); + if realized != expected_obj { let serde_right = serde_json::to_string_pretty(&realized).unwrap(); - let serde_left = serde_json::to_string_pretty(&exported).unwrap(); + let serde_left = serde_json::to_string_pretty(&expected_obj).unwrap(); panic!( "documents didn't match\n expected\n{}\n got\n{}", &serde_left, &serde_right ); } - pretty_assertions::assert_eq!(realized, exported); }}; } @@ -105,63 +96,52 @@ macro_rules! assert_doc { #[macro_export] macro_rules! assert_obj { ($doc: expr, $obj_id: expr, $prop: expr, $expected: expr) => {{ - use $crate::helpers::{realize_prop, ExportableOpId}; + use $crate::helpers::realize_prop; let realized = realize_prop($doc, $obj_id, $prop); - let to_export: RealizedObject> = $expected.into(); - let exported = to_export.export($doc); - if realized != exported { + let expected_obj = $expected.into(); + if realized != expected_obj { let serde_right = serde_json::to_string_pretty(&realized).unwrap(); - let serde_left = serde_json::to_string_pretty(&exported).unwrap(); + let serde_left = serde_json::to_string_pretty(&expected_obj).unwrap(); panic!( "documents didn't match\n expected\n{}\n got\n{}", &serde_left, &serde_right ); } - pretty_assertions::assert_eq!(realized, exported); }}; } /// Construct `RealizedObject::Map`. This macro takes a nested set of curl braces. 
The outer set is -/// the keys of the map, the inner set is the opid tagged values: +/// the keys of the map, the inner set is the set of values for that key: /// /// ``` /// map!{ /// "key" => { -/// opid1 => "value1", -/// opid2 => "value2", +/// "value1", +/// "value2", /// } /// } /// ``` /// /// The map above would represent a map with a conflict on the "key" property. The values can be -/// anything which implements `Into>`. Including nested calls to -/// `map!` or `list!`. +/// anything which implements `Into`. Including nested calls to `map!` or `list!`. #[macro_export] macro_rules! map { - (@single $($x:tt)*) => (()); - (@count $($rest:expr),*) => (<[()]>::len(&[$(map!(@single $rest)),*])); - - (@inner { $($opid:expr => $value:expr,)+ }) => { map!(@inner { $($opid => $value),+ }) }; - (@inner { $($opid:expr => $value:expr),* }) => { + (@inner { $($value:expr,)+ }) => { map!(@inner { $($value),+ }) }; + (@inner { $($value:expr),* }) => { { - use std::collections::HashMap; - let mut inner: HashMap, RealizedObject>> = HashMap::new(); + use std::collections::BTreeSet; + let mut inner: BTreeSet = BTreeSet::new(); $( - let _ = inner.insert($opid.into(), $value.into()); + let _ = inner.insert($value.into()); )* inner } }; - //(&inner $map:expr, $opid:expr => $value:expr, $($tail:tt),*) => { - //$map.insert($opid.into(), $value.into()); - //} ($($key:expr => $inner:tt,)+) => { map!($($key => $inner),+) }; ($($key:expr => $inner:tt),*) => { { - use std::collections::HashMap; - use crate::helpers::ExportableOpId; - let _cap = map!(@count $($key),*); - let mut _map: HashMap, RealizedObject>>> = ::std::collections::HashMap::with_capacity(_cap); + use std::collections::{BTreeMap, BTreeSet}; + let mut _map: BTreeMap> = ::std::collections::BTreeMap::new(); $( let inner = map!(@inner $inner); let _ = _map.insert($key.to_string(), inner); @@ -171,32 +151,32 @@ macro_rules! map { } } -/// Construct `RealizedObject::Sequence`. 
This macro represents a sequence of opid tagged values +/// Construct `RealizedObject::Sequence`. This macro represents a sequence of values /// /// ``` /// list![ /// { -/// opid1 => "value1", -/// opid2 => "value2", +/// "value1", +/// "value2", /// } /// ] /// ``` /// /// The list above would represent a list with a conflict on the 0 index. The values can be -/// anything which implements `Into>` including nested calls to +/// anything which implements `Into` including nested calls to /// `map!` or `list!`. #[macro_export] macro_rules! list { (@single $($x:tt)*) => (()); (@count $($rest:tt),*) => (<[()]>::len(&[$(list!(@single $rest)),*])); - (@inner { $($opid:expr => $value:expr,)+ }) => { list!(@inner { $($opid => $value),+ }) }; - (@inner { $($opid:expr => $value:expr),* }) => { + (@inner { $($value:expr,)+ }) => { list!(@inner { $($value),+ }) }; + (@inner { $($value:expr),* }) => { { - use std::collections::HashMap; - let mut inner: HashMap, RealizedObject>> = HashMap::new(); + use std::collections::BTreeSet; + let mut inner: BTreeSet = BTreeSet::new(); $( - let _ = inner.insert($opid.into(), $value.into()); + let _ = inner.insert($value.into()); )* inner } @@ -204,9 +184,8 @@ macro_rules! list { ($($inner:tt,)+) => { list!($($inner),+) }; ($($inner:tt),*) => { { - use crate::helpers::ExportableOpId; let _cap = list!(@count $($inner),*); - let mut _list: Vec, RealizedObject>>> = Vec::new(); + let mut _list: Vec> = Vec::new(); $( //println!("{}", stringify!($inner)); let inner = list!(@inner $inner); @@ -217,26 +196,6 @@ macro_rules! list { } } -/// Translate an op ID produced by one document to an op ID which can be understood by -/// another -/// -/// The current API of automerge exposes OpIds of the form (u64, usize) where the first component -/// is the counter of an actors lamport timestamp and the second component is the index into an -/// array of actor IDs stored by the document where the opid was generated. 
Obviously this is not -/// portable between documents as the index of the actor array is unlikely to match between two -/// documents. This function translates between the two representations. -/// -/// At some point we will probably change the API to not be document specific but this function -/// allows us to write tests first. -pub fn translate_obj_id( - from: &automerge::Automerge, - to: &automerge::Automerge, - id: automerge::OpId, -) -> automerge::OpId { - let exported = from.export(id); - to.import(&exported).unwrap() -} - pub fn mk_counter(value: i64) -> automerge::ScalarValue { automerge::ScalarValue::Counter(value) } @@ -252,14 +211,72 @@ impl std::fmt::Display for ExportedOpId { /// A `RealizedObject` is a representation of all the current values in a document - including /// conflicts. -#[derive(PartialEq, Debug)] -pub enum RealizedObject { - Map(HashMap>>), - Sequence(Vec>>), - Value(automerge::ScalarValue), +#[derive(PartialEq, PartialOrd, Ord, Eq, Hash, Debug)] +pub enum RealizedObject { + Map(BTreeMap>), + Sequence(Vec>), + Value(OrdScalarValue), } -impl serde::Serialize for RealizedObject { +// A copy of automerge::ScalarValue which uses decorum::Total for floating point values. This makes the type +// orderable, which is useful when we want to compare conflicting values of a register in an +// automerge document. 
+#[derive(PartialEq, Eq, PartialOrd, Ord, Debug, Hash)] +pub enum OrdScalarValue { + Bytes(Vec), + Str(smol_str::SmolStr), + Int(i64), + Uint(u64), + F64(decorum::Total), + Counter(i64), + Timestamp(i64), + Boolean(bool), + Null, +} + +impl From for OrdScalarValue { + fn from(v: automerge::ScalarValue) -> Self { + match v { + automerge::ScalarValue::Bytes(v) => OrdScalarValue::Bytes(v), + automerge::ScalarValue::Str(v) => OrdScalarValue::Str(v), + automerge::ScalarValue::Int(v) => OrdScalarValue::Int(v), + automerge::ScalarValue::Uint(v) => OrdScalarValue::Uint(v), + automerge::ScalarValue::F64(v) => OrdScalarValue::F64(decorum::Total::from(v)), + automerge::ScalarValue::Counter(v) => OrdScalarValue::Counter(v), + automerge::ScalarValue::Timestamp(v) => OrdScalarValue::Timestamp(v), + automerge::ScalarValue::Boolean(v) => OrdScalarValue::Boolean(v), + automerge::ScalarValue::Null => OrdScalarValue::Null, + } + } +} + +impl From<&OrdScalarValue> for automerge::ScalarValue { + fn from(v: &OrdScalarValue) -> Self { + match v { + OrdScalarValue::Bytes(v) => automerge::ScalarValue::Bytes(v.clone()), + OrdScalarValue::Str(v) => automerge::ScalarValue::Str(v.clone()), + OrdScalarValue::Int(v) => automerge::ScalarValue::Int(*v), + OrdScalarValue::Uint(v) => automerge::ScalarValue::Uint(*v), + OrdScalarValue::F64(v) => automerge::ScalarValue::F64(v.into_inner()), + OrdScalarValue::Counter(v) => automerge::ScalarValue::Counter(*v), + OrdScalarValue::Timestamp(v) => automerge::ScalarValue::Timestamp(*v), + OrdScalarValue::Boolean(v) => automerge::ScalarValue::Boolean(*v), + OrdScalarValue::Null => automerge::ScalarValue::Null, + } + } +} + +impl serde::Serialize for OrdScalarValue { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let s = automerge::ScalarValue::from(self); + s.serialize(serializer) + } +} + +impl serde::Serialize for RealizedObject { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -267,23 
+284,17 @@ impl serde::Serialize for RealizedObject { match self { Self::Map(kvs) => { let mut map_ser = serializer.serialize_map(Some(kvs.len()))?; - for (k, kvs) in kvs { - let kvs_serded = kvs - .iter() - .map(|(opid, value)| (opid.to_string(), value)) - .collect::>>(); - map_ser.serialize_entry(k, &kvs_serded)?; + for (k, vs) in kvs { + let vs_serded = vs.iter().collect::>(); + map_ser.serialize_entry(k, &vs_serded)?; } map_ser.end() } Self::Sequence(elems) => { let mut list_ser = serializer.serialize_seq(Some(elems.len()))?; for elem in elems { - let kvs_serded = elem - .iter() - .map(|(opid, value)| (opid.to_string(), value)) - .collect::>>(); - list_ser.serialize_element(&kvs_serded)?; + let vs_serded = elem.iter().collect::>(); + list_ser.serialize_element(&vs_serded)?; } list_ser.end() } @@ -292,30 +303,30 @@ impl serde::Serialize for RealizedObject { } } -pub fn realize(doc: &automerge::Automerge) -> RealizedObject { - realize_obj(doc, automerge::ROOT, automerge::ObjType::Map) +pub fn realize(doc: &automerge::Automerge) -> RealizedObject { + realize_obj(doc, &automerge::ROOT, automerge::ObjType::Map) } pub fn realize_prop>( doc: &automerge::Automerge, - obj_id: automerge::OpId, + obj_id: &automerge::ObjId, prop: P, -) -> RealizedObject { +) -> RealizedObject { let (val, obj_id) = doc.value(obj_id, prop).unwrap().unwrap(); match val { - automerge::Value::Object(obj_type) => realize_obj(doc, obj_id, obj_type), - automerge::Value::Scalar(v) => RealizedObject::Value(v), + automerge::Value::Object(obj_type) => realize_obj(doc, &obj_id, obj_type), + automerge::Value::Scalar(v) => RealizedObject::Value(OrdScalarValue::from(v)), } } pub fn realize_obj( doc: &automerge::Automerge, - obj_id: automerge::OpId, + obj_id: &automerge::ObjId, objtype: automerge::ObjType, -) -> RealizedObject { +) -> RealizedObject { match objtype { automerge::ObjType::Map | automerge::ObjType::Table => { - let mut result = HashMap::new(); + let mut result = BTreeMap::new(); for key in 
doc.keys(obj_id) { result.insert(key.clone(), realize_values(doc, obj_id, key)); } @@ -334,166 +345,63 @@ pub fn realize_obj( fn realize_values>( doc: &automerge::Automerge, - obj_id: automerge::OpId, + obj_id: &automerge::ObjId, key: K, -) -> HashMap> { - let mut values_by_opid = HashMap::new(); - for (value, opid) in doc.values(obj_id, key).unwrap() { +) -> BTreeSet { + let mut values = BTreeSet::new(); + for (value, objid) in doc.values(obj_id, key).unwrap() { let realized = match value { - automerge::Value::Object(objtype) => realize_obj(doc, opid, objtype), - automerge::Value::Scalar(v) => RealizedObject::Value(v), + automerge::Value::Object(objtype) => realize_obj(doc, &objid, objtype), + automerge::Value::Scalar(v) => RealizedObject::Value(OrdScalarValue::from(v)), }; - let exported_opid = ExportedOpId(doc.export(opid)); - values_by_opid.insert(exported_opid, realized); + values.insert(realized); } - values_by_opid + values } -impl<'a> RealizedObject> { - pub fn export(self, doc: &automerge::Automerge) -> RealizedObject { - match self { - Self::Map(kvs) => RealizedObject::Map( - kvs.into_iter() - .map(|(k, v)| { - ( - k, - v.into_iter() - .map(|(k, v)| (k.export(doc), v.export(doc))) - .collect(), - ) - }) - .collect(), - ), - Self::Sequence(values) => RealizedObject::Sequence( - values - .into_iter() - .map(|v| { - v.into_iter() - .map(|(k, v)| (k.export(doc), v.export(doc))) - .collect() - }) - .collect(), - ), - Self::Value(v) => RealizedObject::Value(v), - } - } -} - -impl<'a, O: Into>, I: Into>>> - From>> for RealizedObject> -{ - fn from(values: HashMap<&str, HashMap>) -> Self { +impl> From>> for RealizedObject { + fn from(values: BTreeMap<&str, BTreeSet>) -> Self { let intoed = values .into_iter() - .map(|(k, v)| { - ( - k.to_string(), - v.into_iter().map(|(k, v)| (k.into(), v.into())).collect(), - ) - }) + .map(|(k, v)| (k.to_string(), v.into_iter().map(|v| v.into()).collect())) .collect(); RealizedObject::Map(intoed) } } -impl<'a, O: Into>, I: 
Into>>> - From>> for RealizedObject> -{ - fn from(values: Vec>) -> Self { +impl> From>> for RealizedObject { + fn from(values: Vec>) -> Self { RealizedObject::Sequence( values .into_iter() - .map(|v| v.into_iter().map(|(k, v)| (k.into(), v.into())).collect()) + .map(|v| v.into_iter().map(|v| v.into()).collect()) .collect(), ) } } -impl From for RealizedObject> { +impl From for RealizedObject { fn from(b: bool) -> Self { - RealizedObject::Value(b.into()) + RealizedObject::Value(OrdScalarValue::Boolean(b)) } } -impl From for RealizedObject> { +impl From for RealizedObject { fn from(u: usize) -> Self { let v = u.try_into().unwrap(); - RealizedObject::Value(automerge::ScalarValue::Int(v)) + RealizedObject::Value(OrdScalarValue::Int(v)) } } -impl From for RealizedObject> { +impl From for RealizedObject { fn from(s: automerge::ScalarValue) -> Self { - RealizedObject::Value(s) + RealizedObject::Value(OrdScalarValue::from(s)) } } -impl From<&str> for RealizedObject> { +impl From<&str> for RealizedObject { fn from(s: &str) -> Self { - RealizedObject::Value(automerge::ScalarValue::Str(s.into())) - } -} - -#[derive(Eq, PartialEq, Hash)] -pub enum ExportableOpId<'a> { - Native(automerge::OpId), - Translate(Translate<'a>), -} - -impl<'a> ExportableOpId<'a> { - fn export(self, doc: &automerge::Automerge) -> ExportedOpId { - let oid = match self { - Self::Native(oid) => oid, - Self::Translate(Translate { from, opid }) => translate_obj_id(from, doc, opid), - }; - ExportedOpId(doc.export(oid)) - } -} - -pub struct Translate<'a> { - from: &'a automerge::Automerge, - opid: automerge::OpId, -} - -impl<'a> PartialEq for Translate<'a> { - fn eq(&self, other: &Self) -> bool { - self.from.maybe_get_actor().unwrap() == other.from.maybe_get_actor().unwrap() - && self.opid == other.opid - } -} - -impl<'a> Eq for Translate<'a> {} - -impl<'a> Hash for Translate<'a> { - fn hash(&self, state: &mut H) { - self.from.maybe_get_actor().unwrap().hash(state); - self.opid.hash(state); - } -} - -pub 
trait OpIdExt { - fn native(self) -> ExportableOpId<'static>; - fn translate(self, doc: &automerge::Automerge) -> ExportableOpId<'_>; -} - -impl OpIdExt for automerge::OpId { - /// Use this opid directly when exporting - fn native(self) -> ExportableOpId<'static> { - ExportableOpId::Native(self) - } - - /// Translate this OpID from `doc` when exporting - fn translate(self, doc: &automerge::Automerge) -> ExportableOpId<'_> { - ExportableOpId::Translate(Translate { - from: doc, - opid: self, - }) - } -} - -impl From for ExportableOpId<'_> { - fn from(oid: automerge::OpId) -> Self { - ExportableOpId::Native(oid) + RealizedObject::Value(OrdScalarValue::Str(smol_str::SmolStr::from(s))) } } diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 8dcc51df..2253f22b 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -4,17 +4,17 @@ mod helpers; #[allow(unused_imports)] use helpers::{ mk_counter, new_doc, new_doc_with_actor, pretty_print, realize, realize_obj, sorted_actors, - translate_obj_id, OpIdExt, RealizedObject, + RealizedObject, }; #[test] fn no_conflict_on_repeated_assignment() { let mut doc = Automerge::new(); - doc.set(automerge::ROOT, "foo", 1).unwrap(); - let op = doc.set(automerge::ROOT, "foo", 2).unwrap().unwrap(); + doc.set(&automerge::ROOT, "foo", 1).unwrap(); + doc.set(&automerge::ROOT, "foo", 2).unwrap(); assert_doc!( &doc, map! 
{ - "foo" => { op => 2}, + "foo" => { 2 }, } ); } @@ -22,51 +22,49 @@ fn no_conflict_on_repeated_assignment() { #[test] fn no_change_on_repeated_map_set() { let mut doc = new_doc(); - doc.set(automerge::ROOT, "foo", 1).unwrap(); - assert!(doc.set(automerge::ROOT, "foo", 1).unwrap().is_none()); + doc.set(&automerge::ROOT, "foo", 1).unwrap(); + assert!(doc.set(&automerge::ROOT, "foo", 1).unwrap().is_none()); } #[test] fn no_change_on_repeated_list_set() { let mut doc = new_doc(); let list_id = doc - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - doc.insert(list_id, 0, 1).unwrap(); - doc.set(list_id, 0, 1).unwrap(); - assert!(doc.set(list_id, 0, 1).unwrap().is_none()); + doc.insert(&list_id, 0, 1).unwrap(); + doc.set(&list_id, 0, 1).unwrap(); + assert!(doc.set(&list_id, 0, 1).unwrap().is_none()); } #[test] fn no_change_on_list_insert_followed_by_set_of_same_value() { let mut doc = new_doc(); let list_id = doc - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - doc.insert(list_id, 0, 1).unwrap(); - assert!(doc.set(list_id, 0, 1).unwrap().is_none()); + doc.insert(&list_id, 0, 1).unwrap(); + assert!(doc.set(&list_id, 0, 1).unwrap().is_none()); } #[test] fn repeated_map_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - doc1.set(automerge::ROOT, "field", 123).unwrap(); + doc1.set(&automerge::ROOT, "field", 123).unwrap(); doc2.merge(&mut doc1); - doc2.set(automerge::ROOT, "field", 456).unwrap(); - doc1.set(automerge::ROOT, "field", 789).unwrap(); + doc2.set(&automerge::ROOT, "field", 456).unwrap(); + doc1.set(&automerge::ROOT, "field", 789).unwrap(); doc1.merge(&mut doc2); - assert_eq!(doc1.values(automerge::ROOT, "field").unwrap().len(), 2); + assert_eq!(doc1.values(&automerge::ROOT, "field").unwrap().len(), 2); - let op = 
doc1.set(automerge::ROOT, "field", 123).unwrap().unwrap(); + doc1.set(&automerge::ROOT, "field", 123).unwrap(); assert_doc!( &doc1, map! { - "field" => { - op => 123 - } + "field" => { 123 } } ); } @@ -76,22 +74,21 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - doc1.insert(list_id, 0, 123).unwrap(); + doc1.insert(&list_id, 0, 123).unwrap(); doc2.merge(&mut doc1); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - doc2.set(list_id_in_doc2, 0, 456).unwrap().unwrap(); + doc2.set(&list_id, 0, 456).unwrap(); doc1.merge(&mut doc2); - let doc1_op = doc1.set(list_id, 0, 789).unwrap().unwrap(); + doc1.set(&list_id, 0, 789).unwrap(); assert_doc!( &doc1, map! { "list" => { - list_id => list![ - { doc1_op => 789 }, + list![ + { 789 }, ] } } @@ -102,19 +99,19 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { fn list_deletion() { let mut doc = new_doc(); let list_id = doc - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - let op1 = doc.insert(list_id, 0, 123).unwrap(); - doc.insert(list_id, 1, 456).unwrap(); - let op3 = doc.insert(list_id, 2, 789).unwrap(); - doc.del(list_id, 1).unwrap(); + doc.insert(&list_id, 0, 123).unwrap(); + doc.insert(&list_id, 1, 456).unwrap(); + doc.insert(&list_id, 2, 789).unwrap(); + doc.del(&list_id, 1).unwrap(); assert_doc!( &doc, map! 
{ - "list" => {list_id => list![ - { op1 => 123 }, - { op3 => 789 }, + "list" => { list![ + { 123 }, + { 789 }, ]} } ) @@ -124,29 +121,26 @@ fn list_deletion() { fn merge_concurrent_map_prop_updates() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let op1 = doc1.set(automerge::ROOT, "foo", "bar").unwrap().unwrap(); - let hello = doc2 - .set(automerge::ROOT, "hello", "world") - .unwrap() - .unwrap(); + doc1.set(&automerge::ROOT, "foo", "bar").unwrap(); + doc2.set(&automerge::ROOT, "hello", "world").unwrap(); doc1.merge(&mut doc2); assert_eq!( - doc1.value(automerge::ROOT, "foo").unwrap().unwrap().0, + doc1.value(&automerge::ROOT, "foo").unwrap().unwrap().0, "bar".into() ); assert_doc!( &doc1, map! { - "foo" => { op1 => "bar" }, - "hello" => { hello.translate(&doc2) => "world" }, + "foo" => { "bar" }, + "hello" => { "world" }, } ); doc2.merge(&mut doc1); assert_doc!( &doc2, map! { - "foo" => { op1.translate(&doc1) => "bar" }, - "hello" => { hello => "world" }, + "foo" => { "bar" }, + "hello" => { "world" }, } ); assert_eq!(realize(&doc1), realize(&doc2)); @@ -156,19 +150,17 @@ fn merge_concurrent_map_prop_updates() { fn add_concurrent_increments_of_same_property() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let counter_id = doc1 - .set(automerge::ROOT, "counter", mk_counter(0)) - .unwrap() + doc1.set(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); doc2.merge(&mut doc1); - doc1.inc(automerge::ROOT, "counter", 1).unwrap(); - doc2.inc(automerge::ROOT, "counter", 2).unwrap(); + doc1.inc(&automerge::ROOT, "counter", 1).unwrap(); + doc2.inc(&automerge::ROOT, "counter", 2).unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! 
{ "counter" => { - counter_id => mk_counter(3) + mk_counter(3) } } ); @@ -179,19 +171,14 @@ fn add_increments_only_to_preceeded_values() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - // create a counter in doc1 - let doc1_counter_id = doc1 - .set(automerge::ROOT, "counter", mk_counter(0)) - .unwrap() + doc1.set(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); - doc1.inc(automerge::ROOT, "counter", 1).unwrap(); + doc1.inc(&automerge::ROOT, "counter", 1).unwrap(); // create a counter in doc2 - let doc2_counter_id = doc2 - .set(automerge::ROOT, "counter", mk_counter(0)) - .unwrap() + doc2.set(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); - doc2.inc(automerge::ROOT, "counter", 3).unwrap(); + doc2.inc(&automerge::ROOT, "counter", 3).unwrap(); // The two values should be conflicting rather than added doc1.merge(&mut doc2); @@ -200,8 +187,8 @@ fn add_increments_only_to_preceeded_values() { &doc1, map! { "counter" => { - doc1_counter_id.native() => mk_counter(1), - doc2_counter_id.translate(&doc2) => mk_counter(3), + mk_counter(1), + mk_counter(3), } } ); @@ -211,8 +198,8 @@ fn add_increments_only_to_preceeded_values() { fn concurrent_updates_of_same_field() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let set_one_opid = doc1.set(automerge::ROOT, "field", "one").unwrap().unwrap(); - let set_two_opid = doc2.set(automerge::ROOT, "field", "two").unwrap().unwrap(); + doc1.set(&automerge::ROOT, "field", "one").unwrap(); + doc2.set(&automerge::ROOT, "field", "two").unwrap(); doc1.merge(&mut doc2); @@ -220,8 +207,8 @@ fn concurrent_updates_of_same_field() { &doc1, map! 
{ "field" => { - set_one_opid.native() => "one", - set_two_opid.translate(&doc2) => "two", + "one", + "two", } } ); @@ -232,14 +219,13 @@ fn concurrent_updates_of_same_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(&automerge::ROOT, "birds", automerge::Value::list()) .unwrap() .unwrap(); - doc1.insert(list_id, 0, "finch").unwrap(); + doc1.insert(&list_id, 0, "finch").unwrap(); doc2.merge(&mut doc1); - let set_one_op = doc1.set(list_id, 0, "greenfinch").unwrap().unwrap(); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - let set_op_two = doc2.set(list_id_in_doc2, 0, "goldfinch").unwrap().unwrap(); + doc1.set(&list_id, 0, "greenfinch").unwrap(); + doc2.set(&list_id, 0, "goldfinch").unwrap(); doc1.merge(&mut doc2); @@ -247,9 +233,9 @@ fn concurrent_updates_of_same_list_element() { &doc1, map! { "birds" => { - list_id => list![{ - set_one_op.native() => "greenfinch", - set_op_two.translate(&doc2) => "goldfinch", + list![{ + "greenfinch", + "goldfinch", }] } } @@ -261,19 +247,11 @@ fn assignment_conflicts_of_different_types() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let mut doc3 = new_doc(); - let op_one = doc1 - .set(automerge::ROOT, "field", "string") - .unwrap() + doc1.set(&automerge::ROOT, "field", "string").unwrap(); + doc2.set(&automerge::ROOT, "field", automerge::Value::list()) .unwrap(); - let op_two = doc2 - .set(automerge::ROOT, "field", automerge::Value::list()) - .unwrap() + doc3.set(&automerge::ROOT, "field", automerge::Value::map()) .unwrap(); - let op_three = doc3 - .set(automerge::ROOT, "field", automerge::Value::map()) - .unwrap() - .unwrap(); - doc1.merge(&mut doc2); doc1.merge(&mut doc3); @@ -281,9 +259,9 @@ fn assignment_conflicts_of_different_types() { &doc1, map! 
{ "field" => { - op_one.native() => "string", - op_two.translate(&doc2) => list!{}, - op_three.translate(&doc3) => map!{}, + "string", + list!{}, + map!{}, } } ); @@ -293,25 +271,22 @@ fn assignment_conflicts_of_different_types() { fn changes_within_conflicting_map_field() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let op_one = doc1 - .set(automerge::ROOT, "field", "string") - .unwrap() - .unwrap(); + doc1.set(&automerge::ROOT, "field", "string").unwrap(); let map_id = doc2 - .set(automerge::ROOT, "field", automerge::Value::map()) + .set(&automerge::ROOT, "field", automerge::Value::map()) .unwrap() .unwrap(); - let set_in_doc2 = doc2.set(map_id, "innerKey", 42).unwrap().unwrap(); + doc2.set(&map_id, "innerKey", 42).unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! { "field" => { - op_one.native() => "string", - map_id.translate(&doc2) => map!{ + "string", + map!{ "innerKey" => { - set_in_doc2.translate(&doc2) => 42, + 42, } } } @@ -325,27 +300,26 @@ fn changes_within_conflicting_list_element() { let mut doc1 = new_doc_with_actor(actor1); let mut doc2 = new_doc_with_actor(actor2); let list_id = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - doc1.insert(list_id, 0, "hello").unwrap(); + doc1.insert(&list_id, 0, "hello").unwrap(); doc2.merge(&mut doc1); let map_in_doc1 = doc1 - .set(list_id, 0, automerge::Value::map()) + .set(&list_id, 0, automerge::Value::map()) .unwrap() .unwrap(); - let set_map1 = doc1.set(map_in_doc1, "map1", true).unwrap().unwrap(); - let set_key1 = doc1.set(map_in_doc1, "key", 1).unwrap().unwrap(); + doc1.set(&map_in_doc1, "map1", true).unwrap(); + doc1.set(&map_in_doc1, "key", 1).unwrap(); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); let map_in_doc2 = doc2 - .set(list_id_in_doc2, 0, automerge::Value::map()) + .set(&list_id, 0, automerge::Value::map()) .unwrap() .unwrap(); doc1.merge(&mut doc2); - let set_map2 
= doc2.set(map_in_doc2, "map2", true).unwrap().unwrap(); - let set_key2 = doc2.set(map_in_doc2, "key", 2).unwrap().unwrap(); + doc2.set(&map_in_doc2, "map2", true).unwrap(); + doc2.set(&map_in_doc2, "key", 2).unwrap(); doc1.merge(&mut doc2); @@ -353,15 +327,15 @@ fn changes_within_conflicting_list_element() { &doc1, map! { "list" => { - list_id => list![ + list![ { - map_in_doc2.translate(&doc2) => map!{ - "map2" => { set_map2.translate(&doc2) => true }, - "key" => { set_key2.translate(&doc2) => 2 }, + map!{ + "map2" => { true }, + "key" => { 2 }, }, - map_in_doc1.native() => map!{ - "key" => { set_key1.native() => 1 }, - "map1" => { set_map1.native() => true }, + map!{ + "key" => { 1 }, + "map1" => { true }, } } ] @@ -376,22 +350,16 @@ fn concurrently_assigned_nested_maps_should_not_merge() { let mut doc2 = new_doc(); let doc1_map_id = doc1 - .set(automerge::ROOT, "config", automerge::Value::map()) - .unwrap() - .unwrap(); - let doc1_field = doc1 - .set(doc1_map_id, "background", "blue") + .set(&automerge::ROOT, "config", automerge::Value::map()) .unwrap() .unwrap(); + doc1.set(&doc1_map_id, "background", "blue").unwrap(); let doc2_map_id = doc2 - .set(automerge::ROOT, "config", automerge::Value::map()) - .unwrap() - .unwrap(); - let doc2_field = doc2 - .set(doc2_map_id, "logo_url", "logo.png") + .set(&automerge::ROOT, "config", automerge::Value::map()) .unwrap() .unwrap(); + doc2.set(&doc2_map_id, "logo_url", "logo.png").unwrap(); doc1.merge(&mut doc2); @@ -399,11 +367,11 @@ fn concurrently_assigned_nested_maps_should_not_merge() { &doc1, map! 
{ "config" => { - doc1_map_id.native() => map!{ - "background" => {doc1_field.native() => "blue"} + map!{ + "background" => {"blue"} }, - doc2_map_id.translate(&doc2) => map!{ - "logo_url" => {doc2_field.translate(&doc2) => "logo.png"} + map!{ + "logo_url" => {"logo.png"} } } } @@ -418,16 +386,15 @@ fn concurrent_insertions_at_different_list_positions() { assert!(doc1.maybe_get_actor().unwrap() < doc2.maybe_get_actor().unwrap()); let list_id = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - let one = doc1.insert(list_id, 0, "one").unwrap(); - let three = doc1.insert(list_id, 1, "three").unwrap(); + doc1.insert(&list_id, 0, "one").unwrap(); + doc1.insert(&list_id, 1, "three").unwrap(); doc2.merge(&mut doc1); - let two = doc1.splice(list_id, 1, 0, vec!["two".into()]).unwrap()[0]; - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - let four = doc2.insert(list_id_in_doc2, 2, "four").unwrap(); + doc1.splice(&list_id, 1, 0, vec!["two".into()]).unwrap(); + doc2.insert(&list_id, 2, "four").unwrap(); doc1.merge(&mut doc2); @@ -435,11 +402,11 @@ fn concurrent_insertions_at_different_list_positions() { &doc1, map! 
{ "list" => { - list_id => list![ - {one.native() => "one"}, - {two.native() => "two"}, - {three.native() => "three"}, - {four.translate(&doc2) => "four"}, + list![ + {"one"}, + {"two"}, + {"three"}, + {"four"}, ] } } @@ -454,30 +421,29 @@ fn concurrent_insertions_at_same_list_position() { assert!(doc1.maybe_get_actor().unwrap() < doc2.maybe_get_actor().unwrap()); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(&automerge::ROOT, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let parakeet = doc1.insert(list_id, 0, "parakeet").unwrap(); + doc1.insert(&list_id, 0, "parakeet").unwrap(); doc2.merge(&mut doc1); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - let starling = doc1.insert(list_id, 1, "starling").unwrap(); - let chaffinch = doc2.insert(list_id_in_doc2, 1, "chaffinch").unwrap(); + doc1.insert(&list_id, 1, "starling").unwrap(); + doc2.insert(&list_id, 1, "chaffinch").unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! { "birds" => { - list_id => list![ + list![ { - parakeet.native() => "parakeet", + "parakeet", }, { - starling.native() => "starling", + "starling", }, { - chaffinch.translate(&doc2) => "chaffinch", + "chaffinch", }, ] }, @@ -489,13 +455,10 @@ fn concurrent_insertions_at_same_list_position() { fn concurrent_assignment_and_deletion_of_a_map_entry() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - doc1.set(automerge::ROOT, "bestBird", "robin").unwrap(); + doc1.set(&automerge::ROOT, "bestBird", "robin").unwrap(); doc2.merge(&mut doc1); - doc1.del(automerge::ROOT, "bestBird").unwrap(); - let set_two = doc2 - .set(automerge::ROOT, "bestBird", "magpie") - .unwrap() - .unwrap(); + doc1.del(&automerge::ROOT, "bestBird").unwrap(); + doc2.set(&automerge::ROOT, "bestBird", "magpie").unwrap(); doc1.merge(&mut doc2); @@ -503,7 +466,7 @@ fn concurrent_assignment_and_deletion_of_a_map_entry() { &doc1, map! 
{ "bestBird" => { - set_two.translate(&doc2) => "magpie", + "magpie", } } ); @@ -514,25 +477,22 @@ fn concurrent_assignment_and_deletion_of_list_entry() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(&automerge::ROOT, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let blackbird = doc1.insert(list_id, 0, "blackbird").unwrap(); - doc1.insert(list_id, 1, "thrush").unwrap(); - let goldfinch = doc1.insert(list_id, 2, "goldfinch").unwrap(); + doc1.insert(&list_id, 0, "blackbird").unwrap(); + doc1.insert(&list_id, 1, "thrush").unwrap(); + doc1.insert(&list_id, 2, "goldfinch").unwrap(); doc2.merge(&mut doc1); - - let starling = doc1.set(list_id, 1, "starling").unwrap().unwrap(); - - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - doc2.del(list_id_in_doc2, 1).unwrap(); + doc1.set(&list_id, 1, "starling").unwrap(); + doc2.del(&list_id, 1).unwrap(); assert_doc!( &doc2, map! { - "birds" => {list_id.translate(&doc1) => list![ - { blackbird.translate(&doc1) => "blackbird"}, - { goldfinch.translate(&doc1) => "goldfinch"}, + "birds" => {list![ + {"blackbird"}, + {"goldfinch"}, ]} } ); @@ -540,10 +500,10 @@ fn concurrent_assignment_and_deletion_of_list_entry() { assert_doc!( &doc1, map! { - "birds" => {list_id => list![ - { blackbird => "blackbird" }, - { starling => "starling" }, - { goldfinch => "goldfinch" }, + "birds" => {list![ + { "blackbird" }, + { "starling" }, + { "goldfinch" }, ]} } ); @@ -553,10 +513,10 @@ fn concurrent_assignment_and_deletion_of_list_entry() { assert_doc!( &doc1, map! 
{ - "birds" => {list_id => list![ - { blackbird => "blackbird" }, - { starling => "starling" }, - { goldfinch => "goldfinch" }, + "birds" => {list![ + { "blackbird" }, + { "starling" }, + { "goldfinch" }, ]} } ); @@ -567,31 +527,29 @@ fn insertion_after_a_deleted_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(&automerge::ROOT, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let blackbird = doc1.insert(list_id, 0, "blackbird").unwrap(); - doc1.insert(list_id, 1, "thrush").unwrap(); - doc1.insert(list_id, 2, "goldfinch").unwrap(); + doc1.insert(&list_id, 0, "blackbird").unwrap(); + doc1.insert(&list_id, 1, "thrush").unwrap(); + doc1.insert(&list_id, 2, "goldfinch").unwrap(); doc2.merge(&mut doc1); - doc1.splice(list_id, 1, 2, Vec::new()).unwrap(); + doc1.splice(&list_id, 1, 2, Vec::new()).unwrap(); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - let starling = doc2 - .splice(list_id_in_doc2, 2, 0, vec!["starling".into()]) - .unwrap()[0]; + doc2.splice(&list_id, 2, 0, vec!["starling".into()]) + .unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! { - "birds" => {list_id => list![ - { blackbird.native() => "blackbird" }, - { starling.translate(&doc2) => "starling" } + "birds" => {list![ + { "blackbird" }, + { "starling" } ]} } ); @@ -600,9 +558,9 @@ fn insertion_after_a_deleted_list_element() { assert_doc!( &doc2, map! 
{ - "birds" => {list_id.translate(&doc1) => list![ - { blackbird.translate(&doc1) => "blackbird" }, - { starling.native() => "starling" } + "birds" => {list![ + { "blackbird" }, + { "starling" } ]} } ); @@ -613,29 +571,28 @@ fn concurrent_deletion_of_same_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(&automerge::ROOT, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let albatross = doc1.insert(list_id, 0, "albatross").unwrap(); - doc1.insert(list_id, 1, "buzzard").unwrap(); - let cormorant = doc1.insert(list_id, 2, "cormorant").unwrap(); + doc1.insert(&list_id, 0, "albatross").unwrap(); + doc1.insert(&list_id, 1, "buzzard").unwrap(); + doc1.insert(&list_id, 2, "cormorant").unwrap(); doc2.merge(&mut doc1); - doc1.del(list_id, 1).unwrap(); + doc1.del(&list_id, 1).unwrap(); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - doc2.del(list_id_in_doc2, 1).unwrap(); + doc2.del(&list_id, 1).unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! { - "birds" => {list_id => list![ - { albatross => "albatross" }, - { cormorant => "cormorant" } + "birds" => {list![ + { "albatross" }, + { "cormorant" } ]} } ); @@ -644,9 +601,9 @@ fn concurrent_deletion_of_same_list_element() { assert_doc!( &doc2, map! 
{ - "birds" => {list_id.translate(&doc1) => list![ - { albatross.translate(&doc1) => "albatross" }, - { cormorant.translate(&doc1) => "cormorant" } + "birds" => {list![ + { "albatross" }, + { "cormorant" } ]} } ); @@ -658,48 +615,47 @@ fn concurrent_updates_at_different_levels() { let mut doc2 = new_doc(); let animals = doc1 - .set(automerge::ROOT, "animals", automerge::Value::map()) + .set(&automerge::ROOT, "animals", automerge::Value::map()) .unwrap() .unwrap(); let birds = doc1 - .set(animals, "birds", automerge::Value::map()) + .set(&animals, "birds", automerge::Value::map()) .unwrap() .unwrap(); - doc1.set(birds, "pink", "flamingo").unwrap().unwrap(); - doc1.set(birds, "black", "starling").unwrap().unwrap(); + doc1.set(&birds, "pink", "flamingo").unwrap(); + doc1.set(&birds, "black", "starling").unwrap(); let mammals = doc1 - .set(animals, "mammals", automerge::Value::list()) + .set(&animals, "mammals", automerge::Value::list()) .unwrap() .unwrap(); - let badger = doc1.insert(mammals, 0, "badger").unwrap(); + doc1.insert(&mammals, 0, "badger").unwrap(); doc2.merge(&mut doc1); - doc1.set(birds, "brown", "sparrow").unwrap().unwrap(); + doc1.set(&birds, "brown", "sparrow").unwrap(); - let animals_in_doc2 = translate_obj_id(&doc1, &doc2, animals); - doc2.del(animals_in_doc2, "birds").unwrap(); + doc2.del(&animals, "birds").unwrap(); doc1.merge(&mut doc2); assert_obj!( &doc1, - automerge::ROOT, + &automerge::ROOT, "animals", map! { "mammals" => { - mammals => list![{ badger => "badger" }], + list![{ "badger" }], } } ); assert_obj!( &doc2, - automerge::ROOT, + &automerge::ROOT, "animals", map! 
{ "mammals" => { - mammals.translate(&doc1) => list![{ badger.translate(&doc1) => "badger" }], + list![{ "badger" }], } } ); @@ -711,21 +667,20 @@ fn concurrent_updates_of_concurrently_deleted_objects() { let mut doc2 = new_doc(); let birds = doc1 - .set(automerge::ROOT, "birds", automerge::Value::map()) + .set(&automerge::ROOT, "birds", automerge::Value::map()) .unwrap() .unwrap(); let blackbird = doc1 - .set(birds, "blackbird", automerge::Value::map()) + .set(&birds, "blackbird", automerge::Value::map()) .unwrap() .unwrap(); - doc1.set(blackbird, "feathers", "black").unwrap().unwrap(); + doc1.set(&blackbird, "feathers", "black").unwrap(); doc2.merge(&mut doc1); - doc1.del(birds, "blackbird").unwrap(); + doc1.del(&birds, "blackbird").unwrap(); - translate_obj_id(&doc1, &doc2, blackbird); - doc2.set(blackbird, "beak", "orange").unwrap(); + doc2.set(&blackbird, "beak", "orange").unwrap(); doc1.merge(&mut doc2); @@ -733,7 +688,7 @@ fn concurrent_updates_of_concurrently_deleted_objects() { &doc1, map! 
{ "birds" => { - birds => map!{}, + map!{}, } } ); @@ -746,58 +701,55 @@ fn does_not_interleave_sequence_insertions_at_same_position() { let mut doc2 = new_doc_with_actor(actor2); let wisdom = doc1 - .set(automerge::ROOT, "wisdom", automerge::Value::list()) + .set(&automerge::ROOT, "wisdom", automerge::Value::list()) .unwrap() .unwrap(); doc2.merge(&mut doc1); - let doc1elems = doc1 - .splice( - wisdom, - 0, - 0, - vec![ - "to".into(), - "be".into(), - "is".into(), - "to".into(), - "do".into(), - ], - ) - .unwrap(); + doc1.splice( + &wisdom, + 0, + 0, + vec![ + "to".into(), + "be".into(), + "is".into(), + "to".into(), + "do".into(), + ], + ) + .unwrap(); - let wisdom_in_doc2 = translate_obj_id(&doc1, &doc2, wisdom); - let doc2elems = doc2 - .splice( - wisdom_in_doc2, - 0, - 0, - vec![ - "to".into(), - "do".into(), - "is".into(), - "to".into(), - "be".into(), - ], - ) - .unwrap(); + doc2.splice( + &wisdom, + 0, + 0, + vec![ + "to".into(), + "do".into(), + "is".into(), + "to".into(), + "be".into(), + ], + ) + .unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! 
{ - "wisdom" => {wisdom => list![ - {doc1elems[0].native() => "to"}, - {doc1elems[1].native() => "be"}, - {doc1elems[2].native() => "is"}, - {doc1elems[3].native() => "to"}, - {doc1elems[4].native() => "do"}, - {doc2elems[0].translate(&doc2) => "to"}, - {doc2elems[1].translate(&doc2) => "do"}, - {doc2elems[2].translate(&doc2) => "is"}, - {doc2elems[3].translate(&doc2) => "to"}, - {doc2elems[4].translate(&doc2) => "be"}, + "wisdom" => {list![ + {"to"}, + {"be"}, + {"is"}, + {"to"}, + {"do"}, + {"to"}, + {"do"}, + {"is"}, + {"to"}, + {"be"}, ]} } ); @@ -811,20 +763,19 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id( let mut doc2 = new_doc_with_actor(actor2); let list = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - let two = doc1.insert(list, 0, "two").unwrap(); + doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1); - let list_in_doc2 = translate_obj_id(&doc1, &doc2, list); - let one = doc2.insert(list_in_doc2, 0, "one").unwrap(); + doc2.insert(&list, 0, "one").unwrap(); assert_doc!( &doc2, map! { - "list" => { list.translate(&doc1) => list![ - { one.native() => "one" }, - { two.translate(&doc1) => "two" }, + "list" => { list![ + { "one" }, + { "two" }, ]} } ); @@ -838,20 +789,19 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() let mut doc2 = new_doc_with_actor(actor2); let list = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - let two = doc1.insert(list, 0, "two").unwrap(); + doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1); - let list_in_doc2 = translate_obj_id(&doc1, &doc2, list); - let one = doc2.insert(list_in_doc2, 0, "one").unwrap(); + doc2.insert(&list, 0, "one").unwrap(); assert_doc!( &doc2, map! 
{ - "list" => { list.translate(&doc1) => list![ - { one.native() => "one" }, - { two.translate(&doc1) => "two" }, + "list" => { list![ + { "one" }, + { "two" }, ]} } ); @@ -863,26 +813,25 @@ fn insertion_consistent_with_causality() { let mut doc2 = new_doc(); let list = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - let four = doc1.insert(list, 0, "four").unwrap(); + doc1.insert(&list, 0, "four").unwrap(); doc2.merge(&mut doc1); - let list_in_doc2 = translate_obj_id(&doc1, &doc2, list); - let three = doc2.insert(list_in_doc2, 0, "three").unwrap(); + doc2.insert(&list, 0, "three").unwrap(); doc1.merge(&mut doc2); - let two = doc1.insert(list, 0, "two").unwrap(); + doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1); - let one = doc2.insert(list_in_doc2, 0, "one").unwrap(); + doc2.insert(&list, 0, "one").unwrap(); assert_doc!( &doc2, map! { - "list" => {list.translate(&doc1) => list![ - {one.native() => "one"}, - {two.translate(&doc1) => "two"}, - {three.native() => "three" }, - {four.translate(&doc1) => "four"}, + "list" => { list![ + {"one"}, + {"two"}, + {"three" }, + {"four"}, ]} } ); @@ -900,28 +849,22 @@ fn save_and_restore_empty() { fn save_restore_complex() { let mut doc1 = new_doc(); let todos = doc1 - .set(automerge::ROOT, "todos", automerge::Value::list()) + .set(&automerge::ROOT, "todos", automerge::Value::list()) .unwrap() .unwrap(); - let first_todo = doc1.insert(todos, 0, automerge::Value::map()).unwrap(); - doc1.set(first_todo, "title", "water plants") + let first_todo = doc1 + .insert(&todos, 0, automerge::Value::map()) .unwrap() .unwrap(); - let first_done = doc1.set(first_todo, "done", false).unwrap().unwrap(); + doc1.set(&first_todo, "title", "water plants").unwrap(); + doc1.set(&first_todo, "done", false).unwrap(); let mut doc2 = new_doc(); doc2.merge(&mut doc1); - let first_todo_in_doc2 = translate_obj_id(&doc1, &doc2, first_todo); - let 
weed_title = doc2 - .set(first_todo_in_doc2, "title", "weed plants") - .unwrap() - .unwrap(); + doc2.set(&first_todo, "title", "weed plants").unwrap(); - let kill_title = doc1 - .set(first_todo, "title", "kill plants") - .unwrap() - .unwrap(); + doc1.set(&first_todo, "title", "kill plants").unwrap(); doc1.merge(&mut doc2); let reloaded = Automerge::load(&doc1.save().unwrap()).unwrap(); @@ -929,13 +872,13 @@ fn save_restore_complex() { assert_doc!( &reloaded, map! { - "todos" => {todos.translate(&doc1) => list![ - {first_todo.translate(&doc1) => map!{ + "todos" => {list![ + {map!{ "title" => { - weed_title.translate(&doc2) => "weed plants", - kill_title.translate(&doc1) => "kill plants", + "weed plants", + "kill plants", }, - "done" => {first_done.translate(&doc1) => false}, + "done" => {false}, }} ]} } diff --git a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs index fed72f1e..197614f6 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -5,9 +5,9 @@ use std::fs; fn replay_trace(commands: Vec<(usize, usize, Vec)>) -> Automerge { let mut doc = Automerge::new(); - let text = doc.set(ROOT, "text", Value::text()).unwrap().unwrap(); + let text = doc.set(&ROOT, "text", Value::text()).unwrap().unwrap(); for (pos, del, vals) in commands { - doc.splice(text, pos, del, vals).unwrap(); + doc.splice(&text, pos, del, vals).unwrap(); } doc.commit(None, None); doc diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index 94fde72c..db39bcdd 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -19,12 +19,12 @@ fn main() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let now = Instant::now(); - let text = doc.set(ROOT, "text", Value::text()).unwrap().unwrap(); + let text = doc.set( &ROOT, "text", Value::text()).unwrap().unwrap(); for (i, (pos, del, vals)) in commands.into_iter().enumerate() { if i % 1000 == 0 { println!("Processed {} edits in {} ms", i, now.elapsed().as_millis()); } - doc.splice(text, pos, del, 
vals)?; + doc.splice(&text, pos, del, vals)?; } let _ = doc.save(); println!("Done in {} ms", now.elapsed().as_millis()); From dc8140cb0b6cc171d8d8b501b5a513b5afd4a0cd Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sat, 1 Jan 2022 20:17:38 +0000 Subject: [PATCH 010/730] =?UTF-8?q?fmt=20=F0=9F=99=84?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- edit-trace/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index db39bcdd..6c54bcad 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -19,7 +19,7 @@ fn main() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let now = Instant::now(); - let text = doc.set( &ROOT, "text", Value::text()).unwrap().unwrap(); + let text = doc.set(&ROOT, "text", Value::text()).unwrap().unwrap(); for (i, (pos, del, vals)) in commands.into_iter().enumerate() { if i % 1000 == 0 { println!("Processed {} edits in {} ms", i, now.elapsed().as_millis()); From 4c4484b8976652082cc9a973e8f31fe97f50b221 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 3 Jan 2022 12:58:08 -0500 Subject: [PATCH 011/730] fix bug in wasm --- automerge-wasm/src/lib.rs | 28 +++++++++++++++++----------- automerge-wasm/test/test.js | 22 ++++++++++++++++++++++ automerge/src/automerge.rs | 2 +- 3 files changed, 40 insertions(+), 12 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index d0c6232d..0bab4b14 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -178,7 +178,7 @@ impl Automerge { start: JsValue, delete_count: JsValue, text: JsValue, - ) -> Result<(), JsValue> { + ) -> Result { let obj = self.import(obj)?; let start = to_usize(start, "start")?; let delete_count = to_usize(delete_count, "deleteCount")?; @@ -187,24 +187,31 @@ impl Automerge { self.0 .splice_text(&obj, start, delete_count, &t) .map_err(to_js_err)?; + Ok(JsValue::null()) } else { if let Ok(array) = 
text.dyn_into::() { for i in array.iter() { - if let Some(t) = i.as_string() { - vals.push(t.into()); - } else if let Ok(array) = i.dyn_into::() { + if let Ok(array) = i.clone().dyn_into::() { let value = array.get(1); let datatype = array.get(2); - let value = self.import_value(value, datatype)?; + let value = self.import_value(value, datatype.as_string())?; vals.push(value); + } else { + let value = self.import_value(i, None)?; + vals.push(value.into()); } } } - self.0 + let result = self.0 .splice(&obj, start, delete_count, vals) .map_err(to_js_err)?; + if result.len() == 0 { + Ok(JsValue::null()) + } else { + let result : Array = result.iter().map(|r| JsValue::from(r.to_string())).collect(); + Ok(result.into()) + } } - Ok(()) } pub fn insert( @@ -220,7 +227,7 @@ impl Automerge { .as_f64() .ok_or_else(|| "insert index must be a number".into()); let index = index?; - let value = self.import_value(value, datatype)?; + let value = self.import_value(value, datatype.as_string())?; let opid = self .0 .insert(&obj, index as usize, value) @@ -240,7 +247,7 @@ impl Automerge { ) -> Result { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; - let value = self.import_value(value, datatype)?; + let value = self.import_value(value, datatype.as_string())?; let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; match opid { Some(opid) => Ok(self.export(opid)), @@ -467,8 +474,7 @@ impl Automerge { } } - fn import_value(&mut self, value: JsValue, datatype: JsValue) -> Result { - let datatype = datatype.as_string(); + fn import_value(&mut self, value: JsValue, datatype: Option) -> Result { match datatype.as_deref() { Some("boolean") => value .as_bool() diff --git a/automerge-wasm/test/test.js b/automerge-wasm/test/test.js index 22aebcfd..7638f146 100644 --- a/automerge-wasm/test/test.js +++ b/automerge-wasm/test/test.js @@ -280,5 +280,27 @@ describe('Automerge', () => { assert.deepEqual(doc4.save(), save); }) + it('only returns an object id when objects are 
created', () => { + let doc = Automerge.init("aaaa") + let r1 = doc.set("_root","foo","bar") + let r2 = doc.set("_root","list",LIST) + let r3 = doc.set("_root","counter",10, "counter") + let r4 = doc.inc("_root","counter",1) + let r5 = doc.del("_root","counter") + let r6 = doc.insert(r2,0,10); + let r7 = doc.insert(r2,0,MAP); + let r8 = doc.splice(r2,1,0,["a","b","c"]); + let r9 = doc.splice(r2,1,0,["a",LIST,MAP,"d"]); + assert.deepEqual(r1,null); + assert.deepEqual(r2,"2@aaaa"); + assert.deepEqual(r3,null); + assert.deepEqual(r4,null); + assert.deepEqual(r5,null); + assert.deepEqual(r6,null); + assert.deepEqual(r7,"7@aaaa"); + assert.deepEqual(r8,null); + assert.deepEqual(r9,["12@aaaa","13@aaaa"]); + }) + }) }) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 9b70ce9f..45cd7863 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -388,7 +388,7 @@ impl Automerge { let mut results = Vec::new(); for v in vals { // insert() - let id = self.do_insert(obj, pos, v)?; + let id = self.do_insert(obj, pos, v.clone())?; if let Some(id) = id { results.push(self.id_to_exid(id)); } From 96a8357e360f72dc9b11ea5713eeb3310de5c7c6 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 3 Jan 2022 14:59:34 -0500 Subject: [PATCH 012/730] add hasher for exid --- automerge/src/exid.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/automerge/src/exid.rs b/automerge/src/exid.rs index d79b35ce..bf7e9ed5 100644 --- a/automerge/src/exid.rs +++ b/automerge/src/exid.rs @@ -1,5 +1,6 @@ use crate::ActorId; use std::fmt; +use std::hash::{Hash, Hasher}; #[derive(Debug, Clone)] pub enum ExId { @@ -31,3 +32,15 @@ impl fmt::Display for ExId { } } } + +impl Hash for ExId { + fn hash(&self, state: &mut H) { + match self { + ExId::Root => 0.hash(state), + ExId::Id(ctr, actor, _) => { + ctr.hash(state); + actor.hash(state); + } + } + } +} From ef89520d7ce0dc524fc8e91fc8bc480949da0c99 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 3 
Jan 2022 14:59:46 -0500 Subject: [PATCH 013/730] more tests for wasm --- automerge-wasm/src/lib.rs | 1 + automerge-wasm/test/test.js | 45 ++++++++++++++++++++++++++++++++++++- 2 files changed, 45 insertions(+), 1 deletion(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 0bab4b14..9680cd9f 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -129,6 +129,7 @@ impl Automerge { pub fn free(self) {} + #[wasm_bindgen(js_name = pendingOps)] pub fn pending_ops(&self) -> JsValue { (self.0.pending_ops() as u32).into() } diff --git a/automerge-wasm/test/test.js b/automerge-wasm/test/test.js index 7638f146..325b35ae 100644 --- a/automerge-wasm/test/test.js +++ b/automerge-wasm/test/test.js @@ -25,6 +25,7 @@ describe('Automerge', () => { it('should be able to start and commit', () => { let doc = Automerge.init() doc.commit() + doc.free() }) it('getting a nonexistant prop does not throw an error', () => { @@ -32,6 +33,7 @@ describe('Automerge', () => { let root = "_root" let result = doc.value(root,"hello") assert.deepEqual(result,[]) + doc.free() }) it('should be able to set and get a simple value', () => { @@ -72,6 +74,7 @@ describe('Automerge', () => { result = doc.value(root,"bool") assert.deepEqual(result,["boolean",false]) + doc.free() }) it('should be able to use bytes', () => { @@ -82,6 +85,7 @@ describe('Automerge', () => { assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]); let value2 = doc.value("_root", "data2") assert.deepEqual(value2, ["bytes", new Uint8Array([13,14,15])]); + doc.free() }) it('should be able to make sub objects', () => { @@ -91,13 +95,14 @@ describe('Automerge', () => { let submap = doc.set(root, "submap", MAP) doc.set(submap, "number", 6, "uint") - assert.strictEqual(doc.pending_ops(),2) + assert.strictEqual(doc.pendingOps(),2) result = doc.value(root,"submap") assert.deepEqual(result,["map",submap]) result = doc.value(submap,"number") assert.deepEqual(result,["uint",6]) + doc.free() }) 
it('should be able to make lists', () => { @@ -120,6 +125,7 @@ describe('Automerge', () => { assert.deepEqual(doc.value(submap, 2),["str","b v2"]) assert.deepEqual(doc.length(submap),4) + doc.free() }) it('should be able delete non-existant props', () => { @@ -138,6 +144,7 @@ describe('Automerge', () => { assert.deepEqual(doc.keys("_root"),["bip"]) assert.deepEqual(doc.keys("_root", heads1),["bip", "foo"]) assert.deepEqual(doc.keys("_root", heads2),["bip"]) + doc.free() }) it('should be able to del', () => { @@ -148,6 +155,7 @@ describe('Automerge', () => { assert.deepEqual(doc.value(root, "xxx"),["str","xxx"]) doc.del(root, "xxx"); assert.deepEqual(doc.value(root, "xxx"),[]) + doc.free() }) it('should be able to use counters', () => { @@ -160,6 +168,7 @@ describe('Automerge', () => { assert.deepEqual(doc.value(root, "counter"),["counter",20]) doc.inc(root, "counter", -5); assert.deepEqual(doc.value(root, "counter"),["counter",15]) + doc.free() }) it('should be able to splice text', () => { @@ -176,6 +185,7 @@ describe('Automerge', () => { assert.deepEqual(doc.value(text, 10),["str","d"]) assert.deepEqual(doc.value(text, 11),["str","!"]) assert.deepEqual(doc.value(text, 12),["str","?"]) + doc.free() }) it('should be able save all or incrementally', () => { @@ -208,6 +218,10 @@ describe('Automerge', () => { assert.deepEqual(docA.keys("_root"), docB.keys("_root")); assert.deepEqual(docA.save(), docB.save()); assert.deepEqual(docA.save(), docC.save()); + doc.free() + docA.free() + docB.free() + docC.free() }) it('should be able to splice text', () => { @@ -223,6 +237,7 @@ describe('Automerge', () => { assert.strictEqual(doc.length(text, heads1), 11) assert.strictEqual(doc.text(text, heads2), "hello big bad world") assert.strictEqual(doc.length(text, heads2), 19) + doc.free() }) it('local inc increments all visible counters in a map', () => { @@ -250,6 +265,10 @@ describe('Automerge', () => { let save1 = doc1.save() let doc4 = Automerge.load(save1) 
assert.deepEqual(doc4.save(), save1); + doc1.free() + doc2.free() + doc3.free() + doc4.free() }) it('local inc increments all visible counters in a sequence', () => { @@ -278,6 +297,10 @@ describe('Automerge', () => { let save = doc1.save() let doc4 = Automerge.load(save) assert.deepEqual(doc4.save(), save); + doc1.free() + doc2.free() + doc3.free() + doc4.free() }) it('only returns an object id when objects are created', () => { @@ -300,6 +323,26 @@ describe('Automerge', () => { assert.deepEqual(r7,"7@aaaa"); assert.deepEqual(r8,null); assert.deepEqual(r9,["12@aaaa","13@aaaa"]); + doc.free() + }) + + it('objects without properties are preserved', () => { + let doc1 = Automerge.init("aaaa") + let a = doc1.set("_root","a",MAP); + let b = doc1.set("_root","b",MAP); + let c = doc1.set("_root","c",MAP); + let d = doc1.set(c,"d","dd"); + let saved = doc1.save(); + let doc2 = Automerge.load(saved); + assert.deepEqual(doc2.value("_root","a"),["map",a]) + assert.deepEqual(doc2.keys(a),[]) + assert.deepEqual(doc2.value("_root","b"),["map",b]) + assert.deepEqual(doc2.keys(b),[]) + assert.deepEqual(doc2.value("_root","c"),["map",c]) + assert.deepEqual(doc2.keys(c),["d"]) + assert.deepEqual(doc2.value(c,"d"),["str","dd"]) + doc1.free() + doc2.free() }) }) From 1f0a1e40714d19091a051c86c0b25306193bff93 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sun, 2 Jan 2022 20:37:13 +0000 Subject: [PATCH 014/730] Correctly sort actor IDs when encoding changes This is a port of a fix previously merged into `main`. The javascript implementation of automerge sorts actor IDs lexicographically when encoding changes. We were sorting actor IDs in the order the appear in the change we're encoding. This meant that the index that we assigned to operations in the encoded change was different to that which the javascript implementation assigns, resulting in mismatched head errors as the hashes we created did not match the javascript implementation. 
This change fixes the issue by sorting actor IDs lexicographically. We make a pass over the operations in the change before encoding to collect the actor IDs and sort them. This means we no longer need to pass a mutable `Vec` to the various encode functions, which cleans things up a little. --- automerge/src/change.rs | 79 +++++++++++++++++++++++++++++++++++++-- automerge/src/columnar.rs | 40 +++++++------------- automerge/src/encoding.rs | 6 +-- 3 files changed, 91 insertions(+), 34 deletions(-) diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 846cc71d..42576e7f 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -113,6 +113,42 @@ pub(crate) fn encode_document( Ok(bytes) } +/// When encoding a change we take all the actor IDs referenced by a change and place them in an +/// array. The array has the actor who authored the change as the first element and all remaining +/// actors (i.e. those referenced in object IDs in the target of an operation or in the `pred` of +/// an operation) lexicographically ordered following the change author. 
+fn actor_ids_in_change(change: &::Change) -> Vec { + let mut other_ids: Vec<&::ActorId> = change + .operations + .iter() + .flat_map(opids_in_operation) + .filter(|a| *a != &change.actor_id) + .unique() + .collect(); + other_ids.sort(); + // Now prepend the change actor + std::iter::once(&change.actor_id) + .chain(other_ids.into_iter()) + .cloned() + .collect() +} + +fn opids_in_operation(op: &::Op) -> impl Iterator { + let obj_actor_id = match &op.obj { + amp::ObjectId::Root => None, + amp::ObjectId::Id(opid) => Some(opid.actor()), + }; + let pred_ids = op.pred.iter().map(amp::OpId::actor); + let key_actor = match &op.key { + amp::Key::Seq(amp::ElementId::Id(i)) => Some(i.actor()), + _ => None, + }; + obj_actor_id + .into_iter() + .chain(key_actor.into_iter()) + .chain(pred_ids) +} + impl From for Change { fn from(value: amp::Change) -> Self { encode(&value) @@ -200,8 +236,7 @@ fn encode_chunk(change: &::Change, deps: &[amp::ChangeHash]) -> ChunkIntermed bytes.write_all(&hash.0).unwrap(); } - // encode first actor - let mut actors = vec![change.actor_id.clone()]; + let actors = actor_ids_in_change(change); change.actor_id.to_bytes().encode(&mut bytes).unwrap(); // encode seq, start_op, time, message @@ -213,7 +248,7 @@ fn encode_chunk(change: &::Change, deps: &[amp::ChangeHash]) -> ChunkIntermed let message = message..bytes.len(); // encode ops into a side buffer - collect all other actors - let (ops_buf, mut ops) = ColumnEncoder::encode_ops(&change.operations, &mut actors); + let (ops_buf, mut ops) = ColumnEncoder::encode_ops(&change.operations, &actors); // encode all other actors actors[1..].encode(&mut bytes).unwrap(); @@ -916,3 +951,41 @@ fn pred_into( pred.map(|(ctr, actor)| amp::OpId(ctr, actors[actor].clone())) .collect() } + +#[cfg(test)] +mod tests { + use crate::legacy as amp; + #[test] + fn mismatched_head_repro_one() { + let op_json = serde_json::json!({ + "ops": [ + { + "action": "del", + "obj": "1@1485eebc689d47efbf8b892e81653eb3", + "elemId": 
"3164@0dcdf83d9594477199f80ccd25e87053", + "pred": [ + "3164@0dcdf83d9594477199f80ccd25e87053" + ], + "insert": false + }, + ], + "actor": "e63cf5ed1f0a4fb28b2c5bc6793b9272", + "hash": "e7fd5c02c8fdd2cdc3071ce898a5839bf36229678af3b940f347da541d147ae2", + "seq": 1, + "startOp": 3179, + "time": 1634146652, + "message": null, + "deps": [ + "2603cded00f91e525507fc9e030e77f9253b239d90264ee343753efa99e3fec1" + ] + }); + + let change: amp::Change = serde_json::from_value(op_json).unwrap(); + let expected_hash: super::amp::ChangeHash = + "4dff4665d658a28bb6dcace8764eb35fa8e48e0a255e70b6b8cbf8e8456e5c50" + .parse() + .unwrap(); + let encoded: super::Change = change.into(); + assert_eq!(encoded.hash, expected_hash); + } +} diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index c821b9bb..c9945945 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -41,22 +41,13 @@ impl Encodable for [ActorId] { } } -fn map_actor(actor: &ActorId, actors: &mut Vec) -> usize { - if let Some(pos) = actors.iter().position(|a| a == actor) { - pos - } else { - actors.push(actor.clone()); - actors.len() - 1 - } +fn actor_index(actor: &ActorId, actors: &[ActorId]) -> usize { + actors.iter().position(|a| a == actor).unwrap() } impl Encodable for ActorId { - fn encode_with_actors( - &self, - buf: &mut R, - actors: &mut Vec, - ) -> io::Result { - map_actor(self, actors).encode(buf) + fn encode_with_actors(&self, buf: &mut R, actors: &[ActorId]) -> io::Result { + actor_index(self, actors).encode(buf) } fn encode(&self, _buf: &mut R) -> io::Result { @@ -601,7 +592,7 @@ impl ValEncoder { } } - fn append_value2(&mut self, val: &ScalarValue, actors: &mut Vec) { + fn append_value2(&mut self, val: &ScalarValue, actors: &[ActorId]) { // It may seem weird to have two consecutive matches on the same value. 
The reason is so // that we don't have to repeat the `append_null` calls on ref_actor and ref_counter in // every arm of the next match @@ -725,7 +716,7 @@ impl KeyEncoderOld { } } - fn append(&mut self, key: amp::Key, actors: &mut Vec) { + fn append(&mut self, key: amp::Key, actors: &[ActorId]) { match key { amp::Key::Map(s) => { self.actor.append_null(); @@ -738,7 +729,7 @@ impl KeyEncoderOld { self.str.append_null(); } amp::Key::Seq(amp::ElementId::Id(amp::OpId(ctr, actor))) => { - self.actor.append_value(map_actor(&actor, actors)); + self.actor.append_value(actor_index(&actor, actors)); self.ctr.append_value(ctr); self.str.append_null(); } @@ -811,11 +802,11 @@ impl PredEncoder { } } - fn append(&mut self, pred: &SortedVec, actors: &mut Vec) { + fn append(&mut self, pred: &SortedVec, actors: &[ActorId]) { self.num.append_value(pred.len()); for p in pred.iter() { self.ctr.append_value(p.0); - self.actor.append_value(map_actor(&p.1, actors)); + self.actor.append_value(actor_index(&p.1, actors)); } } @@ -879,14 +870,14 @@ impl ObjEncoderOld { } } - fn append(&mut self, obj: &::ObjectId, actors: &mut Vec) { + fn append(&mut self, obj: &::ObjectId, actors: &[ActorId]) { match obj { amp::ObjectId::Root => { self.actor.append_null(); self.ctr.append_null(); } amp::ObjectId::Id(amp::OpId(ctr, actor)) => { - self.actor.append_value(map_actor(actor, actors)); + self.actor.append_value(actor_index(actor, actors)); self.ctr.append_value(*ctr); } } @@ -1131,10 +1122,7 @@ pub(crate) struct ColumnEncoder { } impl ColumnEncoder { - pub fn encode_ops<'a, I>( - ops: I, - actors: &'a mut Vec, - ) -> (Vec, HashMap>) + pub fn encode_ops<'a, I>(ops: I, actors: &[ActorId]) -> (Vec, HashMap>) where I: IntoIterator, { @@ -1154,7 +1142,7 @@ impl ColumnEncoder { } } - fn encode<'a, 'b, I>(&'a mut self, ops: I, actors: &'b mut Vec) + fn encode<'a, 'b, I>(&'a mut self, ops: I, actors: &[ActorId]) where I: IntoIterator, { @@ -1163,7 +1151,7 @@ impl ColumnEncoder { } } - fn append(&mut self, 
op: &::Op, actors: &mut Vec) { + fn append(&mut self, op: &::Op, actors: &[ActorId]) { self.obj.append(&op.obj, actors); self.key.append(op.key.clone(), actors); self.insert.append(op.insert); diff --git a/automerge/src/encoding.rs b/automerge/src/encoding.rs index 07245f54..f02a8de9 100644 --- a/automerge/src/encoding.rs +++ b/automerge/src/encoding.rs @@ -246,11 +246,7 @@ pub(crate) trait Encodable { Ok(buf) } - fn encode_with_actors( - &self, - buf: &mut R, - _actors: &mut Vec, - ) -> io::Result { + fn encode_with_actors(&self, buf: &mut R, _actors: &[ActorId]) -> io::Result { self.encode(buf) } From d2a7cc5f7512d3ac7cbeced3b44db6fa522d5864 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 6 Jan 2022 17:48:06 -0500 Subject: [PATCH 015/730] get sync tests working --- automerge-js/src/index.js | 9 +- automerge-js/test/sync_test.js | 40 +++---- automerge-wasm/src/lib.rs | 189 +++++++++++++++++++++++++++------ automerge/src/automerge.rs | 6 +- automerge/src/sync.rs | 11 +- automerge/src/sync/state.rs | 4 + automerge/src/types.rs | 6 ++ 7 files changed, 203 insertions(+), 62 deletions(-) diff --git a/automerge-js/src/index.js b/automerge-js/src/index.js index 1d15789b..c9d0f4f7 100644 --- a/automerge-js/src/index.js +++ b/automerge-js/src/index.js @@ -56,7 +56,7 @@ function change(doc, options, callback) { doc[FROZEN] = true let root = rootProxy(state); callback(root) - if (state.pending_ops() === 0) { + if (state.pendingOps() === 0) { doc[FROZEN] = false doc[HEADS] = undefined return doc @@ -255,7 +255,8 @@ function decodeSyncState() { function generateSyncMessage(doc, syncState) { const state = doc[STATE] - return [ syncState, state.generateSyncMessage(syncState) ] + const result = state.generateSyncMessage(syncState) + return result } function receiveSyncMessage(doc, syncState, message) { @@ -273,9 +274,9 @@ function receiveSyncMessage(doc, syncState, message) { } const state = doc[STATE] const heads = state.getHeads() - state.receiveSyncMessage(syncState, 
message) + const newSyncState = state.receiveSyncMessage(syncState, message) doc[HEADS] = heads - return [rootProxy(state, true), syncState, null]; + return [rootProxy(state, true), newSyncState, null]; } function initSyncState() { diff --git a/automerge-js/test/sync_test.js b/automerge-js/test/sync_test.js index d455d0f1..86c3b3fd 100644 --- a/automerge-js/test/sync_test.js +++ b/automerge-js/test/sync_test.js @@ -1,10 +1,14 @@ const assert = require('assert') -//const Automerge = process.env.TEST_DIST === '1' ? require('../dist/automerge') : require('../src/automerge') -const Automerge = require('..') +const Automerge = require('..'); const { BloomFilter } = require('../src/sync') const { decodeChangeMeta } = require('../src/columnar') const { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } = Automerge +function inspect(a) { + const util = require("util"); + return util.inspect(a,false,null,true) +} + function getHeads(doc) { return Automerge.getHeads(doc) } @@ -147,12 +151,12 @@ describe('Data sync protocol', () => { ;[n1, s1, patch] = Automerge.receiveSyncMessage(n1, s1, message) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) - //assert.deepStrictEqual(patch.diffs.props, {y: {'5@def456': {type: 'value', value: 4}}}) // changes arrived + //assert.deepStrictEqual(patch.diffs.props, {y: {'5@def456': {type: 'value', value: 4, datatype: 'int'}}}) // changes arrived // n2 applies the changes and sends confirmation ending the exchange ;[n2, s2, patch] = Automerge.receiveSyncMessage(n2, s2, message) ;[s2, message] = Automerge.generateSyncMessage(n2, s2) - //assert.deepStrictEqual(patch.diffs.props, {x: {'5@abc123': {type: 'value', value: 4}}}) // changes arrived + //assert.deepStrictEqual(patch.diffs.props, {x: {'5@abc123': {type: 'value', value: 4, datatype: 'int'}}}) // changes arrived // n1 receives the message and has nothing more to say ;[n1, s1, 
patch] = Automerge.receiveSyncMessage(n1, s1, message) @@ -185,9 +189,9 @@ describe('Data sync protocol', () => { // n1 and n2 receives that message and update sync state but make no patch let patch1, patch2 ;[n1, s1, patch1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) - assert.deepStrictEqual(patch1, null) // no changes arrived, so no patch + //assert.deepStrictEqual(patch1, null) // no changes arrived, so no patch ;[n2, s2, patch2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) - assert.deepStrictEqual(patch2, null) // no changes arrived, so no patch + //assert.deepStrictEqual(patch2, null) // no changes arrived, so no patch // now both reply with their local changes the other lacks // (standard warning that 1% of the time this will result in a "need" message) @@ -238,7 +242,7 @@ describe('Data sync protocol', () => { let s1 = initSyncState(), message = null n1 = Automerge.change(n1, {time: 0}, doc => doc.items = []) - ;[n1, n2, s1, /* s2 */] = sync(n1, n2) + ;[n1, n2, s1, s2 ] = sync(n1, n2) n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('x')) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) @@ -328,7 +332,7 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(s2.sharedHeads, getHeads(n1)) }) - it.skip('should re-sync after one node crashed with data loss', () => { + it('should re-sync after one node crashed with data loss', () => { // Scenario: (r) (n2) (n1) // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. 
@@ -471,7 +475,7 @@ describe('Data sync protocol', () => { for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2) - for (let i = 440; ; i++) { // search for false positive; see comment above + for (let i = 1; ; i++) { // search for false positive; see comment above const n1up = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) const n2up = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) if (new BloomFilter(getHeads(n1up)).containsHash(getHeads(n2up)[0])) { @@ -503,7 +507,7 @@ describe('Data sync protocol', () => { ;[n1, n2, s1, s2] = sync(n1, n2) let n1hash1, n2hash1 - for (let i = 34; ; i++) { // search for false positive; see comment above + for (let i = 29; ; i++) { // search for false positive; see comment above const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) n1hash1 = getHeads(n1us1)[0]; n2hash1 = getHeads(n2us1)[0] @@ -522,7 +526,7 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n2), [n1hash2, n2hash2].sort()) }) - it.skip('should sync two nodes with connection reset', () => { + it('should sync two nodes with connection reset', () => { s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) @@ -530,7 +534,7 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n2), [n1hash2, n2hash2].sort()) }) - it.skip('should sync three nodes', () => { + it('should sync three nodes', () => { s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) @@ -569,7 +573,7 @@ describe('Data sync protocol', () => { for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, 
s1, s2] = sync(n1, n2) - for (let i = 80; ; i++) { // search for false positive; see comment above + for (let i = 86; ; i++) { // search for false positive; see comment above const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) const n1hash1 = getHeads(n1us1)[0] @@ -591,7 +595,7 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n2), bothHeads) }) - it.skip('should handle chains of false-positives', () => { + it('should handle chains of false-positives', () => { // Scenario: ,-- c5 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ // `-- n2c1 <-- n2c2 <-- n2c3 @@ -603,13 +607,13 @@ describe('Data sync protocol', () => { for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 5) - for (let i = 608; ; i++) { // search for false positive; see comment above + for (let i = 2; ; i++) { // search for false positive; see comment above const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) if (new BloomFilter(getHeads(n1)).containsHash(getHeads(n2us1)[0])) { n2 = n2us1; break } } - for (let i = 19; ; i++) { // search for false positive; see comment above + for (let i = 141; ; i++) { // search for false positive; see comment above const n2us2 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} again`) if (new BloomFilter(getHeads(n1)).containsHash(getHeads(n2us2)[0])) { n2 = n2us2; break @@ -639,7 +643,7 @@ describe('Data sync protocol', () => { s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) - for (let i = 440; ; i++) { // brute-force search for false positive; see comment above + for (let i = 1; ; i++) { // brute-force search for false 
positive; see comment above const n1up = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) const n2up = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) // check if the bloom filter on n2 will believe n1 already has a particular hash @@ -675,7 +679,7 @@ describe('Data sync protocol', () => { }) describe('protocol features', () => { - it.skip('should allow multiple Bloom filters', () => { + it('should allow multiple Bloom filters', () => { // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 // c0 <-- c1 <-- c2 <-+--- n2c1 <-- n2c2 <-- n2c3 // `-- n3c1 <-- n3c2 <-- n3c3 diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 9680cd9f..f5b0de4e 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -88,12 +88,14 @@ impl SyncState { self.0.sent_hashes = hashes_set } +/* fn decode(data: Uint8Array) -> Result { let data = data.to_vec(); let s = am::SyncState::decode(&data); let s = s.map_err(to_js_err)?; Ok(SyncState(s)) } +*/ } #[derive(Debug)] @@ -199,17 +201,21 @@ impl Automerge { vals.push(value); } else { let value = self.import_value(i, None)?; - vals.push(value.into()); + vals.push(value); } } } - let result = self.0 + let result = self + .0 .splice(&obj, start, delete_count, vals) .map_err(to_js_err)?; - if result.len() == 0 { + if result.is_empty() { Ok(JsValue::null()) } else { - let result : Array = result.iter().map(|r| JsValue::from(r.to_string())).collect(); + let result: Array = result + .iter() + .map(|r| JsValue::from(r.to_string())) + .collect(); Ok(result.into()) } } @@ -391,19 +397,19 @@ impl Automerge { } #[wasm_bindgen(js_name = getHeads)] - pub fn get_heads(&mut self) -> Result { + pub fn get_heads(&mut self) -> Array { let heads = self.0.get_heads(); let heads: Array = heads .iter() .map(|h| JsValue::from_str(&hex::encode(&h.0))) .collect(); - Ok(heads) + heads } #[wasm_bindgen(js_name = getActorId)] - pub fn 
get_actor_id(&mut self) -> Result { + pub fn get_actor_id(&mut self) -> JsValue { let actor = self.0.get_actor(); - Ok(actor.to_string().into()) + actor.to_string().into() } #[wasm_bindgen(js_name = getLastLocalChange)] @@ -433,24 +439,30 @@ impl Automerge { #[wasm_bindgen(js_name = receiveSyncMessage)] pub fn receive_sync_message( &mut self, - state: &mut SyncState, + state: JsValue, message: Uint8Array, - ) -> Result<(), JsValue> { + ) -> Result { + let mut state = JS(state).try_into()?; let message = message.to_vec(); let message = am::SyncMessage::decode(message.as_slice()).map_err(to_js_err)?; self.0 - .receive_sync_message(&mut state.0, message) + .receive_sync_message(&mut state, message) .map_err(to_js_err)?; - Ok(()) + Ok(JS::from(state).0) } #[wasm_bindgen(js_name = generateSyncMessage)] - pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result { - if let Some(message) = self.0.generate_sync_message(&mut state.0) { - Ok(Uint8Array::from(message.encode().map_err(to_js_err)?.as_slice()).into()) + pub fn generate_sync_message(&mut self, state: JsValue) -> Result { + let mut state = JS(state).try_into()?; + let result = Array::new(); + if let Some(message) = self.0.generate_sync_message(&mut state) { + result.push(&JS::from(state).0); + result.push(&Uint8Array::from(message.encode().map_err(to_js_err)?.as_slice()).into()); } else { - Ok(JsValue::null()) + result.push(&JS::from(state).0); + result.push(&JsValue::null()); } + Ok(result) } fn export(&self, val: ObjId) -> JsValue { @@ -619,8 +631,8 @@ pub fn decode_change(change: Uint8Array) -> Result { } #[wasm_bindgen(js_name = initSyncState)] -pub fn init_sync_state() -> SyncState { - SyncState(Default::default()) +pub fn init_sync_state() -> JsValue { + JS::from(am::SyncState::new()).0 } #[wasm_bindgen(js_name = encodeSyncMessage)] @@ -649,7 +661,7 @@ pub fn decode_sync_message(msg: Uint8Array) -> Result { let heads: Array = VH(&msg.heads).into(); let need: Array = VH(&msg.need).into(); let 
changes: Array = VC(&msg.changes).into(); - let have: Array = VSH(&msg.have).try_into()?; + let have: Array = VSH(&msg.have).into(); let obj = Object::new().into(); set(&obj, "heads", heads)?; set(&obj, "need", need)?; @@ -659,15 +671,20 @@ pub fn decode_sync_message(msg: Uint8Array) -> Result { } #[wasm_bindgen(js_name = encodeSyncState)] -pub fn encode_sync_state(state: SyncState) -> Result { +pub fn encode_sync_state(state: JsValue) -> Result { + let state: am::SyncState = JS(state).try_into()?; Ok(Uint8Array::from( - state.0.encode().map_err(to_js_err)?.as_slice(), + state.encode().map_err(to_js_err)?.as_slice(), )) } #[wasm_bindgen(js_name = decodeSyncState)] -pub fn decode_sync_state(state: Uint8Array) -> Result { - SyncState::decode(state) +pub fn decode_sync_state(data: Uint8Array) -> Result { + //SyncState::decode(state) + let data = data.to_vec(); + let s = am::SyncState::decode(&data); + let s = s.map_err(to_js_err)?; + Ok(JS::from(s).0) } #[wasm_bindgen(js_name = MAP)] @@ -696,6 +713,79 @@ fn set>(obj: &JsValue, prop: &str, val: V) -> Result for JS { + fn from(state: am::SyncState) -> Self { + let shared_heads: JS = state.shared_heads.into(); + let last_sent_heads: JS = state.last_sent_heads.into(); + let their_heads: JS = state.their_heads.into(); + let their_need: JS = state.their_need.into(); + let sent_hashes: JS = state.sent_hashes.into(); + let their_have = if let Some(have) = &state.their_have { + let tmp: Array = VSH(have).into(); + JsValue::from(&tmp) + } else { + JsValue::null() + }; + let result: JsValue = Object::new().into(); + // we can unwrap here b/c we made the object and know its not frozen + Reflect::set(&result, &"sharedHeads".into(), &shared_heads.0).unwrap(); + Reflect::set(&result, &"lastSentHeads".into(), &last_sent_heads.0).unwrap(); + Reflect::set(&result, &"theirHeads".into(), &their_heads.0).unwrap(); + Reflect::set(&result, &"theirNeed".into(), &their_need.0).unwrap(); + Reflect::set(&result, &"theirHave".into(), 
&their_have).unwrap(); + Reflect::set(&result, &"sentHashes".into(), &sent_hashes.0).unwrap(); + JS(result) + } +} + +impl From> for JS { + fn from(heads: Vec) -> Self { + let heads: Array = heads + .iter() + .map(|h| JsValue::from_str(&h.to_string())) + .collect(); + JS(heads.into()) + } +} + +impl From> for JS { + fn from(heads: HashSet) -> Self { + let result: JsValue = Object::new().into(); + for key in &heads { + Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap(); + } + JS(result) + } +} + +impl From>> for JS { + fn from(heads: Option>) -> Self { + if let Some(v) = heads { + let v: Array = v + .iter() + .map(|h| JsValue::from_str(&h.to_string())) + .collect(); + JS(v.into()) + } else { + JS(JsValue::null()) + } + } +} + +impl TryFrom for HashSet { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let mut result = HashSet::new(); + for key in Reflect::own_keys(&value.0)?.iter() { + if let Some(true) = Reflect::get(&value.0, &key)?.as_bool() { + result.insert(key.into_serde().map_err(to_js_err)?); + } + } + Ok(result) + } +} + impl TryFrom for Vec { type Error = JsValue; @@ -732,6 +822,40 @@ impl TryFrom for Vec { } } +impl TryFrom for am::SyncState { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let value = value.0; + let shared_heads = get(&value, "sharedHeads")?.try_into()?; + let last_sent_heads = get(&value, "lastSentHeads")?.into(); + let their_heads = get(&value, "theirHeads")?.into(); + let their_need = get(&value, "theirNeed")?.into(); + let their_have = get(&value, "theirHave")?.try_into()?; + let sent_hashes = get(&value, "sentHashes")?.try_into()?; + Ok(am::SyncState { + shared_heads, + last_sent_heads, + their_heads, + their_need, + their_have, + sent_hashes, + }) + } +} + +impl TryFrom for Option> { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + if value.0.is_null() { + Ok(None) + } else { + Ok(Some(value.try_into()?)) + } + } +} + impl TryFrom for Vec { type Error = JsValue; @@ 
-790,11 +914,9 @@ impl<'a> From> for Array { #[allow(clippy::upper_case_acronyms)] struct VSH<'a>(&'a [am::SyncHave]); -impl<'a> TryFrom> for Array { - type Error = JsValue; - - fn try_from(value: VSH<'a>) -> Result { - let have: Result = value +impl<'a> From> for Array { + fn from(value: VSH<'a>) -> Self { + value .0 .iter() .map(|have| { @@ -806,13 +928,12 @@ impl<'a> TryFrom> for Array { // FIXME - the clone and the unwrap here shouldnt be needed - look at into_bytes() let bloom = Uint8Array::from(have.bloom.clone().into_bytes().unwrap().as_slice()); let obj: JsValue = Object::new().into(); - Reflect::set(&obj, &"lastSync".into(), &last_sync.into())?; - Reflect::set(&obj, &"bloom".into(), &bloom.into())?; - Ok(obj) + // we can unwrap here b/c we created the object and know its not frozen + Reflect::set(&obj, &"lastSync".into(), &last_sync.into()).unwrap(); + Reflect::set(&obj, &"bloom".into(), &bloom.into()).unwrap(); + obj }) - .collect(); - let have = have?; - Ok(have) + .collect() } } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 45cd7863..bf89f46f 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -544,12 +544,12 @@ impl Automerge { self.apply_change(c.clone()); } else { self.queue.push(c.clone()); - while let Some(c) = self.pop_next_causally_ready_change() { - self.apply_change(c); - } } } } + while let Some(c) = self.pop_next_causally_ready_change() { + self.apply_change(c); + } Ok(Patch {}) } diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 3d58da70..a6b18a1c 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -1,3 +1,4 @@ +use itertools::Itertools; use std::{ borrow::Cow, collections::{HashMap, HashSet}, @@ -152,16 +153,20 @@ impl Automerge { .collect::>(); if known_heads.len() == message_heads.len() { sync_state.shared_heads = message_heads.clone(); + // If the remote peer has lost all its data, reset our state to perform a full resync + if message_heads.is_empty() { + 
sync_state.last_sent_heads = Some(Default::default()); + sync_state.sent_hashes = Default::default(); + } } else { sync_state.shared_heads = sync_state .shared_heads .iter() .chain(known_heads) - .collect::>() - .into_iter() .copied() + .unique() + .sorted() .collect::>(); - sync_state.shared_heads.sort(); } sync_state.their_have = Some(message_have); diff --git a/automerge/src/sync/state.rs b/automerge/src/sync/state.rs index 6cd6f547..4772dd0e 100644 --- a/automerge/src/sync/state.rs +++ b/automerge/src/sync/state.rs @@ -22,6 +22,10 @@ pub struct SyncHave { } impl SyncState { + pub fn new() -> Self { + Default::default() + } + pub fn encode(&self) -> Result, encoding::Error> { let mut buf = vec![SYNC_STATE_TYPE]; encode_hashes(&mut buf, &self.shared_heads)?; diff --git a/automerge/src/types.rs b/automerge/src/types.rs index c8856fe4..b2bf4158 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -389,6 +389,12 @@ impl fmt::Debug for ChangeHash { } } +impl fmt::Display for ChangeHash { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", hex::encode(&self.0)) + } +} + #[derive(thiserror::Error, Debug)] pub enum ParseChangeHashError { #[error(transparent)] From 45ee5ddbd900548ae546cc517a009ab050a47268 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 7 Jan 2022 12:15:14 -0500 Subject: [PATCH 016/730] add import/export --- automerge-wasm/src/lib.rs | 100 ++++++++++++++++++------------------ automerge/src/sync.rs | 12 ++--- automerge/src/sync/state.rs | 6 +-- 3 files changed, 58 insertions(+), 60 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index f5b0de4e..e3a0f077 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -72,12 +72,12 @@ impl SyncState { #[wasm_bindgen(getter, js_name = lastSentHeads)] pub fn last_sent_heads(&self) -> JsValue { - rust_to_js(self.0.last_sent_heads.as_ref()).unwrap() + rust_to_js(&self.0.last_sent_heads).unwrap() } #[wasm_bindgen(setter, 
js_name = lastSentHeads)] pub fn set_last_sent_heads(&mut self, heads: JsValue) { - let heads: Option> = js_to_rust(&heads).unwrap(); + let heads: Vec = js_to_rust(&heads).unwrap(); self.0.last_sent_heads = heads } @@ -87,15 +87,6 @@ impl SyncState { let hashes_set: HashSet = hashes_map.keys().cloned().collect(); self.0.sent_hashes = hashes_set } - -/* - fn decode(data: Uint8Array) -> Result { - let data = data.to_vec(); - let s = am::SyncState::decode(&data); - let s = s.map_err(to_js_err)?; - Ok(SyncState(s)) - } -*/ } #[derive(Debug)] @@ -454,14 +445,14 @@ impl Automerge { #[wasm_bindgen(js_name = generateSyncMessage)] pub fn generate_sync_message(&mut self, state: JsValue) -> Result { let mut state = JS(state).try_into()?; - let result = Array::new(); - if let Some(message) = self.0.generate_sync_message(&mut state) { - result.push(&JS::from(state).0); - result.push(&Uint8Array::from(message.encode().map_err(to_js_err)?.as_slice()).into()); + let message = if let Some(message) = self.0.generate_sync_message(&mut state) { + Uint8Array::from(message.encode().map_err(to_js_err)?.as_slice()).into() } else { - result.push(&JS::from(state).0); - result.push(&JsValue::null()); - } + JsValue::null() + }; + let result = Array::new(); + result.push(&JS::from(state).0); + result.push(&message); Ok(result) } @@ -635,6 +626,16 @@ pub fn init_sync_state() -> JsValue { JS::from(am::SyncState::new()).0 } +#[wasm_bindgen(js_name = importSyncState)] +pub fn import_sync_state(state: JsValue) -> Result { + Ok(SyncState(JS(state).try_into()?)) +} + +#[wasm_bindgen(js_name = exportSyncState)] +pub fn export_sync_state(state: SyncState) -> JsValue { + JS::from(state.0).into() +} + #[wasm_bindgen(js_name = encodeSyncMessage)] pub fn encode_sync_message(message: JsValue) -> Result { let heads = get(&message, "heads")?.try_into()?; @@ -658,10 +659,10 @@ pub fn encode_sync_message(message: JsValue) -> Result { pub fn decode_sync_message(msg: Uint8Array) -> Result { let data = 
msg.to_vec(); let msg = am::SyncMessage::decode(&data).map_err(to_js_err)?; - let heads: Array = VH(&msg.heads).into(); - let need: Array = VH(&msg.need).into(); - let changes: Array = VC(&msg.changes).into(); - let have: Array = VSH(&msg.have).into(); + let heads = AR::from(msg.heads.as_slice()); + let need = AR::from(msg.need.as_slice()); + let changes = AR::from(msg.changes.as_slice()); + let have = AR::from(msg.have.as_slice()); let obj = Object::new().into(); set(&obj, "heads", heads)?; set(&obj, "need", need)?; @@ -703,8 +704,8 @@ fn to_js_err(err: T) -> JsValue { js_sys::Error::new(&std::format!("{}", err)).into() } -fn get(obj: &JsValue, prop: &str) -> Result { - Ok(JS(Reflect::get(obj, &prop.into())?)) +fn get>(obj: J, prop: &str) -> Result { + Ok(JS(Reflect::get(&obj.into(), &prop.into())?)) } fn set>(obj: &JsValue, prop: &str, val: V) -> Result { @@ -712,6 +713,19 @@ fn set>(obj: &JsValue, prop: &str, val: V) -> Result for JsValue { + fn from(ar: AR) -> Self { + ar.0.into() + } +} + +impl From for JsValue { + fn from(js: JS) -> Self { + js.0 + } +} impl From for JS { fn from(state: am::SyncState) -> Self { @@ -721,8 +735,7 @@ impl From for JS { let their_need: JS = state.their_need.into(); let sent_hashes: JS = state.sent_hashes.into(); let their_have = if let Some(have) = &state.their_have { - let tmp: Array = VSH(have).into(); - JsValue::from(&tmp) + JsValue::from(AR::from(have.as_slice()).0) } else { JsValue::null() }; @@ -828,7 +841,7 @@ impl TryFrom for am::SyncState { fn try_from(value: JS) -> Result { let value = value.0; let shared_heads = get(&value, "sharedHeads")?.try_into()?; - let last_sent_heads = get(&value, "lastSentHeads")?.into(); + let last_sent_heads = get(&value, "lastSentHeads")?.try_into()?; let their_heads = get(&value, "theirHeads")?.into(); let their_need = get(&value, "theirNeed")?.into(); let their_have = get(&value, "theirHave")?.try_into()?; @@ -885,39 +898,28 @@ impl TryFrom for am::BloomFilter { } } -struct VH<'a>(&'a 
[ChangeHash]); - -impl<'a> From> for Array { - fn from(value: VH<'a>) -> Self { - let heads: Array = value - .0 +impl From<&[ChangeHash]> for AR { + fn from(value: &[ChangeHash]) -> Self { + AR(value .iter() .map(|h| JsValue::from_str(&hex::encode(&h.0))) - .collect(); - heads + .collect()) } } -struct VC<'a>(&'a [Change]); - -impl<'a> From> for Array { - fn from(value: VC<'a>) -> Self { +impl From<&[Change]> for AR { + fn from(value: &[Change]) -> Self { let changes: Array = value - .0 .iter() .map(|c| Uint8Array::from(c.raw_bytes())) .collect(); - changes + AR(changes) } } -#[allow(clippy::upper_case_acronyms)] -struct VSH<'a>(&'a [am::SyncHave]); - -impl<'a> From> for Array { - fn from(value: VSH<'a>) -> Self { - value - .0 +impl From<&[am::SyncHave]> for AR { + fn from(value: &[am::SyncHave]) -> Self { + AR(value .iter() .map(|have| { let last_sync: Array = have @@ -933,7 +935,7 @@ impl<'a> From> for Array { Reflect::set(&obj, &"bloom".into(), &bloom.into()).unwrap(); obj }) - .collect() + .collect()) } } diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index a6b18a1c..2a398959 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -71,11 +71,7 @@ impl Automerge { Vec::new() }; - let heads_unchanged = if let Some(last_sent_heads) = sync_state.last_sent_heads.as_ref() { - last_sent_heads == &our_heads - } else { - false - }; + let heads_unchanged = sync_state.last_sent_heads == our_heads; let heads_equal = if let Some(their_heads) = sync_state.their_heads.as_ref() { their_heads == &our_heads @@ -90,7 +86,7 @@ impl Automerge { // deduplicate the changes to send with those we have already sent changes_to_send.retain(|change| !sync_state.sent_hashes.contains(&change.hash)); - sync_state.last_sent_heads = Some(our_heads.clone()); + sync_state.last_sent_heads = our_heads.clone(); sync_state .sent_hashes .extend(changes_to_send.iter().map(|c| c.hash)); @@ -144,7 +140,7 @@ impl Automerge { self.filter_changes(&message_heads, &mut 
sync_state.sent_hashes); if changes_is_empty && message_heads == before_heads { - sync_state.last_sent_heads = Some(message_heads.clone()); + sync_state.last_sent_heads = message_heads.clone(); } let known_heads = message_heads @@ -155,7 +151,7 @@ impl Automerge { sync_state.shared_heads = message_heads.clone(); // If the remote peer has lost all its data, reset our state to perform a full resync if message_heads.is_empty() { - sync_state.last_sent_heads = Some(Default::default()); + sync_state.last_sent_heads = Default::default(); sync_state.sent_hashes = Default::default(); } } else { diff --git a/automerge/src/sync/state.rs b/automerge/src/sync/state.rs index 4772dd0e..7a5a2e66 100644 --- a/automerge/src/sync/state.rs +++ b/automerge/src/sync/state.rs @@ -8,7 +8,7 @@ const SYNC_STATE_TYPE: u8 = 0x43; // first byte of an encoded sync state, for id #[derive(Debug, Clone)] pub struct SyncState { pub shared_heads: Vec, - pub last_sent_heads: Option>, + pub last_sent_heads: Vec, pub their_heads: Option>, pub their_need: Option>, pub their_have: Option>, @@ -46,7 +46,7 @@ impl SyncState { let shared_heads = decode_hashes(&mut decoder)?; Ok(Self { shared_heads, - last_sent_heads: Some(Vec::new()), + last_sent_heads: Vec::new(), their_heads: None, their_need: None, their_have: Some(Vec::new()), @@ -59,7 +59,7 @@ impl Default for SyncState { fn default() -> Self { Self { shared_heads: Vec::new(), - last_sent_heads: Some(Vec::new()), + last_sent_heads: Vec::new(), their_heads: None, their_need: None, their_have: None, From b67098d5e10f75e05915ff98e3b5328cba4b62b7 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 7 Jan 2022 12:27:39 -0500 Subject: [PATCH 017/730] convert automerge-js to use import/export --- automerge-js/src/index.js | 18 ++++++++------ automerge-wasm/src/lib.rs | 52 +++++++++++++++++++-------------------- 2 files changed, 37 insertions(+), 33 deletions(-) diff --git a/automerge-js/src/index.js b/automerge-js/src/index.js index c9d0f4f7..0b8110d9 
100644 --- a/automerge-js/src/index.js +++ b/automerge-js/src/index.js @@ -253,13 +253,16 @@ function decodeSyncState() { return AutomergeWASM.decodeSyncState(state) } -function generateSyncMessage(doc, syncState) { +function generateSyncMessage(doc, inState) { const state = doc[STATE] - const result = state.generateSyncMessage(syncState) - return result + const syncState = AutomergeWASM.importSyncState(inState) + const message = state.generateSyncMessage(syncState) + const outState = AutomergeWASM.exportSyncState(syncState) + return [ outState, message ] } -function receiveSyncMessage(doc, syncState, message) { +function receiveSyncMessage(doc, inState, message) { + const syncState = AutomergeWASM.importSyncState(inState) if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") { throw new RangeError("must be the document root"); } @@ -274,13 +277,14 @@ function receiveSyncMessage(doc, syncState, message) { } const state = doc[STATE] const heads = state.getHeads() - const newSyncState = state.receiveSyncMessage(syncState, message) + state.receiveSyncMessage(syncState, message) + const outState = AutomergeWASM.exportSyncState(syncState) doc[HEADS] = heads - return [rootProxy(state, true), newSyncState, null]; + return [rootProxy(state, true), outState, null]; } function initSyncState() { - return AutomergeWASM.initSyncState(change) + return AutomergeWASM.exportSyncState(AutomergeWASM.initSyncState(change)) } function encodeChange(change) { diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index e3a0f077..6168260c 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -2,9 +2,9 @@ extern crate web_sys; use automerge as am; use automerge::{Change, ChangeHash, ObjId, Prop, Value}; use js_sys::{Array, Object, Reflect, Uint8Array}; -use serde::de::DeserializeOwned; -use serde::Serialize; -use std::collections::{HashMap, HashSet}; +//use serde::de::DeserializeOwned; +//use serde::Serialize; +use 
std::collections::HashSet; use std::convert::TryFrom; use std::convert::TryInto; use std::fmt::Display; @@ -63,21 +63,22 @@ pub struct Automerge(automerge::Automerge); #[derive(Debug)] pub struct SyncState(am::SyncState); +/* #[wasm_bindgen] impl SyncState { #[wasm_bindgen(getter, js_name = sharedHeads)] pub fn shared_heads(&self) -> JsValue { - rust_to_js(&self.0.shared_heads).unwrap() + AR::from(self.0.shared_heads.as_slice()).into() } #[wasm_bindgen(getter, js_name = lastSentHeads)] pub fn last_sent_heads(&self) -> JsValue { - rust_to_js(&self.0.last_sent_heads).unwrap() + AR::from(self.0.last_sent_heads.as_slice()).into() } #[wasm_bindgen(setter, js_name = lastSentHeads)] pub fn set_last_sent_heads(&mut self, heads: JsValue) { - let heads: Vec = js_to_rust(&heads).unwrap(); + let heads: Vec = JS(heads).try_into().unwrap(); self.0.last_sent_heads = heads } @@ -88,6 +89,7 @@ impl SyncState { self.0.sent_hashes = hashes_set } } +*/ #[derive(Debug)] pub struct JsErr(String); @@ -430,30 +432,24 @@ impl Automerge { #[wasm_bindgen(js_name = receiveSyncMessage)] pub fn receive_sync_message( &mut self, - state: JsValue, + state: &mut SyncState, message: Uint8Array, - ) -> Result { - let mut state = JS(state).try_into()?; + ) -> Result<(), JsValue> { let message = message.to_vec(); let message = am::SyncMessage::decode(message.as_slice()).map_err(to_js_err)?; self.0 - .receive_sync_message(&mut state, message) + .receive_sync_message(&mut state.0, message) .map_err(to_js_err)?; - Ok(JS::from(state).0) + Ok(()) } #[wasm_bindgen(js_name = generateSyncMessage)] - pub fn generate_sync_message(&mut self, state: JsValue) -> Result { - let mut state = JS(state).try_into()?; - let message = if let Some(message) = self.0.generate_sync_message(&mut state) { - Uint8Array::from(message.encode().map_err(to_js_err)?.as_slice()).into() + pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result { + if let Some(message) = self.0.generate_sync_message(&mut state.0) { + 
Ok(Uint8Array::from(message.encode().map_err(to_js_err)?.as_slice()).into()) } else { - JsValue::null() - }; - let result = Array::new(); - result.push(&JS::from(state).0); - result.push(&message); - Ok(result) + Ok(JsValue::null()) + } } fn export(&self, val: ObjId) -> JsValue { @@ -622,15 +618,17 @@ pub fn decode_change(change: Uint8Array) -> Result { } #[wasm_bindgen(js_name = initSyncState)] -pub fn init_sync_state() -> JsValue { - JS::from(am::SyncState::new()).0 +pub fn init_sync_state() -> SyncState { + SyncState(am::SyncState::new()) } +// this is needed to be compatible with the automerge-js api #[wasm_bindgen(js_name = importSyncState)] -pub fn import_sync_state(state: JsValue) -> Result { +pub fn import_sync_state(state: JsValue) -> Result { Ok(SyncState(JS(state).try_into()?)) } +// this is needed to be compatible with the automerge-js api #[wasm_bindgen(js_name = exportSyncState)] pub fn export_sync_state(state: SyncState) -> JsValue { JS::from(state.0).into() @@ -717,13 +715,13 @@ struct AR(Array); impl From for JsValue { fn from(ar: AR) -> Self { - ar.0.into() + ar.0.into() } } impl From for JsValue { fn from(js: JS) -> Self { - js.0 + js.0 } } @@ -939,6 +937,7 @@ impl From<&[am::SyncHave]> for AR { } } +/* fn rust_to_js(value: T) -> Result { JsValue::from_serde(&value).map_err(to_js_err) } @@ -946,6 +945,7 @@ fn rust_to_js(value: T) -> Result { fn js_to_rust(value: &JsValue) -> Result { value.into_serde().map_err(to_js_err) } +*/ fn get_heads(heads: JsValue) -> Option> { JS(heads).into() From 04c7e9184d37aee9561edaed71740b74cba2435a Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sat, 8 Jan 2022 10:55:02 -0500 Subject: [PATCH 018/730] port over all the sync tests to the wasm api --- automerge-js/src/index.js | 17 +- automerge-js/src/proxies.js | 2 +- automerge-wasm/package.json | 2 + automerge-wasm/src/lib.rs | 141 ++- automerge-wasm/test/helpers/columnar.js | 1415 +++++++++++++++++++++++ automerge-wasm/test/helpers/common.js | 46 + 
automerge-wasm/test/helpers/encoding.js | 1209 +++++++++++++++++++ automerge-wasm/test/helpers/sync.js | 481 ++++++++ automerge-wasm/test/test.js | 994 +++++++++++++++- 9 files changed, 4257 insertions(+), 50 deletions(-) create mode 100644 automerge-wasm/test/helpers/columnar.js create mode 100644 automerge-wasm/test/helpers/common.js create mode 100644 automerge-wasm/test/helpers/encoding.js create mode 100644 automerge-wasm/test/helpers/sync.js diff --git a/automerge-js/src/index.js b/automerge-js/src/index.js index 0b8110d9..cde193f4 100644 --- a/automerge-js/src/index.js +++ b/automerge-js/src/index.js @@ -246,11 +246,11 @@ function decodeSyncMessage(msg) { } function encodeSyncState(state) { - return AutomergeWASM.encodeSyncState(state) + return AutomergeWASM.encodeSyncState(AutomergeWASM.importSyncState(state)) } -function decodeSyncState() { - return AutomergeWASM.decodeSyncState(state) +function decodeSyncState(state) { + return AutomergeWASM.exportSyncState(AutomergeWASM.decodeSyncState(state)) } function generateSyncMessage(doc, inState) { @@ -303,19 +303,8 @@ function decodeSyncMessage(data) { return AutomergeWASM.decodeSyncMessage(data) } -function encodeSyncState(change) { - return AutomergeWASM.encodeSyncState(change) -} - -function decodeSyncState(data) { - return AutomergeWASM.decodeSyncState(data) -} - function getMissingDeps(doc, heads) { const state = doc[STATE] - if (!heads) { - heads = [] - } return state.getMissingDeps(heads) } diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js index e946b37f..8c02e3e8 100644 --- a/automerge-js/src/proxies.js +++ b/automerge-js/src/proxies.js @@ -40,6 +40,7 @@ function valueAt(target, prop) { case "boolean": return val; case "null": return null; case "bytes": return val; + case "timestamp": return val; case "counter": { if (readonly) { return new Counter(val); @@ -47,7 +48,6 @@ function valueAt(target, prop) { return getWriteableCounter(val, context, path, objectId, prop) } } - case 
"timestamp": return new Date(val); default: throw RangeError(`datatype ${datatype} unimplemented`) } diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 79a0781d..e5cc5114 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -26,6 +26,8 @@ "dependencies": {}, "devDependencies": { "mocha": "^9.1.3", + "pako": "^2.0.4", + "fast-sha256": "^1.3.0", "rimraf": "^3.0.2" } } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 6168260c..299019c0 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -1,10 +1,8 @@ extern crate web_sys; use automerge as am; -use automerge::{Change, ChangeHash, ObjId, Prop, Value}; +use automerge::{Change, ChangeHash, ObjId, Prop, Value, ROOT}; use js_sys::{Array, Object, Reflect, Uint8Array}; -//use serde::de::DeserializeOwned; -//use serde::Serialize; -use std::collections::HashSet; +use std::collections::{HashMap, HashSet}; use std::convert::TryFrom; use std::convert::TryInto; use std::fmt::Display; @@ -48,7 +46,7 @@ impl From for JsValue { am::ScalarValue::Uint(v) => (*v as f64).into(), am::ScalarValue::F64(v) => (*v).into(), am::ScalarValue::Counter(v) => (*v as f64).into(), - am::ScalarValue::Timestamp(v) => (*v as f64).into(), + am::ScalarValue::Timestamp(v) => js_sys::Date::new(&(*v as f64).into()).into(), am::ScalarValue::Boolean(v) => (*v).into(), am::ScalarValue::Null => JsValue::null(), } @@ -63,7 +61,6 @@ pub struct Automerge(automerge::Automerge); #[derive(Debug)] pub struct SyncState(am::SyncState); -/* #[wasm_bindgen] impl SyncState { #[wasm_bindgen(getter, js_name = sharedHeads)] @@ -77,19 +74,32 @@ impl SyncState { } #[wasm_bindgen(setter, js_name = lastSentHeads)] - pub fn set_last_sent_heads(&mut self, heads: JsValue) { - let heads: Vec = JS(heads).try_into().unwrap(); - self.0.last_sent_heads = heads + pub fn set_last_sent_heads(&mut self, heads: JsValue) -> Result<(), JsValue> { + let heads: Vec = JS(heads).try_into()?; + 
self.0.last_sent_heads = heads; + Ok(()) } #[wasm_bindgen(setter, js_name = sentHashes)] - pub fn set_sent_hashes(&mut self, hashes: JsValue) { - let hashes_map: HashMap = js_to_rust(&hashes).unwrap(); + pub fn set_sent_hashes(&mut self, hashes: JsValue) -> Result<(), JsValue> { + let hashes_map: HashMap = hashes.into_serde().map_err(to_js_err)?; let hashes_set: HashSet = hashes_map.keys().cloned().collect(); - self.0.sent_hashes = hashes_set + self.0.sent_hashes = hashes_set; + Ok(()) + } + + fn decode(data: Uint8Array) -> Result { + let data = data.to_vec(); + let s = am::SyncState::decode(&data); + let s = s.map_err(to_js_err)?; + Ok(SyncState(s)) + } + + #[allow(clippy::should_implement_trait)] + pub fn clone(&self) -> Self { + SyncState(self.0.clone()) } } -*/ #[derive(Debug)] pub struct JsErr(String); @@ -118,8 +128,13 @@ impl Automerge { } #[allow(clippy::should_implement_trait)] - pub fn clone(&self) -> Self { - Automerge(self.0.clone()) + pub fn clone(&self, actor: JsValue) -> Result { + let mut automerge = Automerge(self.0.clone()); + if let Some(s) = actor.as_string() { + let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); + automerge.0.set_actor(actor) + } + Ok(automerge) } pub fn free(self) {} @@ -214,6 +229,25 @@ impl Automerge { } } + pub fn push( + &mut self, + obj: JsValue, + value: JsValue, + datatype: JsValue, + ) -> Result { + let obj = self.import(obj)?; + let value = self.import_value(value, datatype.as_string())?; + let index = self.0.length(&obj); + let opid = self + .0 + .insert(&obj, index, value) + .map_err(to_js_err)?; + match opid { + Some(opid) => Ok(self.export(opid)), + None => Ok(JsValue::null()), + } + } + pub fn insert( &mut self, obj: JsValue, @@ -420,7 +454,7 @@ impl Automerge { #[wasm_bindgen(js_name = getMissingDeps)] pub fn get_missing_deps(&mut self, heads: JsValue) -> Result { - let heads: Vec<_> = JS(heads).try_into()?; + let heads: Vec<_> = JS(heads).try_into().unwrap_or_default(); let deps 
= self.0.get_missing_deps(&heads); let deps: Array = deps .iter() @@ -452,6 +486,11 @@ impl Automerge { } } + #[wasm_bindgen(js_name = toJS)] + pub fn to_js(&mut self) -> JsValue { + map_to_js(&mut self.0, &ROOT) + } + fn export(&self, val: ObjId) -> JsValue { val.to_string().into() } @@ -525,6 +564,8 @@ impl Automerge { } } else if let Some(o) = to_objtype(&value) { Ok(o.into()) + } else if let Ok(d) = value.clone().dyn_into::() { + Ok(am::ScalarValue::Timestamp(d.get_time() as i64).into()) } else if let Ok(o) = &value.dyn_into::() { Ok(am::ScalarValue::Bytes(o.to_vec()).into()) } else { @@ -670,20 +711,16 @@ pub fn decode_sync_message(msg: Uint8Array) -> Result { } #[wasm_bindgen(js_name = encodeSyncState)] -pub fn encode_sync_state(state: JsValue) -> Result { - let state: am::SyncState = JS(state).try_into()?; +pub fn encode_sync_state(state: SyncState) -> Result { + let state = state.0; Ok(Uint8Array::from( state.encode().map_err(to_js_err)?.as_slice(), )) } #[wasm_bindgen(js_name = decodeSyncState)] -pub fn decode_sync_state(data: Uint8Array) -> Result { - //SyncState::decode(state) - let data = data.to_vec(); - let s = am::SyncState::decode(&data); - let s = s.map_err(to_js_err)?; - Ok(JS::from(s).0) +pub fn decode_sync_state(data: Uint8Array) -> Result { + SyncState::decode(data) } #[wasm_bindgen(js_name = MAP)] @@ -937,16 +974,52 @@ impl From<&[am::SyncHave]> for AR { } } -/* -fn rust_to_js(value: T) -> Result { - JsValue::from_serde(&value).map_err(to_js_err) -} - -fn js_to_rust(value: &JsValue) -> Result { - value.into_serde().map_err(to_js_err) -} -*/ - fn get_heads(heads: JsValue) -> Option> { JS(heads).into() } + +fn map_to_js(doc: &mut am::Automerge, obj: &ObjId) -> JsValue { + let keys = doc.keys(obj); + let map = Object::new(); + for k in keys { + let val = doc.value(obj, &k); + match val { + Ok(Some((Value::Object(o), exid))) + if o == am::ObjType::Map || o == am::ObjType::Table => + { + Reflect::set(&map, &k.into(), &map_to_js(doc, 
&exid)).unwrap(); + } + Ok(Some((Value::Object(_), exid))) => { + Reflect::set(&map, &k.into(), &list_to_js(doc, &exid)).unwrap(); + } + Ok(Some((Value::Scalar(v), _))) => { + Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap(); + } + _ => (), + }; + } + map.into() +} + +fn list_to_js(doc: &mut am::Automerge, obj: &ObjId) -> JsValue { + let len = doc.length(obj); + let array = Array::new(); + for i in 0..len { + let val = doc.value(obj, i as usize); + match val { + Ok(Some((Value::Object(o), exid))) + if o == am::ObjType::Map || o == am::ObjType::Table => + { + array.push(&map_to_js(doc, &exid)); + } + Ok(Some((Value::Object(_), exid))) => { + array.push(&list_to_js(doc, &exid)); + } + Ok(Some((Value::Scalar(v), _))) => { + array.push(&ScalarValue(v).into()); + } + _ => (), + }; + } + array.into() +} diff --git a/automerge-wasm/test/helpers/columnar.js b/automerge-wasm/test/helpers/columnar.js new file mode 100644 index 00000000..8d266f5b --- /dev/null +++ b/automerge-wasm/test/helpers/columnar.js @@ -0,0 +1,1415 @@ +const pako = require('pako') +const { copyObject, parseOpId, equalBytes } = require('./common') +const { + utf8ToString, hexStringToBytes, bytesToHexString, + Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder +} = require('./encoding') + +// Maybe we should be using the platform's built-in hash implementation? +// Node has the crypto module: https://nodejs.org/api/crypto.html and browsers have +// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest +// However, the WebCrypto API is asynchronous (returns promises), which would +// force all our APIs to become asynchronous as well, which would be annoying. 
+// +// I think on balance, it's safe enough to use a random library off npm: +// - We only need one hash function (not a full suite of crypto algorithms); +// - SHA256 is quite simple and has fairly few opportunities for subtle bugs +// (compared to asymmetric cryptography anyway); +// - It does not need a secure source of random bits and does not need to be +// constant-time; +// - I have reviewed the source code and it seems pretty reasonable. +const { Hash } = require('fast-sha256') + +// These bytes don't mean anything, they were generated randomly +const MAGIC_BYTES = new Uint8Array([0x85, 0x6f, 0x4a, 0x83]) + +const CHUNK_TYPE_DOCUMENT = 0 +const CHUNK_TYPE_CHANGE = 1 +const CHUNK_TYPE_DEFLATE = 2 // like CHUNK_TYPE_CHANGE but with DEFLATE compression + +// Minimum number of bytes in a value before we enable DEFLATE compression (there is no point +// compressing very short values since compression may actually make them bigger) +const DEFLATE_MIN_SIZE = 256 + +// The least-significant 3 bits of a columnId indicate its datatype +const COLUMN_TYPE = { + GROUP_CARD: 0, ACTOR_ID: 1, INT_RLE: 2, INT_DELTA: 3, BOOLEAN: 4, + STRING_RLE: 5, VALUE_LEN: 6, VALUE_RAW: 7 +} + +// The 4th-least-significant bit of a columnId is set if the column is DEFLATE-compressed +const COLUMN_TYPE_DEFLATE = 8 + +// In the values in a column of type VALUE_LEN, the bottom four bits indicate the type of the value, +// one of the following types in VALUE_TYPE. The higher bits indicate the length of the value in the +// associated VALUE_RAW column (in bytes). 
+const VALUE_TYPE = { + NULL: 0, FALSE: 1, TRUE: 2, LEB128_UINT: 3, LEB128_INT: 4, IEEE754: 5, + UTF8: 6, BYTES: 7, COUNTER: 8, TIMESTAMP: 9, MIN_UNKNOWN: 10, MAX_UNKNOWN: 15 +} + +// make* actions must be at even-numbered indexes in this list +const ACTIONS = ['makeMap', 'set', 'makeList', 'del', 'makeText', 'inc', 'makeTable', 'link'] + +const OBJECT_TYPE = {makeMap: 'map', makeList: 'list', makeText: 'text', makeTable: 'table'} + +const COMMON_COLUMNS = [ + {columnName: 'objActor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'objCtr', columnId: 0 << 4 | COLUMN_TYPE.INT_RLE}, + {columnName: 'keyActor', columnId: 1 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'keyCtr', columnId: 1 << 4 | COLUMN_TYPE.INT_DELTA}, + {columnName: 'keyStr', columnId: 1 << 4 | COLUMN_TYPE.STRING_RLE}, + {columnName: 'idActor', columnId: 2 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'idCtr', columnId: 2 << 4 | COLUMN_TYPE.INT_DELTA}, + {columnName: 'insert', columnId: 3 << 4 | COLUMN_TYPE.BOOLEAN}, + {columnName: 'action', columnId: 4 << 4 | COLUMN_TYPE.INT_RLE}, + {columnName: 'valLen', columnId: 5 << 4 | COLUMN_TYPE.VALUE_LEN}, + {columnName: 'valRaw', columnId: 5 << 4 | COLUMN_TYPE.VALUE_RAW}, + {columnName: 'chldActor', columnId: 6 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'chldCtr', columnId: 6 << 4 | COLUMN_TYPE.INT_DELTA} +] + +const CHANGE_COLUMNS = COMMON_COLUMNS.concat([ + {columnName: 'predNum', columnId: 7 << 4 | COLUMN_TYPE.GROUP_CARD}, + {columnName: 'predActor', columnId: 7 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'predCtr', columnId: 7 << 4 | COLUMN_TYPE.INT_DELTA} +]) + +const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([ + {columnName: 'succNum', columnId: 8 << 4 | COLUMN_TYPE.GROUP_CARD}, + {columnName: 'succActor', columnId: 8 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'succCtr', columnId: 8 << 4 | COLUMN_TYPE.INT_DELTA} +]) + +const DOCUMENT_COLUMNS = [ + {columnName: 'actor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'seq', 
columnId: 0 << 4 | COLUMN_TYPE.INT_DELTA}, + {columnName: 'maxOp', columnId: 1 << 4 | COLUMN_TYPE.INT_DELTA}, + {columnName: 'time', columnId: 2 << 4 | COLUMN_TYPE.INT_DELTA}, + {columnName: 'message', columnId: 3 << 4 | COLUMN_TYPE.STRING_RLE}, + {columnName: 'depsNum', columnId: 4 << 4 | COLUMN_TYPE.GROUP_CARD}, + {columnName: 'depsIndex', columnId: 4 << 4 | COLUMN_TYPE.INT_DELTA}, + {columnName: 'extraLen', columnId: 5 << 4 | COLUMN_TYPE.VALUE_LEN}, + {columnName: 'extraRaw', columnId: 5 << 4 | COLUMN_TYPE.VALUE_RAW} +] + +/** + * Maps an opId of the form {counter: 12345, actorId: 'someActorId'} to the form + * {counter: 12345, actorNum: 123, actorId: 'someActorId'}, where the actorNum + * is the index into the `actorIds` array. + */ +function actorIdToActorNum(opId, actorIds) { + if (!opId || !opId.actorId) return opId + const counter = opId.counter + const actorNum = actorIds.indexOf(opId.actorId) + if (actorNum < 0) throw new RangeError('missing actorId') // should not happen + return {counter, actorNum, actorId: opId.actorId} +} + +/** + * Comparison function to pass to Array.sort(), which compares two opIds in the + * form produced by `actorIdToActorNum` so that they are sorted in increasing + * Lamport timestamp order (sorted first by counter, then by actorId). + */ +function compareParsedOpIds(id1, id2) { + if (id1.counter < id2.counter) return -1 + if (id1.counter > id2.counter) return +1 + if (id1.actorId < id2.actorId) return -1 + if (id1.actorId > id2.actorId) return +1 + return 0 +} + +/** + * Takes `changes`, an array of changes (represented as JS objects). Returns an + * object `{changes, actorIds}`, where `changes` is a copy of the argument in + * which all string opIds have been replaced with `{counter, actorNum}` objects, + * and where `actorIds` is a lexicographically sorted array of actor IDs occurring + * in any of the operations. `actorNum` is an index into that array of actorIds. 
+ * If `single` is true, the actorId of the author of the change is moved to the + * beginning of the array of actorIds, so that `actorNum` is zero when referencing + * the author of the change itself. This special-casing is omitted if `single` is + * false. + */ +function parseAllOpIds(changes, single) { + const actors = {}, newChanges = [] + for (let change of changes) { + change = copyObject(change) + actors[change.actor] = true + change.ops = expandMultiOps(change.ops, change.startOp, change.actor) + change.ops = change.ops.map(op => { + op = copyObject(op) + if (op.obj !== '_root') op.obj = parseOpId(op.obj) + if (op.elemId && op.elemId !== '_head') op.elemId = parseOpId(op.elemId) + if (op.child) op.child = parseOpId(op.child) + if (op.pred) op.pred = op.pred.map(parseOpId) + if (op.obj.actorId) actors[op.obj.actorId] = true + if (op.elemId && op.elemId.actorId) actors[op.elemId.actorId] = true + if (op.child && op.child.actorId) actors[op.child.actorId] = true + for (let pred of op.pred) actors[pred.actorId] = true + return op + }) + newChanges.push(change) + } + + let actorIds = Object.keys(actors).sort() + if (single) { + actorIds = [changes[0].actor].concat(actorIds.filter(actor => actor !== changes[0].actor)) + } + for (let change of newChanges) { + change.actorNum = actorIds.indexOf(change.actor) + for (let i = 0; i < change.ops.length; i++) { + let op = change.ops[i] + op.id = {counter: change.startOp + i, actorNum: change.actorNum, actorId: change.actor} + op.obj = actorIdToActorNum(op.obj, actorIds) + op.elemId = actorIdToActorNum(op.elemId, actorIds) + op.child = actorIdToActorNum(op.child, actorIds) + op.pred = op.pred.map(pred => actorIdToActorNum(pred, actorIds)) + } + } + return {changes: newChanges, actorIds} +} + +/** + * Encodes the `obj` property of operation `op` into the two columns + * `objActor` and `objCtr`. 
+ */ +function encodeObjectId(op, columns) { + if (op.obj === '_root') { + columns.objActor.appendValue(null) + columns.objCtr.appendValue(null) + } else if (op.obj.actorNum >= 0 && op.obj.counter > 0) { + columns.objActor.appendValue(op.obj.actorNum) + columns.objCtr.appendValue(op.obj.counter) + } else { + throw new RangeError(`Unexpected objectId reference: ${JSON.stringify(op.obj)}`) + } +} + +/** + * Encodes the `key` and `elemId` properties of operation `op` into the three + * columns `keyActor`, `keyCtr`, and `keyStr`. + */ +function encodeOperationKey(op, columns) { + if (op.key) { + columns.keyActor.appendValue(null) + columns.keyCtr.appendValue(null) + columns.keyStr.appendValue(op.key) + } else if (op.elemId === '_head' && op.insert) { + columns.keyActor.appendValue(null) + columns.keyCtr.appendValue(0) + columns.keyStr.appendValue(null) + } else if (op.elemId && op.elemId.actorNum >= 0 && op.elemId.counter > 0) { + columns.keyActor.appendValue(op.elemId.actorNum) + columns.keyCtr.appendValue(op.elemId.counter) + columns.keyStr.appendValue(null) + } else { + throw new RangeError(`Unexpected operation key: ${JSON.stringify(op)}`) + } +} + +/** + * Encodes the `action` property of operation `op` into the `action` column. + */ +function encodeOperationAction(op, columns) { + const actionCode = ACTIONS.indexOf(op.action) + if (actionCode >= 0) { + columns.action.appendValue(actionCode) + } else if (typeof op.action === 'number') { + columns.action.appendValue(op.action) + } else { + throw new RangeError(`Unexpected operation action: ${op.action}`) + } +} + +/** + * Encodes the integer `value` into the two columns `valLen` and `valRaw`, + * with the datatype tag set to `typeTag`. If `typeTag` is zero, it is set + * automatically to signed or unsigned depending on the sign of the value. + * Values with non-zero type tags are always encoded as signed integers. 
+ */ +function encodeInteger(value, typeTag, columns) { + let numBytes + if (value < 0 || typeTag > 0) { + numBytes = columns.valRaw.appendInt53(value) + if (!typeTag) typeTag = VALUE_TYPE.LEB128_INT + } else { + numBytes = columns.valRaw.appendUint53(value) + typeTag = VALUE_TYPE.LEB128_UINT + } + columns.valLen.appendValue(numBytes << 4 | typeTag) +} + +/** + * Encodes the `value` property of operation `op` into the two columns + * `valLen` and `valRaw`. + */ +function encodeValue(op, columns) { + if ((op.action !== 'set' && op.action !== 'inc') || op.value === null) { + columns.valLen.appendValue(VALUE_TYPE.NULL) + } else if (op.value === false) { + columns.valLen.appendValue(VALUE_TYPE.FALSE) + } else if (op.value === true) { + columns.valLen.appendValue(VALUE_TYPE.TRUE) + } else if (typeof op.value === 'string') { + const numBytes = columns.valRaw.appendRawString(op.value) + columns.valLen.appendValue(numBytes << 4 | VALUE_TYPE.UTF8) + } else if (ArrayBuffer.isView(op.value)) { + const numBytes = columns.valRaw.appendRawBytes(new Uint8Array(op.value.buffer)) + columns.valLen.appendValue(numBytes << 4 | VALUE_TYPE.BYTES) + } else if (op.datatype === 'counter' && typeof op.value === 'number') { + encodeInteger(op.value, VALUE_TYPE.COUNTER, columns) + } else if (op.datatype === 'timestamp' && typeof op.value === 'number') { + encodeInteger(op.value, VALUE_TYPE.TIMESTAMP, columns) + } else if (typeof op.datatype === 'number' && op.datatype >= VALUE_TYPE.MIN_UNKNOWN && + op.datatype <= VALUE_TYPE.MAX_UNKNOWN && op.value instanceof Uint8Array) { + const numBytes = columns.valRaw.appendRawBytes(op.value) + columns.valLen.appendValue(numBytes << 4 | op.datatype) + } else if (op.datatype) { + throw new RangeError(`Unknown datatype ${op.datatype} for value ${op.value}`) + } else if (typeof op.value === 'number') { + if (Number.isInteger(op.value) && op.value <= Number.MAX_SAFE_INTEGER && op.value >= Number.MIN_SAFE_INTEGER) { + encodeInteger(op.value, 0, columns) + } 
else { + // Encode number in 32-bit float if this can be done without loss of precision + const buf32 = new ArrayBuffer(4), view32 = new DataView(buf32) + view32.setFloat32(0, op.value, true) // true means little-endian + if (view32.getFloat32(0, true) === op.value) { + columns.valRaw.appendRawBytes(new Uint8Array(buf32)) + columns.valLen.appendValue(4 << 4 | VALUE_TYPE.IEEE754) + } else { + const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64) + view64.setFloat64(0, op.value, true) // true means little-endian + columns.valRaw.appendRawBytes(new Uint8Array(buf64)) + columns.valLen.appendValue(8 << 4 | VALUE_TYPE.IEEE754) + } + } + } else { + throw new RangeError(`Unsupported value in operation: ${op.value}`) + } +} + +/** + * Given `sizeTag` (an unsigned integer read from a VALUE_LEN column) and `bytes` (a Uint8Array + * read from a VALUE_RAW column, with length `sizeTag >> 4`), this function returns an object of the + * form `{value: value, datatype: datatypeTag}` where `value` is a JavaScript primitive datatype + * corresponding to the value, and `datatypeTag` is a datatype annotation such as 'counter'. 
+ */ +function decodeValue(sizeTag, bytes) { + if (sizeTag === VALUE_TYPE.NULL) { + return {value: null} + } else if (sizeTag === VALUE_TYPE.FALSE) { + return {value: false} + } else if (sizeTag === VALUE_TYPE.TRUE) { + return {value: true} + } else if (sizeTag % 16 === VALUE_TYPE.UTF8) { + return {value: utf8ToString(bytes)} + } else { + if (sizeTag % 16 === VALUE_TYPE.LEB128_UINT) { + return {value: new Decoder(bytes).readUint53()} + } else if (sizeTag % 16 === VALUE_TYPE.LEB128_INT) { + return {value: new Decoder(bytes).readInt53()} + } else if (sizeTag % 16 === VALUE_TYPE.IEEE754) { + const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength) + if (bytes.byteLength === 4) { + return {value: view.getFloat32(0, true)} // true means little-endian + } else if (bytes.byteLength === 8) { + return {value: view.getFloat64(0, true)} + } else { + throw new RangeError(`Invalid length for floating point number: ${bytes.byteLength}`) + } + } else if (sizeTag % 16 === VALUE_TYPE.COUNTER) { + return {value: new Decoder(bytes).readInt53(), datatype: 'counter'} + } else if (sizeTag % 16 === VALUE_TYPE.TIMESTAMP) { + return {value: new Decoder(bytes).readInt53(), datatype: 'timestamp'} + } else { + return {value: bytes, datatype: sizeTag % 16} + } + } +} + +/** + * Reads one value from the column `columns[colIndex]` and interprets it based + * on the column type. `actorIds` is a list of actors that appear in the change; + * `actorIds[0]` is the actorId of the change's author. Mutates the `result` + * object with the value, and returns the number of columns processed (this is 2 + * in the case of a pair of VALUE_LEN and VALUE_RAW columns, which are processed + * in one go). 
+ */ +function decodeValueColumns(columns, colIndex, actorIds, result) { + const { columnId, columnName, decoder } = columns[colIndex] + if (columnId % 8 === COLUMN_TYPE.VALUE_LEN && colIndex + 1 < columns.length && + columns[colIndex + 1].columnId === columnId + 1) { + const sizeTag = decoder.readValue() + const rawValue = columns[colIndex + 1].decoder.readRawBytes(sizeTag >> 4) + const { value, datatype } = decodeValue(sizeTag, rawValue) + result[columnName] = value + if (datatype) result[columnName + '_datatype'] = datatype + return 2 + } else if (columnId % 8 === COLUMN_TYPE.ACTOR_ID) { + const actorNum = decoder.readValue() + if (actorNum === null) { + result[columnName] = null + } else { + if (!actorIds[actorNum]) throw new RangeError(`No actor index ${actorNum}`) + result[columnName] = actorIds[actorNum] + } + } else { + result[columnName] = decoder.readValue() + } + return 1 +} + +/** + * Encodes an array of operations in a set of columns. The operations need to + * be parsed with `parseAllOpIds()` beforehand. If `forDocument` is true, we use + * the column structure of a whole document, otherwise we use the column + * structure for an individual change. Returns an array of `{id, name, encoder}` + * objects. 
+ */ +function encodeOps(ops, forDocument) { + const columns = { + objActor : new RLEEncoder('uint'), + objCtr : new RLEEncoder('uint'), + keyActor : new RLEEncoder('uint'), + keyCtr : new DeltaEncoder(), + keyStr : new RLEEncoder('utf8'), + insert : new BooleanEncoder(), + action : new RLEEncoder('uint'), + valLen : new RLEEncoder('uint'), + valRaw : new Encoder(), + chldActor : new RLEEncoder('uint'), + chldCtr : new DeltaEncoder() + } + + if (forDocument) { + columns.idActor = new RLEEncoder('uint') + columns.idCtr = new DeltaEncoder() + columns.succNum = new RLEEncoder('uint') + columns.succActor = new RLEEncoder('uint') + columns.succCtr = new DeltaEncoder() + } else { + columns.predNum = new RLEEncoder('uint') + columns.predCtr = new DeltaEncoder() + columns.predActor = new RLEEncoder('uint') + } + + for (let op of ops) { + encodeObjectId(op, columns) + encodeOperationKey(op, columns) + columns.insert.appendValue(!!op.insert) + encodeOperationAction(op, columns) + encodeValue(op, columns) + + if (op.child && op.child.counter) { + columns.chldActor.appendValue(op.child.actorNum) + columns.chldCtr.appendValue(op.child.counter) + } else { + columns.chldActor.appendValue(null) + columns.chldCtr.appendValue(null) + } + + if (forDocument) { + columns.idActor.appendValue(op.id.actorNum) + columns.idCtr.appendValue(op.id.counter) + columns.succNum.appendValue(op.succ.length) + op.succ.sort(compareParsedOpIds) + for (let i = 0; i < op.succ.length; i++) { + columns.succActor.appendValue(op.succ[i].actorNum) + columns.succCtr.appendValue(op.succ[i].counter) + } + } else { + columns.predNum.appendValue(op.pred.length) + op.pred.sort(compareParsedOpIds) + for (let i = 0; i < op.pred.length; i++) { + columns.predActor.appendValue(op.pred[i].actorNum) + columns.predCtr.appendValue(op.pred[i].counter) + } + } + } + + let columnList = [] + for (let {columnName, columnId} of forDocument ? 
DOC_OPS_COLUMNS : CHANGE_COLUMNS) { + if (columns[columnName]) columnList.push({id: columnId, name: columnName, encoder: columns[columnName]}) + } + return columnList.sort((a, b) => a.id - b.id) +} + +function expandMultiOps(ops, startOp, actor) { + let opNum = startOp + let expandedOps = [] + for (const op of ops) { + if (op.action === 'set' && op.values && op.insert) { + if (op.pred.length !== 0) throw new RangeError('multi-insert pred must be empty') + let lastElemId = op.elemId + for (const value of op.values) { + expandedOps.push({action: 'set', obj: op.obj, elemId: lastElemId, value, pred: [], insert: true}) + lastElemId = `${opNum}@${actor}` + opNum += 1 + } + } else if (op.action === 'del' && op.multiOp > 1) { + if (op.pred.length !== 1) throw new RangeError('multiOp deletion must have exactly one pred') + const startElemId = parseOpId(op.elemId), startPred = parseOpId(op.pred[0]) + for (let i = 0; i < op.multiOp; i++) { + const elemId = `${startElemId.counter + i}@${startElemId.actorId}` + const pred = [`${startPred.counter + i}@${startPred.actorId}`] + expandedOps.push({action: 'del', obj: op.obj, elemId, pred}) + opNum += 1 + } + } else { + expandedOps.push(op) + opNum += 1 + } + } + return expandedOps +} + +/** + * Takes a change as decoded by `decodeColumns`, and changes it into the form + * expected by the rest of the backend. If `forDocument` is true, we use the op + * structure of a whole document, otherwise we use the op structure for an + * individual change. + */ +function decodeOps(ops, forDocument) { + const newOps = [] + for (let op of ops) { + const obj = (op.objCtr === null) ? '_root' : `${op.objCtr}@${op.objActor}` + const elemId = op.keyStr ? undefined : (op.keyCtr === 0 ? '_head' : `${op.keyCtr}@${op.keyActor}`) + const action = ACTIONS[op.action] || op.action + const newOp = elemId ? 
{obj, elemId, action} : {obj, key: op.keyStr, action} + newOp.insert = !!op.insert + if (ACTIONS[op.action] === 'set' || ACTIONS[op.action] === 'inc') { + newOp.value = op.valLen + if (op.valLen_datatype) newOp.datatype = op.valLen_datatype + } + if (!!op.chldCtr !== !!op.chldActor) { + throw new RangeError(`Mismatched child columns: ${op.chldCtr} and ${op.chldActor}`) + } + if (op.chldCtr !== null) newOp.child = `${op.chldCtr}@${op.chldActor}` + if (forDocument) { + newOp.id = `${op.idCtr}@${op.idActor}` + newOp.succ = op.succNum.map(succ => `${succ.succCtr}@${succ.succActor}`) + checkSortedOpIds(op.succNum.map(succ => ({counter: succ.succCtr, actorId: succ.succActor}))) + } else { + newOp.pred = op.predNum.map(pred => `${pred.predCtr}@${pred.predActor}`) + checkSortedOpIds(op.predNum.map(pred => ({counter: pred.predCtr, actorId: pred.predActor}))) + } + newOps.push(newOp) + } + return newOps +} + +/** + * Throws an exception if the opIds in the given array are not in sorted order. + */ +function checkSortedOpIds(opIds) { + let last = null + for (let opId of opIds) { + if (last && compareParsedOpIds(last, opId) !== -1) { + throw new RangeError('operation IDs are not in ascending order') + } + last = opId + } +} + +function encoderByColumnId(columnId) { + if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { + return new DeltaEncoder() + } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { + return new BooleanEncoder() + } else if ((columnId & 7) === COLUMN_TYPE.STRING_RLE) { + return new RLEEncoder('utf8') + } else if ((columnId & 7) === COLUMN_TYPE.VALUE_RAW) { + return new Encoder() + } else { + return new RLEEncoder('uint') + } +} + +function decoderByColumnId(columnId, buffer) { + if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { + return new DeltaDecoder(buffer) + } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { + return new BooleanDecoder(buffer) + } else if ((columnId & 7) === COLUMN_TYPE.STRING_RLE) { + return new RLEDecoder('utf8', buffer) + } else if 
((columnId & 7) === COLUMN_TYPE.VALUE_RAW) { + return new Decoder(buffer) + } else { + return new RLEDecoder('uint', buffer) + } +} + +function makeDecoders(columns, columnSpec) { + const emptyBuf = new Uint8Array(0) + let decoders = [], columnIndex = 0, specIndex = 0 + + while (columnIndex < columns.length || specIndex < columnSpec.length) { + if (columnIndex === columns.length || + (specIndex < columnSpec.length && columnSpec[specIndex].columnId < columns[columnIndex].columnId)) { + const {columnId, columnName} = columnSpec[specIndex] + decoders.push({columnId, columnName, decoder: decoderByColumnId(columnId, emptyBuf)}) + specIndex++ + } else if (specIndex === columnSpec.length || columns[columnIndex].columnId < columnSpec[specIndex].columnId) { + const {columnId, buffer} = columns[columnIndex] + decoders.push({columnId, decoder: decoderByColumnId(columnId, buffer)}) + columnIndex++ + } else { // columns[columnIndex].columnId === columnSpec[specIndex].columnId + const {columnId, buffer} = columns[columnIndex], {columnName} = columnSpec[specIndex] + decoders.push({columnId, columnName, decoder: decoderByColumnId(columnId, buffer)}) + columnIndex++ + specIndex++ + } + } + return decoders +} + +function decodeColumns(columns, actorIds, columnSpec) { + columns = makeDecoders(columns, columnSpec) + let parsedRows = [] + while (columns.some(col => !col.decoder.done)) { + let row = {}, col = 0 + while (col < columns.length) { + const columnId = columns[col].columnId + let groupId = columnId >> 4, groupCols = 1 + while (col + groupCols < columns.length && columns[col + groupCols].columnId >> 4 === groupId) { + groupCols++ + } + + if (columnId % 8 === COLUMN_TYPE.GROUP_CARD) { + const values = [], count = columns[col].decoder.readValue() + for (let i = 0; i < count; i++) { + let value = {} + for (let colOffset = 1; colOffset < groupCols; colOffset++) { + decodeValueColumns(columns, col + colOffset, actorIds, value) + } + values.push(value) + } + 
row[columns[col].columnName] = values + col += groupCols + } else { + col += decodeValueColumns(columns, col, actorIds, row) + } + } + parsedRows.push(row) + } + return parsedRows +} + +function decodeColumnInfo(decoder) { + // A number that is all 1 bits except for the bit that indicates whether a column is + // deflate-compressed. We ignore this bit when checking whether columns are sorted by ID. + const COLUMN_ID_MASK = (-1 ^ COLUMN_TYPE_DEFLATE) >>> 0 + + let lastColumnId = -1, columns = [], numColumns = decoder.readUint53() + for (let i = 0; i < numColumns; i++) { + const columnId = decoder.readUint53(), bufferLen = decoder.readUint53() + if ((columnId & COLUMN_ID_MASK) <= (lastColumnId & COLUMN_ID_MASK)) { + throw new RangeError('Columns must be in ascending order') + } + lastColumnId = columnId + columns.push({columnId, bufferLen}) + } + return columns +} + +function encodeColumnInfo(encoder, columns) { + const nonEmptyColumns = columns.filter(column => column.encoder.buffer.byteLength > 0) + encoder.appendUint53(nonEmptyColumns.length) + for (let column of nonEmptyColumns) { + encoder.appendUint53(column.id) + encoder.appendUint53(column.encoder.buffer.byteLength) + } +} + +function decodeChangeHeader(decoder) { + const numDeps = decoder.readUint53(), deps = [] + for (let i = 0; i < numDeps; i++) { + deps.push(bytesToHexString(decoder.readRawBytes(32))) + } + let change = { + actor: decoder.readHexString(), + seq: decoder.readUint53(), + startOp: decoder.readUint53(), + time: decoder.readInt53(), + message: decoder.readPrefixedString(), + deps + } + const actorIds = [change.actor], numActorIds = decoder.readUint53() + for (let i = 0; i < numActorIds; i++) actorIds.push(decoder.readHexString()) + change.actorIds = actorIds + return change +} + +/** + * Assembles a chunk of encoded data containing a checksum, headers, and a + * series of encoded columns. Calls `encodeHeaderCallback` with an encoder that + * should be used to add the headers. 
The columns should be given as `columns`. + */ +function encodeContainer(chunkType, encodeContentsCallback) { + const CHECKSUM_SIZE = 4 // checksum is first 4 bytes of SHA-256 hash of the rest of the data + const HEADER_SPACE = MAGIC_BYTES.byteLength + CHECKSUM_SIZE + 1 + 5 // 1 byte type + 5 bytes length + const body = new Encoder() + // Make space for the header at the beginning of the body buffer. We will + // copy the header in here later. This is cheaper than copying the body since + // the body is likely to be much larger than the header. + body.appendRawBytes(new Uint8Array(HEADER_SPACE)) + encodeContentsCallback(body) + + const bodyBuf = body.buffer + const header = new Encoder() + header.appendByte(chunkType) + header.appendUint53(bodyBuf.byteLength - HEADER_SPACE) + + // Compute the hash over chunkType, length, and body + const headerBuf = header.buffer + const sha256 = new Hash() + sha256.update(headerBuf) + sha256.update(bodyBuf.subarray(HEADER_SPACE)) + const hash = sha256.digest(), checksum = hash.subarray(0, CHECKSUM_SIZE) + + // Copy header into the body buffer so that they are contiguous + bodyBuf.set(MAGIC_BYTES, HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE - MAGIC_BYTES.byteLength) + bodyBuf.set(checksum, HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE) + bodyBuf.set(headerBuf, HEADER_SPACE - headerBuf.byteLength) + return {hash, bytes: bodyBuf.subarray(HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE - MAGIC_BYTES.byteLength)} +} + +function decodeContainerHeader(decoder, computeHash) { + if (!equalBytes(decoder.readRawBytes(MAGIC_BYTES.byteLength), MAGIC_BYTES)) { + throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83') + } + const expectedHash = decoder.readRawBytes(4) + const hashStartOffset = decoder.offset + const chunkType = decoder.readByte() + const chunkLength = decoder.readUint53() + const header = {chunkType, chunkLength, chunkData: decoder.readRawBytes(chunkLength)} + + if (computeHash) { + const 
sha256 = new Hash() + sha256.update(decoder.buf.subarray(hashStartOffset, decoder.offset)) + const binaryHash = sha256.digest() + if (!equalBytes(binaryHash.subarray(0, 4), expectedHash)) { + throw new RangeError('checksum does not match data') + } + header.hash = bytesToHexString(binaryHash) + } + return header +} + +/** + * Returns the checksum of a change (bytes 4 to 7) as a 32-bit unsigned integer. + */ +function getChangeChecksum(change) { + if (change[0] !== MAGIC_BYTES[0] || change[1] !== MAGIC_BYTES[1] || + change[2] !== MAGIC_BYTES[2] || change[3] !== MAGIC_BYTES[3]) { + throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83') + } + return ((change[4] << 24) | (change[5] << 16) | (change[6] << 8) | change[7]) >>> 0 +} + +function encodeChange(changeObj) { + const { changes, actorIds } = parseAllOpIds([changeObj], true) + const change = changes[0] + + const { hash, bytes } = encodeContainer(CHUNK_TYPE_CHANGE, encoder => { + if (!Array.isArray(change.deps)) throw new TypeError('deps is not an array') + encoder.appendUint53(change.deps.length) + for (let hash of change.deps.slice().sort()) { + encoder.appendRawBytes(hexStringToBytes(hash)) + } + encoder.appendHexString(change.actor) + encoder.appendUint53(change.seq) + encoder.appendUint53(change.startOp) + encoder.appendInt53(change.time) + encoder.appendPrefixedString(change.message || '') + encoder.appendUint53(actorIds.length - 1) + for (let actor of actorIds.slice(1)) encoder.appendHexString(actor) + + const columns = encodeOps(change.ops, false) + encodeColumnInfo(encoder, columns) + for (let column of columns) encoder.appendRawBytes(column.encoder.buffer) + if (change.extraBytes) encoder.appendRawBytes(change.extraBytes) + }) + + const hexHash = bytesToHexString(hash) + if (changeObj.hash && changeObj.hash !== hexHash) { + throw new RangeError(`Change hash does not match encoding: ${changeObj.hash} != ${hexHash}`) + } + return (bytes.byteLength >= DEFLATE_MIN_SIZE) ? 
deflateChange(bytes) : bytes +} + +function decodeChangeColumns(buffer) { + if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) + const decoder = new Decoder(buffer) + const header = decodeContainerHeader(decoder, true) + const chunkDecoder = new Decoder(header.chunkData) + if (!decoder.done) throw new RangeError('Encoded change has trailing data') + if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + + const change = decodeChangeHeader(chunkDecoder) + const columns = decodeColumnInfo(chunkDecoder) + for (let i = 0; i < columns.length; i++) { + if ((columns[i].columnId & COLUMN_TYPE_DEFLATE) !== 0) { + throw new RangeError('change must not contain deflated columns') + } + columns[i].buffer = chunkDecoder.readRawBytes(columns[i].bufferLen) + } + if (!chunkDecoder.done) { + const restLen = chunkDecoder.buf.byteLength - chunkDecoder.offset + change.extraBytes = chunkDecoder.readRawBytes(restLen) + } + + change.columns = columns + change.hash = header.hash + return change +} + +/** + * Decodes one change in binary format into its JS object representation. + */ +function decodeChange(buffer) { + const change = decodeChangeColumns(buffer) + change.ops = decodeOps(decodeColumns(change.columns, change.actorIds, CHANGE_COLUMNS), false) + delete change.actorIds + delete change.columns + return change +} + +/** + * Decodes the header fields of a change in binary format, but does not decode + * the operations. Saves work when we only need to inspect the headers. Only + * computes the hash of the change if `computeHash` is true. 
+ */ +function decodeChangeMeta(buffer, computeHash) { + if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) + const header = decodeContainerHeader(new Decoder(buffer), computeHash) + if (header.chunkType !== CHUNK_TYPE_CHANGE) { + throw new RangeError('Buffer chunk type is not a change') + } + const meta = decodeChangeHeader(new Decoder(header.chunkData)) + meta.change = buffer + if (computeHash) meta.hash = header.hash + return meta +} + +/** + * Compresses a binary change using DEFLATE. + */ +function deflateChange(buffer) { + const header = decodeContainerHeader(new Decoder(buffer), false) + if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + const compressed = pako.deflateRaw(header.chunkData) + const encoder = new Encoder() + encoder.appendRawBytes(buffer.subarray(0, 8)) // copy MAGIC_BYTES and checksum + encoder.appendByte(CHUNK_TYPE_DEFLATE) + encoder.appendUint53(compressed.byteLength) + encoder.appendRawBytes(compressed) + return encoder.buffer +} + +/** + * Decompresses a binary change that has been compressed with DEFLATE. + */ +function inflateChange(buffer) { + const header = decodeContainerHeader(new Decoder(buffer), false) + if (header.chunkType !== CHUNK_TYPE_DEFLATE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + const decompressed = pako.inflateRaw(header.chunkData) + const encoder = new Encoder() + encoder.appendRawBytes(buffer.subarray(0, 8)) // copy MAGIC_BYTES and checksum + encoder.appendByte(CHUNK_TYPE_CHANGE) + encoder.appendUint53(decompressed.byteLength) + encoder.appendRawBytes(decompressed) + return encoder.buffer +} + +/** + * Takes an Uint8Array that may contain multiple concatenated changes, and + * returns an array of subarrays, each subarray containing one change. 
+ */ +function splitContainers(buffer) { + let decoder = new Decoder(buffer), chunks = [], startOffset = 0 + while (!decoder.done) { + decodeContainerHeader(decoder, false) + chunks.push(buffer.subarray(startOffset, decoder.offset)) + startOffset = decoder.offset + } + return chunks +} + +/** + * Decodes a list of changes from the binary format into JS objects. + * `binaryChanges` is an array of `Uint8Array` objects. + */ +function decodeChanges(binaryChanges) { + let decoded = [] + for (let binaryChange of binaryChanges) { + for (let chunk of splitContainers(binaryChange)) { + if (chunk[8] === CHUNK_TYPE_DOCUMENT) { + decoded = decoded.concat(decodeDocument(chunk)) + } else if (chunk[8] === CHUNK_TYPE_CHANGE || chunk[8] === CHUNK_TYPE_DEFLATE) { + decoded.push(decodeChange(chunk)) + } else { + // ignoring chunk of unknown type + } + } + } + return decoded +} + +function sortOpIds(a, b) { + if (a === b) return 0 + if (a === '_root') return -1 + if (b === '_root') return +1 + const a_ = parseOpId(a), b_ = parseOpId(b) + if (a_.counter < b_.counter) return -1 + if (a_.counter > b_.counter) return +1 + if (a_.actorId < b_.actorId) return -1 + if (a_.actorId > b_.actorId) return +1 + return 0 +} + +function groupDocumentOps(changes) { + let byObjectId = {}, byReference = {}, objectType = {} + for (let change of changes) { + for (let i = 0; i < change.ops.length; i++) { + const op = change.ops[i], opId = `${op.id.counter}@${op.id.actorId}` + const objectId = (op.obj === '_root') ? 
'_root' : `${op.obj.counter}@${op.obj.actorId}` + if (op.action.startsWith('make')) { + objectType[opId] = op.action + if (op.action === 'makeList' || op.action === 'makeText') { + byReference[opId] = {'_head': []} + } + } + + let key + if (objectId === '_root' || objectType[objectId] === 'makeMap' || objectType[objectId] === 'makeTable') { + key = op.key + } else if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { + if (op.insert) { + key = opId + const ref = (op.elemId === '_head') ? '_head' : `${op.elemId.counter}@${op.elemId.actorId}` + byReference[objectId][ref].push(opId) + byReference[objectId][opId] = [] + } else { + key = `${op.elemId.counter}@${op.elemId.actorId}` + } + } else { + throw new RangeError(`Unknown object type for object ${objectId}`) + } + + if (!byObjectId[objectId]) byObjectId[objectId] = {} + if (!byObjectId[objectId][key]) byObjectId[objectId][key] = {} + byObjectId[objectId][key][opId] = op + op.succ = [] + + for (let pred of op.pred) { + const predId = `${pred.counter}@${pred.actorId}` + if (!byObjectId[objectId][key][predId]) { + throw new RangeError(`No predecessor operation ${predId}`) + } + byObjectId[objectId][key][predId].succ.push(op.id) + } + } + } + + let ops = [] + for (let objectId of Object.keys(byObjectId).sort(sortOpIds)) { + let keys = [] + if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { + let stack = ['_head'] + while (stack.length > 0) { + const key = stack.pop() + if (key !== '_head') keys.push(key) + for (let opId of byReference[objectId][key].sort(sortOpIds)) stack.push(opId) + } + } else { + // FIXME JavaScript sorts based on UTF-16 encoding. 
We should change this to use the UTF-8 + // encoding instead (the sort order will be different beyond the basic multilingual plane) + keys = Object.keys(byObjectId[objectId]).sort() + } + + for (let key of keys) { + for (let opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) { + const op = byObjectId[objectId][key][opId] + if (op.action !== 'del') ops.push(op) + } + } + } + return ops +} + +/** + * Takes a set of operations `ops` loaded from an encoded document, and + * reconstructs the changes that they originally came from. + * Does not return anything, only mutates `changes`. + */ +function groupChangeOps(changes, ops) { + let changesByActor = {} // map from actorId to array of changes by that actor + for (let change of changes) { + change.ops = [] + if (!changesByActor[change.actor]) changesByActor[change.actor] = [] + if (change.seq !== changesByActor[change.actor].length + 1) { + throw new RangeError(`Expected seq = ${changesByActor[change.actor].length + 1}, got ${change.seq}`) + } + if (change.seq > 1 && changesByActor[change.actor][change.seq - 2].maxOp > change.maxOp) { + throw new RangeError('maxOp must increase monotonically per actor') + } + changesByActor[change.actor].push(change) + } + + let opsById = {} + for (let op of ops) { + if (op.action === 'del') throw new RangeError('document should not contain del operations') + op.pred = opsById[op.id] ? opsById[op.id].pred : [] + opsById[op.id] = op + for (let succ of op.succ) { + if (!opsById[succ]) { + if (op.elemId) { + const elemId = op.insert ? 
op.id : op.elemId + opsById[succ] = {id: succ, action: 'del', obj: op.obj, elemId, pred: []} + } else { + opsById[succ] = {id: succ, action: 'del', obj: op.obj, key: op.key, pred: []} + } + } + opsById[succ].pred.push(op.id) + } + delete op.succ + } + for (let op of Object.values(opsById)) { + if (op.action === 'del') ops.push(op) + } + + for (let op of ops) { + const { counter, actorId } = parseOpId(op.id) + const actorChanges = changesByActor[actorId] + // Binary search to find the change that should contain this operation + let left = 0, right = actorChanges.length + while (left < right) { + const index = Math.floor((left + right) / 2) + if (actorChanges[index].maxOp < counter) { + left = index + 1 + } else { + right = index + } + } + if (left >= actorChanges.length) { + throw new RangeError(`Operation ID ${op.id} outside of allowed range`) + } + actorChanges[left].ops.push(op) + } + + for (let change of changes) { + change.ops.sort((op1, op2) => sortOpIds(op1.id, op2.id)) + change.startOp = change.maxOp - change.ops.length + 1 + delete change.maxOp + for (let i = 0; i < change.ops.length; i++) { + const op = change.ops[i], expectedId = `${change.startOp + i}@${change.actor}` + if (op.id !== expectedId) { + throw new RangeError(`Expected opId ${expectedId}, got ${op.id}`) + } + delete op.id + } + } +} + +function encodeDocumentChanges(changes) { + const columns = { // see DOCUMENT_COLUMNS + actor : new RLEEncoder('uint'), + seq : new DeltaEncoder(), + maxOp : new DeltaEncoder(), + time : new DeltaEncoder(), + message : new RLEEncoder('utf8'), + depsNum : new RLEEncoder('uint'), + depsIndex : new DeltaEncoder(), + extraLen : new RLEEncoder('uint'), + extraRaw : new Encoder() + } + let indexByHash = {} // map from change hash to its index in the changes array + let heads = {} // change hashes that are not a dependency of any other change + + for (let i = 0; i < changes.length; i++) { + const change = changes[i] + indexByHash[change.hash] = i + heads[change.hash] = 
true + + columns.actor.appendValue(change.actorNum) + columns.seq.appendValue(change.seq) + columns.maxOp.appendValue(change.startOp + change.ops.length - 1) + columns.time.appendValue(change.time) + columns.message.appendValue(change.message) + columns.depsNum.appendValue(change.deps.length) + + for (let dep of change.deps) { + if (typeof indexByHash[dep] !== 'number') { + throw new RangeError(`Unknown dependency hash: ${dep}`) + } + columns.depsIndex.appendValue(indexByHash[dep]) + if (heads[dep]) delete heads[dep] + } + + if (change.extraBytes) { + columns.extraLen.appendValue(change.extraBytes.byteLength << 4 | VALUE_TYPE.BYTES) + columns.extraRaw.appendRawBytes(change.extraBytes) + } else { + columns.extraLen.appendValue(VALUE_TYPE.BYTES) // zero-length byte array + } + } + + let changesColumns = [] + for (let {columnName, columnId} of DOCUMENT_COLUMNS) { + changesColumns.push({id: columnId, name: columnName, encoder: columns[columnName]}) + } + changesColumns.sort((a, b) => a.id - b.id) + return { changesColumns, heads: Object.keys(heads).sort() } +} + +function decodeDocumentChanges(changes, expectedHeads) { + let heads = {} // change hashes that are not a dependency of any other change + for (let i = 0; i < changes.length; i++) { + let change = changes[i] + change.deps = [] + for (let index of change.depsNum.map(d => d.depsIndex)) { + if (!changes[index] || !changes[index].hash) { + throw new RangeError(`No hash for index ${index} while processing index ${i}`) + } + const hash = changes[index].hash + change.deps.push(hash) + if (heads[hash]) delete heads[hash] + } + change.deps.sort() + delete change.depsNum + + if (change.extraLen_datatype !== VALUE_TYPE.BYTES) { + throw new RangeError(`Bad datatype for extra bytes: ${VALUE_TYPE.BYTES}`) + } + change.extraBytes = change.extraLen + delete change.extraLen_datatype + + // Encoding and decoding again to compute the hash of the change + changes[i] = decodeChange(encodeChange(change)) + heads[changes[i].hash] = 
true + } + + const actualHeads = Object.keys(heads).sort() + let headsEqual = (actualHeads.length === expectedHeads.length), i = 0 + while (headsEqual && i < actualHeads.length) { + headsEqual = (actualHeads[i] === expectedHeads[i]) + i++ + } + if (!headsEqual) { + throw new RangeError(`Mismatched heads hashes: expected ${expectedHeads.join(', ')}, got ${actualHeads.join(', ')}`) + } +} + +/** + * Transforms a list of changes into a binary representation of the document state. + */ +function encodeDocument(binaryChanges) { + const { changes, actorIds } = parseAllOpIds(decodeChanges(binaryChanges), false) + const { changesColumns, heads } = encodeDocumentChanges(changes) + const opsColumns = encodeOps(groupDocumentOps(changes), true) + for (let column of changesColumns) deflateColumn(column) + for (let column of opsColumns) deflateColumn(column) + + return encodeContainer(CHUNK_TYPE_DOCUMENT, encoder => { + encoder.appendUint53(actorIds.length) + for (let actor of actorIds) { + encoder.appendHexString(actor) + } + encoder.appendUint53(heads.length) + for (let head of heads.sort()) { + encoder.appendRawBytes(hexStringToBytes(head)) + } + encodeColumnInfo(encoder, changesColumns) + encodeColumnInfo(encoder, opsColumns) + for (let column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) + for (let column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) + }).bytes +} + +function decodeDocumentHeader(buffer) { + const documentDecoder = new Decoder(buffer) + const header = decodeContainerHeader(documentDecoder, true) + const decoder = new Decoder(header.chunkData) + if (!documentDecoder.done) throw new RangeError('Encoded document has trailing data') + if (header.chunkType !== CHUNK_TYPE_DOCUMENT) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + + const actorIds = [], numActors = decoder.readUint53() + for (let i = 0; i < numActors; i++) { + actorIds.push(decoder.readHexString()) + } + const heads = [], numHeads = 
decoder.readUint53() + for (let i = 0; i < numHeads; i++) { + heads.push(bytesToHexString(decoder.readRawBytes(32))) + } + + const changesColumns = decodeColumnInfo(decoder) + const opsColumns = decodeColumnInfo(decoder) + for (let i = 0; i < changesColumns.length; i++) { + changesColumns[i].buffer = decoder.readRawBytes(changesColumns[i].bufferLen) + inflateColumn(changesColumns[i]) + } + for (let i = 0; i < opsColumns.length; i++) { + opsColumns[i].buffer = decoder.readRawBytes(opsColumns[i].bufferLen) + inflateColumn(opsColumns[i]) + } + + const extraBytes = decoder.readRawBytes(decoder.buf.byteLength - decoder.offset) + return { changesColumns, opsColumns, actorIds, heads, extraBytes } +} + +function decodeDocument(buffer) { + const { changesColumns, opsColumns, actorIds, heads } = decodeDocumentHeader(buffer) + const changes = decodeColumns(changesColumns, actorIds, DOCUMENT_COLUMNS) + const ops = decodeOps(decodeColumns(opsColumns, actorIds, DOC_OPS_COLUMNS), true) + groupChangeOps(changes, ops) + decodeDocumentChanges(changes, heads) + return changes +} + +/** + * DEFLATE-compresses the given column if it is large enough to make the compression worthwhile. + */ +function deflateColumn(column) { + if (column.encoder.buffer.byteLength >= DEFLATE_MIN_SIZE) { + column.encoder = {buffer: pako.deflateRaw(column.encoder.buffer)} + column.id |= COLUMN_TYPE_DEFLATE + } +} + +/** + * Decompresses the given column if it is DEFLATE-compressed. + */ +function inflateColumn(column) { + if ((column.columnId & COLUMN_TYPE_DEFLATE) !== 0) { + column.buffer = pako.inflateRaw(column.buffer) + column.columnId ^= COLUMN_TYPE_DEFLATE + } +} + +/** + * Takes all the operations for the same property (i.e. the same key in a map, or the same list + * element) and mutates the object patch to reflect the current value(s) of that property. There + * might be multiple values in the case of a conflict. `objects` is a map from objectId to the + * patch for that object. 
`property` contains `objId`, `key`, a list of `ops`, and `index` (the + * current list index if the object is a list). Returns true if one or more values are present, + * or false if the property has been deleted. + */ +function addPatchProperty(objects, property) { + let values = {}, counter = null + for (let op of property.ops) { + // Apply counters and their increments regardless of the number of successor operations + if (op.actionName === 'set' && op.value.datatype === 'counter') { + if (!counter) counter = {opId: op.opId, value: 0, succ: {}} + counter.value += op.value.value + for (let succId of op.succ) counter.succ[succId] = true + } else if (op.actionName === 'inc') { + if (!counter) throw new RangeError(`inc operation ${op.opId} without a counter`) + counter.value += op.value.value + delete counter.succ[op.opId] + for (let succId of op.succ) counter.succ[succId] = true + + } else if (op.succ.length === 0) { // Ignore any ops that have been overwritten + if (op.actionName.startsWith('make')) { + values[op.opId] = objects[op.opId] + } else if (op.actionName === 'set') { + values[op.opId] = {value: op.value.value, type: 'value'} + if (op.value.datatype) { + values[op.opId].datatype = op.value.datatype + } + } else if (op.actionName === 'link') { + // NB. This assumes that the ID of the child object is greater than the ID of the current + // object. This is true as long as link operations are only used to redo undone make* + // operations, but it will cease to be true once subtree moves are allowed. + if (!op.childId) throw new RangeError(`link operation ${op.opId} without a childId`) + values[op.opId] = objects[op.childId] + } else { + throw new RangeError(`Unexpected action type: ${op.actionName}`) + } + } + } + + // If the counter had any successor operation that was not an increment, that means the counter + // must have been deleted, so we omit it from the patch. 
+ if (counter && Object.keys(counter.succ).length === 0) { + values[counter.opId] = {type: 'value', value: counter.value, datatype: 'counter'} + } + + if (Object.keys(values).length > 0) { + let obj = objects[property.objId] + if (obj.type === 'map' || obj.type === 'table') { + obj.props[property.key] = values + } else if (obj.type === 'list' || obj.type === 'text') { + makeListEdits(obj, values, property.key, property.index) + } + return true + } else { + return false + } +} + +/** + * When constructing a patch to instantiate a loaded document, this function adds the edits to + * insert one list element. Usually there is one value, but in the case of a conflict there may be + * several values. `elemId` is the ID of the list element, and `index` is the list index at which + * the value(s) should be placed. + */ +function makeListEdits(list, values, elemId, index) { + let firstValue = true + const opIds = Object.keys(values).sort((id1, id2) => compareParsedOpIds(parseOpId(id1), parseOpId(id2))) + for (const opId of opIds) { + if (firstValue) { + list.edits.push({action: 'insert', value: values[opId], elemId, opId, index}) + } else { + list.edits.push({action: 'update', value: values[opId], opId, index}) + } + firstValue = false + } +} + +/** + * Recursively walks the patch tree, calling appendEdit on every list edit in order to consense + * consecutive sequences of insertions into multi-inserts. + */ +function condenseEdits(diff) { + if (diff.type === 'list' || diff.type === 'text') { + diff.edits.forEach(e => condenseEdits(e.value)) + let newEdits = diff.edits + diff.edits = [] + for (const edit of newEdits) appendEdit(diff.edits, edit) + } else if (diff.type === 'map' || diff.type === 'table') { + for (const prop of Object.keys(diff.props)) { + for (const opId of Object.keys(diff.props[prop])) { + condenseEdits(diff.props[prop][opId]) + } + } + } +} + +/** + * Appends a list edit operation (insert, update, remove) to an array of existing operations. 
If the + * last existing operation can be extended (as a multi-op), we do that. + */ +function appendEdit(existingEdits, nextEdit) { + if (existingEdits.length === 0) { + existingEdits.push(nextEdit) + return + } + + let lastEdit = existingEdits[existingEdits.length - 1] + if (lastEdit.action === 'insert' && nextEdit.action === 'insert' && + lastEdit.index === nextEdit.index - 1 && + lastEdit.value.type === 'value' && nextEdit.value.type === 'value' && + lastEdit.elemId === lastEdit.opId && nextEdit.elemId === nextEdit.opId && + opIdDelta(lastEdit.elemId, nextEdit.elemId, 1)) { + lastEdit.action = 'multi-insert' + lastEdit.values = [lastEdit.value.value, nextEdit.value.value] + delete lastEdit.value + delete lastEdit.opId + + } else if (lastEdit.action === 'multi-insert' && nextEdit.action === 'insert' && + lastEdit.index + lastEdit.values.length === nextEdit.index && + nextEdit.value.type === 'value' && nextEdit.elemId === nextEdit.opId && + opIdDelta(lastEdit.elemId, nextEdit.elemId, lastEdit.values.length)) { + lastEdit.values.push(nextEdit.value.value) + + } else if (lastEdit.action === 'remove' && nextEdit.action === 'remove' && + lastEdit.index === nextEdit.index) { + lastEdit.count += nextEdit.count + + } else { + existingEdits.push(nextEdit) + } +} + +/** + * Returns true if the two given operation IDs have the same actor ID, and the counter of `id2` is + * exactly `delta` greater than the counter of `id1`. + */ +function opIdDelta(id1, id2, delta = 1) { + const parsed1 = parseOpId(id1), parsed2 = parseOpId(id2) + return parsed1.actorId === parsed2.actorId && parsed1.counter + delta === parsed2.counter +} + +/** + * Parses the document (in compressed binary format) given as `documentBuffer` + * and returns a patch that can be sent to the frontend to instantiate the + * current state of that document. 
+ */ +function constructPatch(documentBuffer) { + const { opsColumns, actorIds } = decodeDocumentHeader(documentBuffer) + const col = makeDecoders(opsColumns, DOC_OPS_COLUMNS).reduce( + (acc, col) => Object.assign(acc, {[col.columnName]: col.decoder}), {}) + + let objects = {_root: {objectId: '_root', type: 'map', props: {}}} + let property = null + + while (!col.idActor.done) { + const opId = `${col.idCtr.readValue()}@${actorIds[col.idActor.readValue()]}` + const action = col.action.readValue(), actionName = ACTIONS[action] + if (action % 2 === 0) { // even-numbered actions are object creation + const type = OBJECT_TYPE[actionName] || 'unknown' + if (type === 'list' || type === 'text') { + objects[opId] = {objectId: opId, type, edits: []} + } else { + objects[opId] = {objectId: opId, type, props: {}} + } + } + + const objActor = col.objActor.readValue(), objCtr = col.objCtr.readValue() + const objId = objActor === null ? '_root' : `${objCtr}@${actorIds[objActor]}` + let obj = objects[objId] + if (!obj) throw new RangeError(`Operation for nonexistent object: ${objId}`) + + const keyActor = col.keyActor.readValue(), keyCtr = col.keyCtr.readValue() + const keyStr = col.keyStr.readValue(), insert = !!col.insert.readValue() + const chldActor = col.chldActor.readValue(), chldCtr = col.chldCtr.readValue() + const childId = chldActor === null ? 
null : `${chldCtr}@${actorIds[chldActor]}` + const sizeTag = col.valLen.readValue() + const rawValue = col.valRaw.readRawBytes(sizeTag >> 4) + const value = decodeValue(sizeTag, rawValue) + const succNum = col.succNum.readValue() + let succ = [] + for (let i = 0; i < succNum; i++) { + succ.push(`${col.succCtr.readValue()}@${actorIds[col.succActor.readValue()]}`) + } + + if (!actionName || obj.type === 'unknown') continue + + let key + if (obj.type === 'list' || obj.type === 'text') { + if (keyCtr === null || (keyCtr === 0 && !insert)) { + throw new RangeError(`Operation ${opId} on ${obj.type} object has no key`) + } + key = insert ? opId : `${keyCtr}@${actorIds[keyActor]}` + } else { + if (keyStr === null) { + throw new RangeError(`Operation ${opId} on ${obj.type} object has no key`) + } + key = keyStr + } + + if (!property || property.objId !== objId || property.key !== key) { + let index = 0 + if (property) { + index = property.index + if (addPatchProperty(objects, property)) index += 1 + if (property.objId !== objId) index = 0 + } + property = {objId, key, index, ops: []} + } + property.ops.push({opId, actionName, value, childId, succ}) + } + + if (property) addPatchProperty(objects, property) + condenseEdits(objects._root) + return objects._root +} + +module.exports = { + COLUMN_TYPE, VALUE_TYPE, ACTIONS, OBJECT_TYPE, DOC_OPS_COLUMNS, CHANGE_COLUMNS, + encoderByColumnId, decoderByColumnId, makeDecoders, decodeValue, + splitContainers, encodeChange, decodeChangeColumns, decodeChange, decodeChangeMeta, decodeChanges, + decodeDocumentHeader, encodeDocument, decodeDocument, + getChangeChecksum, appendEdit, constructPatch +} diff --git a/automerge-wasm/test/helpers/common.js b/automerge-wasm/test/helpers/common.js new file mode 100644 index 00000000..b41cadc8 --- /dev/null +++ b/automerge-wasm/test/helpers/common.js @@ -0,0 +1,46 @@ +function isObject(obj) { + return typeof obj === 'object' && obj !== null +} + +/** + * Returns a shallow copy of the object `obj`. 
Faster than `Object.assign({}, obj)`. + * https://jsperf.com/cloning-large-objects/1 + */ +function copyObject(obj) { + if (!isObject(obj)) return {} + let copy = {} + for (let key of Object.keys(obj)) { + copy[key] = obj[key] + } + return copy +} + +/** + * Takes a string in the form that is used to identify operations (a counter concatenated + * with an actor ID, separated by an `@` sign) and returns an object `{counter, actorId}`. + */ +function parseOpId(opId) { + const match = /^(\d+)@(.*)$/.exec(opId || '') + if (!match) { + throw new RangeError(`Not a valid opId: ${opId}`) + } + return {counter: parseInt(match[1], 10), actorId: match[2]} +} + +/** + * Returns true if the two byte arrays contain the same data, false if not. + */ +function equalBytes(array1, array2) { + if (!(array1 instanceof Uint8Array) || !(array2 instanceof Uint8Array)) { + throw new TypeError('equalBytes can only compare Uint8Arrays') + } + if (array1.byteLength !== array2.byteLength) return false + for (let i = 0; i < array1.byteLength; i++) { + if (array1[i] !== array2[i]) return false + } + return true +} + +module.exports = { + isObject, copyObject, parseOpId, equalBytes +} diff --git a/automerge-wasm/test/helpers/encoding.js b/automerge-wasm/test/helpers/encoding.js new file mode 100644 index 00000000..92b62df6 --- /dev/null +++ b/automerge-wasm/test/helpers/encoding.js @@ -0,0 +1,1209 @@ +/** + * UTF-8 decoding and encoding using API that is supported in Node >= 12 and modern browsers: + * https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder/encode + * https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder/decode + * If you're running in an environment where it's not available, please use a polyfill, such as: + * https://github.com/anonyco/FastestSmallestTextEncoderDecoder + */ +const utf8encoder = new TextEncoder() +const utf8decoder = new TextDecoder('utf-8') + +function stringToUtf8(string) { + return utf8encoder.encode(string) +} + +function utf8ToString(buffer) { 
+ return utf8decoder.decode(buffer) +} + +/** + * Converts a string consisting of hexadecimal digits into an Uint8Array. + */ +function hexStringToBytes(value) { + if (typeof value !== 'string') { + throw new TypeError('value is not a string') + } + if (!/^([0-9a-f][0-9a-f])*$/.test(value)) { + throw new RangeError('value is not hexadecimal') + } + if (value === '') { + return new Uint8Array(0) + } else { + return new Uint8Array(value.match(/../g).map(b => parseInt(b, 16))) + } +} + +const NIBBLE_TO_HEX = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'] +const BYTE_TO_HEX = new Array(256) +for (let i = 0; i < 256; i++) { + BYTE_TO_HEX[i] = `${NIBBLE_TO_HEX[(i >>> 4) & 0xf]}${NIBBLE_TO_HEX[i & 0xf]}`; +} + +/** + * Converts a Uint8Array into the equivalent hexadecimal string. + */ +function bytesToHexString(bytes) { + let hex = '', len = bytes.byteLength + for (let i = 0; i < len; i++) { + hex += BYTE_TO_HEX[bytes[i]] + } + return hex +} + +/** + * Wrapper around an Uint8Array that allows values to be appended to the buffer, + * and that automatically grows the buffer when space runs out. + */ +class Encoder { + constructor() { + this.buf = new Uint8Array(16) + this.offset = 0 + } + + /** + * Returns the byte array containing the encoded data. + */ + get buffer() { + this.finish() + return this.buf.subarray(0, this.offset) + } + + /** + * Reallocates the encoder's buffer to be bigger. + */ + grow(minSize = 0) { + let newSize = this.buf.byteLength * 4 + while (newSize < minSize) newSize *= 2 + const newBuf = new Uint8Array(newSize) + newBuf.set(this.buf, 0) + this.buf = newBuf + return this + } + + /** + * Appends one byte (0 to 255) to the buffer. 
+ */ + appendByte(value) { + if (this.offset >= this.buf.byteLength) this.grow() + this.buf[this.offset] = value + this.offset += 1 + } + + /** + * Encodes a 32-bit nonnegative integer in a variable number of bytes using + * the LEB128 encoding scheme (https://en.wikipedia.org/wiki/LEB128) and + * appends it to the buffer. Returns the number of bytes written. + */ + appendUint32(value) { + if (!Number.isInteger(value)) throw new RangeError('value is not an integer') + if (value < 0 || value > 0xffffffff) throw new RangeError('number out of range') + + const numBytes = Math.max(1, Math.ceil((32 - Math.clz32(value)) / 7)) + if (this.offset + numBytes > this.buf.byteLength) this.grow() + + for (let i = 0; i < numBytes; i++) { + this.buf[this.offset + i] = (value & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) + value >>>= 7 // zero-filling right shift + } + this.offset += numBytes + return numBytes + } + + /** + * Encodes a 32-bit signed integer in a variable number of bytes using the + * LEB128 encoding scheme (https://en.wikipedia.org/wiki/LEB128) and appends + * it to the buffer. Returns the number of bytes written. + */ + appendInt32(value) { + if (!Number.isInteger(value)) throw new RangeError('value is not an integer') + if (value < -0x80000000 || value > 0x7fffffff) throw new RangeError('number out of range') + + const numBytes = Math.ceil((33 - Math.clz32(value >= 0 ? value : -value - 1)) / 7) + if (this.offset + numBytes > this.buf.byteLength) this.grow() + + for (let i = 0; i < numBytes; i++) { + this.buf[this.offset + i] = (value & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) + value >>= 7 // sign-propagating right shift + } + this.offset += numBytes + return numBytes + } + + /** + * Encodes a nonnegative integer in a variable number of bytes using the LEB128 + * encoding scheme, up to the maximum size of integers supported by JavaScript + * (53 bits). 
+ */ + appendUint53(value) { + if (!Number.isInteger(value)) throw new RangeError('value is not an integer') + if (value < 0 || value > Number.MAX_SAFE_INTEGER) { + throw new RangeError('number out of range') + } + const high32 = Math.floor(value / 0x100000000) + const low32 = (value & 0xffffffff) >>> 0 // right shift to interpret as unsigned + return this.appendUint64(high32, low32) + } + + /** + * Encodes a signed integer in a variable number of bytes using the LEB128 + * encoding scheme, up to the maximum size of integers supported by JavaScript + * (53 bits). + */ + appendInt53(value) { + if (!Number.isInteger(value)) throw new RangeError('value is not an integer') + if (value < Number.MIN_SAFE_INTEGER || value > Number.MAX_SAFE_INTEGER) { + throw new RangeError('number out of range') + } + const high32 = Math.floor(value / 0x100000000) + const low32 = (value & 0xffffffff) >>> 0 // right shift to interpret as unsigned + return this.appendInt64(high32, low32) + } + + /** + * Encodes a 64-bit nonnegative integer in a variable number of bytes using + * the LEB128 encoding scheme, and appends it to the buffer. The number is + * given as two 32-bit halves since JavaScript cannot accurately represent + * integers with more than 53 bits in a single variable. + */ + appendUint64(high32, low32) { + if (!Number.isInteger(high32) || !Number.isInteger(low32)) { + throw new RangeError('value is not an integer') + } + if (high32 < 0 || high32 > 0xffffffff || low32 < 0 || low32 > 0xffffffff) { + throw new RangeError('number out of range') + } + if (high32 === 0) return this.appendUint32(low32) + + const numBytes = Math.ceil((64 - Math.clz32(high32)) / 7) + if (this.offset + numBytes > this.buf.byteLength) this.grow() + for (let i = 0; i < 4; i++) { + this.buf[this.offset + i] = (low32 & 0x7f) | 0x80 + low32 >>>= 7 // zero-filling right shift + } + this.buf[this.offset + 4] = (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 
0x00 : 0x80) + high32 >>>= 3 + for (let i = 5; i < numBytes; i++) { + this.buf[this.offset + i] = (high32 & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) + high32 >>>= 7 + } + this.offset += numBytes + return numBytes + } + + /** + * Encodes a 64-bit signed integer in a variable number of bytes using the + * LEB128 encoding scheme, and appends it to the buffer. The number is given + * as two 32-bit halves since JavaScript cannot accurately represent integers + * with more than 53 bits in a single variable. The sign of the 64-bit + * number is determined by the sign of the `high32` half; the sign of the + * `low32` half is ignored. + */ + appendInt64(high32, low32) { + if (!Number.isInteger(high32) || !Number.isInteger(low32)) { + throw new RangeError('value is not an integer') + } + if (high32 < -0x80000000 || high32 > 0x7fffffff || low32 < -0x80000000 || low32 > 0xffffffff) { + throw new RangeError('number out of range') + } + low32 >>>= 0 // interpret as unsigned + if (high32 === 0 && low32 <= 0x7fffffff) return this.appendInt32(low32) + if (high32 === -1 && low32 >= 0x80000000) return this.appendInt32(low32 - 0x100000000) + + const numBytes = Math.ceil((65 - Math.clz32(high32 >= 0 ? high32 : -high32 - 1)) / 7) + if (this.offset + numBytes > this.buf.byteLength) this.grow() + for (let i = 0; i < 4; i++) { + this.buf[this.offset + i] = (low32 & 0x7f) | 0x80 + low32 >>>= 7 // zero-filling right shift + } + this.buf[this.offset + 4] = (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80) + high32 >>= 3 // sign-propagating right shift + for (let i = 5; i < numBytes; i++) { + this.buf[this.offset + i] = (high32 & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) + high32 >>= 7 + } + this.offset += numBytes + return numBytes + } + + /** + * Appends the contents of byte buffer `data` to the buffer. Returns the + * number of bytes appended. 
+ */ + appendRawBytes(data) { + if (this.offset + data.byteLength > this.buf.byteLength) { + this.grow(this.offset + data.byteLength) + } + this.buf.set(data, this.offset) + this.offset += data.byteLength + return data.byteLength + } + + /** + * Appends a UTF-8 string to the buffer, without any metadata. Returns the + * number of bytes appended. + */ + appendRawString(value) { + if (typeof value !== 'string') throw new TypeError('value is not a string') + return this.appendRawBytes(stringToUtf8(value)) + } + + /** + * Appends the contents of byte buffer `data` to the buffer, prefixed with the + * number of bytes in the buffer (as a LEB128-encoded unsigned integer). + */ + appendPrefixedBytes(data) { + this.appendUint53(data.byteLength) + this.appendRawBytes(data) + return this + } + + /** + * Appends a UTF-8 string to the buffer, prefixed with its length in bytes + * (where the length is encoded as an unsigned LEB128 integer). + */ + appendPrefixedString(value) { + if (typeof value !== 'string') throw new TypeError('value is not a string') + this.appendPrefixedBytes(stringToUtf8(value)) + return this + } + + /** + * Takes a value, which must be a string consisting only of hexadecimal + * digits, maps it to a byte array, and appends it to the buffer, prefixed + * with its length in bytes. + */ + appendHexString(value) { + this.appendPrefixedBytes(hexStringToBytes(value)) + return this + } + + /** + * Flushes any unwritten data to the buffer. Call this before reading from + * the buffer constructed by this Encoder. + */ + finish() { + } +} + +/** + * Counterpart to Encoder. Wraps a Uint8Array buffer with a cursor indicating + * the current decoding position, and allows values to be incrementally read by + * decoding the bytes at the current position. 
/**
 * Companion to Encoder: wraps a Uint8Array and reads back values written by
 * the corresponding Encoder methods — LEB128 integers in signed and unsigned
 * flavours, raw byte ranges, and length-prefixed UTF-8 strings.
 */
class Decoder {
  constructor(buffer) {
    if (!(buffer instanceof Uint8Array)) {
      throw new TypeError(`Not a byte array: ${buffer}`)
    }
    this.buf = buffer
    this.offset = 0
  }

  /** Returns true when the cursor has consumed the entire buffer. */
  get done() {
    return this.offset === this.buf.byteLength
  }

  /** Rewinds the cursor so that the next read starts at the beginning again. */
  reset() {
    this.offset = 0
  }

  /**
   * Advances the cursor by `bytes` positions without decoding anything.
   * Throws a RangeError if that would run past the end of the buffer.
   */
  skip(bytes) {
    if (this.offset + bytes > this.buf.byteLength) {
      throw new RangeError('cannot skip beyond end of buffer')
    }
    this.offset += bytes
  }

  /** Consumes and returns a single byte (0 to 255). */
  readByte() {
    this.offset += 1
    return this.buf[this.offset - 1]
  }

  /**
   * Decodes a LEB128 unsigned integer at the cursor. Throws a RangeError if
   * the value doesn't fit in a 32-bit unsigned int, or if the buffer ends
   * before the number is complete.
   */
  readUint32() {
    let value = 0, shift = 0
    while (this.offset < this.buf.byteLength) {
      const byte = this.buf[this.offset]
      // A 32-bit value needs at most 5 groups of 7 bits; on the 5th byte only
      // the low 4 bits may be set, and the continuation bit must be clear.
      if (shift === 28 && (byte & 0xf0) !== 0) {
        throw new RangeError('number out of range')
      }
      value = (value | (byte & 0x7f) << shift) >>> 0 // >>> 0 keeps the result unsigned
      shift += 7
      this.offset++
      if ((byte & 0x80) === 0) return value
    }
    throw new RangeError('buffer ended with incomplete number')
  }

  /**
   * Decodes a LEB128 signed integer at the cursor. Throws a RangeError if
   * the value doesn't fit in a 32-bit signed int, or if the buffer ends
   * before the number is complete.
   */
  readInt32() {
    let value = 0, shift = 0
    while (this.offset < this.buf.byteLength) {
      const byte = this.buf[this.offset]
      if (shift === 28 &&
          ((byte & 0x80) !== 0 || // a 6th byte would follow
           ((byte & 0x40) === 0 && (byte & 0x38) !== 0) || // positive value > 0x7fffffff
           ((byte & 0x40) !== 0 && (byte & 0x38) !== 0x38))) { // negative value < -0x80000000
        throw new RangeError('number out of range')
      }
      value |= (byte & 0x7f) << shift
      shift += 7
      this.offset++
      if ((byte & 0x80) === 0) {
        // If the final group's sign bit is set and there is room left in the
        // 32-bit word, fill the remaining high bits with ones (sign-extend).
        if ((byte & 0x40) === 0 || shift > 28) return value
        return value | (-1 << shift)
      }
    }
    throw new RangeError('buffer ended with incomplete number')
  }

  /**
   * Decodes a LEB128 unsigned integer that may be as large as 2^53 - 1, the
   * biggest integer JavaScript can represent exactly. Throws a RangeError
   * for anything bigger.
   */
  readUint53() {
    const { low32, high32 } = this.readUint64()
    if (high32 < 0 || high32 > 0x1fffff) {
      throw new RangeError('number out of range')
    }
    return high32 * 0x100000000 + low32
  }

  /**
   * Decodes a LEB128 signed integer in the safe-integer range
   * -(2^53 - 1) to 2^53 - 1. Throws a RangeError outside that range.
   */
  readInt53() {
    const { low32, high32 } = this.readInt64()
    if (high32 < -0x200000 || (high32 === -0x200000 && low32 === 0) || high32 > 0x1fffff) {
      throw new RangeError('number out of range')
    }
    return high32 * 0x100000000 + low32
  }

  /**
   * Decodes a LEB128 unsigned integer of up to 64 bits, returned as two
   * 32-bit halves `{high32, low32}` (a single JS number cannot hold 64 bits
   * exactly). Throws a RangeError on overflow or a truncated encoding.
   */
  readUint64() {
    let low32 = 0, high32 = 0, shift = 0
    // First phase: the groups that contribute to the low 32 bits.
    while (this.offset < this.buf.byteLength && shift <= 28) {
      const byte = this.buf[this.offset]
      low32 = (low32 | (byte & 0x7f) << shift) >>> 0 // >>> 0 keeps the result unsigned
      if (shift === 28) {
        high32 = (byte & 0x70) >>> 4 // the 5th group straddles the 32-bit boundary
      }
      shift += 7
      this.offset++
      if ((byte & 0x80) === 0) return { high32, low32 }
    }

    // Second phase: remaining groups go into the high half, starting at bit 3
    // because the straddling group already supplied bits 0-2 of high32.
    shift = 3
    while (this.offset < this.buf.byteLength) {
      const byte = this.buf[this.offset]
      if (shift === 31 && (byte & 0xfe) !== 0) { // more than 10 bytes, or value > 2^64 - 1
        throw new RangeError('number out of range')
      }
      high32 = (high32 | (byte & 0x7f) << shift) >>> 0
      shift += 7
      this.offset++
      if ((byte & 0x80) === 0) return { high32, low32 }
    }
    throw new RangeError('buffer ended with incomplete number')
  }

  /**
   * Decodes a LEB128 signed integer of up to 64 bits, returned as two 32-bit
   * halves `{high32, low32}`. `low32` is always non-negative; the sign of
   * `high32` carries the sign of the whole number. Throws a RangeError on
   * overflow or a truncated encoding.
   */
  readInt64() {
    let low32 = 0, high32 = 0, shift = 0
    while (this.offset < this.buf.byteLength && shift <= 28) {
      const byte = this.buf[this.offset]
      low32 = (low32 | (byte & 0x7f) << shift) >>> 0 // >>> 0 keeps the result unsigned
      if (shift === 28) {
        high32 = (byte & 0x70) >>> 4 // the 5th group straddles the 32-bit boundary
      }
      shift += 7
      this.offset++
      if ((byte & 0x80) === 0) {
        if ((byte & 0x40) !== 0) { // sign-extend a negative number
          if (shift < 32) low32 = (low32 | (-1 << shift)) >>> 0
          high32 |= -1 << Math.max(shift - 32, 0)
        }
        return { high32, low32 }
      }
    }

    shift = 3 // bits 0-2 of high32 were filled by the straddling 5th group
    while (this.offset < this.buf.byteLength) {
      const byte = this.buf[this.offset]
      // On the 10th byte only two encodings are legal: all seven value bits
      // clear (positive number) or all seven set (negative number).
      if (shift === 31 && byte !== 0 && byte !== 0x7f) {
        throw new RangeError('number out of range')
      }
      high32 |= (byte & 0x7f) << shift
      shift += 7
      this.offset++
      if ((byte & 0x80) === 0) {
        if ((byte & 0x40) !== 0 && shift < 32) { // sign-extend a negative number
          high32 |= -1 << shift
        }
        return { high32, low32 }
      }
    }
    throw new RangeError('buffer ended with incomplete number')
  }

  /**
   * Returns a view onto the next `length` bytes (no copy is made) and moves
   * the cursor past them. Throws a RangeError if fewer bytes remain.
   */
  readRawBytes(length) {
    const start = this.offset
    if (start + length > this.buf.byteLength) {
      throw new RangeError('subarray exceeds buffer size')
    }
    this.offset += length
    return this.buf.subarray(start, this.offset)
  }

  /** Reads `length` bytes and decodes them as a UTF-8 string. */
  readRawString(length) {
    return utf8ToString(this.readRawBytes(length))
  }

  /** Reads a byte array preceded by its LEB128-encoded length in bytes. */
  readPrefixedBytes() {
    return this.readRawBytes(this.readUint53())
  }

  /** Reads a UTF-8 string preceded by its LEB128-encoded byte length. */
  readPrefixedString() {
    return utf8ToString(this.readPrefixedBytes())
  }

  /**
   * Reads a length-prefixed byte array and returns its content converted to
   * a hexadecimal string.
   */
  readHexString() {
    return bytesToHexString(this.readPrefixedBytes())
  }
}
/**
 * Run-length compresses a sequence of values (nulls allowed). The element
 * type — 'int', 'uint', or 'utf8' — is chosen at construction time.
 *
 * The output is a series of records, each starting with a signed LEB128
 * repetition count:
 *   - count n > 0: the following value occurs n times in the sequence.
 *   - count -n < 0: the next n values form a literal run, copied into the
 *     sequence verbatim with no further interpretation.
 *   - count 0: an unsigned LEB128 integer follows, giving the number of
 *     consecutive nulls at this position in the sequence.
 * Records repeat until the end of the buffer.
 */
class RLEEncoder extends Encoder {
  constructor(type) {
    super()
    this.type = type
    this.state = 'empty'
    this.lastValue = undefined
    this.count = 0
    this.literal = []
  }

  /**
   * Appends `value` to the sequence, `repetitions` times (default once).
   */
  appendValue(value, repetitions = 1) {
    this._appendValue(value, repetitions)
  }

  /**
   * Same as appendValue(), but never overridden by subclasses (DeltaEncoder
   * overrides appendValue() and still needs the raw behaviour internally).
   */
  _appendValue(value, repetitions = 1) {
    if (repetitions <= 0) return
    switch (this.state) {
      case 'empty':
        this.state = (value === null ? 'nulls' : (repetitions === 1 ? 'loneValue' : 'repetition'))
        this.lastValue = value
        this.count = repetitions
        break

      case 'loneValue':
        if (value === null) {
          this.flush()
          this.state = 'nulls'
          this.count = repetitions
        } else if (value === this.lastValue) {
          this.state = 'repetition'
          this.count = 1 + repetitions
        } else if (repetitions > 1) {
          this.flush()
          this.state = 'repetition'
          this.count = repetitions
          this.lastValue = value
        } else {
          // Two different single values in a row: start building a literal run
          this.state = 'literal'
          this.literal = [this.lastValue]
          this.lastValue = value
        }
        break

      case 'repetition':
        if (value === null) {
          this.flush()
          this.state = 'nulls'
          this.count = repetitions
        } else if (value === this.lastValue) {
          this.count += repetitions
        } else if (repetitions > 1) {
          this.flush()
          this.state = 'repetition'
          this.count = repetitions
          this.lastValue = value
        } else {
          this.flush()
          this.state = 'loneValue'
          this.lastValue = value
        }
        break

      case 'literal':
        if (value === null) {
          this.literal.push(this.lastValue)
          this.flush()
          this.state = 'nulls'
          this.count = repetitions
        } else if (value === this.lastValue) {
          // The pending lastValue is not part of the literal; it starts a repetition
          this.flush()
          this.state = 'repetition'
          this.count = 1 + repetitions
        } else if (repetitions > 1) {
          this.literal.push(this.lastValue)
          this.flush()
          this.state = 'repetition'
          this.count = repetitions
          this.lastValue = value
        } else {
          this.literal.push(this.lastValue)
          this.lastValue = value
        }
        break

      case 'nulls':
        if (value === null) {
          this.count += repetitions
        } else if (repetitions > 1) {
          this.flush()
          this.state = 'repetition'
          this.count = repetitions
          this.lastValue = value
        } else {
          this.flush()
          this.state = 'loneValue'
          this.lastValue = value
        }
        break
    }
  }

  /**
   * Copies values from the RLEDecoder `decoder` into this encoder. The `options` object may
   * contain the following keys:
   * - `count`: The number of values to copy. If not specified, copies all remaining values.
   * - `sumValues`: If true, the function computes the sum of all numeric values as they are
   *   copied (null values are counted as zero), and returns that number.
   * - `sumShift`: If set, values are shifted right by `sumShift` bits before adding to the sum.
   *
   * Returns an object of the form `{nonNullValues, sum}` where `nonNullValues` is the number of
   * non-null values copied, and `sum` is the sum (only if the `sumValues` option is set).
   */
  copyFrom(decoder, options = {}) {
    const { count, sumValues, sumShift } = options
    if (!(decoder instanceof RLEDecoder) || (decoder.type !== this.type)) {
      throw new TypeError('incompatible type of decoder')
    }
    let remaining = (typeof count === 'number' ? count : Number.MAX_SAFE_INTEGER)
    let nonNullValues = 0, sum = 0
    if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`)
    if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues}

    // Copy a value so that we have a well-defined starting state. NB: when super.copyFrom() is
    // called by the DeltaEncoder subclass, the following calls to readValue() and appendValue()
    // refer to the overridden methods, while later readRecord(), readRawValue() and _appendValue()
    // calls refer to the non-overridden RLEDecoder/RLEEncoder methods.
    let firstValue = decoder.readValue()
    if (firstValue === null) {
      const numNulls = Math.min(decoder.count + 1, remaining)
      remaining -= numNulls
      decoder.count -= numNulls - 1
      this.appendValue(null, numNulls)
      if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`)
      if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues}
      firstValue = decoder.readValue()
      if (firstValue === null) throw new RangeError('null run must be followed by non-null value')
    }
    this.appendValue(firstValue)
    remaining--
    nonNullValues++
    if (sumValues) sum += (sumShift ? (firstValue >>> sumShift) : firstValue)
    if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`)
    if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues}

    // From here on, copy at the record level without expanding repetitions
    let firstRun = (decoder.count > 0)
    while (remaining > 0 && !decoder.done) {
      if (!firstRun) decoder.readRecord()
      const numValues = Math.min(decoder.count, remaining)
      decoder.count -= numValues

      if (decoder.state === 'literal') {
        nonNullValues += numValues
        for (let i = 0; i < numValues; i++) {
          if (decoder.done) throw new RangeError('incomplete literal')
          const value = decoder.readRawValue()
          if (value === decoder.lastValue) throw new RangeError('Repetition of values is not allowed in literal')
          decoder.lastValue = value
          this._appendValue(value)
          if (sumValues) sum += (sumShift ? (value >>> sumShift) : value)
        }
      } else if (decoder.state === 'repetition') {
        nonNullValues += numValues
        if (sumValues) sum += numValues * (sumShift ? (decoder.lastValue >>> sumShift) : decoder.lastValue)
        const value = decoder.lastValue
        this._appendValue(value)
        if (numValues > 1) {
          // Append the value twice, then bump the counter to cover the rest of the run
          this._appendValue(value)
          if (this.state !== 'repetition') throw new RangeError(`Unexpected state ${this.state}`)
          this.count += numValues - 2
        }
      } else if (decoder.state === 'nulls') {
        this._appendValue(null)
        if (this.state !== 'nulls') throw new RangeError(`Unexpected state ${this.state}`)
        this.count += numValues - 1
      }

      firstRun = false
      remaining -= numValues
    }
    if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`)
    return sumValues ? {nonNullValues, sum} : {nonNullValues}
  }

  /**
   * Private method, do not call from outside the class.
   * Writes the currently buffered run as one record and resets the state.
   */
  flush() {
    switch (this.state) {
      case 'loneValue':
        this.appendInt32(-1) // a literal of length 1
        this.appendRawValue(this.lastValue)
        break
      case 'repetition':
        this.appendInt53(this.count)
        this.appendRawValue(this.lastValue)
        break
      case 'literal':
        this.appendInt53(-this.literal.length)
        for (let v of this.literal) this.appendRawValue(v)
        break
      case 'nulls':
        this.appendInt32(0)
        this.appendUint53(this.count)
        break
    }
    this.state = 'empty'
  }

  /**
   * Private method, do not call from outside the class.
   * Writes a single value in the datatype configured on construction.
   */
  appendRawValue(value) {
    if (this.type === 'int') {
      this.appendInt53(value)
    } else if (this.type === 'uint') {
      this.appendUint53(value)
    } else if (this.type === 'utf8') {
      this.appendPrefixedString(value)
    } else {
      throw new RangeError(`Unknown RLEEncoder datatype: ${this.type}`)
    }
  }

  /**
   * Flushes any unwritten data to the buffer. Call this before reading from
   * the buffer constructed by this Encoder.
   */
  finish() {
    if (this.state === 'literal') this.literal.push(this.lastValue)
    // Don't write anything if the only values we have seen are nulls
    if (this.state !== 'nulls' || this.offset > 0) this.flush()
  }
}
/**
 * Counterpart to RLEEncoder: reads values from an RLE-compressed sequence,
 * expanding repetitions and null runs as required.
 */
class RLEDecoder extends Decoder {
  constructor(type, buffer) {
    super(buffer)
    this.type = type
    this.lastValue = undefined
    this.count = 0
    this.state = undefined
  }

  /**
   * Returns false if there is still data to be read at the current decoding
   * position, and true if we are at the end of the buffer.
   */
  get done() {
    return (this.count === 0) && (this.offset === this.buf.byteLength)
  }

  /**
   * Resets the cursor position, so that the next read goes back to the
   * beginning of the buffer.
   */
  reset() {
    this.offset = 0
    this.lastValue = undefined
    this.count = 0
    this.state = undefined
  }

  /**
   * Returns the next value (or null) in the sequence.
   */
  readValue() {
    if (this.done) return null
    if (this.count === 0) this.readRecord()
    this.count -= 1
    if (this.state !== 'literal') return this.lastValue
    const value = this.readRawValue()
    if (value === this.lastValue) throw new RangeError('Repetition of values is not allowed in literal')
    this.lastValue = value
    return value
  }

  /**
   * Discards the next `numSkip` values in the sequence.
   */
  skipValues(numSkip) {
    while (numSkip > 0 && !this.done) {
      if (this.count === 0) {
        this.count = this.readInt53()
        if (this.count > 0) {
          // For a repetition we only need the value if part of the run survives
          // the skip; otherwise skip over it and leave lastValue undefined.
          if (this.count <= numSkip) {
            this.skipRawValues(1)
            this.lastValue = undefined
          } else {
            this.lastValue = this.readRawValue()
          }
          this.state = 'repetition'
        } else if (this.count < 0) {
          this.count = -this.count
          this.state = 'literal'
        } else { // this.count == 0
          this.count = this.readUint53()
          this.lastValue = null
          this.state = 'nulls'
        }
      }

      const consume = Math.min(numSkip, this.count)
      if (this.state === 'literal') this.skipRawValues(consume)
      numSkip -= consume
      this.count -= consume
    }
  }

  /**
   * Private method, do not call from outside the class.
   * Reads a repetition count from the buffer and sets up the state appropriately.
   */
  readRecord() {
    this.count = this.readInt53()
    if (this.count > 1) {
      const value = this.readRawValue()
      if ((this.state === 'repetition' || this.state === 'literal') && this.lastValue === value) {
        throw new RangeError('Successive repetitions with the same value are not allowed')
      }
      this.state = 'repetition'
      this.lastValue = value
    } else if (this.count === 1) {
      throw new RangeError('Repetition count of 1 is not allowed, use a literal instead')
    } else if (this.count < 0) {
      this.count = -this.count
      if (this.state === 'literal') throw new RangeError('Successive literals are not allowed')
      this.state = 'literal'
    } else { // this.count == 0
      if (this.state === 'nulls') throw new RangeError('Successive null runs are not allowed')
      this.count = this.readUint53()
      if (this.count === 0) throw new RangeError('Zero-length null runs are not allowed')
      this.lastValue = null
      this.state = 'nulls'
    }
  }

  /**
   * Private method, do not call from outside the class.
   * Reads one value of the datatype configured on construction.
   */
  readRawValue() {
    if (this.type === 'int') {
      return this.readInt53()
    } else if (this.type === 'uint') {
      return this.readUint53()
    } else if (this.type === 'utf8') {
      return this.readPrefixedString()
    } else {
      throw new RangeError(`Unknown RLEDecoder datatype: ${this.type}`)
    }
  }

  /**
   * Private method, do not call from outside the class.
   * Skips over `num` values of the datatype configured on construction.
   */
  skipRawValues(num) {
    if (this.type === 'utf8') {
      // Strings are length-prefixed, so skip the prefix plus that many bytes
      for (let i = 0; i < num; i++) this.skip(this.readUint53())
    } else {
      // LEB128 numbers end at the first byte whose continuation bit is clear
      while (num > 0 && this.offset < this.buf.byteLength) {
        if ((this.buf[this.offset] & 0x80) === 0) num--
        this.offset++
      }
      if (num > 0) throw new RangeError('cannot skip beyond end of buffer')
    }
  }
}
/**
 * A variant of RLEEncoder: instead of storing the values passed to
 * appendValue() directly, it stores the first value as-is and every
 * subsequent value as the difference from its predecessor. Sequences of
 * consecutively incrementing values therefore become runs of deltas equal
 * to 1, which the underlying run-length encoding compresses very well.
 *
 * Null values are also allowed, as with RLEEncoder.
 */
class DeltaEncoder extends RLEEncoder {
  constructor() {
    super('int')
    this.absoluteValue = 0
  }

  /**
   * Appends a new integer value to the sequence. If `repetitions` is given, the value is repeated
   * `repetitions` times.
   */
  appendValue(value, repetitions = 1) {
    if (repetitions <= 0) return
    if (typeof value === 'number') {
      // Store the delta; repeats of the same absolute value are deltas of zero
      super.appendValue(value - this.absoluteValue, 1)
      this.absoluteValue = value
      if (repetitions > 1) super.appendValue(0, repetitions - 1)
    } else {
      super.appendValue(value, repetitions)
    }
  }

  /**
   * Copies values from the DeltaDecoder `decoder` into this encoder. The `options` object may
   * contain the key `count`, indicating the number of values to copy. If not specified, copies
   * all remaining values in the decoder.
   */
  copyFrom(decoder, options = {}) {
    if (options.sumValues) {
      throw new RangeError('unsupported options for DeltaEncoder.copyFrom()')
    }
    if (!(decoder instanceof DeltaDecoder)) {
      throw new TypeError('incompatible type of decoder')
    }

    let remaining = options.count
    if (remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${remaining} values`)
    if (remaining === 0 || decoder.done) return

    // Copy any null values, and the first non-null value, so that appendValue() computes the
    // difference between the encoder's last value and the decoder's first (absolute) value.
    let value = decoder.readValue(), nulls = 0
    this.appendValue(value)
    if (value === null) {
      nulls = decoder.count + 1
      if (remaining !== undefined && remaining < nulls) nulls = remaining
      decoder.count -= nulls - 1
      this.count += nulls - 1
      if (remaining > nulls && decoder.done) throw new RangeError(`cannot copy ${remaining} values`)
      if (remaining === nulls || decoder.done) return

      // The next value read is certain to be non-null because we're not at the end of the decoder,
      // and a run of nulls must be followed by a run of non-nulls.
      if (decoder.count === 0) this.appendValue(decoder.readValue())
    }

    // Once we have the first value, the subsequent relative values can be copied verbatim without
    // any further processing. Note that the first value copied by super.copyFrom() is an absolute
    // value, while subsequent values are relative. Thus, the sum of all of the (non-null) copied
    // values must equal the absolute value of the final element copied.
    if (remaining !== undefined) remaining -= nulls + 1
    const { nonNullValues, sum } = super.copyFrom(decoder, {count: remaining, sumValues: true})
    if (nonNullValues > 0) {
      this.absoluteValue = sum
      decoder.absoluteValue = sum
    }
  }
}
+ if (remaining !== undefined) remaining -= nulls + 1 + const { nonNullValues, sum } = super.copyFrom(decoder, {count: remaining, sumValues: true}) + if (nonNullValues > 0) { + this.absoluteValue = sum + decoder.absoluteValue = sum + } + } +} + +/** + * Counterpart to DeltaEncoder: reads values from a delta-compressed sequence of + * numbers (may include null values). + */ +class DeltaDecoder extends RLEDecoder { + constructor(buffer) { + super('int', buffer) + this.absoluteValue = 0 + } + + /** + * Resets the cursor position, so that the next read goes back to the + * beginning of the buffer. + */ + reset() { + this.offset = 0 + this.lastValue = undefined + this.count = 0 + this.state = undefined + this.absoluteValue = 0 + } + + /** + * Returns the next integer (or null) value in the sequence. + */ + readValue() { + const value = super.readValue() + if (value === null) return null + this.absoluteValue += value + return this.absoluteValue + } + + /** + * Discards the next `numSkip` values in the sequence. + */ + skipValues(numSkip) { + while (numSkip > 0 && !this.done) { + if (this.count === 0) this.readRecord() + const consume = Math.min(numSkip, this.count) + if (this.state === 'literal') { + for (let i = 0; i < consume; i++) { + this.lastValue = this.readRawValue() + this.absoluteValue += this.lastValue + } + } else if (this.state === 'repetition') { + this.absoluteValue += consume * this.lastValue + } + numSkip -= consume + this.count -= consume + } + } +} + +/** + * Encodes a sequence of boolean values by mapping it to a sequence of integers: + * the number of false values, followed by the number of true values, followed + * by the number of false values, and so on. Each number is encoded as a LEB128 + * unsigned integer. This encoding is a bit like RLEEncoder, except that we + * only encode the repetition count but not the actual value, since the values + * just alternate between false and true (starting with false). 
+ */ +class BooleanEncoder extends Encoder { + constructor() { + super() + this.lastValue = false + this.count = 0 + } + + /** + * Appends a new value to the sequence. If `repetitions` is given, the value is repeated + * `repetitions` times. + */ + appendValue(value, repetitions = 1) { + if (value !== false && value !== true) { + throw new RangeError(`Unsupported value for BooleanEncoder: ${value}`) + } + if (repetitions <= 0) return + if (this.lastValue === value) { + this.count += repetitions + } else { + this.appendUint53(this.count) + this.lastValue = value + this.count = repetitions + } + } + + /** + * Copies values from the BooleanDecoder `decoder` into this encoder. The `options` object may + * contain the key `count`, indicating the number of values to copy. If not specified, copies + * all remaining values in the decoder. + */ + copyFrom(decoder, options = {}) { + if (!(decoder instanceof BooleanDecoder)) { + throw new TypeError('incompatible type of decoder') + } + + const { count } = options + let remaining = (typeof count === 'number' ? 
/**
 * Counterpart to BooleanEncoder: reads boolean values from a sequence of
 * run lengths that alternate between false and true.
 */
class BooleanDecoder extends Decoder {
  constructor(buffer) {
    super(buffer)
    this.lastValue = true // is negated the first time we read a count
    this.firstRun = true
    this.count = 0
  }

  /**
   * Returns false if there is still data to be read at the current decoding
   * position, and true if we are at the end of the buffer.
   */
  get done() {
    return (this.count === 0) && (this.offset === this.buf.byteLength)
  }

  /**
   * Resets the cursor position, so that the next read goes back to the
   * beginning of the buffer.
   */
  reset() {
    this.offset = 0
    this.lastValue = true
    this.firstRun = true
    this.count = 0
  }

  /**
   * Returns the next value in the sequence.
   */
  readValue() {
    if (this.done) return false
    while (this.count === 0) {
      this.count = this.readUint53()
      this.lastValue = !this.lastValue
      // Only the very first run may legitimately have length zero (a
      // sequence that starts with true)
      if (this.count === 0 && !this.firstRun) {
        throw new RangeError('Zero-length runs are not allowed')
      }
      this.firstRun = false
    }
    this.count -= 1
    return this.lastValue
  }

  /**
   * Discards the next `numSkip` values in the sequence.
   */
  skipValues(numSkip) {
    while (numSkip > 0 && !this.done) {
      if (this.count === 0) {
        this.count = this.readUint53()
        this.lastValue = !this.lastValue
        if (this.count === 0) throw new RangeError('Zero-length runs are not allowed')
      }
      const consumed = Math.min(numSkip, this.count)
      this.count -= consumed
      numSkip -= consumed
    }
  }
}
https://arxiv.org/abs/2012.00472 + * + * The protocol assumes that every time a node successfully syncs with another node, it remembers + * the current heads (as returned by `Backend.getHeads()`) after the last sync with that node. The + * next time we try to sync with the same node, we start from the assumption that the other node's + * document version is no older than the outcome of the last sync, so we only need to exchange any + * changes that are more recent than the last sync. This assumption may not be true if the other + * node did not correctly persist its state (perhaps it crashed before writing the result of the + * last sync to disk), and we fall back to sending the entire document in this case. + */ + +//const Backend = require('./backend') +const Backend = {} //require('./backend') +const { hexStringToBytes, bytesToHexString, Encoder, Decoder } = require('./encoding') +const { decodeChangeMeta } = require('./columnar') +const { copyObject } = require('./common') + +const HASH_SIZE = 32 // 256 bits = 32 bytes +const MESSAGE_TYPE_SYNC = 0x42 // first byte of a sync message, for identification +const PEER_STATE_TYPE = 0x43 // first byte of an encoded peer state, for identification + +// These constants correspond to a 1% false positive rate. The values can be changed without +// breaking compatibility of the network protocol, since the parameters used for a particular +// Bloom filter are encoded in the wire format. +const BITS_PER_ENTRY = 10, NUM_PROBES = 7 + +/** + * A Bloom filter implementation that can be serialised to a byte array for transmission + * over a network. The entries that are added are assumed to already be SHA-256 hashes, + * so this implementation does not perform its own hashing. 
/**
 * A Bloom filter that can be serialised to a byte array for transmission over
 * a network. Entries are expected to already be SHA-256 hashes (hex-encoded
 * strings), so no additional hashing is performed by this class.
 */
class BloomFilter {
  constructor (arg) {
    if (Array.isArray(arg)) {
      // arg is an array of SHA-256 hashes in hexadecimal encoding
      this.numEntries = arg.length
      this.numBitsPerEntry = BITS_PER_ENTRY
      this.numProbes = NUM_PROBES
      this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8))
      for (let hash of arg) this.addHash(hash)
    } else if (arg instanceof Uint8Array) {
      // arg is the serialised form produced by the `bytes` getter
      if (arg.byteLength === 0) {
        this.numEntries = 0
        this.numBitsPerEntry = 0
        this.numProbes = 0
        this.bits = arg
      } else {
        const decoder = new Decoder(arg)
        this.numEntries = decoder.readUint32()
        this.numBitsPerEntry = decoder.readUint32()
        this.numProbes = decoder.readUint32()
        this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8))
      }
    } else {
      throw new TypeError('invalid argument')
    }
  }

  /**
   * Returns the Bloom filter state, encoded as a byte array. The filter
   * parameters are included so the receiver can reconstruct it exactly.
   */
  get bytes() {
    if (this.numEntries === 0) return new Uint8Array(0)
    const encoder = new Encoder()
    encoder.appendUint32(this.numEntries)
    encoder.appendUint32(this.numBitsPerEntry)
    encoder.appendUint32(this.numProbes)
    encoder.appendRawBytes(this.bits)
    return encoder.buffer
  }

  /**
   * Given a SHA-256 hash (as hex string), returns an array of probe indexes indicating which bits
   * in the Bloom filter need to be tested or set for this particular entry. We do this by
   * interpreting the first 12 bytes of the hash as three little-endian 32-bit unsigned integers,
   * and then using triple hashing to compute the probe indexes. The algorithm comes from:
   *
   * Peter C. Dillinger and Panagiotis Manolios. Bloom Filters in Probabilistic Verification.
   * 5th International Conference on Formal Methods in Computer-Aided Design (FMCAD), November 2004.
   * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf
   */
  getProbes(hash) {
    const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength
    if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`)
    // on the next three lines, the right shift means interpret value as unsigned
    let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2]  << 16 | hashBytes[3]  << 24) >>> 0) % modulo
    let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6]  << 16 | hashBytes[7]  << 24) >>> 0) % modulo
    let z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo
    const probes = [x]
    for (let i = 1; i < this.numProbes; i++) {
      x = (x + y) % modulo
      y = (y + z) % modulo
      probes.push(x)
    }
    return probes
  }

  /**
   * Sets the Bloom filter bits corresponding to a given SHA-256 hash (given as hex string).
   */
  addHash(hash) {
    for (let probe of this.getProbes(hash)) {
      this.bits[probe >>> 3] |= 1 << (probe & 7)
    }
  }

  /**
   * Tests whether a given SHA-256 hash (given as hex string) is contained in the Bloom filter.
   * May return a false positive, but never a false negative.
   */
  containsHash(hash) {
    if (this.numEntries === 0) return false
    for (let probe of this.getProbes(hash)) {
      if ((this.bits[probe >>> 3] & (1 << (probe & 7))) === 0) {
        return false
      }
    }
    return true
  }
}
+ */ +function encodeHashes(encoder, hashes) { + if (!Array.isArray(hashes)) throw new TypeError('hashes must be an array') + encoder.appendUint32(hashes.length) + for (let i = 0; i < hashes.length; i++) { + if (i > 0 && hashes[i - 1] >= hashes[i]) throw new RangeError('hashes must be sorted') + const bytes = hexStringToBytes(hashes[i]) + if (bytes.byteLength !== HASH_SIZE) throw new TypeError('heads hashes must be 256 bits') + encoder.appendRawBytes(bytes) + } +} + +/** + * Decodes a byte array in the format returned by encodeHashes(), and returns its content as an + * array of hex strings. + */ +function decodeHashes(decoder) { + let length = decoder.readUint32(), hashes = [] + for (let i = 0; i < length; i++) { + hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE))) + } + return hashes +} + +/** + * Takes a sync message of the form `{heads, need, have, changes}` and encodes it as a byte array for + * transmission. + */ +function encodeSyncMessage(message) { + const encoder = new Encoder() + encoder.appendByte(MESSAGE_TYPE_SYNC) + encodeHashes(encoder, message.heads) + encodeHashes(encoder, message.need) + encoder.appendUint32(message.have.length) + for (let have of message.have) { + encodeHashes(encoder, have.lastSync) + encoder.appendPrefixedBytes(have.bloom) + } + encoder.appendUint32(message.changes.length) + for (let change of message.changes) { + encoder.appendPrefixedBytes(change) + } + return encoder.buffer +} + +/** + * Takes a binary-encoded sync message and decodes it into the form `{heads, need, have, changes}`. 
+ */ +function decodeSyncMessage(bytes) { + const decoder = new Decoder(bytes) + const messageType = decoder.readByte() + if (messageType !== MESSAGE_TYPE_SYNC) { + throw new RangeError(`Unexpected message type: ${messageType}`) + } + const heads = decodeHashes(decoder) + const need = decodeHashes(decoder) + const haveCount = decoder.readUint32() + let message = {heads, need, have: [], changes: []} + for (let i = 0; i < haveCount; i++) { + const lastSync = decodeHashes(decoder) + const bloom = decoder.readPrefixedBytes(decoder) + message.have.push({lastSync, bloom}) + } + const changeCount = decoder.readUint32() + for (let i = 0; i < changeCount; i++) { + const change = decoder.readPrefixedBytes() + message.changes.push(change) + } + // Ignore any trailing bytes -- they can be used for extensions by future versions of the protocol + return message +} + +/** + * Takes a SyncState and encodes as a byte array those parts of the state that should persist across + * an application restart or disconnect and reconnect. The ephemeral parts of the state that should + * be cleared on reconnect are not encoded. + */ +function encodeSyncState(syncState) { + const encoder = new Encoder() + encoder.appendByte(PEER_STATE_TYPE) + encodeHashes(encoder, syncState.sharedHeads) + return encoder.buffer +} + +/** + * Takes a persisted peer state as encoded by `encodeSyncState` and decodes it into a SyncState + * object. The parts of the peer state that were not encoded are initialised with default values. + */ +function decodeSyncState(bytes) { + const decoder = new Decoder(bytes) + const recordType = decoder.readByte() + if (recordType !== PEER_STATE_TYPE) { + throw new RangeError(`Unexpected record type: ${recordType}`) + } + const sharedHeads = decodeHashes(decoder) + return Object.assign(initSyncState(), { sharedHeads }) +} + +/** + * Constructs a Bloom filter containing all changes that are not one of the hashes in + * `lastSync` or its transitive dependencies. 
In other words, the filter contains those + * changes that have been applied since the version identified by `lastSync`. Returns + * an object of the form `{lastSync, bloom}` as required for the `have` field of a sync + * message. + */ +function makeBloomFilter(backend, lastSync) { + const newChanges = Backend.getChanges(backend, lastSync) + const hashes = newChanges.map(change => decodeChangeMeta(change, true).hash) + return {lastSync, bloom: new BloomFilter(hashes).bytes} +} + +/** + * Call this function when a sync message is received from another node. The `message` argument + * needs to already have been decoded using `decodeSyncMessage()`. This function determines the + * changes that we need to send to the other node in response. Returns an array of changes (as + * byte arrays). + */ +function getChangesToSend(backend, have, need) { + if (have.length === 0) { + return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined) + } + + let lastSyncHashes = {}, bloomFilters = [] + for (let h of have) { + for (let hash of h.lastSync) lastSyncHashes[hash] = true + bloomFilters.push(new BloomFilter(h.bloom)) + } + + // Get all changes that were added since the last sync + const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)) + .map(change => decodeChangeMeta(change, true)) + + let changeHashes = {}, dependents = {}, hashesToSend = {} + for (let change of changes) { + changeHashes[change.hash] = true + + // For each change, make a list of changes that depend on it + for (let dep of change.deps) { + if (!dependents[dep]) dependents[dep] = [] + dependents[dep].push(change.hash) + } + + // Exclude any change hashes contained in one or more Bloom filters + if (bloomFilters.every(bloom => !bloom.containsHash(change.hash))) { + hashesToSend[change.hash] = true + } + } + + // Include any changes that depend on a Bloom-negative change + let stack = Object.keys(hashesToSend) + while (stack.length > 0) { + const hash = 
stack.pop() + if (dependents[hash]) { + for (let dep of dependents[hash]) { + if (!hashesToSend[dep]) { + hashesToSend[dep] = true + stack.push(dep) + } + } + } + } + + // Include any explicitly requested changes + let changesToSend = [] + for (let hash of need) { + hashesToSend[hash] = true + if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()? + const change = Backend.getChangeByHash(backend, hash) + if (change) changesToSend.push(change) + } + } + + // Return changes in the order they were returned by getMissingChanges() + for (let change of changes) { + if (hashesToSend[change.hash]) changesToSend.push(change.change) + } + return changesToSend +} + +function initSyncState() { + return { + sharedHeads: [], + lastSentHeads: [], + theirHeads: null, + theirNeed: null, + theirHave: null, + sentHashes: {}, + } +} + +function compareArrays(a, b) { + return (a.length === b.length) && a.every((v, i) => v === b[i]) +} + +/** + * Given a backend and what we believe to be the state of our peer, generate a message which tells + * them about we have and includes any changes we believe they need + */ +function generateSyncMessage(backend, syncState) { + if (!backend) { + throw new Error("generateSyncMessage called with no Automerge document") + } + if (!syncState) { + throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()") + } + + let { sharedHeads, lastSentHeads, theirHeads, theirNeed, theirHave, sentHashes } = syncState + const ourHeads = Backend.getHeads(backend) + + // Hashes to explicitly request from the remote peer: any missing dependencies of unapplied + // changes, and any of the remote peer's heads that we don't know about + const ourNeed = Backend.getMissingDeps(backend, theirHeads || []) + + // There are two reasons why ourNeed may be nonempty: 1. we might be missing dependencies due to + // Bloom filter false positives; 2. 
we might be missing heads that the other peer mentioned
+  // because they (intentionally) only sent us a subset of changes. In case 1, we leave the `have`
+  // field of the message empty because we just want to fill in the missing dependencies for now.
+  // In case 2, or if ourNeed is empty, we send a Bloom filter to request any unsent changes.
+  let ourHave = []
+  if (!theirHeads || ourNeed.every(hash => theirHeads.includes(hash))) {
+    ourHave = [makeBloomFilter(backend, sharedHeads)]
+  }
+
+  // Fall back to a full re-sync if the sender's last sync state includes hashes
+  // that we don't know. This could happen if we crashed after the last sync and
+  // failed to persist changes that the other node already sent us.
+  if (theirHave && theirHave.length > 0) {
+    const lastSync = theirHave[0].lastSync
+    if (!lastSync.every(hash => Backend.getChangeByHash(backend, hash))) {
+      // we need to queue them to send us a fresh sync message, the one they sent is unintelligible so we don't know what they need
+      const resetMsg = {heads: ourHeads, need: [], have: [{ lastSync: [], bloom: new Uint8Array(0) }], changes: []}
+      return [syncState, encodeSyncMessage(resetMsg)]
+    }
+  }
+
+  // XXX: we should limit ourselves to only sending a subset of all the messages, probably limited by a total message size
+  // these changes should ideally be RLE encoded but we haven't implemented that yet.
+  let changesToSend = Array.isArray(theirHave) && Array.isArray(theirNeed) ? getChangesToSend(backend, theirHave, theirNeed) : []
+
+  // If the heads are equal, we're in sync and don't need to do anything further
+  const headsUnchanged = Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads)
+  const headsEqual = Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads)
+  if (headsUnchanged && headsEqual && changesToSend.length === 0) {
+    // no need to send a sync message if we know we're synced!
+ return [syncState, null] + } + + // TODO: this recomputes the SHA-256 hash of each change; we should restructure this to avoid the + // unnecessary recomputation + changesToSend = changesToSend.filter(change => !sentHashes[decodeChangeMeta(change, true).hash]) + + // Regular response to a sync message: send any changes that the other node + // doesn't have. We leave the "have" field empty because the previous message + // generated by `syncStart` already indicated what changes we have. + const syncMessage = {heads: ourHeads, have: ourHave, need: ourNeed, changes: changesToSend} + if (changesToSend.length > 0) { + sentHashes = copyObject(sentHashes) + for (const change of changesToSend) { + sentHashes[decodeChangeMeta(change, true).hash] = true + } + } + + syncState = Object.assign({}, syncState, {lastSentHeads: ourHeads, sentHashes}) + return [syncState, encodeSyncMessage(syncMessage)] +} + +/** + * Computes the heads that we share with a peer after we have just received some changes from that + * peer and applied them. This may not be sufficient to bring our heads in sync with the other + * peer's heads, since they may have only sent us a subset of their outstanding changes. + * + * `myOldHeads` are the local heads before the most recent changes were applied, `myNewHeads` are + * the local heads after those changes were applied, and `ourOldSharedHeads` is the previous set of + * shared heads. Applying the changes will have replaced some heads with others, but some heads may + * have remained unchanged (because they are for branches on which no changes have been added). Any + * such unchanged heads remain in the sharedHeads. Any sharedHeads that were replaced by applying + * changes are also replaced as sharedHeads. This is safe because if we received some changes from + * another peer, that means that peer had those changes, and therefore we now both know about them. 
+ */
+function advanceHeads(myOldHeads, myNewHeads, ourOldSharedHeads) {
+  const newHeads = myNewHeads.filter((head) => !myOldHeads.includes(head))
+  const commonHeads = ourOldSharedHeads.filter((head) => myNewHeads.includes(head))
+  const advancedHeads = [...new Set([...newHeads, ...commonHeads])].sort()
+  return advancedHeads
+}
+
+
+/**
+ * Given a backend, a message and the state of our peer, apply any changes, update what
+ * we believe about the peer, and (if there were applied changes) produce a patch for the frontend
+ */
+function receiveSyncMessage(backend, oldSyncState, binaryMessage) {
+  if (!backend) {
+    throw new Error("generateSyncMessage called with no Automerge document")
+  }
+  if (!oldSyncState) {
+    throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()")
+  }
+
+  let { sharedHeads, lastSentHeads, sentHashes } = oldSyncState, patch = null
+  const message = decodeSyncMessage(binaryMessage)
+  const beforeHeads = Backend.getHeads(backend)
+
+  // If we received changes, we try to apply them to the document. There may still be missing
+  // dependencies due to Bloom filter false positives, in which case the backend will enqueue the
+  // changes without applying them. The set of changes may also be incomplete if the sender decided
+  // to break a large set of changes into chunks.
+  if (message.changes.length > 0) {
+    [backend, patch] = Backend.applyChanges(backend, message.changes)
+    sharedHeads = advanceHeads(beforeHeads, Backend.getHeads(backend), sharedHeads)
+  }
+
+  // If heads are equal, indicate we don't need to send a response message
+  if (message.changes.length === 0 && compareArrays(message.heads, beforeHeads)) {
+    lastSentHeads = message.heads
+  }
+
+  // If all of the remote heads are known to us, that means either our heads are equal, or we are
+  // ahead of the remote peer. In this case, take the remote heads to be our shared heads.
+ const knownHeads = message.heads.filter(head => Backend.getChangeByHash(backend, head)) + if (knownHeads.length === message.heads.length) { + sharedHeads = message.heads + // If the remote peer has lost all its data, reset our state to perform a full resync + if (message.heads.length === 0) { + lastSentHeads = [] + sentHashes = [] + } + } else { + // If some remote heads are unknown to us, we add all the remote heads we know to + // sharedHeads, but don't remove anything from sharedHeads. This might cause sharedHeads to + // contain some redundant hashes (where one hash is actually a transitive dependency of + // another), but this will be cleared up as soon as we know all the remote heads. + sharedHeads = [...new Set(knownHeads.concat(sharedHeads))].sort() + } + + const syncState = { + sharedHeads, // what we have in common to generate an efficient bloom filter + lastSentHeads, + theirHave: message.have, // the information we need to calculate the changes they need + theirHeads: message.heads, + theirNeed: message.need, + sentHashes + } + return [backend, syncState, patch] +} + +module.exports = { + receiveSyncMessage, generateSyncMessage, + encodeSyncMessage, decodeSyncMessage, + initSyncState, encodeSyncState, decodeSyncState, + BloomFilter // BloomFilter is a private API, exported only for testing purposes +} diff --git a/automerge-wasm/test/test.js b/automerge-wasm/test/test.js index 325b35ae..c9020ca7 100644 --- a/automerge-wasm/test/test.js +++ b/automerge-wasm/test/test.js @@ -1,8 +1,9 @@ const assert = require('assert') const util = require('util') +const { BloomFilter } = require('./helpers/sync') const Automerge = require('..') -const { MAP, LIST, TEXT } = Automerge +const { MAP, LIST, TEXT, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState }= Automerge // str to uint8array function en(str) { @@ -13,6 +14,26 @@ function de(bytes) { return new TextDecoder('utf8').decode(bytes); } +function sync(a, b, aSyncState = initSyncState(), 
bSyncState = initSyncState()) { + const MAX_ITER = 10 + let aToBmsg = null, bToAmsg = null, i = 0 + do { + aToBmsg = a.generateSyncMessage(aSyncState) + bToAmsg = b.generateSyncMessage(bSyncState) + + if (aToBmsg) { + b.receiveSyncMessage(bSyncState, aToBmsg) + } + if (bToAmsg) { + a.receiveSyncMessage(aSyncState, bToAmsg) + } + + if (i++ > MAX_ITER) { + throw new Error(`Did not synchronize within ${MAX_ITER} iterations`) + } + } while (aToBmsg || bToAmsg) +} + describe('Automerge', () => { describe('basics', () => { it('should init clone and free', () => { @@ -48,6 +69,8 @@ describe('Automerge', () => { doc.set(root, "number4", 5.5, "f64") doc.set(root, "number5", 5.5, "int") doc.set(root, "bool", true) + doc.set(root, "time1", 1000, "timestamp") + doc.set(root, "time2", new Date(1001)) result = doc.value(root,"hello") assert.deepEqual(result,["str","world"]) @@ -74,6 +97,13 @@ describe('Automerge', () => { result = doc.value(root,"bool") assert.deepEqual(result,["boolean",false]) + + result = doc.value(root,"time1") + assert.deepEqual(result,["timestamp",new Date(1000)]) + + result = doc.value(root,"time2") + assert.deepEqual(result,["timestamp",new Date(1001)]) + doc.free() }) @@ -128,6 +158,27 @@ describe('Automerge', () => { doc.free() }) + it('lists have insert, set, splice, and push ops', () => { + let doc = Automerge.init() + let root = "_root" + + let submap = doc.set(root, "letters", LIST) + doc.insert(submap, 0, "a"); + doc.insert(submap, 0, "b"); + assert.deepEqual(doc.toJS(), { letters: ["b", "a" ] }) + doc.push(submap, "c"); + assert.deepEqual(doc.toJS(), { letters: ["b", "a", "c" ] }) + doc.push(submap, 3, "timestamp"); + assert.deepEqual(doc.toJS(), { letters: ["b", "a", "c", new Date(3) ] }) + doc.splice(submap, 1, 1, ["d","e","f"]); + assert.deepEqual(doc.toJS(), { letters: ["b", "d", "e", "f", "c", new Date(3) ] }) + doc.set(submap, 0, "z"); + assert.deepEqual(doc.toJS(), { letters: ["z", "d", "e", "f", "c", new Date(3) ] }) + 
assert.deepEqual(doc.length(submap),6) + + doc.free() + }) + it('should be able delete non-existant props', () => { let doc = Automerge.init() @@ -344,6 +395,947 @@ describe('Automerge', () => { doc1.free() doc2.free() }) + }) + describe('sync', () => { + it('should send a sync message implying no local data', () => { + let doc = Automerge.init() + let s1 = initSyncState() + let m1 = doc.generateSyncMessage(s1) + const message = decodeSyncMessage(m1) + assert.deepStrictEqual(message.heads, []) + assert.deepStrictEqual(message.need, []) + assert.deepStrictEqual(message.have.length, 1) + assert.deepStrictEqual(message.have[0].lastSync, []) + assert.deepStrictEqual(message.have[0].bloom.byteLength, 0) + assert.deepStrictEqual(message.changes, []) + }) + it('should not reply if we have no data as well', () => { + let n1 = Automerge.init(), n2 = Automerge.init() + let s1 = initSyncState(), s2 = initSyncState() + let m1 = n1.generateSyncMessage(s1) + n2.receiveSyncMessage(s2, m1) + let m2 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(m2, null) + }) + + it('repos with equal heads do not need a reply message', () => { + let n1 = Automerge.init(), n2 = Automerge.init() + let s1 = initSyncState(), s2 = initSyncState() + + // make two nodes with the same changes + let list = n1.set("_root","n", LIST) + n1.commit("",0) + for (let i = 0; i < 10; i++) { + n1.insert(list,i,i) + n1.commit("",0) + } + n2.applyChanges(n1.getChanges([])) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + + // generate a naive sync message + let m1 = n1.generateSyncMessage(s1) + assert.deepStrictEqual(s1.lastSentHeads, n1.getHeads()) + + // heads are equal so this message should be null + n2.receiveSyncMessage(s2, m1) + let m2 = n2.generateSyncMessage(s2) + assert.strictEqual(m2, null) + }) + + it('n1 should offer all changes to n2 when starting from nothing', () => { + let n1 = Automerge.init(), n2 = Automerge.init() + + // make changes for n1 that n2 should request + let list = 
n1.set("_root","n",LIST) + n1.commit("",0) + for (let i = 0; i < 10; i++) { + n1.insert(list, i, i) + n1.commit("",0) + } + + assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + sync(n1, n2) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should sync peers where one has commits the other does not', () => { + let n1 = Automerge.init(), n2 = Automerge.init() + + // make changes for n1 that n2 should request + let list = n1.set("_root","n",LIST) + n1.commit("",0) + for (let i = 0; i < 10; i++) { + n1.insert(list,i,i) + n1.commit("",0) + } + + assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + sync(n1, n2) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should work with prior sync state', () => { + // create & synchronize two nodes + let n1 = Automerge.init(), n2 = Automerge.init() + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 5; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + // modify the first node further + for (let i = 5; i < 10; i++) { + n1.set("_root", "x", i) + n1.commit("",0) + } + + assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should not generate messages once synced', () => { + // create & synchronize two nodes + let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') + let s1 = initSyncState(), s2 = initSyncState() + + let message, patch + for (let i = 0; i < 5; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + for (let i = 0; i < 5; i++) { + n2.set("_root","y",i) + n2.commit("",0) + } + + // n1 reports what it has + message = n1.generateSyncMessage(s1) + + // n2 receives that message and sends changes along with what it has + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) + //assert.deepStrictEqual(patch, null) // no changes arrived + + // n1 receives the changes and replies with 
the changes it now knows n2 needs + n1.receiveSyncMessage(s1, message) + message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) + + // n2 applies the changes and sends confirmation ending the exchange + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + + // n1 receives the message and has nothing more to say + n1.receiveSyncMessage(s1, message) + message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(message, null) + //assert.deepStrictEqual(patch, null) // no changes arrived + + // n2 also has nothing left to say + message = n2.generateSyncMessage(s2) + assert.deepStrictEqual(message, null) + }) + + it('should allow simultaneous messages during synchronization', () => { + // create & synchronize two nodes + let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 5; i++) { + n1.set("_root", "x", i) + n1.commit("",0) + } + for (let i = 0; i < 5; i++) { + n2.set("_root","y", i) + n2.commit("",0) + } + + const head1 = n1.getHeads()[0], head2 = n2.getHeads()[0] + + // both sides report what they have but have no shared peer state + let msg1to2, msg2to1 + msg1to2 = n1.generateSyncMessage(s1) + msg2to1 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) + assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0) + assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) + assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0) + + // n1 and n2 receives that message and update sync state but make no patch + n1.receiveSyncMessage(s1, msg2to1) + n2.receiveSyncMessage(s2, msg1to2) + + // now both reply with their local changes the other lacks + // (standard warning that 1% of the time this will result in a "need" message) + msg1to2 = n1.generateSyncMessage(s1) + 
assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 5) + msg2to1 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) + + // both should now apply the changes and update the frontend + n1.receiveSyncMessage(s1, msg2to1) + assert.deepStrictEqual(n1.getMissingDeps(), []) + //assert.notDeepStrictEqual(patch1, null) + assert.deepStrictEqual(n1.toJS(), {x: 4, y: 4}) + + n2.receiveSyncMessage(s2, msg1to2) + assert.deepStrictEqual(n2.getMissingDeps(), []) + //assert.notDeepStrictEqual(patch2, null) + assert.deepStrictEqual(n2.toJS(), {x: 4, y: 4}) + + // The response acknowledges the changes received, and sends no further changes + msg1to2 = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) + msg2to1 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) + + // After receiving acknowledgements, their shared heads should be equal + n1.receiveSyncMessage(s1, msg2to1) + n2.receiveSyncMessage(s2, msg1to2) + assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort()) + assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort()) + //assert.deepStrictEqual(patch1, null) + //assert.deepStrictEqual(patch2, null) + + // We're in sync, no more messages required + msg1to2 = n1.generateSyncMessage(s1) + msg2to1 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(msg1to2, null) + assert.deepStrictEqual(msg2to1, null) + + // If we make one more change, and start another sync, its lastSync should be updated + n1.set("_root","x",5) + msg1to2 = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()) + }) + + it('should assume sent changes were recieved until we hear otherwise', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState(), message = null + + let items = n1.set("_root", "items", LIST) + 
n1.commit("",0) + + sync(n1, n2, s1, s2) + + n1.push(items, "x") + n1.commit("",0) + message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + + n1.push(items, "y") + n1.commit("",0) + message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + + n1.push(items, "z") + n1.commit("",0) + + message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + }) + + it('should work regardless of who initiates the exchange', () => { + // create & synchronize two nodes + let n1 = Automerge.init(), n2 = Automerge.init() + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 5; i++) { + n1.set("_root", "x", i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + // modify the first node further + for (let i = 5; i < 10; i++) { + n1.set("_root", "x", i) + n1.commit("",0) + } + + assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should work without prior sync state', () => { + // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- c15 <-- c16 <-- c17 + // lastSync is undefined. 
+ + // create two peers both with divergent commits + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2) + + for (let i = 10; i < 15; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + for (let i = 15; i < 18; i++) { + n2.set("_root","x",i) + n2.commit("",0) + } + + assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + sync(n1, n2) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should work with prior sync state', () => { + // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- c15 <-- c16 <-- c17 + // lastSync is c9. + + // create two peers both with divergent commits + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + for (let i = 10; i < 15; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + for (let i = 15; i < 18; i++) { + n2.set("_root","x",i) + n2.commit("",0) + } + + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + + assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should ensure non-empty state after sync', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + assert.deepStrictEqual(s1.sharedHeads, n1.getHeads()) + assert.deepStrictEqual(s2.sharedHeads, n1.getHeads()) + }) + + it('should re-sync after one node 
crashed with data loss', () => { + // Scenario: (r) (n2) (n1) + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 + // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. + // we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + // n1 makes three changes, which we sync to n2 + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + // save a copy of n2 as "r" to simulate recovering from crash + let r, rSyncState + ;[r, rSyncState] = [n2.clone(), s2.clone()] + + // sync another few commits + for (let i = 3; i < 6; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + // everyone should be on the same page here + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + + // now make a few more changes, then attempt to sync the fully-up-to-date n1 with the confused r + for (let i = 6; i < 9; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + s1 = decodeSyncState(encodeSyncState(s1)) + rSyncState = decodeSyncState(encodeSyncState(rSyncState)) + + assert.notDeepStrictEqual(n1.getHeads(), r.getHeads()) + assert.notDeepStrictEqual(n1.toJS(), r.toJS()) + assert.deepStrictEqual(n1.toJS(), {x: 8}) + assert.deepStrictEqual(r.toJS(), {x: 2}) + sync(n1, r, s1, rSyncState) + assert.deepStrictEqual(n1.getHeads(), r.getHeads()) + assert.deepStrictEqual(n1.toJS(), r.toJS()) + }) + + it('should resync after one node experiences data loss without disconnecting', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + // n1 makes three changes, which we sync to n2 + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + 
assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + + let n2AfterDataLoss = Automerge.init('89abcdef') + + // "n2" now has no data, but n1 still thinks it does. Note we don't do + // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting + sync(n1, n2AfterDataLoss, s1, initSyncState()) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should handle changes concurrent to the last sync heads', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') + let s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() + + // Change 1 is known to all three nodes + //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) + n1.set("_root","x",1); n1.commit("",0) + + sync(n1, n2, s12, s21) + sync(n2, n3, s23, s32) + + // Change 2 is known to n1 and n2 + n1.set("_root","x",2); n1.commit("",0) + + sync(n1, n2, s12, s21) + + // Each of the three nodes makes one change (changes 3, 4, 5) + n1.set("_root","x",3); n1.commit("",0) + n2.set("_root","x",4); n2.commit("",0) + n3.set("_root","x",5); n3.commit("",0) + + // Apply n3's latest change to n2. If running in Node, turn the Uint8Array into a Buffer, to + // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) + let change = n3.getLastLocalChange() + if (typeof Buffer === 'function') change = Buffer.from(change) + n2.applyChanges([change]) + + // Now sync n1 and n2. 
n3's change is concurrent to n1 and n2's last sync heads + sync(n1, n2, s12, s21) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should handle histories with lots of branching and merging', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') + n1.set("_root","x",0); n1.commit("",0) + n2.applyChanges([n1.getLastLocalChange()]) + n3.applyChanges([n1.getLastLocalChange()]) + n3.set("_root","x",1); n3.commit("",0) + + // - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 + // / \/ \/ \/ + // / /\ /\ /\ + // c0 <---- n2c1 <------ n2c2 <------ n2c3 <-- etc. <-- n2c20 <------ n2c21 + // \ / + // ---------------------------------------------- n3c1 <----- + for (let i = 1; i < 20; i++) { + n1.set("_root","n1",i); n1.commit("",0) + n2.set("_root","n2",i); n2.commit("",0) + const change1 = n1.getLastLocalChange() + const change2 = n2.getLastLocalChange() + n1.applyChanges([change2]) + n2.applyChanges([change1]) + } + + let s1 = initSyncState(), s2 = initSyncState() + sync(n1, n2, s1, s2) + + // Having n3's last change concurrent to the last sync heads forces us into the slower code path + n2.applyChanges([n3.getLastLocalChange()]) + n1.set("_root","n1","final"); n1.commit("",0) + n2.set("_root","n2","final"); n2.commit("",0) + + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should handle a false-positive head', () => { + // Scenario: ,-- n1 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- n2 + // where n2 is a false positive in the Bloom filter containing {n1}. + // lastSync is c9. 
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + sync(n1, n2, s1, s2) + for (let i = 1; ; i++) { // search for false positive; see comment above + const n1up = n1.clone('01234567'); + n1up.set("_root","x",`${i} @ n1`); n1up.commit("",0) + const n2up = n2.clone('89abcdef'); + n2up.set("_root","x",`${i} @ n2`); n2up.commit("",0) + if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { + n1.free(); n2.free() + n1 = n1up; n2 = n2up; break + } + } + const allHeads = [...n1.getHeads(), ...n2.getHeads()].sort() + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), allHeads) + assert.deepStrictEqual(n2.getHeads(), allHeads) + }) + + + describe('with a false-positive dependency', () => { + let n1, n2, s1, s2, n1hash2, n2hash2 + + beforeEach(() => { + // Scenario: ,-- n1c1 <-- n1c2 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- n2c1 <-- n2c2 + // where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}. + // lastSync is c9. 
+ n1 = Automerge.init('01234567') + n2 = Automerge.init('89abcdef') + s1 = initSyncState() + s2 = initSyncState() + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + sync(n1, n2, s1, s2) + + let n1hash1, n2hash1 + for (let i = 29; ; i++) { // search for false positive; see comment above + const n1us1 = n1.clone('01234567') + n1us1.set("_root","x",`${i} @ n1`); n1us1.commit("",0) + + const n2us1 = n2.clone('89abcdef') + n2us1.set("_root","x",`${i} @ n1`); n2us1.commit("",0) + + n1hash1 = n1us1.getHeads()[0]; n2hash1 = n2us1.getHeads()[0] + + const n1us2 = n1us1.clone(); + n1us2.set("_root","x",`final @ n1`); n1us2.commit("",0) + + const n2us2 = n2us1.clone(); + n2us2.set("_root","x",`final @ n2`); n2us2.commit("",0) + + n1hash2 = n1us2.getHeads()[0]; n2hash2 = n2us2.getHeads()[0] + if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) { + n1.free(); n2.free() + n1 = n1us2; n2 = n2us2; break + } + } + }) + + it('should sync two nodes without connection reset', () => { + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), [n1hash2, n2hash2].sort()) + assert.deepStrictEqual(n2.getHeads(), [n1hash2, n2hash2].sort()) + }) + + it('should sync two nodes with connection reset', () => { + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), [n1hash2, n2hash2].sort()) + assert.deepStrictEqual(n2.getHeads(), [n1hash2, n2hash2].sort()) + }) + + it('should sync three nodes', () => { + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + + // First n1 and n2 exchange Bloom filters + let m1, m2 + m1 = n1.generateSyncMessage(s1) + m2 = n2.generateSyncMessage(s2) + n1.receiveSyncMessage(s1, m2) + n2.receiveSyncMessage(s2, m1) + + // Then n1 and n2 send each other their changes, except for the false positive + m1 = n1.generateSyncMessage(s1) + m2 = n2.generateSyncMessage(s2) + n1.receiveSyncMessage(s1, m2) + 
n2.receiveSyncMessage(s2, m1) + assert.strictEqual(decodeSyncMessage(m1).changes.length, 2) // n1c1 and n1c2 + assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent + + // n3 is a node that doesn't have the missing change. Nevertheless n1 is going to ask n3 for it + let n3 = Automerge.init('fedcba98'), s13 = initSyncState(), s31 = initSyncState() + sync(n1, n3, s13, s31) + assert.deepStrictEqual(n1.getHeads(), [n1hash2]) + assert.deepStrictEqual(n3.getHeads(), [n1hash2]) + }) + }) + + it('should not require an additional request when a false-positive depends on a true-negative', () => { + // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ + // `-- n2c1 <-- n2c2 <-- n2c3 + // where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}. + // lastSync is c4. + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let n1hash3, n2hash3 + + for (let i = 0; i < 5; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + sync(n1, n2, s1, s2) + for (let i = 86; ; i++) { // search for false positive; see comment above + const n1us1 = n1.clone('01234567') + n1us1.set("_root","x",`${i} @ n1`); n1us1.commit("",0) + + const n2us1 = n2.clone('89abcdef') + n2us1.set("_root","x",`${i} @ n2`); n2us1.commit("",0) + + //const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) + //const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + const n1hash1 = n1us1.getHeads()[0] + + const n1us2 = n1us1.clone() + n1us2.set("_root","x",`${i + 1} @ n1`); n1us2.commit("",0) + + const n2us2 = n2us1.clone() + n2us2.set("_root","x",`${i + 1} @ n2`); n2us2.commit("",0) + + const n1hash2 = n1us2.getHeads()[0], n2hash2 = n2us2.getHeads()[0] + + const n1us3 = n1us2.clone() + n1us3.set("_root","x",`final @ n1`); n1us3.commit("",0) + + 
const n2us3 = n2us2.clone() + n2us3.set("_root","x",`final @ n2`); n2us3.commit("",0) + + n1hash3 = n1us3.getHeads()[0]; n2hash3 = n2us3.getHeads()[0] + + if (new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2)) { + n1.free(); n2.free(); + n1 = n1us3; n2 = n2us3; break + } + } + const bothHeads = [n1hash3, n2hash3].sort() + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), bothHeads) + assert.deepStrictEqual(n2.getHeads(), bothHeads) + }) + + it('should handle chains of false-positives', () => { + // Scenario: ,-- c5 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ + // `-- n2c1 <-- n2c2 <-- n2c3 + // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. + // lastSync is c4. + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 5; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + n1.set("_root","x",5); n1.commit("",0) + + for (let i = 2; ; i++) { // search for false positive; see comment above + const n2us1 = n2.clone('89abcdef') + n2us1.set("_root","x",`${i} @ n2`); n2us1.commit("",0) + if (new BloomFilter(n1.getHeads()).containsHash(n2us1.getHeads()[0])) { + n2 = n2us1; break + } + } + for (let i = 141; ; i++) { // search for false positive; see comment above + const n2us2 = n2.clone('89abcdef') + n2us2.set("_root","x",`${i} again`); n2us2.commit("",0) + if (new BloomFilter(n1.getHeads()).containsHash(n2us2.getHeads()[0])) { + n2 = n2us2; break + } + } + n2.set("_root","x",`final @ n2`); n2.commit("",0) + + const allHeads = [...n1.getHeads(), ...n2.getHeads()].sort() + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), allHeads) + assert.deepStrictEqual(n2.getHeads(), allHeads) + }) + + it('should allow the 
false-positive hash to be explicitly requested', () => { + // Scenario: ,-- n1 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- n2 + // where n2 causes a false positive in the Bloom filter containing {n1}. + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let message + + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + + for (let i = 1; ; i++) { // brute-force search for false positive; see comment above + const n1up = n1.clone('01234567'); n1up.set("_root","x",`${i} @ n1`); n1up.commit("",0) + const n2up = n1.clone('89abcdef'); n2up.set("_root","x",`${i} @ n2`); n2up.commit("",0) + + // check if the bloom filter on n2 will believe n1 already has a particular hash + // this will mean n2 won't offer that data to n1 by receiving a sync message from n1 + if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { + n1 = n1up; n2 = n2up; break + } + } + + // n1 creates a sync message for n2 with an ill-fated bloom + message = n1.generateSyncMessage(s1) + assert.strictEqual(decodeSyncMessage(message).changes.length, 0) + + // n2 receives it and DOESN'T send a change back + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + assert.strictEqual(decodeSyncMessage(message).changes.length, 0) + + // n1 should now realize it's missing that change and request it explicitly + n1.receiveSyncMessage(s1, message) + message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(message).need, n2.getHeads()) + + // n2 should fulfill that request + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + assert.strictEqual(decodeSyncMessage(message).changes.length, 1) + + // n1 should apply the change and the two should now be in sync + n1.receiveSyncMessage(s1, 
message) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + }) + + describe('protocol features', () => { + it('should allow multiple Bloom filters', () => { + // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 + // c0 <-- c1 <-- c2 <-+--- n2c1 <-- n2c2 <-- n2c3 + // `-- n3c1 <-- n3c2 <-- n3c3 + // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; + // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; + // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') + let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState() + let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() + let message1, message2, message3 + + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + // sync all 3 nodes + sync(n1, n2, s12, s21) // eslint-disable-line no-unused-vars -- kept for consistency + sync(n1, n3, s13, s31) + sync(n3, n2, s32, s23) + for (let i = 0; i < 2; i++) { + n1.set("_root","x",`${i} @ n1`); n1.commit("",0) + } + for (let i = 0; i < 2; i++) { + n2.set("_root","x",`${i} @ n2`); n2.commit("",0) + } + n1.applyChanges(n2.getChanges([])) + n2.applyChanges(n1.getChanges([])) + n1.set("_root","x",`3 @ n1`); n1.commit("",0) + n2.set("_root","x",`3 @ n2`); n2.commit("",0) +//n1 = Automerge.change(n1, {time: 0}, doc => doc.x = `3 @ n1`) +//n2 = Automerge.change(n2, {time: 0}, doc => doc.x = `3 @ n2`) + + for (let i = 0; i < 3; i++) { + n3.set("_root","x",`${i} @ n3`); n3.commit("",0) + } +//n3 = Automerge.change(n3, {time: 0}, doc => doc.x = `${i} @ n3`) + const n1c3 = n1.getHeads()[0], n2c3 = n2.getHeads()[0], n3c3 = n3.getHeads()[0] + s13 = decodeSyncState(encodeSyncState(s13)) + s31 = decodeSyncState(encodeSyncState(s31)) + s23 = decodeSyncState(encodeSyncState(s23)) + s32 = decodeSyncState(encodeSyncState(s32)) + + + // Now n3 concurrently syncs with n1 and n2. 
Doing this naively would result in n3 receiving + // changes {n1c1, n1c2, n2c1, n2c2} twice (those are the changes that both n1 and n2 have, but + // that n3 does not have). We want to prevent this duplication. + message1 = n1.generateSyncMessage(s13) // message from n1 to n3 + assert.strictEqual(decodeSyncMessage(message1).changes.length, 0) + n3.receiveSyncMessage(s31, message1) + message3 = n3.generateSyncMessage(s31) // message from n3 to n1 + assert.strictEqual(decodeSyncMessage(message3).changes.length, 3) // {n3c1, n3c2, n3c3} + n1.receiveSyncMessage(s13, message3) + + // Copy the Bloom filter received from n1 into the message sent from n3 to n2. This Bloom + // filter indicates what changes n3 is going to receive from n1. + message3 = n3.generateSyncMessage(s32) // message from n3 to n2 + const modifiedMessage = decodeSyncMessage(message3) + modifiedMessage.have.push(decodeSyncMessage(message1).have[0]) + assert.strictEqual(modifiedMessage.changes.length, 0) + n2.receiveSyncMessage(s23, Automerge.encodeSyncMessage(modifiedMessage)) + + // n2 replies to n3, sending only n2c3 (the one change that n2 has but n1 doesn't) + message2 = n2.generateSyncMessage(s23) + assert.strictEqual(decodeSyncMessage(message2).changes.length, 1) // {n2c3} + n3.receiveSyncMessage(s32, message2) + + // n1 replies to n3 + message1 = n1.generateSyncMessage(s13) + assert.strictEqual(decodeSyncMessage(message1).changes.length, 5) // {n1c1, n1c2, n1c3, n2c1, n2c2} + n3.receiveSyncMessage(s31, message1) + assert.deepStrictEqual(n3.getHeads(), [n1c3, n2c3, n3c3].sort()) + }) + + it('should allow any change to be requested', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let message = null + + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + const lastSync = n1.getHeads() + + for (let i = 3; i < 6; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + sync(n1, n2, s1, s2) + 
s1.lastSentHeads = [] // force generateSyncMessage to return a message even though nothing changed + message = n1.generateSyncMessage(s1) + const modMsg = decodeSyncMessage(message) + modMsg.need = lastSync // re-request change 2 + n2.receiveSyncMessage(s2, Automerge.encodeSyncMessage(modMsg)) + message = n2.generateSyncMessage(s2) + assert.strictEqual(decodeSyncMessage(message).changes.length, 1) + assert.strictEqual(Automerge.decodeChange(decodeSyncMessage(message).changes[0]).hash, lastSync[0]) + }) + + it('should ignore requests for a nonexistent change', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let message = null + + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + n2.applyChanges(n1.getChanges([])) + message = n1.generateSyncMessage(s1) + message.need = ['0000000000000000000000000000000000000000000000000000000000000000'] + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + assert.strictEqual(message, null) + }) + + it('should allow a subset of changes to be sent', () => { + // ,-- c1 <-- c2 + // c0 <-+ + // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') + let s1 = initSyncState(), s2 = initSyncState() + let msg, decodedMsg + + n1.set("_root","x",0); n1.commit("",0) + n3.applyChanges(n3.getChangesAdded(n1)) // merge() + for (let i = 1; i <= 2; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + for (let i = 3; i <= 4; i++) { + n3.set("_root","x",i); n3.commit("",0) + } + const c2 = n1.getHeads()[0], c4 = n3.getHeads()[0] + n2.applyChanges(n2.getChangesAdded(n3)) // merge() + + // Sync n1 and n2, so their shared heads are {c2, c4} + sync(n1, n2, s1, s2) + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + assert.deepStrictEqual(s1.sharedHeads, [c2, c4].sort()) + 
assert.deepStrictEqual(s2.sharedHeads, [c2, c4].sort()) + + // n2 and n3 apply {c5, c6, c7, c8} + n3.set("_root","x",5); n3.commit("",0) + const change5 = n3.getLastLocalChange() + n3.set("_root","x",6); n3.commit("",0) + const change6 = n3.getLastLocalChange(n3), c6 = n3.getHeads()[0] + for (let i = 7; i <= 8; i++) { + n3.set("_root","x",i); n3.commit("",0) + } + const c8 = n3.getHeads()[0] + n2.applyChanges(n2.getChangesAdded(n3)) // merge() + + // Now n1 initiates a sync with n2, and n2 replies with {c5, c6}. n2 does not send {c7, c8} + msg = n1.generateSyncMessage(s1) + n2.receiveSyncMessage(s2, msg) + msg = n2.generateSyncMessage(s2) + decodedMsg = decodeSyncMessage(msg) + decodedMsg.changes = [change5, change6] + msg = Automerge.encodeSyncMessage(decodedMsg) + const sentHashes = {} + sentHashes[Automerge.decodeChange(change5, true).hash] = true + sentHashes[Automerge.decodeChange(change6, true).hash] = true + s2.sentHashes = sentHashes + n1.receiveSyncMessage(s1, msg) + assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) + + // n1 replies, confirming the receipt of {c5, c6} and requesting the remaining changes + msg = n1.generateSyncMessage(s1) + n2.receiveSyncMessage(s2, msg) + assert.deepStrictEqual(decodeSyncMessage(msg).need, [c8]) + assert.deepStrictEqual(decodeSyncMessage(msg).have[0].lastSync, [c2, c6].sort()) + assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) + assert.deepStrictEqual(s2.sharedHeads, [c2, c6].sort()) + + // n2 sends the remaining changes {c7, c8} + msg = n2.generateSyncMessage(s2) + n1.receiveSyncMessage(s1, msg) + assert.strictEqual(decodeSyncMessage(msg).changes.length, 2) + assert.deepStrictEqual(s1.sharedHeads, [c2, c8].sort()) + }) + }) }) }) From a2e6778730a92af433e7f79f7779c0ea28d69c3f Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sat, 8 Jan 2022 12:45:13 -0500 Subject: [PATCH 019/730] fmt --- automerge-wasm/src/lib.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/automerge-wasm/src/lib.rs 
b/automerge-wasm/src/lib.rs index 299019c0..7760f128 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -238,10 +238,7 @@ impl Automerge { let obj = self.import(obj)?; let value = self.import_value(value, datatype.as_string())?; let index = self.0.length(&obj); - let opid = self - .0 - .insert(&obj, index, value) - .map_err(to_js_err)?; + let opid = self.0.insert(&obj, index, value).map_err(to_js_err)?; match opid { Some(opid) => Ok(self.export(opid)), None => Ok(JsValue::null()), From 557bfe1cc986eabe49a253b3b76932df7cb1a965 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sat, 8 Jan 2022 12:51:28 -0500 Subject: [PATCH 020/730] update todo --- TODO.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/TODO.md b/TODO.md index 5e6889dc..e4c70665 100644 --- a/TODO.md +++ b/TODO.md @@ -9,9 +9,6 @@ 1. single pass (fast) load 2. micro-patches / bare bones observation API / fully hydrated documents -### sync - 1. get all sync tests passing - ### maybe: 1. tables From fdab61e213868f2a2b8bc07bf04618808b6aa579 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sat, 8 Jan 2022 17:53:10 -0500 Subject: [PATCH 021/730] derive default --- automerge/src/sync/state.rs | 15 +-------------- 1 file changed, 1 insertion(+), 14 deletions(-) diff --git a/automerge/src/sync/state.rs b/automerge/src/sync/state.rs index 7a5a2e66..209dbaf5 100644 --- a/automerge/src/sync/state.rs +++ b/automerge/src/sync/state.rs @@ -5,7 +5,7 @@ use crate::{decoding, decoding::Decoder, encoding, BloomFilter, ChangeHash}; const SYNC_STATE_TYPE: u8 = 0x43; // first byte of an encoded sync state, for identification -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Default)] pub struct SyncState { pub shared_heads: Vec, pub last_sent_heads: Vec, @@ -54,16 +54,3 @@ impl SyncState { }) } } - -impl Default for SyncState { - fn default() -> Self { - Self { - shared_heads: Vec::new(), - last_sent_heads: Vec::new(), - their_heads: None, - their_need: None, - their_have: None, - sent_hashes: 
HashSet::new(), - } - } -} From 067df1f8945350521dbeaff4528a0c1e66fc1fc4 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sat, 8 Jan 2022 18:06:08 -0500 Subject: [PATCH 022/730] break sync, interop, and value code into their own files --- automerge-wasm/src/interop.rs | 336 ++++++++++++++++++++++++ automerge-wasm/src/lib.rs | 466 ++-------------------------------- automerge-wasm/src/sync.rs | 52 ++++ automerge-wasm/src/value.rs | 36 +++ 4 files changed, 444 insertions(+), 446 deletions(-) create mode 100644 automerge-wasm/src/interop.rs create mode 100644 automerge-wasm/src/sync.rs create mode 100644 automerge-wasm/src/value.rs diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs new file mode 100644 index 00000000..47dcb6d5 --- /dev/null +++ b/automerge-wasm/src/interop.rs @@ -0,0 +1,336 @@ +use automerge as am; +use automerge::{Change, ChangeHash, Prop}; +use js_sys::{Array, Object, Reflect, Uint8Array}; +use std::collections::HashSet; +use std::convert::TryFrom; +use std::convert::TryInto; +use std::fmt::Display; +use wasm_bindgen::prelude::*; +use wasm_bindgen::JsCast; + +use crate::{ObjId, ScalarValue, Value}; + +pub(crate) struct JS(pub JsValue); +pub(crate) struct AR(pub Array); + +impl From for JsValue { + fn from(ar: AR) -> Self { + ar.0.into() + } +} + +impl From for JsValue { + fn from(js: JS) -> Self { + js.0 + } +} + +impl From for JS { + fn from(state: am::SyncState) -> Self { + let shared_heads: JS = state.shared_heads.into(); + let last_sent_heads: JS = state.last_sent_heads.into(); + let their_heads: JS = state.their_heads.into(); + let their_need: JS = state.their_need.into(); + let sent_hashes: JS = state.sent_hashes.into(); + let their_have = if let Some(have) = &state.their_have { + JsValue::from(AR::from(have.as_slice()).0) + } else { + JsValue::null() + }; + let result: JsValue = Object::new().into(); + // we can unwrap here b/c we made the object and know its not frozen + Reflect::set(&result, &"sharedHeads".into(), 
&shared_heads.0).unwrap(); + Reflect::set(&result, &"lastSentHeads".into(), &last_sent_heads.0).unwrap(); + Reflect::set(&result, &"theirHeads".into(), &their_heads.0).unwrap(); + Reflect::set(&result, &"theirNeed".into(), &their_need.0).unwrap(); + Reflect::set(&result, &"theirHave".into(), &their_have).unwrap(); + Reflect::set(&result, &"sentHashes".into(), &sent_hashes.0).unwrap(); + JS(result) + } +} + +impl From> for JS { + fn from(heads: Vec) -> Self { + let heads: Array = heads + .iter() + .map(|h| JsValue::from_str(&h.to_string())) + .collect(); + JS(heads.into()) + } +} + +impl From> for JS { + fn from(heads: HashSet) -> Self { + let result: JsValue = Object::new().into(); + for key in &heads { + Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap(); + } + JS(result) + } +} + +impl From>> for JS { + fn from(heads: Option>) -> Self { + if let Some(v) = heads { + let v: Array = v + .iter() + .map(|h| JsValue::from_str(&h.to_string())) + .collect(); + JS(v.into()) + } else { + JS(JsValue::null()) + } + } +} + +impl TryFrom for HashSet { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let mut result = HashSet::new(); + for key in Reflect::own_keys(&value.0)?.iter() { + if let Some(true) = Reflect::get(&value.0, &key)?.as_bool() { + result.insert(key.into_serde().map_err(to_js_err)?); + } + } + Ok(result) + } +} + +impl TryFrom for Vec { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let value = value.0.dyn_into::()?; + let value: Result, _> = value.iter().map(|j| j.into_serde()).collect(); + let value = value.map_err(to_js_err)?; + Ok(value) + } +} + +impl From for Option> { + fn from(value: JS) -> Self { + let value = value.0.dyn_into::().ok()?; + let value: Result, _> = value.iter().map(|j| j.into_serde()).collect(); + let value = value.ok()?; + Some(value) + } +} + +impl TryFrom for Vec { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let value = value.0.dyn_into::()?; + let changes: 
Result, _> = value.iter().map(|j| j.dyn_into()).collect(); + let changes = changes?; + let changes: Result, _> = changes + .iter() + .map(|a| am::decode_change(a.to_vec())) + .collect(); + let changes = changes.map_err(to_js_err)?; + Ok(changes) + } +} + +impl TryFrom for am::SyncState { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let value = value.0; + let shared_heads = js_get(&value, "sharedHeads")?.try_into()?; + let last_sent_heads = js_get(&value, "lastSentHeads")?.try_into()?; + let their_heads = js_get(&value, "theirHeads")?.into(); + let their_need = js_get(&value, "theirNeed")?.into(); + let their_have = js_get(&value, "theirHave")?.try_into()?; + let sent_hashes = js_get(&value, "sentHashes")?.try_into()?; + Ok(am::SyncState { + shared_heads, + last_sent_heads, + their_heads, + their_need, + their_have, + sent_hashes, + }) + } +} + +impl TryFrom for Option> { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + if value.0.is_null() { + Ok(None) + } else { + Ok(Some(value.try_into()?)) + } + } +} + +impl TryFrom for Vec { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let value = value.0.dyn_into::()?; + let have: Result, JsValue> = value + .iter() + .map(|s| { + let last_sync = js_get(&s, "lastSync")?.try_into()?; + let bloom = js_get(&s, "bloom")?.try_into()?; + Ok(am::SyncHave { last_sync, bloom }) + }) + .collect(); + let have = have?; + Ok(have) + } +} + +impl TryFrom for am::BloomFilter { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let value: Uint8Array = value.0.dyn_into()?; + let value = value.to_vec(); + let value = value.as_slice().try_into().map_err(to_js_err)?; + Ok(value) + } +} + +impl From<&[ChangeHash]> for AR { + fn from(value: &[ChangeHash]) -> Self { + AR(value + .iter() + .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .collect()) + } +} + +impl From<&[Change]> for AR { + fn from(value: &[Change]) -> Self { + let changes: Array = value + .iter() + .map(|c| 
Uint8Array::from(c.raw_bytes())) + .collect(); + AR(changes) + } +} + +impl From<&[am::SyncHave]> for AR { + fn from(value: &[am::SyncHave]) -> Self { + AR(value + .iter() + .map(|have| { + let last_sync: Array = have + .last_sync + .iter() + .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .collect(); + // FIXME - the clone and the unwrap here shouldnt be needed - look at into_bytes() + let bloom = Uint8Array::from(have.bloom.clone().into_bytes().unwrap().as_slice()); + let obj: JsValue = Object::new().into(); + // we can unwrap here b/c we created the object and know its not frozen + Reflect::set(&obj, &"lastSync".into(), &last_sync.into()).unwrap(); + Reflect::set(&obj, &"bloom".into(), &bloom.into()).unwrap(); + obj + }) + .collect()) + } +} + +pub(crate) fn to_js_err(err: T) -> JsValue { + js_sys::Error::new(&std::format!("{}", err)).into() +} + +pub(crate) fn js_get>(obj: J, prop: &str) -> Result { + Ok(JS(Reflect::get(&obj.into(), &prop.into())?)) +} + +pub(crate) fn js_set>(obj: &JsValue, prop: &str, val: V) -> Result { + Reflect::set(obj, &prop.into(), &val.into()) +} + +pub(crate) fn to_usize(val: JsValue, name: &str) -> Result { + match val.as_f64() { + Some(n) => Ok(n as usize), + None => Err(format!("{} must be a number", name).into()), + } +} + +pub(crate) fn to_prop(p: JsValue) -> Result { + if let Some(s) = p.as_string() { + Ok(Prop::Map(s)) + } else if let Some(n) = p.as_f64() { + Ok(Prop::Seq(n as usize)) + } else { + Err("prop must me a string or number".into()) + } +} + +pub(crate) fn to_objtype(a: &JsValue) -> Option { + if !a.is_function() { + return None; + } + let f: js_sys::Function = a.clone().try_into().unwrap(); + let f = f.to_string(); + if f.starts_with("class MAP", 0) { + Some(am::ObjType::Map) + } else if f.starts_with("class LIST", 0) { + Some(am::ObjType::List) + } else if f.starts_with("class TEXT", 0) { + Some(am::ObjType::Text) + } else if f.starts_with("class TABLE", 0) { + Some(am::ObjType::Table) + } else { + None + } +} + 
+pub(crate) fn get_heads(heads: JsValue) -> Option> { + JS(heads).into() +} + +pub(crate) fn map_to_js(doc: &mut am::Automerge, obj: &ObjId) -> JsValue { + let keys = doc.keys(obj); + let map = Object::new(); + for k in keys { + let val = doc.value(obj, &k); + match val { + Ok(Some((Value::Object(o), exid))) + if o == am::ObjType::Map || o == am::ObjType::Table => + { + Reflect::set(&map, &k.into(), &map_to_js(doc, &exid)).unwrap(); + } + Ok(Some((Value::Object(_), exid))) => { + Reflect::set(&map, &k.into(), &list_to_js(doc, &exid)).unwrap(); + } + Ok(Some((Value::Scalar(v), _))) => { + Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap(); + } + _ => (), + }; + } + map.into() +} + +fn list_to_js(doc: &mut am::Automerge, obj: &ObjId) -> JsValue { + let len = doc.length(obj); + let array = Array::new(); + for i in 0..len { + let val = doc.value(obj, i as usize); + match val { + Ok(Some((Value::Object(o), exid))) + if o == am::ObjType::Map || o == am::ObjType::Table => + { + array.push(&map_to_js(doc, &exid)); + } + Ok(Some((Value::Object(_), exid))) => { + array.push(&list_to_js(doc, &exid)); + } + Ok(Some((Value::Scalar(v), _))) => { + array.push(&ScalarValue(v).into()); + } + _ => (), + }; + } + array.into() +} diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 7760f128..9b080b89 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -1,14 +1,20 @@ -extern crate web_sys; use automerge as am; -use automerge::{Change, ChangeHash, ObjId, Prop, Value, ROOT}; -use js_sys::{Array, Object, Reflect, Uint8Array}; -use std::collections::{HashMap, HashSet}; -use std::convert::TryFrom; +use automerge::{Change, ObjId, Prop, Value, ROOT}; +use js_sys::{Array, Object, Uint8Array}; use std::convert::TryInto; -use std::fmt::Display; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; +mod interop; +mod sync; +mod value; + +use interop::{ + get_heads, js_get, js_set, map_to_js, to_js_err, to_objtype, to_prop, to_usize, AR, JS, 
+}; +use sync::SyncState; +use value::{datatype, ScalarValue}; + #[allow(unused_macros)] macro_rules! log { ( $( $t:tt )* ) => { @@ -20,102 +26,10 @@ macro_rules! log { #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; -fn datatype(s: &am::ScalarValue) -> String { - match s { - am::ScalarValue::Bytes(_) => "bytes".into(), - am::ScalarValue::Str(_) => "str".into(), - am::ScalarValue::Int(_) => "int".into(), - am::ScalarValue::Uint(_) => "uint".into(), - am::ScalarValue::F64(_) => "f64".into(), - am::ScalarValue::Counter(_) => "counter".into(), - am::ScalarValue::Timestamp(_) => "timestamp".into(), - am::ScalarValue::Boolean(_) => "boolean".into(), - am::ScalarValue::Null => "null".into(), - } -} - -#[derive(Debug)] -pub struct ScalarValue(am::ScalarValue); - -impl From for JsValue { - fn from(val: ScalarValue) -> Self { - match &val.0 { - am::ScalarValue::Bytes(v) => Uint8Array::from(v.as_slice()).into(), - am::ScalarValue::Str(v) => v.to_string().into(), - am::ScalarValue::Int(v) => (*v as f64).into(), - am::ScalarValue::Uint(v) => (*v as f64).into(), - am::ScalarValue::F64(v) => (*v).into(), - am::ScalarValue::Counter(v) => (*v as f64).into(), - am::ScalarValue::Timestamp(v) => js_sys::Date::new(&(*v as f64).into()).into(), - am::ScalarValue::Boolean(v) => (*v).into(), - am::ScalarValue::Null => JsValue::null(), - } - } -} - #[wasm_bindgen] #[derive(Debug)] pub struct Automerge(automerge::Automerge); -#[wasm_bindgen] -#[derive(Debug)] -pub struct SyncState(am::SyncState); - -#[wasm_bindgen] -impl SyncState { - #[wasm_bindgen(getter, js_name = sharedHeads)] - pub fn shared_heads(&self) -> JsValue { - AR::from(self.0.shared_heads.as_slice()).into() - } - - #[wasm_bindgen(getter, js_name = lastSentHeads)] - pub fn last_sent_heads(&self) -> JsValue { - AR::from(self.0.last_sent_heads.as_slice()).into() - } - - #[wasm_bindgen(setter, js_name = lastSentHeads)] - pub fn set_last_sent_heads(&mut self, heads: JsValue) -> Result<(), JsValue> 
{ - let heads: Vec = JS(heads).try_into()?; - self.0.last_sent_heads = heads; - Ok(()) - } - - #[wasm_bindgen(setter, js_name = sentHashes)] - pub fn set_sent_hashes(&mut self, hashes: JsValue) -> Result<(), JsValue> { - let hashes_map: HashMap = hashes.into_serde().map_err(to_js_err)?; - let hashes_set: HashSet = hashes_map.keys().cloned().collect(); - self.0.sent_hashes = hashes_set; - Ok(()) - } - - fn decode(data: Uint8Array) -> Result { - let data = data.to_vec(); - let s = am::SyncState::decode(&data); - let s = s.map_err(to_js_err)?; - Ok(SyncState(s)) - } - - #[allow(clippy::should_implement_trait)] - pub fn clone(&self) -> Self { - SyncState(self.0.clone()) - } -} - -#[derive(Debug)] -pub struct JsErr(String); - -impl From for JsValue { - fn from(err: JsErr) -> Self { - js_sys::Error::new(&std::format!("{}", err.0)).into() - } -} - -impl<'a> From<&'a str> for JsErr { - fn from(s: &'a str) -> Self { - JsErr(s.to_owned()) - } -} - #[wasm_bindgen] impl Automerge { pub fn new(actor: JsValue) -> Result { @@ -573,57 +487,6 @@ impl Automerge { } } -pub fn to_usize(val: JsValue, name: &str) -> Result { - match val.as_f64() { - Some(n) => Ok(n as usize), - None => Err(format!("{} must be a number", name).into()), - } -} - -pub fn to_prop(p: JsValue) -> Result { - if let Some(s) = p.as_string() { - Ok(Prop::Map(s)) - } else if let Some(n) = p.as_f64() { - Ok(Prop::Seq(n as usize)) - } else { - Err("prop must me a string or number".into()) - } -} - -fn to_objtype(a: &JsValue) -> Option { - if !a.is_function() { - return None; - } - let f: js_sys::Function = a.clone().try_into().unwrap(); - let f = f.to_string(); - if f.starts_with("class MAP", 0) { - Some(am::ObjType::Map) - } else if f.starts_with("class LIST", 0) { - Some(am::ObjType::List) - } else if f.starts_with("class TEXT", 0) { - Some(am::ObjType::Text) - } else if f.starts_with("class TABLE", 0) { - Some(am::ObjType::Table) - } else { - None - } -} - -struct ObjType(am::ObjType); - -impl TryFrom for ObjType 
{ - type Error = JsValue; - - fn try_from(val: JsValue) -> Result { - match &val.as_string() { - Some(o) if o == "map" => Ok(ObjType(am::ObjType::Map)), - Some(o) if o == "list" => Ok(ObjType(am::ObjType::List)), - Some(o) => Err(format!("unknown obj type {}", o).into()), - _ => Err("obj type must be a string".into()), - } - } -} - #[wasm_bindgen] pub fn init(actor: JsValue) -> Result { console_error_panic_hook::set_once(); @@ -674,10 +537,10 @@ pub fn export_sync_state(state: SyncState) -> JsValue { #[wasm_bindgen(js_name = encodeSyncMessage)] pub fn encode_sync_message(message: JsValue) -> Result { - let heads = get(&message, "heads")?.try_into()?; - let need = get(&message, "need")?.try_into()?; - let changes = get(&message, "changes")?.try_into()?; - let have = get(&message, "have")?.try_into()?; + let heads = js_get(&message, "heads")?.try_into()?; + let need = js_get(&message, "need")?.try_into()?; + let changes = js_get(&message, "changes")?.try_into()?; + let have = js_get(&message, "have")?.try_into()?; Ok(Uint8Array::from( am::SyncMessage { heads, @@ -700,10 +563,10 @@ pub fn decode_sync_message(msg: Uint8Array) -> Result { let changes = AR::from(msg.changes.as_slice()); let have = AR::from(msg.have.as_slice()); let obj = Object::new().into(); - set(&obj, "heads", heads)?; - set(&obj, "need", need)?; - set(&obj, "have", have)?; - set(&obj, "changes", changes)?; + js_set(&obj, "heads", heads)?; + js_set(&obj, "need", need)?; + js_set(&obj, "have", have)?; + js_set(&obj, "changes", changes)?; Ok(obj) } @@ -731,292 +594,3 @@ pub struct Text {} #[wasm_bindgen(js_name = TABLE)] pub struct Table {} - -fn to_js_err(err: T) -> JsValue { - js_sys::Error::new(&std::format!("{}", err)).into() -} - -fn get>(obj: J, prop: &str) -> Result { - Ok(JS(Reflect::get(&obj.into(), &prop.into())?)) -} - -fn set>(obj: &JsValue, prop: &str, val: V) -> Result { - Reflect::set(obj, &prop.into(), &val.into()) -} - -struct JS(JsValue); -struct AR(Array); - -impl From for JsValue { - 
fn from(ar: AR) -> Self { - ar.0.into() - } -} - -impl From for JsValue { - fn from(js: JS) -> Self { - js.0 - } -} - -impl From for JS { - fn from(state: am::SyncState) -> Self { - let shared_heads: JS = state.shared_heads.into(); - let last_sent_heads: JS = state.last_sent_heads.into(); - let their_heads: JS = state.their_heads.into(); - let their_need: JS = state.their_need.into(); - let sent_hashes: JS = state.sent_hashes.into(); - let their_have = if let Some(have) = &state.their_have { - JsValue::from(AR::from(have.as_slice()).0) - } else { - JsValue::null() - }; - let result: JsValue = Object::new().into(); - // we can unwrap here b/c we made the object and know its not frozen - Reflect::set(&result, &"sharedHeads".into(), &shared_heads.0).unwrap(); - Reflect::set(&result, &"lastSentHeads".into(), &last_sent_heads.0).unwrap(); - Reflect::set(&result, &"theirHeads".into(), &their_heads.0).unwrap(); - Reflect::set(&result, &"theirNeed".into(), &their_need.0).unwrap(); - Reflect::set(&result, &"theirHave".into(), &their_have).unwrap(); - Reflect::set(&result, &"sentHashes".into(), &sent_hashes.0).unwrap(); - JS(result) - } -} - -impl From> for JS { - fn from(heads: Vec) -> Self { - let heads: Array = heads - .iter() - .map(|h| JsValue::from_str(&h.to_string())) - .collect(); - JS(heads.into()) - } -} - -impl From> for JS { - fn from(heads: HashSet) -> Self { - let result: JsValue = Object::new().into(); - for key in &heads { - Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap(); - } - JS(result) - } -} - -impl From>> for JS { - fn from(heads: Option>) -> Self { - if let Some(v) = heads { - let v: Array = v - .iter() - .map(|h| JsValue::from_str(&h.to_string())) - .collect(); - JS(v.into()) - } else { - JS(JsValue::null()) - } - } -} - -impl TryFrom for HashSet { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - let mut result = HashSet::new(); - for key in Reflect::own_keys(&value.0)?.iter() { - if let Some(true) = 
Reflect::get(&value.0, &key)?.as_bool() { - result.insert(key.into_serde().map_err(to_js_err)?); - } - } - Ok(result) - } -} - -impl TryFrom for Vec { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - let value = value.0.dyn_into::()?; - let value: Result, _> = value.iter().map(|j| j.into_serde()).collect(); - let value = value.map_err(to_js_err)?; - Ok(value) - } -} - -impl From for Option> { - fn from(value: JS) -> Self { - let value = value.0.dyn_into::().ok()?; - let value: Result, _> = value.iter().map(|j| j.into_serde()).collect(); - let value = value.ok()?; - Some(value) - } -} - -impl TryFrom for Vec { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - let value = value.0.dyn_into::()?; - let changes: Result, _> = value.iter().map(|j| j.dyn_into()).collect(); - let changes = changes?; - let changes: Result, _> = changes - .iter() - .map(|a| am::decode_change(a.to_vec())) - .collect(); - let changes = changes.map_err(to_js_err)?; - Ok(changes) - } -} - -impl TryFrom for am::SyncState { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - let value = value.0; - let shared_heads = get(&value, "sharedHeads")?.try_into()?; - let last_sent_heads = get(&value, "lastSentHeads")?.try_into()?; - let their_heads = get(&value, "theirHeads")?.into(); - let their_need = get(&value, "theirNeed")?.into(); - let their_have = get(&value, "theirHave")?.try_into()?; - let sent_hashes = get(&value, "sentHashes")?.try_into()?; - Ok(am::SyncState { - shared_heads, - last_sent_heads, - their_heads, - their_need, - their_have, - sent_hashes, - }) - } -} - -impl TryFrom for Option> { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - if value.0.is_null() { - Ok(None) - } else { - Ok(Some(value.try_into()?)) - } - } -} - -impl TryFrom for Vec { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - let value = value.0.dyn_into::()?; - let have: Result, JsValue> = value - .iter() - .map(|s| { - let last_sync = get(&s, 
"lastSync")?.try_into()?; - let bloom = get(&s, "bloom")?.try_into()?; - Ok(am::SyncHave { last_sync, bloom }) - }) - .collect(); - let have = have?; - Ok(have) - } -} - -impl TryFrom for am::BloomFilter { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - let value: Uint8Array = value.0.dyn_into()?; - let value = value.to_vec(); - let value = value.as_slice().try_into().map_err(to_js_err)?; - Ok(value) - } -} - -impl From<&[ChangeHash]> for AR { - fn from(value: &[ChangeHash]) -> Self { - AR(value - .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) - .collect()) - } -} - -impl From<&[Change]> for AR { - fn from(value: &[Change]) -> Self { - let changes: Array = value - .iter() - .map(|c| Uint8Array::from(c.raw_bytes())) - .collect(); - AR(changes) - } -} - -impl From<&[am::SyncHave]> for AR { - fn from(value: &[am::SyncHave]) -> Self { - AR(value - .iter() - .map(|have| { - let last_sync: Array = have - .last_sync - .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) - .collect(); - // FIXME - the clone and the unwrap here shouldnt be needed - look at into_bytes() - let bloom = Uint8Array::from(have.bloom.clone().into_bytes().unwrap().as_slice()); - let obj: JsValue = Object::new().into(); - // we can unwrap here b/c we created the object and know its not frozen - Reflect::set(&obj, &"lastSync".into(), &last_sync.into()).unwrap(); - Reflect::set(&obj, &"bloom".into(), &bloom.into()).unwrap(); - obj - }) - .collect()) - } -} - -fn get_heads(heads: JsValue) -> Option> { - JS(heads).into() -} - -fn map_to_js(doc: &mut am::Automerge, obj: &ObjId) -> JsValue { - let keys = doc.keys(obj); - let map = Object::new(); - for k in keys { - let val = doc.value(obj, &k); - match val { - Ok(Some((Value::Object(o), exid))) - if o == am::ObjType::Map || o == am::ObjType::Table => - { - Reflect::set(&map, &k.into(), &map_to_js(doc, &exid)).unwrap(); - } - Ok(Some((Value::Object(_), exid))) => { - Reflect::set(&map, &k.into(), &list_to_js(doc, 
&exid)).unwrap(); - } - Ok(Some((Value::Scalar(v), _))) => { - Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap(); - } - _ => (), - }; - } - map.into() -} - -fn list_to_js(doc: &mut am::Automerge, obj: &ObjId) -> JsValue { - let len = doc.length(obj); - let array = Array::new(); - for i in 0..len { - let val = doc.value(obj, i as usize); - match val { - Ok(Some((Value::Object(o), exid))) - if o == am::ObjType::Map || o == am::ObjType::Table => - { - array.push(&map_to_js(doc, &exid)); - } - Ok(Some((Value::Object(_), exid))) => { - array.push(&list_to_js(doc, &exid)); - } - Ok(Some((Value::Scalar(v), _))) => { - array.push(&ScalarValue(v).into()); - } - _ => (), - }; - } - array.into() -} diff --git a/automerge-wasm/src/sync.rs b/automerge-wasm/src/sync.rs new file mode 100644 index 00000000..7c201e61 --- /dev/null +++ b/automerge-wasm/src/sync.rs @@ -0,0 +1,52 @@ +use automerge as am; +use automerge::ChangeHash; +use js_sys::Uint8Array; +use std::collections::{HashMap, HashSet}; +use std::convert::TryInto; +use wasm_bindgen::prelude::*; + +use crate::interop::{to_js_err, AR, JS}; + +#[wasm_bindgen] +#[derive(Debug)] +pub struct SyncState(pub(crate) am::SyncState); + +#[wasm_bindgen] +impl SyncState { + #[wasm_bindgen(getter, js_name = sharedHeads)] + pub fn shared_heads(&self) -> JsValue { + AR::from(self.0.shared_heads.as_slice()).into() + } + + #[wasm_bindgen(getter, js_name = lastSentHeads)] + pub fn last_sent_heads(&self) -> JsValue { + AR::from(self.0.last_sent_heads.as_slice()).into() + } + + #[wasm_bindgen(setter, js_name = lastSentHeads)] + pub fn set_last_sent_heads(&mut self, heads: JsValue) -> Result<(), JsValue> { + let heads: Vec = JS(heads).try_into()?; + self.0.last_sent_heads = heads; + Ok(()) + } + + #[wasm_bindgen(setter, js_name = sentHashes)] + pub fn set_sent_hashes(&mut self, hashes: JsValue) -> Result<(), JsValue> { + let hashes_map: HashMap = hashes.into_serde().map_err(to_js_err)?; + let hashes_set: HashSet = 
hashes_map.keys().cloned().collect(); + self.0.sent_hashes = hashes_set; + Ok(()) + } + + #[allow(clippy::should_implement_trait)] + pub fn clone(&self) -> Self { + SyncState(self.0.clone()) + } + + pub(crate) fn decode(data: Uint8Array) -> Result { + let data = data.to_vec(); + let s = am::SyncState::decode(&data); + let s = s.map_err(to_js_err)?; + Ok(SyncState(s)) + } +} diff --git a/automerge-wasm/src/value.rs b/automerge-wasm/src/value.rs new file mode 100644 index 00000000..ab49d980 --- /dev/null +++ b/automerge-wasm/src/value.rs @@ -0,0 +1,36 @@ +use automerge as am; +use js_sys::Uint8Array; +use wasm_bindgen::prelude::*; + +#[derive(Debug)] +pub struct ScalarValue(pub(crate) am::ScalarValue); + +impl From for JsValue { + fn from(val: ScalarValue) -> Self { + match &val.0 { + am::ScalarValue::Bytes(v) => Uint8Array::from(v.as_slice()).into(), + am::ScalarValue::Str(v) => v.to_string().into(), + am::ScalarValue::Int(v) => (*v as f64).into(), + am::ScalarValue::Uint(v) => (*v as f64).into(), + am::ScalarValue::F64(v) => (*v).into(), + am::ScalarValue::Counter(v) => (*v as f64).into(), + am::ScalarValue::Timestamp(v) => js_sys::Date::new(&(*v as f64).into()).into(), + am::ScalarValue::Boolean(v) => (*v).into(), + am::ScalarValue::Null => JsValue::null(), + } + } +} + +pub(crate) fn datatype(s: &am::ScalarValue) -> String { + match s { + am::ScalarValue::Bytes(_) => "bytes".into(), + am::ScalarValue::Str(_) => "str".into(), + am::ScalarValue::Int(_) => "int".into(), + am::ScalarValue::Uint(_) => "uint".into(), + am::ScalarValue::F64(_) => "f64".into(), + am::ScalarValue::Counter(_) => "counter".into(), + am::ScalarValue::Timestamp(_) => "timestamp".into(), + am::ScalarValue::Boolean(_) => "boolean".into(), + am::ScalarValue::Null => "null".into(), + } +} From 642a7ce3168d79ffb0bf44ac6b3d8701f9ff0c08 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 11 Jan 2022 17:54:23 -0500 Subject: [PATCH 023/730] update todo list --- TODO.md | 16 +++++++++++++++- 1 file 
changed, 15 insertions(+), 1 deletion(-) diff --git a/TODO.md b/TODO.md index e4c70665..5840525b 100644 --- a/TODO.md +++ b/TODO.md @@ -9,9 +9,23 @@ 1. single pass (fast) load 2. micro-patches / bare bones observation API / fully hydrated documents +### future: + 1. handle columns with unknown data in and out + 2. branches with different indexes + +### Peritext + 1. add mark / remove mark -- type, start/end elemid (inclusive,exclusive) + 2. track any formatting ops that start or end on a character + 3. ops right before the character, ops right after that character + 4. query a single charaacter - character, plus marks that start or end on that character + what is its current formatting, + what are the ops that include that in their span, + None = same as last time, Set( bold, italic ), + keep these on index + 5. op probably belongs with the start character - possible packed at the beginer or end of the list + ### maybe: 1. tables ### no: 1. cursors - From e59d24f68bf42ed51af8360d4906f9a0e8d1fe9a Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 13 Jan 2022 10:41:15 -0500 Subject: [PATCH 024/730] return values are sorted - add counter del test --- automerge/src/automerge.rs | 43 +++++++++++---------- automerge/src/exid.rs | 19 ++++++++++ automerge/tests/test.rs | 78 +++++++++++++++++++++++++++++++++++++- 3 files changed, 119 insertions(+), 21 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index bf89f46f..e7815397 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -466,9 +466,9 @@ impl Automerge { Prop::Map(p) => { let prop = self.ops.m.props.lookup(&p); if let Some(p) = prop { - self.ops - .search(obj, query::Prop::new(obj, p)) - .ops + let mut result = self.ops.search(obj, query::Prop::new(obj, p)); + result.ops.sort_by(|a,b| self.ops.m.lamport_cmp(a.id,b.id)); + result.ops .into_iter() .map(|o| (o.value(), self.id_to_exid(o.id))) .collect() @@ -476,13 +476,14 @@ impl Automerge { vec![] } } - 
Prop::Seq(n) => self - .ops - .search(obj, query::Nth::new(n)) - .ops - .into_iter() - .map(|o| (o.value(), self.id_to_exid(o.id))) - .collect(), + Prop::Seq(n) => { + let mut result = self.ops.search(obj, query::Nth::new(n)); + result.ops.sort_by(|a,b| self.ops.m.lamport_cmp(a.id,b.id)); + result.ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect() + } }; Ok(result) } @@ -500,9 +501,10 @@ impl Automerge { Prop::Map(p) => { let prop = self.ops.m.props.lookup(&p); if let Some(p) = prop { - self.ops - .search(obj, query::PropAt::new(p, clock)) - .ops + let mut result = self.ops + .search(obj, query::PropAt::new(p, clock)); + result.ops.sort_by(|a,b| self.ops.m.lamport_cmp(a.id,b.id)); + result.ops .into_iter() .map(|o| (o.value(), self.id_to_exid(o.id))) .collect() @@ -510,13 +512,14 @@ impl Automerge { vec![] } } - Prop::Seq(n) => self - .ops - .search(obj, query::NthAt::new(n, clock)) - .ops - .into_iter() - .map(|o| (o.value(), self.id_to_exid(o.id))) - .collect(), + Prop::Seq(n) => { + let mut result = self.ops.search(obj, query::NthAt::new(n, clock)); + result.ops.sort_by(|a,b| self.ops.m.lamport_cmp(a.id,b.id)); + result.ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect() + } }; Ok(result) } diff --git a/automerge/src/exid.rs b/automerge/src/exid.rs index bf7e9ed5..a12ed1cd 100644 --- a/automerge/src/exid.rs +++ b/automerge/src/exid.rs @@ -1,5 +1,6 @@ use crate::ActorId; use std::fmt; +use std::cmp::{ Ord, Ordering }; use std::hash::{Hash, Hasher}; #[derive(Debug, Clone)] @@ -44,3 +45,21 @@ impl Hash for ExId { } } } + +impl Ord for ExId { + fn cmp(&self, other: &Self) -> Ordering { + match (self, other) { + (ExId::Root, ExId::Root) => Ordering::Equal, + (ExId::Root, _) => Ordering::Less, + (_, ExId::Root) => Ordering::Greater, + (ExId::Id(c1, a1, _), ExId::Id(c2, a2, _)) if c1 == c2 => a2.cmp(&a1), + (ExId::Id(c1, _, _), ExId::Id(c2, _, _)) => c1.cmp(&c2), + } + } +} + +impl PartialOrd for ExId { + fn 
partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 2253f22b..51228781 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,4 +1,4 @@ -use automerge::Automerge; +use automerge::{ Automerge, ActorId, ROOT, Value }; mod helpers; #[allow(unused_imports)] @@ -884,3 +884,79 @@ fn save_restore_complex() { } ); } + +#[test] +fn list_counter_del() -> Result<(), automerge::AutomergeError> { + let mut v = vec![ActorId::random(), ActorId::random(), ActorId::random()]; + v.sort(); + println!("{:?}", v); + let actor1 = v[2].clone(); + let actor2 = v[1].clone(); + let actor3 = v[0].clone(); + + let mut doc1 = new_doc_with_actor(actor1); + + let list = doc1.set(&ROOT, "list", Value::list())?.unwrap(); + doc1.insert(&list, 0, "a")?; + doc1.insert(&list, 1, "b")?; + doc1.insert(&list, 2, "c")?; + + let mut doc2 = Automerge::load(&doc1.save()?)?; + doc2.set_actor(actor2); + + let mut doc3 = Automerge::load(&doc1.save()?)?; + doc3.set_actor(actor3); + + doc1.set(&list, 1, Value::counter(0))?; + doc2.set(&list, 1, Value::counter(10))?; + doc3.set(&list, 1, Value::counter(100))?; + + doc1.set(&list, 2, Value::counter(0))?; + doc2.set(&list, 2, Value::counter(10))?; + doc3.set(&list, 2, Value::int(100))?; + + doc1.inc(&list, 1, 1)?; + doc1.inc(&list, 2, 1)?; + + doc1.merge(&mut doc2); + doc1.merge(&mut doc3); + + let values = doc1.values(&list, 1)?; + assert_eq!(values.len(), 3); + assert_eq!(&values[0].0, &Value::counter(1)); + assert_eq!(&values[1].0, &Value::counter(10)); + assert_eq!(&values[2].0, &Value::counter(100)); + + let values = doc1.values(&list, 2)?; + assert_eq!(values.len(), 3); + assert_eq!(&values[0].0, &Value::counter(1)); + assert_eq!(&values[1].0, &Value::counter(10)); + assert_eq!(&values[2].0, &Value::int(100)); + + doc1.inc(&list, 1, 1)?; + doc1.inc(&list, 2, 1)?; + + let values = doc1.values(&list, 1)?; + assert_eq!(values.len(), 3); + 
assert_eq!(&values[0].0, &Value::counter(2)); + assert_eq!(&values[1].0, &Value::counter(11)); + assert_eq!(&values[2].0, &Value::counter(101)); + + let values = doc1.values(&list, 2)?; + assert_eq!(values.len(), 2); + assert_eq!(&values[0].0, &Value::counter(2)); + assert_eq!(&values[1].0, &Value::counter(11)); + + assert_eq!(doc1.length(&list), 3); + + println!("-------------"); + doc1.del(&list,2)?; + + //assert_eq!(doc1.length(&list), 2); + + //let doc2 = Automerge::load(&doc1.save()?); + + //assert_eq!(doc1.length(&list), 2); + + Ok(()) +} From d50062b769128c7e83b3749005e29bc58c4270ed Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 13 Jan 2022 12:11:36 -0500 Subject: [PATCH 025/730] move values into the counter type - remove need for vis_window --- automerge-js/test/legacy_tests.js | 4 +- automerge-wasm/src/lib.rs | 2 +- automerge-wasm/src/value.rs | 4 +- automerge-wasm/test/test.js | 3 -- automerge/src/automerge.rs | 51 ++++++++---------- automerge/src/columnar.rs | 6 +-- automerge/src/exid.rs | 6 +-- automerge/src/legacy/serde_impls/op.rs | 4 +- .../src/legacy/utility_impls/scalar_value.rs | 2 +- automerge/src/query.rs | 52 +++---------------- automerge/src/query/insert.rs | 6 +-- automerge/src/query/keys.rs | 11 ++-- automerge/src/query/list_vals_at.rs | 15 ++++-- automerge/src/query/nth.rs | 12 ++--- automerge/src/query/nth_at.rs | 15 +++++- automerge/src/query/prop_at.rs | 15 +++++- automerge/src/types.rs | 46 ++++++++++++++++ automerge/src/value.rs | 38 +++++++++++--- automerge/src/visualisation.rs | 2 +- automerge/tests/helpers/mod.rs | 6 +-- automerge/tests/test.rs | 19 ++++--- 21 files changed, 185 insertions(+), 134 deletions(-) diff --git a/automerge-js/test/legacy_tests.js b/automerge-js/test/legacy_tests.js index 49cbb079..75232cd2 100644 --- a/automerge-js/test/legacy_tests.js +++ b/automerge-js/test/legacy_tests.js @@ -775,12 +775,14 @@ describe('Automerge', () => { }) // counter - it.skip('should not allow deleting counters from lists', 
() => { + /* + it('should not allow deleting counters from lists', () => { const s1 = Automerge.change(Automerge.init(), doc => doc.recordings = [new Automerge.Counter(1)]) const s2 = Automerge.change(s1, doc => doc.recordings[0].increment(2)) assert.deepStrictEqual(s2, {recordings: [new Automerge.Counter(3)]}) assert.throws(() => { Automerge.change(s2, doc => doc.recordings.deleteAt(0)) }, /Unsupported operation/) }) + */ }) }) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 9b080b89..8660afe2 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -448,7 +448,7 @@ impl Automerge { Some("counter") => value .as_f64() .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::Counter(v as i64).into()), + .map(|v| am::ScalarValue::counter(v as i64).into()), Some("timestamp") => value .as_f64() .ok_or_else(|| "value must be a number".into()) diff --git a/automerge-wasm/src/value.rs b/automerge-wasm/src/value.rs index ab49d980..1799fde8 100644 --- a/automerge-wasm/src/value.rs +++ b/automerge-wasm/src/value.rs @@ -13,7 +13,7 @@ impl From for JsValue { am::ScalarValue::Int(v) => (*v as f64).into(), am::ScalarValue::Uint(v) => (*v as f64).into(), am::ScalarValue::F64(v) => (*v).into(), - am::ScalarValue::Counter(v) => (*v as f64).into(), + am::ScalarValue::Counter(_, v, _) => (*v as f64).into(), am::ScalarValue::Timestamp(v) => js_sys::Date::new(&(*v as f64).into()).into(), am::ScalarValue::Boolean(v) => (*v).into(), am::ScalarValue::Null => JsValue::null(), @@ -28,7 +28,7 @@ pub(crate) fn datatype(s: &am::ScalarValue) -> String { am::ScalarValue::Int(_) => "int".into(), am::ScalarValue::Uint(_) => "uint".into(), am::ScalarValue::F64(_) => "f64".into(), - am::ScalarValue::Counter(_) => "counter".into(), + am::ScalarValue::Counter(_, _, _) => "counter".into(), am::ScalarValue::Timestamp(_) => "timestamp".into(), am::ScalarValue::Boolean(_) => "boolean".into(), am::ScalarValue::Null => "null".into(), diff --git 
a/automerge-wasm/test/test.js b/automerge-wasm/test/test.js index c9020ca7..bf476243 100644 --- a/automerge-wasm/test/test.js +++ b/automerge-wasm/test/test.js @@ -1184,13 +1184,10 @@ describe('Automerge', () => { n2.applyChanges(n1.getChanges([])) n1.set("_root","x",`3 @ n1`); n1.commit("",0) n2.set("_root","x",`3 @ n2`); n2.commit("",0) -//n1 = Automerge.change(n1, {time: 0}, doc => doc.x = `3 @ n1`) -//n2 = Automerge.change(n2, {time: 0}, doc => doc.x = `3 @ n2`) for (let i = 0; i < 3; i++) { n3.set("_root","x",`${i} @ n3`); n3.commit("",0) } -//n3 = Automerge.change(n3, {time: 0}, doc => doc.x = `${i} @ n3`) const n1c3 = n1.getHeads()[0], n2c3 = n2.getHeads()[0], n3c3 = n3.getHeads()[0] s13 = decodeSyncState(encodeSyncState(s13)) s31 = decodeSyncState(encodeSyncState(s31)) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index e7815397..d06aa59a 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -157,8 +157,7 @@ impl Automerge { for pred_id in &op.pred { // FIXME - use query to make this fast if let Some(p) = self.ops.iter().position(|o| o.id == *pred_id) { - self.ops - .replace(op.obj, p, |o| o.succ.retain(|i| i != pred_id)); + self.ops.replace(op.obj, p, |o| o.remove_succ(op)); } } if let Some(pos) = self.ops.iter().position(|o| o.id == op.id) { @@ -179,7 +178,7 @@ impl Automerge { fn insert_local_op(&mut self, op: Op, pos: usize, succ_pos: &[usize]) { for succ in succ_pos { self.ops.replace(op.obj, *succ, |old_op| { - old_op.succ.push(op.id); + old_op.add_succ(&op); }); } @@ -194,8 +193,7 @@ impl Automerge { let q = self.ops.search(op.obj, query::SeekOp::new(&op)); for i in q.succ { - self.ops - .replace(op.obj, i, |old_op| old_op.succ.push(op.id)); + self.ops.replace(op.obj, i, |old_op| old_op.add_succ(&op)); } if !op.is_del() { @@ -466,9 +464,9 @@ impl Automerge { Prop::Map(p) => { let prop = self.ops.m.props.lookup(&p); if let Some(p) = prop { - let mut result = self.ops.search(obj, query::Prop::new(obj, p)); - 
result.ops.sort_by(|a,b| self.ops.m.lamport_cmp(a.id,b.id)); - result.ops + self.ops + .search(obj, query::Prop::new(obj, p)) + .ops .into_iter() .map(|o| (o.value(), self.id_to_exid(o.id))) .collect() @@ -476,14 +474,13 @@ impl Automerge { vec![] } } - Prop::Seq(n) => { - let mut result = self.ops.search(obj, query::Nth::new(n)); - result.ops.sort_by(|a,b| self.ops.m.lamport_cmp(a.id,b.id)); - result.ops - .into_iter() - .map(|o| (o.value(), self.id_to_exid(o.id))) - .collect() - } + Prop::Seq(n) => self + .ops + .search(obj, query::Nth::new(n)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect(), }; Ok(result) } @@ -501,10 +498,9 @@ impl Automerge { Prop::Map(p) => { let prop = self.ops.m.props.lookup(&p); if let Some(p) = prop { - let mut result = self.ops - .search(obj, query::PropAt::new(p, clock)); - result.ops.sort_by(|a,b| self.ops.m.lamport_cmp(a.id,b.id)); - result.ops + self.ops + .search(obj, query::PropAt::new(p, clock)) + .ops .into_iter() .map(|o| (o.value(), self.id_to_exid(o.id))) .collect() @@ -512,14 +508,13 @@ impl Automerge { vec![] } } - Prop::Seq(n) => { - let mut result = self.ops.search(obj, query::NthAt::new(n, clock)); - result.ops.sort_by(|a,b| self.ops.m.lamport_cmp(a.id,b.id)); - result.ops - .into_iter() - .map(|o| (o.value(), self.id_to_exid(o.id))) - .collect() - } + Prop::Seq(n) => self + .ops + .search(obj, query::NthAt::new(n, clock)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect(), }; Ok(result) } diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index c9945945..137cd458 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -397,7 +397,7 @@ impl<'a> Iterator for ValueIterator<'a> { if len != self.val_raw.last_read { return None; } - Some(ScalarValue::Counter(val)) + Some(ScalarValue::counter(val)) } v if v % 16 == VALUE_TYPE_TIMESTAMP => { let len = v >> 4; @@ -569,7 +569,7 @@ impl ValEncoder { self.raw.extend(bytes); 
self.len.append_value(len << 4 | VALUE_TYPE_UTF8); } - ScalarValue::Counter(count) => { + ScalarValue::Counter(count, _, _) => { let len = count.encode(&mut self.raw).unwrap(); self.len.append_value(len << 4 | VALUE_TYPE_COUNTER); } @@ -613,7 +613,7 @@ impl ValEncoder { self.raw.extend(bytes); self.len.append_value(len << 4 | VALUE_TYPE_UTF8); } - ScalarValue::Counter(count) => { + ScalarValue::Counter(count, _, _) => { let len = count.encode(&mut self.raw).unwrap(); self.len.append_value(len << 4 | VALUE_TYPE_COUNTER); } diff --git a/automerge/src/exid.rs b/automerge/src/exid.rs index a12ed1cd..7d00e953 100644 --- a/automerge/src/exid.rs +++ b/automerge/src/exid.rs @@ -1,6 +1,6 @@ use crate::ActorId; +use std::cmp::{Ord, Ordering}; use std::fmt; -use std::cmp::{ Ord, Ordering }; use std::hash::{Hash, Hasher}; #[derive(Debug, Clone)] @@ -52,8 +52,8 @@ impl Ord for ExId { (ExId::Root, ExId::Root) => Ordering::Equal, (ExId::Root, _) => Ordering::Less, (_, ExId::Root) => Ordering::Greater, - (ExId::Id(c1, a1, _), ExId::Id(c2, a2, _)) if c1 == c2 => a2.cmp(&a1), - (ExId::Id(c1, _, _), ExId::Id(c2, _, _)) => c1.cmp(&c2), + (ExId::Id(c1, a1, _), ExId::Id(c2, a2, _)) if c1 == c2 => a2.cmp(a1), + (ExId::Id(c1, _, _), ExId::Id(c2, _, _)) => c1.cmp(c2), } } } diff --git a/automerge/src/legacy/serde_impls/op.rs b/automerge/src/legacy/serde_impls/op.rs index 9b549ad8..fddfd82d 100644 --- a/automerge/src/legacy/serde_impls/op.rs +++ b/automerge/src/legacy/serde_impls/op.rs @@ -204,7 +204,7 @@ impl<'de> Deserialize<'de> for Op { Some(ScalarValue::Int(n)) => Ok(OpType::Inc(n)), Some(ScalarValue::Uint(n)) => Ok(OpType::Inc(n as i64)), Some(ScalarValue::F64(n)) => Ok(OpType::Inc(n as i64)), - Some(ScalarValue::Counter(n)) => Ok(OpType::Inc(n)), + Some(ScalarValue::Counter(n, _, _)) => Ok(OpType::Inc(n)), Some(ScalarValue::Timestamp(n)) => Ok(OpType::Inc(n)), Some(ScalarValue::Bytes(s)) => { Err(Error::invalid_value(Unexpected::Bytes(&s), &"a number")) @@ -376,7 +376,7 @@ mod tests 
{ "pred": [] }), expected: Ok(Op { - action: OpType::Set(ScalarValue::Counter(123)), + action: OpType::Set(ScalarValue::Counter(123, 123, 0)), obj: ObjectId::Root, key: "somekey".into(), insert: false, diff --git a/automerge/src/legacy/utility_impls/scalar_value.rs b/automerge/src/legacy/utility_impls/scalar_value.rs index cc5bc48e..b509d7fc 100644 --- a/automerge/src/legacy/utility_impls/scalar_value.rs +++ b/automerge/src/legacy/utility_impls/scalar_value.rs @@ -48,7 +48,7 @@ impl fmt::Display for ScalarValue { ScalarValue::Int(i) => write!(f, "{}", i), ScalarValue::Uint(i) => write!(f, "{}", i), ScalarValue::F64(n) => write!(f, "{:.324}", n), - ScalarValue::Counter(c) => write!(f, "Counter: {}", c), + ScalarValue::Counter(_, c, _) => write!(f, "Counter: {}", c), ScalarValue::Timestamp(i) => write!(f, "Timestamp: {}", i), ScalarValue::Boolean(b) => write!(f, "{}", b), ScalarValue::Null => write!(f, "null"), diff --git a/automerge/src/query.rs b/automerge/src/query.rs index c062c964..3b550403 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -102,7 +102,7 @@ impl Index { assert!(new.key == old.key); - match (new.succ.is_empty(), old.succ.is_empty(), new.elemid()) { + match (new.visible(), old.visible(), new.elemid()) { (false, true, Some(elem)) => match self.visible.get(&elem).copied() { Some(n) if n == 1 => { self.len -= 1; @@ -128,7 +128,7 @@ impl Index { pub fn insert(&mut self, op: &Op) { self.ops.insert(op.id); - if op.succ.is_empty() { + if op.visible() { if let Some(elem) = op.elemid() { match self.visible.get(&elem).copied() { Some(n) => { @@ -145,7 +145,7 @@ impl Index { pub fn remove(&mut self, op: &Op) { self.ops.remove(&op.id); - if op.succ.is_empty() { + if op.visible() { if let Some(elem) = op.elemid() { match self.visible.get(&elem).copied() { Some(n) if n == 1 => { @@ -191,44 +191,6 @@ pub(crate) struct VisWindow { } impl VisWindow { - fn visible(&mut self, op: &Op, pos: usize) -> bool { - let mut visible = false; - match 
op.action { - OpType::Set(ScalarValue::Counter(val)) => { - self.counters.insert( - op.id, - CounterData { - pos, - val, - succ: op.succ.iter().cloned().collect(), - op: op.clone(), - }, - ); - if op.succ.is_empty() { - visible = true; - } - } - OpType::Inc(inc_val) => { - for id in &op.pred { - if let Some(mut entry) = self.counters.get_mut(id) { - entry.succ.remove(&op.id); - entry.val += inc_val; - entry.op.action = OpType::Set(ScalarValue::Counter(entry.val)); - if entry.succ.is_empty() { - visible = true; - } - } - } - } - _ => { - if op.succ.is_empty() { - visible = true; - } - } - }; - visible - } - fn visible_at(&mut self, op: &Op, pos: usize, clock: &Clock) -> bool { if !clock.covers(&op.id) { return false; @@ -236,7 +198,7 @@ impl VisWindow { let mut visible = false; match op.action { - OpType::Set(ScalarValue::Counter(val)) => { + OpType::Set(ScalarValue::Counter(val, _, _)) => { self.counters.insert( op.id, CounterData { @@ -256,7 +218,7 @@ impl VisWindow { if let Some(mut entry) = self.counters.get_mut(id) { entry.succ.remove(&op.id); entry.val += inc_val; - entry.op.action = OpType::Set(ScalarValue::Counter(entry.val)); + entry.op.action = OpType::Set(ScalarValue::counter(entry.val)); if !entry.succ.iter().any(|i| clock.covers(i)) { visible = true; } @@ -290,7 +252,7 @@ impl VisWindow { pub(crate) fn is_visible(op: &Op, pos: usize, counters: &mut HashMap) -> bool { let mut visible = false; match op.action { - OpType::Set(ScalarValue::Counter(val)) => { + OpType::Set(ScalarValue::Counter(val, _, _)) => { counters.insert( op.id, CounterData { @@ -309,7 +271,7 @@ pub(crate) fn is_visible(op: &Op, pos: usize, counters: &mut HashMap { pub pos: usize, last_seen: Option, last_insert: Option, - window: VisWindow, } impl InsertNth { @@ -22,7 +21,6 @@ impl InsertNth { pos: 0, last_seen: None, last_insert: None, - window: Default::default(), } } @@ -71,7 +69,7 @@ impl TreeQuery for InsertNth { self.last_seen = None; self.last_insert = element.elemid(); } - if 
self.last_seen.is_none() && self.window.visible(element, self.pos) { + if self.last_seen.is_none() && element.visible() { self.seen += 1; self.last_seen = element.elemid() } diff --git a/automerge/src/query/keys.rs b/automerge/src/query/keys.rs index e6f6486f..57b90526 100644 --- a/automerge/src/query/keys.rs +++ b/automerge/src/query/keys.rs @@ -1,20 +1,16 @@ use crate::op_tree::OpTreeNode; -use crate::query::{QueryResult, TreeQuery, VisWindow}; +use crate::query::{QueryResult, TreeQuery}; use crate::types::Key; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct Keys { pub keys: Vec, - window: VisWindow, } impl Keys { pub fn new() -> Self { - Keys { - keys: vec![], - window: Default::default(), - } + Keys { keys: vec![] } } } @@ -23,8 +19,7 @@ impl TreeQuery for Keys { let mut last = None; for i in 0..child.len() { let op = child.get(i).unwrap(); - let visible = self.window.visible(op, i); - if Some(op.key) != last && visible { + if Some(op.key) != last && op.visible() { self.keys.push(op.key); last = Some(op.key); } diff --git a/automerge/src/query/list_vals_at.rs b/automerge/src/query/list_vals_at.rs index 5d720bf6..c447f314 100644 --- a/automerge/src/query/list_vals_at.rs +++ b/automerge/src/query/list_vals_at.rs @@ -1,4 +1,4 @@ -use crate::query::{QueryResult, TreeQuery, VisWindow}; +use crate::query::{OpSetMetadata, QueryResult, TreeQuery, VisWindow}; use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; @@ -24,14 +24,23 @@ impl ListValsAt { } impl TreeQuery for ListValsAt { - fn query_element(&mut self, op: &Op) -> QueryResult { + fn query_element_with_metadata(&mut self, op: &Op, m: &OpSetMetadata) -> QueryResult { if op.insert { self.last_elem = None; } if self.last_elem.is_none() && self.window.visible_at(op, self.pos, &self.clock) { for (_, vop) in self.window.seen_op(op, self.pos) { self.last_elem = vop.elemid(); - self.ops.push(vop); + if vop.is_counter() { + // this could be out of order - because of inc's - insert in the 
right place + let pos = self + .ops + .binary_search_by(|probe| m.lamport_cmp(probe.id, op.id)) + .unwrap_err(); + self.ops.insert(pos, vop); + } else { + self.ops.push(vop); + } } } self.pos += 1; diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index 6000b71a..8d692a76 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -1,6 +1,6 @@ use crate::error::AutomergeError; use crate::op_tree::OpTreeNode; -use crate::query::{QueryResult, TreeQuery, VisWindow}; +use crate::query::{QueryResult, TreeQuery}; use crate::types::{ElemId, Key, Op}; use std::fmt::Debug; @@ -10,7 +10,6 @@ pub(crate) struct Nth { seen: usize, last_seen: Option, last_elem: Option, - window: VisWindow, pub ops: Vec, pub ops_pos: Vec, pub pos: usize, @@ -26,7 +25,6 @@ impl Nth { ops_pos: vec![], pos: 0, last_elem: None, - window: Default::default(), } } @@ -71,16 +69,14 @@ impl TreeQuery for Nth { self.last_elem = element.elemid(); self.last_seen = None } - let visible = self.window.visible(element, self.pos); + let visible = element.visible(); if visible && self.last_seen.is_none() { self.seen += 1; self.last_seen = element.elemid() } if self.seen == self.target + 1 && visible { - for (vpos, vop) in self.window.seen_op(element, self.pos) { - self.ops.push(vop); - self.ops_pos.push(vpos); - } + self.ops.push(element.clone()); + self.ops_pos.push(self.pos); } self.pos += 1; QueryResult::Next diff --git a/automerge/src/query/nth_at.rs b/automerge/src/query/nth_at.rs index 7a867cad..3d90e590 100644 --- a/automerge/src/query/nth_at.rs +++ b/automerge/src/query/nth_at.rs @@ -47,8 +47,19 @@ impl TreeQuery for NthAt { } if self.seen == self.target + 1 && visible { for (vpos, vop) in self.window.seen_op(element, self.pos) { - self.ops.push(vop); - self.ops_pos.push(vpos); + if vop.is_counter() { + // this could be out of order because of inc's - we can find the right place + // since pos will always be in order + let pos = self + .ops_pos + .binary_search_by(|probe| 
probe.cmp(&vpos)) + .unwrap_err(); + self.ops.insert(pos, vop); + self.ops_pos.insert(pos, vpos); + } else { + self.ops.push(vop); + self.ops_pos.push(vpos); + } } } self.pos += 1; diff --git a/automerge/src/query/prop_at.rs b/automerge/src/query/prop_at.rs index a5c02e34..11cbf752 100644 --- a/automerge/src/query/prop_at.rs +++ b/automerge/src/query/prop_at.rs @@ -40,8 +40,19 @@ impl TreeQuery for PropAt { } if window.visible_at(op, pos, &self.clock) { for (vpos, vop) in window.seen_op(op, pos) { - self.ops.push(vop); - self.ops_pos.push(vpos); + if vop.is_counter() { + // this could be out of order because of inc's - we can find the right place + // since pos will always be in order + let pos = self + .ops_pos + .binary_search_by(|probe| probe.cmp(&vpos)) + .unwrap_err(); + self.ops.insert(pos, vop); + self.ops_pos.insert(pos, vpos); + } else { + self.ops.push(vop); + self.ops_pos.push(vpos); + } } } self.pos += 1; diff --git a/automerge/src/types.rs b/automerge/src/types.rs index b2bf4158..b7452ffe 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -335,10 +335,56 @@ pub(crate) struct Op { } impl Op { + pub(crate) fn add_succ(&mut self, op: &Op) { + self.succ.push(op.id); + if let OpType::Set(ScalarValue::Counter(_orig, total, incs)) = &mut self.action { + if let OpType::Inc(n) = &op.action { + *total += *n; + *incs += 1; + } + } + } + + pub(crate) fn remove_succ(&mut self, op: &Op) { + self.succ.retain(|id| id != &op.id); + if let OpType::Set(ScalarValue::Counter(_orig, total, incs)) = &mut self.action { + if let OpType::Inc(n) = &op.action { + *total -= *n; + *incs -= 1; + } + } + } + + pub fn visible(&self) -> bool { + if self.is_inc() { + false + } else if self.is_counter() { + self.succ.len() <= self.incs() + } else { + self.succ.is_empty() + } + } + + pub fn incs(&self) -> usize { + if let OpType::Set(ScalarValue::Counter(_, _, incs)) = &self.action { + *incs + } else { + 0 + } + } + pub fn is_del(&self) -> bool { matches!(&self.action, 
OpType::Del) } + pub fn is_inc(&self) -> bool { + matches!(&self.action, OpType::Inc(_)) + } + + pub fn is_counter(&self) -> bool { + matches!(&self.action, OpType::Set(ScalarValue::Counter(_, _, _))) + } + pub fn is_noop(&self, action: &OpType) -> bool { matches!((&self.action, action), (OpType::Set(n), OpType::Set(m)) if n == m) } diff --git a/automerge/src/value.rs b/automerge/src/value.rs index e5af0cb6..326a3694 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -47,7 +47,7 @@ impl Value { } pub fn counter(n: i64) -> Value { - Value::Scalar(ScalarValue::Counter(n)) + Value::Scalar(ScalarValue::counter(n)) } pub fn timestamp(n: i64) -> Value { @@ -158,7 +158,7 @@ pub(crate) enum DataType { Undefined, } -#[derive(Serialize, PartialEq, Debug, Clone)] +#[derive(Serialize, Debug, Clone)] #[serde(untagged)] pub enum ScalarValue { Bytes(Vec), @@ -166,21 +166,39 @@ pub enum ScalarValue { Int(i64), Uint(u64), F64(f64), - Counter(i64), + Counter(i64, i64, usize), Timestamp(i64), Boolean(bool), Null, } +// we need to define manually now b/c of Counter +impl PartialEq for ScalarValue { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Str(a), Self::Str(b)) => a == b, + (Self::Int(a), Self::Int(b)) => a == b, + (Self::Uint(a), Self::Uint(b)) => a == b, + (Self::Counter(_, a, _), Self::Counter(_, b, _)) => a == b, + (Self::Timestamp(a), Self::Timestamp(b)) => a == b, + (Self::Boolean(a), Self::Boolean(b)) => a == b, + (Self::Null, Self::Null) => true, + (Self::F64(a), Self::F64(b)) => a.eq(b), + (Self::Bytes(a), Self::Bytes(b)) => a.eq(b), + _ => false, + } + } +} + impl ScalarValue { pub(crate) fn as_datatype( &self, datatype: DataType, ) -> Result { match (datatype, self) { - (DataType::Counter, ScalarValue::Int(i)) => Ok(ScalarValue::Counter(*i)), + (DataType::Counter, ScalarValue::Int(i)) => Ok(ScalarValue::Counter(*i, *i, 0)), (DataType::Counter, ScalarValue::Uint(u)) => match i64::try_from(*u) { - Ok(i) => 
Ok(ScalarValue::Counter(i)), + Ok(i) => Ok(ScalarValue::Counter(i, i, 0)), Err(_) => Err(error::InvalidScalarValue { raw_value: self.clone(), expected: "an integer".to_string(), @@ -266,7 +284,7 @@ impl ScalarValue { ScalarValue::Int(n) => Some(*n), ScalarValue::Uint(n) => Some(*n as i64), ScalarValue::F64(n) => Some(*n as i64), - ScalarValue::Counter(n) => Some(*n), + ScalarValue::Counter(_, n, _) => Some(*n), ScalarValue::Timestamp(n) => Some(*n), _ => None, } @@ -277,7 +295,7 @@ impl ScalarValue { ScalarValue::Int(n) => Some(*n as u64), ScalarValue::Uint(n) => Some(*n), ScalarValue::F64(n) => Some(*n as u64), - ScalarValue::Counter(n) => Some(*n as u64), + ScalarValue::Counter(_, n, _) => Some(*n as u64), ScalarValue::Timestamp(n) => Some(*n as u64), _ => None, } @@ -288,9 +306,13 @@ impl ScalarValue { ScalarValue::Int(n) => Some(*n as f64), ScalarValue::Uint(n) => Some(*n as f64), ScalarValue::F64(n) => Some(*n), - ScalarValue::Counter(n) => Some(*n as f64), + ScalarValue::Counter(_, n, _) => Some(*n as f64), ScalarValue::Timestamp(n) => Some(*n as f64), _ => None, } } + + pub fn counter(n: i64) -> ScalarValue { + ScalarValue::Counter(n, n, 0) + } } diff --git a/automerge/src/visualisation.rs b/automerge/src/visualisation.rs index 81f52470..75a2c9f6 100644 --- a/automerge/src/visualisation.rs +++ b/automerge/src/visualisation.rs @@ -131,7 +131,7 @@ impl<'a, const B: usize> dot::Labeller<'a, &'a Node<'a, B>, Edge> for GraphVisua } fn node_id(&'a self, n: &&Node<'a, B>) -> dot::Id<'a> { - dot::Id::new(format!("node_{}", n.id.0.to_string())).unwrap() + dot::Id::new(format!("node_{}", n.id.0)).unwrap() } fn node_shape(&'a self, node: &&'a Node<'a, B>) -> Option> { diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index ec4beb0f..cae37801 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -197,7 +197,7 @@ macro_rules! 
list { } pub fn mk_counter(value: i64) -> automerge::ScalarValue { - automerge::ScalarValue::Counter(value) + automerge::ScalarValue::counter(value) } #[derive(Eq, Hash, PartialEq, Debug)] @@ -242,7 +242,7 @@ impl From for OrdScalarValue { automerge::ScalarValue::Int(v) => OrdScalarValue::Int(v), automerge::ScalarValue::Uint(v) => OrdScalarValue::Uint(v), automerge::ScalarValue::F64(v) => OrdScalarValue::F64(decorum::Total::from(v)), - automerge::ScalarValue::Counter(v) => OrdScalarValue::Counter(v), + automerge::ScalarValue::Counter(v, _, _) => OrdScalarValue::Counter(v), automerge::ScalarValue::Timestamp(v) => OrdScalarValue::Timestamp(v), automerge::ScalarValue::Boolean(v) => OrdScalarValue::Boolean(v), automerge::ScalarValue::Null => OrdScalarValue::Null, @@ -258,7 +258,7 @@ impl From<&OrdScalarValue> for automerge::ScalarValue { OrdScalarValue::Int(v) => automerge::ScalarValue::Int(*v), OrdScalarValue::Uint(v) => automerge::ScalarValue::Uint(*v), OrdScalarValue::F64(v) => automerge::ScalarValue::F64(v.into_inner()), - OrdScalarValue::Counter(v) => automerge::ScalarValue::Counter(*v), + OrdScalarValue::Counter(v) => automerge::ScalarValue::counter(*v), OrdScalarValue::Timestamp(v) => automerge::ScalarValue::Timestamp(*v), OrdScalarValue::Boolean(v) => automerge::ScalarValue::Boolean(*v), OrdScalarValue::Null => automerge::ScalarValue::Null, diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 51228781..5b101de8 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,4 +1,4 @@ -use automerge::{ Automerge, ActorId, ROOT, Value }; +use automerge::{ActorId, Automerge, Value, ROOT}; mod helpers; #[allow(unused_imports)] @@ -949,14 +949,21 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(doc1.length(&list), 3); - println!("-------------"); - doc1.del(&list,2)?; + doc1.del(&list, 2)?; - //assert_eq!(doc1.length(&list), 2); + assert_eq!(doc1.length(&list), 2); - //let doc2 = Automerge::load(&doc1.save()?); 
+ let doc4 = Automerge::load(&doc1.save()?)?; - //assert_eq!(doc1.length(&list), 2); + assert_eq!(doc4.length(&list), 2); + + doc1.del(&list, 1)?; + + assert_eq!(doc1.length(&list), 1); + + let doc5 = Automerge::load(&doc1.save()?)?; + + assert_eq!(doc5.length(&list), 1); Ok(()) } From b30a2b9cc1a8fc1fd3bf27d27945b0f7cd8dcd94 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 13 Jan 2022 18:23:39 -0500 Subject: [PATCH 026/730] give Counter its own type --- automerge-wasm/src/value.rs | 4 +- automerge/src/columnar.rs | 8 +- automerge/src/legacy/serde_impls/op.rs | 4 +- .../src/legacy/utility_impls/scalar_value.rs | 4 +- automerge/src/query.rs | 62 +---------- automerge/src/query/list_vals.rs | 11 +- automerge/src/query/prop.rs | 11 +- automerge/src/types.rs | 30 ++++-- automerge/src/value.rs | 101 ++++++++++++++---- automerge/tests/helpers/mod.rs | 2 +- 10 files changed, 120 insertions(+), 117 deletions(-) diff --git a/automerge-wasm/src/value.rs b/automerge-wasm/src/value.rs index 1799fde8..a2388436 100644 --- a/automerge-wasm/src/value.rs +++ b/automerge-wasm/src/value.rs @@ -13,7 +13,7 @@ impl From for JsValue { am::ScalarValue::Int(v) => (*v as f64).into(), am::ScalarValue::Uint(v) => (*v as f64).into(), am::ScalarValue::F64(v) => (*v).into(), - am::ScalarValue::Counter(_, v, _) => (*v as f64).into(), + am::ScalarValue::Counter(v) => (f64::from(v)).into(), am::ScalarValue::Timestamp(v) => js_sys::Date::new(&(*v as f64).into()).into(), am::ScalarValue::Boolean(v) => (*v).into(), am::ScalarValue::Null => JsValue::null(), @@ -28,7 +28,7 @@ pub(crate) fn datatype(s: &am::ScalarValue) -> String { am::ScalarValue::Int(_) => "int".into(), am::ScalarValue::Uint(_) => "uint".into(), am::ScalarValue::F64(_) => "f64".into(), - am::ScalarValue::Counter(_, _, _) => "counter".into(), + am::ScalarValue::Counter(_) => "counter".into(), am::ScalarValue::Timestamp(_) => "timestamp".into(), am::ScalarValue::Boolean(_) => "boolean".into(), am::ScalarValue::Null => "null".into(), 
diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index 137cd458..ffb38ea8 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -569,8 +569,8 @@ impl ValEncoder { self.raw.extend(bytes); self.len.append_value(len << 4 | VALUE_TYPE_UTF8); } - ScalarValue::Counter(count, _, _) => { - let len = count.encode(&mut self.raw).unwrap(); + ScalarValue::Counter(count) => { + let len = count.start.encode(&mut self.raw).unwrap(); self.len.append_value(len << 4 | VALUE_TYPE_COUNTER); } ScalarValue::Timestamp(time) => { @@ -613,8 +613,8 @@ impl ValEncoder { self.raw.extend(bytes); self.len.append_value(len << 4 | VALUE_TYPE_UTF8); } - ScalarValue::Counter(count, _, _) => { - let len = count.encode(&mut self.raw).unwrap(); + ScalarValue::Counter(c) => { + let len = c.start.encode(&mut self.raw).unwrap(); self.len.append_value(len << 4 | VALUE_TYPE_COUNTER); } ScalarValue::Timestamp(time) => { diff --git a/automerge/src/legacy/serde_impls/op.rs b/automerge/src/legacy/serde_impls/op.rs index fddfd82d..5f0db62d 100644 --- a/automerge/src/legacy/serde_impls/op.rs +++ b/automerge/src/legacy/serde_impls/op.rs @@ -204,7 +204,7 @@ impl<'de> Deserialize<'de> for Op { Some(ScalarValue::Int(n)) => Ok(OpType::Inc(n)), Some(ScalarValue::Uint(n)) => Ok(OpType::Inc(n as i64)), Some(ScalarValue::F64(n)) => Ok(OpType::Inc(n as i64)), - Some(ScalarValue::Counter(n, _, _)) => Ok(OpType::Inc(n)), + Some(ScalarValue::Counter(n)) => Ok(OpType::Inc(n.into())), Some(ScalarValue::Timestamp(n)) => Ok(OpType::Inc(n)), Some(ScalarValue::Bytes(s)) => { Err(Error::invalid_value(Unexpected::Bytes(&s), &"a number")) @@ -376,7 +376,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Set(ScalarValue::Counter(123, 123, 0)), + action: OpType::Set(ScalarValue::Counter(123.into())), obj: ObjectId::Root, key: "somekey".into(), insert: false, diff --git a/automerge/src/legacy/utility_impls/scalar_value.rs b/automerge/src/legacy/utility_impls/scalar_value.rs index 
b509d7fc..ef0a3305 100644 --- a/automerge/src/legacy/utility_impls/scalar_value.rs +++ b/automerge/src/legacy/utility_impls/scalar_value.rs @@ -2,7 +2,7 @@ use std::fmt; use smol_str::SmolStr; -use crate::legacy::ScalarValue; +use crate::value::ScalarValue; impl From<&str> for ScalarValue { fn from(s: &str) -> Self { @@ -48,7 +48,7 @@ impl fmt::Display for ScalarValue { ScalarValue::Int(i) => write!(f, "{}", i), ScalarValue::Uint(i) => write!(f, "{}", i), ScalarValue::F64(n) => write!(f, "{:.324}", n), - ScalarValue::Counter(_, c, _) => write!(f, "Counter: {}", c), + ScalarValue::Counter(c) => write!(f, "Counter: {}", c), ScalarValue::Timestamp(i) => write!(f, "Timestamp: {}", i), ScalarValue::Boolean(b) => write!(f, "{}", b), ScalarValue::Null => write!(f, "null"), diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 3b550403..7911e1bb 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -1,5 +1,5 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::types::{Clock, ElemId, Op, OpId, OpType, ScalarValue}; +use crate::types::{Clock, Counter, ElemId, Op, OpId, OpType, ScalarValue}; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::{HashMap, HashSet}; @@ -198,12 +198,12 @@ impl VisWindow { let mut visible = false; match op.action { - OpType::Set(ScalarValue::Counter(val, _, _)) => { + OpType::Set(ScalarValue::Counter(Counter { start, .. 
})) => { self.counters.insert( op.id, CounterData { pos, - val, + val: start, succ: op.succ.iter().cloned().collect(), op: op.clone(), }, @@ -249,62 +249,6 @@ impl VisWindow { } } -pub(crate) fn is_visible(op: &Op, pos: usize, counters: &mut HashMap) -> bool { - let mut visible = false; - match op.action { - OpType::Set(ScalarValue::Counter(val, _, _)) => { - counters.insert( - op.id, - CounterData { - pos, - val, - succ: op.succ.iter().cloned().collect(), - op: op.clone(), - }, - ); - if op.succ.is_empty() { - visible = true; - } - } - OpType::Inc(inc_val) => { - for id in &op.pred { - if let Some(mut entry) = counters.get_mut(id) { - entry.succ.remove(&op.id); - entry.val += inc_val; - entry.op.action = OpType::Set(ScalarValue::counter(entry.val)); - if entry.succ.is_empty() { - visible = true; - } - } - } - } - _ => { - if op.succ.is_empty() { - visible = true; - } - } - }; - visible -} - -pub(crate) fn visible_op( - op: &Op, - pos: usize, - counters: &HashMap, -) -> Vec<(usize, Op)> { - let mut result = vec![]; - for pred in &op.pred { - if let Some(entry) = counters.get(pred) { - result.push((entry.pos, entry.op.clone())); - } - } - if result.is_empty() { - vec![(pos, op.clone())] - } else { - result - } -} - pub(crate) fn binary_search_by(node: &OpTreeNode, f: F) -> usize where F: Fn(&Op) -> Ordering, diff --git a/automerge/src/query/list_vals.rs b/automerge/src/query/list_vals.rs index 0d8958fd..ae5b7315 100644 --- a/automerge/src/query/list_vals.rs +++ b/automerge/src/query/list_vals.rs @@ -1,5 +1,5 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::query::{binary_search_by, is_visible, visible_op, QueryResult, TreeQuery}; +use crate::query::{binary_search_by, QueryResult, TreeQuery}; use crate::types::{ElemId, ObjId, Op}; use std::fmt::Debug; @@ -27,7 +27,6 @@ impl TreeQuery for ListVals { m: &OpSetMetadata, ) -> QueryResult { let start = binary_search_by(child, |op| m.lamport_cmp(op.obj.0, self.obj.0)); - let mut counters = 
Default::default(); for pos in start..child.len() { let op = child.get(pos).unwrap(); if op.obj != self.obj { @@ -36,11 +35,9 @@ impl TreeQuery for ListVals { if op.insert { self.last_elem = None; } - if self.last_elem.is_none() && is_visible(op, pos, &mut counters) { - for (_, vop) in visible_op(op, pos, &counters) { - self.last_elem = vop.elemid(); - self.ops.push(vop); - } + if self.last_elem.is_none() && op.visible() { + self.last_elem = op.elemid(); + self.ops.push(op.clone()); } } QueryResult::Finish diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs index 11d2b0cd..3993c97d 100644 --- a/automerge/src/query/prop.rs +++ b/automerge/src/query/prop.rs @@ -1,5 +1,5 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::query::{binary_search_by, is_visible, visible_op, QueryResult, TreeQuery}; +use crate::query::{binary_search_by, QueryResult, TreeQuery}; use crate::types::{Key, ObjId, Op}; use std::fmt::Debug; @@ -34,18 +34,15 @@ impl TreeQuery for Prop { m.lamport_cmp(op.obj.0, self.obj.0) .then_with(|| m.key_cmp(&op.key, &self.key)) }); - let mut counters = Default::default(); self.pos = start; for pos in start..child.len() { let op = child.get(pos).unwrap(); if !(op.obj == self.obj && op.key == self.key) { break; } - if is_visible(op, pos, &mut counters) { - for (vpos, vop) in visible_op(op, pos, &counters) { - self.ops.push(vop); - self.ops_pos.push(vpos); - } + if op.visible() { + self.ops.push(op.clone()); + self.ops_pos.push(pos); } self.pos += 1; } diff --git a/automerge/src/types.rs b/automerge/src/types.rs index b7452ffe..d00334f8 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -9,7 +9,7 @@ use std::str::FromStr; use tinyvec::{ArrayVec, TinyVec}; pub(crate) use crate::clock::Clock; -pub(crate) use crate::value::{ScalarValue, Value}; +pub(crate) use crate::value::{Counter, ScalarValue, Value}; pub(crate) const HEAD: ElemId = ElemId(OpId(0, 0)); pub(crate) const ROOT: OpId = OpId(0, 0); @@ -337,20 +337,30 
@@ pub(crate) struct Op { impl Op { pub(crate) fn add_succ(&mut self, op: &Op) { self.succ.push(op.id); - if let OpType::Set(ScalarValue::Counter(_orig, total, incs)) = &mut self.action { + if let OpType::Set(ScalarValue::Counter(Counter { + current, + increments, + .. + })) = &mut self.action + { if let OpType::Inc(n) = &op.action { - *total += *n; - *incs += 1; + *current += *n; + *increments += 1; } } } pub(crate) fn remove_succ(&mut self, op: &Op) { self.succ.retain(|id| id != &op.id); - if let OpType::Set(ScalarValue::Counter(_orig, total, incs)) = &mut self.action { + if let OpType::Set(ScalarValue::Counter(Counter { + current, + increments, + .. + })) = &mut self.action + { if let OpType::Inc(n) = &op.action { - *total -= *n; - *incs -= 1; + *current -= *n; + *increments -= 1; } } } @@ -366,8 +376,8 @@ impl Op { } pub fn incs(&self) -> usize { - if let OpType::Set(ScalarValue::Counter(_, _, incs)) = &self.action { - *incs + if let OpType::Set(ScalarValue::Counter(Counter { increments, .. 
})) = &self.action { + *increments } else { 0 } @@ -382,7 +392,7 @@ impl Op { } pub fn is_counter(&self) -> bool { - matches!(&self.action, OpType::Set(ScalarValue::Counter(_, _, _))) + matches!(&self.action, OpType::Set(ScalarValue::Counter(_))) } pub fn is_noop(&self, action: &OpType) -> bool { diff --git a/automerge/src/value.rs b/automerge/src/value.rs index 326a3694..5f54d825 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -1,8 +1,9 @@ use crate::error; use crate::types::{ObjType, Op, OpId, OpType}; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Serialize, Serializer}; use smol_str::SmolStr; use std::convert::TryFrom; +use std::fmt; #[derive(Debug, Clone, PartialEq)] pub enum Value { @@ -158,7 +159,73 @@ pub(crate) enum DataType { Undefined, } -#[derive(Serialize, Debug, Clone)] +#[derive(Debug, Clone)] +pub struct Counter { + pub(crate) start: i64, + pub(crate) current: i64, + pub(crate) increments: usize, +} + +impl Serialize for Counter { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_i64(self.start) + } +} + +impl fmt::Display for Counter { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", &self.current) + } +} + +impl From for Counter { + fn from(n: i64) -> Self { + Counter { + start: n, + current: n, + increments: 0, + } + } +} + +impl From<&i64> for Counter { + fn from(n: &i64) -> Self { + Counter { + start: *n, + current: *n, + increments: 0, + } + } +} + +impl From<&Counter> for i64 { + fn from(val: &Counter) -> Self { + val.current + } +} + +impl From for i64 { + fn from(val: Counter) -> Self { + val.current + } +} + +impl From<&Counter> for u64 { + fn from(val: &Counter) -> Self { + val.current as u64 + } +} + +impl From<&Counter> for f64 { + fn from(val: &Counter) -> Self { + val.current as f64 + } +} + +#[derive(Serialize, PartialEq, Debug, Clone)] #[serde(untagged)] pub enum ScalarValue { Bytes(Vec), @@ -166,27 +233,15 @@ pub enum 
ScalarValue { Int(i64), Uint(u64), F64(f64), - Counter(i64, i64, usize), + Counter(Counter), Timestamp(i64), Boolean(bool), Null, } -// we need to define manually now b/c of Counter -impl PartialEq for ScalarValue { +impl PartialEq for Counter { fn eq(&self, other: &Self) -> bool { - match (self, other) { - (Self::Str(a), Self::Str(b)) => a == b, - (Self::Int(a), Self::Int(b)) => a == b, - (Self::Uint(a), Self::Uint(b)) => a == b, - (Self::Counter(_, a, _), Self::Counter(_, b, _)) => a == b, - (Self::Timestamp(a), Self::Timestamp(b)) => a == b, - (Self::Boolean(a), Self::Boolean(b)) => a == b, - (Self::Null, Self::Null) => true, - (Self::F64(a), Self::F64(b)) => a.eq(b), - (Self::Bytes(a), Self::Bytes(b)) => a.eq(b), - _ => false, - } + self.current == other.current } } @@ -196,9 +251,9 @@ impl ScalarValue { datatype: DataType, ) -> Result { match (datatype, self) { - (DataType::Counter, ScalarValue::Int(i)) => Ok(ScalarValue::Counter(*i, *i, 0)), + (DataType::Counter, ScalarValue::Int(i)) => Ok(ScalarValue::Counter(i.into())), (DataType::Counter, ScalarValue::Uint(u)) => match i64::try_from(*u) { - Ok(i) => Ok(ScalarValue::Counter(i, i, 0)), + Ok(i) => Ok(ScalarValue::Counter(i.into())), Err(_) => Err(error::InvalidScalarValue { raw_value: self.clone(), expected: "an integer".to_string(), @@ -284,7 +339,7 @@ impl ScalarValue { ScalarValue::Int(n) => Some(*n), ScalarValue::Uint(n) => Some(*n as i64), ScalarValue::F64(n) => Some(*n as i64), - ScalarValue::Counter(_, n, _) => Some(*n), + ScalarValue::Counter(n) => Some(n.into()), ScalarValue::Timestamp(n) => Some(*n), _ => None, } @@ -295,7 +350,7 @@ impl ScalarValue { ScalarValue::Int(n) => Some(*n as u64), ScalarValue::Uint(n) => Some(*n), ScalarValue::F64(n) => Some(*n as u64), - ScalarValue::Counter(_, n, _) => Some(*n as u64), + ScalarValue::Counter(n) => Some(n.into()), ScalarValue::Timestamp(n) => Some(*n as u64), _ => None, } @@ -306,13 +361,13 @@ impl ScalarValue { ScalarValue::Int(n) => Some(*n as f64), 
ScalarValue::Uint(n) => Some(*n as f64), ScalarValue::F64(n) => Some(*n), - ScalarValue::Counter(_, n, _) => Some(*n as f64), + ScalarValue::Counter(n) => Some(n.into()), ScalarValue::Timestamp(n) => Some(*n as f64), _ => None, } } pub fn counter(n: i64) -> ScalarValue { - ScalarValue::Counter(n, n, 0) + ScalarValue::Counter(n.into()) } } diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index cae37801..7d157953 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -242,7 +242,7 @@ impl From for OrdScalarValue { automerge::ScalarValue::Int(v) => OrdScalarValue::Int(v), automerge::ScalarValue::Uint(v) => OrdScalarValue::Uint(v), automerge::ScalarValue::F64(v) => OrdScalarValue::F64(decorum::Total::from(v)), - automerge::ScalarValue::Counter(v, _, _) => OrdScalarValue::Counter(v), + automerge::ScalarValue::Counter(c) => OrdScalarValue::Counter(c.into()), automerge::ScalarValue::Timestamp(v) => OrdScalarValue::Timestamp(v), automerge::ScalarValue::Boolean(v) => OrdScalarValue::Boolean(v), automerge::ScalarValue::Null => OrdScalarValue::Null, From acbf394290ce77cfef90645c08147161322310fb Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 13 Jan 2022 18:30:34 -0500 Subject: [PATCH 027/730] cleanup some dead code --- automerge/src/automerge.rs | 6 +++--- automerge/src/query/list_vals.rs | 21 ++++++--------------- automerge/src/query/prop.rs | 13 ++++--------- automerge/src/query/seek_op.rs | 8 ++------ 4 files changed, 15 insertions(+), 33 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index d06aa59a..478ca4de 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -411,7 +411,7 @@ impl Automerge { pub fn text(&self, obj: &ExId) -> Result { let obj = self.exid_to_obj(obj)?; - let query = self.ops.search(obj, query::ListVals::new(obj)); + let query = self.ops.search(obj, query::ListVals::new()); let mut buffer = String::new(); for q in &query.ops { if let 
OpType::Set(ScalarValue::Str(s)) = &q.action { @@ -465,7 +465,7 @@ impl Automerge { let prop = self.ops.m.props.lookup(&p); if let Some(p) = prop { self.ops - .search(obj, query::Prop::new(obj, p)) + .search(obj, query::Prop::new(p)) .ops .into_iter() .map(|o| (o.value(), self.id_to_exid(o.id))) @@ -584,7 +584,7 @@ impl Automerge { let id = self.next_id(); let prop = self.ops.m.props.cache(prop); - let query = self.ops.search(obj, query::Prop::new(obj, prop)); + let query = self.ops.search(obj, query::Prop::new(prop)); if query.ops.len() == 1 && query.ops[0].is_noop(&action) { return Ok(None); diff --git a/automerge/src/query/list_vals.rs b/automerge/src/query/list_vals.rs index ae5b7315..6e433a77 100644 --- a/automerge/src/query/list_vals.rs +++ b/automerge/src/query/list_vals.rs @@ -1,19 +1,17 @@ -use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::types::{ElemId, ObjId, Op}; +use crate::op_tree::OpTreeNode; +use crate::query::{QueryResult, TreeQuery}; +use crate::types::{ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct ListVals { - obj: ObjId, last_elem: Option, pub ops: Vec, } impl ListVals { - pub fn new(obj: ObjId) -> Self { + pub fn new() -> Self { ListVals { - obj, last_elem: None, ops: vec![], } @@ -21,17 +19,10 @@ impl ListVals { } impl TreeQuery for ListVals { - fn query_node_with_metadata( - &mut self, - child: &OpTreeNode, - m: &OpSetMetadata, - ) -> QueryResult { - let start = binary_search_by(child, |op| m.lamport_cmp(op.obj.0, self.obj.0)); + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + let start = 0; for pos in start..child.len() { let op = child.get(pos).unwrap(); - if op.obj != self.obj { - break; - } if op.insert { self.last_elem = None; } diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs index 3993c97d..6c97cfcd 100644 --- a/automerge/src/query/prop.rs +++ b/automerge/src/query/prop.rs @@ -1,11 
+1,10 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::types::{Key, ObjId, Op}; +use crate::types::{Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct Prop { - obj: ObjId, key: Key, pub ops: Vec, pub ops_pos: Vec, @@ -13,9 +12,8 @@ pub(crate) struct Prop { } impl Prop { - pub fn new(obj: ObjId, prop: usize) -> Self { + pub fn new(prop: usize) -> Self { Prop { - obj, key: Key::Map(prop), ops: vec![], ops_pos: vec![], @@ -30,14 +28,11 @@ impl TreeQuery for Prop { child: &OpTreeNode, m: &OpSetMetadata, ) -> QueryResult { - let start = binary_search_by(child, |op| { - m.lamport_cmp(op.obj.0, self.obj.0) - .then_with(|| m.key_cmp(&op.key, &self.key)) - }); + let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); self.pos = start; for pos in start..child.len() { let op = child.get(pos).unwrap(); - if !(op.obj == self.obj && op.key == self.key) { + if op.key != self.key { break; } if op.visible() { diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs index c30a15f5..9aba4b46 100644 --- a/automerge/src/query/seek_op.rs +++ b/automerge/src/query/seek_op.rs @@ -22,10 +22,6 @@ impl SeekOp { } } - fn different_obj(&self, op: &Op) -> bool { - op.obj != self.op.obj - } - fn lesser_insert(&self, op: &Op, m: &OpSetMetadata) -> bool { op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less } @@ -112,13 +108,13 @@ impl TreeQuery for SeekOp { self.succ.push(self.pos); } if self.op.insert { - if self.different_obj(e) || self.lesser_insert(e, m) { + if self.lesser_insert(e, m) { QueryResult::Finish } else { self.pos += 1; QueryResult::Next } - } else if e.insert || self.different_obj(e) || self.greater_opid(e, m) { + } else if e.insert || self.greater_opid(e, m) { QueryResult::Finish } else { self.pos += 1; From 8b2f0238f3c203183a6561fd9374f0d382fde1eb Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 13 Jan 2022 19:09:33 
-0500 Subject: [PATCH 028/730] create sub op tree at a time when we know the type --- automerge/src/op_set.rs | 64 ++++++++++++++++++++--------------------- 1 file changed, 31 insertions(+), 33 deletions(-) diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 79fef3e4..70dae272 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -1,7 +1,7 @@ use crate::indexed_cache::IndexedCache; use crate::op_tree::OpTreeInternal; use crate::query::TreeQuery; -use crate::types::{ActorId, Key, ObjId, Op, OpId}; +use crate::types::{ActorId, Key, ObjId, Op, OpId, OpType}; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::HashMap; @@ -11,16 +11,16 @@ pub(crate) type OpSet = OpSetInternal<16>; #[derive(Debug, Clone)] pub(crate) struct OpSetInternal { trees: HashMap, FxBuildHasher>, - objs: Vec, length: usize, pub m: OpSetMetadata, } impl OpSetInternal { pub fn new() -> Self { + let mut trees: HashMap<_, _, _> = Default::default(); + trees.insert(ObjId::root(), Default::default()); OpSetInternal { - trees: Default::default(), - objs: Default::default(), + trees, length: 0, m: OpSetMetadata { actors: IndexedCache::new(), @@ -30,10 +30,13 @@ impl OpSetInternal { } pub fn iter(&self) -> Iter<'_, B> { + let mut objs: Vec<_> = self.trees.keys().collect(); + objs.sort_by(|a, b| self.m.lamport_cmp(a.0, b.0)); Iter { inner: self, index: 0, sub_index: 0, + objs, } } @@ -74,23 +77,15 @@ impl OpSetInternal { } pub fn insert(&mut self, index: usize, element: Op) { - let Self { - ref mut trees, - ref mut objs, - ref mut m, - .. 
- } = self; - trees - .entry(element.obj) - .or_insert_with(|| { - let pos = objs - .binary_search_by(|probe| m.lamport_cmp(probe.0, element.obj.0)) - .unwrap_err(); - objs.insert(pos, element.obj); - Default::default() - }) - .insert(index, element); - self.length += 1; + if let OpType::Make(_) = element.action { + self.trees.insert(element.id.into(), Default::default()); + } + + if let Some(tree) = self.trees.get_mut(&element.obj) { + //let tree = self.trees.get_mut(&element.obj).unwrap(); + tree.insert(index, element); + self.length += 1; + } } #[cfg(feature = "optree-visualisation")] @@ -114,9 +109,12 @@ impl<'a, const B: usize> IntoIterator for &'a OpSetInternal { type IntoIter = Iter<'a, B>; fn into_iter(self) -> Self::IntoIter { + let mut objs: Vec<_> = self.trees.keys().collect(); + objs.sort_by(|a, b| self.m.lamport_cmp(a.0, b.0)); Iter { inner: self, index: 0, + objs, sub_index: 0, } } @@ -125,6 +123,7 @@ impl<'a, const B: usize> IntoIterator for &'a OpSetInternal { pub(crate) struct Iter<'a, const B: usize> { inner: &'a OpSetInternal, index: usize, + objs: Vec<&'a ObjId>, sub_index: usize, } @@ -132,20 +131,19 @@ impl<'a, const B: usize> Iterator for Iter<'a, B> { type Item = &'a Op; fn next(&mut self) -> Option { - let obj = self.inner.objs.get(self.index)?; - let tree = self.inner.trees.get(obj)?; - self.sub_index += 1; - if let Some(op) = tree.get(self.sub_index - 1) { - Some(op) - } else { - self.index += 1; - self.sub_index = 1; - // FIXME is it possible that a rolled back transaction could break the iterator by - // having an empty tree? 
- let obj = self.inner.objs.get(self.index)?; + let mut result = None; + for obj in self.objs.iter().skip(self.index) { let tree = self.inner.trees.get(obj)?; - tree.get(self.sub_index - 1) + result = tree.get(self.sub_index); + if result.is_some() { + self.sub_index += 1; + break; + } else { + self.index += 1; + self.sub_index = 0; + } } + result } } From faf3e2cae4f8e6953990bc59a87fbceee107c76a Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 18 Jan 2022 12:40:48 -0500 Subject: [PATCH 029/730] update todo --- TODO.md | 9 +++++---- automerge/TODO.md | 18 ------------------ 2 files changed, 5 insertions(+), 22 deletions(-) delete mode 100644 automerge/TODO.md diff --git a/TODO.md b/TODO.md index 5840525b..646c0c20 100644 --- a/TODO.md +++ b/TODO.md @@ -1,8 +1,9 @@ - ### next steps: 1. C API + 2. port rust command line tool + 3. fast load -### ergronomics: +### ergonomics: 1. value() -> () or something that into's a value ### automerge: @@ -17,12 +18,12 @@ 1. add mark / remove mark -- type, start/end elemid (inclusive,exclusive) 2. track any formatting ops that start or end on a character 3. ops right before the character, ops right after that character - 4. query a single charaacter - character, plus marks that start or end on that character + 4. query a single character - character, plus marks that start or end on that character what is its current formatting, what are the ops that include that in their span, None = same as last time, Set( bold, italic ), keep these on index - 5. op probably belongs with the start character - possible packed at the beginer or end of the list + 5. op probably belongs with the start character - possible packed at the beginning or end of the list ### maybe: 1. 
tables diff --git a/automerge/TODO.md b/automerge/TODO.md deleted file mode 100644 index 68fa633f..00000000 --- a/automerge/TODO.md +++ /dev/null @@ -1,18 +0,0 @@ - -counters -> Visibility - -fast load - -values at clock -length at clock -keys at clock -text at clock - -extra tests - counters in lists -> inserts with tombstones - -ergronomics - - set(obj, prop, val) vs mapset(obj, str, val) and seqset(obj, usize, val) - value() -> (id, value) - From a12af10ee1c25eacf994e3f32891d62efadc25b7 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 19 Jan 2022 18:08:15 -0500 Subject: [PATCH 030/730] optimize js --- automerge-js/src/proxies.js | 19 ++++++++++--------- edit-trace/automerge-js.js | 5 +++-- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js index 8c02e3e8..e9fec220 100644 --- a/automerge-js/src/proxies.js +++ b/automerge-js/src/proxies.js @@ -99,18 +99,22 @@ function import_value(value) { const MapHandler = { get (target, key) { - const { context, objectId, path, readonly, frozen, heads } = target + const { context, objectId, path, readonly, frozen, heads, cache } = target if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (key === OBJECT_ID) return objectId if (key === READ_ONLY) return readonly if (key === FROZEN) return frozen if (key === HEADS) return heads if (key === STATE) return context; - return valueAt(target, key) + if (!cache[key]) { + cache[key] = valueAt(target, key) + } + return cache[key] }, set (target, key, val) { let { context, objectId, path, readonly, frozen} = target + target.cache = {} // reset cache on set if (val && val[OBJECT_ID]) { throw new RangeError('Cannot create a reference to an existing document object') } @@ -159,6 +163,7 @@ const MapHandler = { deleteProperty (target, key) { const { context, objectId, path, readonly, frozen } = target + target.cache = {} // reset cache on delete if (readonly) { throw new RangeError(`Object property 
"${key}" cannot be modified`) } @@ -365,18 +370,18 @@ const TextHandler = Object.assign({}, ListHandler, { }) function mapProxy(context, objectId, path, readonly, heads) { - return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads}, MapHandler) + return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler) } function listProxy(context, objectId, path, readonly, heads) { let target = [] - Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads}) + Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, ListHandler) } function textProxy(context, objectId, path, readonly, heads) { let target = [] - Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads}) + Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, TextHandler) } @@ -388,10 +393,6 @@ function listMethods(target) { const {context, objectId, path, readonly, frozen, heads} = target const methods = { deleteAt(index, numDelete) { - // FIXME - what about many deletes? 
- if (context.value(objectId, index)[0] == "counter") { - throw new TypeError('Unsupported operation: deleting a counter from a list') - } if (typeof numDelete === 'number') { context.splice(objectId, index, numDelete) } else { diff --git a/edit-trace/automerge-js.js b/edit-trace/automerge-js.js index 6985a7c7..bdfa8455 100644 --- a/edit-trace/automerge-js.js +++ b/edit-trace/automerge-js.js @@ -10,8 +10,9 @@ state = Automerge.change(state, doc => { if (i % 1000 === 0) { console.log(`Processed ${i} edits in ${new Date() - start} ms`) } - if (edits[i][1] > 0) doc.text.deleteAt(edits[i][0], edits[i][1]) - if (edits[i].length > 2) doc.text.insertAt(edits[i][0], ...edits[i].slice(2)) + let edit = edits[i] + if (edit[1] > 0) doc.text.deleteAt(edit[0], edit[1]) + if (edit.length > 2) doc.text.insertAt(edit[0], ...edit.slice(2)) } }) From 0ccf36fe49c03aaa3fd89c69b86ed04df435943e Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 18 Jan 2022 21:27:09 +0000 Subject: [PATCH 031/730] Add test and doc update for setting scalarvalues --- automerge/src/automerge.rs | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 478ca4de..3dfc2160 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -253,7 +253,7 @@ impl Automerge { /// # Returns /// /// The opid of the operation which was created, or None if this operation doesn't change the - /// document + /// document or create a new object. /// /// # Errors /// @@ -1135,6 +1135,20 @@ mod tests { Ok(()) } + #[test] + fn test_set() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + // setting a scalar value shouldn't return an opid as no object was created. + assert!(doc.set(&ROOT, "a", 1)?.is_none()); + // setting the same value shouldn't return an opid as there is no change. 
+ assert!(doc.set(&ROOT, "a", 1)?.is_none()); + + assert!(doc.set(&ROOT, "b", Value::map())?.is_some()); + // object already exists at b but setting a map again overwrites it so we get an opid. + assert!(doc.set(&ROOT, "b", Value::map())?.is_some()); + Ok(()) + } + #[test] fn test_list() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); From 5867c8d1313e27cceac1b61a7f3ecc3c056c67f1 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 19 Jan 2022 10:02:49 +0000 Subject: [PATCH 032/730] Fixup CI --- automerge-wasm/src/interop.rs | 4 ++-- automerge-wasm/src/lib.rs | 4 ++-- automerge/src/visualisation.rs | 2 +- flake.nix | 1 + 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 47dcb6d5..a4c7fbe9 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -289,7 +289,7 @@ pub(crate) fn get_heads(heads: JsValue) -> Option> { JS(heads).into() } -pub(crate) fn map_to_js(doc: &mut am::Automerge, obj: &ObjId) -> JsValue { +pub(crate) fn map_to_js(doc: &am::Automerge, obj: &ObjId) -> JsValue { let keys = doc.keys(obj); let map = Object::new(); for k in keys { @@ -312,7 +312,7 @@ pub(crate) fn map_to_js(doc: &mut am::Automerge, obj: &ObjId) -> JsValue { map.into() } -fn list_to_js(doc: &mut am::Automerge, obj: &ObjId) -> JsValue { +fn list_to_js(doc: &am::Automerge, obj: &ObjId) -> JsValue { let len = doc.length(obj); let array = Array::new(); for i in 0..len { diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 8660afe2..78cb170b 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -398,8 +398,8 @@ impl Automerge { } #[wasm_bindgen(js_name = toJS)] - pub fn to_js(&mut self) -> JsValue { - map_to_js(&mut self.0, &ROOT) + pub fn to_js(&self) -> JsValue { + map_to_js(&self.0, &ROOT) } fn export(&self, val: ObjId) -> JsValue { diff --git a/automerge/src/visualisation.rs b/automerge/src/visualisation.rs index 
75a2c9f6..b0c7e4dd 100644 --- a/automerge/src/visualisation.rs +++ b/automerge/src/visualisation.rs @@ -69,8 +69,8 @@ impl<'a, const B: usize> GraphVisualisation<'a, B> { actor_shorthands.insert(actor, format!("actor{}", actor)); } GraphVisualisation { - actor_shorthands, nodes, + actor_shorthands, } } diff --git a/flake.nix b/flake.nix index b22e5245..7fe145d6 100644 --- a/flake.nix +++ b/flake.nix @@ -42,6 +42,7 @@ cargo-criterion cargo-fuzz cargo-flamegraph + cargo-deny crate2nix wasm-pack pkgconfig From 812c7df3a7947e09efc4ec03e1322b857b5eb107 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 18 Jan 2022 22:47:00 +0000 Subject: [PATCH 033/730] Add length tests to props tests --- automerge/src/automerge.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 3dfc2160..6dd4ac6f 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1267,31 +1267,38 @@ mod tests { doc.commit(None, None); let heads5 = doc.get_heads(); assert!(doc.keys_at(&ROOT, &heads1) == vec!["prop1".to_owned()]); + assert_eq!(doc.length_at(&ROOT, &heads1), 1); assert!(doc.value_at(&ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); assert!(doc.value_at(&ROOT, "prop2", &heads1)? == None); assert!(doc.value_at(&ROOT, "prop3", &heads1)? == None); assert!(doc.keys_at(&ROOT, &heads2) == vec!["prop1".to_owned()]); + assert_eq!(doc.length_at(&ROOT, &heads2), 1); assert!(doc.value_at(&ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); assert!(doc.value_at(&ROOT, "prop2", &heads2)? == None); assert!(doc.value_at(&ROOT, "prop3", &heads2)? == None); assert!(doc.keys_at(&ROOT, &heads3) == vec!["prop1".to_owned(), "prop2".to_owned()]); + assert_eq!(doc.length_at(&ROOT, &heads3), 2); assert!(doc.value_at(&ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); assert!(doc.value_at(&ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); assert!(doc.value_at(&ROOT, "prop3", &heads3)? 
== None); assert!(doc.keys_at(&ROOT, &heads4) == vec!["prop2".to_owned()]); + assert_eq!(doc.length_at(&ROOT, &heads4), 1); assert!(doc.value_at(&ROOT, "prop1", &heads4)? == None); assert!(doc.value_at(&ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); assert!(doc.value_at(&ROOT, "prop3", &heads4)? == None); assert!(doc.keys_at(&ROOT, &heads5) == vec!["prop2".to_owned(), "prop3".to_owned()]); + assert_eq!(doc.length_at(&ROOT, &heads5), 2); + assert_eq!(doc.length(&ROOT), 2); assert!(doc.value_at(&ROOT, "prop1", &heads5)? == None); assert!(doc.value_at(&ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); assert!(doc.value_at(&ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); assert!(doc.keys_at(&ROOT, &[]).is_empty()); + assert_eq!(doc.length_at(&ROOT, &[]), 0); assert!(doc.value_at(&ROOT, "prop1", &[])? == None); assert!(doc.value_at(&ROOT, "prop2", &[])? == None); assert!(doc.value_at(&ROOT, "prop3", &[])? == None); @@ -1343,6 +1350,7 @@ mod tests { assert!(doc.value_at(&list, 1, &heads5)?.unwrap().0 == Value::int(50)); assert!(doc.length_at(&list, &heads6) == 1); + assert!(doc.length(&list) == 1); assert!(doc.value_at(&list, 0, &heads6)?.unwrap().0 == Value::int(50)); Ok(()) From 2015428452fca95752ddfa2aa9a9a7cb21d25c77 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 19 Jan 2022 17:14:49 +0000 Subject: [PATCH 034/730] Detect object type before getting length --- automerge/src/automerge.rs | 22 +++++++++++++++++----- automerge/src/op_set.rs | 24 +++++++++++++++--------- automerge/src/visualisation.rs | 4 ++-- 3 files changed, 34 insertions(+), 16 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 6dd4ac6f..a67e9f2d 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -8,7 +8,7 @@ use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, ScalarValue, Value, }; -use crate::{legacy, query, types}; +use crate::{legacy, 
query, types, ObjType}; use crate::{AutomergeError, Change, Prop}; #[derive(Debug, Clone)] @@ -227,17 +227,29 @@ impl Automerge { } pub fn length(&self, obj: &ExId) -> usize { - if let Ok(obj) = self.exid_to_obj(obj) { - self.ops.search(obj, query::Len::new()).len + if let Ok(inner_obj) = self.exid_to_obj(obj) { + match self.ops.object_type(&inner_obj) { + Some(ObjType::Map) | Some(ObjType::Table) => self.keys(obj).len(), + Some(ObjType::List) | Some(ObjType::Text) => { + self.ops.search(inner_obj, query::Len::new()).len + } + None => 0, + } } else { 0 } } pub fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize { - if let Ok(obj) = self.exid_to_obj(obj) { + if let Ok(inner_obj) = self.exid_to_obj(obj) { let clock = self.clock_at(heads); - self.ops.search(obj, query::LenAt::new(clock)).len + match self.ops.object_type(&inner_obj) { + Some(ObjType::Map) | Some(ObjType::Table) => self.keys_at(obj, heads).len(), + Some(ObjType::List) | Some(ObjType::Text) => { + self.ops.search(inner_obj, query::LenAt::new(clock)).len + } + None => 0, + } } else { 0 } diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 70dae272..9f35d993 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -2,6 +2,7 @@ use crate::indexed_cache::IndexedCache; use crate::op_tree::OpTreeInternal; use crate::query::TreeQuery; use crate::types::{ActorId, Key, ObjId, Op, OpId, OpType}; +use crate::ObjType; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::HashMap; @@ -10,7 +11,7 @@ pub(crate) type OpSet = OpSetInternal<16>; #[derive(Debug, Clone)] pub(crate) struct OpSetInternal { - trees: HashMap, FxBuildHasher>, + trees: HashMap), FxBuildHasher>, length: usize, pub m: OpSetMetadata, } @@ -18,7 +19,7 @@ pub(crate) struct OpSetInternal { impl OpSetInternal { pub fn new() -> Self { let mut trees: HashMap<_, _, _> = Default::default(); - trees.insert(ObjId::root(), Default::default()); + trees.insert(ObjId::root(), (ObjType::Map, 
Default::default())); OpSetInternal { trees, length: 0, @@ -44,7 +45,7 @@ impl OpSetInternal { where Q: TreeQuery, { - if let Some(tree) = self.trees.get(&obj) { + if let Some((_typ, tree)) = self.trees.get(&obj) { tree.search(query, &self.m) } else { query @@ -55,7 +56,7 @@ impl OpSetInternal { where F: FnMut(&mut Op), { - if let Some(tree) = self.trees.get_mut(&obj) { + if let Some((_typ, tree)) = self.trees.get_mut(&obj) { tree.replace(index, f) } else { None @@ -63,7 +64,7 @@ impl OpSetInternal { } pub fn remove(&mut self, obj: ObjId, index: usize) -> Op { - let tree = self.trees.get_mut(&obj).unwrap(); + let (_typ, tree) = self.trees.get_mut(&obj).unwrap(); self.length -= 1; let op = tree.remove(index); if tree.is_empty() { @@ -77,17 +78,22 @@ impl OpSetInternal { } pub fn insert(&mut self, index: usize, element: Op) { - if let OpType::Make(_) = element.action { - self.trees.insert(element.id.into(), Default::default()); + if let OpType::Make(typ) = element.action { + self.trees + .insert(element.id.into(), (typ, Default::default())); } - if let Some(tree) = self.trees.get_mut(&element.obj) { + if let Some((_typ, tree)) = self.trees.get_mut(&element.obj) { //let tree = self.trees.get_mut(&element.obj).unwrap(); tree.insert(index, element); self.length += 1; } } + pub fn object_type(&self, id: &ObjId) -> Option { + self.trees.get(id).map(|(typ, _)| *typ) + } + #[cfg(feature = "optree-visualisation")] pub fn visualise(&self) -> String { let mut out = Vec::new(); @@ -133,7 +139,7 @@ impl<'a, const B: usize> Iterator for Iter<'a, B> { fn next(&mut self) -> Option { let mut result = None; for obj in self.objs.iter().skip(self.index) { - let tree = self.inner.trees.get(obj)?; + let (_typ, tree) = self.inner.trees.get(obj)?; result = tree.get(self.sub_index); if result.is_some() { self.sub_index += 1; diff --git a/automerge/src/visualisation.rs b/automerge/src/visualisation.rs index b0c7e4dd..442c9eb4 100644 --- a/automerge/src/visualisation.rs +++ 
b/automerge/src/visualisation.rs @@ -43,13 +43,13 @@ impl<'a, const B: usize> GraphVisualisation<'a, B> { pub(super) fn construct( trees: &'a HashMap< crate::types::ObjId, - crate::op_tree::OpTreeInternal, + (crate::types::ObjType, crate::op_tree::OpTreeInternal), BuildHasherDefault, >, metadata: &'a crate::op_set::OpSetMetadata, ) -> GraphVisualisation<'a, B> { let mut nodes = HashMap::new(); - for (obj_id, tree) in trees { + for (obj_id, (_, tree)) in trees { if let Some(root_node) = &tree.root_node { let tree_id = Self::construct_nodes(root_node, &mut nodes, metadata); let obj_tree_id = NodeId::default(); From 429426a693bc03489b749a5f18a0f2627219e553 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 19 Jan 2022 17:32:57 +0000 Subject: [PATCH 035/730] Fix removal and rollback Credit to @orionz --- automerge/src/automerge.rs | 3 ++- automerge/src/op_set.rs | 5 +++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index a67e9f2d..ebe230a9 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -153,7 +153,8 @@ impl Automerge { pub fn rollback(&mut self) -> usize { if let Some(tx) = self.transaction.take() { let num = tx.operations.len(); - for op in &tx.operations { + // remove in reverse order so sets are removed before makes etc... 
+ for op in tx.operations.iter().rev() { for pred_id in &op.pred { // FIXME - use query to make this fast if let Some(p) = self.ops.iter().position(|o| o.id == *pred_id) { diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 9f35d993..d92ebbe2 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -64,11 +64,12 @@ impl OpSetInternal { } pub fn remove(&mut self, obj: ObjId, index: usize) -> Op { + // this happens on rollback - be sure to go back to the old state let (_typ, tree) = self.trees.get_mut(&obj).unwrap(); self.length -= 1; let op = tree.remove(index); - if tree.is_empty() { - self.trees.remove(&obj); + if let OpType::Make(_) = &op.action { + self.trees.remove(&op.id.into()); } op } From 0388c4648005ada5c2bb8b794e983b79fc39d889 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 19 Jan 2022 17:39:39 +0000 Subject: [PATCH 036/730] Remove unused is_empty function on optrees --- automerge/src/op_tree.rs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index c91c150e..ed0e9060 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -49,11 +49,6 @@ impl OpTreeInternal { query } - /// Check if the sequence is empty. - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - /// Create an iterator through the sequence. 
pub fn iter(&self) -> Iter<'_, B> { Iter { From 54fec3e4385fc3666a19f0ce22f19b5c7ec898fe Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 20 Jan 2022 13:16:01 -0500 Subject: [PATCH 037/730] lamport compare was backward on actorids and so was value resolution --- automerge-js/test/legacy_tests.js | 23 +++++++++++++++++++++++ automerge-wasm/test/test.js | 14 ++++++++------ automerge/src/automerge.rs | 4 ++-- automerge/src/op_set.rs | 3 +-- automerge/tests/test.rs | 18 +++++++++--------- 5 files changed, 43 insertions(+), 19 deletions(-) diff --git a/automerge-js/test/legacy_tests.js b/automerge-js/test/legacy_tests.js index 75232cd2..689880bd 100644 --- a/automerge-js/test/legacy_tests.js +++ b/automerge-js/test/legacy_tests.js @@ -613,6 +613,29 @@ describe('Automerge', () => { assert.strictEqual(s1.japaneseFood.length, 3) }) + it('concurrent edits insert in reverse actorid order if counters equal', () => { + s1 = Automerge.init('aaaa') + s2 = Automerge.init('bbbb') + s1 = Automerge.change(s1, doc => doc.list = []) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa")) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "2@bbbb")) + s2 = Automerge.merge(s2, s1) + assert.deepStrictEqual(Automerge.toJS(s2).list, ["2@bbbb", "2@aaaa"]) + }) + + it('concurrent edits insert in reverse counter order if different', () => { + s1 = Automerge.init('aaaa') + s2 = Automerge.init('bbbb') + s1 = Automerge.change(s1, doc => doc.list = []) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa")) + s2 = Automerge.change(s2, doc => doc.foo = "2@bbbb") + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "3@bbbb")) + s2 = Automerge.merge(s2, s1) + assert.deepStrictEqual(s2.list, ["3@bbbb", "2@aaaa"]) + }) + it('should treat out-by-one assignment as insertion', () => { s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon']) s1 = Automerge.change(s1, doc => doc.japaneseFood[1] = 
'sushi') diff --git a/automerge-wasm/test/test.js b/automerge-wasm/test/test.js index bf476243..97e0a0ec 100644 --- a/automerge-wasm/test/test.js +++ b/automerge-wasm/test/test.js @@ -303,14 +303,15 @@ describe('Automerge', () => { doc1.applyChanges(doc3.getChanges(doc1.getHeads())) let result = doc1.values("_root", "cnt") assert.deepEqual(result,[ - ['counter',10,'2@cccc'], + ['int',20,'2@aaaa'], ['counter',0,'2@bbbb'], - ['int',20,'2@aaaa'] + ['counter',10,'2@cccc'], ]) doc1.inc("_root", "cnt", 5) result = doc1.values("_root", "cnt") assert.deepEqual(result, [ - [ 'counter', 15, '2@cccc' ], [ 'counter', 5, '2@bbbb' ] + [ 'counter', 5, '2@bbbb' ], + [ 'counter', 15, '2@cccc' ], ]) let save1 = doc1.save() @@ -335,14 +336,15 @@ describe('Automerge', () => { doc1.applyChanges(doc3.getChanges(doc1.getHeads())) let result = doc1.values(seq, 0) assert.deepEqual(result,[ - ['counter',10,'3@cccc'], + ['int',20,'3@aaaa'], ['counter',0,'3@bbbb'], - ['int',20,'3@aaaa'] + ['counter',10,'3@cccc'], ]) doc1.inc(seq, 0, 5) result = doc1.values(seq, 0) assert.deepEqual(result, [ - [ 'counter', 15, '3@cccc' ], [ 'counter', 5, '3@bbbb' ] + [ 'counter', 5, '3@bbbb' ], + [ 'counter', 15, '3@cccc' ], ]) let save = doc1.save() diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index ebe230a9..146a35c4 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -455,7 +455,7 @@ impl Automerge { obj: &ExId, prop: P, ) -> Result, AutomergeError> { - Ok(self.values(obj, prop.into())?.first().cloned()) + Ok(self.values(obj, prop.into())?.last().cloned()) } pub fn value_at>( @@ -464,7 +464,7 @@ impl Automerge { prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { - Ok(self.values_at(obj, prop, heads)?.first().cloned()) + Ok(self.values_at(obj, prop, heads)?.last().cloned()) } pub fn values>( diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index d92ebbe2..49310bc6 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ 
-173,8 +173,7 @@ impl OpSetMetadata { (OpId(0, _), OpId(0, _)) => Ordering::Equal, (OpId(0, _), OpId(_, _)) => Ordering::Less, (OpId(_, _), OpId(0, _)) => Ordering::Greater, - // FIXME - this one seems backwards to me - why - is values() returning in the wrong order? - (OpId(a, x), OpId(b, y)) if a == b => self.actors[y].cmp(&self.actors[x]), + (OpId(a, x), OpId(b, y)) if a == b => self.actors[x].cmp(&self.actors[y]), (OpId(a, _), OpId(b, _)) => a.cmp(&b), } } diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 5b101de8..d8637283 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -440,10 +440,10 @@ fn concurrent_insertions_at_same_list_position() { "parakeet", }, { - "starling", + "chaffinch", }, { - "chaffinch", + "starling", }, ] }, @@ -740,16 +740,16 @@ fn does_not_interleave_sequence_insertions_at_same_position() { &doc1, map! { "wisdom" => {list![ - {"to"}, - {"be"}, - {"is"}, - {"to"}, - {"do"}, {"to"}, {"do"}, {"is"}, {"to"}, {"be"}, + {"to"}, + {"be"}, + {"is"}, + {"to"}, + {"do"}, ]} } ); @@ -890,9 +890,9 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { let mut v = vec![ActorId::random(), ActorId::random(), ActorId::random()]; v.sort(); println!("{:?}", v); - let actor1 = v[2].clone(); + let actor1 = v[0].clone(); let actor2 = v[1].clone(); - let actor3 = v[0].clone(); + let actor3 = v[2].clone(); let mut doc1 = new_doc_with_actor(actor1); From 7b3db2f15a92c594235df0c8e9b245e420a0117b Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 20 Jan 2022 14:25:25 -0500 Subject: [PATCH 038/730] clippy lint --- automerge/src/automerge.rs | 4 ++-- automerge/src/columnar.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 146a35c4..ca9e1463 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -745,7 +745,7 @@ impl Automerge { &self.ops.m.props.cache, ); if bytes.is_ok() { - self.saved = 
self.get_heads().iter().copied().collect(); + self.saved = self.get_heads().to_vec(); } bytes } @@ -759,7 +759,7 @@ impl Automerge { bytes.extend(c.raw_bytes()); } if !bytes.is_empty() { - self.saved = self._get_heads().iter().copied().collect(); + self.saved = self._get_heads().to_vec() } bytes } diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index ffb38ea8..53a9d488 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -260,7 +260,7 @@ impl<'a> Iterator for ChangeIterator<'a> { let max_op = self.max_op.next()??; let time = self.time.next()?? as i64; let message = self.message.next()?; - let extra_bytes = self.extra.next().unwrap_or_else(Vec::new); + let extra_bytes = self.extra.next().unwrap_or_default(); Some(DocChange { actor, seq, From 3393a60e59cc831e8b7494a4e51bec8ac81dce84 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 20 Jan 2022 14:37:19 -0500 Subject: [PATCH 039/730] clippy lint --- automerge-wasm/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 78cb170b..0882ceaf 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -1,3 +1,4 @@ +#![allow(clippy::unused_unit)] use automerge as am; use automerge::{Change, ObjId, Prop, Value, ROOT}; use js_sys::{Array, Object, Uint8Array}; From cfa1067c19ca0039d13c312ca39a132bb6bf4386 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 28 Jan 2022 16:41:17 -0500 Subject: [PATCH 040/730] rework wasm function to use js types more directly --- automerge-js/src/index.js | 14 +-- automerge-js/src/proxies.js | 2 +- automerge-js/test/legacy_tests.js | 2 +- automerge-wasm/src/interop.rs | 13 +-- automerge-wasm/src/lib.rs | 164 ++++++++++++++---------------- automerge-wasm/test/test.js | 114 ++++++++++----------- 6 files changed, 145 insertions(+), 164 deletions(-) diff --git a/automerge-js/src/index.js b/automerge-js/src/index.js index cde193f4..3ffe9697 100644 --- 
a/automerge-js/src/index.js +++ b/automerge-js/src/index.js @@ -8,7 +8,10 @@ let { Int, Uint, Float64 } = require("./numbers") let { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } = require("./constants") function init(actor) { - const state = AutomergeWASM.init(actor) + if (typeof actor != 'string') { + actor = null + } + const state = AutomergeWASM.create(actor) return rootProxy(state, true); } @@ -43,7 +46,6 @@ function change(doc, options, callback) { throw new RangeError("Attempting to use an outdated Automerge document") } if (!!doc[HEADS] === true) { - console.log("HEADS", doc[HEADS]) throw new RangeError("Attempting to change an out of date document"); } if (doc[READ_ONLY] === false) { @@ -97,7 +99,7 @@ function emptyChange(doc, options) { } function load(data, actor) { - const state = AutomergeWASM.load(data, actor) + const state = AutomergeWASM.loadDoc(data, actor) return rootProxy(state, true); } @@ -135,13 +137,13 @@ function conflictAt(context, objectId, prop) { const value = conflict[1] switch (datatype) { case "map": - result[value] = mapProxy(context, value, [ prop ], true, true) + result[value] = mapProxy(context, value, [ prop ], true) break; case "list": - result[value] = listProxy(context, value, [ prop ], true, true) + result[value] = listProxy(context, value, [ prop ], true) break; case "text": - result[value] = textProxy(context, value, [ prop ], true, true) + result[value] = textProxy(context, value, [ prop ], true) break; //case "table": //case "cursor": diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js index e9fec220..878ae101 100644 --- a/automerge-js/src/proxies.js +++ b/automerge-js/src/proxies.js @@ -386,7 +386,7 @@ function textProxy(context, objectId, path, readonly, heads) { } function rootProxy(context, readonly) { - return mapProxy(context, "_root", [], readonly, false) + return mapProxy(context, "_root", [], readonly) } function listMethods(target) { diff --git a/automerge-js/test/legacy_tests.js 
b/automerge-js/test/legacy_tests.js index 689880bd..76348d06 100644 --- a/automerge-js/test/legacy_tests.js +++ b/automerge-js/test/legacy_tests.js @@ -90,7 +90,7 @@ describe('Automerge', () => { const change1 = Automerge.getLastLocalChange(s1) s2 = Automerge.change(s1, doc => doc.foo = 'bar') const change2 = Automerge.getLastLocalChange(s2) - assert.strictEqual(change1, null) + assert.strictEqual(change1, undefined) const change = decodeChange(change2) assert.deepStrictEqual(change, { actor: change.actor, deps: [], seq: 1, startOp: 1, diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index a4c7fbe9..f8420aa6 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -249,13 +249,6 @@ pub(crate) fn js_set>(obj: &JsValue, prop: &str, val: V) -> Res Reflect::set(obj, &prop.into(), &val.into()) } -pub(crate) fn to_usize(val: JsValue, name: &str) -> Result { - match val.as_f64() { - Some(n) => Ok(n as usize), - None => Err(format!("{} must be a number", name).into()), - } -} - pub(crate) fn to_prop(p: JsValue) -> Result { if let Some(s) = p.as_string() { Ok(Prop::Map(s)) @@ -285,8 +278,10 @@ pub(crate) fn to_objtype(a: &JsValue) -> Option { } } -pub(crate) fn get_heads(heads: JsValue) -> Option> { - JS(heads).into() +pub(crate) fn get_heads(heads: Option) -> Option> { + let heads = heads?; + let heads: Result, _> = heads.iter().map(|j| j.into_serde()).collect(); + heads.ok() } pub(crate) fn map_to_js(doc: &am::Automerge, obj: &ObjId) -> JsValue { diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 0882ceaf..46d06490 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -10,9 +10,7 @@ mod interop; mod sync; mod value; -use interop::{ - get_heads, js_get, js_set, map_to_js, to_js_err, to_objtype, to_prop, to_usize, AR, JS, -}; +use interop::{get_heads, js_get, js_set, map_to_js, to_js_err, to_objtype, to_prop, AR, JS}; use sync::SyncState; use value::{datatype, ScalarValue}; @@ -33,9 
+31,9 @@ pub struct Automerge(automerge::Automerge); #[wasm_bindgen] impl Automerge { - pub fn new(actor: JsValue) -> Result { + pub fn new(actor: Option) -> Result { let mut automerge = automerge::Automerge::new(); - if let Some(a) = actor.as_string() { + if let Some(a) = actor { let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec()); automerge.set_actor(a); } @@ -43,9 +41,9 @@ impl Automerge { } #[allow(clippy::should_implement_trait)] - pub fn clone(&self, actor: JsValue) -> Result { + pub fn clone(&self, actor: Option) -> Result { let mut automerge = Automerge(self.0.clone()); - if let Some(s) = actor.as_string() { + if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); automerge.0.set_actor(actor) } @@ -59,10 +57,8 @@ impl Automerge { (self.0.pending_ops() as u32).into() } - pub fn commit(&mut self, message: JsValue, time: JsValue) -> Array { - let message = message.as_string(); - let time = time.as_f64().map(|v| v as i64); - let heads = self.0.commit(message, time); + pub fn commit(&mut self, message: Option, time: Option) -> Array { + let heads = self.0.commit(message, time.map(|n| n as i64)); let heads: Array = heads .iter() .map(|h| JsValue::from_str(&hex::encode(&h.0))) @@ -70,11 +66,11 @@ impl Automerge { heads } - pub fn rollback(&mut self) -> JsValue { - self.0.rollback().into() + pub fn rollback(&mut self) -> f64 { + self.0.rollback() as f64 } - pub fn keys(&mut self, obj: JsValue, heads: JsValue) -> Result { + pub fn keys(&mut self, obj: String, heads: Option) -> Result { let obj = self.import(obj)?; let result = if let Some(heads) = get_heads(heads) { self.0.keys_at(&obj, &heads) @@ -87,7 +83,7 @@ impl Automerge { Ok(result) } - pub fn text(&mut self, obj: JsValue, heads: JsValue) -> Result { + pub fn text(&mut self, obj: String, heads: Option) -> Result { let obj = self.import(obj)?; if let Some(heads) = get_heads(heads) { self.0.text_at(&obj, &heads) @@ -95,25 +91,24 @@ 
impl Automerge { self.0.text(&obj) } .map_err(to_js_err) - .map(|t| t.into()) } pub fn splice( &mut self, - obj: JsValue, - start: JsValue, - delete_count: JsValue, + obj: String, + start: f64, + delete_count: f64, text: JsValue, - ) -> Result { + ) -> Result, JsValue> { let obj = self.import(obj)?; - let start = to_usize(start, "start")?; - let delete_count = to_usize(delete_count, "deleteCount")?; + let start = start as usize; + let delete_count = delete_count as usize; let mut vals = vec![]; if let Some(t) = text.as_string() { self.0 .splice_text(&obj, start, delete_count, &t) .map_err(to_js_err)?; - Ok(JsValue::null()) + Ok(None) } else { if let Ok(array) = text.dyn_into::() { for i in array.iter() { @@ -133,7 +128,7 @@ impl Automerge { .splice(&obj, start, delete_count, vals) .map_err(to_js_err)?; if result.is_empty() { - Ok(JsValue::null()) + Ok(None) } else { let result: Array = result .iter() @@ -146,62 +141,49 @@ impl Automerge { pub fn push( &mut self, - obj: JsValue, + obj: String, value: JsValue, - datatype: JsValue, - ) -> Result { + datatype: Option, + ) -> Result, JsValue> { let obj = self.import(obj)?; - let value = self.import_value(value, datatype.as_string())?; + let value = self.import_value(value, datatype)?; let index = self.0.length(&obj); let opid = self.0.insert(&obj, index, value).map_err(to_js_err)?; - match opid { - Some(opid) => Ok(self.export(opid)), - None => Ok(JsValue::null()), - } + Ok(opid.map(|id| id.to_string())) } pub fn insert( &mut self, - obj: JsValue, - index: JsValue, + obj: String, + index: f64, value: JsValue, - datatype: JsValue, - ) -> Result { + datatype: Option, + ) -> Result, JsValue> { let obj = self.import(obj)?; - //let key = self.insert_pos_for_index(&obj, prop)?; - let index: Result<_, JsValue> = index - .as_f64() - .ok_or_else(|| "insert index must be a number".into()); - let index = index?; - let value = self.import_value(value, datatype.as_string())?; + let index = index as f64; + let value = 
self.import_value(value, datatype)?; let opid = self .0 .insert(&obj, index as usize, value) .map_err(to_js_err)?; - match opid { - Some(opid) => Ok(self.export(opid)), - None => Ok(JsValue::null()), - } + Ok(opid.map(|id| id.to_string())) } pub fn set( &mut self, - obj: JsValue, + obj: String, prop: JsValue, value: JsValue, - datatype: JsValue, - ) -> Result { + datatype: Option, + ) -> Result, JsValue> { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; - let value = self.import_value(value, datatype.as_string())?; + let value = self.import_value(value, datatype)?; let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; - match opid { - Some(opid) => Ok(self.export(opid)), - None => Ok(JsValue::null()), - } + Ok(opid.map(|id| id.to_string())) } - pub fn inc(&mut self, obj: JsValue, prop: JsValue, value: JsValue) -> Result<(), JsValue> { + pub fn inc(&mut self, obj: String, prop: JsValue, value: JsValue) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; let value: f64 = value @@ -212,7 +194,12 @@ impl Automerge { Ok(()) } - pub fn value(&mut self, obj: JsValue, prop: JsValue, heads: JsValue) -> Result { + pub fn value( + &mut self, + obj: String, + prop: JsValue, + heads: Option, + ) -> Result { let obj = self.import(obj)?; let result = Array::new(); let prop = to_prop(prop); @@ -227,7 +214,7 @@ impl Automerge { match value { Some((Value::Object(obj_type), obj_id)) => { result.push(&obj_type.to_string().into()); - result.push(&self.export(obj_id)); + result.push(&obj_id.to_string().into()); } Some((Value::Scalar(value), _)) => { result.push(&datatype(&value).into()); @@ -239,7 +226,12 @@ impl Automerge { Ok(result) } - pub fn values(&mut self, obj: JsValue, arg: JsValue, heads: JsValue) -> Result { + pub fn values( + &mut self, + obj: String, + arg: JsValue, + heads: Option, + ) -> Result { let obj = self.import(obj)?; let result = Array::new(); let prop = to_prop(arg); @@ -255,14 +247,14 @@ impl 
Automerge { (Value::Object(obj_type), obj_id) => { let sub = Array::new(); sub.push(&obj_type.to_string().into()); - sub.push(&self.export(obj_id)); + sub.push(&obj_id.to_string().into()); result.push(&sub.into()); } (Value::Scalar(value), id) => { let sub = Array::new(); sub.push(&datatype(&value).into()); sub.push(&ScalarValue(value).into()); - sub.push(&self.export(id)); + sub.push(&id.to_string().into()); result.push(&sub.into()); } } @@ -271,16 +263,16 @@ impl Automerge { Ok(result) } - pub fn length(&mut self, obj: JsValue, heads: JsValue) -> Result { + pub fn length(&mut self, obj: String, heads: Option) -> Result { let obj = self.import(obj)?; if let Some(heads) = get_heads(heads) { - Ok((self.0.length_at(&obj, &heads) as f64).into()) + Ok(self.0.length_at(&obj, &heads) as f64) } else { - Ok((self.0.length(&obj) as f64).into()) + Ok(self.0.length(&obj) as f64) } } - pub fn del(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> { + pub fn del(&mut self, obj: String, prop: JsValue) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop = to_prop(prop)?; self.0.del(&obj, prop).map_err(to_js_err)?; @@ -295,16 +287,16 @@ impl Automerge { } #[wasm_bindgen(js_name = saveIncremental)] - pub fn save_incremental(&mut self) -> JsValue { + pub fn save_incremental(&mut self) -> Uint8Array { let bytes = self.0.save_incremental(); - Uint8Array::from(bytes.as_slice()).into() + Uint8Array::from(bytes.as_slice()) } #[wasm_bindgen(js_name = loadIncremental)] - pub fn load_incremental(&mut self, data: Uint8Array) -> Result { + pub fn load_incremental(&mut self, data: Uint8Array) -> Result { let data = data.to_vec(); let len = self.0.load_incremental(&data).map_err(to_js_err)?; - Ok(len.into()) + Ok(len as f64) } #[wasm_bindgen(js_name = applyChanges)] @@ -346,17 +338,17 @@ impl Automerge { } #[wasm_bindgen(js_name = getActorId)] - pub fn get_actor_id(&mut self) -> JsValue { + pub fn get_actor_id(&mut self) -> String { let actor = self.0.get_actor(); - 
actor.to_string().into() + actor.to_string() } #[wasm_bindgen(js_name = getLastLocalChange)] - pub fn get_last_local_change(&mut self) -> Result { + pub fn get_last_local_change(&mut self) -> Result, JsValue> { if let Some(change) = self.0.get_last_local_change() { - Ok(Uint8Array::from(change.raw_bytes()).into()) + Ok(Some(Uint8Array::from(change.raw_bytes()))) } else { - Ok(JsValue::null()) + Ok(None) } } @@ -365,8 +357,8 @@ impl Automerge { } #[wasm_bindgen(js_name = getMissingDeps)] - pub fn get_missing_deps(&mut self, heads: JsValue) -> Result { - let heads: Vec<_> = JS(heads).try_into().unwrap_or_default(); + pub fn get_missing_deps(&mut self, heads: Option) -> Result { + let heads = get_heads(heads).unwrap_or_default(); let deps = self.0.get_missing_deps(&heads); let deps: Array = deps .iter() @@ -403,16 +395,8 @@ impl Automerge { map_to_js(&self.0, &ROOT) } - fn export(&self, val: ObjId) -> JsValue { - val.to_string().into() - } - - fn import(&self, id: JsValue) -> Result { - let id_str = id - .as_string() - .ok_or("invalid opid/objid/elemid") - .map_err(to_js_err)?; - self.0.import(&id_str).map_err(to_js_err) + fn import(&self, id: String) -> Result { + self.0.import(&id).map_err(to_js_err) } fn import_prop(&mut self, prop: JsValue) -> Result { @@ -488,17 +472,17 @@ impl Automerge { } } -#[wasm_bindgen] -pub fn init(actor: JsValue) -> Result { +#[wasm_bindgen(js_name = create)] +pub fn init(actor: Option) -> Result { console_error_panic_hook::set_once(); Automerge::new(actor) } -#[wasm_bindgen] -pub fn load(data: Uint8Array, actor: JsValue) -> Result { +#[wasm_bindgen(js_name = loadDoc)] +pub fn load(data: Uint8Array, actor: Option) -> Result { let data = data.to_vec(); let mut automerge = am::Automerge::load(&data).map_err(to_js_err)?; - if let Some(s) = actor.as_string() { + if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); automerge.set_actor(actor) } diff --git a/automerge-wasm/test/test.js 
b/automerge-wasm/test/test.js index 97e0a0ec..7c5e7bab 100644 --- a/automerge-wasm/test/test.js +++ b/automerge-wasm/test/test.js @@ -37,20 +37,20 @@ function sync(a, b, aSyncState = initSyncState(), bSyncState = initSyncState()) describe('Automerge', () => { describe('basics', () => { it('should init clone and free', () => { - let doc1 = Automerge.init() + let doc1 = Automerge.create() let doc2 = doc1.clone() doc1.free() doc2.free() }) it('should be able to start and commit', () => { - let doc = Automerge.init() + let doc = Automerge.create() doc.commit() doc.free() }) it('getting a nonexistant prop does not throw an error', () => { - let doc = Automerge.init() + let doc = Automerge.create() let root = "_root" let result = doc.value(root,"hello") assert.deepEqual(result,[]) @@ -58,7 +58,7 @@ describe('Automerge', () => { }) it('should be able to set and get a simple value', () => { - let doc = Automerge.init() + let doc = Automerge.create() let root = "_root" let result @@ -108,7 +108,7 @@ describe('Automerge', () => { }) it('should be able to use bytes', () => { - let doc = Automerge.init() + let doc = Automerge.create() doc.set("_root","data1", new Uint8Array([10,11,12])); doc.set("_root","data2", new Uint8Array([13,14,15]), "bytes"); let value1 = doc.value("_root", "data1") @@ -119,7 +119,7 @@ describe('Automerge', () => { }) it('should be able to make sub objects', () => { - let doc = Automerge.init() + let doc = Automerge.create() let root = "_root" let result @@ -136,7 +136,7 @@ describe('Automerge', () => { }) it('should be able to make lists', () => { - let doc = Automerge.init() + let doc = Automerge.create() let root = "_root" let submap = doc.set(root, "numbers", LIST) @@ -159,7 +159,7 @@ describe('Automerge', () => { }) it('lists have insert, set, splice, and push ops', () => { - let doc = Automerge.init() + let doc = Automerge.create() let root = "_root" let submap = doc.set(root, "letters", LIST) @@ -180,7 +180,7 @@ describe('Automerge', () => { }) 
it('should be able delete non-existant props', () => { - let doc = Automerge.init() + let doc = Automerge.create() doc.set("_root", "foo","bar") doc.set("_root", "bip","bap") @@ -199,7 +199,7 @@ describe('Automerge', () => { }) it('should be able to del', () => { - let doc = Automerge.init() + let doc = Automerge.create() let root = "_root" doc.set(root, "xxx", "xxx"); @@ -210,7 +210,7 @@ describe('Automerge', () => { }) it('should be able to use counters', () => { - let doc = Automerge.init() + let doc = Automerge.create() let root = "_root" doc.set(root, "counter", 10, "counter"); @@ -223,7 +223,7 @@ describe('Automerge', () => { }) it('should be able to splice text', () => { - let doc = Automerge.init() + let doc = Automerge.create() let root = "_root"; let text = doc.set(root, "text", Automerge.TEXT); @@ -240,7 +240,7 @@ describe('Automerge', () => { }) it('should be able save all or incrementally', () => { - let doc = Automerge.init() + let doc = Automerge.create() doc.set("_root", "foo", 1) @@ -261,9 +261,9 @@ describe('Automerge', () => { assert.notDeepEqual(saveA, saveB); - let docA = Automerge.load(saveA); - let docB = Automerge.load(saveB); - let docC = Automerge.load(saveMidway) + let docA = Automerge.loadDoc(saveA); + let docB = Automerge.loadDoc(saveB); + let docC = Automerge.loadDoc(saveMidway) docC.loadIncremental(save3) assert.deepEqual(docA.keys("_root"), docB.keys("_root")); @@ -276,7 +276,7 @@ describe('Automerge', () => { }) it('should be able to splice text', () => { - let doc = Automerge.init() + let doc = Automerge.create() let text = doc.set("_root", "text", TEXT); doc.splice(text, 0, 0, "hello world"); let heads1 = doc.commit(); @@ -292,10 +292,10 @@ describe('Automerge', () => { }) it('local inc increments all visible counters in a map', () => { - let doc1 = Automerge.init("aaaa") + let doc1 = Automerge.create("aaaa") doc1.set("_root", "hello", "world") - let doc2 = Automerge.load(doc1.save(), "bbbb"); - let doc3 = 
Automerge.load(doc1.save(), "cccc"); + let doc2 = Automerge.loadDoc(doc1.save(), "bbbb"); + let doc3 = Automerge.loadDoc(doc1.save(), "cccc"); doc1.set("_root", "cnt", 20) doc2.set("_root", "cnt", 0, "counter") doc3.set("_root", "cnt", 10, "counter") @@ -315,7 +315,7 @@ describe('Automerge', () => { ]) let save1 = doc1.save() - let doc4 = Automerge.load(save1) + let doc4 = Automerge.loadDoc(save1) assert.deepEqual(doc4.save(), save1); doc1.free() doc2.free() @@ -324,11 +324,11 @@ describe('Automerge', () => { }) it('local inc increments all visible counters in a sequence', () => { - let doc1 = Automerge.init("aaaa") + let doc1 = Automerge.create("aaaa") let seq = doc1.set("_root", "seq", LIST) doc1.insert(seq, 0, "hello") - let doc2 = Automerge.load(doc1.save(), "bbbb"); - let doc3 = Automerge.load(doc1.save(), "cccc"); + let doc2 = Automerge.loadDoc(doc1.save(), "bbbb"); + let doc3 = Automerge.loadDoc(doc1.save(), "cccc"); doc1.set(seq, 0, 20) doc2.set(seq, 0, 0, "counter") doc3.set(seq, 0, 10, "counter") @@ -348,7 +348,7 @@ describe('Automerge', () => { ]) let save = doc1.save() - let doc4 = Automerge.load(save) + let doc4 = Automerge.loadDoc(save) assert.deepEqual(doc4.save(), save); doc1.free() doc2.free() @@ -357,7 +357,7 @@ describe('Automerge', () => { }) it('only returns an object id when objects are created', () => { - let doc = Automerge.init("aaaa") + let doc = Automerge.create("aaaa") let r1 = doc.set("_root","foo","bar") let r2 = doc.set("_root","list",LIST) let r3 = doc.set("_root","counter",10, "counter") @@ -380,13 +380,13 @@ describe('Automerge', () => { }) it('objects without properties are preserved', () => { - let doc1 = Automerge.init("aaaa") + let doc1 = Automerge.create("aaaa") let a = doc1.set("_root","a",MAP); let b = doc1.set("_root","b",MAP); let c = doc1.set("_root","c",MAP); let d = doc1.set(c,"d","dd"); let saved = doc1.save(); - let doc2 = Automerge.load(saved); + let doc2 = Automerge.loadDoc(saved); 
assert.deepEqual(doc2.value("_root","a"),["map",a]) assert.deepEqual(doc2.keys(a),[]) assert.deepEqual(doc2.value("_root","b"),["map",b]) @@ -400,7 +400,7 @@ describe('Automerge', () => { }) describe('sync', () => { it('should send a sync message implying no local data', () => { - let doc = Automerge.init() + let doc = Automerge.create() let s1 = initSyncState() let m1 = doc.generateSyncMessage(s1) const message = decodeSyncMessage(m1) @@ -413,7 +413,7 @@ describe('Automerge', () => { }) it('should not reply if we have no data as well', () => { - let n1 = Automerge.init(), n2 = Automerge.init() + let n1 = Automerge.create(), n2 = Automerge.create() let s1 = initSyncState(), s2 = initSyncState() let m1 = n1.generateSyncMessage(s1) n2.receiveSyncMessage(s2, m1) @@ -422,7 +422,7 @@ describe('Automerge', () => { }) it('repos with equal heads do not need a reply message', () => { - let n1 = Automerge.init(), n2 = Automerge.init() + let n1 = Automerge.create(), n2 = Automerge.create() let s1 = initSyncState(), s2 = initSyncState() // make two nodes with the same changes @@ -446,7 +446,7 @@ describe('Automerge', () => { }) it('n1 should offer all changes to n2 when starting from nothing', () => { - let n1 = Automerge.init(), n2 = Automerge.init() + let n1 = Automerge.create(), n2 = Automerge.create() // make changes for n1 that n2 should request let list = n1.set("_root","n",LIST) @@ -462,7 +462,7 @@ describe('Automerge', () => { }) it('should sync peers where one has commits the other does not', () => { - let n1 = Automerge.init(), n2 = Automerge.init() + let n1 = Automerge.create(), n2 = Automerge.create() // make changes for n1 that n2 should request let list = n1.set("_root","n",LIST) @@ -479,7 +479,7 @@ describe('Automerge', () => { it('should work with prior sync state', () => { // create & synchronize two nodes - let n1 = Automerge.init(), n2 = Automerge.init() + let n1 = Automerge.create(), n2 = Automerge.create() let s1 = initSyncState(), s2 = initSyncState() for 
(let i = 0; i < 5; i++) { @@ -502,7 +502,7 @@ describe('Automerge', () => { it('should not generate messages once synced', () => { // create & synchronize two nodes - let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') + let n1 = Automerge.create('abc123'), n2 = Automerge.create('def456') let s1 = initSyncState(), s2 = initSyncState() let message, patch @@ -546,7 +546,7 @@ describe('Automerge', () => { it('should allow simultaneous messages during synchronization', () => { // create & synchronize two nodes - let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') + let n1 = Automerge.create('abc123'), n2 = Automerge.create('def456') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -618,7 +618,7 @@ describe('Automerge', () => { }) it('should assume sent changes were recieved until we hear otherwise', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') let s1 = initSyncState(), s2 = initSyncState(), message = null let items = n1.set("_root", "items", LIST) @@ -645,7 +645,7 @@ describe('Automerge', () => { it('should work regardless of who initiates the exchange', () => { // create & synchronize two nodes - let n1 = Automerge.init(), n2 = Automerge.init() + let n1 = Automerge.create(), n2 = Automerge.create() let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -673,7 +673,7 @@ describe('Automerge', () => { // lastSync is undefined. // create two peers both with divergent commits - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -706,7 +706,7 @@ describe('Automerge', () => { // lastSync is c9. 
// create two peers both with divergent commits - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -735,7 +735,7 @@ describe('Automerge', () => { }) it('should ensure non-empty state after sync', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 3; i++) { @@ -754,7 +754,7 @@ describe('Automerge', () => { // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. // we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() // n1 makes three changes, which we sync to n2 @@ -800,7 +800,7 @@ describe('Automerge', () => { }) it('should resync after one node experiences data loss without disconnecting', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() // n1 makes three changes, which we sync to n2 @@ -814,7 +814,7 @@ describe('Automerge', () => { assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) assert.deepStrictEqual(n1.toJS(), n2.toJS()) - let n2AfterDataLoss = Automerge.init('89abcdef') + let n2AfterDataLoss = Automerge.create('89abcdef') // "n2" now has no data, but n1 still thinks it does. 
Note we don't do // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting @@ -824,7 +824,7 @@ describe('Automerge', () => { }) it('should handle changes concurrent to the last sync heads', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('fedcba98') let s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() // Change 1 is known to all three nodes @@ -857,7 +857,7 @@ describe('Automerge', () => { }) it('should handle histories with lots of branching and merging', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('fedcba98') n1.set("_root","x",0); n1.commit("",0) n2.applyChanges([n1.getLastLocalChange()]) n3.applyChanges([n1.getLastLocalChange()]) @@ -897,7 +897,7 @@ describe('Automerge', () => { // `-- n2 // where n2 is a false positive in the Bloom filter containing {n1}. // lastSync is c9. - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -933,8 +933,8 @@ describe('Automerge', () => { // `-- n2c1 <-- n2c2 // where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}. // lastSync is c9. 
- n1 = Automerge.init('01234567') - n2 = Automerge.init('89abcdef') + n1 = Automerge.create('01234567') + n2 = Automerge.create('89abcdef') s1 = initSyncState() s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -1000,7 +1000,7 @@ describe('Automerge', () => { assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent // n3 is a node that doesn't have the missing change. Nevertheless n1 is going to ask n3 for it - let n3 = Automerge.init('fedcba98'), s13 = initSyncState(), s31 = initSyncState() + let n3 = Automerge.create('fedcba98'), s13 = initSyncState(), s31 = initSyncState() sync(n1, n3, s13, s31) assert.deepStrictEqual(n1.getHeads(), [n1hash2]) assert.deepStrictEqual(n3.getHeads(), [n1hash2]) @@ -1013,7 +1013,7 @@ describe('Automerge', () => { // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}. // lastSync is c4. - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() let n1hash3, n2hash3 @@ -1067,7 +1067,7 @@ describe('Automerge', () => { // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. // lastSync is c4. - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -1107,7 +1107,7 @@ describe('Automerge', () => { // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- n2 // where n2 causes a false positive in the Bloom filter containing {n1}. 
- let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() let message @@ -1163,7 +1163,7 @@ describe('Automerge', () => { // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('76543210') let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState() let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() let message1, message2, message3 @@ -1228,7 +1228,7 @@ describe('Automerge', () => { }) it('should allow any change to be requested', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() let message = null @@ -1254,7 +1254,7 @@ describe('Automerge', () => { }) it('should ignore requests for a nonexistent change', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() let message = null @@ -1274,7 +1274,7 @@ describe('Automerge', () => { // ,-- c1 <-- c2 // c0 <-+ // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('76543210') let s1 = initSyncState(), s2 = initSyncState() let msg, decodedMsg From 9ff0c60ccb3b821769d5c94e73af5ad9ec228b6b Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 28 Jan 2022 18:05:33 -0500 Subject: [PATCH 
041/730] add cra example code --- automerge-wasm/Cargo.toml | 2 +- automerge-wasm/package.json | 1 + automerge-wasm/src/lib.rs | 17 +++++++++++ automerge/src/value.rs | 8 +++++ examples/cra/.gitignore | 2 ++ examples/cra/README.md | 22 ++++++++++++++ examples/cra/package.json | 44 ++++++++++++++++++++++++++++ examples/cra/public/favicon.ico | Bin 0 -> 3870 bytes examples/cra/public/index.html | 43 +++++++++++++++++++++++++++ examples/cra/public/logo192.png | Bin 0 -> 5347 bytes examples/cra/public/logo512.png | Bin 0 -> 9664 bytes examples/cra/public/manifest.json | 25 ++++++++++++++++ examples/cra/public/robots.txt | 3 ++ examples/cra/src/.App.tsx.swp | Bin 0 -> 12288 bytes examples/cra/src/App.css | 38 ++++++++++++++++++++++++ examples/cra/src/App.test.tsx | 9 ++++++ examples/cra/src/App.tsx | 43 +++++++++++++++++++++++++++ examples/cra/src/index.css | 13 ++++++++ examples/cra/src/index.tsx | 17 +++++++++++ examples/cra/src/logo.svg | 1 + examples/cra/src/react-app-env.d.ts | 1 + examples/cra/src/reportWebVitals.ts | 15 ++++++++++ examples/cra/src/setupTests.ts | 5 ++++ examples/cra/tsconfig.json | 26 ++++++++++++++++ 24 files changed, 334 insertions(+), 1 deletion(-) create mode 100644 examples/cra/.gitignore create mode 100644 examples/cra/README.md create mode 100644 examples/cra/package.json create mode 100644 examples/cra/public/favicon.ico create mode 100644 examples/cra/public/index.html create mode 100644 examples/cra/public/logo192.png create mode 100644 examples/cra/public/logo512.png create mode 100644 examples/cra/public/manifest.json create mode 100644 examples/cra/public/robots.txt create mode 100644 examples/cra/src/.App.tsx.swp create mode 100644 examples/cra/src/App.css create mode 100644 examples/cra/src/App.test.tsx create mode 100644 examples/cra/src/App.tsx create mode 100644 examples/cra/src/index.css create mode 100644 examples/cra/src/index.tsx create mode 100644 examples/cra/src/logo.svg create mode 100644 
examples/cra/src/react-app-env.d.ts create mode 100644 examples/cra/src/reportWebVitals.ts create mode 100644 examples/cra/src/setupTests.ts create mode 100644 examples/cra/tsconfig.json diff --git a/automerge-wasm/Cargo.toml b/automerge-wasm/Cargo.toml index 995bfbb2..ba29a413 100644 --- a/automerge-wasm/Cargo.toml +++ b/automerge-wasm/Cargo.toml @@ -39,7 +39,7 @@ version = "^0.2" features = ["serde-serialize", "std"] [package.metadata.wasm-pack.profile.release] -wasm-opt = false +# wasm-opt = false [package.metadata.wasm-pack.profile.profiling] wasm-opt = false diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index e5cc5114..a67f957e 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -19,6 +19,7 @@ "scripts": { "build": "rimraf ./dev && wasm-pack build --target nodejs --dev --out-name index -d dev", "release": "rimraf ./dev && wasm-pack build --target nodejs --release --out-name index -d dev && yarn opt", + "pkg": "rimraf ./pkg && wasm-pack build --target web --release --out-name index -d pkg && cd pkg && yarn pack && mv automerge-wasm*tgz ..", "prof": "rimraf ./dev && wasm-pack build --target nodejs --profiling --out-name index -d dev", "opt": "wasm-opt -Oz dev/index_bg.wasm -o tmp.wasm && mv tmp.wasm dev/index_bg.wasm", "test": "yarn build && mocha --bail --full-trace" diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 46d06490..5ab2e5cb 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -183,6 +183,23 @@ impl Automerge { Ok(opid.map(|id| id.to_string())) } + pub fn make( + &mut self, + obj: String, + prop: JsValue, + value: JsValue, + ) -> Result { + let obj = self.import(obj)?; + let prop = self.import_prop(prop)?; + let value = self.import_value(value, None)?; + if value.is_object() { + let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; + Ok(opid.unwrap().to_string()) + } else { + Err("invalid object type".into()) + } + } + pub fn inc(&mut self, obj: 
String, prop: JsValue, value: JsValue) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; diff --git a/automerge/src/value.rs b/automerge/src/value.rs index 5f54d825..eef757fd 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -62,6 +62,14 @@ impl Value { pub fn bytes(b: Vec) -> Value { Value::Scalar(ScalarValue::Bytes(b)) } + + pub fn is_object(&self) -> bool { + matches!(&self, Value::Object(_)) + } + + pub fn is_scalar(&self) -> bool { + matches!(&self, Value::Scalar(_)) + } } impl From<&str> for Value { diff --git a/examples/cra/.gitignore b/examples/cra/.gitignore new file mode 100644 index 00000000..d5f19d89 --- /dev/null +++ b/examples/cra/.gitignore @@ -0,0 +1,2 @@ +node_modules +package-lock.json diff --git a/examples/cra/README.md b/examples/cra/README.md new file mode 100644 index 00000000..0b1e4ca5 --- /dev/null +++ b/examples/cra/README.md @@ -0,0 +1,22 @@ +## Example CRA App using AutomergeWASM + +### Creating this example app + +```bash + $ cd automerge-wasm && yarn pkg # this builds the npm package + $ cd ../examples + $ npx create-react-app cra --template typescript + $ cd cra + $ npm install ../../automerge-wasm/automerge-wasm-v0.1.0.tgz +``` + +Then I just needed to add the import "automerge-wasm" and `{ useEffect, useState }` code to `./src/App.tsx` + +```bash + $ npm start +``` + +### Open Issues + +The example app currently doesn't do anything useful. Perhaps someone with some react experience and figure out the right way to wire everything up for an actual demo. 
+ diff --git a/examples/cra/package.json b/examples/cra/package.json new file mode 100644 index 00000000..76d55ff8 --- /dev/null +++ b/examples/cra/package.json @@ -0,0 +1,44 @@ +{ + "name": "cra", + "version": "0.1.0", + "private": true, + "dependencies": { + "@testing-library/jest-dom": "^5.16.1", + "@testing-library/react": "^12.1.2", + "@testing-library/user-event": "^13.5.0", + "@types/jest": "^27.4.0", + "@types/node": "^16.11.21", + "@types/react": "^17.0.38", + "@types/react-dom": "^17.0.11", + "automerge-wasm": "file:../automerge-wasm-v0.1.0.tgz", + "react": "^17.0.2", + "react-dom": "^17.0.2", + "react-scripts": "5.0.0", + "typescript": "^4.5.5", + "web-vitals": "^2.1.4" + }, + "scripts": { + "start": "react-scripts start", + "build": "react-scripts build", + "test": "react-scripts test", + "eject": "react-scripts eject" + }, + "eslintConfig": { + "extends": [ + "react-app", + "react-app/jest" + ] + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + } +} diff --git a/examples/cra/public/favicon.ico b/examples/cra/public/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..a11777cc471a4344702741ab1c8a588998b1311a GIT binary patch literal 3870 zcma);c{J4h9>;%nil|2-o+rCuEF-(I%-F}ijC~o(k~HKAkr0)!FCj~d>`RtpD?8b; zXOC1OD!V*IsqUwzbMF1)-gEDD=A573Z-&G7^LoAC9|WO7Xc0Cx1g^Zu0u_SjAPB3vGa^W|sj)80f#V0@M_CAZTIO(t--xg= z!sii`1giyH7EKL_+Wi0ab<)&E_0KD!3Rp2^HNB*K2@PHCs4PWSA32*-^7d{9nH2_E zmC{C*N*)(vEF1_aMamw2A{ZH5aIDqiabnFdJ|y0%aS|64E$`s2ccV~3lR!u<){eS` z#^Mx6o(iP1Ix%4dv`t@!&Za-K@mTm#vadc{0aWDV*_%EiGK7qMC_(`exc>-$Gb9~W!w_^{*pYRm~G zBN{nA;cm^w$VWg1O^^<6vY`1XCD|s_zv*g*5&V#wv&s#h$xlUilPe4U@I&UXZbL z0)%9Uj&@yd03n;!7do+bfixH^FeZ-Ema}s;DQX2gY+7g0s(9;`8GyvPY1*vxiF&|w z>!vA~GA<~JUqH}d;DfBSi^IT*#lrzXl$fNpq0_T1tA+`A$1?(gLb?e#0>UELvljtQ 
zK+*74m0jn&)5yk8mLBv;=@}c{t0ztT<v;Avck$S6D`Z)^c0(jiwKhQsn|LDRY&w(Fmi91I7H6S;b0XM{e zXp0~(T@k_r-!jkLwd1_Vre^v$G4|kh4}=Gi?$AaJ)3I+^m|Zyj#*?Kp@w(lQdJZf4 z#|IJW5z+S^e9@(6hW6N~{pj8|NO*>1)E=%?nNUAkmv~OY&ZV;m-%?pQ_11)hAr0oAwILrlsGawpxx4D43J&K=n+p3WLnlDsQ$b(9+4 z?mO^hmV^F8MV{4Lx>(Q=aHhQ1){0d*(e&s%G=i5rq3;t{JC zmgbn5Nkl)t@fPH$v;af26lyhH!k+#}_&aBK4baYPbZy$5aFx4}ka&qxl z$=Rh$W;U)>-=S-0=?7FH9dUAd2(q#4TCAHky!$^~;Dz^j|8_wuKc*YzfdAht@Q&ror?91Dm!N03=4=O!a)I*0q~p0g$Fm$pmr$ zb;wD;STDIi$@M%y1>p&_>%?UP($15gou_ue1u0!4(%81;qcIW8NyxFEvXpiJ|H4wz z*mFT(qVx1FKufG11hByuX%lPk4t#WZ{>8ka2efjY`~;AL6vWyQKpJun2nRiZYDij$ zP>4jQXPaP$UC$yIVgGa)jDV;F0l^n(V=HMRB5)20V7&r$jmk{UUIe zVjKroK}JAbD>B`2cwNQ&GDLx8{pg`7hbA~grk|W6LgiZ`8y`{Iq0i>t!3p2}MS6S+ zO_ruKyAElt)rdS>CtF7j{&6rP-#c=7evGMt7B6`7HG|-(WL`bDUAjyn+k$mx$CH;q2Dz4x;cPP$hW=`pFfLO)!jaCL@V2+F)So3}vg|%O*^T1j>C2lx zsURO-zIJC$^$g2byVbRIo^w>UxK}74^TqUiRR#7s_X$e)$6iYG1(PcW7un-va-S&u zHk9-6Zn&>T==A)lM^D~bk{&rFzCi35>UR!ZjQkdSiNX*-;l4z9j*7|q`TBl~Au`5& z+c)*8?#-tgUR$Zd%Q3bs96w6k7q@#tUn`5rj+r@_sAVVLqco|6O{ILX&U-&-cbVa3 zY?ngHR@%l{;`ri%H*0EhBWrGjv!LE4db?HEWb5mu*t@{kv|XwK8?npOshmzf=vZA@ zVSN9sL~!sn?r(AK)Q7Jk2(|M67Uy3I{eRy z_l&Y@A>;vjkWN5I2xvFFTLX0i+`{qz7C_@bo`ZUzDugfq4+>a3?1v%)O+YTd6@Ul7 zAfLfm=nhZ`)P~&v90$&UcF+yXm9sq!qCx3^9gzIcO|Y(js^Fj)Rvq>nQAHI92ap=P z10A4@prk+AGWCb`2)dQYFuR$|H6iDE8p}9a?#nV2}LBCoCf(Xi2@szia7#gY>b|l!-U`c}@ zLdhvQjc!BdLJvYvzzzngnw51yRYCqh4}$oRCy-z|v3Hc*d|?^Wj=l~18*E~*cR_kU z{XsxM1i{V*4GujHQ3DBpl2w4FgFR48Nma@HPgnyKoIEY-MqmMeY=I<%oG~l!f<+FN z1ZY^;10j4M4#HYXP zw5eJpA_y(>uLQ~OucgxDLuf}fVs272FaMxhn4xnDGIyLXnw>Xsd^J8XhcWIwIoQ9} z%FoSJTAGW(SRGwJwb=@pY7r$uQRK3Zd~XbxU)ts!4XsJrCycrWSI?e!IqwqIR8+Jh zlRjZ`UO1I!BtJR_2~7AbkbSm%XQqxEPkz6BTGWx8e}nQ=w7bZ|eVP4?*Tb!$(R)iC z9)&%bS*u(lXqzitAN)Oo=&Ytn>%Hzjc<5liuPi>zC_nw;Z0AE3Y$Jao_Q90R-gl~5 z_xAb2J%eArrC1CN4G$}-zVvCqF1;H;abAu6G*+PDHSYFx@Tdbfox*uEd3}BUyYY-l zTfEsOqsi#f9^FoLO;ChK<554qkri&Av~SIM*{fEYRE?vH7pTAOmu2pz3X?Wn*!ROX ztd54huAk&mFBemMooL33RV-*1f0Q3_(7hl$<#*|WF9P!;r;4_+X~k~uKEqdzZ$5Al zV63XN@)j$FN#cCD;ek1R#l 
zv%pGrhB~KWgoCj%GT?%{@@o(AJGt*PG#l3i>lhmb_twKH^EYvacVY-6bsCl5*^~L0 zonm@lk2UvvTKr2RS%}T>^~EYqdL1q4nD%0n&Xqr^cK^`J5W;lRRB^R-O8b&HENO||mo0xaD+S=I8RTlIfVgqN@SXDr2&-)we--K7w= zJVU8?Z+7k9dy;s;^gDkQa`0nz6N{T?(A&Iz)2!DEecLyRa&FI!id#5Z7B*O2=PsR0 zEvc|8{NS^)!d)MDX(97Xw}m&kEO@5jqRaDZ!+%`wYOI<23q|&js`&o4xvjP7D_xv@ z5hEwpsp{HezI9!~6O{~)lLR@oF7?J7i>1|5a~UuoN=q&6N}EJPV_GD`&M*v8Y`^2j zKII*d_@Fi$+i*YEW+Hbzn{iQk~yP z>7N{S4)r*!NwQ`(qcN#8SRQsNK6>{)X12nbF`*7#ecO7I)Q$uZsV+xS4E7aUn+U(K baj7?x%VD!5Cxk2YbYLNVeiXvvpMCWYo=by@ literal 0 HcmV?d00001 diff --git a/examples/cra/public/index.html b/examples/cra/public/index.html new file mode 100644 index 00000000..aa069f27 --- /dev/null +++ b/examples/cra/public/index.html @@ -0,0 +1,43 @@ + + + + + + + + + + + + + React App + + + +
+ + + diff --git a/examples/cra/public/logo192.png b/examples/cra/public/logo192.png new file mode 100644 index 0000000000000000000000000000000000000000..fc44b0a3796c0e0a64c3d858ca038bd4570465d9 GIT binary patch literal 5347 zcmZWtbyO6NvR-oO24RV%BvuJ&=?+<7=`LvyB&A_#M7mSDYw1v6DJkiYl9XjT!%$dLEBTQ8R9|wd3008in6lFF3GV-6mLi?MoP_y~}QUnaDCHI#t z7w^m$@6DI)|C8_jrT?q=f8D?0AM?L)Z}xAo^e^W>t$*Y0KlT5=@bBjT9kxb%-KNdk zeOS1tKO#ChhG7%{ApNBzE2ZVNcxbrin#E1TiAw#BlUhXllzhN$qWez5l;h+t^q#Eav8PhR2|T}y5kkflaK`ba-eoE+Z2q@o6P$)=&` z+(8}+-McnNO>e#$Rr{32ngsZIAX>GH??tqgwUuUz6kjns|LjsB37zUEWd|(&O!)DY zQLrq%Y>)Y8G`yYbYCx&aVHi@-vZ3|ebG!f$sTQqMgi0hWRJ^Wc+Ibv!udh_r%2|U) zPi|E^PK?UE!>_4`f`1k4hqqj_$+d!EB_#IYt;f9)fBOumGNyglU(ofY`yHq4Y?B%- zp&G!MRY<~ajTgIHErMe(Z8JG*;D-PJhd@RX@QatggM7+G(Lz8eZ;73)72Hfx5KDOE zkT(m}i2;@X2AT5fW?qVp?@WgN$aT+f_6eo?IsLh;jscNRp|8H}Z9p_UBO^SJXpZew zEK8fz|0Th%(Wr|KZBGTM4yxkA5CFdAj8=QSrT$fKW#tweUFqr0TZ9D~a5lF{)%-tTGMK^2tz(y2v$i%V8XAxIywrZCp=)83p(zIk6@S5AWl|Oa2hF`~~^W zI;KeOSkw1O#TiQ8;U7OPXjZM|KrnN}9arP)m0v$c|L)lF`j_rpG(zW1Qjv$=^|p*f z>)Na{D&>n`jOWMwB^TM}slgTEcjxTlUby89j1)|6ydRfWERn3|7Zd2&e7?!K&5G$x z`5U3uFtn4~SZq|LjFVrz$3iln-+ucY4q$BC{CSm7Xe5c1J<=%Oagztj{ifpaZk_bQ z9Sb-LaQMKp-qJA*bP6DzgE3`}*i1o3GKmo2pn@dj0;He}F=BgINo};6gQF8!n0ULZ zL>kC0nPSFzlcB7p41doao2F7%6IUTi_+!L`MM4o*#Y#0v~WiO8uSeAUNp=vA2KaR&=jNR2iVwG>7t%sG2x_~yXzY)7K& zk3p+O0AFZ1eu^T3s};B%6TpJ6h-Y%B^*zT&SN7C=N;g|#dGIVMSOru3iv^SvO>h4M=t-N1GSLLDqVTcgurco6)3&XpU!FP6Hlrmj}f$ zp95;b)>M~`kxuZF3r~a!rMf4|&1=uMG$;h^g=Kl;H&Np-(pFT9FF@++MMEx3RBsK?AU0fPk-#mdR)Wdkj)`>ZMl#^<80kM87VvsI3r_c@_vX=fdQ`_9-d(xiI z4K;1y1TiPj_RPh*SpDI7U~^QQ?%0&!$Sh#?x_@;ag)P}ZkAik{_WPB4rHyW#%>|Gs zdbhyt=qQPA7`?h2_8T;-E6HI#im9K>au*(j4;kzwMSLgo6u*}-K`$_Gzgu&XE)udQ zmQ72^eZd|vzI)~!20JV-v-T|<4@7ruqrj|o4=JJPlybwMg;M$Ud7>h6g()CT@wXm` zbq=A(t;RJ^{Xxi*Ff~!|3!-l_PS{AyNAU~t{h;(N(PXMEf^R(B+ZVX3 z8y0;0A8hJYp@g+c*`>eTA|3Tgv9U8#BDTO9@a@gVMDxr(fVaEqL1tl?md{v^j8aUv zm&%PX4^|rX|?E4^CkplWWNv*OKM>DxPa z!RJ)U^0-WJMi)Ksc!^ixOtw^egoAZZ2Cg;X7(5xZG7yL_;UJ#yp*ZD-;I^Z9qkP`} 
zwCTs0*%rIVF1sgLervtnUo&brwz?6?PXRuOCS*JI-WL6GKy7-~yi0giTEMmDs_-UX zo=+nFrW_EfTg>oY72_4Z0*uG>MnXP=c0VpT&*|rvv1iStW;*^={rP1y?Hv+6R6bxFMkxpWkJ>m7Ba{>zc_q zEefC3jsXdyS5??Mz7IET$Kft|EMNJIv7Ny8ZOcKnzf`K5Cd)&`-fTY#W&jnV0l2vt z?Gqhic}l}mCv1yUEy$%DP}4AN;36$=7aNI^*AzV(eYGeJ(Px-j<^gSDp5dBAv2#?; zcMXv#aj>%;MiG^q^$0MSg-(uTl!xm49dH!{X0){Ew7ThWV~Gtj7h%ZD zVN-R-^7Cf0VH!8O)uUHPL2mO2tmE*cecwQv_5CzWeh)ykX8r5Hi`ehYo)d{Jnh&3p z9ndXT$OW51#H5cFKa76c<%nNkP~FU93b5h-|Cb}ScHs@4Q#|}byWg;KDMJ#|l zE=MKD*F@HDBcX@~QJH%56eh~jfPO-uKm}~t7VkHxHT;)4sd+?Wc4* z>CyR*{w@4(gnYRdFq=^(#-ytb^5ESD?x<0Skhb%Pt?npNW1m+Nv`tr9+qN<3H1f<% zZvNEqyK5FgPsQ`QIu9P0x_}wJR~^CotL|n zk?dn;tLRw9jJTur4uWoX6iMm914f0AJfB@C74a;_qRrAP4E7l890P&{v<}>_&GLrW z)klculcg`?zJO~4;BBAa=POU%aN|pmZJn2{hA!d!*lwO%YSIzv8bTJ}=nhC^n}g(ld^rn#kq9Z3)z`k9lvV>y#!F4e{5c$tnr9M{V)0m(Z< z#88vX6-AW7T2UUwW`g<;8I$Jb!R%z@rCcGT)-2k7&x9kZZT66}Ztid~6t0jKb&9mm zpa}LCb`bz`{MzpZR#E*QuBiZXI#<`5qxx=&LMr-UUf~@dRk}YI2hbMsAMWOmDzYtm zjof16D=mc`^B$+_bCG$$@R0t;e?~UkF?7<(vkb70*EQB1rfUWXh$j)R2)+dNAH5%R zEBs^?N;UMdy}V};59Gu#0$q53$}|+q7CIGg_w_WlvE}AdqoS<7DY1LWS9?TrfmcvT zaypmplwn=P4;a8-%l^e?f`OpGb}%(_mFsL&GywhyN(-VROj`4~V~9bGv%UhcA|YW% zs{;nh@aDX11y^HOFXB$a7#Sr3cEtNd4eLm@Y#fc&j)TGvbbMwze zXtekX_wJqxe4NhuW$r}cNy|L{V=t#$%SuWEW)YZTH|!iT79k#?632OFse{+BT_gau zJwQcbH{b}dzKO?^dV&3nTILYlGw{27UJ72ZN){BILd_HV_s$WfI2DC<9LIHFmtyw? 
zQ;?MuK7g%Ym+4e^W#5}WDLpko%jPOC=aN)3!=8)s#Rnercak&b3ESRX3z{xfKBF8L z5%CGkFmGO@x?_mPGlpEej!3!AMddChabyf~nJNZxx!D&{@xEb!TDyvqSj%Y5@A{}9 zRzoBn0?x}=krh{ok3Nn%e)#~uh;6jpezhA)ySb^b#E>73e*frBFu6IZ^D7Ii&rsiU z%jzygxT-n*joJpY4o&8UXr2s%j^Q{?e-voloX`4DQyEK+DmrZh8A$)iWL#NO9+Y@!sO2f@rI!@jN@>HOA< z?q2l{^%mY*PNx2FoX+A7X3N}(RV$B`g&N=e0uvAvEN1W^{*W?zT1i#fxuw10%~))J zjx#gxoVlXREWZf4hRkgdHx5V_S*;p-y%JtGgQ4}lnA~MBz-AFdxUxU1RIT$`sal|X zPB6sEVRjGbXIP0U+?rT|y5+ev&OMX*5C$n2SBPZr`jqzrmpVrNciR0e*Wm?fK6DY& zl(XQZ60yWXV-|Ps!A{EF;=_z(YAF=T(-MkJXUoX zI{UMQDAV2}Ya?EisdEW;@pE6dt;j0fg5oT2dxCi{wqWJ<)|SR6fxX~5CzblPGr8cb zUBVJ2CQd~3L?7yfTpLNbt)He1D>*KXI^GK%<`bq^cUq$Q@uJifG>p3LU(!H=C)aEL zenk7pVg}0{dKU}&l)Y2Y2eFMdS(JS0}oZUuVaf2+K*YFNGHB`^YGcIpnBlMhO7d4@vV zv(@N}(k#REdul8~fP+^F@ky*wt@~&|(&&meNO>rKDEnB{ykAZ}k>e@lad7to>Ao$B zz<1(L=#J*u4_LB=8w+*{KFK^u00NAmeNN7pr+Pf+N*Zl^dO{LM-hMHyP6N!~`24jd zXYP|Ze;dRXKdF2iJG$U{k=S86l@pytLx}$JFFs8e)*Vi?aVBtGJ3JZUj!~c{(rw5>vuRF$`^p!P8w1B=O!skwkO5yd4_XuG^QVF z`-r5K7(IPSiKQ2|U9+`@Js!g6sfJwAHVd|s?|mnC*q zp|B|z)(8+mxXyxQ{8Pg3F4|tdpgZZSoU4P&9I8)nHo1@)9_9u&NcT^FI)6|hsAZFk zZ+arl&@*>RXBf-OZxhZerOr&dN5LW9@gV=oGFbK*J+m#R-|e6(Loz(;g@T^*oO)0R zN`N=X46b{7yk5FZGr#5&n1!-@j@g02g|X>MOpF3#IjZ_4wg{dX+G9eqS+Es9@6nC7 zD9$NuVJI}6ZlwtUm5cCAiYv0(Yi{%eH+}t)!E^>^KxB5^L~a`4%1~5q6h>d;paC9c zTj0wTCKrhWf+F#5>EgX`sl%POl?oyCq0(w0xoL?L%)|Q7d|Hl92rUYAU#lc**I&^6p=4lNQPa0 znQ|A~i0ip@`B=FW-Q;zh?-wF;Wl5!+q3GXDu-x&}$gUO)NoO7^$BeEIrd~1Dh{Tr` z8s<(Bn@gZ(mkIGnmYh_ehXnq78QL$pNDi)|QcT*|GtS%nz1uKE+E{7jdEBp%h0}%r zD2|KmYGiPa4;md-t_m5YDz#c*oV_FqXd85d@eub?9N61QuYcb3CnVWpM(D-^|CmkL z(F}L&N7qhL2PCq)fRh}XO@U`Yn<?TNGR4L(mF7#4u29{i~@k;pLsgl({YW5`Mo+p=zZn3L*4{JU;++dG9 X@eDJUQo;Ye2mwlRs?y0|+_a0zY+Zo%Dkae}+MySoIppb75o?vUW_?)>@g{U2`ERQIXV zeY$JrWnMZ$QC<=ii4X|@0H8`si75jB(ElJb00HAB%>SlLR{!zO|C9P3zxw_U8?1d8uRZ=({Ga4shyN}3 zAK}WA(ds|``G4jA)9}Bt2Hy0+f3rV1E6b|@?hpGA=PI&r8)ah|)I2s(P5Ic*Ndhn^ z*T&j@gbCTv7+8rpYbR^Ty}1AY)YH;p!m948r#%7x^Z@_-w{pDl|1S4`EM3n_PaXvK z1JF)E3qy$qTj5Xs{jU9k=y%SQ0>8E$;x?p9ayU0bZZeo{5Z@&FKX>}s!0+^>C^D#z 
z>xsCPvxD3Z=dP}TTOSJhNTPyVt14VCQ9MQFN`rn!c&_p?&4<5_PGm4a;WS&1(!qKE z_H$;dDdiPQ!F_gsN`2>`X}$I=B;={R8%L~`>RyKcS$72ai$!2>d(YkciA^J0@X%G4 z4cu!%Ps~2JuJ8ex`&;Fa0NQOq_nDZ&X;^A=oc1&f#3P1(!5il>6?uK4QpEG8z0Rhu zvBJ+A9RV?z%v?!$=(vcH?*;vRs*+PPbOQ3cdPr5=tOcLqmfx@#hOqX0iN)wTTO21jH<>jpmwRIAGw7`a|sl?9y9zRBh>(_%| zF?h|P7}~RKj?HR+q|4U`CjRmV-$mLW>MScKnNXiv{vD3&2@*u)-6P@h0A`eeZ7}71 zK(w%@R<4lLt`O7fs1E)$5iGb~fPfJ?WxhY7c3Q>T-w#wT&zW522pH-B%r5v#5y^CF zcC30Se|`D2mY$hAlIULL%-PNXgbbpRHgn<&X3N9W!@BUk@9g*P5mz-YnZBb*-$zMM z7Qq}ic0mR8n{^L|=+diODdV}Q!gwr?y+2m=3HWwMq4z)DqYVg0J~^}-%7rMR@S1;9 z7GFj6K}i32X;3*$SmzB&HW{PJ55kT+EI#SsZf}bD7nW^Haf}_gXciYKX{QBxIPSx2Ma? zHQqgzZq!_{&zg{yxqv3xq8YV+`S}F6A>Gtl39_m;K4dA{pP$BW0oIXJ>jEQ!2V3A2 zdpoTxG&V=(?^q?ZTj2ZUpDUdMb)T?E$}CI>r@}PFPWD9@*%V6;4Ag>D#h>!s)=$0R zRXvdkZ%|c}ubej`jl?cS$onl9Tw52rBKT)kgyw~Xy%z62Lr%V6Y=f?2)J|bZJ5(Wx zmji`O;_B+*X@qe-#~`HFP<{8$w@z4@&`q^Q-Zk8JG3>WalhnW1cvnoVw>*R@c&|o8 zZ%w!{Z+MHeZ*OE4v*otkZqz11*s!#s^Gq>+o`8Z5 z^i-qzJLJh9!W-;SmFkR8HEZJWiXk$40i6)7 zZpr=k2lp}SasbM*Nbn3j$sn0;rUI;%EDbi7T1ZI4qL6PNNM2Y%6{LMIKW+FY_yF3) zSKQ2QSujzNMSL2r&bYs`|i2Dnn z=>}c0>a}>|uT!IiMOA~pVT~R@bGlm}Edf}Kq0?*Af6#mW9f9!}RjW7om0c9Qlp;yK z)=XQs(|6GCadQbWIhYF=rf{Y)sj%^Id-ARO0=O^Ad;Ph+ z0?$eE1xhH?{T$QI>0JP75`r)U_$#%K1^BQ8z#uciKf(C701&RyLQWBUp*Q7eyn76} z6JHpC9}R$J#(R0cDCkXoFSp;j6{x{b&0yE@P7{;pCEpKjS(+1RQy38`=&Yxo%F=3y zCPeefABp34U-s?WmU#JJw23dcC{sPPFc2#J$ZgEN%zod}J~8dLm*fx9f6SpO zn^Ww3bt9-r0XaT2a@Wpw;C23XM}7_14#%QpubrIw5aZtP+CqIFmsG4`Cm6rfxl9n5 z7=r2C-+lM2AB9X0T_`?EW&Byv&K?HS4QLoylJ|OAF z`8atBNTzJ&AQ!>sOo$?^0xj~D(;kS$`9zbEGd>f6r`NC3X`tX)sWgWUUOQ7w=$TO&*j;=u%25ay-%>3@81tGe^_z*C7pb9y*Ed^H3t$BIKH2o+olp#$q;)_ zfpjCb_^VFg5fU~K)nf*d*r@BCC>UZ!0&b?AGk_jTPXaSnCuW110wjHPPe^9R^;jo3 zwvzTl)C`Zl5}O2}3lec=hZ*$JnkW#7enKKc)(pM${_$9Hc=Sr_A9Biwe*Y=T?~1CK z6eZ9uPICjy-sMGbZl$yQmpB&`ouS8v{58__t0$JP%i3R&%QR3ianbZqDs<2#5FdN@n5bCn^ZtH992~5k(eA|8|@G9u`wdn7bnpg|@{m z^d6Y`*$Zf2Xr&|g%sai#5}Syvv(>Jnx&EM7-|Jr7!M~zdAyjt*xl;OLhvW-a%H1m0 z*x5*nb=R5u><7lyVpNAR?q@1U59 zO+)QWwL8t 
zyip?u_nI+K$uh{y)~}qj?(w0&=SE^8`_WMM zTybjG=999h38Yes7}-4*LJ7H)UE8{mE(6;8voE+TYY%33A>S6`G_95^5QHNTo_;Ao ztIQIZ_}49%{8|=O;isBZ?=7kfdF8_@azfoTd+hEJKWE!)$)N%HIe2cplaK`ry#=pV z0q{9w-`i0h@!R8K3GC{ivt{70IWG`EP|(1g7i_Q<>aEAT{5(yD z=!O?kq61VegV+st@XCw475j6vS)_z@efuqQgHQR1T4;|-#OLZNQJPV4k$AX1Uk8Lm z{N*b*ia=I+MB}kWpupJ~>!C@xEN#Wa7V+7{m4j8c?)ChV=D?o~sjT?0C_AQ7B-vxqX30s0I_`2$in86#`mAsT-w?j{&AL@B3$;P z31G4(lV|b}uSDCIrjk+M1R!X7s4Aabn<)zpgT}#gE|mIvV38^ODy@<&yflpCwS#fRf9ZX3lPV_?8@C5)A;T zqmouFLFk;qIs4rA=hh=GL~sCFsXHsqO6_y~*AFt939UYVBSx1s(=Kb&5;j7cSowdE;7()CC2|-i9Zz+_BIw8#ll~-tyH?F3{%`QCsYa*b#s*9iCc`1P1oC26?`g<9))EJ3%xz+O!B3 zZ7$j~To)C@PquR>a1+Dh>-a%IvH_Y7^ys|4o?E%3`I&ADXfC8++hAdZfzIT#%C+Jz z1lU~K_vAm0m8Qk}K$F>|>RPK%<1SI0(G+8q~H zAsjezyP+u!Se4q3GW)`h`NPSRlMoBjCzNPesWJwVTY!o@G8=(6I%4XHGaSiS3MEBK zhgGFv6Jc>L$4jVE!I?TQuwvz_%CyO!bLh94nqK11C2W$*aa2ueGopG8DnBICVUORP zgytv#)49fVXDaR$SukloYC3u7#5H)}1K21=?DKj^U)8G;MS)&Op)g^zR2($<>C*zW z;X7`hLxiIO#J`ANdyAOJle4V%ppa*(+0i3w;8i*BA_;u8gOO6)MY`ueq7stBMJTB; z-a0R>hT*}>z|Gg}@^zDL1MrH+2hsR8 zHc}*9IvuQC^Ju)^#Y{fOr(96rQNPNhxc;mH@W*m206>Lo<*SaaH?~8zg&f&%YiOEG zGiz?*CP>Bci}!WiS=zj#K5I}>DtpregpP_tfZtPa(N<%vo^#WCQ5BTv0vr%Z{)0q+ z)RbfHktUm|lg&U3YM%lMUM(fu}i#kjX9h>GYctkx9Mt_8{@s%!K_EI zScgwy6%_fR?CGJQtmgNAj^h9B#zmaMDWgH55pGuY1Gv7D z;8Psm(vEPiwn#MgJYu4Ty9D|h!?Rj0ddE|&L3S{IP%H4^N!m`60ZwZw^;eg4sk6K{ ziA^`Sbl_4~f&Oo%n;8Ye(tiAdlZKI!Z=|j$5hS|D$bDJ}p{gh$KN&JZYLUjv4h{NY zBJ>X9z!xfDGY z+oh_Z&_e#Q(-}>ssZfm=j$D&4W4FNy&-kAO1~#3Im;F)Nwe{(*75(p=P^VI?X0GFakfh+X-px4a%Uw@fSbmp9hM1_~R>?Z8+ ziy|e9>8V*`OP}4x5JjdWp}7eX;lVxp5qS}0YZek;SNmm7tEeSF*-dI)6U-A%m6YvCgM(}_=k#a6o^%-K4{`B1+}O4x zztDT%hVb;v#?j`lTvlFQ3aV#zkX=7;YFLS$uIzb0E3lozs5`Xy zi~vF+%{z9uLjKvKPhP%x5f~7-Gj+%5N`%^=yk*Qn{`> z;xj&ROY6g`iy2a@{O)V(jk&8#hHACVDXey5a+KDod_Z&}kHM}xt7}Md@pil{2x7E~ zL$k^d2@Ec2XskjrN+IILw;#7((abu;OJii&v3?60x>d_Ma(onIPtcVnX@ELF0aL?T zSmWiL3(dOFkt!x=1O!_0n(cAzZW+3nHJ{2S>tgSK?~cFha^y(l@-Mr2W$%MN{#af8J;V*>hdq!gx=d0h$T7l}>91Wh07)9CTX zh2_ZdQCyFOQ)l(}gft0UZG`Sh2`x-w`5vC2UD}lZs*5 
zG76$akzn}Xi))L3oGJ75#pcN=cX3!=57$Ha=hQ2^lwdyU#a}4JJOz6ddR%zae%#4& za)bFj)z=YQela(F#Y|Q#dp}PJghITwXouVaMq$BM?K%cXn9^Y@g43$=O)F&ZlOUom zJiad#dea;-eywBA@e&D6Pdso1?2^(pXiN91?jvcaUyYoKUmvl5G9e$W!okWe*@a<^ z8cQQ6cNSf+UPDx%?_G4aIiybZHHagF{;IcD(dPO!#=u zWfqLcPc^+7Uu#l(Bpxft{*4lv#*u7X9AOzDO z1D9?^jIo}?%iz(_dwLa{ex#T}76ZfN_Z-hwpus9y+4xaUu9cX}&P{XrZVWE{1^0yw zO;YhLEW!pJcbCt3L8~a7>jsaN{V3>tz6_7`&pi%GxZ=V3?3K^U+*ryLSb)8^IblJ0 zSRLNDvIxt)S}g30?s_3NX>F?NKIGrG_zB9@Z>uSW3k2es_H2kU;Rnn%j5qP)!XHKE zPB2mHP~tLCg4K_vH$xv`HbRsJwbZMUV(t=ez;Ec(vyHH)FbfLg`c61I$W_uBB>i^r z&{_P;369-&>23R%qNIULe=1~T$(DA`ev*EWZ6j(B$(te}x1WvmIll21zvygkS%vwG zzkR6Z#RKA2!z!C%M!O>!=Gr0(J0FP=-MN=5t-Ir)of50y10W}j`GtRCsXBakrKtG& zazmITDJMA0C51&BnLY)SY9r)NVTMs);1<=oosS9g31l{4ztjD3#+2H7u_|66b|_*O z;Qk6nalpqdHOjx|K&vUS_6ITgGll;TdaN*ta=M_YtyC)I9Tmr~VaPrH2qb6sd~=AcIxV+%z{E&0@y=DPArw zdV7z(G1hBx7hd{>(cr43^WF%4Y@PXZ?wPpj{OQ#tvc$pABJbvPGvdR`cAtHn)cSEV zrpu}1tJwQ3y!mSmH*uz*x0o|CS<^w%&KJzsj~DU0cLQUxk5B!hWE>aBkjJle8z~;s z-!A=($+}Jq_BTK5^B!`R>!MulZN)F=iXXeUd0w5lUsE5VP*H*oCy(;?S$p*TVvTxwAeWFB$jHyb0593)$zqalVlDX=GcCN1gU0 zlgU)I$LcXZ8Oyc2TZYTPu@-;7<4YYB-``Qa;IDcvydIA$%kHhJKV^m*-zxcvU4viy&Kr5GVM{IT>WRywKQ9;>SEiQD*NqplK-KK4YR`p0@JW)n_{TU3bt0 zim%;(m1=#v2}zTps=?fU5w^(*y)xT%1vtQH&}50ZF!9YxW=&7*W($2kgKyz1mUgfs zfV<*XVVIFnohW=|j+@Kfo!#liQR^x>2yQdrG;2o8WZR+XzU_nG=Ed2rK?ntA;K5B{ z>M8+*A4!Jm^Bg}aW?R?6;@QG@uQ8&oJ{hFixcfEnJ4QH?A4>P=q29oDGW;L;= z9-a0;g%c`C+Ai!UmK$NC*4#;Jp<1=TioL=t^YM)<<%u#hnnfSS`nq63QKGO1L8RzX z@MFDqs1z ztYmxDl@LU)5acvHk)~Z`RW7=aJ_nGD!mOSYD>5Odjn@TK#LY{jf?+piB5AM-CAoT_ z?S-*q7}wyLJzK>N%eMPuFgN)Q_otKP;aqy=D5f!7<=n(lNkYRXVpkB{TAYLYg{|(jtRqYmg$xH zjmq?B(RE4 zQx^~Pt}gxC2~l=K$$-sYy_r$CO(d=+b3H1MB*y_5g6WLaWTXn+TKQ|hNY^>Mp6k*$ zwkovomhu776vQATqT4blf~g;TY(MWCrf^^yfWJvSAB$p5l;jm@o#=!lqw+Lqfq>X= z$6~kxfm7`3q4zUEB;u4qa#BdJxO!;xGm)wwuisj{0y2x{R(IGMrsIzDY9LW>m!Y`= z04sx3IjnYvL<4JqxQ8f7qYd0s2Ig%`ytYPEMKI)s(LD}D@EY>x`VFtqvnADNBdeao zC96X+MxnwKmjpg{U&gP3HE}1=s!lv&D{6(g_lzyF3A`7Jn*&d_kL<;dAFx!UZ>hB8 
z5A*%LsAn;VLp>3${0>M?PSQ)9s3}|h2e?TG4_F{}{Cs>#3Q*t$(CUc}M)I}8cPF6% z=+h(Kh^8)}gj(0}#e7O^FQ6`~fd1#8#!}LMuo3A0bN`o}PYsm!Y}sdOz$+Tegc=qT z8x`PH$7lvnhJp{kHWb22l;@7B7|4yL4UOOVM0MP_>P%S1Lnid)+k9{+3D+JFa#Pyf zhVc#&df87APl4W9X)F3pGS>@etfl=_E5tBcVoOfrD4hmVeTY-cj((pkn%n@EgN{0f zwb_^Rk0I#iZuHK!l*lN`ceJn(sI{$Fq6nN& zE<-=0_2WN}m+*ivmIOxB@#~Q-cZ>l136w{#TIJe478`KE7@=a{>SzPHsKLzYAyBQO zAtuuF$-JSDy_S@6GW0MOE~R)b;+0f%_NMrW(+V#c_d&U8Z9+ec4=HmOHw?gdjF(Lu zzra83M_BoO-1b3;9`%&DHfuUY)6YDV21P$C!Rc?mv&{lx#f8oc6?0?x zK08{WP65?#>(vPfA-c=MCY|%*1_<3D4NX zeVTi-JGl2uP_2@0F{G({pxQOXt_d{g_CV6b?jNpfUG9;8yle-^4KHRvZs-_2siata zt+d_T@U$&t*xaD22(fH(W1r$Mo?3dc%Tncm=C6{V9y{v&VT#^1L04vDrLM9qBoZ4@ z6DBN#m57hX7$C(=#$Y5$bJmwA$T8jKD8+6A!-IJwA{WOfs%s}yxUw^?MRZjF$n_KN z6`_bGXcmE#5e4Ym)aQJ)xg3Pg0@k`iGuHe?f(5LtuzSq=nS^5z>vqU0EuZ&75V%Z{ zYyhRLN^)$c6Ds{f7*FBpE;n5iglx5PkHfWrj3`x^j^t z7ntuV`g!9Xg#^3!x)l*}IW=(Tz3>Y5l4uGaB&lz{GDjm2D5S$CExLT`I1#n^lBH7Y zDgpMag@`iETKAI=p<5E#LTkwzVR@=yY|uBVI1HG|8h+d;G-qfuj}-ZR6fN>EfCCW z9~wRQoAPEa#aO?3h?x{YvV*d+NtPkf&4V0k4|L=uj!U{L+oLa(z#&iuhJr3-PjO3R z5s?=nn_5^*^Rawr>>Nr@K(jwkB#JK-=+HqwfdO<+P5byeim)wvqGlP-P|~Nse8=XF zz`?RYB|D6SwS}C+YQv+;}k6$-%D(@+t14BL@vM z2q%q?f6D-A5s$_WY3{^G0F131bbh|g!}#BKw=HQ7mx;Dzg4Z*bTLQSfo{ed{4}NZW zfrRm^Ca$rlE{Ue~uYv>R9{3smwATcdM_6+yWIO z*ZRH~uXE@#p$XTbCt5j7j2=86e{9>HIB6xDzV+vAo&B?KUiMP|ttOElepnl%|DPqL b{|{}U^kRn2wo}j7|0ATu<;8xA7zX}7|B6mN literal 0 HcmV?d00001 diff --git a/examples/cra/public/manifest.json b/examples/cra/public/manifest.json new file mode 100644 index 00000000..080d6c77 --- /dev/null +++ b/examples/cra/public/manifest.json @@ -0,0 +1,25 @@ +{ + "short_name": "React App", + "name": "Create React App Sample", + "icons": [ + { + "src": "favicon.ico", + "sizes": "64x64 32x32 24x24 16x16", + "type": "image/x-icon" + }, + { + "src": "logo192.png", + "type": "image/png", + "sizes": "192x192" + }, + { + "src": "logo512.png", + "type": "image/png", + "sizes": "512x512" + } + ], + "start_url": ".", + "display": "standalone", + "theme_color": "#000000", + 
"background_color": "#ffffff" +} diff --git a/examples/cra/public/robots.txt b/examples/cra/public/robots.txt new file mode 100644 index 00000000..e9e57dc4 --- /dev/null +++ b/examples/cra/public/robots.txt @@ -0,0 +1,3 @@ +# https://www.robotstxt.org/robotstxt.html +User-agent: * +Disallow: diff --git a/examples/cra/src/.App.tsx.swp b/examples/cra/src/.App.tsx.swp new file mode 100644 index 0000000000000000000000000000000000000000..aa37a74570ef672a8deb03368eea52e2c89fb073 GIT binary patch literal 12288 zcmeI2L2nyH6vw9&35m9V;0$^!m9iFFZ<<~xv5lgVLs1hD4iP9S)pR|!w_WcnGh@ez z>mt4YClKO@kU$(bBXQ%-2jIkwI|uIYpS9O^Emcu3NR{3y|E%ZDdvD(R?X0CJb9eLb z<{rIO-x9c95~4r+N1p%nRq-VZ+N3)7_A@RcPfs&-h7+bAmI-4ybb6pnPhBxKR%1CR zGG*h$$k>`B-YJT@v*WX~u?G`i0#8QZlDM{Yb<@YZ5xh>Xy!_FVg|jykU;<2l2`~XB zzyz286JP>QH33(g6Q84lXXahJIlrGNfC(@GCcp%q zLIScQ#PRb&ymJA?Tb0mll$FhpZeaa+=VCmYw=C(JGE~&?Ce*CziH{mgNbW(R}?85T7S1#Uai!4pl%pTFz&4{i= z6xd9=e(-odxZx*oeMO%Q)8syN^_cDr2Zbd)!and5nLItg!o5WrUs1L1%B7VHgF8lR z7a&(TgAWJ0A0O1{!2=4aJ>cWuk2VtQ5U0XfU1^@WuvW)Xc^KBH*`f(B4XxVkc2(k5 zY>CdTqa(szkLK{`Hqybj?R>%@z?;(@=841EbN@ae-$X3-1KqmWK8R><=Yv^!G^MW5 z0}7Uh%8#WTRP_>(%Ka{DpXAA&vR3v~gtv9ww9M~Y@y=y6(Mt5(u#BA$H0VZ=I z5%~@Lu1>1h8-1BC7oCr)D8Jfc8kpzZsFx?=6KglNt0=#{XHl5-gSs8{meb`oL`0M2 Q1Be!UD_$Gr3yzro0FPvwxBvhE literal 0 HcmV?d00001 diff --git a/examples/cra/src/App.css b/examples/cra/src/App.css new file mode 100644 index 00000000..74b5e053 --- /dev/null +++ b/examples/cra/src/App.css @@ -0,0 +1,38 @@ +.App { + text-align: center; +} + +.App-logo { + height: 40vmin; + pointer-events: none; +} + +@media (prefers-reduced-motion: no-preference) { + .App-logo { + animation: App-logo-spin infinite 20s linear; + } +} + +.App-header { + background-color: #282c34; + min-height: 100vh; + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + font-size: calc(10px + 2vmin); + color: white; +} + +.App-link { + color: #61dafb; +} + +@keyframes App-logo-spin { + from { + transform: rotate(0deg); + } + to { 
+ transform: rotate(360deg); + } +} diff --git a/examples/cra/src/App.test.tsx b/examples/cra/src/App.test.tsx new file mode 100644 index 00000000..2a68616d --- /dev/null +++ b/examples/cra/src/App.test.tsx @@ -0,0 +1,9 @@ +import React from 'react'; +import { render, screen } from '@testing-library/react'; +import App from './App'; + +test('renders learn react link', () => { + render(); + const linkElement = screen.getByText(/learn react/i); + expect(linkElement).toBeInTheDocument(); +}); diff --git a/examples/cra/src/App.tsx b/examples/cra/src/App.tsx new file mode 100644 index 00000000..f2d2172e --- /dev/null +++ b/examples/cra/src/App.tsx @@ -0,0 +1,43 @@ +import React, { useEffect, useState } from 'react'; +import logo from './logo.svg'; +import './App.css'; +import init from "automerge-wasm" +import { create, loadDoc, encodeChange, decodeChange, + initSyncState, encodeSyncState, decodeSyncState, + encodeSyncMessage, decodeSyncMessage, + LIST, MAP, TEXT } from "automerge-wasm" + +function App() { + const [ val, setVal ] = useState(""); + useEffect(() => { + init().then(() => { + let doc = create() + let edits = doc.set("_root", "edits", TEXT) || "" + doc.splice(edits, 0, 0, "the quick fox jumps over the lazy dog") + doc.splice(edits, 10, 3, "sloth") + let result = doc.text(edits) + setVal(JSON.stringify(result)) + }) + }, []) + return ( + + ); +} + +export default App; diff --git a/examples/cra/src/index.css b/examples/cra/src/index.css new file mode 100644 index 00000000..ec2585e8 --- /dev/null +++ b/examples/cra/src/index.css @@ -0,0 +1,13 @@ +body { + margin: 0; + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen', + 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue', + sans-serif; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +code { + font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New', + monospace; +} diff --git a/examples/cra/src/index.tsx 
b/examples/cra/src/index.tsx new file mode 100644 index 00000000..ef2edf8e --- /dev/null +++ b/examples/cra/src/index.tsx @@ -0,0 +1,17 @@ +import React from 'react'; +import ReactDOM from 'react-dom'; +import './index.css'; +import App from './App'; +import reportWebVitals from './reportWebVitals'; + +ReactDOM.render( + + + , + document.getElementById('root') +); + +// If you want to start measuring performance in your app, pass a function +// to log results (for example: reportWebVitals(console.log)) +// or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals +reportWebVitals(); diff --git a/examples/cra/src/logo.svg b/examples/cra/src/logo.svg new file mode 100644 index 00000000..9dfc1c05 --- /dev/null +++ b/examples/cra/src/logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/examples/cra/src/react-app-env.d.ts b/examples/cra/src/react-app-env.d.ts new file mode 100644 index 00000000..6431bc5f --- /dev/null +++ b/examples/cra/src/react-app-env.d.ts @@ -0,0 +1 @@ +/// diff --git a/examples/cra/src/reportWebVitals.ts b/examples/cra/src/reportWebVitals.ts new file mode 100644 index 00000000..49a2a16e --- /dev/null +++ b/examples/cra/src/reportWebVitals.ts @@ -0,0 +1,15 @@ +import { ReportHandler } from 'web-vitals'; + +const reportWebVitals = (onPerfEntry?: ReportHandler) => { + if (onPerfEntry && onPerfEntry instanceof Function) { + import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => { + getCLS(onPerfEntry); + getFID(onPerfEntry); + getFCP(onPerfEntry); + getLCP(onPerfEntry); + getTTFB(onPerfEntry); + }); + } +}; + +export default reportWebVitals; diff --git a/examples/cra/src/setupTests.ts b/examples/cra/src/setupTests.ts new file mode 100644 index 00000000..8f2609b7 --- /dev/null +++ b/examples/cra/src/setupTests.ts @@ -0,0 +1,5 @@ +// jest-dom adds custom jest matchers for asserting on DOM nodes. 
+// allows you to do things like: +// expect(element).toHaveTextContent(/react/i) +// learn more: https://github.com/testing-library/jest-dom +import '@testing-library/jest-dom'; diff --git a/examples/cra/tsconfig.json b/examples/cra/tsconfig.json new file mode 100644 index 00000000..a273b0cf --- /dev/null +++ b/examples/cra/tsconfig.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + "target": "es5", + "lib": [ + "dom", + "dom.iterable", + "esnext" + ], + "allowJs": true, + "skipLibCheck": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "forceConsistentCasingInFileNames": true, + "noFallthroughCasesInSwitch": true, + "module": "esnext", + "moduleResolution": "node", + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + "jsx": "react-jsx" + }, + "include": [ + "src" + ] +} From 2990f33803d35a68b2aaac5fb79b3f88f27ae526 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 28 Jan 2022 18:07:08 -0500 Subject: [PATCH 042/730] remove tmp file --- examples/cra/src/.App.tsx.swp | Bin 12288 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 examples/cra/src/.App.tsx.swp diff --git a/examples/cra/src/.App.tsx.swp b/examples/cra/src/.App.tsx.swp deleted file mode 100644 index aa37a74570ef672a8deb03368eea52e2c89fb073..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12288 zcmeI2L2nyH6vw9&35m9V;0$^!m9iFFZ<<~xv5lgVLs1hD4iP9S)pR|!w_WcnGh@ez z>mt4YClKO@kU$(bBXQ%-2jIkwI|uIYpS9O^Emcu3NR{3y|E%ZDdvD(R?X0CJb9eLb z<{rIO-x9c95~4r+N1p%nRq-VZ+N3)7_A@RcPfs&-h7+bAmI-4ybb6pnPhBxKR%1CR zGG*h$$k>`B-YJT@v*WX~u?G`i0#8QZlDM{Yb<@YZ5xh>Xy!_FVg|jykU;<2l2`~XB zzyz286JP>QH33(g6Q84lXXahJIlrGNfC(@GCcp%q zLIScQ#PRb&ymJA?Tb0mll$FhpZeaa+=VCmYw=C(JGE~&?Ce*CziH{mgNbW(R}?85T7S1#Uai!4pl%pTFz&4{i= z6xd9=e(-odxZx*oeMO%Q)8syN^_cDr2Zbd)!and5nLItg!o5WrUs1L1%B7VHgF8lR z7a&(TgAWJ0A0O1{!2=4aJ>cWuk2VtQ5U0XfU1^@WuvW)Xc^KBH*`f(B4XxVkc2(k5 
zY>CdTqa(szkLK{`Hqybj?R>%@z?;(@=841EbN@ae-$X3-1KqmWK8R><=Yv^!G^MW5 z0}7Uh%8#WTRP_>(%Ka{DpXAA&vR3v~gtv9ww9M~Y@y=y6(Mt5(u#BA$H0VZ=I z5%~@Lu1>1h8-1BC7oCr)D8Jfc8kpzZsFx?=6KglNt0=#{XHl5-gSs8{meb`oL`0M2 Q1Be!UD_$Gr3yzro0FPvwxBvhE From e85f47b1f44e0d2eeb4c5251e2dc7f7b13c4a4b3 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 28 Jan 2022 18:58:47 -0500 Subject: [PATCH 043/730] remove from package.json --- examples/cra/package.json | 1 - 1 file changed, 1 deletion(-) diff --git a/examples/cra/package.json b/examples/cra/package.json index 76d55ff8..ccf145a9 100644 --- a/examples/cra/package.json +++ b/examples/cra/package.json @@ -10,7 +10,6 @@ "@types/node": "^16.11.21", "@types/react": "^17.0.38", "@types/react-dom": "^17.0.11", - "automerge-wasm": "file:../automerge-wasm-v0.1.0.tgz", "react": "^17.0.2", "react-dom": "^17.0.2", "react-scripts": "5.0.0", From a59ffebd64f469f6411f42718133cb72dfc2af75 Mon Sep 17 00:00:00 2001 From: karissa <633012+okdistribute@users.noreply.github.com> Date: Fri, 28 Jan 2022 21:33:24 -0700 Subject: [PATCH 044/730] Update app to include text editor, import Automerge correctly --- examples/cra/src/App.tsx | 52 ++++++++++++++++++++------------------ examples/cra/src/index.tsx | 15 ++++++----- 2 files changed, 36 insertions(+), 31 deletions(-) diff --git a/examples/cra/src/App.tsx b/examples/cra/src/App.tsx index f2d2172e..fa6fba64 100644 --- a/examples/cra/src/App.tsx +++ b/examples/cra/src/App.tsx @@ -1,40 +1,42 @@ import React, { useEffect, useState } from 'react'; -import logo from './logo.svg'; import './App.css'; -import init from "automerge-wasm" -import { create, loadDoc, encodeChange, decodeChange, - initSyncState, encodeSyncState, decodeSyncState, - encodeSyncMessage, decodeSyncMessage, - LIST, MAP, TEXT } from "automerge-wasm" +import * as Automerge from "automerge-wasm" + function App() { + const [ doc, ] = useState(Automerge.create()) + const [ edits, ] = useState(doc.set("_root", "edits", Automerge.TEXT) || "") const [ val, 
setVal ] = useState(""); useEffect(() => { - init().then(() => { - let doc = create() - let edits = doc.set("_root", "edits", TEXT) || "" doc.splice(edits, 0, 0, "the quick fox jumps over the lazy dog") - doc.splice(edits, 10, 3, "sloth") let result = doc.text(edits) - setVal(JSON.stringify(result)) - }) + setVal(result) }, []) + + function updateTextarea(e: any) { + e.preventDefault() + let event: InputEvent = e.nativeEvent + console.log(edits, e.target.selectionEnd) + switch (event.inputType) { + case 'insertText': + //@ts-ignore + doc.splice(edits, e.target.selectionEnd - 1, 0, e.nativeEvent.data) + break; + case 'deleteContentBackward': + //@ts-ignore + doc.splice(edits, e.target.selectionEnd, 1) + break; + case 'insertLineBreak': + //@ts-ignore + doc.splice(edits, e.target.selectionEnd - 1, '\n') + break; + } + setVal(doc.text(edits)) + } return (
- logo -

- Edit src/App.tsx and save to reload. -

- - Learn React - -

edits = {val}

+
); diff --git a/examples/cra/src/index.tsx b/examples/cra/src/index.tsx index ef2edf8e..d84251d4 100644 --- a/examples/cra/src/index.tsx +++ b/examples/cra/src/index.tsx @@ -3,13 +3,16 @@ import ReactDOM from 'react-dom'; import './index.css'; import App from './App'; import reportWebVitals from './reportWebVitals'; +import init from "automerge-wasm" -ReactDOM.render( - - - , - document.getElementById('root') -); +init().then(_ => { + ReactDOM.render( + + + , + document.getElementById('root') + ); +}) // If you want to start measuring performance in your app, pass a function // to log results (for example: reportWebVitals(console.log)) From e679c4f6a0b27b78e214e0afe497f811d4a425bc Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 25 Jan 2022 19:19:51 -0500 Subject: [PATCH 045/730] v0 wip --- automerge-wasm/src/lib.rs | 112 +++++++++++++++++++- automerge-wasm/test/test.js | 31 ++++++ automerge/src/automerge.rs | 65 ++++++++++++ automerge/src/columnar.rs | 4 + automerge/src/legacy/serde_impls/op_type.rs | 2 + automerge/src/query.rs | 4 + automerge/src/query/insert.rs | 2 +- automerge/src/query/mark.rs | 69 ++++++++++++ automerge/src/query/spans.rs | 96 +++++++++++++++++ automerge/src/types.rs | 26 ++++- edit-trace/automerge-wasm.js | 4 + 11 files changed, 409 insertions(+), 6 deletions(-) create mode 100644 automerge/src/query/mark.rs create mode 100644 automerge/src/query/spans.rs diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 5ab2e5cb..7c8e8c19 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -296,6 +296,40 @@ impl Automerge { Ok(()) } + pub fn mark(&mut self, obj: JsValue, start: JsValue, end: JsValue, name: JsValue, value: JsValue, datatype: JsValue) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let start = to_usize(start, "start")?; + let end = to_usize(end, "end")?; + let name = name + .as_string() + .ok_or("invalid mark name") + .map_err(to_js_err)?; + let value = self.import_scalar(&value, 
datatype.as_string())?; + self.0.mark(&obj, start, end, &name, value).map_err(to_js_err)?; + Ok(()) + } + + pub fn spans(&mut self, obj: JsValue) -> Result { + let obj = self.import(obj)?; + let spans = self.0.spans(&obj).map_err(to_js_err)?; + let result = Array::new(); + for s in spans { + let marks = Array::new(); + for m in s.marks { + let mark = Array::new(); + mark.push(&m.0.into()); + mark.push(&datatype(&m.1).into()); + mark.push(&ScalarValue(m.1).into()); + marks.push(&mark.into()); + } + let obj = Object::new().into(); + js_set(&obj, "pos", s.pos as i32)?; + js_set(&obj, "marks", marks)?; + result.push(&obj); + } + Ok(result.into()) + } + pub fn save(&mut self) -> Result { self.0 .save() @@ -426,7 +460,80 @@ impl Automerge { } } + fn import_scalar(&mut self, value: &JsValue, datatype: Option) -> Result { + match datatype.as_deref() { + Some("boolean") => value + .as_bool() + .ok_or_else(|| "value must be a bool".into()) + .map(|v| am::ScalarValue::Boolean(v)), + Some("int") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|v| am::ScalarValue::Int(v as i64)), + Some("uint") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|v| am::ScalarValue::Uint(v as u64)), + Some("f64") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|n| am::ScalarValue::F64(n)), + Some("bytes") => { + Ok(am::ScalarValue::Bytes(value.clone().dyn_into::().unwrap().to_vec())) + } + Some("counter") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|v| am::ScalarValue::counter(v as i64)), + Some("timestamp") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|v| am::ScalarValue::Timestamp(v as i64)), + /* + Some("bytes") => unimplemented!(), + Some("cursor") => unimplemented!(), + */ + Some("null") => Ok(am::ScalarValue::Null), + Some(_) => Err(format!("unknown datatype {:?}", datatype).into()), + None => { + if value.is_null() { + 
Ok(am::ScalarValue::Null) + } else if let Some(b) = value.as_bool() { + Ok(am::ScalarValue::Boolean(b)) + } else if let Some(s) = value.as_string() { + // FIXME - we need to detect str vs int vs float vs bool here :/ + Ok(am::ScalarValue::Str(s.into())) + } else if let Some(n) = value.as_f64() { + if (n.round() - n).abs() < f64::EPSILON { + Ok(am::ScalarValue::Int(n as i64)) + } else { + Ok(am::ScalarValue::F64(n)) + } +// } else if let Some(o) = to_objtype(&value) { +// Ok(o.into()) + } else if let Ok(d) = value.clone().dyn_into::() { + Ok(am::ScalarValue::Timestamp(d.get_time() as i64)) + } else if let Ok(o) = &value.clone().dyn_into::() { + Ok(am::ScalarValue::Bytes(o.to_vec())) + } else { + Err("value is invalid".into()) + } + } + } + } + fn import_value(&mut self, value: JsValue, datatype: Option) -> Result { + match self.import_scalar(&value,datatype) { + Ok(val) => Ok(val.into()), + Err(err) => { + if let Some(o) = to_objtype(&value) { + Ok(o.into()) + } else { + Err(err) + } + } + } + /* match datatype.as_deref() { Some("boolean") => value .as_bool() @@ -455,10 +562,6 @@ impl Automerge { .as_f64() .ok_or_else(|| "value must be a number".into()) .map(|v| am::ScalarValue::Timestamp(v as i64).into()), - /* - Some("bytes") => unimplemented!(), - Some("cursor") => unimplemented!(), - */ Some("null") => Ok(am::ScalarValue::Null.into()), Some(_) => Err(format!("unknown datatype {:?}", datatype).into()), None => { @@ -486,6 +589,7 @@ impl Automerge { } } } + */ } } diff --git a/automerge-wasm/test/test.js b/automerge-wasm/test/test.js index 7c5e7bab..58f1ddfa 100644 --- a/automerge-wasm/test/test.js +++ b/automerge-wasm/test/test.js @@ -1270,6 +1270,37 @@ describe('Automerge', () => { assert.strictEqual(message, null) }) + it('should handle marks', () => { + let doc = Automerge.init() + let list = doc.set("_root", "list", Automerge.TEXT) + doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog") + doc.mark(list, 1,15, "bold" , true) + doc.mark(list, 5,12, 
"itallic" , true) + doc.mark(list, 9,11, "comment" , "foxes are my favorite animal!") + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ + { pos: 1, marks: [ [ 'bold', 'boolean', true ] ] }, + { + pos: 5, + marks: [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ] + }, + { + pos: 9, + marks: [ + [ 'bold', 'boolean', true ], + [ 'itallic', 'boolean', true ], + [ 'comment', 'str', 'foxes are my favorite animal!' ] + ] + }, + { + pos: 11, + marks: [ [ 'itallic', 'boolean', true ], [ 'bold', 'boolean', true ] ] + }, + { pos: 12, marks: [ [ 'bold', 'boolean', true ] ] }, + { pos: 15, marks: [] } + ]) + }) + it('should allow a subset of changes to be sent', () => { // ,-- c1 <-- c2 // c0 <-+ diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index ca9e1463..5643eb8b 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -447,6 +447,69 @@ impl Automerge { Ok(buffer) } + pub fn spans(&self, obj: &ExId) -> Result,AutomergeError> { + let obj = self.exid_to_obj(obj)?; + let mut query = self.ops.search(obj, query::Spans::new()); + query.check_marks(); + Ok(query.spans) + } + + pub fn mark( + &mut self, + obj: &ExId, + start: usize, + end: usize, + mark: &str, + value: ScalarValue, + ) -> Result<(), AutomergeError> { + let obj = self.exid_to_obj(obj)?; + let query = self.ops.search(obj, query::Mark::new(start, end)); + + let (a, b) = query.ops()?; + let (pos, key) = a; + let id = self.next_id(); + let op = Op { + change: self.history.len(), + id, + action: OpType::Mark(mark.into(), value), + obj, + key, + succ: Default::default(), + pred: Default::default(), + insert: false, + }; + self.ops.insert(pos, op.clone()); + self.tx().operations.push(op); + + let (pos, key) = b; + let id = self.next_id(); + let op = Op { + change: self.history.len(), + id, + action: OpType::Unmark, + obj, + key, + succ: Default::default(), + pred: Default::default(), + insert: true, + }; + self.ops.insert(pos, op.clone()); + 
self.tx().operations.push(op); + + Ok(()) + } + + pub fn unmark( + &self, + _obj: &ExId, + _start: usize, + _end: usize, + _inclusive: bool, + _mark: &str, + ) -> Result { + unimplemented!() + } + // TODO - I need to return these OpId's here **only** to get // the legacy conflicts format of { [opid]: value } // Something better? @@ -1091,6 +1154,8 @@ impl Automerge { OpType::Set(value) => format!("{}", value), OpType::Make(obj) => format!("make{}", obj), OpType::Inc(obj) => format!("inc{}", obj), + OpType::Mark(s,_) => format!("mark{}", s), + OpType::Unmark => format!("unmark"), OpType::Del => format!("del{}", 0), }; let pred: Vec<_> = i.pred.iter().map(|id| self.to_string(*id)).collect(); diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index 53a9d488..fa852687 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -1064,6 +1064,8 @@ impl DocOpEncoder { self.val.append_null(); Action::Del } + amp::OpType::Mark(_,_) => unimplemented!(), + amp::OpType::Unmark => unimplemented!(), amp::OpType::Make(kind) => { self.val.append_null(); match kind { @@ -1170,6 +1172,8 @@ impl ColumnEncoder { self.val.append_null(); Action::Del } + OpType::Mark(_,_) => unimplemented!(), + OpType::Unmark => unimplemented!(), OpType::Make(kind) => { self.val.append_null(); match kind { diff --git a/automerge/src/legacy/serde_impls/op_type.rs b/automerge/src/legacy/serde_impls/op_type.rs index 19849674..5ad642c8 100644 --- a/automerge/src/legacy/serde_impls/op_type.rs +++ b/automerge/src/legacy/serde_impls/op_type.rs @@ -15,6 +15,8 @@ impl Serialize for OpType { OpType::Make(ObjType::Table) => RawOpType::MakeTable, OpType::Make(ObjType::List) => RawOpType::MakeList, OpType::Make(ObjType::Text) => RawOpType::MakeText, + OpType::Mark(_,_) => unimplemented!(), + OpType::Unmark => unimplemented!(), OpType::Del => RawOpType::Del, OpType::Inc(_) => RawOpType::Inc, OpType::Set(_) => RawOpType::Set, diff --git a/automerge/src/query.rs b/automerge/src/query.rs 
index 7911e1bb..80af06b2 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -12,11 +12,13 @@ mod len; mod len_at; mod list_vals; mod list_vals_at; +mod mark; mod nth; mod nth_at; mod prop; mod prop_at; mod seek_op; +mod spans; pub(crate) use insert::InsertNth; pub(crate) use keys::Keys; @@ -25,11 +27,13 @@ pub(crate) use len::Len; pub(crate) use len_at::LenAt; pub(crate) use list_vals::ListVals; pub(crate) use list_vals_at::ListValsAt; +pub(crate) use mark::Mark; pub(crate) use nth::Nth; pub(crate) use nth_at::NthAt; pub(crate) use prop::Prop; pub(crate) use prop_at::PropAt; pub(crate) use seek_op::SeekOp; +pub(crate) use spans::{Span, Spans}; #[derive(Debug, Clone, PartialEq)] pub(crate) struct CounterData { diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index 085bf868..12bbe70a 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -65,7 +65,7 @@ impl TreeQuery for InsertNth { if element.insert { if self.seen >= self.target { return QueryResult::Finish; - }; + } self.last_seen = None; self.last_insert = element.elemid(); } diff --git a/automerge/src/query/mark.rs b/automerge/src/query/mark.rs new file mode 100644 index 00000000..080b56ad --- /dev/null +++ b/automerge/src/query/mark.rs @@ -0,0 +1,69 @@ +use crate::AutomergeError; +use crate::query::{QueryResult, TreeQuery}; +use crate::types::{ElemId, Key, Op}; +use std::fmt::Debug; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct Mark { + start: usize, + end: usize, + pos: usize, + seen: usize, + _ops: Vec<(usize, Key)>, + count: usize, + last_seen: Option, + last_insert: Option, +} + +impl Mark { + pub fn new(start: usize, end: usize) -> Self { + Mark { + start, + end, + pos: 0, + seen: 0, + _ops: Vec::new(), + count: 0, + last_seen: None, + last_insert: None, + } + } + + pub fn ops(&self) -> Result<((usize,Key),(usize,Key)),AutomergeError> { + if self._ops.len() == 2 { + Ok((self._ops[0], self._ops[1])) + } else { + 
Err(AutomergeError::Fail) + } + } +} + +impl TreeQuery for Mark { + /* + fn query_node(&mut self, _child: &OpTreeNode) -> QueryResult { + unimplemented!() + } + */ + + fn query_element(&mut self, element: &Op) -> QueryResult { + // find location to insert + // mark or set + if element.insert { + if self.seen >= self.end { + self._ops.push((self.pos + 1, self.last_insert.into())); + return QueryResult::Finish; + } + if self.seen >= self.start && self._ops.is_empty() { + self._ops.push((self.pos, self.last_insert.into())); + } + self.last_seen = None; + self.last_insert = element.elemid(); + } + if self.last_seen.is_none() && element.visible() { + self.seen += 1; + self.last_seen = element.elemid() + } + self.pos += 1; + QueryResult::Next + } +} diff --git a/automerge/src/query/spans.rs b/automerge/src/query/spans.rs new file mode 100644 index 00000000..d90cb5bb --- /dev/null +++ b/automerge/src/query/spans.rs @@ -0,0 +1,96 @@ +use std::collections::HashMap; +use std::fmt::Debug; +use crate::query::{QueryResult, TreeQuery, OpSetMetadata}; +use crate::types::{ElemId, Op, ScalarValue, OpType}; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct Spans { + pos: usize, + seen: usize, + last_seen: Option, + last_insert: Option, + seen_at_this_mark: Option, + seen_at_last_mark: Option, + ops: Vec, + marks: HashMap, + changed: bool, + pub spans: Vec, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct Span { + pub pos: usize, + pub marks: Vec<(String, ScalarValue)>, +} + +impl Spans { + pub fn new() -> Self { + Spans { + pos: 0, + seen: 0, + last_seen: None, + last_insert: None, + seen_at_last_mark: None, + seen_at_this_mark: None, + changed: false, + ops: Vec::new(), + marks: HashMap::new(), + spans: Vec::new(), + } + } + + pub fn check_marks(&mut self) { + let mut new_marks = HashMap::new(); + for op in &self.ops { + if let OpType::Mark(n,v) = &op.action { + new_marks.insert(n.clone(),v.clone()); + } + } + if new_marks != self.marks { + self.changed = true; + 
self.marks = new_marks; + } + if self.changed && self.seen_at_last_mark != self.seen_at_this_mark { + self.changed = false; + self.seen_at_last_mark = self.seen_at_this_mark; + self.spans.push(Span { + pos: self.seen, + marks: self.marks.iter().map(|(key, val)| (key.clone(), val.clone())).collect() + }); + } + } +} + +impl TreeQuery for Spans { + /* + fn query_node(&mut self, _child: &OpTreeNode) -> QueryResult { + unimplemented!() + } + */ + + fn query_element_with_metadata(&mut self, element: &Op, m: &OpSetMetadata) -> QueryResult { + // find location to insert + // mark or set + if element.succ.is_empty() { + if let OpType::Mark(_,_) = &element.action { + let pos = self.ops.binary_search_by(|probe| m.lamport_cmp(probe.id, element.id)).unwrap_err(); + self.ops.insert(pos, element.clone()); + } + if let OpType::Unmark = &element.action { + self.ops.retain(|op| op.id != element.id.prev()); + } + } + if element.insert { + self.last_seen = None; + self.last_insert = element.elemid(); + } + if self.last_seen.is_none() && element.visible() { + self.check_marks(); + self.seen += 1; + self.last_seen = element.elemid(); + self.seen_at_this_mark = element.elemid(); + } + self.pos += 1; + QueryResult::Next + } +} diff --git a/automerge/src/types.rs b/automerge/src/types.rs index d00334f8..5d8430c0 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -160,6 +160,8 @@ pub enum OpType { Del, Inc(i64), Set(ScalarValue), + Mark(String, ScalarValue), + Unmark, } #[derive(Debug)] @@ -182,6 +184,10 @@ impl OpId { pub fn actor(&self) -> usize { self.1 } + #[inline] + pub fn prev(&self) -> OpId { + OpId(self.0 - 1, self.1) + } } impl Exportable for ObjId { @@ -283,6 +289,18 @@ impl From for Key { } } +impl From> for ElemId { + fn from(e: Option) -> Self { + e.unwrap_or(HEAD) + } +} + +impl From> for Key { + fn from(e: Option) -> Self { + Key::Seq(e.into()) + } +} + #[derive(Debug, PartialEq, PartialOrd, Eq, Ord, Clone, Copy, Hash)] pub(crate) enum Key { Map(usize), @@ 
-366,7 +384,7 @@ impl Op { } pub fn visible(&self) -> bool { - if self.is_inc() { + if self.is_inc() || self.is_mark() { false } else if self.is_counter() { self.succ.len() <= self.incs() @@ -391,6 +409,10 @@ impl Op { matches!(&self.action, OpType::Inc(_)) } + pub fn is_mark(&self) -> bool { + matches!(&self.action, OpType::Mark(_,_)) || matches!(&self.action, OpType::Unmark) + } + pub fn is_counter(&self) -> bool { matches!(&self.action, OpType::Set(ScalarValue::Counter(_))) } @@ -425,6 +447,8 @@ impl Op { OpType::Set(value) if self.insert => format!("i:{}", value), OpType::Set(value) => format!("s:{}", value), OpType::Make(obj) => format!("make{}", obj), + OpType::Mark(s,_) => format!("mark{}", s), + OpType::Unmark=> format!("unmark"), OpType::Inc(val) => format!("inc:{}", val), OpType::Del => "del".to_string(), } diff --git a/edit-trace/automerge-wasm.js b/edit-trace/automerge-wasm.js index 2d2b0cc9..02130686 100644 --- a/edit-trace/automerge-wasm.js +++ b/edit-trace/automerge-wasm.js @@ -25,6 +25,10 @@ let _ = doc.save() console.log(`Done in ${new Date() - start} ms`) +let t_time = new Date() +let t = doc.text(text); +console.log(`doc.text in ${new Date() - t_time} ms`) + if (doc.text(text) !== finalText) { throw new RangeError('ERROR: final text did not match expectation') } From b794f4803d5aca48836c8aca2cb809233a8f2780 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 27 Jan 2022 08:49:44 -0500 Subject: [PATCH 046/730] rework marks as inserts between values --- automerge-wasm/Cargo.toml | 1 + automerge-wasm/src/lib.rs | 65 +++++++--- automerge-wasm/test/test.js | 125 +++++++++++++++----- automerge/src/automerge.rs | 22 ++-- automerge/src/columnar.rs | 8 +- automerge/src/legacy/serde_impls/op_type.rs | 4 +- automerge/src/query/insert.rs | 53 ++++++--- automerge/src/query/mark.rs | 2 + automerge/src/query/spans.rs | 10 +- automerge/src/types.rs | 21 +++- 10 files changed, 221 insertions(+), 90 deletions(-) diff --git a/automerge-wasm/Cargo.toml 
b/automerge-wasm/Cargo.toml index ba29a413..58141803 100644 --- a/automerge-wasm/Cargo.toml +++ b/automerge-wasm/Cargo.toml @@ -32,6 +32,7 @@ serde-wasm-bindgen = "0.1.3" serde_bytes = "0.11.5" unicode-segmentation = "1.7.1" hex = "^0.4.3" +regex = "^1.5" [dependencies.wasm-bindgen] version = "^0.2" diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 7c8e8c19..e58a0611 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -2,6 +2,7 @@ use automerge as am; use automerge::{Change, ObjId, Prop, Value, ROOT}; use js_sys::{Array, Object, Uint8Array}; +use regex::Regex; use std::convert::TryInto; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; @@ -296,22 +297,38 @@ impl Automerge { Ok(()) } - pub fn mark(&mut self, obj: JsValue, start: JsValue, end: JsValue, name: JsValue, value: JsValue, datatype: JsValue) -> Result<(), JsValue> { + pub fn mark( + &mut self, + obj: JsValue, + range: JsValue, + name: JsValue, + value: JsValue, + datatype: JsValue, + ) -> Result<(), JsValue> { let obj = self.import(obj)?; - let start = to_usize(start, "start")?; - let end = to_usize(end, "end")?; + let re = Regex::new(r"([\[\(])(\d+)\.\.(\d+)([\)\]])").unwrap(); + let range = range.as_string().ok_or("range must be a string")?; + let cap = re.captures_iter(&range).next().ok_or("range must be in the form of (start..end] or [start..end) etc... 
() for sticky, [] for normal")?; + let start: usize = cap[2].parse().map_err(|_| to_js_err("invalid start"))?; + let end: usize = cap[3].parse().map_err(|_| to_js_err("invalid end"))?; + let start_sticky = &cap[1] == "("; + let end_sticky = &cap[4] == ")"; let name = name .as_string() .ok_or("invalid mark name") .map_err(to_js_err)?; let value = self.import_scalar(&value, datatype.as_string())?; - self.0.mark(&obj, start, end, &name, value).map_err(to_js_err)?; + self.0 + .mark(&obj, start, start_sticky, end, end_sticky, &name, value) + .map_err(to_js_err)?; Ok(()) } pub fn spans(&mut self, obj: JsValue) -> Result { let obj = self.import(obj)?; + let text = self.0.text(&obj).map_err(to_js_err)?; let spans = self.0.spans(&obj).map_err(to_js_err)?; + let mut last_pos = 0; let result = Array::new(); for s in spans { let marks = Array::new(); @@ -322,10 +339,20 @@ impl Automerge { mark.push(&ScalarValue(m.1).into()); marks.push(&mark.into()); } - let obj = Object::new().into(); - js_set(&obj, "pos", s.pos as i32)?; - js_set(&obj, "marks", marks)?; - result.push(&obj); + let text_span = &text[last_pos..s.pos];//.slice(last_pos, s.pos); + if text_span.len() > 0 { + result.push(&text_span.into()); + } + result.push(&marks); + last_pos = s.pos; + //let obj = Object::new().into(); + //js_set(&obj, "pos", s.pos as i32)?; + //js_set(&obj, "marks", marks)?; + //result.push(&obj.into()); + } + let text_span = &text[last_pos..]; + if text_span.len() > 0 { + result.push(&text_span.into()); } Ok(result.into()) } @@ -460,12 +487,16 @@ impl Automerge { } } - fn import_scalar(&mut self, value: &JsValue, datatype: Option) -> Result { + fn import_scalar( + &mut self, + value: &JsValue, + datatype: Option, + ) -> Result { match datatype.as_deref() { Some("boolean") => value .as_bool() .ok_or_else(|| "value must be a bool".into()) - .map(|v| am::ScalarValue::Boolean(v)), + .map(am::ScalarValue::Boolean), Some("int") => value .as_f64() .ok_or_else(|| "value must be a number".into()) @@ 
-477,10 +508,10 @@ impl Automerge { Some("f64") => value .as_f64() .ok_or_else(|| "value must be a number".into()) - .map(|n| am::ScalarValue::F64(n)), - Some("bytes") => { - Ok(am::ScalarValue::Bytes(value.clone().dyn_into::().unwrap().to_vec())) - } + .map(am::ScalarValue::F64), + Some("bytes") => Ok(am::ScalarValue::Bytes( + value.clone().dyn_into::().unwrap().to_vec(), + )), Some("counter") => value .as_f64() .ok_or_else(|| "value must be a number".into()) @@ -509,8 +540,8 @@ impl Automerge { } else { Ok(am::ScalarValue::F64(n)) } -// } else if let Some(o) = to_objtype(&value) { -// Ok(o.into()) + // } else if let Some(o) = to_objtype(&value) { + // Ok(o.into()) } else if let Ok(d) = value.clone().dyn_into::() { Ok(am::ScalarValue::Timestamp(d.get_time() as i64)) } else if let Ok(o) = &value.clone().dyn_into::() { @@ -523,7 +554,7 @@ impl Automerge { } fn import_value(&mut self, value: JsValue, datatype: Option) -> Result { - match self.import_scalar(&value,datatype) { + match self.import_scalar(&value, datatype) { Ok(val) => Ok(val.into()), Err(err) => { if let Some(o) = to_objtype(&value) { diff --git a/automerge-wasm/test/test.js b/automerge-wasm/test/test.js index 58f1ddfa..7d1519c1 100644 --- a/automerge-wasm/test/test.js +++ b/automerge-wasm/test/test.js @@ -397,6 +397,100 @@ describe('Automerge', () => { doc1.free() doc2.free() }) + + it.only('should handle marks [..]', () => { + let doc = Automerge.init() + let list = doc.set("_root", "list", Automerge.TEXT) + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "[3..6]", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.insert(list, 6, "A") + doc.insert(list, 3, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaaA', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'Accc' ]); + }) + + it.only('should handle marks with deleted ends [..]', () => { + let doc = Automerge.init() + let list = 
doc.set("_root", "list", Automerge.TEXT) + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "[3..6]", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.del(list,5); + doc.del(list,5); + doc.del(list,2); + doc.del(list,2); + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) + doc.insert(list, 3, "A") + doc.insert(list, 2, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaA', [ [ 'bold', 'boolean', true ] ], 'b', [], 'Acc' ]) + }) + + it.only('should handle sticky marks (..)', () => { + let doc = Automerge.init() + let list = doc.set("_root", "list", Automerge.TEXT) + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "(3..6)", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.insert(list, 6, "A") + doc.insert(list, 3, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'AbbbA', [], 'ccc' ]); + }) + + it.only('should handle sticky marks with deleted ends (..)', () => { + let doc = Automerge.init() + let list = doc.set("_root", "list", Automerge.TEXT) + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "(3..6)", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.del(list,5); + doc.del(list,5); + doc.del(list,2); + doc.del(list,2); + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) + doc.insert(list, 3, "A") + doc.insert(list, 2, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) + }) + + it.only('should handle overlapping marks', () => { + let doc = Automerge.init() + let list = doc.set("_root", "list", 
Automerge.TEXT) + doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog") + doc.mark(list, "[0..37]", "bold" , true) + doc.mark(list, "[4..19]", "itallic" , true) + doc.mark(list, "[10..13]", "comment" , "foxes are my favorite animal!") + let spans = doc.spans(list); + assert.deepStrictEqual(spans, + [ + [ [ 'bold', 'boolean', true ] ], + 'the ', + [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], + 'quick ', + [ + [ 'bold', 'boolean', true ], + [ 'comment', 'str', 'foxes are my favorite animal!' ], + [ 'itallic', 'boolean', true ] + ], + 'fox', + [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], + ' jumps', + [ [ 'bold', 'boolean', true ] ], + ' over the lazy dog', + [], + ] + ) + }) + }) describe('sync', () => { it('should send a sync message implying no local data', () => { @@ -1270,37 +1364,6 @@ describe('Automerge', () => { assert.strictEqual(message, null) }) - it('should handle marks', () => { - let doc = Automerge.init() - let list = doc.set("_root", "list", Automerge.TEXT) - doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog") - doc.mark(list, 1,15, "bold" , true) - doc.mark(list, 5,12, "itallic" , true) - doc.mark(list, 9,11, "comment" , "foxes are my favorite animal!") - let spans = doc.spans(list); - assert.deepStrictEqual(spans, [ - { pos: 1, marks: [ [ 'bold', 'boolean', true ] ] }, - { - pos: 5, - marks: [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ] - }, - { - pos: 9, - marks: [ - [ 'bold', 'boolean', true ], - [ 'itallic', 'boolean', true ], - [ 'comment', 'str', 'foxes are my favorite animal!' 
] - ] - }, - { - pos: 11, - marks: [ [ 'itallic', 'boolean', true ], [ 'bold', 'boolean', true ] ] - }, - { pos: 12, marks: [ [ 'bold', 'boolean', true ] ] }, - { pos: 15, marks: [] } - ]) - }) - it('should allow a subset of changes to be sent', () => { // ,-- c1 <-- c2 // c0 <-+ diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 5643eb8b..f74a21d3 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -6,7 +6,7 @@ use crate::exid::ExId; use crate::op_set::OpSet; use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, - ScalarValue, Value, + ScalarValue, Value, MarkData, }; use crate::{legacy, query, types, ObjType}; use crate::{AutomergeError, Change, Prop}; @@ -336,11 +336,11 @@ impl Automerge { value: V, ) -> Result, AutomergeError> { let id = self.next_id(); + let value = value.into(); let query = self.ops.search(obj, query::InsertNth::new(index)); let key = query.key()?; - let value = value.into(); let action = value.into(); let is_make = matches!(&action, OpType::Make(_)); @@ -355,7 +355,7 @@ impl Automerge { insert: true, }; - self.ops.insert(query.pos, op.clone()); + self.ops.insert(query.pos(), op.clone()); self.tx().operations.push(op); if is_make { @@ -458,7 +458,9 @@ impl Automerge { &mut self, obj: &ExId, start: usize, + start_sticky: bool, end: usize, + end_sticky: bool, mark: &str, value: ScalarValue, ) -> Result<(), AutomergeError> { @@ -471,12 +473,12 @@ impl Automerge { let op = Op { change: self.history.len(), id, - action: OpType::Mark(mark.into(), value), + action: OpType::Mark(MarkData { name: mark.into(), sticky: start_sticky, value}), obj, key, succ: Default::default(), pred: Default::default(), - insert: false, + insert: true, }; self.ops.insert(pos, op.clone()); self.tx().operations.push(op); @@ -486,7 +488,7 @@ impl Automerge { let op = Op { change: self.history.len(), id, - action: OpType::Unmark, + action: OpType::Unmark(end_sticky), 
obj, key, succ: Default::default(), @@ -1152,10 +1154,10 @@ impl Automerge { }; let value: String = match &i.action { OpType::Set(value) => format!("{}", value), - OpType::Make(obj) => format!("make{}", obj), - OpType::Inc(obj) => format!("inc{}", obj), - OpType::Mark(s,_) => format!("mark{}", s), - OpType::Unmark => format!("unmark"), + OpType::Make(obj) => format!("make({})", obj), + OpType::Inc(obj) => format!("inc({})", obj), + OpType::Mark(m) => format!("mark({}={})", m.name,m.value), + OpType::Unmark(_) => "unmark".into(), OpType::Del => format!("del{}", 0), }; let pred: Vec<_> = i.pred.iter().map(|id| self.to_string(*id)).collect(); diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index fa852687..15ba749d 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -1064,8 +1064,8 @@ impl DocOpEncoder { self.val.append_null(); Action::Del } - amp::OpType::Mark(_,_) => unimplemented!(), - amp::OpType::Unmark => unimplemented!(), + amp::OpType::Mark(_) => unimplemented!(), + amp::OpType::Unmark(_) => unimplemented!(), amp::OpType::Make(kind) => { self.val.append_null(); match kind { @@ -1172,8 +1172,8 @@ impl ColumnEncoder { self.val.append_null(); Action::Del } - OpType::Mark(_,_) => unimplemented!(), - OpType::Unmark => unimplemented!(), + OpType::Mark(_) => unimplemented!(), + OpType::Unmark(_) => unimplemented!(), OpType::Make(kind) => { self.val.append_null(); match kind { diff --git a/automerge/src/legacy/serde_impls/op_type.rs b/automerge/src/legacy/serde_impls/op_type.rs index 5ad642c8..01041ef7 100644 --- a/automerge/src/legacy/serde_impls/op_type.rs +++ b/automerge/src/legacy/serde_impls/op_type.rs @@ -15,8 +15,8 @@ impl Serialize for OpType { OpType::Make(ObjType::Table) => RawOpType::MakeTable, OpType::Make(ObjType::List) => RawOpType::MakeList, OpType::Make(ObjType::Text) => RawOpType::MakeText, - OpType::Mark(_,_) => unimplemented!(), - OpType::Unmark => unimplemented!(), + OpType::Mark(_) => unimplemented!(), + 
OpType::Unmark(_) => unimplemented!(), OpType::Del => RawOpType::Del, OpType::Inc(_) => RawOpType::Inc, OpType::Set(_) => RawOpType::Set, diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index 12bbe70a..8a02971a 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -8,40 +8,53 @@ use std::fmt::Debug; pub(crate) struct InsertNth { target: usize, seen: usize, - pub pos: usize, + //pub pos: usize, + n: usize, + valid: Option, last_seen: Option, last_insert: Option, + last_valid_insert: Option, } impl InsertNth { pub fn new(target: usize) -> Self { + let (valid,last_valid_insert) = if target == 0 { + (Some(0), Some(HEAD)) + } else { + (None, None) + }; InsertNth { target, seen: 0, - pos: 0, + n: 0, + valid, last_seen: None, last_insert: None, + last_valid_insert, } } + pub fn pos(&self) -> usize { + self.valid.unwrap_or(self.n) + } + pub fn key(&self) -> Result { - if self.target == 0 { + Ok(self.last_valid_insert.ok_or(AutomergeError::InvalidIndex(self.target))?.into()) + //if self.target == 0 { + /* + if self.last_insert.is_none() { Ok(HEAD.into()) } else if self.seen == self.target && self.last_insert.is_some() { Ok(Key::Seq(self.last_insert.unwrap())) } else { Err(AutomergeError::InvalidIndex(self.target)) } + */ } } impl TreeQuery for InsertNth { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { - if self.target == 0 { - // insert at the start of the obj all inserts are lesser b/c this is local - self.pos = 0; - return QueryResult::Finish; - } let mut num_vis = child.index.len; if num_vis > 0 { if child.index.has(&self.last_seen) { @@ -50,30 +63,38 @@ impl TreeQuery for InsertNth { if self.seen + num_vis >= self.target { QueryResult::Decend } else { - self.pos += child.len(); + self.n += child.len(); self.seen += num_vis; self.last_seen = child.last().elemid(); QueryResult::Next } } else { - self.pos += child.len(); + self.n += child.len(); QueryResult::Next } } fn query_element(&mut self, element: 
&Op) -> QueryResult { if element.insert { - if self.seen >= self.target { - return QueryResult::Finish; + if self.valid.is_none() && self.seen >= self.target { + self.valid = Some(self.n); } self.last_seen = None; self.last_insert = element.elemid(); } - if self.last_seen.is_none() && element.visible() { - self.seen += 1; - self.last_seen = element.elemid() + if self.valid.is_some() && element.valid_mark_anchor() { + self.last_valid_insert = element.elemid(); + self.valid = None; } - self.pos += 1; + if self.last_seen.is_none() && element.visible() { + if self.seen >= self.target { + return QueryResult::Finish; + } + self.seen += 1; + self.last_seen = element.elemid(); + self.last_valid_insert = self.last_seen + } + self.n += 1; QueryResult::Next } } diff --git a/automerge/src/query/mark.rs b/automerge/src/query/mark.rs index 080b56ad..3757677b 100644 --- a/automerge/src/query/mark.rs +++ b/automerge/src/query/mark.rs @@ -32,6 +32,8 @@ impl Mark { pub fn ops(&self) -> Result<((usize,Key),(usize,Key)),AutomergeError> { if self._ops.len() == 2 { Ok((self._ops[0], self._ops[1])) + } else if self._ops.len() == 1 { + Ok((self._ops[0], (self.pos + 1, self.last_insert.into()))) } else { Err(AutomergeError::Fail) } diff --git a/automerge/src/query/spans.rs b/automerge/src/query/spans.rs index d90cb5bb..461c4a70 100644 --- a/automerge/src/query/spans.rs +++ b/automerge/src/query/spans.rs @@ -42,15 +42,15 @@ impl Spans { pub fn check_marks(&mut self) { let mut new_marks = HashMap::new(); for op in &self.ops { - if let OpType::Mark(n,v) = &op.action { - new_marks.insert(n.clone(),v.clone()); + if let OpType::Mark(m) = &op.action { + new_marks.insert(m.name.clone(),m.value.clone()); } } if new_marks != self.marks { self.changed = true; self.marks = new_marks; } - if self.changed && self.seen_at_last_mark != self.seen_at_this_mark { + if self.changed && (self.seen_at_last_mark != self.seen_at_this_mark || self.seen_at_last_mark.is_none() && self.seen_at_this_mark.is_none()) { 
self.changed = false; self.seen_at_last_mark = self.seen_at_this_mark; self.spans.push(Span { @@ -72,11 +72,11 @@ impl TreeQuery for Spans { // find location to insert // mark or set if element.succ.is_empty() { - if let OpType::Mark(_,_) = &element.action { + if let OpType::Mark(_) = &element.action { let pos = self.ops.binary_search_by(|probe| m.lamport_cmp(probe.id, element.id)).unwrap_err(); self.ops.insert(pos, element.clone()); } - if let OpType::Unmark = &element.action { + if let OpType::Unmark(_) = &element.action { self.ops.retain(|op| op.id != element.id.prev()); } } diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 5d8430c0..6b883cfa 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -160,8 +160,15 @@ pub enum OpType { Del, Inc(i64), Set(ScalarValue), - Mark(String, ScalarValue), - Unmark, + Mark(MarkData), + Unmark(bool), +} + +#[derive(PartialEq, Debug, Clone)] +pub struct MarkData { + pub name: String, + pub value: ScalarValue, + pub sticky: bool, } #[derive(Debug)] @@ -409,8 +416,12 @@ impl Op { matches!(&self.action, OpType::Inc(_)) } + pub fn valid_mark_anchor(&self) -> bool { + self.succ.is_empty() && matches!(&self.action, OpType::Mark(MarkData { sticky: true, ..}) | OpType::Unmark(false)) + } + pub fn is_mark(&self) -> bool { - matches!(&self.action, OpType::Mark(_,_)) || matches!(&self.action, OpType::Unmark) + matches!(&self.action, OpType::Mark(_) | OpType::Unmark(_)) } pub fn is_counter(&self) -> bool { @@ -447,8 +458,8 @@ impl Op { OpType::Set(value) if self.insert => format!("i:{}", value), OpType::Set(value) => format!("s:{}", value), OpType::Make(obj) => format!("make{}", obj), - OpType::Mark(s,_) => format!("mark{}", s), - OpType::Unmark=> format!("unmark"), + OpType::Mark(m) => format!("mark{}={}", m.name, m.value), + OpType::Unmark(_) => "unmark".into(), OpType::Inc(val) => format!("inc:{}", val), OpType::Del => "del".to_string(), } From a2e433348ae4365526304202a24fb881088467b3 Mon Sep 17 00:00:00 
2001 From: Orion Henry Date: Fri, 28 Jan 2022 12:24:58 -0500 Subject: [PATCH 047/730] mark encode/decode/serde --- automerge-wasm/test/test.js | 22 +++++- automerge/src/automerge.rs | 19 ++--- automerge/src/columnar.rs | 52 ++++++++++++-- automerge/src/legacy/serde_impls/op.rs | 42 +++++++++++ automerge/src/legacy/serde_impls/op_type.rs | 4 +- automerge/src/legacy/utility_impls/mod.rs | 1 - .../src/legacy/utility_impls/scalar_value.rs | 57 --------------- automerge/src/query.rs | 2 - automerge/src/query/mark.rs | 71 ------------------ automerge/src/query/spans.rs | 4 +- automerge/src/types.rs | 6 ++ automerge/src/value.rs | 72 +++++++++++++++++++ 12 files changed, 203 insertions(+), 149 deletions(-) delete mode 100644 automerge/src/legacy/utility_impls/scalar_value.rs delete mode 100644 automerge/src/query/mark.rs diff --git a/automerge-wasm/test/test.js b/automerge-wasm/test/test.js index 7d1519c1..38d68eba 100644 --- a/automerge-wasm/test/test.js +++ b/automerge-wasm/test/test.js @@ -3,7 +3,7 @@ const assert = require('assert') const util = require('util') const { BloomFilter } = require('./helpers/sync') const Automerge = require('..') -const { MAP, LIST, TEXT, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState }= Automerge +const { MAP, LIST, TEXT, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState }= Automerge // str to uint8array function en(str) { @@ -460,6 +460,15 @@ describe('Automerge', () => { doc.insert(list, 2, "A") spans = doc.spans(list); assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) + + // make sure save/load can handle marks + + let doc2 = Automerge.load(doc.save()) + spans = doc2.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) + + assert.deepStrictEqual(doc.getHeads(), doc2.getHeads()) + assert.deepStrictEqual(doc.save(), doc2.save()) }) it.only('should handle overlapping marks', () => { 
@@ -489,6 +498,17 @@ describe('Automerge', () => { [], ] ) + + // mark sure encode decode can handle marks + + let all = doc.getChanges([]) + let decoded = all.map((c) => decodeChange(c)) + let encoded = decoded.map((c) => encodeChange(c)) + let doc2 = Automerge.init(); + doc2.applyChanges(encoded) + + assert.deepStrictEqual(doc.spans(list) , doc2.spans(list)) + assert.deepStrictEqual(doc.save(), doc2.save()) }) }) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index f74a21d3..afed341e 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -6,7 +6,7 @@ use crate::exid::ExId; use crate::op_set::OpSet; use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, - ScalarValue, Value, MarkData, + ScalarValue, Value, }; use crate::{legacy, query, types, ObjType}; use crate::{AutomergeError, Change, Prop}; @@ -322,26 +322,25 @@ impl Automerge { value: V, ) -> Result, AutomergeError> { let obj = self.exid_to_obj(obj)?; - if let Some(id) = self.do_insert(obj, index, value)? { + let value = value.into(); + if let Some(id) = self.do_insert(obj, index, value.into())? 
{ Ok(Some(self.id_to_exid(id))) } else { Ok(None) } } - fn do_insert>( + fn do_insert( &mut self, obj: ObjId, index: usize, - value: V, + action: OpType, ) -> Result, AutomergeError> { let id = self.next_id(); - let value = value.into(); let query = self.ops.search(obj, query::InsertNth::new(index)); let key = query.key()?; - let action = value.into(); let is_make = matches!(&action, OpType::Make(_)); let op = Op { @@ -399,7 +398,7 @@ impl Automerge { let mut results = Vec::new(); for v in vals { // insert() - let id = self.do_insert(obj, pos, v.clone())?; + let id = self.do_insert(obj, pos, v.into())?; if let Some(id) = id { results.push(self.id_to_exid(id)); } @@ -465,8 +464,11 @@ impl Automerge { value: ScalarValue, ) -> Result<(), AutomergeError> { let obj = self.exid_to_obj(obj)?; - let query = self.ops.search(obj, query::Mark::new(start, end)); + self.do_insert(obj, start, OpType::mark(mark.into(), start_sticky, value))?; + self.do_insert(obj, end, OpType::Unmark(end_sticky))?; + +/* let (a, b) = query.ops()?; let (pos, key) = a; let id = self.next_id(); @@ -497,6 +499,7 @@ impl Automerge { }; self.ops.insert(pos, op.clone()); self.tx().operations.push(op); +*/ Ok(()) } diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index 15ba749d..f64f8186 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -11,7 +11,7 @@ use std::{ str, }; -use crate::types::{ActorId, ElemId, Key, ObjId, ObjType, Op, OpId, OpType, ScalarValue}; +use crate::types::{ActorId, ElemId, Key, ObjId, ObjType, Op, OpId, OpType, ScalarValue, MarkData }; use crate::legacy as amp; use amp::SortedVec; @@ -134,6 +134,15 @@ impl<'a> Iterator for OperationIterator<'a> { Action::MakeTable => OpType::Make(ObjType::Table), Action::Del => OpType::Del, Action::Inc => OpType::Inc(value.to_i64()?), + Action::Mark => { + // mark has 3 things in the val column + let name = value.to_string()?; + let sticky = self.value.next()?.to_bool()?; + let value = self.value.next()?; + 
OpType::Mark(MarkData { name, sticky, value }) + } + Action::Unmark => OpType::Unmark(value.to_bool()?), + Action::Unused => panic!("invalid action"), }; Some(amp::Op { action, @@ -175,6 +184,15 @@ impl<'a> Iterator for DocOpIterator<'a> { Action::MakeTable => OpType::Make(ObjType::Table), Action::Del => OpType::Del, Action::Inc => OpType::Inc(value.to_i64()?), + Action::Mark => { + // mark has 3 things in the val column + let name = value.to_string()?; + let sticky = self.value.next()?.to_bool()?; + let value = self.value.next()?; + OpType::Mark(MarkData { name, sticky, value }) + } + Action::Unmark => OpType::Unmark(value.to_bool()?), + Action::Unused => panic!("invalid action"), }; Some(DocOp { actor, @@ -1064,8 +1082,16 @@ impl DocOpEncoder { self.val.append_null(); Action::Del } - amp::OpType::Mark(_) => unimplemented!(), - amp::OpType::Unmark(_) => unimplemented!(), + amp::OpType::Mark(m) => { + self.val.append_value(&m.name.clone().into(), actors); + self.val.append_value(&m.sticky.into(), actors); + self.val.append_value(&m.value.clone().into(), actors); + Action::Mark + } + amp::OpType::Unmark(s) => { + self.val.append_value(&(*s).into(), actors); + Action::Unmark + } amp::OpType::Make(kind) => { self.val.append_null(); match kind { @@ -1172,8 +1198,16 @@ impl ColumnEncoder { self.val.append_null(); Action::Del } - OpType::Mark(_) => unimplemented!(), - OpType::Unmark(_) => unimplemented!(), + OpType::Mark(m) => { + self.val.append_value2(&m.name.clone().into(), actors); + self.val.append_value2(&m.sticky.into(), actors); + self.val.append_value2(&m.value.clone().into(), actors); + Action::Mark + } + OpType::Unmark(s) => { + self.val.append_value2(&(*s).into(), actors); + Action::Unmark + } OpType::Make(kind) => { self.val.append_null(); match kind { @@ -1279,8 +1313,11 @@ pub(crate) enum Action { MakeText, Inc, MakeTable, + Mark, + Unused, // final bit is used to mask `Make` actions + Unmark, } -const ACTIONS: [Action; 7] = [ +const ACTIONS: [Action; 10] 
= [ Action::MakeMap, Action::Set, Action::MakeList, @@ -1288,6 +1325,9 @@ const ACTIONS: [Action; 7] = [ Action::MakeText, Action::Inc, Action::MakeTable, + Action::Mark, + Action::Unused, + Action::Unmark, ]; impl Decodable for Action { diff --git a/automerge/src/legacy/serde_impls/op.rs b/automerge/src/legacy/serde_impls/op.rs index 5f0db62d..298464b0 100644 --- a/automerge/src/legacy/serde_impls/op.rs +++ b/automerge/src/legacy/serde_impls/op.rs @@ -49,6 +49,12 @@ impl Serialize for Op { match &self.action { OpType::Inc(n) => op.serialize_field("value", &n)?, OpType::Set(value) => op.serialize_field("value", &value)?, + OpType::Mark(m) => { + op.serialize_field("name", &m.name)?; + op.serialize_field("sticky", &m.sticky)?; + op.serialize_field("value", &m.value)?; + } + OpType::Unmark(s) => op.serialize_field("sticky", &s)?, _ => {} } op.serialize_field("pred", &self.pred)?; @@ -70,6 +76,8 @@ pub(crate) enum RawOpType { Del, Inc, Set, + Mark, + Unmark, } impl Serialize for RawOpType { @@ -85,6 +93,8 @@ impl Serialize for RawOpType { RawOpType::Del => "del", RawOpType::Inc => "inc", RawOpType::Set => "set", + RawOpType::Mark => "mark", + RawOpType::Unmark => "unmark", }; serializer.serialize_str(s) } @@ -103,6 +113,8 @@ impl<'de> Deserialize<'de> for RawOpType { "del", "inc", "set", + "mark", + "unmark", ]; // TODO: Probably more efficient to deserialize to a `&str` let raw_type = String::deserialize(deserializer)?; @@ -114,6 +126,8 @@ impl<'de> Deserialize<'de> for RawOpType { "del" => Ok(RawOpType::Del), "inc" => Ok(RawOpType::Inc), "set" => Ok(RawOpType::Set), + "mark" => Ok(RawOpType::Mark), + "unmark" => Ok(RawOpType::Unmark), other => Err(Error::unknown_variant(other, VARIANTS)), } } @@ -144,6 +158,8 @@ impl<'de> Deserialize<'de> for Op { let mut insert: Option = None; let mut datatype: Option = None; let mut value: Option> = None; + let mut name: Option = None; + let mut sticky: Option = None; let mut ref_id: Option = None; while let Some(field) = 
map.next_key::()? { match field.as_ref() { @@ -167,6 +183,8 @@ impl<'de> Deserialize<'de> for Op { "insert" => read_field("insert", &mut insert, &mut map)?, "datatype" => read_field("datatype", &mut datatype, &mut map)?, "value" => read_field("value", &mut value, &mut map)?, + "name" => read_field("name", &mut name, &mut map)?, + "sticky" => read_field("sticky", &mut sticky, &mut map)?, "ref" => read_field("ref", &mut ref_id, &mut map)?, _ => return Err(Error::unknown_field(&field, FIELDS)), } @@ -182,6 +200,30 @@ impl<'de> Deserialize<'de> for Op { RawOpType::MakeList => OpType::Make(ObjType::List), RawOpType::MakeText => OpType::Make(ObjType::Text), RawOpType::Del => OpType::Del, + RawOpType::Mark => { + let name = name.ok_or_else(|| Error::missing_field("mark(name)"))?; + let sticky = sticky.unwrap_or(false); + let value = if let Some(datatype) = datatype { + let raw_value = value + .ok_or_else(|| Error::missing_field("value"))? + .unwrap_or(ScalarValue::Null); + raw_value.as_datatype(datatype).map_err(|e| { + Error::invalid_value( + Unexpected::Other(e.unexpected.as_str()), + &e.expected.as_str(), + ) + })? + } else { + value + .ok_or_else(|| Error::missing_field("value"))? 
+ .unwrap_or(ScalarValue::Null) + }; + OpType::mark(name, sticky, value) + } + RawOpType::Unmark => { + let sticky = sticky.unwrap_or(true); + OpType::Unmark(sticky) + } RawOpType::Set => { let value = if let Some(datatype) = datatype { let raw_value = value diff --git a/automerge/src/legacy/serde_impls/op_type.rs b/automerge/src/legacy/serde_impls/op_type.rs index 01041ef7..a36355e2 100644 --- a/automerge/src/legacy/serde_impls/op_type.rs +++ b/automerge/src/legacy/serde_impls/op_type.rs @@ -15,8 +15,8 @@ impl Serialize for OpType { OpType::Make(ObjType::Table) => RawOpType::MakeTable, OpType::Make(ObjType::List) => RawOpType::MakeList, OpType::Make(ObjType::Text) => RawOpType::MakeText, - OpType::Mark(_) => unimplemented!(), - OpType::Unmark(_) => unimplemented!(), + OpType::Mark(_) => RawOpType::Mark, + OpType::Unmark(_) => RawOpType::Unmark, OpType::Del => RawOpType::Del, OpType::Inc(_) => RawOpType::Inc, OpType::Set(_) => RawOpType::Set, diff --git a/automerge/src/legacy/utility_impls/mod.rs b/automerge/src/legacy/utility_impls/mod.rs index 3476b8da..99fa1750 100644 --- a/automerge/src/legacy/utility_impls/mod.rs +++ b/automerge/src/legacy/utility_impls/mod.rs @@ -2,4 +2,3 @@ mod element_id; mod key; mod object_id; mod opid; -mod scalar_value; diff --git a/automerge/src/legacy/utility_impls/scalar_value.rs b/automerge/src/legacy/utility_impls/scalar_value.rs deleted file mode 100644 index ef0a3305..00000000 --- a/automerge/src/legacy/utility_impls/scalar_value.rs +++ /dev/null @@ -1,57 +0,0 @@ -use std::fmt; - -use smol_str::SmolStr; - -use crate::value::ScalarValue; - -impl From<&str> for ScalarValue { - fn from(s: &str) -> Self { - ScalarValue::Str(s.into()) - } -} - -impl From for ScalarValue { - fn from(n: i64) -> Self { - ScalarValue::Int(n) - } -} - -impl From for ScalarValue { - fn from(n: u64) -> Self { - ScalarValue::Uint(n) - } -} - -impl From for ScalarValue { - fn from(n: i32) -> Self { - ScalarValue::Int(n as i64) - } -} - -impl From for 
ScalarValue { - fn from(b: bool) -> Self { - ScalarValue::Boolean(b) - } -} - -impl From for ScalarValue { - fn from(c: char) -> Self { - ScalarValue::Str(SmolStr::new(c.to_string())) - } -} - -impl fmt::Display for ScalarValue { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - ScalarValue::Bytes(b) => write!(f, "\"{:?}\"", b), - ScalarValue::Str(s) => write!(f, "\"{}\"", s), - ScalarValue::Int(i) => write!(f, "{}", i), - ScalarValue::Uint(i) => write!(f, "{}", i), - ScalarValue::F64(n) => write!(f, "{:.324}", n), - ScalarValue::Counter(c) => write!(f, "Counter: {}", c), - ScalarValue::Timestamp(i) => write!(f, "Timestamp: {}", i), - ScalarValue::Boolean(b) => write!(f, "{}", b), - ScalarValue::Null => write!(f, "null"), - } - } -} diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 80af06b2..7bd50158 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -12,7 +12,6 @@ mod len; mod len_at; mod list_vals; mod list_vals_at; -mod mark; mod nth; mod nth_at; mod prop; @@ -27,7 +26,6 @@ pub(crate) use len::Len; pub(crate) use len_at::LenAt; pub(crate) use list_vals::ListVals; pub(crate) use list_vals_at::ListValsAt; -pub(crate) use mark::Mark; pub(crate) use nth::Nth; pub(crate) use nth_at::NthAt; pub(crate) use prop::Prop; diff --git a/automerge/src/query/mark.rs b/automerge/src/query/mark.rs deleted file mode 100644 index 3757677b..00000000 --- a/automerge/src/query/mark.rs +++ /dev/null @@ -1,71 +0,0 @@ -use crate::AutomergeError; -use crate::query::{QueryResult, TreeQuery}; -use crate::types::{ElemId, Key, Op}; -use std::fmt::Debug; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct Mark { - start: usize, - end: usize, - pos: usize, - seen: usize, - _ops: Vec<(usize, Key)>, - count: usize, - last_seen: Option, - last_insert: Option, -} - -impl Mark { - pub fn new(start: usize, end: usize) -> Self { - Mark { - start, - end, - pos: 0, - seen: 0, - _ops: Vec::new(), - count: 0, - last_seen: None, - 
last_insert: None, - } - } - - pub fn ops(&self) -> Result<((usize,Key),(usize,Key)),AutomergeError> { - if self._ops.len() == 2 { - Ok((self._ops[0], self._ops[1])) - } else if self._ops.len() == 1 { - Ok((self._ops[0], (self.pos + 1, self.last_insert.into()))) - } else { - Err(AutomergeError::Fail) - } - } -} - -impl TreeQuery for Mark { - /* - fn query_node(&mut self, _child: &OpTreeNode) -> QueryResult { - unimplemented!() - } - */ - - fn query_element(&mut self, element: &Op) -> QueryResult { - // find location to insert - // mark or set - if element.insert { - if self.seen >= self.end { - self._ops.push((self.pos + 1, self.last_insert.into())); - return QueryResult::Finish; - } - if self.seen >= self.start && self._ops.is_empty() { - self._ops.push((self.pos, self.last_insert.into())); - } - self.last_seen = None; - self.last_insert = element.elemid(); - } - if self.last_seen.is_none() && element.visible() { - self.seen += 1; - self.last_seen = element.elemid() - } - self.pos += 1; - QueryResult::Next - } -} diff --git a/automerge/src/query/spans.rs b/automerge/src/query/spans.rs index 461c4a70..f39f3fc1 100644 --- a/automerge/src/query/spans.rs +++ b/automerge/src/query/spans.rs @@ -53,9 +53,11 @@ impl Spans { if self.changed && (self.seen_at_last_mark != self.seen_at_this_mark || self.seen_at_last_mark.is_none() && self.seen_at_this_mark.is_none()) { self.changed = false; self.seen_at_last_mark = self.seen_at_this_mark; + let mut marks : Vec<_> = self.marks.iter().map(|(key, val)| (key.clone(), val.clone())).collect(); + marks.sort_by(|(k1,_),(k2,_)| k1.cmp(k2)); self.spans.push(Span { pos: self.seen, - marks: self.marks.iter().map(|(key, val)| (key.clone(), val.clone())).collect() + marks, }); } } diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 6b883cfa..91a45781 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -164,6 +164,12 @@ pub enum OpType { Unmark(bool), } +impl OpType { + pub (crate) fn mark(name: String, 
sticky: bool, value: ScalarValue) -> Self { + OpType::Mark(MarkData { name, sticky, value }) + } +} + #[derive(PartialEq, Debug, Clone)] pub struct MarkData { pub name: String, diff --git a/automerge/src/value.rs b/automerge/src/value.rs index eef757fd..9ade57ef 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -375,7 +375,79 @@ impl ScalarValue { } } + pub fn to_bool(self) -> Option { + match self { + ScalarValue::Boolean(b) => Some(b), + _ => None, + } + } + + pub fn to_string(self) -> Option { + match self { + ScalarValue::Str(s) => Some(s.to_string()), + _ => None, + } + } + pub fn counter(n: i64) -> ScalarValue { ScalarValue::Counter(n.into()) } } + +impl From<&str> for ScalarValue { + fn from(s: &str) -> Self { + ScalarValue::Str(s.into()) + } +} + +impl From for ScalarValue { + fn from(s: String) -> Self { + ScalarValue::Str(s.into()) + } +} + +impl From for ScalarValue { + fn from(n: i64) -> Self { + ScalarValue::Int(n) + } +} + +impl From for ScalarValue { + fn from(n: u64) -> Self { + ScalarValue::Uint(n) + } +} + +impl From for ScalarValue { + fn from(n: i32) -> Self { + ScalarValue::Int(n as i64) + } +} + +impl From for ScalarValue { + fn from(b: bool) -> Self { + ScalarValue::Boolean(b) + } +} + +impl From for ScalarValue { + fn from(c: char) -> Self { + ScalarValue::Str(SmolStr::new(c.to_string())) + } +} + +impl fmt::Display for ScalarValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ScalarValue::Bytes(b) => write!(f, "\"{:?}\"", b), + ScalarValue::Str(s) => write!(f, "\"{}\"", s), + ScalarValue::Int(i) => write!(f, "{}", i), + ScalarValue::Uint(i) => write!(f, "{}", i), + ScalarValue::F64(n) => write!(f, "{:.324}", n), + ScalarValue::Counter(c) => write!(f, "Counter: {}", c), + ScalarValue::Timestamp(i) => write!(f, "Timestamp: {}", i), + ScalarValue::Boolean(b) => write!(f, "{}", b), + ScalarValue::Null => write!(f, "null"), + } + } +} From bfc051f4fbbfabbb87a84ed0233b068a15c40302 Mon Sep 17 
00:00:00 2001 From: Orion Henry Date: Fri, 28 Jan 2022 12:37:35 -0500 Subject: [PATCH 048/730] cleanup / rename --- automerge-wasm/src/lib.rs | 6 +- automerge/src/automerge.rs | 76 ++++++++++----------- automerge/src/columnar.rs | 62 ++++++++--------- automerge/src/legacy/serde_impls/op.rs | 38 +++++------ automerge/src/legacy/serde_impls/op_type.rs | 4 +- automerge/src/query/insert.rs | 7 +- automerge/src/query/spans.rs | 36 ++++++---- automerge/src/types.rs | 30 +++++--- 8 files changed, 140 insertions(+), 119 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index e58a0611..86564cdc 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -339,9 +339,9 @@ impl Automerge { mark.push(&ScalarValue(m.1).into()); marks.push(&mark.into()); } - let text_span = &text[last_pos..s.pos];//.slice(last_pos, s.pos); + let text_span = &text[last_pos..s.pos]; //.slice(last_pos, s.pos); if text_span.len() > 0 { - result.push(&text_span.into()); + result.push(&text_span.into()); } result.push(&marks); last_pos = s.pos; @@ -352,7 +352,7 @@ impl Automerge { } let text_span = &text[last_pos..]; if text_span.len() > 0 { - result.push(&text_span.into()); + result.push(&text_span.into()); } Ok(result.into()) } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index afed341e..40e37320 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -446,7 +446,7 @@ impl Automerge { Ok(buffer) } - pub fn spans(&self, obj: &ExId) -> Result,AutomergeError> { + pub fn spans(&self, obj: &ExId) -> Result, AutomergeError> { let obj = self.exid_to_obj(obj)?; let mut query = self.ops.search(obj, query::Spans::new()); query.check_marks(); @@ -457,49 +457,49 @@ impl Automerge { &mut self, obj: &ExId, start: usize, - start_sticky: bool, + expand_start: bool, end: usize, - end_sticky: bool, + expand_end: bool, mark: &str, value: ScalarValue, ) -> Result<(), AutomergeError> { let obj = self.exid_to_obj(obj)?; - 
self.do_insert(obj, start, OpType::mark(mark.into(), start_sticky, value))?; - self.do_insert(obj, end, OpType::Unmark(end_sticky))?; + self.do_insert(obj, start, OpType::mark(mark.into(), expand_start, value))?; + self.do_insert(obj, end, OpType::MarkEnd(expand_end))?; -/* - let (a, b) = query.ops()?; - let (pos, key) = a; - let id = self.next_id(); - let op = Op { - change: self.history.len(), - id, - action: OpType::Mark(MarkData { name: mark.into(), sticky: start_sticky, value}), - obj, - key, - succ: Default::default(), - pred: Default::default(), - insert: true, - }; - self.ops.insert(pos, op.clone()); - self.tx().operations.push(op); + /* + let (a, b) = query.ops()?; + let (pos, key) = a; + let id = self.next_id(); + let op = Op { + change: self.history.len(), + id, + action: OpType::Mark(MarkData { name: mark.into(), expand: expand_start, value}), + obj, + key, + succ: Default::default(), + pred: Default::default(), + insert: true, + }; + self.ops.insert(pos, op.clone()); + self.tx().operations.push(op); - let (pos, key) = b; - let id = self.next_id(); - let op = Op { - change: self.history.len(), - id, - action: OpType::Unmark(end_sticky), - obj, - key, - succ: Default::default(), - pred: Default::default(), - insert: true, - }; - self.ops.insert(pos, op.clone()); - self.tx().operations.push(op); -*/ + let (pos, key) = b; + let id = self.next_id(); + let op = Op { + change: self.history.len(), + id, + action: OpType::Unmark(expand_end), + obj, + key, + succ: Default::default(), + pred: Default::default(), + insert: true, + }; + self.ops.insert(pos, op.clone()); + self.tx().operations.push(op); + */ Ok(()) } @@ -1159,8 +1159,8 @@ impl Automerge { OpType::Set(value) => format!("{}", value), OpType::Make(obj) => format!("make({})", obj), OpType::Inc(obj) => format!("inc({})", obj), - OpType::Mark(m) => format!("mark({}={})", m.name,m.value), - OpType::Unmark(_) => "unmark".into(), + OpType::MarkBegin(m) => format!("mark({}={})", m.name, m.value), + 
OpType::MarkEnd(_) => "/mark".into(), OpType::Del => format!("del{}", 0), }; let pred: Vec<_> = i.pred.iter().map(|id| self.to_string(*id)).collect(); diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index f64f8186..28aca822 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -11,7 +11,7 @@ use std::{ str, }; -use crate::types::{ActorId, ElemId, Key, ObjId, ObjType, Op, OpId, OpType, ScalarValue, MarkData }; +use crate::types::{ActorId, ElemId, Key, ObjId, ObjType, Op, OpId, OpType, ScalarValue}; use crate::legacy as amp; use amp::SortedVec; @@ -134,14 +134,14 @@ impl<'a> Iterator for OperationIterator<'a> { Action::MakeTable => OpType::Make(ObjType::Table), Action::Del => OpType::Del, Action::Inc => OpType::Inc(value.to_i64()?), - Action::Mark => { - // mark has 3 things in the val column - let name = value.to_string()?; - let sticky = self.value.next()?.to_bool()?; - let value = self.value.next()?; - OpType::Mark(MarkData { name, sticky, value }) + Action::MarkBegin => { + // mark has 3 things in the val column + let name = value.to_string()?; + let expand = self.value.next()?.to_bool()?; + let value = self.value.next()?; + OpType::mark(name, expand, value) } - Action::Unmark => OpType::Unmark(value.to_bool()?), + Action::MarkEnd => OpType::MarkEnd(value.to_bool()?), Action::Unused => panic!("invalid action"), }; Some(amp::Op { @@ -184,14 +184,14 @@ impl<'a> Iterator for DocOpIterator<'a> { Action::MakeTable => OpType::Make(ObjType::Table), Action::Del => OpType::Del, Action::Inc => OpType::Inc(value.to_i64()?), - Action::Mark => { - // mark has 3 things in the val column - let name = value.to_string()?; - let sticky = self.value.next()?.to_bool()?; - let value = self.value.next()?; - OpType::Mark(MarkData { name, sticky, value }) + Action::MarkBegin => { + // mark has 3 things in the val column + let name = value.to_string()?; + let expand = self.value.next()?.to_bool()?; + let value = self.value.next()?; + 
OpType::mark(name, expand, value) } - Action::Unmark => OpType::Unmark(value.to_bool()?), + Action::MarkEnd => OpType::MarkEnd(value.to_bool()?), Action::Unused => panic!("invalid action"), }; Some(DocOp { @@ -1082,15 +1082,15 @@ impl DocOpEncoder { self.val.append_null(); Action::Del } - amp::OpType::Mark(m) => { + amp::OpType::MarkBegin(m) => { self.val.append_value(&m.name.clone().into(), actors); - self.val.append_value(&m.sticky.into(), actors); - self.val.append_value(&m.value.clone().into(), actors); - Action::Mark + self.val.append_value(&m.expand.into(), actors); + self.val.append_value(&m.value.clone(), actors); + Action::MarkBegin } - amp::OpType::Unmark(s) => { + amp::OpType::MarkEnd(s) => { self.val.append_value(&(*s).into(), actors); - Action::Unmark + Action::MarkEnd } amp::OpType::Make(kind) => { self.val.append_null(); @@ -1198,15 +1198,15 @@ impl ColumnEncoder { self.val.append_null(); Action::Del } - OpType::Mark(m) => { + OpType::MarkBegin(m) => { self.val.append_value2(&m.name.clone().into(), actors); - self.val.append_value2(&m.sticky.into(), actors); - self.val.append_value2(&m.value.clone().into(), actors); - Action::Mark + self.val.append_value2(&m.expand.into(), actors); + self.val.append_value2(&m.value.clone(), actors); + Action::MarkBegin } - OpType::Unmark(s) => { + OpType::MarkEnd(s) => { self.val.append_value2(&(*s).into(), actors); - Action::Unmark + Action::MarkEnd } OpType::Make(kind) => { self.val.append_null(); @@ -1313,9 +1313,9 @@ pub(crate) enum Action { MakeText, Inc, MakeTable, - Mark, + MarkBegin, Unused, // final bit is used to mask `Make` actions - Unmark, + MarkEnd, } const ACTIONS: [Action; 10] = [ Action::MakeMap, @@ -1325,9 +1325,9 @@ const ACTIONS: [Action; 10] = [ Action::MakeText, Action::Inc, Action::MakeTable, - Action::Mark, + Action::MarkBegin, Action::Unused, - Action::Unmark, + Action::MarkEnd, ]; impl Decodable for Action { diff --git a/automerge/src/legacy/serde_impls/op.rs 
b/automerge/src/legacy/serde_impls/op.rs index 298464b0..b91ae7e8 100644 --- a/automerge/src/legacy/serde_impls/op.rs +++ b/automerge/src/legacy/serde_impls/op.rs @@ -49,12 +49,12 @@ impl Serialize for Op { match &self.action { OpType::Inc(n) => op.serialize_field("value", &n)?, OpType::Set(value) => op.serialize_field("value", &value)?, - OpType::Mark(m) => { - op.serialize_field("name", &m.name)?; - op.serialize_field("sticky", &m.sticky)?; - op.serialize_field("value", &m.value)?; + OpType::MarkBegin(m) => { + op.serialize_field("name", &m.name)?; + op.serialize_field("expand", &m.expand)?; + op.serialize_field("value", &m.value)?; } - OpType::Unmark(s) => op.serialize_field("sticky", &s)?, + OpType::MarkEnd(s) => op.serialize_field("expand", &s)?, _ => {} } op.serialize_field("pred", &self.pred)?; @@ -76,8 +76,8 @@ pub(crate) enum RawOpType { Del, Inc, Set, - Mark, - Unmark, + MarkBegin, + MarkEnd, } impl Serialize for RawOpType { @@ -93,8 +93,8 @@ impl Serialize for RawOpType { RawOpType::Del => "del", RawOpType::Inc => "inc", RawOpType::Set => "set", - RawOpType::Mark => "mark", - RawOpType::Unmark => "unmark", + RawOpType::MarkBegin => "mark_begin", + RawOpType::MarkEnd => "mark_end", }; serializer.serialize_str(s) } @@ -126,8 +126,8 @@ impl<'de> Deserialize<'de> for RawOpType { "del" => Ok(RawOpType::Del), "inc" => Ok(RawOpType::Inc), "set" => Ok(RawOpType::Set), - "mark" => Ok(RawOpType::Mark), - "unmark" => Ok(RawOpType::Unmark), + "mark_begin" => Ok(RawOpType::MarkBegin), + "mark_end" => Ok(RawOpType::MarkEnd), other => Err(Error::unknown_variant(other, VARIANTS)), } } @@ -159,7 +159,7 @@ impl<'de> Deserialize<'de> for Op { let mut datatype: Option = None; let mut value: Option> = None; let mut name: Option = None; - let mut sticky: Option = None; + let mut expand: Option = None; let mut ref_id: Option = None; while let Some(field) = map.next_key::()? 
{ match field.as_ref() { @@ -184,7 +184,7 @@ impl<'de> Deserialize<'de> for Op { "datatype" => read_field("datatype", &mut datatype, &mut map)?, "value" => read_field("value", &mut value, &mut map)?, "name" => read_field("name", &mut name, &mut map)?, - "sticky" => read_field("sticky", &mut sticky, &mut map)?, + "expand" => read_field("expand", &mut expand, &mut map)?, "ref" => read_field("ref", &mut ref_id, &mut map)?, _ => return Err(Error::unknown_field(&field, FIELDS)), } @@ -200,9 +200,9 @@ impl<'de> Deserialize<'de> for Op { RawOpType::MakeList => OpType::Make(ObjType::List), RawOpType::MakeText => OpType::Make(ObjType::Text), RawOpType::Del => OpType::Del, - RawOpType::Mark => { + RawOpType::MarkBegin => { let name = name.ok_or_else(|| Error::missing_field("mark(name)"))?; - let sticky = sticky.unwrap_or(false); + let expand = expand.unwrap_or(false); let value = if let Some(datatype) = datatype { let raw_value = value .ok_or_else(|| Error::missing_field("value"))? @@ -218,11 +218,11 @@ impl<'de> Deserialize<'de> for Op { .ok_or_else(|| Error::missing_field("value"))? 
.unwrap_or(ScalarValue::Null) }; - OpType::mark(name, sticky, value) + OpType::mark(name, expand, value) } - RawOpType::Unmark => { - let sticky = sticky.unwrap_or(true); - OpType::Unmark(sticky) + RawOpType::MarkEnd => { + let expand = expand.unwrap_or(true); + OpType::MarkEnd(expand) } RawOpType::Set => { let value = if let Some(datatype) = datatype { diff --git a/automerge/src/legacy/serde_impls/op_type.rs b/automerge/src/legacy/serde_impls/op_type.rs index a36355e2..0959b11d 100644 --- a/automerge/src/legacy/serde_impls/op_type.rs +++ b/automerge/src/legacy/serde_impls/op_type.rs @@ -15,8 +15,8 @@ impl Serialize for OpType { OpType::Make(ObjType::Table) => RawOpType::MakeTable, OpType::Make(ObjType::List) => RawOpType::MakeList, OpType::Make(ObjType::Text) => RawOpType::MakeText, - OpType::Mark(_) => RawOpType::Mark, - OpType::Unmark(_) => RawOpType::Unmark, + OpType::MarkBegin(_) => RawOpType::MarkBegin, + OpType::MarkEnd(_) => RawOpType::MarkEnd, OpType::Del => RawOpType::Del, OpType::Inc(_) => RawOpType::Inc, OpType::Set(_) => RawOpType::Set, diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index 8a02971a..38a58e45 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -18,7 +18,7 @@ pub(crate) struct InsertNth { impl InsertNth { pub fn new(target: usize) -> Self { - let (valid,last_valid_insert) = if target == 0 { + let (valid, last_valid_insert) = if target == 0 { (Some(0), Some(HEAD)) } else { (None, None) @@ -39,7 +39,10 @@ impl InsertNth { } pub fn key(&self) -> Result { - Ok(self.last_valid_insert.ok_or(AutomergeError::InvalidIndex(self.target))?.into()) + Ok(self + .last_valid_insert + .ok_or(AutomergeError::InvalidIndex(self.target))? 
+ .into()) //if self.target == 0 { /* if self.last_insert.is_none() { diff --git a/automerge/src/query/spans.rs b/automerge/src/query/spans.rs index f39f3fc1..589dba03 100644 --- a/automerge/src/query/spans.rs +++ b/automerge/src/query/spans.rs @@ -1,7 +1,7 @@ +use crate::query::{OpSetMetadata, QueryResult, TreeQuery}; +use crate::types::{ElemId, Op, OpType, ScalarValue}; use std::collections::HashMap; use std::fmt::Debug; -use crate::query::{QueryResult, TreeQuery, OpSetMetadata}; -use crate::types::{ElemId, Op, ScalarValue, OpType}; #[derive(Debug, Clone, PartialEq)] pub(crate) struct Spans { @@ -12,7 +12,7 @@ pub(crate) struct Spans { seen_at_this_mark: Option, seen_at_last_mark: Option, ops: Vec, - marks: HashMap, + marks: HashMap, changed: bool, pub spans: Vec, } @@ -24,7 +24,7 @@ pub struct Span { } impl Spans { - pub fn new() -> Self { + pub fn new() -> Self { Spans { pos: 0, seen: 0, @@ -42,20 +42,27 @@ impl Spans { pub fn check_marks(&mut self) { let mut new_marks = HashMap::new(); for op in &self.ops { - if let OpType::Mark(m) = &op.action { - new_marks.insert(m.name.clone(),m.value.clone()); + if let OpType::MarkBegin(m) = &op.action { + new_marks.insert(m.name.clone(), m.value.clone()); } } if new_marks != self.marks { self.changed = true; self.marks = new_marks; } - if self.changed && (self.seen_at_last_mark != self.seen_at_this_mark || self.seen_at_last_mark.is_none() && self.seen_at_this_mark.is_none()) { + if self.changed + && (self.seen_at_last_mark != self.seen_at_this_mark + || self.seen_at_last_mark.is_none() && self.seen_at_this_mark.is_none()) + { self.changed = false; self.seen_at_last_mark = self.seen_at_this_mark; - let mut marks : Vec<_> = self.marks.iter().map(|(key, val)| (key.clone(), val.clone())).collect(); - marks.sort_by(|(k1,_),(k2,_)| k1.cmp(k2)); - self.spans.push(Span { + let mut marks: Vec<_> = self + .marks + .iter() + .map(|(key, val)| (key.clone(), val.clone())) + .collect(); + marks.sort_by(|(k1, _), (k2, _)| k1.cmp(k2)); + 
self.spans.push(Span { pos: self.seen, marks, }); @@ -74,11 +81,14 @@ impl TreeQuery for Spans { // find location to insert // mark or set if element.succ.is_empty() { - if let OpType::Mark(_) = &element.action { - let pos = self.ops.binary_search_by(|probe| m.lamport_cmp(probe.id, element.id)).unwrap_err(); + if let OpType::MarkBegin(_) = &element.action { + let pos = self + .ops + .binary_search_by(|probe| m.lamport_cmp(probe.id, element.id)) + .unwrap_err(); self.ops.insert(pos, element.clone()); } - if let OpType::Unmark(_) = &element.action { + if let OpType::MarkEnd(_) = &element.action { self.ops.retain(|op| op.id != element.id.prev()); } } diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 91a45781..cbf6ed22 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -160,21 +160,25 @@ pub enum OpType { Del, Inc(i64), Set(ScalarValue), - Mark(MarkData), - Unmark(bool), + MarkBegin(MarkData), + MarkEnd(bool), } impl OpType { - pub (crate) fn mark(name: String, sticky: bool, value: ScalarValue) -> Self { - OpType::Mark(MarkData { name, sticky, value }) - } + pub(crate) fn mark(name: String, expand: bool, value: ScalarValue) -> Self { + OpType::MarkBegin(MarkData { + name, + expand, + value, + }) + } } #[derive(PartialEq, Debug, Clone)] pub struct MarkData { pub name: String, pub value: ScalarValue, - pub sticky: bool, + pub expand: bool, } #[derive(Debug)] @@ -397,7 +401,7 @@ impl Op { } pub fn visible(&self) -> bool { - if self.is_inc() || self.is_mark() { + if self.is_inc() || self.is_mark() { false } else if self.is_counter() { self.succ.len() <= self.incs() @@ -423,11 +427,15 @@ impl Op { } pub fn valid_mark_anchor(&self) -> bool { - self.succ.is_empty() && matches!(&self.action, OpType::Mark(MarkData { sticky: true, ..}) | OpType::Unmark(false)) + self.succ.is_empty() + && matches!( + &self.action, + OpType::MarkBegin(MarkData { expand: true, .. 
}) | OpType::MarkEnd(false) + ) } pub fn is_mark(&self) -> bool { - matches!(&self.action, OpType::Mark(_) | OpType::Unmark(_)) + matches!(&self.action, OpType::MarkBegin(_) | OpType::MarkEnd(_)) } pub fn is_counter(&self) -> bool { @@ -464,8 +472,8 @@ impl Op { OpType::Set(value) if self.insert => format!("i:{}", value), OpType::Set(value) => format!("s:{}", value), OpType::Make(obj) => format!("make{}", obj), - OpType::Mark(m) => format!("mark{}={}", m.name, m.value), - OpType::Unmark(_) => "unmark".into(), + OpType::MarkBegin(m) => format!("mark{}={}", m.name, m.value), + OpType::MarkEnd(_) => "unmark".into(), OpType::Inc(val) => format!("inc:{}", val), OpType::Del => "del".to_string(), } From 4c84ccba06b5068d68105c36656b5e79059e38d2 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 31 Jan 2022 15:23:46 -0500 Subject: [PATCH 049/730] half done - not working typescript --- automerge-wasm/README.md | 694 +++++++++++++++- automerge-wasm/index.d.ts | 204 +++++ automerge-wasm/package.json | 12 +- automerge-wasm/src/lib.rs | 12 +- automerge-wasm/test/test.ts | 1458 ++++++++++++++++++++++++++++++++++ automerge-wasm/tsconfig.json | 17 + 6 files changed, 2390 insertions(+), 7 deletions(-) create mode 100644 automerge-wasm/index.d.ts create mode 100644 automerge-wasm/test/test.ts create mode 100644 automerge-wasm/tsconfig.json diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md index 258cd572..985955a3 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -1 +1,693 @@ -todo +## Automerge WASM Low Level Interface + +This is a low level automerge library written in rust exporting a javascript API via WASM. This low level api is the underpinning to the `automerge-js` library that reimplements the Automerge API via these low level functions. 
+ +### Static Functions + +### Methods + + `doc.clone(actor?: string)` : Make a complete + + `doc.free()` : deallocate WASM memory associated with a document + +#[wasm_bindgen] + pub fn free(self) {} + + #[wasm_bindgen(js_name = pendingOps)] + pub fn pending_ops(&self) -> JsValue { + (self.0.pending_ops() as u32).into() + } + + pub fn commit(&mut self, message: Option, time: Option) -> Array { + let heads = self.0.commit(message, time.map(|n| n as i64)); + let heads: Array = heads + .iter() + .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .collect(); + heads + } + + pub fn rollback(&mut self) -> f64 { + self.0.rollback() as f64 + } + + pub fn keys(&mut self, obj: String, heads: Option) -> Result { + let obj = self.import(obj)?; + let result = if let Some(heads) = get_heads(heads) { + self.0.keys_at(&obj, &heads) + } else { + self.0.keys(&obj) + } + .iter() + .map(|s| JsValue::from_str(s)) + .collect(); + Ok(result) + } + + pub fn text(&mut self, obj: String, heads: Option) -> Result { + let obj = self.import(obj)?; + if let Some(heads) = get_heads(heads) { + self.0.text_at(&obj, &heads) + } else { + self.0.text(&obj) + } + .map_err(to_js_err) + } + + pub fn splice( + &mut self, + obj: String, + start: f64, + delete_count: f64, + text: JsValue, + ) -> Result, JsValue> { + let obj = self.import(obj)?; + let start = start as usize; + let delete_count = delete_count as usize; + let mut vals = vec![]; + if let Some(t) = text.as_string() { + self.0 + .splice_text(&obj, start, delete_count, &t) + .map_err(to_js_err)?; + Ok(None) + } else { + if let Ok(array) = text.dyn_into::() { + for i in array.iter() { + if let Ok(array) = i.clone().dyn_into::() { + let value = array.get(1); + let datatype = array.get(2); + let value = self.import_value(value, datatype.as_string())?; + vals.push(value); + } else { + let value = self.import_value(i, None)?; + vals.push(value); + } + } + } + let result = self + .0 + .splice(&obj, start, delete_count, vals) + .map_err(to_js_err)?; + if 
result.is_empty() { + Ok(None) + } else { + let result: Array = result + .iter() + .map(|r| JsValue::from(r.to_string())) + .collect(); + Ok(result.into()) + } + } + } + + pub fn push( + &mut self, + obj: String, + value: JsValue, + datatype: Option, + ) -> Result, JsValue> { + let obj = self.import(obj)?; + let value = self.import_value(value, datatype)?; + let index = self.0.length(&obj); + let opid = self.0.insert(&obj, index, value).map_err(to_js_err)?; + Ok(opid.map(|id| id.to_string())) + } + + pub fn insert( + &mut self, + obj: String, + index: f64, + value: JsValue, + datatype: Option, + ) -> Result, JsValue> { + let obj = self.import(obj)?; + let index = index as f64; + let value = self.import_value(value, datatype)?; + let opid = self + .0 + .insert(&obj, index as usize, value) + .map_err(to_js_err)?; + Ok(opid.map(|id| id.to_string())) + } + + pub fn set( + &mut self, + obj: String, + prop: JsValue, + value: JsValue, + datatype: Option, + ) -> Result, JsValue> { + let obj = self.import(obj)?; + let prop = self.import_prop(prop)?; + let value = self.import_value(value, datatype)?; + let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; + Ok(opid.map(|id| id.to_string())) + } + + pub fn make( + &mut self, + obj: String, + prop: JsValue, + value: JsValue, + ) -> Result { + let obj = self.import(obj)?; + let prop = self.import_prop(prop)?; + let value = self.import_value(value, None)?; + if value.is_object() { + let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; + Ok(opid.unwrap().to_string()) + } else { + Err("invalid object type".into()) + } + } + + pub fn inc(&mut self, obj: String, prop: JsValue, value: JsValue) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let prop = self.import_prop(prop)?; + let value: f64 = value + .as_f64() + .ok_or("inc needs a numberic value") + .map_err(to_js_err)?; + self.0.inc(&obj, prop, value as i64).map_err(to_js_err)?; + Ok(()) + } + + pub fn value( + &mut self, + obj: String, + prop: JsValue, 
+ heads: Option, + ) -> Result { + let obj = self.import(obj)?; + let result = Array::new(); + let prop = to_prop(prop); + let heads = get_heads(heads); + if let Ok(prop) = prop { + let value = if let Some(h) = heads { + self.0.value_at(&obj, prop, &h) + } else { + self.0.value(&obj, prop) + } + .map_err(to_js_err)?; + match value { + Some((Value::Object(obj_type), obj_id)) => { + result.push(&obj_type.to_string().into()); + result.push(&obj_id.to_string().into()); + } + Some((Value::Scalar(value), _)) => { + result.push(&datatype(&value).into()); + result.push(&ScalarValue(value).into()); + } + None => {} + } + } + Ok(result) + } + + pub fn values( + &mut self, + obj: String, + arg: JsValue, + heads: Option, + ) -> Result { + let obj = self.import(obj)?; + let result = Array::new(); + let prop = to_prop(arg); + if let Ok(prop) = prop { + let values = if let Some(heads) = get_heads(heads) { + self.0.values_at(&obj, prop, &heads) + } else { + self.0.values(&obj, prop) + } + .map_err(to_js_err)?; + for value in values { + match value { + (Value::Object(obj_type), obj_id) => { + let sub = Array::new(); + sub.push(&obj_type.to_string().into()); + sub.push(&obj_id.to_string().into()); + result.push(&sub.into()); + } + (Value::Scalar(value), id) => { + let sub = Array::new(); + sub.push(&datatype(&value).into()); + sub.push(&ScalarValue(value).into()); + sub.push(&id.to_string().into()); + result.push(&sub.into()); + } + } + } + } + Ok(result) + } + + pub fn length(&mut self, obj: String, heads: Option) -> Result { + let obj = self.import(obj)?; + if let Some(heads) = get_heads(heads) { + Ok(self.0.length_at(&obj, &heads) as f64) + } else { + Ok(self.0.length(&obj) as f64) + } + } + + pub fn del(&mut self, obj: String, prop: JsValue) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let prop = to_prop(prop)?; + self.0.del(&obj, prop).map_err(to_js_err)?; + Ok(()) + } + + pub fn mark( + &mut self, + obj: JsValue, + range: JsValue, + name: JsValue, + value: 
JsValue, + datatype: JsValue, + ) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let re = Regex::new(r"([\[\(])(\d+)\.\.(\d+)([\)\]])").unwrap(); + let range = range.as_string().ok_or("range must be a string")?; + let cap = re.captures_iter(&range).next().ok_or("range must be in the form of (start..end] or [start..end) etc... () for sticky, [] for normal")?; + let start: usize = cap[2].parse().map_err(|_| to_js_err("invalid start"))?; + let end: usize = cap[3].parse().map_err(|_| to_js_err("invalid end"))?; + let start_sticky = &cap[1] == "("; + let end_sticky = &cap[4] == ")"; + let name = name + .as_string() + .ok_or("invalid mark name") + .map_err(to_js_err)?; + let value = self.import_scalar(&value, datatype.as_string())?; + self.0 + .mark(&obj, start, start_sticky, end, end_sticky, &name, value) + .map_err(to_js_err)?; + Ok(()) + } + + pub fn spans(&mut self, obj: JsValue) -> Result { + let obj = self.import(obj)?; + let text = self.0.text(&obj).map_err(to_js_err)?; + let spans = self.0.spans(&obj).map_err(to_js_err)?; + let mut last_pos = 0; + let result = Array::new(); + for s in spans { + let marks = Array::new(); + for m in s.marks { + let mark = Array::new(); + mark.push(&m.0.into()); + mark.push(&datatype(&m.1).into()); + mark.push(&ScalarValue(m.1).into()); + marks.push(&mark.into()); + } + let text_span = &text[last_pos..s.pos]; //.slice(last_pos, s.pos); + if text_span.len() > 0 { + result.push(&text_span.into()); + } + result.push(&marks); + last_pos = s.pos; + //let obj = Object::new().into(); + //js_set(&obj, "pos", s.pos as i32)?; + //js_set(&obj, "marks", marks)?; + //result.push(&obj.into()); + } + let text_span = &text[last_pos..]; + if text_span.len() > 0 { + result.push(&text_span.into()); + } + Ok(result.into()) + } + + pub fn save(&mut self) -> Result { + self.0 + .save() + .map(|v| Uint8Array::from(v.as_slice())) + .map_err(to_js_err) + } + + #[wasm_bindgen(js_name = saveIncremental)] + pub fn save_incremental(&mut self) -> 
Uint8Array { + let bytes = self.0.save_incremental(); + Uint8Array::from(bytes.as_slice()) + } + + #[wasm_bindgen(js_name = loadIncremental)] + pub fn load_incremental(&mut self, data: Uint8Array) -> Result { + let data = data.to_vec(); + let len = self.0.load_incremental(&data).map_err(to_js_err)?; + Ok(len as f64) + } + + #[wasm_bindgen(js_name = applyChanges)] + pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> { + let changes: Vec<_> = JS(changes).try_into()?; + self.0.apply_changes(&changes).map_err(to_js_err)?; + Ok(()) + } + + #[wasm_bindgen(js_name = getChanges)] + pub fn get_changes(&mut self, have_deps: JsValue) -> Result { + let deps: Vec<_> = JS(have_deps).try_into()?; + let changes = self.0.get_changes(&deps); + let changes: Array = changes + .iter() + .map(|c| Uint8Array::from(c.raw_bytes())) + .collect(); + Ok(changes) + } + + #[wasm_bindgen(js_name = getChangesAdded)] + pub fn get_changes_added(&mut self, other: &Automerge) -> Result { + let changes = self.0.get_changes_added(&other.0); + let changes: Array = changes + .iter() + .map(|c| Uint8Array::from(c.raw_bytes())) + .collect(); + Ok(changes) + } + + #[wasm_bindgen(js_name = getHeads)] + pub fn get_heads(&mut self) -> Array { + let heads = self.0.get_heads(); + let heads: Array = heads + .iter() + .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .collect(); + heads + } + + #[wasm_bindgen(js_name = getActorId)] + pub fn get_actor_id(&mut self) -> String { + let actor = self.0.get_actor(); + actor.to_string() + } + + #[wasm_bindgen(js_name = getLastLocalChange)] + pub fn get_last_local_change(&mut self) -> Result, JsValue> { + if let Some(change) = self.0.get_last_local_change() { + Ok(Some(Uint8Array::from(change.raw_bytes()))) + } else { + Ok(None) + } + } + + pub fn dump(&self) { + self.0.dump() + } + + #[wasm_bindgen(js_name = getMissingDeps)] + pub fn get_missing_deps(&mut self, heads: Option) -> Result { + let heads = get_heads(heads).unwrap_or_default(); + let deps 
= self.0.get_missing_deps(&heads); + let deps: Array = deps + .iter() + .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .collect(); + Ok(deps) + } + + #[wasm_bindgen(js_name = receiveSyncMessage)] + pub fn receive_sync_message( + &mut self, + state: &mut SyncState, + message: Uint8Array, + ) -> Result<(), JsValue> { + let message = message.to_vec(); + let message = am::SyncMessage::decode(message.as_slice()).map_err(to_js_err)?; + self.0 + .receive_sync_message(&mut state.0, message) + .map_err(to_js_err)?; + Ok(()) + } + + #[wasm_bindgen(js_name = generateSyncMessage)] + pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result { + if let Some(message) = self.0.generate_sync_message(&mut state.0) { + Ok(Uint8Array::from(message.encode().map_err(to_js_err)?.as_slice()).into()) + } else { + Ok(JsValue::null()) + } + } + + #[wasm_bindgen(js_name = toJS)] + pub fn to_js(&self) -> JsValue { + map_to_js(&self.0, &ROOT) + } + + fn import(&self, id: String) -> Result { + self.0.import(&id).map_err(to_js_err) + } + + fn import_prop(&mut self, prop: JsValue) -> Result { + if let Some(s) = prop.as_string() { + Ok(s.into()) + } else if let Some(n) = prop.as_f64() { + Ok((n as usize).into()) + } else { + Err(format!("invalid prop {:?}", prop).into()) + } + } + + fn import_scalar( + &mut self, + value: &JsValue, + datatype: Option, + ) -> Result { + match datatype.as_deref() { + Some("boolean") => value + .as_bool() + .ok_or_else(|| "value must be a bool".into()) + .map(am::ScalarValue::Boolean), + Some("int") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|v| am::ScalarValue::Int(v as i64)), + Some("uint") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|v| am::ScalarValue::Uint(v as u64)), + Some("f64") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(am::ScalarValue::F64), + Some("bytes") => Ok(am::ScalarValue::Bytes( + value.clone().dyn_into::().unwrap().to_vec(), + 
)), + Some("counter") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|v| am::ScalarValue::counter(v as i64)), + Some("timestamp") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|v| am::ScalarValue::Timestamp(v as i64)), + /* + Some("bytes") => unimplemented!(), + Some("cursor") => unimplemented!(), + */ + Some("null") => Ok(am::ScalarValue::Null), + Some(_) => Err(format!("unknown datatype {:?}", datatype).into()), + None => { + if value.is_null() { + Ok(am::ScalarValue::Null) + } else if let Some(b) = value.as_bool() { + Ok(am::ScalarValue::Boolean(b)) + } else if let Some(s) = value.as_string() { + // FIXME - we need to detect str vs int vs float vs bool here :/ + Ok(am::ScalarValue::Str(s.into())) + } else if let Some(n) = value.as_f64() { + if (n.round() - n).abs() < f64::EPSILON { + Ok(am::ScalarValue::Int(n as i64)) + } else { + Ok(am::ScalarValue::F64(n)) + } + // } else if let Some(o) = to_objtype(&value) { + // Ok(o.into()) + } else if let Ok(d) = value.clone().dyn_into::() { + Ok(am::ScalarValue::Timestamp(d.get_time() as i64)) + } else if let Ok(o) = &value.clone().dyn_into::() { + Ok(am::ScalarValue::Bytes(o.to_vec())) + } else { + Err("value is invalid".into()) + } + } + } + } + + fn import_value(&mut self, value: JsValue, datatype: Option) -> Result { + match self.import_scalar(&value, datatype) { + Ok(val) => Ok(val.into()), + Err(err) => { + if let Some(o) = to_objtype(&value) { + Ok(o.into()) + } else { + Err(err) + } + } + } + /* + match datatype.as_deref() { + Some("boolean") => value + .as_bool() + .ok_or_else(|| "value must be a bool".into()) + .map(|v| am::ScalarValue::Boolean(v).into()), + Some("int") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|v| am::ScalarValue::Int(v as i64).into()), + Some("uint") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|v| am::ScalarValue::Uint(v as u64).into()), + Some("f64") => value + 
.as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|n| am::ScalarValue::F64(n).into()), + Some("bytes") => { + Ok(am::ScalarValue::Bytes(value.dyn_into::().unwrap().to_vec()).into()) + } + Some("counter") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|v| am::ScalarValue::counter(v as i64).into()), + Some("timestamp") => value + .as_f64() + .ok_or_else(|| "value must be a number".into()) + .map(|v| am::ScalarValue::Timestamp(v as i64).into()), + Some("null") => Ok(am::ScalarValue::Null.into()), + Some(_) => Err(format!("unknown datatype {:?}", datatype).into()), + None => { + if value.is_null() { + Ok(am::ScalarValue::Null.into()) + } else if let Some(b) = value.as_bool() { + Ok(am::ScalarValue::Boolean(b).into()) + } else if let Some(s) = value.as_string() { + // FIXME - we need to detect str vs int vs float vs bool here :/ + Ok(am::ScalarValue::Str(s.into()).into()) + } else if let Some(n) = value.as_f64() { + if (n.round() - n).abs() < f64::EPSILON { + Ok(am::ScalarValue::Int(n as i64).into()) + } else { + Ok(am::ScalarValue::F64(n).into()) + } + } else if let Some(o) = to_objtype(&value) { + Ok(o.into()) + } else if let Ok(d) = value.clone().dyn_into::() { + Ok(am::ScalarValue::Timestamp(d.get_time() as i64).into()) + } else if let Ok(o) = &value.dyn_into::() { + Ok(am::ScalarValue::Bytes(o.to_vec()).into()) + } else { + Err("value is invalid".into()) + } + } + } + */ + } +} + +#[wasm_bindgen(js_name = create)] +pub fn init(actor: Option) -> Result { + console_error_panic_hook::set_once(); + Automerge::new(actor) +} + +#[wasm_bindgen(js_name = loadDoc)] +pub fn load(data: Uint8Array, actor: Option) -> Result { + let data = data.to_vec(); + let mut automerge = am::Automerge::load(&data).map_err(to_js_err)?; + if let Some(s) = actor { + let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); + automerge.set_actor(actor) + } + Ok(Automerge(automerge)) +} + +#[wasm_bindgen(js_name = 
encodeChange)] +pub fn encode_change(change: JsValue) -> Result { + let change: am::ExpandedChange = change.into_serde().map_err(to_js_err)?; + let change: Change = change.into(); + Ok(Uint8Array::from(change.raw_bytes())) +} + +#[wasm_bindgen(js_name = decodeChange)] +pub fn decode_change(change: Uint8Array) -> Result { + let change = Change::from_bytes(change.to_vec()).map_err(to_js_err)?; + let change: am::ExpandedChange = change.decode(); + JsValue::from_serde(&change).map_err(to_js_err) +} + +#[wasm_bindgen(js_name = initSyncState)] +pub fn init_sync_state() -> SyncState { + SyncState(am::SyncState::new()) +} + +// this is needed to be compatible with the automerge-js api +#[wasm_bindgen(js_name = importSyncState)] +pub fn import_sync_state(state: JsValue) -> Result { + Ok(SyncState(JS(state).try_into()?)) +} + +// this is needed to be compatible with the automerge-js api +#[wasm_bindgen(js_name = exportSyncState)] +pub fn export_sync_state(state: SyncState) -> JsValue { + JS::from(state.0).into() +} + +#[wasm_bindgen(js_name = encodeSyncMessage)] +pub fn encode_sync_message(message: JsValue) -> Result { + let heads = js_get(&message, "heads")?.try_into()?; + let need = js_get(&message, "need")?.try_into()?; + let changes = js_get(&message, "changes")?.try_into()?; + let have = js_get(&message, "have")?.try_into()?; + Ok(Uint8Array::from( + am::SyncMessage { + heads, + need, + have, + changes, + } + .encode() + .unwrap() + .as_slice(), + )) +} + +#[wasm_bindgen(js_name = decodeSyncMessage)] +pub fn decode_sync_message(msg: Uint8Array) -> Result { + let data = msg.to_vec(); + let msg = am::SyncMessage::decode(&data).map_err(to_js_err)?; + let heads = AR::from(msg.heads.as_slice()); + let need = AR::from(msg.need.as_slice()); + let changes = AR::from(msg.changes.as_slice()); + let have = AR::from(msg.have.as_slice()); + let obj = Object::new().into(); + js_set(&obj, "heads", heads)?; + js_set(&obj, "need", need)?; + js_set(&obj, "have", have)?; + js_set(&obj, 
"changes", changes)?; + Ok(obj) +} + +#[wasm_bindgen(js_name = encodeSyncState)] +pub fn encode_sync_state(state: SyncState) -> Result { + let state = state.0; + Ok(Uint8Array::from( + state.encode().map_err(to_js_err)?.as_slice(), + )) +} + +#[wasm_bindgen(js_name = decodeSyncState)] +pub fn decode_sync_state(data: Uint8Array) -> Result { + SyncState::decode(data) +} + +#[wasm_bindgen(js_name = MAP)] +pub struct Map {} + +#[wasm_bindgen(js_name = LIST)] +pub struct List {} + +#[wasm_bindgen(js_name = TEXT)] +pub struct Text {} + +#[wasm_bindgen(js_name = TABLE)] +pub struct Table {} diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts new file mode 100644 index 00000000..e08df172 --- /dev/null +++ b/automerge-wasm/index.d.ts @@ -0,0 +1,204 @@ + +export type Actor = string; +export type ObjID = string; +export type Change = Uint8Array; +export type SyncMessage = Uint8Array; +export type Prop = string | number; +export type Hash = string; +export type Heads = Hash[]; +export type ObjectType = string; // opaque ?? 
+export type Value = string | number | boolean | Date | Uint8Array | ObjectType; +export type ObjTypeString = "map" | "list" | "text" | "table" +export type OutValue = + [Datatype.str, string] | + [Datatype.f64, number] | + [Datatype.u64, number] | + [Datatype.f64, number] | + [Datatype.boolean, boolean] | + [Datatype.timestamp, Date] | + [Datatype.counter, number] | + [Datatype.bytes, Uint8Array] | + [ObjTypeName.list, ObjID] | + [ObjTypeName.map, ObjID] | + [ObjTypeName.text, ObjID] | + [ObjTypeName.table, ObjID] | + +export const ROOT: ObjID = "_root"; + +export const LIST : ObjectType; +export const MAP : ObjectType; +export const TABLE : ObjectType; +export const TEXT : ObjectType; + +export enum ObjTypeName { + list = "list", + map = "map", + table = "table", + text = "text", +} + +export enum Datatype { + boolean = "boolean", + str = "str", + i64 = "i64", + u64 = "u64", + f64 = "f64", + timestamp = "timestamp", + counter = "counter", + bytes = "bytes", +} + +export type DecodedChange = { + message: string, + seq: number, + ops: Op[] +} + +export type Op = { + action: string, + value?: string | number | boolean, +} + +export function create(actor?: Actor): Automerge; +export function loadDoc(data: Uint8Array, actor?: Actor): Automerge; +export function encodeChange(change: DecodedChange): Change; +export function decodeChange(change: Change): DecodedChange; +export function initSyncState(): SyncState; +export function importSyncState(state: any): SyncState; // FIXME +export function exportSyncState(state: SyncState): any; +export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; +export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; +export function encodeSyncState(state: SyncState): Uint8Array; +export function decodeSyncState(data: Uint8Array): SyncState; + +export class Automerge { + set(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): ObjID | undefined; + insert(obj: ObjID, index: number, value: Value, 
datatype?: Datatype): ObjID | undefined; + push(obj: ObjID, value: Value, datatype?: Datatype): ObjID | undefined; + splice(obj: ObjID, start: number, delete_count: number, text: string | Value[] | OutValue[] ): ObjID[] | undefined; + inc(obj: ObjID, prop: Prop, value: number): void; + del(obj: ObjID, prop: Prop): void; + + // returns a single value - if there is a conflict return the winner + value(obj: ObjID, prop: any, heads?: Heads): OutValue | null; + // return all values in case of a conflict + values(obj: ObjID, arg: any, heads?: Heads): OutValue[]; + keys(obj: ObjID, heads?: Heads): string[]; + text(obj: ObjID, heads?: Heads): string; + length(obj: ObjID, heads?: Heads): number; + + commit(message?: string, time?: number): Heads; + getActorId(): Actor; + pendingOps(): number; + rollback(): number; + + // save and load to local store + save(): Uint8Array; + saveIncremental(): Uint8Array; + loadIncremental(data: Uint8Array): number; + + // sync over network + receiveSyncMessage(state: SyncState, message: SyncMessage): void; + generateSyncMessage(state: SyncState): SyncMessage; + + // low level change functions + applyChanges(changes: Change[]): void; + getChanges(have_deps: Heads): Change[]; + getChangesAdded(other: Automerge): Change[]; + getHeads(): Heads; + getLastLocalChange(): Change | undefined; + getMissingDeps(heads?: Heads): Heads; + + // memory management + free(): void; + clone(actor?: string): Automerge; + + // dump internal state to console.log + dump(): void; + + // dump internal state to a JS object + toJS(): any; +} + +export class SyncState { + free(): void; + clone(): SyncState; + lastSentHeads: any; + sentHashes: any; + readonly sharedHeads: any; +} + +export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module; + +export interface InitOutput { + readonly memory: WebAssembly.Memory; + readonly __wbg_automerge_free: (a: number) => void; + readonly automerge_new: (a: number, b: number, c: number) => void; + 
readonly automerge_clone: (a: number, b: number, c: number, d: number) => void; + readonly automerge_free: (a: number) => void; + readonly automerge_pendingOps: (a: number) => number; + readonly automerge_commit: (a: number, b: number, c: number, d: number, e: number) => number; + readonly automerge_rollback: (a: number) => number; + readonly automerge_keys: (a: number, b: number, c: number, d: number, e: number) => void; + readonly automerge_text: (a: number, b: number, c: number, d: number, e: number) => void; + readonly automerge_splice: (a: number, b: number, c: number, d: number, e: number, f: number, g: number) => void; + readonly automerge_push: (a: number, b: number, c: number, d: number, e: number, f: number, g: number) => void; + readonly automerge_insert: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => void; + readonly automerge_set: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => void; + readonly automerge_inc: (a: number, b: number, c: number, d: number, e: number, f: number) => void; + readonly automerge_value: (a: number, b: number, c: number, d: number, e: number, f: number) => void; + readonly automerge_values: (a: number, b: number, c: number, d: number, e: number, f: number) => void; + readonly automerge_length: (a: number, b: number, c: number, d: number, e: number) => void; + readonly automerge_del: (a: number, b: number, c: number, d: number, e: number) => void; + readonly automerge_save: (a: number, b: number) => void; + readonly automerge_saveIncremental: (a: number) => number; + readonly automerge_loadIncremental: (a: number, b: number, c: number) => void; + readonly automerge_applyChanges: (a: number, b: number, c: number) => void; + readonly automerge_getChanges: (a: number, b: number, c: number) => void; + readonly automerge_getChangesAdded: (a: number, b: number, c: number) => void; + readonly automerge_getHeads: (a: number) => number; + readonly 
automerge_getActorId: (a: number, b: number) => void; + readonly automerge_getLastLocalChange: (a: number, b: number) => void; + readonly automerge_dump: (a: number) => void; + readonly automerge_getMissingDeps: (a: number, b: number, c: number) => void; + readonly automerge_receiveSyncMessage: (a: number, b: number, c: number, d: number) => void; + readonly automerge_generateSyncMessage: (a: number, b: number, c: number) => void; + readonly automerge_toJS: (a: number) => number; + readonly create: (a: number, b: number, c: number) => void; + readonly loadDoc: (a: number, b: number, c: number, d: number) => void; + readonly encodeChange: (a: number, b: number) => void; + readonly decodeChange: (a: number, b: number) => void; + readonly initSyncState: () => number; + readonly importSyncState: (a: number, b: number) => void; + readonly exportSyncState: (a: number) => number; + readonly encodeSyncMessage: (a: number, b: number) => void; + readonly decodeSyncMessage: (a: number, b: number) => void; + readonly encodeSyncState: (a: number, b: number) => void; + readonly decodeSyncState: (a: number, b: number) => void; + readonly __wbg_list_free: (a: number) => void; + readonly __wbg_map_free: (a: number) => void; + readonly __wbg_text_free: (a: number) => void; + readonly __wbg_table_free: (a: number) => void; + readonly __wbg_syncstate_free: (a: number) => void; + readonly syncstate_sharedHeads: (a: number) => number; + readonly syncstate_lastSentHeads: (a: number) => number; + readonly syncstate_set_lastSentHeads: (a: number, b: number, c: number) => void; + readonly syncstate_set_sentHashes: (a: number, b: number, c: number) => void; + readonly syncstate_clone: (a: number) => number; + readonly __wbindgen_malloc: (a: number) => number; + readonly __wbindgen_realloc: (a: number, b: number, c: number) => number; + readonly __wbindgen_add_to_stack_pointer: (a: number) => number; + readonly __wbindgen_free: (a: number, b: number) => void; + readonly __wbindgen_exn_store: 
(a: number) => void; +} + +/** +* If `module_or_path` is {RequestInfo} or {URL}, makes a request and +* for everything else, calls `WebAssembly.instantiate` directly. +* +* @param {InitInput | Promise} module_or_path +* +* @returns {Promise} +*/ + +export default function init (module_or_path?: InitInput | Promise): Promise; diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index a67f957e..88ceccd6 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -17,18 +17,22 @@ ], "main": "./dev/index.js", "scripts": { - "build": "rimraf ./dev && wasm-pack build --target nodejs --dev --out-name index -d dev", + "build": "rimraf ./dev && wasm-pack build --target nodejs --dev --out-name index -d dev && cp index.d.ts dev", "release": "rimraf ./dev && wasm-pack build --target nodejs --release --out-name index -d dev && yarn opt", "pkg": "rimraf ./pkg && wasm-pack build --target web --release --out-name index -d pkg && cd pkg && yarn pack && mv automerge-wasm*tgz ..", "prof": "rimraf ./dev && wasm-pack build --target nodejs --profiling --out-name index -d dev", "opt": "wasm-opt -Oz dev/index_bg.wasm -o tmp.wasm && mv tmp.wasm dev/index_bg.wasm", - "test": "yarn build && mocha --bail --full-trace" + "test": "yarn build && ts-mocha --bail --full-trace" }, "dependencies": {}, "devDependencies": { + "@types/expect": "^24.3.0", + "@types/mocha": "^9.1.0", + "fast-sha256": "^1.3.0", "mocha": "^9.1.3", "pako": "^2.0.4", - "fast-sha256": "^1.3.0", - "rimraf": "^3.0.2" + "rimraf": "^3.0.2", + "ts-mocha": "^9.0.2", + "typescript": "^4.5.5" } } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 86564cdc..1edc1381 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -241,7 +241,15 @@ impl Automerge { None => {} } } + Ok(result) +/* fixme + if result.len() == { + Ok(JsValue::null()) + } else { + Ok(result) + } +*/ } pub fn values( @@ -299,7 +307,7 @@ impl Automerge { pub fn mark( &mut self, - obj: JsValue, 
+ obj: String, range: JsValue, name: JsValue, value: JsValue, @@ -324,7 +332,7 @@ impl Automerge { Ok(()) } - pub fn spans(&mut self, obj: JsValue) -> Result { + pub fn spans(&mut self, obj: String) -> Result { let obj = self.import(obj)?; let text = self.0.text(&obj).map_err(to_js_err)?; let spans = self.0.spans(&obj).map_err(to_js_err)?; diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts new file mode 100644 index 00000000..7498de5d --- /dev/null +++ b/automerge-wasm/test/test.ts @@ -0,0 +1,1458 @@ + +const assert = require('assert') +const util = require('util') +const { BloomFilter } = require('./helpers/sync') +const Automerge = require('..') +const { MAP, LIST, TEXT, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState }= Automerge + +// str to uint8array +function en(str) { + return new TextEncoder('utf8').encode(str) +} +// uint8array to str +function de(bytes) { + return new TextDecoder('utf8').decode(bytes); +} + +function sync(a, b, aSyncState = initSyncState(), bSyncState = initSyncState()) { + const MAX_ITER = 10 + let aToBmsg = null, bToAmsg = null, i = 0 + do { + aToBmsg = a.generateSyncMessage(aSyncState) + bToAmsg = b.generateSyncMessage(bSyncState) + + if (aToBmsg) { + b.receiveSyncMessage(bSyncState, aToBmsg) + } + if (bToAmsg) { + a.receiveSyncMessage(aSyncState, bToAmsg) + } + + if (i++ > MAX_ITER) { + throw new Error(`Did not synchronize within ${MAX_ITER} iterations`) + } + } while (aToBmsg || bToAmsg) +} + +describe('Automerge', () => { + describe('basics', () => { + it('should init clone and free', () => { + let doc1 = Automerge.create() + let doc2 = doc1.clone() + doc1.free() + doc2.free() + }) + + it('should be able to start and commit', () => { + let doc = Automerge.create() + doc.commit() + doc.free() + }) + + it('getting a nonexistant prop does not throw an error', () => { + let doc = Automerge.create() + let root = "_root" + let result = doc.value(root,"hello") + 
assert.deepEqual(result,[]) + doc.free() + }) + + it('should be able to set and get a simple value', () => { + let doc : Automerge = Automerge.create("aabbcc") + let root = "_root" + let result + + doc.set(root, "hello", "world") + doc.set(root, "number1", 5, "uint") + doc.set(root, "number2", 5) + doc.set(root, "number3", 5.5) + doc.set(root, "number4", 5.5, "f64") + doc.set(root, "number5", 5.5, "int") + doc.set(root, "bool", true) + doc.set(root, "time1", 1000, "timestamp") + doc.set(root, "time2", new Date(1001)) + doc.set(root, "list", Automerge.LIST); + + result = doc.value(root,"hello") + assert.deepEqual(result,["str","world"]) + + result = doc.value(root,"number1") + assert.deepEqual(result,["uint",5]) + + result = doc.value(root,"number2") + assert.deepEqual(result,["int",5]) + + result = doc.value(root,"number3") + assert.deepEqual(result,["f64",5.5]) + + result = doc.value(root,"number4") + assert.deepEqual(result,["f64",5.5]) + + result = doc.value(root,"number5") + assert.deepEqual(result,["int",5]) + + result = doc.value(root,"bool") + assert.deepEqual(result,["boolean",true]) + + doc.set(root, "bool", false, "boolean") + + result = doc.value(root,"bool") + assert.deepEqual(result,["boolean",false]) + + result = doc.value(root,"time1") + assert.deepEqual(result,["timestamp",new Date(1000)]) + + result = doc.value(root,"time2") + assert.deepEqual(result,["timestamp",new Date(1001)]) + + result = doc.value(root,"list") + assert.deepEqual(result,["list","10@aabbcc"]); + + doc.free() + }) + + it('should be able to use bytes', () => { + let doc = Automerge.create() + doc.set("_root","data1", new Uint8Array([10,11,12])); + doc.set("_root","data2", new Uint8Array([13,14,15]), "bytes"); + let value1 = doc.value("_root", "data1") + assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]); + let value2 = doc.value("_root", "data2") + assert.deepEqual(value2, ["bytes", new Uint8Array([13,14,15])]); + doc.free() + }) + + it('should be able to make sub 
objects', () => { + let doc = Automerge.create() + let root = "_root" + let result + + let submap = doc.set(root, "submap", MAP) + doc.set(submap, "number", 6, "uint") + assert.strictEqual(doc.pendingOps(),2) + + result = doc.value(root,"submap") + assert.deepEqual(result,["map",submap]) + + result = doc.value(submap,"number") + assert.deepEqual(result,["uint",6]) + doc.free() + }) + + it('should be able to make lists', () => { + let doc = Automerge.create() + let root = "_root" + + let submap = doc.set(root, "numbers", LIST) + doc.insert(submap, 0, "a"); + doc.insert(submap, 1, "b"); + doc.insert(submap, 2, "c"); + doc.insert(submap, 0, "z"); + + assert.deepEqual(doc.value(submap, 0),["str","z"]) + assert.deepEqual(doc.value(submap, 1),["str","a"]) + assert.deepEqual(doc.value(submap, 2),["str","b"]) + assert.deepEqual(doc.value(submap, 3),["str","c"]) + assert.deepEqual(doc.length(submap),4) + + doc.set(submap, 2, "b v2"); + + assert.deepEqual(doc.value(submap, 2),["str","b v2"]) + assert.deepEqual(doc.length(submap),4) + doc.free() + }) + + it('lists have insert, set, splice, and push ops', () => { + let doc = Automerge.create() + let root = "_root" + + let submap = doc.set(root, "letters", LIST) + doc.insert(submap, 0, "a"); + doc.insert(submap, 0, "b"); + assert.deepEqual(doc.toJS(), { letters: ["b", "a" ] }) + doc.push(submap, "c"); + assert.deepEqual(doc.toJS(), { letters: ["b", "a", "c" ] }) + doc.push(submap, 3, "timestamp"); + assert.deepEqual(doc.toJS(), { letters: ["b", "a", "c", new Date(3) ] }) + doc.splice(submap, 1, 1, ["d","e","f"]); + assert.deepEqual(doc.toJS(), { letters: ["b", "d", "e", "f", "c", new Date(3) ] }) + doc.set(submap, 0, "z"); + assert.deepEqual(doc.toJS(), { letters: ["z", "d", "e", "f", "c", new Date(3) ] }) + assert.deepEqual(doc.length(submap),6) + + doc.free() + }) + + it('should be able delete non-existant props', () => { + let doc = Automerge.create() + + doc.set("_root", "foo","bar") + doc.set("_root", "bip","bap") + let 
heads1 = doc.commit() + + assert.deepEqual(doc.keys("_root"),["bip","foo"]) + + doc.del("_root", "foo") + doc.del("_root", "baz") + let heads2 = doc.commit() + + assert.deepEqual(doc.keys("_root"),["bip"]) + assert.deepEqual(doc.keys("_root", heads1),["bip", "foo"]) + assert.deepEqual(doc.keys("_root", heads2),["bip"]) + doc.free() + }) + + it('should be able to del', () => { + let doc = Automerge.create() + let root = "_root" + + doc.set(root, "xxx", "xxx"); + assert.deepEqual(doc.value(root, "xxx"),["str","xxx"]) + doc.del(root, "xxx"); + assert.deepEqual(doc.value(root, "xxx"),[]) + doc.free() + }) + + it('should be able to use counters', () => { + let doc = Automerge.create() + let root = "_root" + + doc.set(root, "counter", 10, "counter"); + assert.deepEqual(doc.value(root, "counter"),["counter",10]) + doc.inc(root, "counter", 10); + assert.deepEqual(doc.value(root, "counter"),["counter",20]) + doc.inc(root, "counter", -5); + assert.deepEqual(doc.value(root, "counter"),["counter",15]) + doc.free() + }) + + it('should be able to splice text', () => { + let doc = Automerge.create() + let root = "_root"; + + let text = doc.set(root, "text", Automerge.TEXT); + doc.splice(text, 0, 0, "hello ") + doc.splice(text, 6, 0, ["w","o","r","l","d"]) + doc.splice(text, 11, 0, [["str","!"],["str","?"]]) + assert.deepEqual(doc.value(text, 0),["str","h"]) + assert.deepEqual(doc.value(text, 1),["str","e"]) + assert.deepEqual(doc.value(text, 9),["str","l"]) + assert.deepEqual(doc.value(text, 10),["str","d"]) + assert.deepEqual(doc.value(text, 11),["str","!"]) + assert.deepEqual(doc.value(text, 12),["str","?"]) + doc.free() + }) + + it('should be able save all or incrementally', () => { + let doc = Automerge.create() + + doc.set("_root", "foo", 1) + + let save1 = doc.save() + + doc.set("_root", "bar", 2) + + let saveMidway = doc.clone().save(); + + let save2 = doc.saveIncremental(); + + doc.set("_root", "baz", 3); + + let save3 = doc.saveIncremental(); + + let saveA = doc.save(); 
+ let saveB = new Uint8Array([... save1, ...save2, ...save3]); + + assert.notDeepEqual(saveA, saveB); + + let docA = Automerge.loadDoc(saveA); + let docB = Automerge.loadDoc(saveB); + let docC = Automerge.loadDoc(saveMidway) + docC.loadIncremental(save3) + + assert.deepEqual(docA.keys("_root"), docB.keys("_root")); + assert.deepEqual(docA.save(), docB.save()); + assert.deepEqual(docA.save(), docC.save()); + doc.free() + docA.free() + docB.free() + docC.free() + }) + + it('should be able to splice text', () => { + let doc = Automerge.create() + let text = doc.set("_root", "text", TEXT); + doc.splice(text, 0, 0, "hello world"); + let heads1 = doc.commit(); + doc.splice(text, 6, 0, "big bad "); + let heads2 = doc.commit(); + assert.strictEqual(doc.text(text), "hello big bad world") + assert.strictEqual(doc.length(text), 19) + assert.strictEqual(doc.text(text, heads1), "hello world") + assert.strictEqual(doc.length(text, heads1), 11) + assert.strictEqual(doc.text(text, heads2), "hello big bad world") + assert.strictEqual(doc.length(text, heads2), 19) + doc.free() + }) + + it('local inc increments all visible counters in a map', () => { + let doc1 = Automerge.create("aaaa") + doc1.set("_root", "hello", "world") + let doc2 = Automerge.loadDoc(doc1.save(), "bbbb"); + let doc3 = Automerge.loadDoc(doc1.save(), "cccc"); + doc1.set("_root", "cnt", 20) + doc2.set("_root", "cnt", 0, "counter") + doc3.set("_root", "cnt", 10, "counter") + doc1.applyChanges(doc2.getChanges(doc1.getHeads())) + doc1.applyChanges(doc3.getChanges(doc1.getHeads())) + let result = doc1.values("_root", "cnt") + assert.deepEqual(result,[ + ['int',20,'2@aaaa'], + ['counter',0,'2@bbbb'], + ['counter',10,'2@cccc'], + ]) + doc1.inc("_root", "cnt", 5) + result = doc1.values("_root", "cnt") + assert.deepEqual(result, [ + [ 'counter', 5, '2@bbbb' ], + [ 'counter', 15, '2@cccc' ], + ]) + + let save1 = doc1.save() + let doc4 = Automerge.loadDoc(save1) + assert.deepEqual(doc4.save(), save1); + doc1.free() + 
doc2.free() + doc3.free() + doc4.free() + }) + + it('local inc increments all visible counters in a sequence', () => { + let doc1 = Automerge.create("aaaa") + let seq = doc1.set("_root", "seq", LIST) + doc1.insert(seq, 0, "hello") + let doc2 = Automerge.loadDoc(doc1.save(), "bbbb"); + let doc3 = Automerge.loadDoc(doc1.save(), "cccc"); + doc1.set(seq, 0, 20) + doc2.set(seq, 0, 0, "counter") + doc3.set(seq, 0, 10, "counter") + doc1.applyChanges(doc2.getChanges(doc1.getHeads())) + doc1.applyChanges(doc3.getChanges(doc1.getHeads())) + let result = doc1.values(seq, 0) + assert.deepEqual(result,[ + ['int',20,'3@aaaa'], + ['counter',0,'3@bbbb'], + ['counter',10,'3@cccc'], + ]) + doc1.inc(seq, 0, 5) + result = doc1.values(seq, 0) + assert.deepEqual(result, [ + [ 'counter', 5, '3@bbbb' ], + [ 'counter', 15, '3@cccc' ], + ]) + + let save = doc1.save() + let doc4 = Automerge.loadDoc(save) + assert.deepEqual(doc4.save(), save); + doc1.free() + doc2.free() + doc3.free() + doc4.free() + }) + + it('only returns an object id when objects are created', () => { + let doc = Automerge.create("aaaa") + let r1 = doc.set("_root","foo","bar") + let r2 = doc.set("_root","list",LIST) + let r3 = doc.set("_root","counter",10, "counter") + let r4 = doc.inc("_root","counter",1) + let r5 = doc.del("_root","counter") + let r6 = doc.insert(r2,0,10); + let r7 = doc.insert(r2,0,MAP); + let r8 = doc.splice(r2,1,0,["a","b","c"]); + let r9 = doc.splice(r2,1,0,["a",LIST,MAP,"d"]); + assert.deepEqual(r1,null); + assert.deepEqual(r2,"2@aaaa"); + assert.deepEqual(r3,null); + assert.deepEqual(r4,null); + assert.deepEqual(r5,null); + assert.deepEqual(r6,null); + assert.deepEqual(r7,"7@aaaa"); + assert.deepEqual(r8,null); + assert.deepEqual(r9,["12@aaaa","13@aaaa"]); + doc.free() + }) + + it('objects without properties are preserved', () => { + let doc1 = Automerge.create("aaaa") + let a = doc1.set("_root","a",MAP); + let b = doc1.set("_root","b",MAP); + let c = doc1.set("_root","c",MAP); + let d = 
doc1.set(c,"d","dd"); + let saved = doc1.save(); + let doc2 = Automerge.loadDoc(saved); + assert.deepEqual(doc2.value("_root","a"),["map",a]) + assert.deepEqual(doc2.keys(a),[]) + assert.deepEqual(doc2.value("_root","b"),["map",b]) + assert.deepEqual(doc2.keys(b),[]) + assert.deepEqual(doc2.value("_root","c"),["map",c]) + assert.deepEqual(doc2.keys(c),["d"]) + assert.deepEqual(doc2.value(c,"d"),["str","dd"]) + doc1.free() + doc2.free() + }) + + it('should handle marks [..]', () => { + let doc = Automerge.create() + let list = doc.set("_root", "list", Automerge.TEXT) + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "[3..6]", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.insert(list, 6, "A") + doc.insert(list, 3, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaaA', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'Accc' ]); + }) + + it('should handle marks with deleted ends [..]', () => { + let doc = Automerge.create() + let list = doc.set("_root", "list", Automerge.TEXT) + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "[3..6]", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.del(list,5); + doc.del(list,5); + doc.del(list,2); + doc.del(list,2); + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) + doc.insert(list, 3, "A") + doc.insert(list, 2, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaA', [ [ 'bold', 'boolean', true ] ], 'b', [], 'Acc' ]) + }) + + it('should handle sticky marks (..)', () => { + let doc = Automerge.create() + let list = doc.set("_root", "list", Automerge.TEXT) + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "(3..6)", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true 
] ], 'bbb', [], 'ccc' ]); + doc.insert(list, 6, "A") + doc.insert(list, 3, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'AbbbA', [], 'ccc' ]); + }) + + it('should handle sticky marks with deleted ends (..)', () => { + let doc = Automerge.create() + let list = doc.set("_root", "list", Automerge.TEXT) + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "(3..6)", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.del(list,5); + doc.del(list,5); + doc.del(list,2); + doc.del(list,2); + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) + doc.insert(list, 3, "A") + doc.insert(list, 2, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) + + // make sure save/load can handle marks + + let doc2 = Automerge.loadDoc(doc.save()) + spans = doc2.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) + + assert.deepStrictEqual(doc.getHeads(), doc2.getHeads()) + assert.deepStrictEqual(doc.save(), doc2.save()) + }) + + it('should handle overlapping marks', () => { + let doc : Automerge = Automerge.create() + let list = doc.set("_root", "list", Automerge.TEXT) + doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog") + doc.mark(list, "[0..37]", "bold" , true) + doc.mark(list, "[4..19]", "itallic" , true) + doc.mark(list, "[10..13]", "comment" , "foxes are my favorite animal!") + let spans = doc.spans(list); + assert.deepStrictEqual(spans, + [ + [ [ 'bold', 'boolean', true ] ], + 'the ', + [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], + 'quick ', + [ + [ 'bold', 'boolean', true ], + [ 'comment', 'str', 'foxes are my favorite animal!' 
], + [ 'itallic', 'boolean', true ] + ], + 'fox', + [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], + ' jumps', + [ [ 'bold', 'boolean', true ] ], + ' over the lazy dog', + [], + ] + ) + + // mark sure encode decode can handle marks + + let all = doc.getChanges([]) + let decoded = all.map((c) => decodeChange(c)) + let encoded = decoded.map((c) => encodeChange(c)) + let doc2 = Automerge.create(); + doc2.applyChanges(encoded) + + assert.deepStrictEqual(doc.spans(list) , doc2.spans(list)) + assert.deepStrictEqual(doc.save(), doc2.save()) + }) + + }) + describe('sync', () => { + it('should send a sync message implying no local data', () => { + let doc = Automerge.create() + let s1 = initSyncState() + let m1 = doc.generateSyncMessage(s1) + const message = decodeSyncMessage(m1) + assert.deepStrictEqual(message.heads, []) + assert.deepStrictEqual(message.need, []) + assert.deepStrictEqual(message.have.length, 1) + assert.deepStrictEqual(message.have[0].lastSync, []) + assert.deepStrictEqual(message.have[0].bloom.byteLength, 0) + assert.deepStrictEqual(message.changes, []) + }) + + it('should not reply if we have no data as well', () => { + let n1 = Automerge.create(), n2 = Automerge.create() + let s1 = initSyncState(), s2 = initSyncState() + let m1 = n1.generateSyncMessage(s1) + n2.receiveSyncMessage(s2, m1) + let m2 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(m2, null) + }) + + it('repos with equal heads do not need a reply message', () => { + let n1 = Automerge.create(), n2 = Automerge.create() + let s1 = initSyncState(), s2 = initSyncState() + + // make two nodes with the same changes + let list = n1.set("_root","n", LIST) + n1.commit("",0) + for (let i = 0; i < 10; i++) { + n1.insert(list,i,i) + n1.commit("",0) + } + n2.applyChanges(n1.getChanges([])) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + + // generate a naive sync message + let m1 = n1.generateSyncMessage(s1) + assert.deepStrictEqual(s1.lastSentHeads, n1.getHeads()) + + // heads 
are equal so this message should be null + n2.receiveSyncMessage(s2, m1) + let m2 = n2.generateSyncMessage(s2) + assert.strictEqual(m2, null) + }) + + it('n1 should offer all changes to n2 when starting from nothing', () => { + let n1 = Automerge.create(), n2 = Automerge.create() + + // make changes for n1 that n2 should request + let list = n1.set("_root","n",LIST) + n1.commit("",0) + for (let i = 0; i < 10; i++) { + n1.insert(list, i, i) + n1.commit("",0) + } + + assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + sync(n1, n2) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should sync peers where one has commits the other does not', () => { + let n1 = Automerge.create(), n2 = Automerge.create() + + // make changes for n1 that n2 should request + let list = n1.set("_root","n",LIST) + n1.commit("",0) + for (let i = 0; i < 10; i++) { + n1.insert(list,i,i) + n1.commit("",0) + } + + assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + sync(n1, n2) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should work with prior sync state', () => { + // create & synchronize two nodes + let n1 = Automerge.create(), n2 = Automerge.create() + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 5; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + // modify the first node further + for (let i = 5; i < 10; i++) { + n1.set("_root", "x", i) + n1.commit("",0) + } + + assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should not generate messages once synced', () => { + // create & synchronize two nodes + let n1 = Automerge.create('abc123'), n2 = Automerge.create('def456') + let s1 = initSyncState(), s2 = initSyncState() + + let message, patch + for (let i = 0; i < 5; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + for (let i = 0; i < 5; i++) { + n2.set("_root","y",i) + n2.commit("",0) + } + + // n1 reports what it has + message = 
n1.generateSyncMessage(s1) + + // n2 receives that message and sends changes along with what it has + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) + //assert.deepStrictEqual(patch, null) // no changes arrived + + // n1 receives the changes and replies with the changes it now knows n2 needs + n1.receiveSyncMessage(s1, message) + message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) + + // n2 applies the changes and sends confirmation ending the exchange + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + + // n1 receives the message and has nothing more to say + n1.receiveSyncMessage(s1, message) + message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(message, null) + //assert.deepStrictEqual(patch, null) // no changes arrived + + // n2 also has nothing left to say + message = n2.generateSyncMessage(s2) + assert.deepStrictEqual(message, null) + }) + + it('should allow simultaneous messages during synchronization', () => { + // create & synchronize two nodes + let n1 = Automerge.create('abc123'), n2 = Automerge.create('def456') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 5; i++) { + n1.set("_root", "x", i) + n1.commit("",0) + } + for (let i = 0; i < 5; i++) { + n2.set("_root","y", i) + n2.commit("",0) + } + + const head1 = n1.getHeads()[0], head2 = n2.getHeads()[0] + + // both sides report what they have but have no shared peer state + let msg1to2, msg2to1 + msg1to2 = n1.generateSyncMessage(s1) + msg2to1 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) + assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0) + assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) + assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0) + + // n1 and n2 receives 
that message and update sync state but make no patch + n1.receiveSyncMessage(s1, msg2to1) + n2.receiveSyncMessage(s2, msg1to2) + + // now both reply with their local changes the other lacks + // (standard warning that 1% of the time this will result in a "need" message) + msg1to2 = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 5) + msg2to1 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) + + // both should now apply the changes and update the frontend + n1.receiveSyncMessage(s1, msg2to1) + assert.deepStrictEqual(n1.getMissingDeps(), []) + //assert.notDeepStrictEqual(patch1, null) + assert.deepStrictEqual(n1.toJS(), {x: 4, y: 4}) + + n2.receiveSyncMessage(s2, msg1to2) + assert.deepStrictEqual(n2.getMissingDeps(), []) + //assert.notDeepStrictEqual(patch2, null) + assert.deepStrictEqual(n2.toJS(), {x: 4, y: 4}) + + // The response acknowledges the changes received, and sends no further changes + msg1to2 = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) + msg2to1 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) + + // After receiving acknowledgements, their shared heads should be equal + n1.receiveSyncMessage(s1, msg2to1) + n2.receiveSyncMessage(s2, msg1to2) + assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort()) + assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort()) + //assert.deepStrictEqual(patch1, null) + //assert.deepStrictEqual(patch2, null) + + // We're in sync, no more messages required + msg1to2 = n1.generateSyncMessage(s1) + msg2to1 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(msg1to2, null) + assert.deepStrictEqual(msg2to1, null) + + // If we make one more change, and start another sync, its lastSync should be updated + n1.set("_root","x",5) + msg1to2 = n1.generateSyncMessage(s1) + 
assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()) + }) + + it('should assume sent changes were recieved until we hear otherwise', () => { + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState(), message = null + + let items = n1.set("_root", "items", LIST) + n1.commit("",0) + + sync(n1, n2, s1, s2) + + n1.push(items, "x") + n1.commit("",0) + message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + + n1.push(items, "y") + n1.commit("",0) + message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + + n1.push(items, "z") + n1.commit("",0) + + message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + }) + + it('should work regardless of who initiates the exchange', () => { + // create & synchronize two nodes + let n1 = Automerge.create(), n2 = Automerge.create() + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 5; i++) { + n1.set("_root", "x", i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + // modify the first node further + for (let i = 5; i < 10; i++) { + n1.set("_root", "x", i) + n1.commit("",0) + } + + assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should work without prior sync state', () => { + // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- c15 <-- c16 <-- c17 + // lastSync is undefined. 
+ + // create two peers both with divergent commits + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2) + + for (let i = 10; i < 15; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + for (let i = 15; i < 18; i++) { + n2.set("_root","x",i) + n2.commit("",0) + } + + assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + sync(n1, n2) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should work with prior sync state', () => { + // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- c15 <-- c16 <-- c17 + // lastSync is c9. + + // create two peers both with divergent commits + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + for (let i = 10; i < 15; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + for (let i = 15; i < 18; i++) { + n2.set("_root","x",i) + n2.commit("",0) + } + + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + + assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should ensure non-empty state after sync', () => { + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + assert.deepStrictEqual(s1.sharedHeads, n1.getHeads()) + assert.deepStrictEqual(s2.sharedHeads, n1.getHeads()) + }) + + it('should re-sync after one 
node crashed with data loss', () => { + // Scenario: (r) (n2) (n1) + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 + // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. + // we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + // n1 makes three changes, which we sync to n2 + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + // save a copy of n2 as "r" to simulate recovering from crash + let r, rSyncState + ;[r, rSyncState] = [n2.clone(), s2.clone()] + + // sync another few commits + for (let i = 3; i < 6; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + // everyone should be on the same page here + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + + // now make a few more changes, then attempt to sync the fully-up-to-date n1 with the confused r + for (let i = 6; i < 9; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + s1 = decodeSyncState(encodeSyncState(s1)) + rSyncState = decodeSyncState(encodeSyncState(rSyncState)) + + assert.notDeepStrictEqual(n1.getHeads(), r.getHeads()) + assert.notDeepStrictEqual(n1.toJS(), r.toJS()) + assert.deepStrictEqual(n1.toJS(), {x: 8}) + assert.deepStrictEqual(r.toJS(), {x: 2}) + sync(n1, r, s1, rSyncState) + assert.deepStrictEqual(n1.getHeads(), r.getHeads()) + assert.deepStrictEqual(n1.toJS(), r.toJS()) + }) + + it('should resync after one node experiences data loss without disconnecting', () => { + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + // n1 makes three changes, which we sync to n2 + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + 
assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + + let n2AfterDataLoss = Automerge.create('89abcdef') + + // "n2" now has no data, but n1 still thinks it does. Note we don't do + // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting + sync(n1, n2AfterDataLoss, s1, initSyncState()) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should handle changes concurrent to the last sync heads', () => { + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('fedcba98') + let s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() + + // Change 1 is known to all three nodes + //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) + n1.set("_root","x",1); n1.commit("",0) + + sync(n1, n2, s12, s21) + sync(n2, n3, s23, s32) + + // Change 2 is known to n1 and n2 + n1.set("_root","x",2); n1.commit("",0) + + sync(n1, n2, s12, s21) + + // Each of the three nodes makes one change (changes 3, 4, 5) + n1.set("_root","x",3); n1.commit("",0) + n2.set("_root","x",4); n2.commit("",0) + n3.set("_root","x",5); n3.commit("",0) + + // Apply n3's latest change to n2. If running in Node, turn the Uint8Array into a Buffer, to + // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) + let change = n3.getLastLocalChange() + if (typeof Buffer === 'function') change = Buffer.from(change) + n2.applyChanges([change]) + + // Now sync n1 and n2. 
n3's change is concurrent to n1 and n2's last sync heads + sync(n1, n2, s12, s21) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should handle histories with lots of branching and merging', () => { + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('fedcba98') + n1.set("_root","x",0); n1.commit("",0) + n2.applyChanges([n1.getLastLocalChange()]) + n3.applyChanges([n1.getLastLocalChange()]) + n3.set("_root","x",1); n3.commit("",0) + + // - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 + // / \/ \/ \/ + // / /\ /\ /\ + // c0 <---- n2c1 <------ n2c2 <------ n2c3 <-- etc. <-- n2c20 <------ n2c21 + // \ / + // ---------------------------------------------- n3c1 <----- + for (let i = 1; i < 20; i++) { + n1.set("_root","n1",i); n1.commit("",0) + n2.set("_root","n2",i); n2.commit("",0) + const change1 = n1.getLastLocalChange() + const change2 = n2.getLastLocalChange() + n1.applyChanges([change2]) + n2.applyChanges([change1]) + } + + let s1 = initSyncState(), s2 = initSyncState() + sync(n1, n2, s1, s2) + + // Having n3's last change concurrent to the last sync heads forces us into the slower code path + n2.applyChanges([n3.getLastLocalChange()]) + n1.set("_root","n1","final"); n1.commit("",0) + n2.set("_root","n2","final"); n2.commit("",0) + + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.toJS(), n2.toJS()) + }) + + it('should handle a false-positive head', () => { + // Scenario: ,-- n1 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- n2 + // where n2 is a false positive in the Bloom filter containing {n1}. + // lastSync is c9. 
+ let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + sync(n1, n2, s1, s2) + for (let i = 1; ; i++) { // search for false positive; see comment above + const n1up = n1.clone('01234567'); + n1up.set("_root","x",`${i} @ n1`); n1up.commit("",0) + const n2up = n2.clone('89abcdef'); + n2up.set("_root","x",`${i} @ n2`); n2up.commit("",0) + if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { + n1.free(); n2.free() + n1 = n1up; n2 = n2up; break + } + } + const allHeads = [...n1.getHeads(), ...n2.getHeads()].sort() + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), allHeads) + assert.deepStrictEqual(n2.getHeads(), allHeads) + }) + + + describe('with a false-positive dependency', () => { + let n1, n2, s1, s2, n1hash2, n2hash2 + + beforeEach(() => { + // Scenario: ,-- n1c1 <-- n1c2 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- n2c1 <-- n2c2 + // where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}. + // lastSync is c9. 
+ n1 = Automerge.create('01234567') + n2 = Automerge.create('89abcdef') + s1 = initSyncState() + s2 = initSyncState() + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + sync(n1, n2, s1, s2) + + let n1hash1, n2hash1 + for (let i = 29; ; i++) { // search for false positive; see comment above + const n1us1 = n1.clone('01234567') + n1us1.set("_root","x",`${i} @ n1`); n1us1.commit("",0) + + const n2us1 = n2.clone('89abcdef') + n2us1.set("_root","x",`${i} @ n1`); n2us1.commit("",0) + + n1hash1 = n1us1.getHeads()[0]; n2hash1 = n2us1.getHeads()[0] + + const n1us2 = n1us1.clone(); + n1us2.set("_root","x",`final @ n1`); n1us2.commit("",0) + + const n2us2 = n2us1.clone(); + n2us2.set("_root","x",`final @ n2`); n2us2.commit("",0) + + n1hash2 = n1us2.getHeads()[0]; n2hash2 = n2us2.getHeads()[0] + if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) { + n1.free(); n2.free() + n1 = n1us2; n2 = n2us2; break + } + } + }) + + it('should sync two nodes without connection reset', () => { + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), [n1hash2, n2hash2].sort()) + assert.deepStrictEqual(n2.getHeads(), [n1hash2, n2hash2].sort()) + }) + + it('should sync two nodes with connection reset', () => { + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), [n1hash2, n2hash2].sort()) + assert.deepStrictEqual(n2.getHeads(), [n1hash2, n2hash2].sort()) + }) + + it('should sync three nodes', () => { + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + + // First n1 and n2 exchange Bloom filters + let m1, m2 + m1 = n1.generateSyncMessage(s1) + m2 = n2.generateSyncMessage(s2) + n1.receiveSyncMessage(s1, m2) + n2.receiveSyncMessage(s2, m1) + + // Then n1 and n2 send each other their changes, except for the false positive + m1 = n1.generateSyncMessage(s1) + m2 = n2.generateSyncMessage(s2) + n1.receiveSyncMessage(s1, m2) 
+ n2.receiveSyncMessage(s2, m1) + assert.strictEqual(decodeSyncMessage(m1).changes.length, 2) // n1c1 and n1c2 + assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent + + // n3 is a node that doesn't have the missing change. Nevertheless n1 is going to ask n3 for it + let n3 = Automerge.create('fedcba98'), s13 = initSyncState(), s31 = initSyncState() + sync(n1, n3, s13, s31) + assert.deepStrictEqual(n1.getHeads(), [n1hash2]) + assert.deepStrictEqual(n3.getHeads(), [n1hash2]) + }) + }) + + it('should not require an additional request when a false-positive depends on a true-negative', () => { + // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ + // `-- n2c1 <-- n2c2 <-- n2c3 + // where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}. + // lastSync is c4. + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let n1hash3, n2hash3 + + for (let i = 0; i < 5; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + sync(n1, n2, s1, s2) + for (let i = 86; ; i++) { // search for false positive; see comment above + const n1us1 = n1.clone('01234567') + n1us1.set("_root","x",`${i} @ n1`); n1us1.commit("",0) + + const n2us1 = n2.clone('89abcdef') + n2us1.set("_root","x",`${i} @ n2`); n2us1.commit("",0) + + //const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) + //const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + const n1hash1 = n1us1.getHeads()[0] + + const n1us2 = n1us1.clone() + n1us2.set("_root","x",`${i + 1} @ n1`); n1us2.commit("",0) + + const n2us2 = n2us1.clone() + n2us2.set("_root","x",`${i + 1} @ n2`); n2us2.commit("",0) + + const n1hash2 = n1us2.getHeads()[0], n2hash2 = n2us2.getHeads()[0] + + const n1us3 = n1us2.clone() + n1us3.set("_root","x",`final @ n1`); 
n1us3.commit("",0) + + const n2us3 = n2us2.clone() + n2us3.set("_root","x",`final @ n2`); n2us3.commit("",0) + + n1hash3 = n1us3.getHeads()[0]; n2hash3 = n2us3.getHeads()[0] + + if (new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2)) { + n1.free(); n2.free(); + n1 = n1us3; n2 = n2us3; break + } + } + const bothHeads = [n1hash3, n2hash3].sort() + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), bothHeads) + assert.deepStrictEqual(n2.getHeads(), bothHeads) + }) + + it('should handle chains of false-positives', () => { + // Scenario: ,-- c5 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ + // `-- n2c1 <-- n2c2 <-- n2c3 + // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. + // lastSync is c4. + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 5; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + n1.set("_root","x",5); n1.commit("",0) + + for (let i = 2; ; i++) { // search for false positive; see comment above + const n2us1 = n2.clone('89abcdef') + n2us1.set("_root","x",`${i} @ n2`); n2us1.commit("",0) + if (new BloomFilter(n1.getHeads()).containsHash(n2us1.getHeads()[0])) { + n2 = n2us1; break + } + } + for (let i = 141; ; i++) { // search for false positive; see comment above + const n2us2 = n2.clone('89abcdef') + n2us2.set("_root","x",`${i} again`); n2us2.commit("",0) + if (new BloomFilter(n1.getHeads()).containsHash(n2us2.getHeads()[0])) { + n2 = n2us2; break + } + } + n2.set("_root","x",`final @ n2`); n2.commit("",0) + + const allHeads = [...n1.getHeads(), ...n2.getHeads()].sort() + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), allHeads) + assert.deepStrictEqual(n2.getHeads(), allHeads) + }) + + 
it('should allow the false-positive hash to be explicitly requested', () => { + // Scenario: ,-- n1 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- n2 + // where n2 causes a false positive in the Bloom filter containing {n1}. + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let message + + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + + for (let i = 1; ; i++) { // brute-force search for false positive; see comment above + const n1up = n1.clone('01234567'); n1up.set("_root","x",`${i} @ n1`); n1up.commit("",0) + const n2up = n1.clone('89abcdef'); n2up.set("_root","x",`${i} @ n2`); n2up.commit("",0) + + // check if the bloom filter on n2 will believe n1 already has a particular hash + // this will mean n2 won't offer that data to n2 by receiving a sync message from n1 + if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { + n1 = n1up; n2 = n2up; break + } + } + + // n1 creates a sync message for n2 with an ill-fated bloom + message = n1.generateSyncMessage(s1) + assert.strictEqual(decodeSyncMessage(message).changes.length, 0) + + // n2 receives it and DOESN'T send a change back + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + assert.strictEqual(decodeSyncMessage(message).changes.length, 0) + + // n1 should now realize it's missing that change and request it explicitly + n1.receiveSyncMessage(s1, message) + message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(decodeSyncMessage(message).need, n2.getHeads()) + + // n2 should fulfill that request + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + assert.strictEqual(decodeSyncMessage(message).changes.length, 1) + + // n1 should apply the change and the two should now be in sync + 
n1.receiveSyncMessage(s1, message) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + }) + + describe('protocol features', () => { + it('should allow multiple Bloom filters', () => { + // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 + // c0 <-- c1 <-- c2 <-+--- n2c1 <-- n2c2 <-- n2c3 + // `-- n3c1 <-- n3c2 <-- n3c3 + // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; + // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; + // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('76543210') + let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState() + let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() + let message1, message2, message3 + + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + // sync all 3 nodes + sync(n1, n2, s12, s21) // eslint-disable-line no-unused-vars -- kept for consistency + sync(n1, n3, s13, s31) + sync(n3, n2, s32, s23) + for (let i = 0; i < 2; i++) { + n1.set("_root","x",`${i} @ n1`); n1.commit("",0) + } + for (let i = 0; i < 2; i++) { + n2.set("_root","x",`${i} @ n2`); n2.commit("",0) + } + n1.applyChanges(n2.getChanges([])) + n2.applyChanges(n1.getChanges([])) + n1.set("_root","x",`3 @ n1`); n1.commit("",0) + n2.set("_root","x",`3 @ n2`); n2.commit("",0) + + for (let i = 0; i < 3; i++) { + n3.set("_root","x",`${i} @ n3`); n3.commit("",0) + } + const n1c3 = n1.getHeads()[0], n2c3 = n2.getHeads()[0], n3c3 = n3.getHeads()[0] + s13 = decodeSyncState(encodeSyncState(s13)) + s31 = decodeSyncState(encodeSyncState(s31)) + s23 = decodeSyncState(encodeSyncState(s23)) + s32 = decodeSyncState(encodeSyncState(s32)) + + + // Now n3 concurrently syncs with n1 and n2. Doing this naively would result in n3 receiving + // changes {n1c1, n1c2, n2c1, n2c2} twice (those are the changes that both n1 and n2 have, but + // that n3 does not have). We want to prevent this duplication. 
+ message1 = n1.generateSyncMessage(s13) // message from n1 to n3 + assert.strictEqual(decodeSyncMessage(message1).changes.length, 0) + n3.receiveSyncMessage(s31, message1) + message3 = n3.generateSyncMessage(s31) // message from n3 to n1 + assert.strictEqual(decodeSyncMessage(message3).changes.length, 3) // {n3c1, n3c2, n3c3} + n1.receiveSyncMessage(s13, message3) + + // Copy the Bloom filter received from n1 into the message sent from n3 to n2. This Bloom + // filter indicates what changes n3 is going to receive from n1. + message3 = n3.generateSyncMessage(s32) // message from n3 to n2 + const modifiedMessage = decodeSyncMessage(message3) + modifiedMessage.have.push(decodeSyncMessage(message1).have[0]) + assert.strictEqual(modifiedMessage.changes.length, 0) + n2.receiveSyncMessage(s23, Automerge.encodeSyncMessage(modifiedMessage)) + + // n2 replies to n3, sending only n2c3 (the one change that n2 has but n1 doesn't) + message2 = n2.generateSyncMessage(s23) + assert.strictEqual(decodeSyncMessage(message2).changes.length, 1) // {n2c3} + n3.receiveSyncMessage(s32, message2) + + // n1 replies to n3 + message1 = n1.generateSyncMessage(s13) + assert.strictEqual(decodeSyncMessage(message1).changes.length, 5) // {n1c1, n1c2, n1c3, n2c1, n2c2} + n3.receiveSyncMessage(s31, message1) + assert.deepStrictEqual(n3.getHeads(), [n1c3, n2c3, n3c3].sort()) + }) + + it('should allow any change to be requested', () => { + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let message = null + + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + const lastSync = n1.getHeads() + + for (let i = 3; i < 6; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + sync(n1, n2, s1, s2) + s1.lastSentHeads = [] // force generateSyncMessage to return a message even though nothing changed + message = n1.generateSyncMessage(s1) + const modMsg = decodeSyncMessage(message) + modMsg.need = lastSync 
// re-request change 2 + n2.receiveSyncMessage(s2, Automerge.encodeSyncMessage(modMsg)) + message = n2.generateSyncMessage(s2) + assert.strictEqual(decodeSyncMessage(message).changes.length, 1) + assert.strictEqual(Automerge.decodeChange(decodeSyncMessage(message).changes[0]).hash, lastSync[0]) + }) + + it('should ignore requests for a nonexistent change', () => { + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let message = null + + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + n2.applyChanges(n1.getChanges([])) + message = n1.generateSyncMessage(s1) + message.need = ['0000000000000000000000000000000000000000000000000000000000000000'] + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + assert.strictEqual(message, null) + }) + + it('should allow a subset of changes to be sent', () => { + // ,-- c1 <-- c2 + // c0 <-+ + // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 + let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('76543210') + let s1 = initSyncState(), s2 = initSyncState() + let msg, decodedMsg + + n1.set("_root","x",0); n1.commit("",0) + n3.applyChanges(n3.getChangesAdded(n1)) // merge() + for (let i = 1; i <= 2; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + for (let i = 3; i <= 4; i++) { + n3.set("_root","x",i); n3.commit("",0) + } + const c2 = n1.getHeads()[0], c4 = n3.getHeads()[0] + n2.applyChanges(n2.getChangesAdded(n3)) // merge() + + // Sync n1 and n2, so their shared heads are {c2, c4} + sync(n1, n2, s1, s2) + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + assert.deepStrictEqual(s1.sharedHeads, [c2, c4].sort()) + assert.deepStrictEqual(s2.sharedHeads, [c2, c4].sort()) + + // n2 and n3 apply {c5, c6, c7, c8} + n3.set("_root","x",5); n3.commit("",0) + const change5 = n3.getLastLocalChange() + n3.set("_root","x",6); n3.commit("",0) 
+ const change6 = n3.getLastLocalChange(n3), c6 = n3.getHeads()[0] + for (let i = 7; i <= 8; i++) { + n3.set("_root","x",i); n3.commit("",0) + } + const c8 = n3.getHeads()[0] + n2.applyChanges(n2.getChangesAdded(n3)) // merge() + + // Now n1 initiates a sync with n2, and n2 replies with {c5, c6}. n2 does not send {c7, c8} + msg = n1.generateSyncMessage(s1) + n2.receiveSyncMessage(s2, msg) + msg = n2.generateSyncMessage(s2) + decodedMsg = decodeSyncMessage(msg) + decodedMsg.changes = [change5, change6] + msg = Automerge.encodeSyncMessage(decodedMsg) + const sentHashes = {} + sentHashes[Automerge.decodeChange(change5, true).hash] = true + sentHashes[Automerge.decodeChange(change6, true).hash] = true + s2.sentHashes = sentHashes + n1.receiveSyncMessage(s1, msg) + assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) + + // n1 replies, confirming the receipt of {c5, c6} and requesting the remaining changes + msg = n1.generateSyncMessage(s1) + n2.receiveSyncMessage(s2, msg) + assert.deepStrictEqual(decodeSyncMessage(msg).need, [c8]) + assert.deepStrictEqual(decodeSyncMessage(msg).have[0].lastSync, [c2, c6].sort()) + assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) + assert.deepStrictEqual(s2.sharedHeads, [c2, c6].sort()) + + // n2 sends the remaining changes {c7, c8} + msg = n2.generateSyncMessage(s2) + n1.receiveSyncMessage(s1, msg) + assert.strictEqual(decodeSyncMessage(msg).changes.length, 2) + assert.deepStrictEqual(s1.sharedHeads, [c2, c8].sort()) + }) + }) + }) +}) diff --git a/automerge-wasm/tsconfig.json b/automerge-wasm/tsconfig.json new file mode 100644 index 00000000..1dc480a4 --- /dev/null +++ b/automerge-wasm/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "noImplicitAny": true, + "strict": true, + "allowJs": false, + "baseUrl": ".", + "esModuleInterop": true, + "lib": ["dom", "esnext.asynciterable", "es2017", "es2016", "es2015"], + "module": "commonjs", + "moduleResolution": "node", + "paths": { "dev": ["*"]}, + "rootDir": "", + 
"target": "es2016", + "typeRoots": ["./dev/index.d.ts"] + }, + "exclude": ["dist/**/*"] +} From 831faa2589b335c7f42c7b93078ddbda2ae4d372 Mon Sep 17 00:00:00 2001 From: Karissa McKelvey <633012+okdistribute@users.noreply.github.com> Date: Mon, 31 Jan 2022 12:48:49 -0800 Subject: [PATCH 050/730] uint datatypes & fix some more typescript errors --- automerge-wasm/index.d.ts | 11 +- automerge-wasm/package.json | 4 +- automerge-wasm/test/test.js | 1454 ---------------------------------- automerge-wasm/test/test.ts | 178 +++-- automerge-wasm/tsconfig.json | 2 +- 5 files changed, 103 insertions(+), 1546 deletions(-) delete mode 100644 automerge-wasm/test/test.js diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index e08df172..01b061a8 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -11,6 +11,7 @@ export type Value = string | number | boolean | Date | Uint8Array | ObjectType; export type ObjTypeString = "map" | "list" | "text" | "table" export type OutValue = [Datatype.str, string] | + [Datatype.uint, number] | [Datatype.f64, number] | [Datatype.u64, number] | [Datatype.f64, number] | @@ -21,9 +22,9 @@ export type OutValue = [ObjTypeName.list, ObjID] | [ObjTypeName.map, ObjID] | [ObjTypeName.text, ObjID] | - [ObjTypeName.table, ObjID] | + [ObjTypeName.table, ObjID] -export const ROOT: ObjID = "_root"; +export type ROOT = "_root"; export const LIST : ObjectType; export const MAP : ObjectType; @@ -41,13 +42,19 @@ export enum Datatype { boolean = "boolean", str = "str", i64 = "i64", + uint = "uint", u64 = "u64", f64 = "f64", + int = "int", timestamp = "timestamp", counter = "counter", bytes = "bytes", } +export type DecodedSyncMessage = { + +} + export type DecodedChange = { message: string, seq: number, diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 88ceccd6..a1577790 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -22,12 +22,14 @@ "pkg": "rimraf ./pkg && wasm-pack build 
--target web --release --out-name index -d pkg && cd pkg && yarn pack && mv automerge-wasm*tgz ..", "prof": "rimraf ./dev && wasm-pack build --target nodejs --profiling --out-name index -d dev", "opt": "wasm-opt -Oz dev/index_bg.wasm -o tmp.wasm && mv tmp.wasm dev/index_bg.wasm", - "test": "yarn build && ts-mocha --bail --full-trace" + "test": "yarn build && ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, "dependencies": {}, "devDependencies": { "@types/expect": "^24.3.0", + "@types/jest": "^27.4.0", "@types/mocha": "^9.1.0", + "@types/node": "^17.0.13", "fast-sha256": "^1.3.0", "mocha": "^9.1.3", "pako": "^2.0.4", diff --git a/automerge-wasm/test/test.js b/automerge-wasm/test/test.js deleted file mode 100644 index 38d68eba..00000000 --- a/automerge-wasm/test/test.js +++ /dev/null @@ -1,1454 +0,0 @@ - -const assert = require('assert') -const util = require('util') -const { BloomFilter } = require('./helpers/sync') -const Automerge = require('..') -const { MAP, LIST, TEXT, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState }= Automerge - -// str to uint8array -function en(str) { - return new TextEncoder('utf8').encode(str) -} -// uint8array to str -function de(bytes) { - return new TextDecoder('utf8').decode(bytes); -} - -function sync(a, b, aSyncState = initSyncState(), bSyncState = initSyncState()) { - const MAX_ITER = 10 - let aToBmsg = null, bToAmsg = null, i = 0 - do { - aToBmsg = a.generateSyncMessage(aSyncState) - bToAmsg = b.generateSyncMessage(bSyncState) - - if (aToBmsg) { - b.receiveSyncMessage(bSyncState, aToBmsg) - } - if (bToAmsg) { - a.receiveSyncMessage(aSyncState, bToAmsg) - } - - if (i++ > MAX_ITER) { - throw new Error(`Did not synchronize within ${MAX_ITER} iterations`) - } - } while (aToBmsg || bToAmsg) -} - -describe('Automerge', () => { - describe('basics', () => { - it('should init clone and free', () => { - let doc1 = Automerge.create() - let doc2 = doc1.clone() - 
doc1.free() - doc2.free() - }) - - it('should be able to start and commit', () => { - let doc = Automerge.create() - doc.commit() - doc.free() - }) - - it('getting a nonexistant prop does not throw an error', () => { - let doc = Automerge.create() - let root = "_root" - let result = doc.value(root,"hello") - assert.deepEqual(result,[]) - doc.free() - }) - - it('should be able to set and get a simple value', () => { - let doc = Automerge.create() - let root = "_root" - let result - - doc.set(root, "hello", "world") - doc.set(root, "number1", 5, "uint") - doc.set(root, "number2", 5) - doc.set(root, "number3", 5.5) - doc.set(root, "number4", 5.5, "f64") - doc.set(root, "number5", 5.5, "int") - doc.set(root, "bool", true) - doc.set(root, "time1", 1000, "timestamp") - doc.set(root, "time2", new Date(1001)) - - result = doc.value(root,"hello") - assert.deepEqual(result,["str","world"]) - - result = doc.value(root,"number1") - assert.deepEqual(result,["uint",5]) - - result = doc.value(root,"number2") - assert.deepEqual(result,["int",5]) - - result = doc.value(root,"number3") - assert.deepEqual(result,["f64",5.5]) - - result = doc.value(root,"number4") - assert.deepEqual(result,["f64",5.5]) - - result = doc.value(root,"number5") - assert.deepEqual(result,["int",5]) - - result = doc.value(root,"bool") - assert.deepEqual(result,["boolean",true]) - - doc.set(root, "bool", false, "boolean") - - result = doc.value(root,"bool") - assert.deepEqual(result,["boolean",false]) - - result = doc.value(root,"time1") - assert.deepEqual(result,["timestamp",new Date(1000)]) - - result = doc.value(root,"time2") - assert.deepEqual(result,["timestamp",new Date(1001)]) - - doc.free() - }) - - it('should be able to use bytes', () => { - let doc = Automerge.create() - doc.set("_root","data1", new Uint8Array([10,11,12])); - doc.set("_root","data2", new Uint8Array([13,14,15]), "bytes"); - let value1 = doc.value("_root", "data1") - assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]); - 
let value2 = doc.value("_root", "data2") - assert.deepEqual(value2, ["bytes", new Uint8Array([13,14,15])]); - doc.free() - }) - - it('should be able to make sub objects', () => { - let doc = Automerge.create() - let root = "_root" - let result - - let submap = doc.set(root, "submap", MAP) - doc.set(submap, "number", 6, "uint") - assert.strictEqual(doc.pendingOps(),2) - - result = doc.value(root,"submap") - assert.deepEqual(result,["map",submap]) - - result = doc.value(submap,"number") - assert.deepEqual(result,["uint",6]) - doc.free() - }) - - it('should be able to make lists', () => { - let doc = Automerge.create() - let root = "_root" - - let submap = doc.set(root, "numbers", LIST) - doc.insert(submap, 0, "a"); - doc.insert(submap, 1, "b"); - doc.insert(submap, 2, "c"); - doc.insert(submap, 0, "z"); - - assert.deepEqual(doc.value(submap, 0),["str","z"]) - assert.deepEqual(doc.value(submap, 1),["str","a"]) - assert.deepEqual(doc.value(submap, 2),["str","b"]) - assert.deepEqual(doc.value(submap, 3),["str","c"]) - assert.deepEqual(doc.length(submap),4) - - doc.set(submap, 2, "b v2"); - - assert.deepEqual(doc.value(submap, 2),["str","b v2"]) - assert.deepEqual(doc.length(submap),4) - doc.free() - }) - - it('lists have insert, set, splice, and push ops', () => { - let doc = Automerge.create() - let root = "_root" - - let submap = doc.set(root, "letters", LIST) - doc.insert(submap, 0, "a"); - doc.insert(submap, 0, "b"); - assert.deepEqual(doc.toJS(), { letters: ["b", "a" ] }) - doc.push(submap, "c"); - assert.deepEqual(doc.toJS(), { letters: ["b", "a", "c" ] }) - doc.push(submap, 3, "timestamp"); - assert.deepEqual(doc.toJS(), { letters: ["b", "a", "c", new Date(3) ] }) - doc.splice(submap, 1, 1, ["d","e","f"]); - assert.deepEqual(doc.toJS(), { letters: ["b", "d", "e", "f", "c", new Date(3) ] }) - doc.set(submap, 0, "z"); - assert.deepEqual(doc.toJS(), { letters: ["z", "d", "e", "f", "c", new Date(3) ] }) - assert.deepEqual(doc.length(submap),6) - - doc.free() - }) - - 
it('should be able delete non-existant props', () => { - let doc = Automerge.create() - - doc.set("_root", "foo","bar") - doc.set("_root", "bip","bap") - let heads1 = doc.commit() - - assert.deepEqual(doc.keys("_root"),["bip","foo"]) - - doc.del("_root", "foo") - doc.del("_root", "baz") - let heads2 = doc.commit() - - assert.deepEqual(doc.keys("_root"),["bip"]) - assert.deepEqual(doc.keys("_root", heads1),["bip", "foo"]) - assert.deepEqual(doc.keys("_root", heads2),["bip"]) - doc.free() - }) - - it('should be able to del', () => { - let doc = Automerge.create() - let root = "_root" - - doc.set(root, "xxx", "xxx"); - assert.deepEqual(doc.value(root, "xxx"),["str","xxx"]) - doc.del(root, "xxx"); - assert.deepEqual(doc.value(root, "xxx"),[]) - doc.free() - }) - - it('should be able to use counters', () => { - let doc = Automerge.create() - let root = "_root" - - doc.set(root, "counter", 10, "counter"); - assert.deepEqual(doc.value(root, "counter"),["counter",10]) - doc.inc(root, "counter", 10); - assert.deepEqual(doc.value(root, "counter"),["counter",20]) - doc.inc(root, "counter", -5); - assert.deepEqual(doc.value(root, "counter"),["counter",15]) - doc.free() - }) - - it('should be able to splice text', () => { - let doc = Automerge.create() - let root = "_root"; - - let text = doc.set(root, "text", Automerge.TEXT); - doc.splice(text, 0, 0, "hello ") - doc.splice(text, 6, 0, ["w","o","r","l","d"]) - doc.splice(text, 11, 0, [["str","!"],["str","?"]]) - assert.deepEqual(doc.value(text, 0),["str","h"]) - assert.deepEqual(doc.value(text, 1),["str","e"]) - assert.deepEqual(doc.value(text, 9),["str","l"]) - assert.deepEqual(doc.value(text, 10),["str","d"]) - assert.deepEqual(doc.value(text, 11),["str","!"]) - assert.deepEqual(doc.value(text, 12),["str","?"]) - doc.free() - }) - - it('should be able save all or incrementally', () => { - let doc = Automerge.create() - - doc.set("_root", "foo", 1) - - let save1 = doc.save() - - doc.set("_root", "bar", 2) - - let saveMidway = 
doc.clone().save(); - - let save2 = doc.saveIncremental(); - - doc.set("_root", "baz", 3); - - let save3 = doc.saveIncremental(); - - let saveA = doc.save(); - let saveB = new Uint8Array([... save1, ...save2, ...save3]); - - assert.notDeepEqual(saveA, saveB); - - let docA = Automerge.loadDoc(saveA); - let docB = Automerge.loadDoc(saveB); - let docC = Automerge.loadDoc(saveMidway) - docC.loadIncremental(save3) - - assert.deepEqual(docA.keys("_root"), docB.keys("_root")); - assert.deepEqual(docA.save(), docB.save()); - assert.deepEqual(docA.save(), docC.save()); - doc.free() - docA.free() - docB.free() - docC.free() - }) - - it('should be able to splice text', () => { - let doc = Automerge.create() - let text = doc.set("_root", "text", TEXT); - doc.splice(text, 0, 0, "hello world"); - let heads1 = doc.commit(); - doc.splice(text, 6, 0, "big bad "); - let heads2 = doc.commit(); - assert.strictEqual(doc.text(text), "hello big bad world") - assert.strictEqual(doc.length(text), 19) - assert.strictEqual(doc.text(text, heads1), "hello world") - assert.strictEqual(doc.length(text, heads1), 11) - assert.strictEqual(doc.text(text, heads2), "hello big bad world") - assert.strictEqual(doc.length(text, heads2), 19) - doc.free() - }) - - it('local inc increments all visible counters in a map', () => { - let doc1 = Automerge.create("aaaa") - doc1.set("_root", "hello", "world") - let doc2 = Automerge.loadDoc(doc1.save(), "bbbb"); - let doc3 = Automerge.loadDoc(doc1.save(), "cccc"); - doc1.set("_root", "cnt", 20) - doc2.set("_root", "cnt", 0, "counter") - doc3.set("_root", "cnt", 10, "counter") - doc1.applyChanges(doc2.getChanges(doc1.getHeads())) - doc1.applyChanges(doc3.getChanges(doc1.getHeads())) - let result = doc1.values("_root", "cnt") - assert.deepEqual(result,[ - ['int',20,'2@aaaa'], - ['counter',0,'2@bbbb'], - ['counter',10,'2@cccc'], - ]) - doc1.inc("_root", "cnt", 5) - result = doc1.values("_root", "cnt") - assert.deepEqual(result, [ - [ 'counter', 5, '2@bbbb' ], - [ 
'counter', 15, '2@cccc' ], - ]) - - let save1 = doc1.save() - let doc4 = Automerge.loadDoc(save1) - assert.deepEqual(doc4.save(), save1); - doc1.free() - doc2.free() - doc3.free() - doc4.free() - }) - - it('local inc increments all visible counters in a sequence', () => { - let doc1 = Automerge.create("aaaa") - let seq = doc1.set("_root", "seq", LIST) - doc1.insert(seq, 0, "hello") - let doc2 = Automerge.loadDoc(doc1.save(), "bbbb"); - let doc3 = Automerge.loadDoc(doc1.save(), "cccc"); - doc1.set(seq, 0, 20) - doc2.set(seq, 0, 0, "counter") - doc3.set(seq, 0, 10, "counter") - doc1.applyChanges(doc2.getChanges(doc1.getHeads())) - doc1.applyChanges(doc3.getChanges(doc1.getHeads())) - let result = doc1.values(seq, 0) - assert.deepEqual(result,[ - ['int',20,'3@aaaa'], - ['counter',0,'3@bbbb'], - ['counter',10,'3@cccc'], - ]) - doc1.inc(seq, 0, 5) - result = doc1.values(seq, 0) - assert.deepEqual(result, [ - [ 'counter', 5, '3@bbbb' ], - [ 'counter', 15, '3@cccc' ], - ]) - - let save = doc1.save() - let doc4 = Automerge.loadDoc(save) - assert.deepEqual(doc4.save(), save); - doc1.free() - doc2.free() - doc3.free() - doc4.free() - }) - - it('only returns an object id when objects are created', () => { - let doc = Automerge.create("aaaa") - let r1 = doc.set("_root","foo","bar") - let r2 = doc.set("_root","list",LIST) - let r3 = doc.set("_root","counter",10, "counter") - let r4 = doc.inc("_root","counter",1) - let r5 = doc.del("_root","counter") - let r6 = doc.insert(r2,0,10); - let r7 = doc.insert(r2,0,MAP); - let r8 = doc.splice(r2,1,0,["a","b","c"]); - let r9 = doc.splice(r2,1,0,["a",LIST,MAP,"d"]); - assert.deepEqual(r1,null); - assert.deepEqual(r2,"2@aaaa"); - assert.deepEqual(r3,null); - assert.deepEqual(r4,null); - assert.deepEqual(r5,null); - assert.deepEqual(r6,null); - assert.deepEqual(r7,"7@aaaa"); - assert.deepEqual(r8,null); - assert.deepEqual(r9,["12@aaaa","13@aaaa"]); - doc.free() - }) - - it('objects without properties are preserved', () => { - let doc1 = 
Automerge.create("aaaa") - let a = doc1.set("_root","a",MAP); - let b = doc1.set("_root","b",MAP); - let c = doc1.set("_root","c",MAP); - let d = doc1.set(c,"d","dd"); - let saved = doc1.save(); - let doc2 = Automerge.loadDoc(saved); - assert.deepEqual(doc2.value("_root","a"),["map",a]) - assert.deepEqual(doc2.keys(a),[]) - assert.deepEqual(doc2.value("_root","b"),["map",b]) - assert.deepEqual(doc2.keys(b),[]) - assert.deepEqual(doc2.value("_root","c"),["map",c]) - assert.deepEqual(doc2.keys(c),["d"]) - assert.deepEqual(doc2.value(c,"d"),["str","dd"]) - doc1.free() - doc2.free() - }) - - it.only('should handle marks [..]', () => { - let doc = Automerge.init() - let list = doc.set("_root", "list", Automerge.TEXT) - doc.splice(list, 0, 0, "aaabbbccc") - doc.mark(list, "[3..6]", "bold" , true) - let spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); - doc.insert(list, 6, "A") - doc.insert(list, 3, "A") - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaaA', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'Accc' ]); - }) - - it.only('should handle marks with deleted ends [..]', () => { - let doc = Automerge.init() - let list = doc.set("_root", "list", Automerge.TEXT) - doc.splice(list, 0, 0, "aaabbbccc") - doc.mark(list, "[3..6]", "bold" , true) - let spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); - doc.del(list,5); - doc.del(list,5); - doc.del(list,2); - doc.del(list,2); - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) - doc.insert(list, 3, "A") - doc.insert(list, 2, "A") - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaA', [ [ 'bold', 'boolean', true ] ], 'b', [], 'Acc' ]) - }) - - it.only('should handle sticky marks (..)', () => { - let doc = Automerge.init() - let list = doc.set("_root", "list", Automerge.TEXT) - doc.splice(list, 0, 0, 
"aaabbbccc") - doc.mark(list, "(3..6)", "bold" , true) - let spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); - doc.insert(list, 6, "A") - doc.insert(list, 3, "A") - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'AbbbA', [], 'ccc' ]); - }) - - it.only('should handle sticky marks with deleted ends (..)', () => { - let doc = Automerge.init() - let list = doc.set("_root", "list", Automerge.TEXT) - doc.splice(list, 0, 0, "aaabbbccc") - doc.mark(list, "(3..6)", "bold" , true) - let spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); - doc.del(list,5); - doc.del(list,5); - doc.del(list,2); - doc.del(list,2); - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) - doc.insert(list, 3, "A") - doc.insert(list, 2, "A") - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) - - // make sure save/load can handle marks - - let doc2 = Automerge.load(doc.save()) - spans = doc2.spans(list); - assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) - - assert.deepStrictEqual(doc.getHeads(), doc2.getHeads()) - assert.deepStrictEqual(doc.save(), doc2.save()) - }) - - it.only('should handle overlapping marks', () => { - let doc = Automerge.init() - let list = doc.set("_root", "list", Automerge.TEXT) - doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog") - doc.mark(list, "[0..37]", "bold" , true) - doc.mark(list, "[4..19]", "itallic" , true) - doc.mark(list, "[10..13]", "comment" , "foxes are my favorite animal!") - let spans = doc.spans(list); - assert.deepStrictEqual(spans, - [ - [ [ 'bold', 'boolean', true ] ], - 'the ', - [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], - 'quick ', - [ - [ 'bold', 'boolean', 
true ], - [ 'comment', 'str', 'foxes are my favorite animal!' ], - [ 'itallic', 'boolean', true ] - ], - 'fox', - [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], - ' jumps', - [ [ 'bold', 'boolean', true ] ], - ' over the lazy dog', - [], - ] - ) - - // mark sure encode decode can handle marks - - let all = doc.getChanges([]) - let decoded = all.map((c) => decodeChange(c)) - let encoded = decoded.map((c) => encodeChange(c)) - let doc2 = Automerge.init(); - doc2.applyChanges(encoded) - - assert.deepStrictEqual(doc.spans(list) , doc2.spans(list)) - assert.deepStrictEqual(doc.save(), doc2.save()) - }) - - }) - describe('sync', () => { - it('should send a sync message implying no local data', () => { - let doc = Automerge.create() - let s1 = initSyncState() - let m1 = doc.generateSyncMessage(s1) - const message = decodeSyncMessage(m1) - assert.deepStrictEqual(message.heads, []) - assert.deepStrictEqual(message.need, []) - assert.deepStrictEqual(message.have.length, 1) - assert.deepStrictEqual(message.have[0].lastSync, []) - assert.deepStrictEqual(message.have[0].bloom.byteLength, 0) - assert.deepStrictEqual(message.changes, []) - }) - - it('should not reply if we have no data as well', () => { - let n1 = Automerge.create(), n2 = Automerge.create() - let s1 = initSyncState(), s2 = initSyncState() - let m1 = n1.generateSyncMessage(s1) - n2.receiveSyncMessage(s2, m1) - let m2 = n2.generateSyncMessage(s2) - assert.deepStrictEqual(m2, null) - }) - - it('repos with equal heads do not need a reply message', () => { - let n1 = Automerge.create(), n2 = Automerge.create() - let s1 = initSyncState(), s2 = initSyncState() - - // make two nodes with the same changes - let list = n1.set("_root","n", LIST) - n1.commit("",0) - for (let i = 0; i < 10; i++) { - n1.insert(list,i,i) - n1.commit("",0) - } - n2.applyChanges(n1.getChanges([])) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) - - // generate a naive sync message - let m1 = n1.generateSyncMessage(s1) - 
assert.deepStrictEqual(s1.lastSentHeads, n1.getHeads()) - - // heads are equal so this message should be null - n2.receiveSyncMessage(s2, m1) - let m2 = n2.generateSyncMessage(s2) - assert.strictEqual(m2, null) - }) - - it('n1 should offer all changes to n2 when starting from nothing', () => { - let n1 = Automerge.create(), n2 = Automerge.create() - - // make changes for n1 that n2 should request - let list = n1.set("_root","n",LIST) - n1.commit("",0) - for (let i = 0; i < 10; i++) { - n1.insert(list, i, i) - n1.commit("",0) - } - - assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) - sync(n1, n2) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) - }) - - it('should sync peers where one has commits the other does not', () => { - let n1 = Automerge.create(), n2 = Automerge.create() - - // make changes for n1 that n2 should request - let list = n1.set("_root","n",LIST) - n1.commit("",0) - for (let i = 0; i < 10; i++) { - n1.insert(list,i,i) - n1.commit("",0) - } - - assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) - sync(n1, n2) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) - }) - - it('should work with prior sync state', () => { - // create & synchronize two nodes - let n1 = Automerge.create(), n2 = Automerge.create() - let s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 5; i++) { - n1.set("_root","x",i) - n1.commit("",0) - } - - sync(n1, n2, s1, s2) - - // modify the first node further - for (let i = 5; i < 10; i++) { - n1.set("_root", "x", i) - n1.commit("",0) - } - - assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) - }) - - it('should not generate messages once synced', () => { - // create & synchronize two nodes - let n1 = Automerge.create('abc123'), n2 = Automerge.create('def456') - let s1 = initSyncState(), s2 = initSyncState() - - let message, patch - for (let i = 0; i < 5; i++) { - n1.set("_root","x",i) - n1.commit("",0) - } - for (let i = 0; i < 5; i++) { - 
n2.set("_root","y",i) - n2.commit("",0) - } - - // n1 reports what it has - message = n1.generateSyncMessage(s1) - - // n2 receives that message and sends changes along with what it has - n2.receiveSyncMessage(s2, message) - message = n2.generateSyncMessage(s2) - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) - //assert.deepStrictEqual(patch, null) // no changes arrived - - // n1 receives the changes and replies with the changes it now knows n2 needs - n1.receiveSyncMessage(s1, message) - message = n1.generateSyncMessage(s1) - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) - - // n2 applies the changes and sends confirmation ending the exchange - n2.receiveSyncMessage(s2, message) - message = n2.generateSyncMessage(s2) - - // n1 receives the message and has nothing more to say - n1.receiveSyncMessage(s1, message) - message = n1.generateSyncMessage(s1) - assert.deepStrictEqual(message, null) - //assert.deepStrictEqual(patch, null) // no changes arrived - - // n2 also has nothing left to say - message = n2.generateSyncMessage(s2) - assert.deepStrictEqual(message, null) - }) - - it('should allow simultaneous messages during synchronization', () => { - // create & synchronize two nodes - let n1 = Automerge.create('abc123'), n2 = Automerge.create('def456') - let s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 5; i++) { - n1.set("_root", "x", i) - n1.commit("",0) - } - for (let i = 0; i < 5; i++) { - n2.set("_root","y", i) - n2.commit("",0) - } - - const head1 = n1.getHeads()[0], head2 = n2.getHeads()[0] - - // both sides report what they have but have no shared peer state - let msg1to2, msg2to1 - msg1to2 = n1.generateSyncMessage(s1) - msg2to1 = n2.generateSyncMessage(s2) - assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) - assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0) - assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) - 
assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0) - - // n1 and n2 receives that message and update sync state but make no patch - n1.receiveSyncMessage(s1, msg2to1) - n2.receiveSyncMessage(s2, msg1to2) - - // now both reply with their local changes the other lacks - // (standard warning that 1% of the time this will result in a "need" message) - msg1to2 = n1.generateSyncMessage(s1) - assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 5) - msg2to1 = n2.generateSyncMessage(s2) - assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) - - // both should now apply the changes and update the frontend - n1.receiveSyncMessage(s1, msg2to1) - assert.deepStrictEqual(n1.getMissingDeps(), []) - //assert.notDeepStrictEqual(patch1, null) - assert.deepStrictEqual(n1.toJS(), {x: 4, y: 4}) - - n2.receiveSyncMessage(s2, msg1to2) - assert.deepStrictEqual(n2.getMissingDeps(), []) - //assert.notDeepStrictEqual(patch2, null) - assert.deepStrictEqual(n2.toJS(), {x: 4, y: 4}) - - // The response acknowledges the changes received, and sends no further changes - msg1to2 = n1.generateSyncMessage(s1) - assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) - msg2to1 = n2.generateSyncMessage(s2) - assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) - - // After receiving acknowledgements, their shared heads should be equal - n1.receiveSyncMessage(s1, msg2to1) - n2.receiveSyncMessage(s2, msg1to2) - assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort()) - assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort()) - //assert.deepStrictEqual(patch1, null) - //assert.deepStrictEqual(patch2, null) - - // We're in sync, no more messages required - msg1to2 = n1.generateSyncMessage(s1) - msg2to1 = n2.generateSyncMessage(s2) - assert.deepStrictEqual(msg1to2, null) - assert.deepStrictEqual(msg2to1, null) - - // If we make one more change, and start another sync, its lastSync should be updated - 
n1.set("_root","x",5) - msg1to2 = n1.generateSyncMessage(s1) - assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()) - }) - - it('should assume sent changes were recieved until we hear otherwise', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState(), message = null - - let items = n1.set("_root", "items", LIST) - n1.commit("",0) - - sync(n1, n2, s1, s2) - - n1.push(items, "x") - n1.commit("",0) - message = n1.generateSyncMessage(s1) - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) - - n1.push(items, "y") - n1.commit("",0) - message = n1.generateSyncMessage(s1) - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) - - n1.push(items, "z") - n1.commit("",0) - - message = n1.generateSyncMessage(s1) - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) - }) - - it('should work regardless of who initiates the exchange', () => { - // create & synchronize two nodes - let n1 = Automerge.create(), n2 = Automerge.create() - let s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 5; i++) { - n1.set("_root", "x", i) - n1.commit("",0) - } - - sync(n1, n2, s1, s2) - - // modify the first node further - for (let i = 5; i < 10; i++) { - n1.set("_root", "x", i) - n1.commit("",0) - } - - assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) - }) - - it('should work without prior sync state', () => { - // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - // `-- c15 <-- c16 <-- c17 - // lastSync is undefined. 
- - // create two peers both with divergent commits - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 10; i++) { - n1.set("_root","x",i) - n1.commit("",0) - } - - sync(n1, n2) - - for (let i = 10; i < 15; i++) { - n1.set("_root","x",i) - n1.commit("",0) - } - - for (let i = 15; i < 18; i++) { - n2.set("_root","x",i) - n2.commit("",0) - } - - assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) - sync(n1, n2) - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) - }) - - it('should work with prior sync state', () => { - // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - // `-- c15 <-- c16 <-- c17 - // lastSync is c9. - - // create two peers both with divergent commits - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 10; i++) { - n1.set("_root","x",i) - n1.commit("",0) - } - - sync(n1, n2, s1, s2) - - for (let i = 10; i < 15; i++) { - n1.set("_root","x",i) - n1.commit("",0) - } - for (let i = 15; i < 18; i++) { - n2.set("_root","x",i) - n2.commit("",0) - } - - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - - assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) - }) - - it('should ensure non-empty state after sync', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 3; i++) { - n1.set("_root","x",i) - n1.commit("",0) - } - - sync(n1, n2, s1, s2) - - assert.deepStrictEqual(s1.sharedHeads, n1.getHeads()) - assert.deepStrictEqual(s2.sharedHeads, n1.getHeads()) - }) - - it('should re-sync after one 
node crashed with data loss', () => { - // Scenario: (r) (n2) (n1) - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. - // we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - - // n1 makes three changes, which we sync to n2 - for (let i = 0; i < 3; i++) { - n1.set("_root","x",i) - n1.commit("",0) - } - - sync(n1, n2, s1, s2) - - // save a copy of n2 as "r" to simulate recovering from crash - let r, rSyncState - ;[r, rSyncState] = [n2.clone(), s2.clone()] - - // sync another few commits - for (let i = 3; i < 6; i++) { - n1.set("_root","x",i) - n1.commit("",0) - } - - sync(n1, n2, s1, s2) - - // everyone should be on the same page here - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) - - // now make a few more changes, then attempt to sync the fully-up-to-date n1 with the confused r - for (let i = 6; i < 9; i++) { - n1.set("_root","x",i) - n1.commit("",0) - } - - s1 = decodeSyncState(encodeSyncState(s1)) - rSyncState = decodeSyncState(encodeSyncState(rSyncState)) - - assert.notDeepStrictEqual(n1.getHeads(), r.getHeads()) - assert.notDeepStrictEqual(n1.toJS(), r.toJS()) - assert.deepStrictEqual(n1.toJS(), {x: 8}) - assert.deepStrictEqual(r.toJS(), {x: 2}) - sync(n1, r, s1, rSyncState) - assert.deepStrictEqual(n1.getHeads(), r.getHeads()) - assert.deepStrictEqual(n1.toJS(), r.toJS()) - }) - - it('should resync after one node experiences data loss without disconnecting', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - - // n1 makes three changes, which we sync to n2 - for (let i = 0; i < 3; i++) { - n1.set("_root","x",i) - n1.commit("",0) - } - - sync(n1, n2, s1, s2) - - 
assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) - - let n2AfterDataLoss = Automerge.create('89abcdef') - - // "n2" now has no data, but n1 still thinks it does. Note we don't do - // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting - sync(n1, n2AfterDataLoss, s1, initSyncState()) - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) - }) - - it('should handle changes concurrent to the last sync heads', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('fedcba98') - let s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() - - // Change 1 is known to all three nodes - //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) - n1.set("_root","x",1); n1.commit("",0) - - sync(n1, n2, s12, s21) - sync(n2, n3, s23, s32) - - // Change 2 is known to n1 and n2 - n1.set("_root","x",2); n1.commit("",0) - - sync(n1, n2, s12, s21) - - // Each of the three nodes makes one change (changes 3, 4, 5) - n1.set("_root","x",3); n1.commit("",0) - n2.set("_root","x",4); n2.commit("",0) - n3.set("_root","x",5); n3.commit("",0) - - // Apply n3's latest change to n2. If running in Node, turn the Uint8Array into a Buffer, to - // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) - let change = n3.getLastLocalChange() - if (typeof Buffer === 'function') change = Buffer.from(change) - n2.applyChanges([change]) - - // Now sync n1 and n2. 
n3's change is concurrent to n1 and n2's last sync heads - sync(n1, n2, s12, s21) - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) - }) - - it('should handle histories with lots of branching and merging', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('fedcba98') - n1.set("_root","x",0); n1.commit("",0) - n2.applyChanges([n1.getLastLocalChange()]) - n3.applyChanges([n1.getLastLocalChange()]) - n3.set("_root","x",1); n3.commit("",0) - - // - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 - // / \/ \/ \/ - // / /\ /\ /\ - // c0 <---- n2c1 <------ n2c2 <------ n2c3 <-- etc. <-- n2c20 <------ n2c21 - // \ / - // ---------------------------------------------- n3c1 <----- - for (let i = 1; i < 20; i++) { - n1.set("_root","n1",i); n1.commit("",0) - n2.set("_root","n2",i); n2.commit("",0) - const change1 = n1.getLastLocalChange() - const change2 = n2.getLastLocalChange() - n1.applyChanges([change2]) - n2.applyChanges([change1]) - } - - let s1 = initSyncState(), s2 = initSyncState() - sync(n1, n2, s1, s2) - - // Having n3's last change concurrent to the last sync heads forces us into the slower code path - n2.applyChanges([n3.getLastLocalChange()]) - n1.set("_root","n1","final"); n1.commit("",0) - n2.set("_root","n2","final"); n2.commit("",0) - - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) - }) - - it('should handle a false-positive head', () => { - // Scenario: ,-- n1 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - // `-- n2 - // where n2 is a false positive in the Bloom filter containing {n1}. - // lastSync is c9. 
- let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 10; i++) { - n1.set("_root","x",i); n1.commit("",0) - } - - sync(n1, n2, s1, s2) - for (let i = 1; ; i++) { // search for false positive; see comment above - const n1up = n1.clone('01234567'); - n1up.set("_root","x",`${i} @ n1`); n1up.commit("",0) - const n2up = n2.clone('89abcdef'); - n2up.set("_root","x",`${i} @ n2`); n2up.commit("",0) - if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { - n1.free(); n2.free() - n1 = n1up; n2 = n2up; break - } - } - const allHeads = [...n1.getHeads(), ...n2.getHeads()].sort() - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), allHeads) - assert.deepStrictEqual(n2.getHeads(), allHeads) - }) - - - describe('with a false-positive dependency', () => { - let n1, n2, s1, s2, n1hash2, n2hash2 - - beforeEach(() => { - // Scenario: ,-- n1c1 <-- n1c2 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - // `-- n2c1 <-- n2c2 - // where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}. - // lastSync is c9. 
- n1 = Automerge.create('01234567') - n2 = Automerge.create('89abcdef') - s1 = initSyncState() - s2 = initSyncState() - for (let i = 0; i < 10; i++) { - n1.set("_root","x",i); n1.commit("",0) - } - sync(n1, n2, s1, s2) - - let n1hash1, n2hash1 - for (let i = 29; ; i++) { // search for false positive; see comment above - const n1us1 = n1.clone('01234567') - n1us1.set("_root","x",`${i} @ n1`); n1us1.commit("",0) - - const n2us1 = n2.clone('89abcdef') - n2us1.set("_root","x",`${i} @ n1`); n2us1.commit("",0) - - n1hash1 = n1us1.getHeads()[0]; n2hash1 = n2us1.getHeads()[0] - - const n1us2 = n1us1.clone(); - n1us2.set("_root","x",`final @ n1`); n1us2.commit("",0) - - const n2us2 = n2us1.clone(); - n2us2.set("_root","x",`final @ n2`); n2us2.commit("",0) - - n1hash2 = n1us2.getHeads()[0]; n2hash2 = n2us2.getHeads()[0] - if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) { - n1.free(); n2.free() - n1 = n1us2; n2 = n2us2; break - } - } - }) - - it('should sync two nodes without connection reset', () => { - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), [n1hash2, n2hash2].sort()) - assert.deepStrictEqual(n2.getHeads(), [n1hash2, n2hash2].sort()) - }) - - it('should sync two nodes with connection reset', () => { - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), [n1hash2, n2hash2].sort()) - assert.deepStrictEqual(n2.getHeads(), [n1hash2, n2hash2].sort()) - }) - - it('should sync three nodes', () => { - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - - // First n1 and n2 exchange Bloom filters - let m1, m2 - m1 = n1.generateSyncMessage(s1) - m2 = n2.generateSyncMessage(s2) - n1.receiveSyncMessage(s1, m2) - n2.receiveSyncMessage(s2, m1) - - // Then n1 and n2 send each other their changes, except for the false positive - m1 = n1.generateSyncMessage(s1) - m2 = n2.generateSyncMessage(s2) - n1.receiveSyncMessage(s1, m2) 
- n2.receiveSyncMessage(s2, m1) - assert.strictEqual(decodeSyncMessage(m1).changes.length, 2) // n1c1 and n1c2 - assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent - - // n3 is a node that doesn't have the missing change. Nevertheless n1 is going to ask n3 for it - let n3 = Automerge.create('fedcba98'), s13 = initSyncState(), s31 = initSyncState() - sync(n1, n3, s13, s31) - assert.deepStrictEqual(n1.getHeads(), [n1hash2]) - assert.deepStrictEqual(n3.getHeads(), [n1hash2]) - }) - }) - - it('should not require an additional request when a false-positive depends on a true-negative', () => { - // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ - // `-- n2c1 <-- n2c2 <-- n2c3 - // where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}. - // lastSync is c4. - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - let n1hash3, n2hash3 - - for (let i = 0; i < 5; i++) { - n1.set("_root","x",i); n1.commit("",0) - } - sync(n1, n2, s1, s2) - for (let i = 86; ; i++) { // search for false positive; see comment above - const n1us1 = n1.clone('01234567') - n1us1.set("_root","x",`${i} @ n1`); n1us1.commit("",0) - - const n2us1 = n2.clone('89abcdef') - n2us1.set("_root","x",`${i} @ n2`); n2us1.commit("",0) - - //const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) - //const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) - const n1hash1 = n1us1.getHeads()[0] - - const n1us2 = n1us1.clone() - n1us2.set("_root","x",`${i + 1} @ n1`); n1us2.commit("",0) - - const n2us2 = n2us1.clone() - n2us2.set("_root","x",`${i + 1} @ n2`); n2us2.commit("",0) - - const n1hash2 = n1us2.getHeads()[0], n2hash2 = n2us2.getHeads()[0] - - const n1us3 = n1us2.clone() - n1us3.set("_root","x",`final @ n1`); 
n1us3.commit("",0) - - const n2us3 = n2us2.clone() - n2us3.set("_root","x",`final @ n2`); n2us3.commit("",0) - - n1hash3 = n1us3.getHeads()[0]; n2hash3 = n2us3.getHeads()[0] - - if (new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2)) { - n1.free(); n2.free(); - n1 = n1us3; n2 = n2us3; break - } - } - const bothHeads = [n1hash3, n2hash3].sort() - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), bothHeads) - assert.deepStrictEqual(n2.getHeads(), bothHeads) - }) - - it('should handle chains of false-positives', () => { - // Scenario: ,-- c5 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ - // `-- n2c1 <-- n2c2 <-- n2c3 - // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. - // lastSync is c4. - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 5; i++) { - n1.set("_root","x",i); n1.commit("",0) - } - - sync(n1, n2, s1, s2) - - n1.set("_root","x",5); n1.commit("",0) - - for (let i = 2; ; i++) { // search for false positive; see comment above - const n2us1 = n2.clone('89abcdef') - n2us1.set("_root","x",`${i} @ n2`); n2us1.commit("",0) - if (new BloomFilter(n1.getHeads()).containsHash(n2us1.getHeads()[0])) { - n2 = n2us1; break - } - } - for (let i = 141; ; i++) { // search for false positive; see comment above - const n2us2 = n2.clone('89abcdef') - n2us2.set("_root","x",`${i} again`); n2us2.commit("",0) - if (new BloomFilter(n1.getHeads()).containsHash(n2us2.getHeads()[0])) { - n2 = n2us2; break - } - } - n2.set("_root","x",`final @ n2`); n2.commit("",0) - - const allHeads = [...n1.getHeads(), ...n2.getHeads()].sort() - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), allHeads) - assert.deepStrictEqual(n2.getHeads(), allHeads) - }) - - 
it('should allow the false-positive hash to be explicitly requested', () => { - // Scenario: ,-- n1 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - // `-- n2 - // where n2 causes a false positive in the Bloom filter containing {n1}. - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - let message - - for (let i = 0; i < 10; i++) { - n1.set("_root","x",i); n1.commit("",0) - } - - sync(n1, n2, s1, s2) - - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - - for (let i = 1; ; i++) { // brute-force search for false positive; see comment above - const n1up = n1.clone('01234567'); n1up.set("_root","x",`${i} @ n1`); n1up.commit("",0) - const n2up = n1.clone('89abcdef'); n2up.set("_root","x",`${i} @ n2`); n2up.commit("",0) - - // check if the bloom filter on n2 will believe n1 already has a particular hash - // this will mean n2 won't offer that data to n2 by receiving a sync message from n1 - if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { - n1 = n1up; n2 = n2up; break - } - } - - // n1 creates a sync message for n2 with an ill-fated bloom - message = n1.generateSyncMessage(s1) - assert.strictEqual(decodeSyncMessage(message).changes.length, 0) - - // n2 receives it and DOESN'T send a change back - n2.receiveSyncMessage(s2, message) - message = n2.generateSyncMessage(s2) - assert.strictEqual(decodeSyncMessage(message).changes.length, 0) - - // n1 should now realize it's missing that change and request it explicitly - n1.receiveSyncMessage(s1, message) - message = n1.generateSyncMessage(s1) - assert.deepStrictEqual(decodeSyncMessage(message).need, n2.getHeads()) - - // n2 should fulfill that request - n2.receiveSyncMessage(s2, message) - message = n2.generateSyncMessage(s2) - assert.strictEqual(decodeSyncMessage(message).changes.length, 1) - - // n1 should apply the change and the two should now be in sync - 
n1.receiveSyncMessage(s1, message) - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - }) - - describe('protocol features', () => { - it('should allow multiple Bloom filters', () => { - // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 - // c0 <-- c1 <-- c2 <-+--- n2c1 <-- n2c2 <-- n2c3 - // `-- n3c1 <-- n3c2 <-- n3c3 - // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; - // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; - // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('76543210') - let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState() - let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() - let message1, message2, message3 - - for (let i = 0; i < 3; i++) { - n1.set("_root","x",i); n1.commit("",0) - } - - // sync all 3 nodes - sync(n1, n2, s12, s21) // eslint-disable-line no-unused-vars -- kept for consistency - sync(n1, n3, s13, s31) - sync(n3, n2, s32, s23) - for (let i = 0; i < 2; i++) { - n1.set("_root","x",`${i} @ n1`); n1.commit("",0) - } - for (let i = 0; i < 2; i++) { - n2.set("_root","x",`${i} @ n2`); n2.commit("",0) - } - n1.applyChanges(n2.getChanges([])) - n2.applyChanges(n1.getChanges([])) - n1.set("_root","x",`3 @ n1`); n1.commit("",0) - n2.set("_root","x",`3 @ n2`); n2.commit("",0) - - for (let i = 0; i < 3; i++) { - n3.set("_root","x",`${i} @ n3`); n3.commit("",0) - } - const n1c3 = n1.getHeads()[0], n2c3 = n2.getHeads()[0], n3c3 = n3.getHeads()[0] - s13 = decodeSyncState(encodeSyncState(s13)) - s31 = decodeSyncState(encodeSyncState(s31)) - s23 = decodeSyncState(encodeSyncState(s23)) - s32 = decodeSyncState(encodeSyncState(s32)) - - - // Now n3 concurrently syncs with n1 and n2. Doing this naively would result in n3 receiving - // changes {n1c1, n1c2, n2c1, n2c2} twice (those are the changes that both n1 and n2 have, but - // that n3 does not have). We want to prevent this duplication. 
- message1 = n1.generateSyncMessage(s13) // message from n1 to n3 - assert.strictEqual(decodeSyncMessage(message1).changes.length, 0) - n3.receiveSyncMessage(s31, message1) - message3 = n3.generateSyncMessage(s31) // message from n3 to n1 - assert.strictEqual(decodeSyncMessage(message3).changes.length, 3) // {n3c1, n3c2, n3c3} - n1.receiveSyncMessage(s13, message3) - - // Copy the Bloom filter received from n1 into the message sent from n3 to n2. This Bloom - // filter indicates what changes n3 is going to receive from n1. - message3 = n3.generateSyncMessage(s32) // message from n3 to n2 - const modifiedMessage = decodeSyncMessage(message3) - modifiedMessage.have.push(decodeSyncMessage(message1).have[0]) - assert.strictEqual(modifiedMessage.changes.length, 0) - n2.receiveSyncMessage(s23, Automerge.encodeSyncMessage(modifiedMessage)) - - // n2 replies to n3, sending only n2c3 (the one change that n2 has but n1 doesn't) - message2 = n2.generateSyncMessage(s23) - assert.strictEqual(decodeSyncMessage(message2).changes.length, 1) // {n2c3} - n3.receiveSyncMessage(s32, message2) - - // n1 replies to n3 - message1 = n1.generateSyncMessage(s13) - assert.strictEqual(decodeSyncMessage(message1).changes.length, 5) // {n1c1, n1c2, n1c3, n2c1, n2c2} - n3.receiveSyncMessage(s31, message1) - assert.deepStrictEqual(n3.getHeads(), [n1c3, n2c3, n3c3].sort()) - }) - - it('should allow any change to be requested', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - let message = null - - for (let i = 0; i < 3; i++) { - n1.set("_root","x",i); n1.commit("",0) - } - - const lastSync = n1.getHeads() - - for (let i = 3; i < 6; i++) { - n1.set("_root","x",i); n1.commit("",0) - } - - sync(n1, n2, s1, s2) - s1.lastSentHeads = [] // force generateSyncMessage to return a message even though nothing changed - message = n1.generateSyncMessage(s1) - const modMsg = decodeSyncMessage(message) - modMsg.need = lastSync 
// re-request change 2 - n2.receiveSyncMessage(s2, Automerge.encodeSyncMessage(modMsg)) - message = n2.generateSyncMessage(s2) - assert.strictEqual(decodeSyncMessage(message).changes.length, 1) - assert.strictEqual(Automerge.decodeChange(decodeSyncMessage(message).changes[0]).hash, lastSync[0]) - }) - - it('should ignore requests for a nonexistent change', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - let message = null - - for (let i = 0; i < 3; i++) { - n1.set("_root","x",i); n1.commit("",0) - } - - n2.applyChanges(n1.getChanges([])) - message = n1.generateSyncMessage(s1) - message.need = ['0000000000000000000000000000000000000000000000000000000000000000'] - n2.receiveSyncMessage(s2, message) - message = n2.generateSyncMessage(s2) - assert.strictEqual(message, null) - }) - - it('should allow a subset of changes to be sent', () => { - // ,-- c1 <-- c2 - // c0 <-+ - // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('76543210') - let s1 = initSyncState(), s2 = initSyncState() - let msg, decodedMsg - - n1.set("_root","x",0); n1.commit("",0) - n3.applyChanges(n3.getChangesAdded(n1)) // merge() - for (let i = 1; i <= 2; i++) { - n1.set("_root","x",i); n1.commit("",0) - } - for (let i = 3; i <= 4; i++) { - n3.set("_root","x",i); n3.commit("",0) - } - const c2 = n1.getHeads()[0], c4 = n3.getHeads()[0] - n2.applyChanges(n2.getChangesAdded(n3)) // merge() - - // Sync n1 and n2, so their shared heads are {c2, c4} - sync(n1, n2, s1, s2) - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - assert.deepStrictEqual(s1.sharedHeads, [c2, c4].sort()) - assert.deepStrictEqual(s2.sharedHeads, [c2, c4].sort()) - - // n2 and n3 apply {c5, c6, c7, c8} - n3.set("_root","x",5); n3.commit("",0) - const change5 = n3.getLastLocalChange() - n3.set("_root","x",6); n3.commit("",0) 
- const change6 = n3.getLastLocalChange(n3), c6 = n3.getHeads()[0] - for (let i = 7; i <= 8; i++) { - n3.set("_root","x",i); n3.commit("",0) - } - const c8 = n3.getHeads()[0] - n2.applyChanges(n2.getChangesAdded(n3)) // merge() - - // Now n1 initiates a sync with n2, and n2 replies with {c5, c6}. n2 does not send {c7, c8} - msg = n1.generateSyncMessage(s1) - n2.receiveSyncMessage(s2, msg) - msg = n2.generateSyncMessage(s2) - decodedMsg = decodeSyncMessage(msg) - decodedMsg.changes = [change5, change6] - msg = Automerge.encodeSyncMessage(decodedMsg) - const sentHashes = {} - sentHashes[Automerge.decodeChange(change5, true).hash] = true - sentHashes[Automerge.decodeChange(change6, true).hash] = true - s2.sentHashes = sentHashes - n1.receiveSyncMessage(s1, msg) - assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) - - // n1 replies, confirming the receipt of {c5, c6} and requesting the remaining changes - msg = n1.generateSyncMessage(s1) - n2.receiveSyncMessage(s2, msg) - assert.deepStrictEqual(decodeSyncMessage(msg).need, [c8]) - assert.deepStrictEqual(decodeSyncMessage(msg).have[0].lastSync, [c2, c6].sort()) - assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) - assert.deepStrictEqual(s2.sharedHeads, [c2, c6].sort()) - - // n2 sends the remaining changes {c7, c8} - msg = n2.generateSyncMessage(s2) - n1.receiveSyncMessage(s1, msg) - assert.strictEqual(decodeSyncMessage(msg).changes.length, 2) - assert.deepStrictEqual(s1.sharedHeads, [c2, c8].sort()) - }) - }) - }) -}) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 7498de5d..b8b0acc7 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -1,20 +1,21 @@ - -const assert = require('assert') -const util = require('util') -const { BloomFilter } = require('./helpers/sync') -const Automerge = require('..') -const { MAP, LIST, TEXT, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState }= Automerge +import { describe, it } 
from 'mocha'; +//@ts-ignore +import assert from 'assert' +//@ts-ignore +import { BloomFilter } from './helpers/sync' +import { create, loadDoc, Datatype, Automerge, MAP, LIST, TEXT, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' // str to uint8array -function en(str) { +function en(str: string) { + //@ts-ignore return new TextEncoder('utf8').encode(str) } // uint8array to str -function de(bytes) { +function de(bytes: Uint8Array) { return new TextDecoder('utf8').decode(bytes); } -function sync(a, b, aSyncState = initSyncState(), bSyncState = initSyncState()) { +function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { const MAX_ITER = 10 let aToBmsg = null, bToAmsg = null, i = 0 do { @@ -37,20 +38,20 @@ function sync(a, b, aSyncState = initSyncState(), bSyncState = initSyncState()) describe('Automerge', () => { describe('basics', () => { it('should init clone and free', () => { - let doc1 = Automerge.create() + let doc1 = create() let doc2 = doc1.clone() doc1.free() doc2.free() }) it('should be able to start and commit', () => { - let doc = Automerge.create() + let doc = create() doc.commit() doc.free() }) it('getting a nonexistant prop does not throw an error', () => { - let doc = Automerge.create() + let doc = create() let root = "_root" let result = doc.value(root,"hello") assert.deepEqual(result,[]) @@ -58,20 +59,20 @@ describe('Automerge', () => { }) it('should be able to set and get a simple value', () => { - let doc : Automerge = Automerge.create("aabbcc") + let doc : Automerge = create("aabbcc") let root = "_root" let result doc.set(root, "hello", "world") - doc.set(root, "number1", 5, "uint") + doc.set(root, "number1", 5, Datatype.uint) doc.set(root, "number2", 5) doc.set(root, "number3", 5.5) - doc.set(root, "number4", 5.5, "f64") - doc.set(root, "number5", 5.5, "int") + doc.set(root, "number4", 5.5, Datatype.f64) + doc.set(root, 
"number5", 5.5, Datatype.int) doc.set(root, "bool", true) doc.set(root, "time1", 1000, "timestamp") doc.set(root, "time2", new Date(1001)) - doc.set(root, "list", Automerge.LIST); + doc.set(root, "list", LIST); result = doc.value(root,"hello") assert.deepEqual(result,["str","world"]) @@ -80,16 +81,16 @@ describe('Automerge', () => { assert.deepEqual(result,["uint",5]) result = doc.value(root,"number2") - assert.deepEqual(result,["int",5]) + assert.deepEqual(result,[Datatype.int,5]) result = doc.value(root,"number3") - assert.deepEqual(result,["f64",5.5]) + assert.deepEqual(result,[Datatype.f64,5.5]) result = doc.value(root,"number4") - assert.deepEqual(result,["f64",5.5]) + assert.deepEqual(result,[Datatype.f64,5.5]) result = doc.value(root,"number5") - assert.deepEqual(result,["int",5]) + assert.deepEqual(result,[Datatype.int,5]) result = doc.value(root,"bool") assert.deepEqual(result,["boolean",true]) @@ -112,7 +113,7 @@ describe('Automerge', () => { }) it('should be able to use bytes', () => { - let doc = Automerge.create() + let doc = create() doc.set("_root","data1", new Uint8Array([10,11,12])); doc.set("_root","data2", new Uint8Array([13,14,15]), "bytes"); let value1 = doc.value("_root", "data1") @@ -123,7 +124,7 @@ describe('Automerge', () => { }) it('should be able to make sub objects', () => { - let doc = Automerge.create() + let doc = create() let root = "_root" let result @@ -140,7 +141,7 @@ describe('Automerge', () => { }) it('should be able to make lists', () => { - let doc = Automerge.create() + let doc = create() let root = "_root" let submap = doc.set(root, "numbers", LIST) @@ -163,7 +164,7 @@ describe('Automerge', () => { }) it('lists have insert, set, splice, and push ops', () => { - let doc = Automerge.create() + let doc = create() let root = "_root" let submap = doc.set(root, "letters", LIST) @@ -184,7 +185,7 @@ describe('Automerge', () => { }) it('should be able delete non-existant props', () => { - let doc = Automerge.create() + let doc = 
create() doc.set("_root", "foo","bar") doc.set("_root", "bip","bap") @@ -203,7 +204,7 @@ describe('Automerge', () => { }) it('should be able to del', () => { - let doc = Automerge.create() + let doc = create() let root = "_root" doc.set(root, "xxx", "xxx"); @@ -214,7 +215,7 @@ describe('Automerge', () => { }) it('should be able to use counters', () => { - let doc = Automerge.create() + let doc = create() let root = "_root" doc.set(root, "counter", 10, "counter"); @@ -227,10 +228,10 @@ describe('Automerge', () => { }) it('should be able to splice text', () => { - let doc = Automerge.create() + let doc = create() let root = "_root"; - let text = doc.set(root, "text", Automerge.TEXT); + let text = doc.set(root, "text", TEXT); doc.splice(text, 0, 0, "hello ") doc.splice(text, 6, 0, ["w","o","r","l","d"]) doc.splice(text, 11, 0, [["str","!"],["str","?"]]) @@ -244,7 +245,7 @@ describe('Automerge', () => { }) it('should be able save all or incrementally', () => { - let doc = Automerge.create() + let doc = create() doc.set("_root", "foo", 1) @@ -265,9 +266,9 @@ describe('Automerge', () => { assert.notDeepEqual(saveA, saveB); - let docA = Automerge.loadDoc(saveA); - let docB = Automerge.loadDoc(saveB); - let docC = Automerge.loadDoc(saveMidway) + let docA = loadDoc(saveA); + let docB = loadDoc(saveB); + let docC = loadDoc(saveMidway) docC.loadIncremental(save3) assert.deepEqual(docA.keys("_root"), docB.keys("_root")); @@ -280,7 +281,7 @@ describe('Automerge', () => { }) it('should be able to splice text', () => { - let doc = Automerge.create() + let doc = create() let text = doc.set("_root", "text", TEXT); doc.splice(text, 0, 0, "hello world"); let heads1 = doc.commit(); @@ -296,10 +297,10 @@ describe('Automerge', () => { }) it('local inc increments all visible counters in a map', () => { - let doc1 = Automerge.create("aaaa") + let doc1 = create("aaaa") doc1.set("_root", "hello", "world") - let doc2 = Automerge.loadDoc(doc1.save(), "bbbb"); - let doc3 = 
Automerge.loadDoc(doc1.save(), "cccc"); + let doc2 = loadDoc(doc1.save(), "bbbb"); + let doc3 = loadDoc(doc1.save(), "cccc"); doc1.set("_root", "cnt", 20) doc2.set("_root", "cnt", 0, "counter") doc3.set("_root", "cnt", 10, "counter") @@ -319,7 +320,7 @@ describe('Automerge', () => { ]) let save1 = doc1.save() - let doc4 = Automerge.loadDoc(save1) + let doc4 = loadDoc(save1) assert.deepEqual(doc4.save(), save1); doc1.free() doc2.free() @@ -328,11 +329,11 @@ describe('Automerge', () => { }) it('local inc increments all visible counters in a sequence', () => { - let doc1 = Automerge.create("aaaa") + let doc1 = create("aaaa") let seq = doc1.set("_root", "seq", LIST) doc1.insert(seq, 0, "hello") - let doc2 = Automerge.loadDoc(doc1.save(), "bbbb"); - let doc3 = Automerge.loadDoc(doc1.save(), "cccc"); + let doc2 = loadDoc(doc1.save(), "bbbb"); + let doc3 = loadDoc(doc1.save(), "cccc"); doc1.set(seq, 0, 20) doc2.set(seq, 0, 0, "counter") doc3.set(seq, 0, 10, "counter") @@ -352,7 +353,7 @@ describe('Automerge', () => { ]) let save = doc1.save() - let doc4 = Automerge.loadDoc(save) + let doc4 = loadDoc(save) assert.deepEqual(doc4.save(), save); doc1.free() doc2.free() @@ -361,7 +362,7 @@ describe('Automerge', () => { }) it('only returns an object id when objects are created', () => { - let doc = Automerge.create("aaaa") + let doc = create("aaaa") let r1 = doc.set("_root","foo","bar") let r2 = doc.set("_root","list",LIST) let r3 = doc.set("_root","counter",10, "counter") @@ -384,13 +385,13 @@ describe('Automerge', () => { }) it('objects without properties are preserved', () => { - let doc1 = Automerge.create("aaaa") + let doc1 = create("aaaa") let a = doc1.set("_root","a",MAP); let b = doc1.set("_root","b",MAP); let c = doc1.set("_root","c",MAP); let d = doc1.set(c,"d","dd"); let saved = doc1.save(); - let doc2 = Automerge.loadDoc(saved); + let doc2 = loadDoc(saved); assert.deepEqual(doc2.value("_root","a"),["map",a]) assert.deepEqual(doc2.keys(a),[]) 
assert.deepEqual(doc2.value("_root","b"),["map",b]) @@ -403,8 +404,8 @@ describe('Automerge', () => { }) it('should handle marks [..]', () => { - let doc = Automerge.create() - let list = doc.set("_root", "list", Automerge.TEXT) + let doc = create() + let list = doc.set("_root", "list", TEXT) doc.splice(list, 0, 0, "aaabbbccc") doc.mark(list, "[3..6]", "bold" , true) let spans = doc.spans(list); @@ -416,8 +417,8 @@ describe('Automerge', () => { }) it('should handle marks with deleted ends [..]', () => { - let doc = Automerge.create() - let list = doc.set("_root", "list", Automerge.TEXT) + let doc = create() + let list = doc.set("_root", "list", TEXT) doc.splice(list, 0, 0, "aaabbbccc") doc.mark(list, "[3..6]", "bold" , true) let spans = doc.spans(list); @@ -435,8 +436,8 @@ describe('Automerge', () => { }) it('should handle sticky marks (..)', () => { - let doc = Automerge.create() - let list = doc.set("_root", "list", Automerge.TEXT) + let doc = create() + let list = doc.set("_root", "list", TEXT) doc.splice(list, 0, 0, "aaabbbccc") doc.mark(list, "(3..6)", "bold" , true) let spans = doc.spans(list); @@ -448,8 +449,8 @@ describe('Automerge', () => { }) it('should handle sticky marks with deleted ends (..)', () => { - let doc = Automerge.create() - let list = doc.set("_root", "list", Automerge.TEXT) + let doc = create() + let list = doc.set("_root", "list", TEXT) doc.splice(list, 0, 0, "aaabbbccc") doc.mark(list, "(3..6)", "bold" , true) let spans = doc.spans(list); @@ -467,7 +468,7 @@ describe('Automerge', () => { // make sure save/load can handle marks - let doc2 = Automerge.loadDoc(doc.save()) + let doc2 = loadDoc(doc.save()) spans = doc2.spans(list); assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) @@ -476,8 +477,8 @@ describe('Automerge', () => { }) it('should handle overlapping marks', () => { - let doc : Automerge = Automerge.create() - let list = doc.set("_root", "list", Automerge.TEXT) + let doc : Automerge = 
create() + let list = doc.set("_root", "list", TEXT) doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog") doc.mark(list, "[0..37]", "bold" , true) doc.mark(list, "[4..19]", "itallic" , true) @@ -508,7 +509,7 @@ describe('Automerge', () => { let all = doc.getChanges([]) let decoded = all.map((c) => decodeChange(c)) let encoded = decoded.map((c) => encodeChange(c)) - let doc2 = Automerge.create(); + let doc2 = create(); doc2.applyChanges(encoded) assert.deepStrictEqual(doc.spans(list) , doc2.spans(list)) @@ -518,7 +519,7 @@ describe('Automerge', () => { }) describe('sync', () => { it('should send a sync message implying no local data', () => { - let doc = Automerge.create() + let doc = create() let s1 = initSyncState() let m1 = doc.generateSyncMessage(s1) const message = decodeSyncMessage(m1) @@ -531,7 +532,7 @@ describe('Automerge', () => { }) it('should not reply if we have no data as well', () => { - let n1 = Automerge.create(), n2 = Automerge.create() + let n1 = create(), n2 = create() let s1 = initSyncState(), s2 = initSyncState() let m1 = n1.generateSyncMessage(s1) n2.receiveSyncMessage(s2, m1) @@ -540,7 +541,7 @@ describe('Automerge', () => { }) it('repos with equal heads do not need a reply message', () => { - let n1 = Automerge.create(), n2 = Automerge.create() + let n1 = create(), n2 = create() let s1 = initSyncState(), s2 = initSyncState() // make two nodes with the same changes @@ -564,7 +565,7 @@ describe('Automerge', () => { }) it('n1 should offer all changes to n2 when starting from nothing', () => { - let n1 = Automerge.create(), n2 = Automerge.create() + let n1 = create(), n2 = create() // make changes for n1 that n2 should request let list = n1.set("_root","n",LIST) @@ -580,7 +581,7 @@ describe('Automerge', () => { }) it('should sync peers where one has commits the other does not', () => { - let n1 = Automerge.create(), n2 = Automerge.create() + let n1 = create(), n2 = create() // make changes for n1 that n2 should request let list = 
n1.set("_root","n",LIST) @@ -597,7 +598,7 @@ describe('Automerge', () => { it('should work with prior sync state', () => { // create & synchronize two nodes - let n1 = Automerge.create(), n2 = Automerge.create() + let n1 = create(), n2 = create() let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -620,7 +621,7 @@ describe('Automerge', () => { it('should not generate messages once synced', () => { // create & synchronize two nodes - let n1 = Automerge.create('abc123'), n2 = Automerge.create('def456') + let n1 = create('abc123'), n2 = create('def456') let s1 = initSyncState(), s2 = initSyncState() let message, patch @@ -664,7 +665,7 @@ describe('Automerge', () => { it('should allow simultaneous messages during synchronization', () => { // create & synchronize two nodes - let n1 = Automerge.create('abc123'), n2 = Automerge.create('def456') + let n1 = create('abc123'), n2 = create('def456') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -736,7 +737,7 @@ describe('Automerge', () => { }) it('should assume sent changes were recieved until we hear otherwise', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState(), message = null let items = n1.set("_root", "items", LIST) @@ -763,7 +764,7 @@ describe('Automerge', () => { it('should work regardless of who initiates the exchange', () => { // create & synchronize two nodes - let n1 = Automerge.create(), n2 = Automerge.create() + let n1 = create(), n2 = create() let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -791,7 +792,7 @@ describe('Automerge', () => { // lastSync is undefined. 
// create two peers both with divergent commits - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -824,7 +825,7 @@ describe('Automerge', () => { // lastSync is c9. // create two peers both with divergent commits - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -853,7 +854,7 @@ describe('Automerge', () => { }) it('should ensure non-empty state after sync', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 3; i++) { @@ -872,7 +873,7 @@ describe('Automerge', () => { // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. 
// we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() // n1 makes three changes, which we sync to n2 @@ -918,7 +919,7 @@ describe('Automerge', () => { }) it('should resync after one node experiences data loss without disconnecting', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() // n1 makes three changes, which we sync to n2 @@ -932,7 +933,7 @@ describe('Automerge', () => { assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) assert.deepStrictEqual(n1.toJS(), n2.toJS()) - let n2AfterDataLoss = Automerge.create('89abcdef') + let n2AfterDataLoss = create('89abcdef') // "n2" now has no data, but n1 still thinks it does. Note we don't do // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting @@ -942,7 +943,7 @@ describe('Automerge', () => { }) it('should handle changes concurrent to the last sync heads', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('fedcba98') + let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') let s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() // Change 1 is known to all three nodes @@ -965,6 +966,7 @@ describe('Automerge', () => { // Apply n3's latest change to n2. 
If running in Node, turn the Uint8Array into a Buffer, to // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) let change = n3.getLastLocalChange() + //@ts-ignore if (typeof Buffer === 'function') change = Buffer.from(change) n2.applyChanges([change]) @@ -975,7 +977,7 @@ describe('Automerge', () => { }) it('should handle histories with lots of branching and merging', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('fedcba98') + let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') n1.set("_root","x",0); n1.commit("",0) n2.applyChanges([n1.getLastLocalChange()]) n3.applyChanges([n1.getLastLocalChange()]) @@ -1015,7 +1017,7 @@ describe('Automerge', () => { // `-- n2 // where n2 is a false positive in the Bloom filter containing {n1}. // lastSync is c9. - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -1051,8 +1053,8 @@ describe('Automerge', () => { // `-- n2c1 <-- n2c2 // where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}. // lastSync is c9. - n1 = Automerge.create('01234567') - n2 = Automerge.create('89abcdef') + n1 = create('01234567') + n2 = create('89abcdef') s1 = initSyncState() s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -1118,7 +1120,7 @@ describe('Automerge', () => { assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent // n3 is a node that doesn't have the missing change. 
Nevertheless n1 is going to ask n3 for it - let n3 = Automerge.create('fedcba98'), s13 = initSyncState(), s31 = initSyncState() + let n3 = create('fedcba98'), s13 = initSyncState(), s31 = initSyncState() sync(n1, n3, s13, s31) assert.deepStrictEqual(n1.getHeads(), [n1hash2]) assert.deepStrictEqual(n3.getHeads(), [n1hash2]) @@ -1131,7 +1133,7 @@ describe('Automerge', () => { // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}. // lastSync is c4. - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() let n1hash3, n2hash3 @@ -1185,7 +1187,7 @@ describe('Automerge', () => { // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. // lastSync is c4. - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -1225,7 +1227,7 @@ describe('Automerge', () => { // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- n2 // where n2 causes a false positive in the Bloom filter containing {n1}. - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() let message @@ -1281,7 +1283,7 @@ describe('Automerge', () => { // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. 
- let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('76543210') + let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState() let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() let message1, message2, message3 @@ -1331,7 +1333,7 @@ describe('Automerge', () => { const modifiedMessage = decodeSyncMessage(message3) modifiedMessage.have.push(decodeSyncMessage(message1).have[0]) assert.strictEqual(modifiedMessage.changes.length, 0) - n2.receiveSyncMessage(s23, Automerge.encodeSyncMessage(modifiedMessage)) + n2.receiveSyncMessage(s23, encodeSyncMessage(modifiedMessage)) // n2 replies to n3, sending only n2c3 (the one change that n2 has but n1 doesn't) message2 = n2.generateSyncMessage(s23) @@ -1346,7 +1348,7 @@ describe('Automerge', () => { }) it('should allow any change to be requested', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() let message = null @@ -1372,7 +1374,7 @@ describe('Automerge', () => { }) it('should ignore requests for a nonexistent change', () => { - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef') + let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() let message = null @@ -1392,7 +1394,7 @@ describe('Automerge', () => { // ,-- c1 <-- c2 // c0 <-+ // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - let n1 = Automerge.create('01234567'), n2 = Automerge.create('89abcdef'), n3 = Automerge.create('76543210') + let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') let s1 = initSyncState(), s2 = initSyncState() let msg, decodedMsg diff --git a/automerge-wasm/tsconfig.json b/automerge-wasm/tsconfig.json index 1dc480a4..cf6f9fef 100644 --- a/automerge-wasm/tsconfig.json +++ 
b/automerge-wasm/tsconfig.json @@ -6,7 +6,7 @@ "baseUrl": ".", "esModuleInterop": true, "lib": ["dom", "esnext.asynciterable", "es2017", "es2016", "es2015"], - "module": "commonjs", + "module": "esnext", "moduleResolution": "node", "paths": { "dev": ["*"]}, "rootDir": "", From d3f4be0654b66814cae615b6246645edd85b0e49 Mon Sep 17 00:00:00 2001 From: Karissa McKelvey <633012+okdistribute@users.noreply.github.com> Date: Mon, 31 Jan 2022 13:03:27 -0800 Subject: [PATCH 051/730] Fix typescript errors in test --- automerge-wasm/index.d.ts | 6 +++- automerge-wasm/test/test.ts | 70 ++++++++++++++++++++++++------------- 2 files changed, 51 insertions(+), 25 deletions(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 01b061a8..2eb26e4f 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -52,11 +52,15 @@ export enum Datatype { } export type DecodedSyncMessage = { - + heads: Heads, + need: Heads, + have: any[] + changes: Change[] } export type DecodedChange = { message: string, + hash: Hash, seq: number, ops: Op[] } diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index b8b0acc7..7f40cfa6 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -3,7 +3,9 @@ import { describe, it } from 'mocha'; import assert from 'assert' //@ts-ignore import { BloomFilter } from './helpers/sync' -import { create, loadDoc, Datatype, Automerge, MAP, LIST, TEXT, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' +import { create, loadDoc, SyncState, Datatype, Automerge, MAP, LIST, TEXT, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' 
+import { DecodedSyncMessage } from '../index'; +import { Hash } from '../dev/index'; // str to uint8array function en(str: string) { @@ -70,7 +72,7 @@ describe('Automerge', () => { doc.set(root, "number4", 5.5, Datatype.f64) doc.set(root, "number5", 5.5, Datatype.int) doc.set(root, "bool", true) - doc.set(root, "time1", 1000, "timestamp") + doc.set(root, "time1", 1000, Datatype.timestamp) doc.set(root, "time2", new Date(1001)) doc.set(root, "list", LIST); @@ -95,7 +97,7 @@ describe('Automerge', () => { result = doc.value(root,"bool") assert.deepEqual(result,["boolean",true]) - doc.set(root, "bool", false, "boolean") + doc.set(root, "bool", false, Datatype.boolean) result = doc.value(root,"bool") assert.deepEqual(result,["boolean",false]) @@ -115,11 +117,11 @@ describe('Automerge', () => { it('should be able to use bytes', () => { let doc = create() doc.set("_root","data1", new Uint8Array([10,11,12])); - doc.set("_root","data2", new Uint8Array([13,14,15]), "bytes"); + doc.set("_root","data2", new Uint8Array([13,14,15]), Datatype.bytes); let value1 = doc.value("_root", "data1") - assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]); + assert.deepEqual(value1, [Datatype.bytes, new Uint8Array([10,11,12])]); let value2 = doc.value("_root", "data2") - assert.deepEqual(value2, ["bytes", new Uint8Array([13,14,15])]); + assert.deepEqual(value2, [Datatype.bytes, new Uint8Array([13,14,15])]); doc.free() }) @@ -129,7 +131,8 @@ describe('Automerge', () => { let result let submap = doc.set(root, "submap", MAP) - doc.set(submap, "number", 6, "uint") + if (!submap) throw new Error('should be not null') + doc.set(submap, "number", 6, Datatype.uint) assert.strictEqual(doc.pendingOps(),2) result = doc.value(root,"submap") @@ -145,6 +148,7 @@ describe('Automerge', () => { let root = "_root" let submap = doc.set(root, "numbers", LIST) + if (!submap) throw new Error('should be not null') doc.insert(submap, 0, "a"); doc.insert(submap, 1, "b"); doc.insert(submap, 2, "c"); @@ 
-168,12 +172,13 @@ describe('Automerge', () => { let root = "_root" let submap = doc.set(root, "letters", LIST) + if (!submap) throw new Error('should be not null') doc.insert(submap, 0, "a"); doc.insert(submap, 0, "b"); assert.deepEqual(doc.toJS(), { letters: ["b", "a" ] }) doc.push(submap, "c"); assert.deepEqual(doc.toJS(), { letters: ["b", "a", "c" ] }) - doc.push(submap, 3, "timestamp"); + doc.push(submap, 3, Datatype.timestamp); assert.deepEqual(doc.toJS(), { letters: ["b", "a", "c", new Date(3) ] }) doc.splice(submap, 1, 1, ["d","e","f"]); assert.deepEqual(doc.toJS(), { letters: ["b", "d", "e", "f", "c", new Date(3) ] }) @@ -218,7 +223,7 @@ describe('Automerge', () => { let doc = create() let root = "_root" - doc.set(root, "counter", 10, "counter"); + doc.set(root, "counter", 10, Datatype.counter); assert.deepEqual(doc.value(root, "counter"),["counter",10]) doc.inc(root, "counter", 10); assert.deepEqual(doc.value(root, "counter"),["counter",20]) @@ -232,9 +237,10 @@ describe('Automerge', () => { let root = "_root"; let text = doc.set(root, "text", TEXT); + if (!text) throw new Error('should not be undefined') doc.splice(text, 0, 0, "hello ") doc.splice(text, 6, 0, ["w","o","r","l","d"]) - doc.splice(text, 11, 0, [["str","!"],["str","?"]]) + doc.splice(text, 11, 0, [[Datatype.str,"!"],[Datatype.str,"?"]]) assert.deepEqual(doc.value(text, 0),["str","h"]) assert.deepEqual(doc.value(text, 1),["str","e"]) assert.deepEqual(doc.value(text, 9),["str","l"]) @@ -283,6 +289,7 @@ describe('Automerge', () => { it('should be able to splice text', () => { let doc = create() let text = doc.set("_root", "text", TEXT); + if (!text) throw new Error('should not be undefined') doc.splice(text, 0, 0, "hello world"); let heads1 = doc.commit(); doc.splice(text, 6, 0, "big bad "); @@ -302,8 +309,8 @@ describe('Automerge', () => { let doc2 = loadDoc(doc1.save(), "bbbb"); let doc3 = loadDoc(doc1.save(), "cccc"); doc1.set("_root", "cnt", 20) - doc2.set("_root", "cnt", 0, "counter") - 
doc3.set("_root", "cnt", 10, "counter") + doc2.set("_root", "cnt", 0, Datatype.counter) + doc3.set("_root", "cnt", 10, Datatype.counter) doc1.applyChanges(doc2.getChanges(doc1.getHeads())) doc1.applyChanges(doc3.getChanges(doc1.getHeads())) let result = doc1.values("_root", "cnt") @@ -331,12 +338,13 @@ describe('Automerge', () => { it('local inc increments all visible counters in a sequence', () => { let doc1 = create("aaaa") let seq = doc1.set("_root", "seq", LIST) + if (!seq) throw new Error('Should not be undefined') doc1.insert(seq, 0, "hello") let doc2 = loadDoc(doc1.save(), "bbbb"); let doc3 = loadDoc(doc1.save(), "cccc"); doc1.set(seq, 0, 20) - doc2.set(seq, 0, 0, "counter") - doc3.set(seq, 0, 10, "counter") + doc2.set(seq, 0, 0, Datatype.counter) + doc3.set(seq, 0, 10, Datatype.counter) doc1.applyChanges(doc2.getChanges(doc1.getHeads())) doc1.applyChanges(doc3.getChanges(doc1.getHeads())) let result = doc1.values(seq, 0) @@ -365,9 +373,10 @@ describe('Automerge', () => { let doc = create("aaaa") let r1 = doc.set("_root","foo","bar") let r2 = doc.set("_root","list",LIST) - let r3 = doc.set("_root","counter",10, "counter") + let r3 = doc.set("_root","counter",10, Datatype.counter) let r4 = doc.inc("_root","counter",1) let r5 = doc.del("_root","counter") + if (!r2) throw new Error('should not be undefined') let r6 = doc.insert(r2,0,10); let r7 = doc.insert(r2,0,MAP); let r8 = doc.splice(r2,1,0,["a","b","c"]); @@ -387,8 +396,11 @@ describe('Automerge', () => { it('objects without properties are preserved', () => { let doc1 = create("aaaa") let a = doc1.set("_root","a",MAP); + if (!a) throw new Error('should not be undefined') let b = doc1.set("_root","b",MAP); + if (!b) throw new Error('should not be undefined') let c = doc1.set("_root","c",MAP); + if (!c) throw new Error('should not be undefined') let d = doc1.set(c,"d","dd"); let saved = doc1.save(); let doc2 = loadDoc(saved); @@ -406,6 +418,7 @@ describe('Automerge', () => { it('should handle marks [..]', () 
=> { let doc = create() let list = doc.set("_root", "list", TEXT) + if (!list) throw new Error('should not be undefined') doc.splice(list, 0, 0, "aaabbbccc") doc.mark(list, "[3..6]", "bold" , true) let spans = doc.spans(list); @@ -419,6 +432,8 @@ describe('Automerge', () => { it('should handle marks with deleted ends [..]', () => { let doc = create() let list = doc.set("_root", "list", TEXT) + if (!list) throw new Error('should not be undefined') + doc.splice(list, 0, 0, "aaabbbccc") doc.mark(list, "[3..6]", "bold" , true) let spans = doc.spans(list); @@ -438,6 +453,7 @@ describe('Automerge', () => { it('should handle sticky marks (..)', () => { let doc = create() let list = doc.set("_root", "list", TEXT) + if (!list) throw new Error('should not be undefined') doc.splice(list, 0, 0, "aaabbbccc") doc.mark(list, "(3..6)", "bold" , true) let spans = doc.spans(list); @@ -451,6 +467,7 @@ describe('Automerge', () => { it('should handle sticky marks with deleted ends (..)', () => { let doc = create() let list = doc.set("_root", "list", TEXT) + if (!list) throw new Error('should not be undefined') doc.splice(list, 0, 0, "aaabbbccc") doc.mark(list, "(3..6)", "bold" , true) let spans = doc.spans(list); @@ -479,6 +496,7 @@ describe('Automerge', () => { it('should handle overlapping marks', () => { let doc : Automerge = create() let list = doc.set("_root", "list", TEXT) + if (!list) throw new Error('should not be undefined') doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog") doc.mark(list, "[0..37]", "bold" , true) doc.mark(list, "[4..19]", "itallic" , true) @@ -522,7 +540,7 @@ describe('Automerge', () => { let doc = create() let s1 = initSyncState() let m1 = doc.generateSyncMessage(s1) - const message = decodeSyncMessage(m1) + const message: DecodedSyncMessage = decodeSyncMessage(m1) assert.deepStrictEqual(message.heads, []) assert.deepStrictEqual(message.need, []) assert.deepStrictEqual(message.have.length, 1) @@ -546,6 +564,7 @@ describe('Automerge', () => { // 
make two nodes with the same changes let list = n1.set("_root","n", LIST) + if (!list) throw new Error('undefined') n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list,i,i) @@ -569,6 +588,7 @@ describe('Automerge', () => { // make changes for n1 that n2 should request let list = n1.set("_root","n",LIST) + if (!list) throw new Error('undefined') n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list, i, i) @@ -585,6 +605,7 @@ describe('Automerge', () => { // make changes for n1 that n2 should request let list = n1.set("_root","n",LIST) + if (!list) throw new Error('undefined') n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list,i,i) @@ -741,6 +762,7 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState(), message = null let items = n1.set("_root", "items", LIST) + if (!items) throw new Error('undefined') n1.commit("",0) sync(n1, n2, s1, s2) @@ -1045,7 +1067,7 @@ describe('Automerge', () => { describe('with a false-positive dependency', () => { - let n1, n2, s1, s2, n1hash2, n2hash2 + let n1: Automerge, n2: Automerge, s1: SyncState, s2: SyncState, n1hash2: Hash, n2hash2: Hash beforeEach(() => { // Scenario: ,-- n1c1 <-- n1c2 @@ -1367,10 +1389,10 @@ describe('Automerge', () => { message = n1.generateSyncMessage(s1) const modMsg = decodeSyncMessage(message) modMsg.need = lastSync // re-request change 2 - n2.receiveSyncMessage(s2, Automerge.encodeSyncMessage(modMsg)) + n2.receiveSyncMessage(s2, encodeSyncMessage(modMsg)) message = n2.generateSyncMessage(s2) assert.strictEqual(decodeSyncMessage(message).changes.length, 1) - assert.strictEqual(Automerge.decodeChange(decodeSyncMessage(message).changes[0]).hash, lastSync[0]) + assert.strictEqual(decodeChange(decodeSyncMessage(message).changes[0]).hash, lastSync[0]) }) it('should ignore requests for a nonexistent change', () => { @@ -1420,7 +1442,7 @@ describe('Automerge', () => { n3.set("_root","x",5); n3.commit("",0) const change5 = n3.getLastLocalChange() 
n3.set("_root","x",6); n3.commit("",0) - const change6 = n3.getLastLocalChange(n3), c6 = n3.getHeads()[0] + const change6 = n3.getLastLocalChange(), c6 = n3.getHeads()[0] for (let i = 7; i <= 8; i++) { n3.set("_root","x",i); n3.commit("",0) } @@ -1433,10 +1455,10 @@ describe('Automerge', () => { msg = n2.generateSyncMessage(s2) decodedMsg = decodeSyncMessage(msg) decodedMsg.changes = [change5, change6] - msg = Automerge.encodeSyncMessage(decodedMsg) - const sentHashes = {} - sentHashes[Automerge.decodeChange(change5, true).hash] = true - sentHashes[Automerge.decodeChange(change6, true).hash] = true + msg = encodeSyncMessage(decodedMsg) + const sentHashes: any = {} + sentHashes[decodeChange(change5).hash] = true + sentHashes[decodeChange(change6).hash] = true s2.sentHashes = sentHashes n1.receiveSyncMessage(s1, msg) assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) From c49bf55ea4d1b69796fe3b4150bda2d9fd62fec2 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 31 Jan 2022 16:48:03 -0500 Subject: [PATCH 052/730] almost working ts --- automerge-wasm/index.d.ts | 17 +++++++++++++---- automerge-wasm/src/lib.rs | 6 +++--- automerge-wasm/test/test.ts | 25 +++++++++++++++---------- automerge-wasm/tsconfig.json | 2 +- 4 files changed, 32 insertions(+), 18 deletions(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 2eb26e4f..b41240eb 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -8,7 +8,6 @@ export type Hash = string; export type Heads = Hash[]; export type ObjectType = string; // opaque ?? 
export type Value = string | number | boolean | Date | Uint8Array | ObjectType; -export type ObjTypeString = "map" | "list" | "text" | "table" export type OutValue = [Datatype.str, string] | [Datatype.uint, number] | @@ -59,15 +58,23 @@ export type DecodedSyncMessage = { } export type DecodedChange = { - message: string, + actor: Actor, + seq: number + startOp: number, + time: number, + message: string | null, + deps: Heads, hash: Hash, - seq: number, ops: Op[] } export type Op = { action: string, + obj: ObjID, + key: string, value?: string | number | boolean, + datatype?: string, + pred: string[], } export function create(actor?: Actor): Automerge; @@ -89,6 +96,7 @@ export class Automerge { splice(obj: ObjID, start: number, delete_count: number, text: string | Value[] | OutValue[] ): ObjID[] | undefined; inc(obj: ObjID, prop: Prop, value: number): void; del(obj: ObjID, prop: Prop): void; + mark(obj: ObjID, name: string, range: string, value: Value, datatype?: Datatype): void; // returns a single value - if there is a conflict return the winner value(obj: ObjID, prop: any, heads?: Heads): OutValue | null; @@ -97,6 +105,7 @@ export class Automerge { keys(obj: ObjID, heads?: Heads): string[]; text(obj: ObjID, heads?: Heads): string; length(obj: ObjID, heads?: Heads): number; + spans(obj: ObjID): any; commit(message?: string, time?: number): Heads; getActorId(): Actor; @@ -117,7 +126,7 @@ export class Automerge { getChanges(have_deps: Heads): Change[]; getChangesAdded(other: Automerge): Change[]; getHeads(): Heads; - getLastLocalChange(): Change | undefined; + getLastLocalChange(): Change; getMissingDeps(heads?: Heads): Heads; // memory management diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 1edc1381..8cc5891f 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -430,11 +430,11 @@ impl Automerge { } #[wasm_bindgen(js_name = getLastLocalChange)] - pub fn get_last_local_change(&mut self) -> Result, JsValue> { + pub fn 
get_last_local_change(&mut self) -> Result { if let Some(change) = self.0.get_last_local_change() { - Ok(Some(Uint8Array::from(change.raw_bytes()))) + Ok(Uint8Array::from(change.raw_bytes())) } else { - Ok(None) + Err(to_js_err("no local changes")) } } diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 7f40cfa6..f53851e3 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -3,7 +3,8 @@ import { describe, it } from 'mocha'; import assert from 'assert' //@ts-ignore import { BloomFilter } from './helpers/sync' -import { create, loadDoc, SyncState, Datatype, Automerge, MAP, LIST, TEXT, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' +import { create, loadDoc, SyncState, Automerge, MAP, LIST, TEXT, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' +import { Datatype } from '..' import { DecodedSyncMessage } from '../index'; import { Hash } from '../dev/index'; @@ -60,7 +61,7 @@ describe('Automerge', () => { doc.free() }) - it('should be able to set and get a simple value', () => { + it.skip('should be able to set and get a simple value', () => { let doc : Automerge = create("aabbcc") let root = "_root" let result @@ -114,7 +115,7 @@ describe('Automerge', () => { doc.free() }) - it('should be able to use bytes', () => { + it.skip('should be able to use bytes', () => { let doc = create() doc.set("_root","data1", new Uint8Array([10,11,12])); doc.set("_root","data2", new Uint8Array([13,14,15]), Datatype.bytes); @@ -125,7 +126,7 @@ describe('Automerge', () => { doc.free() }) - it('should be able to make sub objects', () => { + it.skip('should be able to make sub objects', () => { let doc = create() let root = "_root" let result @@ -167,7 +168,7 @@ describe('Automerge', () => { doc.free() }) - it('lists have insert, set, splice, and push ops', () => { + it.skip('lists have 
insert, set, splice, and push ops', () => { let doc = create() let root = "_root" @@ -219,7 +220,7 @@ describe('Automerge', () => { doc.free() }) - it('should be able to use counters', () => { + it.skip('should be able to use counters', () => { let doc = create() let root = "_root" @@ -232,7 +233,7 @@ describe('Automerge', () => { doc.free() }) - it('should be able to splice text', () => { + it.skip('should be able to splice text', () => { let doc = create() let root = "_root"; @@ -303,7 +304,7 @@ describe('Automerge', () => { doc.free() }) - it('local inc increments all visible counters in a map', () => { + it.skip('local inc increments all visible counters in a map', () => { let doc1 = create("aaaa") doc1.set("_root", "hello", "world") let doc2 = loadDoc(doc1.save(), "bbbb"); @@ -335,7 +336,7 @@ describe('Automerge', () => { doc4.free() }) - it('local inc increments all visible counters in a sequence', () => { + it.skip('local inc increments all visible counters in a sequence', () => { let doc1 = create("aaaa") let seq = doc1.set("_root", "seq", LIST) if (!seq) throw new Error('Should not be undefined') @@ -369,7 +370,7 @@ describe('Automerge', () => { doc4.free() }) - it('only returns an object id when objects are created', () => { + it.skip('only returns an object id when objects are created', () => { let doc = create("aaaa") let r1 = doc.set("_root","foo","bar") let r2 = doc.set("_root","list",LIST) @@ -990,6 +991,7 @@ describe('Automerge', () => { let change = n3.getLastLocalChange() //@ts-ignore if (typeof Buffer === 'function') change = Buffer.from(change) + if (change === undefined) { throw new RangeError("last local change failed") } n2.applyChanges([change]) // Now sync n1 and n2. 
n3's change is concurrent to n1 and n2's last sync heads @@ -1406,7 +1408,9 @@ describe('Automerge', () => { n2.applyChanges(n1.getChanges([])) message = n1.generateSyncMessage(s1) + message = decodeSyncMessage(message) message.need = ['0000000000000000000000000000000000000000000000000000000000000000'] + message = encodeSyncMessage(message) n2.receiveSyncMessage(s2, message) message = n2.generateSyncMessage(s2) assert.strictEqual(message, null) @@ -1457,6 +1461,7 @@ describe('Automerge', () => { decodedMsg.changes = [change5, change6] msg = encodeSyncMessage(decodedMsg) const sentHashes: any = {} + sentHashes[decodeChange(change5).hash] = true sentHashes[decodeChange(change6).hash] = true s2.sentHashes = sentHashes diff --git a/automerge-wasm/tsconfig.json b/automerge-wasm/tsconfig.json index cf6f9fef..1dc480a4 100644 --- a/automerge-wasm/tsconfig.json +++ b/automerge-wasm/tsconfig.json @@ -6,7 +6,7 @@ "baseUrl": ".", "esModuleInterop": true, "lib": ["dom", "esnext.asynciterable", "es2017", "es2016", "es2015"], - "module": "esnext", + "module": "commonjs", "moduleResolution": "node", "paths": { "dev": ["*"]}, "rootDir": "", From 7b32faa23846a1c0c3f7c8058ec9b8b8a3c84762 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 31 Jan 2022 17:07:20 -0500 Subject: [PATCH 053/730] all ts tests passing --- automerge-wasm/index.d.ts | 54 ++++++++++++++---------------- automerge-wasm/test/test.ts | 67 +++++++++++++++++++------------------ 2 files changed, 61 insertions(+), 60 deletions(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index b41240eb..f7c50762 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -1,27 +1,27 @@ export type Actor = string; -export type ObjID = string; +export type ObjID = string; export type Change = Uint8Array; export type SyncMessage = Uint8Array; export type Prop = string | number; export type Hash = string; export type Heads = Hash[]; export type ObjectType = string; // opaque ?? 
-export type Value = string | number | boolean | Date | Uint8Array | ObjectType; -export type OutValue = - [Datatype.str, string] | - [Datatype.uint, number] | - [Datatype.f64, number] | - [Datatype.u64, number] | - [Datatype.f64, number] | - [Datatype.boolean, boolean] | - [Datatype.timestamp, Date] | - [Datatype.counter, number] | - [Datatype.bytes, Uint8Array] | - [ObjTypeName.list, ObjID] | - [ObjTypeName.map, ObjID] | - [ObjTypeName.text, ObjID] | - [ObjTypeName.table, ObjID] +export type Value = string | number | boolean | null | Date | Uint8Array | ObjectType; +export type OutValue = + ["str", string] | + ["int", number] | + ["uint", number] | + ["f64", number] | + ["boolean", boolean] | + ["timestamp", Date] | + ["counter", number] | + ["bytes", Uint8Array] | + ["null", Uint8Array] | + ["map", ObjID] | + ["list", ObjID] | + ["text", ObjID] | + ["table", ObjID] export type ROOT = "_root"; @@ -37,18 +37,16 @@ export enum ObjTypeName { text = "text", } -export enum Datatype { - boolean = "boolean", - str = "str", - i64 = "i64", - uint = "uint", - u64 = "u64", - f64 = "f64", - int = "int", - timestamp = "timestamp", - counter = "counter", - bytes = "bytes", -} +export type Datatype = + "boolean" | + "str" | + "int" | + "uint" | + "f64" | + "null" | + "timestamp" | + "counter" | + "bytes"; export type DecodedSyncMessage = { heads: Heads, diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index f53851e3..b185cfcd 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -3,8 +3,7 @@ import { describe, it } from 'mocha'; import assert from 'assert' //@ts-ignore import { BloomFilter } from './helpers/sync' -import { create, loadDoc, SyncState, Automerge, MAP, LIST, TEXT, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' -import { Datatype } from '..' 
+import { create, loadDoc, SyncState, Automerge, MAP, LIST, TEXT, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '../dev/index' import { DecodedSyncMessage } from '../index'; import { Hash } from '../dev/index'; @@ -61,21 +60,22 @@ describe('Automerge', () => { doc.free() }) - it.skip('should be able to set and get a simple value', () => { + it('should be able to set and get a simple value', () => { let doc : Automerge = create("aabbcc") let root = "_root" let result doc.set(root, "hello", "world") - doc.set(root, "number1", 5, Datatype.uint) + doc.set(root, "number1", 5, "uint") doc.set(root, "number2", 5) doc.set(root, "number3", 5.5) - doc.set(root, "number4", 5.5, Datatype.f64) - doc.set(root, "number5", 5.5, Datatype.int) + doc.set(root, "number4", 5.5, "f64") + doc.set(root, "number5", 5.5, "int") doc.set(root, "bool", true) - doc.set(root, "time1", 1000, Datatype.timestamp) + doc.set(root, "time1", 1000, "timestamp") doc.set(root, "time2", new Date(1001)) doc.set(root, "list", LIST); + doc.set(root, "null", null) result = doc.value(root,"hello") assert.deepEqual(result,["str","world"]) @@ -84,21 +84,21 @@ describe('Automerge', () => { assert.deepEqual(result,["uint",5]) result = doc.value(root,"number2") - assert.deepEqual(result,[Datatype.int,5]) + assert.deepEqual(result,["int",5]) result = doc.value(root,"number3") - assert.deepEqual(result,[Datatype.f64,5.5]) + assert.deepEqual(result,["f64",5.5]) result = doc.value(root,"number4") - assert.deepEqual(result,[Datatype.f64,5.5]) + assert.deepEqual(result,["f64",5.5]) result = doc.value(root,"number5") - assert.deepEqual(result,[Datatype.int,5]) + assert.deepEqual(result,["int",5]) result = doc.value(root,"bool") assert.deepEqual(result,["boolean",true]) - doc.set(root, "bool", false, Datatype.boolean) + doc.set(root, "bool", false, "boolean") result = doc.value(root,"bool") assert.deepEqual(result,["boolean",false]) @@ -112,28 
+112,31 @@ describe('Automerge', () => { result = doc.value(root,"list") assert.deepEqual(result,["list","10@aabbcc"]); + result = doc.value(root,"null") + assert.deepEqual(result,["null",null]); + doc.free() }) - it.skip('should be able to use bytes', () => { + it('should be able to use bytes', () => { let doc = create() doc.set("_root","data1", new Uint8Array([10,11,12])); - doc.set("_root","data2", new Uint8Array([13,14,15]), Datatype.bytes); + doc.set("_root","data2", new Uint8Array([13,14,15]), "bytes"); let value1 = doc.value("_root", "data1") - assert.deepEqual(value1, [Datatype.bytes, new Uint8Array([10,11,12])]); + assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]); let value2 = doc.value("_root", "data2") - assert.deepEqual(value2, [Datatype.bytes, new Uint8Array([13,14,15])]); + assert.deepEqual(value2, ["bytes", new Uint8Array([13,14,15])]); doc.free() }) - it.skip('should be able to make sub objects', () => { + it('should be able to make sub objects', () => { let doc = create() let root = "_root" let result let submap = doc.set(root, "submap", MAP) if (!submap) throw new Error('should be not null') - doc.set(submap, "number", 6, Datatype.uint) + doc.set(submap, "number", 6, "uint") assert.strictEqual(doc.pendingOps(),2) result = doc.value(root,"submap") @@ -168,7 +171,7 @@ describe('Automerge', () => { doc.free() }) - it.skip('lists have insert, set, splice, and push ops', () => { + it('lists have insert, set, splice, and push ops', () => { let doc = create() let root = "_root" @@ -179,7 +182,7 @@ describe('Automerge', () => { assert.deepEqual(doc.toJS(), { letters: ["b", "a" ] }) doc.push(submap, "c"); assert.deepEqual(doc.toJS(), { letters: ["b", "a", "c" ] }) - doc.push(submap, 3, Datatype.timestamp); + doc.push(submap, 3, "timestamp"); assert.deepEqual(doc.toJS(), { letters: ["b", "a", "c", new Date(3) ] }) doc.splice(submap, 1, 1, ["d","e","f"]); assert.deepEqual(doc.toJS(), { letters: ["b", "d", "e", "f", "c", new Date(3) ] }) @@ 
-220,11 +223,11 @@ describe('Automerge', () => { doc.free() }) - it.skip('should be able to use counters', () => { + it('should be able to use counters', () => { let doc = create() let root = "_root" - doc.set(root, "counter", 10, Datatype.counter); + doc.set(root, "counter", 10, "counter"); assert.deepEqual(doc.value(root, "counter"),["counter",10]) doc.inc(root, "counter", 10); assert.deepEqual(doc.value(root, "counter"),["counter",20]) @@ -233,7 +236,7 @@ describe('Automerge', () => { doc.free() }) - it.skip('should be able to splice text', () => { + it('should be able to splice text', () => { let doc = create() let root = "_root"; @@ -241,7 +244,7 @@ describe('Automerge', () => { if (!text) throw new Error('should not be undefined') doc.splice(text, 0, 0, "hello ") doc.splice(text, 6, 0, ["w","o","r","l","d"]) - doc.splice(text, 11, 0, [[Datatype.str,"!"],[Datatype.str,"?"]]) + doc.splice(text, 11, 0, [["str","!"],["str","?"]]) assert.deepEqual(doc.value(text, 0),["str","h"]) assert.deepEqual(doc.value(text, 1),["str","e"]) assert.deepEqual(doc.value(text, 9),["str","l"]) @@ -304,14 +307,14 @@ describe('Automerge', () => { doc.free() }) - it.skip('local inc increments all visible counters in a map', () => { + it('local inc increments all visible counters in a map', () => { let doc1 = create("aaaa") doc1.set("_root", "hello", "world") let doc2 = loadDoc(doc1.save(), "bbbb"); let doc3 = loadDoc(doc1.save(), "cccc"); doc1.set("_root", "cnt", 20) - doc2.set("_root", "cnt", 0, Datatype.counter) - doc3.set("_root", "cnt", 10, Datatype.counter) + doc2.set("_root", "cnt", 0, "counter") + doc3.set("_root", "cnt", 10, "counter") doc1.applyChanges(doc2.getChanges(doc1.getHeads())) doc1.applyChanges(doc3.getChanges(doc1.getHeads())) let result = doc1.values("_root", "cnt") @@ -336,7 +339,7 @@ describe('Automerge', () => { doc4.free() }) - it.skip('local inc increments all visible counters in a sequence', () => { + it('local inc increments all visible counters in a 
sequence', () => { let doc1 = create("aaaa") let seq = doc1.set("_root", "seq", LIST) if (!seq) throw new Error('Should not be undefined') @@ -344,8 +347,8 @@ describe('Automerge', () => { let doc2 = loadDoc(doc1.save(), "bbbb"); let doc3 = loadDoc(doc1.save(), "cccc"); doc1.set(seq, 0, 20) - doc2.set(seq, 0, 0, Datatype.counter) - doc3.set(seq, 0, 10, Datatype.counter) + doc2.set(seq, 0, 0, "counter") + doc3.set(seq, 0, 10, "counter") doc1.applyChanges(doc2.getChanges(doc1.getHeads())) doc1.applyChanges(doc3.getChanges(doc1.getHeads())) let result = doc1.values(seq, 0) @@ -370,11 +373,11 @@ describe('Automerge', () => { doc4.free() }) - it.skip('only returns an object id when objects are created', () => { + it('only returns an object id when objects are created', () => { let doc = create("aaaa") let r1 = doc.set("_root","foo","bar") let r2 = doc.set("_root","list",LIST) - let r3 = doc.set("_root","counter",10, Datatype.counter) + let r3 = doc.set("_root","counter",10, "counter") let r4 = doc.inc("_root","counter",1) let r5 = doc.del("_root","counter") if (!r2) throw new Error('should not be undefined') From a9dec7aa0be2a20d8f9a365255cfd4f0d5f60095 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 31 Jan 2022 17:11:22 -0500 Subject: [PATCH 054/730] remove dead code --- automerge-wasm/index.d.ts | 4 ---- 1 file changed, 4 deletions(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index f7c50762..7232c906 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -23,8 +23,6 @@ export type OutValue = ["text", ObjID] | ["table", ObjID] -export type ROOT = "_root"; - export const LIST : ObjectType; export const MAP : ObjectType; export const TABLE : ObjectType; @@ -80,8 +78,6 @@ export function loadDoc(data: Uint8Array, actor?: Actor): Automerge; export function encodeChange(change: DecodedChange): Change; export function decodeChange(change: Change): DecodedChange; export function initSyncState(): SyncState; -export function 
importSyncState(state: any): SyncState; // FIXME -export function exportSyncState(state: SyncState): any; export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; export function encodeSyncState(state: SyncState): Uint8Array; From 836e6ba51031d951447fdbf519c42f441e9a0e34 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 31 Jan 2022 17:21:16 -0500 Subject: [PATCH 055/730] fix return types --- automerge-js/src/index.js | 6 +++++- automerge-wasm/index.d.ts | 8 ++++---- automerge-wasm/src/lib.rs | 32 +++++++++++--------------------- automerge-wasm/test/test.ts | 14 ++------------ automerge/src/automerge.rs | 1 + 5 files changed, 23 insertions(+), 38 deletions(-) diff --git a/automerge-js/src/index.js b/automerge-js/src/index.js index 3ffe9697..326fc967 100644 --- a/automerge-js/src/index.js +++ b/automerge-js/src/index.js @@ -177,7 +177,11 @@ function getConflicts(doc, prop) { function getLastLocalChange(doc) { const state = doc[STATE] - return state.getLastLocalChange() + try { + return state.getLastLocalChange() + } catch (e) { + return + } } function getObjectId(doc) { diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 7232c906..ef0c090b 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -8,7 +8,7 @@ export type Hash = string; export type Heads = Hash[]; export type ObjectType = string; // opaque ?? 
export type Value = string | number | boolean | null | Date | Uint8Array | ObjectType; -export type OutValue = +export type FullValue = ["str", string] | ["int", number] | ["uint", number] | @@ -87,15 +87,15 @@ export class Automerge { set(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): ObjID | undefined; insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): ObjID | undefined; push(obj: ObjID, value: Value, datatype?: Datatype): ObjID | undefined; - splice(obj: ObjID, start: number, delete_count: number, text: string | Value[] | OutValue[] ): ObjID[] | undefined; + splice(obj: ObjID, start: number, delete_count: number, text: string | Array): ObjID[] | undefined; inc(obj: ObjID, prop: Prop, value: number): void; del(obj: ObjID, prop: Prop): void; mark(obj: ObjID, name: string, range: string, value: Value, datatype?: Datatype): void; // returns a single value - if there is a conflict return the winner - value(obj: ObjID, prop: any, heads?: Heads): OutValue | null; + value(obj: ObjID, prop: any, heads?: Heads): FullValue | null; // return all values in case of a conflict - values(obj: ObjID, arg: any, heads?: Heads): OutValue[]; + values(obj: ObjID, arg: any, heads?: Heads): FullValue[]; keys(obj: ObjID, heads?: Heads): string[]; text(obj: ObjID, heads?: Heads): string; length(obj: ObjID, heads?: Heads): number; diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 8cc5891f..c5ec8ec4 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -184,20 +184,15 @@ impl Automerge { Ok(opid.map(|id| id.to_string())) } - pub fn make( - &mut self, - obj: String, - prop: JsValue, - value: JsValue, - ) -> Result { + pub fn make(&mut self, obj: String, prop: JsValue, value: JsValue) -> Result { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; let value = self.import_value(value, None)?; if value.is_object() { - let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; - 
Ok(opid.unwrap().to_string()) + let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; + Ok(opid.unwrap().to_string()) } else { - Err("invalid object type".into()) + Err("invalid object type".into()) } } @@ -217,7 +212,7 @@ impl Automerge { obj: String, prop: JsValue, heads: Option, - ) -> Result { + ) -> Result, JsValue> { let obj = self.import(obj)?; let result = Array::new(); let prop = to_prop(prop); @@ -233,23 +228,18 @@ impl Automerge { Some((Value::Object(obj_type), obj_id)) => { result.push(&obj_type.to_string().into()); result.push(&obj_id.to_string().into()); + Ok(Some(result)) } Some((Value::Scalar(value), _)) => { result.push(&datatype(&value).into()); result.push(&ScalarValue(value).into()); + Ok(Some(result)) } - None => {} + None => Ok(None) } - } - - Ok(result) -/* fixme - if result.len() == { - Ok(JsValue::null()) } else { - Ok(result) + Ok(None) } -*/ } pub fn values( @@ -348,7 +338,7 @@ impl Automerge { marks.push(&mark.into()); } let text_span = &text[last_pos..s.pos]; //.slice(last_pos, s.pos); - if text_span.len() > 0 { + if !text_span.is_empty() { result.push(&text_span.into()); } result.push(&marks); @@ -359,7 +349,7 @@ impl Automerge { //result.push(&obj.into()); } let text_span = &text[last_pos..]; - if text_span.len() > 0 { + if !text_span.is_empty() { result.push(&text_span.into()); } Ok(result.into()) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index b185cfcd..c5438e0c 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -7,16 +7,6 @@ import { create, loadDoc, SyncState, Automerge, MAP, LIST, TEXT, encodeChange, d import { DecodedSyncMessage } from '../index'; import { Hash } from '../dev/index'; -// str to uint8array -function en(str: string) { - //@ts-ignore - return new TextEncoder('utf8').encode(str) -} -// uint8array to str -function de(bytes: Uint8Array) { - return new TextDecoder('utf8').decode(bytes); -} - function sync(a: Automerge, b: Automerge, aSyncState = 
initSyncState(), bSyncState = initSyncState()) { const MAX_ITER = 10 let aToBmsg = null, bToAmsg = null, i = 0 @@ -56,7 +46,7 @@ describe('Automerge', () => { let doc = create() let root = "_root" let result = doc.value(root,"hello") - assert.deepEqual(result,[]) + assert.deepEqual(result,undefined) doc.free() }) @@ -219,7 +209,7 @@ describe('Automerge', () => { doc.set(root, "xxx", "xxx"); assert.deepEqual(doc.value(root, "xxx"),["str","xxx"]) doc.del(root, "xxx"); - assert.deepEqual(doc.value(root, "xxx"),[]) + assert.deepEqual(doc.value(root, "xxx"),undefined) doc.free() }) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 40e37320..b07a96d4 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -453,6 +453,7 @@ impl Automerge { Ok(query.spans) } + #[allow(clippy::too_many_arguments)] pub fn mark( &mut self, obj: &ExId, From 1269a8951e6e8cc07b06c9085542641837cd59d4 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 31 Jan 2022 17:24:17 -0500 Subject: [PATCH 056/730] use types in pkg --- automerge-wasm/package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index a1577790..e6682928 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -18,8 +18,8 @@ "main": "./dev/index.js", "scripts": { "build": "rimraf ./dev && wasm-pack build --target nodejs --dev --out-name index -d dev && cp index.d.ts dev", - "release": "rimraf ./dev && wasm-pack build --target nodejs --release --out-name index -d dev && yarn opt", - "pkg": "rimraf ./pkg && wasm-pack build --target web --release --out-name index -d pkg && cd pkg && yarn pack && mv automerge-wasm*tgz ..", + "release": "rimraf ./dev && wasm-pack build --target nodejs --release --out-name index -d dev && yarn opt && cp index.d.ts dev", + "pkg": "rimraf ./pkg && wasm-pack build --target web --release --out-name index -d pkg && cp index.d.ts dev && cd pkg && yarn pack && mv 
automerge-wasm*tgz ..", "prof": "rimraf ./dev && wasm-pack build --target nodejs --profiling --out-name index -d dev", "opt": "wasm-opt -Oz dev/index_bg.wasm -o tmp.wasm && mv tmp.wasm dev/index_bg.wasm", "test": "yarn build && ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" From 17e6a9a95555f2731f949f3775bae7a1ab4c9e92 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 31 Jan 2022 17:24:46 -0500 Subject: [PATCH 057/730] fixed fixed --- automerge-wasm/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index e6682928..3054d47b 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -19,7 +19,7 @@ "scripts": { "build": "rimraf ./dev && wasm-pack build --target nodejs --dev --out-name index -d dev && cp index.d.ts dev", "release": "rimraf ./dev && wasm-pack build --target nodejs --release --out-name index -d dev && yarn opt && cp index.d.ts dev", - "pkg": "rimraf ./pkg && wasm-pack build --target web --release --out-name index -d pkg && cp index.d.ts dev && cd pkg && yarn pack && mv automerge-wasm*tgz ..", + "pkg": "rimraf ./pkg && wasm-pack build --target web --release --out-name index -d pkg && cp index.d.ts pkg && cd pkg && yarn pack && mv automerge-wasm*tgz ..", "prof": "rimraf ./dev && wasm-pack build --target nodejs --profiling --out-name index -d dev", "opt": "wasm-opt -Oz dev/index_bg.wasm -o tmp.wasm && mv tmp.wasm dev/index_bg.wasm", "test": "yarn build && ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" From 5b9360155ccbe24ac82ff5daaf07ccea434b296b Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 31 Jan 2022 17:28:24 -0500 Subject: [PATCH 058/730] Remove make --- automerge-wasm/src/lib.rs | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index c5ec8ec4..aeb881bc 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ 
-184,18 +184,6 @@ impl Automerge { Ok(opid.map(|id| id.to_string())) } - pub fn make(&mut self, obj: String, prop: JsValue, value: JsValue) -> Result { - let obj = self.import(obj)?; - let prop = self.import_prop(prop)?; - let value = self.import_value(value, None)?; - if value.is_object() { - let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; - Ok(opid.unwrap().to_string()) - } else { - Err("invalid object type".into()) - } - } - pub fn inc(&mut self, obj: String, prop: JsValue, value: JsValue) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; From e88f673d6344a04dc027245e0c41fb623bf4f23d Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 31 Jan 2022 17:43:56 -0500 Subject: [PATCH 059/730] Revert "Remove make" This reverts commit 5b9360155ccbe24ac82ff5daaf07ccea434b296b. --- automerge-wasm/src/lib.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index aeb881bc..c5ec8ec4 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -184,6 +184,18 @@ impl Automerge { Ok(opid.map(|id| id.to_string())) } + pub fn make(&mut self, obj: String, prop: JsValue, value: JsValue) -> Result { + let obj = self.import(obj)?; + let prop = self.import_prop(prop)?; + let value = self.import_value(value, None)?; + if value.is_object() { + let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; + Ok(opid.unwrap().to_string()) + } else { + Err("invalid object type".into()) + } + } + pub fn inc(&mut self, obj: String, prop: JsValue, value: JsValue) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; From da73607c98e277544e89b064926559e8d40156e0 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 31 Jan 2022 17:45:07 -0500 Subject: [PATCH 060/730] adding make --- automerge-wasm/index.d.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 
ef0c090b..0c0d88d7 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -85,6 +85,7 @@ export function decodeSyncState(data: Uint8Array): SyncState; export class Automerge { set(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): ObjID | undefined; + make(obj: ObjID, prop: Prop, value: ObjectType): ObjID; insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): ObjID | undefined; push(obj: ObjID, value: Value, datatype?: Datatype): ObjID | undefined; splice(obj: ObjID, start: number, delete_count: number, text: string | Array): ObjID[] | undefined; From ee80837feb07254dfe042238f2812cef0059f8dc Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 2 Feb 2022 15:55:41 -0500 Subject: [PATCH 061/730] raw_spans experiment --- automerge-wasm/index.d.ts | 1 + automerge-wasm/src/lib.rs | 10 +++++ automerge-wasm/test/test.ts | 9 ++++ automerge/src/automerge.rs | 6 +++ automerge/src/query.rs | 2 + automerge/src/query/raw_spans.rs | 72 ++++++++++++++++++++++++++++++++ 6 files changed, 100 insertions(+) create mode 100644 automerge/src/query/raw_spans.rs diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 0c0d88d7..f3ca99c6 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -101,6 +101,7 @@ export class Automerge { text(obj: ObjID, heads?: Heads): string; length(obj: ObjID, heads?: Heads): number; spans(obj: ObjID): any; + raw_spans(obj: ObjID): any; commit(message?: string, time?: number): Heads; getActorId(): Actor; diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index c5ec8ec4..1bff796f 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -355,6 +355,16 @@ impl Automerge { Ok(result.into()) } + pub fn raw_spans(&mut self, obj: String) -> Result { + let obj = self.import(obj)?; + let spans = self.0.raw_spans(&obj).map_err(to_js_err)?; + let result = Array::new(); + for s in spans { + result.push(&JsValue::from_serde(&s).map_err(to_js_err)?); + } + 
Ok(result) + } + pub fn save(&mut self) -> Result { self.0 .save() diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index c5438e0c..57ded838 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -515,6 +515,15 @@ describe('Automerge', () => { [], ] ) + let text = doc.text(list); + assert.deepStrictEqual(text, "the quick fox jumps over the lazy dog"); + let raw_spans = doc.raw_spans(list); + assert.deepStrictEqual(raw_spans, + [ + { start: 0, end: 37, name: 'bold', value: true }, + { start: 4, end: 19, name: 'itallic', value: true }, + { start: 10, end: 13, name: 'comment', value: 'foxes are my favorite animal!' } + ]); // mark sure encode decode can handle marks diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index b07a96d4..20208ef3 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -453,6 +453,12 @@ impl Automerge { Ok(query.spans) } + pub fn raw_spans(&self, obj: &ExId) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj)?; + let query = self.ops.search(obj, query::RawSpans::new()); + Ok(query.spans) + } + #[allow(clippy::too_many_arguments)] pub fn mark( &mut self, diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 7bd50158..632a60a7 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -18,6 +18,7 @@ mod prop; mod prop_at; mod seek_op; mod spans; +mod raw_spans; pub(crate) use insert::InsertNth; pub(crate) use keys::Keys; @@ -32,6 +33,7 @@ pub(crate) use prop::Prop; pub(crate) use prop_at::PropAt; pub(crate) use seek_op::SeekOp; pub(crate) use spans::{Span, Spans}; +pub(crate) use raw_spans::{RawSpan, RawSpans}; #[derive(Debug, Clone, PartialEq)] pub(crate) struct CounterData { diff --git a/automerge/src/query/raw_spans.rs b/automerge/src/query/raw_spans.rs new file mode 100644 index 00000000..bf182015 --- /dev/null +++ b/automerge/src/query/raw_spans.rs @@ -0,0 +1,72 @@ +use crate::query::{OpSetMetadata, QueryResult, TreeQuery}; 
+use crate::types::{ElemId, Op, OpId, OpType, ScalarValue}; +use std::fmt::Debug; +use serde::{ Serialize }; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct RawSpans { + pos: usize, + seen: usize, + last_seen: Option, + last_insert: Option, + changed: bool, + pub spans: Vec, +} + +#[derive(Serialize, Debug, Clone, PartialEq)] +pub struct RawSpan { + #[serde(skip)] + id: OpId, + pub start: usize, + pub end: usize, + pub name: String, + pub value: ScalarValue, +} + +impl RawSpans { + pub fn new() -> Self { + RawSpans { + pos: 0, + seen: 0, + last_seen: None, + last_insert: None, + changed: false, + spans: Vec::new(), + } + } +} + +impl TreeQuery for RawSpans { + + fn query_element_with_metadata(&mut self, element: &Op, m: &OpSetMetadata) -> QueryResult { + // find location to insert + // mark or set + if element.succ.is_empty() { + if let OpType::MarkBegin(md) = &element.action { + let pos = self + .spans + .binary_search_by(|probe| m.lamport_cmp(probe.id, element.id)) + .unwrap_err(); + self.spans.insert(pos, RawSpan { id: element.id, start: self.seen, end: 0, name: md.name.clone(), value: md.value.clone() }); + } + if let OpType::MarkEnd(_) = &element.action { + for s in self.spans.iter_mut() { + if s.id == element.id.prev() { + s.end = self.seen; + break; + } + } + } + } + if element.insert { + self.last_seen = None; + self.last_insert = element.elemid(); + } + if self.last_seen.is_none() && element.visible() { + self.seen += 1; + self.last_seen = element.elemid(); + } + self.pos += 1; + QueryResult::Next + } +} From 1d0c54ca9a50aecd1d0d9fd162dfb85f3d3a9114 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 2 Feb 2022 16:21:33 -0500 Subject: [PATCH 062/730] raw_spans with ids --- automerge-wasm/test/test.ts | 9 +++++---- automerge/src/automerge.rs | 24 +++++++++++++++++++++--- automerge/src/exid.rs | 11 +++++++++++ automerge/src/legacy/serde_impls/opid.rs | 1 + automerge/src/query.rs | 2 +- automerge/src/query/raw_spans.rs | 11 +++++------ 6 files 
changed, 44 insertions(+), 14 deletions(-) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 57ded838..6611c8df 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -488,13 +488,14 @@ describe('Automerge', () => { }) it('should handle overlapping marks', () => { - let doc : Automerge = create() + let doc : Automerge = create("aabbcc") let list = doc.set("_root", "list", TEXT) if (!list) throw new Error('should not be undefined') doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog") doc.mark(list, "[0..37]", "bold" , true) doc.mark(list, "[4..19]", "itallic" , true) doc.mark(list, "[10..13]", "comment" , "foxes are my favorite animal!") + doc.commit("marks",999); let spans = doc.spans(list); assert.deepStrictEqual(spans, [ @@ -520,9 +521,9 @@ describe('Automerge', () => { let raw_spans = doc.raw_spans(list); assert.deepStrictEqual(raw_spans, [ - { start: 0, end: 37, name: 'bold', value: true }, - { start: 4, end: 19, name: 'itallic', value: true }, - { start: 10, end: 13, name: 'comment', value: 'foxes are my favorite animal!' } + { id: "39@aabbcc", time: 999, start: 0, end: 37, name: 'bold', value: true }, + { id: "41@aabbcc", time: 999, start: 4, end: 19, name: 'itallic', value: true }, + { id: "43@aabbcc", time: 999, start: 10, end: 13, name: 'comment', value: 'foxes are my favorite animal!' 
} ]); // mark sure encode decode can handle marks diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 20208ef3..91ebd596 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1,6 +1,6 @@ use std::collections::{HashMap, HashSet, VecDeque}; use unicode_segmentation::UnicodeSegmentation; - +use serde::Serialize; use crate::change::{encode_document, export_change}; use crate::exid::ExId; use crate::op_set::OpSet; @@ -453,10 +453,18 @@ impl Automerge { Ok(query.spans) } - pub fn raw_spans(&self, obj: &ExId) -> Result, AutomergeError> { + pub fn raw_spans(&self, obj: &ExId) -> Result, AutomergeError> { let obj = self.exid_to_obj(obj)?; let query = self.ops.search(obj, query::RawSpans::new()); - Ok(query.spans) + let result = query.spans.into_iter().map(|s| SpanInfo { + id: self.id_to_exid(s.id), + time: self.history[s.change].time, + start: s.start, + end: s.end, + name: s.name, + value: s.value, + }).collect(); + Ok(result) } #[allow(clippy::too_many_arguments)] @@ -1209,6 +1217,16 @@ impl Default for Automerge { } } +#[derive(Serialize, Debug, Clone, PartialEq)] +pub struct SpanInfo { + pub id: ExId, + pub time: i64, + pub start: usize, + pub end: usize, + pub name: String, + pub value: ScalarValue, +} + #[cfg(test)] mod tests { use super::*; diff --git a/automerge/src/exid.rs b/automerge/src/exid.rs index 7d00e953..e9ed6cf1 100644 --- a/automerge/src/exid.rs +++ b/automerge/src/exid.rs @@ -2,6 +2,8 @@ use crate::ActorId; use std::cmp::{Ord, Ordering}; use std::fmt; use std::hash::{Hash, Hasher}; +use serde::Serialize; +use serde::Serializer; #[derive(Debug, Clone)] pub enum ExId { @@ -63,3 +65,12 @@ impl PartialOrd for ExId { Some(self.cmp(other)) } } + +impl Serialize for ExId { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(self.to_string().as_str()) + } +} diff --git a/automerge/src/legacy/serde_impls/opid.rs b/automerge/src/legacy/serde_impls/opid.rs index 
06792cd4..75247157 100644 --- a/automerge/src/legacy/serde_impls/opid.rs +++ b/automerge/src/legacy/serde_impls/opid.rs @@ -23,3 +23,4 @@ impl Serialize for OpId { serializer.serialize_str(self.to_string().as_str()) } } + diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 632a60a7..ff97532e 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -33,7 +33,7 @@ pub(crate) use prop::Prop; pub(crate) use prop_at::PropAt; pub(crate) use seek_op::SeekOp; pub(crate) use spans::{Span, Spans}; -pub(crate) use raw_spans::{RawSpan, RawSpans}; +pub(crate) use raw_spans::RawSpans; #[derive(Debug, Clone, PartialEq)] pub(crate) struct CounterData { diff --git a/automerge/src/query/raw_spans.rs b/automerge/src/query/raw_spans.rs index bf182015..77a45741 100644 --- a/automerge/src/query/raw_spans.rs +++ b/automerge/src/query/raw_spans.rs @@ -1,7 +1,6 @@ use crate::query::{OpSetMetadata, QueryResult, TreeQuery}; use crate::types::{ElemId, Op, OpId, OpType, ScalarValue}; use std::fmt::Debug; -use serde::{ Serialize }; #[derive(Debug, Clone, PartialEq)] pub(crate) struct RawSpans { @@ -13,10 +12,10 @@ pub(crate) struct RawSpans { pub spans: Vec, } -#[derive(Serialize, Debug, Clone, PartialEq)] -pub struct RawSpan { - #[serde(skip)] - id: OpId, +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct RawSpan { + pub id: OpId, + pub change: usize, pub start: usize, pub end: usize, pub name: String, @@ -47,7 +46,7 @@ impl TreeQuery for RawSpans { .spans .binary_search_by(|probe| m.lamport_cmp(probe.id, element.id)) .unwrap_err(); - self.spans.insert(pos, RawSpan { id: element.id, start: self.seen, end: 0, name: md.name.clone(), value: md.value.clone() }); + self.spans.insert(pos, RawSpan { id: element.id, change: element.change, start: self.seen, end: 0, name: md.name.clone(), value: md.value.clone() }); } if let OpType::MarkEnd(_) = &element.action { for s in self.spans.iter_mut() { From 0f49608dde5df576a49fe3c8f51e19288c7c8aaa Mon Sep 17 00:00:00 2001 From: 
Orion Henry Date: Wed, 2 Feb 2022 16:29:23 -0500 Subject: [PATCH 063/730] spans have types not names --- automerge-wasm/test/test.ts | 6 +++--- automerge/src/automerge.rs | 5 +++-- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 6611c8df..65f263c9 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -521,9 +521,9 @@ describe('Automerge', () => { let raw_spans = doc.raw_spans(list); assert.deepStrictEqual(raw_spans, [ - { id: "39@aabbcc", time: 999, start: 0, end: 37, name: 'bold', value: true }, - { id: "41@aabbcc", time: 999, start: 4, end: 19, name: 'itallic', value: true }, - { id: "43@aabbcc", time: 999, start: 10, end: 13, name: 'comment', value: 'foxes are my favorite animal!' } + { id: "39@aabbcc", time: 999, start: 0, end: 37, type: 'bold', value: true }, + { id: "41@aabbcc", time: 999, start: 4, end: 19, type: 'itallic', value: true }, + { id: "43@aabbcc", time: 999, start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' 
} ]); // mark sure encode decode can handle marks diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 91ebd596..688969a7 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -461,7 +461,7 @@ impl Automerge { time: self.history[s.change].time, start: s.start, end: s.end, - name: s.name, + span_type: s.name, value: s.value, }).collect(); Ok(result) @@ -1223,7 +1223,8 @@ pub struct SpanInfo { pub time: i64, pub start: usize, pub end: usize, - pub name: String, + #[serde(rename = "type")] + pub span_type: String, pub value: ScalarValue, } From 2019943849acee323a6dd6e93fe5d6ff9c2c5048 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 3 Feb 2022 14:38:21 -0500 Subject: [PATCH 064/730] bump edition from 2018 to 2021 --- automerge-wasm/Cargo.toml | 2 +- automerge/Cargo.toml | 2 +- edit-trace/Cargo.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/automerge-wasm/Cargo.toml b/automerge-wasm/Cargo.toml index 995bfbb2..7b961a66 100644 --- a/automerge-wasm/Cargo.toml +++ b/automerge-wasm/Cargo.toml @@ -7,7 +7,7 @@ version = "0.1.0" authors = ["Alex Good ","Orion Henry ", "Martin Kleppmann"] categories = ["wasm"] readme = "README.md" -edition = "2018" +edition = "2021" license = "MIT" [lib] diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index 2212cb02..8a47f73f 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "automerge" version = "0.1.0" -edition = "2018" +edition = "2021" license = "MIT" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/edit-trace/Cargo.toml b/edit-trace/Cargo.toml index 68d47433..7514e626 100644 --- a/edit-trace/Cargo.toml +++ b/edit-trace/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "edit-trace" version = "0.1.0" -edition = "2018" +edition = "2021" license = "MIT" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From 
bf184fe9808735fc80664d5c2515da8694331a15 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 3 Feb 2022 14:43:02 -0500 Subject: [PATCH 065/730] remove some un needed imports --- automerge-wasm/src/interop.rs | 2 -- automerge/src/decoding.rs | 2 +- automerge/src/legacy/mod.rs | 1 - automerge/src/legacy/utility_impls/element_id.rs | 1 - automerge/src/legacy/utility_impls/object_id.rs | 1 - automerge/src/legacy/utility_impls/opid.rs | 1 - automerge/src/sync.rs | 1 - automerge/src/sync/bloom.rs | 2 +- automerge/src/types.rs | 2 -- automerge/src/value.rs | 1 - 10 files changed, 2 insertions(+), 12 deletions(-) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index a4c7fbe9..887326a9 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -2,8 +2,6 @@ use automerge as am; use automerge::{Change, ChangeHash, Prop}; use js_sys::{Array, Object, Reflect, Uint8Array}; use std::collections::HashSet; -use std::convert::TryFrom; -use std::convert::TryInto; use std::fmt::Display; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; diff --git a/automerge/src/decoding.rs b/automerge/src/decoding.rs index 3ca8c1a8..a7f2a357 100644 --- a/automerge/src/decoding.rs +++ b/automerge/src/decoding.rs @@ -1,5 +1,5 @@ use core::fmt::Debug; -use std::{borrow::Cow, convert::TryFrom, io, io::Read, str}; +use std::{borrow::Cow, io, io::Read, str}; use crate::error; use crate::legacy as amp; diff --git a/automerge/src/legacy/mod.rs b/automerge/src/legacy/mod.rs index 91e07298..7f5e7dfd 100644 --- a/automerge/src/legacy/mod.rs +++ b/automerge/src/legacy/mod.rs @@ -1,6 +1,5 @@ mod serde_impls; mod utility_impls; -use std::iter::FromIterator; pub(crate) use crate::types::{ActorId, ChangeHash, ObjType, OpType, ScalarValue}; pub(crate) use crate::value::DataType; diff --git a/automerge/src/legacy/utility_impls/element_id.rs b/automerge/src/legacy/utility_impls/element_id.rs index dd6238af..623234f5 100644 --- 
a/automerge/src/legacy/utility_impls/element_id.rs +++ b/automerge/src/legacy/utility_impls/element_id.rs @@ -1,6 +1,5 @@ use std::{ cmp::{Ordering, PartialOrd}, - convert::TryFrom, str::FromStr, }; diff --git a/automerge/src/legacy/utility_impls/object_id.rs b/automerge/src/legacy/utility_impls/object_id.rs index 53b2d44b..c4f20f8c 100644 --- a/automerge/src/legacy/utility_impls/object_id.rs +++ b/automerge/src/legacy/utility_impls/object_id.rs @@ -1,6 +1,5 @@ use std::{ cmp::{Ordering, PartialOrd}, - convert::TryFrom, fmt, str::FromStr, }; diff --git a/automerge/src/legacy/utility_impls/opid.rs b/automerge/src/legacy/utility_impls/opid.rs index f85bb9ee..aabdc67c 100644 --- a/automerge/src/legacy/utility_impls/opid.rs +++ b/automerge/src/legacy/utility_impls/opid.rs @@ -1,7 +1,6 @@ use core::fmt; use std::{ cmp::{Ordering, PartialOrd}, - convert::TryFrom, str::FromStr, }; diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 2a398959..a8b0c3eb 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -2,7 +2,6 @@ use itertools::Itertools; use std::{ borrow::Cow, collections::{HashMap, HashSet}, - convert::TryFrom, io, io::Write, }; diff --git a/automerge/src/sync/bloom.rs b/automerge/src/sync/bloom.rs index 65463fbb..5145848e 100644 --- a/automerge/src/sync/bloom.rs +++ b/automerge/src/sync/bloom.rs @@ -1,4 +1,4 @@ -use std::{borrow::Cow, convert::TryFrom}; +use std::borrow::Cow; use crate::{decoding, decoding::Decoder, encoding, encoding::Encodable, ChangeHash}; diff --git a/automerge/src/types.rs b/automerge/src/types.rs index d00334f8..99693105 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -2,8 +2,6 @@ use crate::error; use crate::legacy as amp; use serde::{Deserialize, Serialize}; use std::cmp::Eq; -use std::convert::TryFrom; -use std::convert::TryInto; use std::fmt; use std::str::FromStr; use tinyvec::{ArrayVec, TinyVec}; diff --git a/automerge/src/value.rs b/automerge/src/value.rs index 5f54d825..b462a6b2 100644 --- 
a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -2,7 +2,6 @@ use crate::error; use crate::types::{ObjType, Op, OpId, OpType}; use serde::{Deserialize, Serialize, Serializer}; use smol_str::SmolStr; -use std::convert::TryFrom; use std::fmt; #[derive(Debug, Clone, PartialEq)] From 7607ebbfcc9f9899143bd9c536c425242888b6e7 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 4 Feb 2022 11:37:33 +0000 Subject: [PATCH 066/730] Add from () for Value --- automerge/src/value.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/automerge/src/value.rs b/automerge/src/value.rs index b462a6b2..81111cd5 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -99,6 +99,12 @@ impl From for Value { } } +impl From<()> for Value { + fn from(_: ()) -> Self { + Value::Scalar(ScalarValue::Null) + } +} + impl From for Value { fn from(o: ObjType) -> Self { Value::Object(o) From df435b671f99618f006d8cff26017c121e787ba1 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 4 Feb 2022 16:56:38 +0000 Subject: [PATCH 067/730] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file changes: • Updated input 'flake-utils': 'github:numtide/flake-utils/2ebf2558e5bf978c7fb8ea927dfaed8fefab2e28' (2021-04-25) → 'github:numtide/flake-utils/846b2ae0fc4cc943637d3d1def4454213e203cba' (2022-01-20) • Updated input 'nixpkgs': 'github:nixos/nixpkgs/63586475587d7e0e078291ad4b49b6f6a6885100' (2021-05-06) → 'github:nixos/nixpkgs/554d2d8aa25b6e583575459c297ec23750adb6cb' (2022-02-02) • Updated input 'rust-overlay': 'github:oxalica/rust-overlay/d8efe70dc561c4bea0b7bf440d36ce98c497e054' (2021-05-07) → 'github:oxalica/rust-overlay/674156c4c2f46dd6a6846466cb8f9fee84c211ca' (2022-02-04) • Updated input 'rust-overlay/flake-utils': 'github:numtide/flake-utils/5466c5bbece17adaab2d82fae80b46e807611bf3' (2021-02-28) → 'github:numtide/flake-utils/bba5dcc8e0b20ab664967ad83d24d64cb64ec4f4' (2021-11-15) • Updated input 
'rust-overlay/nixpkgs': 'github:nixos/nixpkgs/54c1e44240d8a527a8f4892608c4bce5440c3ecb' (2021-04-02) → 'github:NixOS/nixpkgs/8afc4e543663ca0a6a4f496262cd05233737e732' (2021-11-21) --- flake.lock | 33 +++++++++++++++++---------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/flake.lock b/flake.lock index 401f1fd6..b2070c2d 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "flake-utils": { "locked": { - "lastModified": 1619345332, - "narHash": "sha256-qHnQkEp1uklKTpx3MvKtY6xzgcqXDsz5nLilbbuL+3A=", + "lastModified": 1642700792, + "narHash": "sha256-XqHrk7hFb+zBvRg6Ghl+AZDq03ov6OshJLiSWOoX5es=", "owner": "numtide", "repo": "flake-utils", - "rev": "2ebf2558e5bf978c7fb8ea927dfaed8fefab2e28", + "rev": "846b2ae0fc4cc943637d3d1def4454213e203cba", "type": "github" }, "original": { @@ -17,11 +17,11 @@ }, "flake-utils_2": { "locked": { - "lastModified": 1614513358, - "narHash": "sha256-LakhOx3S1dRjnh0b5Dg3mbZyH0ToC9I8Y2wKSkBaTzU=", + "lastModified": 1637014545, + "narHash": "sha256-26IZAc5yzlD9FlDT54io1oqG/bBoyka+FJk5guaX4x4=", "owner": "numtide", "repo": "flake-utils", - "rev": "5466c5bbece17adaab2d82fae80b46e807611bf3", + "rev": "bba5dcc8e0b20ab664967ad83d24d64cb64ec4f4", "type": "github" }, "original": { @@ -32,11 +32,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1620340338, - "narHash": "sha256-Op/4K0+Z9Sp5jtFH0s/zMM4H7VFZxrekcAmjQ6JpQ4w=", + "lastModified": 1643805626, + "narHash": "sha256-AXLDVMG+UaAGsGSpOtQHPIKB+IZ0KSd9WS77aanGzgc=", "owner": "nixos", "repo": "nixpkgs", - "rev": "63586475587d7e0e078291ad4b49b6f6a6885100", + "rev": "554d2d8aa25b6e583575459c297ec23750adb6cb", "type": "github" }, "original": { @@ -48,15 +48,16 @@ }, "nixpkgs_2": { "locked": { - "lastModified": 1617325113, - "narHash": "sha256-GksR0nvGxfZ79T91UUtWjjccxazv6Yh/MvEJ82v1Xmw=", - "owner": "nixos", + "lastModified": 1637453606, + "narHash": "sha256-Gy6cwUswft9xqsjWxFYEnx/63/qzaFUwatcbV5GF/GQ=", + "owner": "NixOS", "repo": "nixpkgs", - "rev": 
"54c1e44240d8a527a8f4892608c4bce5440c3ecb", + "rev": "8afc4e543663ca0a6a4f496262cd05233737e732", "type": "github" }, "original": { "owner": "NixOS", + "ref": "nixpkgs-unstable", "repo": "nixpkgs", "type": "github" } @@ -74,11 +75,11 @@ "nixpkgs": "nixpkgs_2" }, "locked": { - "lastModified": 1620355527, - "narHash": "sha256-mUTnUODiAtxH83gbv7uuvCbqZ/BNkYYk/wa3MkwrskE=", + "lastModified": 1643941258, + "narHash": "sha256-uHyEuICSu8qQp6adPTqV33ajiwoF0sCh+Iazaz5r7fo=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "d8efe70dc561c4bea0b7bf440d36ce98c497e054", + "rev": "674156c4c2f46dd6a6846466cb8f9fee84c211ca", "type": "github" }, "original": { From 70c5fea96882bae88fb3647e9a2d3ea773a3f1af Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 4 Feb 2022 16:58:48 +0000 Subject: [PATCH 068/730] Change rust flake to use default profile --- flake.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index 7fe145d6..ea17d00b 100644 --- a/flake.nix +++ b/flake.nix @@ -19,7 +19,7 @@ inherit system; }; lib = pkgs.lib; - rust = pkgs.rust-bin.stable.latest.rust; + rust = pkgs.rust-bin.stable.latest.default; cargoNix = pkgs.callPackage ./Cargo.nix { inherit pkgs; release = true; From c54aab66c49d8b5445fda83032d7e080d491c960 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 4 Feb 2022 14:43:22 -0500 Subject: [PATCH 069/730] better error on invalid value --- automerge-wasm/src/interop.rs | 6 +++- automerge-wasm/src/lib.rs | 67 +++-------------------------------- 2 files changed, 10 insertions(+), 63 deletions(-) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index f8420aa6..88f313c3 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -245,6 +245,10 @@ pub(crate) fn js_get>(obj: J, prop: &str) -> Result String { + js_sys::JSON::stringify(val).map(|j| j.into()).unwrap_or("JSON::stringify_eror".into()) +} + pub(crate) fn js_set>(obj: &JsValue, prop: &str, val: V) -> Result { 
Reflect::set(obj, &prop.into(), &val.into()) } @@ -255,7 +259,7 @@ pub(crate) fn to_prop(p: JsValue) -> Result { } else if let Some(n) = p.as_f64() { Ok(Prop::Seq(n as usize)) } else { - Err("prop must me a string or number".into()) + Err(to_js_err("prop must me a string or number")) } } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 1bff796f..497949b9 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -11,7 +11,7 @@ mod interop; mod sync; mod value; -use interop::{get_heads, js_get, js_set, map_to_js, to_js_err, to_objtype, to_prop, AR, JS}; +use interop::{get_heads, js_get, js_set, map_to_js, to_js_err, to_objtype, to_prop, AR, JS, stringify}; use sync::SyncState; use value::{datatype, ScalarValue}; @@ -192,7 +192,7 @@ impl Automerge { let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; Ok(opid.unwrap().to_string()) } else { - Err("invalid object type".into()) + Err(to_js_err("invalid object type")) } } @@ -491,7 +491,7 @@ impl Automerge { } else if let Some(n) = prop.as_f64() { Ok((n as usize).into()) } else { - Err(format!("invalid prop {:?}", prop).into()) + Err(to_js_err(format!("invalid prop {:?}", prop))) } } @@ -533,7 +533,7 @@ impl Automerge { Some("cursor") => unimplemented!(), */ Some("null") => Ok(am::ScalarValue::Null), - Some(_) => Err(format!("unknown datatype {:?}", datatype).into()), + Some(_) => Err(to_js_err(format!("unknown datatype {:?}", datatype))), None => { if value.is_null() { Ok(am::ScalarValue::Null) @@ -555,7 +555,7 @@ impl Automerge { } else if let Ok(o) = &value.clone().dyn_into::() { Ok(am::ScalarValue::Bytes(o.to_vec())) } else { - Err("value is invalid".into()) + Err(to_js_err(format!("value '{}' is invalid", stringify(value) ))) } } } @@ -572,63 +572,6 @@ impl Automerge { } } } - /* - match datatype.as_deref() { - Some("boolean") => value - .as_bool() - .ok_or_else(|| "value must be a bool".into()) - .map(|v| am::ScalarValue::Boolean(v).into()), - Some("int") => value - 
.as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::Int(v as i64).into()), - Some("uint") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::Uint(v as u64).into()), - Some("f64") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|n| am::ScalarValue::F64(n).into()), - Some("bytes") => { - Ok(am::ScalarValue::Bytes(value.dyn_into::().unwrap().to_vec()).into()) - } - Some("counter") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::counter(v as i64).into()), - Some("timestamp") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::Timestamp(v as i64).into()), - Some("null") => Ok(am::ScalarValue::Null.into()), - Some(_) => Err(format!("unknown datatype {:?}", datatype).into()), - None => { - if value.is_null() { - Ok(am::ScalarValue::Null.into()) - } else if let Some(b) = value.as_bool() { - Ok(am::ScalarValue::Boolean(b).into()) - } else if let Some(s) = value.as_string() { - // FIXME - we need to detect str vs int vs float vs bool here :/ - Ok(am::ScalarValue::Str(s.into()).into()) - } else if let Some(n) = value.as_f64() { - if (n.round() - n).abs() < f64::EPSILON { - Ok(am::ScalarValue::Int(n as i64).into()) - } else { - Ok(am::ScalarValue::F64(n).into()) - } - } else if let Some(o) = to_objtype(&value) { - Ok(o.into()) - } else if let Ok(d) = value.clone().dyn_into::() { - Ok(am::ScalarValue::Timestamp(d.get_time() as i64).into()) - } else if let Ok(o) = &value.dyn_into::() { - Ok(am::ScalarValue::Bytes(o.to_vec()).into()) - } else { - Err("value is invalid".into()) - } - } - } - */ } } From 3f82850e4441d4a776ac3321c4f66233c0725ea7 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 4 Feb 2022 20:15:57 -0500 Subject: [PATCH 070/730] fix bug in set scalar --- automerge-wasm/src/interop.rs | 4 +- automerge-wasm/src/lib.rs | 89 
++++++++++++++--------------------- 2 files changed, 39 insertions(+), 54 deletions(-) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 88f313c3..310ed563 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -246,7 +246,9 @@ pub(crate) fn js_get>(obj: J, prop: &str) -> Result String { - js_sys::JSON::stringify(val).map(|j| j.into()).unwrap_or("JSON::stringify_eror".into()) + js_sys::JSON::stringify(val) + .map(|j| j.into()) + .unwrap_or_else(|_| "JSON::stringify_eror".into()) } pub(crate) fn js_set>(obj: &JsValue, prop: &str, val: V) -> Result { diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 497949b9..21681e07 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -11,7 +11,9 @@ mod interop; mod sync; mod value; -use interop::{get_heads, js_get, js_set, map_to_js, to_js_err, to_objtype, to_prop, AR, JS, stringify}; +use interop::{ + get_heads, js_get, js_set, map_to_js, stringify, to_js_err, to_objtype, to_prop, AR, JS, +}; use sync::SyncState; use value::{datatype, ScalarValue}; @@ -235,10 +237,10 @@ impl Automerge { result.push(&ScalarValue(value).into()); Ok(Some(result)) } - None => Ok(None) + None => Ok(None), } } else { - Ok(None) + Ok(None) } } @@ -315,7 +317,9 @@ impl Automerge { .as_string() .ok_or("invalid mark name") .map_err(to_js_err)?; - let value = self.import_scalar(&value, datatype.as_string())?; + let value = self + .import_scalar(&value, &datatype.as_string()) + .ok_or_else(|| to_js_err("invalid value"))?; self.0 .mark(&obj, start, start_sticky, end, end_sticky, &name, value) .map_err(to_js_err)?; @@ -360,7 +364,7 @@ impl Automerge { let spans = self.0.raw_spans(&obj).map_err(to_js_err)?; let result = Array::new(); for s in spans { - result.push(&JsValue::from_serde(&s).map_err(to_js_err)?); + result.push(&JsValue::from_serde(&s).map_err(to_js_err)?); } Ok(result) } @@ -498,77 +502,56 @@ impl Automerge { fn import_scalar( &mut self, value: 
&JsValue, - datatype: Option, - ) -> Result { + datatype: &Option, + ) -> Option { match datatype.as_deref() { - Some("boolean") => value - .as_bool() - .ok_or_else(|| "value must be a bool".into()) - .map(am::ScalarValue::Boolean), - Some("int") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::Int(v as i64)), - Some("uint") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::Uint(v as u64)), - Some("f64") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(am::ScalarValue::F64), - Some("bytes") => Ok(am::ScalarValue::Bytes( + Some("boolean") => value.as_bool().map(am::ScalarValue::Boolean), + Some("int") => value.as_f64().map(|v| am::ScalarValue::Int(v as i64)), + Some("uint") => value.as_f64().map(|v| am::ScalarValue::Uint(v as u64)), + Some("f64") => value.as_f64().map(am::ScalarValue::F64), + Some("bytes") => Some(am::ScalarValue::Bytes( value.clone().dyn_into::().unwrap().to_vec(), )), - Some("counter") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::counter(v as i64)), - Some("timestamp") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::Timestamp(v as i64)), - /* - Some("bytes") => unimplemented!(), - Some("cursor") => unimplemented!(), - */ - Some("null") => Ok(am::ScalarValue::Null), - Some(_) => Err(to_js_err(format!("unknown datatype {:?}", datatype))), + Some("counter") => value.as_f64().map(|v| am::ScalarValue::counter(v as i64)), + Some("timestamp") => value.as_f64().map(|v| am::ScalarValue::Timestamp(v as i64)), + Some("null") => Some(am::ScalarValue::Null), + Some(_) => None, None => { if value.is_null() { - Ok(am::ScalarValue::Null) + Some(am::ScalarValue::Null) } else if let Some(b) = value.as_bool() { - Ok(am::ScalarValue::Boolean(b)) + Some(am::ScalarValue::Boolean(b)) } else if let Some(s) = value.as_string() { - // FIXME - 
we need to detect str vs int vs float vs bool here :/ - Ok(am::ScalarValue::Str(s.into())) + Some(am::ScalarValue::Str(s.into())) } else if let Some(n) = value.as_f64() { if (n.round() - n).abs() < f64::EPSILON { - Ok(am::ScalarValue::Int(n as i64)) + Some(am::ScalarValue::Int(n as i64)) } else { - Ok(am::ScalarValue::F64(n)) + Some(am::ScalarValue::F64(n)) } - // } else if let Some(o) = to_objtype(&value) { - // Ok(o.into()) } else if let Ok(d) = value.clone().dyn_into::() { - Ok(am::ScalarValue::Timestamp(d.get_time() as i64)) + Some(am::ScalarValue::Timestamp(d.get_time() as i64)) } else if let Ok(o) = &value.clone().dyn_into::() { - Ok(am::ScalarValue::Bytes(o.to_vec())) + Some(am::ScalarValue::Bytes(o.to_vec())) } else { - Err(to_js_err(format!("value '{}' is invalid", stringify(value) ))) + None } } } } fn import_value(&mut self, value: JsValue, datatype: Option) -> Result { - match self.import_scalar(&value, datatype) { - Ok(val) => Ok(val.into()), - Err(err) => { + match self.import_scalar(&value, &datatype) { + Some(val) => Ok(val.into()), + None => { if let Some(o) = to_objtype(&value) { Ok(o.into()) } else { - Err(err) + Err(to_js_err(format!( + "invalid value ({},{:?})", + stringify(&value), + datatype + ))) } } } From a9ddb9398cf351020d06dfe8017c6f1b76233749 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sun, 6 Feb 2022 19:01:37 -0500 Subject: [PATCH 071/730] cleanup typescript defs --- automerge-wasm/index.d.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index f3ca99c6..e4410306 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -84,6 +84,7 @@ export function encodeSyncState(state: SyncState): Uint8Array; export function decodeSyncState(data: Uint8Array): SyncState; export class Automerge { + // change state set(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): ObjID | undefined; make(obj: ObjID, prop: Prop, value: ObjectType): ObjID; 
insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): ObjID | undefined; @@ -91,7 +92,6 @@ export class Automerge { splice(obj: ObjID, start: number, delete_count: number, text: string | Array): ObjID[] | undefined; inc(obj: ObjID, prop: Prop, value: number): void; del(obj: ObjID, prop: Prop): void; - mark(obj: ObjID, name: string, range: string, value: Value, datatype?: Datatype): void; // returns a single value - if there is a conflict return the winner value(obj: ObjID, prop: any, heads?: Heads): FullValue | null; @@ -100,9 +100,13 @@ export class Automerge { keys(obj: ObjID, heads?: Heads): string[]; text(obj: ObjID, heads?: Heads): string; length(obj: ObjID, heads?: Heads): number; + + // experimental spans api - unstable! + mark(obj: ObjID, name: string, range: string, value: Value, datatype?: Datatype): void; spans(obj: ObjID): any; raw_spans(obj: ObjID): any; + // transactions commit(message?: string, time?: number): Heads; getActorId(): Actor; pendingOps(): number; From 1aab66d160da229071d044cbef2390a3a82b2ce6 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sun, 6 Feb 2022 19:57:25 -0500 Subject: [PATCH 072/730] fix version number --- automerge-wasm/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 3054d47b..9238fedc 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -6,7 +6,7 @@ ], "name": "automerge-wasm", "description": "wasm-bindgen bindings to the automerge rust implementation", - "version": "0.1.0", + "version": "0.0.1", "license": "MIT", "files": [ "README.md", From c655427f9adffe57b1007248f49393ae16890f1b Mon Sep 17 00:00:00 2001 From: rae <633012+okdistribute@users.noreply.github.com> Date: Mon, 7 Feb 2022 16:33:10 -0800 Subject: [PATCH 073/730] Add support for web --- automerge-wasm/package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 
9238fedc..c39299eb 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -15,6 +15,7 @@ "automerge_wasm_bg.wasm", "automerge_wasm.js" ], + "module": "./pkg/index.js", "main": "./dev/index.js", "scripts": { "build": "rimraf ./dev && wasm-pack build --target nodejs --dev --out-name index -d dev && cp index.d.ts dev", From 98a65f98f781f79752195209db9bd4e65474f614 Mon Sep 17 00:00:00 2001 From: Karissa McKelvey <633012+okdistribute@users.noreply.github.com> Date: Tue, 8 Feb 2022 16:34:08 -0800 Subject: [PATCH 074/730] Add failing test for decoding a conflicted merge --- automerge-wasm/test/test.ts | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 65f263c9..cf051950 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -538,6 +538,31 @@ describe('Automerge', () => { assert.deepStrictEqual(doc.save(), doc2.save()) }) + it('should handle merging text conflicts then saving & loading', () => { + let A = create() + let At = A.make('_root', 'text', TEXT) + A.splice(At, 0, 0, Array.from('hello')) + + let B = A.clone() + let Bt = B.value('_root', 'text') + if (!Bt || Bt[0] !== 'text') return assert.fail() + let obj = Bt[1] + B.splice(obj, 4, 1, '') + B.splice(obj, 4, 0, '!') + B.splice(obj, 5, 0, ' ') + B.splice(obj, 6, 0, Array.from('world')) + + A.applyChanges(B.getChanges(A.getHeads())) + + let binary = A.save() + + let C = loadDoc(binary) + + assert.deepEqual(C.value('_root', 'text'), ['text', 'hello world']) + + + }) + }) describe('sync', () => { it('should send a sync message implying no local data', () => { From 9136f00e43891648198a6f9b6ad887fa7c253c71 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 10 Feb 2022 11:14:44 -0500 Subject: [PATCH 075/730] bugfix: duplicate seq not blocked on apply_changes, clone did not close a transaction, added fork and merge to wasm --- automerge-wasm/index.d.ts | 4 ++- automerge-wasm/src/lib.rs | 
60 +++++++++++++++++++++++-------------- automerge-wasm/test/test.ts | 27 ++++++++--------- automerge/Cargo.toml | 2 ++ automerge/src/automerge.rs | 29 +++++++++++++++--- automerge/src/error.rs | 10 ++++++- 6 files changed, 90 insertions(+), 42 deletions(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index e4410306..8a7e9408 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -89,7 +89,7 @@ export class Automerge { make(obj: ObjID, prop: Prop, value: ObjectType): ObjID; insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): ObjID | undefined; push(obj: ObjID, value: Value, datatype?: Datatype): ObjID | undefined; - splice(obj: ObjID, start: number, delete_count: number, text: string | Array): ObjID[] | undefined; + splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; inc(obj: ObjID, prop: Prop, value: number): void; del(obj: ObjID, prop: Prop): void; @@ -108,6 +108,7 @@ export class Automerge { // transactions commit(message?: string, time?: number): Heads; + merge(other: Automerge): Heads; getActorId(): Actor; pendingOps(): number; rollback(): number; @@ -132,6 +133,7 @@ export class Automerge { // memory management free(): void; clone(actor?: string): Automerge; + fork(actor?: string): Automerge; // dump internal state to console.log dump(): void; diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 21681e07..82541978 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -44,7 +44,10 @@ impl Automerge { } #[allow(clippy::should_implement_trait)] - pub fn clone(&self, actor: Option) -> Result { + pub fn clone(&mut self, actor: Option) -> Result { + if self.0.pending_ops() > 0 { + self.0.commit(None,None); + } let mut automerge = Automerge(self.0.clone()); if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); @@ -53,6 +56,16 @@ impl Automerge { Ok(automerge) } + 
#[allow(clippy::should_implement_trait)] + pub fn fork(&mut self, actor: Option) -> Result { + let mut automerge = Automerge(self.0.fork()); + if let Some(s) = actor { + let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); + automerge.0.set_actor(actor) + } + Ok(automerge) + } + pub fn free(self) {} #[wasm_bindgen(js_name = pendingOps)] @@ -69,6 +82,15 @@ impl Automerge { heads } + pub fn merge(&mut self, other: &mut Automerge) -> Result { + let heads = self.0.merge(&mut other.0)?; + let heads: Array = heads + .iter() + .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .collect(); + Ok(heads) + } + pub fn rollback(&mut self) -> f64 { self.0.rollback() as f64 } @@ -89,11 +111,10 @@ impl Automerge { pub fn text(&mut self, obj: String, heads: Option) -> Result { let obj = self.import(obj)?; if let Some(heads) = get_heads(heads) { - self.0.text_at(&obj, &heads) + Ok(self.0.text_at(&obj, &heads)?) } else { - self.0.text(&obj) + Ok(self.0.text(&obj)?) } - .map_err(to_js_err) } pub fn splice( @@ -109,8 +130,7 @@ impl Automerge { let mut vals = vec![]; if let Some(t) = text.as_string() { self.0 - .splice_text(&obj, start, delete_count, &t) - .map_err(to_js_err)?; + .splice_text(&obj, start, delete_count, &t)?; Ok(None) } else { if let Ok(array) = text.dyn_into::() { @@ -128,8 +148,7 @@ impl Automerge { } let result = self .0 - .splice(&obj, start, delete_count, vals) - .map_err(to_js_err)?; + .splice(&obj, start, delete_count, vals)?; if result.is_empty() { Ok(None) } else { @@ -151,7 +170,7 @@ impl Automerge { let obj = self.import(obj)?; let value = self.import_value(value, datatype)?; let index = self.0.length(&obj); - let opid = self.0.insert(&obj, index, value).map_err(to_js_err)?; + let opid = self.0.insert(&obj, index, value)?; Ok(opid.map(|id| id.to_string())) } @@ -167,8 +186,7 @@ impl Automerge { let value = self.import_value(value, datatype)?; let opid = self .0 - .insert(&obj, index as usize, value) - .map_err(to_js_err)?; + 
.insert(&obj, index as usize, value)?; Ok(opid.map(|id| id.to_string())) } @@ -182,7 +200,7 @@ impl Automerge { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; let value = self.import_value(value, datatype)?; - let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; + let opid = self.0.set(&obj, prop, value)?; Ok(opid.map(|id| id.to_string())) } @@ -191,7 +209,7 @@ impl Automerge { let prop = self.import_prop(prop)?; let value = self.import_value(value, None)?; if value.is_object() { - let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; + let opid = self.0.set(&obj, prop, value)?; Ok(opid.unwrap().to_string()) } else { Err(to_js_err("invalid object type")) @@ -203,9 +221,8 @@ impl Automerge { let prop = self.import_prop(prop)?; let value: f64 = value .as_f64() - .ok_or("inc needs a numberic value") - .map_err(to_js_err)?; - self.0.inc(&obj, prop, value as i64).map_err(to_js_err)?; + .ok_or(to_js_err("inc needs a numberic value"))?; + self.0.inc(&obj, prop, value as i64)?; Ok(()) } @@ -221,11 +238,10 @@ impl Automerge { let heads = get_heads(heads); if let Ok(prop) = prop { let value = if let Some(h) = heads { - self.0.value_at(&obj, prop, &h) + self.0.value_at(&obj, prop, &h)? } else { - self.0.value(&obj, prop) - } - .map_err(to_js_err)?; + self.0.value(&obj, prop)? 
+ }; match value { Some((Value::Object(obj_type), obj_id)) => { result.push(&obj_type.to_string().into()); @@ -408,8 +424,8 @@ impl Automerge { } #[wasm_bindgen(js_name = getChangesAdded)] - pub fn get_changes_added(&mut self, other: &Automerge) -> Result { - let changes = self.0.get_changes_added(&other.0); + pub fn get_changes_added(&mut self, other: &mut Automerge) -> Result { + let changes = self.0.get_changes_added(&mut other.0); let changes: Array = changes .iter() .map(|c| Uint8Array::from(c.raw_bytes())) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index cf051950..375afaca 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -539,28 +539,27 @@ describe('Automerge', () => { }) it('should handle merging text conflicts then saving & loading', () => { - let A = create() + let A = create("aabbcc") let At = A.make('_root', 'text', TEXT) - A.splice(At, 0, 0, Array.from('hello')) + A.splice(At, 0, 0, 'hello') - let B = A.clone() - let Bt = B.value('_root', 'text') - if (!Bt || Bt[0] !== 'text') return assert.fail() - let obj = Bt[1] - B.splice(obj, 4, 1, '') - B.splice(obj, 4, 0, '!') - B.splice(obj, 5, 0, ' ') - B.splice(obj, 6, 0, Array.from('world')) + let B = A.fork() - A.applyChanges(B.getChanges(A.getHeads())) + assert.deepEqual(B.value("_root","text"), [ "text", At]) + + B.splice(At, 4, 1) + B.splice(At, 4, 0, '!') + B.splice(At, 5, 0, ' ') + B.splice(At, 6, 0, 'world') + + A.merge(B) let binary = A.save() let C = loadDoc(binary) - assert.deepEqual(C.value('_root', 'text'), ['text', 'hello world']) - - + assert.deepEqual(C.value('_root', 'text'), ['text', '1@aabbcc']) + assert.deepEqual(C.text(At), 'hell! 
world') }) }) diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index 8a47f73f..7d385301 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -26,6 +26,8 @@ tinyvec = { version = "^1.5.1", features = ["alloc"] } unicode-segmentation = "1.7.1" serde = { version = "^1.0", features=["derive"] } dot = { version = "0.1.4", optional = true } +js-sys = "^0.3" +wasm-bindgen = "^0.2" [dependencies.web-sys] version = "^0.3.55" diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 688969a7..0d88d42b 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -126,6 +126,13 @@ impl Automerge { self.transaction.as_mut().unwrap() } + pub fn fork(&mut self) -> Self { + self.ensure_transaction_closed(); + let mut f = self.clone(); + f.actor = None; + f + } + pub fn commit(&mut self, message: Option, time: Option) -> Vec { let tx = self.tx(); @@ -630,10 +637,23 @@ impl Automerge { Ok(delta) } + fn duplicate_seq(&self, change: &Change) -> bool { + let mut dup = false; + if let Some(actor_index) = self.ops.m.actors.lookup(change.actor_id()) { + if let Some(s) = self.states.get(&actor_index) { + dup = s.len() >= change.seq as usize; + } + } + dup + } + pub fn apply_changes(&mut self, changes: &[Change]) -> Result { self.ensure_transaction_closed(); for c in changes { if !self.history_index.contains_key(&c.hash) { + if self.duplicate_seq(c) { + return Err(AutomergeError::DuplicateSeqNumber(c.seq,c.actor_id().clone())) + } if self.is_causally_ready(c) { self.apply_change(c.clone()); } else { @@ -804,15 +824,15 @@ impl Automerge { } /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge(&mut self, other: &mut Self) { + pub fn merge(&mut self, other: &mut Self) -> Result,AutomergeError> { // TODO: Make this fallible and figure out how to do this transactionally - other.ensure_transaction_closed(); let changes = self .get_changes_added(other) .into_iter() .cloned() .collect::>(); - 
self.apply_changes(&changes).unwrap(); + self.apply_changes(&changes)?; + Ok(self._get_heads()) } pub fn save(&mut self) -> Result, AutomergeError> { @@ -1046,8 +1066,9 @@ impl Automerge { .and_then(|index| self.history.get(*index)) } - pub fn get_changes_added<'a>(&mut self, other: &'a Self) -> Vec<&'a Change> { + pub fn get_changes_added<'a>(&mut self, other: &'a mut Self) -> Vec<&'a Change> { self.ensure_transaction_closed(); + other.ensure_transaction_closed(); self._get_changes_added(other) } diff --git a/automerge/src/error.rs b/automerge/src/error.rs index 32eb9d1d..15f1be7c 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -1,5 +1,5 @@ use crate::decoding; -use crate::types::ScalarValue; +use crate::types::{ ActorId, ScalarValue}; use crate::value::DataType; use thiserror::Error; @@ -17,6 +17,8 @@ pub enum AutomergeError { InvalidSeq(u64), #[error("index {0} is out of bounds")] InvalidIndex(usize), + #[error("duplicate seq {0} found for actor {1}")] + DuplicateSeqNumber(u64,ActorId), #[error("generic automerge error")] Fail, } @@ -33,6 +35,12 @@ impl From for AutomergeError { } } +impl From for wasm_bindgen::JsValue { + fn from(err: AutomergeError) -> Self { + js_sys::Error::new(&std::format!("{}", err)).into() + } +} + #[derive(Error, Debug)] #[error("Invalid actor ID: {0}")] pub struct InvalidActorId(pub String); From d1b0d41239bde37a84d171a1ba9071b3b059aa1d Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 10 Feb 2022 11:17:15 -0500 Subject: [PATCH 076/730] move marks into its own test --- automerge-wasm/test/marks.ts | 138 +++++++++++++++++++++++++++++++++++ automerge-wasm/test/test.ts | 129 -------------------------------- 2 files changed, 138 insertions(+), 129 deletions(-) create mode 100644 automerge-wasm/test/marks.ts diff --git a/automerge-wasm/test/marks.ts b/automerge-wasm/test/marks.ts new file mode 100644 index 00000000..61951056 --- /dev/null +++ b/automerge-wasm/test/marks.ts @@ -0,0 +1,138 @@ +import { describe, it } 
from 'mocha'; +//@ts-ignore +import assert from 'assert' +//@ts-ignore +import { create, loadDoc, Automerge, TEXT, encodeChange, decodeChange } from '../dev/index' + +describe('Automerge', () => { + describe('marks', () => { + it('should handle marks [..]', () => { + let doc = create() + let list = doc.set("_root", "list", TEXT) + if (!list) throw new Error('should not be undefined') + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "[3..6]", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.insert(list, 6, "A") + doc.insert(list, 3, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaaA', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'Accc' ]); + }) + + it('should handle marks with deleted ends [..]', () => { + let doc = create() + let list = doc.set("_root", "list", TEXT) + if (!list) throw new Error('should not be undefined') + + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "[3..6]", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.del(list,5); + doc.del(list,5); + doc.del(list,2); + doc.del(list,2); + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) + doc.insert(list, 3, "A") + doc.insert(list, 2, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaA', [ [ 'bold', 'boolean', true ] ], 'b', [], 'Acc' ]) + }) + + it('should handle sticky marks (..)', () => { + let doc = create() + let list = doc.set("_root", "list", TEXT) + if (!list) throw new Error('should not be undefined') + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "(3..6)", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.insert(list, 6, "A") + doc.insert(list, 3, "A") + spans = doc.spans(list); + 
assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'AbbbA', [], 'ccc' ]); + }) + + it('should handle sticky marks with deleted ends (..)', () => { + let doc = create() + let list = doc.set("_root", "list", TEXT) + if (!list) throw new Error('should not be undefined') + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "(3..6)", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.del(list,5); + doc.del(list,5); + doc.del(list,2); + doc.del(list,2); + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) + doc.insert(list, 3, "A") + doc.insert(list, 2, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) + + // make sure save/load can handle marks + + let doc2 = loadDoc(doc.save()) + spans = doc2.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) + + assert.deepStrictEqual(doc.getHeads(), doc2.getHeads()) + assert.deepStrictEqual(doc.save(), doc2.save()) + }) + + it('should handle overlapping marks', () => { + let doc : Automerge = create("aabbcc") + let list = doc.set("_root", "list", TEXT) + if (!list) throw new Error('should not be undefined') + doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog") + doc.mark(list, "[0..37]", "bold" , true) + doc.mark(list, "[4..19]", "itallic" , true) + doc.mark(list, "[10..13]", "comment" , "foxes are my favorite animal!") + doc.commit("marks",999); + let spans = doc.spans(list); + assert.deepStrictEqual(spans, + [ + [ [ 'bold', 'boolean', true ] ], + 'the ', + [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], + 'quick ', + [ + [ 'bold', 'boolean', true ], + [ 'comment', 'str', 'foxes are my favorite animal!' 
], + [ 'itallic', 'boolean', true ] + ], + 'fox', + [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], + ' jumps', + [ [ 'bold', 'boolean', true ] ], + ' over the lazy dog', + [], + ] + ) + let text = doc.text(list); + assert.deepStrictEqual(text, "the quick fox jumps over the lazy dog"); + let raw_spans = doc.raw_spans(list); + assert.deepStrictEqual(raw_spans, + [ + { id: "39@aabbcc", time: 999, start: 0, end: 37, type: 'bold', value: true }, + { id: "41@aabbcc", time: 999, start: 4, end: 19, type: 'itallic', value: true }, + { id: "43@aabbcc", time: 999, start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' } + ]); + + // mark sure encode decode can handle marks + + let all = doc.getChanges([]) + let decoded = all.map((c) => decodeChange(c)) + let encoded = decoded.map((c) => encodeChange(c)) + let doc2 = create(); + doc2.applyChanges(encoded) + + assert.deepStrictEqual(doc.spans(list) , doc2.spans(list)) + assert.deepStrictEqual(doc.save(), doc2.save()) + }) + }) +}) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 375afaca..f72d0979 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -409,135 +409,6 @@ describe('Automerge', () => { doc2.free() }) - it('should handle marks [..]', () => { - let doc = create() - let list = doc.set("_root", "list", TEXT) - if (!list) throw new Error('should not be undefined') - doc.splice(list, 0, 0, "aaabbbccc") - doc.mark(list, "[3..6]", "bold" , true) - let spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); - doc.insert(list, 6, "A") - doc.insert(list, 3, "A") - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaaA', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'Accc' ]); - }) - - it('should handle marks with deleted ends [..]', () => { - let doc = create() - let list = doc.set("_root", "list", TEXT) - if (!list) throw new Error('should not be undefined') - - 
doc.splice(list, 0, 0, "aaabbbccc") - doc.mark(list, "[3..6]", "bold" , true) - let spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); - doc.del(list,5); - doc.del(list,5); - doc.del(list,2); - doc.del(list,2); - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) - doc.insert(list, 3, "A") - doc.insert(list, 2, "A") - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaA', [ [ 'bold', 'boolean', true ] ], 'b', [], 'Acc' ]) - }) - - it('should handle sticky marks (..)', () => { - let doc = create() - let list = doc.set("_root", "list", TEXT) - if (!list) throw new Error('should not be undefined') - doc.splice(list, 0, 0, "aaabbbccc") - doc.mark(list, "(3..6)", "bold" , true) - let spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); - doc.insert(list, 6, "A") - doc.insert(list, 3, "A") - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'AbbbA', [], 'ccc' ]); - }) - - it('should handle sticky marks with deleted ends (..)', () => { - let doc = create() - let list = doc.set("_root", "list", TEXT) - if (!list) throw new Error('should not be undefined') - doc.splice(list, 0, 0, "aaabbbccc") - doc.mark(list, "(3..6)", "bold" , true) - let spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); - doc.del(list,5); - doc.del(list,5); - doc.del(list,2); - doc.del(list,2); - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) - doc.insert(list, 3, "A") - doc.insert(list, 2, "A") - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) - - // make sure save/load can handle marks - - let doc2 = loadDoc(doc.save()) - spans = 
doc2.spans(list); - assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) - - assert.deepStrictEqual(doc.getHeads(), doc2.getHeads()) - assert.deepStrictEqual(doc.save(), doc2.save()) - }) - - it('should handle overlapping marks', () => { - let doc : Automerge = create("aabbcc") - let list = doc.set("_root", "list", TEXT) - if (!list) throw new Error('should not be undefined') - doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog") - doc.mark(list, "[0..37]", "bold" , true) - doc.mark(list, "[4..19]", "itallic" , true) - doc.mark(list, "[10..13]", "comment" , "foxes are my favorite animal!") - doc.commit("marks",999); - let spans = doc.spans(list); - assert.deepStrictEqual(spans, - [ - [ [ 'bold', 'boolean', true ] ], - 'the ', - [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], - 'quick ', - [ - [ 'bold', 'boolean', true ], - [ 'comment', 'str', 'foxes are my favorite animal!' ], - [ 'itallic', 'boolean', true ] - ], - 'fox', - [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], - ' jumps', - [ [ 'bold', 'boolean', true ] ], - ' over the lazy dog', - [], - ] - ) - let text = doc.text(list); - assert.deepStrictEqual(text, "the quick fox jumps over the lazy dog"); - let raw_spans = doc.raw_spans(list); - assert.deepStrictEqual(raw_spans, - [ - { id: "39@aabbcc", time: 999, start: 0, end: 37, type: 'bold', value: true }, - { id: "41@aabbcc", time: 999, start: 4, end: 19, type: 'itallic', value: true }, - { id: "43@aabbcc", time: 999, start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' 
} - ]); - - // mark sure encode decode can handle marks - - let all = doc.getChanges([]) - let decoded = all.map((c) => decodeChange(c)) - let encoded = decoded.map((c) => encodeChange(c)) - let doc2 = create(); - doc2.applyChanges(encoded) - - assert.deepStrictEqual(doc.spans(list) , doc2.spans(list)) - assert.deepStrictEqual(doc.save(), doc2.save()) - }) - it('should handle merging text conflicts then saving & loading', () => { let A = create("aabbcc") let At = A.make('_root', 'text', TEXT) From c8c695618b678b63aac31364cf4203d7a6f507b2 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 10 Feb 2022 11:38:12 -0500 Subject: [PATCH 077/730] remove marks --- automerge-wasm/index.d.ts | 5 - automerge-wasm/src/lib.rs | 73 ----------- automerge-wasm/test/marks.ts | 138 -------------------- automerge/src/automerge.rs | 88 +------------ automerge/src/columnar.rs | 46 +------ automerge/src/legacy/serde_impls/op.rs | 36 ----- automerge/src/legacy/serde_impls/op_type.rs | 2 - automerge/src/query.rs | 4 - automerge/src/query/insert.rs | 4 - automerge/src/query/raw_spans.rs | 71 ---------- automerge/src/query/spans.rs | 108 --------------- automerge/src/types.rs | 39 +----- automerge/tests/test.rs | 96 +++++++------- 13 files changed, 52 insertions(+), 658 deletions(-) delete mode 100644 automerge-wasm/test/marks.ts delete mode 100644 automerge/src/query/raw_spans.rs delete mode 100644 automerge/src/query/spans.rs diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 8a7e9408..20189dab 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -101,11 +101,6 @@ export class Automerge { text(obj: ObjID, heads?: Heads): string; length(obj: ObjID, heads?: Heads): number; - // experimental spans api - unstable! 
- mark(obj: ObjID, name: string, range: string, value: Value, datatype?: Datatype): void; - spans(obj: ObjID): any; - raw_spans(obj: ObjID): any; - // transactions commit(message?: string, time?: number): Heads; merge(other: Automerge): Heads; diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 82541978..9fdd8ff2 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -2,7 +2,6 @@ use automerge as am; use automerge::{Change, ObjId, Prop, Value, ROOT}; use js_sys::{Array, Object, Uint8Array}; -use regex::Regex; use std::convert::TryInto; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; @@ -313,78 +312,6 @@ impl Automerge { Ok(()) } - pub fn mark( - &mut self, - obj: String, - range: JsValue, - name: JsValue, - value: JsValue, - datatype: JsValue, - ) -> Result<(), JsValue> { - let obj = self.import(obj)?; - let re = Regex::new(r"([\[\(])(\d+)\.\.(\d+)([\)\]])").unwrap(); - let range = range.as_string().ok_or("range must be a string")?; - let cap = re.captures_iter(&range).next().ok_or("range must be in the form of (start..end] or [start..end) etc... 
() for sticky, [] for normal")?; - let start: usize = cap[2].parse().map_err(|_| to_js_err("invalid start"))?; - let end: usize = cap[3].parse().map_err(|_| to_js_err("invalid end"))?; - let start_sticky = &cap[1] == "("; - let end_sticky = &cap[4] == ")"; - let name = name - .as_string() - .ok_or("invalid mark name") - .map_err(to_js_err)?; - let value = self - .import_scalar(&value, &datatype.as_string()) - .ok_or_else(|| to_js_err("invalid value"))?; - self.0 - .mark(&obj, start, start_sticky, end, end_sticky, &name, value) - .map_err(to_js_err)?; - Ok(()) - } - - pub fn spans(&mut self, obj: String) -> Result { - let obj = self.import(obj)?; - let text = self.0.text(&obj).map_err(to_js_err)?; - let spans = self.0.spans(&obj).map_err(to_js_err)?; - let mut last_pos = 0; - let result = Array::new(); - for s in spans { - let marks = Array::new(); - for m in s.marks { - let mark = Array::new(); - mark.push(&m.0.into()); - mark.push(&datatype(&m.1).into()); - mark.push(&ScalarValue(m.1).into()); - marks.push(&mark.into()); - } - let text_span = &text[last_pos..s.pos]; //.slice(last_pos, s.pos); - if !text_span.is_empty() { - result.push(&text_span.into()); - } - result.push(&marks); - last_pos = s.pos; - //let obj = Object::new().into(); - //js_set(&obj, "pos", s.pos as i32)?; - //js_set(&obj, "marks", marks)?; - //result.push(&obj.into()); - } - let text_span = &text[last_pos..]; - if !text_span.is_empty() { - result.push(&text_span.into()); - } - Ok(result.into()) - } - - pub fn raw_spans(&mut self, obj: String) -> Result { - let obj = self.import(obj)?; - let spans = self.0.raw_spans(&obj).map_err(to_js_err)?; - let result = Array::new(); - for s in spans { - result.push(&JsValue::from_serde(&s).map_err(to_js_err)?); - } - Ok(result) - } - pub fn save(&mut self) -> Result { self.0 .save() diff --git a/automerge-wasm/test/marks.ts b/automerge-wasm/test/marks.ts deleted file mode 100644 index 61951056..00000000 --- a/automerge-wasm/test/marks.ts +++ /dev/null @@ 
-1,138 +0,0 @@ -import { describe, it } from 'mocha'; -//@ts-ignore -import assert from 'assert' -//@ts-ignore -import { create, loadDoc, Automerge, TEXT, encodeChange, decodeChange } from '../dev/index' - -describe('Automerge', () => { - describe('marks', () => { - it('should handle marks [..]', () => { - let doc = create() - let list = doc.set("_root", "list", TEXT) - if (!list) throw new Error('should not be undefined') - doc.splice(list, 0, 0, "aaabbbccc") - doc.mark(list, "[3..6]", "bold" , true) - let spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); - doc.insert(list, 6, "A") - doc.insert(list, 3, "A") - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaaA', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'Accc' ]); - }) - - it('should handle marks with deleted ends [..]', () => { - let doc = create() - let list = doc.set("_root", "list", TEXT) - if (!list) throw new Error('should not be undefined') - - doc.splice(list, 0, 0, "aaabbbccc") - doc.mark(list, "[3..6]", "bold" , true) - let spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); - doc.del(list,5); - doc.del(list,5); - doc.del(list,2); - doc.del(list,2); - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) - doc.insert(list, 3, "A") - doc.insert(list, 2, "A") - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaA', [ [ 'bold', 'boolean', true ] ], 'b', [], 'Acc' ]) - }) - - it('should handle sticky marks (..)', () => { - let doc = create() - let list = doc.set("_root", "list", TEXT) - if (!list) throw new Error('should not be undefined') - doc.splice(list, 0, 0, "aaabbbccc") - doc.mark(list, "(3..6)", "bold" , true) - let spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); - doc.insert(list, 6, "A") - doc.insert(list, 
3, "A") - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'AbbbA', [], 'ccc' ]); - }) - - it('should handle sticky marks with deleted ends (..)', () => { - let doc = create() - let list = doc.set("_root", "list", TEXT) - if (!list) throw new Error('should not be undefined') - doc.splice(list, 0, 0, "aaabbbccc") - doc.mark(list, "(3..6)", "bold" , true) - let spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); - doc.del(list,5); - doc.del(list,5); - doc.del(list,2); - doc.del(list,2); - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) - doc.insert(list, 3, "A") - doc.insert(list, 2, "A") - spans = doc.spans(list); - assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) - - // make sure save/load can handle marks - - let doc2 = loadDoc(doc.save()) - spans = doc2.spans(list); - assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) - - assert.deepStrictEqual(doc.getHeads(), doc2.getHeads()) - assert.deepStrictEqual(doc.save(), doc2.save()) - }) - - it('should handle overlapping marks', () => { - let doc : Automerge = create("aabbcc") - let list = doc.set("_root", "list", TEXT) - if (!list) throw new Error('should not be undefined') - doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog") - doc.mark(list, "[0..37]", "bold" , true) - doc.mark(list, "[4..19]", "itallic" , true) - doc.mark(list, "[10..13]", "comment" , "foxes are my favorite animal!") - doc.commit("marks",999); - let spans = doc.spans(list); - assert.deepStrictEqual(spans, - [ - [ [ 'bold', 'boolean', true ] ], - 'the ', - [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], - 'quick ', - [ - [ 'bold', 'boolean', true ], - [ 'comment', 'str', 'foxes are my favorite animal!' 
], - [ 'itallic', 'boolean', true ] - ], - 'fox', - [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], - ' jumps', - [ [ 'bold', 'boolean', true ] ], - ' over the lazy dog', - [], - ] - ) - let text = doc.text(list); - assert.deepStrictEqual(text, "the quick fox jumps over the lazy dog"); - let raw_spans = doc.raw_spans(list); - assert.deepStrictEqual(raw_spans, - [ - { id: "39@aabbcc", time: 999, start: 0, end: 37, type: 'bold', value: true }, - { id: "41@aabbcc", time: 999, start: 4, end: 19, type: 'itallic', value: true }, - { id: "43@aabbcc", time: 999, start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' } - ]); - - // mark sure encode decode can handle marks - - let all = doc.getChanges([]) - let decoded = all.map((c) => decodeChange(c)) - let encoded = decoded.map((c) => encodeChange(c)) - let doc2 = create(); - doc2.applyChanges(encoded) - - assert.deepStrictEqual(doc.spans(list) , doc2.spans(list)) - assert.deepStrictEqual(doc.save(), doc2.save()) - }) - }) -}) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 0d88d42b..f4fd2014 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -453,90 +453,6 @@ impl Automerge { Ok(buffer) } - pub fn spans(&self, obj: &ExId) -> Result, AutomergeError> { - let obj = self.exid_to_obj(obj)?; - let mut query = self.ops.search(obj, query::Spans::new()); - query.check_marks(); - Ok(query.spans) - } - - pub fn raw_spans(&self, obj: &ExId) -> Result, AutomergeError> { - let obj = self.exid_to_obj(obj)?; - let query = self.ops.search(obj, query::RawSpans::new()); - let result = query.spans.into_iter().map(|s| SpanInfo { - id: self.id_to_exid(s.id), - time: self.history[s.change].time, - start: s.start, - end: s.end, - span_type: s.name, - value: s.value, - }).collect(); - Ok(result) - } - - #[allow(clippy::too_many_arguments)] - pub fn mark( - &mut self, - obj: &ExId, - start: usize, - expand_start: bool, - end: usize, - expand_end: bool, - 
mark: &str, - value: ScalarValue, - ) -> Result<(), AutomergeError> { - let obj = self.exid_to_obj(obj)?; - - self.do_insert(obj, start, OpType::mark(mark.into(), expand_start, value))?; - self.do_insert(obj, end, OpType::MarkEnd(expand_end))?; - - /* - let (a, b) = query.ops()?; - let (pos, key) = a; - let id = self.next_id(); - let op = Op { - change: self.history.len(), - id, - action: OpType::Mark(MarkData { name: mark.into(), expand: expand_start, value}), - obj, - key, - succ: Default::default(), - pred: Default::default(), - insert: true, - }; - self.ops.insert(pos, op.clone()); - self.tx().operations.push(op); - - let (pos, key) = b; - let id = self.next_id(); - let op = Op { - change: self.history.len(), - id, - action: OpType::Unmark(expand_end), - obj, - key, - succ: Default::default(), - pred: Default::default(), - insert: true, - }; - self.ops.insert(pos, op.clone()); - self.tx().operations.push(op); - */ - - Ok(()) - } - - pub fn unmark( - &self, - _obj: &ExId, - _start: usize, - _end: usize, - _inclusive: bool, - _mark: &str, - ) -> Result { - unimplemented!() - } - // TODO - I need to return these OpId's here **only** to get // the legacy conflicts format of { [opid]: value } // Something better? 
@@ -1195,8 +1111,6 @@ impl Automerge { OpType::Set(value) => format!("{}", value), OpType::Make(obj) => format!("make({})", obj), OpType::Inc(obj) => format!("inc({})", obj), - OpType::MarkBegin(m) => format!("mark({}={})", m.name, m.value), - OpType::MarkEnd(_) => "/mark".into(), OpType::Del => format!("del{}", 0), }; let pred: Vec<_> = i.pred.iter().map(|id| self.to_string(*id)).collect(); @@ -1467,6 +1381,8 @@ mod tests { assert!(doc.value_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10)); assert!(doc.length_at(&list, &heads3) == 2); + doc.dump(); + //log!("{:?}", doc.value_at(&list, 0, &heads3)?.unwrap().0); assert!(doc.value_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30)); assert!(doc.value_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20)); diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index 28aca822..53a9d488 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -134,15 +134,6 @@ impl<'a> Iterator for OperationIterator<'a> { Action::MakeTable => OpType::Make(ObjType::Table), Action::Del => OpType::Del, Action::Inc => OpType::Inc(value.to_i64()?), - Action::MarkBegin => { - // mark has 3 things in the val column - let name = value.to_string()?; - let expand = self.value.next()?.to_bool()?; - let value = self.value.next()?; - OpType::mark(name, expand, value) - } - Action::MarkEnd => OpType::MarkEnd(value.to_bool()?), - Action::Unused => panic!("invalid action"), }; Some(amp::Op { action, @@ -184,15 +175,6 @@ impl<'a> Iterator for DocOpIterator<'a> { Action::MakeTable => OpType::Make(ObjType::Table), Action::Del => OpType::Del, Action::Inc => OpType::Inc(value.to_i64()?), - Action::MarkBegin => { - // mark has 3 things in the val column - let name = value.to_string()?; - let expand = self.value.next()?.to_bool()?; - let value = self.value.next()?; - OpType::mark(name, expand, value) - } - Action::MarkEnd => OpType::MarkEnd(value.to_bool()?), - Action::Unused => panic!("invalid action"), }; Some(DocOp { actor, 
@@ -1082,16 +1064,6 @@ impl DocOpEncoder { self.val.append_null(); Action::Del } - amp::OpType::MarkBegin(m) => { - self.val.append_value(&m.name.clone().into(), actors); - self.val.append_value(&m.expand.into(), actors); - self.val.append_value(&m.value.clone(), actors); - Action::MarkBegin - } - amp::OpType::MarkEnd(s) => { - self.val.append_value(&(*s).into(), actors); - Action::MarkEnd - } amp::OpType::Make(kind) => { self.val.append_null(); match kind { @@ -1198,16 +1170,6 @@ impl ColumnEncoder { self.val.append_null(); Action::Del } - OpType::MarkBegin(m) => { - self.val.append_value2(&m.name.clone().into(), actors); - self.val.append_value2(&m.expand.into(), actors); - self.val.append_value2(&m.value.clone(), actors); - Action::MarkBegin - } - OpType::MarkEnd(s) => { - self.val.append_value2(&(*s).into(), actors); - Action::MarkEnd - } OpType::Make(kind) => { self.val.append_null(); match kind { @@ -1313,11 +1275,8 @@ pub(crate) enum Action { MakeText, Inc, MakeTable, - MarkBegin, - Unused, // final bit is used to mask `Make` actions - MarkEnd, } -const ACTIONS: [Action; 10] = [ +const ACTIONS: [Action; 7] = [ Action::MakeMap, Action::Set, Action::MakeList, @@ -1325,9 +1284,6 @@ const ACTIONS: [Action; 10] = [ Action::MakeText, Action::Inc, Action::MakeTable, - Action::MarkBegin, - Action::Unused, - Action::MarkEnd, ]; impl Decodable for Action { diff --git a/automerge/src/legacy/serde_impls/op.rs b/automerge/src/legacy/serde_impls/op.rs index b91ae7e8..1d2a4125 100644 --- a/automerge/src/legacy/serde_impls/op.rs +++ b/automerge/src/legacy/serde_impls/op.rs @@ -49,12 +49,6 @@ impl Serialize for Op { match &self.action { OpType::Inc(n) => op.serialize_field("value", &n)?, OpType::Set(value) => op.serialize_field("value", &value)?, - OpType::MarkBegin(m) => { - op.serialize_field("name", &m.name)?; - op.serialize_field("expand", &m.expand)?; - op.serialize_field("value", &m.value)?; - } - OpType::MarkEnd(s) => op.serialize_field("expand", &s)?, _ => {} } 
op.serialize_field("pred", &self.pred)?; @@ -76,8 +70,6 @@ pub(crate) enum RawOpType { Del, Inc, Set, - MarkBegin, - MarkEnd, } impl Serialize for RawOpType { @@ -93,8 +85,6 @@ impl Serialize for RawOpType { RawOpType::Del => "del", RawOpType::Inc => "inc", RawOpType::Set => "set", - RawOpType::MarkBegin => "mark_begin", - RawOpType::MarkEnd => "mark_end", }; serializer.serialize_str(s) } @@ -126,8 +116,6 @@ impl<'de> Deserialize<'de> for RawOpType { "del" => Ok(RawOpType::Del), "inc" => Ok(RawOpType::Inc), "set" => Ok(RawOpType::Set), - "mark_begin" => Ok(RawOpType::MarkBegin), - "mark_end" => Ok(RawOpType::MarkEnd), other => Err(Error::unknown_variant(other, VARIANTS)), } } @@ -200,30 +188,6 @@ impl<'de> Deserialize<'de> for Op { RawOpType::MakeList => OpType::Make(ObjType::List), RawOpType::MakeText => OpType::Make(ObjType::Text), RawOpType::Del => OpType::Del, - RawOpType::MarkBegin => { - let name = name.ok_or_else(|| Error::missing_field("mark(name)"))?; - let expand = expand.unwrap_or(false); - let value = if let Some(datatype) = datatype { - let raw_value = value - .ok_or_else(|| Error::missing_field("value"))? - .unwrap_or(ScalarValue::Null); - raw_value.as_datatype(datatype).map_err(|e| { - Error::invalid_value( - Unexpected::Other(e.unexpected.as_str()), - &e.expected.as_str(), - ) - })? - } else { - value - .ok_or_else(|| Error::missing_field("value"))? 
- .unwrap_or(ScalarValue::Null) - }; - OpType::mark(name, expand, value) - } - RawOpType::MarkEnd => { - let expand = expand.unwrap_or(true); - OpType::MarkEnd(expand) - } RawOpType::Set => { let value = if let Some(datatype) = datatype { let raw_value = value diff --git a/automerge/src/legacy/serde_impls/op_type.rs b/automerge/src/legacy/serde_impls/op_type.rs index 0959b11d..19849674 100644 --- a/automerge/src/legacy/serde_impls/op_type.rs +++ b/automerge/src/legacy/serde_impls/op_type.rs @@ -15,8 +15,6 @@ impl Serialize for OpType { OpType::Make(ObjType::Table) => RawOpType::MakeTable, OpType::Make(ObjType::List) => RawOpType::MakeList, OpType::Make(ObjType::Text) => RawOpType::MakeText, - OpType::MarkBegin(_) => RawOpType::MarkBegin, - OpType::MarkEnd(_) => RawOpType::MarkEnd, OpType::Del => RawOpType::Del, OpType::Inc(_) => RawOpType::Inc, OpType::Set(_) => RawOpType::Set, diff --git a/automerge/src/query.rs b/automerge/src/query.rs index ff97532e..7911e1bb 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -17,8 +17,6 @@ mod nth_at; mod prop; mod prop_at; mod seek_op; -mod spans; -mod raw_spans; pub(crate) use insert::InsertNth; pub(crate) use keys::Keys; @@ -32,8 +30,6 @@ pub(crate) use nth_at::NthAt; pub(crate) use prop::Prop; pub(crate) use prop_at::PropAt; pub(crate) use seek_op::SeekOp; -pub(crate) use spans::{Span, Spans}; -pub(crate) use raw_spans::RawSpans; #[derive(Debug, Clone, PartialEq)] pub(crate) struct CounterData { diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index 38a58e45..62da48f9 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -85,10 +85,6 @@ impl TreeQuery for InsertNth { self.last_seen = None; self.last_insert = element.elemid(); } - if self.valid.is_some() && element.valid_mark_anchor() { - self.last_valid_insert = element.elemid(); - self.valid = None; - } if self.last_seen.is_none() && element.visible() { if self.seen >= self.target { return 
QueryResult::Finish; diff --git a/automerge/src/query/raw_spans.rs b/automerge/src/query/raw_spans.rs deleted file mode 100644 index 77a45741..00000000 --- a/automerge/src/query/raw_spans.rs +++ /dev/null @@ -1,71 +0,0 @@ -use crate::query::{OpSetMetadata, QueryResult, TreeQuery}; -use crate::types::{ElemId, Op, OpId, OpType, ScalarValue}; -use std::fmt::Debug; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct RawSpans { - pos: usize, - seen: usize, - last_seen: Option, - last_insert: Option, - changed: bool, - pub spans: Vec, -} - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct RawSpan { - pub id: OpId, - pub change: usize, - pub start: usize, - pub end: usize, - pub name: String, - pub value: ScalarValue, -} - -impl RawSpans { - pub fn new() -> Self { - RawSpans { - pos: 0, - seen: 0, - last_seen: None, - last_insert: None, - changed: false, - spans: Vec::new(), - } - } -} - -impl TreeQuery for RawSpans { - - fn query_element_with_metadata(&mut self, element: &Op, m: &OpSetMetadata) -> QueryResult { - // find location to insert - // mark or set - if element.succ.is_empty() { - if let OpType::MarkBegin(md) = &element.action { - let pos = self - .spans - .binary_search_by(|probe| m.lamport_cmp(probe.id, element.id)) - .unwrap_err(); - self.spans.insert(pos, RawSpan { id: element.id, change: element.change, start: self.seen, end: 0, name: md.name.clone(), value: md.value.clone() }); - } - if let OpType::MarkEnd(_) = &element.action { - for s in self.spans.iter_mut() { - if s.id == element.id.prev() { - s.end = self.seen; - break; - } - } - } - } - if element.insert { - self.last_seen = None; - self.last_insert = element.elemid(); - } - if self.last_seen.is_none() && element.visible() { - self.seen += 1; - self.last_seen = element.elemid(); - } - self.pos += 1; - QueryResult::Next - } -} diff --git a/automerge/src/query/spans.rs b/automerge/src/query/spans.rs deleted file mode 100644 index 589dba03..00000000 --- a/automerge/src/query/spans.rs +++ 
/dev/null @@ -1,108 +0,0 @@ -use crate::query::{OpSetMetadata, QueryResult, TreeQuery}; -use crate::types::{ElemId, Op, OpType, ScalarValue}; -use std::collections::HashMap; -use std::fmt::Debug; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct Spans { - pos: usize, - seen: usize, - last_seen: Option, - last_insert: Option, - seen_at_this_mark: Option, - seen_at_last_mark: Option, - ops: Vec, - marks: HashMap, - changed: bool, - pub spans: Vec, -} - -#[derive(Debug, Clone, PartialEq)] -pub struct Span { - pub pos: usize, - pub marks: Vec<(String, ScalarValue)>, -} - -impl Spans { - pub fn new() -> Self { - Spans { - pos: 0, - seen: 0, - last_seen: None, - last_insert: None, - seen_at_last_mark: None, - seen_at_this_mark: None, - changed: false, - ops: Vec::new(), - marks: HashMap::new(), - spans: Vec::new(), - } - } - - pub fn check_marks(&mut self) { - let mut new_marks = HashMap::new(); - for op in &self.ops { - if let OpType::MarkBegin(m) = &op.action { - new_marks.insert(m.name.clone(), m.value.clone()); - } - } - if new_marks != self.marks { - self.changed = true; - self.marks = new_marks; - } - if self.changed - && (self.seen_at_last_mark != self.seen_at_this_mark - || self.seen_at_last_mark.is_none() && self.seen_at_this_mark.is_none()) - { - self.changed = false; - self.seen_at_last_mark = self.seen_at_this_mark; - let mut marks: Vec<_> = self - .marks - .iter() - .map(|(key, val)| (key.clone(), val.clone())) - .collect(); - marks.sort_by(|(k1, _), (k2, _)| k1.cmp(k2)); - self.spans.push(Span { - pos: self.seen, - marks, - }); - } - } -} - -impl TreeQuery for Spans { - /* - fn query_node(&mut self, _child: &OpTreeNode) -> QueryResult { - unimplemented!() - } - */ - - fn query_element_with_metadata(&mut self, element: &Op, m: &OpSetMetadata) -> QueryResult { - // find location to insert - // mark or set - if element.succ.is_empty() { - if let OpType::MarkBegin(_) = &element.action { - let pos = self - .ops - .binary_search_by(|probe| 
m.lamport_cmp(probe.id, element.id)) - .unwrap_err(); - self.ops.insert(pos, element.clone()); - } - if let OpType::MarkEnd(_) = &element.action { - self.ops.retain(|op| op.id != element.id.prev()); - } - } - if element.insert { - self.last_seen = None; - self.last_insert = element.elemid(); - } - if self.last_seen.is_none() && element.visible() { - self.check_marks(); - self.seen += 1; - self.last_seen = element.elemid(); - self.seen_at_this_mark = element.elemid(); - } - self.pos += 1; - QueryResult::Next - } -} diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 54192908..4494f6d9 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -158,25 +158,6 @@ pub enum OpType { Del, Inc(i64), Set(ScalarValue), - MarkBegin(MarkData), - MarkEnd(bool), -} - -impl OpType { - pub(crate) fn mark(name: String, expand: bool, value: ScalarValue) -> Self { - OpType::MarkBegin(MarkData { - name, - expand, - value, - }) - } -} - -#[derive(PartialEq, Debug, Clone)] -pub struct MarkData { - pub name: String, - pub value: ScalarValue, - pub expand: bool, } #[derive(Debug)] @@ -199,10 +180,6 @@ impl OpId { pub fn actor(&self) -> usize { self.1 } - #[inline] - pub fn prev(&self) -> OpId { - OpId(self.0 - 1, self.1) - } } impl Exportable for ObjId { @@ -399,7 +376,7 @@ impl Op { } pub fn visible(&self) -> bool { - if self.is_inc() || self.is_mark() { + if self.is_inc() { false } else if self.is_counter() { self.succ.len() <= self.incs() @@ -424,18 +401,6 @@ impl Op { matches!(&self.action, OpType::Inc(_)) } - pub fn valid_mark_anchor(&self) -> bool { - self.succ.is_empty() - && matches!( - &self.action, - OpType::MarkBegin(MarkData { expand: true, .. 
}) | OpType::MarkEnd(false) - ) - } - - pub fn is_mark(&self) -> bool { - matches!(&self.action, OpType::MarkBegin(_) | OpType::MarkEnd(_)) - } - pub fn is_counter(&self) -> bool { matches!(&self.action, OpType::Set(ScalarValue::Counter(_))) } @@ -470,8 +435,6 @@ impl Op { OpType::Set(value) if self.insert => format!("i:{}", value), OpType::Set(value) => format!("s:{}", value), OpType::Make(obj) => format!("make{}", obj), - OpType::MarkBegin(m) => format!("mark{}={}", m.name, m.value), - OpType::MarkEnd(_) => "unmark".into(), OpType::Inc(val) => format!("inc:{}", val), OpType::Del => "del".to_string(), } diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index d8637283..03d5a5d2 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -54,10 +54,10 @@ fn repeated_map_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); doc1.set(&automerge::ROOT, "field", 123).unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc2.set(&automerge::ROOT, "field", 456).unwrap(); doc1.set(&automerge::ROOT, "field", 789).unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_eq!(doc1.values(&automerge::ROOT, "field").unwrap().len(), 2); doc1.set(&automerge::ROOT, "field", 123).unwrap(); @@ -78,9 +78,9 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { .unwrap() .unwrap(); doc1.insert(&list_id, 0, 123).unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc2.set(&list_id, 0, 456).unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); doc1.set(&list_id, 0, 789).unwrap(); assert_doc!( @@ -123,7 +123,7 @@ fn merge_concurrent_map_prop_updates() { let mut doc2 = new_doc(); doc1.set(&automerge::ROOT, "foo", "bar").unwrap(); doc2.set(&automerge::ROOT, "hello", "world").unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_eq!( doc1.value(&automerge::ROOT, "foo").unwrap().unwrap().0, "bar".into() @@ -135,7 
+135,7 @@ fn merge_concurrent_map_prop_updates() { "hello" => { "world" }, } ); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); assert_doc!( &doc2, map! { @@ -152,10 +152,10 @@ fn add_concurrent_increments_of_same_property() { let mut doc2 = new_doc(); doc1.set(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc1.inc(&automerge::ROOT, "counter", 1).unwrap(); doc2.inc(&automerge::ROOT, "counter", 2).unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, map! { @@ -181,7 +181,7 @@ fn add_increments_only_to_preceeded_values() { doc2.inc(&automerge::ROOT, "counter", 3).unwrap(); // The two values should be conflicting rather than added - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -201,7 +201,7 @@ fn concurrent_updates_of_same_field() { doc1.set(&automerge::ROOT, "field", "one").unwrap(); doc2.set(&automerge::ROOT, "field", "two").unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -223,11 +223,11 @@ fn concurrent_updates_of_same_list_element() { .unwrap() .unwrap(); doc1.insert(&list_id, 0, "finch").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc1.set(&list_id, 0, "greenfinch").unwrap(); doc2.set(&list_id, 0, "goldfinch").unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -252,8 +252,8 @@ fn assignment_conflicts_of_different_types() { .unwrap(); doc3.set(&automerge::ROOT, "field", automerge::Value::map()) .unwrap(); - doc1.merge(&mut doc2); - doc1.merge(&mut doc3); + doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc3).unwrap(); assert_doc!( &doc1, @@ -277,7 +277,7 @@ fn changes_within_conflicting_map_field() { .unwrap() .unwrap(); doc2.set(&map_id, "innerKey", 42).unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -304,7 +304,7 @@ fn changes_within_conflicting_list_element() 
{ .unwrap() .unwrap(); doc1.insert(&list_id, 0, "hello").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); let map_in_doc1 = doc1 .set(&list_id, 0, automerge::Value::map()) @@ -317,11 +317,11 @@ fn changes_within_conflicting_list_element() { .set(&list_id, 0, automerge::Value::map()) .unwrap() .unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); doc2.set(&map_in_doc2, "map2", true).unwrap(); doc2.set(&map_in_doc2, "key", 2).unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -361,7 +361,7 @@ fn concurrently_assigned_nested_maps_should_not_merge() { .unwrap(); doc2.set(&doc2_map_id, "logo_url", "logo.png").unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -392,11 +392,11 @@ fn concurrent_insertions_at_different_list_positions() { doc1.insert(&list_id, 0, "one").unwrap(); doc1.insert(&list_id, 1, "three").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc1.splice(&list_id, 1, 0, vec!["two".into()]).unwrap(); doc2.insert(&list_id, 2, "four").unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -426,10 +426,10 @@ fn concurrent_insertions_at_same_list_position() { .unwrap(); doc1.insert(&list_id, 0, "parakeet").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc1.insert(&list_id, 1, "starling").unwrap(); doc2.insert(&list_id, 1, "chaffinch").unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -456,11 +456,11 @@ fn concurrent_assignment_and_deletion_of_a_map_entry() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); doc1.set(&automerge::ROOT, "bestBird", "robin").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc1.del(&automerge::ROOT, "bestBird").unwrap(); doc2.set(&automerge::ROOT, "bestBird", "magpie").unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -483,7 +483,7 
@@ fn concurrent_assignment_and_deletion_of_list_entry() { doc1.insert(&list_id, 0, "blackbird").unwrap(); doc1.insert(&list_id, 1, "thrush").unwrap(); doc1.insert(&list_id, 2, "goldfinch").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc1.set(&list_id, 1, "starling").unwrap(); doc2.del(&list_id, 1).unwrap(); @@ -508,7 +508,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { } ); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -535,14 +535,14 @@ fn insertion_after_a_deleted_list_element() { doc1.insert(&list_id, 1, "thrush").unwrap(); doc1.insert(&list_id, 2, "goldfinch").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc1.splice(&list_id, 1, 2, Vec::new()).unwrap(); doc2.splice(&list_id, 2, 0, vec!["starling".into()]) .unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -554,7 +554,7 @@ fn insertion_after_a_deleted_list_element() { } ); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); assert_doc!( &doc2, map! { @@ -579,13 +579,13 @@ fn concurrent_deletion_of_same_list_element() { doc1.insert(&list_id, 1, "buzzard").unwrap(); doc1.insert(&list_id, 2, "cormorant").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc1.del(&list_id, 1).unwrap(); doc2.del(&list_id, 1).unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -597,7 +597,7 @@ fn concurrent_deletion_of_same_list_element() { } ); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); assert_doc!( &doc2, map! 
{ @@ -631,12 +631,12 @@ fn concurrent_updates_at_different_levels() { .unwrap(); doc1.insert(&mammals, 0, "badger").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc1.set(&birds, "brown", "sparrow").unwrap(); doc2.del(&animals, "birds").unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_obj!( &doc1, @@ -676,13 +676,13 @@ fn concurrent_updates_of_concurrently_deleted_objects() { .unwrap(); doc1.set(&blackbird, "feathers", "black").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc1.del(&birds, "blackbird").unwrap(); doc2.set(&blackbird, "beak", "orange").unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -704,7 +704,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { .set(&automerge::ROOT, "wisdom", automerge::Value::list()) .unwrap() .unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc1.splice( &wisdom, @@ -734,7 +734,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { ) .unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); assert_doc!( &doc1, @@ -767,7 +767,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id( .unwrap() .unwrap(); doc1.insert(&list, 0, "two").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc2.insert(&list, 0, "one").unwrap(); assert_doc!( @@ -793,7 +793,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() .unwrap() .unwrap(); doc1.insert(&list, 0, "two").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc2.insert(&list, 0, "one").unwrap(); assert_doc!( @@ -817,11 +817,11 @@ fn insertion_consistent_with_causality() { .unwrap() .unwrap(); doc1.insert(&list, 0, "four").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc2.insert(&list, 0, "three").unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); doc1.insert(&list, 0, 
"two").unwrap(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc2.insert(&list, 0, "one").unwrap(); assert_doc!( @@ -861,11 +861,11 @@ fn save_restore_complex() { doc1.set(&first_todo, "done", false).unwrap(); let mut doc2 = new_doc(); - doc2.merge(&mut doc1); + doc2.merge(&mut doc1).unwrap(); doc2.set(&first_todo, "title", "weed plants").unwrap(); doc1.set(&first_todo, "title", "kill plants").unwrap(); - doc1.merge(&mut doc2); + doc1.merge(&mut doc2).unwrap(); let reloaded = Automerge::load(&doc1.save().unwrap()).unwrap(); @@ -918,8 +918,8 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc1.inc(&list, 1, 1)?; doc1.inc(&list, 2, 1)?; - doc1.merge(&mut doc2); - doc1.merge(&mut doc3); + doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc3).unwrap(); let values = doc1.values(&list, 1)?; assert_eq!(values.len(), 3); From b6e0da28d8ee82309d210e3bc2ec57ef567e6ecb Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 10 Feb 2022 11:48:09 -0500 Subject: [PATCH 078/730] fmt --- automerge-wasm/src/lib.rs | 17 ++++-------- automerge/src/automerge.rs | 35 +++++++++++++----------- automerge/src/error.rs | 6 ++-- automerge/src/exid.rs | 4 +-- automerge/src/legacy/serde_impls/opid.rs | 1 - automerge/src/value.rs | 4 +-- 6 files changed, 32 insertions(+), 35 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 9fdd8ff2..44774ef6 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -45,7 +45,7 @@ impl Automerge { #[allow(clippy::should_implement_trait)] pub fn clone(&mut self, actor: Option) -> Result { if self.0.pending_ops() > 0 { - self.0.commit(None,None); + self.0.commit(None, None); } let mut automerge = Automerge(self.0.clone()); if let Some(s) = actor { @@ -81,7 +81,7 @@ impl Automerge { heads } - pub fn merge(&mut self, other: &mut Automerge) -> Result { + pub fn merge(&mut self, other: &mut Automerge) -> Result { let heads = self.0.merge(&mut other.0)?; let heads: Array = heads 
.iter() @@ -128,8 +128,7 @@ impl Automerge { let delete_count = delete_count as usize; let mut vals = vec![]; if let Some(t) = text.as_string() { - self.0 - .splice_text(&obj, start, delete_count, &t)?; + self.0.splice_text(&obj, start, delete_count, &t)?; Ok(None) } else { if let Ok(array) = text.dyn_into::() { @@ -145,9 +144,7 @@ impl Automerge { } } } - let result = self - .0 - .splice(&obj, start, delete_count, vals)?; + let result = self.0.splice(&obj, start, delete_count, vals)?; if result.is_empty() { Ok(None) } else { @@ -183,9 +180,7 @@ impl Automerge { let obj = self.import(obj)?; let index = index as f64; let value = self.import_value(value, datatype)?; - let opid = self - .0 - .insert(&obj, index as usize, value)?; + let opid = self.0.insert(&obj, index as usize, value)?; Ok(opid.map(|id| id.to_string())) } @@ -220,7 +215,7 @@ impl Automerge { let prop = self.import_prop(prop)?; let value: f64 = value .as_f64() - .ok_or(to_js_err("inc needs a numberic value"))?; + .ok_or_else(|| to_js_err("inc needs a numberic value"))?; self.0.inc(&obj, prop, value as i64)?; Ok(()) } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index f4fd2014..c3918354 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1,6 +1,3 @@ -use std::collections::{HashMap, HashSet, VecDeque}; -use unicode_segmentation::UnicodeSegmentation; -use serde::Serialize; use crate::change::{encode_document, export_change}; use crate::exid::ExId; use crate::op_set::OpSet; @@ -10,6 +7,9 @@ use crate::types::{ }; use crate::{legacy, query, types, ObjType}; use crate::{AutomergeError, Change, Prop}; +use serde::Serialize; +use std::collections::{HashMap, HashSet, VecDeque}; +use unicode_segmentation::UnicodeSegmentation; #[derive(Debug, Clone)] pub struct Automerge { @@ -127,10 +127,10 @@ impl Automerge { } pub fn fork(&mut self) -> Self { - self.ensure_transaction_closed(); - let mut f = self.clone(); - f.actor = None; - f + self.ensure_transaction_closed(); 
+ let mut f = self.clone(); + f.actor = None; + f } pub fn commit(&mut self, message: Option, time: Option) -> Vec { @@ -554,13 +554,13 @@ impl Automerge { } fn duplicate_seq(&self, change: &Change) -> bool { - let mut dup = false; - if let Some(actor_index) = self.ops.m.actors.lookup(change.actor_id()) { - if let Some(s) = self.states.get(&actor_index) { - dup = s.len() >= change.seq as usize; - } - } - dup + let mut dup = false; + if let Some(actor_index) = self.ops.m.actors.lookup(change.actor_id()) { + if let Some(s) = self.states.get(&actor_index) { + dup = s.len() >= change.seq as usize; + } + } + dup } pub fn apply_changes(&mut self, changes: &[Change]) -> Result { @@ -568,7 +568,10 @@ impl Automerge { for c in changes { if !self.history_index.contains_key(&c.hash) { if self.duplicate_seq(c) { - return Err(AutomergeError::DuplicateSeqNumber(c.seq,c.actor_id().clone())) + return Err(AutomergeError::DuplicateSeqNumber( + c.seq, + c.actor_id().clone(), + )); } if self.is_causally_ready(c) { self.apply_change(c.clone()); @@ -740,7 +743,7 @@ impl Automerge { } /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge(&mut self, other: &mut Self) -> Result,AutomergeError> { + pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { // TODO: Make this fallible and figure out how to do this transactionally let changes = self .get_changes_added(other) diff --git a/automerge/src/error.rs b/automerge/src/error.rs index 15f1be7c..8afca4f3 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -1,5 +1,5 @@ use crate::decoding; -use crate::types::{ ActorId, ScalarValue}; +use crate::types::{ActorId, ScalarValue}; use crate::value::DataType; use thiserror::Error; @@ -18,7 +18,7 @@ pub enum AutomergeError { #[error("index {0} is out of bounds")] InvalidIndex(usize), #[error("duplicate seq {0} found for actor {1}")] - DuplicateSeqNumber(u64,ActorId), + DuplicateSeqNumber(u64, ActorId), #[error("generic automerge 
error")] Fail, } @@ -37,7 +37,7 @@ impl From for AutomergeError { impl From for wasm_bindgen::JsValue { fn from(err: AutomergeError) -> Self { - js_sys::Error::new(&std::format!("{}", err)).into() + js_sys::Error::new(&std::format!("{}", err)).into() } } diff --git a/automerge/src/exid.rs b/automerge/src/exid.rs index e9ed6cf1..a3eb6087 100644 --- a/automerge/src/exid.rs +++ b/automerge/src/exid.rs @@ -1,9 +1,9 @@ use crate::ActorId; +use serde::Serialize; +use serde::Serializer; use std::cmp::{Ord, Ordering}; use std::fmt; use std::hash::{Hash, Hasher}; -use serde::Serialize; -use serde::Serializer; #[derive(Debug, Clone)] pub enum ExId { diff --git a/automerge/src/legacy/serde_impls/opid.rs b/automerge/src/legacy/serde_impls/opid.rs index 75247157..06792cd4 100644 --- a/automerge/src/legacy/serde_impls/opid.rs +++ b/automerge/src/legacy/serde_impls/opid.rs @@ -23,4 +23,3 @@ impl Serialize for OpId { serializer.serialize_str(self.to_string().as_str()) } } - diff --git a/automerge/src/value.rs b/automerge/src/value.rs index a2a007c1..ac26033c 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -63,11 +63,11 @@ impl Value { } pub fn is_object(&self) -> bool { - matches!(&self, Value::Object(_)) + matches!(&self, Value::Object(_)) } pub fn is_scalar(&self) -> bool { - matches!(&self, Value::Scalar(_)) + matches!(&self, Value::Scalar(_)) } } From ef938fdf0a370058fe93c6bec00864eb37ac1e9e Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 15 Feb 2022 14:02:19 -0500 Subject: [PATCH 079/730] manually handle js types - make sure we have good errors --- automerge-wasm/src/lib.rs | 53 +++++++++++++++++++++------------------ 1 file changed, 29 insertions(+), 24 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 44774ef6..09488c64 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -81,7 +81,7 @@ impl Automerge { heads } - pub fn merge(&mut self, other: &mut Automerge) -> Result { + pub fn merge(&mut 
self, other: &mut Automerge) -> Result { let heads = self.0.merge(&mut other.0)?; let heads: Array = heads .iter() @@ -94,7 +94,7 @@ impl Automerge { self.0.rollback() as f64 } - pub fn keys(&mut self, obj: String, heads: Option) -> Result { + pub fn keys(&mut self, obj: JsValue, heads: Option) -> Result { let obj = self.import(obj)?; let result = if let Some(heads) = get_heads(heads) { self.0.keys_at(&obj, &heads) @@ -107,7 +107,7 @@ impl Automerge { Ok(result) } - pub fn text(&mut self, obj: String, heads: Option) -> Result { + pub fn text(&mut self, obj: JsValue, heads: Option) -> Result { let obj = self.import(obj)?; if let Some(heads) = get_heads(heads) { Ok(self.0.text_at(&obj, &heads)?) @@ -118,7 +118,7 @@ impl Automerge { pub fn splice( &mut self, - obj: String, + obj: JsValue, start: f64, delete_count: f64, text: JsValue, @@ -136,10 +136,10 @@ impl Automerge { if let Ok(array) = i.clone().dyn_into::() { let value = array.get(1); let datatype = array.get(2); - let value = self.import_value(value, datatype.as_string())?; + let value = self.import_value(value, datatype)?; vals.push(value); } else { - let value = self.import_value(i, None)?; + let value = self.import_value(i, JsValue::null())?; vals.push(value); } } @@ -159,9 +159,9 @@ impl Automerge { pub fn push( &mut self, - obj: String, + obj: JsValue, value: JsValue, - datatype: Option, + datatype: JsValue, ) -> Result, JsValue> { let obj = self.import(obj)?; let value = self.import_value(value, datatype)?; @@ -172,10 +172,10 @@ impl Automerge { pub fn insert( &mut self, - obj: String, + obj: JsValue, index: f64, value: JsValue, - datatype: Option, + datatype: JsValue, ) -> Result, JsValue> { let obj = self.import(obj)?; let index = index as f64; @@ -186,22 +186,22 @@ impl Automerge { pub fn set( &mut self, - obj: String, + obj: JsValue, prop: JsValue, value: JsValue, - datatype: Option, - ) -> Result, JsValue> { + datatype: JsValue, + ) -> Result { let obj = self.import(obj)?; let prop = 
self.import_prop(prop)?; let value = self.import_value(value, datatype)?; let opid = self.0.set(&obj, prop, value)?; - Ok(opid.map(|id| id.to_string())) + Ok(opid.map(|id| id.to_string()).into()) } - pub fn make(&mut self, obj: String, prop: JsValue, value: JsValue) -> Result { + pub fn make(&mut self, obj: JsValue, prop: JsValue, value: JsValue) -> Result { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; - let value = self.import_value(value, None)?; + let value = self.import_value(value, JsValue::null())?; if value.is_object() { let opid = self.0.set(&obj, prop, value)?; Ok(opid.unwrap().to_string()) @@ -210,7 +210,7 @@ impl Automerge { } } - pub fn inc(&mut self, obj: String, prop: JsValue, value: JsValue) -> Result<(), JsValue> { + pub fn inc(&mut self, obj: JsValue, prop: JsValue, value: JsValue) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; let value: f64 = value @@ -222,7 +222,7 @@ impl Automerge { pub fn value( &mut self, - obj: String, + obj: JsValue, prop: JsValue, heads: Option, ) -> Result, JsValue> { @@ -256,7 +256,7 @@ impl Automerge { pub fn values( &mut self, - obj: String, + obj: JsValue, arg: JsValue, heads: Option, ) -> Result { @@ -291,7 +291,7 @@ impl Automerge { Ok(result) } - pub fn length(&mut self, obj: String, heads: Option) -> Result { + pub fn length(&mut self, obj: JsValue, heads: Option) -> Result { let obj = self.import(obj)?; if let Some(heads) = get_heads(heads) { Ok(self.0.length_at(&obj, &heads) as f64) @@ -300,7 +300,7 @@ impl Automerge { } } - pub fn del(&mut self, obj: String, prop: JsValue) -> Result<(), JsValue> { + pub fn del(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop = to_prop(prop)?; self.0.del(&obj, prop).map_err(to_js_err)?; @@ -423,8 +423,12 @@ impl Automerge { map_to_js(&self.0, &ROOT) } - fn import(&self, id: String) -> Result { - self.0.import(&id).map_err(to_js_err) + fn import(&self, id: JsValue) 
-> Result { + if let Some(s) = id.as_string() { + Ok(self.0.import(&s)?) + } else { + Err(to_js_err("invalid objid")) + } } fn import_prop(&mut self, prop: JsValue) -> Result { @@ -478,7 +482,8 @@ impl Automerge { } } - fn import_value(&mut self, value: JsValue, datatype: Option) -> Result { + fn import_value(&mut self, value: JsValue, datatype: JsValue) -> Result { + let datatype = datatype.as_string(); match self.import_scalar(&value, &datatype) { Some(val) => Ok(val.into()), None => { From 735a4ab84c5244340378fae642f181913038a56d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 19 Jan 2022 14:31:31 +0000 Subject: [PATCH 080/730] Add explicit transaction API This removes the requirement for `&mut self`s on some of the immutable methods on `Automerge` which can be quite inconvenient. I've reimplemented the main functions on `Automerge` that manipulate state to create a transaction for their op for ease of use but not performance. I've updated the edit trace to run in a single transaction, like on a page load. Wasm API still needs working on at the moment to expose this properly. 
--- automerge/src/automerge.rs | 521 ++++++++++------------------------- automerge/src/change.rs | 2 +- automerge/src/lib.rs | 2 + automerge/src/sync.rs | 2 - automerge/src/transaction.rs | 321 +++++++++++++++++++++ edit-trace/src/main.rs | 6 +- 6 files changed, 477 insertions(+), 377 deletions(-) create mode 100644 automerge/src/transaction.rs diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index c3918354..bfb9f21f 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1,6 +1,9 @@ -use crate::change::{encode_document, export_change}; +use std::collections::{HashMap, HashSet, VecDeque}; + +use crate::change::encode_document; use crate::exid::ExId; use crate::op_set::OpSet; +use crate::transaction::Transaction; use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, ScalarValue, Value, @@ -8,21 +11,18 @@ use crate::types::{ use crate::{legacy, query, types, ObjType}; use crate::{AutomergeError, Change, Prop}; use serde::Serialize; -use std::collections::{HashMap, HashSet, VecDeque}; -use unicode_segmentation::UnicodeSegmentation; #[derive(Debug, Clone)] pub struct Automerge { queue: Vec, - history: Vec, + pub(crate) history: Vec, history_index: HashMap, states: HashMap>, deps: HashSet, saved: Vec, - ops: OpSet, + pub(crate) ops: OpSet, actor: Option, max_op: u64, - transaction: Option, } impl Automerge { @@ -37,12 +37,10 @@ impl Automerge { saved: Default::default(), actor: None, max_op: 0, - transaction: None, } } pub fn set_actor(&mut self, actor: ActorId) { - self.ensure_transaction_closed(); self.actor = Some(self.ops.m.actors.cache(actor)) } @@ -84,117 +82,152 @@ impl Automerge { saved: Default::default(), actor: None, max_op: 0, - transaction: None, }; am.actor = Some(am.ops.m.actors.cache(actor)); am } - pub fn pending_ops(&self) -> u64 { - self.transaction - .as_ref() - .map(|t| t.operations.len() as u64) - .unwrap_or(0) - } + pub fn tx(&mut self) -> 
Transaction { + let actor = self.get_actor_index(); - fn tx(&mut self) -> &mut Transaction { - if self.transaction.is_none() { - let actor = self.get_actor_index(); - - let seq = self.states.entry(actor).or_default().len() as u64 + 1; - let mut deps = self.get_heads(); - if seq > 1 { - let last_hash = self.get_hash(actor, seq - 1).unwrap(); - if !deps.contains(&last_hash) { - deps.push(last_hash); - } + let seq = self.states.entry(actor).or_default().len() as u64 + 1; + let mut deps = self.get_heads(); + if seq > 1 { + let last_hash = self.get_hash(actor, seq - 1).unwrap(); + if !deps.contains(&last_hash) { + deps.push(last_hash); } - - self.transaction = Some(Transaction { - actor, - seq, - start_op: self.max_op + 1, - time: 0, - message: None, - extra_bytes: Default::default(), - hash: None, - operations: vec![], - deps, - }); } - self.transaction.as_mut().unwrap() + Transaction { + actor, + seq, + start_op: self.max_op + 1, + time: 0, + message: None, + extra_bytes: Default::default(), + hash: None, + operations: vec![], + deps, + doc: self, + } } pub fn fork(&mut self) -> Self { - self.ensure_transaction_closed(); let mut f = self.clone(); f.actor = None; f } - pub fn commit(&mut self, message: Option, time: Option) -> Vec { - let tx = self.tx(); - - if message.is_some() { - tx.message = message; - } - - if let Some(t) = time { - tx.time = t; - } - - tx.operations.len(); - - self.ensure_transaction_closed(); - - self.get_heads() - } - - pub fn ensure_transaction_closed(&mut self) { - if let Some(tx) = self.transaction.take() { - self.update_history(export_change(&tx, &self.ops.m.actors, &self.ops.m.props)); - } - } - - pub fn rollback(&mut self) -> usize { - if let Some(tx) = self.transaction.take() { - let num = tx.operations.len(); - // remove in reverse order so sets are removed before makes etc... 
- for op in tx.operations.iter().rev() { - for pred_id in &op.pred { - // FIXME - use query to make this fast - if let Some(p) = self.ops.iter().position(|o| o.id == *pred_id) { - self.ops.replace(op.obj, p, |o| o.remove_succ(op)); - } - } - if let Some(pos) = self.ops.iter().position(|o| o.id == op.id) { - self.ops.remove(op.obj, pos); - } + pub fn set, V: Into>( + &mut self, + obj: &ExId, + prop: P, + value: V, + ) -> Result, AutomergeError> { + let mut tx = self.tx(); + match tx.set(obj, prop, value) { + Ok(opt) => { + tx.commit(None, None); + Ok(opt) + } + Err(e) => { + tx.rollback(); + Err(e) } - num - } else { - 0 } } - fn next_id(&mut self) -> OpId { - let tx = self.tx(); - OpId(tx.start_op + tx.operations.len() as u64, tx.actor) + pub fn insert>( + &mut self, + obj: &ExId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + let mut tx = self.tx(); + match tx.insert(obj, index, value) { + Ok(opt) => { + tx.commit(None, None); + Ok(opt) + } + Err(e) => { + tx.rollback(); + Err(e) + } + } } - fn insert_local_op(&mut self, op: Op, pos: usize, succ_pos: &[usize]) { - for succ in succ_pos { - self.ops.replace(op.obj, *succ, |old_op| { - old_op.add_succ(&op); - }); + pub fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { + let mut tx = self.tx(); + match tx.del(obj, prop) { + Ok(opt) => { + tx.commit(None, None); + Ok(opt) + } + Err(e) => { + tx.rollback(); + Err(e) + } } + } - if !op.is_del() { - self.ops.insert(pos, op.clone()); + pub fn inc>( + &mut self, + obj: &ExId, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + let mut tx = self.tx(); + match tx.inc(obj, prop, value) { + Ok(opt) => { + tx.commit(None, None); + Ok(opt) + } + Err(e) => { + tx.rollback(); + Err(e) + } } + } - self.tx().operations.push(op); + pub fn splice( + &mut self, + obj: &ExId, + pos: usize, + del: usize, + vals: Vec, + ) -> Result, AutomergeError> { + let mut tx = self.tx(); + match tx.splice(obj, pos, del, vals) { + Ok(opt) => { + 
tx.commit(None, None); + Ok(opt) + } + Err(e) => { + tx.rollback(); + Err(e) + } + } + } + + pub fn splice_text( + &mut self, + obj: &ExId, + pos: usize, + del: usize, + text: &str, + ) -> Result, AutomergeError> { + let mut tx = self.tx(); + match tx.splice_text(obj, pos, del, text) { + Ok(opt) => { + tx.commit(None, None); + Ok(opt) + } + Err(e) => { + tx.rollback(); + Err(e) + } + } } fn insert_op(&mut self, op: Op) -> Op { @@ -263,40 +296,7 @@ impl Automerge { } } - // set(obj, prop, value) - value can be scalar or objtype - // del(obj, prop) - // inc(obj, prop, value) - // insert(obj, index, value) - - /// Set the value of property `P` to value `V` in object `obj`. - /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document or create a new object. - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - pub fn set, V: Into>( - &mut self, - obj: &ExId, - prop: P, - value: V, - ) -> Result, AutomergeError> { - let obj = self.exid_to_obj(obj)?; - let value = value.into(); - if let Some(id) = self.local_op(obj, prop.into(), value.into())? { - Ok(Some(self.id_to_exid(id))) - } else { - Ok(None) - } - } - - fn exid_to_obj(&self, id: &ExId) -> Result { + pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result { match id { ExId::Root => Ok(ObjId::root()), ExId::Id(ctr, actor, idx) => { @@ -318,116 +318,10 @@ impl Automerge { } } - fn id_to_exid(&self, id: OpId) -> ExId { + pub(crate) fn id_to_exid(&self, id: OpId) -> ExId { ExId::Id(id.0, self.ops.m.actors.cache[id.1].clone(), id.1) } - pub fn insert>( - &mut self, - obj: &ExId, - index: usize, - value: V, - ) -> Result, AutomergeError> { - let obj = self.exid_to_obj(obj)?; - let value = value.into(); - if let Some(id) = self.do_insert(obj, index, value.into())? 
{ - Ok(Some(self.id_to_exid(id))) - } else { - Ok(None) - } - } - - fn do_insert( - &mut self, - obj: ObjId, - index: usize, - action: OpType, - ) -> Result, AutomergeError> { - let id = self.next_id(); - - let query = self.ops.search(obj, query::InsertNth::new(index)); - - let key = query.key()?; - let is_make = matches!(&action, OpType::Make(_)); - - let op = Op { - change: self.history.len(), - id, - action, - obj, - key, - succ: Default::default(), - pred: Default::default(), - insert: true, - }; - - self.ops.insert(query.pos(), op.clone()); - self.tx().operations.push(op); - - if is_make { - Ok(Some(id)) - } else { - Ok(None) - } - } - - pub fn inc>( - &mut self, - obj: &ExId, - prop: P, - value: i64, - ) -> Result<(), AutomergeError> { - let obj = self.exid_to_obj(obj)?; - self.local_op(obj, prop.into(), OpType::Inc(value))?; - Ok(()) - } - - pub fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { - let obj = self.exid_to_obj(obj)?; - self.local_op(obj, prop.into(), OpType::Del)?; - Ok(()) - } - - /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert - /// the new elements - pub fn splice( - &mut self, - obj: &ExId, - mut pos: usize, - del: usize, - vals: Vec, - ) -> Result, AutomergeError> { - let obj = self.exid_to_obj(obj)?; - for _ in 0..del { - // del() - self.local_op(obj, pos.into(), OpType::Del)?; - } - let mut results = Vec::new(); - for v in vals { - // insert() - let id = self.do_insert(obj, pos, v.into())?; - if let Some(id) = id { - results.push(self.id_to_exid(id)); - } - pos += 1; - } - Ok(results) - } - - pub fn splice_text( - &mut self, - obj: &ExId, - pos: usize, - del: usize, - text: &str, - ) -> Result, AutomergeError> { - let mut vals = vec![]; - for c in text.to_owned().graphemes(true) { - vals.push(c.into()); - } - self.splice(obj, pos, del, vals) - } - pub fn text(&self, obj: &ExId) -> Result { let obj = self.exid_to_obj(obj)?; let query = self.ops.search(obj, query::ListVals::new()); @@ -564,7 +458,6 @@ impl Automerge { } pub fn apply_changes(&mut self, changes: &[Change]) -> Result { - self.ensure_transaction_closed(); for c in changes { if !self.history_index.contains_key(&c.hash) { if self.duplicate_seq(c) { @@ -587,7 +480,6 @@ impl Automerge { } pub fn apply_change(&mut self, change: Change) { - self.ensure_transaction_closed(); let ops = self.import_ops(&change, self.history.len()); self.update_history(change); for op in ops { @@ -595,98 +487,6 @@ impl Automerge { } } - fn local_op( - &mut self, - obj: ObjId, - prop: Prop, - action: OpType, - ) -> Result, AutomergeError> { - match prop { - Prop::Map(s) => self.local_map_op(obj, s, action), - Prop::Seq(n) => self.local_list_op(obj, n, action), - } - } - - fn local_map_op( - &mut self, - obj: ObjId, - prop: String, - action: OpType, - ) -> Result, AutomergeError> { - if prop.is_empty() { - return Err(AutomergeError::EmptyStringKey); - } - - let id = self.next_id(); - let prop = self.ops.m.props.cache(prop); - let query = self.ops.search(obj, query::Prop::new(prop)); - - if query.ops.len() 
== 1 && query.ops[0].is_noop(&action) { - return Ok(None); - } - - let is_make = matches!(&action, OpType::Make(_)); - - let pred = query.ops.iter().map(|op| op.id).collect(); - - let op = Op { - change: self.history.len(), - id, - action, - obj, - key: Key::Map(prop), - succ: Default::default(), - pred, - insert: false, - }; - - self.insert_local_op(op, query.pos, &query.ops_pos); - - if is_make { - Ok(Some(id)) - } else { - Ok(None) - } - } - - fn local_list_op( - &mut self, - obj: ObjId, - index: usize, - action: OpType, - ) -> Result, AutomergeError> { - let query = self.ops.search(obj, query::Nth::new(index)); - - let id = self.next_id(); - let pred = query.ops.iter().map(|op| op.id).collect(); - let key = query.key()?; - - if query.ops.len() == 1 && query.ops[0].is_noop(&action) { - return Ok(None); - } - - let is_make = matches!(&action, OpType::Make(_)); - - let op = Op { - change: self.history.len(), - id, - action, - obj, - key, - succ: Default::default(), - pred, - insert: false, - }; - - self.insert_local_op(op, query.pos, &query.ops_pos); - - if is_make { - Ok(Some(id)) - } else { - Ok(None) - } - } - fn is_causally_ready(&self, change: &Change) -> bool { change .deps @@ -755,7 +555,6 @@ impl Automerge { } pub fn save(&mut self) -> Result, AutomergeError> { - self.ensure_transaction_closed(); // TODO - would be nice if I could pass an iterator instead of a collection here let c: Vec<_> = self.history.iter().map(|c| c.decode()).collect(); let ops: Vec<_> = self.ops.iter().cloned().collect(); @@ -774,7 +573,6 @@ impl Automerge { // should this return an empty vec instead of None? 
pub fn save_incremental(&mut self) -> Vec { - self.ensure_transaction_closed(); let changes = self._get_changes(self.saved.as_slice()); let mut bytes = vec![]; for c in changes { @@ -847,8 +645,7 @@ impl Automerge { } } - pub fn get_missing_deps(&mut self, heads: &[ChangeHash]) -> Vec { - self.ensure_transaction_closed(); + pub fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { self._get_missing_deps(heads) } @@ -932,8 +729,7 @@ impl Automerge { .collect() } - pub fn get_last_local_change(&mut self) -> Option<&Change> { - self.ensure_transaction_closed(); + pub fn get_last_local_change(&self) -> Option<&Change> { if let Some(actor) = &self.actor { let actor = &self.ops.m.actors[*actor]; return self.history.iter().rev().find(|c| c.actor_id() == actor); @@ -941,8 +737,7 @@ impl Automerge { None } - pub fn get_changes(&mut self, have_deps: &[ChangeHash]) -> Vec<&Change> { - self.ensure_transaction_closed(); + pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { self._get_changes(have_deps) } @@ -974,8 +769,7 @@ impl Automerge { clock } - pub fn get_change_by_hash(&mut self, hash: &ChangeHash) -> Option<&Change> { - self.ensure_transaction_closed(); + pub fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { self._get_change_by_hash(hash) } @@ -985,9 +779,7 @@ impl Automerge { .and_then(|index| self.history.get(*index)) } - pub fn get_changes_added<'a>(&mut self, other: &'a mut Self) -> Vec<&'a Change> { - self.ensure_transaction_closed(); - other.ensure_transaction_closed(); + pub fn get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { self._get_changes_added(other) } @@ -1015,8 +807,7 @@ impl Automerge { .collect() } - pub fn get_heads(&mut self) -> Vec { - self.ensure_transaction_closed(); + pub fn get_heads(&self) -> Vec { self._get_heads() } @@ -1026,7 +817,7 @@ impl Automerge { deps } - fn get_hash(&mut self, actor: usize, seq: u64) -> Result { + fn get_hash(&self, actor: usize, seq: u64) -> Result { 
self.states .get(&actor) .and_then(|v| v.get(seq as usize - 1)) @@ -1035,7 +826,7 @@ impl Automerge { .ok_or(AutomergeError::InvalidSeq(seq)) } - fn update_history(&mut self, change: Change) -> usize { + pub(crate) fn update_history(&mut self, change: Change) -> usize { self.max_op = std::cmp::max(self.max_op, change.start_op + change.len() as u64 - 1); self.update_deps(&change); @@ -1136,19 +927,6 @@ impl Automerge { } } -#[derive(Debug, Clone)] -pub(crate) struct Transaction { - pub actor: usize, - pub seq: u64, - pub start_op: u64, - pub time: i64, - pub message: Option, - pub extra_bytes: Vec, - pub hash: Option, - pub deps: Vec, - pub operations: Vec, -} - impl Default for Automerge { fn default() -> Self { Self::new() @@ -1177,7 +955,6 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); doc.set(&ROOT, "hello", "world")?; - assert!(doc.pending_ops() == 1); doc.value(&ROOT, "hello")?; Ok(()) } @@ -1280,11 +1057,11 @@ mod tests { fn test_save_text() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let text = doc.set(&ROOT, "text", Value::text())?.unwrap(); - let heads1 = doc.commit(None, None); + let heads1 = doc.get_heads(); doc.splice_text(&text, 0, 0, "hello world")?; - let heads2 = doc.commit(None, None); + let heads2 = doc.get_heads(); doc.splice_text(&text, 6, 0, "big bad ")?; - let heads3 = doc.commit(None, None); + let heads3 = doc.get_heads(); assert!(&doc.text(&text)? 
== "hello big bad world"); assert!(&doc.text_at(&text, &heads1)?.is_empty()); @@ -1299,19 +1076,19 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor("aaaa".try_into().unwrap()); doc.set(&ROOT, "prop1", "val1")?; - doc.commit(None, None); + doc.get_heads(); let heads1 = doc.get_heads(); doc.set(&ROOT, "prop1", "val2")?; - doc.commit(None, None); + doc.get_heads(); let heads2 = doc.get_heads(); doc.set(&ROOT, "prop2", "val3")?; - doc.commit(None, None); + doc.get_heads(); let heads3 = doc.get_heads(); doc.del(&ROOT, "prop1")?; - doc.commit(None, None); + doc.get_heads(); let heads4 = doc.get_heads(); doc.set(&ROOT, "prop3", "val4")?; - doc.commit(None, None); + doc.get_heads(); let heads5 = doc.get_heads(); assert!(doc.keys_at(&ROOT, &heads1) == vec!["prop1".to_owned()]); assert_eq!(doc.length_at(&ROOT, &heads1), 1); @@ -1358,24 +1135,24 @@ mod tests { doc.set_actor("aaaa".try_into().unwrap()); let list = doc.set(&ROOT, "list", Value::list())?.unwrap(); - let heads1 = doc.commit(None, None); + let heads1 = doc.get_heads(); doc.insert(&list, 0, Value::int(10))?; - let heads2 = doc.commit(None, None); + let heads2 = doc.get_heads(); doc.set(&list, 0, Value::int(20))?; doc.insert(&list, 0, Value::int(30))?; - let heads3 = doc.commit(None, None); + let heads3 = doc.get_heads(); doc.set(&list, 1, Value::int(40))?; doc.insert(&list, 1, Value::int(50))?; - let heads4 = doc.commit(None, None); + let heads4 = doc.get_heads(); doc.del(&list, 2)?; - let heads5 = doc.commit(None, None); + let heads5 = doc.get_heads(); doc.del(&list, 0)?; - let heads6 = doc.commit(None, None); + let heads6 = doc.get_heads(); assert!(doc.length_at(&list, &heads1) == 0); assert!(doc.value_at(&list, 0, &heads1)?.is_none()); diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 42576e7f..c1208609 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -1,4 +1,3 @@ -use crate::automerge::Transaction; use crate::columnar::{ ChangeEncoder, ChangeIterator, 
ColumnEncoder, DepsIterator, DocChange, DocOp, DocOpEncoder, DocOpIterator, OperationIterator, COLUMN_TYPE_DEFLATE, @@ -9,6 +8,7 @@ use crate::encoding::{Encodable, DEFLATE_MIN_SIZE}; use crate::error::AutomergeError; use crate::indexed_cache::IndexedCache; use crate::legacy as amp; +use crate::transaction::Transaction; use crate::types; use crate::types::{ActorId, ElemId, Key, ObjId, Op, OpId, OpType}; use core::ops::Range; diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 27de9c39..652f85e6 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -38,6 +38,7 @@ mod op_set; mod op_tree; mod query; mod sync; +mod transaction; mod types; mod value; #[cfg(feature = "optree-visualisation")] @@ -49,6 +50,7 @@ pub use error::AutomergeError; pub use exid::ExId as ObjId; pub use legacy::Change as ExpandedChange; pub use sync::{BloomFilter, SyncHave, SyncMessage, SyncState}; +pub use transaction::Transaction; pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; pub use value::{ScalarValue, Value}; diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index a8b0c3eb..69a6ad1c 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -23,7 +23,6 @@ const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identif impl Automerge { pub fn generate_sync_message(&mut self, sync_state: &mut SyncState) -> Option { - self.ensure_transaction_closed(); self._generate_sync_message(sync_state) } @@ -105,7 +104,6 @@ impl Automerge { sync_state: &mut SyncState, message: SyncMessage, ) -> Result, AutomergeError> { - self.ensure_transaction_closed(); self._receive_sync_message(sync_state, message) } diff --git a/automerge/src/transaction.rs b/automerge/src/transaction.rs new file mode 100644 index 00000000..6c010708 --- /dev/null +++ b/automerge/src/transaction.rs @@ -0,0 +1,321 @@ +use std::ops::Deref; + +use crate::exid::ExId; +use crate::query; +use crate::types::{Key, ObjId, OpId}; +use crate::{change::export_change, types::Op, 
Automerge, ChangeHash, Prop, Value}; +use crate::{AutomergeError, OpType}; +use unicode_segmentation::UnicodeSegmentation; + +#[derive(Debug)] +pub struct Transaction<'a> { + pub(crate) actor: usize, + pub(crate) seq: u64, + pub(crate) start_op: u64, + pub(crate) time: i64, + pub(crate) message: Option, + pub(crate) extra_bytes: Vec, + pub(crate) hash: Option, + pub(crate) deps: Vec, + pub(crate) operations: Vec, + pub(crate) doc: &'a mut Automerge, +} + +impl<'a> Transaction<'a> { + pub fn pending_ops(&self) -> usize { + self.operations.len() + } + + /// Commit the operations performed in this transaction, returning the hashes corresponding to + /// the new heads. + pub fn commit(mut self, message: Option, time: Option) -> Vec { + if message.is_some() { + self.message = message; + } + + if let Some(t) = time { + self.time = t; + } + + self.operations.len(); + + self.doc.update_history(export_change( + &self, + &self.doc.ops.m.actors, + &self.doc.ops.m.props, + )); + + self.doc.get_heads() + } + + /// Undo the operations added in this transaction, returning the number of cancelled + /// operations. + pub fn rollback(self) -> usize { + let num = self.operations.len(); + for op in &self.operations { + for pred_id in &op.pred { + // FIXME - use query to make this fast + if let Some(p) = self.doc.ops.iter().position(|o| o.id == *pred_id) { + self.doc.ops.replace(op.obj, p, |o| o.remove_succ(op)); + } + } + if let Some(pos) = self.doc.ops.iter().position(|o| o.id == op.id) { + self.doc.ops.remove(op.obj, pos); + } + } + num + } + + /// Set the value of property `P` to value `V` in object `obj`. 
+ /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + pub fn set, V: Into>( + &mut self, + obj: &ExId, + prop: P, + value: V, + ) -> Result, AutomergeError> { + let obj = self.doc.exid_to_obj(obj)?; + let value = value.into(); + if let Some(id) = self.local_op(obj, prop.into(), value.into())? { + Ok(Some(self.doc.id_to_exid(id))) + } else { + Ok(None) + } + } + + fn next_id(&mut self) -> OpId { + OpId(self.start_op + self.operations.len() as u64, self.actor) + } + + fn insert_local_op(&mut self, op: Op, pos: usize, succ_pos: &[usize]) { + for succ in succ_pos { + self.doc.ops.replace(op.obj, *succ, |old_op| { + old_op.add_succ(&op); + }); + } + + if !op.is_del() { + self.doc.ops.insert(pos, op.clone()); + } + + self.operations.push(op); + } + + pub fn insert>( + &mut self, + obj: &ExId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + let obj = self.doc.exid_to_obj(obj)?; + if let Some(id) = self.do_insert(obj, index, value)? 
{ + Ok(Some(self.doc.id_to_exid(id))) + } else { + Ok(None) + } + } + + fn do_insert>( + &mut self, + obj: ObjId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + let id = self.next_id(); + + let query = self.doc.ops.search(obj, query::InsertNth::new(index)); + + let key = query.key()?; + let value = value.into(); + let action = value.into(); + let is_make = matches!(&action, OpType::Make(_)); + + let op = Op { + change: self.doc.history.len(), + id, + action, + obj, + key, + succ: Default::default(), + pred: Default::default(), + insert: true, + }; + + self.doc.ops.insert(query.pos(), op.clone()); + self.operations.push(op); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + pub(crate) fn local_op( + &mut self, + obj: ObjId, + prop: Prop, + action: OpType, + ) -> Result, AutomergeError> { + match prop { + Prop::Map(s) => self.local_map_op(obj, s, action), + Prop::Seq(n) => self.local_list_op(obj, n, action), + } + } + + fn local_map_op( + &mut self, + obj: ObjId, + prop: String, + action: OpType, + ) -> Result, AutomergeError> { + if prop.is_empty() { + return Err(AutomergeError::EmptyStringKey); + } + + let id = self.next_id(); + let prop = self.doc.ops.m.props.cache(prop); + let query = self.doc.ops.search(obj, query::Prop::new(prop)); + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } + + let is_make = matches!(&action, OpType::Make(_)); + + let pred = query.ops.iter().map(|op| op.id).collect(); + + let op = Op { + change: self.doc.history.len(), + id, + action, + obj, + key: Key::Map(prop), + succ: Default::default(), + pred, + insert: false, + }; + + self.insert_local_op(op, query.pos, &query.ops_pos); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + fn local_list_op( + &mut self, + obj: ObjId, + index: usize, + action: OpType, + ) -> Result, AutomergeError> { + let query = self.doc.ops.search(obj, query::Nth::new(index)); + + let id = self.next_id(); + let pred = 
query.ops.iter().map(|op| op.id).collect(); + let key = query.key()?; + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } + + let is_make = matches!(&action, OpType::Make(_)); + + let op = Op { + change: self.doc.history.len(), + id, + action, + obj, + key, + succ: Default::default(), + pred, + insert: false, + }; + + self.insert_local_op(op, query.pos, &query.ops_pos); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + pub fn inc>( + &mut self, + obj: &ExId, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + let obj = self.doc.exid_to_obj(obj)?; + self.local_op(obj, prop.into(), OpType::Inc(value))?; + Ok(()) + } + + pub fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { + let obj = self.doc.exid_to_obj(obj)?; + self.local_op(obj, prop.into(), OpType::Del)?; + Ok(()) + } + + /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert + /// the new elements + pub fn splice( + &mut self, + obj: &ExId, + mut pos: usize, + del: usize, + vals: Vec, + ) -> Result, AutomergeError> { + let obj = self.doc.exid_to_obj(obj)?; + for _ in 0..del { + // del() + self.local_op(obj, pos.into(), OpType::Del)?; + } + let mut results = Vec::new(); + for v in vals { + // insert() + let id = self.do_insert(obj, pos, v.clone())?; + if let Some(id) = id { + results.push(self.doc.id_to_exid(id)); + } + pos += 1; + } + Ok(results) + } + + pub fn splice_text( + &mut self, + obj: &ExId, + pos: usize, + del: usize, + text: &str, + ) -> Result, AutomergeError> { + let mut vals = vec![]; + for c in text.to_owned().graphemes(true) { + vals.push(c.into()); + } + self.splice(obj, pos, del, vals) + } +} + +impl<'a> Deref for Transaction<'a> { + type Target = Automerge; + + fn deref(&self) -> &Self::Target { + self.doc + } +} diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index 6c54bcad..a2f233cb 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -19,13 
+19,15 @@ fn main() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let now = Instant::now(); - let text = doc.set(&ROOT, "text", Value::text()).unwrap().unwrap(); + let mut tx = doc.tx(); + let text = tx.set(&ROOT, "text", Value::text()).unwrap().unwrap(); for (i, (pos, del, vals)) in commands.into_iter().enumerate() { if i % 1000 == 0 { println!("Processed {} edits in {} ms", i, now.elapsed().as_millis()); } - doc.splice(&text, pos, del, vals)?; + tx.splice(&text, pos, del, vals)?; } + tx.commit(None, None); let _ = doc.save(); println!("Done in {} ms", now.elapsed().as_millis()); Ok(()) From d7da7267d9670cbdbfb1ebbac96f88ee2dddeabc Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 19 Jan 2022 15:27:15 +0000 Subject: [PATCH 081/730] Initial wasm fix --- automerge-wasm/src/lib.rs | 7 +++++++ automerge-wasm/src/transaction.rs | 29 +++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 automerge-wasm/src/transaction.rs diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 44774ef6..33a654a0 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -8,6 +8,7 @@ use wasm_bindgen::JsCast; mod interop; mod sync; +mod transaction; mod value; use interop::{ @@ -16,6 +17,8 @@ use interop::{ use sync::SyncState; use value::{datatype, ScalarValue}; +pub use transaction::Transaction; + #[allow(unused_macros)] macro_rules! 
log { ( $( $t:tt )* ) => { @@ -94,6 +97,10 @@ impl Automerge { self.0.rollback() as f64 } + pub fn tx(&mut self) -> Transaction { + Transaction(self.0.tx()) + } + pub fn keys(&mut self, obj: String, heads: Option) -> Result { let obj = self.import(obj)?; let result = if let Some(heads) = get_heads(heads) { diff --git a/automerge-wasm/src/transaction.rs b/automerge-wasm/src/transaction.rs new file mode 100644 index 00000000..237e3370 --- /dev/null +++ b/automerge-wasm/src/transaction.rs @@ -0,0 +1,29 @@ +use js_sys::Array; +use wasm_bindgen::prelude::*; + +#[wasm_bindgen] +#[derive(Debug)] +pub struct Transaction(pub(crate) automerge::Transaction<'static>); + +#[wasm_bindgen] +impl Transaction { + #[wasm_bindgen(js_name = pendingOps)] + pub fn pending_ops(&self) -> JsValue { + (self.0.pending_ops() as u32).into() + } + + pub fn commit(&mut self, message: JsValue, time: JsValue) -> Array { + let message = message.as_string(); + let time = time.as_f64().map(|v| v as i64); + let heads = self.0.commit(message, time); + let heads: Array = heads + .iter() + .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .collect(); + heads + } + + pub fn rollback(&mut self) -> JsValue { + self.0.rollback().into() + } +} From 7cbd6effb799ead9c644b200c761ebaae1fa45d4 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 16 Feb 2022 13:35:55 +0000 Subject: [PATCH 082/730] Add autotxn document for wasm and cross-language use These don't have the ability to preserve the semantics of the reference based transaction model and so can make use of the nicer auto transaction model. 
--- automerge-wasm/src/interop.rs | 4 +- automerge-wasm/src/lib.rs | 13 +- automerge-wasm/src/transaction.rs | 29 -- automerge/src/automerge.rs | 24 +- automerge/src/autotxn.rs | 401 ++++++++++++++++++++++ automerge/src/change.rs | 4 +- automerge/src/lib.rs | 2 + automerge/src/transaction.rs | 533 ++++++++++++++++++------------ edit-trace/benches/main.rs | 55 ++- 9 files changed, 788 insertions(+), 277 deletions(-) delete mode 100644 automerge-wasm/src/transaction.rs create mode 100644 automerge/src/autotxn.rs diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 67d0bb0f..839f958c 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -288,7 +288,7 @@ pub(crate) fn get_heads(heads: Option) -> Option> { heads.ok() } -pub(crate) fn map_to_js(doc: &am::Automerge, obj: &ObjId) -> JsValue { +pub(crate) fn map_to_js(doc: &am::AutoTxn, obj: &ObjId) -> JsValue { let keys = doc.keys(obj); let map = Object::new(); for k in keys { @@ -311,7 +311,7 @@ pub(crate) fn map_to_js(doc: &am::Automerge, obj: &ObjId) -> JsValue { map.into() } -fn list_to_js(doc: &am::Automerge, obj: &ObjId) -> JsValue { +fn list_to_js(doc: &am::AutoTxn, obj: &ObjId) -> JsValue { let len = doc.length(obj); let array = Array::new(); for i in 0..len { diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 33a654a0..2d6c873f 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -8,7 +8,6 @@ use wasm_bindgen::JsCast; mod interop; mod sync; -mod transaction; mod value; use interop::{ @@ -17,8 +16,6 @@ use interop::{ use sync::SyncState; use value::{datatype, ScalarValue}; -pub use transaction::Transaction; - #[allow(unused_macros)] macro_rules! 
log { ( $( $t:tt )* ) => { @@ -32,12 +29,12 @@ static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[wasm_bindgen] #[derive(Debug)] -pub struct Automerge(automerge::Automerge); +pub struct Automerge(automerge::AutoTxn); #[wasm_bindgen] impl Automerge { pub fn new(actor: Option) -> Result { - let mut automerge = automerge::Automerge::new(); + let mut automerge = automerge::AutoTxn::new(); if let Some(a) = actor { let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec()); automerge.set_actor(a); @@ -97,10 +94,6 @@ impl Automerge { self.0.rollback() as f64 } - pub fn tx(&mut self) -> Transaction { - Transaction(self.0.tx()) - } - pub fn keys(&mut self, obj: String, heads: Option) -> Result { let obj = self.import(obj)?; let result = if let Some(heads) = get_heads(heads) { @@ -512,7 +505,7 @@ pub fn init(actor: Option) -> Result { #[wasm_bindgen(js_name = loadDoc)] pub fn load(data: Uint8Array, actor: Option) -> Result { let data = data.to_vec(); - let mut automerge = am::Automerge::load(&data).map_err(to_js_err)?; + let mut automerge = am::AutoTxn::load(&data).map_err(to_js_err)?; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); automerge.set_actor(actor) diff --git a/automerge-wasm/src/transaction.rs b/automerge-wasm/src/transaction.rs deleted file mode 100644 index 237e3370..00000000 --- a/automerge-wasm/src/transaction.rs +++ /dev/null @@ -1,29 +0,0 @@ -use js_sys::Array; -use wasm_bindgen::prelude::*; - -#[wasm_bindgen] -#[derive(Debug)] -pub struct Transaction(pub(crate) automerge::Transaction<'static>); - -#[wasm_bindgen] -impl Transaction { - #[wasm_bindgen(js_name = pendingOps)] - pub fn pending_ops(&self) -> JsValue { - (self.0.pending_ops() as u32).into() - } - - pub fn commit(&mut self, message: JsValue, time: JsValue) -> Array { - let message = message.as_string(); - let time = time.as_f64().map(|v| v as i64); - let heads = self.0.commit(message, time); - let heads: 
Array = heads - .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) - .collect(); - heads - } - - pub fn rollback(&mut self) -> JsValue { - self.0.rollback().into() - } -} diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index bfb9f21f..0d414329 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -3,7 +3,7 @@ use std::collections::{HashMap, HashSet, VecDeque}; use crate::change::encode_document; use crate::exid::ExId; use crate::op_set::OpSet; -use crate::transaction::Transaction; +use crate::transaction::{Transaction, TransactionInner}; use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, ScalarValue, Value, @@ -12,17 +12,18 @@ use crate::{legacy, query, types, ObjType}; use crate::{AutomergeError, Change, Prop}; use serde::Serialize; +/// An automerge document. #[derive(Debug, Clone)] pub struct Automerge { - queue: Vec, + pub(crate) queue: Vec, pub(crate) history: Vec, - history_index: HashMap, - states: HashMap>, - deps: HashSet, - saved: Vec, + pub(crate) history_index: HashMap, + pub(crate) states: HashMap>, + pub(crate) deps: HashSet, + pub(crate) saved: Vec, pub(crate) ops: OpSet, - actor: Option, - max_op: u64, + pub(crate) actor: Option, + pub(crate) max_op: u64, } impl Automerge { @@ -62,7 +63,7 @@ impl Automerge { self.actor.map(|i| self.ops.m.actors[i].clone()) } - fn get_actor_index(&mut self) -> usize { + pub(crate) fn get_actor_index(&mut self) -> usize { if let Some(actor) = self.actor { actor } else { @@ -99,7 +100,7 @@ impl Automerge { } } - Transaction { + let tx_inner = TransactionInner { actor, seq, start_op: self.max_op + 1, @@ -109,6 +110,9 @@ impl Automerge { hash: None, operations: vec![], deps, + }; + Transaction { + inner: tx_inner, doc: self, } } diff --git a/automerge/src/autotxn.rs b/automerge/src/autotxn.rs new file mode 100644 index 00000000..ba02eafb --- /dev/null +++ b/automerge/src/autotxn.rs @@ -0,0 +1,401 @@ +use 
crate::exid::ExId; +use crate::types::Patch; +use crate::{ + change::export_change, transaction::TransactionInner, ActorId, Automerge, AutomergeError, + Change, ChangeHash, Prop, Value, +}; +use crate::{SyncMessage, SyncState}; + +/// An automerge document that automatically manages transactions. +#[derive(Debug, Clone)] +pub struct AutoTxn { + doc: Automerge, + transaction: Option, +} + +impl Default for AutoTxn { + fn default() -> Self { + Self::new() + } +} + +impl AutoTxn { + pub fn new() -> Self { + Self { + doc: Automerge::new(), + transaction: None, + } + } + + pub fn set_actor(&mut self, actor: ActorId) { + self.ensure_transaction_closed(); + self.doc.set_actor(actor) + } + + pub fn get_actor(&mut self) -> ActorId { + self.doc.get_actor() + } + + pub fn maybe_get_actor(&self) -> Option { + self.doc.maybe_get_actor() + } + + pub fn new_with_actor_id(actor: ActorId) -> Self { + Self { + doc: Automerge::new_with_actor_id(actor), + transaction: None, + } + } + + pub fn pending_ops(&self) -> usize { + self.transaction + .as_ref() + .map(|t| t.pending_ops()) + .unwrap_or(0) + } + + fn tx(&mut self) { + if self.transaction.is_none() { + let actor = self.doc.get_actor_index(); + + let seq = self.doc.states.entry(actor).or_default().len() as u64 + 1; + let mut deps = self.get_heads(); + if seq > 1 { + let last_hash = self.get_hash(actor, seq - 1).unwrap(); + if !deps.contains(&last_hash) { + deps.push(last_hash); + } + } + + self.transaction = Some(TransactionInner { + actor, + seq, + start_op: self.doc.max_op + 1, + time: 0, + message: None, + extra_bytes: Default::default(), + hash: None, + operations: vec![], + deps, + }); + } + } + + pub fn fork(&mut self) -> Self { + self.ensure_transaction_closed(); + Self { + doc: self.doc.fork(), + transaction: self.transaction.clone(), + } + } + + pub fn commit(&mut self, message: Option, time: Option) -> Vec { + // ensure that even no changes triggers a change + self.tx(); + self.transaction + .take() + .map(|tx| 
tx.commit(&mut self.doc, message, time)) + .unwrap_or_else(|| self.doc.get_heads()) + } + + pub fn ensure_transaction_closed(&mut self) { + if let Some(tx) = self.transaction.take() { + self.update_history(export_change( + &tx, + &self.doc.ops.m.actors, + &self.doc.ops.m.props, + )); + } + } + + pub fn rollback(&mut self) -> usize { + self.transaction + .take() + .map(|tx| tx.rollback(&mut self.doc)) + .unwrap_or(0) + } + + // KeysAt::() + // LenAt::() + // PropAt::() + // NthAt::() + + pub fn keys(&self, obj: &ExId) -> Vec { + self.doc.keys(obj) + } + + pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec { + self.doc.keys_at(obj, heads) + } + + pub fn length(&self, obj: &ExId) -> usize { + self.doc.length(obj) + } + + pub fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize { + self.doc.length_at(obj, heads) + } + + // set(obj, prop, value) - value can be scalar or objtype + // del(obj, prop) + // inc(obj, prop, value) + // insert(obj, index, value) + + /// Set the value of property `P` to value `V` in object `obj`. + /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document or create a new object. 
+ /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + pub fn set, V: Into>( + &mut self, + obj: &ExId, + prop: P, + value: V, + ) -> Result, AutomergeError> { + self.tx(); + let tx = self.transaction.as_mut().unwrap(); + tx.set(&mut self.doc, obj, prop, value) + } + + pub fn insert>( + &mut self, + obj: &ExId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + self.tx(); + let tx = self.transaction.as_mut().unwrap(); + tx.insert(&mut self.doc, obj, index, value) + } + + pub fn inc>( + &mut self, + obj: &ExId, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + self.tx(); + let tx = self.transaction.as_mut().unwrap(); + tx.inc(&mut self.doc, obj, prop, value) + } + + pub fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { + self.tx(); + let tx = self.transaction.as_mut().unwrap(); + tx.del(&mut self.doc, obj, prop) + } + + /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert + /// the new elements + pub fn splice( + &mut self, + obj: &ExId, + pos: usize, + del: usize, + vals: Vec, + ) -> Result, AutomergeError> { + self.tx(); + let tx = self.transaction.as_mut().unwrap(); + tx.splice(&mut self.doc, obj, pos, del, vals) + } + + pub fn splice_text( + &mut self, + obj: &ExId, + pos: usize, + del: usize, + text: &str, + ) -> Result, AutomergeError> { + self.tx(); + let tx = self.transaction.as_mut().unwrap(); + tx.splice_text(&mut self.doc, obj, pos, del, text) + } + + pub fn text(&self, obj: &ExId) -> Result { + self.doc.text(obj) + } + + pub fn text_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Result { + self.doc.text_at(obj, heads) + } + + // TODO - I need to return these OpId's here **only** to get + // the legacy conflicts format of { [opid]: value } + // Something better? 
+ pub fn value>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError> { + self.doc.value(obj, prop) + } + + pub fn value_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.value_at(obj, prop, heads) + } + + pub fn values>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError> { + self.doc.values(obj, prop) + } + + pub fn values_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.values_at(obj, prop, heads) + } + + pub fn load(data: &[u8]) -> Result { + let doc = Automerge::load(data)?; + Ok(Self { + doc, + transaction: None, + }) + } + + pub fn load_incremental(&mut self, data: &[u8]) -> Result { + self.ensure_transaction_closed(); + self.doc.load_incremental(data) + } + + pub fn apply_changes(&mut self, changes: &[Change]) -> Result { + self.ensure_transaction_closed(); + self.doc.apply_changes(changes) + } + + pub fn apply_change(&mut self, change: Change) { + self.ensure_transaction_closed(); + self.doc.apply_change(change) + } + + /// Takes all the changes in `other` which are not in `self` and applies them + pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { + self.ensure_transaction_closed(); + other.ensure_transaction_closed(); + self.doc.merge(&mut other.doc) + } + + pub fn save(&mut self) -> Result, AutomergeError> { + self.ensure_transaction_closed(); + self.doc.save() + } + + // should this return an empty vec instead of None? 
+ pub fn save_incremental(&mut self) -> Vec { + self.ensure_transaction_closed(); + self.doc.save_incremental() + } + + pub fn get_missing_deps(&mut self, heads: &[ChangeHash]) -> Vec { + self.ensure_transaction_closed(); + self.doc.get_missing_deps(heads) + } + + pub fn get_last_local_change(&mut self) -> Option<&Change> { + self.ensure_transaction_closed(); + self.doc.get_last_local_change() + } + + pub fn get_changes(&mut self, have_deps: &[ChangeHash]) -> Vec<&Change> { + self.ensure_transaction_closed(); + self.doc.get_changes(have_deps) + } + + pub fn get_change_by_hash(&mut self, hash: &ChangeHash) -> Option<&Change> { + self.ensure_transaction_closed(); + self.doc.get_change_by_hash(hash) + } + + pub fn get_changes_added<'a>(&mut self, other: &'a mut Self) -> Vec<&'a Change> { + self.ensure_transaction_closed(); + other.ensure_transaction_closed(); + self.doc.get_changes_added(&other.doc) + } + + pub fn get_heads(&mut self) -> Vec { + self.ensure_transaction_closed(); + self.doc.get_heads() + } + + fn get_hash(&mut self, actor: usize, seq: u64) -> Result { + self.doc + .states + .get(&actor) + .and_then(|v| v.get(seq as usize - 1)) + .and_then(|&i| self.doc.history.get(i)) + .map(|c| c.hash) + .ok_or(AutomergeError::InvalidSeq(seq)) + } + + fn update_history(&mut self, change: Change) -> usize { + self.doc.max_op = std::cmp::max(self.doc.max_op, change.start_op + change.len() as u64 - 1); + + self.update_deps(&change); + + let history_index = self.doc.history.len(); + + self.doc + .states + .entry(self.doc.ops.m.actors.cache(change.actor_id().clone())) + .or_default() + .push(history_index); + + self.doc.history_index.insert(change.hash, history_index); + self.doc.history.push(change); + + history_index + } + + fn update_deps(&mut self, change: &Change) { + for d in &change.deps { + self.doc.deps.remove(d); + } + self.doc.deps.insert(change.hash); + } + + pub fn import(&self, s: &str) -> Result { + self.doc.import(s) + } + + pub fn dump(&self) { + 
self.doc.dump() + } + + pub fn generate_sync_message(&mut self, sync_state: &mut SyncState) -> Option { + self.ensure_transaction_closed(); + self.doc.generate_sync_message(sync_state) + } + + pub fn receive_sync_message( + &mut self, + sync_state: &mut SyncState, + message: SyncMessage, + ) -> Result, AutomergeError> { + self.ensure_transaction_closed(); + self.doc.receive_sync_message(sync_state, message) + } + + #[cfg(feature = "optree-visualisation")] + pub fn visualise_optree(&self) -> String { + self.doc.visualise_optree() + } +} diff --git a/automerge/src/change.rs b/automerge/src/change.rs index c1208609..67c56d6c 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -8,7 +8,7 @@ use crate::encoding::{Encodable, DEFLATE_MIN_SIZE}; use crate::error::AutomergeError; use crate::indexed_cache::IndexedCache; use crate::legacy as amp; -use crate::transaction::Transaction; +use crate::transaction::TransactionInner; use crate::types; use crate::types::{ActorId, ElemId, Key, ObjId, Op, OpId, OpType}; use core::ops::Range; @@ -490,7 +490,7 @@ fn export_op(op: &Op, actors: &IndexedCache, props: &IndexedCache, props: &IndexedCache, ) -> Change { diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 652f85e6..d94518be 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -25,6 +25,7 @@ macro_rules! 
__log { } mod automerge; +mod autotxn; mod change; mod clock; mod columnar; @@ -45,6 +46,7 @@ mod value; mod visualisation; pub use crate::automerge::Automerge; +pub use autotxn::AutoTxn; pub use change::{decode_change, Change}; pub use error::AutomergeError; pub use exid::ExId as ObjId; diff --git a/automerge/src/transaction.rs b/automerge/src/transaction.rs index 6c010708..9b6e886e 100644 --- a/automerge/src/transaction.rs +++ b/automerge/src/transaction.rs @@ -1,5 +1,3 @@ -use std::ops::Deref; - use crate::exid::ExId; use crate::query; use crate::types::{Key, ObjId, OpId}; @@ -7,8 +5,8 @@ use crate::{change::export_change, types::Op, Automerge, ChangeHash, Prop, Value use crate::{AutomergeError, OpType}; use unicode_segmentation::UnicodeSegmentation; -#[derive(Debug)] -pub struct Transaction<'a> { +#[derive(Debug, Clone)] +pub struct TransactionInner { pub(crate) actor: usize, pub(crate) seq: u64, pub(crate) start_op: u64, @@ -18,17 +16,27 @@ pub struct Transaction<'a> { pub(crate) hash: Option, pub(crate) deps: Vec, pub(crate) operations: Vec, +} + +#[derive(Debug)] +pub struct Transaction<'a> { + pub(crate) inner: TransactionInner, pub(crate) doc: &'a mut Automerge, } -impl<'a> Transaction<'a> { +impl TransactionInner { pub fn pending_ops(&self) -> usize { self.operations.len() } /// Commit the operations performed in this transaction, returning the hashes corresponding to /// the new heads. 
- pub fn commit(mut self, message: Option, time: Option) -> Vec { + pub fn commit( + mut self, + doc: &mut Automerge, + message: Option, + time: Option, + ) -> Vec { if message.is_some() { self.message = message; } @@ -37,33 +45,305 @@ impl<'a> Transaction<'a> { self.time = t; } - self.operations.len(); + doc.update_history(export_change(&self, &doc.ops.m.actors, &doc.ops.m.props)); - self.doc.update_history(export_change( - &self, - &self.doc.ops.m.actors, - &self.doc.ops.m.props, - )); + doc.get_heads() + } - self.doc.get_heads() + /// Undo the operations added in this transaction, returning the number of cancelled + /// operations. + pub fn rollback(self, doc: &mut Automerge) -> usize { + let num = self.operations.len(); + // remove in reverse order so sets are removed before makes etc... + for op in self.operations.iter().rev() { + for pred_id in &op.pred { + // FIXME - use query to make this fast + if let Some(p) = doc.ops.iter().position(|o| o.id == *pred_id) { + doc.ops.replace(op.obj, p, |o| o.remove_succ(op)); + } + } + if let Some(pos) = doc.ops.iter().position(|o| o.id == op.id) { + doc.ops.remove(op.obj, pos); + } + } + num + } + + /// Set the value of property `P` to value `V` in object `obj`. + /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + pub fn set, V: Into>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + prop: P, + value: V, + ) -> Result, AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + let value = value.into(); + if let Some(id) = self.local_op(doc, obj, prop.into(), value.into())? 
{ + Ok(Some(doc.id_to_exid(id))) + } else { + Ok(None) + } + } + + fn next_id(&mut self) -> OpId { + OpId(self.start_op + self.operations.len() as u64, self.actor) + } + + fn insert_local_op(&mut self, doc: &mut Automerge, op: Op, pos: usize, succ_pos: &[usize]) { + for succ in succ_pos { + doc.ops.replace(op.obj, *succ, |old_op| { + old_op.add_succ(&op); + }); + } + + if !op.is_del() { + doc.ops.insert(pos, op.clone()); + } + + self.operations.push(op); + } + + pub fn insert>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + if let Some(id) = self.do_insert(doc, obj, index, value)? { + Ok(Some(doc.id_to_exid(id))) + } else { + Ok(None) + } + } + + fn do_insert>( + &mut self, + doc: &mut Automerge, + obj: ObjId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + let id = self.next_id(); + + let query = doc.ops.search(obj, query::InsertNth::new(index)); + + let key = query.key()?; + let value = value.into(); + let action = value.into(); + let is_make = matches!(&action, OpType::Make(_)); + + let op = Op { + change: doc.history.len(), + id, + action, + obj, + key, + succ: Default::default(), + pred: Default::default(), + insert: true, + }; + + doc.ops.insert(query.pos(), op.clone()); + self.operations.push(op); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + pub(crate) fn local_op( + &mut self, + doc: &mut Automerge, + obj: ObjId, + prop: Prop, + action: OpType, + ) -> Result, AutomergeError> { + match prop { + Prop::Map(s) => self.local_map_op(doc, obj, s, action), + Prop::Seq(n) => self.local_list_op(doc, obj, n, action), + } + } + + fn local_map_op( + &mut self, + doc: &mut Automerge, + obj: ObjId, + prop: String, + action: OpType, + ) -> Result, AutomergeError> { + if prop.is_empty() { + return Err(AutomergeError::EmptyStringKey); + } + + let id = self.next_id(); + let prop = doc.ops.m.props.cache(prop); + let query = 
doc.ops.search(obj, query::Prop::new(prop)); + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } + + let is_make = matches!(&action, OpType::Make(_)); + + let pred = query.ops.iter().map(|op| op.id).collect(); + + let op = Op { + change: doc.history.len(), + id, + action, + obj, + key: Key::Map(prop), + succ: Default::default(), + pred, + insert: false, + }; + + self.insert_local_op(doc, op, query.pos, &query.ops_pos); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + fn local_list_op( + &mut self, + doc: &mut Automerge, + obj: ObjId, + index: usize, + action: OpType, + ) -> Result, AutomergeError> { + let query = doc.ops.search(obj, query::Nth::new(index)); + + let id = self.next_id(); + let pred = query.ops.iter().map(|op| op.id).collect(); + let key = query.key()?; + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } + + let is_make = matches!(&action, OpType::Make(_)); + + let op = Op { + change: doc.history.len(), + id, + action, + obj, + key, + succ: Default::default(), + pred, + insert: false, + }; + + self.insert_local_op(doc, op, query.pos, &query.ops_pos); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + pub fn inc>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + self.local_op(doc, obj, prop.into(), OpType::Inc(value))?; + Ok(()) + } + + pub fn del>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + prop: P, + ) -> Result<(), AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + self.local_op(doc, obj, prop.into(), OpType::Del)?; + Ok(()) + } + + /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert + /// the new elements + pub fn splice( + &mut self, + doc: &mut Automerge, + obj: &ExId, + mut pos: usize, + del: usize, + vals: Vec, + ) -> Result, AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + for _ in 0..del { + // del() + self.local_op(doc, obj, pos.into(), OpType::Del)?; + } + let mut results = Vec::new(); + for v in vals { + // insert() + let id = self.do_insert(doc, obj, pos, v.clone())?; + if let Some(id) = id { + results.push(doc.id_to_exid(id)); + } + pos += 1; + } + Ok(results) + } + + pub fn splice_text( + &mut self, + doc: &mut Automerge, + obj: &ExId, + pos: usize, + del: usize, + text: &str, + ) -> Result, AutomergeError> { + let mut vals = vec![]; + for c in text.to_owned().graphemes(true) { + vals.push(c.into()); + } + self.splice(doc, obj, pos, del, vals) + } +} + +impl<'a> Transaction<'a> { + pub fn pending_ops(&self) -> usize { + self.inner.pending_ops() + } + + /// Commit the operations performed in this transaction, returning the hashes corresponding to + /// the new heads. + pub fn commit(self, message: Option, time: Option) -> Vec { + self.inner.commit(self.doc, message, time) } /// Undo the operations added in this transaction, returning the number of cancelled /// operations. pub fn rollback(self) -> usize { - let num = self.operations.len(); - for op in &self.operations { - for pred_id in &op.pred { - // FIXME - use query to make this fast - if let Some(p) = self.doc.ops.iter().position(|o| o.id == *pred_id) { - self.doc.ops.replace(op.obj, p, |o| o.remove_succ(op)); - } - } - if let Some(pos) = self.doc.ops.iter().position(|o| o.id == op.id) { - self.doc.ops.remove(op.obj, pos); - } - } - num + self.inner.rollback(self.doc) } /// Set the value of property `P` to value `V` in object `obj`. 
@@ -85,31 +365,7 @@ impl<'a> Transaction<'a> { prop: P, value: V, ) -> Result, AutomergeError> { - let obj = self.doc.exid_to_obj(obj)?; - let value = value.into(); - if let Some(id) = self.local_op(obj, prop.into(), value.into())? { - Ok(Some(self.doc.id_to_exid(id))) - } else { - Ok(None) - } - } - - fn next_id(&mut self) -> OpId { - OpId(self.start_op + self.operations.len() as u64, self.actor) - } - - fn insert_local_op(&mut self, op: Op, pos: usize, succ_pos: &[usize]) { - for succ in succ_pos { - self.doc.ops.replace(op.obj, *succ, |old_op| { - old_op.add_succ(&op); - }); - } - - if !op.is_del() { - self.doc.ops.insert(pos, op.clone()); - } - - self.operations.push(op); + self.inner.set(self.doc, obj, prop, value) } pub fn insert>( @@ -118,140 +374,7 @@ impl<'a> Transaction<'a> { index: usize, value: V, ) -> Result, AutomergeError> { - let obj = self.doc.exid_to_obj(obj)?; - if let Some(id) = self.do_insert(obj, index, value)? { - Ok(Some(self.doc.id_to_exid(id))) - } else { - Ok(None) - } - } - - fn do_insert>( - &mut self, - obj: ObjId, - index: usize, - value: V, - ) -> Result, AutomergeError> { - let id = self.next_id(); - - let query = self.doc.ops.search(obj, query::InsertNth::new(index)); - - let key = query.key()?; - let value = value.into(); - let action = value.into(); - let is_make = matches!(&action, OpType::Make(_)); - - let op = Op { - change: self.doc.history.len(), - id, - action, - obj, - key, - succ: Default::default(), - pred: Default::default(), - insert: true, - }; - - self.doc.ops.insert(query.pos(), op.clone()); - self.operations.push(op); - - if is_make { - Ok(Some(id)) - } else { - Ok(None) - } - } - - pub(crate) fn local_op( - &mut self, - obj: ObjId, - prop: Prop, - action: OpType, - ) -> Result, AutomergeError> { - match prop { - Prop::Map(s) => self.local_map_op(obj, s, action), - Prop::Seq(n) => self.local_list_op(obj, n, action), - } - } - - fn local_map_op( - &mut self, - obj: ObjId, - prop: String, - action: OpType, - ) -> 
Result, AutomergeError> { - if prop.is_empty() { - return Err(AutomergeError::EmptyStringKey); - } - - let id = self.next_id(); - let prop = self.doc.ops.m.props.cache(prop); - let query = self.doc.ops.search(obj, query::Prop::new(prop)); - - if query.ops.len() == 1 && query.ops[0].is_noop(&action) { - return Ok(None); - } - - let is_make = matches!(&action, OpType::Make(_)); - - let pred = query.ops.iter().map(|op| op.id).collect(); - - let op = Op { - change: self.doc.history.len(), - id, - action, - obj, - key: Key::Map(prop), - succ: Default::default(), - pred, - insert: false, - }; - - self.insert_local_op(op, query.pos, &query.ops_pos); - - if is_make { - Ok(Some(id)) - } else { - Ok(None) - } - } - - fn local_list_op( - &mut self, - obj: ObjId, - index: usize, - action: OpType, - ) -> Result, AutomergeError> { - let query = self.doc.ops.search(obj, query::Nth::new(index)); - - let id = self.next_id(); - let pred = query.ops.iter().map(|op| op.id).collect(); - let key = query.key()?; - - if query.ops.len() == 1 && query.ops[0].is_noop(&action) { - return Ok(None); - } - - let is_make = matches!(&action, OpType::Make(_)); - - let op = Op { - change: self.doc.history.len(), - id, - action, - obj, - key, - succ: Default::default(), - pred, - insert: false, - }; - - self.insert_local_op(op, query.pos, &query.ops_pos); - - if is_make { - Ok(Some(id)) - } else { - Ok(None) - } + self.inner.insert(self.doc, obj, index, value) } pub fn inc>( @@ -260,15 +383,11 @@ impl<'a> Transaction<'a> { prop: P, value: i64, ) -> Result<(), AutomergeError> { - let obj = self.doc.exid_to_obj(obj)?; - self.local_op(obj, prop.into(), OpType::Inc(value))?; - Ok(()) + self.inner.inc(self.doc, obj, prop, value) } pub fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { - let obj = self.doc.exid_to_obj(obj)?; - self.local_op(obj, prop.into(), OpType::Del)?; - Ok(()) + self.inner.del(self.doc, obj, prop) } /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert @@ -276,25 +395,11 @@ impl<'a> Transaction<'a> { pub fn splice( &mut self, obj: &ExId, - mut pos: usize, + pos: usize, del: usize, vals: Vec, ) -> Result, AutomergeError> { - let obj = self.doc.exid_to_obj(obj)?; - for _ in 0..del { - // del() - self.local_op(obj, pos.into(), OpType::Del)?; - } - let mut results = Vec::new(); - for v in vals { - // insert() - let id = self.do_insert(obj, pos, v.clone())?; - if let Some(id) = id { - results.push(self.doc.id_to_exid(id)); - } - pos += 1; - } - Ok(results) + self.inner.splice(self.doc, obj, pos, del, vals) } pub fn splice_text( @@ -304,18 +409,6 @@ impl<'a> Transaction<'a> { del: usize, text: &str, ) -> Result, AutomergeError> { - let mut vals = vec![]; - for c in text.to_owned().graphemes(true) { - vals.push(c.into()); - } - self.splice(obj, pos, del, vals) - } -} - -impl<'a> Deref for Transaction<'a> { - type Target = Automerge; - - fn deref(&self) -> &Self::Target { - self.doc + self.inner.splice_text(self.doc, obj, pos, del, text) } } diff --git a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs index 197614f6..ec740238 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -1,10 +1,20 @@ -use automerge::{Automerge, Value, ROOT}; +use automerge::{AutoTxn, Automerge, Value, ROOT}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; use std::fs; -fn replay_trace(commands: Vec<(usize, usize, Vec)>) -> Automerge { +fn replay_trace_tx(commands: Vec<(usize, usize, Vec)>) -> Automerge { let mut doc = Automerge::new(); + let text = doc.set(&ROOT, "text", Value::text()).unwrap().unwrap(); + let mut tx = doc.tx(); + for (pos, del, vals) in commands { + tx.splice(&text, pos, del, vals).unwrap(); + } + tx.commit(None, None); + doc +} +fn replay_trace_autotx(commands: Vec<(usize, usize, Vec)>) -> AutoTxn { + let mut doc = AutoTxn::new(); let text = doc.set(&ROOT, "text", Value::text()).unwrap().unwrap(); for (pos, del, 
vals) in commands { doc.splice(&text, pos, del, vals).unwrap(); @@ -17,10 +27,18 @@ fn save_trace(mut doc: Automerge) { doc.save().unwrap(); } +fn save_trace_autotx(mut doc: AutoTxn) { + doc.save().unwrap(); +} + fn load_trace(bytes: &[u8]) { Automerge::load(bytes).unwrap(); } +fn load_trace_autotx(bytes: &[u8]) { + AutoTxn::load(bytes).unwrap(); +} + fn bench(c: &mut Criterion) { let contents = fs::read_to_string("edits.json").expect("cannot read edits file"); let edits = json::parse(&contents).expect("cant parse edits"); @@ -45,14 +63,14 @@ fn bench(c: &mut Criterion) { |b, commands| { b.iter_batched( || commands.clone(), - replay_trace, + replay_trace_tx, criterion::BatchSize::LargeInput, ) }, ); let commands_len = commands.len(); - let mut doc = replay_trace(commands); + let mut doc = replay_trace_tx(commands.clone()); group.bench_with_input(BenchmarkId::new("save", commands_len), &doc, |b, doc| { b.iter_batched(|| doc.clone(), save_trace, criterion::BatchSize::LargeInput) }); @@ -64,6 +82,35 @@ fn bench(c: &mut Criterion) { |b, bytes| b.iter(|| load_trace(bytes)), ); + group.bench_with_input( + BenchmarkId::new("replay", commands_len), + &commands, + |b, commands| { + b.iter_batched( + || commands.clone(), + replay_trace_autotx, + criterion::BatchSize::LargeInput, + ) + }, + ); + + let commands_len = commands.len(); + let mut doc = replay_trace_autotx(commands); + group.bench_with_input(BenchmarkId::new("save", commands_len), &doc, |b, doc| { + b.iter_batched( + || doc.clone(), + save_trace_autotx, + criterion::BatchSize::LargeInput, + ) + }); + + let bytes = doc.save().unwrap(); + group.bench_with_input( + BenchmarkId::new("load", commands_len), + &bytes, + |b, bytes| b.iter(|| load_trace_autotx(bytes)), + ); + group.finish(); } From ea826b70f4a878cecba3cac4ef7943ceaca26200 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 16 Feb 2022 14:15:36 +0000 Subject: [PATCH 083/730] Move TransactionInner and add get methods to Transaction --- 
automerge/src/transaction.rs | 384 +++++------------------------ automerge/src/transaction/inner.rs | 324 ++++++++++++++++++++++++ 2 files changed, 386 insertions(+), 322 deletions(-) create mode 100644 automerge/src/transaction/inner.rs diff --git a/automerge/src/transaction.rs b/automerge/src/transaction.rs index 9b6e886e..5263685e 100644 --- a/automerge/src/transaction.rs +++ b/automerge/src/transaction.rs @@ -1,22 +1,9 @@ use crate::exid::ExId; -use crate::query; -use crate::types::{Key, ObjId, OpId}; -use crate::{change::export_change, types::Op, Automerge, ChangeHash, Prop, Value}; -use crate::{AutomergeError, OpType}; -use unicode_segmentation::UnicodeSegmentation; +use crate::AutomergeError; +use crate::{Automerge, ChangeHash, Prop, Value}; -#[derive(Debug, Clone)] -pub struct TransactionInner { - pub(crate) actor: usize, - pub(crate) seq: u64, - pub(crate) start_op: u64, - pub(crate) time: i64, - pub(crate) message: Option, - pub(crate) extra_bytes: Vec, - pub(crate) hash: Option, - pub(crate) deps: Vec, - pub(crate) operations: Vec, -} +mod inner; +pub(crate) use inner::TransactionInner; #[derive(Debug)] pub struct Transaction<'a> { @@ -24,311 +11,6 @@ pub struct Transaction<'a> { pub(crate) doc: &'a mut Automerge, } -impl TransactionInner { - pub fn pending_ops(&self) -> usize { - self.operations.len() - } - - /// Commit the operations performed in this transaction, returning the hashes corresponding to - /// the new heads. - pub fn commit( - mut self, - doc: &mut Automerge, - message: Option, - time: Option, - ) -> Vec { - if message.is_some() { - self.message = message; - } - - if let Some(t) = time { - self.time = t; - } - - doc.update_history(export_change(&self, &doc.ops.m.actors, &doc.ops.m.props)); - - doc.get_heads() - } - - /// Undo the operations added in this transaction, returning the number of cancelled - /// operations. 
- pub fn rollback(self, doc: &mut Automerge) -> usize { - let num = self.operations.len(); - // remove in reverse order so sets are removed before makes etc... - for op in self.operations.iter().rev() { - for pred_id in &op.pred { - // FIXME - use query to make this fast - if let Some(p) = doc.ops.iter().position(|o| o.id == *pred_id) { - doc.ops.replace(op.obj, p, |o| o.remove_succ(op)); - } - } - if let Some(pos) = doc.ops.iter().position(|o| o.id == op.id) { - doc.ops.remove(op.obj, pos); - } - } - num - } - - /// Set the value of property `P` to value `V` in object `obj`. - /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - pub fn set, V: Into>( - &mut self, - doc: &mut Automerge, - obj: &ExId, - prop: P, - value: V, - ) -> Result, AutomergeError> { - let obj = doc.exid_to_obj(obj)?; - let value = value.into(); - if let Some(id) = self.local_op(doc, obj, prop.into(), value.into())? { - Ok(Some(doc.id_to_exid(id))) - } else { - Ok(None) - } - } - - fn next_id(&mut self) -> OpId { - OpId(self.start_op + self.operations.len() as u64, self.actor) - } - - fn insert_local_op(&mut self, doc: &mut Automerge, op: Op, pos: usize, succ_pos: &[usize]) { - for succ in succ_pos { - doc.ops.replace(op.obj, *succ, |old_op| { - old_op.add_succ(&op); - }); - } - - if !op.is_del() { - doc.ops.insert(pos, op.clone()); - } - - self.operations.push(op); - } - - pub fn insert>( - &mut self, - doc: &mut Automerge, - obj: &ExId, - index: usize, - value: V, - ) -> Result, AutomergeError> { - let obj = doc.exid_to_obj(obj)?; - if let Some(id) = self.do_insert(doc, obj, index, value)? 
{ - Ok(Some(doc.id_to_exid(id))) - } else { - Ok(None) - } - } - - fn do_insert>( - &mut self, - doc: &mut Automerge, - obj: ObjId, - index: usize, - value: V, - ) -> Result, AutomergeError> { - let id = self.next_id(); - - let query = doc.ops.search(obj, query::InsertNth::new(index)); - - let key = query.key()?; - let value = value.into(); - let action = value.into(); - let is_make = matches!(&action, OpType::Make(_)); - - let op = Op { - change: doc.history.len(), - id, - action, - obj, - key, - succ: Default::default(), - pred: Default::default(), - insert: true, - }; - - doc.ops.insert(query.pos(), op.clone()); - self.operations.push(op); - - if is_make { - Ok(Some(id)) - } else { - Ok(None) - } - } - - pub(crate) fn local_op( - &mut self, - doc: &mut Automerge, - obj: ObjId, - prop: Prop, - action: OpType, - ) -> Result, AutomergeError> { - match prop { - Prop::Map(s) => self.local_map_op(doc, obj, s, action), - Prop::Seq(n) => self.local_list_op(doc, obj, n, action), - } - } - - fn local_map_op( - &mut self, - doc: &mut Automerge, - obj: ObjId, - prop: String, - action: OpType, - ) -> Result, AutomergeError> { - if prop.is_empty() { - return Err(AutomergeError::EmptyStringKey); - } - - let id = self.next_id(); - let prop = doc.ops.m.props.cache(prop); - let query = doc.ops.search(obj, query::Prop::new(prop)); - - if query.ops.len() == 1 && query.ops[0].is_noop(&action) { - return Ok(None); - } - - let is_make = matches!(&action, OpType::Make(_)); - - let pred = query.ops.iter().map(|op| op.id).collect(); - - let op = Op { - change: doc.history.len(), - id, - action, - obj, - key: Key::Map(prop), - succ: Default::default(), - pred, - insert: false, - }; - - self.insert_local_op(doc, op, query.pos, &query.ops_pos); - - if is_make { - Ok(Some(id)) - } else { - Ok(None) - } - } - - fn local_list_op( - &mut self, - doc: &mut Automerge, - obj: ObjId, - index: usize, - action: OpType, - ) -> Result, AutomergeError> { - let query = doc.ops.search(obj, 
query::Nth::new(index)); - - let id = self.next_id(); - let pred = query.ops.iter().map(|op| op.id).collect(); - let key = query.key()?; - - if query.ops.len() == 1 && query.ops[0].is_noop(&action) { - return Ok(None); - } - - let is_make = matches!(&action, OpType::Make(_)); - - let op = Op { - change: doc.history.len(), - id, - action, - obj, - key, - succ: Default::default(), - pred, - insert: false, - }; - - self.insert_local_op(doc, op, query.pos, &query.ops_pos); - - if is_make { - Ok(Some(id)) - } else { - Ok(None) - } - } - - pub fn inc>( - &mut self, - doc: &mut Automerge, - obj: &ExId, - prop: P, - value: i64, - ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(obj)?; - self.local_op(doc, obj, prop.into(), OpType::Inc(value))?; - Ok(()) - } - - pub fn del>( - &mut self, - doc: &mut Automerge, - obj: &ExId, - prop: P, - ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(obj)?; - self.local_op(doc, obj, prop.into(), OpType::Del)?; - Ok(()) - } - - /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert - /// the new elements - pub fn splice( - &mut self, - doc: &mut Automerge, - obj: &ExId, - mut pos: usize, - del: usize, - vals: Vec, - ) -> Result, AutomergeError> { - let obj = doc.exid_to_obj(obj)?; - for _ in 0..del { - // del() - self.local_op(doc, obj, pos.into(), OpType::Del)?; - } - let mut results = Vec::new(); - for v in vals { - // insert() - let id = self.do_insert(doc, obj, pos, v.clone())?; - if let Some(id) = id { - results.push(doc.id_to_exid(id)); - } - pos += 1; - } - Ok(results) - } - - pub fn splice_text( - &mut self, - doc: &mut Automerge, - obj: &ExId, - pos: usize, - del: usize, - text: &str, - ) -> Result, AutomergeError> { - let mut vals = vec![]; - for c in text.to_owned().graphemes(true) { - vals.push(c.into()); - } - self.splice(doc, obj, pos, del, vals) - } -} - impl<'a> Transaction<'a> { pub fn pending_ops(&self) -> usize { self.inner.pending_ops() @@ -411,4 +93,62 @@ impl<'a> Transaction<'a> { ) -> Result, AutomergeError> { self.inner.splice_text(self.doc, obj, pos, del, text) } + + pub fn keys(&self, obj: &ExId) -> Vec { + self.doc.keys(obj) + } + + pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec { + self.doc.keys_at(obj, heads) + } + + pub fn length(&self, obj: &ExId) -> usize { + self.doc.length(obj) + } + + pub fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize { + self.doc.length_at(obj, heads) + } + + pub fn text(&self, obj: &ExId) -> Result { + self.doc.text(obj) + } + + pub fn text_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Result { + self.doc.text_at(obj, heads) + } + + pub fn value>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError> { + self.doc.value(obj, prop) + } + + pub fn value_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.value_at(obj, prop, heads) + } + + pub fn values>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError> { + self.doc.values(obj, prop) + 
} + + pub fn values_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.values_at(obj, prop, heads) + } } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs new file mode 100644 index 00000000..ee971e1c --- /dev/null +++ b/automerge/src/transaction/inner.rs @@ -0,0 +1,324 @@ +use crate::exid::ExId; +use crate::query; +use crate::types::{Key, ObjId, OpId}; +use crate::{change::export_change, types::Op, Automerge, ChangeHash, Prop, Value}; +use crate::{AutomergeError, OpType}; +use unicode_segmentation::UnicodeSegmentation; + +#[derive(Debug, Clone)] +pub struct TransactionInner { + pub(crate) actor: usize, + pub(crate) seq: u64, + pub(crate) start_op: u64, + pub(crate) time: i64, + pub(crate) message: Option, + pub(crate) extra_bytes: Vec, + pub(crate) hash: Option, + pub(crate) deps: Vec, + pub(crate) operations: Vec, +} + +impl TransactionInner { + pub fn pending_ops(&self) -> usize { + self.operations.len() + } + + /// Commit the operations performed in this transaction, returning the hashes corresponding to + /// the new heads. + pub fn commit( + mut self, + doc: &mut Automerge, + message: Option, + time: Option, + ) -> Vec { + if message.is_some() { + self.message = message; + } + + if let Some(t) = time { + self.time = t; + } + + doc.update_history(export_change(&self, &doc.ops.m.actors, &doc.ops.m.props)); + + doc.get_heads() + } + + /// Undo the operations added in this transaction, returning the number of cancelled + /// operations. + pub fn rollback(self, doc: &mut Automerge) -> usize { + let num = self.operations.len(); + // remove in reverse order so sets are removed before makes etc... 
+ for op in self.operations.iter().rev() { + for pred_id in &op.pred { + // FIXME - use query to make this fast + if let Some(p) = doc.ops.iter().position(|o| o.id == *pred_id) { + doc.ops.replace(op.obj, p, |o| o.remove_succ(op)); + } + } + if let Some(pos) = doc.ops.iter().position(|o| o.id == op.id) { + doc.ops.remove(op.obj, pos); + } + } + num + } + + /// Set the value of property `P` to value `V` in object `obj`. + /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + pub fn set, V: Into>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + prop: P, + value: V, + ) -> Result, AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + let value = value.into(); + if let Some(id) = self.local_op(doc, obj, prop.into(), value.into())? { + Ok(Some(doc.id_to_exid(id))) + } else { + Ok(None) + } + } + + fn next_id(&mut self) -> OpId { + OpId(self.start_op + self.operations.len() as u64, self.actor) + } + + fn insert_local_op(&mut self, doc: &mut Automerge, op: Op, pos: usize, succ_pos: &[usize]) { + for succ in succ_pos { + doc.ops.replace(op.obj, *succ, |old_op| { + old_op.add_succ(&op); + }); + } + + if !op.is_del() { + doc.ops.insert(pos, op.clone()); + } + + self.operations.push(op); + } + + pub fn insert>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + if let Some(id) = self.do_insert(doc, obj, index, value)? 
{ + Ok(Some(doc.id_to_exid(id))) + } else { + Ok(None) + } + } + + fn do_insert>( + &mut self, + doc: &mut Automerge, + obj: ObjId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + let id = self.next_id(); + + let query = doc.ops.search(obj, query::InsertNth::new(index)); + + let key = query.key()?; + let value = value.into(); + let action = value.into(); + let is_make = matches!(&action, OpType::Make(_)); + + let op = Op { + change: doc.history.len(), + id, + action, + obj, + key, + succ: Default::default(), + pred: Default::default(), + insert: true, + }; + + doc.ops.insert(query.pos(), op.clone()); + self.operations.push(op); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + pub(crate) fn local_op( + &mut self, + doc: &mut Automerge, + obj: ObjId, + prop: Prop, + action: OpType, + ) -> Result, AutomergeError> { + match prop { + Prop::Map(s) => self.local_map_op(doc, obj, s, action), + Prop::Seq(n) => self.local_list_op(doc, obj, n, action), + } + } + + fn local_map_op( + &mut self, + doc: &mut Automerge, + obj: ObjId, + prop: String, + action: OpType, + ) -> Result, AutomergeError> { + if prop.is_empty() { + return Err(AutomergeError::EmptyStringKey); + } + + let id = self.next_id(); + let prop = doc.ops.m.props.cache(prop); + let query = doc.ops.search(obj, query::Prop::new(prop)); + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } + + let is_make = matches!(&action, OpType::Make(_)); + + let pred = query.ops.iter().map(|op| op.id).collect(); + + let op = Op { + change: doc.history.len(), + id, + action, + obj, + key: Key::Map(prop), + succ: Default::default(), + pred, + insert: false, + }; + + self.insert_local_op(doc, op, query.pos, &query.ops_pos); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + fn local_list_op( + &mut self, + doc: &mut Automerge, + obj: ObjId, + index: usize, + action: OpType, + ) -> Result, AutomergeError> { + let query = doc.ops.search(obj, 
query::Nth::new(index)); + + let id = self.next_id(); + let pred = query.ops.iter().map(|op| op.id).collect(); + let key = query.key()?; + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } + + let is_make = matches!(&action, OpType::Make(_)); + + let op = Op { + change: doc.history.len(), + id, + action, + obj, + key, + succ: Default::default(), + pred, + insert: false, + }; + + self.insert_local_op(doc, op, query.pos, &query.ops_pos); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + pub fn inc>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + self.local_op(doc, obj, prop.into(), OpType::Inc(value))?; + Ok(()) + } + + pub fn del>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + prop: P, + ) -> Result<(), AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + self.local_op(doc, obj, prop.into(), OpType::Del)?; + Ok(()) + } + + /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert + /// the new elements + pub fn splice( + &mut self, + doc: &mut Automerge, + obj: &ExId, + mut pos: usize, + del: usize, + vals: Vec, + ) -> Result, AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + for _ in 0..del { + // del() + self.local_op(doc, obj, pos.into(), OpType::Del)?; + } + let mut results = Vec::new(); + for v in vals { + // insert() + let id = self.do_insert(doc, obj, pos, v.clone())?; + if let Some(id) = id { + results.push(doc.id_to_exid(id)); + } + pos += 1; + } + Ok(results) + } + + pub fn splice_text( + &mut self, + doc: &mut Automerge, + obj: &ExId, + pos: usize, + del: usize, + text: &str, + ) -> Result, AutomergeError> { + let mut vals = vec![]; + for c in text.to_owned().graphemes(true) { + vals.push(c.into()); + } + self.splice(doc, obj, pos, del, vals) + } +} From 2f49a82eea4de98229f85ea7637f2345eff1dd0d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 16 Feb 2022 14:20:49 +0000 Subject: [PATCH 084/730] Have generate_sync_message not take mut self --- automerge/src/sync.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 69a6ad1c..62a0f977 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -22,7 +22,7 @@ const HASH_SIZE: usize = 32; // 256 bits = 32 bytes const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identification impl Automerge { - pub fn generate_sync_message(&mut self, sync_state: &mut SyncState) -> Option { + pub fn generate_sync_message(&self, sync_state: &mut SyncState) -> Option { self._generate_sync_message(sync_state) } From e970854042222eb75e8e72269c5e6a1a3213854d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 16 Feb 2022 14:56:17 +0000 Subject: [PATCH 085/730] Fix benchmark ids --- edit-trace/benches/main.rs | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs 
index ec740238..93074c72 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -83,7 +83,7 @@ fn bench(c: &mut Criterion) { ); group.bench_with_input( - BenchmarkId::new("replay", commands_len), + BenchmarkId::new("replay autotx", commands_len), &commands, |b, commands| { b.iter_batched( @@ -96,17 +96,21 @@ fn bench(c: &mut Criterion) { let commands_len = commands.len(); let mut doc = replay_trace_autotx(commands); - group.bench_with_input(BenchmarkId::new("save", commands_len), &doc, |b, doc| { - b.iter_batched( - || doc.clone(), - save_trace_autotx, - criterion::BatchSize::LargeInput, - ) - }); + group.bench_with_input( + BenchmarkId::new("save autotx", commands_len), + &doc, + |b, doc| { + b.iter_batched( + || doc.clone(), + save_trace_autotx, + criterion::BatchSize::LargeInput, + ) + }, + ); let bytes = doc.save().unwrap(); group.bench_with_input( - BenchmarkId::new("load", commands_len), + BenchmarkId::new("load autotx", commands_len), &bytes, |b, bytes| b.iter(|| load_trace_autotx(bytes)), ); From 62c71845cd7e23ae07e2467e72c5a4562e5ebdfa Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 16 Feb 2022 15:12:51 +0000 Subject: [PATCH 086/730] Add some basic docs for Automerge mutations --- automerge/src/automerge.rs | 25 +++++++++++++++++++++++++ automerge/src/transaction.rs | 5 +++++ 2 files changed, 30 insertions(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 0d414329..98958eed 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -88,6 +88,7 @@ impl Automerge { am } + /// Start a transaction. pub fn tx(&mut self) -> Transaction { let actor = self.get_actor_index(); @@ -123,6 +124,10 @@ impl Automerge { f } + /// Set a prop to have the given value in an object. + /// + /// This creates a new transaction internally and commits it upon success. + /// If you want to have multiple operations grouped together then use [`Self::tx`]. 
pub fn set, V: Into>( &mut self, obj: &ExId, @@ -142,6 +147,10 @@ impl Automerge { } } + /// Insert a value into an object. + /// + /// This creates a new transaction internally and commits it upon success. + /// If you want to have multiple operations grouped together then use [`Self::tx`]. pub fn insert>( &mut self, obj: &ExId, @@ -161,6 +170,10 @@ impl Automerge { } } + /// Delete a value in an object. + /// + /// This creates a new transaction internally and commits it upon success. + /// If you want to have multiple operations grouped together then use [`Self::tx`]. pub fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { let mut tx = self.tx(); match tx.del(obj, prop) { @@ -175,6 +188,10 @@ impl Automerge { } } + /// Increment a counter in an object. + /// + /// This creates a new transaction internally and commits it upon success. + /// If you want to have multiple operations grouped together then use [`Self::tx`]. pub fn inc>( &mut self, obj: &ExId, @@ -194,6 +211,10 @@ impl Automerge { } } + /// Splice elements into a list or text. + /// + /// This creates a new transaction internally and commits it upon success. + /// If you want to have multiple operations grouped together then use [`Self::tx`]. pub fn splice( &mut self, obj: &ExId, @@ -214,6 +235,10 @@ impl Automerge { } } + /// Splice elements into text. + /// + /// This creates a new transaction internally and commits it upon success. + /// If you want to have multiple operations grouped together then use [`Self::tx`]. pub fn splice_text( &mut self, obj: &ExId, diff --git a/automerge/src/transaction.rs b/automerge/src/transaction.rs index 5263685e..3aca4912 100644 --- a/automerge/src/transaction.rs +++ b/automerge/src/transaction.rs @@ -5,6 +5,11 @@ use crate::{Automerge, ChangeHash, Prop, Value}; mod inner; pub(crate) use inner::TransactionInner; +/// A transaction on a document. 
+/// Transactions group operations into a single change so that no other operations can happen +/// in-between. +/// +/// Created from [`Automerge::tx`]. #[derive(Debug)] pub struct Transaction<'a> { pub(crate) inner: TransactionInner, From 59e36cebe4b3abb8fdd9718af59cd6349c03c3b3 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 17 Feb 2022 11:07:59 +0000 Subject: [PATCH 087/730] Improve transactions with drop, transact and better commit Also remove modification operations directly on Automerge and switch tests to using AutoTxn. --- automerge-wasm/src/lib.rs | 12 +- automerge/src/automerge.rs | 362 +++++++++++----------------- automerge/src/autotxn.rs | 36 ++- automerge/src/lib.rs | 3 +- automerge/src/sync.rs | 26 +- automerge/src/transaction.rs | 65 ++++- automerge/src/transaction/commit.rs | 32 +++ automerge/src/transaction/result.rs | 54 +++++ automerge/tests/helpers/mod.rs | 8 +- automerge/tests/test.rs | 76 +++--- edit-trace/benches/main.rs | 8 +- edit-trace/src/main.rs | 4 +- 12 files changed, 377 insertions(+), 309 deletions(-) create mode 100644 automerge/src/transaction/commit.rs create mode 100644 automerge/src/transaction/result.rs diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 2d6c873f..60ff78d9 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -1,4 +1,5 @@ #![allow(clippy::unused_unit)] +use am::transaction::CommitOptions; use automerge as am; use automerge::{Change, ObjId, Prop, Value, ROOT}; use js_sys::{Array, Object, Uint8Array}; @@ -45,7 +46,7 @@ impl Automerge { #[allow(clippy::should_implement_trait)] pub fn clone(&mut self, actor: Option) -> Result { if self.0.pending_ops() > 0 { - self.0.commit(None, None); + self.0.commit(); } let mut automerge = Automerge(self.0.clone()); if let Some(s) = actor { @@ -73,7 +74,14 @@ impl Automerge { } pub fn commit(&mut self, message: Option, time: Option) -> Array { - let heads = self.0.commit(message, time.map(|n| n as i64)); + let mut commit_opts = 
CommitOptions::default(); + if let Some(message) = message { + commit_opts.set_message(message); + } + if let Some(time) = time { + commit_opts.set_time(time as i64); + } + let heads = self.0.commit_with(commit_opts); let heads: Array = heads .iter() .map(|h| JsValue::from_str(&hex::encode(&h.0))) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 98958eed..d35a2b8b 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -3,7 +3,10 @@ use std::collections::{HashMap, HashSet, VecDeque}; use crate::change::encode_document; use crate::exid::ExId; use crate::op_set::OpSet; -use crate::transaction::{Transaction, TransactionInner}; +use crate::transaction::{ + CommitOptions, Transaction, TransactionFailure, TransactionInner, TransactionResult, + TransactionSuccess, +}; use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, ScalarValue, Value, @@ -89,7 +92,7 @@ impl Automerge { } /// Start a transaction. - pub fn tx(&mut self) -> Transaction { + pub fn transaction(&mut self) -> Transaction { let actor = self.get_actor_index(); let seq = self.states.entry(actor).or_default().len() as u64 + 1; @@ -113,152 +116,57 @@ impl Automerge { deps, }; Transaction { - inner: tx_inner, + inner: Some(tx_inner), doc: self, } } - pub fn fork(&mut self) -> Self { + /// Run a transaction on this document in a closure, automatically handling commit or rollback + /// afterwards. + pub fn transact(&mut self, f: F) -> TransactionResult + where + F: FnOnce(&mut Transaction) -> Result, + { + let mut tx = self.transaction(); + let result = f(&mut tx); + match result { + Ok(result) => Ok(TransactionSuccess { + result, + heads: tx.commit(), + }), + Err(error) => Err(TransactionFailure { + error, + cancelled: tx.rollback(), + }), + } + } + + /// Like [`Self::transact`] but with a function for generating the commit options. 
+ pub fn transact_with(&mut self, f: F, c: C) -> TransactionResult + where + F: FnOnce(&mut Transaction) -> Result, + C: FnOnce() -> CommitOptions, + { + let mut tx = self.transaction(); + let result = f(&mut tx); + match result { + Ok(result) => Ok(TransactionSuccess { + result, + heads: tx.commit_with(c()), + }), + Err(error) => Err(TransactionFailure { + error, + cancelled: tx.rollback(), + }), + } + } + + pub fn fork(&self) -> Self { let mut f = self.clone(); f.actor = None; f } - /// Set a prop to have the given value in an object. - /// - /// This creates a new transaction internally and commits it upon success. - /// If you want to have multiple operations grouped together then use [`Self::tx`]. - pub fn set, V: Into>( - &mut self, - obj: &ExId, - prop: P, - value: V, - ) -> Result, AutomergeError> { - let mut tx = self.tx(); - match tx.set(obj, prop, value) { - Ok(opt) => { - tx.commit(None, None); - Ok(opt) - } - Err(e) => { - tx.rollback(); - Err(e) - } - } - } - - /// Insert a value into an object. - /// - /// This creates a new transaction internally and commits it upon success. - /// If you want to have multiple operations grouped together then use [`Self::tx`]. - pub fn insert>( - &mut self, - obj: &ExId, - index: usize, - value: V, - ) -> Result, AutomergeError> { - let mut tx = self.tx(); - match tx.insert(obj, index, value) { - Ok(opt) => { - tx.commit(None, None); - Ok(opt) - } - Err(e) => { - tx.rollback(); - Err(e) - } - } - } - - /// Delete a value in an object. - /// - /// This creates a new transaction internally and commits it upon success. - /// If you want to have multiple operations grouped together then use [`Self::tx`]. - pub fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { - let mut tx = self.tx(); - match tx.del(obj, prop) { - Ok(opt) => { - tx.commit(None, None); - Ok(opt) - } - Err(e) => { - tx.rollback(); - Err(e) - } - } - } - - /// Increment a counter in an object. 
- /// - /// This creates a new transaction internally and commits it upon success. - /// If you want to have multiple operations grouped together then use [`Self::tx`]. - pub fn inc>( - &mut self, - obj: &ExId, - prop: P, - value: i64, - ) -> Result<(), AutomergeError> { - let mut tx = self.tx(); - match tx.inc(obj, prop, value) { - Ok(opt) => { - tx.commit(None, None); - Ok(opt) - } - Err(e) => { - tx.rollback(); - Err(e) - } - } - } - - /// Splice elements into a list or text. - /// - /// This creates a new transaction internally and commits it upon success. - /// If you want to have multiple operations grouped together then use [`Self::tx`]. - pub fn splice( - &mut self, - obj: &ExId, - pos: usize, - del: usize, - vals: Vec, - ) -> Result, AutomergeError> { - let mut tx = self.tx(); - match tx.splice(obj, pos, del, vals) { - Ok(opt) => { - tx.commit(None, None); - Ok(opt) - } - Err(e) => { - tx.rollback(); - Err(e) - } - } - } - - /// Splice elements into text. - /// - /// This creates a new transaction internally and commits it upon success. - /// If you want to have multiple operations grouped together then use [`Self::tx`]. - pub fn splice_text( - &mut self, - obj: &ExId, - pos: usize, - del: usize, - text: &str, - ) -> Result, AutomergeError> { - let mut tx = self.tx(); - match tx.splice_text(obj, pos, del, text) { - Ok(opt) => { - tx.commit(None, None); - Ok(opt) - } - Err(e) => { - tx.rollback(); - Err(e) - } - } - } - fn insert_op(&mut self, op: Op) -> Op { let q = self.ops.search(op.obj, query::SeekOp::new(&op)); @@ -580,7 +488,7 @@ impl Automerge { .cloned() .collect::>(); self.apply_changes(&changes)?; - Ok(self._get_heads()) + Ok(self.get_heads()) } pub fn save(&mut self) -> Result, AutomergeError> { @@ -602,13 +510,13 @@ impl Automerge { // should this return an empty vec instead of None? 
pub fn save_incremental(&mut self) -> Vec { - let changes = self._get_changes(self.saved.as_slice()); + let changes = self.get_changes(self.saved.as_slice()); let mut bytes = vec![]; for c in changes { bytes.extend(c.raw_bytes()); } if !bytes.is_empty() { - self.saved = self._get_heads().to_vec() + self.saved = self.get_heads().to_vec() } bytes } @@ -675,10 +583,6 @@ impl Automerge { } pub fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { - self._get_missing_deps(heads) - } - - pub(crate) fn _get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash).collect(); let mut missing = HashSet::new(); @@ -729,7 +633,7 @@ impl Automerge { } // if we get to the end and there is a head we haven't seen then fast path cant work - if self._get_heads().iter().all(|h| has_seen.contains(h)) { + if self.get_heads().iter().all(|h| has_seen.contains(h)) { Some(missing_changes) } else { None @@ -767,10 +671,6 @@ impl Automerge { } pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { - self._get_changes(have_deps) - } - - pub(crate) fn _get_changes(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { if let Some(changes) = self.get_changes_fast(have_deps) { changes } else { @@ -784,7 +684,7 @@ impl Automerge { let mut to_see = heads.to_vec(); // FIXME - faster while let Some(hash) = to_see.pop() { - if let Some(c) = self._get_change_by_hash(&hash) { + if let Some(c) = self.get_change_by_hash(&hash) { for h in &c.deps { if !seen.contains(h) { to_see.push(*h); @@ -799,30 +699,22 @@ impl Automerge { } pub fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { - self._get_change_by_hash(hash) - } - - pub(crate) fn _get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { self.history_index .get(hash) .and_then(|index| self.history.get(*index)) } pub fn get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { - self._get_changes_added(other) - } - - pub(crate) fn 
_get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { // Depth-first traversal from the heads through the dependency graph, // until we reach a change that is already present in other - let mut stack: Vec<_> = other._get_heads(); + let mut stack: Vec<_> = other.get_heads(); let mut seen_hashes = HashSet::new(); let mut added_change_hashes = Vec::new(); while let Some(hash) = stack.pop() { - if !seen_hashes.contains(&hash) && self._get_change_by_hash(&hash).is_none() { + if !seen_hashes.contains(&hash) && self.get_change_by_hash(&hash).is_none() { seen_hashes.insert(hash); added_change_hashes.push(hash); - if let Some(change) = other._get_change_by_hash(&hash) { + if let Some(change) = other.get_change_by_hash(&hash) { stack.extend(&change.deps); } } @@ -832,15 +724,11 @@ impl Automerge { added_change_hashes.reverse(); added_change_hashes .into_iter() - .filter_map(|h| other._get_change_by_hash(&h)) + .filter_map(|h| other.get_change_by_hash(&h)) .collect() } pub fn get_heads(&self) -> Vec { - self._get_heads() - } - - pub(crate) fn _get_heads(&self) -> Vec { let mut deps: Vec<_> = self.deps.iter().copied().collect(); deps.sort_unstable(); deps @@ -983,22 +871,26 @@ mod tests { fn insert_op() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); - doc.set(&ROOT, "hello", "world")?; - doc.value(&ROOT, "hello")?; + let mut tx = doc.transaction(); + tx.set(&ROOT, "hello", "world")?; + tx.value(&ROOT, "hello")?; + tx.commit(); Ok(()) } #[test] fn test_set() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); + let mut tx = doc.transaction(); // setting a scalar value shouldn't return an opid as no object was created. - assert!(doc.set(&ROOT, "a", 1)?.is_none()); + assert!(tx.set(&ROOT, "a", 1)?.is_none()); // setting the same value shouldn't return an opid as there is no change. 
- assert!(doc.set(&ROOT, "a", 1)?.is_none()); + assert!(tx.set(&ROOT, "a", 1)?.is_none()); - assert!(doc.set(&ROOT, "b", Value::map())?.is_some()); + assert!(tx.set(&ROOT, "b", Value::map())?.is_some()); // object already exists at b but setting a map again overwrites it so we get an opid. - assert!(doc.set(&ROOT, "b", Value::map())?.is_some()); + assert!(tx.set(&ROOT, "b", Value::map())?.is_some()); + tx.commit(); Ok(()) } @@ -1006,18 +898,20 @@ mod tests { fn test_list() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); - let list_id = doc.set(&ROOT, "items", Value::list())?.unwrap(); - doc.set(&ROOT, "zzz", "zzzval")?; - assert!(doc.value(&ROOT, "items")?.unwrap().1 == list_id); - doc.insert(&list_id, 0, "a")?; - doc.insert(&list_id, 0, "b")?; - doc.insert(&list_id, 2, "c")?; - doc.insert(&list_id, 1, "d")?; - assert!(doc.value(&list_id, 0)?.unwrap().0 == "b".into()); - assert!(doc.value(&list_id, 1)?.unwrap().0 == "d".into()); - assert!(doc.value(&list_id, 2)?.unwrap().0 == "a".into()); - assert!(doc.value(&list_id, 3)?.unwrap().0 == "c".into()); - assert!(doc.length(&list_id) == 4); + let mut tx = doc.transaction(); + let list_id = tx.set(&ROOT, "items", Value::list())?.unwrap(); + tx.set(&ROOT, "zzz", "zzzval")?; + assert!(tx.value(&ROOT, "items")?.unwrap().1 == list_id); + tx.insert(&list_id, 0, "a")?; + tx.insert(&list_id, 0, "b")?; + tx.insert(&list_id, 2, "c")?; + tx.insert(&list_id, 1, "d")?; + assert!(tx.value(&list_id, 0)?.unwrap().0 == "b".into()); + assert!(tx.value(&list_id, 1)?.unwrap().0 == "d".into()); + assert!(tx.value(&list_id, 2)?.unwrap().0 == "a".into()); + assert!(tx.value(&list_id, 3)?.unwrap().0 == "c".into()); + assert!(tx.length(&list_id) == 4); + tx.commit(); doc.save()?; Ok(()) } @@ -1026,22 +920,26 @@ mod tests { fn test_del() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); - doc.set(&ROOT, "xxx", "xxx")?; - assert!(!doc.values(&ROOT, 
"xxx")?.is_empty()); - doc.del(&ROOT, "xxx")?; - assert!(doc.values(&ROOT, "xxx")?.is_empty()); + let mut tx = doc.transaction(); + tx.set(&ROOT, "xxx", "xxx")?; + assert!(!tx.values(&ROOT, "xxx")?.is_empty()); + tx.del(&ROOT, "xxx")?; + assert!(tx.values(&ROOT, "xxx")?.is_empty()); + tx.commit(); Ok(()) } #[test] fn test_inc() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); - doc.set(&ROOT, "counter", Value::counter(10))?; - assert!(doc.value(&ROOT, "counter")?.unwrap().0 == Value::counter(10)); - doc.inc(&ROOT, "counter", 10)?; - assert!(doc.value(&ROOT, "counter")?.unwrap().0 == Value::counter(20)); - doc.inc(&ROOT, "counter", -5)?; - assert!(doc.value(&ROOT, "counter")?.unwrap().0 == Value::counter(15)); + let mut tx = doc.transaction(); + tx.set(&ROOT, "counter", Value::counter(10))?; + assert!(tx.value(&ROOT, "counter")?.unwrap().0 == Value::counter(10)); + tx.inc(&ROOT, "counter", 10)?; + assert!(tx.value(&ROOT, "counter")?.unwrap().0 == Value::counter(20)); + tx.inc(&ROOT, "counter", -5)?; + assert!(tx.value(&ROOT, "counter")?.unwrap().0 == Value::counter(15)); + tx.commit(); Ok(()) } @@ -1049,15 +947,21 @@ mod tests { fn test_save_incremental() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); - doc.set(&ROOT, "foo", 1)?; + let mut tx = doc.transaction(); + tx.set(&ROOT, "foo", 1)?; + tx.commit(); let save1 = doc.save().unwrap(); - doc.set(&ROOT, "bar", 2)?; + let mut tx = doc.transaction(); + tx.set(&ROOT, "bar", 2)?; + tx.commit(); let save2 = doc.save_incremental(); - doc.set(&ROOT, "baz", 3)?; + let mut tx = doc.transaction(); + tx.set(&ROOT, "baz", 3)?; + tx.commit(); let save3 = doc.save_incremental(); @@ -1085,11 +989,17 @@ mod tests { #[test] fn test_save_text() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); - let text = doc.set(&ROOT, "text", Value::text())?.unwrap(); + let mut tx = doc.transaction(); + let text = tx.set(&ROOT, "text", Value::text())?.unwrap(); + tx.commit(); let heads1 = 
doc.get_heads(); - doc.splice_text(&text, 0, 0, "hello world")?; + let mut tx = doc.transaction(); + tx.splice_text(&text, 0, 0, "hello world")?; + tx.commit(); let heads2 = doc.get_heads(); - doc.splice_text(&text, 6, 0, "big bad ")?; + let mut tx = doc.transaction(); + tx.splice_text(&text, 6, 0, "big bad ")?; + tx.commit(); let heads3 = doc.get_heads(); assert!(&doc.text(&text)? == "hello big bad world"); @@ -1104,19 +1014,29 @@ mod tests { fn test_props_vals_at() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); doc.set_actor("aaaa".try_into().unwrap()); - doc.set(&ROOT, "prop1", "val1")?; + let mut tx = doc.transaction(); + tx.set(&ROOT, "prop1", "val1")?; + tx.commit(); doc.get_heads(); let heads1 = doc.get_heads(); - doc.set(&ROOT, "prop1", "val2")?; + let mut tx = doc.transaction(); + tx.set(&ROOT, "prop1", "val2")?; + tx.commit(); doc.get_heads(); let heads2 = doc.get_heads(); - doc.set(&ROOT, "prop2", "val3")?; + let mut tx = doc.transaction(); + tx.set(&ROOT, "prop2", "val3")?; + tx.commit(); doc.get_heads(); let heads3 = doc.get_heads(); - doc.del(&ROOT, "prop1")?; + let mut tx = doc.transaction(); + tx.del(&ROOT, "prop1")?; + tx.commit(); doc.get_heads(); let heads4 = doc.get_heads(); - doc.set(&ROOT, "prop3", "val4")?; + let mut tx = doc.transaction(); + tx.set(&ROOT, "prop3", "val4")?; + tx.commit(); doc.get_heads(); let heads5 = doc.get_heads(); assert!(doc.keys_at(&ROOT, &heads1) == vec!["prop1".to_owned()]); @@ -1163,24 +1083,36 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor("aaaa".try_into().unwrap()); - let list = doc.set(&ROOT, "list", Value::list())?.unwrap(); + let mut tx = doc.transaction(); + let list = tx.set(&ROOT, "list", Value::list())?.unwrap(); + tx.commit(); let heads1 = doc.get_heads(); - doc.insert(&list, 0, Value::int(10))?; + let mut tx = doc.transaction(); + tx.insert(&list, 0, Value::int(10))?; + tx.commit(); let heads2 = doc.get_heads(); - doc.set(&list, 0, Value::int(20))?; - doc.insert(&list, 0, 
Value::int(30))?; + let mut tx = doc.transaction(); + tx.set(&list, 0, Value::int(20))?; + tx.insert(&list, 0, Value::int(30))?; + tx.commit(); let heads3 = doc.get_heads(); - doc.set(&list, 1, Value::int(40))?; - doc.insert(&list, 1, Value::int(50))?; + let mut tx = doc.transaction(); + tx.set(&list, 1, Value::int(40))?; + tx.insert(&list, 1, Value::int(50))?; + tx.commit(); let heads4 = doc.get_heads(); - doc.del(&list, 2)?; + let mut tx = doc.transaction(); + tx.del(&list, 2)?; + tx.commit(); let heads5 = doc.get_heads(); - doc.del(&list, 0)?; + let mut tx = doc.transaction(); + tx.del(&list, 0)?; + tx.commit(); let heads6 = doc.get_heads(); assert!(doc.length_at(&list, &heads1) == 0); diff --git a/automerge/src/autotxn.rs b/automerge/src/autotxn.rs index ba02eafb..3284ecd7 100644 --- a/automerge/src/autotxn.rs +++ b/automerge/src/autotxn.rs @@ -1,4 +1,5 @@ use crate::exid::ExId; +use crate::transaction::CommitOptions; use crate::types::Patch; use crate::{ change::export_change, transaction::TransactionInner, ActorId, Automerge, AutomergeError, @@ -27,6 +28,13 @@ impl AutoTxn { } } + /// Get the inner document. 
+ #[doc(hidden)] + pub fn document(&mut self) -> &Automerge { + self.ensure_transaction_closed(); + &self.doc + } + pub fn set_actor(&mut self, actor: ActorId) { self.ensure_transaction_closed(); self.doc.set_actor(actor) @@ -54,7 +62,7 @@ impl AutoTxn { .unwrap_or(0) } - fn tx(&mut self) { + fn try_start_transaction(&mut self) { if self.transaction.is_none() { let actor = self.doc.get_actor_index(); @@ -89,12 +97,20 @@ impl AutoTxn { } } - pub fn commit(&mut self, message: Option, time: Option) -> Vec { + pub fn commit(&mut self) -> Vec { // ensure that even no changes triggers a change - self.tx(); + self.try_start_transaction(); self.transaction .take() - .map(|tx| tx.commit(&mut self.doc, message, time)) + .map(|tx| tx.commit(&mut self.doc, None, None)) + .unwrap_or_else(|| self.doc.get_heads()) + } + + pub fn commit_with(&mut self, options: CommitOptions) -> Vec { + self.try_start_transaction(); + self.transaction + .take() + .map(|tx| tx.commit(&mut self.doc, options.message, options.time)) .unwrap_or_else(|| self.doc.get_heads()) } @@ -160,7 +176,7 @@ impl AutoTxn { prop: P, value: V, ) -> Result, AutomergeError> { - self.tx(); + self.try_start_transaction(); let tx = self.transaction.as_mut().unwrap(); tx.set(&mut self.doc, obj, prop, value) } @@ -171,7 +187,7 @@ impl AutoTxn { index: usize, value: V, ) -> Result, AutomergeError> { - self.tx(); + self.try_start_transaction(); let tx = self.transaction.as_mut().unwrap(); tx.insert(&mut self.doc, obj, index, value) } @@ -182,13 +198,13 @@ impl AutoTxn { prop: P, value: i64, ) -> Result<(), AutomergeError> { - self.tx(); + self.try_start_transaction(); let tx = self.transaction.as_mut().unwrap(); tx.inc(&mut self.doc, obj, prop, value) } pub fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { - self.tx(); + self.try_start_transaction(); let tx = self.transaction.as_mut().unwrap(); tx.del(&mut self.doc, obj, prop) } @@ -202,7 +218,7 @@ impl AutoTxn { del: usize, vals: Vec, ) -> Result, 
AutomergeError> { - self.tx(); + self.try_start_transaction(); let tx = self.transaction.as_mut().unwrap(); tx.splice(&mut self.doc, obj, pos, del, vals) } @@ -214,7 +230,7 @@ impl AutoTxn { del: usize, text: &str, ) -> Result, AutomergeError> { - self.tx(); + self.try_start_transaction(); let tx = self.transaction.as_mut().unwrap(); tx.splice_text(&mut self.doc, obj, pos, del, text) } diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index d94518be..e2794ef5 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -39,7 +39,7 @@ mod op_set; mod op_tree; mod query; mod sync; -mod transaction; +pub mod transaction; mod types; mod value; #[cfg(feature = "optree-visualisation")] @@ -52,7 +52,6 @@ pub use error::AutomergeError; pub use exid::ExId as ObjId; pub use legacy::Change as ExpandedChange; pub use sync::{BloomFilter, SyncHave, SyncMessage, SyncState}; -pub use transaction::Transaction; pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; pub use value::{ScalarValue, Value}; diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 62a0f977..0874ca5a 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -23,13 +23,9 @@ const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identif impl Automerge { pub fn generate_sync_message(&self, sync_state: &mut SyncState) -> Option { - self._generate_sync_message(sync_state) - } + let our_heads = self.get_heads(); - fn _generate_sync_message(&self, sync_state: &mut SyncState) -> Option { - let our_heads = self._get_heads(); - - let our_need = self._get_missing_deps(sync_state.their_heads.as_ref().unwrap_or(&vec![])); + let our_need = self.get_missing_deps(sync_state.their_heads.as_ref().unwrap_or(&vec![])); let their_heads_set = if let Some(ref heads) = sync_state.their_heads { heads.iter().collect::>() @@ -47,7 +43,7 @@ impl Automerge { if !first_have .last_sync .iter() - .all(|hash| self._get_change_by_hash(hash).is_some()) + .all(|hash| 
self.get_change_by_hash(hash).is_some()) { let reset_msg = SyncMessage { heads: our_heads, @@ -103,14 +99,6 @@ impl Automerge { &mut self, sync_state: &mut SyncState, message: SyncMessage, - ) -> Result, AutomergeError> { - self._receive_sync_message(sync_state, message) - } - - fn _receive_sync_message( - &mut self, - sync_state: &mut SyncState, - message: SyncMessage, ) -> Result, AutomergeError> { let mut patch = None; @@ -170,7 +158,7 @@ impl Automerge { } fn make_bloom_filter(&self, last_sync: Vec) -> SyncHave { - let new_changes = self._get_changes(&last_sync); + let new_changes = self.get_changes(&last_sync); let hashes = new_changes .into_iter() .map(|change| change.hash) @@ -184,7 +172,7 @@ impl Automerge { fn get_changes_to_send(&self, have: Vec, need: &[ChangeHash]) -> Vec<&Change> { if have.is_empty() { need.iter() - .filter_map(|hash| self._get_change_by_hash(hash)) + .filter_map(|hash| self.get_change_by_hash(hash)) .collect() } else { let mut last_sync_hashes = HashSet::new(); @@ -199,7 +187,7 @@ impl Automerge { } let last_sync_hashes = last_sync_hashes.into_iter().collect::>(); - let changes = self._get_changes(&last_sync_hashes); + let changes = self.get_changes(&last_sync_hashes); let mut change_hashes = HashSet::with_capacity(changes.len()); let mut dependents: HashMap> = HashMap::new(); @@ -235,7 +223,7 @@ impl Automerge { for hash in need { hashes_to_send.insert(*hash); if !change_hashes.contains(hash) { - let change = self._get_change_by_hash(hash); + let change = self.get_change_by_hash(hash); if let Some(change) = change { changes_to_send.push(change); } diff --git a/automerge/src/transaction.rs b/automerge/src/transaction.rs index 3aca4912..f7722a45 100644 --- a/automerge/src/transaction.rs +++ b/automerge/src/transaction.rs @@ -2,35 +2,53 @@ use crate::exid::ExId; use crate::AutomergeError; use crate::{Automerge, ChangeHash, Prop, Value}; +mod commit; mod inner; +mod result; pub(crate) use inner::TransactionInner; +pub use 
result::TransactionFailure; +pub use result::TransactionSuccess; + +pub type TransactionResult = Result, TransactionFailure>; + +pub use self::commit::CommitOptions; /// A transaction on a document. /// Transactions group operations into a single change so that no other operations can happen /// in-between. /// -/// Created from [`Automerge::tx`]. +/// Created from [`Automerge::transaction`]. #[derive(Debug)] pub struct Transaction<'a> { - pub(crate) inner: TransactionInner, + // this is an option so that we can take it during commit and rollback to prevent it being + // rolled back during drop. + pub(crate) inner: Option, pub(crate) doc: &'a mut Automerge, } impl<'a> Transaction<'a> { + /// Get the number of pending operations in this transaction. pub fn pending_ops(&self) -> usize { - self.inner.pending_ops() + self.inner.as_ref().unwrap().pending_ops() } /// Commit the operations performed in this transaction, returning the hashes corresponding to /// the new heads. - pub fn commit(self, message: Option, time: Option) -> Vec { - self.inner.commit(self.doc, message, time) + pub fn commit(mut self) -> Vec { + self.inner.take().unwrap().commit(self.doc, None, None) + } + + pub fn commit_with(mut self, options: CommitOptions) -> Vec { + self.inner + .take() + .unwrap() + .commit(self.doc, options.message, options.time) } /// Undo the operations added in this transaction, returning the number of cancelled /// operations. - pub fn rollback(self) -> usize { - self.inner.rollback(self.doc) + pub fn rollback(mut self) -> usize { + self.inner.take().unwrap().rollback(self.doc) } /// Set the value of property `P` to value `V` in object `obj`. 
@@ -52,7 +70,7 @@ impl<'a> Transaction<'a> { prop: P, value: V, ) -> Result, AutomergeError> { - self.inner.set(self.doc, obj, prop, value) + self.inner.as_mut().unwrap().set(self.doc, obj, prop, value) } pub fn insert>( @@ -61,7 +79,10 @@ impl<'a> Transaction<'a> { index: usize, value: V, ) -> Result, AutomergeError> { - self.inner.insert(self.doc, obj, index, value) + self.inner + .as_mut() + .unwrap() + .insert(self.doc, obj, index, value) } pub fn inc>( @@ -70,11 +91,11 @@ impl<'a> Transaction<'a> { prop: P, value: i64, ) -> Result<(), AutomergeError> { - self.inner.inc(self.doc, obj, prop, value) + self.inner.as_mut().unwrap().inc(self.doc, obj, prop, value) } pub fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { - self.inner.del(self.doc, obj, prop) + self.inner.as_mut().unwrap().del(self.doc, obj, prop) } /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert @@ -86,7 +107,10 @@ impl<'a> Transaction<'a> { del: usize, vals: Vec, ) -> Result, AutomergeError> { - self.inner.splice(self.doc, obj, pos, del, vals) + self.inner + .as_mut() + .unwrap() + .splice(self.doc, obj, pos, del, vals) } pub fn splice_text( @@ -96,7 +120,10 @@ impl<'a> Transaction<'a> { del: usize, text: &str, ) -> Result, AutomergeError> { - self.inner.splice_text(self.doc, obj, pos, del, text) + self.inner + .as_mut() + .unwrap() + .splice_text(self.doc, obj, pos, del, text) } pub fn keys(&self, obj: &ExId) -> Vec { @@ -157,3 +184,15 @@ impl<'a> Transaction<'a> { self.doc.values_at(obj, prop, heads) } } + +// If a transaction is not commited or rolled back manually then it can leave the document in an +// intermediate state. +// This defaults to rolling back the transaction to be compatible with `?` error returning before +// reaching a call to `commit`. 
+impl<'a> Drop for Transaction<'a> { + fn drop(&mut self) { + if let Some(txn) = self.inner.take() { + txn.rollback(self.doc); + } + } +} diff --git a/automerge/src/transaction/commit.rs b/automerge/src/transaction/commit.rs new file mode 100644 index 00000000..49d204fd --- /dev/null +++ b/automerge/src/transaction/commit.rs @@ -0,0 +1,32 @@ +/// Optional metadata for a commit. +#[derive(Debug, Default, Clone)] +pub struct CommitOptions { + pub(crate) message: Option, + pub(crate) time: Option, +} + +impl CommitOptions { + /// Add a message to the commit. + pub fn with_message(mut self, message: String) -> Self { + self.message = Some(message); + self + } + + /// Add a message to the commit. + pub fn set_message(&mut self, message: String) -> &mut Self { + self.message = Some(message); + self + } + + /// Add a timestamp to the commit. + pub fn with_time(mut self, time: i64) -> Self { + self.time = Some(time); + self + } + + /// Add a timestamp to the commit. + pub fn set_time(&mut self, time: i64) -> &mut Self { + self.time = Some(time); + self + } +} diff --git a/automerge/src/transaction/result.rs b/automerge/src/transaction/result.rs new file mode 100644 index 00000000..7f01ead2 --- /dev/null +++ b/automerge/src/transaction/result.rs @@ -0,0 +1,54 @@ +use crate::ChangeHash; + +/// The result of a successful, and committed, transaction. +#[derive(Debug)] +pub struct TransactionSuccess { + pub(crate) result: O, + pub(crate) heads: Vec, +} + +impl TransactionSuccess { + /// Get the result of the transaction. + pub fn result(&self) -> &O { + &self.result + } + + /// Get the result of the transaction. + pub fn into_result(self) -> O { + self.result + } + + /// Get the new heads of the document after commiting the transaction. + pub fn heads(&self) -> &[ChangeHash] { + &self.heads + } + + /// Get the new heads of the document after commiting the transaction. 
+ pub fn into_heads(self) -> Vec { + self.heads + } +} + +/// The result of a failed, and rolled back, transaction. +#[derive(Debug)] +pub struct TransactionFailure { + pub(crate) error: E, + pub(crate) cancelled: usize, +} + +impl TransactionFailure { + /// Get the error of the transaction. + pub fn error(&self) -> &E { + &self.error + } + + /// Get the error of the transaction. + pub fn into_error(self) -> E { + self.error + } + + /// Get the number of cancelled operations in the transaction. + pub fn cancelled(&self) -> usize { + self.cancelled + } +} diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index 7d157953..dfcf869e 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -6,12 +6,12 @@ use std::{ use serde::ser::{SerializeMap, SerializeSeq}; -pub fn new_doc() -> automerge::Automerge { - automerge::Automerge::new_with_actor_id(automerge::ActorId::random()) +pub fn new_doc() -> automerge::AutoTxn { + automerge::AutoTxn::new_with_actor_id(automerge::ActorId::random()) } -pub fn new_doc_with_actor(actor: automerge::ActorId) -> automerge::Automerge { - automerge::Automerge::new_with_actor_id(actor) +pub fn new_doc_with_actor(actor: automerge::ActorId) -> automerge::AutoTxn { + automerge::AutoTxn::new_with_actor_id(actor) } /// Returns two actor IDs, the first considered to be ordered before the second diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 03d5a5d2..7497ea54 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,4 +1,4 @@ -use automerge::{ActorId, Automerge, Value, ROOT}; +use automerge::{ActorId, AutoTxn, Automerge, Value, ROOT}; mod helpers; #[allow(unused_imports)] @@ -8,11 +8,11 @@ use helpers::{ }; #[test] fn no_conflict_on_repeated_assignment() { - let mut doc = Automerge::new(); + let mut doc = AutoTxn::new(); doc.set(&automerge::ROOT, "foo", 1).unwrap(); doc.set(&automerge::ROOT, "foo", 2).unwrap(); assert_doc!( - &doc, + doc.document(), map! 
{ "foo" => { 2 }, } @@ -62,7 +62,7 @@ fn repeated_map_assignment_which_resolves_conflict_not_ignored() { doc1.set(&automerge::ROOT, "field", 123).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "field" => { 123 } } @@ -84,7 +84,7 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { doc1.set(&list_id, 0, 789).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "list" => { list![ @@ -107,7 +107,7 @@ fn list_deletion() { doc.insert(&list_id, 2, 789).unwrap(); doc.del(&list_id, 1).unwrap(); assert_doc!( - &doc, + doc.document(), map! { "list" => { list![ { 123 }, @@ -129,7 +129,7 @@ fn merge_concurrent_map_prop_updates() { "bar".into() ); assert_doc!( - &doc1, + doc1.document(), map! { "foo" => { "bar" }, "hello" => { "world" }, @@ -137,13 +137,13 @@ fn merge_concurrent_map_prop_updates() { ); doc2.merge(&mut doc1).unwrap(); assert_doc!( - &doc2, + doc2.document(), map! { "foo" => { "bar" }, "hello" => { "world" }, } ); - assert_eq!(realize(&doc1), realize(&doc2)); + assert_eq!(realize(doc1.document()), realize(doc2.document())); } #[test] @@ -157,7 +157,7 @@ fn add_concurrent_increments_of_same_property() { doc2.inc(&automerge::ROOT, "counter", 2).unwrap(); doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "counter" => { mk_counter(3) @@ -184,7 +184,7 @@ fn add_increments_only_to_preceeded_values() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "counter" => { mk_counter(1), @@ -204,7 +204,7 @@ fn concurrent_updates_of_same_field() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "field" => { "one", @@ -230,7 +230,7 @@ fn concurrent_updates_of_same_list_element() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "birds" => { list![{ @@ -256,7 +256,7 @@ fn assignment_conflicts_of_different_types() { doc1.merge(&mut doc3).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! 
{ "field" => { "string", @@ -280,7 +280,7 @@ fn changes_within_conflicting_map_field() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "field" => { "string", @@ -324,7 +324,7 @@ fn changes_within_conflicting_list_element() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "list" => { list![ @@ -364,7 +364,7 @@ fn concurrently_assigned_nested_maps_should_not_merge() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "config" => { map!{ @@ -399,7 +399,7 @@ fn concurrent_insertions_at_different_list_positions() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "list" => { list![ @@ -432,7 +432,7 @@ fn concurrent_insertions_at_same_list_position() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "birds" => { list![ @@ -463,7 +463,7 @@ fn concurrent_assignment_and_deletion_of_a_map_entry() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "bestBird" => { "magpie", @@ -488,7 +488,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { doc2.del(&list_id, 1).unwrap(); assert_doc!( - &doc2, + doc2.document(), map! { "birds" => {list![ {"blackbird"}, @@ -498,7 +498,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { ); assert_doc!( - &doc1, + doc1.document(), map! { "birds" => {list![ { "blackbird" }, @@ -511,7 +511,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "birds" => {list![ { "blackbird" }, @@ -545,7 +545,7 @@ fn insertion_after_a_deleted_list_element() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "birds" => {list![ { "blackbird" }, @@ -556,7 +556,7 @@ fn insertion_after_a_deleted_list_element() { doc2.merge(&mut doc1).unwrap(); assert_doc!( - &doc2, + doc2.document(), map! 
{ "birds" => {list![ { "blackbird" }, @@ -588,7 +588,7 @@ fn concurrent_deletion_of_same_list_element() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "birds" => {list![ { "albatross" }, @@ -599,7 +599,7 @@ fn concurrent_deletion_of_same_list_element() { doc2.merge(&mut doc1).unwrap(); assert_doc!( - &doc2, + doc2.document(), map! { "birds" => {list![ { "albatross" }, @@ -639,7 +639,7 @@ fn concurrent_updates_at_different_levels() { doc1.merge(&mut doc2).unwrap(); assert_obj!( - &doc1, + doc1.document(), &automerge::ROOT, "animals", map! { @@ -650,7 +650,7 @@ fn concurrent_updates_at_different_levels() { ); assert_obj!( - &doc2, + doc2.document(), &automerge::ROOT, "animals", map! { @@ -685,7 +685,7 @@ fn concurrent_updates_of_concurrently_deleted_objects() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "birds" => { map!{}, @@ -737,7 +737,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - &doc1, + doc1.document(), map! { "wisdom" => {list![ {"to"}, @@ -771,7 +771,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id( doc2.insert(&list, 0, "one").unwrap(); assert_doc!( - &doc2, + doc2.document(), map! { "list" => { list![ { "one" }, @@ -797,7 +797,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() doc2.insert(&list, 0, "one").unwrap(); assert_doc!( - &doc2, + doc2.document(), map! { "list" => { list![ { "one" }, @@ -825,7 +825,7 @@ fn insertion_consistent_with_causality() { doc2.insert(&list, 0, "one").unwrap(); assert_doc!( - &doc2, + doc2.document(), map! 
{ "list" => { list![ {"one"}, @@ -901,10 +901,10 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc1.insert(&list, 1, "b")?; doc1.insert(&list, 2, "c")?; - let mut doc2 = Automerge::load(&doc1.save()?)?; + let mut doc2 = AutoTxn::load(&doc1.save()?)?; doc2.set_actor(actor2); - let mut doc3 = Automerge::load(&doc1.save()?)?; + let mut doc3 = AutoTxn::load(&doc1.save()?)?; doc3.set_actor(actor3); doc1.set(&list, 1, Value::counter(0))?; @@ -953,7 +953,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(doc1.length(&list), 2); - let doc4 = Automerge::load(&doc1.save()?)?; + let doc4 = AutoTxn::load(&doc1.save()?)?; assert_eq!(doc4.length(&list), 2); @@ -961,7 +961,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(doc1.length(&list), 1); - let doc5 = Automerge::load(&doc1.save()?)?; + let doc5 = AutoTxn::load(&doc1.save()?)?; assert_eq!(doc5.length(&list), 1); diff --git a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs index 93074c72..fb5fa5fe 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -4,12 +4,12 @@ use std::fs; fn replay_trace_tx(commands: Vec<(usize, usize, Vec)>) -> Automerge { let mut doc = Automerge::new(); - let text = doc.set(&ROOT, "text", Value::text()).unwrap().unwrap(); - let mut tx = doc.tx(); + let mut tx = doc.transaction(); + let text = tx.set(&ROOT, "text", Value::text()).unwrap().unwrap(); for (pos, del, vals) in commands { tx.splice(&text, pos, del, vals).unwrap(); } - tx.commit(None, None); + tx.commit(); doc } @@ -19,7 +19,7 @@ fn replay_trace_autotx(commands: Vec<(usize, usize, Vec)>) -> AutoTxn { for (pos, del, vals) in commands { doc.splice(&text, pos, del, vals).unwrap(); } - doc.commit(None, None); + doc.commit(); doc } diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index a2f233cb..aa7f6ac1 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -19,7 +19,7 @@ fn main() -> Result<(), 
AutomergeError> { let mut doc = Automerge::new(); let now = Instant::now(); - let mut tx = doc.tx(); + let mut tx = doc.transaction(); let text = tx.set(&ROOT, "text", Value::text()).unwrap().unwrap(); for (i, (pos, del, vals)) in commands.into_iter().enumerate() { if i % 1000 == 0 { @@ -27,7 +27,7 @@ fn main() -> Result<(), AutomergeError> { } tx.splice(&text, pos, del, vals)?; } - tx.commit(None, None); + tx.commit(); let _ = doc.save(); println!("Done in {} ms", now.elapsed().as_millis()); Ok(()) From f8c9343a456ac29e4d09894d482aad41e163d215 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 19 Feb 2022 18:57:32 +0000 Subject: [PATCH 088/730] Add get_heads to transaction --- automerge/src/transaction.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/automerge/src/transaction.rs b/automerge/src/transaction.rs index f7722a45..8cc17885 100644 --- a/automerge/src/transaction.rs +++ b/automerge/src/transaction.rs @@ -27,6 +27,11 @@ pub struct Transaction<'a> { } impl<'a> Transaction<'a> { + /// Get the heads of the document before this transaction was started. + pub fn get_heads(&self) -> Vec { + self.doc.get_heads() + } + /// Get the number of pending operations in this transaction. 
pub fn pending_ops(&self) -> usize { self.inner.as_ref().unwrap().pending_ops() From 50a1b4f99cc571c2896a870b409c3f8bf6e1d170 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 21 Feb 2022 10:32:57 +0000 Subject: [PATCH 089/730] Add transactable trait --- automerge-wasm/src/interop.rs | 1 + automerge-wasm/src/lib.rs | 1 + automerge/src/automerge.rs | 2 + automerge/src/autotxn.rs | 439 +++++++++++----------- automerge/src/transaction.rs | 198 +--------- automerge/src/transaction/inner.rs | 16 - automerge/src/transaction/transactable.rs | 115 ++++++ automerge/src/transaction/transaction.rs | 183 +++++++++ automerge/tests/test.rs | 1 + edit-trace/benches/main.rs | 2 +- edit-trace/src/main.rs | 2 +- 11 files changed, 526 insertions(+), 434 deletions(-) create mode 100644 automerge/src/transaction/transactable.rs create mode 100644 automerge/src/transaction/transaction.rs diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 839f958c..2b36896c 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -1,4 +1,5 @@ use automerge as am; +use automerge::transaction::Transactable; use automerge::{Change, ChangeHash, Prop}; use js_sys::{Array, Object, Reflect, Uint8Array}; use std::collections::HashSet; diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 60ff78d9..9835f53c 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -1,5 +1,6 @@ #![allow(clippy::unused_unit)] use am::transaction::CommitOptions; +use am::transaction::Transactable; use automerge as am; use automerge::{Change, ObjId, Prop, Value, ROOT}; use js_sys::{Array, Object, Uint8Array}; diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index d35a2b8b..15e4c098 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -728,6 +728,7 @@ impl Automerge { .collect() } + /// Get the heads of this document. 
pub fn get_heads(&self) -> Vec { let mut deps: Vec<_> = self.deps.iter().copied().collect(); deps.sort_unstable(); @@ -864,6 +865,7 @@ pub struct SpanInfo { #[cfg(test)] mod tests { use super::*; + use crate::transaction::Transactable; use crate::*; use std::convert::TryInto; diff --git a/automerge/src/autotxn.rs b/automerge/src/autotxn.rs index 3284ecd7..a584d874 100644 --- a/automerge/src/autotxn.rs +++ b/automerge/src/autotxn.rs @@ -1,5 +1,5 @@ use crate::exid::ExId; -use crate::transaction::CommitOptions; +use crate::transaction::{CommitOptions, Transactable}; use crate::types::Patch; use crate::{ change::export_change, transaction::TransactionInner, ActorId, Automerge, AutomergeError, @@ -55,19 +55,12 @@ impl AutoTxn { } } - pub fn pending_ops(&self) -> usize { - self.transaction - .as_ref() - .map(|t| t.pending_ops()) - .unwrap_or(0) - } - fn try_start_transaction(&mut self) { if self.transaction.is_none() { let actor = self.doc.get_actor_index(); let seq = self.doc.states.entry(actor).or_default().len() as u64 + 1; - let mut deps = self.get_heads(); + let mut deps = self.doc.get_heads(); if seq > 1 { let last_hash = self.get_hash(actor, seq - 1).unwrap(); if !deps.contains(&last_hash) { @@ -89,6 +82,42 @@ impl AutoTxn { } } + fn get_hash(&mut self, actor: usize, seq: u64) -> Result { + self.doc + .states + .get(&actor) + .and_then(|v| v.get(seq as usize - 1)) + .and_then(|&i| self.doc.history.get(i)) + .map(|c| c.hash) + .ok_or(AutomergeError::InvalidSeq(seq)) + } + + fn update_history(&mut self, change: Change) -> usize { + self.doc.max_op = std::cmp::max(self.doc.max_op, change.start_op + change.len() as u64 - 1); + + self.update_deps(&change); + + let history_index = self.doc.history.len(); + + self.doc + .states + .entry(self.doc.ops.m.actors.cache(change.actor_id().clone())) + .or_default() + .push(history_index); + + self.doc.history_index.insert(change.hash, history_index); + self.doc.history.push(change); + + history_index + } + + fn update_deps(&mut 
self, change: &Change) { + for d in &change.deps { + self.doc.deps.remove(d); + } + self.doc.deps.insert(change.hash); + } + pub fn fork(&mut self) -> Self { self.ensure_transaction_closed(); Self { @@ -97,23 +126,6 @@ impl AutoTxn { } } - pub fn commit(&mut self) -> Vec { - // ensure that even no changes triggers a change - self.try_start_transaction(); - self.transaction - .take() - .map(|tx| tx.commit(&mut self.doc, None, None)) - .unwrap_or_else(|| self.doc.get_heads()) - } - - pub fn commit_with(&mut self, options: CommitOptions) -> Vec { - self.try_start_transaction(); - self.transaction - .take() - .map(|tx| tx.commit(&mut self.doc, options.message, options.time)) - .unwrap_or_else(|| self.doc.get_heads()) - } - pub fn ensure_transaction_closed(&mut self) { if let Some(tx) = self.transaction.take() { self.update_history(export_change( @@ -124,162 +136,6 @@ impl AutoTxn { } } - pub fn rollback(&mut self) -> usize { - self.transaction - .take() - .map(|tx| tx.rollback(&mut self.doc)) - .unwrap_or(0) - } - - // KeysAt::() - // LenAt::() - // PropAt::() - // NthAt::() - - pub fn keys(&self, obj: &ExId) -> Vec { - self.doc.keys(obj) - } - - pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec { - self.doc.keys_at(obj, heads) - } - - pub fn length(&self, obj: &ExId) -> usize { - self.doc.length(obj) - } - - pub fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize { - self.doc.length_at(obj, heads) - } - - // set(obj, prop, value) - value can be scalar or objtype - // del(obj, prop) - // inc(obj, prop, value) - // insert(obj, index, value) - - /// Set the value of property `P` to value `V` in object `obj`. - /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document or create a new object. 
- /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - pub fn set, V: Into>( - &mut self, - obj: &ExId, - prop: P, - value: V, - ) -> Result, AutomergeError> { - self.try_start_transaction(); - let tx = self.transaction.as_mut().unwrap(); - tx.set(&mut self.doc, obj, prop, value) - } - - pub fn insert>( - &mut self, - obj: &ExId, - index: usize, - value: V, - ) -> Result, AutomergeError> { - self.try_start_transaction(); - let tx = self.transaction.as_mut().unwrap(); - tx.insert(&mut self.doc, obj, index, value) - } - - pub fn inc>( - &mut self, - obj: &ExId, - prop: P, - value: i64, - ) -> Result<(), AutomergeError> { - self.try_start_transaction(); - let tx = self.transaction.as_mut().unwrap(); - tx.inc(&mut self.doc, obj, prop, value) - } - - pub fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { - self.try_start_transaction(); - let tx = self.transaction.as_mut().unwrap(); - tx.del(&mut self.doc, obj, prop) - } - - /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert - /// the new elements - pub fn splice( - &mut self, - obj: &ExId, - pos: usize, - del: usize, - vals: Vec, - ) -> Result, AutomergeError> { - self.try_start_transaction(); - let tx = self.transaction.as_mut().unwrap(); - tx.splice(&mut self.doc, obj, pos, del, vals) - } - - pub fn splice_text( - &mut self, - obj: &ExId, - pos: usize, - del: usize, - text: &str, - ) -> Result, AutomergeError> { - self.try_start_transaction(); - let tx = self.transaction.as_mut().unwrap(); - tx.splice_text(&mut self.doc, obj, pos, del, text) - } - - pub fn text(&self, obj: &ExId) -> Result { - self.doc.text(obj) - } - - pub fn text_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Result { - self.doc.text_at(obj, heads) - } - - // TODO - I need to return these OpId's here **only** to get - // the legacy conflicts format of { [opid]: value } - // Something better? - pub fn value>( - &self, - obj: &ExId, - prop: P, - ) -> Result, AutomergeError> { - self.doc.value(obj, prop) - } - - pub fn value_at>( - &self, - obj: &ExId, - prop: P, - heads: &[ChangeHash], - ) -> Result, AutomergeError> { - self.doc.value_at(obj, prop, heads) - } - - pub fn values>( - &self, - obj: &ExId, - prop: P, - ) -> Result, AutomergeError> { - self.doc.values(obj, prop) - } - - pub fn values_at>( - &self, - obj: &ExId, - prop: P, - heads: &[ChangeHash], - ) -> Result, AutomergeError> { - self.doc.values_at(obj, prop, heads) - } - pub fn load(data: &[u8]) -> Result { let doc = Automerge::load(data)?; Ok(Self { @@ -347,47 +203,6 @@ impl AutoTxn { self.doc.get_changes_added(&other.doc) } - pub fn get_heads(&mut self) -> Vec { - self.ensure_transaction_closed(); - self.doc.get_heads() - } - - fn get_hash(&mut self, actor: usize, seq: u64) -> Result { - self.doc - .states - .get(&actor) - .and_then(|v| v.get(seq as usize - 1)) - .and_then(|&i| self.doc.history.get(i)) - .map(|c| c.hash) - .ok_or(AutomergeError::InvalidSeq(seq)) - } - - fn update_history(&mut self, change: 
Change) -> usize { - self.doc.max_op = std::cmp::max(self.doc.max_op, change.start_op + change.len() as u64 - 1); - - self.update_deps(&change); - - let history_index = self.doc.history.len(); - - self.doc - .states - .entry(self.doc.ops.m.actors.cache(change.actor_id().clone())) - .or_default() - .push(history_index); - - self.doc.history_index.insert(change.hash, history_index); - self.doc.history.push(change); - - history_index - } - - fn update_deps(&mut self, change: &Change) { - for d in &change.deps { - self.doc.deps.remove(d); - } - self.doc.deps.insert(change.hash); - } - pub fn import(&self, s: &str) -> Result { self.doc.import(s) } @@ -414,4 +229,182 @@ impl AutoTxn { pub fn visualise_optree(&self) -> String { self.doc.visualise_optree() } + + /// Get the current heads of the document. + /// + /// This closes the transaction first, if one is in progress. + pub fn get_heads(&mut self) -> Vec { + self.ensure_transaction_closed(); + self.doc.get_heads() + } + + pub fn commit(&mut self) -> Vec { + // ensure that even no changes triggers a change + self.try_start_transaction(); + self.transaction + .take() + .map(|tx| tx.commit(&mut self.doc, None, None)) + .unwrap_or_else(|| self.doc.get_heads()) + } + + pub fn commit_with(&mut self, options: CommitOptions) -> Vec { + self.try_start_transaction(); + self.transaction + .take() + .map(|tx| tx.commit(&mut self.doc, options.message, options.time)) + .unwrap_or_else(|| self.doc.get_heads()) + } + + pub fn rollback(&mut self) -> usize { + self.transaction + .take() + .map(|tx| tx.rollback(&mut self.doc)) + .unwrap_or(0) + } +} + +impl Transactable for AutoTxn { + fn pending_ops(&self) -> usize { + self.transaction + .as_ref() + .map(|t| t.pending_ops()) + .unwrap_or(0) + } + + // KeysAt::() + // LenAt::() + // PropAt::() + // NthAt::() + + fn keys(&self, obj: &ExId) -> Vec { + self.doc.keys(obj) + } + + fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec { + self.doc.keys_at(obj, heads) + } + + fn 
length(&self, obj: &ExId) -> usize { + self.doc.length(obj) + } + + fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize { + self.doc.length_at(obj, heads) + } + + // set(obj, prop, value) - value can be scalar or objtype + // del(obj, prop) + // inc(obj, prop, value) + // insert(obj, index, value) + + /// Set the value of property `P` to value `V` in object `obj`. + /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document or create a new object. + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + fn set, V: Into>( + &mut self, + obj: &ExId, + prop: P, + value: V, + ) -> Result, AutomergeError> { + self.try_start_transaction(); + let tx = self.transaction.as_mut().unwrap(); + tx.set(&mut self.doc, obj, prop, value) + } + + fn insert>( + &mut self, + obj: &ExId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + self.try_start_transaction(); + let tx = self.transaction.as_mut().unwrap(); + tx.insert(&mut self.doc, obj, index, value) + } + + fn inc>( + &mut self, + obj: &ExId, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + self.try_start_transaction(); + let tx = self.transaction.as_mut().unwrap(); + tx.inc(&mut self.doc, obj, prop, value) + } + + fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { + self.try_start_transaction(); + let tx = self.transaction.as_mut().unwrap(); + tx.del(&mut self.doc, obj, prop) + } + + /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert + /// the new elements + fn splice( + &mut self, + obj: &ExId, + pos: usize, + del: usize, + vals: Vec, + ) -> Result, AutomergeError> { + self.try_start_transaction(); + let tx = self.transaction.as_mut().unwrap(); + tx.splice(&mut self.doc, obj, pos, del, vals) + } + + fn text(&self, obj: &ExId) -> Result { + self.doc.text(obj) + } + + fn text_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Result { + self.doc.text_at(obj, heads) + } + + // TODO - I need to return these OpId's here **only** to get + // the legacy conflicts format of { [opid]: value } + // Something better? + fn value>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError> { + self.doc.value(obj, prop) + } + + fn value_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.value_at(obj, prop, heads) + } + + fn values>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError> { + self.doc.values(obj, prop) + } + + fn values_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.values_at(obj, prop, heads) + } } diff --git a/automerge/src/transaction.rs b/automerge/src/transaction.rs index 8cc17885..d3f51344 100644 --- a/automerge/src/transaction.rs +++ b/automerge/src/transaction.rs @@ -1,10 +1,9 @@ -use crate::exid::ExId; -use crate::AutomergeError; -use crate::{Automerge, ChangeHash, Prop, Value}; - mod commit; mod inner; mod result; +mod transactable; +mod transaction; + pub(crate) use inner::TransactionInner; pub use result::TransactionFailure; pub use result::TransactionSuccess; @@ -12,192 +11,5 @@ pub use result::TransactionSuccess; pub type TransactionResult = Result, TransactionFailure>; pub use self::commit::CommitOptions; - -/// A transaction on a document. -/// Transactions group operations into a single change so that no other operations can happen -/// in-between. -/// -/// Created from [`Automerge::transaction`]. 
-#[derive(Debug)] -pub struct Transaction<'a> { - // this is an option so that we can take it during commit and rollback to prevent it being - // rolled back during drop. - pub(crate) inner: Option, - pub(crate) doc: &'a mut Automerge, -} - -impl<'a> Transaction<'a> { - /// Get the heads of the document before this transaction was started. - pub fn get_heads(&self) -> Vec { - self.doc.get_heads() - } - - /// Get the number of pending operations in this transaction. - pub fn pending_ops(&self) -> usize { - self.inner.as_ref().unwrap().pending_ops() - } - - /// Commit the operations performed in this transaction, returning the hashes corresponding to - /// the new heads. - pub fn commit(mut self) -> Vec { - self.inner.take().unwrap().commit(self.doc, None, None) - } - - pub fn commit_with(mut self, options: CommitOptions) -> Vec { - self.inner - .take() - .unwrap() - .commit(self.doc, options.message, options.time) - } - - /// Undo the operations added in this transaction, returning the number of cancelled - /// operations. - pub fn rollback(mut self) -> usize { - self.inner.take().unwrap().rollback(self.doc) - } - - /// Set the value of property `P` to value `V` in object `obj`. 
- /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - pub fn set, V: Into>( - &mut self, - obj: &ExId, - prop: P, - value: V, - ) -> Result, AutomergeError> { - self.inner.as_mut().unwrap().set(self.doc, obj, prop, value) - } - - pub fn insert>( - &mut self, - obj: &ExId, - index: usize, - value: V, - ) -> Result, AutomergeError> { - self.inner - .as_mut() - .unwrap() - .insert(self.doc, obj, index, value) - } - - pub fn inc>( - &mut self, - obj: &ExId, - prop: P, - value: i64, - ) -> Result<(), AutomergeError> { - self.inner.as_mut().unwrap().inc(self.doc, obj, prop, value) - } - - pub fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { - self.inner.as_mut().unwrap().del(self.doc, obj, prop) - } - - /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert - /// the new elements - pub fn splice( - &mut self, - obj: &ExId, - pos: usize, - del: usize, - vals: Vec, - ) -> Result, AutomergeError> { - self.inner - .as_mut() - .unwrap() - .splice(self.doc, obj, pos, del, vals) - } - - pub fn splice_text( - &mut self, - obj: &ExId, - pos: usize, - del: usize, - text: &str, - ) -> Result, AutomergeError> { - self.inner - .as_mut() - .unwrap() - .splice_text(self.doc, obj, pos, del, text) - } - - pub fn keys(&self, obj: &ExId) -> Vec { - self.doc.keys(obj) - } - - pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec { - self.doc.keys_at(obj, heads) - } - - pub fn length(&self, obj: &ExId) -> usize { - self.doc.length(obj) - } - - pub fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize { - self.doc.length_at(obj, heads) - } - - pub fn text(&self, obj: &ExId) -> Result { - self.doc.text(obj) - } - - pub fn text_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Result { - self.doc.text_at(obj, heads) - } - - pub fn value>( - &self, - obj: &ExId, - prop: P, - ) -> Result, AutomergeError> { - self.doc.value(obj, prop) - } - - pub fn value_at>( - &self, - obj: &ExId, - prop: P, - heads: &[ChangeHash], - ) -> Result, AutomergeError> { - self.doc.value_at(obj, prop, heads) - } - - pub fn values>( - &self, - obj: &ExId, - prop: P, - ) -> Result, AutomergeError> { - self.doc.values(obj, prop) - } - - pub fn values_at>( - &self, - obj: &ExId, - prop: P, - heads: &[ChangeHash], - ) -> Result, AutomergeError> { - self.doc.values_at(obj, prop, heads) - } -} - -// If a transaction is not commited or rolled back manually then it can leave the document in an -// intermediate state. -// This defaults to rolling back the transaction to be compatible with `?` error returning before -// reaching a call to `commit`. 
-impl<'a> Drop for Transaction<'a> { - fn drop(&mut self) { - if let Some(txn) = self.inner.take() { - txn.rollback(self.doc); - } - } -} +pub use self::transactable::Transactable; +pub use transaction::Transaction; diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index ee971e1c..f71318af 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -3,7 +3,6 @@ use crate::query; use crate::types::{Key, ObjId, OpId}; use crate::{change::export_change, types::Op, Automerge, ChangeHash, Prop, Value}; use crate::{AutomergeError, OpType}; -use unicode_segmentation::UnicodeSegmentation; #[derive(Debug, Clone)] pub struct TransactionInner { @@ -306,19 +305,4 @@ impl TransactionInner { } Ok(results) } - - pub fn splice_text( - &mut self, - doc: &mut Automerge, - obj: &ExId, - pos: usize, - del: usize, - text: &str, - ) -> Result, AutomergeError> { - let mut vals = vec![]; - for c in text.to_owned().graphemes(true) { - vals.push(c.into()); - } - self.splice(doc, obj, pos, del, vals) - } } diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs new file mode 100644 index 00000000..dbce1d14 --- /dev/null +++ b/automerge/src/transaction/transactable.rs @@ -0,0 +1,115 @@ +use crate::exid::ExId; +use crate::{AutomergeError, ChangeHash, Prop, Value}; +use unicode_segmentation::UnicodeSegmentation; + +/// A way of mutating a document within a single change. +pub trait Transactable { + /// Get the number of pending operations in this transaction. + fn pending_ops(&self) -> usize; + + /// Set the value of property `P` to value `V` in object `obj`. 
+ /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + fn set, V: Into>( + &mut self, + obj: &ExId, + prop: P, + value: V, + ) -> Result, AutomergeError>; + + /// Insert a value into a list at the given index. + fn insert>( + &mut self, + obj: &ExId, + index: usize, + value: V, + ) -> Result, AutomergeError>; + + /// Increment the counter at the prop in the object by `value`. + fn inc>(&mut self, obj: &ExId, prop: P, value: i64) + -> Result<(), AutomergeError>; + + /// Delete the value at prop in the object. + fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError>; + + /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert + /// the new elements. + fn splice( + &mut self, + obj: &ExId, + pos: usize, + del: usize, + vals: Vec, + ) -> Result, AutomergeError>; + + /// Like [`Self::splice`] but for text. + fn splice_text( + &mut self, + obj: &ExId, + pos: usize, + del: usize, + text: &str, + ) -> Result, AutomergeError> { + let mut vals = vec![]; + for c in text.to_owned().graphemes(true) { + vals.push(c.into()); + } + self.splice(obj, pos, del, vals) + } + + /// Get the keys of the given object, it should be a map. + fn keys(&self, obj: &ExId) -> Vec; + + /// Get the keys of the given object at a point in history. + fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec; + + /// Get the length of the given object. + fn length(&self, obj: &ExId) -> usize; + + /// Get the length of the given object at a point in history. + fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize; + + /// Get the string that this text object represents. 
+ fn text(&self, obj: &ExId) -> Result; + + /// Get the string that this text object represents at a point in history. + fn text_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Result; + + /// Get the value at this prop in the object. + fn value>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError>; + + /// Get the value at this prop in the object at a point in history. + fn value_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError>; + + fn values>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError>; + + fn values_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError>; +} diff --git a/automerge/src/transaction/transaction.rs b/automerge/src/transaction/transaction.rs new file mode 100644 index 00000000..ea7347de --- /dev/null +++ b/automerge/src/transaction/transaction.rs @@ -0,0 +1,183 @@ +use crate::exid::ExId; +use crate::AutomergeError; +use crate::{Automerge, ChangeHash, Prop, Value}; + +use super::{CommitOptions, Transactable, TransactionInner}; + +/// A transaction on a document. +/// Transactions group operations into a single change so that no other operations can happen +/// in-between. +/// +/// Created from [`Automerge::transaction`]. +#[derive(Debug)] +pub struct Transaction<'a> { + // this is an option so that we can take it during commit and rollback to prevent it being + // rolled back during drop. + pub(crate) inner: Option, + pub(crate) doc: &'a mut Automerge, +} + +impl<'a> Transaction<'a> { + /// Get the heads of the document before this transaction was started. + pub fn get_heads(&self) -> Vec { + self.doc.get_heads() + } + + /// Commit the operations performed in this transaction, returning the hashes corresponding to + /// the new heads. 
+ pub fn commit(mut self) -> Vec { + self.inner.take().unwrap().commit(self.doc, None, None) + } + + pub fn commit_with(mut self, options: CommitOptions) -> Vec { + self.inner + .take() + .unwrap() + .commit(self.doc, options.message, options.time) + } + + /// Undo the operations added in this transaction, returning the number of cancelled + /// operations. + pub fn rollback(mut self) -> usize { + self.inner.take().unwrap().rollback(self.doc) + } +} + +impl<'a> Transactable for Transaction<'a> { + /// Get the number of pending operations in this transaction. + fn pending_ops(&self) -> usize { + self.inner.as_ref().unwrap().pending_ops() + } + + /// Set the value of property `P` to value `V` in object `obj`. + /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + fn set, V: Into>( + &mut self, + obj: &ExId, + prop: P, + value: V, + ) -> Result, AutomergeError> { + self.inner.as_mut().unwrap().set(self.doc, obj, prop, value) + } + + fn insert>( + &mut self, + obj: &ExId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + self.inner + .as_mut() + .unwrap() + .insert(self.doc, obj, index, value) + } + + fn inc>( + &mut self, + obj: &ExId, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + self.inner.as_mut().unwrap().inc(self.doc, obj, prop, value) + } + + fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { + self.inner.as_mut().unwrap().del(self.doc, obj, prop) + } + + /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert + /// the new elements + fn splice( + &mut self, + obj: &ExId, + pos: usize, + del: usize, + vals: Vec, + ) -> Result, AutomergeError> { + self.inner + .as_mut() + .unwrap() + .splice(self.doc, obj, pos, del, vals) + } + + fn keys(&self, obj: &ExId) -> Vec { + self.doc.keys(obj) + } + + fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec { + self.doc.keys_at(obj, heads) + } + + fn length(&self, obj: &ExId) -> usize { + self.doc.length(obj) + } + + fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize { + self.doc.length_at(obj, heads) + } + + fn text(&self, obj: &ExId) -> Result { + self.doc.text(obj) + } + + fn text_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Result { + self.doc.text_at(obj, heads) + } + + fn value>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError> { + self.doc.value(obj, prop) + } + + fn value_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.value_at(obj, prop, heads) + } + + fn values>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError> { + self.doc.values(obj, prop) + } + + fn values_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.values_at(obj, prop, heads) + } +} + +// If a transaction is not commited or rolled back manually then it can leave the document in an +// intermediate state. +// This defaults to rolling back the transaction to be compatible with `?` error returning before +// reaching a call to `commit`. 
+impl<'a> Drop for Transaction<'a> { + fn drop(&mut self) { + if let Some(txn) = self.inner.take() { + txn.rollback(self.doc); + } + } +} diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 7497ea54..02892061 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,3 +1,4 @@ +use automerge::transaction::Transactable; use automerge::{ActorId, AutoTxn, Automerge, Value, ROOT}; mod helpers; diff --git a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs index fb5fa5fe..4004bd41 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -1,4 +1,4 @@ -use automerge::{AutoTxn, Automerge, Value, ROOT}; +use automerge::{transaction::Transactable, AutoTxn, Automerge, Value, ROOT}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; use std::fs; diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index aa7f6ac1..62b0356b 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -1,4 +1,4 @@ -use automerge::{Automerge, AutomergeError, Value, ROOT}; +use automerge::{transaction::Transactable, Automerge, AutomergeError, Value, ROOT}; use std::fs; use std::time::Instant; From 66f8c73dbaf62f6acbf0f2e8f1d6f2cf818e680d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 21 Feb 2022 10:36:42 +0000 Subject: [PATCH 090/730] Document drop on transaction --- automerge/src/transaction/transaction.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/automerge/src/transaction/transaction.rs b/automerge/src/transaction/transaction.rs index ea7347de..8e67a169 100644 --- a/automerge/src/transaction/transaction.rs +++ b/automerge/src/transaction/transaction.rs @@ -9,6 +9,13 @@ use super::{CommitOptions, Transactable, TransactionInner}; /// in-between. /// /// Created from [`Automerge::transaction`]. +/// +/// ## Drop +/// +/// This transaction should be manually committed or rolled back. If not done manually then it will +/// be rolled back when it is dropped. 
This is to prevent the document being in an unsafe +/// intermediate state. +/// This is consistent with `?` error handling. #[derive(Debug)] pub struct Transaction<'a> { // this is an option so that we can take it during commit and rollback to prevent it being From cbd3406f8da874e52b58a9e7457f7f56da0a1dd3 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 21 Feb 2022 10:47:23 +0000 Subject: [PATCH 091/730] Document commit_with and CommitOptions --- automerge/src/autotxn.rs | 15 +++++++++++++++ automerge/src/transaction/commit.rs | 8 ++++---- automerge/src/transaction/transaction.rs | 16 ++++++++++++++++ 3 files changed, 35 insertions(+), 4 deletions(-) diff --git a/automerge/src/autotxn.rs b/automerge/src/autotxn.rs index a584d874..09d05bc0 100644 --- a/automerge/src/autotxn.rs +++ b/automerge/src/autotxn.rs @@ -247,6 +247,21 @@ impl AutoTxn { .unwrap_or_else(|| self.doc.get_heads()) } + /// Commit the current operations with some options. + /// + /// ``` + /// # use automerge::transaction::CommitOptions; + /// # use automerge::transaction::Transactable; + /// # use automerge::Value; + /// # use automerge::ROOT; + /// # use automerge::AutoTxn; + /// # use std::time::SystemTime; + /// let mut doc = AutoTxn::new(); + /// doc.set(&ROOT, "todos", Value::list()).unwrap(); + /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as + /// i64; + /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); + /// ``` pub fn commit_with(&mut self, options: CommitOptions) -> Vec { self.try_start_transaction(); self.transaction diff --git a/automerge/src/transaction/commit.rs b/automerge/src/transaction/commit.rs index 49d204fd..8f7007f8 100644 --- a/automerge/src/transaction/commit.rs +++ b/automerge/src/transaction/commit.rs @@ -7,14 +7,14 @@ pub struct CommitOptions { impl CommitOptions { /// Add a message to the commit. 
- pub fn with_message(mut self, message: String) -> Self { - self.message = Some(message); + pub fn with_message>(mut self, message: S) -> Self { + self.message = Some(message.into()); self } /// Add a message to the commit. - pub fn set_message(&mut self, message: String) -> &mut Self { - self.message = Some(message); + pub fn set_message>(&mut self, message: S) -> &mut Self { + self.message = Some(message.into()); self } diff --git a/automerge/src/transaction/transaction.rs b/automerge/src/transaction/transaction.rs index 8e67a169..d52b9219 100644 --- a/automerge/src/transaction/transaction.rs +++ b/automerge/src/transaction/transaction.rs @@ -36,6 +36,22 @@ impl<'a> Transaction<'a> { self.inner.take().unwrap().commit(self.doc, None, None) } + /// Commit the operations in this transaction with some options. + /// + /// ``` + /// # use automerge::transaction::CommitOptions; + /// # use automerge::transaction::Transactable; + /// # use automerge::Value; + /// # use automerge::ROOT; + /// # use automerge::Automerge; + /// # use std::time::SystemTime; + /// let mut doc = Automerge::new(); + /// let mut tx = doc.transaction(); + /// tx.set(&ROOT, "todos", Value::list()).unwrap(); + /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as + /// i64; + /// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); + /// ``` pub fn commit_with(mut self, options: CommitOptions) -> Vec { self.inner .take() From 3493dbd74a3fa7a8db34553d76aa3f7064cfa2da Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 21 Feb 2022 10:49:14 +0000 Subject: [PATCH 092/730] Rename autotxn to autocommit --- automerge-wasm/src/interop.rs | 4 ++-- automerge-wasm/src/lib.rs | 6 +++--- automerge/src/{autotxn.rs => autocommit.rs} | 12 ++++++------ automerge/src/lib.rs | 4 ++-- automerge/tests/helpers/mod.rs | 8 ++++---- automerge/tests/test.rs | 12 ++++++------ edit-trace/benches/main.rs | 10 +++++----- 7 files changed, 28 
insertions(+), 28 deletions(-) rename automerge/src/{autotxn.rs => autocommit.rs} (98%) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 2b36896c..2712eeae 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -289,7 +289,7 @@ pub(crate) fn get_heads(heads: Option) -> Option> { heads.ok() } -pub(crate) fn map_to_js(doc: &am::AutoTxn, obj: &ObjId) -> JsValue { +pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { let keys = doc.keys(obj); let map = Object::new(); for k in keys { @@ -312,7 +312,7 @@ pub(crate) fn map_to_js(doc: &am::AutoTxn, obj: &ObjId) -> JsValue { map.into() } -fn list_to_js(doc: &am::AutoTxn, obj: &ObjId) -> JsValue { +fn list_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { let len = doc.length(obj); let array = Array::new(); for i in 0..len { diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 9835f53c..658fce1f 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -31,12 +31,12 @@ static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[wasm_bindgen] #[derive(Debug)] -pub struct Automerge(automerge::AutoTxn); +pub struct Automerge(automerge::AutoCommit); #[wasm_bindgen] impl Automerge { pub fn new(actor: Option) -> Result { - let mut automerge = automerge::AutoTxn::new(); + let mut automerge = automerge::AutoCommit::new(); if let Some(a) = actor { let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec()); automerge.set_actor(a); @@ -514,7 +514,7 @@ pub fn init(actor: Option) -> Result { #[wasm_bindgen(js_name = loadDoc)] pub fn load(data: Uint8Array, actor: Option) -> Result { let data = data.to_vec(); - let mut automerge = am::AutoTxn::load(&data).map_err(to_js_err)?; + let mut automerge = am::AutoCommit::load(&data).map_err(to_js_err)?; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); automerge.set_actor(actor) diff --git 
a/automerge/src/autotxn.rs b/automerge/src/autocommit.rs similarity index 98% rename from automerge/src/autotxn.rs rename to automerge/src/autocommit.rs index 09d05bc0..f326e1b6 100644 --- a/automerge/src/autotxn.rs +++ b/automerge/src/autocommit.rs @@ -9,18 +9,18 @@ use crate::{SyncMessage, SyncState}; /// An automerge document that automatically manages transactions. #[derive(Debug, Clone)] -pub struct AutoTxn { +pub struct AutoCommit { doc: Automerge, transaction: Option, } -impl Default for AutoTxn { +impl Default for AutoCommit { fn default() -> Self { Self::new() } } -impl AutoTxn { +impl AutoCommit { pub fn new() -> Self { Self { doc: Automerge::new(), @@ -254,9 +254,9 @@ impl AutoTxn { /// # use automerge::transaction::Transactable; /// # use automerge::Value; /// # use automerge::ROOT; - /// # use automerge::AutoTxn; + /// # use automerge::AutoCommit; /// # use std::time::SystemTime; - /// let mut doc = AutoTxn::new(); + /// let mut doc = AutoCommit::new(); /// doc.set(&ROOT, "todos", Value::list()).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; @@ -278,7 +278,7 @@ impl AutoTxn { } } -impl Transactable for AutoTxn { +impl Transactable for AutoCommit { fn pending_ops(&self) -> usize { self.transaction .as_ref() diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index e2794ef5..cd5f924c 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -24,8 +24,8 @@ macro_rules! 
__log { } } +mod autocommit; mod automerge; -mod autotxn; mod change; mod clock; mod columnar; @@ -46,7 +46,7 @@ mod value; mod visualisation; pub use crate::automerge::Automerge; -pub use autotxn::AutoTxn; +pub use autocommit::AutoCommit; pub use change::{decode_change, Change}; pub use error::AutomergeError; pub use exid::ExId as ObjId; diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index dfcf869e..db089d92 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -6,12 +6,12 @@ use std::{ use serde::ser::{SerializeMap, SerializeSeq}; -pub fn new_doc() -> automerge::AutoTxn { - automerge::AutoTxn::new_with_actor_id(automerge::ActorId::random()) +pub fn new_doc() -> automerge::AutoCommit { + automerge::AutoCommit::new_with_actor_id(automerge::ActorId::random()) } -pub fn new_doc_with_actor(actor: automerge::ActorId) -> automerge::AutoTxn { - automerge::AutoTxn::new_with_actor_id(actor) +pub fn new_doc_with_actor(actor: automerge::ActorId) -> automerge::AutoCommit { + automerge::AutoCommit::new_with_actor_id(actor) } /// Returns two actor IDs, the first considered to be ordered before the second diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 02892061..34a9777b 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,5 +1,5 @@ use automerge::transaction::Transactable; -use automerge::{ActorId, AutoTxn, Automerge, Value, ROOT}; +use automerge::{ActorId, AutoCommit, Automerge, Value, ROOT}; mod helpers; #[allow(unused_imports)] @@ -9,7 +9,7 @@ use helpers::{ }; #[test] fn no_conflict_on_repeated_assignment() { - let mut doc = AutoTxn::new(); + let mut doc = AutoCommit::new(); doc.set(&automerge::ROOT, "foo", 1).unwrap(); doc.set(&automerge::ROOT, "foo", 2).unwrap(); assert_doc!( @@ -902,10 +902,10 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc1.insert(&list, 1, "b")?; doc1.insert(&list, 2, "c")?; - let mut doc2 = AutoTxn::load(&doc1.save()?)?; + let 
mut doc2 = AutoCommit::load(&doc1.save()?)?; doc2.set_actor(actor2); - let mut doc3 = AutoTxn::load(&doc1.save()?)?; + let mut doc3 = AutoCommit::load(&doc1.save()?)?; doc3.set_actor(actor3); doc1.set(&list, 1, Value::counter(0))?; @@ -954,7 +954,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(doc1.length(&list), 2); - let doc4 = AutoTxn::load(&doc1.save()?)?; + let doc4 = AutoCommit::load(&doc1.save()?)?; assert_eq!(doc4.length(&list), 2); @@ -962,7 +962,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(doc1.length(&list), 1); - let doc5 = AutoTxn::load(&doc1.save()?)?; + let doc5 = AutoCommit::load(&doc1.save()?)?; assert_eq!(doc5.length(&list), 1); diff --git a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs index 4004bd41..af7a85ef 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -1,4 +1,4 @@ -use automerge::{transaction::Transactable, AutoTxn, Automerge, Value, ROOT}; +use automerge::{transaction::Transactable, AutoCommit, Automerge, Value, ROOT}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; use std::fs; @@ -13,8 +13,8 @@ fn replay_trace_tx(commands: Vec<(usize, usize, Vec)>) -> Automerge { doc } -fn replay_trace_autotx(commands: Vec<(usize, usize, Vec)>) -> AutoTxn { - let mut doc = AutoTxn::new(); +fn replay_trace_autotx(commands: Vec<(usize, usize, Vec)>) -> AutoCommit { + let mut doc = AutoCommit::new(); let text = doc.set(&ROOT, "text", Value::text()).unwrap().unwrap(); for (pos, del, vals) in commands { doc.splice(&text, pos, del, vals).unwrap(); @@ -27,7 +27,7 @@ fn save_trace(mut doc: Automerge) { doc.save().unwrap(); } -fn save_trace_autotx(mut doc: AutoTxn) { +fn save_trace_autotx(mut doc: AutoCommit) { doc.save().unwrap(); } @@ -36,7 +36,7 @@ fn load_trace(bytes: &[u8]) { } fn load_trace_autotx(bytes: &[u8]) { - AutoTxn::load(bytes).unwrap(); + AutoCommit::load(bytes).unwrap(); } fn bench(c: &mut Criterion) { 
From 355cbdd2516ffd564a39a79de742ff398b4cddb0 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 21 Feb 2022 10:49:58 +0000 Subject: [PATCH 093/730] Rename try_start_transaction to ensure_transaction_open --- automerge/src/autocommit.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index f326e1b6..f54fea03 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -55,7 +55,7 @@ impl AutoCommit { } } - fn try_start_transaction(&mut self) { + fn ensure_transaction_open(&mut self) { if self.transaction.is_none() { let actor = self.doc.get_actor_index(); @@ -240,7 +240,7 @@ impl AutoCommit { pub fn commit(&mut self) -> Vec { // ensure that even no changes triggers a change - self.try_start_transaction(); + self.ensure_transaction_open(); self.transaction .take() .map(|tx| tx.commit(&mut self.doc, None, None)) @@ -263,7 +263,7 @@ impl AutoCommit { /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` pub fn commit_with(&mut self, options: CommitOptions) -> Vec { - self.try_start_transaction(); + self.ensure_transaction_open(); self.transaction .take() .map(|tx| tx.commit(&mut self.doc, options.message, options.time)) @@ -331,7 +331,7 @@ impl Transactable for AutoCommit { prop: P, value: V, ) -> Result, AutomergeError> { - self.try_start_transaction(); + self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); tx.set(&mut self.doc, obj, prop, value) } @@ -342,7 +342,7 @@ impl Transactable for AutoCommit { index: usize, value: V, ) -> Result, AutomergeError> { - self.try_start_transaction(); + self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); tx.insert(&mut self.doc, obj, index, value) } @@ -353,13 +353,13 @@ impl Transactable for AutoCommit { prop: P, value: i64, ) -> Result<(), AutomergeError> { - self.try_start_transaction(); + 
self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); tx.inc(&mut self.doc, obj, prop, value) } fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { - self.try_start_transaction(); + self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); tx.del(&mut self.doc, obj, prop) } @@ -373,7 +373,7 @@ impl Transactable for AutoCommit { del: usize, vals: Vec, ) -> Result, AutomergeError> { - self.try_start_transaction(); + self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); tx.splice(&mut self.doc, obj, pos, del, vals) } From 6b4393c0b39e07862b9ccc7ca8af2b0ea0b9f2b1 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 21 Feb 2022 11:40:49 +0000 Subject: [PATCH 094/730] Rename transaction module --- automerge/src/transaction.rs | 9 ++++----- .../{transaction.rs => manual_transaction.rs} | 0 2 files changed, 4 insertions(+), 5 deletions(-) rename automerge/src/transaction/{transaction.rs => manual_transaction.rs} (100%) diff --git a/automerge/src/transaction.rs b/automerge/src/transaction.rs index d3f51344..fe24619e 100644 --- a/automerge/src/transaction.rs +++ b/automerge/src/transaction.rs @@ -1,15 +1,14 @@ mod commit; mod inner; +mod manual_transaction; mod result; mod transactable; -mod transaction; +pub use self::commit::CommitOptions; +pub use self::transactable::Transactable; pub(crate) use inner::TransactionInner; +pub use manual_transaction::Transaction; pub use result::TransactionFailure; pub use result::TransactionSuccess; pub type TransactionResult = Result, TransactionFailure>; - -pub use self::commit::CommitOptions; -pub use self::transactable::Transactable; -pub use transaction::Transaction; diff --git a/automerge/src/transaction/transaction.rs b/automerge/src/transaction/manual_transaction.rs similarity index 100% rename from automerge/src/transaction/transaction.rs rename to automerge/src/transaction/manual_transaction.rs From 4a6b91adb2504e49042362022bdf3243014e135d 
Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 21 Feb 2022 13:29:59 +0000 Subject: [PATCH 095/730] Add test for broken rollback --- automerge/src/transaction/inner.rs | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index f71318af..0dc156fb 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -306,3 +306,20 @@ impl TransactionInner { Ok(results) } } + +#[cfg(test)] +mod tests { + use crate::{transaction::Transactable, ROOT}; + + use super::*; + + #[test] + fn map_rollback_doesnt_panic() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + + let a = tx.set(&ROOT, "a", Value::map()).unwrap().unwrap(); + tx.set(&a, "b", 1).unwrap(); + assert!(tx.value(&a, "b").unwrap().is_some()); + } +} From 8d24c9e4c36a71a3ee253e99208ecf3610603cc6 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 21 Feb 2022 14:00:41 +0000 Subject: [PATCH 096/730] Fix rollback of transaction using index into the tree --- automerge/src/query.rs | 2 ++ automerge/src/query/opid.rs | 52 ++++++++++++++++++++++++++++++ automerge/src/transaction/inner.rs | 7 ++-- 3 files changed, 57 insertions(+), 4 deletions(-) create mode 100644 automerge/src/query/opid.rs diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 7911e1bb..84a70c49 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -14,6 +14,7 @@ mod list_vals; mod list_vals_at; mod nth; mod nth_at; +mod opid; mod prop; mod prop_at; mod seek_op; @@ -27,6 +28,7 @@ pub(crate) use list_vals::ListVals; pub(crate) use list_vals_at::ListValsAt; pub(crate) use nth::Nth; pub(crate) use nth_at::NthAt; +pub(crate) use opid::OpIdSearch; pub(crate) use prop::Prop; pub(crate) use prop_at::PropAt; pub(crate) use seek_op::SeekOp; diff --git a/automerge/src/query/opid.rs b/automerge/src/query/opid.rs new file mode 100644 index 00000000..fc2132f2 --- /dev/null +++ 
b/automerge/src/query/opid.rs @@ -0,0 +1,52 @@ +use crate::op_tree::OpTreeNode; +use crate::query::{QueryResult, TreeQuery}; +use crate::types::{Op, OpId}; + +/// Search for an OpId in a tree. +/// Returns the index of the operation in the tree. +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct OpIdSearch { + target: OpId, + pos: usize, + found: bool, +} + +impl OpIdSearch { + pub fn new(target: OpId) -> Self { + OpIdSearch { + target, + pos: 0, + found: false, + } + } + + /// Get the index of the operation, if found. + pub fn index(&self) -> Option { + if self.found { + Some(self.pos) + } else { + None + } + } +} + +impl TreeQuery for OpIdSearch { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + if child.index.ops.contains(&self.target) { + QueryResult::Decend + } else { + self.pos += child.len(); + QueryResult::Next + } + } + + fn query_element(&mut self, element: &Op) -> QueryResult { + if element.id == self.target { + self.found = true; + QueryResult::Finish + } else { + self.pos += 1; + QueryResult::Next + } + } +} diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 0dc156fb..aaa26a99 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -1,5 +1,5 @@ use crate::exid::ExId; -use crate::query; +use crate::query::{self, OpIdSearch}; use crate::types::{Key, ObjId, OpId}; use crate::{change::export_change, types::Op, Automerge, ChangeHash, Prop, Value}; use crate::{AutomergeError, OpType}; @@ -50,12 +50,11 @@ impl TransactionInner { // remove in reverse order so sets are removed before makes etc... 
for op in self.operations.iter().rev() { for pred_id in &op.pred { - // FIXME - use query to make this fast - if let Some(p) = doc.ops.iter().position(|o| o.id == *pred_id) { + if let Some(p) = doc.ops.search(op.obj, OpIdSearch::new(*pred_id)).index() { doc.ops.replace(op.obj, p, |o| o.remove_succ(op)); } } - if let Some(pos) = doc.ops.iter().position(|o| o.id == op.id) { + if let Some(pos) = doc.ops.search(op.obj, OpIdSearch::new(op.id)).index() { doc.ops.remove(op.obj, pos); } } From b96aa168b4728d1f34e2aae009bb432f6f6c360e Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 15 Feb 2022 14:40:40 -0500 Subject: [PATCH 097/730] choking on bad value function --- automerge-wasm/src/interop.rs | 7 +------ automerge-wasm/src/lib.rs | 15 +++++---------- 2 files changed, 6 insertions(+), 16 deletions(-) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 67d0bb0f..fc4c39f9 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -243,12 +243,6 @@ pub(crate) fn js_get>(obj: J, prop: &str) -> Result String { - js_sys::JSON::stringify(val) - .map(|j| j.into()) - .unwrap_or_else(|_| "JSON::stringify_eror".into()) -} - pub(crate) fn js_set>(obj: &JsValue, prop: &str, val: V) -> Result { Reflect::set(obj, &prop.into(), &val.into()) } @@ -278,6 +272,7 @@ pub(crate) fn to_objtype(a: &JsValue) -> Option { } else if f.starts_with("class TABLE", 0) { Some(am::ObjType::Table) } else { + am::log!("to_objtype(function) -> {}", f); None } } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 09488c64..b4973f6f 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -10,9 +10,7 @@ mod interop; mod sync; mod value; -use interop::{ - get_heads, js_get, js_set, map_to_js, stringify, to_js_err, to_objtype, to_prop, AR, JS, -}; +use interop::{get_heads, js_get, js_set, map_to_js, to_js_err, to_objtype, to_prop, AR, JS}; use sync::SyncState; use value::{datatype, ScalarValue}; @@ -483,18 +481,15 @@ 
impl Automerge { } fn import_value(&mut self, value: JsValue, datatype: JsValue) -> Result { - let datatype = datatype.as_string(); - match self.import_scalar(&value, &datatype) { + let d = datatype.as_string(); + match self.import_scalar(&value, &d) { Some(val) => Ok(val.into()), None => { if let Some(o) = to_objtype(&value) { Ok(o.into()) } else { - Err(to_js_err(format!( - "invalid value ({},{:?})", - stringify(&value), - datatype - ))) + web_sys::console::log_3(&"Invalid value".into(), &value, &datatype); + Err(to_js_err("invalid value")) } } } From 2fc0705907173403d0093b2539e2e1f848a2e3f5 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 23 Feb 2022 19:43:13 -0500 Subject: [PATCH 098/730] change MAP,LIST,TEXT to be {},[],'' - allow recursion --- automerge-js/src/proxies.js | 25 ++++++----- automerge-wasm/index.d.ts | 17 +++----- automerge-wasm/package.json | 3 +- automerge-wasm/src/interop.rs | 72 +++++++++++++++++++++++-------- automerge-wasm/src/lib.rs | 80 +++++++++++++++++++++-------------- automerge-wasm/test/test.ts | 54 ++++++++++++++--------- edit-trace/automerge-wasm.js | 4 +- 7 files changed, 159 insertions(+), 96 deletions(-) diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js index 878ae101..ed3a4b97 100644 --- a/automerge-js/src/proxies.js +++ b/automerge-js/src/proxies.js @@ -4,7 +4,6 @@ const { Int, Uint, Float64 } = require("./numbers"); const { Counter, getWriteableCounter } = require("./counter"); const { Text } = require("./text"); const { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } = require("./constants") -const { MAP, LIST, TABLE, TEXT } = require("automerge-wasm") function parseListIndex(key) { if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) @@ -135,21 +134,21 @@ const MapHandler = { } switch (datatype) { case "list": - const list = context.set(objectId, key, LIST) + const list = context.set(objectId, key, []) const proxyList = listProxy(context, list, [ ... 
path, key ], readonly ); for (let i = 0; i < value.length; i++) { proxyList[i] = value[i] } break; case "text": - const text = context.set(objectId, key, TEXT) + const text = context.set(objectId, key, "", "text") const proxyText = textProxy(context, text, [ ... path, key ], readonly ); for (let i = 0; i < value.length; i++) { proxyText[i] = value.get(i) } break; case "map": - const map = context.set(objectId, key, MAP) + const map = context.set(objectId, key, {}) const proxyMap = mapProxy(context, map, [ ... path, key ], readonly ); for (const key in value) { proxyMap[key] = value[key] @@ -252,9 +251,9 @@ const ListHandler = { case "list": let list if (index >= context.length(objectId)) { - list = context.insert(objectId, index, LIST) + list = context.insert(objectId, index, []) } else { - list = context.set(objectId, index, LIST) + list = context.set(objectId, index, []) } const proxyList = listProxy(context, list, [ ... path, index ], readonly); proxyList.splice(0,0,...value) @@ -262,9 +261,9 @@ const ListHandler = { case "text": let text if (index >= context.length(objectId)) { - text = context.insert(objectId, index, TEXT) + text = context.insert(objectId, index, "", "text") } else { - text = context.set(objectId, index, TEXT) + text = context.set(objectId, index, "", "text") } const proxyText = textProxy(context, text, [ ... path, index ], readonly); proxyText.splice(0,0,...value) @@ -272,9 +271,9 @@ const ListHandler = { case "map": let map if (index >= context.length(objectId)) { - map = context.insert(objectId, index, MAP) + map = context.insert(objectId, index, {}) } else { - map = context.set(objectId, index, MAP) + map = context.set(objectId, index, {}) } const proxyMap = mapProxy(context, map, [ ... 
path, index ], readonly); for (const key in value) { @@ -479,17 +478,17 @@ function listMethods(target) { for (let [value,datatype] of values) { switch (datatype) { case "list": - const list = context.insert(objectId, index, LIST) + const list = context.insert(objectId, index, []) const proxyList = listProxy(context, list, [ ... path, index ], readonly); proxyList.splice(0,0,...value) break; case "text": - const text = context.insert(objectId, index, TEXT) + const text = context.insert(objectId, index, "", "text") const proxyText = textProxy(context, text, [ ... path, index ], readonly); proxyText.splice(0,0,...value) break; case "map": - const map = context.insert(objectId, index, MAP) + const map = context.insert(objectId, index, {}) const proxyMap = mapProxy(context, map, [ ... path, index ], readonly); for (const key in value) { proxyMap[key] = value[key] diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 20189dab..b9249b41 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -6,8 +6,7 @@ export type SyncMessage = Uint8Array; export type Prop = string | number; export type Hash = string; export type Heads = Hash[]; -export type ObjectType = string; // opaque ?? 
-export type Value = string | number | boolean | null | Date | Uint8Array | ObjectType; +export type Value = string | number | boolean | null | Date | Uint8Array | Array | Object; export type FullValue = ["str", string] | ["int", number] | @@ -23,11 +22,6 @@ export type FullValue = ["text", ObjID] | ["table", ObjID] -export const LIST : ObjectType; -export const MAP : ObjectType; -export const TABLE : ObjectType; -export const TEXT : ObjectType; - export enum ObjTypeName { list = "list", map = "map", @@ -44,7 +38,10 @@ export type Datatype = "null" | "timestamp" | "counter" | - "bytes"; + "bytes" | + "map" | + "text" | + "list"; export type DecodedSyncMessage = { heads: Heads, @@ -86,10 +83,10 @@ export function decodeSyncState(data: Uint8Array): SyncState; export class Automerge { // change state set(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): ObjID | undefined; - make(obj: ObjID, prop: Prop, value: ObjectType): ObjID; + make(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): ObjID; insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): ObjID | undefined; push(obj: ObjID, value: Value, datatype?: Datatype): ObjID | undefined; - splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; + splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; inc(obj: ObjID, prop: Prop, value: number): void; del(obj: ObjID, prop: Prop): void; diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index c39299eb..92eb79f8 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -19,10 +19,9 @@ "main": "./dev/index.js", "scripts": { "build": "rimraf ./dev && wasm-pack build --target nodejs --dev --out-name index -d dev && cp index.d.ts dev", - "release": "rimraf ./dev && wasm-pack build --target nodejs --release --out-name index -d dev && yarn opt && cp index.d.ts dev", + "release": "rimraf ./dev && wasm-pack build 
--target nodejs --release --out-name index -d dev && cp index.d.ts dev", "pkg": "rimraf ./pkg && wasm-pack build --target web --release --out-name index -d pkg && cp index.d.ts pkg && cd pkg && yarn pack && mv automerge-wasm*tgz ..", "prof": "rimraf ./dev && wasm-pack build --target nodejs --profiling --out-name index -d dev", - "opt": "wasm-opt -Oz dev/index_bg.wasm -o tmp.wasm && mv tmp.wasm dev/index_bg.wasm", "test": "yarn build && ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, "dependencies": {}, diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index fc4c39f9..17a46251 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -3,6 +3,7 @@ use automerge::{Change, ChangeHash, Prop}; use js_sys::{Array, Object, Reflect, Uint8Array}; use std::collections::HashSet; use std::fmt::Display; +use unicode_segmentation::UnicodeSegmentation; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; @@ -257,23 +258,60 @@ pub(crate) fn to_prop(p: JsValue) -> Result { } } -pub(crate) fn to_objtype(a: &JsValue) -> Option { - if !a.is_function() { - return None; - } - let f: js_sys::Function = a.clone().try_into().unwrap(); - let f = f.to_string(); - if f.starts_with("class MAP", 0) { - Some(am::ObjType::Map) - } else if f.starts_with("class LIST", 0) { - Some(am::ObjType::List) - } else if f.starts_with("class TEXT", 0) { - Some(am::ObjType::Text) - } else if f.starts_with("class TABLE", 0) { - Some(am::ObjType::Table) - } else { - am::log!("to_objtype(function) -> {}", f); - None +pub(crate) fn to_objtype( + value: &JsValue, + datatype: &Option, +) -> Option<(am::ObjType, Vec<(Prop, JsValue)>)> { + match datatype.as_deref() { + Some("map") => { + let map = value.clone().dyn_into::().ok()?; + // FIXME unwrap + let map = js_sys::Object::keys(&map) + .iter() + .zip(js_sys::Object::values(&map).iter()) + .map(|(key, val)| (key.as_string().unwrap().into(), val)) + .collect(); + Some((am::ObjType::Map, map)) 
+ } + Some("list") => { + let list = value.clone().dyn_into::().ok()?; + let list = list + .iter() + .enumerate() + .map(|(i, e)| (i.into(), e)) + .collect(); + Some((am::ObjType::List, list)) + } + Some("text") => { + let text = value.as_string()?; + let text = text + .graphemes(true) + .enumerate() + .map(|(i, ch)| (i.into(), ch.into())) + .collect(); + Some((am::ObjType::Text, text)) + } + Some(_) => None, + None => { + if let Ok(list) = value.clone().dyn_into::() { + let list = list + .iter() + .enumerate() + .map(|(i, e)| (i.into(), e)) + .collect(); + Some((am::ObjType::List, list)) + } else if let Ok(map) = value.clone().dyn_into::() { + // FIXME unwrap + let map = js_sys::Object::keys(&map) + .iter() + .zip(js_sys::Object::values(&map).iter()) + .map(|(key, val)| (key.as_string().unwrap().into(), val)) + .collect(); + Some((am::ObjType::Map, map)) + } else { + None + } + } } } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index b4973f6f..fd4fcd3e 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -131,15 +131,11 @@ impl Automerge { } else { if let Ok(array) = text.dyn_into::() { for i in array.iter() { - if let Ok(array) = i.clone().dyn_into::() { - let value = array.get(1); - let datatype = array.get(2); - let value = self.import_value(value, datatype)?; - vals.push(value); - } else { - let value = self.import_value(i, JsValue::null())?; - vals.push(value); + let (value, subvals) = self.import_value(&i, None)?; + if !subvals.is_empty() { + return Err(to_js_err("splice must be shallow")); } + vals.push(value); } } let result = self.0.splice(&obj, start, delete_count, vals)?; @@ -162,9 +158,10 @@ impl Automerge { datatype: JsValue, ) -> Result, JsValue> { let obj = self.import(obj)?; - let value = self.import_value(value, datatype)?; + let (value, subvals) = self.import_value(&value, datatype.as_string())?; let index = self.0.length(&obj); let opid = self.0.insert(&obj, index, value)?; + self.subset(&opid, 
subvals)?; Ok(opid.map(|id| id.to_string())) } @@ -177,8 +174,9 @@ impl Automerge { ) -> Result, JsValue> { let obj = self.import(obj)?; let index = index as f64; - let value = self.import_value(value, datatype)?; + let (value, subvals) = self.import_value(&value, datatype.as_string())?; let opid = self.0.insert(&obj, index as usize, value)?; + self.subset(&opid, subvals)?; Ok(opid.map(|id| id.to_string())) } @@ -191,17 +189,44 @@ impl Automerge { ) -> Result { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; - let value = self.import_value(value, datatype)?; + let (value, subvals) = self.import_value(&value, datatype.as_string())?; let opid = self.0.set(&obj, prop, value)?; + self.subset(&opid, subvals)?; Ok(opid.map(|id| id.to_string()).into()) } - pub fn make(&mut self, obj: JsValue, prop: JsValue, value: JsValue) -> Result { + fn subset( + &mut self, + obj: &Option, + vals: Vec<(am::Prop, JsValue)>, + ) -> Result<(), JsValue> { + if let Some(id) = obj { + for (p, v) in vals { + let (value, subvals) = self.import_value(&v, None)?; + //let opid = self.0.set(id, p, value)?; + let opid = match p { + Prop::Map(s) => self.0.set(id, s, value)?, + Prop::Seq(i) => self.0.insert(id, i, value)?, + }; + self.subset(&opid, subvals)?; + } + } + Ok(()) + } + + pub fn make( + &mut self, + obj: JsValue, + prop: JsValue, + value: JsValue, + datatype: JsValue, + ) -> Result { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; - let value = self.import_value(value, JsValue::null())?; + let (value, subvals) = self.import_value(&value, datatype.as_string())?; if value.is_object() { let opid = self.0.set(&obj, prop, value)?; + self.subset(&opid, subvals)?; Ok(opid.unwrap().to_string()) } else { Err(to_js_err("invalid object type")) @@ -480,15 +505,18 @@ impl Automerge { } } - fn import_value(&mut self, value: JsValue, datatype: JsValue) -> Result { - let d = datatype.as_string(); - match self.import_scalar(&value, &d) { - Some(val) => Ok(val.into()), + 
fn import_value( + &mut self, + value: &JsValue, + datatype: Option, + ) -> Result<(Value, Vec<(Prop, JsValue)>), JsValue> { + match self.import_scalar(value, &datatype) { + Some(val) => Ok((val.into(), vec![])), None => { - if let Some(o) = to_objtype(&value) { - Ok(o.into()) + if let Some((o, subvals)) = to_objtype(value, &datatype) { + Ok((o.into(), subvals)) } else { - web_sys::console::log_3(&"Invalid value".into(), &value, &datatype); + web_sys::console::log_2(&"Invalid value".into(), value); Err(to_js_err("invalid value")) } } @@ -591,15 +619,3 @@ pub fn encode_sync_state(state: SyncState) -> Result { pub fn decode_sync_state(data: Uint8Array) -> Result { SyncState::decode(data) } - -#[wasm_bindgen(js_name = MAP)] -pub struct Map {} - -#[wasm_bindgen(js_name = LIST)] -pub struct List {} - -#[wasm_bindgen(js_name = TEXT)] -pub struct Text {} - -#[wasm_bindgen(js_name = TABLE)] -pub struct Table {} diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index f72d0979..ef24a73d 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -3,7 +3,7 @@ import { describe, it } from 'mocha'; import assert from 'assert' //@ts-ignore import { BloomFilter } from './helpers/sync' -import { create, loadDoc, SyncState, Automerge, MAP, LIST, TEXT, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '../dev/index' +import { create, loadDoc, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '../dev/index' import { DecodedSyncMessage } from '../index'; import { Hash } from '../dev/index'; @@ -64,7 +64,7 @@ describe('Automerge', () => { doc.set(root, "bool", true) doc.set(root, "time1", 1000, "timestamp") doc.set(root, "time2", new Date(1001)) - doc.set(root, "list", LIST); + doc.set(root, "list", []); doc.set(root, "null", null) result = doc.value(root,"hello") @@ -124,7 +124,7 @@ 
describe('Automerge', () => { let root = "_root" let result - let submap = doc.set(root, "submap", MAP) + let submap = doc.set(root, "submap", {}) if (!submap) throw new Error('should be not null') doc.set(submap, "number", 6, "uint") assert.strictEqual(doc.pendingOps(),2) @@ -141,7 +141,7 @@ describe('Automerge', () => { let doc = create() let root = "_root" - let submap = doc.set(root, "numbers", LIST) + let submap = doc.set(root, "numbers", []) if (!submap) throw new Error('should be not null') doc.insert(submap, 0, "a"); doc.insert(submap, 1, "b"); @@ -165,7 +165,7 @@ describe('Automerge', () => { let doc = create() let root = "_root" - let submap = doc.set(root, "letters", LIST) + let submap = doc.set(root, "letters", []) if (!submap) throw new Error('should be not null') doc.insert(submap, 0, "a"); doc.insert(submap, 0, "b"); @@ -230,11 +230,11 @@ describe('Automerge', () => { let doc = create() let root = "_root"; - let text = doc.set(root, "text", TEXT); + let text = doc.set(root, "text", "", "text"); if (!text) throw new Error('should not be undefined') doc.splice(text, 0, 0, "hello ") doc.splice(text, 6, 0, ["w","o","r","l","d"]) - doc.splice(text, 11, 0, [["str","!"],["str","?"]]) + doc.splice(text, 11, 0, ["!","?"]) assert.deepEqual(doc.value(text, 0),["str","h"]) assert.deepEqual(doc.value(text, 1),["str","e"]) assert.deepEqual(doc.value(text, 9),["str","l"]) @@ -282,7 +282,7 @@ describe('Automerge', () => { it('should be able to splice text', () => { let doc = create() - let text = doc.set("_root", "text", TEXT); + let text = doc.set("_root", "text", "", "text"); if (!text) throw new Error('should not be undefined') doc.splice(text, 0, 0, "hello world"); let heads1 = doc.commit(); @@ -331,7 +331,7 @@ describe('Automerge', () => { it('local inc increments all visible counters in a sequence', () => { let doc1 = create("aaaa") - let seq = doc1.set("_root", "seq", LIST) + let seq = doc1.set("_root", "seq", []) if (!seq) throw new Error('Should not be 
undefined') doc1.insert(seq, 0, "hello") let doc2 = loadDoc(doc1.save(), "bbbb"); @@ -363,18 +363,32 @@ describe('Automerge', () => { doc4.free() }) + it('recursive sets are possible', () => { + let doc = create("aaaa") + let l1 = doc.make("_root","list",[{ foo: "bar"}, [1,2,3]]) + let l2 = doc.insert(l1, 0, { zip: ["a", "b"] }) + let l3 = doc.set("_root","info1","hello world","text") + let l4 = doc.set("_root","info2","hello world") + assert.deepEqual(doc.toJS(), { + "list": [ { zip: ["a", "b"] }, { foo: "bar"}, [ 1,2,3]], + "info1": "hello world".split(""), + "info2": "hello world" + }) + doc.free() + }) + it('only returns an object id when objects are created', () => { let doc = create("aaaa") let r1 = doc.set("_root","foo","bar") - let r2 = doc.set("_root","list",LIST) + let r2 = doc.set("_root","list",[]) let r3 = doc.set("_root","counter",10, "counter") let r4 = doc.inc("_root","counter",1) let r5 = doc.del("_root","counter") if (!r2) throw new Error('should not be undefined') let r6 = doc.insert(r2,0,10); - let r7 = doc.insert(r2,0,MAP); + let r7 = doc.insert(r2,0,{}); let r8 = doc.splice(r2,1,0,["a","b","c"]); - let r9 = doc.splice(r2,1,0,["a",LIST,MAP,"d"]); + let r9 = doc.splice(r2,1,0,["a",[],{},"d"]); assert.deepEqual(r1,null); assert.deepEqual(r2,"2@aaaa"); assert.deepEqual(r3,null); @@ -389,11 +403,11 @@ describe('Automerge', () => { it('objects without properties are preserved', () => { let doc1 = create("aaaa") - let a = doc1.set("_root","a",MAP); + let a = doc1.set("_root","a",{}); if (!a) throw new Error('should not be undefined') - let b = doc1.set("_root","b",MAP); + let b = doc1.set("_root","b",{}); if (!b) throw new Error('should not be undefined') - let c = doc1.set("_root","c",MAP); + let c = doc1.set("_root","c",{}); if (!c) throw new Error('should not be undefined') let d = doc1.set(c,"d","dd"); let saved = doc1.save(); @@ -411,7 +425,7 @@ describe('Automerge', () => { it('should handle merging text conflicts then saving & loading', () => 
{ let A = create("aabbcc") - let At = A.make('_root', 'text', TEXT) + let At = A.make('_root', 'text', "", "text") A.splice(At, 0, 0, 'hello') let B = A.fork() @@ -462,7 +476,7 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() // make two nodes with the same changes - let list = n1.set("_root","n", LIST) + let list = n1.set("_root","n", []) if (!list) throw new Error('undefined') n1.commit("",0) for (let i = 0; i < 10; i++) { @@ -486,7 +500,7 @@ describe('Automerge', () => { let n1 = create(), n2 = create() // make changes for n1 that n2 should request - let list = n1.set("_root","n",LIST) + let list = n1.set("_root","n",[]) if (!list) throw new Error('undefined') n1.commit("",0) for (let i = 0; i < 10; i++) { @@ -503,7 +517,7 @@ describe('Automerge', () => { let n1 = create(), n2 = create() // make changes for n1 that n2 should request - let list = n1.set("_root","n",LIST) + let list = n1.set("_root","n",[]) if (!list) throw new Error('undefined') n1.commit("",0) for (let i = 0; i < 10; i++) { @@ -660,7 +674,7 @@ describe('Automerge', () => { let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState(), message = null - let items = n1.set("_root", "items", LIST) + let items = n1.set("_root", "items", []) if (!items) throw new Error('undefined') n1.commit("",0) diff --git a/edit-trace/automerge-wasm.js b/edit-trace/automerge-wasm.js index 02130686..3680efc0 100644 --- a/edit-trace/automerge-wasm.js +++ b/edit-trace/automerge-wasm.js @@ -10,8 +10,8 @@ const Automerge = require('../automerge-wasm') const start = new Date() -let doc = Automerge.init(); -let text = doc.set("_root", "text", Automerge.TEXT) +let doc = Automerge.create(); +let text = doc.set("_root", "text", "", "text") for (let i = 0; i < edits.length; i++) { let edit = edits[i] From 06302e4a1783004254e1d1395dfa618141816dad Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 24 Feb 2022 00:22:56 -0500 Subject: [PATCH 099/730] make() 
defaults to text --- automerge-wasm/src/interop.rs | 7 +++++++ automerge-wasm/src/lib.rs | 11 ++--------- automerge-wasm/test/test.ts | 8 +++++--- 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 17a46251..61cc81e4 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -308,6 +308,13 @@ pub(crate) fn to_objtype( .map(|(key, val)| (key.as_string().unwrap().into(), val)) .collect(); Some((am::ObjType::Map, map)) + } else if let Some(text) = value.as_string() { + let text = text + .graphemes(true) + .enumerate() + .map(|(i, ch)| (i.into(), ch.into())) + .collect(); + Some((am::ObjType::Text, text)) } else { None } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index fd4fcd3e..4759894d 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -214,17 +214,10 @@ impl Automerge { Ok(()) } - pub fn make( - &mut self, - obj: JsValue, - prop: JsValue, - value: JsValue, - datatype: JsValue, - ) -> Result { + pub fn make(&mut self, obj: JsValue, prop: JsValue, value: JsValue) -> Result { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; - let (value, subvals) = self.import_value(&value, datatype.as_string())?; - if value.is_object() { + if let Some((value, subvals)) = to_objtype(&value, &None) { let opid = self.0.set(&obj, prop, value)?; self.subset(&opid, subvals)?; Ok(opid.unwrap().to_string()) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index ef24a73d..af6b2315 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -367,12 +367,14 @@ describe('Automerge', () => { let doc = create("aaaa") let l1 = doc.make("_root","list",[{ foo: "bar"}, [1,2,3]]) let l2 = doc.insert(l1, 0, { zip: ["a", "b"] }) - let l3 = doc.set("_root","info1","hello world","text") - let l4 = doc.set("_root","info2","hello world") + let l3 = doc.make("_root","info1","hello world") // 'text' + let l4 
= doc.set("_root","info2","hello world") // 'str' + let l5 = doc.set("_root","info3","hello world", "text") assert.deepEqual(doc.toJS(), { "list": [ { zip: ["a", "b"] }, { foo: "bar"}, [ 1,2,3]], "info1": "hello world".split(""), - "info2": "hello world" + "info2": "hello world", + "info3": "hello world".split("") }) doc.free() }) From 7439593caf74766df79d5c4ebfab5fcf8979ef94 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 25 Feb 2022 17:30:34 +0000 Subject: [PATCH 100/730] Document keys functions --- automerge/src/automerge.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 15e4c098..b6447988 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -185,6 +185,10 @@ impl Automerge { // PropAt::() // NthAt::() + /// Get the keys of the object `obj`. + /// + /// For a map this returns the keys of the map. + /// For a list this returns the element ids (opids) encoded as strings. pub fn keys(&self, obj: &ExId) -> Vec { if let Ok(obj) = self.exid_to_obj(obj) { let q = self.ops.search(obj, query::Keys::new()); @@ -194,6 +198,7 @@ impl Automerge { } } + /// Historical version of [`keys`](Self::keys). pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec { if let Ok(obj) = self.exid_to_obj(obj) { let clock = self.clock_at(heads); From a726cf33c7f6a127091ccefa59ef6a5612ffc716 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 25 Feb 2022 11:01:20 +0000 Subject: [PATCH 101/730] Add keys struct for iteration This at least helps to not convert all of the keys to their strings automatically but still allocates a vec. 
--- automerge-wasm/src/lib.rs | 3 +- automerge/src/autocommit.rs | 6 ++-- automerge/src/automerge.rs | 35 ++++++++++++------- automerge/src/keys.rs | 34 ++++++++++++++++++ automerge/src/lib.rs | 2 ++ .../src/transaction/manual_transaction.rs | 6 ++-- automerge/src/transaction/transactable.rs | 6 ++-- 7 files changed, 68 insertions(+), 24 deletions(-) create mode 100644 automerge/src/keys.rs diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index c31c925d..b0925d8e 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -108,8 +108,7 @@ impl Automerge { } else { self.0.keys(&obj) } - .iter() - .map(|s| JsValue::from_str(s)) + .map(|s| JsValue::from_str(&s)) .collect(); Ok(result) } diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index f54fea03..1af5d888 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -5,7 +5,7 @@ use crate::{ change::export_change, transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, }; -use crate::{SyncMessage, SyncState}; +use crate::{Keys, SyncMessage, SyncState}; /// An automerge document that automatically manages transactions. 
#[derive(Debug, Clone)] @@ -291,11 +291,11 @@ impl Transactable for AutoCommit { // PropAt::() // NthAt::() - fn keys(&self, obj: &ExId) -> Vec { + fn keys(&self, obj: &ExId) -> Keys { self.doc.keys(obj) } - fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec { + fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Keys { self.doc.keys_at(obj, heads) } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index b6447988..d635c62e 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -2,6 +2,7 @@ use std::collections::{HashMap, HashSet, VecDeque}; use crate::change::encode_document; use crate::exid::ExId; +use crate::keys::Keys; use crate::op_set::OpSet; use crate::transaction::{ CommitOptions, Transaction, TransactionFailure, TransactionInner, TransactionResult, @@ -189,23 +190,23 @@ impl Automerge { /// /// For a map this returns the keys of the map. /// For a list this returns the element ids (opids) encoded as strings. - pub fn keys(&self, obj: &ExId) -> Vec { + pub fn keys(&self, obj: &ExId) -> Keys { if let Ok(obj) = self.exid_to_obj(obj) { let q = self.ops.search(obj, query::Keys::new()); - q.keys.iter().map(|k| self.to_string(*k)).collect() + Keys::new(self, q.keys) } else { - vec![] + Keys::new(self, vec![]) } } /// Historical version of [`keys`](Self::keys). 
- pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec { + pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Keys { if let Ok(obj) = self.exid_to_obj(obj) { let clock = self.clock_at(heads); let q = self.ops.search(obj, query::KeysAt::new(clock)); - q.keys.iter().map(|k| self.to_string(*k)).collect() + Keys::new(self, q.keys) } else { - vec![] + Keys::new(self, vec![]) } } @@ -799,7 +800,7 @@ impl Automerge { } } - fn to_string(&self, id: E) -> String { + pub(crate) fn to_string(&self, id: E) -> String { match id.export() { Export::Id(id) => format!("{}@{}", id.counter(), self.ops.m.actors[id.actor()]), Export::Prop(index) => self.ops.m.props[index].clone(), @@ -869,6 +870,8 @@ pub struct SpanInfo { #[cfg(test)] mod tests { + use itertools::Itertools; + use super::*; use crate::transaction::Transactable; use crate::*; @@ -1046,38 +1049,44 @@ mod tests { tx.commit(); doc.get_heads(); let heads5 = doc.get_heads(); - assert!(doc.keys_at(&ROOT, &heads1) == vec!["prop1".to_owned()]); + assert!(doc.keys_at(&ROOT, &heads1).collect_vec() == vec!["prop1".to_owned()]); assert_eq!(doc.length_at(&ROOT, &heads1), 1); assert!(doc.value_at(&ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); assert!(doc.value_at(&ROOT, "prop2", &heads1)? == None); assert!(doc.value_at(&ROOT, "prop3", &heads1)? == None); - assert!(doc.keys_at(&ROOT, &heads2) == vec!["prop1".to_owned()]); + assert!(doc.keys_at(&ROOT, &heads2).collect_vec() == vec!["prop1".to_owned()]); assert_eq!(doc.length_at(&ROOT, &heads2), 1); assert!(doc.value_at(&ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); assert!(doc.value_at(&ROOT, "prop2", &heads2)? == None); assert!(doc.value_at(&ROOT, "prop3", &heads2)? 
== None); - assert!(doc.keys_at(&ROOT, &heads3) == vec!["prop1".to_owned(), "prop2".to_owned()]); + assert!( + doc.keys_at(&ROOT, &heads3).collect_vec() + == vec!["prop1".to_owned(), "prop2".to_owned()] + ); assert_eq!(doc.length_at(&ROOT, &heads3), 2); assert!(doc.value_at(&ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); assert!(doc.value_at(&ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); assert!(doc.value_at(&ROOT, "prop3", &heads3)? == None); - assert!(doc.keys_at(&ROOT, &heads4) == vec!["prop2".to_owned()]); + assert!(doc.keys_at(&ROOT, &heads4).collect_vec() == vec!["prop2".to_owned()]); assert_eq!(doc.length_at(&ROOT, &heads4), 1); assert!(doc.value_at(&ROOT, "prop1", &heads4)? == None); assert!(doc.value_at(&ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); assert!(doc.value_at(&ROOT, "prop3", &heads4)? == None); - assert!(doc.keys_at(&ROOT, &heads5) == vec!["prop2".to_owned(), "prop3".to_owned()]); + assert!( + doc.keys_at(&ROOT, &heads5).collect_vec() + == vec!["prop2".to_owned(), "prop3".to_owned()] + ); assert_eq!(doc.length_at(&ROOT, &heads5), 2); assert_eq!(doc.length(&ROOT), 2); assert!(doc.value_at(&ROOT, "prop1", &heads5)? == None); assert!(doc.value_at(&ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); assert!(doc.value_at(&ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); - assert!(doc.keys_at(&ROOT, &[]).is_empty()); + assert_eq!(doc.keys_at(&ROOT, &[]).len(), 0); assert_eq!(doc.length_at(&ROOT, &[]), 0); assert!(doc.value_at(&ROOT, "prop1", &[])? == None); assert!(doc.value_at(&ROOT, "prop2", &[])? 
== None); diff --git a/automerge/src/keys.rs b/automerge/src/keys.rs new file mode 100644 index 00000000..ddb86c41 --- /dev/null +++ b/automerge/src/keys.rs @@ -0,0 +1,34 @@ +use crate::{types::Key, Automerge}; + +pub struct Keys<'a> { + index: usize, + keys: Vec, + doc: &'a Automerge, +} + +impl<'a> Keys<'a> { + pub(crate) fn new(doc: &'a Automerge, keys: Vec) -> Self { + Self { + index: 0, + keys, + doc, + } + } +} + +impl<'a> Iterator for Keys<'a> { + type Item = String; + + fn next(&mut self) -> Option { + let n = self.keys.get(self.index)?; + self.index += 1; + Some(self.doc.to_string(*n)) + } + + fn size_hint(&self) -> (usize, Option) { + let num_left = self.keys.len() - self.index; + (num_left, Some(num_left)) + } +} + +impl<'a> ExactSizeIterator for Keys<'a> {} diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index cd5f924c..2705de91 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -34,6 +34,7 @@ mod encoding; mod error; mod exid; mod indexed_cache; +mod keys; mod legacy; mod op_set; mod op_tree; @@ -50,6 +51,7 @@ pub use autocommit::AutoCommit; pub use change::{decode_change, Change}; pub use error::AutomergeError; pub use exid::ExId as ObjId; +pub use keys::Keys; pub use legacy::Change as ExpandedChange; pub use sync::{BloomFilter, SyncHave, SyncMessage, SyncState}; pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index d52b9219..9655ef93 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -1,6 +1,6 @@ use crate::exid::ExId; -use crate::AutomergeError; use crate::{Automerge, ChangeHash, Prop, Value}; +use crate::{AutomergeError, Keys}; use super::{CommitOptions, Transactable, TransactionInner}; @@ -134,11 +134,11 @@ impl<'a> Transactable for Transaction<'a> { .splice(self.doc, obj, pos, del, vals) } - fn keys(&self, obj: &ExId) -> Vec { + fn 
keys(&self, obj: &ExId) -> Keys { self.doc.keys(obj) } - fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec { + fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Keys { self.doc.keys_at(obj, heads) } diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index dbce1d14..5bfc759e 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -1,5 +1,5 @@ use crate::exid::ExId; -use crate::{AutomergeError, ChangeHash, Prop, Value}; +use crate::{AutomergeError, ChangeHash, Keys, Prop, Value}; use unicode_segmentation::UnicodeSegmentation; /// A way of mutating a document within a single change. @@ -68,10 +68,10 @@ pub trait Transactable { } /// Get the keys of the given object, it should be a map. - fn keys(&self, obj: &ExId) -> Vec; + fn keys(&self, obj: &ExId) -> Keys; /// Get the keys of the given object at a point in history. - fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec; + fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Keys; /// Get the length of the given object. fn length(&self, obj: &ExId) -> usize; From f51e44c211f301cac8764bcf21902734608a01ee Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 25 Feb 2022 12:06:15 +0000 Subject: [PATCH 102/730] Update keys iterator to iterate at the tree level No more big vec allocation now! 
--- automerge-wasm/src/lib.rs | 11 ++-- automerge/src/autocommit.rs | 7 +-- automerge/src/automerge.rs | 23 +++++---- automerge/src/keys.rs | 31 ++++-------- automerge/src/keys_at.rs | 21 ++++++++ automerge/src/lib.rs | 2 + automerge/src/op_set.rs | 22 +++++++- automerge/src/op_tree.rs | 15 +++++- automerge/src/query.rs | 4 +- automerge/src/query/keys.rs | 39 +++++++++------ automerge/src/query/keys_at.rs | 50 +++++++++++-------- .../src/transaction/manual_transaction.rs | 7 +-- automerge/src/transaction/transactable.rs | 7 +-- 13 files changed, 149 insertions(+), 90 deletions(-) create mode 100644 automerge/src/keys_at.rs diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index b0925d8e..e040a029 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -104,12 +104,13 @@ impl Automerge { pub fn keys(&mut self, obj: JsValue, heads: Option) -> Result { let obj = self.import(obj)?; let result = if let Some(heads) = get_heads(heads) { - self.0.keys_at(&obj, &heads) + self.0 + .keys_at(&obj, &heads) + .map(|s| JsValue::from_str(&s)) + .collect() } else { - self.0.keys(&obj) - } - .map(|s| JsValue::from_str(&s)) - .collect(); + self.0.keys(&obj).map(|s| JsValue::from_str(&s)).collect() + }; Ok(result) } diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 1af5d888..aeb87c00 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -1,11 +1,12 @@ use crate::exid::ExId; +use crate::op_set::B; use crate::transaction::{CommitOptions, Transactable}; use crate::types::Patch; use crate::{ change::export_change, transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, }; -use crate::{Keys, SyncMessage, SyncState}; +use crate::{Keys, KeysAt, SyncMessage, SyncState}; /// An automerge document that automatically manages transactions. 
#[derive(Debug, Clone)] @@ -291,11 +292,11 @@ impl Transactable for AutoCommit { // PropAt::() // NthAt::() - fn keys(&self, obj: &ExId) -> Keys { + fn keys(&self, obj: &ExId) -> Keys<{ B }> { self.doc.keys(obj) } - fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Keys { + fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt<{ B }> { self.doc.keys_at(obj, heads) } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index d635c62e..190bff89 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -4,6 +4,7 @@ use crate::change::encode_document; use crate::exid::ExId; use crate::keys::Keys; use crate::op_set::OpSet; +use crate::op_set::B; use crate::transaction::{ CommitOptions, Transaction, TransactionFailure, TransactionInner, TransactionResult, TransactionSuccess, @@ -12,6 +13,7 @@ use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, ScalarValue, Value, }; +use crate::KeysAt; use crate::{legacy, query, types, ObjType}; use crate::{AutomergeError, Change, Prop}; use serde::Serialize; @@ -190,30 +192,29 @@ impl Automerge { /// /// For a map this returns the keys of the map. /// For a list this returns the element ids (opids) encoded as strings. - pub fn keys(&self, obj: &ExId) -> Keys { + pub fn keys(&self, obj: &ExId) -> Keys<{ B }> { if let Ok(obj) = self.exid_to_obj(obj) { - let q = self.ops.search(obj, query::Keys::new()); - Keys::new(self, q.keys) + let iter_keys = self.ops.keys(obj); + Keys::new(self, iter_keys) } else { - Keys::new(self, vec![]) + Keys::new(self, None) } } /// Historical version of [`keys`](Self::keys). 
- pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Keys { + pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt<{ B }> { if let Ok(obj) = self.exid_to_obj(obj) { let clock = self.clock_at(heads); - let q = self.ops.search(obj, query::KeysAt::new(clock)); - Keys::new(self, q.keys) + KeysAt::new(self, self.ops.keys_at(obj, clock)) } else { - Keys::new(self, vec![]) + KeysAt::new(self, None) } } pub fn length(&self, obj: &ExId) -> usize { if let Ok(inner_obj) = self.exid_to_obj(obj) { match self.ops.object_type(&inner_obj) { - Some(ObjType::Map) | Some(ObjType::Table) => self.keys(obj).len(), + Some(ObjType::Map) | Some(ObjType::Table) => self.keys(obj).count(), Some(ObjType::List) | Some(ObjType::Text) => { self.ops.search(inner_obj, query::Len::new()).len } @@ -228,7 +229,7 @@ impl Automerge { if let Ok(inner_obj) = self.exid_to_obj(obj) { let clock = self.clock_at(heads); match self.ops.object_type(&inner_obj) { - Some(ObjType::Map) | Some(ObjType::Table) => self.keys_at(obj, heads).len(), + Some(ObjType::Map) | Some(ObjType::Table) => self.keys_at(obj, heads).count(), Some(ObjType::List) | Some(ObjType::Text) => { self.ops.search(inner_obj, query::LenAt::new(clock)).len } @@ -1086,7 +1087,7 @@ mod tests { assert!(doc.value_at(&ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); assert!(doc.value_at(&ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); - assert_eq!(doc.keys_at(&ROOT, &[]).len(), 0); + assert_eq!(doc.keys_at(&ROOT, &[]).count(), 0); assert_eq!(doc.length_at(&ROOT, &[]), 0); assert!(doc.value_at(&ROOT, "prop1", &[])? == None); assert!(doc.value_at(&ROOT, "prop2", &[])? 
== None); diff --git a/automerge/src/keys.rs b/automerge/src/keys.rs index ddb86c41..1d152b6b 100644 --- a/automerge/src/keys.rs +++ b/automerge/src/keys.rs @@ -1,34 +1,21 @@ -use crate::{types::Key, Automerge}; +use crate::{query::IterKeys, Automerge}; -pub struct Keys<'a> { - index: usize, - keys: Vec, +pub struct Keys<'a, 'k, const B: usize> { + keys: Option>, doc: &'a Automerge, } -impl<'a> Keys<'a> { - pub(crate) fn new(doc: &'a Automerge, keys: Vec) -> Self { - Self { - index: 0, - keys, - doc, - } +impl<'a, 'k, const B: usize> Keys<'a, 'k, B> { + pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { + Self { keys, doc } } } -impl<'a> Iterator for Keys<'a> { +impl<'a, 'k, const B: usize> Iterator for Keys<'a, 'k, B> { type Item = String; fn next(&mut self) -> Option { - let n = self.keys.get(self.index)?; - self.index += 1; - Some(self.doc.to_string(*n)) - } - - fn size_hint(&self) -> (usize, Option) { - let num_left = self.keys.len() - self.index; - (num_left, Some(num_left)) + let key = self.keys.as_mut()?.next()?; + Some(self.doc.to_string(key)) } } - -impl<'a> ExactSizeIterator for Keys<'a> {} diff --git a/automerge/src/keys_at.rs b/automerge/src/keys_at.rs new file mode 100644 index 00000000..be43aff1 --- /dev/null +++ b/automerge/src/keys_at.rs @@ -0,0 +1,21 @@ +use crate::{query::IterKeysAt, Automerge}; + +pub struct KeysAt<'a, 'k, const B: usize> { + keys: Option>, + doc: &'a Automerge, +} + +impl<'a, 'k, const B: usize> KeysAt<'a, 'k, B> { + pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { + Self { keys, doc } + } +} + +impl<'a, 'k, const B: usize> Iterator for KeysAt<'a, 'k, B> { + type Item = String; + + fn next(&mut self) -> Option { + let key = self.keys.as_mut()?.next()?; + Some(self.doc.to_string(key)) + } +} diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 2705de91..59714ac3 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -35,6 +35,7 @@ mod error; mod exid; mod indexed_cache; mod keys; +mod 
keys_at; mod legacy; mod op_set; mod op_tree; @@ -52,6 +53,7 @@ pub use change::{decode_change, Change}; pub use error::AutomergeError; pub use exid::ExId as ObjId; pub use keys::Keys; +pub use keys_at::KeysAt; pub use legacy::Change as ExpandedChange; pub use sync::{BloomFilter, SyncHave, SyncMessage, SyncState}; pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 49310bc6..804ab8ca 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -1,13 +1,15 @@ +use crate::clock::Clock; use crate::indexed_cache::IndexedCache; use crate::op_tree::OpTreeInternal; -use crate::query::TreeQuery; +use crate::query::{self, TreeQuery}; use crate::types::{ActorId, Key, ObjId, Op, OpId, OpType}; use crate::ObjType; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::HashMap; -pub(crate) type OpSet = OpSetInternal<16>; +pub(crate) const B: usize = 16; +pub(crate) type OpSet = OpSetInternal; #[derive(Debug, Clone)] pub(crate) struct OpSetInternal { @@ -41,6 +43,22 @@ impl OpSetInternal { } } + pub fn keys(&self, obj: ObjId) -> Option> { + if let Some((_typ, tree)) = self.trees.get(&obj) { + tree.keys() + } else { + None + } + } + + pub fn keys_at(&self, obj: ObjId, clock: Clock) -> Option> { + if let Some((_typ, tree)) = self.trees.get(&obj) { + tree.keys_at(clock) + } else { + None + } + } + pub fn search(&self, obj: ObjId, query: Q) -> Q where Q: TreeQuery, diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index ed0e9060..d9d86d78 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -5,8 +5,11 @@ use std::{ }; pub(crate) use crate::op_set::OpSetMetadata; -use crate::query::{Index, QueryResult, TreeQuery}; use crate::types::{Op, OpId}; +use crate::{ + clock::Clock, + query::{self, Index, QueryResult, TreeQuery}, +}; use std::collections::HashSet; #[allow(dead_code)] @@ -36,6 +39,16 @@ impl OpTreeInternal { self.root_node.as_ref().map_or(0, 
|n| n.len()) } + pub fn keys(&self) -> Option> { + self.root_node.as_ref().map(query::IterKeys::new) + } + + pub fn keys_at(&self, clock: Clock) -> Option> { + self.root_node + .as_ref() + .map(|root| query::IterKeysAt::new(root, clock)) + } + pub fn search(&self, mut query: Q, m: &OpSetMetadata) -> Q where Q: TreeQuery, diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 84a70c49..f9a303a6 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -20,8 +20,8 @@ mod prop_at; mod seek_op; pub(crate) use insert::InsertNth; -pub(crate) use keys::Keys; -pub(crate) use keys_at::KeysAt; +pub(crate) use keys::IterKeys; +pub(crate) use keys_at::IterKeysAt; pub(crate) use len::Len; pub(crate) use len_at::LenAt; pub(crate) use list_vals::ListVals; diff --git a/automerge/src/query/keys.rs b/automerge/src/query/keys.rs index 57b90526..7f1b8781 100644 --- a/automerge/src/query/keys.rs +++ b/automerge/src/query/keys.rs @@ -1,29 +1,36 @@ use crate::op_tree::OpTreeNode; -use crate::query::{QueryResult, TreeQuery}; use crate::types::Key; use std::fmt::Debug; -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct Keys { - pub keys: Vec, +#[derive(Debug)] +pub(crate) struct IterKeys<'a, const B: usize> { + index: usize, + last_key: Option, + root_child: &'a OpTreeNode, } -impl Keys { - pub fn new() -> Self { - Keys { keys: vec![] } +impl<'a, const B: usize> IterKeys<'a, B> { + pub(crate) fn new(root_child: &'a OpTreeNode) -> Self { + Self { + index: 0, + last_key: None, + root_child, + } } } -impl TreeQuery for Keys { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { - let mut last = None; - for i in 0..child.len() { - let op = child.get(i).unwrap(); - if Some(op.key) != last && op.visible() { - self.keys.push(op.key); - last = Some(op.key); +impl<'a, const B: usize> Iterator for IterKeys<'a, B> { + type Item = Key; + + fn next(&mut self) -> Option { + for i in self.index..self.root_child.len() { + let op = self.root_child.get(i)?; + self.index 
+= 1; + if Some(op.key) != self.last_key && op.visible() { + self.last_key = Some(op.key); + return Some(op.key); } } - QueryResult::Finish + None } } diff --git a/automerge/src/query/keys_at.rs b/automerge/src/query/keys_at.rs index 81c8ba86..878c7ce1 100644 --- a/automerge/src/query/keys_at.rs +++ b/automerge/src/query/keys_at.rs @@ -1,36 +1,42 @@ -use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::types::{Clock, Key, Op}; +use crate::op_tree::OpTreeNode; +use crate::query::VisWindow; +use crate::types::{Clock, Key}; use std::fmt::Debug; -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct KeysAt { +#[derive(Debug)] +pub(crate) struct IterKeysAt<'a, const B: usize> { clock: Clock, - pub keys: Vec, - last: Option, window: VisWindow, - pos: usize, + index: usize, + last_key: Option, + root_child: &'a OpTreeNode, } -impl KeysAt { - pub fn new(clock: Clock) -> Self { - KeysAt { +impl<'a, const B: usize> IterKeysAt<'a, B> { + pub(crate) fn new(root_child: &'a OpTreeNode, clock: Clock) -> Self { + Self { clock, - pos: 0, - last: None, - keys: vec![], - window: Default::default(), + window: VisWindow::default(), + index: 0, + last_key: None, + root_child, } } } -impl TreeQuery for KeysAt { - fn query_element(&mut self, op: &Op) -> QueryResult { - let visible = self.window.visible_at(op, self.pos, &self.clock); - if Some(op.key) != self.last && visible { - self.keys.push(op.key); - self.last = Some(op.key); +impl<'a, const B: usize> Iterator for IterKeysAt<'a, B> { + type Item = Key; + + fn next(&mut self) -> Option { + for i in self.index..self.root_child.len() { + let op = self.root_child.get(i)?; + let visible = self.window.visible_at(op, self.index, &self.clock); + self.index += 1; + if Some(op.key) != self.last_key && visible { + self.last_key = Some(op.key); + return Some(op.key); + } } - self.pos += 1; - QueryResult::Next + None } } diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs 
index 9655ef93..5a4a25af 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -1,5 +1,6 @@ use crate::exid::ExId; -use crate::{Automerge, ChangeHash, Prop, Value}; +use crate::op_set::B; +use crate::{Automerge, ChangeHash, KeysAt, Prop, Value}; use crate::{AutomergeError, Keys}; use super::{CommitOptions, Transactable, TransactionInner}; @@ -134,11 +135,11 @@ impl<'a> Transactable for Transaction<'a> { .splice(self.doc, obj, pos, del, vals) } - fn keys(&self, obj: &ExId) -> Keys { + fn keys(&self, obj: &ExId) -> Keys<{ B }> { self.doc.keys(obj) } - fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Keys { + fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt<{ B }> { self.doc.keys_at(obj, heads) } diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 5bfc759e..48865add 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -1,5 +1,6 @@ use crate::exid::ExId; -use crate::{AutomergeError, ChangeHash, Keys, Prop, Value}; +use crate::op_set::B; +use crate::{AutomergeError, ChangeHash, Keys, KeysAt, Prop, Value}; use unicode_segmentation::UnicodeSegmentation; /// A way of mutating a document within a single change. @@ -68,10 +69,10 @@ pub trait Transactable { } /// Get the keys of the given object, it should be a map. - fn keys(&self, obj: &ExId) -> Keys; + fn keys(&self, obj: &ExId) -> Keys<{ B }>; /// Get the keys of the given object at a point in history. - fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Keys; + fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt<{ B }>; /// Get the length of the given object. 
fn length(&self, obj: &ExId) -> usize; From 989310866f1ea0e46196d190291046c5c6d69e82 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 25 Feb 2022 16:50:06 +0000 Subject: [PATCH 103/730] Add DoubleEndedIterator for Keys --- automerge/src/automerge.rs | 50 +++++++++++++++++++++++++++++++++++++ automerge/src/keys.rs | 7 ++++++ automerge/src/query/keys.rs | 20 ++++++++++++++- 3 files changed, 76 insertions(+), 1 deletion(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 190bff89..17cfdd0f 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -872,6 +872,7 @@ pub struct SpanInfo { #[cfg(test)] mod tests { use itertools::Itertools; + use pretty_assertions::assert_eq; use super::*; use crate::transaction::Transactable; @@ -1159,4 +1160,53 @@ mod tests { Ok(()) } + + #[test] + fn keys_iter() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + tx.set(&ROOT, "a", 3).unwrap(); + tx.set(&ROOT, "b", 4).unwrap(); + tx.set(&ROOT, "c", 5).unwrap(); + tx.set(&ROOT, "d", 6).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.set(&ROOT, "a", 7).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.set(&ROOT, "a", 8).unwrap(); + tx.set(&ROOT, "d", 9).unwrap(); + tx.commit(); + assert_eq!(doc.keys(&ROOT).count(), 4); + + let mut keys = doc.keys(&ROOT); + assert_eq!(keys.next(), Some("a".into())); + assert_eq!(keys.next(), Some("b".into())); + assert_eq!(keys.next(), Some("c".into())); + assert_eq!(keys.next(), Some("d".into())); + assert_eq!(keys.next(), None); + + let mut keys = doc.keys(&ROOT); + assert_eq!(keys.next_back(), Some("d".into())); + assert_eq!(keys.next_back(), Some("c".into())); + assert_eq!(keys.next_back(), Some("b".into())); + assert_eq!(keys.next_back(), Some("a".into())); + assert_eq!(keys.next_back(), None); + + let mut keys = doc.keys(&ROOT); + assert_eq!(keys.next(), Some("a".into())); + assert_eq!(keys.next_back(), Some("d".into())); + 
assert_eq!(keys.next_back(), Some("c".into())); + assert_eq!(keys.next_back(), Some("b".into())); + assert_eq!(keys.next_back(), None); + + let mut keys = doc.keys(&ROOT); + assert_eq!(keys.next_back(), Some("d".into())); + assert_eq!(keys.next(), Some("a".into())); + assert_eq!(keys.next(), Some("b".into())); + assert_eq!(keys.next(), Some("c".into())); + assert_eq!(keys.next(), None); + let keys = doc.keys(&ROOT); + assert_eq!(keys.collect::>(), vec!["a", "b", "c", "d"]); + } } diff --git a/automerge/src/keys.rs b/automerge/src/keys.rs index 1d152b6b..3381025c 100644 --- a/automerge/src/keys.rs +++ b/automerge/src/keys.rs @@ -19,3 +19,10 @@ impl<'a, 'k, const B: usize> Iterator for Keys<'a, 'k, B> { Some(self.doc.to_string(key)) } } + +impl<'a, 'k, const B: usize> DoubleEndedIterator for Keys<'a, 'k, B> { + fn next_back(&mut self) -> Option { + let key = self.keys.as_mut()?.next_back()?; + Some(self.doc.to_string(key)) + } +} diff --git a/automerge/src/query/keys.rs b/automerge/src/query/keys.rs index 7f1b8781..18643ab7 100644 --- a/automerge/src/query/keys.rs +++ b/automerge/src/query/keys.rs @@ -6,6 +6,8 @@ use std::fmt::Debug; pub(crate) struct IterKeys<'a, const B: usize> { index: usize, last_key: Option, + index_back: usize, + last_key_back: Option, root_child: &'a OpTreeNode, } @@ -14,6 +16,8 @@ impl<'a, const B: usize> IterKeys<'a, B> { Self { index: 0, last_key: None, + index_back: root_child.len(), + last_key_back: None, root_child, } } @@ -23,7 +27,7 @@ impl<'a, const B: usize> Iterator for IterKeys<'a, B> { type Item = Key; fn next(&mut self) -> Option { - for i in self.index..self.root_child.len() { + for i in self.index..self.index_back { let op = self.root_child.get(i)?; self.index += 1; if Some(op.key) != self.last_key && op.visible() { @@ -34,3 +38,17 @@ impl<'a, const B: usize> Iterator for IterKeys<'a, B> { None } } + +impl<'a, const B: usize> DoubleEndedIterator for IterKeys<'a, B> { + fn next_back(&mut self) -> Option { + for i in 
(self.index..self.index_back).rev() { + let op = self.root_child.get(i)?; + self.index_back -= 1; + if Some(op.key) != self.last_key_back && op.visible() { + self.last_key_back = Some(op.key); + return Some(op.key); + } + } + None + } +} From 4ff456cdcccc9a36ec03736cb05b2041bcd9cc0a Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 25 Feb 2022 16:55:18 +0000 Subject: [PATCH 104/730] Update keys to use map --- automerge/src/keys.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/automerge/src/keys.rs b/automerge/src/keys.rs index 3381025c..15c98dfd 100644 --- a/automerge/src/keys.rs +++ b/automerge/src/keys.rs @@ -15,14 +15,18 @@ impl<'a, 'k, const B: usize> Iterator for Keys<'a, 'k, B> { type Item = String; fn next(&mut self) -> Option { - let key = self.keys.as_mut()?.next()?; - Some(self.doc.to_string(key)) + self.keys + .as_mut()? + .next() + .map(|key| self.doc.to_string(key)) } } impl<'a, 'k, const B: usize> DoubleEndedIterator for Keys<'a, 'k, B> { fn next_back(&mut self) -> Option { - let key = self.keys.as_mut()?.next_back()?; - Some(self.doc.to_string(key)) + self.keys + .as_mut()? + .next_back() + .map(|key| self.doc.to_string(key)) } } From 6f2536c2324a8174b8b465e4a6cd98e4eb1511ab Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 25 Feb 2022 16:55:49 +0000 Subject: [PATCH 105/730] Make keysat double ended --- automerge/src/keys_at.rs | 15 +++++++++++++-- automerge/src/query/keys_at.rs | 21 ++++++++++++++++++++- 2 files changed, 33 insertions(+), 3 deletions(-) diff --git a/automerge/src/keys_at.rs b/automerge/src/keys_at.rs index be43aff1..dfdfa572 100644 --- a/automerge/src/keys_at.rs +++ b/automerge/src/keys_at.rs @@ -15,7 +15,18 @@ impl<'a, 'k, const B: usize> Iterator for KeysAt<'a, 'k, B> { type Item = String; fn next(&mut self) -> Option { - let key = self.keys.as_mut()?.next()?; - Some(self.doc.to_string(key)) + self.keys + .as_mut()? 
+ .next() + .map(|key| self.doc.to_string(key)) + } +} + +impl<'a, 'k, const B: usize> DoubleEndedIterator for KeysAt<'a, 'k, B> { + fn next_back(&mut self) -> Option { + self.keys + .as_mut()? + .next() + .map(|key| self.doc.to_string(key)) } } diff --git a/automerge/src/query/keys_at.rs b/automerge/src/query/keys_at.rs index 878c7ce1..fb077648 100644 --- a/automerge/src/query/keys_at.rs +++ b/automerge/src/query/keys_at.rs @@ -9,6 +9,8 @@ pub(crate) struct IterKeysAt<'a, const B: usize> { window: VisWindow, index: usize, last_key: Option, + index_back: usize, + last_key_back: Option, root_child: &'a OpTreeNode, } @@ -19,6 +21,8 @@ impl<'a, const B: usize> IterKeysAt<'a, B> { window: VisWindow::default(), index: 0, last_key: None, + index_back: root_child.len(), + last_key_back: None, root_child, } } @@ -30,7 +34,7 @@ impl<'a, const B: usize> Iterator for IterKeysAt<'a, B> { fn next(&mut self) -> Option { for i in self.index..self.root_child.len() { let op = self.root_child.get(i)?; - let visible = self.window.visible_at(op, self.index, &self.clock); + let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; if Some(op.key) != self.last_key && visible { self.last_key = Some(op.key); @@ -40,3 +44,18 @@ impl<'a, const B: usize> Iterator for IterKeysAt<'a, B> { None } } + +impl<'a, const B: usize> DoubleEndedIterator for IterKeysAt<'a, B> { + fn next_back(&mut self) -> Option { + for i in self.index..self.index_back { + let op = self.root_child.get(i)?; + let visible = self.window.visible_at(op, i, &self.clock); + self.index_back -= 1; + if Some(op.key) != self.last_key_back && visible { + self.last_key_back = Some(op.key); + return Some(op.key); + } + } + None + } +} From f8af94b3173b9b0873382830a19a6691e670c6a4 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 25 Feb 2022 17:02:16 +0000 Subject: [PATCH 106/730] Move B to internal Keys --- automerge/src/autocommit.rs | 5 ++--- automerge/src/automerge.rs | 5 ++--- automerge/src/keys.rs | 9 
+++++---- automerge/src/keys_at.rs | 9 +++++---- automerge/src/transaction/manual_transaction.rs | 5 ++--- automerge/src/transaction/transactable.rs | 5 ++--- 6 files changed, 18 insertions(+), 20 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index aeb87c00..3d833266 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -1,5 +1,4 @@ use crate::exid::ExId; -use crate::op_set::B; use crate::transaction::{CommitOptions, Transactable}; use crate::types::Patch; use crate::{ @@ -292,11 +291,11 @@ impl Transactable for AutoCommit { // PropAt::() // NthAt::() - fn keys(&self, obj: &ExId) -> Keys<{ B }> { + fn keys(&self, obj: &ExId) -> Keys { self.doc.keys(obj) } - fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt<{ B }> { + fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt { self.doc.keys_at(obj, heads) } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 17cfdd0f..e2308454 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -4,7 +4,6 @@ use crate::change::encode_document; use crate::exid::ExId; use crate::keys::Keys; use crate::op_set::OpSet; -use crate::op_set::B; use crate::transaction::{ CommitOptions, Transaction, TransactionFailure, TransactionInner, TransactionResult, TransactionSuccess, @@ -192,7 +191,7 @@ impl Automerge { /// /// For a map this returns the keys of the map. /// For a list this returns the element ids (opids) encoded as strings. - pub fn keys(&self, obj: &ExId) -> Keys<{ B }> { + pub fn keys(&self, obj: &ExId) -> Keys { if let Ok(obj) = self.exid_to_obj(obj) { let iter_keys = self.ops.keys(obj); Keys::new(self, iter_keys) @@ -202,7 +201,7 @@ impl Automerge { } /// Historical version of [`keys`](Self::keys). 
- pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt<{ B }> { + pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt { if let Ok(obj) = self.exid_to_obj(obj) { let clock = self.clock_at(heads); KeysAt::new(self, self.ops.keys_at(obj, clock)) diff --git a/automerge/src/keys.rs b/automerge/src/keys.rs index 15c98dfd..90e799e1 100644 --- a/automerge/src/keys.rs +++ b/automerge/src/keys.rs @@ -1,17 +1,18 @@ +use crate::op_set::B; use crate::{query::IterKeys, Automerge}; -pub struct Keys<'a, 'k, const B: usize> { +pub struct Keys<'a, 'k> { keys: Option>, doc: &'a Automerge, } -impl<'a, 'k, const B: usize> Keys<'a, 'k, B> { +impl<'a, 'k> Keys<'a, 'k> { pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { Self { keys, doc } } } -impl<'a, 'k, const B: usize> Iterator for Keys<'a, 'k, B> { +impl<'a, 'k> Iterator for Keys<'a, 'k> { type Item = String; fn next(&mut self) -> Option { @@ -22,7 +23,7 @@ impl<'a, 'k, const B: usize> Iterator for Keys<'a, 'k, B> { } } -impl<'a, 'k, const B: usize> DoubleEndedIterator for Keys<'a, 'k, B> { +impl<'a, 'k> DoubleEndedIterator for Keys<'a, 'k> { fn next_back(&mut self) -> Option { self.keys .as_mut()? 
diff --git a/automerge/src/keys_at.rs b/automerge/src/keys_at.rs index dfdfa572..8ed0e4bb 100644 --- a/automerge/src/keys_at.rs +++ b/automerge/src/keys_at.rs @@ -1,17 +1,18 @@ +use crate::op_set::B; use crate::{query::IterKeysAt, Automerge}; -pub struct KeysAt<'a, 'k, const B: usize> { +pub struct KeysAt<'a, 'k> { keys: Option>, doc: &'a Automerge, } -impl<'a, 'k, const B: usize> KeysAt<'a, 'k, B> { +impl<'a, 'k> KeysAt<'a, 'k> { pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { Self { keys, doc } } } -impl<'a, 'k, const B: usize> Iterator for KeysAt<'a, 'k, B> { +impl<'a, 'k> Iterator for KeysAt<'a, 'k> { type Item = String; fn next(&mut self) -> Option { @@ -22,7 +23,7 @@ impl<'a, 'k, const B: usize> Iterator for KeysAt<'a, 'k, B> { } } -impl<'a, 'k, const B: usize> DoubleEndedIterator for KeysAt<'a, 'k, B> { +impl<'a, 'k> DoubleEndedIterator for KeysAt<'a, 'k> { fn next_back(&mut self) -> Option { self.keys .as_mut()? diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 5a4a25af..93dcf615 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -1,5 +1,4 @@ use crate::exid::ExId; -use crate::op_set::B; use crate::{Automerge, ChangeHash, KeysAt, Prop, Value}; use crate::{AutomergeError, Keys}; @@ -135,11 +134,11 @@ impl<'a> Transactable for Transaction<'a> { .splice(self.doc, obj, pos, del, vals) } - fn keys(&self, obj: &ExId) -> Keys<{ B }> { + fn keys(&self, obj: &ExId) -> Keys { self.doc.keys(obj) } - fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt<{ B }> { + fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt { self.doc.keys_at(obj, heads) } diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 48865add..93d65cd1 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -1,5 +1,4 @@ use 
crate::exid::ExId; -use crate::op_set::B; use crate::{AutomergeError, ChangeHash, Keys, KeysAt, Prop, Value}; use unicode_segmentation::UnicodeSegmentation; @@ -69,10 +68,10 @@ pub trait Transactable { } /// Get the keys of the given object, it should be a map. - fn keys(&self, obj: &ExId) -> Keys<{ B }>; + fn keys(&self, obj: &ExId) -> Keys; /// Get the keys of the given object at a point in history. - fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt<{ B }>; + fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt; /// Get the length of the given object. fn length(&self, obj: &ExId) -> usize; From d80a9c6746db7984300a40c0288e192c7892814f Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 25 Feb 2022 17:07:09 +0000 Subject: [PATCH 107/730] Rename IterKeys and IterKeysAt --- automerge/src/keys.rs | 6 +++--- automerge/src/keys_at.rs | 6 +++--- automerge/src/op_set.rs | 4 ++-- automerge/src/op_tree.rs | 8 ++++---- automerge/src/query.rs | 4 ++-- automerge/src/query/keys.rs | 8 ++++---- automerge/src/query/keys_at.rs | 8 ++++---- 7 files changed, 22 insertions(+), 22 deletions(-) diff --git a/automerge/src/keys.rs b/automerge/src/keys.rs index 90e799e1..89b20f90 100644 --- a/automerge/src/keys.rs +++ b/automerge/src/keys.rs @@ -1,13 +1,13 @@ use crate::op_set::B; -use crate::{query::IterKeys, Automerge}; +use crate::{query, Automerge}; pub struct Keys<'a, 'k> { - keys: Option>, + keys: Option>, doc: &'a Automerge, } impl<'a, 'k> Keys<'a, 'k> { - pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { + pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { Self { keys, doc } } } diff --git a/automerge/src/keys_at.rs b/automerge/src/keys_at.rs index 8ed0e4bb..42a2ec9c 100644 --- a/automerge/src/keys_at.rs +++ b/automerge/src/keys_at.rs @@ -1,13 +1,13 @@ use crate::op_set::B; -use crate::{query::IterKeysAt, Automerge}; +use crate::{query, Automerge}; pub struct KeysAt<'a, 'k> { - keys: Option>, + keys: Option>, doc: &'a Automerge, 
} impl<'a, 'k> KeysAt<'a, 'k> { - pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { + pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { Self { keys, doc } } } diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 804ab8ca..73b974c3 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -43,7 +43,7 @@ impl OpSetInternal { } } - pub fn keys(&self, obj: ObjId) -> Option> { + pub fn keys(&self, obj: ObjId) -> Option> { if let Some((_typ, tree)) = self.trees.get(&obj) { tree.keys() } else { @@ -51,7 +51,7 @@ impl OpSetInternal { } } - pub fn keys_at(&self, obj: ObjId, clock: Clock) -> Option> { + pub fn keys_at(&self, obj: ObjId, clock: Clock) -> Option> { if let Some((_typ, tree)) = self.trees.get(&obj) { tree.keys_at(clock) } else { diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index d9d86d78..5503d164 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -39,14 +39,14 @@ impl OpTreeInternal { self.root_node.as_ref().map_or(0, |n| n.len()) } - pub fn keys(&self) -> Option> { - self.root_node.as_ref().map(query::IterKeys::new) + pub fn keys(&self) -> Option> { + self.root_node.as_ref().map(query::Keys::new) } - pub fn keys_at(&self, clock: Clock) -> Option> { + pub fn keys_at(&self, clock: Clock) -> Option> { self.root_node .as_ref() - .map(|root| query::IterKeysAt::new(root, clock)) + .map(|root| query::KeysAt::new(root, clock)) } pub fn search(&self, mut query: Q, m: &OpSetMetadata) -> Q diff --git a/automerge/src/query.rs b/automerge/src/query.rs index f9a303a6..84a70c49 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -20,8 +20,8 @@ mod prop_at; mod seek_op; pub(crate) use insert::InsertNth; -pub(crate) use keys::IterKeys; -pub(crate) use keys_at::IterKeysAt; +pub(crate) use keys::Keys; +pub(crate) use keys_at::KeysAt; pub(crate) use len::Len; pub(crate) use len_at::LenAt; pub(crate) use list_vals::ListVals; diff --git a/automerge/src/query/keys.rs 
b/automerge/src/query/keys.rs index 18643ab7..f780effa 100644 --- a/automerge/src/query/keys.rs +++ b/automerge/src/query/keys.rs @@ -3,7 +3,7 @@ use crate::types::Key; use std::fmt::Debug; #[derive(Debug)] -pub(crate) struct IterKeys<'a, const B: usize> { +pub(crate) struct Keys<'a, const B: usize> { index: usize, last_key: Option, index_back: usize, @@ -11,7 +11,7 @@ pub(crate) struct IterKeys<'a, const B: usize> { root_child: &'a OpTreeNode, } -impl<'a, const B: usize> IterKeys<'a, B> { +impl<'a, const B: usize> Keys<'a, B> { pub(crate) fn new(root_child: &'a OpTreeNode) -> Self { Self { index: 0, @@ -23,7 +23,7 @@ impl<'a, const B: usize> IterKeys<'a, B> { } } -impl<'a, const B: usize> Iterator for IterKeys<'a, B> { +impl<'a, const B: usize> Iterator for Keys<'a, B> { type Item = Key; fn next(&mut self) -> Option { @@ -39,7 +39,7 @@ impl<'a, const B: usize> Iterator for IterKeys<'a, B> { } } -impl<'a, const B: usize> DoubleEndedIterator for IterKeys<'a, B> { +impl<'a, const B: usize> DoubleEndedIterator for Keys<'a, B> { fn next_back(&mut self) -> Option { for i in (self.index..self.index_back).rev() { let op = self.root_child.get(i)?; diff --git a/automerge/src/query/keys_at.rs b/automerge/src/query/keys_at.rs index fb077648..b5262ed6 100644 --- a/automerge/src/query/keys_at.rs +++ b/automerge/src/query/keys_at.rs @@ -4,7 +4,7 @@ use crate::types::{Clock, Key}; use std::fmt::Debug; #[derive(Debug)] -pub(crate) struct IterKeysAt<'a, const B: usize> { +pub(crate) struct KeysAt<'a, const B: usize> { clock: Clock, window: VisWindow, index: usize, @@ -14,7 +14,7 @@ pub(crate) struct IterKeysAt<'a, const B: usize> { root_child: &'a OpTreeNode, } -impl<'a, const B: usize> IterKeysAt<'a, B> { +impl<'a, const B: usize> KeysAt<'a, B> { pub(crate) fn new(root_child: &'a OpTreeNode, clock: Clock) -> Self { Self { clock, @@ -28,7 +28,7 @@ impl<'a, const B: usize> IterKeysAt<'a, B> { } } -impl<'a, const B: usize> Iterator for IterKeysAt<'a, B> { +impl<'a, const B: usize> 
Iterator for KeysAt<'a, B> { type Item = Key; fn next(&mut self) -> Option { @@ -45,7 +45,7 @@ impl<'a, const B: usize> Iterator for IterKeysAt<'a, B> { } } -impl<'a, const B: usize> DoubleEndedIterator for IterKeysAt<'a, B> { +impl<'a, const B: usize> DoubleEndedIterator for KeysAt<'a, B> { fn next_back(&mut self) -> Option { for i in self.index..self.index_back { let op = self.root_child.get(i)?; From dfb21ea8d686b79abf70ad603c71d425bd289c7a Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 28 Feb 2022 11:49:36 +0000 Subject: [PATCH 108/730] Add quickstart example using new transaction (#273) * Add quickstart example Also change ordering of transact_with arguments. This makes it more natural read: transact_with these commit options, doing this. --- automerge/examples/README.md | 7 ++++ automerge/examples/quickstart.rs | 56 ++++++++++++++++++++++++++++++++ automerge/src/automerge.rs | 2 +- automerge/src/change.rs | 2 +- 4 files changed, 65 insertions(+), 2 deletions(-) create mode 100644 automerge/examples/README.md create mode 100644 automerge/examples/quickstart.rs diff --git a/automerge/examples/README.md b/automerge/examples/README.md new file mode 100644 index 00000000..c9a9630e --- /dev/null +++ b/automerge/examples/README.md @@ -0,0 +1,7 @@ +# Automerge examples + +## Quickstart + +```shell +cargo run --example quickstart +``` diff --git a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs new file mode 100644 index 00000000..fd8b7871 --- /dev/null +++ b/automerge/examples/quickstart.rs @@ -0,0 +1,56 @@ +use automerge::transaction::CommitOptions; +use automerge::transaction::Transactable; +use automerge::{Automerge, ROOT}; +use automerge::{AutomergeError, Value}; + +// Based on https://automerge.github.io/docs/quickstart +fn main() { + let mut doc1 = Automerge::new(); + let (cards, card1) = doc1 + .transact_with::<_, _, AutomergeError, _>( + || CommitOptions::default().with_message("Add card".to_owned()), + |tx| { + let cards = 
tx.set(&ROOT, "cards", Value::list()).unwrap().unwrap(); + let card1 = tx.insert(&cards, 0, Value::map())?.unwrap(); + tx.set(&card1, "title", "Rewrite everything in Clojure")?; + tx.set(&card1, "done", false)?; + let card2 = tx.insert(&cards, 0, Value::map())?.unwrap(); + tx.set(&card2, "title", "Rewrite everything in Haskell")?; + tx.set(&card2, "done", false)?; + Ok((cards, card1)) + }, + ) + .unwrap() + .into_result(); + + let mut doc2 = Automerge::new(); + doc2.merge(&mut doc1).unwrap(); + + let binary = doc1.save().unwrap(); + let mut doc2 = Automerge::load(&binary).unwrap(); + + doc1.transact_with::<_, _, AutomergeError, _>( + || CommitOptions::default().with_message("Mark card as done".to_owned()), + |tx| { + tx.set(&card1, "done", true)?; + Ok(()) + }, + ) + .unwrap(); + + doc2.transact_with::<_, _, AutomergeError, _>( + || CommitOptions::default().with_message("Delete card".to_owned()), + |tx| { + tx.del(&cards, 0)?; + Ok(()) + }, + ) + .unwrap(); + + doc1.merge(&mut doc2).unwrap(); + + for change in doc1.get_changes(&[]) { + let length = doc1.length_at(&cards, &[change.hash]); + println!("{} {}", change.message().unwrap(), length); + } +} diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index b6447988..f5c8c98d 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -142,7 +142,7 @@ impl Automerge { } /// Like [`Self::transact`] but with a function for generating the commit options. 
- pub fn transact_with(&mut self, f: F, c: C) -> TransactionResult + pub fn transact_with(&mut self, c: C, f: F) -> TransactionResult where F: FnOnce(&mut Transaction) -> Result, C: FnOnce() -> CommitOptions, diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 67c56d6c..ae72a963 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -365,7 +365,7 @@ impl Change { self.start_op + (self.len() as u64) - 1 } - fn message(&self) -> Option { + pub fn message(&self) -> Option { let m = &self.bytes.uncompressed()[self.message.clone()]; if m.is_empty() { None From cffadafbd0a5ef982c8187b4194568bf5c0508e8 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 10:27:08 +0000 Subject: [PATCH 109/730] Stop collecting to vecs in save --- automerge/src/automerge.rs | 13 ++++--------- automerge/src/change.rs | 23 +++++------------------ automerge/src/columnar.rs | 16 +++++++--------- 3 files changed, 16 insertions(+), 36 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index f5c8c98d..861f1c1a 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -497,16 +497,11 @@ impl Automerge { } pub fn save(&mut self) -> Result, AutomergeError> { - // TODO - would be nice if I could pass an iterator instead of a collection here - let c: Vec<_> = self.history.iter().map(|c| c.decode()).collect(); - let ops: Vec<_> = self.ops.iter().cloned().collect(); + let heads = self.get_heads(); + let c = self.history.iter().map(|c| c.decode()); + let ops = self.ops.iter(); // TODO - can we make encode_document error free - let bytes = encode_document( - &c, - ops.as_slice(), - &self.ops.m.actors, - &self.ops.m.props.cache, - ); + let bytes = encode_document(heads, c, ops, &self.ops.m.actors, &self.ops.m.props.cache); if bytes.is_ok() { self.saved = self.get_heads().to_vec(); } diff --git a/automerge/src/change.rs b/automerge/src/change.rs index ae72a963..e34917c3 100644 --- a/automerge/src/change.rs +++ 
b/automerge/src/change.rs @@ -36,28 +36,15 @@ const BLOCK_TYPE_DEFLATE: u8 = 2; const CHUNK_START: usize = 8; const HASH_RANGE: Range = 4..8; -fn get_heads(changes: &[amp::Change]) -> HashSet { - changes.iter().fold(HashSet::new(), |mut acc, c| { - if let Some(h) = c.hash { - acc.insert(h); - } - for dep in &c.deps { - acc.remove(dep); - } - acc - }) -} - -pub(crate) fn encode_document( - changes: &[amp::Change], - doc_ops: &[Op], +pub(crate) fn encode_document<'a>( + heads: Vec, + changes: impl Iterator, + doc_ops: impl Iterator, actors_index: &IndexedCache, - props: &[String], + props: &'a [String], ) -> Result, AutomergeError> { let mut bytes: Vec = Vec::new(); - let heads = get_heads(changes); - let actors_map = actors_index.encode_index(); let actors = actors_index.sorted(); diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index 53a9d488..b10780ff 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -905,12 +905,12 @@ pub(crate) struct ChangeEncoder { impl ChangeEncoder { #[instrument(level = "debug", skip(changes, actors))] - pub fn encode_changes<'a, 'b, I>( + pub fn encode_changes<'a, I>( changes: I, actors: &'a IndexedCache, ) -> (Vec, Vec) where - I: IntoIterator, + I: IntoIterator, { let mut e = Self::new(); e.encode(changes, actors); @@ -931,9 +931,9 @@ impl ChangeEncoder { } } - fn encode<'a, 'b, 'c, I>(&'a mut self, changes: I, actors: &'b IndexedCache) + fn encode(&mut self, changes: I, actors: &IndexedCache) where - I: IntoIterator, + I: IntoIterator, { let mut index_by_hash: HashMap = HashMap::new(); for (index, change) in changes.into_iter().enumerate() { @@ -1010,17 +1010,15 @@ pub(crate) struct DocOpEncoder { succ: SuccEncoder, } -// FIXME - actors should not be mut here - impl DocOpEncoder { #[instrument(level = "debug", skip(ops, actors))] - pub(crate) fn encode_doc_ops<'a, I>( + pub(crate) fn encode_doc_ops<'a, 'b, 'c, I>( ops: I, actors: &'a [usize], - props: &'a [String], + props: &'b [String], ) -> 
(Vec, Vec) where - I: IntoIterator, + I: IntoIterator, { let mut e = Self::new(); e.encode(ops, actors, props); From 7a930db44d0ea1e8a2f126e3243736ba2762c431 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 10:45:25 +0000 Subject: [PATCH 110/730] Don't decode changes for save --- automerge/src/automerge.rs | 2 +- automerge/src/change.rs | 6 +++--- automerge/src/columnar.rs | 27 ++++++++++++++------------- 3 files changed, 18 insertions(+), 17 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 861f1c1a..b1d93f6e 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -498,7 +498,7 @@ impl Automerge { pub fn save(&mut self) -> Result, AutomergeError> { let heads = self.get_heads(); - let c = self.history.iter().map(|c| c.decode()); + let c = self.history.iter(); let ops = self.ops.iter(); // TODO - can we make encode_document error free let bytes = encode_document(heads, c, ops, &self.ops.m.actors, &self.ops.m.props.cache); diff --git a/automerge/src/change.rs b/automerge/src/change.rs index e34917c3..a225175d 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -36,10 +36,10 @@ const BLOCK_TYPE_DEFLATE: u8 = 2; const CHUNK_START: usize = 8; const HASH_RANGE: Range = 4..8; -pub(crate) fn encode_document<'a>( +pub(crate) fn encode_document<'a, 'b>( heads: Vec, - changes: impl Iterator, - doc_ops: impl Iterator, + changes: impl Iterator, + doc_ops: impl Iterator, actors_index: &IndexedCache, props: &'a [String], ) -> Result, AutomergeError> { diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index b10780ff..d7337359 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -11,7 +11,10 @@ use std::{ str, }; -use crate::types::{ActorId, ElemId, Key, ObjId, ObjType, Op, OpId, OpType, ScalarValue}; +use crate::{ + types::{ActorId, ElemId, Key, ObjId, ObjType, Op, OpId, OpType, ScalarValue}, + Change, +}; use crate::legacy as amp; use 
amp::SortedVec; @@ -905,12 +908,12 @@ pub(crate) struct ChangeEncoder { impl ChangeEncoder { #[instrument(level = "debug", skip(changes, actors))] - pub fn encode_changes<'a, I>( + pub fn encode_changes<'a, 'b, I>( changes: I, actors: &'a IndexedCache, ) -> (Vec, Vec) where - I: IntoIterator, + I: IntoIterator, { let mut e = Self::new(); e.encode(changes, actors); @@ -931,23 +934,21 @@ impl ChangeEncoder { } } - fn encode(&mut self, changes: I, actors: &IndexedCache) + fn encode<'a, I>(&mut self, changes: I, actors: &IndexedCache) where - I: IntoIterator, + I: IntoIterator, { let mut index_by_hash: HashMap = HashMap::new(); for (index, change) in changes.into_iter().enumerate() { - if let Some(hash) = change.hash { - index_by_hash.insert(hash, index); - } + index_by_hash.insert(change.hash, index); self.actor - .append_value(actors.lookup(&change.actor_id).unwrap()); //actors.iter().position(|a| a == &change.actor_id).unwrap()); + .append_value(actors.lookup(change.actor_id()).unwrap()); //actors.iter().position(|a| a == &change.actor_id).unwrap()); self.seq.append_value(change.seq); // FIXME iterops.count is crazy slow self.max_op - .append_value(change.start_op + change.operations.len() as u64 - 1); + .append_value(change.start_op + change.iter_ops().count() as u64 - 1); self.time.append_value(change.time as u64); - self.message.append_value(change.message.clone()); + self.message.append_value(change.message()); self.deps_num.append_value(change.deps.len()); for dep in &change.deps { if let Some(dep_index) = index_by_hash.get(dep) { @@ -961,8 +962,8 @@ impl ChangeEncoder { } } self.extra_len - .append_value(change.extra_bytes.len() << 4 | VALUE_TYPE_BYTES); - self.extra_raw.extend(&change.extra_bytes); + .append_value(change.extra_bytes().len() << 4 | VALUE_TYPE_BYTES); + self.extra_raw.extend(change.extra_bytes()); } } From 57cf8200ac83e30d3d01a68dd8675f636d93dbde Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 10:46:13 +0000 Subject: [PATCH 
111/730] Remove unnecessary to_vec --- automerge/src/automerge.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index b1d93f6e..7a6bf3f1 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -503,12 +503,11 @@ impl Automerge { // TODO - can we make encode_document error free let bytes = encode_document(heads, c, ops, &self.ops.m.actors, &self.ops.m.props.cache); if bytes.is_ok() { - self.saved = self.get_heads().to_vec(); + self.saved = self.get_heads(); } bytes } - // should this return an empty vec instead of None? pub fn save_incremental(&mut self) -> Vec { let changes = self.get_changes(self.saved.as_slice()); let mut bytes = vec![]; @@ -516,7 +515,7 @@ impl Automerge { bytes.extend(c.raw_bytes()); } if !bytes.is_empty() { - self.saved = self.get_heads().to_vec() + self.saved = self.get_heads() } bytes } From d2e33867f6ac4110f3f115bd262d2ac9b074f888 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 10:51:09 +0000 Subject: [PATCH 112/730] Update style --- automerge/src/change.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/automerge/src/change.rs b/automerge/src/change.rs index a225175d..b00de21f 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -66,8 +66,8 @@ pub(crate) fn encode_document<'a, 'b>( let (ops_bytes, ops_info) = DocOpEncoder::encode_doc_ops(doc_ops, &actors_map, props); - bytes.extend(&MAGIC_BYTES); - bytes.extend(vec![0, 0, 0, 0]); // we dont know the hash yet so fill in a fake + bytes.extend(MAGIC_BYTES); + bytes.extend([0, 0, 0, 0]); // we dont know the hash yet so fill in a fake bytes.push(BLOCK_TYPE_DOC); let mut chunk = Vec::new(); @@ -79,7 +79,7 @@ pub(crate) fn encode_document<'a, 'b>( } heads.len().encode(&mut chunk)?; - for head in heads.iter().sorted() { + for head in heads.iter() { chunk.write_all(&head.0).unwrap(); } From dfd3d27d4457408a9bc5bde75e54c2a042c2d1e5 Mon Sep 
17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 14:25:02 +0000 Subject: [PATCH 113/730] Don't clone value in splice --- automerge/src/transaction/inner.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index aaa26a99..4aec42de 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -296,7 +296,7 @@ impl TransactionInner { let mut results = Vec::new(); for v in vals { // insert() - let id = self.do_insert(doc, obj, pos, v.clone())?; + let id = self.do_insert(doc, obj, pos, v)?; if let Some(id) = id { results.push(doc.id_to_exid(id)); } From 2747d5bf2bd9b0575201cc563d20280792b63240 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 2 Mar 2022 11:05:48 -0500 Subject: [PATCH 114/730] move wasm to feature flag --- automerge-wasm/Cargo.toml | 2 +- automerge/Cargo.toml | 5 +++-- automerge/src/automerge.rs | 4 ++-- automerge/src/error.rs | 1 + automerge/src/lib.rs | 4 ++-- 5 files changed, 9 insertions(+), 7 deletions(-) diff --git a/automerge-wasm/Cargo.toml b/automerge-wasm/Cargo.toml index 8225d811..81c5a2a3 100644 --- a/automerge-wasm/Cargo.toml +++ b/automerge-wasm/Cargo.toml @@ -21,7 +21,7 @@ default = ["console_error_panic_hook"] [dependencies] console_error_panic_hook = { version = "^0.1", optional = true } # wee_alloc = { version = "^0.4", optional = true } -automerge = { path = "../automerge" } +automerge = { path = "../automerge", features=["wasm"] } js-sys = "^0.3" serde = "^1.0" serde_json = "^1.0" diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index 7d385301..03f7d9c6 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -8,6 +8,7 @@ license = "MIT" [features] optree-visualisation = ["dot"] +wasm = ["js-sys", "wasm-bindgen"] [dependencies] hex = "^0.4.3" @@ -26,8 +27,8 @@ tinyvec = { version = "^1.5.1", features = ["alloc"] } unicode-segmentation = "1.7.1" serde = { version = "^1.0", 
features=["derive"] } dot = { version = "0.1.4", optional = true } -js-sys = "^0.3" -wasm-bindgen = "^0.2" +js-sys = { version = "^0.3", optional = true } +wasm-bindgen = { version = "^0.2", optional = true } [dependencies.web-sys] version = "^0.3.55" diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index dd2fbd77..7ab2fe3a 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1133,8 +1133,8 @@ mod tests { assert!(doc.value_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10)); assert!(doc.length_at(&list, &heads3) == 2); - doc.dump(); - //log!("{:?}", doc.value_at(&list, 0, &heads3)?.unwrap().0); + //doc.dump(); + log!("{:?}", doc.value_at(&list, 0, &heads3)?.unwrap().0); assert!(doc.value_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30)); assert!(doc.value_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20)); diff --git a/automerge/src/error.rs b/automerge/src/error.rs index 8afca4f3..1a57a865 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -35,6 +35,7 @@ impl From for AutomergeError { } } +#[cfg(feature = "wasm")] impl From for wasm_bindgen::JsValue { fn from(err: AutomergeError) -> Self { js_sys::Error::new(&std::format!("{}", err)).into() diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 59714ac3..b67772a5 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -8,7 +8,7 @@ macro_rules! log { } } -#[cfg(target_family = "wasm")] +#[cfg(all(feature = "wasm", target_family = "wasm"))] #[macro_export] macro_rules! __log { ( $( $t:tt )* ) => { @@ -16,7 +16,7 @@ macro_rules! __log { } } -#[cfg(not(target_family = "wasm"))] +#[cfg(not(all(feature = "wasm", target_family = "wasm")))] #[macro_export] macro_rules! 
__log { ( $( $t:tt )* ) => { From 8eea9d7c0bc4a1b29075c2578207b8c812101b50 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 16:59:45 +0000 Subject: [PATCH 115/730] Always have an actor --- automerge/src/autocommit.rs | 8 ++---- automerge/src/automerge.rs | 57 +++++++++++++------------------------ automerge/tests/test.rs | 4 +-- 3 files changed, 24 insertions(+), 45 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 3d833266..f1881d86 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -40,14 +40,10 @@ impl AutoCommit { self.doc.set_actor(actor) } - pub fn get_actor(&mut self) -> ActorId { + pub fn get_actor(&self) -> &ActorId { self.doc.get_actor() } - pub fn maybe_get_actor(&self) -> Option { - self.doc.maybe_get_actor() - } - pub fn new_with_actor_id(actor: ActorId) -> Self { Self { doc: Automerge::new_with_actor_id(actor), @@ -57,7 +53,7 @@ impl AutoCommit { fn ensure_transaction_open(&mut self) { if self.transaction.is_none() { - let actor = self.doc.get_actor_index(); + let actor = self.doc.actor; let seq = self.doc.states.entry(actor).or_default().len() as u64 + 1; let mut deps = self.doc.get_heads(); diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index dd2fbd77..225d56ec 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -27,13 +27,13 @@ pub struct Automerge { pub(crate) deps: HashSet, pub(crate) saved: Vec, pub(crate) ops: OpSet, - pub(crate) actor: Option, + pub(crate) actor: usize, pub(crate) max_op: u64, } impl Automerge { pub fn new() -> Self { - Automerge { + let mut am = Automerge { queue: vec![], history: vec![], history_index: HashMap::new(), @@ -41,40 +41,24 @@ impl Automerge { ops: Default::default(), deps: Default::default(), saved: Default::default(), - actor: None, + actor: 0, max_op: 0, - } + }; + am.set_random_actor(); + am } pub fn set_actor(&mut self, actor: ActorId) { - self.actor = 
Some(self.ops.m.actors.cache(actor)) + self.actor = self.ops.m.actors.cache(actor) } - fn random_actor(&mut self) -> ActorId { + fn set_random_actor(&mut self) { let actor = ActorId::from(uuid::Uuid::new_v4().as_bytes().to_vec()); - self.actor = Some(self.ops.m.actors.cache(actor.clone())); - actor + self.actor = self.ops.m.actors.cache(actor); } - pub fn get_actor(&mut self) -> ActorId { - if let Some(actor) = self.actor { - self.ops.m.actors[actor].clone() - } else { - self.random_actor() - } - } - - pub fn maybe_get_actor(&self) -> Option { - self.actor.map(|i| self.ops.m.actors[i].clone()) - } - - pub(crate) fn get_actor_index(&mut self) -> usize { - if let Some(actor) = self.actor { - actor - } else { - self.random_actor(); - self.actor.unwrap() // random_actor always sets actor to is_some() - } + pub fn get_actor(&self) -> &ActorId { + &self.ops.m.actors[self.actor] } pub fn new_with_actor_id(actor: ActorId) -> Self { @@ -86,17 +70,16 @@ impl Automerge { ops: Default::default(), deps: Default::default(), saved: Default::default(), - actor: None, + actor: 0, max_op: 0, }; - am.actor = Some(am.ops.m.actors.cache(actor)); + am.actor = am.ops.m.actors.cache(actor); am } /// Start a transaction. 
pub fn transaction(&mut self) -> Transaction { - let actor = self.get_actor_index(); - + let actor = self.actor; let seq = self.states.entry(actor).or_default().len() as u64 + 1; let mut deps = self.get_heads(); if seq > 1 { @@ -165,7 +148,7 @@ impl Automerge { pub fn fork(&self) -> Self { let mut f = self.clone(); - f.actor = None; + f.set_random_actor(); f } @@ -663,11 +646,11 @@ impl Automerge { } pub fn get_last_local_change(&self) -> Option<&Change> { - if let Some(actor) = &self.actor { - let actor = &self.ops.m.actors[*actor]; - return self.history.iter().rev().find(|c| c.actor_id() == actor); - } - None + return self + .history + .iter() + .rev() + .find(|c| c.actor_id() == self.get_actor()); } pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 34a9777b..31e1353e 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -384,7 +384,7 @@ fn concurrent_insertions_at_different_list_positions() { let (actor1, actor2) = sorted_actors(); let mut doc1 = new_doc_with_actor(actor1); let mut doc2 = new_doc_with_actor(actor2); - assert!(doc1.maybe_get_actor().unwrap() < doc2.maybe_get_actor().unwrap()); + assert!(doc1.get_actor() < doc2.get_actor()); let list_id = doc1 .set(&automerge::ROOT, "list", automerge::Value::list()) @@ -419,7 +419,7 @@ fn concurrent_insertions_at_same_list_position() { let (actor1, actor2) = sorted_actors(); let mut doc1 = new_doc_with_actor(actor1); let mut doc2 = new_doc_with_actor(actor2); - assert!(doc1.maybe_get_actor().unwrap() < doc2.maybe_get_actor().unwrap()); + assert!(doc1.get_actor() < doc2.get_actor()); let list_id = doc1 .set(&automerge::ROOT, "birds", automerge::Value::list()) From 30e0748c1574c64297a4929dd617ada52740c897 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 17:02:26 +0000 Subject: [PATCH 116/730] Remove new_with_actor_id on documents --- automerge/src/autocommit.rs | 7 ------- 
automerge/src/automerge.rs | 16 ---------------- automerge/tests/helpers/mod.rs | 8 ++++++-- 3 files changed, 6 insertions(+), 25 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index f1881d86..901af906 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -44,13 +44,6 @@ impl AutoCommit { self.doc.get_actor() } - pub fn new_with_actor_id(actor: ActorId) -> Self { - Self { - doc: Automerge::new_with_actor_id(actor), - transaction: None, - } - } - fn ensure_transaction_open(&mut self) { if self.transaction.is_none() { let actor = self.doc.actor; diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 225d56ec..97be678a 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -61,22 +61,6 @@ impl Automerge { &self.ops.m.actors[self.actor] } - pub fn new_with_actor_id(actor: ActorId) -> Self { - let mut am = Automerge { - queue: vec![], - history: vec![], - history_index: HashMap::new(), - states: HashMap::new(), - ops: Default::default(), - deps: Default::default(), - saved: Default::default(), - actor: 0, - max_op: 0, - }; - am.actor = am.ops.m.actors.cache(actor); - am - } - /// Start a transaction. 
pub fn transaction(&mut self) -> Transaction { let actor = self.actor; diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index db089d92..5384c218 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -7,11 +7,15 @@ use std::{ use serde::ser::{SerializeMap, SerializeSeq}; pub fn new_doc() -> automerge::AutoCommit { - automerge::AutoCommit::new_with_actor_id(automerge::ActorId::random()) + let mut d = automerge::AutoCommit::new(); + d.set_actor(automerge::ActorId::random()); + d } pub fn new_doc_with_actor(actor: automerge::ActorId) -> automerge::AutoCommit { - automerge::AutoCommit::new_with_actor_id(actor) + let mut d = automerge::AutoCommit::new(); + d.set_actor(actor); + d } /// Returns two actor IDs, the first considered to be ordered before the second From 52eb1939502a84f74b12dfbecd3c87fd5d06590d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 17:20:44 +0000 Subject: [PATCH 117/730] Add custom actor enum to avoid caching an unused one --- automerge/src/autocommit.rs | 2 +- automerge/src/automerge.rs | 43 ++++++++++++++++++++++++------------- automerge/src/types.rs | 2 +- 3 files changed, 30 insertions(+), 17 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 901af906..54d6b920 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -46,7 +46,7 @@ impl AutoCommit { fn ensure_transaction_open(&mut self) { if self.transaction.is_none() { - let actor = self.doc.actor; + let actor = self.doc.get_actor_index(); let seq = self.doc.states.entry(actor).or_default().len() as u64 + 1; let mut deps = self.doc.get_heads(); diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 97be678a..9701a6eb 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -17,6 +17,12 @@ use crate::{legacy, query, types, ObjType}; use crate::{AutomergeError, Change, Prop}; use serde::Serialize; +#[derive(Debug, Clone)] 
+pub(crate) enum Actor { + Unused(ActorId), + Cached(usize), +} + /// An automerge document. #[derive(Debug, Clone)] pub struct Automerge { @@ -27,13 +33,13 @@ pub struct Automerge { pub(crate) deps: HashSet, pub(crate) saved: Vec, pub(crate) ops: OpSet, - pub(crate) actor: usize, + pub(crate) actor: Actor, pub(crate) max_op: u64, } impl Automerge { pub fn new() -> Self { - let mut am = Automerge { + Automerge { queue: vec![], history: vec![], history_index: HashMap::new(), @@ -41,29 +47,36 @@ impl Automerge { ops: Default::default(), deps: Default::default(), saved: Default::default(), - actor: 0, + actor: Actor::Unused(ActorId::random()), max_op: 0, - }; - am.set_random_actor(); - am + } } pub fn set_actor(&mut self, actor: ActorId) { - self.actor = self.ops.m.actors.cache(actor) - } - - fn set_random_actor(&mut self) { - let actor = ActorId::from(uuid::Uuid::new_v4().as_bytes().to_vec()); - self.actor = self.ops.m.actors.cache(actor); + self.actor = Actor::Unused(actor); } pub fn get_actor(&self) -> &ActorId { - &self.ops.m.actors[self.actor] + match &self.actor { + Actor::Unused(actor) => actor, + Actor::Cached(index) => self.ops.m.actors.get(*index), + } + } + + pub(crate) fn get_actor_index(&mut self) -> usize { + match &mut self.actor { + Actor::Unused(actor) => { + let index = self.ops.m.actors.cache(std::mem::take(actor)); + self.actor = Actor::Cached(index); + index + } + Actor::Cached(index) => *index, + } } /// Start a transaction. 
pub fn transaction(&mut self) -> Transaction { - let actor = self.actor; + let actor = self.get_actor_index(); let seq = self.states.entry(actor).or_default().len() as u64 + 1; let mut deps = self.get_heads(); if seq > 1 { @@ -132,7 +145,7 @@ impl Automerge { pub fn fork(&self) -> Self { let mut f = self.clone(); - f.set_random_actor(); + f.set_actor(ActorId::random()); f } diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 4494f6d9..64ba05e2 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -24,7 +24,7 @@ const HEAD_STR: &str = "_head"; // Note that change encoding relies on the Ord implementation for the ActorId being implemented in // terms of the lexicographic ordering of the underlying bytes. Be aware of this if you are // changing the ActorId implementation in ways which might affect the Ord implementation -#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord)] +#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord, Default)] #[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))] pub struct ActorId(TinyVec<[u8; 16]>); From 06241336feac61c421991d7d8edec0b4e54b61ff Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 17:22:26 +0000 Subject: [PATCH 118/730] Add with_actor for functional style --- automerge/src/autocommit.rs | 11 +++++++++-- automerge/src/automerge.rs | 8 +++++++- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 54d6b920..43f32345 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -35,9 +35,16 @@ impl AutoCommit { &self.doc } - pub fn set_actor(&mut self, actor: ActorId) { + pub fn with_actor(mut self, actor: ActorId) -> Self { self.ensure_transaction_closed(); - self.doc.set_actor(actor) + self.doc.set_actor(actor); + self + } + + pub fn set_actor(&mut self, actor: ActorId) -> &mut Self { + self.ensure_transaction_closed(); + self.doc.set_actor(actor); + self } pub fn 
get_actor(&self) -> &ActorId { diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 9701a6eb..b9c86606 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -52,8 +52,14 @@ impl Automerge { } } - pub fn set_actor(&mut self, actor: ActorId) { + pub fn with_actor(mut self, actor: ActorId) -> Self { self.actor = Actor::Unused(actor); + self + } + + pub fn set_actor(&mut self, actor: ActorId) -> &mut Self { + self.actor = Actor::Unused(actor); + self } pub fn get_actor(&self) -> &ActorId { From 8f2877a67ce98ced8130af9083fa9d9330e9277e Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 17:24:15 +0000 Subject: [PATCH 119/730] Fix wasm --- automerge-wasm/src/lib.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index e040a029..47f151ca 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -50,7 +50,7 @@ impl Automerge { let mut automerge = Automerge(self.0.clone()); if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); - automerge.0.set_actor(actor) + automerge.0.set_actor(actor); } Ok(automerge) } @@ -60,7 +60,7 @@ impl Automerge { let mut automerge = Automerge(self.0.fork()); if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); - automerge.0.set_actor(actor) + automerge.0.set_actor(actor); } Ok(automerge) } @@ -538,7 +538,7 @@ pub fn load(data: Uint8Array, actor: Option) -> Result Date: Wed, 2 Mar 2022 18:03:11 +0000 Subject: [PATCH 120/730] Add test for rolling back a transaction --- automerge/src/automerge.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index b9c86606..a7e36f4a 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1188,4 +1188,17 @@ mod tests { let keys = doc.keys(&ROOT); 
assert_eq!(keys.collect::>(), vec!["a", "b", "c", "d"]); } + + #[test] + fn rolling_back_transaction_has_no_effect() { + let mut doc = Automerge::new(); + let old_states = doc.states.clone(); + let bytes = doc.save().unwrap(); + let tx = doc.transaction(); + tx.rollback(); + let new_states = doc.states.clone(); + assert_eq!(old_states, new_states); + let new_bytes = doc.save().unwrap(); + assert_eq!(bytes, new_bytes); + } } From f6f6b5181d4f6471c0ec0ed19d12553f79024942 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 18:03:27 +0000 Subject: [PATCH 121/730] Fix rolling back of transaction infecting document --- automerge/src/automerge.rs | 2 +- automerge/src/indexed_cache.rs | 13 +++++++++++++ automerge/src/transaction/inner.rs | 7 +++++++ 3 files changed, 21 insertions(+), 1 deletion(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index a7e36f4a..773e387f 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -83,7 +83,7 @@ impl Automerge { /// Start a transaction. pub fn transaction(&mut self) -> Transaction { let actor = self.get_actor_index(); - let seq = self.states.entry(actor).or_default().len() as u64 + 1; + let seq = self.states.get(&actor).map_or(0, |v| v.len()) as u64 + 1; let mut deps = self.get_heads(); if seq > 1 { let last_hash = self.get_hash(actor, seq - 1).unwrap(); diff --git a/automerge/src/indexed_cache.rs b/automerge/src/indexed_cache.rs index b11f39ad..2b5811ba 100644 --- a/automerge/src/indexed_cache.rs +++ b/automerge/src/indexed_cache.rs @@ -43,6 +43,19 @@ where &self.cache[index] } + /// Remove the last inserted entry into this cache. + /// This is safe to do as it does not require reshuffling other entries. + /// + /// # Panics + /// + /// Panics on an empty cache. 
+ pub fn remove_last(&mut self) -> T { + let last = self.cache.len() - 1; + let t = self.cache.remove(last); + self.lookup.remove(&t); + t + } + pub fn sorted(&self) -> IndexedCache { let mut sorted = Self::new(); self.cache.iter().sorted().cloned().for_each(|item| { diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 4aec42de..91a601c1 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -1,3 +1,4 @@ +use crate::automerge::Actor; use crate::exid::ExId; use crate::query::{self, OpIdSearch}; use crate::types::{Key, ObjId, OpId}; @@ -46,6 +47,12 @@ impl TransactionInner { /// Undo the operations added in this transaction, returning the number of cancelled /// operations. pub fn rollback(self, doc: &mut Automerge) -> usize { + // remove the actor from the cache so that it doesn't end up in the saved document + if doc.states.get(&self.actor).is_none() { + let actor = doc.ops.m.actors.remove_last(); + doc.actor = Actor::Unused(actor); + } + let num = self.operations.len(); // remove in reverse order so sets are removed before makes etc... 
for op in self.operations.iter().rev() { From 0141bcdc8fbfc91adfe0cfda04f503f105f9e8be Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 2 Mar 2022 14:05:10 -0500 Subject: [PATCH 122/730] import cli --- Cargo.toml | 1 + automerge-cli/Cargo.toml | 28 ++++ automerge-cli/IDEAS.md | 58 +++++++ automerge-cli/src/change.rs | 238 +++++++++++++++++++++++++++++ automerge-cli/src/examine.rs | 46 ++++++ automerge-cli/src/export.rs | 154 +++++++++++++++++++ automerge-cli/src/import.rs | 108 +++++++++++++ automerge-cli/src/main.rs | 213 ++++++++++++++++++++++++++ automerge-cli/src/merge.rs | 58 +++++++ automerge-cli/tests/integration.rs | 93 +++++++++++ automerge/src/value.rs | 12 ++ 11 files changed, 1009 insertions(+) create mode 100644 automerge-cli/Cargo.toml create mode 100644 automerge-cli/IDEAS.md create mode 100644 automerge-cli/src/change.rs create mode 100644 automerge-cli/src/examine.rs create mode 100644 automerge-cli/src/export.rs create mode 100644 automerge-cli/src/import.rs create mode 100644 automerge-cli/src/main.rs create mode 100644 automerge-cli/src/merge.rs create mode 100644 automerge-cli/tests/integration.rs diff --git a/Cargo.toml b/Cargo.toml index 4510fefd..e1941120 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,6 +2,7 @@ members = [ "automerge", "automerge-wasm", + "automerge-cli", "edit-trace", ] diff --git a/automerge-cli/Cargo.toml b/automerge-cli/Cargo.toml new file mode 100644 index 00000000..1761e489 --- /dev/null +++ b/automerge-cli/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "automerge-cli" +version = "0.1.0" +authors = ["Alex Good "] +edition = "2018" +license = "MIT" + +[[bin]] +name = "automerge" +path = "src/main.rs" +bench = false +doc = false + +[dependencies] +clap = {version = "~3.1", features = ["derive"]} +serde_json = "^1.0" +anyhow = "1.0" +atty = "0.2" +thiserror = "1.0.16" +combine = "4.5.2" +maplit = "1.0.2" +colored_json = "2.1.0" +tracing-subscriber = "~0.3" + +automerge = { path = "../automerge" } + 
+[dev-dependencies] +duct = "0.13" diff --git a/automerge-cli/IDEAS.md b/automerge-cli/IDEAS.md new file mode 100644 index 00000000..d391fa51 --- /dev/null +++ b/automerge-cli/IDEAS.md @@ -0,0 +1,58 @@ + +### Some ideas of what this could look like + +```bash + $ automege export foo.mpl + { + "name": "bob", + "numbers": [ 1,2,3,4 ] + } + $ automerge export --format toml foo.mpl + name = "bob" + numbers = [ + 1.0, + 2.0, + 3.0, + 4.0 + ] + $ automerge import foo.json --out bar.mpl + $ automerge export foo.mpl | automerge import - > bar.mpl + $ automerge set foo.mpl "birds[3].name" "wren" + $ cat wren.json | automerge set foo.mpl "birds[3]" + $ automerge get foo.mpl "birds[3].name" + $ automerge union foo.mpl bar.mpl --out baz.mpl + $ automerge intersect foo.mpl bar.mpl --out baz.mpl + $ automerge union foo.mpl bar.mpl - > baz.mpl + $ cat foo.mpl bar.mpl | automerge union --out baz.mpl + $ cat foo.mpl bar.mpl | automerge union - > baz.mpl + $ automerge status foo.mpl + 1022 changes, 10942 operations, 47 objects + created: 2:31pm April 4, 2020 + actors: + 1111111: 124 changes, 1:01am April 6, 2020 + 2222222: 457 changes, 8:10pm April 4, 2020 + 3333333: 590 changes, 10:01pm May 2, 2020 + $ automerge log foo.mpl --actor 11111111 + aa88f76 : [ ] : Jan 1, 20202, "first commit" + 87fa8c1 : [ aa88f76, 971651 ] : Jan 2, 20202, "seccond commit" + 776aa5c : [ 87fa8c1 ] : Jan 2, 20202, "third commit" + $ automerge fork 776aa5c foo.mpl --out bar.mpl + $ automerge fork 776aa5c - < foo.mpl > bar.mpl + $ automerge diff foo.mpl bar.mpl + 182 changes in common: + + 87c162 + + 97ac42 + - ffac11 + - 1adaf1 + { + - "name":"bob", + + "name":"joe", + "numbers": [ + - 1, + - 2, + + 3, + 4 + ] + } + +``` diff --git a/automerge-cli/src/change.rs b/automerge-cli/src/change.rs new file mode 100644 index 00000000..eef6aade --- /dev/null +++ b/automerge-cli/src/change.rs @@ -0,0 +1,238 @@ +use automerge as am; +use combine::{parser::char as charparser, EasyParser, ParseError, Parser}; +use 
thiserror::Error; + +#[derive(Error, Debug)] +pub enum ChangeError { + #[error("Invalid change script: {message}")] + InvalidChangeScript { message: String }, + #[error("Error reading changes: {:?}", source)] + ErrReadingChanges { + #[source] + source: std::io::Error, + }, + #[error("Error loading changes: {:?}", source)] + ErrApplyingInitialChanges { + #[source] + source: am::AutomergeError, + }, + #[error("Error writing changes to output file: {:?}", source)] + ErrWritingChanges { + #[source] + source: std::io::Error, + }, +} + +#[derive(Debug)] +enum Op { + Set, + Insert, + Delete, + Increment, +} + +fn case_insensitive_string(s: &'static str) -> impl Parser +where + Input: combine::Stream, + Input::Error: combine::ParseError, +{ + charparser::string_cmp(s, |l, r| l.eq_ignore_ascii_case(&r)).map(|s| s.to_lowercase()) +} + +fn op_parser() -> impl combine::Parser +where + Input: combine::Stream, +{ + combine::choice(( + combine::attempt(case_insensitive_string("set")).map(|_| Op::Set), + combine::attempt(case_insensitive_string("insert")).map(|_| Op::Insert), + combine::attempt(case_insensitive_string("delete")).map(|_| Op::Delete), + combine::attempt(case_insensitive_string("increment")).map(|_| Op::Increment), + )) +} + +fn key_parser() -> impl Parser +where + Input: combine::Stream, +{ + let key_char_parser = combine::choice::(( + charparser::alpha_num(), + charparser::char('-'), + charparser::char('_'), + )); + combine::many1(key_char_parser).map(|chars: Vec| chars.into_iter().collect()) +} + +fn index_parser() -> impl Parser +where + Input: combine::Stream, +{ + combine::many1::, Input, _>(charparser::digit()).map(|digits| { + let num_string: String = digits.iter().collect(); + num_string.parse::().unwrap() + }) +} + +combine::parser! 
{ + fn path_segment_parser[Input](path_so_far: amf::Path)(Input) -> amf::Path + where [Input: combine::Stream] + { + let key_path_so_far = path_so_far.clone(); + let key_segment_parser = charparser::string("[\"") + .with(key_parser()) + .skip(charparser::string("\"]")) + .then(move |key| path_segment_parser(key_path_so_far.clone().key(key))); + + let index_path_so_far = path_so_far.clone(); + let index_segment_parser = charparser::char('[') + .with(index_parser()) + .skip(charparser::char(']')) + .then(move |index| path_segment_parser(index_path_so_far.clone().index(index))); + + combine::choice(( + combine::attempt(key_segment_parser), + combine::attempt(index_segment_parser), + combine::value(path_so_far.clone()) + )) + } +} + +fn value_parser<'a, Input>( +) -> Box + 'a> +where + Input: 'a, + Input: combine::Stream, + Input::Error: combine::ParseError, +{ + combine::parser::combinator::no_partial( + //combine::position().and(combine::many1::, _, _>(combine::any())).and_then( + combine::position().and(combine::many1::, _, _>(combine::any())).flat_map( + |(position, chars): (Input::Position, Vec)| -> Result { + let json_str: String = chars.into_iter().collect(); + let json: serde_json::Value = serde_json::from_str(json_str.as_str()).map_err(|e| { + //let pe = >::StreamError::message::>(combine::error::Format(e.to_string())); + //let pe = >::StreamError::message(e.to_string().into()); + let mut pe = Input::Error::empty(position); + pe.add_message(combine::error::Format(e.to_string())); + //let pe = combine::ParseError:::wmpty(position); + pe + })?; + Ok(amf::Value::from_json(&json)) + }, + ) + ).boxed() +} + +fn change_parser<'a, Input: 'a>() -> impl combine::Parser + 'a +where + Input: 'a, + Input: combine::stream::Stream, + Input::Error: combine::ParseError, +{ + charparser::spaces() + .with( + op_parser() + .skip(charparser::spaces()) + .skip(charparser::string("$")) + .and(path_segment_parser(am::Path::root())), + ) + .skip(charparser::spaces()) + 
.then(|(operation, path)| { + let onwards: Box< + dyn combine::Parser, + > = match operation { + Op::Set => value_parser::<'a>() + .map(move |value| amf::LocalChange::set(path.clone(), value)) + .boxed(), + Op::Insert => value_parser::<'a>() + .map(move |value| amf::LocalChange::insert(path.clone(), value)) + .boxed(), + Op::Delete => combine::value(amf::LocalChange::delete(path)).boxed(), + Op::Increment => combine::value(amf::LocalChange::increment(path)).boxed(), + }; + onwards + }) +} + +fn parse_change_script(input: &str) -> Result { + let (change, _) = + change_parser() + .easy_parse(input) + .map_err(|e| ChangeError::InvalidChangeScript { + message: e.to_string(), + })?; + Ok(change) +} + +pub fn change( + mut reader: impl std::io::Read, + mut writer: impl std::io::Write, + script: &str, +) -> Result<(), ChangeError> { + let mut buf: Vec = Vec::new(); + reader + .read_to_end(&mut buf) + .map_err(|e| ChangeError::ErrReadingChanges { source: e })?; + let backend = am::Automerge::load(&buf) + .map_err(|e| ChangeError::ErrApplyingInitialChanges { source: e })?; + let local_change = parse_change_script(script)?; + let ((), new_changes) = frontend.change::<_, _, amf::InvalidChangeRequest>(None, |d| { + d.add_change(local_change)?; + Ok(()) + })?; + let change_bytes = backend.save().unwrap(); + writer + .write_all(&change_bytes) + .map_err(|e| ChangeError::ErrWritingChanges { source: e })?; + Ok(()) +} + +#[cfg(test)] +mod tests { + use maplit::hashmap; + + use super::*; + + #[test] + fn test_parse_change_script() { + struct Scenario { + input: &'static str, + expected: amf::LocalChange, + } + let scenarios = vec![ + Scenario { + input: "set $[\"map\"][0] {\"some\": \"value\"}", + expected: amf::LocalChange::set( + amf::Path::root().key("map").index(0), + amf::Value::from(hashmap! 
{"some" => "value"}), + ), + }, + Scenario { + input: "insert $[\"map\"][0] {\"some\": \"value\"}", + expected: amf::LocalChange::insert( + amf::Path::root().key("map").index(0), + hashmap! {"some" => "value"}.into(), + ), + }, + Scenario { + input: "delete $[\"map\"][0]", + expected: amf::LocalChange::delete(amf::Path::root().key("map").index(0)), + }, + Scenario { + input: "increment $[\"map\"][0]", + expected: amf::LocalChange::increment(amf::Path::root().key("map").index(0)), + }, + ]; + for (index, scenario) in scenarios.into_iter().enumerate() { + let result: Result<(amf::LocalChange, _), _> = + change_parser().easy_parse(scenario.input); + let change = result.unwrap().0; + assert_eq!( + change, + scenario.expected, + "Failed on scenario: {0}", + index + 1, + ); + } + } +} diff --git a/automerge-cli/src/examine.rs b/automerge-cli/src/examine.rs new file mode 100644 index 00000000..7f15a625 --- /dev/null +++ b/automerge-cli/src/examine.rs @@ -0,0 +1,46 @@ +use automerge as am; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum ExamineError { + #[error("Error reading change file: {:?}", source)] + ReadingChanges { + #[source] + source: std::io::Error, + }, + #[error("Error loading changes: {:?}", source)] + ApplyingInitialChanges { + #[source] + source: am::AutomergeError, + }, + #[error("Error writing to output: {:?}", source)] + WritingToOutput { + #[source] + source: std::io::Error, + }, +} + +pub fn examine( + mut input: impl std::io::Read, + mut output: impl std::io::Write, + is_tty: bool, +) -> Result<(), ExamineError> { + let mut buf: Vec = Vec::new(); + input + .read_to_end(&mut buf) + .map_err(|e| ExamineError::ReadingChanges { source: e })?; + let doc = am::Automerge::load(&buf) + .map_err(|e| ExamineError::ApplyingInitialChanges { source: e })?; + let uncompressed_changes: Vec<_> = doc.get_changes(&[]).iter().map(|c| c.decode()).collect(); + if is_tty { + let json_changes = serde_json::to_value(uncompressed_changes).unwrap(); + 
colored_json::write_colored_json(&json_changes, &mut output).unwrap(); + writeln!(&mut output).unwrap(); + } else { + let json_changes = serde_json::to_string_pretty(&uncompressed_changes).unwrap(); + output + .write_all(&json_changes.into_bytes()) + .map_err(|e| ExamineError::WritingToOutput { source: e })?; + } + Ok(()) +} diff --git a/automerge-cli/src/export.rs b/automerge-cli/src/export.rs new file mode 100644 index 00000000..a2751d07 --- /dev/null +++ b/automerge-cli/src/export.rs @@ -0,0 +1,154 @@ +use anyhow::Result; +use automerge as am; + +pub(crate) fn map_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value { + let keys = doc.keys(obj); + let mut map = serde_json::Map::new(); + for k in keys { + let val = doc.value(obj, &k); + match val { + Ok(Some((am::Value::Object(o), exid))) + if o == am::ObjType::Map || o == am::ObjType::Table => + { + map.insert(k.to_owned(), map_to_json(doc, &exid)); + } + Ok(Some((am::Value::Object(_), exid))) => { + map.insert(k.to_owned(), list_to_json(doc, &exid)); + } + Ok(Some((am::Value::Scalar(v), _))) => { + map.insert(k.to_owned(), scalar_to_json(&v)); + } + _ => (), + }; + } + serde_json::Value::Object(map) +} + +fn list_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value { + let len = doc.length(obj); + let mut array = Vec::new(); + for i in 0..len { + let val = doc.value(obj, i as usize); + match val { + Ok(Some((am::Value::Object(o), exid))) + if o == am::ObjType::Map || o == am::ObjType::Table => + { + array.push(map_to_json(doc, &exid)); + } + Ok(Some((am::Value::Object(_), exid))) => { + array.push(list_to_json(doc, &exid)); + } + Ok(Some((am::Value::Scalar(v), _))) => { + array.push(scalar_to_json(&v)); + } + _ => (), + }; + } + serde_json::Value::Array(array) +} + +fn scalar_to_json(val: &am::ScalarValue) -> serde_json::Value { + match val { + am::ScalarValue::Str(s) => serde_json::Value::String(s.to_string()), + am::ScalarValue::Bytes(b) => serde_json::Value::Array( + b.iter() + 
.map(|byte| serde_json::Value::Number((*byte).into())) + .collect(), + ), + am::ScalarValue::Int(n) => serde_json::Value::Number((*n).into()), + am::ScalarValue::Uint(n) => serde_json::Value::Number((*n).into()), + am::ScalarValue::F64(n) => serde_json::Number::from_f64(*n) + .unwrap_or_else(|| 0_i64.into()) + .into(), + am::ScalarValue::Counter(c) => serde_json::Value::Number(i64::from(c).into()), + am::ScalarValue::Timestamp(n) => serde_json::Value::Number((*n).into()), + am::ScalarValue::Boolean(b) => serde_json::Value::Bool(*b), + am::ScalarValue::Null => serde_json::Value::Null, + } +} + +fn get_state_json(input_data: Vec) -> Result { + let doc = am::Automerge::load(&input_data).unwrap(); // FIXME + Ok(map_to_json(&doc, &am::ObjId::Root)) +} + +pub fn export_json( + mut changes_reader: impl std::io::Read, + mut writer: impl std::io::Write, + is_tty: bool, +) -> Result<()> { + let mut input_data = vec![]; + changes_reader.read_to_end(&mut input_data)?; + + let state_json = get_state_json(input_data)?; + if is_tty { + colored_json::write_colored_json(&state_json, &mut writer).unwrap(); + writeln!(writer).unwrap(); + } else { + writeln!( + writer, + "{}", + serde_json::to_string_pretty(&state_json).unwrap() + )?; + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::import::initialize_from_json; + + #[test] + fn cli_export_with_empty_input() { + assert_eq!(get_state_json(vec![]).unwrap(), serde_json::json!({})) + } + + #[test] + fn cli_export_with_flat_map() { + let initial_state_json: serde_json::Value = + serde_json::from_str(r#"{"sparrows": 15.0}"#).unwrap(); + //let value: am::Value = am::Value::from_json(&initial_state_json); + //let (_, initial_change) = am::Frontend::new_with_initial_state(value).unwrap(); + //let mut backend = am::Automerge::new(); + //backend.apply_local_change(initial_change).unwrap(); + let mut backend = initialize_from_json(&initial_state_json).unwrap(); + let change_bytes = backend.save().unwrap(); + assert_eq!( + 
get_state_json(change_bytes).unwrap(), + serde_json::json!({"sparrows": 15.0}) + ) + } + + #[test] + fn cli_export_with_nested_map() { + let initial_state_json: serde_json::Value = serde_json::from_str( + r#"{ + "birds": { + "wrens": 3.0, + "sparrows": 15.0 + } +}"#, + ) + .unwrap(); + let mut backend = initialize_from_json(&initial_state_json).unwrap(); + /* + let value: am::Value = am::Value::from_json(&initial_state_json); + + //let (_, initial_change) = am::Frontend::new_with_initial_state(value).unwrap(); + let mut backend = am::Automerge::new(); + //backend.apply_local_change(initial_change).unwrap(); + + */ + let change_bytes = backend.save().unwrap(); + assert_eq!( + get_state_json(change_bytes).unwrap(), + serde_json::json!({ + "birds": { + "wrens": 3.0, + "sparrows": 15.0 + } + }) + ) + } +} diff --git a/automerge-cli/src/import.rs b/automerge-cli/src/import.rs new file mode 100644 index 00000000..d7d3c45b --- /dev/null +++ b/automerge-cli/src/import.rs @@ -0,0 +1,108 @@ +use automerge as am; +use automerge::transaction::Transactable; + +pub(crate) fn initialize_from_json( + json_value: &serde_json::Value, +) -> Result { + let mut doc = am::AutoCommit::new(); + match json_value { + serde_json::Value::Object(m) => { + import_map(&mut doc, &am::ObjId::Root, m)?; + Ok(doc) + } + _ => Err(am::AutomergeError::Decoding), + } +} + +fn import_map( + doc: &mut am::AutoCommit, + obj: &am::ObjId, + map: &serde_json::Map, +) -> Result<(), am::AutomergeError> { + for (key, value) in map { + match value { + serde_json::Value::Null => { + doc.set(obj, key, ())?; + } + serde_json::Value::Bool(b) => { + doc.set(obj, key, *b)?; + } + serde_json::Value::String(s) => { + doc.set(obj, key, s.as_ref())?; + } + serde_json::Value::Array(vec) => { + let id = doc.set(obj, key, am::Value::list())?.unwrap(); + import_list(doc, &id, vec)?; + } + serde_json::Value::Number(n) => { + if let Some(m) = n.as_i64() { + doc.set(obj, key, m)?; + } else if let Some(m) = n.as_u64() { + 
doc.set(obj, key, m)?; + } else if let Some(m) = n.as_f64() { + doc.set(obj, key, m)?; + } else { + return Err(am::AutomergeError::Decoding); + } + } + serde_json::Value::Object(map) => { + let id = doc.set(obj, key, am::Value::map())?.unwrap(); + import_map(doc, &id, map)?; + } + } + } + Ok(()) +} + +fn import_list( + doc: &mut am::AutoCommit, + obj: &am::ObjId, + list: &[serde_json::Value], +) -> Result<(), am::AutomergeError> { + for (i, value) in list.iter().enumerate() { + match value { + serde_json::Value::Null => { + doc.insert(obj, i, ())?; + } + serde_json::Value::Bool(b) => { + doc.insert(obj, i, *b)?; + } + serde_json::Value::String(s) => { + doc.insert(obj, i, s.as_ref())?; + } + serde_json::Value::Array(vec) => { + let id = doc.insert(obj, i, am::Value::list())?.unwrap(); + import_list(doc, &id, vec)?; + } + serde_json::Value::Number(n) => { + if let Some(m) = n.as_i64() { + doc.insert(obj, i, m)?; + } else if let Some(m) = n.as_u64() { + doc.insert(obj, i, m)?; + } else if let Some(m) = n.as_f64() { + doc.insert(obj, i, m)?; + } else { + return Err(am::AutomergeError::Decoding); + } + } + serde_json::Value::Object(map) => { + let id = doc.insert(obj, i, am::Value::map())?.unwrap(); + import_map(doc, &id, map)?; + } + } + } + Ok(()) +} + +pub fn import_json( + mut reader: impl std::io::Read, + mut writer: impl std::io::Write, +) -> anyhow::Result<()> { + let mut buffer = String::new(); + reader.read_to_string(&mut buffer)?; + + let json_value: serde_json::Value = serde_json::from_str(&buffer)?; + let mut doc = initialize_from_json(&json_value)?; + writer.write_all(&doc.save()?)?; + Ok(()) +} diff --git a/automerge-cli/src/main.rs b/automerge-cli/src/main.rs new file mode 100644 index 00000000..ffc13012 --- /dev/null +++ b/automerge-cli/src/main.rs @@ -0,0 +1,213 @@ +use std::{fs::File, path::PathBuf, str::FromStr}; + +use anyhow::{anyhow, Result}; +use clap::Parser; + +//mod change; +mod examine; +mod export; +mod import; +mod merge; + 
+#[derive(Parser, Debug)] +#[clap(about = "Automerge CLI")] +struct Opts { + #[clap(subcommand)] + cmd: Command, +} + +#[derive(Debug)] +enum ExportFormat { + Json, + Toml, +} + +impl FromStr for ExportFormat { + type Err = anyhow::Error; + + fn from_str(input: &str) -> Result { + match input { + "json" => Ok(ExportFormat::Json), + "toml" => Ok(ExportFormat::Toml), + _ => Err(anyhow!("Invalid export format: {}", input)), + } + } +} + +#[derive(Debug, Parser)] +enum Command { + /// Output current state of an Automerge document in a specified format + Export { + /// Format for output: json, toml + #[clap(long, short, default_value = "json")] + format: ExportFormat, + + /// Path that contains Automerge changes + #[clap(parse(from_os_str))] + changes_file: Option, + + /// The file to write to. If omitted assumes stdout + #[clap(parse(from_os_str), long("out"), short('o'))] + output_file: Option, + }, + + Import { + /// Format for input: json, toml + #[clap(long, short, default_value = "json")] + format: ExportFormat, + + #[clap(parse(from_os_str))] + input_file: Option, + + /// Path to write Automerge changes to + #[clap(parse(from_os_str), long("out"), short('o'))] + changes_file: Option, + }, + + /// Read an automerge document from a file or stdin, perform a change on it and write a new + /// document to stdout or the specified output file. + Change { + /// The change script to perform. Change scripts have the form []. + /// The possible commands are 'set', 'insert', 'delete', and 'increment'. + /// + /// Paths look like this: $["mapkey"][0]. They always lways start with a '$', then each + /// subsequent segment of the path is either a string in double quotes to index a key in a + /// map, or an integer index to address an array element. 
+ /// + /// Examples + /// + /// ## set + /// + /// > automerge change 'set $["someobject"] {"items": []}' somefile + /// + /// ## insert + /// + /// > automerge change 'insert $["someobject"]["items"][0] "item1"' somefile + /// + /// ## increment + /// + /// > automerge change 'increment $["mycounter"]' + /// + /// ## delete + /// + /// > automerge change 'delete $["someobject"]["items"]' somefile + script: String, + + /// The file to change, if omitted will assume stdin + #[clap(parse(from_os_str))] + input_file: Option, + + /// Path to write Automerge changes to, if omitted will write to stdout + #[clap(parse(from_os_str), long("out"), short('o'))] + output_file: Option, + }, + + /// Read an automerge document and print a JSON representation of the changes in it to stdout + Examine { input_file: Option }, + + /// Read one or more automerge documents and output a merged, compacted version of them + Merge { + /// The file to write to. If omitted assumes stdout + #[clap(parse(from_os_str), long("out"), short('o'))] + output_file: Option, + /// The file(s) to compact. 
If empty assumes stdin + input: Vec, + }, +} + +fn open_file_or_stdin(maybe_path: Option) -> Result> { + if atty::is(atty::Stream::Stdin) { + if let Some(path) = maybe_path { + Ok(Box::new(File::open(&path).unwrap())) + } else { + Err(anyhow!( + "Must provide file path if not providing input via stdin" + )) + } + } else { + Ok(Box::new(std::io::stdin())) + } +} + +fn create_file_or_stdout(maybe_path: Option) -> Result> { + if atty::is(atty::Stream::Stdout) { + if let Some(path) = maybe_path { + Ok(Box::new(File::create(&path).unwrap())) + } else { + Err(anyhow!("Must provide file path if not piping to stdout")) + } + } else { + Ok(Box::new(std::io::stdout())) + } +} + +fn main() -> Result<()> { + tracing_subscriber::fmt::init(); + let opts = Opts::parse(); + match opts.cmd { + Command::Export { + changes_file, + format, + output_file, + } => { + let output: Box = if let Some(output_file) = output_file { + Box::new(File::create(&output_file)?) + } else { + Box::new(std::io::stdout()) + }; + match format { + ExportFormat::Json => { + let mut in_buffer = open_file_or_stdin(changes_file)?; + export::export_json(&mut in_buffer, output, atty::is(atty::Stream::Stdout)) + } + ExportFormat::Toml => unimplemented!(), + } + } + Command::Import { + format, + input_file, + changes_file, + } => match format { + ExportFormat::Json => { + let mut out_buffer = create_file_or_stdout(changes_file)?; + let mut in_buffer = open_file_or_stdin(input_file)?; + import::import_json(&mut in_buffer, &mut out_buffer) + } + ExportFormat::Toml => unimplemented!(), + }, + Command::Change { .. 
+ //input_file, + //output_file, + //script, + } => { + unimplemented!() +/* + let in_buffer = open_file_or_stdin(input_file)?; + let mut out_buffer = create_file_or_stdout(output_file)?; + change::change(in_buffer, &mut out_buffer, script.as_str()) + .map_err(|e| anyhow::format_err!("Unable to make changes: {:?}", e)) +*/ + } + Command::Examine { input_file } => { + let in_buffer = open_file_or_stdin(input_file)?; + let out_buffer = std::io::stdout(); + match examine::examine(in_buffer, out_buffer, atty::is(atty::Stream::Stdout)) { + Ok(()) => {} + Err(e) => { + eprintln!("Error: {:?}", e); + } + } + Ok(()) + } + Command::Merge { input, output_file } => { + let out_buffer = create_file_or_stdout(output_file)?; + match merge::merge(input.into(), out_buffer) { + Ok(()) => {} + Err(e) => { + eprintln!("Failed to merge: {}", e); + } + }; + Ok(()) + } + } +} diff --git a/automerge-cli/src/merge.rs b/automerge-cli/src/merge.rs new file mode 100644 index 00000000..caaa5c54 --- /dev/null +++ b/automerge-cli/src/merge.rs @@ -0,0 +1,58 @@ +use automerge as am; +use std::{ + io::Read, + path::{Path, PathBuf}, +}; + +pub(super) enum Inputs { + Stdin, + Paths(Vec), +} + +impl From> for Inputs { + fn from(i: Vec) -> Self { + if i.is_empty() { + Inputs::Stdin + } else { + Inputs::Paths(i) + } + } +} + +#[derive(Debug, thiserror::Error)] +pub(super) enum MergeError { + #[error(transparent)] + Io(#[from] std::io::Error), + #[error("failed to load {path}: {error}")] + FailedToLoad { + path: PathBuf, + error: Box, + }, + #[error(transparent)] + Automerge(#[from] am::AutomergeError), +} + +pub(super) fn merge(inputs: Inputs, mut output: W) -> Result<(), MergeError> { + let mut backend = am::Automerge::new(); + match inputs { + Inputs::Stdin => { + let mut input = Vec::new(); + std::io::stdin().read_to_end(&mut input)?; + backend.load_incremental(&input)?; + } + Inputs::Paths(paths) => { + for path in paths { + load_path(&mut backend, &path) + .map_err(|error| MergeError::FailedToLoad 
{ path, error })?; + } + } + } + output.write_all(&backend.save().unwrap())?; + Ok(()) +} + +fn load_path(backend: &mut am::Automerge, path: &Path) -> Result<(), Box> { + let input = std::fs::read(path).map_err(Box::new)?; + backend.load_incremental(&input).map_err(Box::new)?; + Ok(()) +} diff --git a/automerge-cli/tests/integration.rs b/automerge-cli/tests/integration.rs new file mode 100644 index 00000000..87e30db7 --- /dev/null +++ b/automerge-cli/tests/integration.rs @@ -0,0 +1,93 @@ +use std::env; + +use duct::cmd; + +// #[test] +// fn import_stdin() { +// let bin = env!("CARGO_BIN_EXE_automerge"); +// let initial_state_json = serde_json::json!({ +// "birds": { +// "wrens": 3.0, +// "sparrows": 15.0 +// } +// }); +// let json_bytes = serde_json::to_string_pretty(&initial_state_json).unwrap(); + +// let no_pipe_no_file = cmd!(bin, "import").stdin_bytes(json_bytes.clone()).run(); + +// assert!(no_pipe_no_file.is_err()); + +// let pipe_no_file = cmd!(bin, "import") +// .stdin_bytes(json_bytes.clone()) +// .stdout_capture() +// .run(); + +// assert!(pipe_no_file.is_ok()); + +// let mut temp_file = std::env::temp_dir(); +// temp_file.push("import_test.mpl"); +// let no_pipe_file = cmd!(bin, "import", "--out", &temp_file) +// .stdin_bytes(json_bytes) +// .run(); + +// assert!(no_pipe_file.is_ok()); +// std::fs::remove_file(temp_file).unwrap(); +// } + +// #[test] +// fn export_stdout() { +// let bin = env!("CARGO_BIN_EXE_automerge"); +// let no_pipe_no_file = cmd!(bin, "export").stdout_capture().run(); + +// assert!(no_pipe_no_file.is_err()); +// } + +#[test] +fn import_export_isomorphic() { + let bin = env!("CARGO_BIN_EXE_automerge"); + let initial_state_json = serde_json::json!({ + "birds": { + "wrens": 3.0, + "sparrows": 15.0 + } + }); + let json_bytes = serde_json::to_string_pretty(&initial_state_json).unwrap(); + + let stdout = cmd!(bin, "import") + .stdin_bytes(json_bytes.clone()) + .pipe(cmd!(bin, "export")) + .read() + .unwrap(); + assert_eq!(stdout, 
json_bytes); +} + +/* +#[test] +fn import_change_export() { + let bin = env!("CARGO_BIN_EXE_automerge"); + let initial_state_json = serde_json::json!({ + "birds": { + "wrens": 3.0, + "sparrows": 15.0 + } + }); + let json_bytes = serde_json::to_string_pretty(&initial_state_json).unwrap(); + + let stdout = cmd!(bin, "import") + .stdin_bytes(json_bytes.clone()) + .pipe(cmd!(bin, "change", "set $[\"birds\"][\"owls\"] 12.0")) + .stdin_bytes(json_bytes) + .pipe(cmd!(bin, "export")) + .read() + .unwrap(); + let result: serde_json::Value = serde_json::from_str(stdout.as_str()).unwrap(); + let expected = serde_json::json!({ + "birds": { + "wrens": 3.0, + "sparrows": 15.0, + "owls": 12.0, + } + }); + assert_eq!(result, expected); +} +*/ diff --git a/automerge/src/value.rs b/automerge/src/value.rs index ac26033c..21b03a52 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -83,6 +83,12 @@ impl From for Value { } } +impl From for Value { + fn from(n: f64) -> Self { + Value::Scalar(ScalarValue::F64(n)) + } +} + impl From for Value { fn from(n: i64) -> Self { Value::Scalar(ScalarValue::Int(n)) @@ -417,6 +423,12 @@ impl From for ScalarValue { } } +impl From for ScalarValue { + fn from(n: f64) -> Self { + ScalarValue::F64(n) + } +} + impl From for ScalarValue { fn from(n: u64) -> Self { ScalarValue::Uint(n) From 682b8007b99e056c1b04f53c8a45914fdc4f1dfd Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 08:58:04 +0000 Subject: [PATCH 123/730] Borrow exid to avoid &ROOT everywhere --- automerge-wasm/README.md | 13 +- automerge/examples/quickstart.rs | 2 +- automerge/src/autocommit.rs | 68 +++--- automerge/src/automerge.rs | 197 +++++++++--------- automerge/src/transaction/inner.rs | 2 +- .../src/transaction/manual_transaction.rs | 77 ++++--- automerge/src/transaction/transactable.rs | 64 +++--- automerge/tests/test.rs | 2 +- edit-trace/benches/main.rs | 4 +- edit-trace/src/main.rs | 2 +- 10 files changed, 241 insertions(+), 190 deletions(-) diff 
--git a/automerge-wasm/README.md b/automerge-wasm/README.md index 985955a3..63548307 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -1,16 +1,17 @@ ## Automerge WASM Low Level Interface -This is a low level automerge library written in rust exporting a javascript API via WASM. This low level api is the underpinning to the `automerge-js` library that reimplements the Automerge API via these low level functions. +This is a low level automerge library written in rust exporting a javascript API via WASM. This low level api is the underpinning to the `automerge-js` library that reimplements the Automerge API via these low level functions. ### Static Functions ### Methods - `doc.clone(actor?: string)` : Make a complete +`doc.clone(actor?: string)` : Make a complete - `doc.free()` : deallocate WASM memory associated with a document +`doc.free()` : deallocate WASM memory associated with a document -#[wasm_bindgen] +```rust + #[wasm_bindgen] pub fn free(self) {} #[wasm_bindgen(js_name = pendingOps)] @@ -430,7 +431,7 @@ This is a low level automerge library written in rust exporting a javascript API #[wasm_bindgen(js_name = toJS)] pub fn to_js(&self) -> JsValue { - map_to_js(&self.0, &ROOT) + map_to_js(&self.0, ROOT) } fn import(&self, id: String) -> Result { @@ -582,6 +583,7 @@ This is a low level automerge library written in rust exporting a javascript API } */ } + } #[wasm_bindgen(js_name = create)] @@ -691,3 +693,4 @@ pub struct Text {} #[wasm_bindgen(js_name = TABLE)] pub struct Table {} +``` diff --git a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs index fd8b7871..9972cb1b 100644 --- a/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -10,7 +10,7 @@ fn main() { .transact_with::<_, _, AutomergeError, _>( || CommitOptions::default().with_message("Add card".to_owned()), |tx| { - let cards = tx.set(&ROOT, "cards", Value::list()).unwrap().unwrap(); + let cards = tx.set(ROOT, "cards", 
Value::list()).unwrap().unwrap(); let card1 = tx.insert(&cards, 0, Value::map())?.unwrap(); tx.set(&card1, "title", "Rewrite everything in Clojure")?; tx.set(&card1, "done", false)?; diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 43f32345..57c182ad 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -1,3 +1,5 @@ +use std::borrow::Borrow; + use crate::exid::ExId; use crate::transaction::{CommitOptions, Transactable}; use crate::types::Patch; @@ -253,7 +255,7 @@ impl AutoCommit { /// # use automerge::AutoCommit; /// # use std::time::SystemTime; /// let mut doc = AutoCommit::new(); - /// doc.set(&ROOT, "todos", Value::list()).unwrap(); + /// doc.set(ROOT, "todos", Value::list()).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); @@ -287,19 +289,19 @@ impl Transactable for AutoCommit { // PropAt::() // NthAt::() - fn keys(&self, obj: &ExId) -> Keys { + fn keys>(&self, obj: O) -> Keys { self.doc.keys(obj) } - fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt { + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { self.doc.keys_at(obj, heads) } - fn length(&self, obj: &ExId) -> usize { + fn length>(&self, obj: O) -> usize { self.doc.length(obj) } - fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize { + fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { self.doc.length_at(obj, heads) } @@ -321,98 +323,106 @@ impl Transactable for AutoCommit { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set, V: Into>( + fn set, V: Into, O: Borrow>( &mut self, - obj: &ExId, + obj: O, prop: P, value: V, ) -> Result, AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.set(&mut self.doc, obj, prop, value) + 
tx.set(&mut self.doc, obj.borrow(), prop, value) } - fn insert>( + fn insert, O: Borrow>( &mut self, - obj: &ExId, + obj: O, index: usize, value: V, ) -> Result, AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.insert(&mut self.doc, obj, index, value) + tx.insert(&mut self.doc, obj.borrow(), index, value) } - fn inc>( + fn inc, O: Borrow>( &mut self, - obj: &ExId, + obj: O, prop: P, value: i64, ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.inc(&mut self.doc, obj, prop, value) + tx.inc(&mut self.doc, obj.borrow(), prop, value) } - fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { + fn del, O: Borrow>( + &mut self, + obj: O, + prop: P, + ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.del(&mut self.doc, obj, prop) + tx.del(&mut self.doc, obj.borrow(), prop) } /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert /// the new elements - fn splice( + fn splice>( &mut self, - obj: &ExId, + obj: O, pos: usize, del: usize, vals: Vec, ) -> Result, AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.splice(&mut self.doc, obj, pos, del, vals) + tx.splice(&mut self.doc, obj.borrow(), pos, del, vals) } - fn text(&self, obj: &ExId) -> Result { + fn text>(&self, obj: O) -> Result { self.doc.text(obj) } - fn text_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Result { + fn text_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result { self.doc.text_at(obj, heads) } // TODO - I need to return these OpId's here **only** to get // the legacy conflicts format of { [opid]: value } // Something better? 
- fn value>( + fn value, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, ) -> Result, AutomergeError> { self.doc.value(obj, prop) } - fn value_at>( + fn value_at, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { self.doc.value_at(obj, prop, heads) } - fn values>( + fn values, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, ) -> Result, AutomergeError> { self.doc.values(obj, prop) } - fn values_at>( + fn values_at, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 6fa040fb..cc60b2b0 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1,3 +1,4 @@ +use std::borrow::Borrow; use std::collections::{HashMap, HashSet, VecDeque}; use crate::change::encode_document; @@ -177,8 +178,8 @@ impl Automerge { /// /// For a map this returns the keys of the map. /// For a list this returns the element ids (opids) encoded as strings. - pub fn keys(&self, obj: &ExId) -> Keys { - if let Ok(obj) = self.exid_to_obj(obj) { + pub fn keys>(&self, obj: O) -> Keys { + if let Ok(obj) = self.exid_to_obj(obj.borrow()) { let iter_keys = self.ops.keys(obj); Keys::new(self, iter_keys) } else { @@ -187,8 +188,8 @@ impl Automerge { } /// Historical version of [`keys`](Self::keys). 
- pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt { - if let Ok(obj) = self.exid_to_obj(obj) { + pub fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { + if let Ok(obj) = self.exid_to_obj(obj.borrow()) { let clock = self.clock_at(heads); KeysAt::new(self, self.ops.keys_at(obj, clock)) } else { @@ -196,8 +197,8 @@ impl Automerge { } } - pub fn length(&self, obj: &ExId) -> usize { - if let Ok(inner_obj) = self.exid_to_obj(obj) { + pub fn length>(&self, obj: O) -> usize { + if let Ok(inner_obj) = self.exid_to_obj(obj.borrow()) { match self.ops.object_type(&inner_obj) { Some(ObjType::Map) | Some(ObjType::Table) => self.keys(obj).count(), Some(ObjType::List) | Some(ObjType::Text) => { @@ -210,8 +211,8 @@ impl Automerge { } } - pub fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize { - if let Ok(inner_obj) = self.exid_to_obj(obj) { + pub fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { + if let Ok(inner_obj) = self.exid_to_obj(obj.borrow()) { let clock = self.clock_at(heads); match self.ops.object_type(&inner_obj) { Some(ObjType::Map) | Some(ObjType::Table) => self.keys_at(obj, heads).count(), @@ -251,8 +252,8 @@ impl Automerge { ExId::Id(id.0, self.ops.m.actors.cache[id.1].clone(), id.1) } - pub fn text(&self, obj: &ExId) -> Result { - let obj = self.exid_to_obj(obj)?; + pub fn text>(&self, obj: O) -> Result { + let obj = self.exid_to_obj(obj.borrow())?; let query = self.ops.search(obj, query::ListVals::new()); let mut buffer = String::new(); for q in &query.ops { @@ -263,8 +264,12 @@ impl Automerge { Ok(buffer) } - pub fn text_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Result { - let obj = self.exid_to_obj(obj)?; + pub fn text_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result { + let obj = self.exid_to_obj(obj.borrow())?; let clock = self.clock_at(heads); let query = self.ops.search(obj, query::ListValsAt::new(clock)); let mut buffer = String::new(); @@ -279,29 +284,29 @@ impl Automerge { // TODO - I 
need to return these OpId's here **only** to get // the legacy conflicts format of { [opid]: value } // Something better? - pub fn value>( + pub fn value, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, ) -> Result, AutomergeError> { Ok(self.values(obj, prop.into())?.last().cloned()) } - pub fn value_at>( + pub fn value_at, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { Ok(self.values_at(obj, prop, heads)?.last().cloned()) } - pub fn values>( + pub fn values, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, ) -> Result, AutomergeError> { - let obj = self.exid_to_obj(obj)?; + let obj = self.exid_to_obj(obj.borrow())?; let result = match prop.into() { Prop::Map(p) => { let prop = self.ops.m.props.lookup(&p); @@ -327,14 +332,14 @@ impl Automerge { Ok(result) } - pub fn values_at>( + pub fn values_at, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { let prop = prop.into(); - let obj = self.exid_to_obj(obj)?; + let obj = self.exid_to_obj(obj.borrow())?; let clock = self.clock_at(heads); let result = match prop { Prop::Map(p) => { @@ -863,8 +868,8 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); let mut tx = doc.transaction(); - tx.set(&ROOT, "hello", "world")?; - tx.value(&ROOT, "hello")?; + tx.set(ROOT, "hello", "world")?; + tx.value(ROOT, "hello")?; tx.commit(); Ok(()) } @@ -874,13 +879,13 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); // setting a scalar value shouldn't return an opid as no object was created. - assert!(tx.set(&ROOT, "a", 1)?.is_none()); + assert!(tx.set(ROOT, "a", 1)?.is_none()); // setting the same value shouldn't return an opid as there is no change. 
- assert!(tx.set(&ROOT, "a", 1)?.is_none()); + assert!(tx.set(ROOT, "a", 1)?.is_none()); - assert!(tx.set(&ROOT, "b", Value::map())?.is_some()); + assert!(tx.set(ROOT, "b", Value::map())?.is_some()); // object already exists at b but setting a map again overwrites it so we get an opid. - assert!(tx.set(&ROOT, "b", Value::map())?.is_some()); + assert!(tx.set(ROOT, "b", Value::map())?.is_some()); tx.commit(); Ok(()) } @@ -890,9 +895,9 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); let mut tx = doc.transaction(); - let list_id = tx.set(&ROOT, "items", Value::list())?.unwrap(); - tx.set(&ROOT, "zzz", "zzzval")?; - assert!(tx.value(&ROOT, "items")?.unwrap().1 == list_id); + let list_id = tx.set(ROOT, "items", Value::list())?.unwrap(); + tx.set(ROOT, "zzz", "zzzval")?; + assert!(tx.value(ROOT, "items")?.unwrap().1 == list_id); tx.insert(&list_id, 0, "a")?; tx.insert(&list_id, 0, "b")?; tx.insert(&list_id, 2, "c")?; @@ -912,10 +917,10 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); let mut tx = doc.transaction(); - tx.set(&ROOT, "xxx", "xxx")?; - assert!(!tx.values(&ROOT, "xxx")?.is_empty()); - tx.del(&ROOT, "xxx")?; - assert!(tx.values(&ROOT, "xxx")?.is_empty()); + tx.set(ROOT, "xxx", "xxx")?; + assert!(!tx.values(ROOT, "xxx")?.is_empty()); + tx.del(ROOT, "xxx")?; + assert!(tx.values(ROOT, "xxx")?.is_empty()); tx.commit(); Ok(()) } @@ -924,12 +929,12 @@ mod tests { fn test_inc() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - tx.set(&ROOT, "counter", Value::counter(10))?; - assert!(tx.value(&ROOT, "counter")?.unwrap().0 == Value::counter(10)); - tx.inc(&ROOT, "counter", 10)?; - assert!(tx.value(&ROOT, "counter")?.unwrap().0 == Value::counter(20)); - tx.inc(&ROOT, "counter", -5)?; - assert!(tx.value(&ROOT, "counter")?.unwrap().0 == Value::counter(15)); + tx.set(ROOT, "counter", Value::counter(10))?; + assert!(tx.value(ROOT, "counter")?.unwrap().0 == 
Value::counter(10)); + tx.inc(ROOT, "counter", 10)?; + assert!(tx.value(ROOT, "counter")?.unwrap().0 == Value::counter(20)); + tx.inc(ROOT, "counter", -5)?; + assert!(tx.value(ROOT, "counter")?.unwrap().0 == Value::counter(15)); tx.commit(); Ok(()) } @@ -939,19 +944,19 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - tx.set(&ROOT, "foo", 1)?; + tx.set(ROOT, "foo", 1)?; tx.commit(); let save1 = doc.save().unwrap(); let mut tx = doc.transaction(); - tx.set(&ROOT, "bar", 2)?; + tx.set(ROOT, "bar", 2)?; tx.commit(); let save2 = doc.save_incremental(); let mut tx = doc.transaction(); - tx.set(&ROOT, "baz", 3)?; + tx.set(ROOT, "baz", 3)?; tx.commit(); let save3 = doc.save_incremental(); @@ -970,7 +975,7 @@ mod tests { let mut doc_a = Automerge::load(&save_a)?; let mut doc_b = Automerge::load(&save_b)?; - assert!(doc_a.values(&ROOT, "baz")? == doc_b.values(&ROOT, "baz")?); + assert!(doc_a.values(ROOT, "baz")? == doc_b.values(ROOT, "baz")?); assert!(doc_a.save().unwrap() == doc_b.save().unwrap()); @@ -981,7 +986,7 @@ mod tests { fn test_save_text() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let text = tx.set(&ROOT, "text", Value::text())?.unwrap(); + let text = tx.set(ROOT, "text", Value::text())?.unwrap(); tx.commit(); let heads1 = doc.get_heads(); let mut tx = doc.transaction(); @@ -1006,72 +1011,72 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor("aaaa".try_into().unwrap()); let mut tx = doc.transaction(); - tx.set(&ROOT, "prop1", "val1")?; + tx.set(ROOT, "prop1", "val1")?; tx.commit(); doc.get_heads(); let heads1 = doc.get_heads(); let mut tx = doc.transaction(); - tx.set(&ROOT, "prop1", "val2")?; + tx.set(ROOT, "prop1", "val2")?; tx.commit(); doc.get_heads(); let heads2 = doc.get_heads(); let mut tx = doc.transaction(); - tx.set(&ROOT, "prop2", "val3")?; + tx.set(ROOT, "prop2", "val3")?; tx.commit(); doc.get_heads(); let heads3 = doc.get_heads(); let mut tx = 
doc.transaction(); - tx.del(&ROOT, "prop1")?; + tx.del(ROOT, "prop1")?; tx.commit(); doc.get_heads(); let heads4 = doc.get_heads(); let mut tx = doc.transaction(); - tx.set(&ROOT, "prop3", "val4")?; + tx.set(ROOT, "prop3", "val4")?; tx.commit(); doc.get_heads(); let heads5 = doc.get_heads(); - assert!(doc.keys_at(&ROOT, &heads1).collect_vec() == vec!["prop1".to_owned()]); - assert_eq!(doc.length_at(&ROOT, &heads1), 1); - assert!(doc.value_at(&ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); - assert!(doc.value_at(&ROOT, "prop2", &heads1)? == None); - assert!(doc.value_at(&ROOT, "prop3", &heads1)? == None); + assert!(doc.keys_at(ROOT, &heads1).collect_vec() == vec!["prop1".to_owned()]); + assert_eq!(doc.length_at(ROOT, &heads1), 1); + assert!(doc.value_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); + assert!(doc.value_at(ROOT, "prop2", &heads1)? == None); + assert!(doc.value_at(ROOT, "prop3", &heads1)? == None); - assert!(doc.keys_at(&ROOT, &heads2).collect_vec() == vec!["prop1".to_owned()]); - assert_eq!(doc.length_at(&ROOT, &heads2), 1); - assert!(doc.value_at(&ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); - assert!(doc.value_at(&ROOT, "prop2", &heads2)? == None); - assert!(doc.value_at(&ROOT, "prop3", &heads2)? == None); + assert!(doc.keys_at(ROOT, &heads2).collect_vec() == vec!["prop1".to_owned()]); + assert_eq!(doc.length_at(ROOT, &heads2), 1); + assert!(doc.value_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); + assert!(doc.value_at(ROOT, "prop2", &heads2)? == None); + assert!(doc.value_at(ROOT, "prop3", &heads2)? 
== None); assert!( - doc.keys_at(&ROOT, &heads3).collect_vec() + doc.keys_at(ROOT, &heads3).collect_vec() == vec!["prop1".to_owned(), "prop2".to_owned()] ); - assert_eq!(doc.length_at(&ROOT, &heads3), 2); - assert!(doc.value_at(&ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); - assert!(doc.value_at(&ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(&ROOT, "prop3", &heads3)? == None); + assert_eq!(doc.length_at(ROOT, &heads3), 2); + assert!(doc.value_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); + assert!(doc.value_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(ROOT, "prop3", &heads3)? == None); - assert!(doc.keys_at(&ROOT, &heads4).collect_vec() == vec!["prop2".to_owned()]); - assert_eq!(doc.length_at(&ROOT, &heads4), 1); - assert!(doc.value_at(&ROOT, "prop1", &heads4)? == None); - assert!(doc.value_at(&ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(&ROOT, "prop3", &heads4)? == None); + assert!(doc.keys_at(ROOT, &heads4).collect_vec() == vec!["prop2".to_owned()]); + assert_eq!(doc.length_at(ROOT, &heads4), 1); + assert!(doc.value_at(ROOT, "prop1", &heads4)? == None); + assert!(doc.value_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(ROOT, "prop3", &heads4)? == None); assert!( - doc.keys_at(&ROOT, &heads5).collect_vec() + doc.keys_at(ROOT, &heads5).collect_vec() == vec!["prop2".to_owned(), "prop3".to_owned()] ); - assert_eq!(doc.length_at(&ROOT, &heads5), 2); - assert_eq!(doc.length(&ROOT), 2); - assert!(doc.value_at(&ROOT, "prop1", &heads5)? == None); - assert!(doc.value_at(&ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(&ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); + assert_eq!(doc.length_at(ROOT, &heads5), 2); + assert_eq!(doc.length(ROOT), 2); + assert!(doc.value_at(ROOT, "prop1", &heads5)? 
== None); + assert!(doc.value_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); - assert_eq!(doc.keys_at(&ROOT, &[]).count(), 0); - assert_eq!(doc.length_at(&ROOT, &[]), 0); - assert!(doc.value_at(&ROOT, "prop1", &[])? == None); - assert!(doc.value_at(&ROOT, "prop2", &[])? == None); - assert!(doc.value_at(&ROOT, "prop3", &[])? == None); + assert_eq!(doc.keys_at(ROOT, &[]).count(), 0); + assert_eq!(doc.length_at(ROOT, &[]), 0); + assert!(doc.value_at(ROOT, "prop1", &[])? == None); + assert!(doc.value_at(ROOT, "prop2", &[])? == None); + assert!(doc.value_at(ROOT, "prop3", &[])? == None); Ok(()) } @@ -1081,7 +1086,7 @@ mod tests { doc.set_actor("aaaa".try_into().unwrap()); let mut tx = doc.transaction(); - let list = tx.set(&ROOT, "list", Value::list())?.unwrap(); + let list = tx.set(ROOT, "list", Value::list())?.unwrap(); tx.commit(); let heads1 = doc.get_heads(); @@ -1144,48 +1149,48 @@ mod tests { fn keys_iter() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - tx.set(&ROOT, "a", 3).unwrap(); - tx.set(&ROOT, "b", 4).unwrap(); - tx.set(&ROOT, "c", 5).unwrap(); - tx.set(&ROOT, "d", 6).unwrap(); + tx.set(ROOT, "a", 3).unwrap(); + tx.set(ROOT, "b", 4).unwrap(); + tx.set(ROOT, "c", 5).unwrap(); + tx.set(ROOT, "d", 6).unwrap(); tx.commit(); let mut tx = doc.transaction(); - tx.set(&ROOT, "a", 7).unwrap(); + tx.set(ROOT, "a", 7).unwrap(); tx.commit(); let mut tx = doc.transaction(); - tx.set(&ROOT, "a", 8).unwrap(); - tx.set(&ROOT, "d", 9).unwrap(); + tx.set(ROOT, "a", 8).unwrap(); + tx.set(ROOT, "d", 9).unwrap(); tx.commit(); - assert_eq!(doc.keys(&ROOT).count(), 4); + assert_eq!(doc.keys(ROOT).count(), 4); - let mut keys = doc.keys(&ROOT); + let mut keys = doc.keys(ROOT); assert_eq!(keys.next(), Some("a".into())); assert_eq!(keys.next(), Some("b".into())); assert_eq!(keys.next(), Some("c".into())); assert_eq!(keys.next(), Some("d".into())); 
assert_eq!(keys.next(), None); - let mut keys = doc.keys(&ROOT); + let mut keys = doc.keys(ROOT); assert_eq!(keys.next_back(), Some("d".into())); assert_eq!(keys.next_back(), Some("c".into())); assert_eq!(keys.next_back(), Some("b".into())); assert_eq!(keys.next_back(), Some("a".into())); assert_eq!(keys.next_back(), None); - let mut keys = doc.keys(&ROOT); + let mut keys = doc.keys(ROOT); assert_eq!(keys.next(), Some("a".into())); assert_eq!(keys.next_back(), Some("d".into())); assert_eq!(keys.next_back(), Some("c".into())); assert_eq!(keys.next_back(), Some("b".into())); assert_eq!(keys.next_back(), None); - let mut keys = doc.keys(&ROOT); + let mut keys = doc.keys(ROOT); assert_eq!(keys.next_back(), Some("d".into())); assert_eq!(keys.next(), Some("a".into())); assert_eq!(keys.next(), Some("b".into())); assert_eq!(keys.next(), Some("c".into())); assert_eq!(keys.next(), None); - let keys = doc.keys(&ROOT); + let keys = doc.keys(ROOT); assert_eq!(keys.collect::>(), vec!["a", "b", "c", "d"]); } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 91a601c1..6a32178e 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -324,7 +324,7 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let a = tx.set(&ROOT, "a", Value::map()).unwrap().unwrap(); + let a = tx.set(ROOT, "a", Value::map()).unwrap().unwrap(); tx.set(&a, "b", 1).unwrap(); assert!(tx.value(&a, "b").unwrap().is_some()); } diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 93dcf615..2993d96f 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -1,3 +1,5 @@ +use std::borrow::Borrow; + use crate::exid::ExId; use crate::{Automerge, ChangeHash, KeysAt, Prop, Value}; use crate::{AutomergeError, Keys}; @@ -47,7 +49,7 @@ impl<'a> Transaction<'a> { /// # use std::time::SystemTime; /// 
let mut doc = Automerge::new(); /// let mut tx = doc.transaction(); - /// tx.set(&ROOT, "todos", Value::list()).unwrap(); + /// tx.set(ROOT, "todos", Value::list()).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; /// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); @@ -85,45 +87,58 @@ impl<'a> Transactable for Transaction<'a> { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set, V: Into>( + fn set, V: Into, O: Borrow>( &mut self, - obj: &ExId, + obj: O, prop: P, value: V, ) -> Result, AutomergeError> { - self.inner.as_mut().unwrap().set(self.doc, obj, prop, value) + self.inner + .as_mut() + .unwrap() + .set(self.doc, obj.borrow(), prop, value) } - fn insert>( + fn insert, O: Borrow>( &mut self, - obj: &ExId, + obj: O, index: usize, value: V, ) -> Result, AutomergeError> { self.inner .as_mut() .unwrap() - .insert(self.doc, obj, index, value) + .insert(self.doc, obj.borrow(), index, value) } - fn inc>( + fn inc, O: Borrow>( &mut self, - obj: &ExId, + obj: O, prop: P, value: i64, ) -> Result<(), AutomergeError> { - self.inner.as_mut().unwrap().inc(self.doc, obj, prop, value) + self.inner + .as_mut() + .unwrap() + .inc(self.doc, obj.borrow(), prop, value) } - fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { - self.inner.as_mut().unwrap().del(self.doc, obj, prop) + fn del, O: Borrow>( + &mut self, + obj: O, + prop: P, + ) -> Result<(), AutomergeError> { + self.inner + .as_mut() + .unwrap() + .del(self.doc, obj.borrow(), prop) } /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert /// the new elements - fn splice( + fn splice>( &mut self, - obj: &ExId, + obj: O, pos: usize, del: usize, vals: Vec, @@ -131,61 +146,65 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .splice(self.doc, obj, pos, del, vals) + .splice(self.doc, obj.borrow(), pos, del, vals) } - fn keys(&self, obj: &ExId) -> Keys { + fn keys>(&self, obj: O) -> Keys { self.doc.keys(obj) } - fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt { + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { self.doc.keys_at(obj, heads) } - fn length(&self, obj: &ExId) -> usize { + fn length>(&self, obj: O) -> usize { self.doc.length(obj) } - fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize { + fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { self.doc.length_at(obj, heads) } - fn text(&self, obj: &ExId) -> Result { + fn text>(&self, obj: O) -> Result { self.doc.text(obj) } - fn text_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Result { + fn text_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result { self.doc.text_at(obj, heads) } - fn value>( + fn value, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, ) -> Result, AutomergeError> { self.doc.value(obj, prop) } - fn value_at>( + fn value_at, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { self.doc.value_at(obj, prop, heads) } - fn values>( + fn values, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, ) -> Result, AutomergeError> { self.doc.values(obj, prop) } - fn values_at>( + fn values_at, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 93d65cd1..455f5444 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -1,3 +1,5 @@ +use 
std::borrow::Borrow; + use crate::exid::ExId; use crate::{AutomergeError, ChangeHash, Keys, KeysAt, Prop, Value}; use unicode_segmentation::UnicodeSegmentation; @@ -20,42 +22,50 @@ pub trait Transactable { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set, V: Into>( + fn set, V: Into, O: Borrow>( &mut self, - obj: &ExId, + obj: O, prop: P, value: V, ) -> Result, AutomergeError>; /// Insert a value into a list at the given index. - fn insert>( + fn insert, O: Borrow>( &mut self, - obj: &ExId, + obj: O, index: usize, value: V, ) -> Result, AutomergeError>; /// Increment the counter at the prop in the object by `value`. - fn inc>(&mut self, obj: &ExId, prop: P, value: i64) - -> Result<(), AutomergeError>; + fn inc, O: Borrow>( + &mut self, + obj: O, + prop: P, + value: i64, + ) -> Result<(), AutomergeError>; /// Delete the value at prop in the object. - fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError>; + fn del, O: Borrow>( + &mut self, + obj: O, + prop: P, + ) -> Result<(), AutomergeError>; /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert /// the new elements. - fn splice( + fn splice>( &mut self, - obj: &ExId, + obj: O, pos: usize, del: usize, vals: Vec, ) -> Result, AutomergeError>; /// Like [`Self::splice`] but for text. - fn splice_text( + fn splice_text>( &mut self, - obj: &ExId, + obj: O, pos: usize, del: usize, text: &str, @@ -68,47 +78,51 @@ pub trait Transactable { } /// Get the keys of the given object, it should be a map. - fn keys(&self, obj: &ExId) -> Keys; + fn keys>(&self, obj: O) -> Keys; /// Get the keys of the given object at a point in history. - fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> KeysAt; + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt; /// Get the length of the given object. 
- fn length(&self, obj: &ExId) -> usize; + fn length>(&self, obj: O) -> usize; /// Get the length of the given object at a point in history. - fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize; + fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize; /// Get the string that this text object represents. - fn text(&self, obj: &ExId) -> Result; + fn text>(&self, obj: O) -> Result; /// Get the string that this text object represents at a point in history. - fn text_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Result; + fn text_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result; /// Get the value at this prop in the object. - fn value>( + fn value, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, ) -> Result, AutomergeError>; /// Get the value at this prop in the object at a point in history. - fn value_at>( + fn value_at, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError>; - fn values>( + fn values, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, ) -> Result, AutomergeError>; - fn values_at>( + fn values_at, O: Borrow>( &self, - obj: &ExId, + obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError>; diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 31e1353e..ab15f949 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -897,7 +897,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { let mut doc1 = new_doc_with_actor(actor1); - let list = doc1.set(&ROOT, "list", Value::list())?.unwrap(); + let list = doc1.set(ROOT, "list", Value::list())?.unwrap(); doc1.insert(&list, 0, "a")?; doc1.insert(&list, 1, "b")?; doc1.insert(&list, 2, "c")?; diff --git a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs index af7a85ef..a5e85791 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -5,7 +5,7 @@ use std::fs; fn replay_trace_tx(commands: Vec<(usize, usize, Vec)>) -> Automerge { let mut doc = 
Automerge::new(); let mut tx = doc.transaction(); - let text = tx.set(&ROOT, "text", Value::text()).unwrap().unwrap(); + let text = tx.set(ROOT, "text", Value::text()).unwrap().unwrap(); for (pos, del, vals) in commands { tx.splice(&text, pos, del, vals).unwrap(); } @@ -15,7 +15,7 @@ fn replay_trace_tx(commands: Vec<(usize, usize, Vec)>) -> Automerge { fn replay_trace_autotx(commands: Vec<(usize, usize, Vec)>) -> AutoCommit { let mut doc = AutoCommit::new(); - let text = doc.set(&ROOT, "text", Value::text()).unwrap().unwrap(); + let text = doc.set(ROOT, "text", Value::text()).unwrap().unwrap(); for (pos, del, vals) in commands { doc.splice(&text, pos, del, vals).unwrap(); } diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index 62b0356b..98d6198e 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -20,7 +20,7 @@ fn main() -> Result<(), AutomergeError> { let now = Instant::now(); let mut tx = doc.transaction(); - let text = tx.set(&ROOT, "text", Value::text()).unwrap().unwrap(); + let text = tx.set(ROOT, "text", Value::text()).unwrap().unwrap(); for (i, (pos, del, vals)) in commands.into_iter().enumerate() { if i % 1000 == 0 { println!("Processed {} edits in {} ms", i, now.elapsed().as_millis()); From b323f988f9f41fe700b920560f6bc787f067f5b2 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 10:28:40 +0000 Subject: [PATCH 124/730] Add test for overwriting a map and getting value from old one --- automerge/src/automerge.rs | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 6fa040fb..d3668589 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1201,4 +1201,29 @@ mod tests { let new_bytes = doc.save().unwrap(); assert_eq!(bytes, new_bytes); } + + #[test] + fn overwrite_map() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + // create a map + let map1 = tx.set(&ROOT, "a", 
Value::map()).unwrap().unwrap(); + tx.set(&map1, "b", 1).unwrap(); + // overwrite the first map with a new one + let map2 = tx.set(&ROOT, "a", Value::map()).unwrap().unwrap(); + tx.set(&map2, "c", 2).unwrap(); + tx.commit(); + + // we can get the new map by traversing the tree + let map = doc.value(&ROOT, "a").unwrap().unwrap().1; + assert_eq!(doc.value(&map, "b").unwrap(), None); + // and get values from it + assert_eq!( + doc.value(&map, "c").unwrap().map(|s| s.0), + Some(ScalarValue::Int(2).into()) + ); + + // but we can still access the old one if we know the ID! + assert_eq!(doc.value(&map1, "b").unwrap(), None); + } } From 51f1c05545f065753ee216ea96780fb3759aeb40 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 10:36:10 +0000 Subject: [PATCH 125/730] Add mutation of old object --- automerge/src/automerge.rs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index d3668589..ebe9cc2d 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1224,6 +1224,14 @@ mod tests { ); // but we can still access the old one if we know the ID! - assert_eq!(doc.value(&map1, "b").unwrap(), None); + assert_eq!(doc.value(&map1, "b").unwrap().unwrap().0, Value::int(1)); + // and even set new things in it! 
+ let mut tx = doc.transaction(); + + // This should panic as we are modifying an old object + tx.set(&map1, "c", 3).unwrap(); + tx.commit(); + + assert_eq!(doc.value(&map1, "c").unwrap(), None); } } From 9ae988e75409ccc376958d8f5969ccd730fee9a0 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 14:37:24 +0000 Subject: [PATCH 126/730] Use as_ref instead of borrow --- automerge/src/autocommit.rs | 42 +++++++++---------- automerge/src/automerge.rs | 37 ++++++++-------- automerge/src/exid.rs | 6 +++ .../src/transaction/manual_transaction.rs | 42 +++++++++---------- automerge/src/transaction/transactable.rs | 39 ++++++++--------- 5 files changed, 81 insertions(+), 85 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 57c182ad..66f928f5 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -1,5 +1,3 @@ -use std::borrow::Borrow; - use crate::exid::ExId; use crate::transaction::{CommitOptions, Transactable}; use crate::types::Patch; @@ -289,19 +287,19 @@ impl Transactable for AutoCommit { // PropAt::() // NthAt::() - fn keys>(&self, obj: O) -> Keys { + fn keys>(&self, obj: O) -> Keys { self.doc.keys(obj) } - fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { self.doc.keys_at(obj, heads) } - fn length>(&self, obj: O) -> usize { + fn length>(&self, obj: O) -> usize { self.doc.length(obj) } - fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { + fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { self.doc.length_at(obj, heads) } @@ -323,7 +321,7 @@ impl Transactable for AutoCommit { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set, V: Into, O: Borrow>( + fn set, V: Into, O: AsRef>( &mut self, obj: O, prop: P, @@ -331,10 +329,10 @@ impl Transactable for AutoCommit { ) -> Result, AutomergeError> { 
self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.set(&mut self.doc, obj.borrow(), prop, value) + tx.set(&mut self.doc, obj.as_ref(), prop, value) } - fn insert, O: Borrow>( + fn insert, O: AsRef>( &mut self, obj: O, index: usize, @@ -342,10 +340,10 @@ impl Transactable for AutoCommit { ) -> Result, AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.insert(&mut self.doc, obj.borrow(), index, value) + tx.insert(&mut self.doc, obj.as_ref(), index, value) } - fn inc, O: Borrow>( + fn inc, O: AsRef>( &mut self, obj: O, prop: P, @@ -353,22 +351,22 @@ impl Transactable for AutoCommit { ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.inc(&mut self.doc, obj.borrow(), prop, value) + tx.inc(&mut self.doc, obj.as_ref(), prop, value) } - fn del, O: Borrow>( + fn del, O: AsRef>( &mut self, obj: O, prop: P, ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.del(&mut self.doc, obj.borrow(), prop) + tx.del(&mut self.doc, obj.as_ref(), prop) } /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert /// the new elements - fn splice>( + fn splice>( &mut self, obj: O, pos: usize, @@ -377,14 +375,14 @@ impl Transactable for AutoCommit { ) -> Result, AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.splice(&mut self.doc, obj.borrow(), pos, del, vals) + tx.splice(&mut self.doc, obj.as_ref(), pos, del, vals) } - fn text>(&self, obj: O) -> Result { + fn text>(&self, obj: O) -> Result { self.doc.text(obj) } - fn text_at>( + fn text_at>( &self, obj: O, heads: &[ChangeHash], @@ -395,7 +393,7 @@ impl Transactable for AutoCommit { // TODO - I need to return these OpId's here **only** to get // the legacy conflicts format of { [opid]: value } // Something better? 
- fn value, O: Borrow>( + fn value, O: AsRef>( &self, obj: O, prop: P, @@ -403,7 +401,7 @@ impl Transactable for AutoCommit { self.doc.value(obj, prop) } - fn value_at, O: Borrow>( + fn value_at, O: AsRef>( &self, obj: O, prop: P, @@ -412,7 +410,7 @@ impl Transactable for AutoCommit { self.doc.value_at(obj, prop, heads) } - fn values, O: Borrow>( + fn values, O: AsRef>( &self, obj: O, prop: P, @@ -420,7 +418,7 @@ impl Transactable for AutoCommit { self.doc.values(obj, prop) } - fn values_at, O: Borrow>( + fn values_at, O: AsRef>( &self, obj: O, prop: P, diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index cc60b2b0..ee89658b 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1,4 +1,3 @@ -use std::borrow::Borrow; use std::collections::{HashMap, HashSet, VecDeque}; use crate::change::encode_document; @@ -178,8 +177,8 @@ impl Automerge { /// /// For a map this returns the keys of the map. /// For a list this returns the element ids (opids) encoded as strings. - pub fn keys>(&self, obj: O) -> Keys { - if let Ok(obj) = self.exid_to_obj(obj.borrow()) { + pub fn keys>(&self, obj: O) -> Keys { + if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { let iter_keys = self.ops.keys(obj); Keys::new(self, iter_keys) } else { @@ -188,8 +187,8 @@ impl Automerge { } /// Historical version of [`keys`](Self::keys). 
- pub fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { - if let Ok(obj) = self.exid_to_obj(obj.borrow()) { + pub fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { + if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { let clock = self.clock_at(heads); KeysAt::new(self, self.ops.keys_at(obj, clock)) } else { @@ -197,8 +196,8 @@ impl Automerge { } } - pub fn length>(&self, obj: O) -> usize { - if let Ok(inner_obj) = self.exid_to_obj(obj.borrow()) { + pub fn length>(&self, obj: O) -> usize { + if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { match self.ops.object_type(&inner_obj) { Some(ObjType::Map) | Some(ObjType::Table) => self.keys(obj).count(), Some(ObjType::List) | Some(ObjType::Text) => { @@ -211,8 +210,8 @@ impl Automerge { } } - pub fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { - if let Ok(inner_obj) = self.exid_to_obj(obj.borrow()) { + pub fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { + if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { let clock = self.clock_at(heads); match self.ops.object_type(&inner_obj) { Some(ObjType::Map) | Some(ObjType::Table) => self.keys_at(obj, heads).count(), @@ -252,8 +251,8 @@ impl Automerge { ExId::Id(id.0, self.ops.m.actors.cache[id.1].clone(), id.1) } - pub fn text>(&self, obj: O) -> Result { - let obj = self.exid_to_obj(obj.borrow())?; + pub fn text>(&self, obj: O) -> Result { + let obj = self.exid_to_obj(obj.as_ref())?; let query = self.ops.search(obj, query::ListVals::new()); let mut buffer = String::new(); for q in &query.ops { @@ -264,12 +263,12 @@ impl Automerge { Ok(buffer) } - pub fn text_at>( + pub fn text_at>( &self, obj: O, heads: &[ChangeHash], ) -> Result { - let obj = self.exid_to_obj(obj.borrow())?; + let obj = self.exid_to_obj(obj.as_ref())?; let clock = self.clock_at(heads); let query = self.ops.search(obj, query::ListValsAt::new(clock)); let mut buffer = String::new(); @@ -284,7 +283,7 @@ impl Automerge { // TODO - I need to return these OpId's 
here **only** to get // the legacy conflicts format of { [opid]: value } // Something better? - pub fn value, O: Borrow>( + pub fn value, O: AsRef>( &self, obj: O, prop: P, @@ -292,7 +291,7 @@ impl Automerge { Ok(self.values(obj, prop.into())?.last().cloned()) } - pub fn value_at, O: Borrow>( + pub fn value_at, O: AsRef>( &self, obj: O, prop: P, @@ -301,12 +300,12 @@ impl Automerge { Ok(self.values_at(obj, prop, heads)?.last().cloned()) } - pub fn values, O: Borrow>( + pub fn values, O: AsRef>( &self, obj: O, prop: P, ) -> Result, AutomergeError> { - let obj = self.exid_to_obj(obj.borrow())?; + let obj = self.exid_to_obj(obj.as_ref())?; let result = match prop.into() { Prop::Map(p) => { let prop = self.ops.m.props.lookup(&p); @@ -332,14 +331,14 @@ impl Automerge { Ok(result) } - pub fn values_at, O: Borrow>( + pub fn values_at, O: AsRef>( &self, obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { let prop = prop.into(); - let obj = self.exid_to_obj(obj.borrow())?; + let obj = self.exid_to_obj(obj.as_ref())?; let clock = self.clock_at(heads); let result = match prop { Prop::Map(p) => { diff --git a/automerge/src/exid.rs b/automerge/src/exid.rs index a3eb6087..c2cee4d8 100644 --- a/automerge/src/exid.rs +++ b/automerge/src/exid.rs @@ -74,3 +74,9 @@ impl Serialize for ExId { serializer.serialize_str(self.to_string().as_str()) } } + +impl AsRef for ExId { + fn as_ref(&self) -> &ExId { + &self + } +} diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 2993d96f..44e5640d 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -1,5 +1,3 @@ -use std::borrow::Borrow; - use crate::exid::ExId; use crate::{Automerge, ChangeHash, KeysAt, Prop, Value}; use crate::{AutomergeError, Keys}; @@ -87,7 +85,7 @@ impl<'a> Transactable for Transaction<'a> { /// - The object does not exist /// - The key is the wrong type for the object /// - The 
key does not exist in the object - fn set, V: Into, O: Borrow>( + fn set, V: Into, O: AsRef>( &mut self, obj: O, prop: P, @@ -96,10 +94,10 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .set(self.doc, obj.borrow(), prop, value) + .set(self.doc, obj.as_ref(), prop, value) } - fn insert, O: Borrow>( + fn insert, O: AsRef>( &mut self, obj: O, index: usize, @@ -108,10 +106,10 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .insert(self.doc, obj.borrow(), index, value) + .insert(self.doc, obj.as_ref(), index, value) } - fn inc, O: Borrow>( + fn inc, O: AsRef>( &mut self, obj: O, prop: P, @@ -120,10 +118,10 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .inc(self.doc, obj.borrow(), prop, value) + .inc(self.doc, obj.as_ref(), prop, value) } - fn del, O: Borrow>( + fn del, O: AsRef>( &mut self, obj: O, prop: P, @@ -131,12 +129,12 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .del(self.doc, obj.borrow(), prop) + .del(self.doc, obj.as_ref(), prop) } /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert /// the new elements - fn splice>( + fn splice>( &mut self, obj: O, pos: usize, @@ -146,30 +144,30 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .splice(self.doc, obj.borrow(), pos, del, vals) + .splice(self.doc, obj.as_ref(), pos, del, vals) } - fn keys>(&self, obj: O) -> Keys { + fn keys>(&self, obj: O) -> Keys { self.doc.keys(obj) } - fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { self.doc.keys_at(obj, heads) } - fn length>(&self, obj: O) -> usize { + fn length>(&self, obj: O) -> usize { self.doc.length(obj) } - fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { + fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { self.doc.length_at(obj, heads) } - fn text>(&self, obj: O) -> Result { + fn text>(&self, obj: O) -> Result { self.doc.text(obj) } - fn text_at>( + fn text_at>( &self, obj: O, heads: &[ChangeHash], @@ -177,7 +175,7 @@ impl<'a> Transactable for Transaction<'a> { self.doc.text_at(obj, heads) } - fn value, O: Borrow>( + fn value, O: AsRef>( &self, obj: O, prop: P, @@ -185,7 +183,7 @@ impl<'a> Transactable for Transaction<'a> { self.doc.value(obj, prop) } - fn value_at, O: Borrow>( + fn value_at, O: AsRef>( &self, obj: O, prop: P, @@ -194,7 +192,7 @@ impl<'a> Transactable for Transaction<'a> { self.doc.value_at(obj, prop, heads) } - fn values, O: Borrow>( + fn values, O: AsRef>( &self, obj: O, prop: P, @@ -202,7 +200,7 @@ impl<'a> Transactable for Transaction<'a> { self.doc.values(obj, prop) } - fn values_at, O: Borrow>( + fn values_at, O: AsRef>( &self, obj: O, prop: P, diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 455f5444..d3f40621 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -1,5 +1,3 @@ -use std::borrow::Borrow; - use crate::exid::ExId; use 
crate::{AutomergeError, ChangeHash, Keys, KeysAt, Prop, Value}; use unicode_segmentation::UnicodeSegmentation; @@ -22,7 +20,7 @@ pub trait Transactable { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set, V: Into, O: Borrow>( + fn set, V: Into, O: AsRef>( &mut self, obj: O, prop: P, @@ -30,7 +28,7 @@ pub trait Transactable { ) -> Result, AutomergeError>; /// Insert a value into a list at the given index. - fn insert, O: Borrow>( + fn insert, O: AsRef>( &mut self, obj: O, index: usize, @@ -38,7 +36,7 @@ pub trait Transactable { ) -> Result, AutomergeError>; /// Increment the counter at the prop in the object by `value`. - fn inc, O: Borrow>( + fn inc, O: AsRef>( &mut self, obj: O, prop: P, @@ -46,15 +44,12 @@ pub trait Transactable { ) -> Result<(), AutomergeError>; /// Delete the value at prop in the object. - fn del, O: Borrow>( - &mut self, - obj: O, - prop: P, - ) -> Result<(), AutomergeError>; + fn del, O: AsRef>(&mut self, obj: O, prop: P) + -> Result<(), AutomergeError>; /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert /// the new elements. - fn splice>( + fn splice>( &mut self, obj: O, pos: usize, @@ -63,7 +58,7 @@ pub trait Transactable { ) -> Result, AutomergeError>; /// Like [`Self::splice`] but for text. - fn splice_text>( + fn splice_text>( &mut self, obj: O, pos: usize, @@ -78,49 +73,49 @@ pub trait Transactable { } /// Get the keys of the given object, it should be a map. - fn keys>(&self, obj: O) -> Keys; + fn keys>(&self, obj: O) -> Keys; /// Get the keys of the given object at a point in history. - fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt; + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt; /// Get the length of the given object. - fn length>(&self, obj: O) -> usize; + fn length>(&self, obj: O) -> usize; /// Get the length of the given object at a point in history. 
- fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize; + fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize; /// Get the string that this text object represents. - fn text>(&self, obj: O) -> Result; + fn text>(&self, obj: O) -> Result; /// Get the string that this text object represents at a point in history. - fn text_at>( + fn text_at>( &self, obj: O, heads: &[ChangeHash], ) -> Result; /// Get the value at this prop in the object. - fn value, O: Borrow>( + fn value, O: AsRef>( &self, obj: O, prop: P, ) -> Result, AutomergeError>; /// Get the value at this prop in the object at a point in history. - fn value_at, O: Borrow>( + fn value_at, O: AsRef>( &self, obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError>; - fn values, O: Borrow>( + fn values, O: AsRef>( &self, obj: O, prop: P, ) -> Result, AutomergeError>; - fn values_at, O: Borrow>( + fn values_at, O: AsRef>( &self, obj: O, prop: P, From 7cf9faf7da5760756711fafd7071091da5ff60a8 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 14:40:35 +0000 Subject: [PATCH 127/730] Fix overwriting maps test --- automerge/src/automerge.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index ebe9cc2d..478eed7a 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1203,7 +1203,7 @@ mod tests { } #[test] - fn overwrite_map() { + fn mutate_old_objects() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); // create a map @@ -1227,11 +1227,9 @@ mod tests { assert_eq!(doc.value(&map1, "b").unwrap().unwrap().0, Value::int(1)); // and even set new things in it! 
let mut tx = doc.transaction(); - - // This should panic as we are modifying an old object tx.set(&map1, "c", 3).unwrap(); tx.commit(); - assert_eq!(doc.value(&map1, "c").unwrap(), None); + assert_eq!(doc.value(&map1, "c").unwrap().unwrap().0, Value::int(3)); } } From 76ff910e060a7eb960c3f470ac89da07fca86fc0 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 3 Mar 2022 11:09:26 -0500 Subject: [PATCH 128/730] update license deny.yaml --- automerge-cli/Cargo.lock | 857 +++++++++++++++++++++++++++++++++++++++ automerge-cli/Cargo.toml | 12 +- deny.toml | 10 +- 3 files changed, 869 insertions(+), 10 deletions(-) create mode 100644 automerge-cli/Cargo.lock diff --git a/automerge-cli/Cargo.lock b/automerge-cli/Cargo.lock new file mode 100644 index 00000000..a330ee89 --- /dev/null +++ b/automerge-cli/Cargo.lock @@ -0,0 +1,857 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "ansi_term" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi", +] + +[[package]] +name = "anyhow" +version = "1.0.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "159bb86af3a200e19a068f4224eae4c8bb2d0fa054c7e5d1cacd5cef95e684cd" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "automerge" +version = "0.1.0" +dependencies = [ + "flate2", + "fxhash", + "hex", + "itertools", + "js-sys", + "leb128", + "nonzero_ext", + "rand", + "serde", + "sha2", + "smol_str", + "thiserror", + "tinyvec", + "tracing", + "unicode-segmentation", + "uuid", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "automerge-cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "atty", + "automerge", + "clap", + "colored_json", + "combine", + "duct", + "maplit", + "serde_json", + "thiserror", + "tracing-subscriber", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "block-buffer" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bumpalo" +version = "3.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "clap" +version = "3.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ced1892c55c910c1219e98d6fc8d71f6bddba7905866ce740066d8bfea859312" +dependencies = [ + "atty", + "bitflags", + "clap_derive", + "indexmap", + "lazy_static", + "os_str_bytes", + "strsim", + "termcolor", + "textwrap", +] + +[[package]] +name = "clap_derive" +version = "3.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da95d038ede1a964ce99f49cbe27a7fb538d1da595e4b4f70b8c8f338d17bf16" +dependencies = [ + "heck", + "proc-macro-error", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "colored_json" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd32eb54d016e203b7c2600e3a7802c75843a92e38ccc4869aefeca21771a64" +dependencies = [ + "ansi_term", + "atty", + "libc", + "serde", + "serde_json", +] + +[[package]] +name = "combine" +version = "4.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50b727aacc797f9fc28e355d21f34709ac4fc9adecfe470ad07b8f4464f53062" +dependencies = [ + "bytes", + "memchr", +] + +[[package]] +name = "cpufeatures" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crypto-common" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "digest" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" +dependencies = [ + "block-buffer", + 
"crypto-common", +] + +[[package]] +name = "duct" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fc6a0a59ed0888e0041cf708e66357b7ae1a82f1c67247e1f93b5e0818f7d8d" +dependencies = [ + "libc", + "once_cell", + "os_pipe", + "shared_child", +] + +[[package]] +name = "either" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" + +[[package]] +name = "flate2" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" +dependencies = [ + "cfg-if", + "crc32fast", + "libc", + "miniz_oxide", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + +[[package]] +name = "generic-array" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d39cd93900197114fa1fcb7ae84ca742095eed9442088988ae74fa744e930e77" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" + +[[package]] +name = "heck" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "indexmap" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "itertools" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" + +[[package]] +name = "js-sys" +version = "0.3.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "leb128" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" + +[[package]] +name = "libc" +version = "0.2.119" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4" + +[[package]] +name = "log" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "maplit" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" + +[[package]] +name = "memchr" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" + +[[package]] +name = "miniz_oxide" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" +dependencies = [ + "adler", + "autocfg", +] + +[[package]] +name = "nonzero_ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44a1290799eababa63ea60af0cbc3f03363e328e58f32fb0294798ed3e85f444" + +[[package]] +name = "once_cell" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" + +[[package]] +name = "os_pipe" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb233f06c2307e1f5ce2ecad9f8121cffbbee2c95428f44ea85222e460d0d213" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "os_str_bytes" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64" +dependencies = [ + "memchr", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" + +[[package]] +name = "ppv-lite86" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = "proc-macro2" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "quote" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" +dependencies = [ + "getrandom", +] + +[[package]] +name = "ryu" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" + +[[package]] +name = "serde" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha2" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shared_child" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6be9f7d5565b1483af3e72975e2dee33879b3b86bd48c0929fccf6585d79e65a" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "smallvec" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" + +[[package]] +name = "smol_str" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61d15c83e300cce35b7c8cd39ff567c1ef42dde6d4a1a38dbdbf9a59902261bd" +dependencies = [ + "serde", +] + 
+[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "syn" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "termcolor" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "textwrap" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb" + +[[package]] +name = "thiserror" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +dependencies = [ + "once_cell", +] + +[[package]] +name = "tinyvec" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" + +[[package]] +name = "tracing" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6c650a8ef0cd2dd93736f033d21cbd1224c5a967aa0c258d00fcf7dafef9b9f" +dependencies = [ + "cfg-if", + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8276d9a4a3a558d7b7ad5303ad50b53d58264641b82914b7ada36bd762e7a716" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03cfcb51380632a72d3111cb8d3447a8d908e577d31beeac006f836383d29a23" +dependencies = [ + "lazy_static", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6923477a48e41c1951f1999ef8bb5a3023eb723ceadafe78ffb65dc366761e3" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e0ab7bdc962035a87fba73f3acca9b8a8d0034c2e6f60b84aeaaddddc155dce" +dependencies = [ + "ansi_term", + "sharded-slab", + "smallvec", + "thread_local", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "typenum" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" + +[[package]] +name = "unicode-segmentation" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" + +[[package]] +name = "unicode-xid" +version = "0.2.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" + +[[package]] +name = "uuid" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +dependencies = [ + "getrandom", + "serde", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" + +[[package]] +name = "wasm-bindgen" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" +dependencies = [ + "bumpalo", + "lazy_static", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" + +[[package]] +name = "web-sys" +version = "0.3.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/automerge-cli/Cargo.toml b/automerge-cli/Cargo.toml index 1761e489..38dec0e6 100644 --- a/automerge-cli/Cargo.toml +++ b/automerge-cli/Cargo.toml @@ -15,14 +15,14 @@ doc = false clap = {version = "~3.1", features = ["derive"]} serde_json = "^1.0" anyhow = "1.0" -atty = "0.2" -thiserror = "1.0.16" -combine = "4.5.2" -maplit = "1.0.2" -colored_json = "2.1.0" +atty = "^0.2" +thiserror = 
"^1.0" +combine = "^4.5" +maplit = "^1.0" +colored_json = "^2.1" tracing-subscriber = "~0.3" automerge = { path = "../automerge" } [dev-dependencies] -duct = "0.13" +duct = "^0.13" diff --git a/deny.toml b/deny.toml index cd56a097..888b7c58 100644 --- a/deny.toml +++ b/deny.toml @@ -100,9 +100,9 @@ confidence-threshold = 0.8 # Allow 1 or more licenses on a per-crate basis, so that particular licenses # aren't accepted for every possible crate as with the normal allow list exceptions = [ - # Each entry is the crate and version constraint, and its specific allow - # list - #{ allow = ["Zlib"], name = "adler32", version = "*" }, + # this is a LGPL like license in the CLI + # since this is an application not a library people would link to it should be fine + { allow = ["EPL-2.0"], name = "colored_json" }, ] # Some crates don't have (easily) machine readable licensing information, @@ -165,8 +165,10 @@ deny = [ ] # Certain crates/versions that will be skipped when doing duplicate detection. skip = [ - # This is a transitive depdendency of criterion, which is only included for benchmarking anyway + # These are transitive depdendencies of criterion, which is only included for benchmarking anyway { name = "itoa", version = "0.4.8" }, + { name = "textwrap", version = "0.11.0" }, + { name = "clap", version = "2.34.0" }, ] # Similarly to `skip` allows you to skip certain crates during duplicate # detection. 
Unlike skip, it also includes the entire tree of transitive From c0070e081d5e24f7ee3dd25fed50df9ce9848f9a Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 18:21:58 +0000 Subject: [PATCH 129/730] Reorder generics --- automerge/src/autocommit.rs | 16 ++++++++-------- automerge/src/automerge.rs | 8 ++++---- automerge/src/transaction/manual_transaction.rs | 16 ++++++++-------- automerge/src/transaction/transactable.rs | 16 ++++++++-------- 4 files changed, 28 insertions(+), 28 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 66f928f5..35dc590a 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -321,7 +321,7 @@ impl Transactable for AutoCommit { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set, V: Into, O: AsRef>( + fn set, P: Into, V: Into>( &mut self, obj: O, prop: P, @@ -332,7 +332,7 @@ impl Transactable for AutoCommit { tx.set(&mut self.doc, obj.as_ref(), prop, value) } - fn insert, O: AsRef>( + fn insert, V: Into>( &mut self, obj: O, index: usize, @@ -343,7 +343,7 @@ impl Transactable for AutoCommit { tx.insert(&mut self.doc, obj.as_ref(), index, value) } - fn inc, O: AsRef>( + fn inc, P: Into>( &mut self, obj: O, prop: P, @@ -354,7 +354,7 @@ impl Transactable for AutoCommit { tx.inc(&mut self.doc, obj.as_ref(), prop, value) } - fn del, O: AsRef>( + fn del, P: Into>( &mut self, obj: O, prop: P, @@ -393,7 +393,7 @@ impl Transactable for AutoCommit { // TODO - I need to return these OpId's here **only** to get // the legacy conflicts format of { [opid]: value } // Something better? 
- fn value, O: AsRef>( + fn value, P: Into>( &self, obj: O, prop: P, @@ -401,7 +401,7 @@ impl Transactable for AutoCommit { self.doc.value(obj, prop) } - fn value_at, O: AsRef>( + fn value_at, P: Into>( &self, obj: O, prop: P, @@ -410,7 +410,7 @@ impl Transactable for AutoCommit { self.doc.value_at(obj, prop, heads) } - fn values, O: AsRef>( + fn values, P: Into>( &self, obj: O, prop: P, @@ -418,7 +418,7 @@ impl Transactable for AutoCommit { self.doc.values(obj, prop) } - fn values_at, O: AsRef>( + fn values_at, P: Into>( &self, obj: O, prop: P, diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index ee89658b..3eb66423 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -283,7 +283,7 @@ impl Automerge { // TODO - I need to return these OpId's here **only** to get // the legacy conflicts format of { [opid]: value } // Something better? - pub fn value, O: AsRef>( + pub fn value, P: Into>( &self, obj: O, prop: P, @@ -291,7 +291,7 @@ impl Automerge { Ok(self.values(obj, prop.into())?.last().cloned()) } - pub fn value_at, O: AsRef>( + pub fn value_at, P: Into>( &self, obj: O, prop: P, @@ -300,7 +300,7 @@ impl Automerge { Ok(self.values_at(obj, prop, heads)?.last().cloned()) } - pub fn values, O: AsRef>( + pub fn values, P: Into>( &self, obj: O, prop: P, @@ -331,7 +331,7 @@ impl Automerge { Ok(result) } - pub fn values_at, O: AsRef>( + pub fn values_at, P: Into>( &self, obj: O, prop: P, diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 44e5640d..d87a594d 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -85,7 +85,7 @@ impl<'a> Transactable for Transaction<'a> { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set, V: Into, O: AsRef>( + fn set, P: Into, V: Into>( &mut self, obj: O, prop: P, @@ -97,7 +97,7 @@ impl<'a> 
Transactable for Transaction<'a> { .set(self.doc, obj.as_ref(), prop, value) } - fn insert, O: AsRef>( + fn insert, V: Into>( &mut self, obj: O, index: usize, @@ -109,7 +109,7 @@ impl<'a> Transactable for Transaction<'a> { .insert(self.doc, obj.as_ref(), index, value) } - fn inc, O: AsRef>( + fn inc, P: Into>( &mut self, obj: O, prop: P, @@ -121,7 +121,7 @@ impl<'a> Transactable for Transaction<'a> { .inc(self.doc, obj.as_ref(), prop, value) } - fn del, O: AsRef>( + fn del, P: Into>( &mut self, obj: O, prop: P, @@ -175,7 +175,7 @@ impl<'a> Transactable for Transaction<'a> { self.doc.text_at(obj, heads) } - fn value, O: AsRef>( + fn value, P: Into>( &self, obj: O, prop: P, @@ -183,7 +183,7 @@ impl<'a> Transactable for Transaction<'a> { self.doc.value(obj, prop) } - fn value_at, O: AsRef>( + fn value_at, P: Into>( &self, obj: O, prop: P, @@ -192,7 +192,7 @@ impl<'a> Transactable for Transaction<'a> { self.doc.value_at(obj, prop, heads) } - fn values, O: AsRef>( + fn values, P: Into>( &self, obj: O, prop: P, @@ -200,7 +200,7 @@ impl<'a> Transactable for Transaction<'a> { self.doc.values(obj, prop) } - fn values_at, O: AsRef>( + fn values_at, P: Into>( &self, obj: O, prop: P, diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index d3f40621..4ef9950d 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -20,7 +20,7 @@ pub trait Transactable { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set, V: Into, O: AsRef>( + fn set, P: Into, V: Into>( &mut self, obj: O, prop: P, @@ -28,7 +28,7 @@ pub trait Transactable { ) -> Result, AutomergeError>; /// Insert a value into a list at the given index. 
- fn insert, O: AsRef>( + fn insert, V: Into>( &mut self, obj: O, index: usize, @@ -36,7 +36,7 @@ pub trait Transactable { ) -> Result, AutomergeError>; /// Increment the counter at the prop in the object by `value`. - fn inc, O: AsRef>( + fn inc, P: Into>( &mut self, obj: O, prop: P, @@ -44,7 +44,7 @@ pub trait Transactable { ) -> Result<(), AutomergeError>; /// Delete the value at prop in the object. - fn del, O: AsRef>(&mut self, obj: O, prop: P) + fn del, P: Into>(&mut self, obj: O, prop: P) -> Result<(), AutomergeError>; /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert @@ -95,27 +95,27 @@ pub trait Transactable { ) -> Result; /// Get the value at this prop in the object. - fn value, O: AsRef>( + fn value, P: Into>( &self, obj: O, prop: P, ) -> Result, AutomergeError>; /// Get the value at this prop in the object at a point in history. - fn value_at, O: AsRef>( + fn value_at, P: Into>( &self, obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError>; - fn values, O: AsRef>( + fn values, P: Into>( &self, obj: O, prop: P, ) -> Result, AutomergeError>; - fn values_at, O: AsRef>( + fn values_at, P: Into>( &self, obj: O, prop: P, From 967b467aa6cda429cd43161fe37dc4b3b626a0bf Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 18:22:42 +0000 Subject: [PATCH 130/730] Fix clippy --- automerge/src/exid.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/exid.rs b/automerge/src/exid.rs index c2cee4d8..2c174e28 100644 --- a/automerge/src/exid.rs +++ b/automerge/src/exid.rs @@ -77,6 +77,6 @@ impl Serialize for ExId { impl AsRef for ExId { fn as_ref(&self) -> &ExId { - &self + self } } From 9d01406e1344556316de76f120d8e04d14ddefff Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 3 Mar 2022 14:36:10 -0500 Subject: [PATCH 131/730] missing gitignore --- automerge-cli/.gitignore | 1 + 1 file changed, 1 insertion(+) create mode 100644 automerge-cli/.gitignore diff --git 
a/automerge-cli/.gitignore b/automerge-cli/.gitignore new file mode 100644 index 00000000..eb5a316c --- /dev/null +++ b/automerge-cli/.gitignore @@ -0,0 +1 @@ +target From affb85b0b42a35919fee0cbb4bc05ea69611a8d8 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 16:10:13 +0000 Subject: [PATCH 132/730] Add make to transaction API --- automerge/src/autocommit.rs | 17 ++++++-- automerge/src/transaction/inner.rs | 41 +++++++++++++++---- .../src/transaction/manual_transaction.rs | 18 ++++++-- automerge/src/transaction/transactable.rs | 26 ++++++++++-- 4 files changed, 84 insertions(+), 18 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 35dc590a..9ca683a1 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -5,7 +5,7 @@ use crate::{ change::export_change, transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, }; -use crate::{Keys, KeysAt, SyncMessage, SyncState}; +use crate::{Keys, KeysAt, ObjType, ScalarValue, SyncMessage, SyncState}; /// An automerge document that automatically manages transactions. 
#[derive(Debug, Clone)] @@ -321,17 +321,28 @@ impl Transactable for AutoCommit { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set, P: Into, V: Into>( + fn set, P: Into, V: Into>( &mut self, obj: O, prop: P, value: V, - ) -> Result, AutomergeError> { + ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); tx.set(&mut self.doc, obj.as_ref(), prop, value) } + fn make, P: Into, V: Into>( + &mut self, + obj: O, + prop: P, + value: V, + ) -> Result { + self.ensure_transaction_open(); + let tx = self.transaction.as_mut().unwrap(); + tx.make(&mut self.doc, obj.as_ref(), prop, value) + } + fn insert, V: Into>( &mut self, obj: O, diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 6a32178e..dc9cc160 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -3,7 +3,7 @@ use crate::exid::ExId; use crate::query::{self, OpIdSearch}; use crate::types::{Key, ObjId, OpId}; use crate::{change::export_change, types::Op, Automerge, ChangeHash, Prop, Value}; -use crate::{AutomergeError, OpType}; +use crate::{AutomergeError, ObjType, OpType, ScalarValue}; #[derive(Debug, Clone)] pub struct TransactionInner { @@ -81,20 +81,43 @@ impl TransactionInner { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - pub fn set, V: Into>( + pub fn set, V: Into>( &mut self, doc: &mut Automerge, obj: &ExId, prop: P, value: V, - ) -> Result, AutomergeError> { + ) -> Result<(), AutomergeError> { let obj = doc.exid_to_obj(obj)?; - let value = value.into(); - if let Some(id) = self.local_op(doc, obj, prop.into(), value.into())? 
{ - Ok(Some(doc.id_to_exid(id))) - } else { - Ok(None) - } + let value = Value::Scalar(value.into()); + self.local_op(doc, obj, prop.into(), value.into())?; + Ok(()) + } + + /// Set the value of property `P` to value `V` in object `obj`. + /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + pub fn make, V: Into>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + prop: P, + value: V, + ) -> Result { + let obj = doc.exid_to_obj(obj)?; + let value = Value::Object(value.into()); + let id = self.local_op(doc, obj, prop.into(), value.into())?.unwrap(); + Ok(doc.id_to_exid(id)) } fn next_id(&mut self) -> OpId { diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index d87a594d..6999a004 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -1,5 +1,5 @@ use crate::exid::ExId; -use crate::{Automerge, ChangeHash, KeysAt, Prop, Value}; +use crate::{Automerge, ChangeHash, KeysAt, ObjType, Prop, ScalarValue, Value}; use crate::{AutomergeError, Keys}; use super::{CommitOptions, Transactable, TransactionInner}; @@ -85,18 +85,30 @@ impl<'a> Transactable for Transaction<'a> { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set, P: Into, V: Into>( + fn set, P: Into, V: Into>( &mut self, obj: O, prop: P, value: V, - ) -> Result, AutomergeError> { + ) -> Result<(), AutomergeError> { self.inner .as_mut() .unwrap() .set(self.doc, obj.as_ref(), prop, value) } + fn make, P: Into, V: Into>( + &mut self, + obj: O, + prop: P, + value: V, + ) -> Result { + self.inner + .as_mut() + .unwrap() + .make(self.doc, 
obj.as_ref(), prop, value) + } + fn insert, V: Into>( &mut self, obj: O, diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 4ef9950d..f97ca0f7 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -1,5 +1,5 @@ use crate::exid::ExId; -use crate::{AutomergeError, ChangeHash, Keys, KeysAt, Prop, Value}; +use crate::{AutomergeError, ChangeHash, Keys, KeysAt, ObjType, Prop, ScalarValue, Value}; use unicode_segmentation::UnicodeSegmentation; /// A way of mutating a document within a single change. @@ -20,12 +20,32 @@ pub trait Transactable { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set, P: Into, V: Into>( + fn set, P: Into, V: Into>( &mut self, obj: O, prop: P, value: V, - ) -> Result, AutomergeError>; + ) -> Result<(), AutomergeError>; + + /// Set the value of property `P` to value `V` in object `obj`. + /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + fn make, P: Into, V: Into>( + &mut self, + obj: O, + prop: P, + value: V, + ) -> Result; /// Insert a value into a list at the given index. 
fn insert, V: Into>( From 1a6abddb501dc7b05ff173d3062de7e1cc1cd4b4 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 16:45:50 +0000 Subject: [PATCH 133/730] Example of make in the API --- automerge/src/autocommit.rs | 15 +++++++++-- automerge/src/transaction/inner.rs | 25 +++++++++++++------ .../src/transaction/manual_transaction.rs | 16 ++++++++++-- automerge/src/transaction/transactable.rs | 12 +++++++-- 4 files changed, 55 insertions(+), 13 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 9ca683a1..d50a5767 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -343,17 +343,28 @@ impl Transactable for AutoCommit { tx.make(&mut self.doc, obj.as_ref(), prop, value) } - fn insert, V: Into>( + fn insert, V: Into>( &mut self, obj: O, index: usize, value: V, - ) -> Result, AutomergeError> { + ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); tx.insert(&mut self.doc, obj.as_ref(), index, value) } + fn make_insert>( + &mut self, + obj: &ExId, + index: usize, + value: V, + ) -> Result { + self.ensure_transaction_open(); + let tx = self.transaction.as_mut().unwrap(); + tx.make_insert(&mut self.doc, obj, index, value) + } + fn inc, P: Into>( &mut self, obj: O, diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index dc9cc160..413fd9be 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -138,19 +138,30 @@ impl TransactionInner { self.operations.push(op); } - pub fn insert>( + pub fn insert>( &mut self, doc: &mut Automerge, obj: &ExId, index: usize, value: V, - ) -> Result, AutomergeError> { + ) -> Result<(), AutomergeError> { let obj = doc.exid_to_obj(obj)?; - if let Some(id) = self.do_insert(doc, obj, index, value)? 
{ - Ok(Some(doc.id_to_exid(id))) - } else { - Ok(None) - } + self.do_insert(doc, obj, index, Value::Scalar(value.into()))?; + Ok(()) + } + + pub fn make_insert>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + index: usize, + value: V, + ) -> Result { + let obj = doc.exid_to_obj(obj)?; + let id = self + .do_insert(doc, obj, index, Value::Object(value.into()))? + .unwrap(); + Ok(doc.id_to_exid(id)) } fn do_insert>( diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 6999a004..0e2e105a 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -109,18 +109,30 @@ impl<'a> Transactable for Transaction<'a> { .make(self.doc, obj.as_ref(), prop, value) } - fn insert, V: Into>( + fn insert, V: Into>( &mut self, obj: O, index: usize, value: V, - ) -> Result, AutomergeError> { + ) -> Result<(), AutomergeError> { self.inner .as_mut() .unwrap() .insert(self.doc, obj.as_ref(), index, value) } + fn make_insert>( + &mut self, + obj: &ExId, + index: usize, + value: V, + ) -> Result { + self.inner + .as_mut() + .unwrap() + .make_insert(self.doc, obj, index, value) + } + fn inc, P: Into>( &mut self, obj: O, diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index f97ca0f7..5a42f963 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -48,12 +48,20 @@ pub trait Transactable { ) -> Result; /// Insert a value into a list at the given index. - fn insert, V: Into>( + fn insert, V: Into>( &mut self, obj: O, index: usize, value: V, - ) -> Result, AutomergeError>; + ) -> Result<(), AutomergeError>; + + /// Insert a value into a list at the given index. + fn make_insert>( + &mut self, + obj: &ExId, + index: usize, + value: V, + ) -> Result; /// Increment the counter at the prop in the object by `value`. 
fn inc, P: Into>( From 9406bf09eaf58684bb91bf8926bb8e915d192d5d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 2 Mar 2022 16:55:06 +0000 Subject: [PATCH 134/730] Fix some tests --- automerge/src/transaction/inner.rs | 2 +- automerge/tests/test.rs | 87 ++++++------------------------ 2 files changed, 18 insertions(+), 71 deletions(-) diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 413fd9be..b2693470 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -358,7 +358,7 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let a = tx.set(ROOT, "a", Value::map()).unwrap().unwrap(); + let a = tx.make(ROOT, "a", ObjType::Map).unwrap(); tx.set(&a, "b", 1).unwrap(); assert!(tx.value(&a, "b").unwrap().is_some()); } diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index ab15f949..65b05138 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,5 +1,5 @@ use automerge::transaction::Transactable; -use automerge::{ActorId, AutoCommit, Automerge, Value, ROOT}; +use automerge::{ActorId, AutoCommit, Automerge, ObjType, ScalarValue, Value, ROOT}; mod helpers; #[allow(unused_imports)] @@ -20,36 +20,6 @@ fn no_conflict_on_repeated_assignment() { ); } -#[test] -fn no_change_on_repeated_map_set() { - let mut doc = new_doc(); - doc.set(&automerge::ROOT, "foo", 1).unwrap(); - assert!(doc.set(&automerge::ROOT, "foo", 1).unwrap().is_none()); -} - -#[test] -fn no_change_on_repeated_list_set() { - let mut doc = new_doc(); - let list_id = doc - .set(&automerge::ROOT, "list", automerge::Value::list()) - .unwrap() - .unwrap(); - doc.insert(&list_id, 0, 1).unwrap(); - doc.set(&list_id, 0, 1).unwrap(); - assert!(doc.set(&list_id, 0, 1).unwrap().is_none()); -} - -#[test] -fn no_change_on_list_insert_followed_by_set_of_same_value() { - let mut doc = new_doc(); - let list_id = doc - .set(&automerge::ROOT, "list", automerge::Value::list()) - .unwrap() - 
.unwrap(); - doc.insert(&list_id, 0, 1).unwrap(); - assert!(doc.set(&list_id, 0, 1).unwrap().is_none()); -} - #[test] fn repeated_map_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); @@ -74,10 +44,7 @@ fn repeated_map_assignment_which_resolves_conflict_not_ignored() { fn repeated_list_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id = doc1 - .set(&automerge::ROOT, "list", automerge::Value::list()) - .unwrap() - .unwrap(); + let list_id = doc1.make(&automerge::ROOT, "list", ObjType::List).unwrap(); doc1.insert(&list_id, 0, 123).unwrap(); doc2.merge(&mut doc1).unwrap(); doc2.set(&list_id, 0, 456).unwrap(); @@ -99,10 +66,7 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { #[test] fn list_deletion() { let mut doc = new_doc(); - let list_id = doc - .set(&automerge::ROOT, "list", automerge::Value::list()) - .unwrap() - .unwrap(); + let list_id = doc.make(&automerge::ROOT, "list", ObjType::List).unwrap(); doc.insert(&list_id, 0, 123).unwrap(); doc.insert(&list_id, 1, 456).unwrap(); doc.insert(&list_id, 2, 789).unwrap(); @@ -219,10 +183,7 @@ fn concurrent_updates_of_same_field() { fn concurrent_updates_of_same_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id = doc1 - .set(&automerge::ROOT, "birds", automerge::Value::list()) - .unwrap() - .unwrap(); + let list_id = doc1.make(&automerge::ROOT, "birds", ObjType::List).unwrap(); doc1.insert(&list_id, 0, "finch").unwrap(); doc2.merge(&mut doc1).unwrap(); doc1.set(&list_id, 0, "greenfinch").unwrap(); @@ -249,10 +210,8 @@ fn assignment_conflicts_of_different_types() { let mut doc2 = new_doc(); let mut doc3 = new_doc(); doc1.set(&automerge::ROOT, "field", "string").unwrap(); - doc2.set(&automerge::ROOT, "field", automerge::Value::list()) - .unwrap(); - doc3.set(&automerge::ROOT, "field", automerge::Value::map()) - .unwrap(); + doc2.make(&automerge::ROOT, "field", 
ObjType::List).unwrap(); + doc3.make(&automerge::ROOT, "field", ObjType::Map).unwrap(); doc1.merge(&mut doc2).unwrap(); doc1.merge(&mut doc3).unwrap(); @@ -273,10 +232,7 @@ fn changes_within_conflicting_map_field() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); doc1.set(&automerge::ROOT, "field", "string").unwrap(); - let map_id = doc2 - .set(&automerge::ROOT, "field", automerge::Value::map()) - .unwrap() - .unwrap(); + let map_id = doc2.make(&automerge::ROOT, "field", ObjType::Map).unwrap(); doc2.set(&map_id, "innerKey", 42).unwrap(); doc1.merge(&mut doc2).unwrap(); @@ -300,24 +256,15 @@ fn changes_within_conflicting_list_element() { let (actor1, actor2) = sorted_actors(); let mut doc1 = new_doc_with_actor(actor1); let mut doc2 = new_doc_with_actor(actor2); - let list_id = doc1 - .set(&automerge::ROOT, "list", automerge::Value::list()) - .unwrap() - .unwrap(); + let list_id = doc1.make(&automerge::ROOT, "list", ObjType::List).unwrap(); doc1.insert(&list_id, 0, "hello").unwrap(); doc2.merge(&mut doc1).unwrap(); - let map_in_doc1 = doc1 - .set(&list_id, 0, automerge::Value::map()) - .unwrap() - .unwrap(); + let map_in_doc1 = doc1.make(&list_id, 0, ObjType::Map).unwrap(); doc1.set(&map_in_doc1, "map1", true).unwrap(); doc1.set(&map_in_doc1, "key", 1).unwrap(); - let map_in_doc2 = doc2 - .set(&list_id, 0, automerge::Value::map()) - .unwrap() - .unwrap(); + let map_in_doc2 = doc2.make(&list_id, 0, ObjType::Map).unwrap(); doc1.merge(&mut doc2).unwrap(); doc2.set(&map_in_doc2, "map2", true).unwrap(); doc2.set(&map_in_doc2, "key", 2).unwrap(); @@ -897,7 +844,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { let mut doc1 = new_doc_with_actor(actor1); - let list = doc1.set(ROOT, "list", Value::list())?.unwrap(); + let list = doc1.make(ROOT, "list", ObjType::List)?; doc1.insert(&list, 0, "a")?; doc1.insert(&list, 1, "b")?; doc1.insert(&list, 2, "c")?; @@ -908,13 +855,13 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { let mut 
doc3 = AutoCommit::load(&doc1.save()?)?; doc3.set_actor(actor3); - doc1.set(&list, 1, Value::counter(0))?; - doc2.set(&list, 1, Value::counter(10))?; - doc3.set(&list, 1, Value::counter(100))?; + doc1.set(&list, 1, ScalarValue::counter(0))?; + doc2.set(&list, 1, ScalarValue::counter(10))?; + doc3.set(&list, 1, ScalarValue::counter(100))?; - doc1.set(&list, 2, Value::counter(0))?; - doc2.set(&list, 2, Value::counter(10))?; - doc3.set(&list, 2, Value::int(100))?; + doc1.set(&list, 2, ScalarValue::counter(0))?; + doc2.set(&list, 2, ScalarValue::counter(10))?; + doc3.set(&list, 2, 100)?; doc1.inc(&list, 1, 1)?; doc1.inc(&list, 2, 1)?; From e42adaf84b7a65113e75ec99d0bef220f3a993df Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 09:31:45 +0000 Subject: [PATCH 135/730] Fixup automerge tests --- automerge/examples/quickstart.rs | 7 +- automerge/src/autocommit.rs | 4 +- automerge/src/automerge.rs | 38 +++++--- .../src/transaction/manual_transaction.rs | 4 +- automerge/tests/test.rs | 86 ++++--------------- 5 files changed, 51 insertions(+), 88 deletions(-) diff --git a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs index 9972cb1b..4d233f21 100644 --- a/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -1,7 +1,8 @@ use automerge::transaction::CommitOptions; use automerge::transaction::Transactable; +use automerge::AutomergeError; +use automerge::ObjType; use automerge::{Automerge, ROOT}; -use automerge::{AutomergeError, Value}; // Based on https://automerge.github.io/docs/quickstart fn main() { @@ -12,9 +13,11 @@ fn main() { |tx| { let cards = tx.set(ROOT, "cards", Value::list()).unwrap().unwrap(); let card1 = tx.insert(&cards, 0, Value::map())?.unwrap(); + let cards = tx.make(ROOT, "cards", ObjType::List).unwrap(); + let card1 = tx.make_insert(&cards, 0, ObjType::Map)?; tx.set(&card1, "title", "Rewrite everything in Clojure")?; tx.set(&card1, "done", false)?; - let card2 = tx.insert(&cards, 0, 
Value::map())?.unwrap(); + let card2 = tx.make_insert(&cards, 0, ObjType::Map)?; tx.set(&card2, "title", "Rewrite everything in Haskell")?; tx.set(&card2, "done", false)?; Ok((cards, card1)) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index d50a5767..0573d9de 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -248,12 +248,12 @@ impl AutoCommit { /// ``` /// # use automerge::transaction::CommitOptions; /// # use automerge::transaction::Transactable; - /// # use automerge::Value; /// # use automerge::ROOT; /// # use automerge::AutoCommit; + /// # use automerge::ObjType; /// # use std::time::SystemTime; /// let mut doc = AutoCommit::new(); - /// doc.set(ROOT, "todos", Value::list()).unwrap(); + /// doc.make(ROOT, "todos", ObjType::List).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index eeeb6dd4..1d543a77 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -878,13 +878,23 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); // setting a scalar value shouldn't return an opid as no object was created. - assert!(tx.set(ROOT, "a", 1)?.is_none()); - // setting the same value shouldn't return an opid as there is no change. - assert!(tx.set(ROOT, "a", 1)?.is_none()); + tx.set(ROOT, "a", 1)?; - assert!(tx.set(ROOT, "b", Value::map())?.is_some()); + // setting the same value shouldn't return an opid as there is no change. + tx.set(ROOT, "a", 1)?; + + assert_eq!(tx.pending_ops(), 1); + + let map = tx.make(ROOT, "b", ObjType::Map)?; // object already exists at b but setting a map again overwrites it so we get an opid. 
- assert!(tx.set(ROOT, "b", Value::map())?.is_some()); + tx.set(map, "a", 2)?; + + tx.make(ROOT, "b", ObjType::Map)?; + + assert_eq!(tx.pending_ops(), 4); + let map = tx.value(ROOT, "b").unwrap().unwrap().1; + assert_eq!(tx.value(&map, "a")?, None); + tx.commit(); Ok(()) } @@ -894,7 +904,7 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); let mut tx = doc.transaction(); - let list_id = tx.set(ROOT, "items", Value::list())?.unwrap(); + let list_id = tx.make(ROOT, "items", ObjType::List)?; tx.set(ROOT, "zzz", "zzzval")?; assert!(tx.value(ROOT, "items")?.unwrap().1 == list_id); tx.insert(&list_id, 0, "a")?; @@ -928,7 +938,7 @@ mod tests { fn test_inc() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - tx.set(ROOT, "counter", Value::counter(10))?; + tx.set(ROOT, "counter", ScalarValue::counter(10))?; assert!(tx.value(ROOT, "counter")?.unwrap().0 == Value::counter(10)); tx.inc(ROOT, "counter", 10)?; assert!(tx.value(ROOT, "counter")?.unwrap().0 == Value::counter(20)); @@ -985,7 +995,7 @@ mod tests { fn test_save_text() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let text = tx.set(ROOT, "text", Value::text())?.unwrap(); + let text = tx.make(ROOT, "text", ObjType::Text)?; tx.commit(); let heads1 = doc.get_heads(); let mut tx = doc.transaction(); @@ -1085,24 +1095,24 @@ mod tests { doc.set_actor("aaaa".try_into().unwrap()); let mut tx = doc.transaction(); - let list = tx.set(ROOT, "list", Value::list())?.unwrap(); + let list = tx.make(ROOT, "list", ObjType::List)?; tx.commit(); let heads1 = doc.get_heads(); let mut tx = doc.transaction(); - tx.insert(&list, 0, Value::int(10))?; + tx.insert(&list, 0, 10)?; tx.commit(); let heads2 = doc.get_heads(); let mut tx = doc.transaction(); - tx.set(&list, 0, Value::int(20))?; - tx.insert(&list, 0, Value::int(30))?; + tx.set(&list, 0, 20)?; + tx.insert(&list, 0, 30)?; tx.commit(); let heads3 = 
doc.get_heads(); let mut tx = doc.transaction(); - tx.set(&list, 1, Value::int(40))?; - tx.insert(&list, 1, Value::int(50))?; + tx.set(&list, 1, 40)?; + tx.insert(&list, 1, 50)?; tx.commit(); let heads4 = doc.get_heads(); diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 0e2e105a..50f2cb0d 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -41,13 +41,13 @@ impl<'a> Transaction<'a> { /// ``` /// # use automerge::transaction::CommitOptions; /// # use automerge::transaction::Transactable; - /// # use automerge::Value; /// # use automerge::ROOT; /// # use automerge::Automerge; + /// # use automerge::ObjType; /// # use std::time::SystemTime; /// let mut doc = Automerge::new(); /// let mut tx = doc.transaction(); - /// tx.set(ROOT, "todos", Value::list()).unwrap(); + /// tx.make(ROOT, "todos", ObjType::List).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; /// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 65b05138..bd091845 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -297,16 +297,10 @@ fn concurrently_assigned_nested_maps_should_not_merge() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let doc1_map_id = doc1 - .set(&automerge::ROOT, "config", automerge::Value::map()) - .unwrap() - .unwrap(); + let doc1_map_id = doc1.make(&automerge::ROOT, "config", ObjType::Map).unwrap(); doc1.set(&doc1_map_id, "background", "blue").unwrap(); - let doc2_map_id = doc2 - .set(&automerge::ROOT, "config", automerge::Value::map()) - .unwrap() - .unwrap(); + let doc2_map_id = doc2.make(&automerge::ROOT, "config", ObjType::Map).unwrap(); doc2.set(&doc2_map_id, "logo_url", "logo.png").unwrap(); doc1.merge(&mut doc2).unwrap(); @@ -333,10 +327,7 @@ fn 
concurrent_insertions_at_different_list_positions() { let mut doc2 = new_doc_with_actor(actor2); assert!(doc1.get_actor() < doc2.get_actor()); - let list_id = doc1 - .set(&automerge::ROOT, "list", automerge::Value::list()) - .unwrap() - .unwrap(); + let list_id = doc1.make(&automerge::ROOT, "list", ObjType::List).unwrap(); doc1.insert(&list_id, 0, "one").unwrap(); doc1.insert(&list_id, 1, "three").unwrap(); @@ -368,10 +359,7 @@ fn concurrent_insertions_at_same_list_position() { let mut doc2 = new_doc_with_actor(actor2); assert!(doc1.get_actor() < doc2.get_actor()); - let list_id = doc1 - .set(&automerge::ROOT, "birds", automerge::Value::list()) - .unwrap() - .unwrap(); + let list_id = doc1.make(&automerge::ROOT, "birds", ObjType::List).unwrap(); doc1.insert(&list_id, 0, "parakeet").unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -424,10 +412,7 @@ fn concurrent_assignment_and_deletion_of_a_map_entry() { fn concurrent_assignment_and_deletion_of_list_entry() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id = doc1 - .set(&automerge::ROOT, "birds", automerge::Value::list()) - .unwrap() - .unwrap(); + let list_id = doc1.make(&automerge::ROOT, "birds", ObjType::List).unwrap(); doc1.insert(&list_id, 0, "blackbird").unwrap(); doc1.insert(&list_id, 1, "thrush").unwrap(); doc1.insert(&list_id, 2, "goldfinch").unwrap(); @@ -474,10 +459,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { fn insertion_after_a_deleted_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id = doc1 - .set(&automerge::ROOT, "birds", automerge::Value::list()) - .unwrap() - .unwrap(); + let list_id = doc1.make(&automerge::ROOT, "birds", ObjType::List).unwrap(); doc1.insert(&list_id, 0, "blackbird").unwrap(); doc1.insert(&list_id, 1, "thrush").unwrap(); @@ -518,10 +500,7 @@ fn insertion_after_a_deleted_list_element() { fn concurrent_deletion_of_same_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id = doc1 - 
.set(&automerge::ROOT, "birds", automerge::Value::list()) - .unwrap() - .unwrap(); + let list_id = doc1.make(&automerge::ROOT, "birds", ObjType::List).unwrap(); doc1.insert(&list_id, 0, "albatross").unwrap(); doc1.insert(&list_id, 1, "buzzard").unwrap(); @@ -563,20 +542,13 @@ fn concurrent_updates_at_different_levels() { let mut doc2 = new_doc(); let animals = doc1 - .set(&automerge::ROOT, "animals", automerge::Value::map()) - .unwrap() - .unwrap(); - let birds = doc1 - .set(&animals, "birds", automerge::Value::map()) - .unwrap() + .make(&automerge::ROOT, "animals", ObjType::Map) .unwrap(); + let birds = doc1.make(&animals, "birds", ObjType::Map).unwrap(); doc1.set(&birds, "pink", "flamingo").unwrap(); doc1.set(&birds, "black", "starling").unwrap(); - let mammals = doc1 - .set(&animals, "mammals", automerge::Value::list()) - .unwrap() - .unwrap(); + let mammals = doc1.make(&animals, "mammals", ObjType::List).unwrap(); doc1.insert(&mammals, 0, "badger").unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -614,14 +586,8 @@ fn concurrent_updates_of_concurrently_deleted_objects() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let birds = doc1 - .set(&automerge::ROOT, "birds", automerge::Value::map()) - .unwrap() - .unwrap(); - let blackbird = doc1 - .set(&birds, "blackbird", automerge::Value::map()) - .unwrap() - .unwrap(); + let birds = doc1.make(&automerge::ROOT, "birds", ObjType::Map).unwrap(); + let blackbird = doc1.make(&birds, "blackbird", ObjType::Map).unwrap(); doc1.set(&blackbird, "feathers", "black").unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -649,8 +615,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { let mut doc2 = new_doc_with_actor(actor2); let wisdom = doc1 - .set(&automerge::ROOT, "wisdom", automerge::Value::list()) - .unwrap() + .make(&automerge::ROOT, "wisdom", ObjType::List) .unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -710,10 +675,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id( let mut 
doc1 = new_doc_with_actor(actor1); let mut doc2 = new_doc_with_actor(actor2); - let list = doc1 - .set(&automerge::ROOT, "list", automerge::Value::list()) - .unwrap() - .unwrap(); + let list = doc1.make(&automerge::ROOT, "list", ObjType::List).unwrap(); doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -736,10 +698,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() let mut doc1 = new_doc_with_actor(actor1); let mut doc2 = new_doc_with_actor(actor2); - let list = doc1 - .set(&automerge::ROOT, "list", automerge::Value::list()) - .unwrap() - .unwrap(); + let list = doc1.make(&automerge::ROOT, "list", ObjType::List).unwrap(); doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -760,10 +719,7 @@ fn insertion_consistent_with_causality() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list = doc1 - .set(&automerge::ROOT, "list", automerge::Value::list()) - .unwrap() - .unwrap(); + let list = doc1.make(&automerge::ROOT, "list", ObjType::List).unwrap(); doc1.insert(&list, 0, "four").unwrap(); doc2.merge(&mut doc1).unwrap(); doc2.insert(&list, 0, "three").unwrap(); @@ -796,15 +752,9 @@ fn save_and_restore_empty() { #[test] fn save_restore_complex() { let mut doc1 = new_doc(); - let todos = doc1 - .set(&automerge::ROOT, "todos", automerge::Value::list()) - .unwrap() - .unwrap(); + let todos = doc1.make(&automerge::ROOT, "todos", ObjType::List).unwrap(); - let first_todo = doc1 - .insert(&todos, 0, automerge::Value::map()) - .unwrap() - .unwrap(); + let first_todo = doc1.make_insert(&todos, 0, ObjType::Map).unwrap(); doc1.set(&first_todo, "title", "water plants").unwrap(); doc1.set(&first_todo, "done", false).unwrap(); From 79d493ddd207ed6a8ed065b0891919f05dac229a Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 09:33:42 +0000 Subject: [PATCH 136/730] Rename make to set_object --- automerge/examples/quickstart.rs | 8 +- automerge/src/autocommit.rs | 10 +- 
automerge/src/automerge.rs | 10 +- automerge/src/transaction/inner.rs | 6 +- .../src/transaction/manual_transaction.rs | 10 +- automerge/src/transaction/transactable.rs | 4 +- automerge/tests/test.rs | 92 +++++++++++++------ 7 files changed, 87 insertions(+), 53 deletions(-) diff --git a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs index 4d233f21..47dea7bf 100644 --- a/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -11,13 +11,11 @@ fn main() { .transact_with::<_, _, AutomergeError, _>( || CommitOptions::default().with_message("Add card".to_owned()), |tx| { - let cards = tx.set(ROOT, "cards", Value::list()).unwrap().unwrap(); - let card1 = tx.insert(&cards, 0, Value::map())?.unwrap(); - let cards = tx.make(ROOT, "cards", ObjType::List).unwrap(); - let card1 = tx.make_insert(&cards, 0, ObjType::Map)?; + let cards = tx.set_object(ROOT, "cards", ObjType::List).unwrap(); + let card1 = tx.insert_object(&cards, 0, ObjType::Map)?; tx.set(&card1, "title", "Rewrite everything in Clojure")?; tx.set(&card1, "done", false)?; - let card2 = tx.make_insert(&cards, 0, ObjType::Map)?; + let card2 = tx.insert_object(&cards, 0, ObjType::Map)?; tx.set(&card2, "title", "Rewrite everything in Haskell")?; tx.set(&card2, "done", false)?; Ok((cards, card1)) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 0573d9de..107f63f6 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -253,7 +253,7 @@ impl AutoCommit { /// # use automerge::ObjType; /// # use std::time::SystemTime; /// let mut doc = AutoCommit::new(); - /// doc.make(ROOT, "todos", ObjType::List).unwrap(); + /// doc.set_object(&ROOT, "todos", ObjType::List).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); @@ -332,7 +332,7 @@ impl Transactable for AutoCommit { tx.set(&mut 
self.doc, obj.as_ref(), prop, value) } - fn make, P: Into, V: Into>( + fn set_object, P: Into, V: Into>( &mut self, obj: O, prop: P, @@ -340,7 +340,7 @@ impl Transactable for AutoCommit { ) -> Result { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.make(&mut self.doc, obj.as_ref(), prop, value) + tx.set_object(&mut self.doc, obj.as_ref(), prop, value) } fn insert, V: Into>( @@ -354,7 +354,7 @@ impl Transactable for AutoCommit { tx.insert(&mut self.doc, obj.as_ref(), index, value) } - fn make_insert>( + fn insert_object>( &mut self, obj: &ExId, index: usize, @@ -362,7 +362,7 @@ impl Transactable for AutoCommit { ) -> Result { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.make_insert(&mut self.doc, obj, index, value) + tx.insert_object(&mut self.doc, obj, index, value) } fn inc, P: Into>( diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 1d543a77..a583c123 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -885,11 +885,11 @@ mod tests { assert_eq!(tx.pending_ops(), 1); - let map = tx.make(ROOT, "b", ObjType::Map)?; + let map = tx.set_object(ROOT, "b", ObjType::Map)?; // object already exists at b but setting a map again overwrites it so we get an opid. 
tx.set(map, "a", 2)?; - tx.make(ROOT, "b", ObjType::Map)?; + tx.set_object(ROOT, "b", ObjType::Map)?; assert_eq!(tx.pending_ops(), 4); let map = tx.value(ROOT, "b").unwrap().unwrap().1; @@ -904,7 +904,7 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); let mut tx = doc.transaction(); - let list_id = tx.make(ROOT, "items", ObjType::List)?; + let list_id = tx.set_object(ROOT, "items", ObjType::List)?; tx.set(ROOT, "zzz", "zzzval")?; assert!(tx.value(ROOT, "items")?.unwrap().1 == list_id); tx.insert(&list_id, 0, "a")?; @@ -995,7 +995,7 @@ mod tests { fn test_save_text() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let text = tx.make(ROOT, "text", ObjType::Text)?; + let text = tx.set_object(ROOT, "text", ObjType::Text)?; tx.commit(); let heads1 = doc.get_heads(); let mut tx = doc.transaction(); @@ -1095,7 +1095,7 @@ mod tests { doc.set_actor("aaaa".try_into().unwrap()); let mut tx = doc.transaction(); - let list = tx.make(ROOT, "list", ObjType::List)?; + let list = tx.set_object(ROOT, "list", ObjType::List)?; tx.commit(); let heads1 = doc.get_heads(); diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index b2693470..b7b657c6 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -107,7 +107,7 @@ impl TransactionInner { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - pub fn make, V: Into>( + pub fn set_object, V: Into>( &mut self, doc: &mut Automerge, obj: &ExId, @@ -150,7 +150,7 @@ impl TransactionInner { Ok(()) } - pub fn make_insert>( + pub fn insert_object>( &mut self, doc: &mut Automerge, obj: &ExId, @@ -358,7 +358,7 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let a = tx.make(ROOT, "a", ObjType::Map).unwrap(); + let a = tx.set_object(ROOT, "a", ObjType::Map).unwrap(); tx.set(&a, "b", 1).unwrap(); 
assert!(tx.value(&a, "b").unwrap().is_some()); } diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 50f2cb0d..716bf636 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -47,7 +47,7 @@ impl<'a> Transaction<'a> { /// # use std::time::SystemTime; /// let mut doc = Automerge::new(); /// let mut tx = doc.transaction(); - /// tx.make(ROOT, "todos", ObjType::List).unwrap(); + /// tx.set_object(ROOT, "todos", ObjType::List).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; /// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); @@ -97,7 +97,7 @@ impl<'a> Transactable for Transaction<'a> { .set(self.doc, obj.as_ref(), prop, value) } - fn make, P: Into, V: Into>( + fn set_object, P: Into, V: Into>( &mut self, obj: O, prop: P, @@ -106,7 +106,7 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .make(self.doc, obj.as_ref(), prop, value) + .set_object(self.doc, obj.as_ref(), prop, value) } fn insert, V: Into>( @@ -121,7 +121,7 @@ impl<'a> Transactable for Transaction<'a> { .insert(self.doc, obj.as_ref(), index, value) } - fn make_insert>( + fn insert_object>( &mut self, obj: &ExId, index: usize, @@ -130,7 +130,7 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .make_insert(self.doc, obj, index, value) + .insert_object(self.doc, obj, index, value) } fn inc, P: Into>( diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 5a42f963..3f1c29db 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -40,7 +40,7 @@ pub trait Transactable { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn make, P: Into, V: Into>( + fn 
set_object, P: Into, V: Into>( &mut self, obj: O, prop: P, @@ -56,7 +56,7 @@ pub trait Transactable { ) -> Result<(), AutomergeError>; /// Insert a value into a list at the given index. - fn make_insert>( + fn insert_object>( &mut self, obj: &ExId, index: usize, diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index bd091845..72b79730 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -44,7 +44,9 @@ fn repeated_map_assignment_which_resolves_conflict_not_ignored() { fn repeated_list_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id = doc1.make(&automerge::ROOT, "list", ObjType::List).unwrap(); + let list_id = doc1 + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); doc1.insert(&list_id, 0, 123).unwrap(); doc2.merge(&mut doc1).unwrap(); doc2.set(&list_id, 0, 456).unwrap(); @@ -66,7 +68,9 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { #[test] fn list_deletion() { let mut doc = new_doc(); - let list_id = doc.make(&automerge::ROOT, "list", ObjType::List).unwrap(); + let list_id = doc + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); doc.insert(&list_id, 0, 123).unwrap(); doc.insert(&list_id, 1, 456).unwrap(); doc.insert(&list_id, 2, 789).unwrap(); @@ -183,7 +187,9 @@ fn concurrent_updates_of_same_field() { fn concurrent_updates_of_same_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id = doc1.make(&automerge::ROOT, "birds", ObjType::List).unwrap(); + let list_id = doc1 + .set_object(&automerge::ROOT, "birds", ObjType::List) + .unwrap(); doc1.insert(&list_id, 0, "finch").unwrap(); doc2.merge(&mut doc1).unwrap(); doc1.set(&list_id, 0, "greenfinch").unwrap(); @@ -210,8 +216,10 @@ fn assignment_conflicts_of_different_types() { let mut doc2 = new_doc(); let mut doc3 = new_doc(); doc1.set(&automerge::ROOT, "field", "string").unwrap(); - doc2.make(&automerge::ROOT, "field", 
ObjType::List).unwrap(); - doc3.make(&automerge::ROOT, "field", ObjType::Map).unwrap(); + doc2.set_object(&automerge::ROOT, "field", ObjType::List) + .unwrap(); + doc3.set_object(&automerge::ROOT, "field", ObjType::Map) + .unwrap(); doc1.merge(&mut doc2).unwrap(); doc1.merge(&mut doc3).unwrap(); @@ -232,7 +240,9 @@ fn changes_within_conflicting_map_field() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); doc1.set(&automerge::ROOT, "field", "string").unwrap(); - let map_id = doc2.make(&automerge::ROOT, "field", ObjType::Map).unwrap(); + let map_id = doc2 + .set_object(&automerge::ROOT, "field", ObjType::Map) + .unwrap(); doc2.set(&map_id, "innerKey", 42).unwrap(); doc1.merge(&mut doc2).unwrap(); @@ -256,15 +266,17 @@ fn changes_within_conflicting_list_element() { let (actor1, actor2) = sorted_actors(); let mut doc1 = new_doc_with_actor(actor1); let mut doc2 = new_doc_with_actor(actor2); - let list_id = doc1.make(&automerge::ROOT, "list", ObjType::List).unwrap(); + let list_id = doc1 + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); doc1.insert(&list_id, 0, "hello").unwrap(); doc2.merge(&mut doc1).unwrap(); - let map_in_doc1 = doc1.make(&list_id, 0, ObjType::Map).unwrap(); + let map_in_doc1 = doc1.set_object(&list_id, 0, ObjType::Map).unwrap(); doc1.set(&map_in_doc1, "map1", true).unwrap(); doc1.set(&map_in_doc1, "key", 1).unwrap(); - let map_in_doc2 = doc2.make(&list_id, 0, ObjType::Map).unwrap(); + let map_in_doc2 = doc2.set_object(&list_id, 0, ObjType::Map).unwrap(); doc1.merge(&mut doc2).unwrap(); doc2.set(&map_in_doc2, "map2", true).unwrap(); doc2.set(&map_in_doc2, "key", 2).unwrap(); @@ -297,10 +309,14 @@ fn concurrently_assigned_nested_maps_should_not_merge() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let doc1_map_id = doc1.make(&automerge::ROOT, "config", ObjType::Map).unwrap(); + let doc1_map_id = doc1 + .set_object(&automerge::ROOT, "config", ObjType::Map) + .unwrap(); doc1.set(&doc1_map_id, "background", 
"blue").unwrap(); - let doc2_map_id = doc2.make(&automerge::ROOT, "config", ObjType::Map).unwrap(); + let doc2_map_id = doc2 + .set_object(&automerge::ROOT, "config", ObjType::Map) + .unwrap(); doc2.set(&doc2_map_id, "logo_url", "logo.png").unwrap(); doc1.merge(&mut doc2).unwrap(); @@ -327,7 +343,9 @@ fn concurrent_insertions_at_different_list_positions() { let mut doc2 = new_doc_with_actor(actor2); assert!(doc1.get_actor() < doc2.get_actor()); - let list_id = doc1.make(&automerge::ROOT, "list", ObjType::List).unwrap(); + let list_id = doc1 + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); doc1.insert(&list_id, 0, "one").unwrap(); doc1.insert(&list_id, 1, "three").unwrap(); @@ -359,7 +377,9 @@ fn concurrent_insertions_at_same_list_position() { let mut doc2 = new_doc_with_actor(actor2); assert!(doc1.get_actor() < doc2.get_actor()); - let list_id = doc1.make(&automerge::ROOT, "birds", ObjType::List).unwrap(); + let list_id = doc1 + .set_object(&automerge::ROOT, "birds", ObjType::List) + .unwrap(); doc1.insert(&list_id, 0, "parakeet").unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -412,7 +432,9 @@ fn concurrent_assignment_and_deletion_of_a_map_entry() { fn concurrent_assignment_and_deletion_of_list_entry() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id = doc1.make(&automerge::ROOT, "birds", ObjType::List).unwrap(); + let list_id = doc1 + .set_object(&automerge::ROOT, "birds", ObjType::List) + .unwrap(); doc1.insert(&list_id, 0, "blackbird").unwrap(); doc1.insert(&list_id, 1, "thrush").unwrap(); doc1.insert(&list_id, 2, "goldfinch").unwrap(); @@ -459,7 +481,9 @@ fn concurrent_assignment_and_deletion_of_list_entry() { fn insertion_after_a_deleted_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id = doc1.make(&automerge::ROOT, "birds", ObjType::List).unwrap(); + let list_id = doc1 + .set_object(&automerge::ROOT, "birds", ObjType::List) + .unwrap(); doc1.insert(&list_id, 0, "blackbird").unwrap(); 
doc1.insert(&list_id, 1, "thrush").unwrap(); @@ -500,7 +524,9 @@ fn insertion_after_a_deleted_list_element() { fn concurrent_deletion_of_same_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list_id = doc1.make(&automerge::ROOT, "birds", ObjType::List).unwrap(); + let list_id = doc1 + .set_object(&automerge::ROOT, "birds", ObjType::List) + .unwrap(); doc1.insert(&list_id, 0, "albatross").unwrap(); doc1.insert(&list_id, 1, "buzzard").unwrap(); @@ -542,13 +568,13 @@ fn concurrent_updates_at_different_levels() { let mut doc2 = new_doc(); let animals = doc1 - .make(&automerge::ROOT, "animals", ObjType::Map) + .set_object(&automerge::ROOT, "animals", ObjType::Map) .unwrap(); - let birds = doc1.make(&animals, "birds", ObjType::Map).unwrap(); + let birds = doc1.set_object(&animals, "birds", ObjType::Map).unwrap(); doc1.set(&birds, "pink", "flamingo").unwrap(); doc1.set(&birds, "black", "starling").unwrap(); - let mammals = doc1.make(&animals, "mammals", ObjType::List).unwrap(); + let mammals = doc1.set_object(&animals, "mammals", ObjType::List).unwrap(); doc1.insert(&mammals, 0, "badger").unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -586,8 +612,10 @@ fn concurrent_updates_of_concurrently_deleted_objects() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let birds = doc1.make(&automerge::ROOT, "birds", ObjType::Map).unwrap(); - let blackbird = doc1.make(&birds, "blackbird", ObjType::Map).unwrap(); + let birds = doc1 + .set_object(&automerge::ROOT, "birds", ObjType::Map) + .unwrap(); + let blackbird = doc1.set_object(&birds, "blackbird", ObjType::Map).unwrap(); doc1.set(&blackbird, "feathers", "black").unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -615,7 +643,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { let mut doc2 = new_doc_with_actor(actor2); let wisdom = doc1 - .make(&automerge::ROOT, "wisdom", ObjType::List) + .set_object(&automerge::ROOT, "wisdom", ObjType::List) .unwrap(); doc2.merge(&mut doc1).unwrap(); @@ 
-675,7 +703,9 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id( let mut doc1 = new_doc_with_actor(actor1); let mut doc2 = new_doc_with_actor(actor2); - let list = doc1.make(&automerge::ROOT, "list", ObjType::List).unwrap(); + let list = doc1 + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -698,7 +728,9 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() let mut doc1 = new_doc_with_actor(actor1); let mut doc2 = new_doc_with_actor(actor2); - let list = doc1.make(&automerge::ROOT, "list", ObjType::List).unwrap(); + let list = doc1 + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -719,7 +751,9 @@ fn insertion_consistent_with_causality() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let list = doc1.make(&automerge::ROOT, "list", ObjType::List).unwrap(); + let list = doc1 + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); doc1.insert(&list, 0, "four").unwrap(); doc2.merge(&mut doc1).unwrap(); doc2.insert(&list, 0, "three").unwrap(); @@ -752,9 +786,11 @@ fn save_and_restore_empty() { #[test] fn save_restore_complex() { let mut doc1 = new_doc(); - let todos = doc1.make(&automerge::ROOT, "todos", ObjType::List).unwrap(); + let todos = doc1 + .set_object(&automerge::ROOT, "todos", ObjType::List) + .unwrap(); - let first_todo = doc1.make_insert(&todos, 0, ObjType::Map).unwrap(); + let first_todo = doc1.insert_object(&todos, 0, ObjType::Map).unwrap(); doc1.set(&first_todo, "title", "water plants").unwrap(); doc1.set(&first_todo, "done", false).unwrap(); @@ -794,7 +830,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { let mut doc1 = new_doc_with_actor(actor1); - let list = doc1.make(ROOT, "list", ObjType::List)?; + let list = doc1.set_object(ROOT, "list", ObjType::List)?; 
doc1.insert(&list, 0, "a")?; doc1.insert(&list, 1, "b")?; doc1.insert(&list, 2, "c")?; From 338dc1bece08f43a9b9efff0b65a2d8c1908e9f3 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 09:40:26 +0000 Subject: [PATCH 137/730] Change splice to accept scalars only --- automerge/src/autocommit.rs | 4 ++-- automerge/src/transaction/inner.rs | 12 ++++------ .../src/transaction/manual_transaction.rs | 4 ++-- automerge/src/transaction/transactable.rs | 22 ++++++------------- 4 files changed, 15 insertions(+), 27 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 107f63f6..241d5bfe 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -393,8 +393,8 @@ impl Transactable for AutoCommit { obj: O, pos: usize, del: usize, - vals: Vec, - ) -> Result, AutomergeError> { + vals: Vec, + ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); tx.splice(&mut self.doc, obj.as_ref(), pos, del, vals) diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index b7b657c6..819bc11f 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -327,23 +327,19 @@ impl TransactionInner { obj: &ExId, mut pos: usize, del: usize, - vals: Vec, - ) -> Result, AutomergeError> { + vals: Vec, + ) -> Result<(), AutomergeError> { let obj = doc.exid_to_obj(obj)?; for _ in 0..del { // del() self.local_op(doc, obj, pos.into(), OpType::Del)?; } - let mut results = Vec::new(); for v in vals { // insert() - let id = self.do_insert(doc, obj, pos, v)?; - if let Some(id) = id { - results.push(doc.id_to_exid(id)); - } + self.do_insert(doc, obj, pos, v)?; pos += 1; } - Ok(results) + Ok(()) } } diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 716bf636..92962b2d 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ 
b/automerge/src/transaction/manual_transaction.rs @@ -163,8 +163,8 @@ impl<'a> Transactable for Transaction<'a> { obj: O, pos: usize, del: usize, - vals: Vec, - ) -> Result, AutomergeError> { + vals: Vec, + ) -> Result<(), AutomergeError> { self.inner .as_mut() .unwrap() diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 3f1c29db..d50a1717 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -9,11 +9,6 @@ pub trait Transactable { /// Set the value of property `P` to value `V` in object `obj`. /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document - /// /// # Errors /// /// This will return an error if @@ -27,12 +22,11 @@ pub trait Transactable { value: V, ) -> Result<(), AutomergeError>; - /// Set the value of property `P` to value `V` in object `obj`. + /// Set the value of property `P` to the new object `V` in object `obj`. /// /// # Returns /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document + /// The id of the object which was created. /// /// # Errors /// @@ -44,7 +38,7 @@ pub trait Transactable { &mut self, obj: O, prop: P, - value: V, + object: V, ) -> Result; /// Insert a value into a list at the given index. @@ -55,7 +49,7 @@ pub trait Transactable { value: V, ) -> Result<(), AutomergeError>; - /// Insert a value into a list at the given index. + /// Insert an object into a list at the given index. fn insert_object>( &mut self, obj: &ExId, @@ -75,15 +69,13 @@ pub trait Transactable { fn del, P: Into>(&mut self, obj: O, prop: P) -> Result<(), AutomergeError>; - /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert - /// the new elements. 
fn splice>( &mut self, obj: O, pos: usize, del: usize, - vals: Vec, - ) -> Result, AutomergeError>; + vals: Vec, + ) -> Result<(), AutomergeError>; /// Like [`Self::splice`] but for text. fn splice_text>( @@ -92,7 +84,7 @@ pub trait Transactable { pos: usize, del: usize, text: &str, - ) -> Result, AutomergeError> { + ) -> Result<(), AutomergeError> { let mut vals = vec![]; for c in text.to_owned().graphemes(true) { vals.push(c.into()); From b6c9d90d8411268d8628b164a3e6326d7ec0ac4d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 09:41:29 +0000 Subject: [PATCH 138/730] Rename value to object in insert_object --- automerge/src/transaction/transactable.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index d50a1717..8ae8f100 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -54,7 +54,7 @@ pub trait Transactable { &mut self, obj: &ExId, index: usize, - value: V, + object: V, ) -> Result; /// Increment the counter at the prop in the object by `value`. 
From f8cffa3deb5b239a27b6341477b1f385ed4180a3 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 10:43:05 +0000 Subject: [PATCH 139/730] Fix edit trace --- edit-trace/benches/main.rs | 12 ++++++------ edit-trace/src/main.rs | 7 ++++--- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs index a5e85791..77b68714 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -1,11 +1,11 @@ -use automerge::{transaction::Transactable, AutoCommit, Automerge, Value, ROOT}; +use automerge::{transaction::Transactable, AutoCommit, Automerge, ObjType, ScalarValue, ROOT}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; use std::fs; -fn replay_trace_tx(commands: Vec<(usize, usize, Vec)>) -> Automerge { +fn replay_trace_tx(commands: Vec<(usize, usize, Vec)>) -> Automerge { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let text = tx.set(ROOT, "text", Value::text()).unwrap().unwrap(); + let text = tx.set_object(ROOT, "text", ObjType::Text).unwrap(); for (pos, del, vals) in commands { tx.splice(&text, pos, del, vals).unwrap(); } @@ -13,9 +13,9 @@ fn replay_trace_tx(commands: Vec<(usize, usize, Vec)>) -> Automerge { doc } -fn replay_trace_autotx(commands: Vec<(usize, usize, Vec)>) -> AutoCommit { +fn replay_trace_autotx(commands: Vec<(usize, usize, Vec)>) -> AutoCommit { let mut doc = AutoCommit::new(); - let text = doc.set(ROOT, "text", Value::text()).unwrap().unwrap(); + let text = doc.set_object(ROOT, "text", ObjType::Text).unwrap(); for (pos, del, vals) in commands { doc.splice(&text, pos, del, vals).unwrap(); } @@ -49,7 +49,7 @@ fn bench(c: &mut Criterion) { let mut vals = vec![]; for j in 2..edits[i].len() { let v = edits[i][j].as_str().unwrap(); - vals.push(Value::str(v)); + vals.push(ScalarValue::Str(v.into())); } commands.push((pos, del, vals)); } diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index 
98d6198e..c3063084 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -1,4 +1,5 @@ -use automerge::{transaction::Transactable, Automerge, AutomergeError, Value, ROOT}; +use automerge::{transaction::Transactable, Automerge, AutomergeError, ROOT}; +use automerge::{ObjType, ScalarValue}; use std::fs; use std::time::Instant; @@ -12,7 +13,7 @@ fn main() -> Result<(), AutomergeError> { let mut vals = vec![]; for j in 2..edits[i].len() { let v = edits[i][j].as_str().unwrap(); - vals.push(Value::str(v)); + vals.push(ScalarValue::Str(v.into())); } commands.push((pos, del, vals)); } @@ -20,7 +21,7 @@ fn main() -> Result<(), AutomergeError> { let now = Instant::now(); let mut tx = doc.transaction(); - let text = tx.set(ROOT, "text", Value::text()).unwrap().unwrap(); + let text = tx.set_object(ROOT, "text", ObjType::Text).unwrap(); for (i, (pos, del, vals)) in commands.into_iter().enumerate() { if i % 1000 == 0 { println!("Processed {} edits in {} ms", i, now.elapsed().as_millis()); From 93a20f302d4792d93887593b53a43da6e2b97831 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 11:01:21 +0000 Subject: [PATCH 140/730] Fixup wasm lib --- automerge-wasm/src/lib.rs | 131 +++++++++++++++++++++++--------------- 1 file changed, 80 insertions(+), 51 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 47f151ca..705cd67a 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -129,49 +129,50 @@ impl Automerge { start: f64, delete_count: f64, text: JsValue, - ) -> Result, JsValue> { + ) -> Result<(), JsValue> { let obj = self.import(obj)?; let start = start as usize; let delete_count = delete_count as usize; let mut vals = vec![]; if let Some(t) = text.as_string() { self.0.splice_text(&obj, start, delete_count, &t)?; - Ok(None) } else { if let Ok(array) = text.dyn_into::() { for i in array.iter() { - let (value, subvals) = self.import_value(&i, None)?; - if !subvals.is_empty() { - return 
Err(to_js_err("splice must be shallow")); - } + let value = self + .import_scalar(&i, &None) + .ok_or_else(|| to_js_err("expected scalar"))?; vals.push(value); } } - let result = self.0.splice(&obj, start, delete_count, vals)?; - if result.is_empty() { - Ok(None) - } else { - let result: Array = result - .iter() - .map(|r| JsValue::from(r.to_string())) - .collect(); - Ok(result.into()) - } + self.0.splice(&obj, start, delete_count, vals)?; } + Ok(()) } - pub fn push( + pub fn push(&mut self, obj: JsValue, value: JsValue, datatype: JsValue) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let value = self + .import_scalar(&value, &datatype.as_string()) + .ok_or_else(|| to_js_err("invalid scalar value"))?; + let index = self.0.length(&obj); + self.0.insert(&obj, index, value)?; + Ok(()) + } + + pub fn push_object( &mut self, obj: JsValue, value: JsValue, datatype: JsValue, ) -> Result, JsValue> { let obj = self.import(obj)?; - let (value, subvals) = self.import_value(&value, datatype.as_string())?; + let (value, subvals) = + to_objtype(&value, &datatype.as_string()).ok_or(to_js_err("expected object"))?; let index = self.0.length(&obj); - let opid = self.0.insert(&obj, index, value)?; + let opid = self.0.insert_object(&obj, index, value)?; self.subset(&opid, subvals)?; - Ok(opid.map(|id| id.to_string())) + Ok(opid.to_string().into()) } pub fn insert( @@ -180,13 +181,30 @@ impl Automerge { index: f64, value: JsValue, datatype: JsValue, + ) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let index = index as f64; + let value = self + .import_scalar(&value, &datatype.as_string()) + .ok_or(to_js_err("expected scalar value"))?; + self.0.insert(&obj, index as usize, value)?; + Ok(()) + } + + pub fn insert_object( + &mut self, + obj: JsValue, + index: f64, + value: JsValue, + datatype: JsValue, ) -> Result, JsValue> { let obj = self.import(obj)?; let index = index as f64; - let (value, subvals) = self.import_value(&value, datatype.as_string())?; - let opid 
= self.0.insert(&obj, index as usize, value)?; + let (value, subvals) = + to_objtype(&value, &datatype.as_string()).ok_or(to_js_err("expected object"))?; + let opid = self.0.insert_object(&obj, index as usize, value)?; self.subset(&opid, subvals)?; - Ok(opid.map(|id| id.to_string())) + Ok(opid.to_string().into()) } pub fn set( @@ -195,46 +213,57 @@ impl Automerge { prop: JsValue, value: JsValue, datatype: JsValue, + ) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let prop = self.import_prop(prop)?; + let value = self + .import_scalar(&value, &datatype.as_string()) + .ok_or(to_js_err("expected scalar value"))?; + self.0.set(&obj, prop, value)?; + Ok(()) + } + + pub fn set_object( + &mut self, + obj: JsValue, + prop: JsValue, + value: JsValue, + datatype: JsValue, ) -> Result { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; - let (value, subvals) = self.import_value(&value, datatype.as_string())?; - let opid = self.0.set(&obj, prop, value)?; + let (value, subvals) = + to_objtype(&value, &datatype.as_string()).ok_or(to_js_err("expected object"))?; + let opid = self.0.set_object(&obj, prop, value)?; self.subset(&opid, subvals)?; - Ok(opid.map(|id| id.to_string()).into()) + Ok(opid.to_string().into()) } - fn subset( - &mut self, - obj: &Option, - vals: Vec<(am::Prop, JsValue)>, - ) -> Result<(), JsValue> { - if let Some(id) = obj { - for (p, v) in vals { - let (value, subvals) = self.import_value(&v, None)?; - //let opid = self.0.set(id, p, value)?; - let opid = match p { - Prop::Map(s) => self.0.set(id, s, value)?, - Prop::Seq(i) => self.0.insert(id, i, value)?, - }; + fn subset(&mut self, obj: &am::ObjId, vals: Vec<(am::Prop, JsValue)>) -> Result<(), JsValue> { + for (p, v) in vals { + let (value, subvals) = self.import_value(&v, None)?; + //let opid = self.0.set(id, p, value)?; + let opid = match (p, value) { + (Prop::Map(s), Value::Object(objtype)) => Some(self.0.set_object(obj, s, objtype)?), + (Prop::Map(s), Value::Scalar(scalar)) => 
{ + self.0.set(obj, s, scalar)?; + None + } + (Prop::Seq(i), Value::Object(objtype)) => { + Some(self.0.insert_object(obj, i, objtype)?) + } + (Prop::Seq(i), Value::Scalar(scalar)) => { + self.0.insert(obj, i, scalar)?; + None + } + }; + if let Some(opid) = opid { self.subset(&opid, subvals)?; } } Ok(()) } - pub fn make(&mut self, obj: JsValue, prop: JsValue, value: JsValue) -> Result { - let obj = self.import(obj)?; - let prop = self.import_prop(prop)?; - if let Some((value, subvals)) = to_objtype(&value, &None) { - let opid = self.0.set(&obj, prop, value)?; - self.subset(&opid, subvals)?; - Ok(opid.unwrap().to_string()) - } else { - Err(to_js_err("invalid object type")) - } - } - pub fn inc(&mut self, obj: JsValue, prop: JsValue, value: JsValue) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; From 4fe7df3d0e5b5132006684bc2b42a3f241229409 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 3 Mar 2022 11:10:14 +0000 Subject: [PATCH 141/730] Fix clippy lint --- automerge-wasm/src/lib.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 705cd67a..7a1c4b2f 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -167,8 +167,8 @@ impl Automerge { datatype: JsValue, ) -> Result, JsValue> { let obj = self.import(obj)?; - let (value, subvals) = - to_objtype(&value, &datatype.as_string()).ok_or(to_js_err("expected object"))?; + let (value, subvals) = to_objtype(&value, &datatype.as_string()) + .ok_or_else(|| to_js_err("expected object"))?; let index = self.0.length(&obj); let opid = self.0.insert_object(&obj, index, value)?; self.subset(&opid, subvals)?; @@ -186,7 +186,7 @@ impl Automerge { let index = index as f64; let value = self .import_scalar(&value, &datatype.as_string()) - .ok_or(to_js_err("expected scalar value"))?; + .ok_or_else(|| to_js_err("expected scalar value"))?; self.0.insert(&obj, index as usize, 
value)?; Ok(()) } @@ -200,8 +200,8 @@ impl Automerge { ) -> Result, JsValue> { let obj = self.import(obj)?; let index = index as f64; - let (value, subvals) = - to_objtype(&value, &datatype.as_string()).ok_or(to_js_err("expected object"))?; + let (value, subvals) = to_objtype(&value, &datatype.as_string()) + .ok_or_else(|| to_js_err("expected object"))?; let opid = self.0.insert_object(&obj, index as usize, value)?; self.subset(&opid, subvals)?; Ok(opid.to_string().into()) @@ -218,7 +218,7 @@ impl Automerge { let prop = self.import_prop(prop)?; let value = self .import_scalar(&value, &datatype.as_string()) - .ok_or(to_js_err("expected scalar value"))?; + .ok_or_else(|| to_js_err("expected scalar value"))?; self.0.set(&obj, prop, value)?; Ok(()) } @@ -232,8 +232,8 @@ impl Automerge { ) -> Result { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; - let (value, subvals) = - to_objtype(&value, &datatype.as_string()).ok_or(to_js_err("expected object"))?; + let (value, subvals) = to_objtype(&value, &datatype.as_string()) + .ok_or_else(|| to_js_err("expected object"))?; let opid = self.0.set_object(&obj, prop, value)?; self.subset(&opid, subvals)?; Ok(opid.to_string().into()) From e1aeb4fd88efb31bc17ebb484bbb74e7c0e637f1 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 4 Mar 2022 11:33:03 +0000 Subject: [PATCH 142/730] Fixup new test after rebase --- automerge/src/automerge.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index a583c123..c78dd99a 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1221,10 +1221,10 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); // create a map - let map1 = tx.set(&ROOT, "a", Value::map()).unwrap().unwrap(); + let map1 = tx.set_object(ROOT, "a", ObjType::Map).unwrap(); tx.set(&map1, "b", 1).unwrap(); // overwrite the first map with a new one - let map2 = tx.set(&ROOT, "a", 
Value::map()).unwrap().unwrap(); + let map2 = tx.set_object(ROOT, "a", ObjType::Map).unwrap(); tx.set(&map2, "c", 2).unwrap(); tx.commit(); From 2ebb3fea6fa8ea1234d7b600e8c54997f22a702a Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 4 Mar 2022 11:37:44 +0000 Subject: [PATCH 143/730] Fixup cli --- automerge-cli/src/import.rs | 8 ++++---- automerge/src/value.rs | 6 ++++++ 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/automerge-cli/src/import.rs b/automerge-cli/src/import.rs index d7d3c45b..2a2833d2 100644 --- a/automerge-cli/src/import.rs +++ b/automerge-cli/src/import.rs @@ -31,7 +31,7 @@ fn import_map( doc.set(obj, key, s.as_ref())?; } serde_json::Value::Array(vec) => { - let id = doc.set(obj, key, am::Value::list())?.unwrap(); + let id = doc.set_object(obj, key, am::ObjType::List)?; import_list(doc, &id, vec)?; } serde_json::Value::Number(n) => { @@ -46,7 +46,7 @@ fn import_map( } } serde_json::Value::Object(map) => { - let id = doc.set(obj, key, am::Value::map())?.unwrap(); + let id = doc.set_object(obj, key, am::ObjType::Map)?; import_map(doc, &id, map)?; } } @@ -71,7 +71,7 @@ fn import_list( doc.insert(obj, i, s.as_ref())?; } serde_json::Value::Array(vec) => { - let id = doc.insert(obj, i, am::Value::list())?.unwrap(); + let id = doc.insert_object(obj, i, am::ObjType::List)?; import_list(doc, &id, vec)?; } serde_json::Value::Number(n) => { @@ -86,7 +86,7 @@ fn import_list( } } serde_json::Value::Object(map) => { - let id = doc.insert(obj, i, am::Value::map())?.unwrap(); + let id = doc.insert_object(obj, i, am::ObjType::Map)?; import_map(doc, &id, map)?; } } diff --git a/automerge/src/value.rs b/automerge/src/value.rs index 21b03a52..2d272506 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -447,6 +447,12 @@ impl From for ScalarValue { } } +impl From<()> for ScalarValue { + fn from(_: ()) -> Self { + ScalarValue::Null + } +} + impl From for ScalarValue { fn from(c: char) -> Self { 
ScalarValue::Str(SmolStr::new(c.to_string())) From 535d2eb92f6f281a9d1ae7c53ff344c755a8e0c1 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 4 Mar 2022 11:46:03 +0000 Subject: [PATCH 144/730] Fix js proxy api --- automerge-js/src/proxies.js | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js index ed3a4b97..f9e27855 100644 --- a/automerge-js/src/proxies.js +++ b/automerge-js/src/proxies.js @@ -134,21 +134,21 @@ const MapHandler = { } switch (datatype) { case "list": - const list = context.set(objectId, key, []) + const list = context.set_object(objectId, key, []) const proxyList = listProxy(context, list, [ ... path, key ], readonly ); for (let i = 0; i < value.length; i++) { proxyList[i] = value[i] } break; case "text": - const text = context.set(objectId, key, "", "text") + const text = context.set_object(objectId, key, "", "text") const proxyText = textProxy(context, text, [ ... path, key ], readonly ); for (let i = 0; i < value.length; i++) { proxyText[i] = value.get(i) } break; case "map": - const map = context.set(objectId, key, {}) + const map = context.set_object(objectId, key, {}) const proxyMap = mapProxy(context, map, [ ... path, key ], readonly ); for (const key in value) { proxyMap[key] = value[key] @@ -251,9 +251,9 @@ const ListHandler = { case "list": let list if (index >= context.length(objectId)) { - list = context.insert(objectId, index, []) + list = context.insert_object(objectId, index, []) } else { - list = context.set(objectId, index, []) + list = context.set_object(objectId, index, []) } const proxyList = listProxy(context, list, [ ... 
path, index ], readonly); proxyList.splice(0,0,...value) @@ -261,9 +261,9 @@ const ListHandler = { case "text": let text if (index >= context.length(objectId)) { - text = context.insert(objectId, index, "", "text") + text = context.insert_object(objectId, index, "", "text") } else { - text = context.set(objectId, index, "", "text") + text = context.set_object(objectId, index, "", "text") } const proxyText = textProxy(context, text, [ ... path, index ], readonly); proxyText.splice(0,0,...value) @@ -271,9 +271,9 @@ const ListHandler = { case "map": let map if (index >= context.length(objectId)) { - map = context.insert(objectId, index, {}) + map = context.insert_object(objectId, index, {}) } else { - map = context.set(objectId, index, {}) + map = context.set_object(objectId, index, {}) } const proxyMap = mapProxy(context, map, [ ... path, index ], readonly); for (const key in value) { @@ -478,17 +478,17 @@ function listMethods(target) { for (let [value,datatype] of values) { switch (datatype) { case "list": - const list = context.insert(objectId, index, []) + const list = context.insert_object(objectId, index, []) const proxyList = listProxy(context, list, [ ... path, index ], readonly); proxyList.splice(0,0,...value) break; case "text": - const text = context.insert(objectId, index, "", "text") + const text = context.insert_object(objectId, index, "", "text") const proxyText = textProxy(context, text, [ ... path, index ], readonly); proxyText.splice(0,0,...value) break; case "map": - const map = context.insert(objectId, index, {}) + const map = context.insert_object(objectId, index, {}) const proxyMap = mapProxy(context, map, [ ... 
path, index ], readonly); for (const key in value) { proxyMap[key] = value[key] From 555f4c6b9804fa9efa0106cc719e7cb0fcc668dd Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 4 Mar 2022 11:59:44 +0000 Subject: [PATCH 145/730] Unpub ensure_transaction_closed This does the same functionality as a commit but without messages or timestamps and doesn't return the heads. This shouldn't really be a public API as they should use commit. --- automerge/src/autocommit.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 35dc590a..b733817e 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -122,7 +122,7 @@ impl AutoCommit { } } - pub fn ensure_transaction_closed(&mut self) { + fn ensure_transaction_closed(&mut self) { if let Some(tx) = self.transaction.take() { self.update_history(export_change( &tx, From 2f3fe0e34220e3d1a9d3b87ac83131eac102c74f Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 4 Mar 2022 12:06:43 +0000 Subject: [PATCH 146/730] Cleanup actor id api Default can be a footgun and confuse users, it was used internally but that now uses the `from` impls. Also, opidat wasn't used and doesn't seem to need to be public. 
--- automerge/src/automerge.rs | 6 +++++- automerge/src/types.rs | 6 +----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index eeeb6dd4..3b258112 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -72,7 +72,11 @@ impl Automerge { pub(crate) fn get_actor_index(&mut self) -> usize { match &mut self.actor { Actor::Unused(actor) => { - let index = self.ops.m.actors.cache(std::mem::take(actor)); + let index = self + .ops + .m + .actors + .cache(std::mem::replace(actor, ActorId::from(&[][..]))); self.actor = Actor::Cached(index); index } diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 64ba05e2..d5134335 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -24,7 +24,7 @@ const HEAD_STR: &str = "_head"; // Note that change encoding relies on the Ord implementation for the ActorId being implemented in // terms of the lexicographic ordering of the underlying bytes. Be aware of this if you are // changing the ActorId implementation in ways which might affect the Ord implementation -#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord, Default)] +#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord)] #[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))] pub struct ActorId(TinyVec<[u8; 16]>); @@ -48,10 +48,6 @@ impl ActorId { pub fn to_hex_string(&self) -> String { hex::encode(&self.0) } - - pub fn op_id_at(&self, seq: u64) -> amp::OpId { - amp::OpId(seq, self.clone()) - } } impl TryFrom<&str> for ActorId { From d71e87882e6c83acf2945174abdcaa1ef590305e Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 4 Mar 2022 12:28:05 +0000 Subject: [PATCH 147/730] Make save infallible --- automerge-cli/src/export.rs | 4 ++-- automerge-cli/src/import.rs | 2 +- automerge-cli/src/merge.rs | 2 +- automerge-wasm/src/lib.rs | 7 ++----- automerge/examples/quickstart.rs | 2 +- automerge/src/autocommit.rs | 2 +- automerge/src/automerge.rs | 19 
++++++++----------- automerge/src/change.rs | 10 +++++----- automerge/src/encoding.rs | 4 ++++ automerge/tests/test.rs | 12 ++++++------ edit-trace/benches/main.rs | 8 ++++---- 11 files changed, 35 insertions(+), 37 deletions(-) diff --git a/automerge-cli/src/export.rs b/automerge-cli/src/export.rs index a2751d07..7b0be98e 100644 --- a/automerge-cli/src/export.rs +++ b/automerge-cli/src/export.rs @@ -113,7 +113,7 @@ mod tests { //let mut backend = am::Automerge::new(); //backend.apply_local_change(initial_change).unwrap(); let mut backend = initialize_from_json(&initial_state_json).unwrap(); - let change_bytes = backend.save().unwrap(); + let change_bytes = backend.save(); assert_eq!( get_state_json(change_bytes).unwrap(), serde_json::json!({"sparrows": 15.0}) @@ -140,7 +140,7 @@ mod tests { //backend.apply_local_change(initial_change).unwrap(); */ - let change_bytes = backend.save().unwrap(); + let change_bytes = backend.save(); assert_eq!( get_state_json(change_bytes).unwrap(), serde_json::json!({ diff --git a/automerge-cli/src/import.rs b/automerge-cli/src/import.rs index d7d3c45b..3935a9ac 100644 --- a/automerge-cli/src/import.rs +++ b/automerge-cli/src/import.rs @@ -103,6 +103,6 @@ pub fn import_json( let json_value: serde_json::Value = serde_json::from_str(&buffer)?; let mut doc = initialize_from_json(&json_value)?; - writer.write_all(&doc.save()?)?; + writer.write_all(&doc.save())?; Ok(()) } diff --git a/automerge-cli/src/merge.rs b/automerge-cli/src/merge.rs index caaa5c54..936af246 100644 --- a/automerge-cli/src/merge.rs +++ b/automerge-cli/src/merge.rs @@ -47,7 +47,7 @@ pub(super) fn merge(inputs: Inputs, mut output: W) -> Result< } } } - output.write_all(&backend.save().unwrap())?; + output.write_all(&backend.save())?; Ok(()) } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 47f151ca..816f77f2 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -332,11 +332,8 @@ impl Automerge { Ok(()) } - pub fn save(&mut 
self) -> Result { - self.0 - .save() - .map(|v| Uint8Array::from(v.as_slice())) - .map_err(to_js_err) + pub fn save(&mut self) -> Uint8Array { + Uint8Array::from(self.0.save().as_slice()) } #[wasm_bindgen(js_name = saveIncremental)] diff --git a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs index 9972cb1b..f6eaaebd 100644 --- a/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -26,7 +26,7 @@ fn main() { let mut doc2 = Automerge::new(); doc2.merge(&mut doc1).unwrap(); - let binary = doc1.save().unwrap(); + let binary = doc1.save(); let mut doc2 = Automerge::load(&binary).unwrap(); doc1.transact_with::<_, _, AutomergeError, _>( diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 35dc590a..07ed5e76 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -162,7 +162,7 @@ impl AutoCommit { self.doc.merge(&mut other.doc) } - pub fn save(&mut self) -> Result, AutomergeError> { + pub fn save(&mut self) -> Vec { self.ensure_transaction_closed(); self.doc.save() } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index eeeb6dd4..96f41ef6 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -487,15 +487,12 @@ impl Automerge { Ok(self.get_heads()) } - pub fn save(&mut self) -> Result, AutomergeError> { + pub fn save(&mut self) -> Vec { let heads = self.get_heads(); let c = self.history.iter(); let ops = self.ops.iter(); - // TODO - can we make encode_document error free let bytes = encode_document(heads, c, ops, &self.ops.m.actors, &self.ops.m.props.cache); - if bytes.is_ok() { - self.saved = self.get_heads(); - } + self.saved = self.get_heads(); bytes } @@ -907,7 +904,7 @@ mod tests { assert!(tx.value(&list_id, 3)?.unwrap().0 == "c".into()); assert!(tx.length(&list_id) == 4); tx.commit(); - doc.save()?; + doc.save(); Ok(()) } @@ -946,7 +943,7 @@ mod tests { tx.set(ROOT, "foo", 1)?; tx.commit(); - let save1 = doc.save().unwrap(); + let save1 = 
doc.save(); let mut tx = doc.transaction(); tx.set(ROOT, "bar", 2)?; @@ -967,7 +964,7 @@ mod tests { assert!(doc.save_incremental().is_empty()); - let save_b = doc.save().unwrap(); + let save_b = doc.save(); assert!(save_b.len() < save_a.len()); @@ -976,7 +973,7 @@ mod tests { assert!(doc_a.values(ROOT, "baz")? == doc_b.values(ROOT, "baz")?); - assert!(doc_a.save().unwrap() == doc_b.save().unwrap()); + assert!(doc_a.save() == doc_b.save()); Ok(()) } @@ -1197,12 +1194,12 @@ mod tests { fn rolling_back_transaction_has_no_effect() { let mut doc = Automerge::new(); let old_states = doc.states.clone(); - let bytes = doc.save().unwrap(); + let bytes = doc.save(); let tx = doc.transaction(); tx.rollback(); let new_states = doc.states.clone(); assert_eq!(old_states, new_states); - let new_bytes = doc.save().unwrap(); + let new_bytes = doc.save(); assert_eq!(bytes, new_bytes); } diff --git a/automerge/src/change.rs b/automerge/src/change.rs index b00de21f..abf241d0 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -42,7 +42,7 @@ pub(crate) fn encode_document<'a, 'b>( doc_ops: impl Iterator, actors_index: &IndexedCache, props: &'a [String], -) -> Result, AutomergeError> { +) -> Vec { let mut bytes: Vec = Vec::new(); let actors_map = actors_index.encode_index(); @@ -72,13 +72,13 @@ pub(crate) fn encode_document<'a, 'b>( let mut chunk = Vec::new(); - actors.len().encode(&mut chunk)?; + actors.len().encode_vec(&mut chunk); for a in actors.into_iter() { - a.to_bytes().encode(&mut chunk)?; + a.to_bytes().encode_vec(&mut chunk); } - heads.len().encode(&mut chunk)?; + heads.len().encode_vec(&mut chunk); for head in heads.iter() { chunk.write_all(&head.0).unwrap(); } @@ -97,7 +97,7 @@ pub(crate) fn encode_document<'a, 'b>( bytes.splice(HASH_RANGE, hash_result[0..4].iter().copied()); - Ok(bytes) + bytes } /// When encoding a change we take all the actor IDs referenced by a change and place them in an diff --git a/automerge/src/encoding.rs 
b/automerge/src/encoding.rs index f02a8de9..6c8e461c 100644 --- a/automerge/src/encoding.rs +++ b/automerge/src/encoding.rs @@ -251,6 +251,10 @@ pub(crate) trait Encodable { } fn encode(&self, buf: &mut R) -> io::Result; + + fn encode_vec(&self, buf: &mut Vec) -> usize { + self.encode(buf).unwrap() + } } impl Encodable for SmolStr { diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index ab15f949..b8d4658e 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -841,7 +841,7 @@ fn insertion_consistent_with_causality() { #[test] fn save_and_restore_empty() { let mut doc = new_doc(); - let loaded = Automerge::load(&doc.save().unwrap()).unwrap(); + let loaded = Automerge::load(&doc.save()).unwrap(); assert_doc!(&loaded, map! {}); } @@ -868,7 +868,7 @@ fn save_restore_complex() { doc1.set(&first_todo, "title", "kill plants").unwrap(); doc1.merge(&mut doc2).unwrap(); - let reloaded = Automerge::load(&doc1.save().unwrap()).unwrap(); + let reloaded = Automerge::load(&doc1.save()).unwrap(); assert_doc!( &reloaded, @@ -902,10 +902,10 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc1.insert(&list, 1, "b")?; doc1.insert(&list, 2, "c")?; - let mut doc2 = AutoCommit::load(&doc1.save()?)?; + let mut doc2 = AutoCommit::load(&doc1.save())?; doc2.set_actor(actor2); - let mut doc3 = AutoCommit::load(&doc1.save()?)?; + let mut doc3 = AutoCommit::load(&doc1.save())?; doc3.set_actor(actor3); doc1.set(&list, 1, Value::counter(0))?; @@ -954,7 +954,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(doc1.length(&list), 2); - let doc4 = AutoCommit::load(&doc1.save()?)?; + let doc4 = AutoCommit::load(&doc1.save())?; assert_eq!(doc4.length(&list), 2); @@ -962,7 +962,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(doc1.length(&list), 1); - let doc5 = AutoCommit::load(&doc1.save()?)?; + let doc5 = AutoCommit::load(&doc1.save())?; assert_eq!(doc5.length(&list), 1); diff --git 
a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs index a5e85791..111a4759 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -24,11 +24,11 @@ fn replay_trace_autotx(commands: Vec<(usize, usize, Vec)>) -> AutoCommit } fn save_trace(mut doc: Automerge) { - doc.save().unwrap(); + doc.save(); } fn save_trace_autotx(mut doc: AutoCommit) { - doc.save().unwrap(); + doc.save(); } fn load_trace(bytes: &[u8]) { @@ -75,7 +75,7 @@ fn bench(c: &mut Criterion) { b.iter_batched(|| doc.clone(), save_trace, criterion::BatchSize::LargeInput) }); - let bytes = doc.save().unwrap(); + let bytes = doc.save(); group.bench_with_input( BenchmarkId::new("load", commands_len), &bytes, @@ -108,7 +108,7 @@ fn bench(c: &mut Criterion) { }, ); - let bytes = doc.save().unwrap(); + let bytes = doc.save(); group.bench_with_input( BenchmarkId::new("load autotx", commands_len), &bytes, From 000576191ea8011ae0955959482b4c7d74c6951c Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 4 Mar 2022 12:17:13 +0000 Subject: [PATCH 148/730] Clean up sync api --- automerge-wasm/src/interop.rs | 2 +- automerge-wasm/src/lib.rs | 7 ++----- automerge/src/encoding.rs | 4 ++++ automerge/src/sync.rs | 26 ++++++++++++-------------- automerge/src/sync/bloom.rs | 22 +++++++++------------- automerge/src/sync/state.rs | 8 ++++---- 6 files changed, 32 insertions(+), 37 deletions(-) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index cf53abf3..85834a4d 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -226,7 +226,7 @@ impl From<&[am::SyncHave]> for AR { .map(|h| JsValue::from_str(&hex::encode(&h.0))) .collect(); // FIXME - the clone and the unwrap here shouldnt be needed - look at into_bytes() - let bloom = Uint8Array::from(have.bloom.clone().into_bytes().unwrap().as_slice()); + let bloom = Uint8Array::from(have.bloom.to_bytes().as_slice()); let obj: JsValue = Object::new().into(); // we can unwrap here b/c we created the 
object and know its not frozen Reflect::set(&obj, &"lastSync".into(), &last_sync.into()).unwrap(); diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 47f151ca..27c540b3 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -437,7 +437,7 @@ impl Automerge { #[wasm_bindgen(js_name = generateSyncMessage)] pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result { if let Some(message) = self.0.generate_sync_message(&mut state.0) { - Ok(Uint8Array::from(message.encode().map_err(to_js_err)?.as_slice()).into()) + Ok(Uint8Array::from(message.encode().as_slice()).into()) } else { Ok(JsValue::null()) } @@ -588,7 +588,6 @@ pub fn encode_sync_message(message: JsValue) -> Result { changes, } .encode() - .unwrap() .as_slice(), )) } @@ -612,9 +611,7 @@ pub fn decode_sync_message(msg: Uint8Array) -> Result { #[wasm_bindgen(js_name = encodeSyncState)] pub fn encode_sync_state(state: SyncState) -> Result { let state = state.0; - Ok(Uint8Array::from( - state.encode().map_err(to_js_err)?.as_slice(), - )) + Ok(Uint8Array::from(state.encode().as_slice())) } #[wasm_bindgen(js_name = decodeSyncState)] diff --git a/automerge/src/encoding.rs b/automerge/src/encoding.rs index f02a8de9..6c8e461c 100644 --- a/automerge/src/encoding.rs +++ b/automerge/src/encoding.rs @@ -251,6 +251,10 @@ pub(crate) trait Encodable { } fn encode(&self, buf: &mut R) -> io::Result; + + fn encode_vec(&self, buf: &mut Vec) -> usize { + self.encode(buf).unwrap() + } } impl Encodable for SmolStr { diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 0874ca5a..862df8a5 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -8,8 +8,7 @@ use std::{ use crate::types::Patch; use crate::{ - decoding, decoding::Decoder, encoding, encoding::Encodable, Automerge, AutomergeError, Change, - ChangeHash, + decoding, decoding::Decoder, encoding::Encodable, Automerge, AutomergeError, Change, ChangeHash, }; mod bloom; @@ -249,24 +248,24 @@ pub struct 
SyncMessage { } impl SyncMessage { - pub fn encode(self) -> Result, encoding::Error> { + pub fn encode(self) -> Vec { let mut buf = vec![MESSAGE_TYPE_SYNC]; - encode_hashes(&mut buf, &self.heads)?; - encode_hashes(&mut buf, &self.need)?; - (self.have.len() as u32).encode(&mut buf)?; + encode_hashes(&mut buf, &self.heads); + encode_hashes(&mut buf, &self.need); + (self.have.len() as u32).encode_vec(&mut buf); for have in self.have { - encode_hashes(&mut buf, &have.last_sync)?; - have.bloom.into_bytes()?.encode(&mut buf)?; + encode_hashes(&mut buf, &have.last_sync); + have.bloom.to_bytes().encode_vec(&mut buf); } - (self.changes.len() as u32).encode(&mut buf)?; + (self.changes.len() as u32).encode_vec(&mut buf); for mut change in self.changes { change.compress(); - change.raw_bytes().encode(&mut buf)?; + change.raw_bytes().encode_vec(&mut buf); } - Ok(buf) + buf } pub fn decode(bytes: &[u8]) -> Result { @@ -307,13 +306,12 @@ impl SyncMessage { } } -fn encode_hashes(buf: &mut Vec, hashes: &[ChangeHash]) -> Result<(), encoding::Error> { +fn encode_hashes(buf: &mut Vec, hashes: &[ChangeHash]) { debug_assert!( hashes.windows(2).all(|h| h[0] <= h[1]), "hashes were not sorted" ); - hashes.encode(buf)?; - Ok(()) + hashes.encode_vec(buf); } impl Encodable for &[ChangeHash] { diff --git a/automerge/src/sync/bloom.rs b/automerge/src/sync/bloom.rs index 5145848e..d20df5fd 100644 --- a/automerge/src/sync/bloom.rs +++ b/automerge/src/sync/bloom.rs @@ -1,6 +1,6 @@ use std::borrow::Cow; -use crate::{decoding, decoding::Decoder, encoding, encoding::Encodable, ChangeHash}; +use crate::{decoding, decoding::Decoder, encoding::Encodable, ChangeHash}; // These constants correspond to a 1% false positive rate. The values can be changed without // breaking compatibility of the network protocol, since the parameters used for a particular @@ -17,19 +17,15 @@ pub struct BloomFilter { } impl BloomFilter { - // FIXME - we can avoid a result here - why do we need to consume the bloom filter? 
requires - // me to clone in places I shouldn't need to - pub fn into_bytes(self) -> Result, encoding::Error> { - if self.num_entries == 0 { - Ok(Vec::new()) - } else { - let mut buf = Vec::new(); - self.num_entries.encode(&mut buf)?; - self.num_bits_per_entry.encode(&mut buf)?; - self.num_probes.encode(&mut buf)?; - buf.extend(self.bits); - Ok(buf) + pub fn to_bytes(&self) -> Vec { + let mut buf = Vec::new(); + if self.num_entries != 0 { + self.num_entries.encode_vec(&mut buf); + self.num_bits_per_entry.encode_vec(&mut buf); + self.num_probes.encode_vec(&mut buf); + buf.extend(&self.bits); } + buf } fn get_probes(&self, hash: &ChangeHash) -> Vec { diff --git a/automerge/src/sync/state.rs b/automerge/src/sync/state.rs index 209dbaf5..48b183b4 100644 --- a/automerge/src/sync/state.rs +++ b/automerge/src/sync/state.rs @@ -1,7 +1,7 @@ use std::{borrow::Cow, collections::HashSet}; use super::{decode_hashes, encode_hashes}; -use crate::{decoding, decoding::Decoder, encoding, BloomFilter, ChangeHash}; +use crate::{decoding, decoding::Decoder, BloomFilter, ChangeHash}; const SYNC_STATE_TYPE: u8 = 0x43; // first byte of an encoded sync state, for identification @@ -26,10 +26,10 @@ impl SyncState { Default::default() } - pub fn encode(&self) -> Result, encoding::Error> { + pub fn encode(&self) -> Vec { let mut buf = vec![SYNC_STATE_TYPE]; - encode_hashes(&mut buf, &self.shared_heads)?; - Ok(buf) + encode_hashes(&mut buf, &self.shared_heads); + buf } pub fn decode(bytes: &[u8]) -> Result { From a4432bdc3ddcbc99d9554e47c1400f6a1ebf1221 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 4 Mar 2022 13:03:19 +0000 Subject: [PATCH 149/730] Nothing really Into's ObjType so just take it directly --- automerge/src/autocommit.rs | 8 +++--- automerge/src/transaction/inner.rs | 28 ++++++++----------- .../src/transaction/manual_transaction.rs | 8 +++--- automerge/src/transaction/transactable.rs | 8 +++--- automerge/src/types.rs | 12 ++++++++ 5 files changed, 36 insertions(+), 28 
deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 241d5bfe..8192c18e 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -332,11 +332,11 @@ impl Transactable for AutoCommit { tx.set(&mut self.doc, obj.as_ref(), prop, value) } - fn set_object, P: Into, V: Into>( + fn set_object, P: Into>( &mut self, obj: O, prop: P, - value: V, + value: ObjType, ) -> Result { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); @@ -354,11 +354,11 @@ impl Transactable for AutoCommit { tx.insert(&mut self.doc, obj.as_ref(), index, value) } - fn insert_object>( + fn insert_object( &mut self, obj: &ExId, index: usize, - value: V, + value: ObjType, ) -> Result { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 819bc11f..a481e393 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -2,7 +2,7 @@ use crate::automerge::Actor; use crate::exid::ExId; use crate::query::{self, OpIdSearch}; use crate::types::{Key, ObjId, OpId}; -use crate::{change::export_change, types::Op, Automerge, ChangeHash, Prop, Value}; +use crate::{change::export_change, types::Op, Automerge, ChangeHash, Prop}; use crate::{AutomergeError, ObjType, OpType, ScalarValue}; #[derive(Debug, Clone)] @@ -89,7 +89,7 @@ impl TransactionInner { value: V, ) -> Result<(), AutomergeError> { let obj = doc.exid_to_obj(obj)?; - let value = Value::Scalar(value.into()); + let value = value.into(); self.local_op(doc, obj, prop.into(), value.into())?; Ok(()) } @@ -107,15 +107,14 @@ impl TransactionInner { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - pub fn set_object, V: Into>( + pub fn set_object>( &mut self, doc: &mut Automerge, obj: &ExId, prop: P, - value: V, + value: ObjType, ) -> Result { let obj = 
doc.exid_to_obj(obj)?; - let value = Value::Object(value.into()); let id = self.local_op(doc, obj, prop.into(), value.into())?.unwrap(); Ok(doc.id_to_exid(id)) } @@ -146,38 +145,35 @@ impl TransactionInner { value: V, ) -> Result<(), AutomergeError> { let obj = doc.exid_to_obj(obj)?; - self.do_insert(doc, obj, index, Value::Scalar(value.into()))?; + let value = value.into(); + self.do_insert(doc, obj, index, value.into())?; Ok(()) } - pub fn insert_object>( + pub fn insert_object( &mut self, doc: &mut Automerge, obj: &ExId, index: usize, - value: V, + value: ObjType, ) -> Result { let obj = doc.exid_to_obj(obj)?; - let id = self - .do_insert(doc, obj, index, Value::Object(value.into()))? - .unwrap(); + let id = self.do_insert(doc, obj, index, value.into())?.unwrap(); Ok(doc.id_to_exid(id)) } - fn do_insert>( + fn do_insert( &mut self, doc: &mut Automerge, obj: ObjId, index: usize, - value: V, + action: OpType, ) -> Result, AutomergeError> { let id = self.next_id(); let query = doc.ops.search(obj, query::InsertNth::new(index)); let key = query.key()?; - let value = value.into(); - let action = value.into(); let is_make = matches!(&action, OpType::Make(_)); let op = Op { @@ -336,7 +332,7 @@ impl TransactionInner { } for v in vals { // insert() - self.do_insert(doc, obj, pos, v)?; + self.do_insert(doc, obj, pos, v.into())?; pos += 1; } Ok(()) diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 92962b2d..79ae07c2 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -97,11 +97,11 @@ impl<'a> Transactable for Transaction<'a> { .set(self.doc, obj.as_ref(), prop, value) } - fn set_object, P: Into, V: Into>( + fn set_object, P: Into>( &mut self, obj: O, prop: P, - value: V, + value: ObjType, ) -> Result { self.inner .as_mut() @@ -121,11 +121,11 @@ impl<'a> Transactable for Transaction<'a> { .insert(self.doc, obj.as_ref(), index, value) } - fn 
insert_object>( + fn insert_object( &mut self, obj: &ExId, index: usize, - value: V, + value: ObjType, ) -> Result { self.inner .as_mut() diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 8ae8f100..593a25ca 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -34,11 +34,11 @@ pub trait Transactable { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set_object, P: Into, V: Into>( + fn set_object, P: Into>( &mut self, obj: O, prop: P, - object: V, + object: ObjType, ) -> Result; /// Insert a value into a list at the given index. @@ -50,11 +50,11 @@ pub trait Transactable { ) -> Result<(), AutomergeError>; /// Insert an object into a list at the given index. - fn insert_object>( + fn insert_object( &mut self, obj: &ExId, index: usize, - object: V, + object: ObjType, ) -> Result; /// Increment the counter at the prop in the object by `value`. 
diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 64ba05e2..a9ab1e7e 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -160,6 +160,18 @@ pub enum OpType { Set(ScalarValue), } +impl From for OpType { + fn from(v: ObjType) -> Self { + OpType::Make(v) + } +} + +impl From for OpType { + fn from(v: ScalarValue) -> Self { + OpType::Set(v) + } +} + #[derive(Debug)] pub(crate) enum Export { Id(OpId), From cd5e73473574a7ca7014279b46296efc3f08db18 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 4 Mar 2022 13:09:29 +0000 Subject: [PATCH 150/730] Make decode_change an associated function --- automerge-wasm/src/interop.rs | 2 +- automerge/src/change.rs | 96 ++++++++++++++++++----------------- automerge/src/lib.rs | 2 +- 3 files changed, 52 insertions(+), 48 deletions(-) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index cf53abf3..2cb41078 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -126,7 +126,7 @@ impl TryFrom for Vec { let changes = changes?; let changes: Result, _> = changes .iter() - .map(|a| am::decode_change(a.to_vec())) + .map(|a| Change::try_from(a.to_vec())) .collect(); let changes = changes.map_err(to_js_err)?; Ok(changes) diff --git a/automerge/src/change.rs b/automerge/src/change.rs index b00de21f..e0510d30 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -336,7 +336,7 @@ impl Change { } pub fn from_bytes(bytes: Vec) -> Result { - decode_change(bytes) + Change::try_from(bytes) } pub fn is_empty(&self) -> bool { @@ -499,53 +499,57 @@ pub(crate) fn export_change( .into() } -pub fn decode_change(bytes: Vec) -> Result { - let (chunktype, body) = decode_header_without_hash(&bytes)?; - let bytes = if chunktype == BLOCK_TYPE_DEFLATE { - decompress_chunk(0..PREAMBLE_BYTES, body, bytes)? 
- } else { - ChangeBytes::Uncompressed(bytes) - }; +impl TryFrom> for Change { + type Error = decoding::Error; - let (chunktype, hash, body) = decode_header(bytes.uncompressed())?; + fn try_from(bytes: Vec) -> Result { + let (chunktype, body) = decode_header_without_hash(&bytes)?; + let bytes = if chunktype == BLOCK_TYPE_DEFLATE { + decompress_chunk(0..PREAMBLE_BYTES, body, bytes)? + } else { + ChangeBytes::Uncompressed(bytes) + }; - if chunktype != BLOCK_TYPE_CHANGE { - return Err(decoding::Error::WrongType { - expected_one_of: vec![BLOCK_TYPE_CHANGE], - found: chunktype, - }); + let (chunktype, hash, body) = decode_header(bytes.uncompressed())?; + + if chunktype != BLOCK_TYPE_CHANGE { + return Err(decoding::Error::WrongType { + expected_one_of: vec![BLOCK_TYPE_CHANGE], + found: chunktype, + }); + } + + let body_start = body.start; + let mut cursor = body; + + let deps = decode_hashes(bytes.uncompressed(), &mut cursor)?; + + let actor = + ActorId::from(&bytes.uncompressed()[slice_bytes(bytes.uncompressed(), &mut cursor)?]); + let seq = read_slice(bytes.uncompressed(), &mut cursor)?; + let start_op = read_slice(bytes.uncompressed(), &mut cursor)?; + let time = read_slice(bytes.uncompressed(), &mut cursor)?; + let message = slice_bytes(bytes.uncompressed(), &mut cursor)?; + + let actors = decode_actors(bytes.uncompressed(), &mut cursor, Some(actor))?; + + let ops_info = decode_column_info(bytes.uncompressed(), &mut cursor, false)?; + let ops = decode_columns(&mut cursor, &ops_info); + + Ok(Change { + bytes, + body_start, + hash, + seq, + start_op, + time, + actors, + message, + deps, + ops, + extra_bytes: cursor, + }) } - - let body_start = body.start; - let mut cursor = body; - - let deps = decode_hashes(bytes.uncompressed(), &mut cursor)?; - - let actor = - ActorId::from(&bytes.uncompressed()[slice_bytes(bytes.uncompressed(), &mut cursor)?]); - let seq = read_slice(bytes.uncompressed(), &mut cursor)?; - let start_op = read_slice(bytes.uncompressed(), &mut 
cursor)?; - let time = read_slice(bytes.uncompressed(), &mut cursor)?; - let message = slice_bytes(bytes.uncompressed(), &mut cursor)?; - - let actors = decode_actors(bytes.uncompressed(), &mut cursor, Some(actor))?; - - let ops_info = decode_column_info(bytes.uncompressed(), &mut cursor, false)?; - let ops = decode_columns(&mut cursor, &ops_info); - - Ok(Change { - bytes, - body_start, - hash, - seq, - start_op, - time, - actors, - message, - deps, - ops, - extra_bytes: cursor, - }) } fn decompress_chunk( @@ -740,7 +744,7 @@ fn decode_block(bytes: &[u8], changes: &mut Vec) -> Result<(), decoding: Ok(()) } BLOCK_TYPE_CHANGE | BLOCK_TYPE_DEFLATE => { - changes.push(decode_change(bytes.to_vec())?); + changes.push(Change::try_from(bytes.to_vec())?); Ok(()) } found => Err(decoding::Error::WrongType { diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index b67772a5..f3c62dce 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -49,7 +49,7 @@ mod visualisation; pub use crate::automerge::Automerge; pub use autocommit::AutoCommit; -pub use change::{decode_change, Change}; +pub use change::Change; pub use error::AutomergeError; pub use exid::ExId as ObjId; pub use keys::Keys; From beae33402aa4aa3830d2e4a093f9ff4972137615 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 7 Mar 2022 11:46:25 -0500 Subject: [PATCH 151/730] update wasm test for set_object --- automerge-wasm/index.d.ts | 13 ++++---- automerge-wasm/src/lib.rs | 21 +++++-------- automerge-wasm/test/test.ts | 59 +++++++++++++++---------------------- 3 files changed, 38 insertions(+), 55 deletions(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index b9249b41..d032a62a 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -6,7 +6,8 @@ export type SyncMessage = Uint8Array; export type Prop = string | number; export type Hash = string; export type Heads = Hash[]; -export type Value = string | number | boolean | null | Date | Uint8Array | Array | Object; 
+export type Value = string | number | boolean | null | Date | Uint8Array +export type ObjType = string | Array | Object export type FullValue = ["str", string] | ["int", number] | @@ -82,10 +83,12 @@ export function decodeSyncState(data: Uint8Array): SyncState; export class Automerge { // change state - set(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): ObjID | undefined; - make(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): ObjID; - insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): ObjID | undefined; - push(obj: ObjID, value: Value, datatype?: Datatype): ObjID | undefined; + set(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined; + set_object(obj: ObjID, prop: Prop, value: ObjType): ObjID; + insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): undefined; + insert_object(obj: ObjID, index: number, value: ObjType): ObjID; + push(obj: ObjID, value: Value, datatype?: Datatype): undefined; + push_object(obj: ObjID, value: ObjType): ObjID; splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; inc(obj: ObjID, prop: Prop, value: number): void; del(obj: ObjID, prop: Prop): void; diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index eb094c33..a075dcf0 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -160,15 +160,10 @@ impl Automerge { Ok(()) } - pub fn push_object( - &mut self, - obj: JsValue, - value: JsValue, - datatype: JsValue, - ) -> Result, JsValue> { + pub fn push_object(&mut self, obj: JsValue, value: JsValue) -> Result, JsValue> { let obj = self.import(obj)?; - let (value, subvals) = to_objtype(&value, &datatype.as_string()) - .ok_or_else(|| to_js_err("expected object"))?; + let (value, subvals) = + to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; let index = self.0.length(&obj); let opid = self.0.insert_object(&obj, index, value)?; self.subset(&opid, subvals)?; @@ 
-196,12 +191,11 @@ impl Automerge { obj: JsValue, index: f64, value: JsValue, - datatype: JsValue, ) -> Result, JsValue> { let obj = self.import(obj)?; let index = index as f64; - let (value, subvals) = to_objtype(&value, &datatype.as_string()) - .ok_or_else(|| to_js_err("expected object"))?; + let (value, subvals) = + to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; let opid = self.0.insert_object(&obj, index as usize, value)?; self.subset(&opid, subvals)?; Ok(opid.to_string().into()) @@ -228,12 +222,11 @@ impl Automerge { obj: JsValue, prop: JsValue, value: JsValue, - datatype: JsValue, ) -> Result { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; - let (value, subvals) = to_objtype(&value, &datatype.as_string()) - .ok_or_else(|| to_js_err("expected object"))?; + let (value, subvals) = + to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; let opid = self.0.set_object(&obj, prop, value)?; self.subset(&opid, subvals)?; Ok(opid.to_string().into()) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index af6b2315..7bcd0517 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -64,7 +64,7 @@ describe('Automerge', () => { doc.set(root, "bool", true) doc.set(root, "time1", 1000, "timestamp") doc.set(root, "time2", new Date(1001)) - doc.set(root, "list", []); + doc.set_object(root, "list", []); doc.set(root, "null", null) result = doc.value(root,"hello") @@ -124,8 +124,7 @@ describe('Automerge', () => { let root = "_root" let result - let submap = doc.set(root, "submap", {}) - if (!submap) throw new Error('should be not null') + let submap = doc.set_object(root, "submap", {}) doc.set(submap, "number", 6, "uint") assert.strictEqual(doc.pendingOps(),2) @@ -141,8 +140,7 @@ describe('Automerge', () => { let doc = create() let root = "_root" - let submap = doc.set(root, "numbers", []) - if (!submap) throw new Error('should be not null') + let submap = doc.set_object(root, 
"numbers", []) doc.insert(submap, 0, "a"); doc.insert(submap, 1, "b"); doc.insert(submap, 2, "c"); @@ -165,8 +163,7 @@ describe('Automerge', () => { let doc = create() let root = "_root" - let submap = doc.set(root, "letters", []) - if (!submap) throw new Error('should be not null') + let submap = doc.set_object(root, "letters", []) doc.insert(submap, 0, "a"); doc.insert(submap, 0, "b"); assert.deepEqual(doc.toJS(), { letters: ["b", "a" ] }) @@ -230,7 +227,7 @@ describe('Automerge', () => { let doc = create() let root = "_root"; - let text = doc.set(root, "text", "", "text"); + let text = doc.set_object(root, "text", ""); if (!text) throw new Error('should not be undefined') doc.splice(text, 0, 0, "hello ") doc.splice(text, 6, 0, ["w","o","r","l","d"]) @@ -282,8 +279,7 @@ describe('Automerge', () => { it('should be able to splice text', () => { let doc = create() - let text = doc.set("_root", "text", "", "text"); - if (!text) throw new Error('should not be undefined') + let text = doc.set_object("_root", "text", ""); doc.splice(text, 0, 0, "hello world"); let heads1 = doc.commit(); doc.splice(text, 6, 0, "big bad "); @@ -331,8 +327,7 @@ describe('Automerge', () => { it('local inc increments all visible counters in a sequence', () => { let doc1 = create("aaaa") - let seq = doc1.set("_root", "seq", []) - if (!seq) throw new Error('Should not be undefined') + let seq = doc1.set_object("_root", "seq", []) doc1.insert(seq, 0, "hello") let doc2 = loadDoc(doc1.save(), "bbbb"); let doc3 = loadDoc(doc1.save(), "cccc"); @@ -365,11 +360,11 @@ describe('Automerge', () => { it('recursive sets are possible', () => { let doc = create("aaaa") - let l1 = doc.make("_root","list",[{ foo: "bar"}, [1,2,3]]) - let l2 = doc.insert(l1, 0, { zip: ["a", "b"] }) - let l3 = doc.make("_root","info1","hello world") // 'text' + let l1 = doc.set_object("_root","list",[{ foo: "bar"}, [1,2,3]]) + let l2 = doc.insert_object(l1, 0, { zip: ["a", "b"] }) + let l3 = doc.set_object("_root","info1","hello 
world") // 'text' let l4 = doc.set("_root","info2","hello world") // 'str' - let l5 = doc.set("_root","info3","hello world", "text") + let l5 = doc.set_object("_root","info3","hello world") assert.deepEqual(doc.toJS(), { "list": [ { zip: ["a", "b"] }, { foo: "bar"}, [ 1,2,3]], "info1": "hello world".split(""), @@ -382,15 +377,14 @@ describe('Automerge', () => { it('only returns an object id when objects are created', () => { let doc = create("aaaa") let r1 = doc.set("_root","foo","bar") - let r2 = doc.set("_root","list",[]) + let r2 = doc.set_object("_root","list",[]) let r3 = doc.set("_root","counter",10, "counter") let r4 = doc.inc("_root","counter",1) let r5 = doc.del("_root","counter") - if (!r2) throw new Error('should not be undefined') let r6 = doc.insert(r2,0,10); - let r7 = doc.insert(r2,0,{}); + let r7 = doc.insert_object(r2,0,{}); let r8 = doc.splice(r2,1,0,["a","b","c"]); - let r9 = doc.splice(r2,1,0,["a",[],{},"d"]); + //let r9 = doc.splice(r2,1,0,["a",[],{},"d"]); assert.deepEqual(r1,null); assert.deepEqual(r2,"2@aaaa"); assert.deepEqual(r3,null); @@ -399,18 +393,15 @@ describe('Automerge', () => { assert.deepEqual(r6,null); assert.deepEqual(r7,"7@aaaa"); assert.deepEqual(r8,null); - assert.deepEqual(r9,["12@aaaa","13@aaaa"]); + //assert.deepEqual(r9,["12@aaaa","13@aaaa"]); doc.free() }) it('objects without properties are preserved', () => { let doc1 = create("aaaa") - let a = doc1.set("_root","a",{}); - if (!a) throw new Error('should not be undefined') - let b = doc1.set("_root","b",{}); - if (!b) throw new Error('should not be undefined') - let c = doc1.set("_root","c",{}); - if (!c) throw new Error('should not be undefined') + let a = doc1.set_object("_root","a",{}); + let b = doc1.set_object("_root","b",{}); + let c = doc1.set_object("_root","c",{}); let d = doc1.set(c,"d","dd"); let saved = doc1.save(); let doc2 = loadDoc(saved); @@ -427,7 +418,7 @@ describe('Automerge', () => { it('should handle merging text conflicts then saving & loading', () 
=> { let A = create("aabbcc") - let At = A.make('_root', 'text', "", "text") + let At = A.set_object('_root', 'text', "") A.splice(At, 0, 0, 'hello') let B = A.fork() @@ -478,8 +469,7 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() // make two nodes with the same changes - let list = n1.set("_root","n", []) - if (!list) throw new Error('undefined') + let list = n1.set_object("_root","n", []) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list,i,i) @@ -502,8 +492,7 @@ describe('Automerge', () => { let n1 = create(), n2 = create() // make changes for n1 that n2 should request - let list = n1.set("_root","n",[]) - if (!list) throw new Error('undefined') + let list = n1.set_object("_root","n",[]) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list, i, i) @@ -519,8 +508,7 @@ describe('Automerge', () => { let n1 = create(), n2 = create() // make changes for n1 that n2 should request - let list = n1.set("_root","n",[]) - if (!list) throw new Error('undefined') + let list = n1.set_object("_root","n",[]) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list,i,i) @@ -676,8 +664,7 @@ describe('Automerge', () => { let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState(), message = null - let items = n1.set("_root", "items", []) - if (!items) throw new Error('undefined') + let items = n1.set_object("_root", "items", []) n1.commit("",0) sync(n1, n2, s1, s2) From 4b32ee882ae0a0d7e3a92ff9e8eb0a9ecc92164a Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 9 Mar 2022 10:28:25 +0000 Subject: [PATCH 152/730] Cleanup value API Adds conversions to contained types, is_* methods for checking the variant and conversions from more types. 
--- automerge/src/value.rs | 237 ++++++++++++++++++++++++++++++++++++++--- 1 file changed, 220 insertions(+), 17 deletions(-) diff --git a/automerge/src/value.rs b/automerge/src/value.rs index 2d272506..f837ad63 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -11,13 +11,6 @@ pub enum Value { } impl Value { - pub fn to_string(&self) -> Option { - match self { - Value::Scalar(val) => Some(val.to_string()), - _ => None, - } - } - pub fn map() -> Value { Value::Object(ObjType::Map) } @@ -69,11 +62,134 @@ impl Value { pub fn is_scalar(&self) -> bool { matches!(&self, Value::Scalar(_)) } + + pub fn is_bytes(&self) -> bool { + matches!(self, Self::Scalar(ScalarValue::Bytes(_))) + } + + pub fn is_str(&self) -> bool { + matches!(self, Self::Scalar(ScalarValue::Str(_))) + } + + pub fn is_int(&self) -> bool { + matches!(self, Self::Scalar(ScalarValue::Int(_))) + } + + pub fn is_uint(&self) -> bool { + matches!(self, Self::Scalar(ScalarValue::Uint(_))) + } + + pub fn is_f64(&self) -> bool { + matches!(self, Self::Scalar(ScalarValue::F64(_))) + } + + pub fn is_counter(&self) -> bool { + matches!(self, Self::Scalar(ScalarValue::Counter(_))) + } + + pub fn is_timestamp(&self) -> bool { + matches!(self, Self::Scalar(ScalarValue::Timestamp(_))) + } + + pub fn is_boolean(&self) -> bool { + matches!(self, Self::Scalar(ScalarValue::Boolean(_))) + } + + pub fn is_null(&self) -> bool { + matches!(self, Self::Scalar(ScalarValue::Null)) + } + + pub fn into_scalar(self) -> Result { + match self { + Self::Scalar(s) => Ok(s), + _ => Err(self), + } + } + + pub fn to_scalar(&self) -> Option<&ScalarValue> { + match self { + Self::Scalar(s) => Some(s), + _ => None, + } + } + + pub fn to_objtype(&self) -> Option { + match self { + Self::Object(o) => Some(*o), + _ => None, + } + } + + pub fn into_bytes(self) -> Result, Self> { + match self { + Value::Scalar(s) => s.into_bytes().map_err(Value::Scalar), + _ => Err(self), + } + } + + pub fn to_bytes(&self) -> Option<&[u8]> { + 
match self { + Value::Scalar(s) => s.to_bytes(), + _ => None, + } + } + + pub fn into_string(self) -> Result { + match self { + Value::Scalar(s) => s.into_string().map_err(Value::Scalar), + _ => Err(self), + } + } + + pub fn to_str(&self) -> Option<&str> { + match self { + Value::Scalar(val) => val.to_str(), + _ => None, + } + } + + /// If this value can be coerced to an i64, return the i64 value + pub fn to_i64(&self) -> Option { + match self { + Value::Scalar(s) => s.to_i64(), + _ => None, + } + } + + pub fn to_u64(&self) -> Option { + match self { + Value::Scalar(s) => s.to_u64(), + _ => None, + } + } + + pub fn to_f64(&self) -> Option { + match self { + Value::Scalar(s) => s.to_f64(), + _ => None, + } + } + + pub fn to_bool(&self) -> Option { + match self { + Value::Scalar(s) => s.to_bool(), + _ => None, + } + } +} + +impl fmt::Display for Value { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Value::Object(o) => write!(f, "Object: {}", o), + Value::Scalar(s) => write!(f, "Scalar: {}", s), + } + } } impl From<&str> for Value { fn from(s: &str) -> Self { - Value::Scalar(s.into()) + Value::Scalar(ScalarValue::Str(s.into())) } } @@ -83,6 +199,18 @@ impl From for Value { } } +impl From for Value { + fn from(c: char) -> Self { + Value::Scalar(ScalarValue::Str(SmolStr::new(c.to_string()))) + } +} + +impl From> for Value { + fn from(v: Vec) -> Self { + Value::Scalar(ScalarValue::Bytes(v)) + } +} + impl From for Value { fn from(n: f64) -> Self { Value::Scalar(ScalarValue::F64(n)) @@ -101,6 +229,12 @@ impl From for Value { } } +impl From for Value { + fn from(n: u32) -> Self { + Value::Scalar(ScalarValue::Uint(n.into())) + } +} + impl From for Value { fn from(n: u64) -> Self { Value::Scalar(ScalarValue::Uint(n)) @@ -352,6 +486,70 @@ impl ScalarValue { } } + pub fn is_bytes(&self) -> bool { + matches!(self, Self::Bytes(_)) + } + + pub fn is_str(&self) -> bool { + matches!(self, Self::Str(_)) + } + + pub fn is_int(&self) -> bool { + 
matches!(self, Self::Int(_)) + } + + pub fn is_uint(&self) -> bool { + matches!(self, Self::Uint(_)) + } + + pub fn is_f64(&self) -> bool { + matches!(self, Self::F64(_)) + } + + pub fn is_counter(&self) -> bool { + matches!(self, Self::Counter(_)) + } + + pub fn is_timestamp(&self) -> bool { + matches!(self, Self::Timestamp(_)) + } + + pub fn is_boolean(&self) -> bool { + matches!(self, Self::Boolean(_)) + } + + pub fn is_null(&self) -> bool { + matches!(self, Self::Null) + } + + pub fn into_bytes(self) -> Result, Self> { + match self { + ScalarValue::Bytes(b) => Ok(b), + _ => Err(self), + } + } + + pub fn to_bytes(&self) -> Option<&[u8]> { + match self { + ScalarValue::Bytes(b) => Some(b), + _ => None, + } + } + + pub fn into_string(self) -> Result { + match self { + ScalarValue::Str(s) => Ok(s.to_string()), + _ => Err(self), + } + } + + pub fn to_str(&self) -> Option<&str> { + match self { + ScalarValue::Str(s) => Some(s), + _ => None, + } + } + /// If this value can be coerced to an i64, return the i64 value pub fn to_i64(&self) -> Option { match self { @@ -386,16 +584,9 @@ impl ScalarValue { } } - pub fn to_bool(self) -> Option { + pub fn to_bool(&self) -> Option { match self { - ScalarValue::Boolean(b) => Some(b), - _ => None, - } - } - - pub fn to_string(self) -> Option { - match self { - ScalarValue::Str(s) => Some(s.to_string()), + ScalarValue::Boolean(b) => Some(*b), _ => None, } } @@ -417,6 +608,12 @@ impl From for ScalarValue { } } +impl From> for ScalarValue { + fn from(b: Vec) -> Self { + ScalarValue::Bytes(b) + } +} + impl From for ScalarValue { fn from(n: i64) -> Self { ScalarValue::Int(n) @@ -435,6 +632,12 @@ impl From for ScalarValue { } } +impl From for ScalarValue { + fn from(n: u32) -> Self { + ScalarValue::Uint(n.into()) + } +} + impl From for ScalarValue { fn from(n: i32) -> Self { ScalarValue::Int(n as i64) From 73ac96b7a2ed32986cb02f25266f677ceb119221 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 4 Mar 2022 17:30:19 +0000 
Subject: [PATCH 153/730] Add failing tests for deleting nothing --- automerge/src/automerge.rs | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index cdb100e3..5c59ef29 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1247,4 +1247,28 @@ mod tests { assert_eq!(doc.value(&map1, "c").unwrap().unwrap().0, Value::int(3)); } + + #[test] + fn delete_nothing_returns_error_map() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + assert!(tx.del(ROOT, "a").is_err()); + // not an error currently so breaks loading + tx.commit(); + + let bytes = doc.save().unwrap(); + assert!(Automerge::load(&bytes).is_err()); + } + + #[test] + fn delete_nothing_returns_error_list() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + assert!(tx.del(ROOT, 0).is_err()); + // not an error currently so breaks loading + tx.commit(); + + let bytes = doc.save().unwrap(); + assert!(Automerge::load(&bytes).is_err()); + } } From a9737a6815e0687836af14637ac079e65756a6e3 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 8 Mar 2022 17:42:03 +0000 Subject: [PATCH 154/730] Fix del missing key in map --- automerge/src/automerge.rs | 20 ++++++++++++++++---- automerge/src/transaction/inner.rs | 5 +++++ 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 5c59ef29..ec397926 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1252,23 +1252,35 @@ mod tests { fn delete_nothing_returns_error_map() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - assert!(tx.del(ROOT, "a").is_err()); + assert!(tx.del(ROOT, "a").is_ok()); // not an error currently so breaks loading tx.commit(); + dbg!(doc.get_last_local_change()); let bytes = doc.save().unwrap(); - assert!(Automerge::load(&bytes).is_err()); + assert!(Automerge::load(&bytes).is_ok()); + + let mut tx = 
doc.transaction(); + tx.set(ROOT, "a", 1).unwrap(); + tx.commit(); + + let mut tx = doc.transaction(); + // a real op + tx.del(ROOT, "a").unwrap(); + // a no-op + tx.del(ROOT, "a").unwrap(); + tx.commit(); } #[test] fn delete_nothing_returns_error_list() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - assert!(tx.del(ROOT, 0).is_err()); + tx.del(ROOT, 0).unwrap(); // not an error currently so breaks loading tx.commit(); let bytes = doc.save().unwrap(); - assert!(Automerge::load(&bytes).is_err()); + assert!(Automerge::load(&bytes).is_ok()); } } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index a481e393..b544a952 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -225,6 +225,11 @@ impl TransactionInner { let prop = doc.ops.m.props.cache(prop); let query = doc.ops.search(obj, query::Prop::new(prop)); + // no key present to delete + if query.ops.is_empty() && action == OpType::Del { + return Ok(None); + } + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { return Ok(None); } From 5f200e3bf58cdc7ba617b463ff7d87bcc3698a7d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 9 Mar 2022 09:38:03 +0000 Subject: [PATCH 155/730] Update delete nothing tests --- automerge/src/automerge.rs | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index ec397926..bd5790be 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1249,13 +1249,14 @@ mod tests { } #[test] - fn delete_nothing_returns_error_map() { + fn delete_nothing_in_map_is_noop() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); + // deleting a missing key in a map should just be a noop assert!(tx.del(ROOT, "a").is_ok()); - // not an error currently so breaks loading tx.commit(); - dbg!(doc.get_last_local_change()); + let last_change = doc.get_last_local_change().unwrap(); + 
assert_eq!(last_change.len(), 0); let bytes = doc.save().unwrap(); assert!(Automerge::load(&bytes).is_ok()); @@ -1263,6 +1264,8 @@ mod tests { let mut tx = doc.transaction(); tx.set(ROOT, "a", 1).unwrap(); tx.commit(); + let last_change = doc.get_last_local_change().unwrap(); + assert_eq!(last_change.len(), 1); let mut tx = doc.transaction(); // a real op @@ -1270,17 +1273,15 @@ mod tests { // a no-op tx.del(ROOT, "a").unwrap(); tx.commit(); + let last_change = doc.get_last_local_change().unwrap(); + assert_eq!(last_change.len(), 1); } #[test] - fn delete_nothing_returns_error_list() { + fn delete_nothing_in_list_returns_error() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - tx.del(ROOT, 0).unwrap(); - // not an error currently so breaks loading - tx.commit(); - - let bytes = doc.save().unwrap(); - assert!(Automerge::load(&bytes).is_ok()); + // deleting an element in a list that does not exist is an error + assert!(tx.del(ROOT, 0).is_err()); } } From 875bfdd7f260cc9b1ccc5f304e1bbaac75df97e3 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 9 Mar 2022 10:33:57 +0000 Subject: [PATCH 156/730] Update save call --- automerge/src/automerge.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index bd5790be..1d6e0fe9 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1258,7 +1258,7 @@ mod tests { let last_change = doc.get_last_local_change().unwrap(); assert_eq!(last_change.len(), 0); - let bytes = doc.save().unwrap(); + let bytes = doc.save(); assert!(Automerge::load(&bytes).is_ok()); let mut tx = doc.transaction(); From ebe7bae992ea715246219cf037497d1b24c0f66b Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 9 Mar 2022 11:14:21 +0000 Subject: [PATCH 157/730] Fix typo on QueryResult --- automerge/src/op_tree.rs | 4 ++-- automerge/src/query.rs | 4 ++-- automerge/src/query/insert.rs | 2 +- automerge/src/query/nth.rs | 2 +- 
automerge/src/query/opid.rs | 2 +- automerge/src/query/seek_op.rs | 4 ++-- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 5503d164..903cfde6 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -56,7 +56,7 @@ impl OpTreeInternal { self.root_node .as_ref() .map(|root| match query.query_node_with_metadata(root, m) { - QueryResult::Decend => root.search(&mut query, m), + QueryResult::Descend => root.search(&mut query, m), _ => true, }); query @@ -199,7 +199,7 @@ impl OpTreeNode { } else { for (child_index, child) in self.children.iter().enumerate() { match query.query_node_with_metadata(child, m) { - QueryResult::Decend => { + QueryResult::Descend => { if child.search(query, m) { return true; } diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 84a70c49..4d6c47c9 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -52,7 +52,7 @@ pub(crate) trait TreeQuery { } fn query_node(&mut self, _child: &OpTreeNode) -> QueryResult { - QueryResult::Decend + QueryResult::Descend } #[inline(always)] @@ -68,7 +68,7 @@ pub(crate) trait TreeQuery { #[derive(Debug, Clone, PartialEq)] pub(crate) enum QueryResult { Next, - Decend, + Descend, Finish, } diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index 62da48f9..80d48762 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -64,7 +64,7 @@ impl TreeQuery for InsertNth { num_vis -= 1; } if self.seen + num_vis >= self.target { - QueryResult::Decend + QueryResult::Descend } else { self.n += child.len(); self.seen += num_vis; diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index 8d692a76..b62410e4 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -48,7 +48,7 @@ impl TreeQuery for Nth { num_vis -= 1; } if self.seen + num_vis > self.target { - QueryResult::Decend + QueryResult::Descend } else { self.pos += child.len(); 
self.seen += num_vis; diff --git a/automerge/src/query/opid.rs b/automerge/src/query/opid.rs index fc2132f2..2a68ad1c 100644 --- a/automerge/src/query/opid.rs +++ b/automerge/src/query/opid.rs @@ -33,7 +33,7 @@ impl OpIdSearch { impl TreeQuery for OpIdSearch { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { if child.index.ops.contains(&self.target) { - QueryResult::Decend + QueryResult::Descend } else { self.pos += child.len(); QueryResult::Next diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs index 9aba4b46..a2f3b750 100644 --- a/automerge/src/query/seek_op.rs +++ b/automerge/src/query/seek_op.rs @@ -49,7 +49,7 @@ impl TreeQuery for SeekOp { m: &OpSetMetadata, ) -> QueryResult { if self.found { - return QueryResult::Decend; + return QueryResult::Descend; } match self.op.key { Key::Seq(e) if e == HEAD => { @@ -67,7 +67,7 @@ impl TreeQuery for SeekOp { } Key::Seq(e) => { if self.found || child.index.ops.contains(&e.0) { - QueryResult::Decend + QueryResult::Descend } else { self.pos += child.len(); QueryResult::Next From d00cee1637fa0354d968d9570e1b1ea439554410 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 9 Mar 2022 12:33:20 +0000 Subject: [PATCH 158/730] Misc API updates - Commit now returns just a single hash rather than a vec. Since the change we create from committing has all of the heads as deps there can only be one hash/head after committing. - Apply changes now takes a Vec rather than a slice. This avoids having to clone them inside. 
- transact_with now passes the result of the closure to the commit options function - Remove patch struct - Change receive_sync_message to return a () instead of the `Option` - Change `Transaction*` structs to just `*` and use the transaction module - Make CommitOptions fields public --- automerge-wasm/src/lib.rs | 12 ++-- automerge/examples/quickstart.rs | 8 +-- automerge/src/autocommit.rs | 21 +++---- automerge/src/automerge.rs | 50 ++++++++--------- automerge/src/sync.rs | 9 +-- automerge/src/transaction.rs | 6 +- automerge/src/transaction/commit.rs | 4 +- automerge/src/transaction/inner.rs | 10 ++-- .../src/transaction/manual_transaction.rs | 4 +- automerge/src/transaction/result.rs | 55 ++++--------------- automerge/src/types.rs | 3 - 11 files changed, 67 insertions(+), 115 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index a075dcf0..c09a2019 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -72,7 +72,7 @@ impl Automerge { (self.0.pending_ops() as u32).into() } - pub fn commit(&mut self, message: Option, time: Option) -> Array { + pub fn commit(&mut self, message: Option, time: Option) -> JsValue { let mut commit_opts = CommitOptions::default(); if let Some(message) = message { commit_opts.set_message(message); @@ -80,12 +80,8 @@ impl Automerge { if let Some(time) = time { commit_opts.set_time(time as i64); } - let heads = self.0.commit_with(commit_opts); - let heads: Array = heads - .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) - .collect(); - heads + let hash = self.0.commit_with(commit_opts); + JsValue::from_str(&hex::encode(&hash.0)) } pub fn merge(&mut self, other: &mut Automerge) -> Result { @@ -374,7 +370,7 @@ impl Automerge { #[wasm_bindgen(js_name = applyChanges)] pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> { let changes: Vec<_> = JS(changes).try_into()?; - self.0.apply_changes(&changes).map_err(to_js_err)?; + 
self.0.apply_changes(changes).map_err(to_js_err)?; Ok(()) } diff --git a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs index 1a48e1a5..db0024c6 100644 --- a/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -9,7 +9,7 @@ fn main() { let mut doc1 = Automerge::new(); let (cards, card1) = doc1 .transact_with::<_, _, AutomergeError, _>( - || CommitOptions::default().with_message("Add card".to_owned()), + |_| CommitOptions::default().with_message("Add card".to_owned()), |tx| { let cards = tx.set_object(ROOT, "cards", ObjType::List).unwrap(); let card1 = tx.insert_object(&cards, 0, ObjType::Map)?; @@ -22,7 +22,7 @@ fn main() { }, ) .unwrap() - .into_result(); + .result; let mut doc2 = Automerge::new(); doc2.merge(&mut doc1).unwrap(); @@ -31,7 +31,7 @@ fn main() { let mut doc2 = Automerge::load(&binary).unwrap(); doc1.transact_with::<_, _, AutomergeError, _>( - || CommitOptions::default().with_message("Mark card as done".to_owned()), + |_| CommitOptions::default().with_message("Mark card as done".to_owned()), |tx| { tx.set(&card1, "done", true)?; Ok(()) @@ -40,7 +40,7 @@ fn main() { .unwrap(); doc2.transact_with::<_, _, AutomergeError, _>( - || CommitOptions::default().with_message("Delete card".to_owned()), + |_| CommitOptions::default().with_message("Delete card".to_owned()), |tx| { tx.del(&cards, 0)?; Ok(()) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index df83c9fe..4a8a0b3a 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -1,6 +1,5 @@ use crate::exid::ExId; use crate::transaction::{CommitOptions, Transactable}; -use crate::types::Patch; use crate::{ change::export_change, transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, @@ -145,7 +144,7 @@ impl AutoCommit { self.doc.load_incremental(data) } - pub fn apply_changes(&mut self, changes: &[Change]) -> Result { + pub fn apply_changes(&mut self, changes: Vec) -> Result<(), 
AutomergeError> { self.ensure_transaction_closed(); self.doc.apply_changes(changes) } @@ -216,7 +215,7 @@ impl AutoCommit { &mut self, sync_state: &mut SyncState, message: SyncMessage, - ) -> Result, AutomergeError> { + ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); self.doc.receive_sync_message(sync_state, message) } @@ -234,13 +233,11 @@ impl AutoCommit { self.doc.get_heads() } - pub fn commit(&mut self) -> Vec { + pub fn commit(&mut self) -> ChangeHash { // ensure that even no changes triggers a change self.ensure_transaction_open(); - self.transaction - .take() - .map(|tx| tx.commit(&mut self.doc, None, None)) - .unwrap_or_else(|| self.doc.get_heads()) + let tx = self.transaction.take().unwrap(); + tx.commit(&mut self.doc, None, None) } /// Commit the current operations with some options. @@ -258,12 +255,10 @@ impl AutoCommit { /// i64; /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(&mut self, options: CommitOptions) -> Vec { + pub fn commit_with(&mut self, options: CommitOptions) -> ChangeHash { self.ensure_transaction_open(); - self.transaction - .take() - .map(|tx| tx.commit(&mut self.doc, options.message, options.time)) - .unwrap_or_else(|| self.doc.get_heads()) + let tx = self.transaction.take().unwrap(); + tx.commit(&mut self.doc, options.message, options.time) } pub fn rollback(&mut self) -> usize { diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 1d6e0fe9..6a0269e7 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -4,12 +4,9 @@ use crate::change::encode_document; use crate::exid::ExId; use crate::keys::Keys; use crate::op_set::OpSet; -use crate::transaction::{ - CommitOptions, Transaction, TransactionFailure, TransactionInner, TransactionResult, - TransactionSuccess, -}; +use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner}; use crate::types::{ - ActorId, 
ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, + ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, ScalarValue, Value, }; use crate::KeysAt; @@ -115,18 +112,18 @@ impl Automerge { /// Run a transaction on this document in a closure, automatically handling commit or rollback /// afterwards. - pub fn transact(&mut self, f: F) -> TransactionResult + pub fn transact(&mut self, f: F) -> transaction::Result where F: FnOnce(&mut Transaction) -> Result, { let mut tx = self.transaction(); let result = f(&mut tx); match result { - Ok(result) => Ok(TransactionSuccess { + Ok(result) => Ok(Success { result, - heads: tx.commit(), + hash: tx.commit(), }), - Err(error) => Err(TransactionFailure { + Err(error) => Err(Failure { error, cancelled: tx.rollback(), }), @@ -134,19 +131,22 @@ impl Automerge { } /// Like [`Self::transact`] but with a function for generating the commit options. - pub fn transact_with(&mut self, c: C, f: F) -> TransactionResult + pub fn transact_with(&mut self, c: C, f: F) -> transaction::Result where F: FnOnce(&mut Transaction) -> Result, - C: FnOnce() -> CommitOptions, + C: FnOnce(&O) -> CommitOptions, { let mut tx = self.transaction(); let result = f(&mut tx); match result { - Ok(result) => Ok(TransactionSuccess { - result, - heads: tx.commit_with(c()), - }), - Err(error) => Err(TransactionFailure { + Ok(result) => { + let commit_options = c(&result); + Ok(Success { + result, + hash: tx.commit_with(commit_options), + }) + } + Err(error) => Err(Failure { error, cancelled: tx.rollback(), }), @@ -372,14 +372,14 @@ impl Automerge { pub fn load(data: &[u8]) -> Result { let changes = Change::load_document(data)?; let mut doc = Self::new(); - doc.apply_changes(&changes)?; + doc.apply_changes(changes)?; Ok(doc) } pub fn load_incremental(&mut self, data: &[u8]) -> Result { let changes = Change::load_document(data)?; let start = self.ops.len(); - self.apply_changes(&changes)?; + 
self.apply_changes(changes)?; let delta = self.ops.len() - start; Ok(delta) } @@ -394,26 +394,26 @@ impl Automerge { dup } - pub fn apply_changes(&mut self, changes: &[Change]) -> Result { + pub fn apply_changes(&mut self, changes: Vec) -> Result<(), AutomergeError> { for c in changes { if !self.history_index.contains_key(&c.hash) { - if self.duplicate_seq(c) { + if self.duplicate_seq(&c) { return Err(AutomergeError::DuplicateSeqNumber( c.seq, c.actor_id().clone(), )); } - if self.is_causally_ready(c) { - self.apply_change(c.clone()); + if self.is_causally_ready(&c) { + self.apply_change(c); } else { - self.queue.push(c.clone()); + self.queue.push(c); } } } while let Some(c) = self.pop_next_causally_ready_change() { self.apply_change(c); } - Ok(Patch {}) + Ok(()) } pub fn apply_change(&mut self, change: Change) { @@ -487,7 +487,7 @@ impl Automerge { .into_iter() .cloned() .collect::>(); - self.apply_changes(&changes)?; + self.apply_changes(changes)?; Ok(self.get_heads()) } diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 862df8a5..469151fb 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -6,7 +6,6 @@ use std::{ io::Write, }; -use crate::types::Patch; use crate::{ decoding, decoding::Decoder, encoding::Encodable, Automerge, AutomergeError, Change, ChangeHash, }; @@ -98,9 +97,7 @@ impl Automerge { &mut self, sync_state: &mut SyncState, message: SyncMessage, - ) -> Result, AutomergeError> { - let mut patch = None; - + ) -> Result<(), AutomergeError> { let before_heads = self.get_heads(); let SyncMessage { @@ -112,7 +109,7 @@ impl Automerge { let changes_is_empty = message_changes.is_empty(); if !changes_is_empty { - patch = Some(self.apply_changes(&message_changes)?); + self.apply_changes(message_changes)?; sync_state.shared_heads = advance_heads( &before_heads.iter().collect(), &self.get_heads().into_iter().collect(), @@ -153,7 +150,7 @@ impl Automerge { sync_state.their_heads = Some(message_heads); sync_state.their_need = 
Some(message_need); - Ok(patch) + Ok(()) } fn make_bloom_filter(&self, last_sync: Vec) -> SyncHave { diff --git a/automerge/src/transaction.rs b/automerge/src/transaction.rs index fe24619e..667503ae 100644 --- a/automerge/src/transaction.rs +++ b/automerge/src/transaction.rs @@ -8,7 +8,7 @@ pub use self::commit::CommitOptions; pub use self::transactable::Transactable; pub(crate) use inner::TransactionInner; pub use manual_transaction::Transaction; -pub use result::TransactionFailure; -pub use result::TransactionSuccess; +pub use result::Failure; +pub use result::Success; -pub type TransactionResult = Result, TransactionFailure>; +pub type Result = std::result::Result, Failure>; diff --git a/automerge/src/transaction/commit.rs b/automerge/src/transaction/commit.rs index 8f7007f8..d4b12a97 100644 --- a/automerge/src/transaction/commit.rs +++ b/automerge/src/transaction/commit.rs @@ -1,8 +1,8 @@ /// Optional metadata for a commit. #[derive(Debug, Default, Clone)] pub struct CommitOptions { - pub(crate) message: Option, - pub(crate) time: Option, + pub message: Option, + pub time: Option, } impl CommitOptions { diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index b544a952..6c415965 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -30,7 +30,7 @@ impl TransactionInner { doc: &mut Automerge, message: Option, time: Option, - ) -> Vec { + ) -> ChangeHash { if message.is_some() { self.message = message; } @@ -39,9 +39,11 @@ impl TransactionInner { self.time = t; } - doc.update_history(export_change(&self, &doc.ops.m.actors, &doc.ops.m.props)); - - doc.get_heads() + let change = export_change(&self, &doc.ops.m.actors, &doc.ops.m.props); + let hash = change.hash; + doc.update_history(change); + debug_assert_eq!(doc.get_heads(), vec![hash]); + hash } /// Undo the operations added in this transaction, returning the number of cancelled diff --git a/automerge/src/transaction/manual_transaction.rs 
b/automerge/src/transaction/manual_transaction.rs index 79ae07c2..e25a4053 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -32,7 +32,7 @@ impl<'a> Transaction<'a> { /// Commit the operations performed in this transaction, returning the hashes corresponding to /// the new heads. - pub fn commit(mut self) -> Vec { + pub fn commit(mut self) -> ChangeHash { self.inner.take().unwrap().commit(self.doc, None, None) } @@ -52,7 +52,7 @@ impl<'a> Transaction<'a> { /// i64; /// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(mut self, options: CommitOptions) -> Vec { + pub fn commit_with(mut self, options: CommitOptions) -> ChangeHash { self.inner .take() .unwrap() diff --git a/automerge/src/transaction/result.rs b/automerge/src/transaction/result.rs index 7f01ead2..345c9f2c 100644 --- a/automerge/src/transaction/result.rs +++ b/automerge/src/transaction/result.rs @@ -2,53 +2,18 @@ use crate::ChangeHash; /// The result of a successful, and committed, transaction. #[derive(Debug)] -pub struct TransactionSuccess { - pub(crate) result: O, - pub(crate) heads: Vec, -} - -impl TransactionSuccess { - /// Get the result of the transaction. - pub fn result(&self) -> &O { - &self.result - } - - /// Get the result of the transaction. - pub fn into_result(self) -> O { - self.result - } - - /// Get the new heads of the document after commiting the transaction. - pub fn heads(&self) -> &[ChangeHash] { - &self.heads - } - - /// Get the new heads of the document after commiting the transaction. - pub fn into_heads(self) -> Vec { - self.heads - } +pub struct Success { + /// The result of the transaction. + pub result: O, + /// The hash of the change, also the head of the document. + pub hash: ChangeHash, } /// The result of a failed, and rolled back, transaction. 
#[derive(Debug)] -pub struct TransactionFailure { - pub(crate) error: E, - pub(crate) cancelled: usize, -} - -impl TransactionFailure { - /// Get the error of the transaction. - pub fn error(&self) -> &E { - &self.error - } - - /// Get the error of the transaction. - pub fn into_error(self) -> E { - self.error - } - - /// Get the number of cancelled operations in the transaction. - pub fn cancelled(&self) -> usize { - self.cancelled - } +pub struct Failure { + /// The error returned from the transaction. + pub error: E, + /// The number of operations cancelled. + pub cancelled: usize, } diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 3eeaca30..30641ee2 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -313,9 +313,6 @@ pub enum Prop { Seq(usize), } -#[derive(Debug, PartialEq, PartialOrd, Eq, Ord, Clone)] -pub struct Patch {} - impl Key { pub fn elemid(&self) -> Option { match self { From e26837b09dba25fc624bbd72dbf4b3d5844bd35d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 9 Mar 2022 12:43:52 +0000 Subject: [PATCH 159/730] Move sync structs to module --- automerge-wasm/src/interop.rs | 22 ++++++++++----------- automerge-wasm/src/lib.rs | 8 ++++---- automerge-wasm/src/sync.rs | 4 ++-- automerge/src/autocommit.rs | 8 ++++---- automerge/src/lib.rs | 3 +-- automerge/src/sync.rs | 36 +++++++++++++++++------------------ automerge/src/sync/state.rs | 12 ++++++------ 7 files changed, 46 insertions(+), 47 deletions(-) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 153274fb..00c4deb2 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -25,8 +25,8 @@ impl From for JsValue { } } -impl From for JS { - fn from(state: am::SyncState) -> Self { +impl From for JS { + fn from(state: am::sync::State) -> Self { let shared_heads: JS = state.shared_heads.into(); let last_sent_heads: JS = state.last_sent_heads.into(); let their_heads: JS = state.their_heads.into(); @@ -133,7 
+133,7 @@ impl TryFrom for Vec { } } -impl TryFrom for am::SyncState { +impl TryFrom for am::sync::State { type Error = JsValue; fn try_from(value: JS) -> Result { @@ -144,7 +144,7 @@ impl TryFrom for am::SyncState { let their_need = js_get(&value, "theirNeed")?.into(); let their_have = js_get(&value, "theirHave")?.try_into()?; let sent_hashes = js_get(&value, "sentHashes")?.try_into()?; - Ok(am::SyncState { + Ok(am::sync::State { shared_heads, last_sent_heads, their_heads, @@ -155,7 +155,7 @@ impl TryFrom for am::SyncState { } } -impl TryFrom for Option> { +impl TryFrom for Option> { type Error = JsValue; fn try_from(value: JS) -> Result { @@ -167,17 +167,17 @@ impl TryFrom for Option> { } } -impl TryFrom for Vec { +impl TryFrom for Vec { type Error = JsValue; fn try_from(value: JS) -> Result { let value = value.0.dyn_into::()?; - let have: Result, JsValue> = value + let have: Result, JsValue> = value .iter() .map(|s| { let last_sync = js_get(&s, "lastSync")?.try_into()?; let bloom = js_get(&s, "bloom")?.try_into()?; - Ok(am::SyncHave { last_sync, bloom }) + Ok(am::sync::Have { last_sync, bloom }) }) .collect(); let have = have?; @@ -185,7 +185,7 @@ impl TryFrom for Vec { } } -impl TryFrom for am::BloomFilter { +impl TryFrom for am::sync::BloomFilter { type Error = JsValue; fn try_from(value: JS) -> Result { @@ -215,8 +215,8 @@ impl From<&[Change]> for AR { } } -impl From<&[am::SyncHave]> for AR { - fn from(value: &[am::SyncHave]) -> Self { +impl From<&[am::sync::Have]> for AR { + fn from(value: &[am::sync::Have]) -> Self { AR(value .iter() .map(|have| { diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index c09a2019..27331e8d 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -442,7 +442,7 @@ impl Automerge { message: Uint8Array, ) -> Result<(), JsValue> { let message = message.to_vec(); - let message = am::SyncMessage::decode(message.as_slice()).map_err(to_js_err)?; + let message = 
am::sync::Message::decode(message.as_slice()).map_err(to_js_err)?; self.0 .receive_sync_message(&mut state.0, message) .map_err(to_js_err)?; @@ -574,7 +574,7 @@ pub fn decode_change(change: Uint8Array) -> Result { #[wasm_bindgen(js_name = initSyncState)] pub fn init_sync_state() -> SyncState { - SyncState(am::SyncState::new()) + SyncState(am::sync::State::new()) } // this is needed to be compatible with the automerge-js api @@ -596,7 +596,7 @@ pub fn encode_sync_message(message: JsValue) -> Result { let changes = js_get(&message, "changes")?.try_into()?; let have = js_get(&message, "have")?.try_into()?; Ok(Uint8Array::from( - am::SyncMessage { + am::sync::Message { heads, need, have, @@ -610,7 +610,7 @@ pub fn encode_sync_message(message: JsValue) -> Result { #[wasm_bindgen(js_name = decodeSyncMessage)] pub fn decode_sync_message(msg: Uint8Array) -> Result { let data = msg.to_vec(); - let msg = am::SyncMessage::decode(&data).map_err(to_js_err)?; + let msg = am::sync::Message::decode(&data).map_err(to_js_err)?; let heads = AR::from(msg.heads.as_slice()); let need = AR::from(msg.need.as_slice()); let changes = AR::from(msg.changes.as_slice()); diff --git a/automerge-wasm/src/sync.rs b/automerge-wasm/src/sync.rs index 7c201e61..5a24a28c 100644 --- a/automerge-wasm/src/sync.rs +++ b/automerge-wasm/src/sync.rs @@ -9,7 +9,7 @@ use crate::interop::{to_js_err, AR, JS}; #[wasm_bindgen] #[derive(Debug)] -pub struct SyncState(pub(crate) am::SyncState); +pub struct SyncState(pub(crate) am::sync::State); #[wasm_bindgen] impl SyncState { @@ -45,7 +45,7 @@ impl SyncState { pub(crate) fn decode(data: Uint8Array) -> Result { let data = data.to_vec(); - let s = am::SyncState::decode(&data); + let s = am::sync::State::decode(&data); let s = s.map_err(to_js_err)?; Ok(SyncState(s)) } diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 4a8a0b3a..4be78664 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -4,7 +4,7 @@ use crate::{ 
change::export_change, transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, }; -use crate::{Keys, KeysAt, ObjType, ScalarValue, SyncMessage, SyncState}; +use crate::{sync, Keys, KeysAt, ObjType, ScalarValue}; /// An automerge document that automatically manages transactions. #[derive(Debug, Clone)] @@ -206,15 +206,15 @@ impl AutoCommit { self.doc.dump() } - pub fn generate_sync_message(&mut self, sync_state: &mut SyncState) -> Option { + pub fn generate_sync_message(&mut self, sync_state: &mut sync::State) -> Option { self.ensure_transaction_closed(); self.doc.generate_sync_message(sync_state) } pub fn receive_sync_message( &mut self, - sync_state: &mut SyncState, - message: SyncMessage, + sync_state: &mut sync::State, + message: sync::Message, ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); self.doc.receive_sync_message(sync_state, message) diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index f3c62dce..a98ef14e 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -40,7 +40,7 @@ mod legacy; mod op_set; mod op_tree; mod query; -mod sync; +pub mod sync; pub mod transaction; mod types; mod value; @@ -55,7 +55,6 @@ pub use exid::ExId as ObjId; pub use keys::Keys; pub use keys_at::KeysAt; pub use legacy::Change as ExpandedChange; -pub use sync::{BloomFilter, SyncHave, SyncMessage, SyncState}; pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; pub use value::{ScalarValue, Value}; diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 469151fb..cc4aca51 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -14,13 +14,13 @@ mod bloom; mod state; pub use bloom::BloomFilter; -pub use state::{SyncHave, SyncState}; +pub use state::{Have, State}; const HASH_SIZE: usize = 32; // 256 bits = 32 bytes const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identification impl Automerge { - pub fn generate_sync_message(&self, sync_state: &mut 
SyncState) -> Option { + pub fn generate_sync_message(&self, sync_state: &mut State) -> Option { let our_heads = self.get_heads(); let our_need = self.get_missing_deps(sync_state.their_heads.as_ref().unwrap_or(&vec![])); @@ -43,10 +43,10 @@ impl Automerge { .iter() .all(|hash| self.get_change_by_hash(hash).is_some()) { - let reset_msg = SyncMessage { + let reset_msg = Message { heads: our_heads, need: Vec::new(), - have: vec![SyncHave::default()], + have: vec![Have::default()], changes: Vec::new(), }; return Some(reset_msg); @@ -83,7 +83,7 @@ impl Automerge { .sent_hashes .extend(changes_to_send.iter().map(|c| c.hash)); - let sync_message = SyncMessage { + let sync_message = Message { heads: our_heads, have: our_have, need: our_need, @@ -95,12 +95,12 @@ impl Automerge { pub fn receive_sync_message( &mut self, - sync_state: &mut SyncState, - message: SyncMessage, + sync_state: &mut State, + message: Message, ) -> Result<(), AutomergeError> { let before_heads = self.get_heads(); - let SyncMessage { + let Message { heads: message_heads, changes: message_changes, need: message_need, @@ -153,19 +153,19 @@ impl Automerge { Ok(()) } - fn make_bloom_filter(&self, last_sync: Vec) -> SyncHave { + fn make_bloom_filter(&self, last_sync: Vec) -> Have { let new_changes = self.get_changes(&last_sync); let hashes = new_changes .into_iter() .map(|change| change.hash) .collect::>(); - SyncHave { + Have { last_sync, bloom: BloomFilter::from(&hashes[..]), } } - fn get_changes_to_send(&self, have: Vec, need: &[ChangeHash]) -> Vec<&Change> { + fn get_changes_to_send(&self, have: Vec, need: &[ChangeHash]) -> Vec<&Change> { if have.is_empty() { need.iter() .filter_map(|hash| self.get_change_by_hash(hash)) @@ -175,7 +175,7 @@ impl Automerge { let mut bloom_filters = Vec::with_capacity(have.len()); for h in have { - let SyncHave { last_sync, bloom } = h; + let Have { last_sync, bloom } = h; for hash in last_sync { last_sync_hashes.insert(hash); } @@ -237,14 +237,14 @@ impl Automerge { } 
#[derive(Debug, Clone)] -pub struct SyncMessage { +pub struct Message { pub heads: Vec, pub need: Vec, - pub have: Vec, + pub have: Vec, pub changes: Vec, } -impl SyncMessage { +impl Message { pub fn encode(self) -> Vec { let mut buf = vec![MESSAGE_TYPE_SYNC]; @@ -265,7 +265,7 @@ impl SyncMessage { buf } - pub fn decode(bytes: &[u8]) -> Result { + pub fn decode(bytes: &[u8]) -> Result { let mut decoder = Decoder::new(Cow::Borrowed(bytes)); let message_type = decoder.read::()?; @@ -284,7 +284,7 @@ impl SyncMessage { let last_sync = decode_hashes(&mut decoder)?; let bloom_bytes: Vec = decoder.read()?; let bloom = BloomFilter::try_from(bloom_bytes.as_slice())?; - have.push(SyncHave { last_sync, bloom }); + have.push(Have { last_sync, bloom }); } let change_count = decoder.read::()?; @@ -294,7 +294,7 @@ impl SyncMessage { changes.push(Change::from_bytes(change)?); } - Ok(SyncMessage { + Ok(Message { heads, need, have, diff --git a/automerge/src/sync/state.rs b/automerge/src/sync/state.rs index 48b183b4..bc1360e2 100644 --- a/automerge/src/sync/state.rs +++ b/automerge/src/sync/state.rs @@ -1,27 +1,27 @@ use std::{borrow::Cow, collections::HashSet}; -use super::{decode_hashes, encode_hashes}; -use crate::{decoding, decoding::Decoder, BloomFilter, ChangeHash}; +use super::{decode_hashes, encode_hashes, BloomFilter}; +use crate::{decoding, decoding::Decoder, ChangeHash}; const SYNC_STATE_TYPE: u8 = 0x43; // first byte of an encoded sync state, for identification #[derive(Debug, Clone, Default)] -pub struct SyncState { +pub struct State { pub shared_heads: Vec, pub last_sent_heads: Vec, pub their_heads: Option>, pub their_need: Option>, - pub their_have: Option>, + pub their_have: Option>, pub sent_hashes: HashSet, } #[derive(Debug, Clone, Default)] -pub struct SyncHave { +pub struct Have { pub last_sync: Vec, pub bloom: BloomFilter, } -impl SyncState { +impl State { pub fn new() -> Self { Default::default() } From 266f112e91fe4059df856c34fdb525f9fc8889d5 Mon Sep 17 
00:00:00 2001 From: Andrew Jeffery Date: Wed, 9 Mar 2022 13:04:10 +0000 Subject: [PATCH 160/730] Document some sync api --- automerge/src/sync.rs | 5 +++++ automerge/src/sync/state.rs | 7 +++++++ 2 files changed, 12 insertions(+) diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index cc4aca51..43801b9c 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -236,11 +236,16 @@ impl Automerge { } } +/// The sync message to be sent. #[derive(Debug, Clone)] pub struct Message { + /// The heads of the sender. pub heads: Vec, + /// The hashes of any changes that are being explicitly requested from the recipient. pub need: Vec, + /// A summary of the changes that the sender already has. pub have: Vec, + /// The changes for the recipient to apply. pub changes: Vec, } diff --git a/automerge/src/sync/state.rs b/automerge/src/sync/state.rs index bc1360e2..9828060c 100644 --- a/automerge/src/sync/state.rs +++ b/automerge/src/sync/state.rs @@ -5,6 +5,7 @@ use crate::{decoding, decoding::Decoder, ChangeHash}; const SYNC_STATE_TYPE: u8 = 0x43; // first byte of an encoded sync state, for identification +/// The state of synchronisation with a peer. #[derive(Debug, Clone, Default)] pub struct State { pub shared_heads: Vec, @@ -15,9 +16,15 @@ pub struct State { pub sent_hashes: HashSet, } +/// A summary of the changes that the sender of the message already has. +/// This is implicitly a request to the recipient to send all changes that the +/// sender does not already have. #[derive(Debug, Clone, Default)] pub struct Have { + /// The heads at the time of the last successful sync with this recipient. pub last_sync: Vec, + /// A bloom filter summarising all of the changes that the sender of the message has added + /// since the last sync. 
pub bloom: BloomFilter, } From f230be8aecd745b65244d5e1cb402d94dfc62ca8 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 9 Mar 2022 10:41:14 -0500 Subject: [PATCH 161/730] change the wasm commit back to an array --- automerge-wasm/src/lib.rs | 4 +++- scripts/ci/run | 1 + scripts/ci/wasm_tests | 6 ++++++ 3 files changed, 10 insertions(+), 1 deletion(-) create mode 100755 scripts/ci/wasm_tests diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 27331e8d..9f838028 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -81,7 +81,9 @@ impl Automerge { commit_opts.set_time(time as i64); } let hash = self.0.commit_with(commit_opts); - JsValue::from_str(&hex::encode(&hash.0)) + let result = Array::new(); + result.push(&JsValue::from_str(&hex::encode(&hash.0))); + result.into() } pub fn merge(&mut self, other: &mut Automerge) -> Result { diff --git a/scripts/ci/run b/scripts/ci/run index 42367e10..a0fa3616 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -6,4 +6,5 @@ set -eou pipefail ./scripts/ci/build-test ./scripts/ci/docs ./scripts/ci/advisory +./scripts/ci/wasm_tests ./scripts/ci/js_tests diff --git a/scripts/ci/wasm_tests b/scripts/ci/wasm_tests new file mode 100755 index 00000000..778e1e1f --- /dev/null +++ b/scripts/ci/wasm_tests @@ -0,0 +1,6 @@ +THIS_SCRIPT=$(dirname "$0"); +WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; + +yarn --cwd $WASM_PROJECT install; +yarn --cwd $WASM_PROJECT build; +yarn --cwd $WASM_PROJECT test; From d3b97a3cbb2244bc369e4687c0f92df699c3adfd Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 9 Mar 2022 16:02:08 +0000 Subject: [PATCH 162/730] Add wasm tests to CI --- .github/workflows/ci.yaml | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b6e8dc31..9a9753d0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1,5 +1,5 @@ name: ci -on: +on: push: branches: - experiment @@ -59,6 
+59,15 @@ jobs: with: command: check ${{ matrix.checks }} + wasm_tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Install wasm-pack + run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh + - name: run tests + run: ./scripts/ci/wasm_tests + js_tests: runs-on: ubuntu-latest steps: From 42233414b3f632bb190738981b933f0fade1e0f5 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 9 Mar 2022 16:52:52 +0000 Subject: [PATCH 163/730] Add some documentation --- automerge/src/automerge.rs | 42 +++++++++++++++++++++++++++++++++++++ automerge/src/change.rs | 8 +++++++ automerge/src/legacy/mod.rs | 9 ++++++++ automerge/src/types.rs | 1 + 4 files changed, 60 insertions(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 6a0269e7..0a80d0cd 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -35,6 +35,7 @@ pub struct Automerge { } impl Automerge { + /// Create a new document with a random actor id. pub fn new() -> Self { Automerge { queue: vec![], @@ -49,16 +50,19 @@ impl Automerge { } } + /// Set the actor id for this document. pub fn with_actor(mut self, actor: ActorId) -> Self { self.actor = Actor::Unused(actor); self } + /// Set the actor id for this document. pub fn set_actor(&mut self, actor: ActorId) -> &mut Self { self.actor = Actor::Unused(actor); self } + /// Get the current actor id of this document. pub fn get_actor(&self) -> &ActorId { match &self.actor { Actor::Unused(actor) => actor, @@ -153,6 +157,7 @@ impl Automerge { } } + /// Fork this document at the current point for use by a different actor. pub fn fork(&self) -> Self { let mut f = self.clone(); f.set_actor(ActorId::random()); @@ -200,6 +205,7 @@ impl Automerge { } } + /// Get the length of the given object. 
pub fn length>(&self, obj: O) -> usize { if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { match self.ops.object_type(&inner_obj) { @@ -214,6 +220,7 @@ impl Automerge { } } + /// Historical version of [`length`](Self::length). pub fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { let clock = self.clock_at(heads); @@ -255,6 +262,7 @@ impl Automerge { ExId::Id(id.0, self.ops.m.actors.cache[id.1].clone(), id.1) } + /// Get the string represented by the given text object. pub fn text>(&self, obj: O) -> Result { let obj = self.exid_to_obj(obj.as_ref())?; let query = self.ops.search(obj, query::ListVals::new()); @@ -267,6 +275,7 @@ impl Automerge { Ok(buffer) } + /// Historical version of [`text`](Self::text). pub fn text_at>( &self, obj: O, @@ -287,6 +296,10 @@ impl Automerge { // TODO - I need to return these OpId's here **only** to get // the legacy conflicts format of { [opid]: value } // Something better? + /// Get a value out of the document. + /// + /// Returns both the value and the id of the operation that created it, useful for handling + /// conflicts and serves as the object id if the value is an object. pub fn value, P: Into>( &self, obj: O, @@ -295,6 +308,7 @@ impl Automerge { Ok(self.values(obj, prop.into())?.last().cloned()) } + /// Historical version of [`value`](Self::value). pub fn value_at, P: Into>( &self, obj: O, @@ -304,6 +318,10 @@ impl Automerge { Ok(self.values_at(obj, prop, heads)?.last().cloned()) } + /// Get all values out of the document at this prop that conflict. + /// + /// Returns both the value and the id of the operation that created it, useful for handling + /// conflicts and serves as the object id if the value is an object. pub fn values, P: Into>( &self, obj: O, @@ -335,6 +353,7 @@ impl Automerge { Ok(result) } + /// Historical version of [`values`](Self::values). 
pub fn values_at, P: Into>( &self, obj: O, @@ -369,6 +388,7 @@ impl Automerge { Ok(result) } + /// Load a document. pub fn load(data: &[u8]) -> Result { let changes = Change::load_document(data)?; let mut doc = Self::new(); @@ -376,6 +396,7 @@ impl Automerge { Ok(doc) } + /// Load an incremental save of a document. pub fn load_incremental(&mut self, data: &[u8]) -> Result { let changes = Change::load_document(data)?; let start = self.ops.len(); @@ -394,6 +415,7 @@ impl Automerge { dup } + /// Apply changes to this document. pub fn apply_changes(&mut self, changes: Vec) -> Result<(), AutomergeError> { for c in changes { if !self.history_index.contains_key(&c.hash) { @@ -416,6 +438,7 @@ impl Automerge { Ok(()) } + /// Apply a single change to this document. pub fn apply_change(&mut self, change: Change) { let ops = self.import_ops(&change, self.history.len()); self.update_history(change); @@ -491,6 +514,7 @@ impl Automerge { Ok(self.get_heads()) } + /// Save the entirety of this document in a compact form. pub fn save(&mut self) -> Vec { let heads = self.get_heads(); let c = self.history.iter(); @@ -500,6 +524,7 @@ impl Automerge { bytes } + /// Save the changes since last save in a compact form. pub fn save_incremental(&mut self) -> Vec { let changes = self.get_changes(self.saved.as_slice()); let mut bytes = vec![]; @@ -573,6 +598,8 @@ impl Automerge { } } + /// Get the hashes of the changes in this document that aren't transitive dependencies of the + /// given `heads`. pub fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash).collect(); let mut missing = HashSet::new(); @@ -653,6 +680,7 @@ impl Automerge { .collect() } + /// Get the last change this actor made to the document. pub fn get_last_local_change(&self) -> Option<&Change> { return self .history @@ -689,12 +717,14 @@ impl Automerge { clock } + /// Get a change by its hash. 
pub fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { self.history_index .get(hash) .and_then(|index| self.history.get(*index)) } + /// Get the changes that the other document added compared to this document. pub fn get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { // Depth-first traversal from the heads through the dependency graph, // until we reach a change that is already present in other @@ -1284,4 +1314,16 @@ mod tests { // deleting an element in a list that does not exist is an error assert!(tx.del(ROOT, 0).is_err()); } + + #[test] + fn loaded_doc_changes_have_hash() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + tx.set(ROOT, "a", 1).unwrap(); + tx.commit(); + let hash = doc.get_last_local_change().unwrap().hash; + let bytes = doc.save(); + let doc = Automerge::load(&bytes).unwrap(); + assert_eq!(doc.get_change_by_hash(&hash).unwrap().hash, hash); + } } diff --git a/automerge/src/change.rs b/automerge/src/change.rs index e339fd29..3de175f8 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -310,16 +310,24 @@ impl ChangeBytes { } } +/// A change represents a group of operations performed by an actor. #[derive(PartialEq, Debug, Clone)] pub struct Change { bytes: ChangeBytes, body_start: usize, + /// Hash of this change. pub hash: amp::ChangeHash, + /// The index of this change in the changes from this actor. pub seq: u64, + /// The start operation index. pub start_op: u64, + /// The time that this change was committed. pub time: i64, + /// The message of this change. message: Range, + /// The actors referenced in this change. actors: Vec, + /// The dependencies of this change. 
pub deps: Vec, ops: HashMap>, extra_bytes: Range, diff --git a/automerge/src/legacy/mod.rs b/automerge/src/legacy/mod.rs index 7f5e7dfd..afa93bc2 100644 --- a/automerge/src/legacy/mod.rs +++ b/automerge/src/legacy/mod.rs @@ -232,19 +232,28 @@ impl Op { } } +/// A change represents a group of operations performed by an actor. #[derive(Deserialize, Serialize, Debug, Clone)] pub struct Change { + /// The operations performed in this change. #[serde(rename = "ops")] pub operations: Vec, + /// The actor that performed this change. #[serde(rename = "actor")] pub actor_id: ActorId, + /// The hash of this change. #[serde(skip_serializing_if = "Option::is_none", default)] pub hash: Option, + /// The index of this change in the changes from this actor. pub seq: u64, + /// The start operation index. #[serde(rename = "startOp")] pub start_op: u64, + /// The time that this change was committed. pub time: i64, + /// The message of this change. pub message: Option, + /// The dependencies of this change. pub deps: Vec, #[serde(skip_serializing_if = "Vec::is_empty", default = "Default::default")] pub extra_bytes: Vec, diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 30641ee2..24516a28 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -449,6 +449,7 @@ impl Op { #[derive(Debug, Clone)] pub struct Peer {} +/// The sha256 hash of a change. 
#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord, Copy)] pub struct ChangeHash(pub [u8; 32]); From 0fca6a48ee64e68172567d2eeabbc7c2c06d4456 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 9 Mar 2022 18:12:05 +0000 Subject: [PATCH 164/730] Add loading to edit-trace rust benchmark --- edit-trace/src/main.rs | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index c3063084..b6a452a2 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -1,11 +1,10 @@ use automerge::{transaction::Transactable, Automerge, AutomergeError, ROOT}; use automerge::{ObjType, ScalarValue}; -use std::fs; use std::time::Instant; fn main() -> Result<(), AutomergeError> { - let contents = fs::read_to_string("edits.json").expect("cannot read edits file"); - let edits = json::parse(&contents).expect("cant parse edits"); + let contents = include_str!("../edits.json"); + let edits = json::parse(contents).expect("cant parse edits"); let mut commands = vec![]; for i in 0..edits.len() { let pos: usize = edits[i][0].as_usize().unwrap(); @@ -29,7 +28,14 @@ fn main() -> Result<(), AutomergeError> { tx.splice(&text, pos, del, vals)?; } tx.commit(); - let _ = doc.save(); + let save = Instant::now(); + let bytes = doc.save(); + println!("Saved in {} ms", save.elapsed().as_millis()); + + let load = Instant::now(); + let _ = Automerge::load(&bytes).unwrap(); + println!("Loaded in {} ms", load.elapsed().as_millis()); + println!("Done in {} ms", now.elapsed().as_millis()); Ok(()) } From c51073c150b1b780f0a7b8d9a9e9b0b21a7ba90c Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 9 Mar 2022 17:53:30 -0500 Subject: [PATCH 165/730] add paths/materialize to api --- automerge-wasm/index.d.ts | 3 +- automerge-wasm/src/interop.rs | 7 +- automerge-wasm/src/lib.rs | 49 +++++++- automerge-wasm/test/test.ts | 119 ++++++++++++------ automerge/src/autocommit.rs | 4 + automerge/src/automerge.rs | 7 +- 
automerge/src/error.rs | 2 + .../src/transaction/manual_transaction.rs | 4 + automerge/src/transaction/transactable.rs | 3 + 9 files changed, 157 insertions(+), 41 deletions(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index d032a62a..2897955e 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -100,6 +100,7 @@ export class Automerge { keys(obj: ObjID, heads?: Heads): string[]; text(obj: ObjID, heads?: Heads): string; length(obj: ObjID, heads?: Heads): number; + materialize(obj?: ObjID): any; // transactions commit(message?: string, time?: number): Heads; @@ -115,7 +116,7 @@ export class Automerge { // sync over network receiveSyncMessage(state: SyncState, message: SyncMessage): void; - generateSyncMessage(state: SyncState): SyncMessage; + generateSyncMessage(state: SyncState): SyncMessage | null; // low level change functions applyChanges(changes: Change[]): void; diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 00c4deb2..69dd38f7 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -340,9 +340,12 @@ pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { { Reflect::set(&map, &k.into(), &map_to_js(doc, &exid)).unwrap(); } - Ok(Some((Value::Object(_), exid))) => { + Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => { Reflect::set(&map, &k.into(), &list_to_js(doc, &exid)).unwrap(); } + Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => { + Reflect::set(&map, &k.into(), &doc.text(&exid).unwrap().into()).unwrap(); + } Ok(Some((Value::Scalar(v), _))) => { Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap(); } @@ -352,7 +355,7 @@ pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { map.into() } -fn list_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { +pub(crate) fn list_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { let len = doc.length(obj); let array = Array::new(); for i in 
0..len { diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 9f838028..3d92be5d 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -12,7 +12,9 @@ mod interop; mod sync; mod value; -use interop::{get_heads, js_get, js_set, map_to_js, to_js_err, to_objtype, to_prop, AR, JS}; +use interop::{ + get_heads, js_get, js_set, list_to_js, map_to_js, to_js_err, to_objtype, to_prop, AR, JS, +}; use sync::SyncState; use value::{datatype, ScalarValue}; @@ -465,9 +467,52 @@ impl Automerge { map_to_js(&self.0, &ROOT) } + pub fn materialize(&self, obj: JsValue) -> Result { + let obj = self.import(obj).unwrap_or(ROOT); + match self.0.object_type(&obj)? { + am::ObjType::Map => Ok(map_to_js(&self.0, &obj)), + am::ObjType::List => Ok(list_to_js(&self.0, &obj)), + am::ObjType::Text => Ok(self.0.text(&obj)?.into()), + am::ObjType::Table => Ok(map_to_js(&self.0, &obj)), + } + } + fn import(&self, id: JsValue) -> Result { if let Some(s) = id.as_string() { - Ok(self.0.import(&s)?) + if let Some(post) = s.strip_prefix('/') { + let mut obj = ROOT; + let mut is_map = true; + let parts = post.split('/'); + for prop in parts { + if prop.is_empty() { + break; + } + let val = if is_map { + self.0.value(obj, prop)? + } else { + self.0.value(obj, am::Prop::Seq(prop.parse().unwrap()))? + }; + match val { + Some((am::Value::Object(am::ObjType::Map), id)) => { + is_map = true; + obj = id; + } + Some((am::Value::Object(am::ObjType::Table), id)) => { + is_map = true; + obj = id; + } + Some((am::Value::Object(_), id)) => { + is_map = false; + obj = id; + } + None => return Err(to_js_err(format!("invalid path '{}'", s))), + _ => return Err(to_js_err(format!("path '{}' is not an object", s))), + }; + } + Ok(obj) + } else { + Ok(self.0.import(&s)?) 
+ } } else { Err(to_js_err("invalid objid")) } diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 7bcd0517..f2ea4369 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -166,15 +166,16 @@ describe('Automerge', () => { let submap = doc.set_object(root, "letters", []) doc.insert(submap, 0, "a"); doc.insert(submap, 0, "b"); - assert.deepEqual(doc.toJS(), { letters: ["b", "a" ] }) + assert.deepEqual(doc.materialize(), { letters: ["b", "a" ] }) doc.push(submap, "c"); - assert.deepEqual(doc.toJS(), { letters: ["b", "a", "c" ] }) + assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c" ] }) doc.push(submap, 3, "timestamp"); - assert.deepEqual(doc.toJS(), { letters: ["b", "a", "c", new Date(3) ] }) + assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3) ] }) doc.splice(submap, 1, 1, ["d","e","f"]); - assert.deepEqual(doc.toJS(), { letters: ["b", "d", "e", "f", "c", new Date(3) ] }) + assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", new Date(3) ] }) doc.set(submap, 0, "z"); - assert.deepEqual(doc.toJS(), { letters: ["z", "d", "e", "f", "c", new Date(3) ] }) + assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", new Date(3) ] }) + assert.deepEqual(doc.materialize(submap), ["z", "d", "e", "f", "c", new Date(3) ]) assert.deepEqual(doc.length(submap),6) doc.free() @@ -358,19 +359,30 @@ describe('Automerge', () => { doc4.free() }) + it('paths can be used instead of objids', () => { + let doc = create("aaaa") + doc.set_object("_root","list",[{ foo: "bar"}, [1,2,3]]) + assert.deepEqual(doc.materialize("/"), { list: [{ foo: "bar"}, [1,2,3]] }) + assert.deepEqual(doc.materialize("/list"), [{ foo: "bar"}, [1,2,3]]) + assert.deepEqual(doc.materialize("/list/0"), { foo: "bar"}) + }) + it('recursive sets are possible', () => { let doc = create("aaaa") let l1 = doc.set_object("_root","list",[{ foo: "bar"}, [1,2,3]]) let l2 = doc.insert_object(l1, 0, { zip: ["a", 
"b"] }) - let l3 = doc.set_object("_root","info1","hello world") // 'text' - let l4 = doc.set("_root","info2","hello world") // 'str' - let l5 = doc.set_object("_root","info3","hello world") - assert.deepEqual(doc.toJS(), { + let l3 = doc.set_object("_root","info1","hello world") // 'text' object + doc.set("_root","info2","hello world") // 'str' + let l4 = doc.set_object("_root","info3","hello world") + assert.deepEqual(doc.materialize(), { "list": [ { zip: ["a", "b"] }, { foo: "bar"}, [ 1,2,3]], - "info1": "hello world".split(""), + "info1": "hello world", "info2": "hello world", - "info3": "hello world".split("") + "info3": "hello world", }) + assert.deepEqual(doc.materialize(l2), { zip: ["a","b"] }) + assert.deepEqual(doc.materialize(l1), [ { zip: ["a","b"] }, { foo: "bar" }, [ 1,2,3] ]) + assert.deepEqual(doc.materialize(l4), "hello world") doc.free() }) @@ -446,6 +458,7 @@ describe('Automerge', () => { let doc = create() let s1 = initSyncState() let m1 = doc.generateSyncMessage(s1) + if (m1 === null) { throw new RangeError("message should not be null") } const message: DecodedSyncMessage = decodeSyncMessage(m1) assert.deepStrictEqual(message.heads, []) assert.deepStrictEqual(message.need, []) @@ -459,6 +472,7 @@ describe('Automerge', () => { let n1 = create(), n2 = create() let s1 = initSyncState(), s2 = initSyncState() let m1 = n1.generateSyncMessage(s1) + if (m1 === null) { throw new RangeError("message should not be null") } n2.receiveSyncMessage(s2, m1) let m2 = n2.generateSyncMessage(s2) assert.deepStrictEqual(m2, null) @@ -476,10 +490,11 @@ describe('Automerge', () => { n1.commit("",0) } n2.applyChanges(n1.getChanges([])) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) // generate a naive sync message let m1 = n1.generateSyncMessage(s1) + if (m1 === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(s1.lastSentHeads, n1.getHeads()) // heads are equal so this 
message should be null @@ -499,9 +514,9 @@ describe('Automerge', () => { n1.commit("",0) } - assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) sync(n1, n2) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) }) it('should sync peers where one has commits the other does not', () => { @@ -515,9 +530,9 @@ describe('Automerge', () => { n1.commit("",0) } - assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) sync(n1, n2) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) }) it('should work with prior sync state', () => { @@ -538,9 +553,9 @@ describe('Automerge', () => { n1.commit("",0) } - assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) }) it('should not generate messages once synced', () => { @@ -560,21 +575,25 @@ describe('Automerge', () => { // n1 reports what it has message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } // n2 receives that message and sends changes along with what it has n2.receiveSyncMessage(s2, message) message = n2.generateSyncMessage(s2) + if (message === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) //assert.deepStrictEqual(patch, null) // no changes arrived // n1 receives the changes and replies with the changes it now knows n2 needs n1.receiveSyncMessage(s1, message) message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) // n2 applies the changes and 
sends confirmation ending the exchange n2.receiveSyncMessage(s2, message) message = n2.generateSyncMessage(s2) + if (message === null) { throw new RangeError("message should not be null") } // n1 receives the message and has nothing more to say n1.receiveSyncMessage(s1, message) @@ -607,6 +626,8 @@ describe('Automerge', () => { let msg1to2, msg2to1 msg1to2 = n1.generateSyncMessage(s1) msg2to1 = n2.generateSyncMessage(s2) + if (msg1to2 === null) { throw new RangeError("message should not be null") } + if (msg2to1 === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0) assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) @@ -619,25 +640,29 @@ describe('Automerge', () => { // now both reply with their local changes the other lacks // (standard warning that 1% of the time this will result in a "need" message) msg1to2 = n1.generateSyncMessage(s1) + if (msg1to2 === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 5) msg2to1 = n2.generateSyncMessage(s2) + if (msg2to1 === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) // both should now apply the changes and update the frontend n1.receiveSyncMessage(s1, msg2to1) assert.deepStrictEqual(n1.getMissingDeps(), []) //assert.notDeepStrictEqual(patch1, null) - assert.deepStrictEqual(n1.toJS(), {x: 4, y: 4}) + assert.deepStrictEqual(n1.materialize(), {x: 4, y: 4}) n2.receiveSyncMessage(s2, msg1to2) assert.deepStrictEqual(n2.getMissingDeps(), []) //assert.notDeepStrictEqual(patch2, null) - assert.deepStrictEqual(n2.toJS(), {x: 4, y: 4}) + assert.deepStrictEqual(n2.materialize(), {x: 4, y: 4}) // The response acknowledges the changes received, and sends no further changes msg1to2 = 
n1.generateSyncMessage(s1) + if (msg1to2 === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) msg2to1 = n2.generateSyncMessage(s2) + if (msg2to1 === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) // After receiving acknowledgements, their shared heads should be equal @@ -657,6 +682,7 @@ describe('Automerge', () => { // If we make one more change, and start another sync, its lastSync should be updated n1.set("_root","x",5) msg1to2 = n1.generateSyncMessage(s1) + if (msg1to2 === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()) }) @@ -672,17 +698,20 @@ describe('Automerge', () => { n1.push(items, "x") n1.commit("",0) message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) n1.push(items, "y") n1.commit("",0) message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) n1.push(items, "z") n1.commit("",0) message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) }) @@ -704,9 +733,9 @@ describe('Automerge', () => { n1.commit("",0) } - assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) }) it('should work without prior sync state', () => { @@ -736,10 +765,10 @@ describe('Automerge', () => { n2.commit("",0) } - assert.notDeepStrictEqual(n1.toJS(), 
n2.toJS()) + assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) sync(n1, n2) assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) }) it('should work with prior sync state', () => { @@ -771,10 +800,10 @@ describe('Automerge', () => { s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) - assert.notDeepStrictEqual(n1.toJS(), n2.toJS()) + assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) sync(n1, n2, s1, s2) assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) }) it('should ensure non-empty state after sync', () => { @@ -822,7 +851,7 @@ describe('Automerge', () => { // everyone should be on the same page here assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) // now make a few more changes, then attempt to sync the fully-up-to-date n1 with the confused r for (let i = 6; i < 9; i++) { @@ -834,12 +863,12 @@ describe('Automerge', () => { rSyncState = decodeSyncState(encodeSyncState(rSyncState)) assert.notDeepStrictEqual(n1.getHeads(), r.getHeads()) - assert.notDeepStrictEqual(n1.toJS(), r.toJS()) - assert.deepStrictEqual(n1.toJS(), {x: 8}) - assert.deepStrictEqual(r.toJS(), {x: 2}) + assert.notDeepStrictEqual(n1.materialize(), r.materialize()) + assert.deepStrictEqual(n1.materialize(), {x: 8}) + assert.deepStrictEqual(r.materialize(), {x: 2}) sync(n1, r, s1, rSyncState) assert.deepStrictEqual(n1.getHeads(), r.getHeads()) - assert.deepStrictEqual(n1.toJS(), r.toJS()) + assert.deepStrictEqual(n1.materialize(), r.materialize()) }) it('should resync after one node experiences data loss without disconnecting', () => { @@ -855,7 +884,7 @@ describe('Automerge', () => { sync(n1, n2, s1, s2) 
assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) let n2AfterDataLoss = create('89abcdef') @@ -863,7 +892,7 @@ describe('Automerge', () => { // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting sync(n1, n2AfterDataLoss, s1, initSyncState()) assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) }) it('should handle changes concurrent to the last sync heads', () => { @@ -898,7 +927,7 @@ describe('Automerge', () => { // Now sync n1 and n2. n3's change is concurrent to n1 and n2's last sync heads sync(n1, n2, s12, s21) assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) }) it('should handle histories with lots of branching and merging', () => { @@ -933,7 +962,7 @@ describe('Automerge', () => { sync(n1, n2, s1, s2) assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.toJS(), n2.toJS()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) }) it('should handle a false-positive head', () => { @@ -1033,12 +1062,16 @@ describe('Automerge', () => { let m1, m2 m1 = n1.generateSyncMessage(s1) m2 = n2.generateSyncMessage(s2) + if (m1 === null) { throw new RangeError("message should not be null") } + if (m2 === null) { throw new RangeError("message should not be null") } n1.receiveSyncMessage(s1, m2) n2.receiveSyncMessage(s2, m1) // Then n1 and n2 send each other their changes, except for the false positive m1 = n1.generateSyncMessage(s1) m2 = n2.generateSyncMessage(s2) + if (m1 === null) { throw new RangeError("message should not be null") } + if (m2 === null) { throw new RangeError("message should not be null") } n1.receiveSyncMessage(s1, m2) n2.receiveSyncMessage(s2, m1) 
assert.strictEqual(decodeSyncMessage(m1).changes.length, 2) // n1c1 and n1c2 @@ -1178,21 +1211,25 @@ describe('Automerge', () => { // n1 creates a sync message for n2 with an ill-fated bloom message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } assert.strictEqual(decodeSyncMessage(message).changes.length, 0) // n2 receives it and DOESN'T send a change back n2.receiveSyncMessage(s2, message) message = n2.generateSyncMessage(s2) + if (message === null) { throw new RangeError("message should not be null") } assert.strictEqual(decodeSyncMessage(message).changes.length, 0) // n1 should now realize it's missing that change and request it explicitly n1.receiveSyncMessage(s1, message) message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(message).need, n2.getHeads()) // n2 should fulfill that request n2.receiveSyncMessage(s2, message) message = n2.generateSyncMessage(s2) + if (message === null) { throw new RangeError("message should not be null") } assert.strictEqual(decodeSyncMessage(message).changes.length, 1) // n1 should apply the change and the two should now be in sync @@ -1246,15 +1283,18 @@ describe('Automerge', () => { // changes {n1c1, n1c2, n2c1, n2c2} twice (those are the changes that both n1 and n2 have, but // that n3 does not have). We want to prevent this duplication. 
message1 = n1.generateSyncMessage(s13) // message from n1 to n3 + if (message1 === null) { throw new RangeError("message should not be null") } assert.strictEqual(decodeSyncMessage(message1).changes.length, 0) n3.receiveSyncMessage(s31, message1) message3 = n3.generateSyncMessage(s31) // message from n3 to n1 + if (message3 === null) { throw new RangeError("message should not be null") } assert.strictEqual(decodeSyncMessage(message3).changes.length, 3) // {n3c1, n3c2, n3c3} n1.receiveSyncMessage(s13, message3) // Copy the Bloom filter received from n1 into the message sent from n3 to n2. This Bloom // filter indicates what changes n3 is going to receive from n1. message3 = n3.generateSyncMessage(s32) // message from n3 to n2 + if (message3 === null) { throw new RangeError("message should not be null") } const modifiedMessage = decodeSyncMessage(message3) modifiedMessage.have.push(decodeSyncMessage(message1).have[0]) assert.strictEqual(modifiedMessage.changes.length, 0) @@ -1262,11 +1302,13 @@ describe('Automerge', () => { // n2 replies to n3, sending only n2c3 (the one change that n2 has but n1 doesn't) message2 = n2.generateSyncMessage(s23) + if (message2 === null) { throw new RangeError("message should not be null") } assert.strictEqual(decodeSyncMessage(message2).changes.length, 1) // {n2c3} n3.receiveSyncMessage(s32, message2) // n1 replies to n3 message1 = n1.generateSyncMessage(s13) + if (message1 === null) { throw new RangeError("message should not be null") } assert.strictEqual(decodeSyncMessage(message1).changes.length, 5) // {n1c1, n1c2, n1c3, n2c1, n2c2} n3.receiveSyncMessage(s31, message1) assert.deepStrictEqual(n3.getHeads(), [n1c3, n2c3, n3c3].sort()) @@ -1290,10 +1332,12 @@ describe('Automerge', () => { sync(n1, n2, s1, s2) s1.lastSentHeads = [] // force generateSyncMessage to return a message even though nothing changed message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } const modMsg = 
decodeSyncMessage(message) modMsg.need = lastSync // re-request change 2 n2.receiveSyncMessage(s2, encodeSyncMessage(modMsg)) message = n2.generateSyncMessage(s2) + if (message === null) { throw new RangeError("message should not be null") } assert.strictEqual(decodeSyncMessage(message).changes.length, 1) assert.strictEqual(decodeChange(decodeSyncMessage(message).changes[0]).hash, lastSync[0]) }) @@ -1309,6 +1353,7 @@ describe('Automerge', () => { n2.applyChanges(n1.getChanges([])) message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } message = decodeSyncMessage(message) message.need = ['0000000000000000000000000000000000000000000000000000000000000000'] message = encodeSyncMessage(message) @@ -1356,8 +1401,10 @@ describe('Automerge', () => { // Now n1 initiates a sync with n2, and n2 replies with {c5, c6}. n2 does not send {c7, c8} msg = n1.generateSyncMessage(s1) + if (msg === null) { throw new RangeError("message should not be null") } n2.receiveSyncMessage(s2, msg) msg = n2.generateSyncMessage(s2) + if (msg === null) { throw new RangeError("message should not be null") } decodedMsg = decodeSyncMessage(msg) decodedMsg.changes = [change5, change6] msg = encodeSyncMessage(decodedMsg) @@ -1371,6 +1418,7 @@ describe('Automerge', () => { // n1 replies, confirming the receipt of {c5, c6} and requesting the remaining changes msg = n1.generateSyncMessage(s1) + if (msg === null) { throw new RangeError("message should not be null") } n2.receiveSyncMessage(s2, msg) assert.deepStrictEqual(decodeSyncMessage(msg).need, [c8]) assert.deepStrictEqual(decodeSyncMessage(msg).have[0].lastSync, [c2, c6].sort()) @@ -1379,6 +1427,7 @@ describe('Automerge', () => { // n2 sends the remaining changes {c7, c8} msg = n2.generateSyncMessage(s2) + if (msg === null) { throw new RangeError("message should not be null") } n1.receiveSyncMessage(s1, msg) assert.strictEqual(decodeSyncMessage(msg).changes.length, 2) 
assert.deepStrictEqual(s1.sharedHeads, [c2, c8].sort()) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 4be78664..95fa411c 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -298,6 +298,10 @@ impl Transactable for AutoCommit { self.doc.length_at(obj, heads) } + fn object_type>(&self, obj: O) -> Result { + self.doc.object_type(obj) + } + // set(obj, prop, value) - value can be scalar or objtype // del(obj, prop) // inc(obj, prop, value) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 0a80d0cd..94d71b8f 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -236,6 +236,11 @@ impl Automerge { } } + pub fn object_type>(&self, obj: O) -> Result { + let obj = self.exid_to_obj(obj.as_ref())?; + self.ops.object_type(&obj).ok_or(AutomergeError::Fail) + } + pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result { match id { ExId::Root => Ok(ObjId::root()), @@ -806,7 +811,7 @@ impl Automerge { .m .actors .lookup(&actor) - .ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?; + .ok_or_else(|| AutomergeError::InvalidActor(actor.to_hex_string()))?; Ok(ExId::Id( counter, self.ops.m.actors.cache[actor].clone(), diff --git a/automerge/src/error.rs b/automerge/src/error.rs index 1a57a865..0fb0b38b 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -7,6 +7,8 @@ use thiserror::Error; pub enum AutomergeError { #[error("invalid opid format `{0}`")] InvalidOpId(String), + #[error("invalid actor `{0}`")] + InvalidActor(String), #[error("there was an ecoding problem")] Encoding, #[error("there was a decoding problem")] diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index e25a4053..209838ec 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -187,6 +187,10 @@ impl<'a> Transactable for Transaction<'a> { self.doc.length_at(obj, heads) } + fn 
object_type>(&self, obj: O) -> Result { + self.doc.object_type(obj) + } + fn text>(&self, obj: O) -> Result { self.doc.text(obj) } diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 593a25ca..adc148d4 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -104,6 +104,9 @@ pub trait Transactable { /// Get the length of the given object at a point in history. fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize; + /// Get type for object + fn object_type>(&self, obj: O) -> Result; + /// Get the string that this text object represents. fn text>(&self, obj: O) -> Result; From ee116bb5d71348e008e03fb4b9e95df2b903b6e2 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 9 Mar 2022 19:42:58 -0500 Subject: [PATCH 166/730] object_type returns an option --- automerge-wasm/src/lib.rs | 11 ++++++----- automerge/src/autocommit.rs | 2 +- automerge/src/automerge.rs | 6 +++--- automerge/src/transaction/manual_transaction.rs | 2 +- automerge/src/transaction/transactable.rs | 2 +- 5 files changed, 12 insertions(+), 11 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 3d92be5d..b5e799f5 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -469,11 +469,12 @@ impl Automerge { pub fn materialize(&self, obj: JsValue) -> Result { let obj = self.import(obj).unwrap_or(ROOT); - match self.0.object_type(&obj)? 
{ - am::ObjType::Map => Ok(map_to_js(&self.0, &obj)), - am::ObjType::List => Ok(list_to_js(&self.0, &obj)), - am::ObjType::Text => Ok(self.0.text(&obj)?.into()), - am::ObjType::Table => Ok(map_to_js(&self.0, &obj)), + match self.0.object_type(&obj) { + Some(am::ObjType::Map) => Ok(map_to_js(&self.0, &obj)), + Some(am::ObjType::List) => Ok(list_to_js(&self.0, &obj)), + Some(am::ObjType::Text) => Ok(self.0.text(&obj)?.into()), + Some(am::ObjType::Table) => Ok(map_to_js(&self.0, &obj)), + None => Err(to_js_err(format!("invalid obj {}", obj))), } } diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 95fa411c..30a34965 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -298,7 +298,7 @@ impl Transactable for AutoCommit { self.doc.length_at(obj, heads) } - fn object_type>(&self, obj: O) -> Result { + fn object_type>(&self, obj: O) -> Option { self.doc.object_type(obj) } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 94d71b8f..b3942ce1 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -236,9 +236,9 @@ impl Automerge { } } - pub fn object_type>(&self, obj: O) -> Result { - let obj = self.exid_to_obj(obj.as_ref())?; - self.ops.object_type(&obj).ok_or(AutomergeError::Fail) + pub fn object_type>(&self, obj: O) -> Option { + let obj = self.exid_to_obj(obj.as_ref()).ok()?; + self.ops.object_type(&obj) } pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result { diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 209838ec..5647e0a2 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -187,7 +187,7 @@ impl<'a> Transactable for Transaction<'a> { self.doc.length_at(obj, heads) } - fn object_type>(&self, obj: O) -> Result { + fn object_type>(&self, obj: O) -> Option { self.doc.object_type(obj) } diff --git a/automerge/src/transaction/transactable.rs 
b/automerge/src/transaction/transactable.rs index adc148d4..19820492 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -105,7 +105,7 @@ pub trait Transactable { fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize; /// Get type for object - fn object_type>(&self, obj: O) -> Result; + fn object_type>(&self, obj: O) -> Option; /// Get the string that this text object represents. fn text>(&self, obj: O) -> Result; From 4ff6dca175bcee8cf4155666f5d6598d0450647e Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 10 Mar 2022 08:47:52 -0500 Subject: [PATCH 167/730] rename error message for foreign objid --- automerge/src/automerge.rs | 2 +- automerge/src/error.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index b3942ce1..94f57a50 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -811,7 +811,7 @@ impl Automerge { .m .actors .lookup(&actor) - .ok_or_else(|| AutomergeError::InvalidActor(actor.to_hex_string()))?; + .ok_or_else(|| AutomergeError::ForeignObjId(s.to_owned()))?; Ok(ExId::Id( counter, self.ops.m.actors.cache[actor].clone(), diff --git a/automerge/src/error.rs b/automerge/src/error.rs index 0fb0b38b..5b529fa5 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -7,8 +7,8 @@ use thiserror::Error; pub enum AutomergeError { #[error("invalid opid format `{0}`")] InvalidOpId(String), - #[error("invalid actor `{0}`")] - InvalidActor(String), + #[error("obj id not from this document `{0}`")] + ForeignObjId(String), #[error("there was an ecoding problem")] Encoding, #[error("there was a decoding problem")] From 4fa1d056c6f8bee950144f4180f34ef848296f08 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 10 Mar 2022 18:22:06 +0000 Subject: [PATCH 168/730] Stop exposing apply_change It doesn't do checks or raise errors so shouldn't really be exposed. 
--- automerge/src/autocommit.rs | 5 ----- automerge/src/automerge.rs | 2 +- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 4be78664..a43f7f17 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -149,11 +149,6 @@ impl AutoCommit { self.doc.apply_changes(changes) } - pub fn apply_change(&mut self, change: Change) { - self.ensure_transaction_closed(); - self.doc.apply_change(change) - } - /// Takes all the changes in `other` which are not in `self` and applies them pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { self.ensure_transaction_closed(); diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 0a80d0cd..39ad7539 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -439,7 +439,7 @@ impl Automerge { } /// Apply a single change to this document. - pub fn apply_change(&mut self, change: Change) { + fn apply_change(&mut self, change: Change) { let ops = self.import_ops(&change, self.history.len()); self.update_history(change); for op in ops { From a2cb15e93630e2b76778f8507fb0522c8a5baed1 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 11 Mar 2022 09:08:15 +0000 Subject: [PATCH 169/730] Remove obj from the op as it can be gotten from the optree This makes the Op struct smaller, helping memory usage and cache coherence. 
--- automerge/src/automerge.rs | 64 +++++++++++++++--------------- automerge/src/change.rs | 13 ++++-- automerge/src/columnar.rs | 8 ++-- automerge/src/op_set.rs | 22 +++++----- automerge/src/op_tree.rs | 24 +++++------ automerge/src/transaction/inner.rs | 44 ++++++++++---------- automerge/src/types.rs | 1 - automerge/src/visualisation.rs | 22 +++++----- 8 files changed, 105 insertions(+), 93 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 62dac00c..65fe5acb 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -164,15 +164,15 @@ impl Automerge { f } - fn insert_op(&mut self, op: Op) -> Op { - let q = self.ops.search(op.obj, query::SeekOp::new(&op)); + fn insert_op(&mut self, obj: &ObjId, op: Op) -> Op { + let q = self.ops.search(obj, query::SeekOp::new(&op)); for i in q.succ { - self.ops.replace(op.obj, i, |old_op| old_op.add_succ(&op)); + self.ops.replace(obj, i, |old_op| old_op.add_succ(&op)); } if !op.is_del() { - self.ops.insert(q.pos, op.clone()); + self.ops.insert(q.pos, obj, op.clone()); } op } @@ -211,7 +211,7 @@ impl Automerge { match self.ops.object_type(&inner_obj) { Some(ObjType::Map) | Some(ObjType::Table) => self.keys(obj).count(), Some(ObjType::List) | Some(ObjType::Text) => { - self.ops.search(inner_obj, query::Len::new()).len + self.ops.search(&inner_obj, query::Len::new()).len } None => 0, } @@ -227,7 +227,7 @@ impl Automerge { match self.ops.object_type(&inner_obj) { Some(ObjType::Map) | Some(ObjType::Table) => self.keys_at(obj, heads).count(), Some(ObjType::List) | Some(ObjType::Text) => { - self.ops.search(inner_obj, query::LenAt::new(clock)).len + self.ops.search(&inner_obj, query::LenAt::new(clock)).len } None => 0, } @@ -270,7 +270,7 @@ impl Automerge { /// Get the string represented by the given text object. 
pub fn text>(&self, obj: O) -> Result { let obj = self.exid_to_obj(obj.as_ref())?; - let query = self.ops.search(obj, query::ListVals::new()); + let query = self.ops.search(&obj, query::ListVals::new()); let mut buffer = String::new(); for q in &query.ops { if let OpType::Set(ScalarValue::Str(s)) = &q.action { @@ -288,7 +288,7 @@ impl Automerge { ) -> Result { let obj = self.exid_to_obj(obj.as_ref())?; let clock = self.clock_at(heads); - let query = self.ops.search(obj, query::ListValsAt::new(clock)); + let query = self.ops.search(&obj, query::ListValsAt::new(clock)); let mut buffer = String::new(); for q in &query.ops { if let OpType::Set(ScalarValue::Str(s)) = &q.action { @@ -338,7 +338,7 @@ impl Automerge { let prop = self.ops.m.props.lookup(&p); if let Some(p) = prop { self.ops - .search(obj, query::Prop::new(p)) + .search(&obj, query::Prop::new(p)) .ops .into_iter() .map(|o| (o.value(), self.id_to_exid(o.id))) @@ -349,7 +349,7 @@ impl Automerge { } Prop::Seq(n) => self .ops - .search(obj, query::Nth::new(n)) + .search(&obj, query::Nth::new(n)) .ops .into_iter() .map(|o| (o.value(), self.id_to_exid(o.id))) @@ -373,7 +373,7 @@ impl Automerge { let prop = self.ops.m.props.lookup(&p); if let Some(p) = prop { self.ops - .search(obj, query::PropAt::new(p, clock)) + .search(&obj, query::PropAt::new(p, clock)) .ops .into_iter() .map(|o| (o.value(), self.id_to_exid(o.id))) @@ -384,7 +384,7 @@ impl Automerge { } Prop::Seq(n) => self .ops - .search(obj, query::NthAt::new(n, clock)) + .search(&obj, query::NthAt::new(n, clock)) .ops .into_iter() .map(|o| (o.value(), self.id_to_exid(o.id))) @@ -447,8 +447,8 @@ impl Automerge { fn apply_change(&mut self, change: Change) { let ops = self.import_ops(&change, self.history.len()); self.update_history(change); - for op in ops { - self.insert_op(op); + for (obj, op) in ops { + self.insert_op(&obj, op); } } @@ -470,7 +470,7 @@ impl Automerge { None } - fn import_ops(&mut self, change: &Change, change_id: usize) -> Vec { + fn 
import_ops(&mut self, change: &Change, change_id: usize) -> Vec<(ObjId, Op)> { change .iter_ops() .enumerate() @@ -493,16 +493,18 @@ impl Automerge { Key::Seq(ElemId(OpId(i.0, self.ops.m.actors.cache(i.1.clone())))) } }; - Op { - change: change_id, - id, - action: c.action, + ( obj, - key, - succ: Default::default(), - pred, - insert: c.insert, - } + Op { + change: change_id, + id, + action: c.action, + key, + succ: Default::default(), + pred, + insert: c.insert, + }, + ) }) .collect() } @@ -838,21 +840,21 @@ impl Automerge { "pred", "succ" ); - for i in self.ops.iter() { - let id = self.to_string(i.id); - let obj = self.to_string(i.obj); - let key = match i.key { + for (obj, op) in self.ops.iter() { + let id = self.to_string(op.id); + let obj = self.to_string(obj); + let key = match op.key { Key::Map(n) => self.ops.m.props[n].clone(), Key::Seq(n) => self.to_string(n), }; - let value: String = match &i.action { + let value: String = match &op.action { OpType::Set(value) => format!("{}", value), OpType::Make(obj) => format!("make({})", obj), OpType::Inc(obj) => format!("inc({})", obj), OpType::Del => format!("del{}", 0), }; - let pred: Vec<_> = i.pred.iter().map(|id| self.to_string(*id)).collect(); - let succ: Vec<_> = i.succ.iter().map(|id| self.to_string(*id)).collect(); + let pred: Vec<_> = op.pred.iter().map(|id| self.to_string(*id)).collect(); + let succ: Vec<_> = op.succ.iter().map(|id| self.to_string(*id)).collect(); log!( " {:12} {:12} {:12} {} {:?} {:?}", id, diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 3de175f8..6016f206 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -39,7 +39,7 @@ const HASH_RANGE: Range = 4..8; pub(crate) fn encode_document<'a, 'b>( heads: Vec, changes: impl Iterator, - doc_ops: impl Iterator, + doc_ops: impl Iterator, actors_index: &IndexedCache, props: &'a [String], ) -> Vec { @@ -467,13 +467,18 @@ fn export_opid(id: &OpId, actors: &IndexedCache) -> amp::OpId { amp::OpId(id.0, 
actors.get(id.1).clone()) } -fn export_op(op: &Op, actors: &IndexedCache, props: &IndexedCache) -> amp::Op { +fn export_op( + op: &Op, + obj: &ObjId, + actors: &IndexedCache, + props: &IndexedCache, +) -> amp::Op { let action = op.action.clone(); let key = match &op.key { Key::Map(n) => amp::Key::Map(props.get(*n).clone().into()), Key::Seq(id) => amp::Key::Seq(export_elemid(id, actors)), }; - let obj = export_objid(&op.obj, actors); + let obj = export_objid(obj, actors); let pred = op.pred.iter().map(|id| export_opid(id, actors)).collect(); amp::Op { action, @@ -500,7 +505,7 @@ pub(crate) fn export_change( operations: change .operations .iter() - .map(|op| export_op(op, actors, props)) + .map(|(obj, op)| export_op(op, obj, actors, props)) .collect(), extra_bytes: change.extra_bytes.clone(), } diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index d7337359..30429d64 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -1019,7 +1019,7 @@ impl DocOpEncoder { props: &'b [String], ) -> (Vec, Vec) where - I: IntoIterator, + I: IntoIterator, { let mut e = Self::new(); e.encode(ops, actors, props); @@ -1041,12 +1041,12 @@ impl DocOpEncoder { fn encode<'a, I>(&mut self, ops: I, actors: &[usize], props: &[String]) where - I: IntoIterator, + I: IntoIterator, { - for op in ops { + for (obj, op) in ops { self.actor.append_value(actors[op.id.actor()]); self.ctr.append_value(op.id.counter()); - self.obj.append(&op.obj, actors); + self.obj.append(obj, actors); self.key.append(op.key, actors, props); self.insert.append(op.insert); self.succ.append(&op.succ, actors); diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 73b974c3..c2393252 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -59,31 +59,31 @@ impl OpSetInternal { } } - pub fn search(&self, obj: ObjId, query: Q) -> Q + pub fn search(&self, obj: &ObjId, query: Q) -> Q where Q: TreeQuery, { - if let Some((_typ, tree)) = self.trees.get(&obj) { + if let 
Some((_typ, tree)) = self.trees.get(obj) { tree.search(query, &self.m) } else { query } } - pub fn replace(&mut self, obj: ObjId, index: usize, f: F) -> Option + pub fn replace(&mut self, obj: &ObjId, index: usize, f: F) -> Option where F: FnMut(&mut Op), { - if let Some((_typ, tree)) = self.trees.get_mut(&obj) { + if let Some((_typ, tree)) = self.trees.get_mut(obj) { tree.replace(index, f) } else { None } } - pub fn remove(&mut self, obj: ObjId, index: usize) -> Op { + pub fn remove(&mut self, obj: &ObjId, index: usize) -> Op { // this happens on rollback - be sure to go back to the old state - let (_typ, tree) = self.trees.get_mut(&obj).unwrap(); + let (_typ, tree) = self.trees.get_mut(obj).unwrap(); self.length -= 1; let op = tree.remove(index); if let OpType::Make(_) = &op.action { @@ -96,13 +96,13 @@ impl OpSetInternal { self.length } - pub fn insert(&mut self, index: usize, element: Op) { + pub fn insert(&mut self, index: usize, obj: &ObjId, element: Op) { if let OpType::Make(typ) = element.action { self.trees .insert(element.id.into(), (typ, Default::default())); } - if let Some((_typ, tree)) = self.trees.get_mut(&element.obj) { + if let Some((_typ, tree)) = self.trees.get_mut(obj) { //let tree = self.trees.get_mut(&element.obj).unwrap(); tree.insert(index, element); self.length += 1; @@ -129,7 +129,7 @@ impl Default for OpSetInternal { } impl<'a, const B: usize> IntoIterator for &'a OpSetInternal { - type Item = &'a Op; + type Item = (&'a ObjId, &'a Op); type IntoIter = Iter<'a, B>; @@ -153,13 +153,13 @@ pub(crate) struct Iter<'a, const B: usize> { } impl<'a, const B: usize> Iterator for Iter<'a, B> { - type Item = &'a Op; + type Item = (&'a ObjId, &'a Op); fn next(&mut self) -> Option { let mut result = None; for obj in self.objs.iter().skip(self.index) { let (_typ, tree) = self.inner.trees.get(obj)?; - result = tree.get(self.sub_index); + result = tree.get(self.sub_index).map(|op| (*obj, op)); if result.is_some() { self.sub_index += 1; break; diff --git 
a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 903cfde6..9e6a0b71 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -640,13 +640,11 @@ mod tests { use super::*; - fn op(n: usize) -> Op { + fn op() -> Op { let zero = OpId(0, 0); Op { - change: n, id: zero, action: amp::OpType::Set(0.into()), - obj: zero.into(), key: zero.into(), succ: vec![], pred: vec![], @@ -658,13 +656,13 @@ mod tests { fn insert() { let mut t = OpTree::new(); - t.insert(0, op(1)); - t.insert(1, op(1)); - t.insert(0, op(1)); - t.insert(0, op(1)); - t.insert(0, op(1)); - t.insert(3, op(1)); - t.insert(4, op(1)); + t.insert(0, op()); + t.insert(1, op()); + t.insert(0, op()); + t.insert(0, op()); + t.insert(0, op()); + t.insert(3, op()); + t.insert(4, op()); } #[test] @@ -672,7 +670,7 @@ mod tests { let mut t = OpTree::new(); for i in 0..100 { - t.insert(i % 2, op(i)); + t.insert(i % 2, op()); } } @@ -682,8 +680,8 @@ mod tests { let mut v = Vec::new(); for i in 0..100 { - t.insert(i % 3, op(i)); - v.insert(i % 3, op(i)); + t.insert(i % 3, op()); + v.insert(i % 3, op()); assert_eq!(v, t.iter().cloned().collect::>()) } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 6c415965..c80fc405 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -15,7 +15,7 @@ pub struct TransactionInner { pub(crate) extra_bytes: Vec, pub(crate) hash: Option, pub(crate) deps: Vec, - pub(crate) operations: Vec, + pub(crate) operations: Vec<(ObjId, Op)>, } impl TransactionInner { @@ -57,14 +57,14 @@ impl TransactionInner { let num = self.operations.len(); // remove in reverse order so sets are removed before makes etc... 
- for op in self.operations.iter().rev() { + for (obj, op) in self.operations.iter().rev() { for pred_id in &op.pred { - if let Some(p) = doc.ops.search(op.obj, OpIdSearch::new(*pred_id)).index() { - doc.ops.replace(op.obj, p, |o| o.remove_succ(op)); + if let Some(p) = doc.ops.search(obj, OpIdSearch::new(*pred_id)).index() { + doc.ops.replace(obj, p, |o| o.remove_succ(op)); } } - if let Some(pos) = doc.ops.search(op.obj, OpIdSearch::new(op.id)).index() { - doc.ops.remove(op.obj, pos); + if let Some(pos) = doc.ops.search(obj, OpIdSearch::new(op.id)).index() { + doc.ops.remove(obj, pos); } } num @@ -125,18 +125,25 @@ impl TransactionInner { OpId(self.start_op + self.operations.len() as u64, self.actor) } - fn insert_local_op(&mut self, doc: &mut Automerge, op: Op, pos: usize, succ_pos: &[usize]) { + fn insert_local_op( + &mut self, + doc: &mut Automerge, + op: Op, + pos: usize, + obj: ObjId, + succ_pos: &[usize], + ) { for succ in succ_pos { - doc.ops.replace(op.obj, *succ, |old_op| { + doc.ops.replace(&obj, *succ, |old_op| { old_op.add_succ(&op); }); } if !op.is_del() { - doc.ops.insert(pos, op.clone()); + doc.ops.insert(pos, &obj, op.clone()); } - self.operations.push(op); + self.operations.push((obj, op)); } pub fn insert>( @@ -173,7 +180,7 @@ impl TransactionInner { ) -> Result, AutomergeError> { let id = self.next_id(); - let query = doc.ops.search(obj, query::InsertNth::new(index)); + let query = doc.ops.search(&obj, query::InsertNth::new(index)); let key = query.key()?; let is_make = matches!(&action, OpType::Make(_)); @@ -182,15 +189,14 @@ impl TransactionInner { change: doc.history.len(), id, action, - obj, key, succ: Default::default(), pred: Default::default(), insert: true, }; - doc.ops.insert(query.pos(), op.clone()); - self.operations.push(op); + doc.ops.insert(query.pos(), &obj, op.clone()); + self.operations.push((obj, op)); if is_make { Ok(Some(id)) @@ -225,7 +231,7 @@ impl TransactionInner { let id = self.next_id(); let prop = 
doc.ops.m.props.cache(prop); - let query = doc.ops.search(obj, query::Prop::new(prop)); + let query = doc.ops.search(&obj, query::Prop::new(prop)); // no key present to delete if query.ops.is_empty() && action == OpType::Del { @@ -244,14 +250,13 @@ impl TransactionInner { change: doc.history.len(), id, action, - obj, key: Key::Map(prop), succ: Default::default(), pred, insert: false, }; - self.insert_local_op(doc, op, query.pos, &query.ops_pos); + self.insert_local_op(doc, op, query.pos, obj, &query.ops_pos); if is_make { Ok(Some(id)) @@ -267,7 +272,7 @@ impl TransactionInner { index: usize, action: OpType, ) -> Result, AutomergeError> { - let query = doc.ops.search(obj, query::Nth::new(index)); + let query = doc.ops.search(&obj, query::Nth::new(index)); let id = self.next_id(); let pred = query.ops.iter().map(|op| op.id).collect(); @@ -283,14 +288,13 @@ impl TransactionInner { change: doc.history.len(), id, action, - obj, key, succ: Default::default(), pred, insert: false, }; - self.insert_local_op(doc, op, query.pos, &query.ops_pos); + self.insert_local_op(doc, op, query.pos, obj, &query.ops_pos); if is_make { Ok(Some(id)) diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 24516a28..ccde70b7 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -342,7 +342,6 @@ pub(crate) struct Op { pub change: usize, pub id: OpId, pub action: OpType, - pub obj: ObjId, pub key: Key, pub succ: Vec, pub pred: Vec, diff --git a/automerge/src/visualisation.rs b/automerge/src/visualisation.rs index 442c9eb4..6f6a36b0 100644 --- a/automerge/src/visualisation.rs +++ b/automerge/src/visualisation.rs @@ -1,3 +1,4 @@ +use crate::types::ObjId; use fxhash::FxHasher; use std::{borrow::Cow, collections::HashMap, hash::BuildHasherDefault}; @@ -25,7 +26,7 @@ pub(crate) struct Node<'a, const B: usize> { #[derive(Clone)] pub(crate) enum NodeType<'a, const B: usize> { ObjRoot(crate::types::ObjId), - ObjTreeNode(&'a crate::op_tree::OpTreeNode), + ObjTreeNode(ObjId, &'a 
crate::op_tree::OpTreeNode), } #[derive(Clone)] @@ -51,7 +52,7 @@ impl<'a, const B: usize> GraphVisualisation<'a, B> { let mut nodes = HashMap::new(); for (obj_id, (_, tree)) in trees { if let Some(root_node) = &tree.root_node { - let tree_id = Self::construct_nodes(root_node, &mut nodes, metadata); + let tree_id = Self::construct_nodes(root_node, obj_id, &mut nodes, metadata); let obj_tree_id = NodeId::default(); nodes.insert( obj_tree_id, @@ -76,13 +77,14 @@ impl<'a, const B: usize> GraphVisualisation<'a, B> { fn construct_nodes( node: &'a crate::op_tree::OpTreeNode, + objid: &ObjId, nodes: &mut HashMap>, m: &'a crate::op_set::OpSetMetadata, ) -> NodeId { let node_id = NodeId::default(); let mut child_ids = Vec::new(); for child in &node.children { - let child_id = Self::construct_nodes(child, nodes, m); + let child_id = Self::construct_nodes(child, objid, nodes, m); child_ids.push(child_id); } nodes.insert( @@ -90,7 +92,7 @@ impl<'a, const B: usize> GraphVisualisation<'a, B> { Node { id: node_id, children: child_ids, - node_type: NodeType::ObjTreeNode(node), + node_type: NodeType::ObjTreeNode(*objid, node), metadata: m, }, ); @@ -136,7 +138,7 @@ impl<'a, const B: usize> dot::Labeller<'a, &'a Node<'a, B>, Edge> for GraphVisua fn node_shape(&'a self, node: &&'a Node<'a, B>) -> Option> { let shape = match node.node_type { - NodeType::ObjTreeNode(_) => dot::LabelText::label("none"), + NodeType::ObjTreeNode(_, _) => dot::LabelText::label("none"), NodeType::ObjRoot(_) => dot::LabelText::label("ellipse"), }; Some(shape) @@ -144,8 +146,8 @@ impl<'a, const B: usize> dot::Labeller<'a, &'a Node<'a, B>, Edge> for GraphVisua fn node_label(&'a self, n: &&Node<'a, B>) -> dot::LabelText<'a> { match n.node_type { - NodeType::ObjTreeNode(tree_node) => dot::LabelText::HtmlStr( - OpTable::create(tree_node, n.metadata, &self.actor_shorthands) + NodeType::ObjTreeNode(objid, tree_node) => dot::LabelText::HtmlStr( + OpTable::create(tree_node, &objid, n.metadata, &self.actor_shorthands) 
.to_html() .into(), ), @@ -163,13 +165,14 @@ struct OpTable { impl OpTable { fn create<'a, const B: usize>( node: &'a crate::op_tree::OpTreeNode, + obj: &ObjId, metadata: &crate::op_set::OpSetMetadata, actor_shorthands: &HashMap, ) -> Self { let rows = node .elements .iter() - .map(|e| OpTableRow::create(e, metadata, actor_shorthands)) + .map(|e| OpTableRow::create(e, obj, metadata, actor_shorthands)) .collect(); OpTable { rows } } @@ -226,6 +229,7 @@ impl OpTableRow { impl OpTableRow { fn create( op: &super::types::Op, + obj: &ObjId, metadata: &crate::op_set::OpSetMetadata, actor_shorthands: &HashMap, ) -> Self { @@ -246,7 +250,7 @@ impl OpTableRow { .collect(); OpTableRow { op_description, - obj_id: print_opid(&op.obj.0, actor_shorthands), + obj_id: print_opid(&obj.0, actor_shorthands), op_id: print_opid(&op.id, actor_shorthands), prop, succ, From 488df553851a1c1d243f21ad2ebb3cc4cd9b3e3a Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 11 Mar 2022 09:10:06 +0000 Subject: [PATCH 170/730] Remove change field on Op as unused This field was never read from. --- automerge/src/automerge.rs | 5 ++--- automerge/src/transaction/inner.rs | 3 --- automerge/src/types.rs | 1 - 3 files changed, 2 insertions(+), 7 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 65fe5acb..f593f7db 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -445,7 +445,7 @@ impl Automerge { /// Apply a single change to this document. 
fn apply_change(&mut self, change: Change) { - let ops = self.import_ops(&change, self.history.len()); + let ops = self.import_ops(&change); self.update_history(change); for (obj, op) in ops { self.insert_op(&obj, op); @@ -470,7 +470,7 @@ impl Automerge { None } - fn import_ops(&mut self, change: &Change, change_id: usize) -> Vec<(ObjId, Op)> { + fn import_ops(&mut self, change: &Change) -> Vec<(ObjId, Op)> { change .iter_ops() .enumerate() @@ -496,7 +496,6 @@ impl Automerge { ( obj, Op { - change: change_id, id, action: c.action, key, diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index c80fc405..3fb03270 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -186,7 +186,6 @@ impl TransactionInner { let is_make = matches!(&action, OpType::Make(_)); let op = Op { - change: doc.history.len(), id, action, key, @@ -247,7 +246,6 @@ impl TransactionInner { let pred = query.ops.iter().map(|op| op.id).collect(); let op = Op { - change: doc.history.len(), id, action, key: Key::Map(prop), @@ -285,7 +283,6 @@ impl TransactionInner { let is_make = matches!(&action, OpType::Make(_)); let op = Op { - change: doc.history.len(), id, action, key, diff --git a/automerge/src/types.rs b/automerge/src/types.rs index ccde70b7..1f52731d 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -339,7 +339,6 @@ pub(crate) struct ElemId(pub OpId); #[derive(Debug, Clone, PartialEq)] pub(crate) struct Op { - pub change: usize, pub id: OpId, pub action: OpType, pub key: Key, From 927c8678840c7b3a11fb7b5a5593b65c39031710 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 11 Mar 2022 12:04:00 +0000 Subject: [PATCH 171/730] Replace no longer returns an op --- automerge/src/op_set.rs | 4 +--- automerge/src/op_tree.rs | 7 ++----- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index c2393252..bd84911d 100644 --- a/automerge/src/op_set.rs +++ 
b/automerge/src/op_set.rs @@ -70,14 +70,12 @@ impl OpSetInternal { } } - pub fn replace(&mut self, obj: &ObjId, index: usize, f: F) -> Option + pub fn replace(&mut self, obj: &ObjId, index: usize, f: F) where F: FnMut(&mut Op), { if let Some((_typ, tree)) = self.trees.get_mut(obj) { tree.replace(index, f) - } else { - None } } diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 9e6a0b71..55503b2a 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -123,18 +123,15 @@ impl OpTreeInternal { } // this replaces get_mut() because it allows the indexes to update correctly - pub fn replace(&mut self, index: usize, mut f: F) -> Option + pub fn replace(&mut self, index: usize, mut f: F) where F: FnMut(&mut Op), { if self.len() > index { - let op = self.get(index).unwrap().clone(); + let op = self.get(index).unwrap(); let mut new_op = op.clone(); f(&mut new_op); self.set(index, new_op); - Some(op) - } else { - None } } From 2e49561ab20e9c33f1203ccab0069eb59430d61f Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 11 Mar 2022 12:13:11 +0000 Subject: [PATCH 172/730] Make splice take iterator instead of vec --- automerge-wasm/src/lib.rs | 2 +- automerge/src/autocommit.rs | 4 ++-- automerge/src/transaction/inner.rs | 2 +- automerge/src/transaction/manual_transaction.rs | 4 ++-- automerge/src/transaction/transactable.rs | 10 ++++------ 5 files changed, 10 insertions(+), 12 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index b5e799f5..63b7e78a 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -145,7 +145,7 @@ impl Automerge { vals.push(value); } } - self.0.splice(&obj, start, delete_count, vals)?; + self.0.splice(&obj, start, delete_count, vals.into_iter())?; } Ok(()) } diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 91ca5db0..a91ed1e2 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -382,12 +382,12 @@ impl Transactable 
for AutoCommit { /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert /// the new elements - fn splice>( + fn splice, V: Iterator>( &mut self, obj: O, pos: usize, del: usize, - vals: Vec, + vals: V, ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 3fb03270..6b5533f1 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -331,7 +331,7 @@ impl TransactionInner { obj: &ExId, mut pos: usize, del: usize, - vals: Vec, + vals: impl Iterator, ) -> Result<(), AutomergeError> { let obj = doc.exid_to_obj(obj)?; for _ in 0..del { diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 5647e0a2..36106261 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -158,12 +158,12 @@ impl<'a> Transactable for Transaction<'a> { /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert /// the new elements - fn splice>( + fn splice, V: Iterator>( &mut self, obj: O, pos: usize, del: usize, - vals: Vec, + vals: V, ) -> Result<(), AutomergeError> { self.inner .as_mut() diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 19820492..4654a478 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -69,12 +69,12 @@ pub trait Transactable { fn del, P: Into>(&mut self, obj: O, prop: P) -> Result<(), AutomergeError>; - fn splice>( + fn splice, V: Iterator>( &mut self, obj: O, pos: usize, del: usize, - vals: Vec, + vals: V, ) -> Result<(), AutomergeError>; /// Like [`Self::splice`] but for text. 
@@ -85,10 +85,8 @@ pub trait Transactable { del: usize, text: &str, ) -> Result<(), AutomergeError> { - let mut vals = vec![]; - for c in text.to_owned().graphemes(true) { - vals.push(c.into()); - } + let text = text.to_owned(); + let vals = text.graphemes(true).map(|c| c.into()); self.splice(obj, pos, del, vals) } From 67251f4d53892b663c7cf98896633487642aa8ce Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 11 Mar 2022 12:24:02 +0000 Subject: [PATCH 173/730] Have splice take IntoIterator --- automerge/src/autocommit.rs | 2 +- automerge/src/transaction/manual_transaction.rs | 2 +- automerge/src/transaction/transactable.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index a91ed1e2..c1cf6695 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -382,7 +382,7 @@ impl Transactable for AutoCommit { /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert /// the new elements - fn splice, V: Iterator>( + fn splice, V: IntoIterator>( &mut self, obj: O, pos: usize, diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 36106261..2303bb34 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -158,7 +158,7 @@ impl<'a> Transactable for Transaction<'a> { /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert /// the new elements - fn splice, V: Iterator>( + fn splice, V: IntoIterator>( &mut self, obj: O, pos: usize, diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 4654a478..68852180 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -69,7 +69,7 @@ pub trait Transactable { fn del, P: Into>(&mut self, obj: O, prop: P) -> Result<(), AutomergeError>; - fn splice, V: Iterator>( + fn splice, V: IntoIterator>( &mut self, obj: O, pos: usize, From ac18f7116f789df64784647edd2358992b82c8d1 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 11 Mar 2022 12:25:18 +0000 Subject: [PATCH 174/730] And fixup IntoIterator --- automerge/src/transaction/inner.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 6b5533f1..a6a9852f 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -331,7 +331,7 @@ impl TransactionInner { obj: &ExId, mut pos: usize, del: usize, - vals: impl Iterator, + vals: impl IntoIterator, ) -> Result<(), AutomergeError> { let obj = doc.exid_to_obj(obj)?; for _ in 0..del { From a4e8d202661e00cc5f8d905e6299e89b398fa0cd Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 11 Mar 2022 12:25:34 +0000 Subject: [PATCH 175/730] Optimise getting number of ops when applying tx or changes --- automerge/src/autocommit.rs | 2 +- automerge/src/automerge.rs | 6 +++--- automerge/src/change.rs | 8 ++++---- automerge/src/transaction/inner.rs | 5 +++-- 4 files changed, 11 insertions(+), 10 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index c1cf6695..75b5dbe4 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -124,7 +124,7 @@ impl AutoCommit { fn ensure_transaction_closed(&mut self) { if let Some(tx) = self.transaction.take() { 
self.update_history(export_change( - &tx, + tx, &self.doc.ops.m.actors, &self.doc.ops.m.props, )); diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index f593f7db..4db9df06 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -446,7 +446,7 @@ impl Automerge { /// Apply a single change to this document. fn apply_change(&mut self, change: Change) { let ops = self.import_ops(&change); - self.update_history(change); + self.update_history(change, ops.len()); for (obj, op) in ops { self.insert_op(&obj, op); } @@ -771,8 +771,8 @@ impl Automerge { .ok_or(AutomergeError::InvalidSeq(seq)) } - pub(crate) fn update_history(&mut self, change: Change) -> usize { - self.max_op = std::cmp::max(self.max_op, change.start_op + change.len() as u64 - 1); + pub(crate) fn update_history(&mut self, change: Change, num_ops: usize) -> usize { + self.max_op = std::cmp::max(self.max_op, change.start_op + num_ops as u64 - 1); self.update_deps(&change); diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 6016f206..a454e277 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -490,7 +490,7 @@ fn export_op( } pub(crate) fn export_change( - change: &TransactionInner, + change: TransactionInner, actors: &IndexedCache, props: &IndexedCache, ) -> Change { @@ -499,15 +499,15 @@ pub(crate) fn export_change( seq: change.seq, start_op: change.start_op, time: change.time, - deps: change.deps.clone(), - message: change.message.clone(), + deps: change.deps, + message: change.message, hash: change.hash, operations: change .operations .iter() .map(|(obj, op)| export_op(op, obj, actors, props)) .collect(), - extra_bytes: change.extra_bytes.clone(), + extra_bytes: change.extra_bytes, } .into() } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index a6a9852f..71374e7e 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -39,9 +39,10 @@ impl TransactionInner { 
self.time = t; } - let change = export_change(&self, &doc.ops.m.actors, &doc.ops.m.props); + let num_ops = self.operations.len(); + let change = export_change(self, &doc.ops.m.actors, &doc.ops.m.props); let hash = change.hash; - doc.update_history(change); + doc.update_history(change, num_ops); debug_assert_eq!(doc.get_heads(), vec![hash]); hash } From 9e1a063bc0031666498836ec33a379dfc3327923 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 14 Mar 2022 14:47:12 -0400 Subject: [PATCH 176/730] v20 - object replacement char --- automerge-wasm/test/test.ts | 9 +++++++++ automerge/src/automerge.rs | 2 ++ 2 files changed, 11 insertions(+) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index f2ea4369..4c9b08be 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -242,6 +242,15 @@ describe('Automerge', () => { doc.free() }) + it('should be able to insert objects into text', () => { + let doc = create() + let text = doc.set_object("/", "text", "Hello world"); + let obj = doc.insert_object(text, 6, { hello: "world" }); + assert.deepEqual(doc.text(text), "Hello \ufffcworld"); + assert.deepEqual(doc.value(text, 6), ["map", obj]); + assert.deepEqual(doc.value(obj, "hello"), ["str", "world"]); + }) + it('should be able save all or incrementally', () => { let doc = create() diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 4db9df06..8316f088 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -275,6 +275,8 @@ impl Automerge { for q in &query.ops { if let OpType::Set(ScalarValue::Str(s)) = &q.action { buffer.push_str(s); + } else { + buffer.push('\u{fffc}'); } } Ok(buffer) From f5e8b998caf7e7bd7f09351454595421e5c9e13b Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 23 Mar 2022 09:34:44 -0400 Subject: [PATCH 177/730] expose getChangeByHash in wasm --- automerge-wasm/index.d.ts | 1 + automerge-wasm/src/lib.rs | 11 +++++++++++ automerge-wasm/test/test.ts | 14 ++++++++++++++ 3 
files changed, 26 insertions(+) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 2897955e..ab43d968 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -121,6 +121,7 @@ export class Automerge { // low level change functions applyChanges(changes: Change[]): void; getChanges(have_deps: Heads): Change[]; + getChangeByHash(hash: Hash): Change | null; getChangesAdded(other: Automerge): Change[]; getHeads(): Heads; getLastLocalChange(): Change; diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 63b7e78a..14c631fb 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -389,6 +389,17 @@ impl Automerge { Ok(changes) } + #[wasm_bindgen(js_name = getChangeByHash)] + pub fn get_change_by_hash(&mut self, hash: JsValue) -> Result { + let hash = hash.into_serde().map_err(to_js_err)?; + let change = self.0.get_change_by_hash(&hash); + if let Some(c) = change { + Ok(Uint8Array::from(c.raw_bytes()).into()) + } else { + Ok(JsValue::null()) + } + } + #[wasm_bindgen(js_name = getChangesAdded)] pub fn get_changes_added(&mut self, other: &mut Automerge) -> Result { let changes = self.0.get_changes_added(&mut other.0); diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 4c9b08be..4c89e81d 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -376,6 +376,20 @@ describe('Automerge', () => { assert.deepEqual(doc.materialize("/list/0"), { foo: "bar"}) }) + it('should be able to fetch changes by hash', () => { + let doc1 = create("aaaa") + let doc2 = create("bbbb") + doc1.set("/","a","b") + doc2.set("/","b","c") + let head1 = doc1.getHeads() + let head2 = doc2.getHeads() + let change1 = doc1.getChangeByHash(head1[0]) + let change2 = doc1.getChangeByHash(head2[0]) + assert.deepEqual(change2, null) + if (change1 === null) { throw new RangeError("change1 should not be null") } + assert.deepEqual(decodeChange(change1).hash, head1[0]) + }) + it('recursive sets are 
possible', () => { let doc = create("aaaa") let l1 = doc.set_object("_root","list",[{ foo: "bar"}, [1,2,3]]) From ec3785ab2bfaae91bc346210a74ead69cece530e Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 24 Mar 2022 16:20:23 +0000 Subject: [PATCH 178/730] Expose encoding and decoding errors --- automerge-cli/src/import.rs | 12 ++++++------ automerge/src/error.rs | 22 +++++----------------- automerge/src/lib.rs | 3 +++ 3 files changed, 14 insertions(+), 23 deletions(-) diff --git a/automerge-cli/src/import.rs b/automerge-cli/src/import.rs index cd955762..9f9a3210 100644 --- a/automerge-cli/src/import.rs +++ b/automerge-cli/src/import.rs @@ -3,14 +3,14 @@ use automerge::transaction::Transactable; pub(crate) fn initialize_from_json( json_value: &serde_json::Value, -) -> Result { +) -> anyhow::Result { let mut doc = am::AutoCommit::new(); match json_value { serde_json::Value::Object(m) => { import_map(&mut doc, &am::ObjId::Root, m)?; Ok(doc) } - _ => Err(am::AutomergeError::Decoding), + _ => anyhow::bail!("expected an object"), } } @@ -18,7 +18,7 @@ fn import_map( doc: &mut am::AutoCommit, obj: &am::ObjId, map: &serde_json::Map, -) -> Result<(), am::AutomergeError> { +) -> anyhow::Result<()> { for (key, value) in map { match value { serde_json::Value::Null => { @@ -42,7 +42,7 @@ fn import_map( } else if let Some(m) = n.as_f64() { doc.set(obj, key, m)?; } else { - return Err(am::AutomergeError::Decoding); + anyhow::bail!("not a number"); } } serde_json::Value::Object(map) => { @@ -58,7 +58,7 @@ fn import_list( doc: &mut am::AutoCommit, obj: &am::ObjId, list: &[serde_json::Value], -) -> Result<(), am::AutomergeError> { +) -> anyhow::Result<()> { for (i, value) in list.iter().enumerate() { match value { serde_json::Value::Null => { @@ -82,7 +82,7 @@ fn import_list( } else if let Some(m) = n.as_f64() { doc.insert(obj, i, m)?; } else { - return Err(am::AutomergeError::Decoding); + anyhow::bail!("not a number"); } } serde_json::Value::Object(map) => { diff --git 
a/automerge/src/error.rs b/automerge/src/error.rs index 5b529fa5..aaf9b61d 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -1,6 +1,6 @@ -use crate::decoding; use crate::types::{ActorId, ScalarValue}; use crate::value::DataType; +use crate::{decoding, encoding}; use thiserror::Error; #[derive(Error, Debug)] @@ -9,10 +9,10 @@ pub enum AutomergeError { InvalidOpId(String), #[error("obj id not from this document `{0}`")] ForeignObjId(String), - #[error("there was an ecoding problem")] - Encoding, - #[error("there was a decoding problem")] - Decoding, + #[error("there was an encoding problem: {0}")] + Encoding(#[from] encoding::Error), + #[error("there was a decoding problem: {0}")] + Decoding(#[from] decoding::Error), #[error("key must not be an empty string")] EmptyStringKey, #[error("invalid seq {0}")] @@ -25,18 +25,6 @@ pub enum AutomergeError { Fail, } -impl From for AutomergeError { - fn from(_: std::io::Error) -> Self { - AutomergeError::Encoding - } -} - -impl From for AutomergeError { - fn from(_: decoding::Error) -> Self { - AutomergeError::Decoding - } -} - #[cfg(feature = "wasm")] impl From for wasm_bindgen::JsValue { fn from(err: AutomergeError) -> Self { diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index a98ef14e..ed91adbc 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -50,6 +50,9 @@ mod visualisation; pub use crate::automerge::Automerge; pub use autocommit::AutoCommit; pub use change::Change; +pub use decoding::Error as DecodingError; +pub use decoding::InvalidChangeError; +pub use encoding::Error as EncodingError; pub use error::AutomergeError; pub use exid::ExId as ObjId; pub use keys::Keys; From ed244d980a9892b7494d296fcde1b93ffa98287d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 24 Mar 2022 16:42:46 +0000 Subject: [PATCH 179/730] Make start_op be nonzero to prevent bad loads --- automerge-cli/src/examine.rs | 2 +- automerge/src/autocommit.rs | 79 +++--------------------------- 
automerge/src/automerge.rs | 35 ++++++++++--- automerge/src/change.rs | 10 ++-- automerge/src/columnar.rs | 2 +- automerge/src/decoding.rs | 10 ++++ automerge/src/encoding.rs | 9 +++- automerge/src/legacy/mod.rs | 6 ++- automerge/src/transaction/inner.rs | 9 +++- 9 files changed, 72 insertions(+), 90 deletions(-) diff --git a/automerge-cli/src/examine.rs b/automerge-cli/src/examine.rs index 7f15a625..010fa0f1 100644 --- a/automerge-cli/src/examine.rs +++ b/automerge-cli/src/examine.rs @@ -35,7 +35,7 @@ pub fn examine( if is_tty { let json_changes = serde_json::to_value(uncompressed_changes).unwrap(); colored_json::write_colored_json(&json_changes, &mut output).unwrap(); - writeln!(&mut output).unwrap(); + writeln!(output).unwrap(); } else { let json_changes = serde_json::to_string_pretty(&uncompressed_changes).unwrap(); output diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 75b5dbe4..ebe65409 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -1,10 +1,10 @@ use crate::exid::ExId; use crate::transaction::{CommitOptions, Transactable}; -use crate::{ - change::export_change, transaction::TransactionInner, ActorId, Automerge, AutomergeError, - Change, ChangeHash, Prop, Value, -}; use crate::{sync, Keys, KeysAt, ObjType, ScalarValue}; +use crate::{ + transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, + Value, +}; /// An automerge document that automatically manages transactions. 
#[derive(Debug, Clone)] @@ -52,67 +52,10 @@ impl AutoCommit { fn ensure_transaction_open(&mut self) { if self.transaction.is_none() { - let actor = self.doc.get_actor_index(); - - let seq = self.doc.states.entry(actor).or_default().len() as u64 + 1; - let mut deps = self.doc.get_heads(); - if seq > 1 { - let last_hash = self.get_hash(actor, seq - 1).unwrap(); - if !deps.contains(&last_hash) { - deps.push(last_hash); - } - } - - self.transaction = Some(TransactionInner { - actor, - seq, - start_op: self.doc.max_op + 1, - time: 0, - message: None, - extra_bytes: Default::default(), - hash: None, - operations: vec![], - deps, - }); + self.transaction = Some(self.doc.transaction_inner()); } } - fn get_hash(&mut self, actor: usize, seq: u64) -> Result { - self.doc - .states - .get(&actor) - .and_then(|v| v.get(seq as usize - 1)) - .and_then(|&i| self.doc.history.get(i)) - .map(|c| c.hash) - .ok_or(AutomergeError::InvalidSeq(seq)) - } - - fn update_history(&mut self, change: Change) -> usize { - self.doc.max_op = std::cmp::max(self.doc.max_op, change.start_op + change.len() as u64 - 1); - - self.update_deps(&change); - - let history_index = self.doc.history.len(); - - self.doc - .states - .entry(self.doc.ops.m.actors.cache(change.actor_id().clone())) - .or_default() - .push(history_index); - - self.doc.history_index.insert(change.hash, history_index); - self.doc.history.push(change); - - history_index - } - - fn update_deps(&mut self, change: &Change) { - for d in &change.deps { - self.doc.deps.remove(d); - } - self.doc.deps.insert(change.hash); - } - pub fn fork(&mut self) -> Self { self.ensure_transaction_closed(); Self { @@ -123,11 +66,7 @@ impl AutoCommit { fn ensure_transaction_closed(&mut self) { if let Some(tx) = self.transaction.take() { - self.update_history(export_change( - tx, - &self.doc.ops.m.actors, - &self.doc.ops.m.props, - )); + tx.commit(&mut self.doc, None, None); } } @@ -229,10 +168,7 @@ impl AutoCommit { } pub fn commit(&mut self) -> ChangeHash { - 
// ensure that even no changes triggers a change - self.ensure_transaction_open(); - let tx = self.transaction.take().unwrap(); - tx.commit(&mut self.doc, None, None) + self.commit_with(CommitOptions::default()) } /// Commit the current operations with some options. @@ -251,6 +187,7 @@ impl AutoCommit { /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` pub fn commit_with(&mut self, options: CommitOptions) -> ChangeHash { + // ensure that even no changes triggers a change self.ensure_transaction_open(); let tx = self.transaction.take().unwrap(); tx.commit(&mut self.doc, options.message, options.time) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 8316f088..d9d9cc90 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1,4 +1,5 @@ use std::collections::{HashMap, HashSet, VecDeque}; +use std::num::NonZeroU64; use crate::change::encode_document; use crate::exid::ExId; @@ -87,6 +88,13 @@ impl Automerge { /// Start a transaction. 
pub fn transaction(&mut self) -> Transaction { + Transaction { + inner: Some(self.transaction_inner()), + doc: self, + } + } + + pub(crate) fn transaction_inner(&mut self) -> TransactionInner { let actor = self.get_actor_index(); let seq = self.states.get(&actor).map_or(0, |v| v.len()) as u64 + 1; let mut deps = self.get_heads(); @@ -97,20 +105,17 @@ impl Automerge { } } - let tx_inner = TransactionInner { + TransactionInner { actor, seq, - start_op: self.max_op + 1, + // SAFETY: this unwrap is safe as we always add 1 + start_op: NonZeroU64::new(self.max_op + 1).unwrap(), time: 0, message: None, extra_bytes: Default::default(), hash: None, operations: vec![], deps, - }; - Transaction { - inner: Some(tx_inner), - doc: self, } } @@ -478,7 +483,7 @@ impl Automerge { .enumerate() .map(|(i, c)| { let actor = self.ops.m.actors.cache(change.actor_id().clone()); - let id = OpId(change.start_op + i as u64, actor); + let id = OpId(change.start_op.get() + i as u64, actor); let obj = match c.obj { legacy::ObjectId::Root => ObjId::root(), legacy::ObjectId::Id(id) => ObjId(OpId(id.0, self.ops.m.actors.cache(id.1))), @@ -774,7 +779,7 @@ impl Automerge { } pub(crate) fn update_history(&mut self, change: Change, num_ops: usize) -> usize { - self.max_op = std::cmp::max(self.max_op, change.start_op + num_ops as u64 - 1); + self.max_op = std::cmp::max(self.max_op, change.start_op.get() + num_ops as u64 - 1); self.update_deps(&change); @@ -1334,4 +1339,18 @@ mod tests { let doc = Automerge::load(&bytes).unwrap(); assert_eq!(doc.get_change_by_hash(&hash).unwrap().hash, hash); } + + #[test] + fn load_change_with_zero_start_op() { + let bytes = &[ + 133, 111, 74, 131, 202, 50, 52, 158, 2, 96, 163, 163, 83, 255, 255, 255, 50, 50, 50, + 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 255, 255, 245, 53, 1, 0, 0, 0, 0, 0, 0, 4, + 233, 245, 239, 255, 1, 0, 0, 0, 133, 111, 74, 131, 163, 96, 0, 0, 2, 10, 202, 144, 125, + 19, 48, 89, 133, 49, 10, 10, 67, 91, 111, 10, 74, 131, 96, 0, 163, 131, 255, 
255, 255, + 255, 255, 255, 255, 255, 255, 1, 153, 0, 0, 246, 255, 255, 255, 157, 157, 157, 157, + 157, 157, 157, 157, 157, 157, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 48, 254, 208, + ]; + let _ = Automerge::load(bytes); + } } diff --git a/automerge/src/change.rs b/automerge/src/change.rs index a454e277..24baf8ef 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -23,6 +23,7 @@ use std::collections::{HashMap, HashSet}; use std::convert::TryInto; use std::fmt::Debug; use std::io::{Read, Write}; +use std::num::NonZeroU64; use tracing::instrument; const MAGIC_BYTES: [u8; 4] = [0x85, 0x6f, 0x4a, 0x83]; @@ -319,8 +320,8 @@ pub struct Change { pub hash: amp::ChangeHash, /// The index of this change in the changes from this actor. pub seq: u64, - /// The start operation index. - pub start_op: u64, + /// The start operation index. Starts at 1. + pub start_op: NonZeroU64, /// The time that this change was committed. pub time: i64, /// The message of this change. 
@@ -357,7 +358,7 @@ impl Change { } pub fn max_op(&self) -> u64 { - self.start_op + (self.len() as u64) - 1 + self.start_op.get() + (self.len() as u64) - 1 } pub fn message(&self) -> Option { @@ -928,7 +929,8 @@ fn doc_changes_to_uncompressed_changes<'a>( actor_id: actors[change.actor].clone(), seq: change.seq, time: change.time, - start_op: change.max_op - change.ops.len() as u64 + 1, + // SAFETY: this unwrap is safe as we always add 1 + start_op: NonZeroU64::new(change.max_op - change.ops.len() as u64 + 1).unwrap(), hash: None, message: change.message, operations: change diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index 30429d64..7cb38872 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -946,7 +946,7 @@ impl ChangeEncoder { self.seq.append_value(change.seq); // FIXME iterops.count is crazy slow self.max_op - .append_value(change.start_op + change.iter_ops().count() as u64 - 1); + .append_value(change.start_op.get() + change.iter_ops().count() as u64 - 1); self.time.append_value(change.time as u64); self.message.append_value(change.message()); self.deps_num.append_value(change.deps.len()); diff --git a/automerge/src/decoding.rs b/automerge/src/decoding.rs index a7f2a357..739e856d 100644 --- a/automerge/src/decoding.rs +++ b/automerge/src/decoding.rs @@ -1,4 +1,5 @@ use core::fmt::Debug; +use std::num::NonZeroU64; use std::{borrow::Cow, io, io::Read, str}; use crate::error; @@ -353,6 +354,15 @@ impl Decodable for u64 { } } +impl Decodable for NonZeroU64 { + fn decode(bytes: &mut R) -> Option + where + R: Read, + { + NonZeroU64::new(leb128::read::unsigned(bytes).ok()?) 
+ } +} + impl Decodable for Vec { fn decode(bytes: &mut R) -> Option where diff --git a/automerge/src/encoding.rs b/automerge/src/encoding.rs index 6c8e461c..c5aa6fa2 100644 --- a/automerge/src/encoding.rs +++ b/automerge/src/encoding.rs @@ -3,6 +3,7 @@ use std::{ io, io::{Read, Write}, mem, + num::NonZeroU64, }; use flate2::{bufread::DeflateEncoder, Compression}; @@ -240,7 +241,7 @@ where } pub(crate) trait Encodable { - fn encode_with_actors_to_vec(&self, actors: &mut Vec) -> io::Result> { + fn encode_with_actors_to_vec(&self, actors: &mut [ActorId]) -> io::Result> { let mut buf = Vec::new(); self.encode_with_actors(&mut buf, actors)?; Ok(buf) @@ -291,6 +292,12 @@ impl Encodable for u64 { } } +impl Encodable for NonZeroU64 { + fn encode(&self, buf: &mut R) -> io::Result { + leb128::write::unsigned(buf, self.get()) + } +} + impl Encodable for f64 { fn encode(&self, buf: &mut R) -> io::Result { let bytes = self.to_le_bytes(); diff --git a/automerge/src/legacy/mod.rs b/automerge/src/legacy/mod.rs index afa93bc2..835c6597 100644 --- a/automerge/src/legacy/mod.rs +++ b/automerge/src/legacy/mod.rs @@ -1,6 +1,8 @@ mod serde_impls; mod utility_impls; +use std::num::NonZeroU64; + pub(crate) use crate::types::{ActorId, ChangeHash, ObjType, OpType, ScalarValue}; pub(crate) use crate::value::DataType; @@ -246,9 +248,9 @@ pub struct Change { pub hash: Option, /// The index of this change in the changes from this actor. pub seq: u64, - /// The start operation index. + /// The start operation index. Starts at 1. #[serde(rename = "startOp")] - pub start_op: u64, + pub start_op: NonZeroU64, /// The time that this change was committed. pub time: i64, /// The message of this change. 
diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 71374e7e..c471c057 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -1,3 +1,5 @@ +use std::num::NonZeroU64; + use crate::automerge::Actor; use crate::exid::ExId; use crate::query::{self, OpIdSearch}; @@ -9,7 +11,7 @@ use crate::{AutomergeError, ObjType, OpType, ScalarValue}; pub struct TransactionInner { pub(crate) actor: usize, pub(crate) seq: u64, - pub(crate) start_op: u64, + pub(crate) start_op: NonZeroU64, pub(crate) time: i64, pub(crate) message: Option, pub(crate) extra_bytes: Vec, @@ -123,7 +125,10 @@ impl TransactionInner { } fn next_id(&mut self) -> OpId { - OpId(self.start_op + self.operations.len() as u64, self.actor) + OpId( + self.start_op.get() + self.operations.len() as u64, + self.actor, + ) } fn insert_local_op( From 666782896d51e1f6491688db1d6de0035cd1a784 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 26 Mar 2022 09:11:41 +0000 Subject: [PATCH 180/730] Remove unnecessary consts in queries --- automerge/src/query/insert.rs | 12 +++++++++--- automerge/src/query/len.rs | 6 +++--- automerge/src/query/len_at.rs | 6 +++--- automerge/src/query/nth.rs | 6 +++--- automerge/src/query/nth_at.rs | 6 +++--- automerge/src/query/seek_op.rs | 10 +++++++--- 6 files changed, 28 insertions(+), 18 deletions(-) diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index 80d48762..1023b07b 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -5,10 +5,13 @@ use crate::types::{ElemId, Key, Op, HEAD}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct InsertNth { +pub(crate) struct InsertNth { + /// the index in the realised list that we want to insert at target: usize, + /// the number of visible operations seen seen: usize, //pub pos: usize, + /// the number of operations (including non-visible) that we have seen n: usize, valid: Option, last_seen: 
Option, @@ -16,7 +19,7 @@ pub(crate) struct InsertNth { last_valid_insert: Option, } -impl InsertNth { +impl InsertNth { pub fn new(target: usize) -> Self { let (valid, last_valid_insert) = if target == 0 { (Some(0), Some(HEAD)) @@ -56,16 +59,19 @@ impl InsertNth { } } -impl TreeQuery for InsertNth { +impl TreeQuery for InsertNth { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { let mut num_vis = child.index.len; + // if this node has some visible elements then we may find our target within if num_vis > 0 { if child.index.has(&self.last_seen) { num_vis -= 1; } if self.seen + num_vis >= self.target { + // our target is within this node QueryResult::Descend } else { + // our target is not in this node so try the next one self.n += child.len(); self.seen += num_vis; self.last_seen = child.last().elemid(); diff --git a/automerge/src/query/len.rs b/automerge/src/query/len.rs index f92b8096..34bf408c 100644 --- a/automerge/src/query/len.rs +++ b/automerge/src/query/len.rs @@ -3,17 +3,17 @@ use crate::query::{QueryResult, TreeQuery}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct Len { +pub(crate) struct Len { pub len: usize, } -impl Len { +impl Len { pub fn new() -> Self { Len { len: 0 } } } -impl TreeQuery for Len { +impl TreeQuery for Len { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { self.len = child.index.len; QueryResult::Finish diff --git a/automerge/src/query/len_at.rs b/automerge/src/query/len_at.rs index 03187db1..2f277f3e 100644 --- a/automerge/src/query/len_at.rs +++ b/automerge/src/query/len_at.rs @@ -3,7 +3,7 @@ use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct LenAt { +pub(crate) struct LenAt { pub len: usize, clock: Clock, pos: usize, @@ -11,7 +11,7 @@ pub(crate) struct LenAt { window: VisWindow, } -impl LenAt { +impl LenAt { pub fn new(clock: Clock) -> Self { LenAt { clock, @@ -23,7 +23,7 @@ impl LenAt { } } -impl TreeQuery for LenAt 
{ +impl TreeQuery for LenAt { fn query_element(&mut self, op: &Op) -> QueryResult { if op.insert { self.last = None; diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index b62410e4..7d59ac4e 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -5,7 +5,7 @@ use crate::types::{ElemId, Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct Nth { +pub(crate) struct Nth { target: usize, seen: usize, last_seen: Option, @@ -15,7 +15,7 @@ pub(crate) struct Nth { pub pos: usize, } -impl Nth { +impl Nth { pub fn new(target: usize) -> Self { Nth { target, @@ -37,7 +37,7 @@ impl Nth { } } -impl TreeQuery for Nth { +impl TreeQuery for Nth { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { let mut num_vis = child.index.len; if num_vis > 0 { diff --git a/automerge/src/query/nth_at.rs b/automerge/src/query/nth_at.rs index 3d90e590..ff5d1263 100644 --- a/automerge/src/query/nth_at.rs +++ b/automerge/src/query/nth_at.rs @@ -3,7 +3,7 @@ use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct NthAt { +pub(crate) struct NthAt { clock: Clock, target: usize, seen: usize, @@ -15,7 +15,7 @@ pub(crate) struct NthAt { pub pos: usize, } -impl NthAt { +impl NthAt { pub fn new(target: usize, clock: Clock) -> Self { NthAt { clock, @@ -31,7 +31,7 @@ impl NthAt { } } -impl TreeQuery for NthAt { +impl TreeQuery for NthAt { fn query_element(&mut self, element: &Op) -> QueryResult { if element.insert { if self.seen > self.target { diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs index a2f3b750..c782b544 100644 --- a/automerge/src/query/seek_op.rs +++ b/automerge/src/query/seek_op.rs @@ -5,14 +5,18 @@ use std::cmp::Ordering; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct SeekOp { +pub(crate) struct SeekOp { + /// the op we are looking for op: Op, + /// The position to insert at pub pos: usize, + /// The 
indices of ops that this op overwrites pub succ: Vec, + /// whether a position has been found found: bool, } -impl SeekOp { +impl SeekOp { pub fn new(op: &Op) -> Self { SeekOp { op: op.clone(), @@ -42,7 +46,7 @@ impl SeekOp { } } -impl TreeQuery for SeekOp { +impl TreeQuery for SeekOp { fn query_node_with_metadata( &mut self, child: &OpTreeNode, From 83d298ce8d633cc6ab3eed8bbe0e8a0dba53547f Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sun, 27 Mar 2022 11:55:37 +0100 Subject: [PATCH 181/730] Add test for broken last_elem --- automerge/src/automerge.rs | 41 ++++++++++++++++++++++++++++++++++++++ automerge/src/types.rs | 17 ++++++++++++++++ 2 files changed, 58 insertions(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index d9d9cc90..e64ee49f 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -902,6 +902,7 @@ mod tests { use pretty_assertions::assert_eq; use super::*; + use crate::op_set::B; use crate::transaction::Transactable; use crate::*; use std::convert::TryInto; @@ -1353,4 +1354,44 @@ mod tests { ]; let _ = Automerge::load(bytes); } + + #[test] + fn compute_list_indexes_correctly_when_list_element_is_split_across_tree_nodes() { + let max = B as u64 * 2; + let actor1 = ActorId::from(b"aaaa"); + let mut doc1 = AutoCommit::new().with_actor(actor1.clone()); + let actor2 = ActorId::from(b"bbbb"); + let mut doc2 = AutoCommit::new().with_actor(actor2.clone()); + let list = doc1.set_object(ROOT, "list", ObjType::List).unwrap(); + doc1.insert(&list, 0, 0).unwrap(); + doc2.load_incremental(&doc1.save_incremental()).unwrap(); + for i in 1..=max { + doc1.set(&list, 0, i).unwrap() + } + for i in 1..=max { + doc2.set(&list, 0, i).unwrap() + } + let change1 = doc1.save_incremental(); + let change2 = doc2.save_incremental(); + doc2.load_incremental(&change1).unwrap(); + doc1.load_incremental(&change2).unwrap(); + assert_eq!(doc1.length(&list), 1); + assert_eq!(doc2.length(&list), 1); + assert_eq!( + doc1.values(&list, 
0).unwrap(), + vec![ + (max.into(), ExId::Id(max + 2, actor1.clone(), 0)), + (max.into(), ExId::Id(max + 2, actor2.clone(), 1)) + ] + ); + assert_eq!( + doc2.values(&list, 0).unwrap(), + vec![ + (max.into(), ExId::Id(max + 2, actor1, 0)), + (max.into(), ExId::Id(max + 2, actor2, 1)) + ] + ); + assert!(doc1.value(&list, 1).unwrap().is_none()); + assert!(doc2.value(&list, 1).unwrap().is_none()); + } } diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 1f52731d..557594c8 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -89,6 +89,23 @@ impl From> for ActorId { } } +impl From<[u8; N]> for ActorId { + fn from(array: [u8; N]) -> Self { + ActorId::from(&array) + } +} + +impl From<&[u8; N]> for ActorId { + fn from(slice: &[u8; N]) -> Self { + let inner = if let Ok(arr) = ArrayVec::try_from(slice.as_slice()) { + TinyVec::Inline(arr) + } else { + TinyVec::Heap(slice.to_vec()) + }; + ActorId(inner) + } +} + impl FromStr for ActorId { type Err = error::InvalidActorId; From 20229ee2d080aa5090e4b9b21e3135155fdb67bf Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sun, 27 Mar 2022 12:26:56 +0100 Subject: [PATCH 182/730] Remove last_elem in nth query --- automerge/src/query/nth.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index b62410e4..144c3ae0 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -9,7 +9,6 @@ pub(crate) struct Nth { target: usize, seen: usize, last_seen: Option, - last_elem: Option, pub ops: Vec, pub ops_pos: Vec, pub pos: usize, @@ -24,12 +23,13 @@ impl Nth { ops: vec![], ops_pos: vec![], pos: 0, - last_elem: None, } } + /// Get the key pub fn key(&self) -> Result { - if let Some(e) = self.last_elem { + // the query collects the ops so we can use that to get the key they all use + if let Some(e) = self.ops.first().and_then(|op| op.elemid()) { Ok(Key::Seq(e)) } else { Err(AutomergeError::InvalidIndex(self.target)) @@ 
-56,6 +56,7 @@ impl TreeQuery for Nth { QueryResult::Next } } else { + // skip this node as no useful ops in it self.pos += child.len(); QueryResult::Next } @@ -66,7 +67,6 @@ impl TreeQuery for Nth { if self.seen > self.target { return QueryResult::Finish; }; - self.last_elem = element.elemid(); self.last_seen = None } let visible = element.visible(); From e945ebbe746fdf23b1e701013962c4988ad0d787 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sun, 27 Mar 2022 15:35:44 +0100 Subject: [PATCH 183/730] Remove last_elem from nth_at --- automerge/src/query/nth_at.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/automerge/src/query/nth_at.rs b/automerge/src/query/nth_at.rs index 3d90e590..c886d824 100644 --- a/automerge/src/query/nth_at.rs +++ b/automerge/src/query/nth_at.rs @@ -8,7 +8,6 @@ pub(crate) struct NthAt { target: usize, seen: usize, last_seen: Option, - last_elem: Option, window: VisWindow, pub ops: Vec, pub ops_pos: Vec, @@ -25,7 +24,6 @@ impl NthAt { ops: vec![], ops_pos: vec![], pos: 0, - last_elem: None, window: Default::default(), } } @@ -37,7 +35,6 @@ impl TreeQuery for NthAt { if self.seen > self.target { return QueryResult::Finish; }; - self.last_elem = element.elemid(); self.last_seen = None } let visible = self.window.visible_at(element, self.pos, &self.clock); From a30bdc38881e2501a2770c318e61c063ffd73a59 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 26 Mar 2022 10:02:54 +0000 Subject: [PATCH 184/730] Add broken list tests --- automerge/src/automerge.rs | 146 ++++++++++++++++++++++++++++++++- automerge/src/indexed_cache.rs | 9 ++ automerge/src/op_set.rs | 6 +- automerge/src/query.rs | 1 + automerge/src/query/seek_op.rs | 5 +- 5 files changed, 161 insertions(+), 6 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index d9d9cc90..7b01decb 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -15,7 +15,7 @@ use crate::{legacy, query, types, ObjType}; use crate::{AutomergeError, 
Change, Prop}; use serde::Serialize; -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq)] pub(crate) enum Actor { Unused(ActorId), Cached(usize), @@ -1353,4 +1353,148 @@ mod tests { ]; let _ = Automerge::load(bytes); } + + #[test] + fn load_broken_list() { + enum Action { + InsertText(usize, char), + DelText(usize), + } + use Action::*; + let actions = [ + InsertText(0, 'a'), + InsertText(0, 'b'), + DelText(1), + InsertText(0, 'c'), + DelText(1), + DelText(0), + InsertText(0, 'd'), + InsertText(0, 'e'), + InsertText(1, 'f'), + DelText(2), + DelText(1), + InsertText(0, 'g'), + DelText(1), + DelText(0), + InsertText(0, 'h'), + InsertText(1, 'i'), + DelText(1), + DelText(0), + InsertText(0, 'j'), + InsertText(0, 'k'), + DelText(1), + DelText(0), + InsertText(0, 'l'), + DelText(0), + InsertText(0, 'm'), + InsertText(0, 'n'), + DelText(1), + DelText(0), + InsertText(0, 'o'), + DelText(0), + InsertText(0, 'p'), + InsertText(1, 'q'), + InsertText(1, 'r'), + InsertText(1, 's'), + InsertText(3, 't'), + InsertText(5, 'u'), + InsertText(0, 'v'), + InsertText(3, 'w'), + InsertText(4, 'x'), + InsertText(0, 'y'), + InsertText(6, 'z'), + InsertText(11, '1'), + InsertText(0, '2'), + InsertText(0, '3'), + InsertText(0, '4'), + InsertText(13, '5'), + InsertText(11, '6'), + InsertText(17, '7'), + ]; + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let list = tx.set_object(ROOT, "list", ObjType::List).unwrap(); + for action in actions { + match action { + Action::InsertText(index, c) => { + println!("inserting {} at {}", c, index); + tx.insert(&list, index, c).unwrap(); + } + Action::DelText(index) => { + println!("deleting at {} ", index); + tx.del(&list, index).unwrap(); + } + } + } + tx.commit(); + let bytes = doc.save(); + println!("doc2 time"); + let mut doc2 = Automerge::load(&bytes).unwrap(); + let bytes2 = doc2.save(); + assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); + + assert_eq!(doc.queue, doc2.queue); + assert_eq!(doc.history, 
doc2.history); + assert_eq!(doc.history_index, doc2.history_index); + assert_eq!(doc.states, doc2.states); + assert_eq!(doc.deps, doc2.deps); + assert_eq!(doc.saved, doc2.saved); + assert_eq!(doc.ops, doc2.ops); + assert_eq!(doc.max_op, doc2.max_op); + + assert_eq!(bytes, bytes2); + } + + #[test] + fn load_broken_list_short() { + // breaks when the B constant in OpSet is 3 + enum Action { + InsertText(usize, char), + DelText(usize), + } + use Action::*; + let actions = [ + InsertText(0, 'a'), + InsertText(1, 'b'), + DelText(1), + InsertText(1, 'c'), + InsertText(2, 'd'), + InsertText(2, 'e'), + InsertText(0, 'f'), + DelText(4), + InsertText(4, 'g'), + ]; + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let list = tx.set_object(ROOT, "list", ObjType::List).unwrap(); + for action in actions { + match action { + Action::InsertText(index, c) => { + println!("inserting {} at {}", c, index); + tx.insert(&list, index, c).unwrap(); + } + Action::DelText(index) => { + println!("deleting at {} ", index); + tx.del(&list, index).unwrap(); + } + } + } + tx.commit(); + let bytes = doc.save(); + println!("doc2 time"); + let mut doc2 = Automerge::load(&bytes).unwrap(); + let bytes2 = doc2.save(); + assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); + + assert_eq!(doc.queue, doc2.queue); + assert_eq!(doc.history, doc2.history); + assert_eq!(doc.history_index, doc2.history_index); + assert_eq!(doc.states, doc2.states); + assert_eq!(doc.deps, doc2.deps); + assert_eq!(doc.saved, doc2.saved); + assert_eq!(doc.ops, doc2.ops); + assert_eq!(doc.max_op, doc2.max_op); + + assert_eq!(bytes, bytes2); + } } diff --git a/automerge/src/indexed_cache.rs b/automerge/src/indexed_cache.rs index 2b5811ba..6d760637 100644 --- a/automerge/src/indexed_cache.rs +++ b/automerge/src/indexed_cache.rs @@ -9,6 +9,15 @@ pub(crate) struct IndexedCache { lookup: HashMap, } +impl PartialEq for IndexedCache +where + T: PartialEq, +{ + fn eq(&self, other: &Self) -> bool { + 
self.cache == other.cache + } +} + impl IndexedCache where T: Clone + Eq + Hash + Ord, diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index bd84911d..9b77bcf0 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -8,10 +8,10 @@ use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::HashMap; -pub(crate) const B: usize = 16; +pub(crate) const B: usize = 3; pub(crate) type OpSet = OpSetInternal; -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq)] pub(crate) struct OpSetInternal { trees: HashMap), FxBuildHasher>, length: usize, @@ -170,7 +170,7 @@ impl<'a, const B: usize> Iterator for Iter<'a, B> { } } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq)] pub(crate) struct OpSetMetadata { pub actors: IndexedCache, pub props: IndexedCache, diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 4d6c47c9..3d85193c 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -76,6 +76,7 @@ pub(crate) enum QueryResult { pub(crate) struct Index { pub len: usize, pub visible: HashMap, + /// Set of opids found in this node and below. 
pub ops: HashSet, } diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs index c782b544..e584bea6 100644 --- a/automerge/src/query/seek_op.rs +++ b/automerge/src/query/seek_op.rs @@ -56,7 +56,7 @@ impl TreeQuery for SeekOp { return QueryResult::Descend; } match self.op.key { - Key::Seq(e) if e == HEAD => { + Key::Seq(HEAD) => { while self.pos < child.len() { let op = child.get(self.pos).unwrap(); if self.op.overwrites(op) { @@ -70,7 +70,7 @@ impl TreeQuery for SeekOp { QueryResult::Finish } Key::Seq(e) => { - if self.found || child.index.ops.contains(&e.0) { + if child.index.ops.contains(&e.0) { QueryResult::Descend } else { self.pos += child.len(); @@ -108,6 +108,7 @@ impl TreeQuery for SeekOp { self.pos += 1; QueryResult::Next } else { + // we have already found the target if self.op.overwrites(e) { self.succ.push(self.pos); } From 49c4bf4911a331059fffe49b0d12f64717919d0d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sun, 27 Mar 2022 21:10:08 +0100 Subject: [PATCH 185/730] Rename has to has_visible --- automerge/src/query.rs | 2 +- automerge/src/query/insert.rs | 2 +- automerge/src/query/nth.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 3d85193c..91a582c6 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -89,7 +89,7 @@ impl Index { } } - pub fn has(&self, e: &Option) -> bool { + pub fn has_visible(&self, e: &Option) -> bool { if let Some(seen) = e { self.visible.contains_key(seen) } else { diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index 1023b07b..9931573d 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -64,7 +64,7 @@ impl TreeQuery for InsertNth { let mut num_vis = child.index.len; // if this node has some visible elements then we may find our target within if num_vis > 0 { - if child.index.has(&self.last_seen) { + if child.index.has_visible(&self.last_seen) { num_vis -= 
1; } if self.seen + num_vis >= self.target { diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index 7d59ac4e..f382788f 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -44,7 +44,7 @@ impl TreeQuery for Nth { // num vis is the number of keys in the index // minus one if we're counting last_seen // let mut num_vis = s.keys().count(); - if child.index.has(&self.last_seen) { + if child.index.has_visible(&self.last_seen) { num_vis -= 1; } if self.seen + num_vis > self.target { From 1b5730c0ae5d1306acd5d8e5ae83c8180f386a6d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sun, 27 Mar 2022 21:10:23 +0100 Subject: [PATCH 186/730] Fix insert query to not skip past insert positions When inserting and we have seen enough elements then look for the first index to insert at rather than skipping over it. --- automerge/src/query/insert.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index 9931573d..805a84df 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -78,8 +78,14 @@ impl TreeQuery for InsertNth { QueryResult::Next } } else { - self.n += child.len(); - QueryResult::Next + if self.seen + num_vis >= self.target { + // we may have found the point to insert at so descend to check + QueryResult::Descend + } else { + // we haven't found the point to insert at so just skip this node + self.n += child.len(); + QueryResult::Next + } } } From b280138f84becf257f570075b7491e17ec097553 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 28 Mar 2022 09:17:59 +0100 Subject: [PATCH 187/730] Remove explicit len on index --- automerge/src/query.rs | 12 +++++------- automerge/src/query/insert.rs | 2 +- automerge/src/query/len.rs | 2 +- automerge/src/query/nth.rs | 2 +- 4 files changed, 8 insertions(+), 10 deletions(-) diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 91a582c6..7732e908 100644 --- 
a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -74,7 +74,6 @@ pub(crate) enum QueryResult { #[derive(Clone, Debug, PartialEq)] pub(crate) struct Index { - pub len: usize, pub visible: HashMap, /// Set of opids found in this node and below. pub ops: HashSet, @@ -83,12 +82,16 @@ pub(crate) struct Index { impl Index { pub fn new() -> Self { Index { - len: 0, visible: Default::default(), ops: Default::default(), } } + /// Get the number of visible elements in this index. + pub fn visible_len(&self) -> usize { + self.visible.len() + } + pub fn has_visible(&self, e: &Option) -> bool { if let Some(seen) = e { self.visible.contains_key(seen) @@ -108,7 +111,6 @@ impl Index { match (new.visible(), old.visible(), new.elemid()) { (false, true, Some(elem)) => match self.visible.get(&elem).copied() { Some(n) if n == 1 => { - self.len -= 1; self.visible.remove(&elem); } Some(n) => { @@ -121,7 +123,6 @@ impl Index { self.visible.insert(elem, n + 1); } None => { - self.len += 1; self.visible.insert(elem, 1); } }, @@ -138,7 +139,6 @@ impl Index { self.visible.insert(elem, n + 1); } None => { - self.len += 1; self.visible.insert(elem, 1); } } @@ -152,7 +152,6 @@ impl Index { if let Some(elem) = op.elemid() { match self.visible.get(&elem).copied() { Some(n) if n == 1 => { - self.len -= 1; self.visible.remove(&elem); } Some(n) => { @@ -172,7 +171,6 @@ impl Index { match self.visible.get(elem).cloned() { None => { self.visible.insert(*elem, 1); - self.len += 1; } Some(m) => { self.visible.insert(*elem, m + n); diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index 805a84df..c06029c4 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -61,8 +61,8 @@ impl InsertNth { impl TreeQuery for InsertNth { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { - let mut num_vis = child.index.len; // if this node has some visible elements then we may find our target within + let mut num_vis = child.index.visible_len(); if 
num_vis > 0 { if child.index.has_visible(&self.last_seen) { num_vis -= 1; diff --git a/automerge/src/query/len.rs b/automerge/src/query/len.rs index 34bf408c..ab745f75 100644 --- a/automerge/src/query/len.rs +++ b/automerge/src/query/len.rs @@ -15,7 +15,7 @@ impl Len { impl TreeQuery for Len { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { - self.len = child.index.len; + self.len = child.index.visible_len(); QueryResult::Finish } } diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index f382788f..beb8144a 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -39,7 +39,7 @@ impl Nth { impl TreeQuery for Nth { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { - let mut num_vis = child.index.len; + let mut num_vis = child.index.visible_len(); if num_vis > 0 { // num vis is the number of keys in the index // minus one if we're counting last_seen From a19aae484c0d432d5bed236855d778e45c876551 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 28 Mar 2022 09:19:17 +0100 Subject: [PATCH 188/730] Don't set last_seen unless the elemid was actually visible --- automerge/src/query/nth.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index beb8144a..2f8d1abc 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -52,7 +52,11 @@ impl TreeQuery for Nth { } else { self.pos += child.len(); self.seen += num_vis; - self.last_seen = child.last().elemid(); + // if this node has the last elemid as visible then we can set it, otherwise other state doesn't get set correctly. 
+ let last_elemid = child.last().elemid(); + if child.index.has_visible(&last_elemid) { + self.last_seen = last_elemid; + } QueryResult::Next } } else { From 8f201562c3b466351123f9c554b5225829dcdb18 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 28 Mar 2022 10:04:22 +0100 Subject: [PATCH 189/730] Add better comments --- automerge/src/query/nth.rs | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index 2f8d1abc..1f64c128 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -8,6 +8,8 @@ use std::fmt::Debug; pub(crate) struct Nth { target: usize, seen: usize, + // last_seen is the target elemid of the last `seen` operation. + // It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes. last_seen: Option, last_elem: Option, pub ops: Vec, @@ -52,7 +54,13 @@ impl TreeQuery for Nth { } else { self.pos += child.len(); self.seen += num_vis; - // if this node has the last elemid as visible then we can set it, otherwise other state doesn't get set correctly. + + // We have updated seen by the number of visible elements in this index, before we skip it. + // We also need to keep track of the last elemid that we have seen (and counted as seen). 
+ // We can just use the elemid of the last op in this node as either: + // - the insert was at a previous node and this is a long run of overwrites so last_seen should already be set correctly + // - the visible op is in this node and the elemid references it so it can be set here + // - the visible op is in a future node and so it will be counted as seen there let last_elemid = child.last().elemid(); if child.index.has_visible(&last_elemid) { self.last_seen = last_elemid; @@ -71,11 +79,13 @@ impl TreeQuery for Nth { return QueryResult::Finish; }; self.last_elem = element.elemid(); + // we have a new potentially visible element so reset last_seen self.last_seen = None } let visible = element.visible(); if visible && self.last_seen.is_none() { self.seen += 1; + // we have a new visible element self.last_seen = element.elemid() } if self.seen == self.target + 1 && visible { From ece1e2228336e2e2866130bdbd13d9c3cf5476c1 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 28 Mar 2022 10:05:33 +0100 Subject: [PATCH 190/730] Fix clippy --- automerge/src/query/insert.rs | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index c06029c4..abfdde61 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -77,15 +77,13 @@ impl TreeQuery for InsertNth { self.last_seen = child.last().elemid(); QueryResult::Next } + } else if self.seen + num_vis >= self.target { + // we may have found the point to insert at so descend to check + QueryResult::Descend } else { - if self.seen + num_vis >= self.target { - // we may have found the point to insert at so descend to check - QueryResult::Descend - } else { - // we haven't found the point to insert at so just skip this node - self.n += child.len(); - QueryResult::Next - } + // we haven't found the point to insert at so just skip this node + self.n += child.len(); + QueryResult::Next } } From 
fb6f2787b2708090498126758a2befa9c6a94b91 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sun, 27 Mar 2022 12:26:56 +0100 Subject: [PATCH 191/730] Remove last_elem in nth query --- automerge/src/query/nth.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index 1f64c128..d361db1e 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -11,7 +11,6 @@ pub(crate) struct Nth { // last_seen is the target elemid of the last `seen` operation. // It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes. last_seen: Option, - last_elem: Option, pub ops: Vec, pub ops_pos: Vec, pub pos: usize, @@ -26,12 +25,13 @@ impl Nth { ops: vec![], ops_pos: vec![], pos: 0, - last_elem: None, } } + /// Get the key pub fn key(&self) -> Result { - if let Some(e) = self.last_elem { + // the query collects the ops so we can use that to get the key they all use + if let Some(e) = self.ops.first().and_then(|op| op.elemid()) { Ok(Key::Seq(e)) } else { Err(AutomergeError::InvalidIndex(self.target)) @@ -68,6 +68,7 @@ impl TreeQuery for Nth { QueryResult::Next } } else { + // skip this node as no useful ops in it self.pos += child.len(); QueryResult::Next } @@ -77,8 +78,7 @@ impl TreeQuery for Nth { if element.insert { if self.seen > self.target { return QueryResult::Finish; - }; - self.last_elem = element.elemid(); + } // we have a new potentially visible element so reset last_seen self.last_seen = None } From 1c6032bee057862d775f6e862772da2019cd2e67 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 28 Mar 2022 10:33:42 +0100 Subject: [PATCH 192/730] Reset B to 16 --- automerge/src/op_set.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 9b77bcf0..840d6617 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -8,7 +8,7 @@ use 
fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::HashMap; -pub(crate) const B: usize = 3; +pub(crate) const B: usize = 16; pub(crate) type OpSet = OpSetInternal; #[derive(Debug, Clone, PartialEq)] From 636fe756478986bde7121b1d2661fbd48784a1c0 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 28 Mar 2022 10:34:00 +0100 Subject: [PATCH 193/730] Simplify query_node for insert and nth --- automerge/src/query/insert.rs | 36 ++++++++++++++++-------------- automerge/src/query/nth.rs | 42 ++++++++++++++--------------------- 2 files changed, 36 insertions(+), 42 deletions(-) diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index abfdde61..fe99c691 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -63,26 +63,28 @@ impl TreeQuery for InsertNth { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { // if this node has some visible elements then we may find our target within let mut num_vis = child.index.visible_len(); - if num_vis > 0 { - if child.index.has_visible(&self.last_seen) { - num_vis -= 1; - } - if self.seen + num_vis >= self.target { - // our target is within this node - QueryResult::Descend - } else { - // our target is not in this node so try the next one - self.n += child.len(); - self.seen += num_vis; - self.last_seen = child.last().elemid(); - QueryResult::Next - } - } else if self.seen + num_vis >= self.target { - // we may have found the point to insert at so descend to check + if child.index.has_visible(&self.last_seen) { + num_vis -= 1; + } + + if self.seen + num_vis >= self.target { + // our target is within this node QueryResult::Descend } else { - // we haven't found the point to insert at so just skip this node + // our target is not in this node so try the next one self.n += child.len(); + self.seen += num_vis; + + // We have updated seen by the number of visible elements in this index, before we skip it. 
+ // We also need to keep track of the last elemid that we have seen (and counted as seen). + // We can just use the elemid of the last op in this node as either: + // - the insert was at a previous node and this is a long run of overwrites so last_seen should already be set correctly + // - the visible op is in this node and the elemid references it so it can be set here + // - the visible op is in a future node and so it will be counted as seen there + let last_elemid = child.last().elemid(); + if child.index.has_visible(&last_elemid) { + self.last_seen = last_elemid; + } QueryResult::Next } } diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index d361db1e..2f404125 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -42,34 +42,26 @@ impl Nth { impl TreeQuery for Nth { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { let mut num_vis = child.index.visible_len(); - if num_vis > 0 { - // num vis is the number of keys in the index - // minus one if we're counting last_seen - // let mut num_vis = s.keys().count(); - if child.index.has_visible(&self.last_seen) { - num_vis -= 1; - } - if self.seen + num_vis > self.target { - QueryResult::Descend - } else { - self.pos += child.len(); - self.seen += num_vis; + if child.index.has_visible(&self.last_seen) { + num_vis -= 1; + } - // We have updated seen by the number of visible elements in this index, before we skip it. - // We also need to keep track of the last elemid that we have seen (and counted as seen). 
- // We can just use the elemid of the last op in this node as either: - // - the insert was at a previous node and this is a long run of overwrites so last_seen should already be set correctly - // - the visible op is in this node and the elemid references it so it can be set here - // - the visible op is in a future node and so it will be counted as seen there - let last_elemid = child.last().elemid(); - if child.index.has_visible(&last_elemid) { - self.last_seen = last_elemid; - } - QueryResult::Next - } + if self.seen + num_vis > self.target { + QueryResult::Descend } else { - // skip this node as no useful ops in it self.pos += child.len(); + self.seen += num_vis; + + // We have updated seen by the number of visible elements in this index, before we skip it. + // We also need to keep track of the last elemid that we have seen (and counted as seen). + // We can just use the elemid of the last op in this node as either: + // - the insert was at a previous node and this is a long run of overwrites so last_seen should already be set correctly + // - the visible op is in this node and the elemid references it so it can be set here + // - the visible op is in a future node and so it will be counted as seen there + let last_elemid = child.last().elemid(); + if child.index.has_visible(&last_elemid) { + self.last_seen = last_elemid; + } QueryResult::Next } } From f002e7261bde9df1923bc6c7e4cb85d1d1d84396 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 28 Mar 2022 10:37:14 +0100 Subject: [PATCH 194/730] Update comments --- automerge/src/query/insert.rs | 2 ++ automerge/src/query/nth.rs | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index fe99c691..eb855ce9 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -14,6 +14,8 @@ pub(crate) struct InsertNth { /// the number of operations (including non-visible) that we have seen n: usize, valid: Option, + 
/// last_seen is the target elemid of the last `seen` operation. + /// It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes. last_seen: Option, last_insert: Option, last_valid_insert: Option, diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index 2f404125..1b4b1e13 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -8,8 +8,8 @@ use std::fmt::Debug; pub(crate) struct Nth { target: usize, seen: usize, - // last_seen is the target elemid of the last `seen` operation. - // It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes. + /// last_seen is the target elemid of the last `seen` operation. + /// It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes. last_seen: Option, pub ops: Vec, pub ops_pos: Vec, From be8f367d07054149729e80cb70ca288d87a8103f Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 29 Mar 2022 11:39:25 -0400 Subject: [PATCH 195/730] missing test tag --- automerge/src/automerge.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 0715385a..b1b9ff1d 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1498,7 +1498,8 @@ mod tests { assert_eq!(bytes, bytes2); } - + + #[test] fn compute_list_indexes_correctly_when_list_element_is_split_across_tree_nodes() { let max = B as u64 * 2; let actor1 = ActorId::from(b"aaaa"); From 0da8ceddce2ada2943274363c567aee088a9162b Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 29 Mar 2022 20:33:38 +0100 Subject: [PATCH 196/730] Use iter() in IntoIterator --- automerge/src/op_set.rs | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 840d6617..ff35bf16 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs 
@@ -132,14 +132,7 @@ impl<'a, const B: usize> IntoIterator for &'a OpSetInternal { type IntoIter = Iter<'a, B>; fn into_iter(self) -> Self::IntoIter { - let mut objs: Vec<_> = self.trees.keys().collect(); - objs.sort_by(|a, b| self.m.lamport_cmp(a.0, b.0)); - Iter { - inner: self, - index: 0, - objs, - sub_index: 0, - } + self.iter() } } From 0af471a1a1406a9f1ab76761f3f6db169fecaf8b Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 29 Mar 2022 20:45:20 +0100 Subject: [PATCH 197/730] Document object_type function --- automerge/src/automerge.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index b1b9ff1d..e16b2ba2 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -241,6 +241,7 @@ impl Automerge { } } + /// Get the type of this object, if it is an object. pub fn object_type>(&self, obj: O) -> Option { let obj = self.exid_to_obj(obj.as_ref()).ok()?; self.ops.object_type(&obj) From 3c294d8fca5d1b445756e0f53aa43e53e8e2ee1a Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 29 Mar 2022 21:05:03 +0100 Subject: [PATCH 198/730] Document some fields on structs --- automerge/src/automerge.rs | 9 +++++++++ automerge/src/op_set.rs | 3 +++ automerge/src/query.rs | 1 + 3 files changed, 13 insertions(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index e16b2ba2..9f2aa4a6 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -24,14 +24,23 @@ pub(crate) enum Actor { /// An automerge document. #[derive(Debug, Clone)] pub struct Automerge { + /// The list of unapplied changes that are not causally ready. pub(crate) queue: Vec, + /// The history of changes that form this document, topologically sorted too. pub(crate) history: Vec, + /// Mapping from change hash to index into the history list. pub(crate) history_index: HashMap, + /// Mapping from actor index to list of seqs seen for them. 
pub(crate) states: HashMap>, + /// Current dependencies of this document (heads hashes). pub(crate) deps: HashSet, + /// Heads at the last save. pub(crate) saved: Vec, + /// The set of operations that form this document. pub(crate) ops: OpSet, + /// The current actor. pub(crate) actor: Actor, + /// The maximum operation counter this document has seen. pub(crate) max_op: u64, } diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index ff35bf16..ce7b165e 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -13,8 +13,11 @@ pub(crate) type OpSet = OpSetInternal; #[derive(Debug, Clone, PartialEq)] pub(crate) struct OpSetInternal { + /// The map of objects to their type and ops. trees: HashMap), FxBuildHasher>, + /// The number of operations in the opset. length: usize, + /// Metadata about the operations in this opset. pub m: OpSetMetadata, } diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 7732e908..89937f96 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -74,6 +74,7 @@ pub(crate) enum QueryResult { #[derive(Clone, Debug, PartialEq)] pub(crate) struct Index { + /// The map of visible elements to the number of operations targetting them. pub visible: HashMap, /// Set of opids found in this node and below. 
pub ops: HashSet, From a989e294f8824e8c89fc90f4d9e2b0a2ddd9ad1e Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 29 Mar 2022 21:05:09 +0100 Subject: [PATCH 199/730] Use entry api in index --- automerge/src/query.rs | 27 +++------------------------ 1 file changed, 3 insertions(+), 24 deletions(-) diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 89937f96..ed0dbc9e 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -119,14 +119,7 @@ impl Index { } None => panic!("remove overun in index"), }, - (true, false, Some(elem)) => match self.visible.get(&elem).copied() { - Some(n) => { - self.visible.insert(elem, n + 1); - } - None => { - self.visible.insert(elem, 1); - } - }, + (true, false, Some(elem)) => *self.visible.entry(elem).or_default() += 1, _ => {} } } @@ -135,14 +128,7 @@ impl Index { self.ops.insert(op.id); if op.visible() { if let Some(elem) = op.elemid() { - match self.visible.get(&elem).copied() { - Some(n) => { - self.visible.insert(elem, n + 1); - } - None => { - self.visible.insert(elem, 1); - } - } + *self.visible.entry(elem).or_default() += 1; } } } @@ -169,14 +155,7 @@ impl Index { self.ops.insert(*id); } for (elem, n) in other.visible.iter() { - match self.visible.get(elem).cloned() { - None => { - self.visible.insert(*elem, 1); - } - Some(m) => { - self.visible.insert(*elem, m + n); - } - } + *self.visible.entry(*elem).or_default() += n; } } } From 3039efca9b049aa4c4273347c83fc5694fc90df0 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 30 Mar 2022 12:18:44 +0100 Subject: [PATCH 200/730] Use pending_ops rather than direct len of operations --- automerge/src/transaction/inner.rs | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index c471c057..f96d8492 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -41,7 +41,7 @@ impl TransactionInner { self.time = t; } - let 
num_ops = self.operations.len(); + let num_ops = self.pending_ops(); let change = export_change(self, &doc.ops.m.actors, &doc.ops.m.props); let hash = change.hash; doc.update_history(change, num_ops); @@ -58,7 +58,7 @@ impl TransactionInner { doc.actor = Actor::Unused(actor); } - let num = self.operations.len(); + let num = self.pending_ops(); // remove in reverse order so sets are removed before makes etc... for (obj, op) in self.operations.iter().rev() { for pred_id in &op.pred { @@ -125,10 +125,7 @@ impl TransactionInner { } fn next_id(&mut self) -> OpId { - OpId( - self.start_op.get() + self.operations.len() as u64, - self.actor, - ) + OpId(self.start_op.get() + self.pending_ops() as u64, self.actor) } fn insert_local_op( From 26efee509da2af0c43c11233afb6a71b8820b1a0 Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Tue, 15 Mar 2022 21:40:14 +0000 Subject: [PATCH 201/730] First patch implementation from pairing session with Orion --- automerge-wasm/index.d.ts | 13 ++ automerge-wasm/src/lib.rs | 65 ++++++++ automerge-wasm/test/test.ts | 9 ++ automerge/src/autocommit.rs | 9 ++ automerge/src/automerge.rs | 83 +++++++++- automerge/src/lib.rs | 2 +- automerge/src/query.rs | 2 + automerge/src/query/seek_op_with_patch.rs | 176 ++++++++++++++++++++++ automerge/src/types.rs | 26 ++++ 9 files changed, 380 insertions(+), 5 deletions(-) create mode 100644 automerge/src/query/seek_op_with_patch.rs diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 2897955e..781b6cb1 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -71,6 +71,15 @@ export type Op = { pred: string[], } +export type Patch = { + obj: ObjID + action: 'set' | 'del' | 'insert' + key: Prop + value: Value + datatype: Datatype + conflict: boolean +} + export function create(actor?: Actor): Automerge; export function loadDoc(data: Uint8Array, actor?: Actor): Automerge; export function encodeChange(change: DecodedChange): Change; @@ -109,6 +118,10 @@ export class 
Automerge { pendingOps(): number; rollback(): number; + // patches + enablePatches(enable: boolean): void; + popPatches(): Patch[]; + // save and load to local store save(): Uint8Array; saveIncremental(): Uint8Array; diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 63b7e78a..7c9bce17 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -338,6 +338,71 @@ impl Automerge { Ok(result) } + #[wasm_bindgen(js_name = enablePatches)] + pub fn enable_patches(&mut self, enable: JsValue) -> Result<(), JsValue> { + let enable = enable + .as_bool() + .ok_or_else(|| to_js_err("expected boolean"))?; + self.0.enable_patches(enable); + Ok(()) + } + + #[wasm_bindgen(js_name = popPatches)] + pub fn pop_patches(&mut self) -> Result { + let patches = self.0.pop_patches(); + let result = Array::new(); + for p in patches { + let patch = Object::new(); + match p { + am::Patch::Del(obj, key) => { + js_set(&patch, "action", "del")?; + js_set(&patch, "obj", obj.to_string())?; + js_set(&patch, "key", key)?; + } + + am::Patch::Set(am::PatchSet { + obj, + key, + value, + conflict, + }) => { + js_set(&patch, "action", "set")?; + js_set(&patch, "obj", obj.to_string())?; + js_set(&patch, "key", key)?; + match value { + (Value::Object(obj_type), obj_id) => { + js_set(&patch, "datatype", obj_type.to_string())?; + js_set(&patch, "value", obj_id.to_string())?; + } + (Value::Scalar(value), _) => { + js_set(&patch, "datatype", datatype(&value))?; + js_set(&patch, "value", ScalarValue(value))?; + } + }; + js_set(&patch, "conflict", conflict)?; + } + + am::Patch::Insert(obj, index, value) => { + js_set(&patch, "action", "insert")?; + js_set(&patch, "obj", obj.to_string())?; + js_set(&patch, "index", index)?; + match value { + (Value::Object(obj_type), obj_id) => { + js_set(&patch, "datatype", obj_type.to_string())?; + js_set(&patch, "value", obj_id.to_string())?; + } + (Value::Scalar(value), _) => { + js_set(&patch, "datatype", datatype(&value))?; + 
js_set(&patch, "value", ScalarValue(value))?; + } + }; + } + } + result.push(&patch); + } + Ok(result) + } + pub fn length(&mut self, obj: JsValue, heads: Option) -> Result { let obj = self.import(obj)?; if let Some(heads) = get_heads(heads) { diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 4c9b08be..7cf88e77 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -29,6 +29,15 @@ function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncSta describe('Automerge', () => { describe('basics', () => { + it.only('patch generation', () => { + let doc1 = create("aaaa"), doc2 = create("bbbb") + doc1.set("_root", "hello", "world") + doc2.enablePatches(true) + doc2.applyChanges(doc1.getChanges([])) + let patches = doc2.popPatches() + assert.deepEqual(patches, []) + }) + it('should init clone and free', () => { let doc1 = create() let doc2 = doc1.clone() diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 75b5dbe4..71d2e0aa 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -4,6 +4,7 @@ use crate::{ change::export_change, transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, }; +use crate::types::Patch; use crate::{sync, Keys, KeysAt, ObjType, ScalarValue}; /// An automerge document that automatically manages transactions. 
@@ -50,6 +51,14 @@ impl AutoCommit { self.doc.get_actor() } + pub fn enable_patches(&mut self, enable: bool) { + self.doc.enable_patches(enable) + } + + pub fn pop_patches(&mut self) -> Vec { + self.doc.pop_patches() + } + fn ensure_transaction_open(&mut self) { if self.transaction.is_none() { let actor = self.doc.get_actor_index(); diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 8316f088..fbec8660 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1,12 +1,13 @@ use std::collections::{HashMap, HashSet, VecDeque}; +use std::cmp::Ordering; use crate::change::encode_document; use crate::exid::ExId; use crate::keys::Keys; use crate::op_set::OpSet; use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner}; use crate::types::{ - ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, + ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, PatchSet, ScalarValue, Value, }; use crate::KeysAt; @@ -32,6 +33,7 @@ pub struct Automerge { pub(crate) ops: OpSet, pub(crate) actor: Actor, pub(crate) max_op: u64, + pub(crate) patches: Option>, } impl Automerge { @@ -47,6 +49,7 @@ impl Automerge { saved: Default::default(), actor: Actor::Unused(ActorId::random()), max_op: 0, + patches: None, } } @@ -85,6 +88,23 @@ impl Automerge { } } + pub fn enable_patches(&mut self, enable: bool) { + match (enable, &self.patches) { + (true, None) => self.patches = Some(vec![]), + (false, Some(_)) => self.patches = None, + _ => (), + } + } + + pub fn pop_patches(&mut self) -> Vec { + if let Some(patches) = self.patches.take() { + self.patches = Some(Vec::new()); + patches + } else { + Vec::new() + } + } + /// Start a transaction. 
pub fn transaction(&mut self) -> Transaction { let actor = self.get_actor_index(); @@ -177,6 +197,56 @@ impl Automerge { op } + fn insert_op_with_patch(&mut self, obj: &ObjId, op: Op) -> Op { + let q = self.ops.search(obj, query::SeekOpWithPatch::new(&op)); + + for i in q.succ { + self.ops.replace(obj, i, |old_op| old_op.add_succ(&op)); + } + + if !op.is_del() { + self.ops.insert(q.pos, obj, op.clone()); + } + + let obj = self.id_to_exid(obj.0); + let key = match op.key { + Key::Map(index) => self.ops.m.props[index].clone().into(), + Key::Seq(_) => q.seen.into(), + }; + + let patch = if op.insert { + let value = (op.value(), self.id_to_exid(op.id)); + Patch::Insert(obj, q.seen, value) + } else if op.is_del() { + if let Some(winner) = &q.values.last() { + let value = (winner.value(), self.id_to_exid(winner.id)); + let conflict = q.values.len() > 1; + Patch::Set(PatchSet {obj, key, value, conflict}) + } else { + Patch::Del(obj, key) + } + } else { + let winner = if let Some(last_value) = q.values.last() { + if self.ops.m.lamport_cmp(op.id, last_value.id) == Ordering::Greater { + &op + } else { + last_value + } + } else { + &op + }; + let value = (winner.value(), self.id_to_exid(winner.id)); + let conflict = !q.values.is_empty(); + Patch::Set(PatchSet {obj, key, value, conflict}) + }; + + if let Some(patches) = &mut self.patches { + patches.push(patch); + } + + op + } + // KeysAt::() // LenAt::() // PropAt::() @@ -445,12 +515,17 @@ impl Automerge { Ok(()) } - /// Apply a single change to this document. 
fn apply_change(&mut self, change: Change) { let ops = self.import_ops(&change); self.update_history(change, ops.len()); - for (obj, op) in ops { - self.insert_op(&obj, op); + if self.patches.is_some() { + for (obj, op) in ops { + self.insert_op_with_patch(&obj, op); + } + } else { + for (obj, op) in ops { + self.insert_op(&obj, op); + } } } diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index a98ef14e..a47618c3 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -55,7 +55,7 @@ pub use exid::ExId as ObjId; pub use keys::Keys; pub use keys_at::KeysAt; pub use legacy::Change as ExpandedChange; -pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; +pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop, Patch, PatchSet}; pub use value::{ScalarValue, Value}; pub const ROOT: ObjId = ObjId::Root; diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 4d6c47c9..d929827e 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -18,6 +18,7 @@ mod opid; mod prop; mod prop_at; mod seek_op; +mod seek_op_with_patch; pub(crate) use insert::InsertNth; pub(crate) use keys::Keys; @@ -32,6 +33,7 @@ pub(crate) use opid::OpIdSearch; pub(crate) use prop::Prop; pub(crate) use prop_at::PropAt; pub(crate) use seek_op::SeekOp; +pub(crate) use seek_op_with_patch::SeekOpWithPatch; #[derive(Debug, Clone, PartialEq)] pub(crate) struct CounterData { diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs new file mode 100644 index 00000000..60420923 --- /dev/null +++ b/automerge/src/query/seek_op_with_patch.rs @@ -0,0 +1,176 @@ +use crate::op_tree::{OpSetMetadata, OpTreeNode}; +use crate::query::{binary_search_by, QueryResult, TreeQuery}; +use crate::types::{ElemId, Key, Op, HEAD}; +use std::cmp::Ordering; +use std::fmt::Debug; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct SeekOpWithPatch { + op: Op, + pub pos: usize, + pub succ: Vec, + found: bool, + pub seen: usize, + last_seen: 
Option, + pub values: Vec, +} + +impl SeekOpWithPatch { + pub fn new(op: &Op) -> Self { + SeekOpWithPatch { + op: op.clone(), + succ: vec![], + pos: 0, + found: false, + seen: 0, + last_seen: None, + values: vec![], + } + } + + fn lesser_insert(&self, op: &Op, m: &OpSetMetadata) -> bool { + op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less + } + + fn greater_opid(&self, op: &Op, m: &OpSetMetadata) -> bool { + m.lamport_cmp(op.id, self.op.id) == Ordering::Greater + } + + fn is_target_insert(&self, op: &Op) -> bool { + if !op.insert { + return false; + } + op.elemid() == self.op.elemid() + } + + fn count_visible(&mut self, e: &Op) { + if e.elemid() == self.op.elemid() { + return; + } + if e.insert { + self.last_seen = None + } + if e.visible() && self.last_seen.is_none() { + self.seen += 1; + self.last_seen = e.elemid() + } + } +} + +impl TreeQuery for SeekOpWithPatch { + fn query_node_with_metadata( + &mut self, + child: &OpTreeNode, + m: &OpSetMetadata, + ) -> QueryResult { + if self.found { + return QueryResult::Descend; + } + match self.op.key { + Key::Seq(e) if e == HEAD => { + while self.pos < child.len() { + let op = child.get(self.pos).unwrap(); + self.count_visible(&op); + if self.op.overwrites(op) { + self.succ.push(self.pos); + } + if op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less { + break; + } + self.pos += 1; + } + QueryResult::Finish + } + + Key::Seq(e) => { + if self.found || child.index.ops.contains(&e.0) { + QueryResult::Descend + } else { + self.pos += child.len(); + + let mut num_vis = child.index.len; + if num_vis > 0 { + // num vis is the number of keys in the index + // minus one if we're counting last_seen + // let mut num_vis = s.keys().count(); + if child.index.has(&self.last_seen) { + num_vis -= 1; + } + self.seen += num_vis; + self.last_seen = child.last().elemid(); + } + QueryResult::Next + } + } + + Key::Map(_) => { + self.pos = binary_search_by(child, |op| m.key_cmp(&op.key, &self.op.key)); + while 
self.pos < child.len() { + let op = child.get(self.pos).unwrap(); + if op.key != self.op.key { + break; + } + if self.op.overwrites(op) { + self.succ.push(self.pos); + } else if op.visible() { + self.values.push(op.clone()); + } + if m.lamport_cmp(op.id, self.op.id) == Ordering::Greater { + break; + } + self.pos += 1; + } + + let mut later_pos = self.pos; + while later_pos < child.len() { + let op = child.get(later_pos).unwrap(); + if op.key != self.op.key { + break; + } + if op.visible() { + self.values.push(op.clone()); + } + later_pos += 1; + } + QueryResult::Finish + } + } + } + + fn query_element_with_metadata(&mut self, e: &Op, m: &OpSetMetadata) -> QueryResult { + self.count_visible(e); + + if !self.found { + if self.is_target_insert(e) { + self.found = true; + if self.op.overwrites(e) { + self.succ.push(self.pos); + } + } + self.pos += 1; + QueryResult::Next + } else { + if self.op.overwrites(e) { + self.succ.push(self.pos); + } + if self.op.insert { + if self.lesser_insert(e, m) { + QueryResult::Finish + } else { + self.pos += 1; + QueryResult::Next + } + } else if e.insert { + QueryResult::Finish + } else if self.greater_opid(e, m) { + if e.visible() { + self.values.push(e.clone()); + } + QueryResult::Next + } else { + self.pos += 1; + QueryResult::Next + } + } + } +} diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 1f52731d..64b92a7f 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -1,4 +1,5 @@ use crate::error; +use crate::exid::ExId; use crate::legacy as amp; use serde::{Deserialize, Serialize}; use std::cmp::Eq; @@ -501,3 +502,28 @@ impl TryFrom<&[u8]> for ChangeHash { } } } + +#[derive(Debug, Clone, PartialEq)] +pub struct PatchSet { + pub obj: ExId, + pub key: Prop, + pub value: (Value, ExId), + pub conflict: bool, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum Patch { + Del(ExId, Prop), + Set(PatchSet), + Insert(ExId, usize, (Value, ExId)), +} + +#[cfg(feature = "wasm")] +impl From for wasm_bindgen::JsValue 
{ + fn from(prop: Prop) -> Self { + match prop { + Prop::Map(key) => key.into(), + Prop::Seq(index) => index.into(), + } + } +} From 94ff10f6901a9cb2b429300ec24d69ed60a9e204 Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Wed, 16 Mar 2022 13:37:02 +0000 Subject: [PATCH 202/730] Rename and reformat a bit --- automerge-wasm/index.d.ts | 2 +- automerge-wasm/src/lib.rs | 17 +++++++++-------- automerge/src/autocommit.rs | 2 +- automerge/src/automerge.rs | 22 ++++++++++++++++------ automerge/src/lib.rs | 2 +- automerge/src/types.rs | 11 ++++++++--- 6 files changed, 36 insertions(+), 20 deletions(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 781b6cb1..8081c822 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -73,7 +73,7 @@ export type Op = { export type Patch = { obj: ObjID - action: 'set' | 'del' | 'insert' + action: 'assign' | 'insert' | 'delete' key: Prop value: Value datatype: Datatype diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 7c9bce17..c007eda7 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -354,19 +354,13 @@ impl Automerge { for p in patches { let patch = Object::new(); match p { - am::Patch::Del(obj, key) => { - js_set(&patch, "action", "del")?; - js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "key", key)?; - } - - am::Patch::Set(am::PatchSet { + am::Patch::Assign(am::AssignPatch { obj, key, value, conflict, }) => { - js_set(&patch, "action", "set")?; + js_set(&patch, "action", "assign")?; js_set(&patch, "obj", obj.to_string())?; js_set(&patch, "key", key)?; match value { @@ -397,6 +391,13 @@ impl Automerge { } }; } + + am::Patch::Delete(obj, key) => { + js_set(&patch, "action", "delete")?; + js_set(&patch, "obj", obj.to_string())?; + js_set(&patch, "key", key)?; + } + } result.push(&patch); } diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 71d2e0aa..88204a1c 100644 --- a/automerge/src/autocommit.rs +++ 
b/automerge/src/autocommit.rs @@ -1,10 +1,10 @@ use crate::exid::ExId; use crate::transaction::{CommitOptions, Transactable}; +use crate::types::Patch; use crate::{ change::export_change, transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, }; -use crate::types::Patch; use crate::{sync, Keys, KeysAt, ObjType, ScalarValue}; /// An automerge document that automatically manages transactions. diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index fbec8660..3c18d4ef 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1,19 +1,19 @@ use std::collections::{HashMap, HashSet, VecDeque}; -use std::cmp::Ordering; use crate::change::encode_document; use crate::exid::ExId; use crate::keys::Keys; use crate::op_set::OpSet; use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner}; use crate::types::{ - ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, PatchSet, - ScalarValue, Value, + ActorId, AssignPatch, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, + OpType, Patch, ScalarValue, Value, }; use crate::KeysAt; use crate::{legacy, query, types, ObjType}; use crate::{AutomergeError, Change, Prop}; use serde::Serialize; +use std::cmp::Ordering; #[derive(Debug, Clone)] pub(crate) enum Actor { @@ -221,9 +221,14 @@ impl Automerge { if let Some(winner) = &q.values.last() { let value = (winner.value(), self.id_to_exid(winner.id)); let conflict = q.values.len() > 1; - Patch::Set(PatchSet {obj, key, value, conflict}) + Patch::Assign(AssignPatch { + obj, + key, + value, + conflict, + }) } else { - Patch::Del(obj, key) + Patch::Delete(obj, key) } } else { let winner = if let Some(last_value) = q.values.last() { @@ -237,7 +242,12 @@ impl Automerge { }; let value = (winner.value(), self.id_to_exid(winner.id)); let conflict = !q.values.is_empty(); - Patch::Set(PatchSet {obj, key, value, conflict}) + 
Patch::Assign(AssignPatch { + obj, + key, + value, + conflict, + }) }; if let Some(patches) = &mut self.patches { diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index a47618c3..6bcc984b 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -55,7 +55,7 @@ pub use exid::ExId as ObjId; pub use keys::Keys; pub use keys_at::KeysAt; pub use legacy::Change as ExpandedChange; -pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop, Patch, PatchSet}; +pub use types::{ActorId, AssignPatch, ChangeHash, ObjType, OpType, Patch, Prop}; pub use value::{ScalarValue, Value}; pub const ROOT: ObjId = ObjId::Root; diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 64b92a7f..dd323346 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -503,19 +503,24 @@ impl TryFrom<&[u8]> for ChangeHash { } } +/// Properties of `Patch::Assign` #[derive(Debug, Clone, PartialEq)] -pub struct PatchSet { +pub struct AssignPatch { pub obj: ExId, pub key: Prop, pub value: (Value, ExId), pub conflict: bool, } +/// A notification to the application that something has changed in a document. 
#[derive(Debug, Clone, PartialEq)] pub enum Patch { - Del(ExId, Prop), - Set(PatchSet), + /// Associating a new value with a key in a map, or an existing list element + Assign(AssignPatch), + /// Inserting a new element into a list/text Insert(ExId, usize, (Value, ExId)), + /// Deleting an element from a list/text + Delete(ExId, Prop), } #[cfg(feature = "wasm")] From 145969152a9f090a2a5a0b4b5a11431620fdc33e Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Thu, 17 Mar 2022 15:29:57 +0000 Subject: [PATCH 203/730] Fix conversion of OpId to ExId when referring to root object --- automerge/src/automerge.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 3c18d4ef..98253c17 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -344,7 +344,11 @@ impl Automerge { } pub(crate) fn id_to_exid(&self, id: OpId) -> ExId { - ExId::Id(id.0, self.ops.m.actors.cache[id.1].clone(), id.1) + if id == types::ROOT { + ExId::Root + } else { + ExId::Id(id.0, self.ops.m.actors.cache[id.1].clone(), id.1) + } } /// Get the string represented by the given text object. 
From 7025bb654177824b9f628a0fa8b523c30620d9f8 Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Thu, 17 Mar 2022 19:59:21 +0000 Subject: [PATCH 204/730] Tests and fixes for list patches --- automerge-wasm/src/lib.rs | 2 +- automerge-wasm/test/test.ts | 87 ++++++++++++++++++++--- automerge/src/query/seek_op_with_patch.rs | 9 ++- automerge/src/types.rs | 2 +- 4 files changed, 85 insertions(+), 15 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index c007eda7..d6f85c89 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -379,7 +379,7 @@ impl Automerge { am::Patch::Insert(obj, index, value) => { js_set(&patch, "action", "insert")?; js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "index", index)?; + js_set(&patch, "key", index as f64)?; match value { (Value::Object(obj_type), obj_id) => { js_set(&patch, "datatype", obj_type.to_string())?; diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 7cf88e77..3937cfd4 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -29,15 +29,6 @@ function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncSta describe('Automerge', () => { describe('basics', () => { - it.only('patch generation', () => { - let doc1 = create("aaaa"), doc2 = create("bbbb") - doc1.set("_root", "hello", "world") - doc2.enablePatches(true) - doc2.applyChanges(doc1.getChanges([])) - let patches = doc2.popPatches() - assert.deepEqual(patches, []) - }) - it('should init clone and free', () => { let doc1 = create() let doc2 = doc1.clone() @@ -469,8 +460,84 @@ describe('Automerge', () => { assert.deepEqual(C.value('_root', 'text'), ['text', '1@aabbcc']) assert.deepEqual(C.text(At), 'hell! 
world') }) - }) + + describe('patch generation', () => { + it('should include root object key updates', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb') + doc1.set('_root', 'hello', 'world') + doc2.enablePatches(true) + doc2.loadIncremental(doc1.saveIncremental()) + assert.deepEqual(doc2.popPatches(), [ + {action: 'assign', obj: '_root', key: 'hello', value: 'world', datatype: 'str', conflict: false} + ]) + doc1.free() + doc2.free() + }) + + it('should include nested object creation', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb') + doc1.set_object('_root', 'birds', {friday: {robins: 3}}) + doc2.enablePatches(true) + doc2.loadIncremental(doc1.saveIncremental()) + assert.deepEqual(doc2.popPatches(), [ + {action: 'assign', obj: '_root', key: 'birds', value: '1@aaaa', datatype: 'map', conflict: false}, + {action: 'assign', obj: '1@aaaa', key: 'friday', value: '2@aaaa', datatype: 'map', conflict: false}, + {action: 'assign', obj: '2@aaaa', key: 'robins', value: 3, datatype: 'int', conflict: false} + ]) + doc1.free() + doc2.free() + }) + + it('should include list element insertion', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb') + doc1.set_object('_root', 'birds', ['Goldfinch', 'Chaffinch']) + doc2.enablePatches(true) + doc2.loadIncremental(doc1.saveIncremental()) + assert.deepEqual(doc2.popPatches(), [ + {action: 'assign', obj: '_root', key: 'birds', value: '1@aaaa', datatype: 'list', conflict: false}, + {action: 'insert', obj: '1@aaaa', key: 0, value: 'Goldfinch', datatype: 'str'}, + {action: 'insert', obj: '1@aaaa', key: 1, value: 'Chaffinch', datatype: 'str'} + ]) + doc1.free() + doc2.free() + }) + + it('should insert nested maps into a list', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb') + doc1.set_object('_root', 'birds', []) + doc2.loadIncremental(doc1.saveIncremental()) + doc1.insert_object('1@aaaa', 0, {species: 'Goldfinch', count: 3}) + doc2.enablePatches(true) + doc2.loadIncremental(doc1.saveIncremental()) + 
assert.deepEqual(doc2.popPatches(), [ + {action: 'insert', obj: '1@aaaa', key: 0, value: '2@aaaa', datatype: 'map'}, + {action: 'assign', obj: '2@aaaa', key: 'species', value: 'Goldfinch', datatype: 'str', conflict: false}, + {action: 'assign', obj: '2@aaaa', key: 'count', value: 3, datatype: 'int', conflict: false} + ]) + doc1.free() + doc2.free() + }) + + it('should calculate list indexes based on visible elements', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb') + doc1.set_object('_root', 'birds', ['Goldfinch', 'Chaffinch']) + doc2.loadIncremental(doc1.saveIncremental()) + doc1.del('1@aaaa', 0) + doc1.insert('1@aaaa', 1, 'Greenfinch') + doc2.enablePatches(true) + doc2.loadIncremental(doc1.saveIncremental()) + assert.deepEqual(doc1.value('1@aaaa', 0), ['str', 'Chaffinch']) + assert.deepEqual(doc1.value('1@aaaa', 1), ['str', 'Greenfinch']) + assert.deepEqual(doc2.popPatches(), [ + {action: 'delete', obj: '1@aaaa', key: 0}, + {action: 'insert', obj: '1@aaaa', key: 1, value: 'Greenfinch', datatype: 'str'} + ]) + doc1.free() + doc2.free() + }) + }) + describe('sync', () => { it('should send a sync message implying no local data', () => { let doc = create() diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index 60420923..72d95f4f 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -138,9 +138,7 @@ impl TreeQuery for SeekOpWithPatch { } fn query_element_with_metadata(&mut self, e: &Op, m: &OpSetMetadata) -> QueryResult { - self.count_visible(e); - - if !self.found { + let result = if !self.found { if self.is_target_insert(e) { self.found = true; if self.op.overwrites(e) { @@ -171,6 +169,11 @@ impl TreeQuery for SeekOpWithPatch { self.pos += 1; QueryResult::Next } + }; + + if result == QueryResult::Next { + self.count_visible(e); } + result } } diff --git a/automerge/src/types.rs b/automerge/src/types.rs index dd323346..bfe646c3 100644 --- 
a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -528,7 +528,7 @@ impl From for wasm_bindgen::JsValue { fn from(prop: Prop) -> Self { match prop { Prop::Map(key) => key.into(), - Prop::Seq(index) => index.into(), + Prop::Seq(index) => (index as f64).into(), } } } From c9089793722c496f9b20a34cefca853db30d23f0 Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Thu, 17 Mar 2022 21:14:44 +0000 Subject: [PATCH 205/730] Fix search for the correct insertion position --- automerge-wasm/test/test.ts | 21 +++++++++++++++++++++ automerge/src/query/seek_op_with_patch.rs | 5 +---- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 3937cfd4..3781d59c 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -536,6 +536,27 @@ describe('Automerge', () => { doc1.free() doc2.free() }) + + it('should handle concurrent insertions at the head of a list', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + doc1.set_object('_root', 'values', []) + let change1 = doc1.saveIncremental() + doc2.loadIncremental(change1) + doc3.loadIncremental(change1) + doc4.loadIncremental(change1) + doc1.insert('1@aaaa', 0, 'c') + doc1.insert('1@aaaa', 1, 'd') + doc2.insert('1@aaaa', 0, 'a') + doc2.insert('1@aaaa', 1, 'b') + let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() + doc3.loadIncremental(change2); doc3.loadIncremental(change3) + doc4.loadIncremental(change3); doc4.loadIncremental(change2) + assert.deepEqual([0, 1, 2, 3].map(i => (doc3.value('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) + assert.deepEqual([0, 1, 2, 3].map(i => (doc4.value('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) + doc1.free(); doc2.free(); doc3.free(); doc4.free() + }) + + // TODO: concurrent insertions at the same position }) describe('sync', () => { diff --git a/automerge/src/query/seek_op_with_patch.rs 
b/automerge/src/query/seek_op_with_patch.rs index 72d95f4f..3ff944e9 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -37,10 +37,7 @@ impl SeekOpWithPatch { } fn is_target_insert(&self, op: &Op) -> bool { - if !op.insert { - return false; - } - op.elemid() == self.op.elemid() + return op.insert && op.elemid() == self.op.key.elemid() } fn count_visible(&mut self, e: &Op) { From 289dd95196cde134b35f79e4cb1816f7bfecbd2f Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Thu, 17 Mar 2022 21:23:05 +0000 Subject: [PATCH 206/730] Fix index calculation for insertions at the head --- automerge-wasm/test/test.ts | 14 ++++++++++++++ automerge/src/query/seek_op_with_patch.rs | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 3781d59c..84fb52c1 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -549,10 +549,24 @@ describe('Automerge', () => { doc2.insert('1@aaaa', 0, 'a') doc2.insert('1@aaaa', 1, 'b') let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() + doc3.enablePatches(true) + doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) doc4.loadIncremental(change3); doc4.loadIncremental(change2) assert.deepEqual([0, 1, 2, 3].map(i => (doc3.value('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) assert.deepEqual([0, 1, 2, 3].map(i => (doc4.value('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) + assert.deepEqual(doc3.popPatches(), [ + {action: 'insert', obj: '1@aaaa', key: 0, value: 'c', datatype: 'str'}, + {action: 'insert', obj: '1@aaaa', key: 1, value: 'd', datatype: 'str'}, + {action: 'insert', obj: '1@aaaa', key: 0, value: 'a', datatype: 'str'}, + {action: 'insert', obj: '1@aaaa', key: 1, value: 'b', datatype: 'str'} + ]) + assert.deepEqual(doc4.popPatches(), [ + {action: 'insert', obj: '1@aaaa', key: 0, value: 'a', datatype: 'str'}, + {action: 'insert', obj: 
'1@aaaa', key: 1, value: 'b', datatype: 'str'}, + {action: 'insert', obj: '1@aaaa', key: 2, value: 'c', datatype: 'str'}, + {action: 'insert', obj: '1@aaaa', key: 3, value: 'd', datatype: 'str'} + ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index 3ff944e9..4182a6ee 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -67,13 +67,13 @@ impl TreeQuery for SeekOpWithPatch { Key::Seq(e) if e == HEAD => { while self.pos < child.len() { let op = child.get(self.pos).unwrap(); - self.count_visible(&op); if self.op.overwrites(op) { self.succ.push(self.pos); } if op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less { break; } + self.count_visible(&op); self.pos += 1; } QueryResult::Finish From cf508a94a9afa580bb717c6edb4130f2b147b87b Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Fri, 18 Mar 2022 12:25:36 +0000 Subject: [PATCH 207/730] Slight simplification --- automerge/src/query/seek_op.rs | 9 +-------- automerge/src/query/seek_op_with_patch.rs | 2 +- 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs index a2f3b750..a8770f5e 100644 --- a/automerge/src/query/seek_op.rs +++ b/automerge/src/query/seek_op.rs @@ -31,14 +31,7 @@ impl SeekOp { } fn is_target_insert(&self, op: &Op) -> bool { - if !op.insert { - return false; - } - if self.op.insert { - op.elemid() == self.op.key.elemid() - } else { - op.elemid() == self.op.elemid() - } + op.insert && op.elemid() == self.op.key.elemid() } } diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index 4182a6ee..0f3c74e9 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -37,7 +37,7 @@ impl SeekOpWithPatch { } fn is_target_insert(&self, op: &Op) -> bool { - return op.insert && 
op.elemid() == self.op.key.elemid() + op.insert && op.elemid() == self.op.key.elemid() } fn count_visible(&mut self, e: &Op) { From fa0a8953dc451ec0e2f8bb82bbf55db3e6572166 Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Fri, 18 Mar 2022 14:32:30 +0000 Subject: [PATCH 208/730] More tests and comments --- automerge-wasm/test/test.ts | 87 ++++++++++++++++++++++- automerge/src/query/seek_op_with_patch.rs | 49 +++++++++++++ 2 files changed, 135 insertions(+), 1 deletion(-) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 84fb52c1..93c55c93 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -570,7 +570,92 @@ describe('Automerge', () => { doc1.free(); doc2.free(); doc3.free(); doc4.free() }) - // TODO: concurrent insertions at the same position + it('should handle conflicts on root object keys', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + doc1.set('_root', 'bird', 'Greenfinch') + doc2.set('_root', 'bird', 'Goldfinch') + let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + doc3.enablePatches(true) + doc4.enablePatches(true) + doc3.loadIncremental(change1); doc3.loadIncremental(change2) + doc4.loadIncremental(change2); doc4.loadIncremental(change1) + assert.deepEqual(doc3.value('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc3.values('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) + assert.deepEqual(doc4.value('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc4.values('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) + assert.deepEqual(doc3.popPatches(), [ + {action: 'assign', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', conflict: false}, + {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} + ]) + assert.deepEqual(doc4.popPatches(), [ + {action: 'assign', obj: '_root', 
key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false}, + {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} + ]) + doc1.free(); doc2.free(); doc3.free(); doc4.free() + }) + + it('should handle three-way conflicts', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + doc1.set('_root', 'bird', 'Greenfinch') + doc2.set('_root', 'bird', 'Chaffinch') + doc3.set('_root', 'bird', 'Goldfinch') + let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental(), change3 = doc3.saveIncremental() + doc1.enablePatches(true) + doc2.enablePatches(true) + doc3.enablePatches(true) + doc1.loadIncremental(change2); doc1.loadIncremental(change3) + doc2.loadIncremental(change3); doc2.loadIncremental(change1) + doc3.loadIncremental(change1); doc3.loadIncremental(change2) + assert.deepEqual(doc1.value('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc1.values('_root', 'bird'), [ + ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] + ]) + assert.deepEqual(doc2.value('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc2.values('_root', 'bird'), [ + ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] + ]) + assert.deepEqual(doc3.value('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc3.values('_root', 'bird'), [ + ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] + ]) + assert.deepEqual(doc1.popPatches(), [ + {action: 'assign', obj: '_root', key: 'bird', value: 'Chaffinch', datatype: 'str', conflict: true}, + {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} + ]) + assert.deepEqual(doc2.popPatches(), [ + {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true}, + {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 
'str', conflict: true} + ]) + assert.deepEqual(doc3.popPatches(), [ + {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true}, + {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} + ]) + doc1.free(); doc2.free(); doc3.free() + }) + + it('should handle a concurrent map key overwrite and delete', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb') + doc1.set('_root', 'bird', 'Greenfinch') + doc2.loadIncremental(doc1.saveIncremental()) + doc1.set('_root', 'bird', 'Goldfinch') + doc2.del('_root', 'bird') + let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + doc1.enablePatches(true) + doc2.enablePatches(true) + doc1.loadIncremental(change2) + doc2.loadIncremental(change1) + assert.deepEqual(doc1.value('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc1.values('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) + assert.deepEqual(doc2.value('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc2.values('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) + assert.deepEqual(doc1.popPatches(), [ + {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} + ]) + assert.deepEqual(doc2.popPatches(), [ + {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} + ]) + doc1.free(); doc2.free() + }) }) describe('sync', () => { diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index 0f3c74e9..e3df3fb2 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -40,6 +40,10 @@ impl SeekOpWithPatch { op.insert && op.elemid() == self.op.key.elemid() } + /// Keeps track of the number of visible list elements we have seen. 
Increments `self.seen` if + /// operation `e` associates a visible value with a list element, and if we have not already + /// counted that list element (this ensures that if a list element has several values, i.e. + /// a conflict, then it is still only counted once). fn count_visible(&mut self, e: &Op) { if e.elemid() == self.op.elemid() { return; @@ -64,6 +68,9 @@ impl TreeQuery for SeekOpWithPatch { return QueryResult::Descend; } match self.op.key { + // Special case for insertion at the head of the list (`e == HEAD` is only possible for + // an insertion operation). Skip over any list elements whose elemId is greater than + // the opId of the operation being inserted. Key::Seq(e) if e == HEAD => { while self.pos < child.len() { let op = child.get(self.pos).unwrap(); @@ -79,12 +86,19 @@ impl TreeQuery for SeekOpWithPatch { QueryResult::Finish } + // Updating a list: search for the tree node that contains the new operation's + // reference element (i.e. the element we're updating or inserting after) Key::Seq(e) => { if self.found || child.index.ops.contains(&e.0) { QueryResult::Descend } else { self.pos += child.len(); + // When we skip over a subtree, we need to count the number of visible list + // elements we're skipping over. Each node stores the number of visible + // elements it contains. However, it could happen that a visible element is + // split across two tree nodes. To avoid double-counting in this situation, we + // subtract one if the last visible element also appears in this tree node. let mut num_vis = child.index.len; if num_vis > 0 { // num vis is the number of keys in the index @@ -100,30 +114,45 @@ impl TreeQuery for SeekOpWithPatch { } } + // Updating a map: operations appear in sorted order by key Key::Map(_) => { + // Search for the place where we need to insert the new operation. 
First find the + // first op with a key >= the key we're updating self.pos = binary_search_by(child, |op| m.key_cmp(&op.key, &self.op.key)); while self.pos < child.len() { + // Iterate over any existing operations for the same key; stop when we reach an + // operation with a different key let op = child.get(self.pos).unwrap(); if op.key != self.op.key { break; } + + // Keep track of any ops we're overwriting and any conflicts on this key if self.op.overwrites(op) { self.succ.push(self.pos); } else if op.visible() { self.values.push(op.clone()); } + + // Ops for the same key should be in ascending order of opId, so we break when + // we reach an op with an opId greater than that of the new operation if m.lamport_cmp(op.id, self.op.id) == Ordering::Greater { break; } self.pos += 1; } + // For the purpose of reporting conflicts, we also need to take into account any + // ops for the same key that appear after the new operation let mut later_pos = self.pos; while later_pos < child.len() { let op = child.get(later_pos).unwrap(); if op.key != self.op.key { break; } + // No need to check if `self.op.overwrites(op)` because an operation's `preds` + // must always have lower Lamport timestamps than that op itself, and the ops + // here all have greater opIds than the new op if op.visible() { self.values.push(op.clone()); } @@ -134,8 +163,12 @@ impl TreeQuery for SeekOpWithPatch { } } + // Only called when operating on a sequence (list/text) object, since updates of a map are + // handled in `query_node_with_metadata`. fn query_element_with_metadata(&mut self, e: &Op, m: &OpSetMetadata) -> QueryResult { let result = if !self.found { + // First search for the referenced list element (i.e. 
the element we're updating, or + // after which we're inserting) if self.is_target_insert(e) { self.found = true; if self.op.overwrites(e) { @@ -144,20 +177,34 @@ impl TreeQuery for SeekOpWithPatch { } self.pos += 1; QueryResult::Next + } else { + // Once we've found the reference element, keep track of any ops that we're overwriting if self.op.overwrites(e) { self.succ.push(self.pos); } + + // If the new op is an insertion, skip over any existing list elements whose elemId is + // greater than the ID of the new insertion if self.op.insert { if self.lesser_insert(e, m) { + // Insert before the first existing list element whose elemId is less than that + // of the new insertion QueryResult::Finish } else { self.pos += 1; QueryResult::Next } + } else if e.insert { + // If the new op is an update of an existing list element, the first insertion op + // we encounter after the reference element indicates the end of the reference elem QueryResult::Finish + } else if self.greater_opid(e, m) { + // When updating an existing list element, we need to put the ops for the same list + // element into ascending order, so we skip over any ops whose ID is less than that + // of the new operation. Also keep track of any conflicts on this list element. 
if e.visible() { self.values.push(e.clone()); } @@ -168,6 +215,8 @@ impl TreeQuery for SeekOpWithPatch { } }; + // The patch needs to know the list index of each operation, so we count the number of + // visible list elements up to the insertion position of the new operation if result == QueryResult::Next { self.count_visible(e); } From ba177c3d83d19194821b06fa20e4eca9870e7529 Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Fri, 18 Mar 2022 15:20:53 +0000 Subject: [PATCH 209/730] Fix broken handling of conflicts on list elements --- automerge-wasm/test/test.ts | 29 +++++++++++++++++++++++ automerge/src/query/seek_op_with_patch.rs | 19 ++++++++------- 2 files changed, 39 insertions(+), 9 deletions(-) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 93c55c93..6a7f7b7d 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -656,6 +656,35 @@ describe('Automerge', () => { ]) doc1.free(); doc2.free() }) + + it('should handle a conflict on a list element', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + doc1.set_object('_root', 'birds', ['Thrush', 'Magpie']) + let change1 = doc1.saveIncremental() + doc2.loadIncremental(change1) + doc3.loadIncremental(change1) + doc4.loadIncremental(change1) + doc1.set('1@aaaa', 0, 'Song Thrush') + doc2.set('1@aaaa', 0, 'Redwing') + let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() + doc3.enablePatches(true) + doc4.enablePatches(true) + doc3.loadIncremental(change2); doc3.loadIncremental(change3) + doc4.loadIncremental(change3); doc4.loadIncremental(change2) + assert.deepEqual(doc3.value('1@aaaa', 0), ['str', 'Redwing']) + assert.deepEqual(doc3.values('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) + assert.deepEqual(doc4.value('1@aaaa', 0), ['str', 'Redwing']) + assert.deepEqual(doc4.values('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) + 
assert.deepEqual(doc3.popPatches(), [ + {action: 'assign', obj: '1@aaaa', key: 0, value: 'Song Thrush', datatype: 'str', conflict: false}, + {action: 'assign', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: true} + ]) + assert.deepEqual(doc4.popPatches(), [ + {action: 'assign', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: false}, + {action: 'assign', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: true} + ]) + doc1.free(); doc2.free(); doc3.free(); doc4.free() + }) }) describe('sync', () => { diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index e3df3fb2..3b4ec516 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -180,7 +180,8 @@ impl TreeQuery for SeekOpWithPatch { } else { // Once we've found the reference element, keep track of any ops that we're overwriting - if self.op.overwrites(e) { + let overwritten = self.op.overwrites(e); + if overwritten { self.succ.push(self.pos); } @@ -201,16 +202,16 @@ impl TreeQuery for SeekOpWithPatch { // we encounter after the reference element indicates the end of the reference elem QueryResult::Finish - } else if self.greater_opid(e, m) { - // When updating an existing list element, we need to put the ops for the same list - // element into ascending order, so we skip over any ops whose ID is less than that - // of the new operation. Also keep track of any conflicts on this list element. - if e.visible() { + } else { + // When updating an existing list element, keep track of any conflicts on this list + // element. We now need to put the ops for the same list element into ascending + // order, so we skip over any ops whose ID is less than that of the new operation. 
+ if !overwritten && e.visible() { self.values.push(e.clone()); } - QueryResult::Next - } else { - self.pos += 1; + if !self.greater_opid(e, m) { + self.pos += 1; + } QueryResult::Next } }; From 361db06eb589f7b876f77397f070b31782dbc6a1 Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Fri, 18 Mar 2022 15:21:31 +0000 Subject: [PATCH 210/730] Delete unnecessary code This check is not needed because the case `e == HEAD` can only happen if `self.op` is a list insertion operation, and an insertion operation always has empty `preds`, so it can never overwrite any existing list element. --- automerge/src/query/seek_op.rs | 3 --- automerge/src/query/seek_op_with_patch.rs | 3 --- 2 files changed, 6 deletions(-) diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs index a8770f5e..00c68c98 100644 --- a/automerge/src/query/seek_op.rs +++ b/automerge/src/query/seek_op.rs @@ -48,9 +48,6 @@ impl TreeQuery for SeekOp { Key::Seq(e) if e == HEAD => { while self.pos < child.len() { let op = child.get(self.pos).unwrap(); - if self.op.overwrites(op) { - self.succ.push(self.pos); - } if op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less { break; } diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index 3b4ec516..5bf38df8 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -74,9 +74,6 @@ impl TreeQuery for SeekOpWithPatch { Key::Seq(e) if e == HEAD => { while self.pos < child.len() { let op = child.get(self.pos).unwrap(); - if self.op.overwrites(op) { - self.succ.push(self.pos); - } if op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less { break; } From 975338900c7295b3adba0ec37b18dad53b086076 Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Fri, 18 Mar 2022 15:22:50 +0000 Subject: [PATCH 211/730] Document another suspected bug Testing this is harder because I need to construct a tree in which list elements are split across 
multiple tree nodes, and the number of list elements required to trigger this condition depends on the branching factor of the tree, which I don't really want to hard-code into the tests in case we change it... --- automerge/src/query/seek_op_with_patch.rs | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index 5bf38df8..f1abd762 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -98,13 +98,20 @@ impl TreeQuery for SeekOpWithPatch { // subtract one if the last visible element also appears in this tree node. let mut num_vis = child.index.len; if num_vis > 0 { - // num vis is the number of keys in the index - // minus one if we're counting last_seen - // let mut num_vis = s.keys().count(); + // FIXME: I think this is wrong: we should subtract one only if this + // subtree contains a *visible* (i.e. empty succs) operation for the list + // element with elemId `last_seen`; this will subtract one even if all + // values for this list element have been deleted in this subtree. if child.index.has(&self.last_seen) { num_vis -= 1; } self.seen += num_vis; + + // FIXME: this is also wrong: `last_seen` needs to be the elemId of the + // last *visible* list element in this subtree, but I think this returns + // the last operation's elemId regardless of whether it's visible or not. + // This will lead to incorrect counting if `last_seen` is not visible: it's + // not counted towards `num_vis`, so we shouldn't be subtracting 1. 
self.last_seen = child.last().elemid(); } QueryResult::Next From a9eddd88cccc1ae831f6de9fba2a7a9e19b2b020 Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Fri, 18 Mar 2022 16:47:13 +0000 Subject: [PATCH 212/730] Bugfix: resurrection of deleted list elements --- automerge-wasm/test/test.ts | 35 +++++++++++++++++++++++ automerge/src/automerge.rs | 17 ++++++----- automerge/src/query/seek_op_with_patch.rs | 20 ++++++++++--- automerge/src/types.rs | 4 +++ 4 files changed, 65 insertions(+), 11 deletions(-) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 6a7f7b7d..32217693 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -685,6 +685,41 @@ describe('Automerge', () => { ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) + + it('should handle a concurrent list element overwrite and delete', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + doc1.set_object('_root', 'birds', ['Parakeet', 'Magpie', 'Thrush']) + let change1 = doc1.saveIncremental() + doc2.loadIncremental(change1) + doc3.loadIncremental(change1) + doc4.loadIncremental(change1) + doc1.del('1@aaaa', 0) + doc1.set('1@aaaa', 1, 'Song Thrush') + doc2.set('1@aaaa', 0, 'Ring-necked parakeet') + doc2.set('1@aaaa', 2, 'Redwing') + let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() + doc3.enablePatches(true) + doc4.enablePatches(true) + doc3.loadIncremental(change2); doc3.loadIncremental(change3) + doc4.loadIncremental(change3); doc4.loadIncremental(change2) + assert.deepEqual(doc3.values('1@aaaa', 0), [['str', 'Ring-necked parakeet', '5@bbbb']]) + assert.deepEqual(doc3.values('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']]) + assert.deepEqual(doc4.values('1@aaaa', 0), [['str', 'Ring-necked parakeet', '5@bbbb']]) + assert.deepEqual(doc4.values('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']]) + 
assert.deepEqual(doc3.popPatches(), [ + {action: 'delete', obj: '1@aaaa', key: 0}, + {action: 'assign', obj: '1@aaaa', key: 1, value: 'Song Thrush', datatype: 'str', conflict: false}, + {action: 'insert', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str'}, + {action: 'assign', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: true} + ]) + assert.deepEqual(doc4.popPatches(), [ + {action: 'assign', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str', conflict: false}, + {action: 'assign', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: false}, + {action: 'assign', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str', conflict: false}, + {action: 'assign', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: true} + ]) + doc1.free(); doc2.free(); doc3.free(); doc4.free() + }) }) describe('sync', () => { diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 98253c17..f1d1f058 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -241,13 +241,16 @@ impl Automerge { &op }; let value = (winner.value(), self.id_to_exid(winner.id)); - let conflict = !q.values.is_empty(); - Patch::Assign(AssignPatch { - obj, - key, - value, - conflict, - }) + if op.is_list_op() && !q.had_value_before { + Patch::Insert(obj, q.seen, value) + } else { + Patch::Assign(AssignPatch { + obj, + key, + value, + conflict: !q.values.is_empty(), + }) + } }; if let Some(patches) = &mut self.patches { diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index f1abd762..ed9ed1df 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -13,6 +13,7 @@ pub(crate) struct SeekOpWithPatch { pub seen: usize, last_seen: Option, pub values: Vec, + pub had_value_before: bool, } impl SeekOpWithPatch { @@ -25,6 +26,7 @@ impl SeekOpWithPatch { seen: 0, last_seen: None, values: 
vec![], + had_value_before: false, } } @@ -178,6 +180,9 @@ impl TreeQuery for SeekOpWithPatch { if self.op.overwrites(e) { self.succ.push(self.pos); } + if e.visible() { + self.had_value_before = true; + } } self.pos += 1; QueryResult::Next @@ -208,11 +213,18 @@ impl TreeQuery for SeekOpWithPatch { } else { // When updating an existing list element, keep track of any conflicts on this list - // element. We now need to put the ops for the same list element into ascending - // order, so we skip over any ops whose ID is less than that of the new operation. - if !overwritten && e.visible() { - self.values.push(e.clone()); + // element. We also need to remember if the list element had any visible elements + // prior to applying the new operation: if not, the new operation is resurrecting + // a deleted list element, so it looks like an insertion in the patch. + if e.visible() { + self.had_value_before = true; + if !overwritten { + self.values.push(e.clone()); + } } + + // We now need to put the ops for the same list element into ascending order, so we + // skip over any ops whose ID is less than that of the new operation. 
if !self.greater_opid(e, m) { self.pos += 1; } diff --git a/automerge/src/types.rs b/automerge/src/types.rs index bfe646c3..1caea529 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -413,6 +413,10 @@ impl Op { matches!((&self.action, action), (OpType::Set(n), OpType::Set(m)) if n == m) } + pub fn is_list_op(&self) -> bool { + matches!(&self.key, Key::Seq(_)) + } + pub fn overwrites(&self, other: &Op) -> bool { self.pred.iter().any(|i| i == &other.id) } From ab4dc331ac8536c14b564599b1631be24661ea67 Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Fri, 18 Mar 2022 17:18:15 +0000 Subject: [PATCH 213/730] cargo fmt --- automerge-wasm/src/lib.rs | 1 - automerge/src/query/seek_op_with_patch.rs | 3 --- 2 files changed, 4 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index d6f85c89..5f1c97d0 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -397,7 +397,6 @@ impl Automerge { js_set(&patch, "obj", obj.to_string())?; js_set(&patch, "key", key)?; } - } result.push(&patch); } diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index ed9ed1df..8eab1e8d 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -186,7 +186,6 @@ impl TreeQuery for SeekOpWithPatch { } self.pos += 1; QueryResult::Next - } else { // Once we've found the reference element, keep track of any ops that we're overwriting let overwritten = self.op.overwrites(e); @@ -205,12 +204,10 @@ impl TreeQuery for SeekOpWithPatch { self.pos += 1; QueryResult::Next } - } else if e.insert { // If the new op is an update of an existing list element, the first insertion op // we encounter after the reference element indicates the end of the reference elem QueryResult::Finish - } else { // When updating an existing list element, keep track of any conflicts on this list // element. 
We also need to remember if the list element had any visible elements From f83fb5ec61d9ec629ea1eecb24e63e9978c28947 Mon Sep 17 00:00:00 2001 From: Martin Kleppmann Date: Fri, 18 Mar 2022 22:00:23 +0000 Subject: [PATCH 214/730] More tests --- automerge-wasm/test/test.ts | 146 ++++++++++++++++++++++++++++++++++++ 1 file changed, 146 insertions(+) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 32217693..8c1d87a0 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -489,6 +489,21 @@ describe('Automerge', () => { doc2.free() }) + it('should delete map keys', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb') + doc1.set('_root', 'favouriteBird', 'Robin') + doc2.enablePatches(true) + doc2.loadIncremental(doc1.saveIncremental()) + doc1.del('_root', 'favouriteBird') + doc2.loadIncremental(doc1.saveIncremental()) + assert.deepEqual(doc2.popPatches(), [ + {action: 'assign', obj: '_root', key: 'favouriteBird', value: 'Robin', datatype: 'str', conflict: false}, + {action: 'delete', obj: '_root', key: 'favouriteBird'} + ]) + doc1.free() + doc2.free() + }) + it('should include list element insertion', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') doc1.set_object('_root', 'birds', ['Goldfinch', 'Chaffinch']) @@ -570,6 +585,39 @@ describe('Automerge', () => { doc1.free(); doc2.free(); doc3.free(); doc4.free() }) + it('should handle concurrent insertions beyond the head', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + doc1.set_object('_root', 'values', ['a', 'b']) + let change1 = doc1.saveIncremental() + doc2.loadIncremental(change1) + doc3.loadIncremental(change1) + doc4.loadIncremental(change1) + doc1.insert('1@aaaa', 2, 'e') + doc1.insert('1@aaaa', 3, 'f') + doc2.insert('1@aaaa', 2, 'c') + doc2.insert('1@aaaa', 3, 'd') + let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() + doc3.enablePatches(true) + doc4.enablePatches(true) + 
doc3.loadIncremental(change2); doc3.loadIncremental(change3) + doc4.loadIncremental(change3); doc4.loadIncremental(change2) + assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc3.value('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) + assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc4.value('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) + assert.deepEqual(doc3.popPatches(), [ + {action: 'insert', obj: '1@aaaa', key: 2, value: 'e', datatype: 'str'}, + {action: 'insert', obj: '1@aaaa', key: 3, value: 'f', datatype: 'str'}, + {action: 'insert', obj: '1@aaaa', key: 2, value: 'c', datatype: 'str'}, + {action: 'insert', obj: '1@aaaa', key: 3, value: 'd', datatype: 'str'} + ]) + assert.deepEqual(doc4.popPatches(), [ + {action: 'insert', obj: '1@aaaa', key: 2, value: 'c', datatype: 'str'}, + {action: 'insert', obj: '1@aaaa', key: 3, value: 'd', datatype: 'str'}, + {action: 'insert', obj: '1@aaaa', key: 4, value: 'e', datatype: 'str'}, + {action: 'insert', obj: '1@aaaa', key: 5, value: 'f', datatype: 'str'} + ]) + doc1.free(); doc2.free(); doc3.free(); doc4.free() + }) + it('should handle conflicts on root object keys', () => { let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') doc1.set('_root', 'bird', 'Greenfinch') @@ -633,6 +681,25 @@ describe('Automerge', () => { doc1.free(); doc2.free(); doc3.free() }) + it('should allow a conflict to be resolved', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + doc1.set('_root', 'bird', 'Greenfinch') + doc2.set('_root', 'bird', 'Chaffinch') + doc3.enablePatches(true) + let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + doc1.loadIncremental(change2); doc3.loadIncremental(change1) + doc2.loadIncremental(change1); doc3.loadIncremental(change2) + doc1.set('_root', 'bird', 'Goldfinch') + doc3.loadIncremental(doc1.saveIncremental()) + assert.deepEqual(doc3.values('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) + 
assert.deepEqual(doc3.popPatches(), [ + {action: 'assign', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', conflict: false}, + {action: 'assign', obj: '_root', key: 'bird', value: 'Chaffinch', datatype: 'str', conflict: true}, + {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} + ]) + doc1.free(); doc2.free(); doc3.free() + }) + it('should handle a concurrent map key overwrite and delete', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') doc1.set('_root', 'bird', 'Greenfinch') @@ -720,6 +787,85 @@ describe('Automerge', () => { ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) + + it('should handle deletion of a conflict value', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + doc1.set('_root', 'bird', 'Robin') + doc2.set('_root', 'bird', 'Wren') + let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + doc2.del('_root', 'bird') + let change3 = doc2.saveIncremental() + doc3.enablePatches(true) + doc3.loadIncremental(change1) + doc3.loadIncremental(change2) + assert.deepEqual(doc3.values('_root', 'bird'), [['str', 'Robin', '1@aaaa'], ['str', 'Wren', '1@bbbb']]) + assert.deepEqual(doc3.popPatches(), [ + {action: 'assign', obj: '_root', key: 'bird', value: 'Robin', datatype: 'str', conflict: false}, + {action: 'assign', obj: '_root', key: 'bird', value: 'Wren', datatype: 'str', conflict: true} + ]) + doc3.loadIncremental(change3) + assert.deepEqual(doc3.value('_root', 'bird'), ['str', 'Robin']) + assert.deepEqual(doc3.values('_root', 'bird'), [['str', 'Robin', '1@aaaa']]) + assert.deepEqual(doc3.popPatches(), [ + {action: 'assign', obj: '_root', key: 'bird', value: 'Robin', datatype: 'str', conflict: false} + ]) + doc1.free(); doc2.free(); doc3.free() + }) + + it('should handle conflicting nested objects', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb') + doc1.set_object('_root', 'birds', ['Parakeet']) + 
doc2.set_object('_root', 'birds', {'Sparrowhawk': 1}) + let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + doc1.enablePatches(true) + doc2.enablePatches(true) + doc1.loadIncremental(change2) + doc2.loadIncremental(change1) + assert.deepEqual(doc1.values('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) + assert.deepEqual(doc1.popPatches(), [ + {action: 'assign', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true}, + {action: 'assign', obj: '1@bbbb', key: 'Sparrowhawk', value: 1, datatype: 'int', conflict: false} + ]) + assert.deepEqual(doc2.values('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) + assert.deepEqual(doc2.popPatches(), [ + {action: 'assign', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true}, + {action: 'insert', obj: '1@aaaa', key: 0, value: 'Parakeet', datatype: 'str'} + ]) + doc1.free(); doc2.free() + }) + + it('should support date objects', () => { + // FIXME: either use Date objects or use numbers consistently + let doc1 = create('aaaa'), doc2 = create('bbbb'), now = new Date() + doc1.set('_root', 'createdAt', now.getTime(), 'timestamp') + doc2.enablePatches(true) + doc2.loadIncremental(doc1.saveIncremental()) + assert.deepEqual(doc2.value('_root', 'createdAt'), ['timestamp', now]) + assert.deepEqual(doc2.popPatches(), [ + {action: 'assign', obj: '_root', key: 'createdAt', value: now, datatype: 'timestamp', conflict: false} + ]) + doc1.free(); doc2.free() + }) + + it.skip('should support counters in a map', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb') + doc2.enablePatches(true) + doc1.set('_root', 'starlings', 2, 'counter') + doc2.loadIncremental(doc1.saveIncremental()) + doc1.inc('_root', 'starlings', 1) + doc1.dump() + doc2.loadIncremental(doc1.saveIncremental()) + assert.deepEqual(doc2.value('_root', 'starlings'), ['counter', 3]) + assert.deepEqual(doc2.popPatches(), [ + {action: 'assign', obj: '_root', key: 'starlings', value: 2, 
datatype: 'counter', conflict: false}, + {action: 'assign', obj: '_root', key: 'starlings', value: 3, datatype: 'counter', conflict: false} + ]) + doc1.free(); doc2.free() + }) + + it('should support counters in a list') // TODO + + it('should delete a counter from a map') // TODO }) describe('sync', () => { From 2dcbfbf27d616dc171c5162783d03c8650506e4b Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 30 Mar 2022 13:28:52 -0400 Subject: [PATCH 215/730] clippy --- automerge/src/query/seek_op_with_patch.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index 8eab1e8d..179a80de 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -79,7 +79,7 @@ impl TreeQuery for SeekOpWithPatch { if op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less { break; } - self.count_visible(&op); + self.count_visible(op); self.pos += 1; } QueryResult::Finish From a69643c9cc4a03048d2f7506cc867d05abef7459 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 30 Mar 2022 15:24:36 +0100 Subject: [PATCH 216/730] Change set to update to avoid cloning and make it more efficient --- automerge/src/op_set.rs | 2 +- automerge/src/op_tree.rs | 49 ++++++++++++++++++++-------------------- 2 files changed, 25 insertions(+), 26 deletions(-) diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index ce7b165e..758febed 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -78,7 +78,7 @@ impl OpSetInternal { F: FnMut(&mut Op), { if let Some((_typ, tree)) = self.trees.get_mut(obj) { - tree.replace(index, f) + tree.update(index, f) } } diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 55503b2a..34286450 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -123,15 +123,12 @@ impl OpTreeInternal { } // this replaces get_mut() because it allows the indexes to update correctly 
- pub fn replace(&mut self, index: usize, mut f: F) + pub fn update(&mut self, index: usize, f: F) where F: FnMut(&mut Op), { if self.len() > index { - let op = self.get(index).unwrap(); - let mut new_op = op.clone(); - f(&mut new_op); - self.set(index, new_op); + self.root_node.as_mut().unwrap().update(index, f); } } @@ -161,15 +158,6 @@ impl OpTreeInternal { panic!("remove from empty tree") } } - - /// Update the `element` at `index` in the sequence, returning the old value. - /// - /// # Panics - /// - /// Panics if `index > len` - pub fn set(&mut self, index: usize, element: Op) -> Op { - self.root_node.as_mut().unwrap().set(index, element) - } } impl OpTreeNode { @@ -519,31 +507,42 @@ impl OpTreeNode { assert!(self.is_full()); } - pub fn set(&mut self, index: usize, element: Op) -> Op { + /// Update the operation at the given index using the provided function. + /// + /// This handles updating the indices after the update. + pub fn update(&mut self, index: usize, f: F) -> (Op, &Op) + where + F: FnOnce(&mut Op), + { if self.is_leaf() { - let old_element = self.elements.get_mut(index).unwrap(); - self.index.replace(old_element, &element); - mem::replace(old_element, element) + let new_element = self.elements.get_mut(index).unwrap(); + let old_element = new_element.clone(); + f(new_element); + self.index.replace(&old_element, new_element); + (old_element, new_element) } else { let mut cumulative_len = 0; + let len = self.len(); for (child_index, child) in self.children.iter_mut().enumerate() { match (cumulative_len + child.len()).cmp(&index) { Ordering::Less => { cumulative_len += child.len() + 1; } Ordering::Equal => { - let old_element = self.elements.get_mut(child_index).unwrap(); - self.index.replace(old_element, &element); - return mem::replace(old_element, element); + let new_element = self.elements.get_mut(child_index).unwrap(); + let old_element = new_element.clone(); + f(new_element); + self.index.replace(&old_element, new_element); + return 
(old_element, new_element); } Ordering::Greater => { - let old_element = child.set(index - cumulative_len, element.clone()); - self.index.replace(&old_element, &element); - return old_element; + let (old_element, new_element) = child.update(index - cumulative_len, f); + self.index.replace(&old_element, new_element); + return (old_element, new_element); } } } - panic!("Invalid index to set: {} but len was {}", index, self.len()) + panic!("Invalid index to set: {} but len was {}", index, len) } } From 0e457d58916b405341b8769d4c7700fd51f048f3 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 31 Mar 2022 13:53:26 +0100 Subject: [PATCH 217/730] Remove const B: usize requirement everywhere This doesn't need to be generic on everything, just defined once as a const and referenced. --- automerge/src/automerge.rs | 2 +- automerge/src/keys.rs | 5 ++- automerge/src/keys_at.rs | 5 ++- automerge/src/op_set.rs | 29 ++++++++-------- automerge/src/op_tree.rs | 42 ++++++++++++----------- automerge/src/query.rs | 12 +++---- automerge/src/query/insert.rs | 4 +-- automerge/src/query/keys.rs | 12 +++---- automerge/src/query/keys_at.rs | 12 +++---- automerge/src/query/len.rs | 4 +-- automerge/src/query/len_at.rs | 2 +- automerge/src/query/list_vals.rs | 4 +-- automerge/src/query/list_vals_at.rs | 2 +- automerge/src/query/nth.rs | 4 +-- automerge/src/query/nth_at.rs | 2 +- automerge/src/query/opid.rs | 4 +-- automerge/src/query/prop.rs | 8 ++--- automerge/src/query/prop_at.rs | 8 ++--- automerge/src/query/seek_op.rs | 8 ++--- automerge/src/query/seek_op_with_patch.rs | 12 +++---- automerge/src/sequence_tree.rs | 18 +++++----- automerge/src/visualisation.rs | 42 +++++++++++------------ 22 files changed, 110 insertions(+), 131 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 47233be9..c20e58dc 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1004,7 +1004,7 @@ mod tests { use pretty_assertions::assert_eq; use 
super::*; - use crate::op_set::B; + use crate::op_tree::B; use crate::transaction::Transactable; use crate::*; use std::convert::TryInto; diff --git a/automerge/src/keys.rs b/automerge/src/keys.rs index 89b20f90..109586c7 100644 --- a/automerge/src/keys.rs +++ b/automerge/src/keys.rs @@ -1,13 +1,12 @@ -use crate::op_set::B; use crate::{query, Automerge}; pub struct Keys<'a, 'k> { - keys: Option>, + keys: Option>, doc: &'a Automerge, } impl<'a, 'k> Keys<'a, 'k> { - pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { + pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { Self { keys, doc } } } diff --git a/automerge/src/keys_at.rs b/automerge/src/keys_at.rs index 42a2ec9c..0d0beb35 100644 --- a/automerge/src/keys_at.rs +++ b/automerge/src/keys_at.rs @@ -1,13 +1,12 @@ -use crate::op_set::B; use crate::{query, Automerge}; pub struct KeysAt<'a, 'k> { - keys: Option>, + keys: Option>, doc: &'a Automerge, } impl<'a, 'k> KeysAt<'a, 'k> { - pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { + pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { Self { keys, doc } } } diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 758febed..b2e53eba 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -8,20 +8,19 @@ use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::HashMap; -pub(crate) const B: usize = 16; -pub(crate) type OpSet = OpSetInternal; +pub(crate) type OpSet = OpSetInternal; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct OpSetInternal { +pub(crate) struct OpSetInternal { /// The map of objects to their type and ops. - trees: HashMap), FxBuildHasher>, + trees: HashMap, /// The number of operations in the opset. length: usize, /// Metadata about the operations in this opset. 
pub m: OpSetMetadata, } -impl OpSetInternal { +impl OpSetInternal { pub fn new() -> Self { let mut trees: HashMap<_, _, _> = Default::default(); trees.insert(ObjId::root(), (ObjType::Map, Default::default())); @@ -35,7 +34,7 @@ impl OpSetInternal { } } - pub fn iter(&self) -> Iter<'_, B> { + pub fn iter(&self) -> Iter<'_> { let mut objs: Vec<_> = self.trees.keys().collect(); objs.sort_by(|a, b| self.m.lamport_cmp(a.0, b.0)); Iter { @@ -46,7 +45,7 @@ impl OpSetInternal { } } - pub fn keys(&self, obj: ObjId) -> Option> { + pub fn keys(&self, obj: ObjId) -> Option { if let Some((_typ, tree)) = self.trees.get(&obj) { tree.keys() } else { @@ -54,7 +53,7 @@ impl OpSetInternal { } } - pub fn keys_at(&self, obj: ObjId, clock: Clock) -> Option> { + pub fn keys_at(&self, obj: ObjId, clock: Clock) -> Option { if let Some((_typ, tree)) = self.trees.get(&obj) { tree.keys_at(clock) } else { @@ -64,7 +63,7 @@ impl OpSetInternal { pub fn search(&self, obj: &ObjId, query: Q) -> Q where - Q: TreeQuery, + Q: TreeQuery, { if let Some((_typ, tree)) = self.trees.get(obj) { tree.search(query, &self.m) @@ -123,30 +122,30 @@ impl OpSetInternal { } } -impl Default for OpSetInternal { +impl Default for OpSetInternal { fn default() -> Self { Self::new() } } -impl<'a, const B: usize> IntoIterator for &'a OpSetInternal { +impl<'a> IntoIterator for &'a OpSetInternal { type Item = (&'a ObjId, &'a Op); - type IntoIter = Iter<'a, B>; + type IntoIter = Iter<'a>; fn into_iter(self) -> Self::IntoIter { self.iter() } } -pub(crate) struct Iter<'a, const B: usize> { - inner: &'a OpSetInternal, +pub(crate) struct Iter<'a> { + inner: &'a OpSetInternal, index: usize, objs: Vec<&'a ObjId>, sub_index: usize, } -impl<'a, const B: usize> Iterator for Iter<'a, B> { +impl<'a> Iterator for Iter<'a> { type Item = (&'a ObjId, &'a Op); fn next(&mut self) -> Option { diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 34286450..6d4cfb10 100644 --- a/automerge/src/op_tree.rs +++ 
b/automerge/src/op_tree.rs @@ -12,23 +12,25 @@ use crate::{ }; use std::collections::HashSet; +pub(crate) const B: usize = 16; + #[allow(dead_code)] -pub(crate) type OpTree = OpTreeInternal<16>; +pub(crate) type OpTree = OpTreeInternal; #[derive(Clone, Debug)] -pub(crate) struct OpTreeInternal { - pub(crate) root_node: Option>, +pub(crate) struct OpTreeInternal { + pub(crate) root_node: Option, } #[derive(Clone, Debug)] -pub(crate) struct OpTreeNode { +pub(crate) struct OpTreeNode { pub(crate) elements: Vec, - pub(crate) children: Vec>, + pub(crate) children: Vec, pub index: Index, length: usize, } -impl OpTreeInternal { +impl OpTreeInternal { /// Construct a new, empty, sequence. pub fn new() -> Self { Self { root_node: None } @@ -39,11 +41,11 @@ impl OpTreeInternal { self.root_node.as_ref().map_or(0, |n| n.len()) } - pub fn keys(&self) -> Option> { + pub fn keys(&self) -> Option { self.root_node.as_ref().map(query::Keys::new) } - pub fn keys_at(&self, clock: Clock) -> Option> { + pub fn keys_at(&self, clock: Clock) -> Option { self.root_node .as_ref() .map(|root| query::KeysAt::new(root, clock)) @@ -51,7 +53,7 @@ impl OpTreeInternal { pub fn search(&self, mut query: Q, m: &OpSetMetadata) -> Q where - Q: TreeQuery, + Q: TreeQuery, { self.root_node .as_ref() @@ -63,7 +65,7 @@ impl OpTreeInternal { } /// Create an iterator through the sequence. 
- pub fn iter(&self) -> Iter<'_, B> { + pub fn iter(&self) -> Iter { Iter { inner: self, index: 0, @@ -160,7 +162,7 @@ impl OpTreeInternal { } } -impl OpTreeNode { +impl OpTreeNode { fn new() -> Self { Self { elements: Vec::new(), @@ -172,7 +174,7 @@ impl OpTreeNode { pub fn search(&self, query: &mut Q, m: &OpSetMetadata) -> bool where - Q: TreeQuery, + Q: TreeQuery, { if self.is_leaf() { for e in &self.elements { @@ -497,7 +499,7 @@ impl OpTreeNode { } } - fn merge(&mut self, middle: Op, successor_sibling: OpTreeNode) { + fn merge(&mut self, middle: Op, successor_sibling: OpTreeNode) { self.index.insert(&middle); self.index.merge(&successor_sibling.index); self.elements.push(middle); @@ -577,22 +579,22 @@ impl OpTreeNode { } } -impl Default for OpTreeInternal { +impl Default for OpTreeInternal { fn default() -> Self { Self::new() } } -impl PartialEq for OpTreeInternal { +impl PartialEq for OpTreeInternal { fn eq(&self, other: &Self) -> bool { self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b) } } -impl<'a, const B: usize> IntoIterator for &'a OpTreeInternal { +impl<'a> IntoIterator for &'a OpTreeInternal { type Item = &'a Op; - type IntoIter = Iter<'a, B>; + type IntoIter = Iter<'a>; fn into_iter(self) -> Self::IntoIter { Iter { @@ -602,12 +604,12 @@ impl<'a, const B: usize> IntoIterator for &'a OpTreeInternal { } } -pub(crate) struct Iter<'a, const B: usize> { - inner: &'a OpTreeInternal, +pub(crate) struct Iter<'a> { + inner: &'a OpTreeInternal, index: usize, } -impl<'a, const B: usize> Iterator for Iter<'a, B> { +impl<'a> Iterator for Iter<'a> { type Item = &'a Op; fn next(&mut self) -> Option { diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 4c3895b6..ca02bce0 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -43,17 +43,13 @@ pub(crate) struct CounterData { op: Op, } -pub(crate) trait TreeQuery { +pub(crate) trait TreeQuery { #[inline(always)] - fn query_node_with_metadata( - &mut self, - child: 
&OpTreeNode, - _m: &OpSetMetadata, - ) -> QueryResult { + fn query_node_with_metadata(&mut self, child: &OpTreeNode, _m: &OpSetMetadata) -> QueryResult { self.query_node(child) } - fn query_node(&mut self, _child: &OpTreeNode) -> QueryResult { + fn query_node(&mut self, _child: &OpTreeNode) -> QueryResult { QueryResult::Descend } @@ -232,7 +228,7 @@ impl VisWindow { } } -pub(crate) fn binary_search_by(node: &OpTreeNode, f: F) -> usize +pub(crate) fn binary_search_by(node: &OpTreeNode, f: F) -> usize where F: Fn(&Op) -> Ordering, { diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index eb855ce9..6f82061a 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -61,8 +61,8 @@ impl InsertNth { } } -impl TreeQuery for InsertNth { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { +impl TreeQuery for InsertNth { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { // if this node has some visible elements then we may find our target within let mut num_vis = child.index.visible_len(); if child.index.has_visible(&self.last_seen) { diff --git a/automerge/src/query/keys.rs b/automerge/src/query/keys.rs index f780effa..f9e25727 100644 --- a/automerge/src/query/keys.rs +++ b/automerge/src/query/keys.rs @@ -3,16 +3,16 @@ use crate::types::Key; use std::fmt::Debug; #[derive(Debug)] -pub(crate) struct Keys<'a, const B: usize> { +pub(crate) struct Keys<'a> { index: usize, last_key: Option, index_back: usize, last_key_back: Option, - root_child: &'a OpTreeNode, + root_child: &'a OpTreeNode, } -impl<'a, const B: usize> Keys<'a, B> { - pub(crate) fn new(root_child: &'a OpTreeNode) -> Self { +impl<'a> Keys<'a> { + pub(crate) fn new(root_child: &'a OpTreeNode) -> Self { Self { index: 0, last_key: None, @@ -23,7 +23,7 @@ impl<'a, const B: usize> Keys<'a, B> { } } -impl<'a, const B: usize> Iterator for Keys<'a, B> { +impl<'a> Iterator for Keys<'a> { type Item = Key; fn next(&mut self) -> Option { @@ -39,7 +39,7 
@@ impl<'a, const B: usize> Iterator for Keys<'a, B> { } } -impl<'a, const B: usize> DoubleEndedIterator for Keys<'a, B> { +impl<'a> DoubleEndedIterator for Keys<'a> { fn next_back(&mut self) -> Option { for i in (self.index..self.index_back).rev() { let op = self.root_child.get(i)?; diff --git a/automerge/src/query/keys_at.rs b/automerge/src/query/keys_at.rs index b5262ed6..2fe04747 100644 --- a/automerge/src/query/keys_at.rs +++ b/automerge/src/query/keys_at.rs @@ -4,18 +4,18 @@ use crate::types::{Clock, Key}; use std::fmt::Debug; #[derive(Debug)] -pub(crate) struct KeysAt<'a, const B: usize> { +pub(crate) struct KeysAt<'a> { clock: Clock, window: VisWindow, index: usize, last_key: Option, index_back: usize, last_key_back: Option, - root_child: &'a OpTreeNode, + root_child: &'a OpTreeNode, } -impl<'a, const B: usize> KeysAt<'a, B> { - pub(crate) fn new(root_child: &'a OpTreeNode, clock: Clock) -> Self { +impl<'a> KeysAt<'a> { + pub(crate) fn new(root_child: &'a OpTreeNode, clock: Clock) -> Self { Self { clock, window: VisWindow::default(), @@ -28,7 +28,7 @@ impl<'a, const B: usize> KeysAt<'a, B> { } } -impl<'a, const B: usize> Iterator for KeysAt<'a, B> { +impl<'a> Iterator for KeysAt<'a> { type Item = Key; fn next(&mut self) -> Option { @@ -45,7 +45,7 @@ impl<'a, const B: usize> Iterator for KeysAt<'a, B> { } } -impl<'a, const B: usize> DoubleEndedIterator for KeysAt<'a, B> { +impl<'a> DoubleEndedIterator for KeysAt<'a> { fn next_back(&mut self) -> Option { for i in self.index..self.index_back { let op = self.root_child.get(i)?; diff --git a/automerge/src/query/len.rs b/automerge/src/query/len.rs index ab745f75..6fd35b5f 100644 --- a/automerge/src/query/len.rs +++ b/automerge/src/query/len.rs @@ -13,8 +13,8 @@ impl Len { } } -impl TreeQuery for Len { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { +impl TreeQuery for Len { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { self.len = child.index.visible_len(); QueryResult::Finish } 
diff --git a/automerge/src/query/len_at.rs b/automerge/src/query/len_at.rs index 2f277f3e..2f183517 100644 --- a/automerge/src/query/len_at.rs +++ b/automerge/src/query/len_at.rs @@ -23,7 +23,7 @@ impl LenAt { } } -impl TreeQuery for LenAt { +impl TreeQuery for LenAt { fn query_element(&mut self, op: &Op) -> QueryResult { if op.insert { self.last = None; diff --git a/automerge/src/query/list_vals.rs b/automerge/src/query/list_vals.rs index 6e433a77..37d1a7cc 100644 --- a/automerge/src/query/list_vals.rs +++ b/automerge/src/query/list_vals.rs @@ -18,8 +18,8 @@ impl ListVals { } } -impl TreeQuery for ListVals { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { +impl TreeQuery for ListVals { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { let start = 0; for pos in start..child.len() { let op = child.get(pos).unwrap(); diff --git a/automerge/src/query/list_vals_at.rs b/automerge/src/query/list_vals_at.rs index c447f314..97dc4802 100644 --- a/automerge/src/query/list_vals_at.rs +++ b/automerge/src/query/list_vals_at.rs @@ -23,7 +23,7 @@ impl ListValsAt { } } -impl TreeQuery for ListValsAt { +impl TreeQuery for ListValsAt { fn query_element_with_metadata(&mut self, op: &Op, m: &OpSetMetadata) -> QueryResult { if op.insert { self.last_elem = None; diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index f8c136f6..8ee2f5fa 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -39,8 +39,8 @@ impl Nth { } } -impl TreeQuery for Nth { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { +impl TreeQuery for Nth { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { let mut num_vis = child.index.visible_len(); if child.index.has_visible(&self.last_seen) { num_vis -= 1; diff --git a/automerge/src/query/nth_at.rs b/automerge/src/query/nth_at.rs index 39f29a47..cfa36632 100644 --- a/automerge/src/query/nth_at.rs +++ b/automerge/src/query/nth_at.rs @@ -29,7 +29,7 @@ impl NthAt { } } -impl 
TreeQuery for NthAt { +impl TreeQuery for NthAt { fn query_element(&mut self, element: &Op) -> QueryResult { if element.insert { if self.seen > self.target { diff --git a/automerge/src/query/opid.rs b/automerge/src/query/opid.rs index 2a68ad1c..b00c6420 100644 --- a/automerge/src/query/opid.rs +++ b/automerge/src/query/opid.rs @@ -30,8 +30,8 @@ impl OpIdSearch { } } -impl TreeQuery for OpIdSearch { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { +impl TreeQuery for OpIdSearch { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { if child.index.ops.contains(&self.target) { QueryResult::Descend } else { diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs index 6c97cfcd..42131e1b 100644 --- a/automerge/src/query/prop.rs +++ b/automerge/src/query/prop.rs @@ -22,12 +22,8 @@ impl Prop { } } -impl TreeQuery for Prop { - fn query_node_with_metadata( - &mut self, - child: &OpTreeNode, - m: &OpSetMetadata, - ) -> QueryResult { +impl TreeQuery for Prop { + fn query_node_with_metadata(&mut self, child: &OpTreeNode, m: &OpSetMetadata) -> QueryResult { let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); self.pos = start; for pos in start..child.len() { diff --git a/automerge/src/query/prop_at.rs b/automerge/src/query/prop_at.rs index 11cbf752..3d9f1a3c 100644 --- a/automerge/src/query/prop_at.rs +++ b/automerge/src/query/prop_at.rs @@ -24,12 +24,8 @@ impl PropAt { } } -impl TreeQuery for PropAt { - fn query_node_with_metadata( - &mut self, - child: &OpTreeNode, - m: &OpSetMetadata, - ) -> QueryResult { +impl TreeQuery for PropAt { + fn query_node_with_metadata(&mut self, child: &OpTreeNode, m: &OpSetMetadata) -> QueryResult { let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); let mut window: VisWindow = Default::default(); self.pos = start; diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs index 59c26a10..dda35b4a 100644 --- a/automerge/src/query/seek_op.rs +++ 
b/automerge/src/query/seek_op.rs @@ -39,12 +39,8 @@ impl SeekOp { } } -impl TreeQuery for SeekOp { - fn query_node_with_metadata( - &mut self, - child: &OpTreeNode, - m: &OpSetMetadata, - ) -> QueryResult { +impl TreeQuery for SeekOp { + fn query_node_with_metadata(&mut self, child: &OpTreeNode, m: &OpSetMetadata) -> QueryResult { if self.found { return QueryResult::Descend; } diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index 107b76d8..e713e77e 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -5,7 +5,7 @@ use std::cmp::Ordering; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct SeekOpWithPatch { +pub(crate) struct SeekOpWithPatch { op: Op, pub pos: usize, pub succ: Vec, @@ -16,7 +16,7 @@ pub(crate) struct SeekOpWithPatch { pub had_value_before: bool, } -impl SeekOpWithPatch { +impl SeekOpWithPatch { pub fn new(op: &Op) -> Self { SeekOpWithPatch { op: op.clone(), @@ -60,12 +60,8 @@ impl SeekOpWithPatch { } } -impl TreeQuery for SeekOpWithPatch { - fn query_node_with_metadata( - &mut self, - child: &OpTreeNode, - m: &OpSetMetadata, - ) -> QueryResult { +impl TreeQuery for SeekOpWithPatch { + fn query_node_with_metadata(&mut self, child: &OpTreeNode, m: &OpSetMetadata) -> QueryResult { if self.found { return QueryResult::Descend; } diff --git a/automerge/src/sequence_tree.rs b/automerge/src/sequence_tree.rs index 3031e391..ba5c7ff6 100644 --- a/automerge/src/sequence_tree.rs +++ b/automerge/src/sequence_tree.rs @@ -7,18 +7,18 @@ use std::{ pub type SequenceTree = SequenceTreeInternal; #[derive(Clone, Debug)] -pub struct SequenceTreeInternal { +pub struct SequenceTreeInternal { root_node: Option>, } #[derive(Clone, Debug, PartialEq)] -struct SequenceTreeNode { +struct SequenceTreeNode { elements: Vec, children: Vec>, length: usize, } -impl SequenceTreeInternal +impl SequenceTreeInternal where T: Clone + Debug, { @@ -145,7 +145,7 @@ 
where } } -impl SequenceTreeNode +impl SequenceTreeNode where T: Clone + Debug, { @@ -496,7 +496,7 @@ where } } -impl Default for SequenceTreeInternal +impl Default for SequenceTreeInternal where T: Clone + Debug, { @@ -505,7 +505,7 @@ where } } -impl PartialEq for SequenceTreeInternal +impl PartialEq for SequenceTreeInternal where T: Clone + Debug + PartialEq, { @@ -514,7 +514,7 @@ where } } -impl<'a, T, const B: usize> IntoIterator for &'a SequenceTreeInternal +impl<'a, T> IntoIterator for &'a SequenceTreeInternal where T: Clone + Debug, { @@ -530,12 +530,12 @@ where } } -pub struct Iter<'a, T, const B: usize> { +pub struct Iter<'a, T> { inner: &'a SequenceTreeInternal, index: usize, } -impl<'a, T, const B: usize> Iterator for Iter<'a, T, B> +impl<'a, T> Iterator for Iter<'a, T, B> where T: Clone + Debug, { diff --git a/automerge/src/visualisation.rs b/automerge/src/visualisation.rs index 6f6a36b0..cf283eb0 100644 --- a/automerge/src/visualisation.rs +++ b/automerge/src/visualisation.rs @@ -16,17 +16,17 @@ impl Default for NodeId { } #[derive(Clone)] -pub(crate) struct Node<'a, const B: usize> { +pub(crate) struct Node<'a> { id: NodeId, children: Vec, - node_type: NodeType<'a, B>, + node_type: NodeType<'a>, metadata: &'a crate::op_set::OpSetMetadata, } #[derive(Clone)] -pub(crate) enum NodeType<'a, const B: usize> { +pub(crate) enum NodeType<'a> { ObjRoot(crate::types::ObjId), - ObjTreeNode(ObjId, &'a crate::op_tree::OpTreeNode), + ObjTreeNode(ObjId, &'a crate::op_tree::OpTreeNode), } #[derive(Clone)] @@ -35,20 +35,20 @@ pub(crate) struct Edge { child_id: NodeId, } -pub(crate) struct GraphVisualisation<'a, const B: usize> { - nodes: HashMap>, +pub(crate) struct GraphVisualisation<'a> { + nodes: HashMap>, actor_shorthands: HashMap, } -impl<'a, const B: usize> GraphVisualisation<'a, B> { +impl<'a> GraphVisualisation<'a> { pub(super) fn construct( trees: &'a HashMap< crate::types::ObjId, - (crate::types::ObjType, crate::op_tree::OpTreeInternal), + 
(crate::types::ObjType, crate::op_tree::OpTreeInternal), BuildHasherDefault, >, metadata: &'a crate::op_set::OpSetMetadata, - ) -> GraphVisualisation<'a, B> { + ) -> GraphVisualisation<'a> { let mut nodes = HashMap::new(); for (obj_id, (_, tree)) in trees { if let Some(root_node) = &tree.root_node { @@ -76,9 +76,9 @@ impl<'a, const B: usize> GraphVisualisation<'a, B> { } fn construct_nodes( - node: &'a crate::op_tree::OpTreeNode, + node: &'a crate::op_tree::OpTreeNode, objid: &ObjId, - nodes: &mut HashMap>, + nodes: &mut HashMap>, m: &'a crate::op_set::OpSetMetadata, ) -> NodeId { let node_id = NodeId::default(); @@ -100,8 +100,8 @@ impl<'a, const B: usize> GraphVisualisation<'a, B> { } } -impl<'a, const B: usize> dot::GraphWalk<'a, &'a Node<'a, B>, Edge> for GraphVisualisation<'a, B> { - fn nodes(&'a self) -> dot::Nodes<'a, &'a Node<'a, B>> { +impl<'a> dot::GraphWalk<'a, &'a Node<'a>, Edge> for GraphVisualisation<'a> { + fn nodes(&'a self) -> dot::Nodes<'a, &'a Node<'a>> { Cow::Owned(self.nodes.values().collect::>()) } @@ -118,25 +118,25 @@ impl<'a, const B: usize> dot::GraphWalk<'a, &'a Node<'a, B>, Edge> for GraphVisu Cow::Owned(edges) } - fn source(&'a self, edge: &Edge) -> &'a Node<'a, B> { + fn source(&'a self, edge: &Edge) -> &'a Node<'a> { self.nodes.get(&edge.parent_id).unwrap() } - fn target(&'a self, edge: &Edge) -> &'a Node<'a, B> { + fn target(&'a self, edge: &Edge) -> &'a Node<'a> { self.nodes.get(&edge.child_id).unwrap() } } -impl<'a, const B: usize> dot::Labeller<'a, &'a Node<'a, B>, Edge> for GraphVisualisation<'a, B> { +impl<'a> dot::Labeller<'a, &'a Node<'a>, Edge> for GraphVisualisation<'a> { fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new("OpSet").unwrap() } - fn node_id(&'a self, n: &&Node<'a, B>) -> dot::Id<'a> { + fn node_id(&'a self, n: &&Node<'a>) -> dot::Id<'a> { dot::Id::new(format!("node_{}", n.id.0)).unwrap() } - fn node_shape(&'a self, node: &&'a Node<'a, B>) -> Option> { + fn node_shape(&'a self, node: &&'a Node<'a>) -> Option> { 
let shape = match node.node_type { NodeType::ObjTreeNode(_, _) => dot::LabelText::label("none"), NodeType::ObjRoot(_) => dot::LabelText::label("ellipse"), @@ -144,7 +144,7 @@ impl<'a, const B: usize> dot::Labeller<'a, &'a Node<'a, B>, Edge> for GraphVisua Some(shape) } - fn node_label(&'a self, n: &&Node<'a, B>) -> dot::LabelText<'a> { + fn node_label(&'a self, n: &&Node<'a>) -> dot::LabelText<'a> { match n.node_type { NodeType::ObjTreeNode(objid, tree_node) => dot::LabelText::HtmlStr( OpTable::create(tree_node, &objid, n.metadata, &self.actor_shorthands) @@ -163,8 +163,8 @@ struct OpTable { } impl OpTable { - fn create<'a, const B: usize>( - node: &'a crate::op_tree::OpTreeNode, + fn create<'a>( + node: &'a crate::op_tree::OpTreeNode, obj: &ObjId, metadata: &crate::op_set::OpSetMetadata, actor_shorthands: &HashMap, From 0c16dfe2aad1b7cd5ab51a86327f18ca7911f357 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 11:46:43 +0100 Subject: [PATCH 218/730] Change js function names to camelCase --- automerge-wasm/index.d.ts | 6 +++--- automerge-wasm/src/lib.rs | 3 +++ 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 5da27ae9..5b833dda 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -93,11 +93,11 @@ export function decodeSyncState(data: Uint8Array): SyncState; export class Automerge { // change state set(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined; - set_object(obj: ObjID, prop: Prop, value: ObjType): ObjID; + setObject(obj: ObjID, prop: Prop, value: ObjType): ObjID; insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): undefined; - insert_object(obj: ObjID, index: number, value: ObjType): ObjID; + insertObject(obj: ObjID, index: number, value: ObjType): ObjID; push(obj: ObjID, value: Value, datatype?: Datatype): undefined; - push_object(obj: ObjID, value: ObjType): ObjID; + pushObject(obj: ObjID, value: ObjType): 
ObjID; splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; inc(obj: ObjID, prop: Prop, value: number): void; del(obj: ObjID, prop: Prop): void; diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index b8e9acf0..39d7f035 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -160,6 +160,7 @@ impl Automerge { Ok(()) } + #[wasm_bindgen(js_name = pushObject)] pub fn push_object(&mut self, obj: JsValue, value: JsValue) -> Result, JsValue> { let obj = self.import(obj)?; let (value, subvals) = @@ -186,6 +187,7 @@ impl Automerge { Ok(()) } + #[wasm_bindgen(js_name = insertObject)] pub fn insert_object( &mut self, obj: JsValue, @@ -217,6 +219,7 @@ impl Automerge { Ok(()) } + #[wasm_bindgen(js_name = setObject)] pub fn set_object( &mut self, obj: JsValue, From 3631ddfd55a0a88137093029a48f2ecc621b98cf Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 11:48:04 +0100 Subject: [PATCH 219/730] Fix js side --- automerge-js/src/proxies.js | 24 ++++++------ automerge-wasm/test/test.ts | 76 ++++++++++++++++++------------------- 2 files changed, 50 insertions(+), 50 deletions(-) diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js index f9e27855..1d337a06 100644 --- a/automerge-js/src/proxies.js +++ b/automerge-js/src/proxies.js @@ -134,21 +134,21 @@ const MapHandler = { } switch (datatype) { case "list": - const list = context.set_object(objectId, key, []) + const list = context.setObject(objectId, key, []) const proxyList = listProxy(context, list, [ ... path, key ], readonly ); for (let i = 0; i < value.length; i++) { proxyList[i] = value[i] } break; case "text": - const text = context.set_object(objectId, key, "", "text") + const text = context.setObject(objectId, key, "", "text") const proxyText = textProxy(context, text, [ ... 
path, key ], readonly ); for (let i = 0; i < value.length; i++) { proxyText[i] = value.get(i) } break; case "map": - const map = context.set_object(objectId, key, {}) + const map = context.setObject(objectId, key, {}) const proxyMap = mapProxy(context, map, [ ... path, key ], readonly ); for (const key in value) { proxyMap[key] = value[key] @@ -251,9 +251,9 @@ const ListHandler = { case "list": let list if (index >= context.length(objectId)) { - list = context.insert_object(objectId, index, []) + list = context.insertObject(objectId, index, []) } else { - list = context.set_object(objectId, index, []) + list = context.setObject(objectId, index, []) } const proxyList = listProxy(context, list, [ ... path, index ], readonly); proxyList.splice(0,0,...value) @@ -261,9 +261,9 @@ const ListHandler = { case "text": let text if (index >= context.length(objectId)) { - text = context.insert_object(objectId, index, "", "text") + text = context.insertObject(objectId, index, "", "text") } else { - text = context.set_object(objectId, index, "", "text") + text = context.setObject(objectId, index, "", "text") } const proxyText = textProxy(context, text, [ ... path, index ], readonly); proxyText.splice(0,0,...value) @@ -271,9 +271,9 @@ const ListHandler = { case "map": let map if (index >= context.length(objectId)) { - map = context.insert_object(objectId, index, {}) + map = context.insertObject(objectId, index, {}) } else { - map = context.set_object(objectId, index, {}) + map = context.setObject(objectId, index, {}) } const proxyMap = mapProxy(context, map, [ ... path, index ], readonly); for (const key in value) { @@ -478,17 +478,17 @@ function listMethods(target) { for (let [value,datatype] of values) { switch (datatype) { case "list": - const list = context.insert_object(objectId, index, []) + const list = context.insertObject(objectId, index, []) const proxyList = listProxy(context, list, [ ... 
path, index ], readonly); proxyList.splice(0,0,...value) break; case "text": - const text = context.insert_object(objectId, index, "", "text") + const text = context.insertObject(objectId, index, "", "text") const proxyText = textProxy(context, text, [ ... path, index ], readonly); proxyText.splice(0,0,...value) break; case "map": - const map = context.insert_object(objectId, index, {}) + const map = context.insertObject(objectId, index, {}) const proxyMap = mapProxy(context, map, [ ... path, index ], readonly); for (const key in value) { proxyMap[key] = value[key] diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 73fb988e..81e7842e 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -64,7 +64,7 @@ describe('Automerge', () => { doc.set(root, "bool", true) doc.set(root, "time1", 1000, "timestamp") doc.set(root, "time2", new Date(1001)) - doc.set_object(root, "list", []); + doc.setObject(root, "list", []); doc.set(root, "null", null) result = doc.value(root,"hello") @@ -124,7 +124,7 @@ describe('Automerge', () => { let root = "_root" let result - let submap = doc.set_object(root, "submap", {}) + let submap = doc.setObject(root, "submap", {}) doc.set(submap, "number", 6, "uint") assert.strictEqual(doc.pendingOps(),2) @@ -140,7 +140,7 @@ describe('Automerge', () => { let doc = create() let root = "_root" - let submap = doc.set_object(root, "numbers", []) + let submap = doc.setObject(root, "numbers", []) doc.insert(submap, 0, "a"); doc.insert(submap, 1, "b"); doc.insert(submap, 2, "c"); @@ -163,7 +163,7 @@ describe('Automerge', () => { let doc = create() let root = "_root" - let submap = doc.set_object(root, "letters", []) + let submap = doc.setObject(root, "letters", []) doc.insert(submap, 0, "a"); doc.insert(submap, 0, "b"); assert.deepEqual(doc.materialize(), { letters: ["b", "a" ] }) @@ -228,7 +228,7 @@ describe('Automerge', () => { let doc = create() let root = "_root"; - let text = doc.set_object(root, "text", 
""); + let text = doc.setObject(root, "text", ""); if (!text) throw new Error('should not be undefined') doc.splice(text, 0, 0, "hello ") doc.splice(text, 6, 0, ["w","o","r","l","d"]) @@ -244,8 +244,8 @@ describe('Automerge', () => { it('should be able to insert objects into text', () => { let doc = create() - let text = doc.set_object("/", "text", "Hello world"); - let obj = doc.insert_object(text, 6, { hello: "world" }); + let text = doc.setObject("/", "text", "Hello world"); + let obj = doc.insertObject(text, 6, { hello: "world" }); assert.deepEqual(doc.text(text), "Hello \ufffcworld"); assert.deepEqual(doc.value(text, 6), ["map", obj]); assert.deepEqual(doc.value(obj, "hello"), ["str", "world"]); @@ -289,7 +289,7 @@ describe('Automerge', () => { it('should be able to splice text', () => { let doc = create() - let text = doc.set_object("_root", "text", ""); + let text = doc.setObject("_root", "text", ""); doc.splice(text, 0, 0, "hello world"); let heads1 = doc.commit(); doc.splice(text, 6, 0, "big bad "); @@ -337,7 +337,7 @@ describe('Automerge', () => { it('local inc increments all visible counters in a sequence', () => { let doc1 = create("aaaa") - let seq = doc1.set_object("_root", "seq", []) + let seq = doc1.setObject("_root", "seq", []) doc1.insert(seq, 0, "hello") let doc2 = loadDoc(doc1.save(), "bbbb"); let doc3 = loadDoc(doc1.save(), "cccc"); @@ -370,10 +370,10 @@ describe('Automerge', () => { it('paths can be used instead of objids', () => { let doc = create("aaaa") - doc.set_object("_root","list",[{ foo: "bar"}, [1,2,3]]) - assert.deepEqual(doc.materialize("/"), { list: [{ foo: "bar"}, [1,2,3]] }) - assert.deepEqual(doc.materialize("/list"), [{ foo: "bar"}, [1,2,3]]) - assert.deepEqual(doc.materialize("/list/0"), { foo: "bar"}) + doc.setObject("_root","list",[{ foo: "bar"}, [1,2,3]]) + assert.deepEqual(doc.materialize("/"), { list: [{ foo: "bar"}, [1,2,3]] }) + assert.deepEqual(doc.materialize("/list"), [{ foo: "bar"}, [1,2,3]]) + 
assert.deepEqual(doc.materialize("/list/0"), { foo: "bar"}) }) it('should be able to fetch changes by hash', () => { @@ -392,11 +392,11 @@ describe('Automerge', () => { it('recursive sets are possible', () => { let doc = create("aaaa") - let l1 = doc.set_object("_root","list",[{ foo: "bar"}, [1,2,3]]) - let l2 = doc.insert_object(l1, 0, { zip: ["a", "b"] }) - let l3 = doc.set_object("_root","info1","hello world") // 'text' object + let l1 = doc.setObject("_root","list",[{ foo: "bar"}, [1,2,3]]) + let l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) + let l3 = doc.setObject("_root","info1","hello world") // 'text' object doc.set("_root","info2","hello world") // 'str' - let l4 = doc.set_object("_root","info3","hello world") + let l4 = doc.setObject("_root","info3","hello world") assert.deepEqual(doc.materialize(), { "list": [ { zip: ["a", "b"] }, { foo: "bar"}, [ 1,2,3]], "info1": "hello world", @@ -412,12 +412,12 @@ describe('Automerge', () => { it('only returns an object id when objects are created', () => { let doc = create("aaaa") let r1 = doc.set("_root","foo","bar") - let r2 = doc.set_object("_root","list",[]) + let r2 = doc.setObject("_root","list",[]) let r3 = doc.set("_root","counter",10, "counter") let r4 = doc.inc("_root","counter",1) let r5 = doc.del("_root","counter") let r6 = doc.insert(r2,0,10); - let r7 = doc.insert_object(r2,0,{}); + let r7 = doc.insertObject(r2,0,{}); let r8 = doc.splice(r2,1,0,["a","b","c"]); //let r9 = doc.splice(r2,1,0,["a",[],{},"d"]); assert.deepEqual(r1,null); @@ -434,9 +434,9 @@ describe('Automerge', () => { it('objects without properties are preserved', () => { let doc1 = create("aaaa") - let a = doc1.set_object("_root","a",{}); - let b = doc1.set_object("_root","b",{}); - let c = doc1.set_object("_root","c",{}); + let a = doc1.setObject("_root","a",{}); + let b = doc1.setObject("_root","b",{}); + let c = doc1.setObject("_root","c",{}); let d = doc1.set(c,"d","dd"); let saved = doc1.save(); let doc2 = loadDoc(saved); @@ 
-453,7 +453,7 @@ describe('Automerge', () => { it('should handle merging text conflicts then saving & loading', () => { let A = create("aabbcc") - let At = A.set_object('_root', 'text', "") + let At = A.setObject('_root', 'text', "") A.splice(At, 0, 0, 'hello') let B = A.fork() @@ -491,7 +491,7 @@ describe('Automerge', () => { it('should include nested object creation', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.set_object('_root', 'birds', {friday: {robins: 3}}) + doc1.setObject('_root', 'birds', {friday: {robins: 3}}) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ @@ -520,7 +520,7 @@ describe('Automerge', () => { it('should include list element insertion', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.set_object('_root', 'birds', ['Goldfinch', 'Chaffinch']) + doc1.setObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ @@ -534,9 +534,9 @@ describe('Automerge', () => { it('should insert nested maps into a list', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.set_object('_root', 'birds', []) + doc1.setObject('_root', 'birds', []) doc2.loadIncremental(doc1.saveIncremental()) - doc1.insert_object('1@aaaa', 0, {species: 'Goldfinch', count: 3}) + doc1.insertObject('1@aaaa', 0, {species: 'Goldfinch', count: 3}) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ @@ -550,7 +550,7 @@ describe('Automerge', () => { it('should calculate list indexes based on visible elements', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.set_object('_root', 'birds', ['Goldfinch', 'Chaffinch']) + doc1.setObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) doc2.loadIncremental(doc1.saveIncremental()) doc1.del('1@aaaa', 0) doc1.insert('1@aaaa', 1, 'Greenfinch') @@ -568,7 +568,7 @@ 
describe('Automerge', () => { it('should handle concurrent insertions at the head of a list', () => { let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') - doc1.set_object('_root', 'values', []) + doc1.setObject('_root', 'values', []) let change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) @@ -601,7 +601,7 @@ describe('Automerge', () => { it('should handle concurrent insertions beyond the head', () => { let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') - doc1.set_object('_root', 'values', ['a', 'b']) + doc1.setObject('_root', 'values', ['a', 'b']) let change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) @@ -740,7 +740,7 @@ describe('Automerge', () => { it('should handle a conflict on a list element', () => { let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') - doc1.set_object('_root', 'birds', ['Thrush', 'Magpie']) + doc1.setObject('_root', 'birds', ['Thrush', 'Magpie']) let change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) @@ -769,7 +769,7 @@ describe('Automerge', () => { it('should handle a concurrent list element overwrite and delete', () => { let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') - doc1.set_object('_root', 'birds', ['Parakeet', 'Magpie', 'Thrush']) + doc1.setObject('_root', 'birds', ['Parakeet', 'Magpie', 'Thrush']) let change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) @@ -828,8 +828,8 @@ describe('Automerge', () => { it('should handle conflicting nested objects', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.set_object('_root', 'birds', ['Parakeet']) - doc2.set_object('_root', 'birds', {'Sparrowhawk': 1}) + doc1.setObject('_root', 'birds', ['Parakeet']) + doc2.setObject('_root', 'birds', 
{'Sparrowhawk': 1}) let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc1.enablePatches(true) doc2.enablePatches(true) @@ -912,7 +912,7 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() // make two nodes with the same changes - let list = n1.set_object("_root","n", []) + let list = n1.setObject("_root","n", []) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list,i,i) @@ -936,7 +936,7 @@ describe('Automerge', () => { let n1 = create(), n2 = create() // make changes for n1 that n2 should request - let list = n1.set_object("_root","n",[]) + let list = n1.setObject("_root","n",[]) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list, i, i) @@ -952,7 +952,7 @@ describe('Automerge', () => { let n1 = create(), n2 = create() // make changes for n1 that n2 should request - let list = n1.set_object("_root","n",[]) + let list = n1.setObject("_root","n",[]) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list,i,i) @@ -1119,7 +1119,7 @@ describe('Automerge', () => { let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState(), message = null - let items = n1.set_object("_root", "items", []) + let items = n1.setObject("_root", "items", []) n1.commit("",0) sync(n1, n2, s1, s2) From 1a66dc7ab137febc65f16d7ebd05c3cd6d885fe2 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 11:39:51 +0100 Subject: [PATCH 220/730] Use full names for delete and increment --- automerge-wasm/src/lib.rs | 4 +-- automerge/examples/quickstart.rs | 2 +- automerge/src/autocommit.rs | 4 +-- automerge/src/automerge.rs | 24 ++++++------- .../src/transaction/manual_transaction.rs | 4 +-- automerge/src/transaction/transactable.rs | 9 +++-- automerge/tests/test.rs | 34 +++++++++---------- 7 files changed, 42 insertions(+), 39 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 39d7f035..f9d8134c 100644 --- a/automerge-wasm/src/lib.rs +++ 
b/automerge-wasm/src/lib.rs @@ -266,7 +266,7 @@ impl Automerge { let value: f64 = value .as_f64() .ok_or_else(|| to_js_err("inc needs a numberic value"))?; - self.0.inc(&obj, prop, value as i64)?; + self.0.increment(&obj, prop, value as i64)?; Ok(()) } @@ -418,7 +418,7 @@ impl Automerge { pub fn del(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop = to_prop(prop)?; - self.0.del(&obj, prop).map_err(to_js_err)?; + self.0.delete(&obj, prop).map_err(to_js_err)?; Ok(()) } diff --git a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs index db0024c6..c485835e 100644 --- a/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -42,7 +42,7 @@ fn main() { doc2.transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Delete card".to_owned()), |tx| { - tx.del(&cards, 0)?; + tx.delete(&cards, 0)?; Ok(()) }, ) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 578c3a79..13d413cf 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -305,7 +305,7 @@ impl Transactable for AutoCommit { tx.insert_object(&mut self.doc, obj, index, value) } - fn inc, P: Into>( + fn increment, P: Into>( &mut self, obj: O, prop: P, @@ -316,7 +316,7 @@ impl Transactable for AutoCommit { tx.inc(&mut self.doc, obj.as_ref(), prop, value) } - fn del, P: Into>( + fn delete, P: Into>( &mut self, obj: O, prop: P, diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index c20e58dc..975e6967 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1075,7 +1075,7 @@ mod tests { let mut tx = doc.transaction(); tx.set(ROOT, "xxx", "xxx")?; assert!(!tx.values(ROOT, "xxx")?.is_empty()); - tx.del(ROOT, "xxx")?; + tx.delete(ROOT, "xxx")?; assert!(tx.values(ROOT, "xxx")?.is_empty()); tx.commit(); Ok(()) @@ -1087,9 +1087,9 @@ mod tests { let mut tx = doc.transaction(); tx.set(ROOT, "counter", 
ScalarValue::counter(10))?; assert!(tx.value(ROOT, "counter")?.unwrap().0 == Value::counter(10)); - tx.inc(ROOT, "counter", 10)?; + tx.increment(ROOT, "counter", 10)?; assert!(tx.value(ROOT, "counter")?.unwrap().0 == Value::counter(20)); - tx.inc(ROOT, "counter", -5)?; + tx.increment(ROOT, "counter", -5)?; assert!(tx.value(ROOT, "counter")?.unwrap().0 == Value::counter(15)); tx.commit(); Ok(()) @@ -1182,7 +1182,7 @@ mod tests { doc.get_heads(); let heads3 = doc.get_heads(); let mut tx = doc.transaction(); - tx.del(ROOT, "prop1")?; + tx.delete(ROOT, "prop1")?; tx.commit(); doc.get_heads(); let heads4 = doc.get_heads(); @@ -1264,12 +1264,12 @@ mod tests { let heads4 = doc.get_heads(); let mut tx = doc.transaction(); - tx.del(&list, 2)?; + tx.delete(&list, 2)?; tx.commit(); let heads5 = doc.get_heads(); let mut tx = doc.transaction(); - tx.del(&list, 0)?; + tx.delete(&list, 0)?; tx.commit(); let heads6 = doc.get_heads(); @@ -1399,7 +1399,7 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); // deleting a missing key in a map should just be a noop - assert!(tx.del(ROOT, "a").is_ok()); + assert!(tx.delete(ROOT, "a").is_ok()); tx.commit(); let last_change = doc.get_last_local_change().unwrap(); assert_eq!(last_change.len(), 0); @@ -1415,9 +1415,9 @@ mod tests { let mut tx = doc.transaction(); // a real op - tx.del(ROOT, "a").unwrap(); + tx.delete(ROOT, "a").unwrap(); // a no-op - tx.del(ROOT, "a").unwrap(); + tx.delete(ROOT, "a").unwrap(); tx.commit(); let last_change = doc.get_last_local_change().unwrap(); assert_eq!(last_change.len(), 1); @@ -1428,7 +1428,7 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); // deleting an element in a list that does not exist is an error - assert!(tx.del(ROOT, 0).is_err()); + assert!(tx.delete(ROOT, 0).is_err()); } #[test] @@ -1525,7 +1525,7 @@ mod tests { } Action::DelText(index) => { println!("deleting at {} ", index); - tx.del(&list, index).unwrap(); + tx.delete(&list, 
index).unwrap(); } } } @@ -1578,7 +1578,7 @@ mod tests { } Action::DelText(index) => { println!("deleting at {} ", index); - tx.del(&list, index).unwrap(); + tx.delete(&list, index).unwrap(); } } } diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 2303bb34..5a2db0c0 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -133,7 +133,7 @@ impl<'a> Transactable for Transaction<'a> { .insert_object(self.doc, obj, index, value) } - fn inc, P: Into>( + fn increment, P: Into>( &mut self, obj: O, prop: P, @@ -145,7 +145,7 @@ impl<'a> Transactable for Transaction<'a> { .inc(self.doc, obj.as_ref(), prop, value) } - fn del, P: Into>( + fn delete, P: Into>( &mut self, obj: O, prop: P, diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 68852180..503035d9 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -58,7 +58,7 @@ pub trait Transactable { ) -> Result; /// Increment the counter at the prop in the object by `value`. - fn inc, P: Into>( + fn increment, P: Into>( &mut self, obj: O, prop: P, @@ -66,8 +66,11 @@ pub trait Transactable { ) -> Result<(), AutomergeError>; /// Delete the value at prop in the object. - fn del, P: Into>(&mut self, obj: O, prop: P) - -> Result<(), AutomergeError>; + fn delete, P: Into>( + &mut self, + obj: O, + prop: P, + ) -> Result<(), AutomergeError>; fn splice, V: IntoIterator>( &mut self, diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index a912c01c..b3459fe9 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -74,7 +74,7 @@ fn list_deletion() { doc.insert(&list_id, 0, 123).unwrap(); doc.insert(&list_id, 1, 456).unwrap(); doc.insert(&list_id, 2, 789).unwrap(); - doc.del(&list_id, 1).unwrap(); + doc.delete(&list_id, 1).unwrap(); assert_doc!( doc.document(), map! 
{ @@ -122,8 +122,8 @@ fn add_concurrent_increments_of_same_property() { doc1.set(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); doc2.merge(&mut doc1).unwrap(); - doc1.inc(&automerge::ROOT, "counter", 1).unwrap(); - doc2.inc(&automerge::ROOT, "counter", 2).unwrap(); + doc1.increment(&automerge::ROOT, "counter", 1).unwrap(); + doc2.increment(&automerge::ROOT, "counter", 2).unwrap(); doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -142,12 +142,12 @@ fn add_increments_only_to_preceeded_values() { doc1.set(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); - doc1.inc(&automerge::ROOT, "counter", 1).unwrap(); + doc1.increment(&automerge::ROOT, "counter", 1).unwrap(); // create a counter in doc2 doc2.set(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); - doc2.inc(&automerge::ROOT, "counter", 3).unwrap(); + doc2.increment(&automerge::ROOT, "counter", 3).unwrap(); // The two values should be conflicting rather than added doc1.merge(&mut doc2).unwrap(); @@ -413,7 +413,7 @@ fn concurrent_assignment_and_deletion_of_a_map_entry() { let mut doc2 = new_doc(); doc1.set(&automerge::ROOT, "bestBird", "robin").unwrap(); doc2.merge(&mut doc1).unwrap(); - doc1.del(&automerge::ROOT, "bestBird").unwrap(); + doc1.delete(&automerge::ROOT, "bestBird").unwrap(); doc2.set(&automerge::ROOT, "bestBird", "magpie").unwrap(); doc1.merge(&mut doc2).unwrap(); @@ -440,7 +440,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { doc1.insert(&list_id, 2, "goldfinch").unwrap(); doc2.merge(&mut doc1).unwrap(); doc1.set(&list_id, 1, "starling").unwrap(); - doc2.del(&list_id, 1).unwrap(); + doc2.delete(&list_id, 1).unwrap(); assert_doc!( doc2.document(), @@ -534,9 +534,9 @@ fn concurrent_deletion_of_same_list_element() { doc2.merge(&mut doc1).unwrap(); - doc1.del(&list_id, 1).unwrap(); + doc1.delete(&list_id, 1).unwrap(); - doc2.del(&list_id, 1).unwrap(); + doc2.delete(&list_id, 1).unwrap(); doc1.merge(&mut doc2).unwrap(); @@ -581,7 +581,7 @@ fn 
concurrent_updates_at_different_levels() { doc1.set(&birds, "brown", "sparrow").unwrap(); - doc2.del(&animals, "birds").unwrap(); + doc2.delete(&animals, "birds").unwrap(); doc1.merge(&mut doc2).unwrap(); assert_obj!( @@ -620,7 +620,7 @@ fn concurrent_updates_of_concurrently_deleted_objects() { doc2.merge(&mut doc1).unwrap(); - doc1.del(&birds, "blackbird").unwrap(); + doc1.delete(&birds, "blackbird").unwrap(); doc2.set(&blackbird, "beak", "orange").unwrap(); @@ -849,8 +849,8 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc2.set(&list, 2, ScalarValue::counter(10))?; doc3.set(&list, 2, 100)?; - doc1.inc(&list, 1, 1)?; - doc1.inc(&list, 2, 1)?; + doc1.increment(&list, 1, 1)?; + doc1.increment(&list, 2, 1)?; doc1.merge(&mut doc2).unwrap(); doc1.merge(&mut doc3).unwrap(); @@ -867,8 +867,8 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(&values[1].0, &Value::counter(10)); assert_eq!(&values[2].0, &Value::int(100)); - doc1.inc(&list, 1, 1)?; - doc1.inc(&list, 2, 1)?; + doc1.increment(&list, 1, 1)?; + doc1.increment(&list, 2, 1)?; let values = doc1.values(&list, 1)?; assert_eq!(values.len(), 3); @@ -883,7 +883,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(doc1.length(&list), 3); - doc1.del(&list, 2)?; + doc1.delete(&list, 2)?; assert_eq!(doc1.length(&list), 2); @@ -891,7 +891,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(doc4.length(&list), 2); - doc1.del(&list, 1)?; + doc1.delete(&list, 1)?; assert_eq!(doc1.length(&list), 1); From 632857a4e6af95e2abffd040f235ddc4a8eb15b8 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 11:42:38 +0100 Subject: [PATCH 221/730] Rename del and inc in wasm and js --- automerge-js/src/counter.js | 4 ++-- automerge-js/src/proxies.js | 12 ++++++------ automerge-wasm/index.d.ts | 4 ++-- automerge-wasm/src/lib.rs | 11 ++++++++--- automerge-wasm/test/test.ts | 30 +++++++++++++++--------------- 5 files 
changed, 33 insertions(+), 28 deletions(-) diff --git a/automerge-js/src/counter.js b/automerge-js/src/counter.js index 1ea56479..6ca54f6d 100644 --- a/automerge-js/src/counter.js +++ b/automerge-js/src/counter.js @@ -50,7 +50,7 @@ class WriteableCounter extends Counter { */ increment(delta) { delta = typeof delta === 'number' ? delta : 1 - this.context.inc(this.objectId, this.key, delta) + this.context.increment(this.objectId, this.key, delta) this.value += delta return this.value } @@ -60,7 +60,7 @@ class WriteableCounter extends Counter { * decreases the value of the counter by 1. */ decrement(delta) { - return this.inc(typeof delta === 'number' ? -delta : -1) + return this.increment(typeof delta === 'number' ? -delta : -1) } } diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js index 1d337a06..30e89b6b 100644 --- a/automerge-js/src/proxies.js +++ b/automerge-js/src/proxies.js @@ -166,7 +166,7 @@ const MapHandler = { if (readonly) { throw new RangeError(`Object property "${key}" cannot be modified`) } - context.del(objectId, key) + context.delete(objectId, key) return true }, @@ -296,7 +296,7 @@ const ListHandler = { if (context.value(objectId, index)[0] == "counter") { throw new TypeError('Unsupported operation: deleting a counter from a list') } - context.del(objectId, index) + context.delete(objectId, index) return true }, @@ -395,7 +395,7 @@ function listMethods(target) { if (typeof numDelete === 'number') { context.splice(objectId, index, numDelete) } else { - context.del(objectId, index) + context.delete(objectId, index) } return this }, @@ -437,7 +437,7 @@ function listMethods(target) { return undefined } let last = valueAt(target, length - 1) - context.del(objectId, length - 1) + context.delete(objectId, length - 1) return last }, @@ -450,7 +450,7 @@ function listMethods(target) { shift() { if (context.length(objectId) == 0) return const first = valueAt(target, 0) - context.del(objectId, 0) + context.delete(objectId, 0) return first }, 
@@ -472,7 +472,7 @@ function listMethods(target) { for (let i = 0; i < del; i++) { let value = valueAt(target, index) result.push(value) - context.del(objectId, index) + context.delete(objectId, index) } const values = vals.map((val) => import_value(val)) for (let [value,datatype] of values) { diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 5b833dda..69373ebe 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -99,8 +99,8 @@ export class Automerge { push(obj: ObjID, value: Value, datatype?: Datatype): undefined; pushObject(obj: ObjID, value: ObjType): ObjID; splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; - inc(obj: ObjID, prop: Prop, value: number): void; - del(obj: ObjID, prop: Prop): void; + increment(obj: ObjID, prop: Prop, value: number): void; + delete(obj: ObjID, prop: Prop): void; // returns a single value - if there is a conflict return the winner value(obj: ObjID, prop: any, heads?: Heads): FullValue | null; diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index f9d8134c..cb4e7a38 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -260,12 +260,17 @@ impl Automerge { Ok(()) } - pub fn inc(&mut self, obj: JsValue, prop: JsValue, value: JsValue) -> Result<(), JsValue> { + pub fn increment( + &mut self, + obj: JsValue, + prop: JsValue, + value: JsValue, + ) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; let value: f64 = value .as_f64() - .ok_or_else(|| to_js_err("inc needs a numberic value"))?; + .ok_or_else(|| to_js_err("increment needs a numeric value"))?; self.0.increment(&obj, prop, value as i64)?; Ok(()) } @@ -415,7 +420,7 @@ impl Automerge { } } - pub fn del(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> { + pub fn delete(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop = to_prop(prop)?; 
self.0.delete(&obj, prop).map_err(to_js_err)?; diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 81e7842e..62eb6910 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -190,8 +190,8 @@ describe('Automerge', () => { assert.deepEqual(doc.keys("_root"),["bip","foo"]) - doc.del("_root", "foo") - doc.del("_root", "baz") + doc.delete("_root", "foo") + doc.delete("_root", "baz") let heads2 = doc.commit() assert.deepEqual(doc.keys("_root"),["bip"]) @@ -206,7 +206,7 @@ describe('Automerge', () => { doc.set(root, "xxx", "xxx"); assert.deepEqual(doc.value(root, "xxx"),["str","xxx"]) - doc.del(root, "xxx"); + doc.delete(root, "xxx"); assert.deepEqual(doc.value(root, "xxx"),undefined) doc.free() }) @@ -217,9 +217,9 @@ describe('Automerge', () => { doc.set(root, "counter", 10, "counter"); assert.deepEqual(doc.value(root, "counter"),["counter",10]) - doc.inc(root, "counter", 10); + doc.increment(root, "counter", 10); assert.deepEqual(doc.value(root, "counter"),["counter",20]) - doc.inc(root, "counter", -5); + doc.increment(root, "counter", -5); assert.deepEqual(doc.value(root, "counter"),["counter",15]) doc.free() }) @@ -319,7 +319,7 @@ describe('Automerge', () => { ['counter',0,'2@bbbb'], ['counter',10,'2@cccc'], ]) - doc1.inc("_root", "cnt", 5) + doc1.increment("_root", "cnt", 5) result = doc1.values("_root", "cnt") assert.deepEqual(result, [ [ 'counter', 5, '2@bbbb' ], @@ -352,7 +352,7 @@ describe('Automerge', () => { ['counter',0,'3@bbbb'], ['counter',10,'3@cccc'], ]) - doc1.inc(seq, 0, 5) + doc1.increment(seq, 0, 5) result = doc1.values(seq, 0) assert.deepEqual(result, [ [ 'counter', 5, '3@bbbb' ], @@ -414,8 +414,8 @@ describe('Automerge', () => { let r1 = doc.set("_root","foo","bar") let r2 = doc.setObject("_root","list",[]) let r3 = doc.set("_root","counter",10, "counter") - let r4 = doc.inc("_root","counter",1) - let r5 = doc.del("_root","counter") + let r4 = doc.increment("_root","counter",1) + let r5 = 
doc.delete("_root","counter") let r6 = doc.insert(r2,0,10); let r7 = doc.insertObject(r2,0,{}); let r8 = doc.splice(r2,1,0,["a","b","c"]); @@ -508,7 +508,7 @@ describe('Automerge', () => { doc1.set('_root', 'favouriteBird', 'Robin') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) - doc1.del('_root', 'favouriteBird') + doc1.delete('_root', 'favouriteBird') doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ {action: 'assign', obj: '_root', key: 'favouriteBird', value: 'Robin', datatype: 'str', conflict: false}, @@ -552,7 +552,7 @@ describe('Automerge', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') doc1.setObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) doc2.loadIncremental(doc1.saveIncremental()) - doc1.del('1@aaaa', 0) + doc1.delete('1@aaaa', 0) doc1.insert('1@aaaa', 1, 'Greenfinch') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -719,7 +719,7 @@ describe('Automerge', () => { doc1.set('_root', 'bird', 'Greenfinch') doc2.loadIncremental(doc1.saveIncremental()) doc1.set('_root', 'bird', 'Goldfinch') - doc2.del('_root', 'bird') + doc2.delete('_root', 'bird') let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc1.enablePatches(true) doc2.enablePatches(true) @@ -774,7 +774,7 @@ describe('Automerge', () => { doc2.loadIncremental(change1) doc3.loadIncremental(change1) doc4.loadIncremental(change1) - doc1.del('1@aaaa', 0) + doc1.delete('1@aaaa', 0) doc1.set('1@aaaa', 1, 'Song Thrush') doc2.set('1@aaaa', 0, 'Ring-necked parakeet') doc2.set('1@aaaa', 2, 'Redwing') @@ -807,7 +807,7 @@ describe('Automerge', () => { doc1.set('_root', 'bird', 'Robin') doc2.set('_root', 'bird', 'Wren') let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() - doc2.del('_root', 'bird') + doc2.delete('_root', 'bird') let change3 = doc2.saveIncremental() doc3.enablePatches(true) doc3.loadIncremental(change1) @@ -866,7 +866,7 @@ describe('Automerge', () => { 
doc2.enablePatches(true) doc1.set('_root', 'starlings', 2, 'counter') doc2.loadIncremental(doc1.saveIncremental()) - doc1.inc('_root', 'starlings', 1) + doc1.increment('_root', 'starlings', 1) doc1.dump() doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.value('_root', 'starlings'), ['counter', 3]) From 5cbc977076bae8d5c34e1821612d4db4273659da Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 11:52:14 +0100 Subject: [PATCH 222/730] More internal renames of del and inc --- automerge/src/autocommit.rs | 4 +-- automerge/src/automerge.rs | 10 +++---- automerge/src/change.rs | 2 +- automerge/src/columnar.rs | 16 ++++++------ automerge/src/legacy/mod.rs | 2 +- automerge/src/legacy/serde_impls/op.rs | 26 +++++++++---------- automerge/src/legacy/serde_impls/op_type.rs | 4 +-- automerge/src/query.rs | 2 +- automerge/src/transaction/inner.rs | 14 +++++----- .../src/transaction/manual_transaction.rs | 4 +-- automerge/src/types.rs | 19 +++++++------- automerge/src/visualisation.rs | 4 +-- 12 files changed, 53 insertions(+), 54 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 13d413cf..889e9b53 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -313,7 +313,7 @@ impl Transactable for AutoCommit { ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.inc(&mut self.doc, obj.as_ref(), prop, value) + tx.increment(&mut self.doc, obj.as_ref(), prop, value) } fn delete, P: Into>( @@ -323,7 +323,7 @@ impl Transactable for AutoCommit { ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.del(&mut self.doc, obj.as_ref(), prop) + tx.delete(&mut self.doc, obj.as_ref(), prop) } /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 975e6967..562c0d32 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -205,7 +205,7 @@ impl Automerge { self.ops.replace(obj, i, |old_op| old_op.add_succ(&op)); } - if !op.is_del() { + if !op.is_delete() { self.ops.insert(q.pos, obj, op.clone()); } op @@ -218,7 +218,7 @@ impl Automerge { self.ops.replace(obj, i, |old_op| old_op.add_succ(&op)); } - if !op.is_del() { + if !op.is_delete() { self.ops.insert(q.pos, obj, op.clone()); } @@ -231,7 +231,7 @@ impl Automerge { let patch = if op.insert { let value = (op.value(), self.id_to_exid(op.id)); Patch::Insert(obj, q.seen, value) - } else if op.is_del() { + } else if op.is_delete() { if let Some(winner) = &q.values.last() { let value = (winner.value(), self.id_to_exid(winner.id)); let conflict = q.values.len() > 1; @@ -958,8 +958,8 @@ impl Automerge { let value: String = match &op.action { OpType::Set(value) => format!("{}", value), OpType::Make(obj) => format!("make({})", obj), - OpType::Inc(obj) => format!("inc({})", obj), - OpType::Del => format!("del{}", 0), + OpType::Increment(obj) => format!("inc({})", obj), + OpType::Delete => format!("del{}", 0), }; let pred: Vec<_> = op.pred.iter().map(|id| self.to_string(*id)).collect(); let succ: Vec<_> = op.succ.iter().map(|id| self.to_string(*id)).collect(); diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 24baf8ef..c33aba2c 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -879,7 +879,7 @@ fn group_doc_change_and_doc_ops( let del = DocOp { actor: succ.1, ctr: succ.0, - action: OpType::Del, + action: OpType::Delete, obj: op.obj.clone(), key, succ: Vec::new(), diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index 7cb38872..5b6bf21b 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -135,8 +135,8 @@ impl<'a> Iterator for OperationIterator<'a> { 
Action::MakeText => OpType::Make(ObjType::Text), Action::MakeMap => OpType::Make(ObjType::Map), Action::MakeTable => OpType::Make(ObjType::Table), - Action::Del => OpType::Del, - Action::Inc => OpType::Inc(value.to_i64()?), + Action::Del => OpType::Delete, + Action::Inc => OpType::Increment(value.to_i64()?), }; Some(amp::Op { action, @@ -176,8 +176,8 @@ impl<'a> Iterator for DocOpIterator<'a> { Action::MakeText => OpType::Make(ObjType::Text), Action::MakeMap => OpType::Make(ObjType::Map), Action::MakeTable => OpType::Make(ObjType::Table), - Action::Del => OpType::Del, - Action::Inc => OpType::Inc(value.to_i64()?), + Action::Del => OpType::Delete, + Action::Inc => OpType::Increment(value.to_i64()?), }; Some(DocOp { actor, @@ -1055,11 +1055,11 @@ impl DocOpEncoder { self.val.append_value(value, actors); Action::Set } - amp::OpType::Inc(val) => { + amp::OpType::Increment(val) => { self.val.append_value(&ScalarValue::Int(*val), actors); Action::Inc } - amp::OpType::Del => { + amp::OpType::Delete => { self.val.append_null(); Action::Del } @@ -1161,11 +1161,11 @@ impl ColumnEncoder { self.val.append_value2(value, actors); Action::Set } - OpType::Inc(val) => { + OpType::Increment(val) => { self.val.append_value2(&ScalarValue::Int(*val), actors); Action::Inc } - OpType::Del => { + OpType::Delete => { self.val.append_null(); Action::Del } diff --git a/automerge/src/legacy/mod.rs b/automerge/src/legacy/mod.rs index 835c6597..c1bfbac7 100644 --- a/automerge/src/legacy/mod.rs +++ b/automerge/src/legacy/mod.rs @@ -217,7 +217,7 @@ impl Op { pub fn primitive_value(&self) -> Option { match &self.action { OpType::Set(v) => Some(v.clone()), - OpType::Inc(i) => Some(ScalarValue::Int(*i)), + OpType::Increment(i) => Some(ScalarValue::Int(*i)), _ => None, } } diff --git a/automerge/src/legacy/serde_impls/op.rs b/automerge/src/legacy/serde_impls/op.rs index 1d2a4125..ac4d4737 100644 --- a/automerge/src/legacy/serde_impls/op.rs +++ b/automerge/src/legacy/serde_impls/op.rs @@ -47,7 +47,7 
@@ impl Serialize for Op { op.serialize_field("datatype", &datatype)?; } match &self.action { - OpType::Inc(n) => op.serialize_field("value", &n)?, + OpType::Increment(n) => op.serialize_field("value", &n)?, OpType::Set(value) => op.serialize_field("value", &value)?, _ => {} } @@ -187,7 +187,7 @@ impl<'de> Deserialize<'de> for Op { RawOpType::MakeTable => OpType::Make(ObjType::Table), RawOpType::MakeList => OpType::Make(ObjType::List), RawOpType::MakeText => OpType::Make(ObjType::Text), - RawOpType::Del => OpType::Del, + RawOpType::Del => OpType::Delete, RawOpType::Set => { let value = if let Some(datatype) = datatype { let raw_value = value @@ -207,11 +207,11 @@ impl<'de> Deserialize<'de> for Op { OpType::Set(value) } RawOpType::Inc => match value.flatten() { - Some(ScalarValue::Int(n)) => Ok(OpType::Inc(n)), - Some(ScalarValue::Uint(n)) => Ok(OpType::Inc(n as i64)), - Some(ScalarValue::F64(n)) => Ok(OpType::Inc(n as i64)), - Some(ScalarValue::Counter(n)) => Ok(OpType::Inc(n.into())), - Some(ScalarValue::Timestamp(n)) => Ok(OpType::Inc(n)), + Some(ScalarValue::Int(n)) => Ok(OpType::Increment(n)), + Some(ScalarValue::Uint(n)) => Ok(OpType::Increment(n as i64)), + Some(ScalarValue::F64(n)) => Ok(OpType::Increment(n as i64)), + Some(ScalarValue::Counter(n)) => Ok(OpType::Increment(n.into())), + Some(ScalarValue::Timestamp(n)) => Ok(OpType::Increment(n)), Some(ScalarValue::Bytes(s)) => { Err(Error::invalid_value(Unexpected::Bytes(&s), &"a number")) } @@ -430,7 +430,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Inc(12), + action: OpType::Increment(12), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -447,7 +447,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Inc(12), + action: OpType::Increment(12), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -552,7 +552,7 @@ mod tests { #[test] fn test_serialize_key() { let map_key = Op { - action: OpType::Inc(12), + action: OpType::Increment(12), obj: 
ObjectId::Root, key: "somekey".into(), insert: false, @@ -563,7 +563,7 @@ mod tests { assert_eq!(json.as_object().unwrap().get("key"), Some(&expected)); let elemid_key = Op { - action: OpType::Inc(12), + action: OpType::Increment(12), obj: ObjectId::Root, key: OpId::from_str("1@7ef48769b04d47e9a88e98a134d62716") .unwrap() @@ -587,7 +587,7 @@ mod tests { pred: SortedVec::new(), }, Op { - action: OpType::Inc(12), + action: OpType::Increment(12), obj: ObjectId::from_str("1@7ef48769b04d47e9a88e98a134d62716").unwrap(), key: "somekey".into(), insert: false, @@ -601,7 +601,7 @@ mod tests { pred: vec![OpId::from_str("1@7ef48769b04d47e9a88e98a134d62716").unwrap()].into(), }, Op { - action: OpType::Inc(12), + action: OpType::Increment(12), obj: ObjectId::Root, key: "somekey".into(), insert: false, diff --git a/automerge/src/legacy/serde_impls/op_type.rs b/automerge/src/legacy/serde_impls/op_type.rs index 19849674..1b81e181 100644 --- a/automerge/src/legacy/serde_impls/op_type.rs +++ b/automerge/src/legacy/serde_impls/op_type.rs @@ -15,8 +15,8 @@ impl Serialize for OpType { OpType::Make(ObjType::Table) => RawOpType::MakeTable, OpType::Make(ObjType::List) => RawOpType::MakeList, OpType::Make(ObjType::Text) => RawOpType::MakeText, - OpType::Del => RawOpType::Del, - OpType::Inc(_) => RawOpType::Inc, + OpType::Delete => RawOpType::Del, + OpType::Increment(_) => RawOpType::Inc, OpType::Set(_) => RawOpType::Set, }; raw_type.serialize(serializer) diff --git a/automerge/src/query.rs b/automerge/src/query.rs index ca02bce0..9446376d 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -191,7 +191,7 @@ impl VisWindow { visible = true; } } - OpType::Inc(inc_val) => { + OpType::Increment(inc_val) => { for id in &op.pred { // pred is always before op.id so we can see them if let Some(mut entry) = self.counters.get_mut(id) { diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index f96d8492..7b188dfe 100644 --- 
a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -142,7 +142,7 @@ impl TransactionInner { }); } - if !op.is_del() { + if !op.is_delete() { doc.ops.insert(pos, &obj, op.clone()); } @@ -236,7 +236,7 @@ impl TransactionInner { let query = doc.ops.search(&obj, query::Prop::new(prop)); // no key present to delete - if query.ops.is_empty() && action == OpType::Del { + if query.ops.is_empty() && action == OpType::Delete { return Ok(None); } @@ -303,7 +303,7 @@ impl TransactionInner { } } - pub fn inc>( + pub fn increment>( &mut self, doc: &mut Automerge, obj: &ExId, @@ -311,18 +311,18 @@ impl TransactionInner { value: i64, ) -> Result<(), AutomergeError> { let obj = doc.exid_to_obj(obj)?; - self.local_op(doc, obj, prop.into(), OpType::Inc(value))?; + self.local_op(doc, obj, prop.into(), OpType::Increment(value))?; Ok(()) } - pub fn del>( + pub fn delete>( &mut self, doc: &mut Automerge, obj: &ExId, prop: P, ) -> Result<(), AutomergeError> { let obj = doc.exid_to_obj(obj)?; - self.local_op(doc, obj, prop.into(), OpType::Del)?; + self.local_op(doc, obj, prop.into(), OpType::Delete)?; Ok(()) } @@ -339,7 +339,7 @@ impl TransactionInner { let obj = doc.exid_to_obj(obj)?; for _ in 0..del { // del() - self.local_op(doc, obj, pos.into(), OpType::Del)?; + self.local_op(doc, obj, pos.into(), OpType::Delete)?; } for v in vals { // insert() diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 5a2db0c0..24ab844a 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -142,7 +142,7 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .inc(self.doc, obj.as_ref(), prop, value) + .increment(self.doc, obj.as_ref(), prop, value) } fn delete, P: Into>( @@ -153,7 +153,7 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .del(self.doc, obj.as_ref(), prop) + .delete(self.doc, 
obj.as_ref(), prop) } /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 4a8052b7..f879e3d8 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -168,9 +168,8 @@ impl fmt::Display for ObjType { #[derive(PartialEq, Debug, Clone)] pub enum OpType { Make(ObjType), - /// Perform a deletion, expanding the operation to cover `n` deletions (multiOp). - Del, - Inc(i64), + Delete, + Increment(i64), Set(ScalarValue), } @@ -374,7 +373,7 @@ impl Op { .. })) = &mut self.action { - if let OpType::Inc(n) = &op.action { + if let OpType::Increment(n) = &op.action { *current += *n; *increments += 1; } @@ -389,7 +388,7 @@ impl Op { .. })) = &mut self.action { - if let OpType::Inc(n) = &op.action { + if let OpType::Increment(n) = &op.action { *current -= *n; *increments -= 1; } @@ -414,12 +413,12 @@ impl Op { } } - pub fn is_del(&self) -> bool { - matches!(&self.action, OpType::Del) + pub fn is_delete(&self) -> bool { + matches!(&self.action, OpType::Delete) } pub fn is_inc(&self) -> bool { - matches!(&self.action, OpType::Inc(_)) + matches!(&self.action, OpType::Increment(_)) } pub fn is_counter(&self) -> bool { @@ -460,8 +459,8 @@ impl Op { OpType::Set(value) if self.insert => format!("i:{}", value), OpType::Set(value) => format!("s:{}", value), OpType::Make(obj) => format!("make{}", obj), - OpType::Inc(val) => format!("inc:{}", val), - OpType::Del => "del".to_string(), + OpType::Increment(val) => format!("inc:{}", val), + OpType::Delete => "del".to_string(), } } } diff --git a/automerge/src/visualisation.rs b/automerge/src/visualisation.rs index cf283eb0..9ba4f428 100644 --- a/automerge/src/visualisation.rs +++ b/automerge/src/visualisation.rs @@ -234,10 +234,10 @@ impl OpTableRow { actor_shorthands: &HashMap, ) -> Self { let op_description = match &op.action { - crate::OpType::Del => "del".to_string(), + crate::OpType::Delete => "del".to_string(), 
crate::OpType::Set(v) => format!("set {}", v), crate::OpType::Make(obj) => format!("make {}", obj), - crate::OpType::Inc(v) => format!("inc {}", v), + crate::OpType::Increment(v) => format!("inc {}", v), }; let prop = match op.key { crate::types::Key::Map(k) => metadata.props[k].clone(), From d331ceb6d4f6b8cb4065da2ba65661d568c63ea7 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 12:38:03 +0100 Subject: [PATCH 223/730] Rename set to put and set_object to put_object --- automerge-cli/src/import.rs | 16 +- automerge-js/src/proxies.js | 18 +- automerge-wasm/index.d.ts | 4 +- automerge-wasm/src/lib.rs | 14 +- automerge-wasm/test/test.ts | 314 +++++++++--------- automerge/examples/quickstart.rs | 12 +- automerge/src/autocommit.rs | 10 +- automerge/src/automerge.rs | 86 ++--- automerge/src/columnar.rs | 8 +- automerge/src/legacy/mod.rs | 2 +- automerge/src/legacy/serde_impls/op.rs | 28 +- automerge/src/legacy/serde_impls/op_type.rs | 2 +- automerge/src/op_tree.rs | 2 +- automerge/src/query.rs | 4 +- automerge/src/transaction/inner.rs | 8 +- .../src/transaction/manual_transaction.rs | 10 +- automerge/src/transaction/transactable.rs | 4 +- automerge/src/types.rs | 20 +- automerge/src/value.rs | 6 +- automerge/src/visualisation.rs | 2 +- automerge/tests/test.rs | 142 ++++---- edit-trace/automerge-wasm.js | 4 +- edit-trace/benches/main.rs | 4 +- edit-trace/src/main.rs | 2 +- 24 files changed, 361 insertions(+), 361 deletions(-) diff --git a/automerge-cli/src/import.rs b/automerge-cli/src/import.rs index 9f9a3210..ecc184f4 100644 --- a/automerge-cli/src/import.rs +++ b/automerge-cli/src/import.rs @@ -22,31 +22,31 @@ fn import_map( for (key, value) in map { match value { serde_json::Value::Null => { - doc.set(obj, key, ())?; + doc.put(obj, key, ())?; } serde_json::Value::Bool(b) => { - doc.set(obj, key, *b)?; + doc.put(obj, key, *b)?; } serde_json::Value::String(s) => { - doc.set(obj, key, s.as_ref())?; + doc.put(obj, key, s.as_ref())?; } 
serde_json::Value::Array(vec) => { - let id = doc.set_object(obj, key, am::ObjType::List)?; + let id = doc.put_object(obj, key, am::ObjType::List)?; import_list(doc, &id, vec)?; } serde_json::Value::Number(n) => { if let Some(m) = n.as_i64() { - doc.set(obj, key, m)?; + doc.put(obj, key, m)?; } else if let Some(m) = n.as_u64() { - doc.set(obj, key, m)?; + doc.put(obj, key, m)?; } else if let Some(m) = n.as_f64() { - doc.set(obj, key, m)?; + doc.put(obj, key, m)?; } else { anyhow::bail!("not a number"); } } serde_json::Value::Object(map) => { - let id = doc.set_object(obj, key, am::ObjType::Map)?; + let id = doc.put_object(obj, key, am::ObjType::Map)?; import_map(doc, &id, map)?; } } diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js index 30e89b6b..1086bcb0 100644 --- a/automerge-js/src/proxies.js +++ b/automerge-js/src/proxies.js @@ -134,28 +134,28 @@ const MapHandler = { } switch (datatype) { case "list": - const list = context.setObject(objectId, key, []) + const list = context.putObject(objectId, key, []) const proxyList = listProxy(context, list, [ ... path, key ], readonly ); for (let i = 0; i < value.length; i++) { proxyList[i] = value[i] } break; case "text": - const text = context.setObject(objectId, key, "", "text") + const text = context.putObject(objectId, key, "", "text") const proxyText = textProxy(context, text, [ ... path, key ], readonly ); for (let i = 0; i < value.length; i++) { proxyText[i] = value.get(i) } break; case "map": - const map = context.setObject(objectId, key, {}) + const map = context.putObject(objectId, key, {}) const proxyMap = mapProxy(context, map, [ ... 
path, key ], readonly ); for (const key in value) { proxyMap[key] = value[key] } break; default: - context.set(objectId, key, value, datatype) + context.put(objectId, key, value, datatype) } return true }, @@ -253,7 +253,7 @@ const ListHandler = { if (index >= context.length(objectId)) { list = context.insertObject(objectId, index, []) } else { - list = context.setObject(objectId, index, []) + list = context.putObject(objectId, index, []) } const proxyList = listProxy(context, list, [ ... path, index ], readonly); proxyList.splice(0,0,...value) @@ -263,7 +263,7 @@ const ListHandler = { if (index >= context.length(objectId)) { text = context.insertObject(objectId, index, "", "text") } else { - text = context.setObject(objectId, index, "", "text") + text = context.putObject(objectId, index, "", "text") } const proxyText = textProxy(context, text, [ ... path, index ], readonly); proxyText.splice(0,0,...value) @@ -273,7 +273,7 @@ const ListHandler = { if (index >= context.length(objectId)) { map = context.insertObject(objectId, index, {}) } else { - map = context.setObject(objectId, index, {}) + map = context.putObject(objectId, index, {}) } const proxyMap = mapProxy(context, map, [ ... 
path, index ], readonly); for (const key in value) { @@ -284,7 +284,7 @@ const ListHandler = { if (index >= context.length(objectId)) { context.insert(objectId, index, value, datatype) } else { - context.set(objectId, index, value, datatype) + context.put(objectId, index, value, datatype) } } return true @@ -405,7 +405,7 @@ function listMethods(target) { let list = context.getObject(objectId) let [value, datatype] = valueAt(target, index) for (let index = parseListIndex(start || 0); index < parseListIndex(end || list.length); index++) { - context.set(objectId, index, value, datatype) + context.put(objectId, index, value, datatype) } return this }, diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 69373ebe..d694714d 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -92,8 +92,8 @@ export function decodeSyncState(data: Uint8Array): SyncState; export class Automerge { // change state - set(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined; - setObject(obj: ObjID, prop: Prop, value: ObjType): ObjID; + put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined; + putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID; insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): undefined; insertObject(obj: ObjID, index: number, value: ObjType): ObjID; push(obj: ObjID, value: Value, datatype?: Datatype): undefined; diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index cb4e7a38..54480197 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -203,7 +203,7 @@ impl Automerge { Ok(opid.to_string().into()) } - pub fn set( + pub fn put( &mut self, obj: JsValue, prop: JsValue, @@ -215,12 +215,12 @@ impl Automerge { let value = self .import_scalar(&value, &datatype.as_string()) .ok_or_else(|| to_js_err("expected scalar value"))?; - self.0.set(&obj, prop, value)?; + self.0.put(&obj, prop, value)?; Ok(()) } - #[wasm_bindgen(js_name = setObject)] - pub 
fn set_object( + #[wasm_bindgen(js_name = putObject)] + pub fn put_object( &mut self, obj: JsValue, prop: JsValue, @@ -230,7 +230,7 @@ impl Automerge { let prop = self.import_prop(prop)?; let (value, subvals) = to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; - let opid = self.0.set_object(&obj, prop, value)?; + let opid = self.0.put_object(&obj, prop, value)?; self.subset(&opid, subvals)?; Ok(opid.to_string().into()) } @@ -240,9 +240,9 @@ impl Automerge { let (value, subvals) = self.import_value(&v, None)?; //let opid = self.0.set(id, p, value)?; let opid = match (p, value) { - (Prop::Map(s), Value::Object(objtype)) => Some(self.0.set_object(obj, s, objtype)?), + (Prop::Map(s), Value::Object(objtype)) => Some(self.0.put_object(obj, s, objtype)?), (Prop::Map(s), Value::Scalar(scalar)) => { - self.0.set(obj, s, scalar)?; + self.0.put(obj, s, scalar)?; None } (Prop::Seq(i), Value::Object(objtype)) => { diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 62eb6910..f9ff6da3 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -55,17 +55,17 @@ describe('Automerge', () => { let root = "_root" let result - doc.set(root, "hello", "world") - doc.set(root, "number1", 5, "uint") - doc.set(root, "number2", 5) - doc.set(root, "number3", 5.5) - doc.set(root, "number4", 5.5, "f64") - doc.set(root, "number5", 5.5, "int") - doc.set(root, "bool", true) - doc.set(root, "time1", 1000, "timestamp") - doc.set(root, "time2", new Date(1001)) - doc.setObject(root, "list", []); - doc.set(root, "null", null) + doc.put(root, "hello", "world") + doc.put(root, "number1", 5, "uint") + doc.put(root, "number2", 5) + doc.put(root, "number3", 5.5) + doc.put(root, "number4", 5.5, "f64") + doc.put(root, "number5", 5.5, "int") + doc.put(root, "bool", true) + doc.put(root, "time1", 1000, "timestamp") + doc.put(root, "time2", new Date(1001)) + doc.putObject(root, "list", []); + doc.put(root, "null", null) result = 
doc.value(root,"hello") assert.deepEqual(result,["str","world"]) @@ -88,7 +88,7 @@ describe('Automerge', () => { result = doc.value(root,"bool") assert.deepEqual(result,["boolean",true]) - doc.set(root, "bool", false, "boolean") + doc.put(root, "bool", false, "boolean") result = doc.value(root,"bool") assert.deepEqual(result,["boolean",false]) @@ -110,8 +110,8 @@ describe('Automerge', () => { it('should be able to use bytes', () => { let doc = create() - doc.set("_root","data1", new Uint8Array([10,11,12])); - doc.set("_root","data2", new Uint8Array([13,14,15]), "bytes"); + doc.put("_root","data1", new Uint8Array([10,11,12])); + doc.put("_root","data2", new Uint8Array([13,14,15]), "bytes"); let value1 = doc.value("_root", "data1") assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]); let value2 = doc.value("_root", "data2") @@ -124,8 +124,8 @@ describe('Automerge', () => { let root = "_root" let result - let submap = doc.setObject(root, "submap", {}) - doc.set(submap, "number", 6, "uint") + let submap = doc.putObject(root, "submap", {}) + doc.put(submap, "number", 6, "uint") assert.strictEqual(doc.pendingOps(),2) result = doc.value(root,"submap") @@ -140,7 +140,7 @@ describe('Automerge', () => { let doc = create() let root = "_root" - let submap = doc.setObject(root, "numbers", []) + let submap = doc.putObject(root, "numbers", []) doc.insert(submap, 0, "a"); doc.insert(submap, 1, "b"); doc.insert(submap, 2, "c"); @@ -152,7 +152,7 @@ describe('Automerge', () => { assert.deepEqual(doc.value(submap, 3),["str","c"]) assert.deepEqual(doc.length(submap),4) - doc.set(submap, 2, "b v2"); + doc.put(submap, 2, "b v2"); assert.deepEqual(doc.value(submap, 2),["str","b v2"]) assert.deepEqual(doc.length(submap),4) @@ -163,7 +163,7 @@ describe('Automerge', () => { let doc = create() let root = "_root" - let submap = doc.setObject(root, "letters", []) + let submap = doc.putObject(root, "letters", []) doc.insert(submap, 0, "a"); doc.insert(submap, 0, "b"); 
assert.deepEqual(doc.materialize(), { letters: ["b", "a" ] }) @@ -173,7 +173,7 @@ describe('Automerge', () => { assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3) ] }) doc.splice(submap, 1, 1, ["d","e","f"]); assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", new Date(3) ] }) - doc.set(submap, 0, "z"); + doc.put(submap, 0, "z"); assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", new Date(3) ] }) assert.deepEqual(doc.materialize(submap), ["z", "d", "e", "f", "c", new Date(3) ]) assert.deepEqual(doc.length(submap),6) @@ -184,8 +184,8 @@ describe('Automerge', () => { it('should be able delete non-existant props', () => { let doc = create() - doc.set("_root", "foo","bar") - doc.set("_root", "bip","bap") + doc.put("_root", "foo","bar") + doc.put("_root", "bip","bap") let heads1 = doc.commit() assert.deepEqual(doc.keys("_root"),["bip","foo"]) @@ -204,7 +204,7 @@ describe('Automerge', () => { let doc = create() let root = "_root" - doc.set(root, "xxx", "xxx"); + doc.put(root, "xxx", "xxx"); assert.deepEqual(doc.value(root, "xxx"),["str","xxx"]) doc.delete(root, "xxx"); assert.deepEqual(doc.value(root, "xxx"),undefined) @@ -215,7 +215,7 @@ describe('Automerge', () => { let doc = create() let root = "_root" - doc.set(root, "counter", 10, "counter"); + doc.put(root, "counter", 10, "counter"); assert.deepEqual(doc.value(root, "counter"),["counter",10]) doc.increment(root, "counter", 10); assert.deepEqual(doc.value(root, "counter"),["counter",20]) @@ -228,7 +228,7 @@ describe('Automerge', () => { let doc = create() let root = "_root"; - let text = doc.setObject(root, "text", ""); + let text = doc.putObject(root, "text", ""); if (!text) throw new Error('should not be undefined') doc.splice(text, 0, 0, "hello ") doc.splice(text, 6, 0, ["w","o","r","l","d"]) @@ -244,7 +244,7 @@ describe('Automerge', () => { it('should be able to insert objects into text', () => { let doc = create() - let text = doc.setObject("/", 
"text", "Hello world"); + let text = doc.putObject("/", "text", "Hello world"); let obj = doc.insertObject(text, 6, { hello: "world" }); assert.deepEqual(doc.text(text), "Hello \ufffcworld"); assert.deepEqual(doc.value(text, 6), ["map", obj]); @@ -254,17 +254,17 @@ describe('Automerge', () => { it('should be able save all or incrementally', () => { let doc = create() - doc.set("_root", "foo", 1) + doc.put("_root", "foo", 1) let save1 = doc.save() - doc.set("_root", "bar", 2) + doc.put("_root", "bar", 2) let saveMidway = doc.clone().save(); let save2 = doc.saveIncremental(); - doc.set("_root", "baz", 3); + doc.put("_root", "baz", 3); let save3 = doc.saveIncremental(); @@ -289,7 +289,7 @@ describe('Automerge', () => { it('should be able to splice text', () => { let doc = create() - let text = doc.setObject("_root", "text", ""); + let text = doc.putObject("_root", "text", ""); doc.splice(text, 0, 0, "hello world"); let heads1 = doc.commit(); doc.splice(text, 6, 0, "big bad "); @@ -305,12 +305,12 @@ describe('Automerge', () => { it('local inc increments all visible counters in a map', () => { let doc1 = create("aaaa") - doc1.set("_root", "hello", "world") + doc1.put("_root", "hello", "world") let doc2 = loadDoc(doc1.save(), "bbbb"); let doc3 = loadDoc(doc1.save(), "cccc"); - doc1.set("_root", "cnt", 20) - doc2.set("_root", "cnt", 0, "counter") - doc3.set("_root", "cnt", 10, "counter") + doc1.put("_root", "cnt", 20) + doc2.put("_root", "cnt", 0, "counter") + doc3.put("_root", "cnt", 10, "counter") doc1.applyChanges(doc2.getChanges(doc1.getHeads())) doc1.applyChanges(doc3.getChanges(doc1.getHeads())) let result = doc1.values("_root", "cnt") @@ -337,13 +337,13 @@ describe('Automerge', () => { it('local inc increments all visible counters in a sequence', () => { let doc1 = create("aaaa") - let seq = doc1.setObject("_root", "seq", []) + let seq = doc1.putObject("_root", "seq", []) doc1.insert(seq, 0, "hello") let doc2 = loadDoc(doc1.save(), "bbbb"); let doc3 = 
loadDoc(doc1.save(), "cccc"); - doc1.set(seq, 0, 20) - doc2.set(seq, 0, 0, "counter") - doc3.set(seq, 0, 10, "counter") + doc1.put(seq, 0, 20) + doc2.put(seq, 0, 0, "counter") + doc3.put(seq, 0, 10, "counter") doc1.applyChanges(doc2.getChanges(doc1.getHeads())) doc1.applyChanges(doc3.getChanges(doc1.getHeads())) let result = doc1.values(seq, 0) @@ -370,7 +370,7 @@ describe('Automerge', () => { it('paths can be used instead of objids', () => { let doc = create("aaaa") - doc.setObject("_root","list",[{ foo: "bar"}, [1,2,3]]) + doc.putObject("_root","list",[{ foo: "bar"}, [1,2,3]]) assert.deepEqual(doc.materialize("/"), { list: [{ foo: "bar"}, [1,2,3]] }) assert.deepEqual(doc.materialize("/list"), [{ foo: "bar"}, [1,2,3]]) assert.deepEqual(doc.materialize("/list/0"), { foo: "bar"}) @@ -379,8 +379,8 @@ describe('Automerge', () => { it('should be able to fetch changes by hash', () => { let doc1 = create("aaaa") let doc2 = create("bbbb") - doc1.set("/","a","b") - doc2.set("/","b","c") + doc1.put("/","a","b") + doc2.put("/","b","c") let head1 = doc1.getHeads() let head2 = doc2.getHeads() let change1 = doc1.getChangeByHash(head1[0]) @@ -392,11 +392,11 @@ describe('Automerge', () => { it('recursive sets are possible', () => { let doc = create("aaaa") - let l1 = doc.setObject("_root","list",[{ foo: "bar"}, [1,2,3]]) + let l1 = doc.putObject("_root","list",[{ foo: "bar"}, [1,2,3]]) let l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) - let l3 = doc.setObject("_root","info1","hello world") // 'text' object - doc.set("_root","info2","hello world") // 'str' - let l4 = doc.setObject("_root","info3","hello world") + let l3 = doc.putObject("_root","info1","hello world") // 'text' object + doc.put("_root","info2","hello world") // 'str' + let l4 = doc.putObject("_root","info3","hello world") assert.deepEqual(doc.materialize(), { "list": [ { zip: ["a", "b"] }, { foo: "bar"}, [ 1,2,3]], "info1": "hello world", @@ -411,9 +411,9 @@ describe('Automerge', () => { it('only returns an 
object id when objects are created', () => { let doc = create("aaaa") - let r1 = doc.set("_root","foo","bar") - let r2 = doc.setObject("_root","list",[]) - let r3 = doc.set("_root","counter",10, "counter") + let r1 = doc.put("_root","foo","bar") + let r2 = doc.putObject("_root","list",[]) + let r3 = doc.put("_root","counter",10, "counter") let r4 = doc.increment("_root","counter",1) let r5 = doc.delete("_root","counter") let r6 = doc.insert(r2,0,10); @@ -434,10 +434,10 @@ describe('Automerge', () => { it('objects without properties are preserved', () => { let doc1 = create("aaaa") - let a = doc1.setObject("_root","a",{}); - let b = doc1.setObject("_root","b",{}); - let c = doc1.setObject("_root","c",{}); - let d = doc1.set(c,"d","dd"); + let a = doc1.putObject("_root","a",{}); + let b = doc1.putObject("_root","b",{}); + let c = doc1.putObject("_root","c",{}); + let d = doc1.put(c,"d","dd"); let saved = doc1.save(); let doc2 = loadDoc(saved); assert.deepEqual(doc2.value("_root","a"),["map",a]) @@ -453,7 +453,7 @@ describe('Automerge', () => { it('should handle merging text conflicts then saving & loading', () => { let A = create("aabbcc") - let At = A.setObject('_root', 'text', "") + let At = A.putObject('_root', 'text', "") A.splice(At, 0, 0, 'hello') let B = A.fork() @@ -479,7 +479,7 @@ describe('Automerge', () => { describe('patch generation', () => { it('should include root object key updates', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.set('_root', 'hello', 'world') + doc1.put('_root', 'hello', 'world') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ @@ -491,7 +491,7 @@ describe('Automerge', () => { it('should include nested object creation', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.setObject('_root', 'birds', {friday: {robins: 3}}) + doc1.putObject('_root', 'birds', {friday: {robins: 3}}) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) 
assert.deepEqual(doc2.popPatches(), [ @@ -505,7 +505,7 @@ describe('Automerge', () => { it('should delete map keys', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.set('_root', 'favouriteBird', 'Robin') + doc1.put('_root', 'favouriteBird', 'Robin') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) doc1.delete('_root', 'favouriteBird') @@ -520,7 +520,7 @@ describe('Automerge', () => { it('should include list element insertion', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.setObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) + doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ @@ -534,7 +534,7 @@ describe('Automerge', () => { it('should insert nested maps into a list', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.setObject('_root', 'birds', []) + doc1.putObject('_root', 'birds', []) doc2.loadIncremental(doc1.saveIncremental()) doc1.insertObject('1@aaaa', 0, {species: 'Goldfinch', count: 3}) doc2.enablePatches(true) @@ -550,7 +550,7 @@ describe('Automerge', () => { it('should calculate list indexes based on visible elements', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.setObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) + doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) doc2.loadIncremental(doc1.saveIncremental()) doc1.delete('1@aaaa', 0) doc1.insert('1@aaaa', 1, 'Greenfinch') @@ -568,7 +568,7 @@ describe('Automerge', () => { it('should handle concurrent insertions at the head of a list', () => { let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') - doc1.setObject('_root', 'values', []) + doc1.putObject('_root', 'values', []) let change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) @@ -601,7 +601,7 @@ describe('Automerge', () => { it('should handle concurrent 
insertions beyond the head', () => { let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') - doc1.setObject('_root', 'values', ['a', 'b']) + doc1.putObject('_root', 'values', ['a', 'b']) let change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) @@ -634,8 +634,8 @@ describe('Automerge', () => { it('should handle conflicts on root object keys', () => { let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') - doc1.set('_root', 'bird', 'Greenfinch') - doc2.set('_root', 'bird', 'Goldfinch') + doc1.put('_root', 'bird', 'Greenfinch') + doc2.put('_root', 'bird', 'Goldfinch') let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc3.enablePatches(true) doc4.enablePatches(true) @@ -658,9 +658,9 @@ describe('Automerge', () => { it('should handle three-way conflicts', () => { let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') - doc1.set('_root', 'bird', 'Greenfinch') - doc2.set('_root', 'bird', 'Chaffinch') - doc3.set('_root', 'bird', 'Goldfinch') + doc1.put('_root', 'bird', 'Greenfinch') + doc2.put('_root', 'bird', 'Chaffinch') + doc3.put('_root', 'bird', 'Goldfinch') let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental(), change3 = doc3.saveIncremental() doc1.enablePatches(true) doc2.enablePatches(true) @@ -697,13 +697,13 @@ describe('Automerge', () => { it('should allow a conflict to be resolved', () => { let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') - doc1.set('_root', 'bird', 'Greenfinch') - doc2.set('_root', 'bird', 'Chaffinch') + doc1.put('_root', 'bird', 'Greenfinch') + doc2.put('_root', 'bird', 'Chaffinch') doc3.enablePatches(true) let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc1.loadIncremental(change2); doc3.loadIncremental(change1) doc2.loadIncremental(change1); doc3.loadIncremental(change2) - doc1.set('_root', 'bird', 'Goldfinch') + 
doc1.put('_root', 'bird', 'Goldfinch') doc3.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc3.values('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc3.popPatches(), [ @@ -716,9 +716,9 @@ describe('Automerge', () => { it('should handle a concurrent map key overwrite and delete', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.set('_root', 'bird', 'Greenfinch') + doc1.put('_root', 'bird', 'Greenfinch') doc2.loadIncremental(doc1.saveIncremental()) - doc1.set('_root', 'bird', 'Goldfinch') + doc1.put('_root', 'bird', 'Goldfinch') doc2.delete('_root', 'bird') let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc1.enablePatches(true) @@ -740,13 +740,13 @@ describe('Automerge', () => { it('should handle a conflict on a list element', () => { let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') - doc1.setObject('_root', 'birds', ['Thrush', 'Magpie']) + doc1.putObject('_root', 'birds', ['Thrush', 'Magpie']) let change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) doc4.loadIncremental(change1) - doc1.set('1@aaaa', 0, 'Song Thrush') - doc2.set('1@aaaa', 0, 'Redwing') + doc1.put('1@aaaa', 0, 'Song Thrush') + doc2.put('1@aaaa', 0, 'Redwing') let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() doc3.enablePatches(true) doc4.enablePatches(true) @@ -769,15 +769,15 @@ describe('Automerge', () => { it('should handle a concurrent list element overwrite and delete', () => { let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') - doc1.setObject('_root', 'birds', ['Parakeet', 'Magpie', 'Thrush']) + doc1.putObject('_root', 'birds', ['Parakeet', 'Magpie', 'Thrush']) let change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) doc4.loadIncremental(change1) doc1.delete('1@aaaa', 0) - doc1.set('1@aaaa', 1, 'Song Thrush') - doc2.set('1@aaaa', 0, 
'Ring-necked parakeet') - doc2.set('1@aaaa', 2, 'Redwing') + doc1.put('1@aaaa', 1, 'Song Thrush') + doc2.put('1@aaaa', 0, 'Ring-necked parakeet') + doc2.put('1@aaaa', 2, 'Redwing') let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() doc3.enablePatches(true) doc4.enablePatches(true) @@ -804,8 +804,8 @@ describe('Automerge', () => { it('should handle deletion of a conflict value', () => { let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') - doc1.set('_root', 'bird', 'Robin') - doc2.set('_root', 'bird', 'Wren') + doc1.put('_root', 'bird', 'Robin') + doc2.put('_root', 'bird', 'Wren') let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc2.delete('_root', 'bird') let change3 = doc2.saveIncremental() @@ -828,8 +828,8 @@ describe('Automerge', () => { it('should handle conflicting nested objects', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.setObject('_root', 'birds', ['Parakeet']) - doc2.setObject('_root', 'birds', {'Sparrowhawk': 1}) + doc1.putObject('_root', 'birds', ['Parakeet']) + doc2.putObject('_root', 'birds', {'Sparrowhawk': 1}) let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc1.enablePatches(true) doc2.enablePatches(true) @@ -851,7 +851,7 @@ describe('Automerge', () => { it('should support date objects', () => { // FIXME: either use Date objects or use numbers consistently let doc1 = create('aaaa'), doc2 = create('bbbb'), now = new Date() - doc1.set('_root', 'createdAt', now.getTime(), 'timestamp') + doc1.put('_root', 'createdAt', now.getTime(), 'timestamp') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.value('_root', 'createdAt'), ['timestamp', now]) @@ -864,7 +864,7 @@ describe('Automerge', () => { it.skip('should support counters in a map', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') doc2.enablePatches(true) - doc1.set('_root', 'starlings', 2, 'counter') + doc1.put('_root', 'starlings', 2, 
'counter') doc2.loadIncremental(doc1.saveIncremental()) doc1.increment('_root', 'starlings', 1) doc1.dump() @@ -912,7 +912,7 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() // make two nodes with the same changes - let list = n1.setObject("_root","n", []) + let list = n1.putObject("_root","n", []) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list,i,i) @@ -936,7 +936,7 @@ describe('Automerge', () => { let n1 = create(), n2 = create() // make changes for n1 that n2 should request - let list = n1.setObject("_root","n",[]) + let list = n1.putObject("_root","n",[]) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list, i, i) @@ -952,7 +952,7 @@ describe('Automerge', () => { let n1 = create(), n2 = create() // make changes for n1 that n2 should request - let list = n1.setObject("_root","n",[]) + let list = n1.putObject("_root","n",[]) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list,i,i) @@ -970,7 +970,7 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { - n1.set("_root","x",i) + n1.put("_root","x",i) n1.commit("",0) } @@ -978,7 +978,7 @@ describe('Automerge', () => { // modify the first node further for (let i = 5; i < 10; i++) { - n1.set("_root", "x", i) + n1.put("_root", "x", i) n1.commit("",0) } @@ -994,11 +994,11 @@ describe('Automerge', () => { let message, patch for (let i = 0; i < 5; i++) { - n1.set("_root","x",i) + n1.put("_root","x",i) n1.commit("",0) } for (let i = 0; i < 5; i++) { - n2.set("_root","y",i) + n2.put("_root","y",i) n2.commit("",0) } @@ -1041,11 +1041,11 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { - n1.set("_root", "x", i) + n1.put("_root", "x", i) n1.commit("",0) } for (let i = 0; i < 5; i++) { - n2.set("_root","y", i) + n2.put("_root","y", i) n2.commit("",0) } @@ -1109,7 +1109,7 @@ describe('Automerge', () => { assert.deepStrictEqual(msg2to1, null) // If we 
make one more change, and start another sync, its lastSync should be updated - n1.set("_root","x",5) + n1.put("_root","x",5) msg1to2 = n1.generateSyncMessage(s1) if (msg1to2 === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()) @@ -1119,7 +1119,7 @@ describe('Automerge', () => { let n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState(), message = null - let items = n1.setObject("_root", "items", []) + let items = n1.putObject("_root", "items", []) n1.commit("",0) sync(n1, n2, s1, s2) @@ -1150,7 +1150,7 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { - n1.set("_root", "x", i) + n1.put("_root", "x", i) n1.commit("",0) } @@ -1158,7 +1158,7 @@ describe('Automerge', () => { // modify the first node further for (let i = 5; i < 10; i++) { - n1.set("_root", "x", i) + n1.put("_root", "x", i) n1.commit("",0) } @@ -1178,19 +1178,19 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { - n1.set("_root","x",i) + n1.put("_root","x",i) n1.commit("",0) } sync(n1, n2) for (let i = 10; i < 15; i++) { - n1.set("_root","x",i) + n1.put("_root","x",i) n1.commit("",0) } for (let i = 15; i < 18; i++) { - n2.set("_root","x",i) + n2.put("_root","x",i) n2.commit("",0) } @@ -1211,18 +1211,18 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { - n1.set("_root","x",i) + n1.put("_root","x",i) n1.commit("",0) } sync(n1, n2, s1, s2) for (let i = 10; i < 15; i++) { - n1.set("_root","x",i) + n1.put("_root","x",i) n1.commit("",0) } for (let i = 15; i < 18; i++) { - n2.set("_root","x",i) + n2.put("_root","x",i) n2.commit("",0) } @@ -1240,7 +1240,7 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 3; i++) { - n1.set("_root","x",i) + 
n1.put("_root","x",i) n1.commit("",0) } @@ -1260,7 +1260,7 @@ describe('Automerge', () => { // n1 makes three changes, which we sync to n2 for (let i = 0; i < 3; i++) { - n1.set("_root","x",i) + n1.put("_root","x",i) n1.commit("",0) } @@ -1272,7 +1272,7 @@ describe('Automerge', () => { // sync another few commits for (let i = 3; i < 6; i++) { - n1.set("_root","x",i) + n1.put("_root","x",i) n1.commit("",0) } @@ -1284,7 +1284,7 @@ describe('Automerge', () => { // now make a few more changes, then attempt to sync the fully-up-to-date n1 with the confused r for (let i = 6; i < 9; i++) { - n1.set("_root","x",i) + n1.put("_root","x",i) n1.commit("",0) } @@ -1306,7 +1306,7 @@ describe('Automerge', () => { // n1 makes three changes, which we sync to n2 for (let i = 0; i < 3; i++) { - n1.set("_root","x",i) + n1.put("_root","x",i) n1.commit("",0) } @@ -1330,20 +1330,20 @@ describe('Automerge', () => { // Change 1 is known to all three nodes //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) - n1.set("_root","x",1); n1.commit("",0) + n1.put("_root","x",1); n1.commit("",0) sync(n1, n2, s12, s21) sync(n2, n3, s23, s32) // Change 2 is known to n1 and n2 - n1.set("_root","x",2); n1.commit("",0) + n1.put("_root","x",2); n1.commit("",0) sync(n1, n2, s12, s21) // Each of the three nodes makes one change (changes 3, 4, 5) - n1.set("_root","x",3); n1.commit("",0) - n2.set("_root","x",4); n2.commit("",0) - n3.set("_root","x",5); n3.commit("",0) + n1.put("_root","x",3); n1.commit("",0) + n2.put("_root","x",4); n2.commit("",0) + n3.put("_root","x",5); n3.commit("",0) // Apply n3's latest change to n2. 
If running in Node, turn the Uint8Array into a Buffer, to // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) @@ -1361,10 +1361,10 @@ describe('Automerge', () => { it('should handle histories with lots of branching and merging', () => { let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') - n1.set("_root","x",0); n1.commit("",0) + n1.put("_root","x",0); n1.commit("",0) n2.applyChanges([n1.getLastLocalChange()]) n3.applyChanges([n1.getLastLocalChange()]) - n3.set("_root","x",1); n3.commit("",0) + n3.put("_root","x",1); n3.commit("",0) // - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 // / \/ \/ \/ @@ -1373,8 +1373,8 @@ describe('Automerge', () => { // \ / // ---------------------------------------------- n3c1 <----- for (let i = 1; i < 20; i++) { - n1.set("_root","n1",i); n1.commit("",0) - n2.set("_root","n2",i); n2.commit("",0) + n1.put("_root","n1",i); n1.commit("",0) + n2.put("_root","n2",i); n2.commit("",0) const change1 = n1.getLastLocalChange() const change2 = n2.getLastLocalChange() n1.applyChanges([change2]) @@ -1386,8 +1386,8 @@ describe('Automerge', () => { // Having n3's last change concurrent to the last sync heads forces us into the slower code path n2.applyChanges([n3.getLastLocalChange()]) - n1.set("_root","n1","final"); n1.commit("",0) - n2.set("_root","n2","final"); n2.commit("",0) + n1.put("_root","n1","final"); n1.commit("",0) + n2.put("_root","n2","final"); n2.commit("",0) sync(n1, n2, s1, s2) assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) @@ -1404,15 +1404,15 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { - n1.set("_root","x",i); n1.commit("",0) + n1.put("_root","x",i); n1.commit("",0) } sync(n1, n2, s1, s2) for (let i = 1; ; i++) { // search for false positive; see comment above const n1up = n1.clone('01234567'); - n1up.set("_root","x",`${i} @ n1`); n1up.commit("",0) + 
n1up.put("_root","x",`${i} @ n1`); n1up.commit("",0) const n2up = n2.clone('89abcdef'); - n2up.set("_root","x",`${i} @ n2`); n2up.commit("",0) + n2up.put("_root","x",`${i} @ n2`); n2up.commit("",0) if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { n1.free(); n2.free() n1 = n1up; n2 = n2up; break @@ -1441,25 +1441,25 @@ describe('Automerge', () => { s1 = initSyncState() s2 = initSyncState() for (let i = 0; i < 10; i++) { - n1.set("_root","x",i); n1.commit("",0) + n1.put("_root","x",i); n1.commit("",0) } sync(n1, n2, s1, s2) let n1hash1, n2hash1 for (let i = 29; ; i++) { // search for false positive; see comment above const n1us1 = n1.clone('01234567') - n1us1.set("_root","x",`${i} @ n1`); n1us1.commit("",0) + n1us1.put("_root","x",`${i} @ n1`); n1us1.commit("",0) const n2us1 = n2.clone('89abcdef') - n2us1.set("_root","x",`${i} @ n1`); n2us1.commit("",0) + n2us1.put("_root","x",`${i} @ n1`); n2us1.commit("",0) n1hash1 = n1us1.getHeads()[0]; n2hash1 = n2us1.getHeads()[0] const n1us2 = n1us1.clone(); - n1us2.set("_root","x",`final @ n1`); n1us2.commit("",0) + n1us2.put("_root","x",`final @ n1`); n1us2.commit("",0) const n2us2 = n2us1.clone(); - n2us2.set("_root","x",`final @ n2`); n2us2.commit("",0) + n2us2.put("_root","x",`final @ n2`); n2us2.commit("",0) n1hash2 = n1us2.getHeads()[0]; n2hash2 = n2us2.getHeads()[0] if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) { @@ -1525,33 +1525,33 @@ describe('Automerge', () => { let n1hash3, n2hash3 for (let i = 0; i < 5; i++) { - n1.set("_root","x",i); n1.commit("",0) + n1.put("_root","x",i); n1.commit("",0) } sync(n1, n2, s1, s2) for (let i = 86; ; i++) { // search for false positive; see comment above const n1us1 = n1.clone('01234567') - n1us1.set("_root","x",`${i} @ n1`); n1us1.commit("",0) + n1us1.put("_root","x",`${i} @ n1`); n1us1.commit("",0) const n2us1 = n2.clone('89abcdef') - n2us1.set("_root","x",`${i} @ n2`); n2us1.commit("",0) + n2us1.put("_root","x",`${i} @ n2`); 
n2us1.commit("",0) //const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) //const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) const n1hash1 = n1us1.getHeads()[0] const n1us2 = n1us1.clone() - n1us2.set("_root","x",`${i + 1} @ n1`); n1us2.commit("",0) + n1us2.put("_root","x",`${i + 1} @ n1`); n1us2.commit("",0) const n2us2 = n2us1.clone() - n2us2.set("_root","x",`${i + 1} @ n2`); n2us2.commit("",0) + n2us2.put("_root","x",`${i + 1} @ n2`); n2us2.commit("",0) const n1hash2 = n1us2.getHeads()[0], n2hash2 = n2us2.getHeads()[0] const n1us3 = n1us2.clone() - n1us3.set("_root","x",`final @ n1`); n1us3.commit("",0) + n1us3.put("_root","x",`final @ n1`); n1us3.commit("",0) const n2us3 = n2us2.clone() - n2us3.set("_root","x",`final @ n2`); n2us3.commit("",0) + n2us3.put("_root","x",`final @ n2`); n2us3.commit("",0) n1hash3 = n1us3.getHeads()[0]; n2hash3 = n2us3.getHeads()[0] @@ -1578,28 +1578,28 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { - n1.set("_root","x",i); n1.commit("",0) + n1.put("_root","x",i); n1.commit("",0) } sync(n1, n2, s1, s2) - n1.set("_root","x",5); n1.commit("",0) + n1.put("_root","x",5); n1.commit("",0) for (let i = 2; ; i++) { // search for false positive; see comment above const n2us1 = n2.clone('89abcdef') - n2us1.set("_root","x",`${i} @ n2`); n2us1.commit("",0) + n2us1.put("_root","x",`${i} @ n2`); n2us1.commit("",0) if (new BloomFilter(n1.getHeads()).containsHash(n2us1.getHeads()[0])) { n2 = n2us1; break } } for (let i = 141; ; i++) { // search for false positive; see comment above const n2us2 = n2.clone('89abcdef') - n2us2.set("_root","x",`${i} again`); n2us2.commit("",0) + n2us2.put("_root","x",`${i} again`); n2us2.commit("",0) if (new BloomFilter(n1.getHeads()).containsHash(n2us2.getHeads()[0])) { n2 = n2us2; break } } - n2.set("_root","x",`final @ n2`); 
n2.commit("",0) + n2.put("_root","x",`final @ n2`); n2.commit("",0) const allHeads = [...n1.getHeads(), ...n2.getHeads()].sort() s1 = decodeSyncState(encodeSyncState(s1)) @@ -1619,7 +1619,7 @@ describe('Automerge', () => { let message for (let i = 0; i < 10; i++) { - n1.set("_root","x",i); n1.commit("",0) + n1.put("_root","x",i); n1.commit("",0) } sync(n1, n2, s1, s2) @@ -1628,8 +1628,8 @@ describe('Automerge', () => { s2 = decodeSyncState(encodeSyncState(s2)) for (let i = 1; ; i++) { // brute-force search for false positive; see comment above - const n1up = n1.clone('01234567'); n1up.set("_root","x",`${i} @ n1`); n1up.commit("",0) - const n2up = n1.clone('89abcdef'); n2up.set("_root","x",`${i} @ n2`); n2up.commit("",0) + const n1up = n1.clone('01234567'); n1up.put("_root","x",`${i} @ n1`); n1up.commit("",0) + const n2up = n1.clone('89abcdef'); n2up.put("_root","x",`${i} @ n2`); n2up.commit("",0) // check if the bloom filter on n2 will believe n1 already has a particular hash // this will mean n2 won't offer that data to n2 by receiving a sync message from n1 @@ -1680,7 +1680,7 @@ describe('Automerge', () => { let message1, message2, message3 for (let i = 0; i < 3; i++) { - n1.set("_root","x",i); n1.commit("",0) + n1.put("_root","x",i); n1.commit("",0) } // sync all 3 nodes @@ -1688,18 +1688,18 @@ describe('Automerge', () => { sync(n1, n3, s13, s31) sync(n3, n2, s32, s23) for (let i = 0; i < 2; i++) { - n1.set("_root","x",`${i} @ n1`); n1.commit("",0) + n1.put("_root","x",`${i} @ n1`); n1.commit("",0) } for (let i = 0; i < 2; i++) { - n2.set("_root","x",`${i} @ n2`); n2.commit("",0) + n2.put("_root","x",`${i} @ n2`); n2.commit("",0) } n1.applyChanges(n2.getChanges([])) n2.applyChanges(n1.getChanges([])) - n1.set("_root","x",`3 @ n1`); n1.commit("",0) - n2.set("_root","x",`3 @ n2`); n2.commit("",0) + n1.put("_root","x",`3 @ n1`); n1.commit("",0) + n2.put("_root","x",`3 @ n2`); n2.commit("",0) for (let i = 0; i < 3; i++) { - n3.set("_root","x",`${i} @ n3`); 
n3.commit("",0) + n3.put("_root","x",`${i} @ n3`); n3.commit("",0) } const n1c3 = n1.getHeads()[0], n2c3 = n2.getHeads()[0], n3c3 = n3.getHeads()[0] s13 = decodeSyncState(encodeSyncState(s13)) @@ -1749,13 +1749,13 @@ describe('Automerge', () => { let message = null for (let i = 0; i < 3; i++) { - n1.set("_root","x",i); n1.commit("",0) + n1.put("_root","x",i); n1.commit("",0) } const lastSync = n1.getHeads() for (let i = 3; i < 6; i++) { - n1.set("_root","x",i); n1.commit("",0) + n1.put("_root","x",i); n1.commit("",0) } sync(n1, n2, s1, s2) @@ -1777,7 +1777,7 @@ describe('Automerge', () => { let message = null for (let i = 0; i < 3; i++) { - n1.set("_root","x",i); n1.commit("",0) + n1.put("_root","x",i); n1.commit("",0) } n2.applyChanges(n1.getChanges([])) @@ -1799,13 +1799,13 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() let msg, decodedMsg - n1.set("_root","x",0); n1.commit("",0) + n1.put("_root","x",0); n1.commit("",0) n3.applyChanges(n3.getChangesAdded(n1)) // merge() for (let i = 1; i <= 2; i++) { - n1.set("_root","x",i); n1.commit("",0) + n1.put("_root","x",i); n1.commit("",0) } for (let i = 3; i <= 4; i++) { - n3.set("_root","x",i); n3.commit("",0) + n3.put("_root","x",i); n3.commit("",0) } const c2 = n1.getHeads()[0], c4 = n3.getHeads()[0] n2.applyChanges(n2.getChangesAdded(n3)) // merge() @@ -1818,12 +1818,12 @@ describe('Automerge', () => { assert.deepStrictEqual(s2.sharedHeads, [c2, c4].sort()) // n2 and n3 apply {c5, c6, c7, c8} - n3.set("_root","x",5); n3.commit("",0) + n3.put("_root","x",5); n3.commit("",0) const change5 = n3.getLastLocalChange() - n3.set("_root","x",6); n3.commit("",0) + n3.put("_root","x",6); n3.commit("",0) const change6 = n3.getLastLocalChange(), c6 = n3.getHeads()[0] for (let i = 7; i <= 8; i++) { - n3.set("_root","x",i); n3.commit("",0) + n3.put("_root","x",i); n3.commit("",0) } const c8 = n3.getHeads()[0] n2.applyChanges(n2.getChangesAdded(n3)) // merge() diff --git 
a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs index c485835e..1b3a1a16 100644 --- a/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -11,13 +11,13 @@ fn main() { .transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Add card".to_owned()), |tx| { - let cards = tx.set_object(ROOT, "cards", ObjType::List).unwrap(); + let cards = tx.put_object(ROOT, "cards", ObjType::List).unwrap(); let card1 = tx.insert_object(&cards, 0, ObjType::Map)?; - tx.set(&card1, "title", "Rewrite everything in Clojure")?; - tx.set(&card1, "done", false)?; + tx.put(&card1, "title", "Rewrite everything in Clojure")?; + tx.put(&card1, "done", false)?; let card2 = tx.insert_object(&cards, 0, ObjType::Map)?; - tx.set(&card2, "title", "Rewrite everything in Haskell")?; - tx.set(&card2, "done", false)?; + tx.put(&card2, "title", "Rewrite everything in Haskell")?; + tx.put(&card2, "done", false)?; Ok((cards, card1)) }, ) @@ -33,7 +33,7 @@ fn main() { doc1.transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Mark card as done".to_owned()), |tx| { - tx.set(&card1, "done", true)?; + tx.put(&card1, "done", true)?; Ok(()) }, ) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 889e9b53..a6d42d16 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -190,7 +190,7 @@ impl AutoCommit { /// # use automerge::ObjType; /// # use std::time::SystemTime; /// let mut doc = AutoCommit::new(); - /// doc.set_object(&ROOT, "todos", ObjType::List).unwrap(); + /// doc.put_object(&ROOT, "todos", ObjType::List).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); @@ -261,7 +261,7 @@ impl Transactable for AutoCommit { /// - The object does not exist /// - The key is the wrong type for the object /// - The key 
does not exist in the object - fn set, P: Into, V: Into>( + fn put, P: Into, V: Into>( &mut self, obj: O, prop: P, @@ -269,10 +269,10 @@ impl Transactable for AutoCommit { ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.set(&mut self.doc, obj.as_ref(), prop, value) + tx.put(&mut self.doc, obj.as_ref(), prop, value) } - fn set_object, P: Into>( + fn put_object, P: Into>( &mut self, obj: O, prop: P, @@ -280,7 +280,7 @@ impl Transactable for AutoCommit { ) -> Result { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.set_object(&mut self.doc, obj.as_ref(), prop, value) + tx.put_object(&mut self.doc, obj.as_ref(), prop, value) } fn insert, V: Into>( diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 562c0d32..f1e7611e 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -375,7 +375,7 @@ impl Automerge { let query = self.ops.search(&obj, query::ListVals::new()); let mut buffer = String::new(); for q in &query.ops { - if let OpType::Set(ScalarValue::Str(s)) = &q.action { + if let OpType::Put(ScalarValue::Str(s)) = &q.action { buffer.push_str(s); } else { buffer.push('\u{fffc}'); @@ -395,7 +395,7 @@ impl Automerge { let query = self.ops.search(&obj, query::ListValsAt::new(clock)); let mut buffer = String::new(); for q in &query.ops { - if let OpType::Set(ScalarValue::Str(s)) = &q.action { + if let OpType::Put(ScalarValue::Str(s)) = &q.action { buffer.push_str(s); } } @@ -956,7 +956,7 @@ impl Automerge { Key::Seq(n) => self.to_string(n), }; let value: String = match &op.action { - OpType::Set(value) => format!("{}", value), + OpType::Put(value) => format!("{}", value), OpType::Make(obj) => format!("make({})", obj), OpType::Increment(obj) => format!("inc({})", obj), OpType::Delete => format!("del{}", 0), @@ -1014,7 +1014,7 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); let mut tx = 
doc.transaction(); - tx.set(ROOT, "hello", "world")?; + tx.put(ROOT, "hello", "world")?; tx.value(ROOT, "hello")?; tx.commit(); Ok(()) @@ -1025,18 +1025,18 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); // setting a scalar value shouldn't return an opid as no object was created. - tx.set(ROOT, "a", 1)?; + tx.put(ROOT, "a", 1)?; // setting the same value shouldn't return an opid as there is no change. - tx.set(ROOT, "a", 1)?; + tx.put(ROOT, "a", 1)?; assert_eq!(tx.pending_ops(), 1); - let map = tx.set_object(ROOT, "b", ObjType::Map)?; + let map = tx.put_object(ROOT, "b", ObjType::Map)?; // object already exists at b but setting a map again overwrites it so we get an opid. - tx.set(map, "a", 2)?; + tx.put(map, "a", 2)?; - tx.set_object(ROOT, "b", ObjType::Map)?; + tx.put_object(ROOT, "b", ObjType::Map)?; assert_eq!(tx.pending_ops(), 4); let map = tx.value(ROOT, "b").unwrap().unwrap().1; @@ -1051,8 +1051,8 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); let mut tx = doc.transaction(); - let list_id = tx.set_object(ROOT, "items", ObjType::List)?; - tx.set(ROOT, "zzz", "zzzval")?; + let list_id = tx.put_object(ROOT, "items", ObjType::List)?; + tx.put(ROOT, "zzz", "zzzval")?; assert!(tx.value(ROOT, "items")?.unwrap().1 == list_id); tx.insert(&list_id, 0, "a")?; tx.insert(&list_id, 0, "b")?; @@ -1073,7 +1073,7 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor(ActorId::random()); let mut tx = doc.transaction(); - tx.set(ROOT, "xxx", "xxx")?; + tx.put(ROOT, "xxx", "xxx")?; assert!(!tx.values(ROOT, "xxx")?.is_empty()); tx.delete(ROOT, "xxx")?; assert!(tx.values(ROOT, "xxx")?.is_empty()); @@ -1085,7 +1085,7 @@ mod tests { fn test_inc() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - tx.set(ROOT, "counter", ScalarValue::counter(10))?; + tx.put(ROOT, "counter", ScalarValue::counter(10))?; assert!(tx.value(ROOT, "counter")?.unwrap().0 == 
Value::counter(10)); tx.increment(ROOT, "counter", 10)?; assert!(tx.value(ROOT, "counter")?.unwrap().0 == Value::counter(20)); @@ -1100,19 +1100,19 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - tx.set(ROOT, "foo", 1)?; + tx.put(ROOT, "foo", 1)?; tx.commit(); let save1 = doc.save(); let mut tx = doc.transaction(); - tx.set(ROOT, "bar", 2)?; + tx.put(ROOT, "bar", 2)?; tx.commit(); let save2 = doc.save_incremental(); let mut tx = doc.transaction(); - tx.set(ROOT, "baz", 3)?; + tx.put(ROOT, "baz", 3)?; tx.commit(); let save3 = doc.save_incremental(); @@ -1142,7 +1142,7 @@ mod tests { fn test_save_text() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let text = tx.set_object(ROOT, "text", ObjType::Text)?; + let text = tx.put_object(ROOT, "text", ObjType::Text)?; tx.commit(); let heads1 = doc.get_heads(); let mut tx = doc.transaction(); @@ -1167,17 +1167,17 @@ mod tests { let mut doc = Automerge::new(); doc.set_actor("aaaa".try_into().unwrap()); let mut tx = doc.transaction(); - tx.set(ROOT, "prop1", "val1")?; + tx.put(ROOT, "prop1", "val1")?; tx.commit(); doc.get_heads(); let heads1 = doc.get_heads(); let mut tx = doc.transaction(); - tx.set(ROOT, "prop1", "val2")?; + tx.put(ROOT, "prop1", "val2")?; tx.commit(); doc.get_heads(); let heads2 = doc.get_heads(); let mut tx = doc.transaction(); - tx.set(ROOT, "prop2", "val3")?; + tx.put(ROOT, "prop2", "val3")?; tx.commit(); doc.get_heads(); let heads3 = doc.get_heads(); @@ -1187,7 +1187,7 @@ mod tests { doc.get_heads(); let heads4 = doc.get_heads(); let mut tx = doc.transaction(); - tx.set(ROOT, "prop3", "val4")?; + tx.put(ROOT, "prop3", "val4")?; tx.commit(); doc.get_heads(); let heads5 = doc.get_heads(); @@ -1242,7 +1242,7 @@ mod tests { doc.set_actor("aaaa".try_into().unwrap()); let mut tx = doc.transaction(); - let list = tx.set_object(ROOT, "list", ObjType::List)?; + let list = tx.put_object(ROOT, "list", ObjType::List)?; tx.commit(); 
let heads1 = doc.get_heads(); @@ -1252,13 +1252,13 @@ mod tests { let heads2 = doc.get_heads(); let mut tx = doc.transaction(); - tx.set(&list, 0, 20)?; + tx.put(&list, 0, 20)?; tx.insert(&list, 0, 30)?; tx.commit(); let heads3 = doc.get_heads(); let mut tx = doc.transaction(); - tx.set(&list, 1, 40)?; + tx.put(&list, 1, 40)?; tx.insert(&list, 1, 50)?; tx.commit(); let heads4 = doc.get_heads(); @@ -1305,17 +1305,17 @@ mod tests { fn keys_iter() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - tx.set(ROOT, "a", 3).unwrap(); - tx.set(ROOT, "b", 4).unwrap(); - tx.set(ROOT, "c", 5).unwrap(); - tx.set(ROOT, "d", 6).unwrap(); + tx.put(ROOT, "a", 3).unwrap(); + tx.put(ROOT, "b", 4).unwrap(); + tx.put(ROOT, "c", 5).unwrap(); + tx.put(ROOT, "d", 6).unwrap(); tx.commit(); let mut tx = doc.transaction(); - tx.set(ROOT, "a", 7).unwrap(); + tx.put(ROOT, "a", 7).unwrap(); tx.commit(); let mut tx = doc.transaction(); - tx.set(ROOT, "a", 8).unwrap(); - tx.set(ROOT, "d", 9).unwrap(); + tx.put(ROOT, "a", 8).unwrap(); + tx.put(ROOT, "d", 9).unwrap(); tx.commit(); assert_eq!(doc.keys(ROOT).count(), 4); @@ -1368,11 +1368,11 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); // create a map - let map1 = tx.set_object(ROOT, "a", ObjType::Map).unwrap(); - tx.set(&map1, "b", 1).unwrap(); + let map1 = tx.put_object(ROOT, "a", ObjType::Map).unwrap(); + tx.put(&map1, "b", 1).unwrap(); // overwrite the first map with a new one - let map2 = tx.set_object(ROOT, "a", ObjType::Map).unwrap(); - tx.set(&map2, "c", 2).unwrap(); + let map2 = tx.put_object(ROOT, "a", ObjType::Map).unwrap(); + tx.put(&map2, "c", 2).unwrap(); tx.commit(); // we can get the new map by traversing the tree @@ -1388,7 +1388,7 @@ mod tests { assert_eq!(doc.value(&map1, "b").unwrap().unwrap().0, Value::int(1)); // and even set new things in it! 
let mut tx = doc.transaction(); - tx.set(&map1, "c", 3).unwrap(); + tx.put(&map1, "c", 3).unwrap(); tx.commit(); assert_eq!(doc.value(&map1, "c").unwrap().unwrap().0, Value::int(3)); @@ -1408,7 +1408,7 @@ mod tests { assert!(Automerge::load(&bytes).is_ok()); let mut tx = doc.transaction(); - tx.set(ROOT, "a", 1).unwrap(); + tx.put(ROOT, "a", 1).unwrap(); tx.commit(); let last_change = doc.get_last_local_change().unwrap(); assert_eq!(last_change.len(), 1); @@ -1435,7 +1435,7 @@ mod tests { fn loaded_doc_changes_have_hash() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - tx.set(ROOT, "a", 1).unwrap(); + tx.put(ROOT, "a", 1).unwrap(); tx.commit(); let hash = doc.get_last_local_change().unwrap().hash; let bytes = doc.save(); @@ -1516,7 +1516,7 @@ mod tests { ]; let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let list = tx.set_object(ROOT, "list", ObjType::List).unwrap(); + let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); for action in actions { match action { Action::InsertText(index, c) => { @@ -1569,7 +1569,7 @@ mod tests { ]; let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let list = tx.set_object(ROOT, "list", ObjType::List).unwrap(); + let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); for action in actions { match action { Action::InsertText(index, c) => { @@ -1608,14 +1608,14 @@ mod tests { let mut doc1 = AutoCommit::new().with_actor(actor1.clone()); let actor2 = ActorId::from(b"bbbb"); let mut doc2 = AutoCommit::new().with_actor(actor2.clone()); - let list = doc1.set_object(ROOT, "list", ObjType::List).unwrap(); + let list = doc1.put_object(ROOT, "list", ObjType::List).unwrap(); doc1.insert(&list, 0, 0).unwrap(); doc2.load_incremental(&doc1.save_incremental()).unwrap(); for i in 1..=max { - doc1.set(&list, 0, i).unwrap() + doc1.put(&list, 0, i).unwrap() } for i in 1..=max { - doc2.set(&list, 0, i).unwrap() + doc2.put(&list, 0, i).unwrap() } let change1 = doc1.save_incremental(); 
let change2 = doc2.save_incremental(); diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index 5b6bf21b..e0a007e0 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -130,7 +130,7 @@ impl<'a> Iterator for OperationIterator<'a> { let pred = self.pred.next()?; let value = self.value.next()?; let action = match action { - Action::Set => OpType::Set(value), + Action::Set => OpType::Put(value), Action::MakeList => OpType::Make(ObjType::List), Action::MakeText => OpType::Make(ObjType::Text), Action::MakeMap => OpType::Make(ObjType::Map), @@ -171,7 +171,7 @@ impl<'a> Iterator for DocOpIterator<'a> { let succ = self.succ.next()?; let value = self.value.next()?; let action = match action { - Action::Set => OpType::Set(value), + Action::Set => OpType::Put(value), Action::MakeList => OpType::Make(ObjType::List), Action::MakeText => OpType::Make(ObjType::Text), Action::MakeMap => OpType::Make(ObjType::Map), @@ -1051,7 +1051,7 @@ impl DocOpEncoder { self.insert.append(op.insert); self.succ.append(&op.succ, actors); let action = match &op.action { - amp::OpType::Set(value) => { + amp::OpType::Put(value) => { self.val.append_value(value, actors); Action::Set } @@ -1157,7 +1157,7 @@ impl ColumnEncoder { self.pred.append(&op.pred, actors); let action = match &op.action { - OpType::Set(value) => { + OpType::Put(value) => { self.val.append_value2(value, actors); Action::Set } diff --git a/automerge/src/legacy/mod.rs b/automerge/src/legacy/mod.rs index c1bfbac7..91d612bf 100644 --- a/automerge/src/legacy/mod.rs +++ b/automerge/src/legacy/mod.rs @@ -216,7 +216,7 @@ pub struct Op { impl Op { pub fn primitive_value(&self) -> Option { match &self.action { - OpType::Set(v) => Some(v.clone()), + OpType::Put(v) => Some(v.clone()), OpType::Increment(i) => Some(ScalarValue::Int(*i)), _ => None, } diff --git a/automerge/src/legacy/serde_impls/op.rs b/automerge/src/legacy/serde_impls/op.rs index ac4d4737..feaccfb8 100644 --- 
a/automerge/src/legacy/serde_impls/op.rs +++ b/automerge/src/legacy/serde_impls/op.rs @@ -19,7 +19,7 @@ impl Serialize for Op { } let numerical_datatype = match &self.action { - OpType::Set(value) => value.as_numerical_datatype(), + OpType::Put(value) => value.as_numerical_datatype(), _ => None, }; @@ -48,7 +48,7 @@ impl Serialize for Op { } match &self.action { OpType::Increment(n) => op.serialize_field("value", &n)?, - OpType::Set(value) => op.serialize_field("value", &value)?, + OpType::Put(value) => op.serialize_field("value", &value)?, _ => {} } op.serialize_field("pred", &self.pred)?; @@ -204,7 +204,7 @@ impl<'de> Deserialize<'de> for Op { .ok_or_else(|| Error::missing_field("value"))? .unwrap_or(ScalarValue::Null) }; - OpType::Set(value) + OpType::Put(value) } RawOpType::Inc => match value.flatten() { Some(ScalarValue::Int(n)) => Ok(OpType::Increment(n)), @@ -266,7 +266,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Set(ScalarValue::Uint(123)), + action: OpType::Put(ScalarValue::Uint(123)), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -284,7 +284,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Set(ScalarValue::Int(-123)), + action: OpType::Put(ScalarValue::Int(-123)), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -302,7 +302,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Set(ScalarValue::F64(-123.0)), + action: OpType::Put(ScalarValue::F64(-123.0)), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -319,7 +319,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Set(ScalarValue::Str("somestring".into())), + action: OpType::Put(ScalarValue::Str("somestring".into())), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -336,7 +336,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Set(ScalarValue::F64(1.23)), + action: OpType::Put(ScalarValue::F64(1.23)), obj: ObjectId::Root, key: "somekey".into(), insert: false, 
@@ -353,7 +353,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Set(ScalarValue::Boolean(true)), + action: OpType::Put(ScalarValue::Boolean(true)), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -382,7 +382,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Set(ScalarValue::Counter(123.into())), + action: OpType::Put(ScalarValue::Counter(123.into())), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -474,7 +474,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Set(ScalarValue::Null), + action: OpType::Put(ScalarValue::Null), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -580,7 +580,7 @@ mod tests { fn test_round_trips() { let testcases = vec![ Op { - action: OpType::Set(ScalarValue::Uint(12)), + action: OpType::Put(ScalarValue::Uint(12)), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -594,7 +594,7 @@ mod tests { pred: SortedVec::new(), }, Op { - action: OpType::Set(ScalarValue::Uint(12)), + action: OpType::Put(ScalarValue::Uint(12)), obj: ObjectId::from_str("1@7ef48769b04d47e9a88e98a134d62716").unwrap(), key: "somekey".into(), insert: false, @@ -608,7 +608,7 @@ mod tests { pred: SortedVec::new(), }, Op { - action: OpType::Set("seomthing".into()), + action: OpType::Put("seomthing".into()), obj: ObjectId::from_str("1@7ef48769b04d47e9a88e98a134d62716").unwrap(), key: OpId::from_str("1@7ef48769b04d47e9a88e98a134d62716") .unwrap() diff --git a/automerge/src/legacy/serde_impls/op_type.rs b/automerge/src/legacy/serde_impls/op_type.rs index 1b81e181..b054bad7 100644 --- a/automerge/src/legacy/serde_impls/op_type.rs +++ b/automerge/src/legacy/serde_impls/op_type.rs @@ -17,7 +17,7 @@ impl Serialize for OpType { OpType::Make(ObjType::Text) => RawOpType::MakeText, OpType::Delete => RawOpType::Del, OpType::Increment(_) => RawOpType::Inc, - OpType::Set(_) => RawOpType::Set, + OpType::Put(_) => RawOpType::Set, }; raw_type.serialize(serializer) } diff --git 
a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 6d4cfb10..788167ee 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -642,7 +642,7 @@ mod tests { let zero = OpId(0, 0); Op { id: zero, - action: amp::OpType::Set(0.into()), + action: amp::OpType::Put(0.into()), key: zero.into(), succ: vec![], pred: vec![], diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 9446376d..3e6e80d7 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -177,7 +177,7 @@ impl VisWindow { let mut visible = false; match op.action { - OpType::Set(ScalarValue::Counter(Counter { start, .. })) => { + OpType::Put(ScalarValue::Counter(Counter { start, .. })) => { self.counters.insert( op.id, CounterData { @@ -197,7 +197,7 @@ impl VisWindow { if let Some(mut entry) = self.counters.get_mut(id) { entry.succ.remove(&op.id); entry.val += inc_val; - entry.op.action = OpType::Set(ScalarValue::counter(entry.val)); + entry.op.action = OpType::Put(ScalarValue::counter(entry.val)); if !entry.succ.iter().any(|i| clock.covers(i)) { visible = true; } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 7b188dfe..84460c49 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -86,7 +86,7 @@ impl TransactionInner { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - pub fn set, V: Into>( + pub fn put, V: Into>( &mut self, doc: &mut Automerge, obj: &ExId, @@ -112,7 +112,7 @@ impl TransactionInner { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - pub fn set_object>( + pub fn put_object>( &mut self, doc: &mut Automerge, obj: &ExId, @@ -361,8 +361,8 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let a = tx.set_object(ROOT, "a", ObjType::Map).unwrap(); - tx.set(&a, "b", 1).unwrap(); + let a = 
tx.put_object(ROOT, "a", ObjType::Map).unwrap(); + tx.put(&a, "b", 1).unwrap(); assert!(tx.value(&a, "b").unwrap().is_some()); } } diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 24ab844a..2cfc7073 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -47,7 +47,7 @@ impl<'a> Transaction<'a> { /// # use std::time::SystemTime; /// let mut doc = Automerge::new(); /// let mut tx = doc.transaction(); - /// tx.set_object(ROOT, "todos", ObjType::List).unwrap(); + /// tx.put_object(ROOT, "todos", ObjType::List).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; /// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); @@ -85,7 +85,7 @@ impl<'a> Transactable for Transaction<'a> { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set, P: Into, V: Into>( + fn put, P: Into, V: Into>( &mut self, obj: O, prop: P, @@ -94,10 +94,10 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .set(self.doc, obj.as_ref(), prop, value) + .put(self.doc, obj.as_ref(), prop, value) } - fn set_object, P: Into>( + fn put_object, P: Into>( &mut self, obj: O, prop: P, @@ -106,7 +106,7 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .set_object(self.doc, obj.as_ref(), prop, value) + .put_object(self.doc, obj.as_ref(), prop, value) } fn insert, V: Into>( diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 503035d9..ed562828 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -15,7 +15,7 @@ pub trait Transactable { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - 
fn set, P: Into, V: Into>( + fn put, P: Into, V: Into>( &mut self, obj: O, prop: P, @@ -34,7 +34,7 @@ pub trait Transactable { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - fn set_object, P: Into>( + fn put_object, P: Into>( &mut self, obj: O, prop: P, diff --git a/automerge/src/types.rs b/automerge/src/types.rs index f879e3d8..6e6bfbbb 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -170,7 +170,7 @@ pub enum OpType { Make(ObjType), Delete, Increment(i64), - Set(ScalarValue), + Put(ScalarValue), } impl From for OpType { @@ -181,7 +181,7 @@ impl From for OpType { impl From for OpType { fn from(v: ScalarValue) -> Self { - OpType::Set(v) + OpType::Put(v) } } @@ -367,7 +367,7 @@ pub(crate) struct Op { impl Op { pub(crate) fn add_succ(&mut self, op: &Op) { self.succ.push(op.id); - if let OpType::Set(ScalarValue::Counter(Counter { + if let OpType::Put(ScalarValue::Counter(Counter { current, increments, .. @@ -382,7 +382,7 @@ impl Op { pub(crate) fn remove_succ(&mut self, op: &Op) { self.succ.retain(|id| id != &op.id); - if let OpType::Set(ScalarValue::Counter(Counter { + if let OpType::Put(ScalarValue::Counter(Counter { current, increments, .. @@ -406,7 +406,7 @@ impl Op { } pub fn incs(&self) -> usize { - if let OpType::Set(ScalarValue::Counter(Counter { increments, .. })) = &self.action { + if let OpType::Put(ScalarValue::Counter(Counter { increments, .. 
})) = &self.action { *increments } else { 0 @@ -422,11 +422,11 @@ impl Op { } pub fn is_counter(&self) -> bool { - matches!(&self.action, OpType::Set(ScalarValue::Counter(_))) + matches!(&self.action, OpType::Put(ScalarValue::Counter(_))) } pub fn is_noop(&self, action: &OpType) -> bool { - matches!((&self.action, action), (OpType::Set(n), OpType::Set(m)) if n == m) + matches!((&self.action, action), (OpType::Put(n), OpType::Put(m)) if n == m) } pub fn is_list_op(&self) -> bool { @@ -448,7 +448,7 @@ impl Op { pub fn value(&self) -> Value { match &self.action { OpType::Make(obj_type) => Value::Object(*obj_type), - OpType::Set(scalar) => Value::Scalar(scalar.clone()), + OpType::Put(scalar) => Value::Scalar(scalar.clone()), _ => panic!("cant convert op into a value - {:?}", self), } } @@ -456,8 +456,8 @@ impl Op { #[allow(dead_code)] pub fn dump(&self) -> String { match &self.action { - OpType::Set(value) if self.insert => format!("i:{}", value), - OpType::Set(value) => format!("s:{}", value), + OpType::Put(value) if self.insert => format!("i:{}", value), + OpType::Put(value) => format!("s:{}", value), OpType::Make(obj) => format!("make{}", obj), OpType::Increment(val) => format!("inc:{}", val), OpType::Delete => "del".to_string(), diff --git a/automerge/src/value.rs b/automerge/src/value.rs index f837ad63..47279381 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -269,7 +269,7 @@ impl From<&Op> for (Value, OpId) { fn from(op: &Op) -> Self { match &op.action { OpType::Make(obj_type) => (Value::Object(*obj_type), op.id), - OpType::Set(scalar) => (Value::Scalar(scalar.clone()), op.id), + OpType::Put(scalar) => (Value::Scalar(scalar.clone()), op.id), _ => panic!("cant convert op into a value - {:?}", op), } } @@ -279,7 +279,7 @@ impl From for (Value, OpId) { fn from(op: Op) -> Self { match &op.action { OpType::Make(obj_type) => (Value::Object(*obj_type), op.id), - OpType::Set(scalar) => (Value::Scalar(scalar.clone()), op.id), + OpType::Put(scalar) => 
(Value::Scalar(scalar.clone()), op.id), _ => panic!("cant convert op into a value - {:?}", op), } } @@ -289,7 +289,7 @@ impl From for OpType { fn from(v: Value) -> Self { match v { Value::Object(o) => OpType::Make(o), - Value::Scalar(s) => OpType::Set(s), + Value::Scalar(s) => OpType::Put(s), } } } diff --git a/automerge/src/visualisation.rs b/automerge/src/visualisation.rs index 9ba4f428..74a93b1d 100644 --- a/automerge/src/visualisation.rs +++ b/automerge/src/visualisation.rs @@ -235,7 +235,7 @@ impl OpTableRow { ) -> Self { let op_description = match &op.action { crate::OpType::Delete => "del".to_string(), - crate::OpType::Set(v) => format!("set {}", v), + crate::OpType::Put(v) => format!("set {}", v), crate::OpType::Make(obj) => format!("make {}", obj), crate::OpType::Increment(v) => format!("inc {}", v), }; diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index b3459fe9..5178c2c0 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -10,8 +10,8 @@ use helpers::{ #[test] fn no_conflict_on_repeated_assignment() { let mut doc = AutoCommit::new(); - doc.set(&automerge::ROOT, "foo", 1).unwrap(); - doc.set(&automerge::ROOT, "foo", 2).unwrap(); + doc.put(&automerge::ROOT, "foo", 1).unwrap(); + doc.put(&automerge::ROOT, "foo", 2).unwrap(); assert_doc!( doc.document(), map! 
{ @@ -24,14 +24,14 @@ fn no_conflict_on_repeated_assignment() { fn repeated_map_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - doc1.set(&automerge::ROOT, "field", 123).unwrap(); + doc1.put(&automerge::ROOT, "field", 123).unwrap(); doc2.merge(&mut doc1).unwrap(); - doc2.set(&automerge::ROOT, "field", 456).unwrap(); - doc1.set(&automerge::ROOT, "field", 789).unwrap(); + doc2.put(&automerge::ROOT, "field", 456).unwrap(); + doc1.put(&automerge::ROOT, "field", 789).unwrap(); doc1.merge(&mut doc2).unwrap(); assert_eq!(doc1.values(&automerge::ROOT, "field").unwrap().len(), 2); - doc1.set(&automerge::ROOT, "field", 123).unwrap(); + doc1.put(&automerge::ROOT, "field", 123).unwrap(); assert_doc!( doc1.document(), map! { @@ -45,13 +45,13 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set_object(&automerge::ROOT, "list", ObjType::List) + .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, 123).unwrap(); doc2.merge(&mut doc1).unwrap(); - doc2.set(&list_id, 0, 456).unwrap(); + doc2.put(&list_id, 0, 456).unwrap(); doc1.merge(&mut doc2).unwrap(); - doc1.set(&list_id, 0, 789).unwrap(); + doc1.put(&list_id, 0, 789).unwrap(); assert_doc!( doc1.document(), @@ -69,7 +69,7 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { fn list_deletion() { let mut doc = new_doc(); let list_id = doc - .set_object(&automerge::ROOT, "list", ObjType::List) + .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc.insert(&list_id, 0, 123).unwrap(); doc.insert(&list_id, 1, 456).unwrap(); @@ -90,8 +90,8 @@ fn list_deletion() { fn merge_concurrent_map_prop_updates() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - doc1.set(&automerge::ROOT, "foo", "bar").unwrap(); - doc2.set(&automerge::ROOT, "hello", "world").unwrap(); + doc1.put(&automerge::ROOT, "foo", "bar").unwrap(); + 
doc2.put(&automerge::ROOT, "hello", "world").unwrap(); doc1.merge(&mut doc2).unwrap(); assert_eq!( doc1.value(&automerge::ROOT, "foo").unwrap().unwrap().0, @@ -119,7 +119,7 @@ fn merge_concurrent_map_prop_updates() { fn add_concurrent_increments_of_same_property() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - doc1.set(&automerge::ROOT, "counter", mk_counter(0)) + doc1.put(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); doc2.merge(&mut doc1).unwrap(); doc1.increment(&automerge::ROOT, "counter", 1).unwrap(); @@ -140,12 +140,12 @@ fn add_increments_only_to_preceeded_values() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - doc1.set(&automerge::ROOT, "counter", mk_counter(0)) + doc1.put(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); doc1.increment(&automerge::ROOT, "counter", 1).unwrap(); // create a counter in doc2 - doc2.set(&automerge::ROOT, "counter", mk_counter(0)) + doc2.put(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); doc2.increment(&automerge::ROOT, "counter", 3).unwrap(); @@ -167,8 +167,8 @@ fn add_increments_only_to_preceeded_values() { fn concurrent_updates_of_same_field() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - doc1.set(&automerge::ROOT, "field", "one").unwrap(); - doc2.set(&automerge::ROOT, "field", "two").unwrap(); + doc1.put(&automerge::ROOT, "field", "one").unwrap(); + doc2.put(&automerge::ROOT, "field", "two").unwrap(); doc1.merge(&mut doc2).unwrap(); @@ -188,12 +188,12 @@ fn concurrent_updates_of_same_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set_object(&automerge::ROOT, "birds", ObjType::List) + .put_object(&automerge::ROOT, "birds", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, "finch").unwrap(); doc2.merge(&mut doc1).unwrap(); - doc1.set(&list_id, 0, "greenfinch").unwrap(); - doc2.set(&list_id, 0, "goldfinch").unwrap(); + doc1.put(&list_id, 0, "greenfinch").unwrap(); + doc2.put(&list_id, 0, "goldfinch").unwrap(); doc1.merge(&mut 
doc2).unwrap(); @@ -215,10 +215,10 @@ fn assignment_conflicts_of_different_types() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let mut doc3 = new_doc(); - doc1.set(&automerge::ROOT, "field", "string").unwrap(); - doc2.set_object(&automerge::ROOT, "field", ObjType::List) + doc1.put(&automerge::ROOT, "field", "string").unwrap(); + doc2.put_object(&automerge::ROOT, "field", ObjType::List) .unwrap(); - doc3.set_object(&automerge::ROOT, "field", ObjType::Map) + doc3.put_object(&automerge::ROOT, "field", ObjType::Map) .unwrap(); doc1.merge(&mut doc2).unwrap(); doc1.merge(&mut doc3).unwrap(); @@ -239,11 +239,11 @@ fn assignment_conflicts_of_different_types() { fn changes_within_conflicting_map_field() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - doc1.set(&automerge::ROOT, "field", "string").unwrap(); + doc1.put(&automerge::ROOT, "field", "string").unwrap(); let map_id = doc2 - .set_object(&automerge::ROOT, "field", ObjType::Map) + .put_object(&automerge::ROOT, "field", ObjType::Map) .unwrap(); - doc2.set(&map_id, "innerKey", 42).unwrap(); + doc2.put(&map_id, "innerKey", 42).unwrap(); doc1.merge(&mut doc2).unwrap(); assert_doc!( @@ -267,19 +267,19 @@ fn changes_within_conflicting_list_element() { let mut doc1 = new_doc_with_actor(actor1); let mut doc2 = new_doc_with_actor(actor2); let list_id = doc1 - .set_object(&automerge::ROOT, "list", ObjType::List) + .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, "hello").unwrap(); doc2.merge(&mut doc1).unwrap(); - let map_in_doc1 = doc1.set_object(&list_id, 0, ObjType::Map).unwrap(); - doc1.set(&map_in_doc1, "map1", true).unwrap(); - doc1.set(&map_in_doc1, "key", 1).unwrap(); + let map_in_doc1 = doc1.put_object(&list_id, 0, ObjType::Map).unwrap(); + doc1.put(&map_in_doc1, "map1", true).unwrap(); + doc1.put(&map_in_doc1, "key", 1).unwrap(); - let map_in_doc2 = doc2.set_object(&list_id, 0, ObjType::Map).unwrap(); + let map_in_doc2 = doc2.put_object(&list_id, 0, 
ObjType::Map).unwrap(); doc1.merge(&mut doc2).unwrap(); - doc2.set(&map_in_doc2, "map2", true).unwrap(); - doc2.set(&map_in_doc2, "key", 2).unwrap(); + doc2.put(&map_in_doc2, "map2", true).unwrap(); + doc2.put(&map_in_doc2, "key", 2).unwrap(); doc1.merge(&mut doc2).unwrap(); @@ -310,14 +310,14 @@ fn concurrently_assigned_nested_maps_should_not_merge() { let mut doc2 = new_doc(); let doc1_map_id = doc1 - .set_object(&automerge::ROOT, "config", ObjType::Map) + .put_object(&automerge::ROOT, "config", ObjType::Map) .unwrap(); - doc1.set(&doc1_map_id, "background", "blue").unwrap(); + doc1.put(&doc1_map_id, "background", "blue").unwrap(); let doc2_map_id = doc2 - .set_object(&automerge::ROOT, "config", ObjType::Map) + .put_object(&automerge::ROOT, "config", ObjType::Map) .unwrap(); - doc2.set(&doc2_map_id, "logo_url", "logo.png").unwrap(); + doc2.put(&doc2_map_id, "logo_url", "logo.png").unwrap(); doc1.merge(&mut doc2).unwrap(); @@ -344,7 +344,7 @@ fn concurrent_insertions_at_different_list_positions() { assert!(doc1.get_actor() < doc2.get_actor()); let list_id = doc1 - .set_object(&automerge::ROOT, "list", ObjType::List) + .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, "one").unwrap(); @@ -378,7 +378,7 @@ fn concurrent_insertions_at_same_list_position() { assert!(doc1.get_actor() < doc2.get_actor()); let list_id = doc1 - .set_object(&automerge::ROOT, "birds", ObjType::List) + .put_object(&automerge::ROOT, "birds", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, "parakeet").unwrap(); @@ -411,10 +411,10 @@ fn concurrent_insertions_at_same_list_position() { fn concurrent_assignment_and_deletion_of_a_map_entry() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - doc1.set(&automerge::ROOT, "bestBird", "robin").unwrap(); + doc1.put(&automerge::ROOT, "bestBird", "robin").unwrap(); doc2.merge(&mut doc1).unwrap(); doc1.delete(&automerge::ROOT, "bestBird").unwrap(); - doc2.set(&automerge::ROOT, "bestBird", "magpie").unwrap(); + 
doc2.put(&automerge::ROOT, "bestBird", "magpie").unwrap(); doc1.merge(&mut doc2).unwrap(); @@ -433,13 +433,13 @@ fn concurrent_assignment_and_deletion_of_list_entry() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set_object(&automerge::ROOT, "birds", ObjType::List) + .put_object(&automerge::ROOT, "birds", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, "blackbird").unwrap(); doc1.insert(&list_id, 1, "thrush").unwrap(); doc1.insert(&list_id, 2, "goldfinch").unwrap(); doc2.merge(&mut doc1).unwrap(); - doc1.set(&list_id, 1, "starling").unwrap(); + doc1.put(&list_id, 1, "starling").unwrap(); doc2.delete(&list_id, 1).unwrap(); assert_doc!( @@ -482,7 +482,7 @@ fn insertion_after_a_deleted_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set_object(&automerge::ROOT, "birds", ObjType::List) + .put_object(&automerge::ROOT, "birds", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, "blackbird").unwrap(); @@ -525,7 +525,7 @@ fn concurrent_deletion_of_same_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set_object(&automerge::ROOT, "birds", ObjType::List) + .put_object(&automerge::ROOT, "birds", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, "albatross").unwrap(); @@ -568,18 +568,18 @@ fn concurrent_updates_at_different_levels() { let mut doc2 = new_doc(); let animals = doc1 - .set_object(&automerge::ROOT, "animals", ObjType::Map) + .put_object(&automerge::ROOT, "animals", ObjType::Map) .unwrap(); - let birds = doc1.set_object(&animals, "birds", ObjType::Map).unwrap(); - doc1.set(&birds, "pink", "flamingo").unwrap(); - doc1.set(&birds, "black", "starling").unwrap(); + let birds = doc1.put_object(&animals, "birds", ObjType::Map).unwrap(); + doc1.put(&birds, "pink", "flamingo").unwrap(); + doc1.put(&birds, "black", "starling").unwrap(); - let mammals = doc1.set_object(&animals, "mammals", ObjType::List).unwrap(); + let mammals = doc1.put_object(&animals, 
"mammals", ObjType::List).unwrap(); doc1.insert(&mammals, 0, "badger").unwrap(); doc2.merge(&mut doc1).unwrap(); - doc1.set(&birds, "brown", "sparrow").unwrap(); + doc1.put(&birds, "brown", "sparrow").unwrap(); doc2.delete(&animals, "birds").unwrap(); doc1.merge(&mut doc2).unwrap(); @@ -613,16 +613,16 @@ fn concurrent_updates_of_concurrently_deleted_objects() { let mut doc2 = new_doc(); let birds = doc1 - .set_object(&automerge::ROOT, "birds", ObjType::Map) + .put_object(&automerge::ROOT, "birds", ObjType::Map) .unwrap(); - let blackbird = doc1.set_object(&birds, "blackbird", ObjType::Map).unwrap(); - doc1.set(&blackbird, "feathers", "black").unwrap(); + let blackbird = doc1.put_object(&birds, "blackbird", ObjType::Map).unwrap(); + doc1.put(&blackbird, "feathers", "black").unwrap(); doc2.merge(&mut doc1).unwrap(); doc1.delete(&birds, "blackbird").unwrap(); - doc2.set(&blackbird, "beak", "orange").unwrap(); + doc2.put(&blackbird, "beak", "orange").unwrap(); doc1.merge(&mut doc2).unwrap(); @@ -643,7 +643,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { let mut doc2 = new_doc_with_actor(actor2); let wisdom = doc1 - .set_object(&automerge::ROOT, "wisdom", ObjType::List) + .put_object(&automerge::ROOT, "wisdom", ObjType::List) .unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -704,7 +704,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id( let mut doc2 = new_doc_with_actor(actor2); let list = doc1 - .set_object(&automerge::ROOT, "list", ObjType::List) + .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -729,7 +729,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() let mut doc2 = new_doc_with_actor(actor2); let list = doc1 - .set_object(&automerge::ROOT, "list", ObjType::List) + .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut 
doc1).unwrap(); @@ -752,7 +752,7 @@ fn insertion_consistent_with_causality() { let mut doc2 = new_doc(); let list = doc1 - .set_object(&automerge::ROOT, "list", ObjType::List) + .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list, 0, "four").unwrap(); doc2.merge(&mut doc1).unwrap(); @@ -787,18 +787,18 @@ fn save_and_restore_empty() { fn save_restore_complex() { let mut doc1 = new_doc(); let todos = doc1 - .set_object(&automerge::ROOT, "todos", ObjType::List) + .put_object(&automerge::ROOT, "todos", ObjType::List) .unwrap(); let first_todo = doc1.insert_object(&todos, 0, ObjType::Map).unwrap(); - doc1.set(&first_todo, "title", "water plants").unwrap(); - doc1.set(&first_todo, "done", false).unwrap(); + doc1.put(&first_todo, "title", "water plants").unwrap(); + doc1.put(&first_todo, "done", false).unwrap(); let mut doc2 = new_doc(); doc2.merge(&mut doc1).unwrap(); - doc2.set(&first_todo, "title", "weed plants").unwrap(); + doc2.put(&first_todo, "title", "weed plants").unwrap(); - doc1.set(&first_todo, "title", "kill plants").unwrap(); + doc1.put(&first_todo, "title", "kill plants").unwrap(); doc1.merge(&mut doc2).unwrap(); let reloaded = Automerge::load(&doc1.save()).unwrap(); @@ -830,7 +830,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { let mut doc1 = new_doc_with_actor(actor1); - let list = doc1.set_object(ROOT, "list", ObjType::List)?; + let list = doc1.put_object(ROOT, "list", ObjType::List)?; doc1.insert(&list, 0, "a")?; doc1.insert(&list, 1, "b")?; doc1.insert(&list, 2, "c")?; @@ -841,13 +841,13 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { let mut doc3 = AutoCommit::load(&doc1.save())?; doc3.set_actor(actor3); - doc1.set(&list, 1, ScalarValue::counter(0))?; - doc2.set(&list, 1, ScalarValue::counter(10))?; - doc3.set(&list, 1, ScalarValue::counter(100))?; + doc1.put(&list, 1, ScalarValue::counter(0))?; + doc2.put(&list, 1, ScalarValue::counter(10))?; + doc3.put(&list, 1, 
ScalarValue::counter(100))?; - doc1.set(&list, 2, ScalarValue::counter(0))?; - doc2.set(&list, 2, ScalarValue::counter(10))?; - doc3.set(&list, 2, 100)?; + doc1.put(&list, 2, ScalarValue::counter(0))?; + doc2.put(&list, 2, ScalarValue::counter(10))?; + doc3.put(&list, 2, 100)?; doc1.increment(&list, 1, 1)?; doc1.increment(&list, 2, 1)?; diff --git a/edit-trace/automerge-wasm.js b/edit-trace/automerge-wasm.js index 3680efc0..c91b7550 100644 --- a/edit-trace/automerge-wasm.js +++ b/edit-trace/automerge-wasm.js @@ -1,5 +1,5 @@ -// make sure to +// make sure to // # cd ../automerge-wasm // # yarn release @@ -11,7 +11,7 @@ const Automerge = require('../automerge-wasm') const start = new Date() let doc = Automerge.create(); -let text = doc.set("_root", "text", "", "text") +let text = doc.put("_root", "text", "", "text") for (let i = 0; i < edits.length; i++) { let edit = edits[i] diff --git a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs index 6bb4d6c0..012ac649 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -5,7 +5,7 @@ use std::fs; fn replay_trace_tx(commands: Vec<(usize, usize, Vec)>) -> Automerge { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let text = tx.set_object(ROOT, "text", ObjType::Text).unwrap(); + let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); for (pos, del, vals) in commands { tx.splice(&text, pos, del, vals).unwrap(); } @@ -15,7 +15,7 @@ fn replay_trace_tx(commands: Vec<(usize, usize, Vec)>) -> Automerge fn replay_trace_autotx(commands: Vec<(usize, usize, Vec)>) -> AutoCommit { let mut doc = AutoCommit::new(); - let text = doc.set_object(ROOT, "text", ObjType::Text).unwrap(); + let text = doc.put_object(ROOT, "text", ObjType::Text).unwrap(); for (pos, del, vals) in commands { doc.splice(&text, pos, del, vals).unwrap(); } diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index b6a452a2..f9838227 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -20,7 
+20,7 @@ fn main() -> Result<(), AutomergeError> { let now = Instant::now(); let mut tx = doc.transaction(); - let text = tx.set_object(ROOT, "text", ObjType::Text).unwrap(); + let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); for (i, (pos, del, vals)) in commands.into_iter().enumerate() { if i % 1000 == 0 { println!("Processed {} edits in {} ms", i, now.elapsed().as_millis()); From b54075fe4d885a737ba29352979c4f4dd28e7bfc Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 13:56:15 +0100 Subject: [PATCH 224/730] Add makefile to run edit-traces --- edit-trace/Makefile | 19 +++++++++++++++++++ edit-trace/automerge-js.js | 2 +- edit-trace/automerge-rs.js | 2 +- edit-trace/automerge-wasm.js | 5 ++--- edit-trace/baseline.js | 2 +- 5 files changed, 24 insertions(+), 6 deletions(-) create mode 100644 edit-trace/Makefile diff --git a/edit-trace/Makefile b/edit-trace/Makefile new file mode 100644 index 00000000..05001dbf --- /dev/null +++ b/edit-trace/Makefile @@ -0,0 +1,19 @@ +rust: + cargo run --release + +build-wasm: ../automerge-wasm ../automerge + cd ../automerge-wasm && yarn + cd ../automerge-wasm && yarn release + +wasm: build-wasm + node automerge-wasm.js + +build-js: build-wasm + cd ../automerge-js && yarn + cd ../automerge-js && yarn link "automerge-wasm" + +js: build-js + node automerge-js.js + +baseline: + node baseline.js diff --git a/edit-trace/automerge-js.js b/edit-trace/automerge-js.js index bdfa8455..994c87c8 100644 --- a/edit-trace/automerge-js.js +++ b/edit-trace/automerge-js.js @@ -7,7 +7,7 @@ let state = Automerge.from({text: new Automerge.Text()}) state = Automerge.change(state, doc => { for (let i = 0; i < edits.length; i++) { - if (i % 1000 === 0) { + if (i % 10000 === 0) { console.log(`Processed ${i} edits in ${new Date() - start} ms`) } let edit = edits[i] diff --git a/edit-trace/automerge-rs.js b/edit-trace/automerge-rs.js index 8786b412..342f5268 100644 --- a/edit-trace/automerge-rs.js +++ 
b/edit-trace/automerge-rs.js @@ -16,7 +16,7 @@ let state = Automerge.from({text: new Automerge.Text()}) state = Automerge.change(state, doc => { for (let i = 0; i < edits.length; i++) { - if (i % 1000 === 0) { + if (i % 10000 === 0) { console.log(`Processed ${i} edits in ${new Date() - start} ms`) } if (edits[i][1] > 0) doc.text.deleteAt(edits[i][0], edits[i][1]) diff --git a/edit-trace/automerge-wasm.js b/edit-trace/automerge-wasm.js index c91b7550..cd153c2d 100644 --- a/edit-trace/automerge-wasm.js +++ b/edit-trace/automerge-wasm.js @@ -3,7 +3,6 @@ // # cd ../automerge-wasm // # yarn release -// # yarn opt const { edits, finalText } = require('./editing-trace') const Automerge = require('../automerge-wasm') @@ -11,11 +10,11 @@ const Automerge = require('../automerge-wasm') const start = new Date() let doc = Automerge.create(); -let text = doc.put("_root", "text", "", "text") +let text = doc.putObject("_root", "text", "", "text") for (let i = 0; i < edits.length; i++) { let edit = edits[i] - if (i % 1000 === 0) { + if (i % 10000 === 0) { console.log(`Processed ${i} edits in ${new Date() - start} ms`) } doc.splice(text, ...edit) diff --git a/edit-trace/baseline.js b/edit-trace/baseline.js index 803ee122..b99f0ae7 100644 --- a/edit-trace/baseline.js +++ b/edit-trace/baseline.js @@ -5,7 +5,7 @@ const start = new Date() let chars = [] for (let i = 0; i < edits.length; i++) { let edit = edits[i] - if (i % 1000 === 0) { + if (i % 10000 === 0) { console.log(`Processed ${i} edits in ${new Date() - start} ms`) } chars.splice(...edit) From 8f4562b2cb75b5ee6902320717ab6334d8a91d37 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 23:02:56 +0100 Subject: [PATCH 225/730] Have apply_changes take an iterator --- automerge/src/automerge.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index f1e7611e..8b524d75 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ 
-525,7 +525,10 @@ impl Automerge { } /// Apply changes to this document. - pub fn apply_changes(&mut self, changes: Vec) -> Result<(), AutomergeError> { + pub fn apply_changes( + &mut self, + changes: impl IntoIterator, + ) -> Result<(), AutomergeError> { for c in changes { if !self.history_index.contains_key(&c.hash) { if self.duplicate_seq(&c) { From 48ce85dbfbf17a948e179c66f6d43407d8ea009e Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 4 Apr 2022 11:55:22 +0100 Subject: [PATCH 226/730] Add ref to treequery to allow borrowing ops --- automerge/src/op_set.rs | 4 ++-- automerge/src/op_tree.rs | 8 ++++---- automerge/src/query.rs | 14 +++++++++----- automerge/src/query/insert.rs | 2 +- automerge/src/query/len.rs | 2 +- automerge/src/query/len_at.rs | 2 +- automerge/src/query/list_vals.rs | 2 +- automerge/src/query/list_vals_at.rs | 2 +- automerge/src/query/nth.rs | 12 ++++++------ automerge/src/query/nth_at.rs | 2 +- automerge/src/query/opid.rs | 2 +- automerge/src/query/prop.rs | 16 ++++++++++------ automerge/src/query/prop_at.rs | 2 +- automerge/src/query/seek_op.rs | 2 +- automerge/src/query/seek_op_with_patch.rs | 2 +- automerge/src/transaction/inner.rs | 8 ++++++-- 16 files changed, 47 insertions(+), 35 deletions(-) diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index b2e53eba..59820e10 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -61,9 +61,9 @@ impl OpSetInternal { } } - pub fn search(&self, obj: &ObjId, query: Q) -> Q + pub fn search<'a, 'b: 'a, Q>(&'b self, obj: &ObjId, query: Q) -> Q where - Q: TreeQuery, + Q: TreeQuery<'a>, { if let Some((_typ, tree)) = self.trees.get(obj) { tree.search(query, &self.m) diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 788167ee..863f9f29 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -51,9 +51,9 @@ impl OpTreeInternal { .map(|root| query::KeysAt::new(root, clock)) } - pub fn search(&self, mut query: Q, m: &OpSetMetadata) -> Q + 
pub fn search<'a, 'b: 'a, Q>(&'b self, mut query: Q, m: &OpSetMetadata) -> Q where - Q: TreeQuery, + Q: TreeQuery<'a>, { self.root_node .as_ref() @@ -172,9 +172,9 @@ impl OpTreeNode { } } - pub fn search(&self, query: &mut Q, m: &OpSetMetadata) -> bool + pub fn search<'a, 'b: 'a, Q>(&'b self, query: &mut Q, m: &OpSetMetadata) -> bool where - Q: TreeQuery, + Q: TreeQuery<'a>, { if self.is_leaf() { for e in &self.elements { diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 3e6e80d7..a20fa558 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -43,22 +43,26 @@ pub(crate) struct CounterData { op: Op, } -pub(crate) trait TreeQuery { +pub(crate) trait TreeQuery<'a> { #[inline(always)] - fn query_node_with_metadata(&mut self, child: &OpTreeNode, _m: &OpSetMetadata) -> QueryResult { + fn query_node_with_metadata( + &mut self, + child: &'a OpTreeNode, + _m: &OpSetMetadata, + ) -> QueryResult { self.query_node(child) } - fn query_node(&mut self, _child: &OpTreeNode) -> QueryResult { + fn query_node(&mut self, _child: &'a OpTreeNode) -> QueryResult { QueryResult::Descend } #[inline(always)] - fn query_element_with_metadata(&mut self, element: &Op, _m: &OpSetMetadata) -> QueryResult { + fn query_element_with_metadata(&mut self, element: &'a Op, _m: &OpSetMetadata) -> QueryResult { self.query_element(element) } - fn query_element(&mut self, _element: &Op) -> QueryResult { + fn query_element(&mut self, _element: &'a Op) -> QueryResult { panic!("invalid element query") } } diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index 6f82061a..afb773db 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -61,7 +61,7 @@ impl InsertNth { } } -impl TreeQuery for InsertNth { +impl<'a> TreeQuery<'a> for InsertNth { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { // if this node has some visible elements then we may find our target within let mut num_vis = child.index.visible_len(); diff 
--git a/automerge/src/query/len.rs b/automerge/src/query/len.rs index 6fd35b5f..ea678414 100644 --- a/automerge/src/query/len.rs +++ b/automerge/src/query/len.rs @@ -13,7 +13,7 @@ impl Len { } } -impl TreeQuery for Len { +impl<'a> TreeQuery<'a> for Len { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { self.len = child.index.visible_len(); QueryResult::Finish diff --git a/automerge/src/query/len_at.rs b/automerge/src/query/len_at.rs index 2f183517..fb8bfe3d 100644 --- a/automerge/src/query/len_at.rs +++ b/automerge/src/query/len_at.rs @@ -23,7 +23,7 @@ impl LenAt { } } -impl TreeQuery for LenAt { +impl<'a> TreeQuery<'a> for LenAt { fn query_element(&mut self, op: &Op) -> QueryResult { if op.insert { self.last = None; diff --git a/automerge/src/query/list_vals.rs b/automerge/src/query/list_vals.rs index 37d1a7cc..a1a5d738 100644 --- a/automerge/src/query/list_vals.rs +++ b/automerge/src/query/list_vals.rs @@ -18,7 +18,7 @@ impl ListVals { } } -impl TreeQuery for ListVals { +impl<'a> TreeQuery<'a> for ListVals { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { let start = 0; for pos in start..child.len() { diff --git a/automerge/src/query/list_vals_at.rs b/automerge/src/query/list_vals_at.rs index 97dc4802..5eb23d21 100644 --- a/automerge/src/query/list_vals_at.rs +++ b/automerge/src/query/list_vals_at.rs @@ -23,7 +23,7 @@ impl ListValsAt { } } -impl TreeQuery for ListValsAt { +impl<'a> TreeQuery<'a> for ListValsAt { fn query_element_with_metadata(&mut self, op: &Op, m: &OpSetMetadata) -> QueryResult { if op.insert { self.last_elem = None; diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index 8ee2f5fa..25eaf7b9 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -5,18 +5,18 @@ use crate::types::{ElemId, Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct Nth { +pub(crate) struct Nth<'a> { target: usize, seen: usize, /// last_seen is the target elemid of the last 
`seen` operation. /// It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes. last_seen: Option, - pub ops: Vec, + pub ops: Vec<&'a Op>, pub ops_pos: Vec, pub pos: usize, } -impl Nth { +impl<'a> Nth<'a> { pub fn new(target: usize) -> Self { Nth { target, @@ -39,7 +39,7 @@ impl Nth { } } -impl TreeQuery for Nth { +impl<'a> TreeQuery<'a> for Nth<'a> { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { let mut num_vis = child.index.visible_len(); if child.index.has_visible(&self.last_seen) { @@ -67,7 +67,7 @@ impl TreeQuery for Nth { } } - fn query_element(&mut self, element: &Op) -> QueryResult { + fn query_element(&mut self, element: &'a Op) -> QueryResult { if element.insert { if self.seen > self.target { return QueryResult::Finish; @@ -82,7 +82,7 @@ impl TreeQuery for Nth { self.last_seen = element.elemid() } if self.seen == self.target + 1 && visible { - self.ops.push(element.clone()); + self.ops.push(element); self.ops_pos.push(self.pos); } self.pos += 1; diff --git a/automerge/src/query/nth_at.rs b/automerge/src/query/nth_at.rs index cfa36632..45acc764 100644 --- a/automerge/src/query/nth_at.rs +++ b/automerge/src/query/nth_at.rs @@ -29,7 +29,7 @@ impl NthAt { } } -impl TreeQuery for NthAt { +impl<'a> TreeQuery<'a> for NthAt { fn query_element(&mut self, element: &Op) -> QueryResult { if element.insert { if self.seen > self.target { diff --git a/automerge/src/query/opid.rs b/automerge/src/query/opid.rs index b00c6420..1dbc76e5 100644 --- a/automerge/src/query/opid.rs +++ b/automerge/src/query/opid.rs @@ -30,7 +30,7 @@ impl OpIdSearch { } } -impl TreeQuery for OpIdSearch { +impl<'a> TreeQuery<'a> for OpIdSearch { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { if child.index.ops.contains(&self.target) { QueryResult::Descend diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs index 42131e1b..1c37f600 100644 --- a/automerge/src/query/prop.rs +++ 
b/automerge/src/query/prop.rs @@ -4,14 +4,14 @@ use crate::types::{Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct Prop { +pub(crate) struct Prop<'a> { key: Key, - pub ops: Vec, + pub ops: Vec<&'a Op>, pub ops_pos: Vec, pub pos: usize, } -impl Prop { +impl<'a> Prop<'a> { pub fn new(prop: usize) -> Self { Prop { key: Key::Map(prop), @@ -22,8 +22,12 @@ impl Prop { } } -impl TreeQuery for Prop { - fn query_node_with_metadata(&mut self, child: &OpTreeNode, m: &OpSetMetadata) -> QueryResult { +impl<'a> TreeQuery<'a> for Prop<'a> { + fn query_node_with_metadata( + &mut self, + child: &'a OpTreeNode, + m: &OpSetMetadata, + ) -> QueryResult { let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); self.pos = start; for pos in start..child.len() { @@ -32,7 +36,7 @@ impl TreeQuery for Prop { break; } if op.visible() { - self.ops.push(op.clone()); + self.ops.push(op); self.ops_pos.push(pos); } self.pos += 1; diff --git a/automerge/src/query/prop_at.rs b/automerge/src/query/prop_at.rs index 3d9f1a3c..ffa66dd5 100644 --- a/automerge/src/query/prop_at.rs +++ b/automerge/src/query/prop_at.rs @@ -24,7 +24,7 @@ impl PropAt { } } -impl TreeQuery for PropAt { +impl<'a> TreeQuery<'a> for PropAt { fn query_node_with_metadata(&mut self, child: &OpTreeNode, m: &OpSetMetadata) -> QueryResult { let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); let mut window: VisWindow = Default::default(); diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs index dda35b4a..145e1123 100644 --- a/automerge/src/query/seek_op.rs +++ b/automerge/src/query/seek_op.rs @@ -39,7 +39,7 @@ impl SeekOp { } } -impl TreeQuery for SeekOp { +impl<'a> TreeQuery<'a> for SeekOp { fn query_node_with_metadata(&mut self, child: &OpTreeNode, m: &OpSetMetadata) -> QueryResult { if self.found { return QueryResult::Descend; diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index 
e713e77e..ae57bd6d 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -60,7 +60,7 @@ impl SeekOpWithPatch { } } -impl TreeQuery for SeekOpWithPatch { +impl<'a> TreeQuery<'a> for SeekOpWithPatch { fn query_node_with_metadata(&mut self, child: &OpTreeNode, m: &OpSetMetadata) -> QueryResult { if self.found { return QueryResult::Descend; diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 84460c49..95b1e6c4 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -257,7 +257,9 @@ impl TransactionInner { insert: false, }; - self.insert_local_op(doc, op, query.pos, obj, &query.ops_pos); + let pos = query.pos; + let ops_pos = query.ops_pos; + self.insert_local_op(doc, op, pos, obj, &ops_pos); if is_make { Ok(Some(id)) @@ -294,7 +296,9 @@ impl TransactionInner { insert: false, }; - self.insert_local_op(doc, op, query.pos, obj, &query.ops_pos); + let pos = query.pos; + let ops_pos = query.ops_pos; + self.insert_local_op(doc, op, pos, obj, &ops_pos); if is_make { Ok(Some(id)) From a2d4b2a77891d4ebf3aa5fa64832d901486b3738 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 4 Apr 2022 11:58:37 +0100 Subject: [PATCH 227/730] Use ref on seek_op --- automerge/src/automerge.rs | 9 +++++---- automerge/src/query/seek_op.rs | 12 ++++++------ 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 8b524d75..e7c45a8b 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -198,17 +198,18 @@ impl Automerge { f } - fn insert_op(&mut self, obj: &ObjId, op: Op) -> Op { + fn insert_op(&mut self, obj: &ObjId, op: Op) { let q = self.ops.search(obj, query::SeekOp::new(&op)); - for i in q.succ { + let succ = q.succ; + let pos = q.pos; + for i in succ { self.ops.replace(obj, i, |old_op| old_op.add_succ(&op)); } if !op.is_delete() { - self.ops.insert(q.pos, obj, op.clone()); + 
self.ops.insert(pos, obj, op); } - op } fn insert_op_with_patch(&mut self, obj: &ObjId, op: Op) -> Op { diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs index 145e1123..0fe5c50e 100644 --- a/automerge/src/query/seek_op.rs +++ b/automerge/src/query/seek_op.rs @@ -5,9 +5,9 @@ use std::cmp::Ordering; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct SeekOp { +pub(crate) struct SeekOp<'a> { /// the op we are looking for - op: Op, + op: &'a Op, /// The position to insert at pub pos: usize, /// The indices of ops that this op overwrites @@ -16,10 +16,10 @@ pub(crate) struct SeekOp { found: bool, } -impl SeekOp { - pub fn new(op: &Op) -> Self { +impl<'a> SeekOp<'a> { + pub fn new(op: &'a Op) -> Self { SeekOp { - op: op.clone(), + op, succ: vec![], pos: 0, found: false, @@ -39,7 +39,7 @@ impl SeekOp { } } -impl<'a> TreeQuery<'a> for SeekOp { +impl<'a> TreeQuery<'a> for SeekOp<'a> { fn query_node_with_metadata(&mut self, child: &OpTreeNode, m: &OpSetMetadata) -> QueryResult { if self.found { return QueryResult::Descend; From fa2971a29a6b5f5d674d4205e1250b695346e49b Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 4 Apr 2022 12:47:08 +0100 Subject: [PATCH 228/730] Have value be a reference for scalars --- automerge-wasm/src/interop.rs | 4 +- automerge-wasm/src/lib.rs | 14 +- automerge/src/automerge.rs | 55 +++--- automerge/src/query.rs | 25 ++- automerge/src/query/keys_at.rs | 2 +- automerge/src/query/len_at.rs | 10 +- automerge/src/query/list_vals_at.rs | 12 +- automerge/src/query/nth_at.rs | 12 +- automerge/src/query/prop_at.rs | 14 +- automerge/src/query/seek_op_with_patch.rs | 22 ++- automerge/src/types.rs | 15 +- automerge/src/value.rs | 194 ++++++++++++---------- automerge/tests/helpers/mod.rs | 6 +- 13 files changed, 213 insertions(+), 172 deletions(-) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 69dd38f7..68311aa6 100644 --- a/automerge-wasm/src/interop.rs +++ 
b/automerge-wasm/src/interop.rs @@ -347,7 +347,7 @@ pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { Reflect::set(&map, &k.into(), &doc.text(&exid).unwrap().into()).unwrap(); } Ok(Some((Value::Scalar(v), _))) => { - Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap(); + Reflect::set(&map, &k.into(), &ScalarValue(v.into_owned()).into()).unwrap(); } _ => (), }; @@ -370,7 +370,7 @@ pub(crate) fn list_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { array.push(&list_to_js(doc, &exid)); } Ok(Some((Value::Scalar(v), _))) => { - array.push(&ScalarValue(v).into()); + array.push(&ScalarValue(v.into_owned()).into()); } _ => (), }; diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 54480197..985f0d9e 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -242,14 +242,14 @@ impl Automerge { let opid = match (p, value) { (Prop::Map(s), Value::Object(objtype)) => Some(self.0.put_object(obj, s, objtype)?), (Prop::Map(s), Value::Scalar(scalar)) => { - self.0.put(obj, s, scalar)?; + self.0.put(obj, s, scalar.into_owned())?; None } (Prop::Seq(i), Value::Object(objtype)) => { Some(self.0.insert_object(obj, i, objtype)?) 
} (Prop::Seq(i), Value::Scalar(scalar)) => { - self.0.insert(obj, i, scalar)?; + self.0.insert(obj, i, scalar.into_owned())?; None } }; @@ -299,7 +299,7 @@ impl Automerge { } Some((Value::Scalar(value), _)) => { result.push(&datatype(&value).into()); - result.push(&ScalarValue(value).into()); + result.push(&ScalarValue(value.into_owned()).into()); Ok(Some(result)) } None => Ok(None), @@ -336,7 +336,7 @@ impl Automerge { (Value::Scalar(value), id) => { let sub = Array::new(); sub.push(&datatype(&value).into()); - sub.push(&ScalarValue(value).into()); + sub.push(&ScalarValue(value.into_owned()).into()); sub.push(&id.to_string().into()); result.push(&sub.into()); } @@ -378,7 +378,7 @@ impl Automerge { } (Value::Scalar(value), _) => { js_set(&patch, "datatype", datatype(&value))?; - js_set(&patch, "value", ScalarValue(value))?; + js_set(&patch, "value", ScalarValue(value.into_owned()))?; } }; js_set(&patch, "conflict", conflict)?; @@ -395,7 +395,7 @@ impl Automerge { } (Value::Scalar(value), _) => { js_set(&patch, "datatype", datatype(&value))?; - js_set(&patch, "value", ScalarValue(value))?; + js_set(&patch, "value", ScalarValue(value.into_owned()))?; } }; } @@ -658,7 +658,7 @@ impl Automerge { &mut self, value: &JsValue, datatype: Option, - ) -> Result<(Value, Vec<(Prop, JsValue)>), JsValue> { + ) -> Result<(Value<'static>, Vec<(Prop, JsValue)>), JsValue> { match self.import_scalar(value, &datatype) { Some(val) => Ok((val.into(), vec![])), None => { diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index e7c45a8b..d4c395da 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -212,41 +212,42 @@ impl Automerge { } } - fn insert_op_with_patch(&mut self, obj: &ObjId, op: Op) -> Op { + fn insert_op_with_patch(&mut self, obj: &ObjId, op: Op) { let q = self.ops.search(obj, query::SeekOpWithPatch::new(&op)); - for i in q.succ { - self.ops.replace(obj, i, |old_op| old_op.add_succ(&op)); - } + let query::SeekOpWithPatch { + pos, + 
succ, + seen, + values, + had_value_before, + .. + } = q; - if !op.is_delete() { - self.ops.insert(q.pos, obj, op.clone()); - } - - let obj = self.id_to_exid(obj.0); + let ex_obj = self.id_to_exid(obj.0); let key = match op.key { Key::Map(index) => self.ops.m.props[index].clone().into(), - Key::Seq(_) => q.seen.into(), + Key::Seq(_) => seen.into(), }; let patch = if op.insert { - let value = (op.value(), self.id_to_exid(op.id)); - Patch::Insert(obj, q.seen, value) + let value = (op.clone_value(), self.id_to_exid(op.id)); + Patch::Insert(ex_obj, seen, value) } else if op.is_delete() { - if let Some(winner) = &q.values.last() { - let value = (winner.value(), self.id_to_exid(winner.id)); - let conflict = q.values.len() > 1; + if let Some(winner) = &values.last() { + let value = (winner.clone_value(), self.id_to_exid(winner.id)); + let conflict = values.len() > 1; Patch::Assign(AssignPatch { - obj, + obj: ex_obj, key, value, conflict, }) } else { - Patch::Delete(obj, key) + Patch::Delete(ex_obj, key) } } else { - let winner = if let Some(last_value) = q.values.last() { + let winner = if let Some(last_value) = values.last() { if self.ops.m.lamport_cmp(op.id, last_value.id) == Ordering::Greater { &op } else { @@ -255,15 +256,15 @@ impl Automerge { } else { &op }; - let value = (winner.value(), self.id_to_exid(winner.id)); - if op.is_list_op() && !q.had_value_before { - Patch::Insert(obj, q.seen, value) + let value = (winner.clone_value(), self.id_to_exid(winner.id)); + if op.is_list_op() && !had_value_before { + Patch::Insert(ex_obj, seen, value) } else { Patch::Assign(AssignPatch { - obj, + obj: ex_obj, key, value, - conflict: !q.values.is_empty(), + conflict: !values.is_empty(), }) } }; @@ -272,7 +273,13 @@ impl Automerge { patches.push(patch); } - op + for i in succ { + self.ops.replace(obj, i, |old_op| old_op.add_succ(&op)); + } + + if !op.is_delete() { + self.ops.insert(pos, obj, op); + } } // KeysAt::() diff --git a/automerge/src/query.rs b/automerge/src/query.rs 
index a20fa558..37b3934b 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -36,11 +36,11 @@ pub(crate) use seek_op::SeekOp; pub(crate) use seek_op_with_patch::SeekOpWithPatch; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct CounterData { +pub(crate) struct CounterData<'a> { pos: usize, val: i64, succ: HashSet, - op: Op, + op: &'a Op, } pub(crate) trait TreeQuery<'a> { @@ -169,12 +169,12 @@ impl Default for Index { } #[derive(Debug, Clone, PartialEq, Default)] -pub(crate) struct VisWindow { - counters: HashMap, +pub(crate) struct VisWindow<'a> { + counters: HashMap>, } -impl VisWindow { - fn visible_at(&mut self, op: &Op, pos: usize, clock: &Clock) -> bool { +impl<'a> VisWindow<'a> { + fn visible_at(&mut self, op: &'a Op, pos: usize, clock: &Clock) -> bool { if !clock.covers(&op.id) { return false; } @@ -188,7 +188,7 @@ impl VisWindow { pos, val: start, succ: op.succ.iter().cloned().collect(), - op: op.clone(), + op, }, ); if !op.succ.iter().any(|i| clock.covers(i)) { @@ -201,7 +201,7 @@ impl VisWindow { if let Some(mut entry) = self.counters.get_mut(id) { entry.succ.remove(&op.id); entry.val += inc_val; - entry.op.action = OpType::Put(ScalarValue::counter(entry.val)); + // entry.op.action = OpType::Put(ScalarValue::counter(entry.val)); if !entry.succ.iter().any(|i| clock.covers(i)) { visible = true; } @@ -217,18 +217,17 @@ impl VisWindow { visible } - pub fn seen_op(&self, op: &Op, pos: usize) -> Vec<(usize, Op)> { + pub fn seen_op(&self, op: &'a Op, pos: usize) -> Vec<(usize, &'a Op)> { let mut result = vec![]; for pred in &op.pred { if let Some(entry) = self.counters.get(pred) { - result.push((entry.pos, entry.op.clone())); + result.push((entry.pos, entry.op)); } } if result.is_empty() { - vec![(pos, op.clone())] - } else { - result + result.push((pos, op)); } + result } } diff --git a/automerge/src/query/keys_at.rs b/automerge/src/query/keys_at.rs index 2fe04747..495c56e4 100644 --- a/automerge/src/query/keys_at.rs +++ 
b/automerge/src/query/keys_at.rs @@ -6,7 +6,7 @@ use std::fmt::Debug; #[derive(Debug)] pub(crate) struct KeysAt<'a> { clock: Clock, - window: VisWindow, + window: VisWindow<'a>, index: usize, last_key: Option, index_back: usize, diff --git a/automerge/src/query/len_at.rs b/automerge/src/query/len_at.rs index fb8bfe3d..9ae88bcc 100644 --- a/automerge/src/query/len_at.rs +++ b/automerge/src/query/len_at.rs @@ -3,15 +3,15 @@ use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct LenAt { +pub(crate) struct LenAt<'a> { pub len: usize, clock: Clock, pos: usize, last: Option, - window: VisWindow, + window: VisWindow<'a>, } -impl LenAt { +impl<'a> LenAt<'a> { pub fn new(clock: Clock) -> Self { LenAt { clock, @@ -23,8 +23,8 @@ impl LenAt { } } -impl<'a> TreeQuery<'a> for LenAt { - fn query_element(&mut self, op: &Op) -> QueryResult { +impl<'a> TreeQuery<'a> for LenAt<'a> { + fn query_element(&mut self, op: &'a Op) -> QueryResult { if op.insert { self.last = None; } diff --git a/automerge/src/query/list_vals_at.rs b/automerge/src/query/list_vals_at.rs index 5eb23d21..1120b5b9 100644 --- a/automerge/src/query/list_vals_at.rs +++ b/automerge/src/query/list_vals_at.rs @@ -3,15 +3,15 @@ use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct ListValsAt { +pub(crate) struct ListValsAt<'a> { clock: Clock, last_elem: Option, - pub ops: Vec, - window: VisWindow, + pub ops: Vec<&'a Op>, + window: VisWindow<'a>, pos: usize, } -impl ListValsAt { +impl<'a> ListValsAt<'a> { pub fn new(clock: Clock) -> Self { ListValsAt { clock, @@ -23,8 +23,8 @@ impl ListValsAt { } } -impl<'a> TreeQuery<'a> for ListValsAt { - fn query_element_with_metadata(&mut self, op: &Op, m: &OpSetMetadata) -> QueryResult { +impl<'a> TreeQuery<'a> for ListValsAt<'a> { + fn query_element_with_metadata(&mut self, op: &'a Op, m: &OpSetMetadata) -> QueryResult { if op.insert { self.last_elem = None; } diff 
--git a/automerge/src/query/nth_at.rs b/automerge/src/query/nth_at.rs index 45acc764..3314bdac 100644 --- a/automerge/src/query/nth_at.rs +++ b/automerge/src/query/nth_at.rs @@ -3,18 +3,18 @@ use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct NthAt { +pub(crate) struct NthAt<'a> { clock: Clock, target: usize, seen: usize, last_seen: Option, - window: VisWindow, - pub ops: Vec, + window: VisWindow<'a>, + pub ops: Vec<&'a Op>, pub ops_pos: Vec, pub pos: usize, } -impl NthAt { +impl<'a> NthAt<'a> { pub fn new(target: usize, clock: Clock) -> Self { NthAt { clock, @@ -29,8 +29,8 @@ impl NthAt { } } -impl<'a> TreeQuery<'a> for NthAt { - fn query_element(&mut self, element: &Op) -> QueryResult { +impl<'a> TreeQuery<'a> for NthAt<'a> { + fn query_element(&mut self, element: &'a Op) -> QueryResult { if element.insert { if self.seen > self.target { return QueryResult::Finish; diff --git a/automerge/src/query/prop_at.rs b/automerge/src/query/prop_at.rs index ffa66dd5..a053e0cf 100644 --- a/automerge/src/query/prop_at.rs +++ b/automerge/src/query/prop_at.rs @@ -4,15 +4,15 @@ use crate::types::{Clock, Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct PropAt { +pub(crate) struct PropAt<'a> { clock: Clock, key: Key, - pub ops: Vec, + pub ops: Vec<&'a Op>, pub ops_pos: Vec, pub pos: usize, } -impl PropAt { +impl<'a> PropAt<'a> { pub fn new(prop: usize, clock: Clock) -> Self { PropAt { clock, @@ -24,8 +24,12 @@ impl PropAt { } } -impl<'a> TreeQuery<'a> for PropAt { - fn query_node_with_metadata(&mut self, child: &OpTreeNode, m: &OpSetMetadata) -> QueryResult { +impl<'a> TreeQuery<'a> for PropAt<'a> { + fn query_node_with_metadata( + &mut self, + child: &'a OpTreeNode, + m: &OpSetMetadata, + ) -> QueryResult { let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); let mut window: VisWindow = Default::default(); self.pos = start; diff --git 
a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index ae57bd6d..d25d8dcc 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -5,18 +5,18 @@ use std::cmp::Ordering; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct SeekOpWithPatch { +pub(crate) struct SeekOpWithPatch<'a> { op: Op, pub pos: usize, pub succ: Vec, found: bool, pub seen: usize, last_seen: Option, - pub values: Vec, + pub values: Vec<&'a Op>, pub had_value_before: bool, } -impl SeekOpWithPatch { +impl<'a> SeekOpWithPatch<'a> { pub fn new(op: &Op) -> Self { SeekOpWithPatch { op: op.clone(), @@ -60,8 +60,12 @@ impl SeekOpWithPatch { } } -impl<'a> TreeQuery<'a> for SeekOpWithPatch { - fn query_node_with_metadata(&mut self, child: &OpTreeNode, m: &OpSetMetadata) -> QueryResult { +impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { + fn query_node_with_metadata( + &mut self, + child: &'a OpTreeNode, + m: &OpSetMetadata, + ) -> QueryResult { if self.found { return QueryResult::Descend; } @@ -133,7 +137,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch { if self.op.overwrites(op) { self.succ.push(self.pos); } else if op.visible() { - self.values.push(op.clone()); + self.values.push(op); } // Ops for the same key should be in ascending order of opId, so we break when @@ -156,7 +160,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch { // must always have lower Lamport timestamps than that op itself, and the ops // here all have greater opIds than the new op if op.visible() { - self.values.push(op.clone()); + self.values.push(op); } later_pos += 1; } @@ -167,7 +171,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch { // Only called when operating on a sequence (list/text) object, since updates of a map are // handled in `query_node_with_metadata`. 
- fn query_element_with_metadata(&mut self, e: &Op, m: &OpSetMetadata) -> QueryResult { + fn query_element_with_metadata(&mut self, e: &'a Op, m: &OpSetMetadata) -> QueryResult { let result = if !self.found { // First search for the referenced list element (i.e. the element we're updating, or // after which we're inserting) @@ -212,7 +216,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch { if e.visible() { self.had_value_before = true; if !overwritten { - self.values.push(e.clone()); + self.values.push(e); } } diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 6e6bfbbb..a94aba76 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -2,6 +2,7 @@ use crate::error; use crate::exid::ExId; use crate::legacy as amp; use serde::{Deserialize, Serialize}; +use std::borrow::Cow; use std::cmp::Eq; use std::fmt; use std::str::FromStr; @@ -448,7 +449,15 @@ impl Op { pub fn value(&self) -> Value { match &self.action { OpType::Make(obj_type) => Value::Object(*obj_type), - OpType::Put(scalar) => Value::Scalar(scalar.clone()), + OpType::Put(scalar) => Value::Scalar(Cow::Borrowed(scalar)), + _ => panic!("cant convert op into a value - {:?}", self), + } + } + + pub fn clone_value(&self) -> Value<'static> { + match &self.action { + OpType::Make(obj_type) => Value::Object(*obj_type), + OpType::Put(scalar) => Value::Scalar(Cow::Owned(scalar.clone())), _ => panic!("cant convert op into a value - {:?}", self), } } @@ -528,7 +537,7 @@ impl TryFrom<&[u8]> for ChangeHash { pub struct AssignPatch { pub obj: ExId, pub key: Prop, - pub value: (Value, ExId), + pub value: (Value<'static>, ExId), pub conflict: bool, } @@ -538,7 +547,7 @@ pub enum Patch { /// Associating a new value with a key in a map, or an existing list element Assign(AssignPatch), /// Inserting a new element into a list/text - Insert(ExId, usize, (Value, ExId)), + Insert(ExId, usize, (Value<'static>, ExId)), /// Deleting an element from a list/text Delete(ExId, Prop), } diff --git 
a/automerge/src/value.rs b/automerge/src/value.rs index 47279381..1f9913f5 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -1,58 +1,61 @@ use crate::error; -use crate::types::{ObjType, Op, OpId, OpType}; +use crate::types::ObjType; use serde::{Deserialize, Serialize, Serializer}; use smol_str::SmolStr; +use std::borrow::Cow; use std::fmt; #[derive(Debug, Clone, PartialEq)] -pub enum Value { +pub enum Value<'a> { Object(ObjType), - Scalar(ScalarValue), + // TODO: if we don't have to store this in patches any more then it might be able to be just a + // &'a ScalarValue rather than a Cow + Scalar(Cow<'a, ScalarValue>), } -impl Value { - pub fn map() -> Value { +impl<'a> Value<'a> { + pub fn map() -> Value<'a> { Value::Object(ObjType::Map) } - pub fn list() -> Value { + pub fn list() -> Value<'a> { Value::Object(ObjType::List) } - pub fn text() -> Value { + pub fn text() -> Value<'a> { Value::Object(ObjType::Text) } - pub fn table() -> Value { + pub fn table() -> Value<'a> { Value::Object(ObjType::Table) } - pub fn str(s: &str) -> Value { - Value::Scalar(ScalarValue::Str(s.into())) + pub fn str(s: &str) -> Value<'a> { + Value::Scalar(Cow::Owned(ScalarValue::Str(s.into()))) } - pub fn int(n: i64) -> Value { - Value::Scalar(ScalarValue::Int(n)) + pub fn int(n: i64) -> Value<'a> { + Value::Scalar(Cow::Owned(ScalarValue::Int(n))) } - pub fn uint(n: u64) -> Value { - Value::Scalar(ScalarValue::Uint(n)) + pub fn uint(n: u64) -> Value<'a> { + Value::Scalar(Cow::Owned(ScalarValue::Uint(n))) } - pub fn counter(n: i64) -> Value { - Value::Scalar(ScalarValue::counter(n)) + pub fn counter(n: i64) -> Value<'a> { + Value::Scalar(Cow::Owned(ScalarValue::counter(n))) } - pub fn timestamp(n: i64) -> Value { - Value::Scalar(ScalarValue::Timestamp(n)) + pub fn timestamp(n: i64) -> Value<'a> { + Value::Scalar(Cow::Owned(ScalarValue::Timestamp(n))) } - pub fn f64(n: f64) -> Value { - Value::Scalar(ScalarValue::F64(n)) + pub fn f64(n: f64) -> Value<'a> { + 
Value::Scalar(Cow::Owned(ScalarValue::F64(n))) } - pub fn bytes(b: Vec) -> Value { - Value::Scalar(ScalarValue::Bytes(b)) + pub fn bytes(b: Vec) -> Value<'a> { + Value::Scalar(Cow::Owned(ScalarValue::Bytes(b))) } pub fn is_object(&self) -> bool { @@ -64,44 +67,80 @@ impl Value { } pub fn is_bytes(&self) -> bool { - matches!(self, Self::Scalar(ScalarValue::Bytes(_))) + if let Self::Scalar(s) = self { + s.is_bytes() + } else { + false + } } pub fn is_str(&self) -> bool { - matches!(self, Self::Scalar(ScalarValue::Str(_))) + if let Self::Scalar(s) = self { + s.is_str() + } else { + false + } } pub fn is_int(&self) -> bool { - matches!(self, Self::Scalar(ScalarValue::Int(_))) + if let Self::Scalar(s) = self { + s.is_int() + } else { + false + } } pub fn is_uint(&self) -> bool { - matches!(self, Self::Scalar(ScalarValue::Uint(_))) + if let Self::Scalar(s) = self { + s.is_uint() + } else { + false + } } pub fn is_f64(&self) -> bool { - matches!(self, Self::Scalar(ScalarValue::F64(_))) + if let Self::Scalar(s) = self { + s.is_f64() + } else { + false + } } pub fn is_counter(&self) -> bool { - matches!(self, Self::Scalar(ScalarValue::Counter(_))) + if let Self::Scalar(s) = self { + s.is_counter() + } else { + false + } } pub fn is_timestamp(&self) -> bool { - matches!(self, Self::Scalar(ScalarValue::Timestamp(_))) + if let Self::Scalar(s) = self { + s.is_timestamp() + } else { + false + } } pub fn is_boolean(&self) -> bool { - matches!(self, Self::Scalar(ScalarValue::Boolean(_))) + if let Self::Scalar(s) = self { + s.is_boolean() + } else { + false + } } pub fn is_null(&self) -> bool { - matches!(self, Self::Scalar(ScalarValue::Null)) + if let Self::Scalar(s) = self { + s.is_null() + } else { + false + } } pub fn into_scalar(self) -> Result { match self { - Self::Scalar(s) => Ok(s), + Self::Scalar(s) => Ok(s.into_owned()), _ => Err(self), } } @@ -122,7 +161,10 @@ impl Value { pub fn into_bytes(self) -> Result, Self> { match self { - Value::Scalar(s) => 
s.into_bytes().map_err(Value::Scalar), + Value::Scalar(s) => s + .into_owned() + .into_bytes() + .map_err(|v| Value::Scalar(Cow::Owned(v))), _ => Err(self), } } @@ -136,7 +178,10 @@ impl Value { pub fn into_string(self) -> Result { match self { - Value::Scalar(s) => s.into_string().map_err(Value::Scalar), + Value::Scalar(s) => s + .into_owned() + .into_string() + .map_err(|v| Value::Scalar(Cow::Owned(v))), _ => Err(self), } } @@ -178,7 +223,7 @@ impl Value { } } -impl fmt::Display for Value { +impl<'a> fmt::Display for Value<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Value::Object(o) => write!(f, "Object: {}", o), @@ -187,110 +232,81 @@ impl fmt::Display for Value { } } -impl From<&str> for Value { +impl<'a> From<&str> for Value<'a> { fn from(s: &str) -> Self { - Value::Scalar(ScalarValue::Str(s.into())) + Value::Scalar(Cow::Owned(ScalarValue::Str(s.into()))) } } -impl From for Value { +impl<'a> From for Value<'a> { fn from(s: String) -> Self { - Value::Scalar(ScalarValue::Str(s.into())) + Value::Scalar(Cow::Owned(ScalarValue::Str(s.into()))) } } -impl From for Value { +impl<'a> From for Value<'a> { fn from(c: char) -> Self { - Value::Scalar(ScalarValue::Str(SmolStr::new(c.to_string()))) + Value::Scalar(Cow::Owned(ScalarValue::Str(SmolStr::new(c.to_string())))) } } -impl From> for Value { +impl<'a> From> for Value<'a> { fn from(v: Vec) -> Self { - Value::Scalar(ScalarValue::Bytes(v)) + Value::Scalar(Cow::Owned(ScalarValue::Bytes(v))) } } -impl From for Value { +impl<'a> From for Value<'a> { fn from(n: f64) -> Self { - Value::Scalar(ScalarValue::F64(n)) + Value::Scalar(Cow::Owned(ScalarValue::F64(n))) } } -impl From for Value { +impl<'a> From for Value<'a> { fn from(n: i64) -> Self { - Value::Scalar(ScalarValue::Int(n)) + Value::Scalar(Cow::Owned(ScalarValue::Int(n))) } } -impl From for Value { +impl<'a> From for Value<'a> { fn from(n: i32) -> Self { - Value::Scalar(ScalarValue::Int(n.into())) + 
Value::Scalar(Cow::Owned(ScalarValue::Int(n.into()))) } } -impl From for Value { +impl<'a> From for Value<'a> { fn from(n: u32) -> Self { - Value::Scalar(ScalarValue::Uint(n.into())) + Value::Scalar(Cow::Owned(ScalarValue::Uint(n.into()))) } } -impl From for Value { +impl<'a> From for Value<'a> { fn from(n: u64) -> Self { - Value::Scalar(ScalarValue::Uint(n)) + Value::Scalar(Cow::Owned(ScalarValue::Uint(n))) } } -impl From for Value { +impl<'a> From for Value<'a> { fn from(v: bool) -> Self { - Value::Scalar(ScalarValue::Boolean(v)) + Value::Scalar(Cow::Owned(ScalarValue::Boolean(v))) } } -impl From<()> for Value { +impl<'a> From<()> for Value<'a> { fn from(_: ()) -> Self { - Value::Scalar(ScalarValue::Null) + Value::Scalar(Cow::Owned(ScalarValue::Null)) } } -impl From for Value { +impl<'a> From for Value<'a> { fn from(o: ObjType) -> Self { Value::Object(o) } } -impl From for Value { +impl<'a> From for Value<'a> { fn from(v: ScalarValue) -> Self { - Value::Scalar(v) - } -} - -impl From<&Op> for (Value, OpId) { - fn from(op: &Op) -> Self { - match &op.action { - OpType::Make(obj_type) => (Value::Object(*obj_type), op.id), - OpType::Put(scalar) => (Value::Scalar(scalar.clone()), op.id), - _ => panic!("cant convert op into a value - {:?}", op), - } - } -} - -impl From for (Value, OpId) { - fn from(op: Op) -> Self { - match &op.action { - OpType::Make(obj_type) => (Value::Object(*obj_type), op.id), - OpType::Put(scalar) => (Value::Scalar(scalar.clone()), op.id), - _ => panic!("cant convert op into a value - {:?}", op), - } - } -} - -impl From for OpType { - fn from(v: Value) -> Self { - match v { - Value::Object(o) => OpType::Make(o), - Value::Scalar(s) => OpType::Put(s), - } + Value::Scalar(Cow::Owned(v)) } } diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index 5384c218..44b691a0 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -319,7 +319,7 @@ pub fn realize_prop>( let (val, obj_id) = doc.value(obj_id, 
prop).unwrap().unwrap(); match val { automerge::Value::Object(obj_type) => realize_obj(doc, &obj_id, obj_type), - automerge::Value::Scalar(v) => RealizedObject::Value(OrdScalarValue::from(v)), + automerge::Value::Scalar(v) => RealizedObject::Value(OrdScalarValue::from(v.into_owned())), } } @@ -356,7 +356,9 @@ fn realize_values>( for (value, objid) in doc.values(obj_id, key).unwrap() { let realized = match value { automerge::Value::Object(objtype) => realize_obj(doc, &objid, objtype), - automerge::Value::Scalar(v) => RealizedObject::Value(OrdScalarValue::from(v)), + automerge::Value::Scalar(v) => { + RealizedObject::Value(OrdScalarValue::from(v.into_owned())) + } }; values.insert(realized); } From 545807cf7495e1bbc0ad0cbdc46b4be373aff1ae Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 4 Apr 2022 13:06:36 +0100 Subject: [PATCH 229/730] Have historic versions clone the value again This is to currently avoid the issue with counters. --- automerge/src/automerge.rs | 4 ++-- automerge/src/query.rs | 22 +++++++++++----------- automerge/src/query/keys_at.rs | 2 +- automerge/src/query/len_at.rs | 8 ++++---- automerge/src/query/list_vals_at.rs | 10 +++++----- automerge/src/query/nth_at.rs | 10 +++++----- automerge/src/query/prop_at.rs | 8 ++++---- 7 files changed, 32 insertions(+), 32 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index d4c395da..e9808660 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -488,7 +488,7 @@ impl Automerge { .search(&obj, query::PropAt::new(p, clock)) .ops .into_iter() - .map(|o| (o.value(), self.id_to_exid(o.id))) + .map(|o| (o.clone_value(), self.id_to_exid(o.id))) .collect() } else { vec![] @@ -499,7 +499,7 @@ impl Automerge { .search(&obj, query::NthAt::new(n, clock)) .ops .into_iter() - .map(|o| (o.value(), self.id_to_exid(o.id))) + .map(|o| (o.clone_value(), self.id_to_exid(o.id))) .collect(), }; Ok(result) diff --git a/automerge/src/query.rs b/automerge/src/query.rs 
index 37b3934b..d6a6166a 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -36,11 +36,11 @@ pub(crate) use seek_op::SeekOp; pub(crate) use seek_op_with_patch::SeekOpWithPatch; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct CounterData<'a> { +pub(crate) struct CounterData { pos: usize, val: i64, succ: HashSet, - op: &'a Op, + op: Op, } pub(crate) trait TreeQuery<'a> { @@ -169,12 +169,12 @@ impl Default for Index { } #[derive(Debug, Clone, PartialEq, Default)] -pub(crate) struct VisWindow<'a> { - counters: HashMap>, +pub(crate) struct VisWindow { + counters: HashMap, } -impl<'a> VisWindow<'a> { - fn visible_at(&mut self, op: &'a Op, pos: usize, clock: &Clock) -> bool { +impl VisWindow { + fn visible_at(&mut self, op: &Op, pos: usize, clock: &Clock) -> bool { if !clock.covers(&op.id) { return false; } @@ -188,7 +188,7 @@ impl<'a> VisWindow<'a> { pos, val: start, succ: op.succ.iter().cloned().collect(), - op, + op: op.clone(), }, ); if !op.succ.iter().any(|i| clock.covers(i)) { @@ -201,7 +201,7 @@ impl<'a> VisWindow<'a> { if let Some(mut entry) = self.counters.get_mut(id) { entry.succ.remove(&op.id); entry.val += inc_val; - // entry.op.action = OpType::Put(ScalarValue::counter(entry.val)); + entry.op.action = OpType::Put(ScalarValue::counter(entry.val)); if !entry.succ.iter().any(|i| clock.covers(i)) { visible = true; } @@ -217,15 +217,15 @@ impl<'a> VisWindow<'a> { visible } - pub fn seen_op(&self, op: &'a Op, pos: usize) -> Vec<(usize, &'a Op)> { + pub fn seen_op(&self, op: &Op, pos: usize) -> Vec<(usize, Op)> { let mut result = vec![]; for pred in &op.pred { if let Some(entry) = self.counters.get(pred) { - result.push((entry.pos, entry.op)); + result.push((entry.pos, entry.op.clone())); } } if result.is_empty() { - result.push((pos, op)); + result.push((pos, op.clone())); } result } diff --git a/automerge/src/query/keys_at.rs b/automerge/src/query/keys_at.rs index 495c56e4..2fe04747 100644 --- a/automerge/src/query/keys_at.rs +++ 
b/automerge/src/query/keys_at.rs @@ -6,7 +6,7 @@ use std::fmt::Debug; #[derive(Debug)] pub(crate) struct KeysAt<'a> { clock: Clock, - window: VisWindow<'a>, + window: VisWindow, index: usize, last_key: Option, index_back: usize, diff --git a/automerge/src/query/len_at.rs b/automerge/src/query/len_at.rs index 9ae88bcc..25ed342a 100644 --- a/automerge/src/query/len_at.rs +++ b/automerge/src/query/len_at.rs @@ -3,15 +3,15 @@ use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct LenAt<'a> { +pub(crate) struct LenAt { pub len: usize, clock: Clock, pos: usize, last: Option, - window: VisWindow<'a>, + window: VisWindow, } -impl<'a> LenAt<'a> { +impl LenAt { pub fn new(clock: Clock) -> Self { LenAt { clock, @@ -23,7 +23,7 @@ impl<'a> LenAt<'a> { } } -impl<'a> TreeQuery<'a> for LenAt<'a> { +impl<'a> TreeQuery<'a> for LenAt { fn query_element(&mut self, op: &'a Op) -> QueryResult { if op.insert { self.last = None; diff --git a/automerge/src/query/list_vals_at.rs b/automerge/src/query/list_vals_at.rs index 1120b5b9..185194fc 100644 --- a/automerge/src/query/list_vals_at.rs +++ b/automerge/src/query/list_vals_at.rs @@ -3,15 +3,15 @@ use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct ListValsAt<'a> { +pub(crate) struct ListValsAt { clock: Clock, last_elem: Option, - pub ops: Vec<&'a Op>, - window: VisWindow<'a>, + pub ops: Vec, + window: VisWindow, pos: usize, } -impl<'a> ListValsAt<'a> { +impl ListValsAt { pub fn new(clock: Clock) -> Self { ListValsAt { clock, @@ -23,7 +23,7 @@ impl<'a> ListValsAt<'a> { } } -impl<'a> TreeQuery<'a> for ListValsAt<'a> { +impl<'a> TreeQuery<'a> for ListValsAt { fn query_element_with_metadata(&mut self, op: &'a Op, m: &OpSetMetadata) -> QueryResult { if op.insert { self.last_elem = None; diff --git a/automerge/src/query/nth_at.rs b/automerge/src/query/nth_at.rs index 3314bdac..cb7db8d5 100644 --- 
a/automerge/src/query/nth_at.rs +++ b/automerge/src/query/nth_at.rs @@ -3,18 +3,18 @@ use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct NthAt<'a> { +pub(crate) struct NthAt { clock: Clock, target: usize, seen: usize, last_seen: Option, - window: VisWindow<'a>, - pub ops: Vec<&'a Op>, + window: VisWindow, + pub ops: Vec, pub ops_pos: Vec, pub pos: usize, } -impl<'a> NthAt<'a> { +impl NthAt { pub fn new(target: usize, clock: Clock) -> Self { NthAt { clock, @@ -29,7 +29,7 @@ impl<'a> NthAt<'a> { } } -impl<'a> TreeQuery<'a> for NthAt<'a> { +impl<'a> TreeQuery<'a> for NthAt { fn query_element(&mut self, element: &'a Op) -> QueryResult { if element.insert { if self.seen > self.target { diff --git a/automerge/src/query/prop_at.rs b/automerge/src/query/prop_at.rs index a053e0cf..aeec0bf2 100644 --- a/automerge/src/query/prop_at.rs +++ b/automerge/src/query/prop_at.rs @@ -4,15 +4,15 @@ use crate::types::{Clock, Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct PropAt<'a> { +pub(crate) struct PropAt { clock: Clock, key: Key, - pub ops: Vec<&'a Op>, + pub ops: Vec, pub ops_pos: Vec, pub pos: usize, } -impl<'a> PropAt<'a> { +impl PropAt { pub fn new(prop: usize, clock: Clock) -> Self { PropAt { clock, @@ -24,7 +24,7 @@ impl<'a> PropAt<'a> { } } -impl<'a> TreeQuery<'a> for PropAt<'a> { +impl<'a> TreeQuery<'a> for PropAt { fn query_node_with_metadata( &mut self, child: &'a OpTreeNode, From d8c126d1bc60e599d5c2921bda0aaa4b5b041c9f Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 14 Mar 2022 12:08:03 -0400 Subject: [PATCH 230/730] wip --- automerge-wasm/README.md | 853 +++++++---------------------------- automerge-wasm/index.d.ts | 78 +--- automerge-wasm/node-index.js | 5 + automerge-wasm/package.json | 25 +- automerge-wasm/test/test.ts | 11 +- 5 files changed, 200 insertions(+), 772 deletions(-) create mode 100644 automerge-wasm/node-index.js diff --git 
a/automerge-wasm/README.md b/automerge-wasm/README.md index 63548307..527f0baa 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -1,696 +1,185 @@ ## Automerge WASM Low Level Interface -This is a low level automerge library written in rust exporting a javascript API via WASM. This low level api is the underpinning to the `automerge-js` library that reimplements the Automerge API via these low level functions. +This package is a low level interface to the [automerge rust](https://github.com/automerge/automerge-rs/tree/experiment) CRDT. The api is intended to be a "close to the metal" as possible only a few ease of use accomodations. This library is used as the underpinnings for the [Automerge JS wrapper](https://github.com/automerge/automerge-rs/tree/experiment/automerge-js) and can be used as is or as a basis for another higher level expression of a CRDT. -### Static Functions +### Why CRDT? -### Methods +// TODO -`doc.clone(actor?: string)` : Make a complete +### Terminology -`doc.free()` : deallocate WASM memory associated with a document +The term Actor, Object Id and Heads are used through this documentation. Detailed explanations are in the glossary at the end of this readme. But the most basic definition would be... -```rust - #[wasm_bindgen] - pub fn free(self) {} +An Actor is a unique id that distinguishes a single writer to a document. It can be any hex string. - #[wasm_bindgen(js_name = pendingOps)] - pub fn pending_ops(&self) -> JsValue { - (self.0.pending_ops() as u32).into() - } +An Object id uniquely identifies a Map, List or Text object within a document. This id comes as a string in the form on `{number}@{actor}` - so `"10@aabbcc"` for example. The string `"_root"` or `"/"` can also be used to refer to the document root. These strings are durable and can be used on any decendant or copy of the document that generated them. 
- pub fn commit(&mut self, message: Option, time: Option) -> Array { - let heads = self.0.commit(message, time.map(|n| n as i64)); - let heads: Array = heads - .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) - .collect(); - heads - } +Heads refers to a set of hashes that uniquly identifies a point in time in a documents history. Heads are useful for comparing documents state or retrieving past states from the document. - pub fn rollback(&mut self) -> f64 { - self.0.rollback() as f64 - } +### Using the Library and Creating a Document - pub fn keys(&mut self, obj: String, heads: Option) -> Result { - let obj = self.import(obj)?; - let result = if let Some(heads) = get_heads(heads) { - self.0.keys_at(&obj, &heads) - } else { - self.0.keys(&obj) - } - .iter() - .map(|s| JsValue::from_str(s)) - .collect(); - Ok(result) - } +This is a rust/wasm package and will work in a node or web environment. Node is able to load wasm syncronously but a web environment is not. The default import of the package is a function that returns a promise that resolves once the wasm is loaded. - pub fn text(&mut self, obj: String, heads: Option) -> Result { - let obj = self.import(obj)?; - if let Some(heads) = get_heads(heads) { - self.0.text_at(&obj, &heads) - } else { - self.0.text(&obj) - } - .map_err(to_js_err) - } +This creates a document in node. The memory allocated is handled by wasm and isn't managed by the javascript garbage collector and thus needs to be manually freed. 
- pub fn splice( - &mut self, - obj: String, - start: f64, - delete_count: f64, - text: JsValue, - ) -> Result, JsValue> { - let obj = self.import(obj)?; - let start = start as usize; - let delete_count = delete_count as usize; - let mut vals = vec![]; - if let Some(t) = text.as_string() { - self.0 - .splice_text(&obj, start, delete_count, &t) - .map_err(to_js_err)?; - Ok(None) - } else { - if let Ok(array) = text.dyn_into::() { - for i in array.iter() { - if let Ok(array) = i.clone().dyn_into::() { - let value = array.get(1); - let datatype = array.get(2); - let value = self.import_value(value, datatype.as_string())?; - vals.push(value); - } else { - let value = self.import_value(i, None)?; - vals.push(value); - } - } - } - let result = self - .0 - .splice(&obj, start, delete_count, vals) - .map_err(to_js_err)?; - if result.is_empty() { - Ok(None) - } else { - let result: Array = result - .iter() - .map(|r| JsValue::from(r.to_string())) - .collect(); - Ok(result.into()) - } - } - } - - pub fn push( - &mut self, - obj: String, - value: JsValue, - datatype: Option, - ) -> Result, JsValue> { - let obj = self.import(obj)?; - let value = self.import_value(value, datatype)?; - let index = self.0.length(&obj); - let opid = self.0.insert(&obj, index, value).map_err(to_js_err)?; - Ok(opid.map(|id| id.to_string())) - } - - pub fn insert( - &mut self, - obj: String, - index: f64, - value: JsValue, - datatype: Option, - ) -> Result, JsValue> { - let obj = self.import(obj)?; - let index = index as f64; - let value = self.import_value(value, datatype)?; - let opid = self - .0 - .insert(&obj, index as usize, value) - .map_err(to_js_err)?; - Ok(opid.map(|id| id.to_string())) - } - - pub fn set( - &mut self, - obj: String, - prop: JsValue, - value: JsValue, - datatype: Option, - ) -> Result, JsValue> { - let obj = self.import(obj)?; - let prop = self.import_prop(prop)?; - let value = self.import_value(value, datatype)?; - let opid = self.0.set(&obj, prop, 
value).map_err(to_js_err)?; - Ok(opid.map(|id| id.to_string())) - } - - pub fn make( - &mut self, - obj: String, - prop: JsValue, - value: JsValue, - ) -> Result { - let obj = self.import(obj)?; - let prop = self.import_prop(prop)?; - let value = self.import_value(value, None)?; - if value.is_object() { - let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; - Ok(opid.unwrap().to_string()) - } else { - Err("invalid object type".into()) - } - } - - pub fn inc(&mut self, obj: String, prop: JsValue, value: JsValue) -> Result<(), JsValue> { - let obj = self.import(obj)?; - let prop = self.import_prop(prop)?; - let value: f64 = value - .as_f64() - .ok_or("inc needs a numberic value") - .map_err(to_js_err)?; - self.0.inc(&obj, prop, value as i64).map_err(to_js_err)?; - Ok(()) - } - - pub fn value( - &mut self, - obj: String, - prop: JsValue, - heads: Option, - ) -> Result { - let obj = self.import(obj)?; - let result = Array::new(); - let prop = to_prop(prop); - let heads = get_heads(heads); - if let Ok(prop) = prop { - let value = if let Some(h) = heads { - self.0.value_at(&obj, prop, &h) - } else { - self.0.value(&obj, prop) - } - .map_err(to_js_err)?; - match value { - Some((Value::Object(obj_type), obj_id)) => { - result.push(&obj_type.to_string().into()); - result.push(&obj_id.to_string().into()); - } - Some((Value::Scalar(value), _)) => { - result.push(&datatype(&value).into()); - result.push(&ScalarValue(value).into()); - } - None => {} - } - } - Ok(result) - } - - pub fn values( - &mut self, - obj: String, - arg: JsValue, - heads: Option, - ) -> Result { - let obj = self.import(obj)?; - let result = Array::new(); - let prop = to_prop(arg); - if let Ok(prop) = prop { - let values = if let Some(heads) = get_heads(heads) { - self.0.values_at(&obj, prop, &heads) - } else { - self.0.values(&obj, prop) - } - .map_err(to_js_err)?; - for value in values { - match value { - (Value::Object(obj_type), obj_id) => { - let sub = Array::new(); - 
sub.push(&obj_type.to_string().into()); - sub.push(&obj_id.to_string().into()); - result.push(&sub.into()); - } - (Value::Scalar(value), id) => { - let sub = Array::new(); - sub.push(&datatype(&value).into()); - sub.push(&ScalarValue(value).into()); - sub.push(&id.to_string().into()); - result.push(&sub.into()); - } - } - } - } - Ok(result) - } - - pub fn length(&mut self, obj: String, heads: Option) -> Result { - let obj = self.import(obj)?; - if let Some(heads) = get_heads(heads) { - Ok(self.0.length_at(&obj, &heads) as f64) - } else { - Ok(self.0.length(&obj) as f64) - } - } - - pub fn del(&mut self, obj: String, prop: JsValue) -> Result<(), JsValue> { - let obj = self.import(obj)?; - let prop = to_prop(prop)?; - self.0.del(&obj, prop).map_err(to_js_err)?; - Ok(()) - } - - pub fn mark( - &mut self, - obj: JsValue, - range: JsValue, - name: JsValue, - value: JsValue, - datatype: JsValue, - ) -> Result<(), JsValue> { - let obj = self.import(obj)?; - let re = Regex::new(r"([\[\(])(\d+)\.\.(\d+)([\)\]])").unwrap(); - let range = range.as_string().ok_or("range must be a string")?; - let cap = re.captures_iter(&range).next().ok_or("range must be in the form of (start..end] or [start..end) etc... 
() for sticky, [] for normal")?; - let start: usize = cap[2].parse().map_err(|_| to_js_err("invalid start"))?; - let end: usize = cap[3].parse().map_err(|_| to_js_err("invalid end"))?; - let start_sticky = &cap[1] == "("; - let end_sticky = &cap[4] == ")"; - let name = name - .as_string() - .ok_or("invalid mark name") - .map_err(to_js_err)?; - let value = self.import_scalar(&value, datatype.as_string())?; - self.0 - .mark(&obj, start, start_sticky, end, end_sticky, &name, value) - .map_err(to_js_err)?; - Ok(()) - } - - pub fn spans(&mut self, obj: JsValue) -> Result { - let obj = self.import(obj)?; - let text = self.0.text(&obj).map_err(to_js_err)?; - let spans = self.0.spans(&obj).map_err(to_js_err)?; - let mut last_pos = 0; - let result = Array::new(); - for s in spans { - let marks = Array::new(); - for m in s.marks { - let mark = Array::new(); - mark.push(&m.0.into()); - mark.push(&datatype(&m.1).into()); - mark.push(&ScalarValue(m.1).into()); - marks.push(&mark.into()); - } - let text_span = &text[last_pos..s.pos]; //.slice(last_pos, s.pos); - if text_span.len() > 0 { - result.push(&text_span.into()); - } - result.push(&marks); - last_pos = s.pos; - //let obj = Object::new().into(); - //js_set(&obj, "pos", s.pos as i32)?; - //js_set(&obj, "marks", marks)?; - //result.push(&obj.into()); - } - let text_span = &text[last_pos..]; - if text_span.len() > 0 { - result.push(&text_span.into()); - } - Ok(result.into()) - } - - pub fn save(&mut self) -> Result { - self.0 - .save() - .map(|v| Uint8Array::from(v.as_slice())) - .map_err(to_js_err) - } - - #[wasm_bindgen(js_name = saveIncremental)] - pub fn save_incremental(&mut self) -> Uint8Array { - let bytes = self.0.save_incremental(); - Uint8Array::from(bytes.as_slice()) - } - - #[wasm_bindgen(js_name = loadIncremental)] - pub fn load_incremental(&mut self, data: Uint8Array) -> Result { - let data = data.to_vec(); - let len = self.0.load_incremental(&data).map_err(to_js_err)?; - Ok(len as f64) - } - - 
#[wasm_bindgen(js_name = applyChanges)] - pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> { - let changes: Vec<_> = JS(changes).try_into()?; - self.0.apply_changes(&changes).map_err(to_js_err)?; - Ok(()) - } - - #[wasm_bindgen(js_name = getChanges)] - pub fn get_changes(&mut self, have_deps: JsValue) -> Result { - let deps: Vec<_> = JS(have_deps).try_into()?; - let changes = self.0.get_changes(&deps); - let changes: Array = changes - .iter() - .map(|c| Uint8Array::from(c.raw_bytes())) - .collect(); - Ok(changes) - } - - #[wasm_bindgen(js_name = getChangesAdded)] - pub fn get_changes_added(&mut self, other: &Automerge) -> Result { - let changes = self.0.get_changes_added(&other.0); - let changes: Array = changes - .iter() - .map(|c| Uint8Array::from(c.raw_bytes())) - .collect(); - Ok(changes) - } - - #[wasm_bindgen(js_name = getHeads)] - pub fn get_heads(&mut self) -> Array { - let heads = self.0.get_heads(); - let heads: Array = heads - .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) - .collect(); - heads - } - - #[wasm_bindgen(js_name = getActorId)] - pub fn get_actor_id(&mut self) -> String { - let actor = self.0.get_actor(); - actor.to_string() - } - - #[wasm_bindgen(js_name = getLastLocalChange)] - pub fn get_last_local_change(&mut self) -> Result, JsValue> { - if let Some(change) = self.0.get_last_local_change() { - Ok(Some(Uint8Array::from(change.raw_bytes()))) - } else { - Ok(None) - } - } - - pub fn dump(&self) { - self.0.dump() - } - - #[wasm_bindgen(js_name = getMissingDeps)] - pub fn get_missing_deps(&mut self, heads: Option) -> Result { - let heads = get_heads(heads).unwrap_or_default(); - let deps = self.0.get_missing_deps(&heads); - let deps: Array = deps - .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) - .collect(); - Ok(deps) - } - - #[wasm_bindgen(js_name = receiveSyncMessage)] - pub fn receive_sync_message( - &mut self, - state: &mut SyncState, - message: Uint8Array, - ) -> Result<(), JsValue> { - let 
message = message.to_vec(); - let message = am::SyncMessage::decode(message.as_slice()).map_err(to_js_err)?; - self.0 - .receive_sync_message(&mut state.0, message) - .map_err(to_js_err)?; - Ok(()) - } - - #[wasm_bindgen(js_name = generateSyncMessage)] - pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result { - if let Some(message) = self.0.generate_sync_message(&mut state.0) { - Ok(Uint8Array::from(message.encode().map_err(to_js_err)?.as_slice()).into()) - } else { - Ok(JsValue::null()) - } - } - - #[wasm_bindgen(js_name = toJS)] - pub fn to_js(&self) -> JsValue { - map_to_js(&self.0, ROOT) - } - - fn import(&self, id: String) -> Result { - self.0.import(&id).map_err(to_js_err) - } - - fn import_prop(&mut self, prop: JsValue) -> Result { - if let Some(s) = prop.as_string() { - Ok(s.into()) - } else if let Some(n) = prop.as_f64() { - Ok((n as usize).into()) - } else { - Err(format!("invalid prop {:?}", prop).into()) - } - } - - fn import_scalar( - &mut self, - value: &JsValue, - datatype: Option, - ) -> Result { - match datatype.as_deref() { - Some("boolean") => value - .as_bool() - .ok_or_else(|| "value must be a bool".into()) - .map(am::ScalarValue::Boolean), - Some("int") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::Int(v as i64)), - Some("uint") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::Uint(v as u64)), - Some("f64") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(am::ScalarValue::F64), - Some("bytes") => Ok(am::ScalarValue::Bytes( - value.clone().dyn_into::().unwrap().to_vec(), - )), - Some("counter") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::counter(v as i64)), - Some("timestamp") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::Timestamp(v as i64)), - /* - Some("bytes") => unimplemented!(), - 
Some("cursor") => unimplemented!(), - */ - Some("null") => Ok(am::ScalarValue::Null), - Some(_) => Err(format!("unknown datatype {:?}", datatype).into()), - None => { - if value.is_null() { - Ok(am::ScalarValue::Null) - } else if let Some(b) = value.as_bool() { - Ok(am::ScalarValue::Boolean(b)) - } else if let Some(s) = value.as_string() { - // FIXME - we need to detect str vs int vs float vs bool here :/ - Ok(am::ScalarValue::Str(s.into())) - } else if let Some(n) = value.as_f64() { - if (n.round() - n).abs() < f64::EPSILON { - Ok(am::ScalarValue::Int(n as i64)) - } else { - Ok(am::ScalarValue::F64(n)) - } - // } else if let Some(o) = to_objtype(&value) { - // Ok(o.into()) - } else if let Ok(d) = value.clone().dyn_into::() { - Ok(am::ScalarValue::Timestamp(d.get_time() as i64)) - } else if let Ok(o) = &value.clone().dyn_into::() { - Ok(am::ScalarValue::Bytes(o.to_vec())) - } else { - Err("value is invalid".into()) - } - } - } - } - - fn import_value(&mut self, value: JsValue, datatype: Option) -> Result { - match self.import_scalar(&value, datatype) { - Ok(val) => Ok(val.into()), - Err(err) => { - if let Some(o) = to_objtype(&value) { - Ok(o.into()) - } else { - Err(err) - } - } - } - /* - match datatype.as_deref() { - Some("boolean") => value - .as_bool() - .ok_or_else(|| "value must be a bool".into()) - .map(|v| am::ScalarValue::Boolean(v).into()), - Some("int") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::Int(v as i64).into()), - Some("uint") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::Uint(v as u64).into()), - Some("f64") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|n| am::ScalarValue::F64(n).into()), - Some("bytes") => { - Ok(am::ScalarValue::Bytes(value.dyn_into::().unwrap().to_vec()).into()) - } - Some("counter") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| 
am::ScalarValue::counter(v as i64).into()), - Some("timestamp") => value - .as_f64() - .ok_or_else(|| "value must be a number".into()) - .map(|v| am::ScalarValue::Timestamp(v as i64).into()), - Some("null") => Ok(am::ScalarValue::Null.into()), - Some(_) => Err(format!("unknown datatype {:?}", datatype).into()), - None => { - if value.is_null() { - Ok(am::ScalarValue::Null.into()) - } else if let Some(b) = value.as_bool() { - Ok(am::ScalarValue::Boolean(b).into()) - } else if let Some(s) = value.as_string() { - // FIXME - we need to detect str vs int vs float vs bool here :/ - Ok(am::ScalarValue::Str(s.into()).into()) - } else if let Some(n) = value.as_f64() { - if (n.round() - n).abs() < f64::EPSILON { - Ok(am::ScalarValue::Int(n as i64).into()) - } else { - Ok(am::ScalarValue::F64(n).into()) - } - } else if let Some(o) = to_objtype(&value) { - Ok(o.into()) - } else if let Ok(d) = value.clone().dyn_into::() { - Ok(am::ScalarValue::Timestamp(d.get_time() as i64).into()) - } else if let Ok(o) = &value.dyn_into::() { - Ok(am::ScalarValue::Bytes(o.to_vec()).into()) - } else { - Err("value is invalid".into()) - } - } - } - */ - } - -} - -#[wasm_bindgen(js_name = create)] -pub fn init(actor: Option) -> Result { - console_error_panic_hook::set_once(); - Automerge::new(actor) -} - -#[wasm_bindgen(js_name = loadDoc)] -pub fn load(data: Uint8Array, actor: Option) -> Result { - let data = data.to_vec(); - let mut automerge = am::Automerge::load(&data).map_err(to_js_err)?; - if let Some(s) = actor { - let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); - automerge.set_actor(actor) - } - Ok(Automerge(automerge)) -} - -#[wasm_bindgen(js_name = encodeChange)] -pub fn encode_change(change: JsValue) -> Result { - let change: am::ExpandedChange = change.into_serde().map_err(to_js_err)?; - let change: Change = change.into(); - Ok(Uint8Array::from(change.raw_bytes())) -} - -#[wasm_bindgen(js_name = decodeChange)] -pub fn decode_change(change: Uint8Array) 
-> Result { - let change = Change::from_bytes(change.to_vec()).map_err(to_js_err)?; - let change: am::ExpandedChange = change.decode(); - JsValue::from_serde(&change).map_err(to_js_err) -} - -#[wasm_bindgen(js_name = initSyncState)] -pub fn init_sync_state() -> SyncState { - SyncState(am::SyncState::new()) -} - -// this is needed to be compatible with the automerge-js api -#[wasm_bindgen(js_name = importSyncState)] -pub fn import_sync_state(state: JsValue) -> Result { - Ok(SyncState(JS(state).try_into()?)) -} - -// this is needed to be compatible with the automerge-js api -#[wasm_bindgen(js_name = exportSyncState)] -pub fn export_sync_state(state: SyncState) -> JsValue { - JS::from(state.0).into() -} - -#[wasm_bindgen(js_name = encodeSyncMessage)] -pub fn encode_sync_message(message: JsValue) -> Result { - let heads = js_get(&message, "heads")?.try_into()?; - let need = js_get(&message, "need")?.try_into()?; - let changes = js_get(&message, "changes")?.try_into()?; - let have = js_get(&message, "have")?.try_into()?; - Ok(Uint8Array::from( - am::SyncMessage { - heads, - need, - have, - changes, - } - .encode() - .unwrap() - .as_slice(), - )) -} - -#[wasm_bindgen(js_name = decodeSyncMessage)] -pub fn decode_sync_message(msg: Uint8Array) -> Result { - let data = msg.to_vec(); - let msg = am::SyncMessage::decode(&data).map_err(to_js_err)?; - let heads = AR::from(msg.heads.as_slice()); - let need = AR::from(msg.need.as_slice()); - let changes = AR::from(msg.changes.as_slice()); - let have = AR::from(msg.have.as_slice()); - let obj = Object::new().into(); - js_set(&obj, "heads", heads)?; - js_set(&obj, "need", need)?; - js_set(&obj, "have", have)?; - js_set(&obj, "changes", changes)?; - Ok(obj) -} - -#[wasm_bindgen(js_name = encodeSyncState)] -pub fn encode_sync_state(state: SyncState) -> Result { - let state = state.0; - Ok(Uint8Array::from( - state.encode().map_err(to_js_err)?.as_slice(), - )) -} - -#[wasm_bindgen(js_name = decodeSyncState)] -pub fn 
decode_sync_state(data: Uint8Array) -> Result { - SyncState::decode(data) -} - -#[wasm_bindgen(js_name = MAP)] -pub struct Map {} - -#[wasm_bindgen(js_name = LIST)] -pub struct List {} - -#[wasm_bindgen(js_name = TEXT)] -pub struct Text {} - -#[wasm_bindgen(js_name = TABLE)] -pub struct Table {} ``` + import { create } from "automerge-wasm" + + let doc = create() + + doc.free() + +``` + +While this will work in both node and in a web context + +``` + import init, { create } from "automerge-wasm" + + init().then(_ => { + let doc = create() + doc.free() + }) + +``` + +The examples below will assume a node context for brevity. + +### Automerge Scalar Types + +Automerge has many scalar types. Methods like `set()` and `insert()` take an optional datatype parameter. Normally the type can be inferred but in some cases, such as telling the difference between int, uint and a counter, it cannot. + +These are sets without a datatype + +``` + import { create } from "automerge-wasm" + + let doc = create() + doc.set("/", "prop1", 100) // int + doc.set("/", "prop2", 3.14) + doc.set("/", "prop3", "hello world") + doc.set("/", "prop4", new Date()) + doc.set("/", "prop5", new Uint8Array([1,2,3])) + doc.set("/", "prop6", true) + doc.set("/", "prop7", null) + doc.free() +``` + +Sets with a datatype and examples of all the supported datatypes. + +While int vs uint vs f64 matters little in javascript, Automerge is a cross platform library where these distinctions matter. 
+ +``` + import { create } from "automerge-wasm" + + let doc = create() + doc.set("/", "prop1", 100, "int") + doc.set("/", "prop2", 100, "uint") + doc.set("/", "prop3", 100.5, "f64") + doc.set("/", "prop4", 100, "counter") + doc.set("/", "prop5", new Date(), "timestamp") + doc.set("/", "prop6", "hello world", "str") + doc.set("/", "prop7", new Uint8Array([1,2,3]), "bytes") + doc.set("/", "prop8", true, "boolean") + doc.set("/", "prop9", null, "null") + doc.free() +``` + +### Automerge Object Types + +Automerge WASM supports 3 object types. Maps, lists, and text. Maps are key value stores where the values can be any scalar type or any object type. Lists are numerically indexed set of data that can hold any scalar or any object type. Text is numerically indexed sets of graphmeme clusters. + +``` + import { create } from "automerge-wasm" + + let doc = create() + + // you can create an object by passing in the inital state - if blank pass in `{}` + // the return value is the Object Id + // these functions all return an object id + + let config = doc.set_object("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) + let token = doc.set_object("/", "tokens", {}) + + // lists can be made with javascript arrays + + let birds = doc.set_object("/", "birds", ["bluejay", "penguin", "puffin"]) + let bots = doc.set_object("/", "bots", []) + + // text is initialized with a string + + let notes = doc.set_object("/", "notes", "Hello world!") + + doc.free() +``` + +You can access objects by passing the object id as the first parameter for a call. + +``` + import { create } from "automerge-wasm" + + let doc = create() + + let config = doc.set_object("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) + + doc.set(config, "align", "right") +``` + +Anywhere Object Id's are being used a path can also be used. 
The following two statements are equivelent: + +``` + // get the id then use it + + let id = doc.value("/", "config")[1] + doc.set(id, "align", "right") + + // use a path instead + + doc.set("/config", "align", "right") +``` + +Using the id directly is always faster (as it prevents the path to id conversion internally) so it is preferred for performance critical code. + +### Maps + +### Lists + +### Text + +### Tables + +Automerge's Table type is currently not implemented + +### Counters + +### Viewing Old Versions of the Document + +### Forking and Merging + +### Saving and Loading + +### Syncing + +### Glossery: Actors + +Some basic concepts you will need to know to better understand the api are Actors and Object Ids. + +Actors are ids that need to be unique to each process writing to a document. This is normally one actor per device. Or for a web app one actor per tab per browser would be needed. It can be a uuid, or a public key, or a certificate, as your application demands. All that matters is that its bytes are unique. Actors are always expressed in this api as a hex string. + +Methods that create new documents will generate random actors automatically - if you wish to supply your own it is always taken as an optional argument. This is true for the following functions. 
+ +``` + import { create, loadDoc } from "automerge-wasm" + + let doc1 = create() // random actorid + let doc2 = create("aabbccdd") + let doc3 = doc1.fork() // random actorid + let doc4 = doc2.for("ccdd0011") + let doc5 = loadDoc(doc3.save()) // random actorid + let doc6 = loadDoc(doc4.save(), "00aabb11") + + let actor = doc1.getActor() +``` + +### Glossery: Object Id's +### Glossery: Heads diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index d694714d..e90db501 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -142,7 +142,7 @@ export class Automerge { // memory management free(): void; - clone(actor?: string): Automerge; + //clone(actor?: string): Automerge; fork(actor?: string): Automerge; // dump internal state to console.log @@ -160,77 +160,5 @@ export class SyncState { readonly sharedHeads: any; } -export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module; - -export interface InitOutput { - readonly memory: WebAssembly.Memory; - readonly __wbg_automerge_free: (a: number) => void; - readonly automerge_new: (a: number, b: number, c: number) => void; - readonly automerge_clone: (a: number, b: number, c: number, d: number) => void; - readonly automerge_free: (a: number) => void; - readonly automerge_pendingOps: (a: number) => number; - readonly automerge_commit: (a: number, b: number, c: number, d: number, e: number) => number; - readonly automerge_rollback: (a: number) => number; - readonly automerge_keys: (a: number, b: number, c: number, d: number, e: number) => void; - readonly automerge_text: (a: number, b: number, c: number, d: number, e: number) => void; - readonly automerge_splice: (a: number, b: number, c: number, d: number, e: number, f: number, g: number) => void; - readonly automerge_push: (a: number, b: number, c: number, d: number, e: number, f: number, g: number) => void; - readonly automerge_insert: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, 
h: number) => void; - readonly automerge_set: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => void; - readonly automerge_inc: (a: number, b: number, c: number, d: number, e: number, f: number) => void; - readonly automerge_value: (a: number, b: number, c: number, d: number, e: number, f: number) => void; - readonly automerge_values: (a: number, b: number, c: number, d: number, e: number, f: number) => void; - readonly automerge_length: (a: number, b: number, c: number, d: number, e: number) => void; - readonly automerge_del: (a: number, b: number, c: number, d: number, e: number) => void; - readonly automerge_save: (a: number, b: number) => void; - readonly automerge_saveIncremental: (a: number) => number; - readonly automerge_loadIncremental: (a: number, b: number, c: number) => void; - readonly automerge_applyChanges: (a: number, b: number, c: number) => void; - readonly automerge_getChanges: (a: number, b: number, c: number) => void; - readonly automerge_getChangesAdded: (a: number, b: number, c: number) => void; - readonly automerge_getHeads: (a: number) => number; - readonly automerge_getActorId: (a: number, b: number) => void; - readonly automerge_getLastLocalChange: (a: number, b: number) => void; - readonly automerge_dump: (a: number) => void; - readonly automerge_getMissingDeps: (a: number, b: number, c: number) => void; - readonly automerge_receiveSyncMessage: (a: number, b: number, c: number, d: number) => void; - readonly automerge_generateSyncMessage: (a: number, b: number, c: number) => void; - readonly automerge_toJS: (a: number) => number; - readonly create: (a: number, b: number, c: number) => void; - readonly loadDoc: (a: number, b: number, c: number, d: number) => void; - readonly encodeChange: (a: number, b: number) => void; - readonly decodeChange: (a: number, b: number) => void; - readonly initSyncState: () => number; - readonly importSyncState: (a: number, b: number) => void; - readonly 
exportSyncState: (a: number) => number; - readonly encodeSyncMessage: (a: number, b: number) => void; - readonly decodeSyncMessage: (a: number, b: number) => void; - readonly encodeSyncState: (a: number, b: number) => void; - readonly decodeSyncState: (a: number, b: number) => void; - readonly __wbg_list_free: (a: number) => void; - readonly __wbg_map_free: (a: number) => void; - readonly __wbg_text_free: (a: number) => void; - readonly __wbg_table_free: (a: number) => void; - readonly __wbg_syncstate_free: (a: number) => void; - readonly syncstate_sharedHeads: (a: number) => number; - readonly syncstate_lastSentHeads: (a: number) => number; - readonly syncstate_set_lastSentHeads: (a: number, b: number, c: number) => void; - readonly syncstate_set_sentHashes: (a: number, b: number, c: number) => void; - readonly syncstate_clone: (a: number) => number; - readonly __wbindgen_malloc: (a: number) => number; - readonly __wbindgen_realloc: (a: number, b: number, c: number) => number; - readonly __wbindgen_add_to_stack_pointer: (a: number) => number; - readonly __wbindgen_free: (a: number, b: number) => void; - readonly __wbindgen_exn_store: (a: number) => void; -} - -/** -* If `module_or_path` is {RequestInfo} or {URL}, makes a request and -* for everything else, calls `WebAssembly.instantiate` directly. 
-* -* @param {InitInput | Promise} module_or_path -* -* @returns {Promise} -*/ - -export default function init (module_or_path?: InitInput | Promise): Promise; +//export default function init (module_or_path?: InitInput | Promise): Promise; +export default function init (): Promise<()>; diff --git a/automerge-wasm/node-index.js b/automerge-wasm/node-index.js new file mode 100644 index 00000000..07fadf71 --- /dev/null +++ b/automerge-wasm/node-index.js @@ -0,0 +1,5 @@ +let wasm = require("./bindgen") +module.exports = wasm +Object.defineProperty(module.exports, "__esModule", { value: true }); +module.exports.default = () => (new Promise(() => {})) + diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 92eb79f8..c0304e26 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -6,23 +6,26 @@ ], "name": "automerge-wasm", "description": "wasm-bindgen bindings to the automerge rust implementation", - "version": "0.0.1", + "version": "0.0.19", "license": "MIT", "files": [ "README.md", - "LICENSE", "package.json", - "automerge_wasm_bg.wasm", - "automerge_wasm.js" + "index.d.ts", + "node/index.js", + "node/index_bg.wasm", + "web/index.js", + "web/index_bg.wasm" ], - "module": "./pkg/index.js", - "main": "./dev/index.js", + "types": "index.d.ts", + "module": "./web/index.js", + "main": "./node/index.js", "scripts": { - "build": "rimraf ./dev && wasm-pack build --target nodejs --dev --out-name index -d dev && cp index.d.ts dev", - "release": "rimraf ./dev && wasm-pack build --target nodejs --release --out-name index -d dev && cp index.d.ts dev", - "pkg": "rimraf ./pkg && wasm-pack build --target web --release --out-name index -d pkg && cp index.d.ts pkg && cd pkg && yarn pack && mv automerge-wasm*tgz ..", - "prof": "rimraf ./dev && wasm-pack build --target nodejs --profiling --out-name index -d dev", - "test": "yarn build && ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" + "build": "rimraf ./node && 
wasm-pack build --target nodejs --dev --out-name bindgen -d node && cp node-index.js node/index.js", + "release-w": "rimraf ./web && wasm-pack build --target web --release --out-name index -d web", + "release-n": "rimraf ./node && wasm-pack build --target nodejs --release --out-name index -d node && cp node-index.js node/index.js", + "release": "yarn release-w && yarn release-n", + "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, "dependencies": {}, "devDependencies": { diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index f9ff6da3..151e7236 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -3,9 +3,8 @@ import { describe, it } from 'mocha'; import assert from 'assert' //@ts-ignore import { BloomFilter } from './helpers/sync' -import { create, loadDoc, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '../dev/index' -import { DecodedSyncMessage } from '../index'; -import { Hash } from '../dev/index'; +import init, { create, loadDoc, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' 
+import { DecodedSyncMessage, Hash } from '..'; function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { const MAX_ITER = 10 @@ -29,7 +28,11 @@ function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncSta describe('Automerge', () => { describe('basics', () => { - it('should init clone and free', () => { + it('default import init() should return a promise', () => { + assert(init() instanceof Promise) + }) + + it('should create, clone and free', () => { let doc1 = create() let doc2 = doc1.clone() doc1.free() From 83c08344e71f130d670b41be92a19f3d9f76ff90 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 16 Mar 2022 14:07:45 -0400 Subject: [PATCH 231/730] wip2 --- automerge-wasm/README.md | 44 +++++++++++++++++++++++++++++++++---- automerge-wasm/index.d.ts | 4 ++-- automerge-wasm/src/lib.rs | 4 +--- automerge-wasm/test/test.ts | 21 +++++++++--------- automerge/src/automerge.rs | 6 ++--- automerge/src/error.rs | 8 +++---- 6 files changed, 60 insertions(+), 27 deletions(-) diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md index 527f0baa..a44df87a 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -4,7 +4,7 @@ This package is a low level interface to the [automerge rust](https://github.com ### Why CRDT? -// TODO +CRDT stands for Conflict Free Replicated Datatype. It is a datastructure that offers eventual consistency where multiple actors can write to the document independantly and then these edits can be automatically merged together into a coherent document that, as much as possible, preserves the inten of the different writers. This allows for novel masterless application design where different components need not have a central coordinating server when altering application state. 
### Terminology @@ -55,8 +55,8 @@ These are sets without a datatype import { create } from "automerge-wasm" let doc = create() - doc.set("/", "prop1", 100) // int - doc.set("/", "prop2", 3.14) + doc.set("/", "prop1", 100) // int + doc.set("/", "prop2", 3.14) // f64 doc.set("/", "prop3", "hello world") doc.set("/", "prop4", new Date()) doc.set("/", "prop5", new Uint8Array([1,2,3])) @@ -146,12 +146,27 @@ Using the id directly is always faster (as it prevents the path to id conversion ### Text +Text is a specialized list type intended for modifying a text document. The primary way to interact with a text document is via the slice operation. + +``` + let doc = create() + let notes = doc.set_object("_root", "notes", "Hello world") + doc.splice(notes, 6, 5, "everyone") + assert.equal(doc.text(notes), "Hello everyone") + + // Non text can be inserted into a text document and will be represented with the unicode object replacement character + +``` + + ### Tables -Automerge's Table type is currently not implemented +Automerge's Table type is currently not implemented. ### Counters +### Transactions + ### Viewing Old Versions of the Document ### Forking and Merging @@ -182,4 +197,25 @@ Methods that create new documents will generate random actors automatically - if ``` ### Glossery: Object Id's + +Object Id's uniquly identify an object within a document. They are represented as strings in the format of `{counter}@{actor}`. The root object is a special case and can be referred to as `_root`. The counter in an ever increasing integer, starting at 1, that is always one higher than the highest counter seen in the document thus far. Object Id's do not change when the object is modified but they do if it is overwritten with a new object. 
+ +``` + let doc = create("aabbcc") + let o1 = doc.set_object("_root", "o1", {}) + let o2 = doc.set_object("_root", "o2", {}) + doc.set(o1, "hello", "world") + + assert.deepEqual(doc.materialize("_root"), { "o1": { hello: "world" }, "o2": {} }) + assert.equal(o1, "1@aabbcc") + assert.equal(o2, "2@aabbcc") + + let o1v2 = doc.set_object("_root", "o1", {}) + + doc.set(o1, "a", "b") // modifying an overwritten object - does nothing + doc.set(o1v2, "x", "y") // modifying the new "o1" object + + assert.deepEqual(doc.materialize("_root"), { "o1": { x: "y" }, "o2": {} }) +``` + ### Glossery: Heads diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index e90db501..ab8c0909 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -112,7 +112,7 @@ export class Automerge { materialize(obj?: ObjID): any; // transactions - commit(message?: string, time?: number): Heads; + commit(message?: string, time?: number): Hash; merge(other: Automerge): Heads; getActorId(): Actor; pendingOps(): number; @@ -142,7 +142,7 @@ export class Automerge { // memory management free(): void; - //clone(actor?: string): Automerge; + clone(actor?: string): Automerge; fork(actor?: string): Automerge; // dump internal state to console.log diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 54480197..3522aa4d 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -83,9 +83,7 @@ impl Automerge { commit_opts.set_time(time as i64); } let hash = self.0.commit_with(commit_opts); - let result = Array::new(); - result.push(&JsValue::from_str(&hex::encode(&hash.0))); - result.into() + JsValue::from_str(&hex::encode(&hash.0)) } pub fn merge(&mut self, other: &mut Automerge) -> Result { diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 151e7236..3e7ee923 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -189,17 +189,17 @@ describe('Automerge', () => { doc.put("_root", "foo","bar") 
doc.put("_root", "bip","bap") - let heads1 = doc.commit() + let hash1 = doc.commit() assert.deepEqual(doc.keys("_root"),["bip","foo"]) doc.delete("_root", "foo") doc.delete("_root", "baz") - let heads2 = doc.commit() + let hash2 = doc.commit() assert.deepEqual(doc.keys("_root"),["bip"]) - assert.deepEqual(doc.keys("_root", heads1),["bip", "foo"]) - assert.deepEqual(doc.keys("_root", heads2),["bip"]) + assert.deepEqual(doc.keys("_root", [hash1]),["bip", "foo"]) + assert.deepEqual(doc.keys("_root", [hash2]),["bip"]) doc.free() }) @@ -232,7 +232,6 @@ describe('Automerge', () => { let root = "_root"; let text = doc.putObject(root, "text", ""); - if (!text) throw new Error('should not be undefined') doc.splice(text, 0, 0, "hello ") doc.splice(text, 6, 0, ["w","o","r","l","d"]) doc.splice(text, 11, 0, ["!","?"]) @@ -294,15 +293,15 @@ describe('Automerge', () => { let doc = create() let text = doc.putObject("_root", "text", ""); doc.splice(text, 0, 0, "hello world"); - let heads1 = doc.commit(); + let hash1 = doc.commit(); doc.splice(text, 6, 0, "big bad "); - let heads2 = doc.commit(); + let hash2 = doc.commit(); assert.strictEqual(doc.text(text), "hello big bad world") assert.strictEqual(doc.length(text), 19) - assert.strictEqual(doc.text(text, heads1), "hello world") - assert.strictEqual(doc.length(text, heads1), 11) - assert.strictEqual(doc.text(text, heads2), "hello big bad world") - assert.strictEqual(doc.length(text, heads2), 19) + assert.strictEqual(doc.text(text, [ hash1 ]), "hello world") + assert.strictEqual(doc.length(text, [ hash1 ]), 11) + assert.strictEqual(doc.text(text, [ hash2 ]), "hello big bad world") + assert.strictEqual(doc.length(text, [ hash2 ]), 19) doc.free() }) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 8b524d75..91ca77bc 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -914,17 +914,17 @@ impl Automerge { } else { let n = s .find('@') - .ok_or_else(|| 
AutomergeError::InvalidOpId(s.to_owned()))?; + .ok_or_else(|| AutomergeError::InvalidObjIdFormat(s.to_owned()))?; let counter = s[0..n] .parse() - .map_err(|_| AutomergeError::InvalidOpId(s.to_owned()))?; + .map_err(|_| AutomergeError::InvalidObjIdFormat(s.to_owned()))?; let actor = ActorId::from(hex::decode(&s[(n + 1)..]).unwrap()); let actor = self .ops .m .actors .lookup(&actor) - .ok_or_else(|| AutomergeError::ForeignObjId(s.to_owned()))?; + .ok_or_else(|| AutomergeError::InvalidObjId(s.to_owned()))?; Ok(ExId::Id( counter, self.ops.m.actors.cache[actor].clone(), diff --git a/automerge/src/error.rs b/automerge/src/error.rs index aaf9b61d..31c97acd 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -5,10 +5,10 @@ use thiserror::Error; #[derive(Error, Debug)] pub enum AutomergeError { - #[error("invalid opid format `{0}`")] - InvalidOpId(String), - #[error("obj id not from this document `{0}`")] - ForeignObjId(String), + #[error("invalid obj id format `{0}`")] + InvalidObjIdFormat(String), + #[error("invalid obj id `{0}`")] + InvalidObjId(String), #[error("there was an encoding problem: {0}")] Encoding(#[from] encoding::Error), #[error("there was a decoding problem: {0}")] From 051a0bbb54d51966fbcbbcbdf39b099f42381f66 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 16 Mar 2022 18:34:30 -0400 Subject: [PATCH 232/730] early draft of the readme --- automerge-wasm/README.md | 122 ++++++++++++++++++++++++++++++++++----- 1 file changed, 109 insertions(+), 13 deletions(-) diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md index a44df87a..ca9f7a6d 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -22,18 +22,17 @@ This is a rust/wasm package and will work in a node or web environment. Node is This creates a document in node. The memory allocated is handled by wasm and isn't managed by the javascript garbage collector and thus needs to be manually freed. 
-``` +```javascript import { create } from "automerge-wasm" let doc = create() doc.free() - ``` While this will work in both node and in a web context -``` +```javascript import init, { create } from "automerge-wasm" init().then(_ => { @@ -51,7 +50,7 @@ Automerge has many scalar types. Methods like `set()` and `insert()` take an op These are sets without a datatype -``` +```javascript import { create } from "automerge-wasm" let doc = create() @@ -69,7 +68,7 @@ Sets with a datatype and examples of all the supported datatypes. While int vs uint vs f64 matters little in javascript, Automerge is a cross platform library where these distinctions matter. -``` +```javascript import { create } from "automerge-wasm" let doc = create() @@ -89,7 +88,7 @@ While int vs uint vs f64 matters little in javascript, Automerge is a cross plat Automerge WASM supports 3 object types. Maps, lists, and text. Maps are key value stores where the values can be any scalar type or any object type. Lists are numerically indexed set of data that can hold any scalar or any object type. Text is numerically indexed sets of graphmeme clusters. -``` +```javascript import { create } from "automerge-wasm" let doc = create() @@ -115,7 +114,7 @@ Automerge WASM supports 3 object types. Maps, lists, and text. Maps are key va You can access objects by passing the object id as the first parameter for a call. -``` +```javascript import { create } from "automerge-wasm" let doc = create() @@ -127,7 +126,7 @@ You can access objects by passing the object id as the first parameter for a cal Anywhere Object Id's are being used a path can also be used. The following two statements are equivelent: -``` +```javascript // get the id then use it let id = doc.value("/", "config")[1] @@ -142,20 +141,60 @@ Using the id directly is always faster (as it prevents the path to id conversion ### Maps +Maps are key/value store. The root object is always a map. The keys are always strings. 
The values can be any scalar type or any object. + +```javascript + let doc = create() + let mymap = doc.set_object("_root", "mymap", { foo: "bar"}) + // make a new map with the foo key + + doc.set(mymap, "bytes", new Uint8Array([1,2,3])) + // assign a byte array to key `bytes` of the mymap object + + let submap = doc.set_object(mymap, "sub", {}) + // make a new empty object and assign it to the key `sub` of mymap + + doc.keys(mymap) // returns ["bytes","foo","sub"] + doc.materialize("_root") // returns { mymap: { bytes: new Uint8Array([1,2,3]), foo: "bar", sub: {} } + doc.free() +``` + ### Lists +Lists are index addressable sets of values. These values can be any scalar or object type. You can manipulate lists with with `insert()`, `set()`, `push()`, `splice()`, and `del()`. + +```javascript + let doc = create() + let items = doc.set_object("_root", "items", [10,"box"]) + // init a new list with two elements + doc.push(items, true) // push `true` to the end of the list + doc.set_object(items, 0, { hello: "world" }) // overwrite the value 10 with an object with a key and value + doc.del(items, 1) // delete "box" + doc.splice(items, 2, 0, ["bag", "brick"]) // splice in "bag" and "brick" at position 2 + doc.insert(items, 0, "bat") // insert "bat" to the beginning of the list + + doc.materialize(items) // returns [ "bat", { hello : "world" }, true, "bag", "brick"] + doc.length(items) // returns 5 + doc.free() +``` + ### Text -Text is a specialized list type intended for modifying a text document. The primary way to interact with a text document is via the slice operation. +Text is a specialized list type intended for modifying a text document. The primary way to interact with a text document is via the slice operation. Non text can be inserted into a text document and will be represented with the unicode object replacement character. 
-``` - let doc = create() +```javascript + let doc = create("aaaaaa") let notes = doc.set_object("_root", "notes", "Hello world") doc.splice(notes, 6, 5, "everyone") - assert.equal(doc.text(notes), "Hello everyone") - // Non text can be inserted into a text document and will be represented with the unicode object replacement character + doc.text(notes) // returns "Hello everyone" + let obj = doc.insert_object(text, 6, { hi: "there" }); + + doc.text(text) // returns "Hello \ufffceveryone" + doc.value(text, 6) // returns ["map", obj] + doc.value(obj, "hi") // returns ["str", "there"] + doc.free() ``` @@ -163,12 +202,60 @@ Text is a specialized list type intended for modifying a text document. The pri Automerge's Table type is currently not implemented. +### Querying Data + +When querying maps use the `value()` method with the object in question and the property to query. This method returns a tuple with the datatype and the data. The `keys()` method will return all the keys on the object. If you are interested in conflicted values from a merge use `values()` instead which returns an array of values instead of just the winner. + +```javascript + let doc1 = create("aabbcc") + doc1.set("_root", "key1", "val1") + let key2 = doc1.set_object("_root", "key2", []) + + doc1.value("_root", "key1") // returns ["str", "val1"] + doc1.value("_root", "key1") // returns ["list", "2@aabbcc"] + doc1.keys("_root") // returns ["key1", "key2"] + + let doc2 = doc1.fork("ffaaff") + + // set a value concurrently + doc1.set("_root","key3","doc1val") + doc2.set("_root","key3","doc2val") + + doc1.merge(doc2) + + doc1.value("_root","key3") // returns ["str", "doc2val"] + doc1.values("_root","key3") // returns [[ "str", "doc1val"], ["str", "doc2val"]] + doc1.free(); doc2.free() +``` + ### Counters +// TODO + ### Transactions +// TODO + ### Viewing Old Versions of the Document +All query functions can take a optional argument of `heads` in which case you are query the document state. 
Heads is a set of change hashes that uniquly identifies a point in the document history. The `getHeads()` method can retrieve these at any point. + +```javascript + let doc = create() + doc.set("_root", "key", "val1") + let heads1 = doc.getHeads() + doc.set("_root", "key", "val2") + let heads2 = doc.getHeads() + doc.set("_root", "key", "val3") + + doc.value("_root","key") // returns ["str","val3"] + doc.value("_root","key",heads2) // returns ["str","val2"] + doc.value("_root","key",heads1) // returns ["str","val1"] + doc.value("_root","key",[]) // returns null +``` + +This works for `value()`, `values()`, `keys()`, `length()`, `text()`, and `materialize()` + ### Forking and Merging ### Saving and Loading @@ -194,6 +281,8 @@ Methods that create new documents will generate random actors automatically - if let doc6 = loadDoc(doc4.save(), "00aabb11") let actor = doc1.getActor() + + doc1.free(); doc2.free(); doc3.free(); doc4.free(); doc5.free(); doc6.free() ``` ### Glossery: Object Id's @@ -216,6 +305,13 @@ Object Id's uniquly identify an object within a document. They are represented doc.set(o1v2, "x", "y") // modifying the new "o1" object assert.deepEqual(doc.materialize("_root"), { "o1": { x: "y" }, "o2": {} }) + ``` ### Glossery: Heads + +// FIXME +loadDoc() +forkAt() +set_object() -> setObject() +materialize(heads) From 3737ad316b224e45182d28b176541e742c9dbe82 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 16 Mar 2022 18:42:55 -0400 Subject: [PATCH 233/730] spelling --- automerge-wasm/README.md | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md index ca9f7a6d..8506701a 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -1,10 +1,10 @@ ## Automerge WASM Low Level Interface -This package is a low level interface to the [automerge rust](https://github.com/automerge/automerge-rs/tree/experiment) CRDT. 
The api is intended to be a "close to the metal" as possible only a few ease of use accomodations. This library is used as the underpinnings for the [Automerge JS wrapper](https://github.com/automerge/automerge-rs/tree/experiment/automerge-js) and can be used as is or as a basis for another higher level expression of a CRDT. +This package is a low level interface to the [automerge rust](https://github.com/automerge/automerge-rs/tree/experiment) CRDT. The api is intended to be as "close to the metal" as possible with only a few ease of use accommodations. This library is used as the underpinnings for the [Automerge JS wrapper](https://github.com/automerge/automerge-rs/tree/experiment/automerge-js) and can be used as is or as a basis for another higher level expression of a CRDT. ### Why CRDT? -CRDT stands for Conflict Free Replicated Datatype. It is a datastructure that offers eventual consistency where multiple actors can write to the document independantly and then these edits can be automatically merged together into a coherent document that, as much as possible, preserves the inten of the different writers. This allows for novel masterless application design where different components need not have a central coordinating server when altering application state. +CRDT stands for Conflict Free Replicated Data Type. It is a data structure that offers eventual consistency where multiple actors can write to the document independently and then these edits can be automatically merged together into a coherent document that, as much as possible, preserves the intent of the different writers. This allows for novel masterless application design where different components need not have a central coordinating server when altering application state. ### Terminology @@ -12,13 +12,13 @@ The term Actor, Object Id and Heads are used through this documentation. Detail An Actor is a unique id that distinguishes a single writer to a document. It can be any hex string. 
-An Object id uniquely identifies a Map, List or Text object within a document. This id comes as a string in the form on `{number}@{actor}` - so `"10@aabbcc"` for example. The string `"_root"` or `"/"` can also be used to refer to the document root. These strings are durable and can be used on any decendant or copy of the document that generated them. +An Object id uniquely identifies a Map, List or Text object within a document. This id comes as a string in the form of `{number}@{actor}` - so `"10@aabbcc"` for example. The string `"_root"` or `"/"` can also be used to refer to the document root. These strings are durable and can be used on any descendant or copy of the document that generated them. -Heads refers to a set of hashes that uniquly identifies a point in time in a documents history. Heads are useful for comparing documents state or retrieving past states from the document. +Heads refers to a set of hashes that uniquely identifies a point in time in a document's history. Heads are useful for comparing documents state or retrieving past states from the document. ### Using the Library and Creating a Document -This is a rust/wasm package and will work in a node or web environment. Node is able to load wasm syncronously but a web environment is not. The default import of the package is a function that returns a promise that resolves once the wasm is loaded. +This is a rust/wasm package and will work in a node or web environment. Node is able to load wasm synchronously but a web environment is not. The default import of the package is a function that returns a promise that resolves once the wasm is loaded. This creates a document in node. The memory allocated is handled by wasm and isn't managed by the javascript garbage collector and thus needs to be manually freed. @@ -46,9 +46,9 @@ The examples below will assume a node context for brevity. ### Automerge Scalar Types -Automerge has many scalar types. 
Methods like `set()` and `insert()` take an optional datatype parameter. Normally the type can be inferred but in some cases, such as telling the difference between int, uint and a counter, it cannot. +Automerge has many scalar types. Methods like `set()` and `insert()` take an optional data type parameter. Normally the type can be inferred but in some cases, such as telling the difference between int, uint and a counter, it cannot. -These are sets without a datatype +These are sets without a data type ```javascript import { create } from "automerge-wasm" @@ -64,7 +64,7 @@ These are sets without a datatype doc.free() ``` -Sets with a datatype and examples of all the supported datatypes. +Sets with a data type and examples of all the supported data types. While int vs uint vs f64 matters little in javascript, Automerge is a cross platform library where these distinctions matter. @@ -86,7 +86,7 @@ While int vs uint vs f64 matters little in javascript, Automerge is a cross plat ### Automerge Object Types -Automerge WASM supports 3 object types. Maps, lists, and text. Maps are key value stores where the values can be any scalar type or any object type. Lists are numerically indexed set of data that can hold any scalar or any object type. Text is numerically indexed sets of graphmeme clusters. +Automerge WASM supports 3 object types. Maps, lists, and text. Maps are key value stores where the values can be any scalar type or any object type. Lists are numerically indexed sets of data that can hold any scalar or any object type. Text is numerically indexed sets of grapheme clusters. ```javascript import { create } from "automerge-wasm" @@ -124,7 +124,7 @@ You can access objects by passing the object id as the first parameter for a cal doc.set(config, "align", "right") ``` -Anywhere Object Id's are being used a path can also be used. The following two statements are equivelent: +Anywhere Object Ids are being used a path can also be used. 
The following two statements are equivalent: ```javascript // get the id then use it @@ -141,7 +141,7 @@ Using the id directly is always faster (as it prevents the path to id conversion ### Maps -Maps are key/value store. The root object is always a map. The keys are always strings. The values can be any scalar type or any object. +Maps are key/value stores. The root object is always a map. The keys are always strings. The values can be any scalar type or any object. ```javascript let doc = create() @@ -161,7 +161,7 @@ Maps are key/value store. The root object is always a map. The keys are always ### Lists -Lists are index addressable sets of values. These values can be any scalar or object type. You can manipulate lists with with `insert()`, `set()`, `push()`, `splice()`, and `del()`. +Lists are index addressable sets of values. These values can be any scalar or object type. You can manipulate lists with `insert()`, `set()`, `push()`, `splice()`, and `del()`. ```javascript let doc = create() @@ -204,7 +204,7 @@ Automerge's Table type is currently not implemented. ### Querying Data -When querying maps use the `value()` method with the object in question and the property to query. This method returns a tuple with the datatype and the data. The `keys()` method will return all the keys on the object. If you are interested in conflicted values from a merge use `values()` instead which returns an array of values instead of just the winner. +When querying maps use the `value()` method with the object in question and the property to query. This method returns a tuple with the data type and the data. The `keys()` method will return all the keys on the object. If you are interested in conflicted values from a merge use `values()` instead which returns an array of values instead of just the winner. 
```javascript let doc1 = create("aabbcc") @@ -238,7 +238,7 @@ When querying maps use the `value()` method with the object in question and the ### Viewing Old Versions of the Document -All query functions can take a optional argument of `heads` in which case you are query the document state. Heads is a set of change hashes that uniquly identifies a point in the document history. The `getHeads()` method can retrieve these at any point. +All query functions can take an optional argument of `heads` which allow you to query a prior document state. Heads are a set of change hashes that uniquly identify a point in the document history. The `getHeads()` method can retrieve these at any point. ```javascript let doc = create() @@ -262,7 +262,7 @@ This works for `value()`, `values()`, `keys()`, `length()`, `text()`, and `mater ### Syncing -### Glossery: Actors +### Glossary: Actors Some basic concepts you will need to know to better understand the api are Actors and Object Ids. @@ -285,7 +285,7 @@ Methods that create new documents will generate random actors automatically - if doc1.free(); doc2.free(); doc3.free(); doc4.free(); doc5.free(); doc6.free() ``` -### Glossery: Object Id's +### Glossary: Object Id's Object Id's uniquly identify an object within a document. They are represented as strings in the format of `{counter}@{actor}`. The root object is a special case and can be referred to as `_root`. The counter in an ever increasing integer, starting at 1, that is always one higher than the highest counter seen in the document thus far. Object Id's do not change when the object is modified but they do if it is overwritten with a new object. @@ -308,7 +308,7 @@ Object Id's uniquly identify an object within a document. 
They are represented ``` -### Glossery: Heads +### Glossary: Heads // FIXME loadDoc() From 4edb034a64d5b43e26527a7ed412592cbfd0e1d1 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 17 Mar 2022 12:50:31 -0400 Subject: [PATCH 234/730] adding readme tests --- automerge-js/package.json | 2 +- automerge-wasm/README.md | 317 ++++++++++++++++++++++++++-------- automerge-wasm/index.d.ts | 2 +- automerge-wasm/node-index.js | 2 +- automerge-wasm/src/interop.rs | 57 +++++- automerge-wasm/src/lib.rs | 40 +++-- automerge-wasm/test/readme.ts | 281 ++++++++++++++++++++++++++++++ automerge-wasm/test/test.ts | 2 + 8 files changed, 614 insertions(+), 89 deletions(-) create mode 100644 automerge-wasm/test/readme.ts diff --git a/automerge-js/package.json b/automerge-js/package.json index 8742d99a..17018429 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -10,7 +10,7 @@ "mocha": "^9.1.1" }, "dependencies": { - "automerge-wasm": "file:../automerge-wasm/dev", + "automerge-wasm": "file:../automerge-wasm", "fast-sha256": "^1.3.0", "pako": "^2.0.4", "uuid": "^8.3" diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md index 8506701a..fcf61c2f 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -2,6 +2,8 @@ This package is a low level interface to the [automerge rust](https://github.com/automerge/automerge-rs/tree/experiment) CRDT. The api is intended to be as "close to the metal" as possible with only a few ease of use accommodations. This library is used as the underpinnings for the [Automerge JS wrapper](https://github.com/automerge/automerge-rs/tree/experiment/automerge-js) and can be used as is or as a basis for another higher level expression of a CRDT. +All example code can be found in `test/readme.ts` + ### Why CRDT? CRDT stands for Conflict Free Replicated Data Type. 
It is a data structure that offers eventual consistency where multiple actors can write to the document independently and then these edits can be automatically merged together into a coherent document that, as much as possible, preserves the intent of the different writers. This allows for novel masterless application design where different components need not have a central coordinating server when altering application state. @@ -46,21 +48,21 @@ The examples below will assume a node context for brevity. ### Automerge Scalar Types -Automerge has many scalar types. Methods like `set()` and `insert()` take an optional data type parameter. Normally the type can be inferred but in some cases, such as telling the difference between int, uint and a counter, it cannot. +Automerge has many scalar types. Methods like `put()` and `insert()` take an optional data type parameter. Normally the type can be inferred but in some cases, such as telling the difference between int, uint and a counter, it cannot. 
-These are sets without a data type +These are puts without a data type ```javascript import { create } from "automerge-wasm" let doc = create() - doc.set("/", "prop1", 100) // int - doc.set("/", "prop2", 3.14) // f64 - doc.set("/", "prop3", "hello world") - doc.set("/", "prop4", new Date()) - doc.set("/", "prop5", new Uint8Array([1,2,3])) - doc.set("/", "prop6", true) - doc.set("/", "prop7", null) + doc.put("/", "prop1", 100) // int + doc.put("/", "prop2", 3.14) // f64 + doc.put("/", "prop3", "hello world") + doc.put("/", "prop4", new Date()) + doc.put("/", "prop5", new Uint8Array([1,2,3])) + doc.put("/", "prop6", true) + doc.put("/", "prop7", null) doc.free() ``` @@ -72,15 +74,16 @@ While int vs uint vs f64 matters little in javascript, Automerge is a cross plat import { create } from "automerge-wasm" let doc = create() - doc.set("/", "prop1", 100, "int") - doc.set("/", "prop2", 100, "uint") - doc.set("/", "prop3", 100.5, "f64") - doc.set("/", "prop4", 100, "counter") - doc.set("/", "prop5", new Date(), "timestamp") - doc.set("/", "prop6", "hello world", "str") - doc.set("/", "prop7", new Uint8Array([1,2,3]), "bytes") - doc.set("/", "prop8", true, "boolean") - doc.set("/", "prop9", null, "null") + doc.put("/", "prop1", 100, "int") + doc.put("/", "prop2", 100, "uint") + doc.put("/", "prop3", 100.5, "f64") + doc.put("/", "prop4", 100, "counter") + doc.put("/", "prop5", 1647531707301, "timestamp") + doc.put("/", "prop6", new Date(), "timestamp") + doc.put("/", "prop7", "hello world", "str") + doc.put("/", "prop8", new Uint8Array([1,2,3]), "bytes") + doc.put("/", "prop9", true, "boolean") + doc.put("/", "prop10", null, "null") doc.free() ``` @@ -97,17 +100,17 @@ Automerge WASM supports 3 object types. Maps, lists, and text. 
Maps are key va // the return value is the Object Id // these functions all return an object id - let config = doc.set_object("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) - let token = doc.set_object("/", "tokens", {}) + let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) + let token = doc.putObject("/", "tokens", {}) // lists can be made with javascript arrays - let birds = doc.set_object("/", "birds", ["bluejay", "penguin", "puffin"]) - let bots = doc.set_object("/", "bots", []) + let birds = doc.putObject("/", "birds", ["bluejay", "penguin", "puffin"]) + let bots = doc.putObject("/", "bots", []) // text is initialized with a string - let notes = doc.set_object("/", "notes", "Hello world!") + let notes = doc.putObject("/", "notes", "Hello world!") doc.free() ``` @@ -119,22 +122,25 @@ You can access objects by passing the object id as the first parameter for a cal let doc = create() - let config = doc.set_object("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) + let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) - doc.set(config, "align", "right") -``` + doc.put(config, "align", "right") -Anywhere Object Ids are being used a path can also be used. The following two statements are equivalent: + // Anywhere Object Ids are being used a path can also be used. + // The following two statements are equivalent: -```javascript // get the id then use it let id = doc.value("/", "config")[1] - doc.set(id, "align", "right") + if (id && id[0] === 'map') { + doc.put(id[1], "align", "right") + } // use a path instead - doc.set("/config", "align", "right") + doc.put("/config", "align", "right") + + doc.free() ``` Using the id directly is always faster (as it prevents the path to id conversion internally) so it is preferred for performance critical code. @@ -145,36 +151,37 @@ Maps are key/value stores. The root object is always a map. 
The keys are alway ```javascript let doc = create() - let mymap = doc.set_object("_root", "mymap", { foo: "bar"}) + let mymap = doc.putObject("_root", "mymap", { foo: "bar"}) // make a new map with the foo key - doc.set(mymap, "bytes", new Uint8Array([1,2,3])) + doc.put(mymap, "bytes", new Uint8Array([1,2,3])) // assign a byte array to key `bytes` of the mymap object - let submap = doc.set_object(mymap, "sub", {}) + let submap = doc.putObject(mymap, "sub", {}) // make a new empty object and assign it to the key `sub` of mymap doc.keys(mymap) // returns ["bytes","foo","sub"] - doc.materialize("_root") // returns { mymap: { bytes: new Uint8Array([1,2,3]), foo: "bar", sub: {} } + doc.materialize("_root") // returns { mymap: { bytes: new Uint8Array([1,2,3]), foo: "bar", sub: {}}} doc.free() ``` ### Lists -Lists are index addressable sets of values. These values can be any scalar or object type. You can manipulate lists with `insert()`, `set()`, `push()`, `splice()`, and `del()`. +Lists are index addressable sets of values. These values can be any scalar or object type. You can manipulate lists with `insert()`, `put()`, `insertObject()`, `pubObject()`, `push()`, `pushObject()`, `splice()`, and `del()`. 
```javascript let doc = create() - let items = doc.set_object("_root", "items", [10,"box"]) + let items = doc.putObject("_root", "items", [10,"box"]) // init a new list with two elements doc.push(items, true) // push `true` to the end of the list - doc.set_object(items, 0, { hello: "world" }) // overwrite the value 10 with an object with a key and value + doc.putObject(items, 0, { hello: "world" }) // overwrite the value 10 with an object with a key and value doc.del(items, 1) // delete "box" doc.splice(items, 2, 0, ["bag", "brick"]) // splice in "bag" and "brick" at position 2 doc.insert(items, 0, "bat") // insert "bat" to the beginning of the list + doc.insertObject(items, 1, [1,2]) // insert a list with 2 values at pos 1 - doc.materialize(items) // returns [ "bat", { hello : "world" }, true, "bag", "brick"] - doc.length(items) // returns 5 + doc.materialize(items) // returns [ "bat", [1,2], { hello : "world" }, true, "bag", "brick"] + doc.length(items) // returns 6 doc.free() ``` @@ -184,20 +191,19 @@ Text is a specialized list type intended for modifying a text document. The pri ```javascript let doc = create("aaaaaa") - let notes = doc.set_object("_root", "notes", "Hello world") + let notes = doc.putObject("_root", "notes", "Hello world") doc.splice(notes, 6, 5, "everyone") doc.text(notes) // returns "Hello everyone" - let obj = doc.insert_object(text, 6, { hi: "there" }); + let obj = doc.insert_object(notes, 6, { hi: "there" }) - doc.text(text) // returns "Hello \ufffceveryone" - doc.value(text, 6) // returns ["map", obj] + doc.text(notes) // returns "Hello \ufffceveryone" + doc.value(notes, 6) // returns ["map", obj] doc.value(obj, "hi") // returns ["str", "there"] doc.free() ``` - ### Tables Automerge's Table type is currently not implemented. 
@@ -208,60 +214,221 @@ When querying maps use the `value()` method with the object in question and the ```javascript let doc1 = create("aabbcc") - doc1.set("_root", "key1", "val1") - let key2 = doc1.set_object("_root", "key2", []) + doc1.put("_root", "key1", "val1") + let key2 = doc1.putObject("_root", "key2", []) doc1.value("_root", "key1") // returns ["str", "val1"] - doc1.value("_root", "key1") // returns ["list", "2@aabbcc"] + doc1.value("_root", "key2") // returns ["list", "2@aabbcc"] doc1.keys("_root") // returns ["key1", "key2"] let doc2 = doc1.fork("ffaaff") - - // set a value concurrently - doc1.set("_root","key3","doc1val") - doc2.set("_root","key3","doc2val") + + // put a value concurrently + doc1.put("_root","key3","doc1val") + doc2.put("_root","key3","doc2val") doc1.merge(doc2) doc1.value("_root","key3") // returns ["str", "doc2val"] doc1.values("_root","key3") // returns [[ "str", "doc1val"], ["str", "doc2val"]] - doc1.free(); doc2.free() + doc1.free(); doc2.free() ``` ### Counters -// TODO +Counters are 64 bit ints that support the increment operation. Frequently different actords will want to increment or decrement a number and have all these coalesse into a merged value. + +``` + let doc1 = create("aaaaaa") + doc1.put("_root", "number", 0) + doc1.put("_root", "total", 0, "counter") + + let doc2 = doc1.fork("bbbbbb") + doc2.put("_root", "number", 10) + doc2.inc("_root", "total", 11) + + doc1.put("_root", "number", 20) + doc1.inc("_root", "total", 22) + + doc1.merge(doc2) + + doc1.materialize("_root") // returns { number: 10, total: 33 } + + doc1.free(); doc2.free() +``` ### Transactions -// TODO +Generally speaking you don't need to think about transaction when using automerge. Normal edits queue up into an in progress trasnaction. You can query the number of ops in the current transaction with `pendingOps()`. 
The transaction will commit automatically on certains calls such as `save()`, `saveIncremental()`, `fork()`, `merge()`, `getHeads()`, `applyChanges()`, `generateSyncMessage()`, and `receiveSyncMessage()`. When the transaction commits the heads of the document change. If you want to roll back all the in progress ops you can call `doc.rollback()`. If you want to manually commit a transaction in progress you can call`doc.commit()` with an optional commit message and timestamp. + +```javascript + let doc = create() + + doc.put("_root", "key", "val1") + + doc.value("_root", "key") // returns ["str","val1"] + doc.pendingOps() // returns 1 + + doc.rollback() + + doc.value("_root", "key") // returns null + doc.pendingOps() // returns 0 + + doc.put("_root", "key", "val2") + + doc.pendingOps() // returns 1 + + doc.commit("test commit 1") + + doc.value("_root", "key") // returns ["str","val2"] + doc.pendingOps() // returns 0 + + doc.free() +``` ### Viewing Old Versions of the Document All query functions can take an optional argument of `heads` which allow you to query a prior document state. Heads are a set of change hashes that uniquly identify a point in the document history. The `getHeads()` method can retrieve these at any point. ```javascript - let doc = create() - doc.set("_root", "key", "val1") + let doc = create() + + doc.put("_root", "key", "val1") let heads1 = doc.getHeads() - doc.set("_root", "key", "val2") + + doc.put("_root", "key", "val2") let heads2 = doc.getHeads() - doc.set("_root", "key", "val3") + + doc.put("_root", "key", "val3") doc.value("_root","key") // returns ["str","val3"] doc.value("_root","key",heads2) // returns ["str","val2"] doc.value("_root","key",heads1) // returns ["str","val1"] doc.value("_root","key",[]) // returns null + + doc.free() ``` This works for `value()`, `values()`, `keys()`, `length()`, `text()`, and `materialize()` +Queries of old document states are not indexed internally and will be slower than normal access. 
If you need a fast indexed version of a document at a previous point in time you can create one with `doc.forkAt(heads, actor?)` + ### Forking and Merging +You can `fork()` a document which makes an exact copy of it. This assigns a new actor so changes made to the fork can be merged back in with the origional. The `forkAt()` takes a heads allowing you to fork off a document from a previous point in its history. These documents allocate new memory in WASM and need to be freed. + +The `merge()` command applys all changes in the argument doc into the calling doc. Therefore if doc a has 1000 changes that doc b lacks and doc b has only 10 changes that doc a lacks, `a.merge(b)` will be much faster than `b.merge(a)`. + +```javascript + let doc1 = create() + doc1.put("_root", "key1", "val1") + + let doc2 = doc1.fork() + + doc1.put("_root", "key2", "val2") + doc2.put("_root", "key3", "val3") + + doc1.merge(doc2) + + doc1.materialize("_root") // returns { key1: "val1", key2: "val2", key3: "val3" } + doc2.materialize("_root") // returns { key1: "val1", key3: "val3" } + + doc1.free(); doc2.free() +``` + +Note that calling `a.merge(a)` will produce an unrecoverable error from the wasm-bindgen layer which (as of this writing) there is no workaround for. + ### Saving and Loading +Calling `save()` converts the document to a compressed `Uint8Array()` that can be saved to durable storage. This format uses a columnar storage format that compresses away most of the automerge metadata needed to manage the CRDT state, but does include all of the change history. + +If you wish to incrementally update a saved automerge doc you can call `saveIncremental()` to get a `Uint8Array()` of bytes that can be appended to the file with all the new changes(). Note that the `saveIncremental()` bytes are not as compressed the whole document save as each chunk has metadata information needed to parse it. 
It may make sense to periodically perform a new `save()` to get the smallest possible file footprint. + +The `load()` function takes a `Uint8Array()` of bytes produced in this way and constitutes a new document. The `loadIncremental()` method is availble if you wish to consume the result of a `saveIncremental()` with a already instanciated document. + +```javascript + import { create, load } from "automerge-wasm" + + let doc1 = create() + + doc1.put("_root", "key1", "value1") + + let save1 = doc1.save() + + let doc2 = load(save1) + + doc2.materialize("_root") // returns { key1: "value1" } + + doc1.put("_root", "key2", "value2") + + let saveIncremental = doc1.saveIncremental() + + let save2 = doc1.save() + + let save3 = new Uint8Array([... save1, ... saveIncremental]) + + // save2 has fewer bytes than save3 but contains the same ops + + doc2.loadIncremental(saveIncremental) + + let doc3 = load(save2) + + let doc4 = load(save3) + + doc1.materialize("_root") // returns { key1: "value1", key2: "value2" } + doc2.materialize("_root") // returns { key1: "value1", key2: "value2" } + doc3.materialize("_root") // returns { key1: "value1", key2: "value2" } + doc4.materialize("_root") // returns { key1: "value1", key2: "value2" } + + doc1.free(); doc2.free(); doc3.free(); doc4.free() +``` + ### Syncing +When syncing a document the `generateSyncMessage()` and `receiveSyncMessage()` methods will produce and consume sync messages. A sync state object will need to be managed for the duration of the connection (created by the function `initSyncState()` and can be serialized to a Uint8Array() to preserve sync state with the `encodeSyncState()` and `decodeSyncState()` functions. + +A very simple sync implementation might look like this. 
+ +```javascript + import { encodeSyncState, decodeSyncState, initSyncState } from "automerge-wasm" + + let states = {} + + function receiveMessageFromPeer(doc, peer_id, message) { + let syncState = states[peer_id] + doc.receiveMessage(syncState, message) + let reply = doc.generateSyncMessage(syncState) + if (reply) { + sendMessage(peer_id, reply) + } + } + + function notifyPeerAboutUpdates(doc, peer_id) { + let syncState = states[peer_id] + let message = doc.generateSyncMessage(syncState) + if (message) { + sendMessage(peer_id, message) + } + } + + function onDisconnect(peer_id) { + let state = states[peer_id] + if (state) { + saveSyncToStorage(peer_id, encodeSyncState(state)) + } + delete states[peer_id] + } + + function onConnect(peer_id) { + let state = loadSyncFromStorage(peer_id) + if (state) { + states[peer_id] = decodeSyncState(state) + } else { + states[peer_id] = initSyncState() + } + } +``` + ### Glossary: Actors Some basic concepts you will need to know to better understand the api are Actors and Object Ids. @@ -271,14 +438,14 @@ Actors are ids that need to be unique to each process writing to a document. Th Methods that create new documents will generate random actors automatically - if you wish to supply your own it is always taken as an optional argument. This is true for the following functions. ``` - import { create, loadDoc } from "automerge-wasm" + import { create, load } from "automerge-wasm" let doc1 = create() // random actorid let doc2 = create("aabbccdd") let doc3 = doc1.fork() // random actorid - let doc4 = doc2.for("ccdd0011") - let doc5 = loadDoc(doc3.save()) // random actorid - let doc6 = loadDoc(doc4.save(), "00aabb11") + let doc4 = doc2.fork("ccdd0011") + let doc5 = load(doc3.save()) // random actorid + let doc6 = load(doc4.save(), "00aabb11") let actor = doc1.getActor() @@ -291,27 +458,27 @@ Object Id's uniquly identify an object within a document. 
They are represented ``` let doc = create("aabbcc") - let o1 = doc.set_object("_root", "o1", {}) - let o2 = doc.set_object("_root", "o2", {}) - doc.set(o1, "hello", "world") + let o1 = doc.putObject("_root", "o1", {}) + let o2 = doc.putObject("_root", "o2", {}) + doc.put(o1, "hello", "world") assert.deepEqual(doc.materialize("_root"), { "o1": { hello: "world" }, "o2": {} }) assert.equal(o1, "1@aabbcc") assert.equal(o2, "2@aabbcc") - let o1v2 = doc.set_object("_root", "o1", {}) + let o1v2 = doc.putObject("_root", "o1", {}) - doc.set(o1, "a", "b") // modifying an overwritten object - does nothing - doc.set(o1v2, "x", "y") // modifying the new "o1" object + doc.put(o1, "a", "b") // modifying an overwritten object - does nothing + doc.put(o1v2, "x", "y") // modifying the new "o1" object assert.deepEqual(doc.materialize("_root"), { "o1": { x: "y" }, "o2": {} }) + doc.free() ``` -### Glossary: Heads - -// FIXME -loadDoc() -forkAt() -set_object() -> setObject() -materialize(heads) +// TODO: +can I rename loadDoc() to load()??? 
+implement forkAt() +port sync example to typescript +what about a doc.equals() +remove the opid from values diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index ab8c0909..764c4fa4 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -109,7 +109,7 @@ export class Automerge { keys(obj: ObjID, heads?: Heads): string[]; text(obj: ObjID, heads?: Heads): string; length(obj: ObjID, heads?: Heads): number; - materialize(obj?: ObjID): any; + materialize(obj?: ObjID, heas?: Heads): any; // transactions commit(message?: string, time?: number): Hash; diff --git a/automerge-wasm/node-index.js b/automerge-wasm/node-index.js index 07fadf71..30a5c54a 100644 --- a/automerge-wasm/node-index.js +++ b/automerge-wasm/node-index.js @@ -1,5 +1,5 @@ let wasm = require("./bindgen") module.exports = wasm Object.defineProperty(module.exports, "__esModule", { value: true }); -module.exports.default = () => (new Promise(() => {})) +module.exports.default = () => (new Promise((resolve,reject) => { resolve() })) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 69dd38f7..a47445cf 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -355,6 +355,32 @@ pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { map.into() } +pub(crate) fn map_to_js_at(doc: &am::AutoCommit, obj: &ObjId, heads: &[ChangeHash]) -> JsValue { + let keys = doc.keys(obj); + let map = Object::new(); + for k in keys { + let val = doc.value_at(obj, &k, heads); + match val { + Ok(Some((Value::Object(o), exid))) + if o == am::ObjType::Map || o == am::ObjType::Table => + { + Reflect::set(&map, &k.into(), &map_to_js_at(doc, &exid, heads)).unwrap(); + } + Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => { + Reflect::set(&map, &k.into(), &list_to_js_at(doc, &exid, heads)).unwrap(); + } + Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => { + Reflect::set(&map, &k.into(), &doc.text_at(&exid, 
heads).unwrap().into()).unwrap(); + } + Ok(Some((Value::Scalar(v), _))) => { + Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap(); + } + _ => (), + }; + } + map.into() +} + pub(crate) fn list_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { let len = doc.length(obj); let array = Array::new(); @@ -366,9 +392,38 @@ pub(crate) fn list_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { { array.push(&map_to_js(doc, &exid)); } - Ok(Some((Value::Object(_), exid))) => { + Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => { array.push(&list_to_js(doc, &exid)); } + Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => { + array.push(&doc.text(&exid).unwrap().into()); + } + Ok(Some((Value::Scalar(v), _))) => { + array.push(&ScalarValue(v).into()); + } + _ => (), + }; + } + array.into() +} + +pub(crate) fn list_to_js_at(doc: &am::AutoCommit, obj: &ObjId, heads: &[ChangeHash]) -> JsValue { + let len = doc.length(obj); + let array = Array::new(); + for i in 0..len { + let val = doc.value_at(obj, i as usize, heads); + match val { + Ok(Some((Value::Object(o), exid))) + if o == am::ObjType::Map || o == am::ObjType::Table => + { + array.push(&map_to_js_at(doc, &exid, heads)); + } + Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => { + array.push(&list_to_js_at(doc, &exid, heads)); + } + Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => { + array.push(&doc.text_at(exid, heads).unwrap().into()); + } Ok(Some((Value::Scalar(v), _))) => { array.push(&ScalarValue(v).into()); } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 3522aa4d..7ca672ef 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -13,7 +13,8 @@ mod sync; mod value; use interop::{ - get_heads, js_get, js_set, list_to_js, map_to_js, to_js_err, to_objtype, to_prop, AR, JS, + get_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at, to_js_err, + to_objtype, to_prop, AR, JS, }; use 
sync::SyncState; use value::{datatype, ScalarValue}; @@ -57,7 +58,6 @@ impl Automerge { Ok(automerge) } - #[allow(clippy::should_implement_trait)] pub fn fork(&mut self, actor: Option) -> Result { let mut automerge = Automerge(self.0.fork()); if let Some(s) = actor { @@ -549,14 +549,25 @@ impl Automerge { map_to_js(&self.0, &ROOT) } - pub fn materialize(&self, obj: JsValue) -> Result { + pub fn materialize(&self, obj: JsValue, heads: Option) -> Result { let obj = self.import(obj).unwrap_or(ROOT); - match self.0.object_type(&obj) { - Some(am::ObjType::Map) => Ok(map_to_js(&self.0, &obj)), - Some(am::ObjType::List) => Ok(list_to_js(&self.0, &obj)), - Some(am::ObjType::Text) => Ok(self.0.text(&obj)?.into()), - Some(am::ObjType::Table) => Ok(map_to_js(&self.0, &obj)), - None => Err(to_js_err(format!("invalid obj {}", obj))), + let heads = get_heads(heads); + if let Some(heads) = heads { + match self.0.object_type(&obj) { + Some(am::ObjType::Map) => Ok(map_to_js_at(&self.0, &obj, heads.as_slice())), + Some(am::ObjType::List) => Ok(list_to_js_at(&self.0, &obj, heads.as_slice())), + Some(am::ObjType::Text) => Ok(self.0.text_at(&obj, heads.as_slice())?.into()), + Some(am::ObjType::Table) => Ok(map_to_js_at(&self.0, &obj, heads.as_slice())), + None => Err(to_js_err(format!("invalid obj {}", obj))), + } + } else { + match self.0.object_type(&obj) { + Some(am::ObjType::Map) => Ok(map_to_js(&self.0, &obj)), + Some(am::ObjType::List) => Ok(list_to_js(&self.0, &obj)), + Some(am::ObjType::Text) => Ok(self.0.text(&obj)?.into()), + Some(am::ObjType::Table) => Ok(map_to_js(&self.0, &obj)), + None => Err(to_js_err(format!("invalid obj {}", obj))), + } } } @@ -620,12 +631,21 @@ impl Automerge { Some("boolean") => value.as_bool().map(am::ScalarValue::Boolean), Some("int") => value.as_f64().map(|v| am::ScalarValue::Int(v as i64)), Some("uint") => value.as_f64().map(|v| am::ScalarValue::Uint(v as u64)), + Some("str") => value.as_string().map(|v| am::ScalarValue::Str(v.into())), 
Some("f64") => value.as_f64().map(am::ScalarValue::F64), Some("bytes") => Some(am::ScalarValue::Bytes( value.clone().dyn_into::().unwrap().to_vec(), )), Some("counter") => value.as_f64().map(|v| am::ScalarValue::counter(v as i64)), - Some("timestamp") => value.as_f64().map(|v| am::ScalarValue::Timestamp(v as i64)), + Some("timestamp") => { + if let Some(v) = value.as_f64() { + Some(am::ScalarValue::Timestamp(v as i64)) + } else if let Ok(d) = value.clone().dyn_into::() { + Some(am::ScalarValue::Timestamp(d.get_time() as i64)) + } else { + None + } + } Some("null") => Some(am::ScalarValue::Null), Some(_) => None, None => { diff --git a/automerge-wasm/test/readme.ts b/automerge-wasm/test/readme.ts new file mode 100644 index 00000000..a70d5dfd --- /dev/null +++ b/automerge-wasm/test/readme.ts @@ -0,0 +1,281 @@ +import { describe, it } from 'mocha'; +//@ts-ignore +import assert from 'assert' +//@ts-ignore +import init, { create, loadDoc } from '..' + +describe('Automerge', () => { + describe('Readme Examples', () => { + it('Using the Library and Creating a Document (1)', () => { + let doc = create() + doc.free() + }) + it('Using the Library and Creating a Document (2)', (done) => { + init().then(_ => { + let doc = create() + doc.free() + done() + }) + }) + it('Automerge Scalar Types (1)', () => { + let doc = create() + doc.put("/", "prop1", 100) // int + doc.put("/", "prop2", 3.14) // f64 + doc.put("/", "prop3", "hello world") + doc.put("/", "prop4", new Date(0)) + doc.put("/", "prop5", new Uint8Array([1,2,3])) + doc.put("/", "prop6", true) + doc.put("/", "prop7", null) + + assert.deepEqual(doc.materialize("/"), { + prop1: 100, + prop2: 3.14, + prop3: "hello world", + prop4: new Date(0), + prop5: new Uint8Array([1,2,3]), + prop6: true, + prop7: null + }) + + doc.free() + }) + it('Automerge Scalar Types (2)', () => { + let doc = create() + doc.put("/", "prop1", 100, "int") + doc.put("/", "prop2", 100, "uint") + doc.put("/", "prop3", 100.5, "f64") + doc.put("/", "prop4", 
100, "counter") + doc.put("/", "prop5", 1647531707301, "timestamp") + doc.put("/", "prop6", new Date(), "timestamp") + doc.put("/", "prop7", "hello world", "str") + doc.put("/", "prop8", new Uint8Array([1,2,3]), "bytes") + doc.put("/", "prop9", true, "boolean") + doc.put("/", "prop10", null, "null") + doc.free() + }) + it('Automerge Object Types (1)', () => { + let doc = create() + + // you can create an object by passing in the inital state - if blank pass in `{}` + // the return value is the Object Id + // these functions all return an object id + + let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) + let token = doc.putObject("/", "tokens", {}) + + // lists can be made with javascript arrays + + let birds = doc.putObject("/", "birds", ["bluejay", "penguin", "puffin"]) + let bots = doc.putObject("/", "bots", []) + + // text is initialized with a string + + let notes = doc.putObject("/", "notes", "Hello world!") + + doc.free() + }) + it('Automerge Object Types (2)', () => { + let doc = create() + + let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) + + doc.put(config, "align", "right") + + // Anywhere Object Ids are being used a path can also be used. 
+ // The following two statements are equivalent: + + let id = doc.value("/", "config") + if (id && id[0] === 'map') { + doc.put(id[1], "align", "right") + } + + doc.put("/config", "align", "right") + + assert.deepEqual(doc.materialize("/"), { + config: { align: "right", archived: false, cycles: [ 10, 19, 21 ] } + }) + + doc.free() + }) + it('Maps (1)', () => { + let doc = create() + let mymap = doc.putObject("_root", "mymap", { foo: "bar"}) + // make a new map with the foo key + + doc.put(mymap, "bytes", new Uint8Array([1,2,3])) + // assign a byte array to key `bytes` of the mymap object + + let submap = doc.putObject(mymap, "sub", {}) + // make a new empty object and assign it to the key `sub` of mymap + + assert.deepEqual(doc.keys(mymap),["bytes","foo","sub"]) + assert.deepEqual(doc.materialize("_root"), { mymap: { bytes: new Uint8Array([1,2,3]), foo: "bar", sub: {} }}) + + doc.free() + }) + it('Lists (1)', () => { + let doc = create() + let items = doc.putObject("_root", "items", [10,"box"]) + // init a new list with two elements + doc.push(items, true) // push `true` to the end of the list + doc.putObject(items, 0, { hello: "world" }) // overwrite the value 10 with an object with a key and value + doc.delete(items, 1) // delete "box" + doc.splice(items, 2, 0, ["bag", "brick"]) // splice in "bag" and "brick" at position 2 + doc.insert(items, 0, "bat") // insert "bat" to the beginning of the list + doc.insertObject(items, 1, [ 1, 2 ]) // insert a list with 2 values at pos 1 + + assert.deepEqual(doc.materialize(items),[ "bat", [ 1 ,2 ], { hello : "world" }, true, "bag", "brick" ]) + assert.deepEqual(doc.length(items),6) + + doc.free() + }) + it('Text (1)', () => { + let doc = create("aaaaaa") + let notes = doc.putObject("_root", "notes", "Hello world") + doc.splice(notes, 6, 5, "everyone") + + assert.deepEqual(doc.text(notes), "Hello everyone") + + let obj = doc.insertObject(notes, 6, { hi: "there" }) + + assert.deepEqual(doc.text(notes), "Hello \ufffceveryone") 
+ assert.deepEqual(doc.value(notes, 6), ["map", obj]) + assert.deepEqual(doc.value(obj, "hi"), ["str", "there"]) + + doc.free() + }) + it('Querying Data (1)', () => { + let doc1 = create("aabbcc") + doc1.put("_root", "key1", "val1") + let key2 = doc1.putObject("_root", "key2", []) + + assert.deepEqual(doc1.value("_root", "key1"), ["str", "val1"]) + assert.deepEqual(doc1.value("_root", "key2"), ["list", "2@aabbcc"]) + assert.deepEqual(doc1.keys("_root"), ["key1", "key2"]) + + let doc2 = doc1.fork("ffaaff") + + // set a value concurrently + doc1.put("_root","key3","doc1val") + doc2.put("_root","key3","doc2val") + + doc1.merge(doc2) + + assert.deepEqual(doc1.value("_root","key3"), ["str", "doc2val"]) + assert.deepEqual(doc1.values("_root","key3"),[[ "str", "doc1val", "3@aabbcc"], ["str", "doc2val", "3@ffaaff"]]) + + doc1.free(); doc2.free() + }) + it('Counters (1)', () => { + let doc1 = create("aaaaaa") + doc1.put("_root", "number", 0) + doc1.put("_root", "total", 0, "counter") + + let doc2 = doc1.fork("bbbbbb") + doc2.put("_root", "number", 10) + doc2.increment("_root", "total", 11) + + doc1.put("_root", "number", 20) + doc1.increment("_root", "total", 22) + + doc1.merge(doc2) + + assert.deepEqual(doc1.materialize("_root"), { number: 10, total: 33 }) + + doc1.free(); doc2.free() + }) + it('Transactions (1)', () => { + let doc = create() + + doc.put("_root", "key", "val1") + + assert.deepEqual(doc.value("_root", "key"),["str","val1"]) + assert.deepEqual(doc.pendingOps(),1) + + doc.rollback() + + assert.deepEqual(doc.value("_root", "key"),null) + assert.deepEqual(doc.pendingOps(),0) + + doc.put("_root", "key", "val2") + + assert.deepEqual(doc.pendingOps(),1) + + doc.commit("test commit 1") + + assert.deepEqual(doc.value("_root", "key"),["str","val2"]) + assert.deepEqual(doc.pendingOps(),0) + + doc.free() + }) + it('Viewing Old Versions of the Document (1)', () => { + let doc = create() + + doc.put("_root", "key", "val1") + let heads1 = doc.getHeads() + + 
doc.put("_root", "key", "val2") + let heads2 = doc.getHeads() + + doc.put("_root", "key", "val3") + + assert.deepEqual(doc.value("_root","key"), ["str","val3"]) + assert.deepEqual(doc.value("_root","key",heads2), ["str","val2"]) + assert.deepEqual(doc.value("_root","key",heads1), ["str","val1"]) + assert.deepEqual(doc.value("_root","key",[]), null) + + doc.free() + }) + it('Forking And Merging (1)', () => { + let doc1 = create() + doc1.put("_root", "key1", "val1") + + let doc2 = doc1.fork() + + doc1.put("_root", "key2", "val2") + doc2.put("_root", "key3", "val3") + + doc1.merge(doc2) + + assert.deepEqual(doc1.materialize("_root"), { key1: "val1", key2: "val2", key3: "val3" }) + assert.deepEqual(doc2.materialize("_root"), { key1: "val1", key3: "val3" }) + + doc1.free(); doc2.free() + }) + it('Saving And Loading (1)', () => { + let doc1 = create() + + doc1.put("_root", "key1", "value1") + + let save1 = doc1.save() + + let doc2 = loadDoc(save1) + + doc2.materialize("_root") // returns { key1: "value1" } + + doc1.put("_root", "key2", "value2") + + let saveIncremental = doc1.saveIncremental() + + let save2 = doc1.save() + + let save3 = new Uint8Array([... save1, ... 
saveIncremental]) + + // save2 has fewer bytes than save3 but contains the same ops + + doc2.loadIncremental(saveIncremental) + + let doc3 = loadDoc(save2) + + let doc4 = loadDoc(save3) + + assert.deepEqual(doc1.materialize("_root"), { key1: "value1", key2: "value2" }) + assert.deepEqual(doc2.materialize("_root"), { key1: "value1", key2: "value2" }) + assert.deepEqual(doc3.materialize("_root"), { key1: "value1", key2: "value2" }) + assert.deepEqual(doc4.materialize("_root"), { key1: "value1", key2: "value2" }) + + doc1.free(); doc2.free(); doc3.free(); doc4.free() + }) + it.skip('Syncing (1)', () => { }) + }) +}) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 3e7ee923..8d7cf41c 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -171,6 +171,7 @@ describe('Automerge', () => { doc.insert(submap, 0, "b"); assert.deepEqual(doc.materialize(), { letters: ["b", "a" ] }) doc.push(submap, "c"); + let heads = doc.getHeads() assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c" ] }) doc.push(submap, 3, "timestamp"); assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3) ] }) @@ -180,6 +181,7 @@ describe('Automerge', () => { assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", new Date(3) ] }) assert.deepEqual(doc.materialize(submap), ["z", "d", "e", "f", "c", new Date(3) ]) assert.deepEqual(doc.length(submap),6) + assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c" ] }) doc.free() }) From 777a5160513ee062a6e1d6d1bbb34b0d47b007c6 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 17 Mar 2022 12:56:57 -0400 Subject: [PATCH 235/730] spelling/grammar --- automerge-wasm/README.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md index fcf61c2f..e6a7e819 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -236,7 +236,7 @@ When querying maps use the 
`value()` method with the object in question and the ### Counters -Counters are 64 bit ints that support the increment operation. Frequently different actords will want to increment or decrement a number and have all these coalesse into a merged value. +Counters are 64 bit ints that support the increment operation. Frequently different actors will want to increment or decrement a number and have all these coalesse into a merged value. ``` let doc1 = create("aaaaaa") @@ -259,7 +259,7 @@ Counters are 64 bit ints that support the increment operation. Frequently diffe ### Transactions -Generally speaking you don't need to think about transaction when using automerge. Normal edits queue up into an in progress trasnaction. You can query the number of ops in the current transaction with `pendingOps()`. The transaction will commit automatically on certains calls such as `save()`, `saveIncremental()`, `fork()`, `merge()`, `getHeads()`, `applyChanges()`, `generateSyncMessage()`, and `receiveSyncMessage()`. When the transaction commits the heads of the document change. If you want to roll back all the in progress ops you can call `doc.rollback()`. If you want to manually commit a transaction in progress you can call`doc.commit()` with an optional commit message and timestamp. +Generally speaking you don't need to think about transactions when using Automerge. Normal edits queue up into an in-progress transaction. You can query the number of ops in the current transaction with `pendingOps()`. The transaction will commit automatically on certains calls such as `save()`, `saveIncremental()`, `fork()`, `merge()`, `getHeads()`, `applyChanges()`, `generateSyncMessage()`, and `receiveSyncMessage()`. When the transaction commits the heads of the document change. If you want to roll back all the in progress ops you can call `doc.rollback()`. If you want to manually commit a transaction in progress you can call `doc.commit()` with an optional commit message and timestamp. 
```javascript let doc = create() @@ -288,7 +288,7 @@ Generally speaking you don't need to think about transaction when using automerg ### Viewing Old Versions of the Document -All query functions can take an optional argument of `heads` which allow you to query a prior document state. Heads are a set of change hashes that uniquly identify a point in the document history. The `getHeads()` method can retrieve these at any point. +All query functions can take an optional argument of `heads` which allow you to query a prior document state. Heads are a set of change hashes that uniquely identify a point in the document history. The `getHeads()` method can retrieve these at any point. ```javascript let doc = create() @@ -315,9 +315,9 @@ Queries of old document states are not indexed internally and will be slower tha ### Forking and Merging -You can `fork()` a document which makes an exact copy of it. This assigns a new actor so changes made to the fork can be merged back in with the origional. The `forkAt()` takes a heads allowing you to fork off a document from a previous point in its history. These documents allocate new memory in WASM and need to be freed. +You can `fork()` a document which makes an exact copy of it. This assigns a new actor so changes made to the fork can be merged back in with the original. The `forkAt()` takes a Heads, allowing you to fork off a document from a previous point in its history. These documents allocate new memory in WASM and need to be freed. -The `merge()` command applys all changes in the argument doc into the calling doc. Therefore if doc a has 1000 changes that doc b lacks and doc b has only 10 changes that doc a lacks, `a.merge(b)` will be much faster than `b.merge(a)`. +The `merge()` command applies all changes in the argument doc into the calling doc. Therefore if doc a has 1000 changes that doc b lacks and doc b has only 10 changes that doc a lacks, `a.merge(b)` will be much faster than `b.merge(a)`. 
```javascript let doc1 = create() @@ -340,11 +340,11 @@ Note that calling `a.merge(a)` will produce an unrecoverable error from the wasm ### Saving and Loading -Calling `save()` converts the document to a compressed `Uint8Array()` that can be saved to durable storage. This format uses a columnar storage format that compresses away most of the automerge metadata needed to manage the CRDT state, but does include all of the change history. +Calling `save()` converts the document to a compressed `Uint8Array()` that can be saved to durable storage. This format uses a columnar storage format that compresses away most of the Automerge metadata needed to manage the CRDT state, but does include all of the change history. -If you wish to incrementally update a saved automerge doc you can call `saveIncremental()` to get a `Uint8Array()` of bytes that can be appended to the file with all the new changes(). Note that the `saveIncremental()` bytes are not as compressed the whole document save as each chunk has metadata information needed to parse it. It may make sense to periodically perform a new `save()` to get the smallest possible file footprint. +If you wish to incrementally update a saved Automerge doc you can call `saveIncremental()` to get a `Uint8Array()` of bytes that can be appended to the file with all the new changes(). Note that the `saveIncremental()` bytes are not as compressed as the whole document save as each chunk has metadata information needed to parse it. It may make sense to periodically perform a new `save()` to get the smallest possible file footprint. -The `load()` function takes a `Uint8Array()` of bytes produced in this way and constitutes a new document. The `loadIncremental()` method is availble if you wish to consume the result of a `saveIncremental()` with a already instanciated document. +The `load()` function takes a `Uint8Array()` of bytes produced in this way and constitutes a new document. 
The `loadIncremental()` method is available if you wish to consume the result of a `saveIncremental()` with an already instanciated document. ```javascript import { create, load } from "automerge-wasm" @@ -454,7 +454,7 @@ Methods that create new documents will generate random actors automatically - if ### Glossary: Object Id's -Object Id's uniquly identify an object within a document. They are represented as strings in the format of `{counter}@{actor}`. The root object is a special case and can be referred to as `_root`. The counter in an ever increasing integer, starting at 1, that is always one higher than the highest counter seen in the document thus far. Object Id's do not change when the object is modified but they do if it is overwritten with a new object. +Object Ids uniquely identify an object within a document. They are represented as strings in the format of `{counter}@{actor}`. The root object is a special case and can be referred to as `_root`. The counter is an ever increasing integer, starting at 1, that is always one higher than the highest counter seen in the document thus far. Object Id's do not change when the object is modified but they do if it is overwritten with a new object. ``` let doc = create("aabbcc") From 0d83f5f595588c5a07b67d365f46f477aa4e6202 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 17 Mar 2022 13:00:22 -0400 Subject: [PATCH 236/730] decorate --- automerge-wasm/README.md | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md index e6a7e819..2a4be410 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -238,7 +238,7 @@ When querying maps use the `value()` method with the object in question and the Counters are 64 bit ints that support the increment operation. Frequently different actors will want to increment or decrement a number and have all these coalesse into a merged value. 
-``` +```javascript let doc1 = create("aaaaaa") doc1.put("_root", "number", 0) doc1.put("_root", "total", 0, "counter") @@ -437,7 +437,7 @@ Actors are ids that need to be unique to each process writing to a document. Th Methods that create new documents will generate random actors automatically - if you wish to supply your own it is always taken as an optional argument. This is true for the following functions. -``` +```javascript import { create, load } from "automerge-wasm" let doc1 = create() // random actorid @@ -456,7 +456,7 @@ Methods that create new documents will generate random actors automatically - if Object Ids uniquely identify an object within a document. They are represented as strings in the format of `{counter}@{actor}`. The root object is a special case and can be referred to as `_root`. The counter is an ever increasing integer, starting at 1, that is always one higher than the highest counter seen in the document thus far. Object Id's do not change when the object is modified but they do if it is overwritten with a new object. -``` +```javascript let doc = create("aabbcc") let o1 = doc.putObject("_root", "o1", {}) let o2 = doc.putObject("_root", "o2", {}) @@ -476,9 +476,10 @@ Object Ids uniquely identify an object within a document. They are represented doc.free() ``` +``` // TODO: can I rename loadDoc() to load()??? 
implement forkAt() -port sync example to typescript -what about a doc.equals() -remove the opid from values +port sync example to typescript - make sure its right +remove the opid from values() +``` From 17acab25b505467c851b73a06bbf9c3486bc2413 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 17 Mar 2022 13:50:38 -0400 Subject: [PATCH 237/730] fix _obj notation --- automerge-wasm/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md index 2a4be410..2690ea83 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -196,7 +196,7 @@ Text is a specialized list type intended for modifying a text document. The pri doc.text(notes) // returns "Hello everyone" - let obj = doc.insert_object(notes, 6, { hi: "there" }) + let obj = doc.insertObject(notes, 6, { hi: "there" }) doc.text(notes) // returns "Hello \ufffceveryone" doc.value(notes, 6) // returns ["map", obj] From 330aebb44a837c5e8e0add175209ff9ed850b8a9 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 4 Apr 2022 21:04:23 +0100 Subject: [PATCH 238/730] Make wasm ScalarValue take a cow --- automerge-wasm/src/interop.rs | 4 ++-- automerge-wasm/src/lib.rs | 8 ++++---- automerge-wasm/src/value.rs | 8 +++++--- 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 68311aa6..69dd38f7 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -347,7 +347,7 @@ pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { Reflect::set(&map, &k.into(), &doc.text(&exid).unwrap().into()).unwrap(); } Ok(Some((Value::Scalar(v), _))) => { - Reflect::set(&map, &k.into(), &ScalarValue(v.into_owned()).into()).unwrap(); + Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap(); } _ => (), }; @@ -370,7 +370,7 @@ pub(crate) fn list_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { array.push(&list_to_js(doc, &exid)); } 
Ok(Some((Value::Scalar(v), _))) => { - array.push(&ScalarValue(v.into_owned()).into()); + array.push(&ScalarValue(v).into()); } _ => (), }; diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 985f0d9e..2a153c37 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -299,7 +299,7 @@ impl Automerge { } Some((Value::Scalar(value), _)) => { result.push(&datatype(&value).into()); - result.push(&ScalarValue(value.into_owned()).into()); + result.push(&ScalarValue(value).into()); Ok(Some(result)) } None => Ok(None), @@ -336,7 +336,7 @@ impl Automerge { (Value::Scalar(value), id) => { let sub = Array::new(); sub.push(&datatype(&value).into()); - sub.push(&ScalarValue(value.into_owned()).into()); + sub.push(&ScalarValue(value).into()); sub.push(&id.to_string().into()); result.push(&sub.into()); } @@ -378,7 +378,7 @@ impl Automerge { } (Value::Scalar(value), _) => { js_set(&patch, "datatype", datatype(&value))?; - js_set(&patch, "value", ScalarValue(value.into_owned()))?; + js_set(&patch, "value", ScalarValue(value))?; } }; js_set(&patch, "conflict", conflict)?; @@ -395,7 +395,7 @@ impl Automerge { } (Value::Scalar(value), _) => { js_set(&patch, "datatype", datatype(&value))?; - js_set(&patch, "value", ScalarValue(value.into_owned()))?; + js_set(&patch, "value", ScalarValue(value))?; } }; } diff --git a/automerge-wasm/src/value.rs b/automerge-wasm/src/value.rs index a2388436..84e7b376 100644 --- a/automerge-wasm/src/value.rs +++ b/automerge-wasm/src/value.rs @@ -1,13 +1,15 @@ +use std::borrow::Cow; + use automerge as am; use js_sys::Uint8Array; use wasm_bindgen::prelude::*; #[derive(Debug)] -pub struct ScalarValue(pub(crate) am::ScalarValue); +pub struct ScalarValue<'a>(pub(crate) Cow<'a, am::ScalarValue>); -impl From for JsValue { +impl<'a> From> for JsValue { fn from(val: ScalarValue) -> Self { - match &val.0 { + match &*val.0 { am::ScalarValue::Bytes(v) => Uint8Array::from(v.as_slice()).into(), am::ScalarValue::Str(v) => 
v.to_string().into(), am::ScalarValue::Int(v) => (*v as f64).into(), From 53f6904ae5a5da57ff67ac95dc0f8d28b5dd4cac Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 4 Apr 2022 21:13:09 +0100 Subject: [PATCH 239/730] Add to_owned method to get a static value --- automerge/src/value.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/automerge/src/value.rs b/automerge/src/value.rs index 1f9913f5..ff32ddca 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -159,6 +159,14 @@ impl<'a> Value<'a> { } } + pub fn to_owned(&self) -> Value<'static> { + match self { + Value::Object(o) => Value::Object(*o), + Value::Scalar(Cow::Owned(s)) => Value::Scalar(Cow::Owned(s.clone())), + Value::Scalar(Cow::Borrowed(s)) => Value::Scalar(Cow::Owned((*s).clone())), + } + } + pub fn into_bytes(self) -> Result, Self> { match self { Value::Scalar(s) => s From 9fe8447d2163b42c3cb718d833369b5cc0547ac7 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 7 Apr 2022 01:19:27 -0400 Subject: [PATCH 240/730] loadDoc -> load() and forkAt() --- automerge-js/src/index.js | 2 +- automerge-wasm/README.md | 7 ------ automerge-wasm/index.d.ts | 3 ++- automerge-wasm/node-index.js | 3 ++- automerge-wasm/package.json | 12 +++++----- automerge-wasm/src/lib.rs | 11 ++++++++++ automerge-wasm/test/readme.ts | 10 ++++----- automerge-wasm/test/test.ts | 41 +++++++++++++++++++++++++---------- automerge-wasm/tsconfig.json | 2 +- automerge-wasm/web-index.js | 13 +++++++++++ automerge/src/autocommit.rs | 8 +++++++ automerge/src/automerge.rs | 19 ++++++++++++++++ automerge/src/error.rs | 6 +++-- examples/cra/package.json | 1 + examples/cra/src/App.tsx | 6 ++++- 15 files changed, 108 insertions(+), 36 deletions(-) create mode 100644 automerge-wasm/web-index.js diff --git a/automerge-js/src/index.js b/automerge-js/src/index.js index 326fc967..a9e49f62 100644 --- a/automerge-js/src/index.js +++ b/automerge-js/src/index.js @@ -99,7 +99,7 @@ function emptyChange(doc, options) { } function 
load(data, actor) { - const state = AutomergeWASM.loadDoc(data, actor) + const state = AutomergeWASM.load(data, actor) return rootProxy(state, true); } diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md index 2690ea83..1f517109 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -476,10 +476,3 @@ Object Ids uniquely identify an object within a document. They are represented doc.free() ``` -``` -// TODO: -can I rename loadDoc() to load()??? -implement forkAt() -port sync example to typescript - make sure its right -remove the opid from values() -``` diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 764c4fa4..64c1a80b 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -81,7 +81,7 @@ export type Patch = { } export function create(actor?: Actor): Automerge; -export function loadDoc(data: Uint8Array, actor?: Actor): Automerge; +export function load(data: Uint8Array, actor?: Actor): Automerge; export function encodeChange(change: DecodedChange): Change; export function decodeChange(change: Change): DecodedChange; export function initSyncState(): SyncState; @@ -144,6 +144,7 @@ export class Automerge { free(): void; clone(actor?: string): Automerge; fork(actor?: string): Automerge; + forkAt(heads: Heads, actor?: string): Automerge; // dump internal state to console.log dump(): void; diff --git a/automerge-wasm/node-index.js b/automerge-wasm/node-index.js index 30a5c54a..a8b9b1cd 100644 --- a/automerge-wasm/node-index.js +++ b/automerge-wasm/node-index.js @@ -1,5 +1,6 @@ let wasm = require("./bindgen") module.exports = wasm +module.exports.load = module.exports.loadDoc +delete module.exports.loadDoc Object.defineProperty(module.exports, "__esModule", { value: true }); module.exports.default = () => (new Promise((resolve,reject) => { resolve() })) - diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index c0304e26..e574118e 100644 --- a/automerge-wasm/package.json +++ 
b/automerge-wasm/package.json @@ -6,24 +6,26 @@ ], "name": "automerge-wasm", "description": "wasm-bindgen bindings to the automerge rust implementation", - "version": "0.0.19", + "version": "0.0.24", "license": "MIT", "files": [ "README.md", "package.json", "index.d.ts", "node/index.js", - "node/index_bg.wasm", + "node/bindgen.js", + "node/bindgen_bg.wasm", "web/index.js", - "web/index_bg.wasm" + "web/bindgen.js", + "web/bindgen_bg.wasm" ], "types": "index.d.ts", "module": "./web/index.js", "main": "./node/index.js", "scripts": { "build": "rimraf ./node && wasm-pack build --target nodejs --dev --out-name bindgen -d node && cp node-index.js node/index.js", - "release-w": "rimraf ./web && wasm-pack build --target web --release --out-name index -d web", - "release-n": "rimraf ./node && wasm-pack build --target nodejs --release --out-name index -d node && cp node-index.js node/index.js", + "release-w": "rimraf ./web && wasm-pack build --target web --release --out-name bindgen -d web && cp web-index.js web/index.js", + "release-n": "rimraf ./node && wasm-pack build --target nodejs --release --out-name bindgen -d node && cp node-index.js node/index.js", "release": "yarn release-w && yarn release-n", "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 7ca672ef..f186b219 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -67,6 +67,17 @@ impl Automerge { Ok(automerge) } + #[wasm_bindgen(js_name = forkAt)] + pub fn fork_at(&mut self, heads: JsValue, actor: Option) -> Result { + let deps: Vec<_> = JS(heads).try_into()?; + let mut automerge = Automerge(self.0.fork_at(&deps)?); + if let Some(s) = actor { + let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); + automerge.0.set_actor(actor); + } + Ok(automerge) + } + pub fn free(self) {} #[wasm_bindgen(js_name = pendingOps)] diff --git a/automerge-wasm/test/readme.ts 
b/automerge-wasm/test/readme.ts index a70d5dfd..5817ae30 100644 --- a/automerge-wasm/test/readme.ts +++ b/automerge-wasm/test/readme.ts @@ -2,7 +2,7 @@ import { describe, it } from 'mocha'; //@ts-ignore import assert from 'assert' //@ts-ignore -import init, { create, loadDoc } from '..' +import init, { create, load } from '..' describe('Automerge', () => { describe('Readme Examples', () => { @@ -11,7 +11,7 @@ describe('Automerge', () => { doc.free() }) it('Using the Library and Creating a Document (2)', (done) => { - init().then(_ => { + init().then((_:any) => { let doc = create() doc.free() done() @@ -249,7 +249,7 @@ describe('Automerge', () => { let save1 = doc1.save() - let doc2 = loadDoc(save1) + let doc2 = load(save1) doc2.materialize("_root") // returns { key1: "value1" } @@ -265,9 +265,9 @@ describe('Automerge', () => { doc2.loadIncremental(saveIncremental) - let doc3 = loadDoc(save2) + let doc3 = load(save2) - let doc4 = loadDoc(save3) + let doc4 = load(save3) assert.deepEqual(doc1.materialize("_root"), { key1: "value1", key2: "value2" }) assert.deepEqual(doc2.materialize("_root"), { key1: "value1", key2: "value2" }) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 8d7cf41c..30c830f9 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -3,7 +3,7 @@ import { describe, it } from 'mocha'; import assert from 'assert' //@ts-ignore import { BloomFilter } from './helpers/sync' -import init, { create, loadDoc, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' +import init, { create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' 
import { DecodedSyncMessage, Hash } from '..'; function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { @@ -277,9 +277,9 @@ describe('Automerge', () => { assert.notDeepEqual(saveA, saveB); - let docA = loadDoc(saveA); - let docB = loadDoc(saveB); - let docC = loadDoc(saveMidway) + let docA = load(saveA); + let docB = load(saveB); + let docC = load(saveMidway) docC.loadIncremental(save3) assert.deepEqual(docA.keys("_root"), docB.keys("_root")); @@ -310,8 +310,8 @@ describe('Automerge', () => { it('local inc increments all visible counters in a map', () => { let doc1 = create("aaaa") doc1.put("_root", "hello", "world") - let doc2 = loadDoc(doc1.save(), "bbbb"); - let doc3 = loadDoc(doc1.save(), "cccc"); + let doc2 = load(doc1.save(), "bbbb"); + let doc3 = load(doc1.save(), "cccc"); doc1.put("_root", "cnt", 20) doc2.put("_root", "cnt", 0, "counter") doc3.put("_root", "cnt", 10, "counter") @@ -331,7 +331,7 @@ describe('Automerge', () => { ]) let save1 = doc1.save() - let doc4 = loadDoc(save1) + let doc4 = load(save1) assert.deepEqual(doc4.save(), save1); doc1.free() doc2.free() @@ -343,8 +343,8 @@ describe('Automerge', () => { let doc1 = create("aaaa") let seq = doc1.putObject("_root", "seq", []) doc1.insert(seq, 0, "hello") - let doc2 = loadDoc(doc1.save(), "bbbb"); - let doc3 = loadDoc(doc1.save(), "cccc"); + let doc2 = load(doc1.save(), "bbbb"); + let doc3 = load(doc1.save(), "cccc"); doc1.put(seq, 0, 20) doc2.put(seq, 0, 0, "counter") doc3.put(seq, 0, 10, "counter") @@ -364,7 +364,7 @@ describe('Automerge', () => { ]) let save = doc1.save() - let doc4 = loadDoc(save) + let doc4 = load(save) assert.deepEqual(doc4.save(), save); doc1.free() doc2.free() @@ -443,7 +443,7 @@ describe('Automerge', () => { let c = doc1.putObject("_root","c",{}); let d = doc1.put(c,"d","dd"); let saved = doc1.save(); - let doc2 = loadDoc(saved); + let doc2 = load(saved); assert.deepEqual(doc2.value("_root","a"),["map",a]) 
assert.deepEqual(doc2.keys(a),[]) assert.deepEqual(doc2.value("_root","b"),["map",b]) @@ -455,6 +455,23 @@ describe('Automerge', () => { doc2.free() }) + it('should allow you to forkAt a heads', () => { + let A = create("aaaaaa") + A.put("/", "key1","val1"); + A.put("/", "key2","val2"); + let heads1 = A.getHeads(); + let B = A.fork("bbbbbb") + A.put("/", "key3","val3"); + B.put("/", "key4","val4"); + A.merge(B) + let heads2 = A.getHeads(); + A.put("/", "key5","val5"); + let C = A.forkAt(heads1) + let D = A.forkAt(heads2) + assert.deepEqual(C.materialize("/"), A.materialize("/",heads1)) + assert.deepEqual(D.materialize("/"), A.materialize("/",heads2)) + }) + it('should handle merging text conflicts then saving & loading', () => { let A = create("aabbcc") let At = A.putObject('_root', 'text', "") @@ -473,7 +490,7 @@ describe('Automerge', () => { let binary = A.save() - let C = loadDoc(binary) + let C = load(binary) assert.deepEqual(C.value('_root', 'text'), ['text', '1@aabbcc']) assert.deepEqual(C.text(At), 'hell! 
world') diff --git a/automerge-wasm/tsconfig.json b/automerge-wasm/tsconfig.json index 1dc480a4..69ca846b 100644 --- a/automerge-wasm/tsconfig.json +++ b/automerge-wasm/tsconfig.json @@ -11,7 +11,7 @@ "paths": { "dev": ["*"]}, "rootDir": "", "target": "es2016", - "typeRoots": ["./dev/index.d.ts"] + "typeRoots": ["./index.d.ts"] }, "exclude": ["dist/**/*"] } diff --git a/automerge-wasm/web-index.js b/automerge-wasm/web-index.js new file mode 100644 index 00000000..ab9e8a1d --- /dev/null +++ b/automerge-wasm/web-index.js @@ -0,0 +1,13 @@ +export { + loadDoc as load, + create, + encodeChange, + decodeChange, + initSyncState, + encodeSyncMessage, + decodeSyncMessage, + encodeSyncState, + decodeSyncState, +} from "./bindgen.js" +import init from "./bindgen.js" +export default init; diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index a6d42d16..d24af6d5 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -73,6 +73,14 @@ impl AutoCommit { } } + pub fn fork_at(&mut self, heads: &[ChangeHash]) -> Result { + self.ensure_transaction_closed(); + Ok(Self { + doc: self.doc.fork_at(heads)?, + transaction: self.transaction.clone(), + }) + } + fn ensure_transaction_closed(&mut self) { if let Some(tx) = self.transaction.take() { tx.commit(&mut self.doc, None, None); diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 91ca77bc..f7af4a0e 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -198,6 +198,25 @@ impl Automerge { f } + /// Fork this document at the give heads + pub fn fork_at(&self, heads: &[ChangeHash]) -> Result { + let mut heads = heads.to_vec(); + let mut changes = vec![]; + while let Some(hash) = heads.pop() { + if let Some(idx) = self.history_index.get(&hash) { + let change = &self.history[*idx]; + heads.extend(&change.deps); + changes.push(change); + } else { + return Err(AutomergeError::InvalidHash(hash)); + } + } + let mut f = Self::new(); + 
f.set_actor(ActorId::random()); + f.apply_changes(changes.into_iter().rev().cloned())?; + Ok(f) + } + fn insert_op(&mut self, obj: &ObjId, op: Op) -> Op { let q = self.ops.search(obj, query::SeekOp::new(&op)); diff --git a/automerge/src/error.rs b/automerge/src/error.rs index 31c97acd..6d06e749 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -1,6 +1,6 @@ use crate::types::{ActorId, ScalarValue}; use crate::value::DataType; -use crate::{decoding, encoding}; +use crate::{decoding, encoding, ChangeHash}; use thiserror::Error; #[derive(Error, Debug)] @@ -21,7 +21,9 @@ pub enum AutomergeError { InvalidIndex(usize), #[error("duplicate seq {0} found for actor {1}")] DuplicateSeqNumber(u64, ActorId), - #[error("generic automerge error")] + #[error("invalid hash {0}")] + InvalidHash(ChangeHash), + #[error("general failure")] Fail, } diff --git a/examples/cra/package.json b/examples/cra/package.json index ccf145a9..0b465b94 100644 --- a/examples/cra/package.json +++ b/examples/cra/package.json @@ -10,6 +10,7 @@ "@types/node": "^16.11.21", "@types/react": "^17.0.38", "@types/react-dom": "^17.0.11", + "automerge-wasm": "file:../../automerge-wasm/automerge-wasm-0.0.24.tgz", "react": "^17.0.2", "react-dom": "^17.0.2", "react-scripts": "5.0.0", diff --git a/examples/cra/src/App.tsx b/examples/cra/src/App.tsx index fa6fba64..177f50bd 100644 --- a/examples/cra/src/App.tsx +++ b/examples/cra/src/App.tsx @@ -5,10 +5,14 @@ import * as Automerge from "automerge-wasm" function App() { const [ doc, ] = useState(Automerge.create()) - const [ edits, ] = useState(doc.set("_root", "edits", Automerge.TEXT) || "") + const [ edits, ] = useState(doc.putObject("_root", "edits", "")) const [ val, setVal ] = useState(""); useEffect(() => { doc.splice(edits, 0, 0, "the quick fox jumps over the lazy dog") + let doc2 = Automerge.load(doc.save()); + console.log("LOAD",Automerge.load) + console.log("DOC",doc.materialize("/")) + console.log("DOC2",doc2.materialize("/")) let result = 
doc.text(edits) setVal(result) }, []) From 0f2bd3fb27eb205b53f1ab0bac1269203769e3ca Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 7 Apr 2022 12:20:54 +0100 Subject: [PATCH 241/730] Make edit-trace vals be a string and use splice_text --- edit-trace/benches/main.rs | 14 +++++++------- edit-trace/src/main.rs | 8 ++++---- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs index 012ac649..00028945 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -1,23 +1,23 @@ -use automerge::{transaction::Transactable, AutoCommit, Automerge, ObjType, ScalarValue, ROOT}; +use automerge::{transaction::Transactable, AutoCommit, Automerge, ObjType, ROOT}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; use std::fs; -fn replay_trace_tx(commands: Vec<(usize, usize, Vec)>) -> Automerge { +fn replay_trace_tx(commands: Vec<(usize, usize, String)>) -> Automerge { let mut doc = Automerge::new(); let mut tx = doc.transaction(); let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); for (pos, del, vals) in commands { - tx.splice(&text, pos, del, vals).unwrap(); + tx.splice_text(&text, pos, del, &vals).unwrap(); } tx.commit(); doc } -fn replay_trace_autotx(commands: Vec<(usize, usize, Vec)>) -> AutoCommit { +fn replay_trace_autotx(commands: Vec<(usize, usize, String)>) -> AutoCommit { let mut doc = AutoCommit::new(); let text = doc.put_object(ROOT, "text", ObjType::Text).unwrap(); for (pos, del, vals) in commands { - doc.splice(&text, pos, del, vals).unwrap(); + doc.splice_text(&text, pos, del, &vals).unwrap(); } doc.commit(); doc @@ -46,10 +46,10 @@ fn bench(c: &mut Criterion) { for i in 0..edits.len() { let pos: usize = edits[i][0].as_usize().unwrap(); let del: usize = edits[i][1].as_usize().unwrap(); - let mut vals = vec![]; + let mut vals = String::new(); for j in 2..edits[i].len() { let v = edits[i][j].as_str().unwrap(); - 
vals.push(ScalarValue::Str(v.into())); + vals.push_str(v); } commands.push((pos, del, vals)); } diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index f9838227..72085fdb 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -1,5 +1,5 @@ +use automerge::ObjType; use automerge::{transaction::Transactable, Automerge, AutomergeError, ROOT}; -use automerge::{ObjType, ScalarValue}; use std::time::Instant; fn main() -> Result<(), AutomergeError> { @@ -9,10 +9,10 @@ fn main() -> Result<(), AutomergeError> { for i in 0..edits.len() { let pos: usize = edits[i][0].as_usize().unwrap(); let del: usize = edits[i][1].as_usize().unwrap(); - let mut vals = vec![]; + let mut vals = String::new(); for j in 2..edits[i].len() { let v = edits[i][j].as_str().unwrap(); - vals.push(ScalarValue::Str(v.into())); + vals.push_str(v); } commands.push((pos, del, vals)); } @@ -25,7 +25,7 @@ fn main() -> Result<(), AutomergeError> { if i % 1000 == 0 { println!("Processed {} edits in {} ms", i, now.elapsed().as_millis()); } - tx.splice(&text, pos, del, vals)?; + tx.splice_text(&text, pos, del, &vals)?; } tx.commit(); let save = Instant::now(); From dcc6c684857757bc414491f569039c7052daa33f Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 11 Mar 2022 11:18:08 +0000 Subject: [PATCH 242/730] Add parent's id to the op tree --- automerge/src/automerge.rs | 25 ++++++++++++++++ automerge/src/op_set.rs | 54 +++++++++++++++++++++------------- automerge/src/op_tree.rs | 50 +++++++++++++++++++++---------- automerge/src/query/opid.rs | 9 +++++- automerge/src/visualisation.rs | 6 ++-- 5 files changed, 103 insertions(+), 41 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index e9808660..856f6363 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -287,6 +287,31 @@ impl Automerge { // PropAt::() // NthAt::() + pub fn parent_object>(&self, obj: O) -> Option<(ExId, crate::legacy::Key)> { + if let Ok(obj) = 
self.exid_to_obj(obj.as_ref()) { + if obj == ObjId::root() { + // root has no parent + None + } else { + self.ops + .parent_object(&obj) + .map(|(id, key)| (self.id_to_exid(id.0), self.export_key(key))) + } + } else { + None + } + } + + fn export_key(&self, key: Key) -> crate::legacy::Key { + match key { + Key::Map(m) => crate::legacy::Key::Map(self.ops.m.props.get(m).into()), + Key::Seq(ElemId(OpId(0, 0))) => crate::legacy::Key::Seq(crate::legacy::ElementId::Head), + Key::Seq(elem) => crate::legacy::Key::Seq(crate::legacy::ElementId::Id( + crate::legacy::OpId(elem.0 .0, self.ops.m.actors.get(elem.0 .1).clone()), + )), + } + } + /// Get the keys of the object `obj`. /// /// For a map this returns the keys of the map. diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 59820e10..5f9e8205 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -1,7 +1,7 @@ use crate::clock::Clock; use crate::indexed_cache::IndexedCache; -use crate::op_tree::OpTreeInternal; -use crate::query::{self, TreeQuery}; +use crate::op_tree::OpTree; +use crate::query::{self, OpIdSearch, TreeQuery}; use crate::types::{ActorId, Key, ObjId, Op, OpId, OpType}; use crate::ObjType; use fxhash::FxBuildHasher; @@ -13,7 +13,7 @@ pub(crate) type OpSet = OpSetInternal; #[derive(Debug, Clone, PartialEq)] pub(crate) struct OpSetInternal { /// The map of objects to their type and ops. - trees: HashMap, + trees: HashMap, /// The number of operations in the opset. length: usize, /// Metadata about the operations in this opset. 
@@ -23,7 +23,7 @@ pub(crate) struct OpSetInternal { impl OpSetInternal { pub fn new() -> Self { let mut trees: HashMap<_, _, _> = Default::default(); - trees.insert(ObjId::root(), (ObjType::Map, Default::default())); + trees.insert(ObjId::root(), OpTree::new()); OpSetInternal { trees, length: 0, @@ -45,17 +45,23 @@ impl OpSetInternal { } } + pub fn parent_object(&self, obj: &ObjId) -> Option<(ObjId, Key)> { + let parent = self.trees.get(obj)?.parent?; + let key = self.search(&parent, OpIdSearch::new(obj.0)).key().unwrap(); + Some((parent, key)) + } + pub fn keys(&self, obj: ObjId) -> Option { - if let Some((_typ, tree)) = self.trees.get(&obj) { - tree.keys() + if let Some(tree) = self.trees.get(&obj) { + tree.internal.keys() } else { None } } pub fn keys_at(&self, obj: ObjId, clock: Clock) -> Option { - if let Some((_typ, tree)) = self.trees.get(&obj) { - tree.keys_at(clock) + if let Some(tree) = self.trees.get(&obj) { + tree.internal.keys_at(clock) } else { None } @@ -65,8 +71,8 @@ impl OpSetInternal { where Q: TreeQuery<'a>, { - if let Some((_typ, tree)) = self.trees.get(obj) { - tree.search(query, &self.m) + if let Some(tree) = self.trees.get(obj) { + tree.internal.search(query, &self.m) } else { query } @@ -76,16 +82,16 @@ impl OpSetInternal { where F: FnMut(&mut Op), { - if let Some((_typ, tree)) = self.trees.get_mut(obj) { - tree.update(index, f) + if let Some(tree) = self.trees.get_mut(obj) { + tree.internal.update(index, f) } } pub fn remove(&mut self, obj: &ObjId, index: usize) -> Op { // this happens on rollback - be sure to go back to the old state - let (_typ, tree) = self.trees.get_mut(obj).unwrap(); + let tree = self.trees.get_mut(obj).unwrap(); self.length -= 1; - let op = tree.remove(index); + let op = tree.internal.remove(index); if let OpType::Make(_) = &op.action { self.trees.remove(&op.id.into()); } @@ -98,19 +104,25 @@ impl OpSetInternal { pub fn insert(&mut self, index: usize, obj: &ObjId, element: Op) { if let OpType::Make(typ) = 
element.action { - self.trees - .insert(element.id.into(), (typ, Default::default())); + self.trees.insert( + element.id.into(), + OpTree { + internal: Default::default(), + objtype: typ, + parent: Some(*obj), + }, + ); } - if let Some((_typ, tree)) = self.trees.get_mut(obj) { + if let Some(tree) = self.trees.get_mut(obj) { //let tree = self.trees.get_mut(&element.obj).unwrap(); - tree.insert(index, element); + tree.internal.insert(index, element); self.length += 1; } } pub fn object_type(&self, id: &ObjId) -> Option { - self.trees.get(id).map(|(typ, _)| *typ) + self.trees.get(id).map(|tree| tree.objtype) } #[cfg(feature = "optree-visualisation")] @@ -151,8 +163,8 @@ impl<'a> Iterator for Iter<'a> { fn next(&mut self) -> Option { let mut result = None; for obj in self.objs.iter().skip(self.index) { - let (_typ, tree) = self.inner.trees.get(obj)?; - result = tree.get(self.sub_index).map(|op| (*obj, op)); + let tree = self.inner.trees.get(obj)?; + result = tree.internal.get(self.sub_index); if result.is_some() { self.sub_index += 1; break; diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 863f9f29..b8c9a69e 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -5,17 +5,35 @@ use std::{ }; pub(crate) use crate::op_set::OpSetMetadata; -use crate::types::{Op, OpId}; use crate::{ clock::Clock, query::{self, Index, QueryResult, TreeQuery}, }; +use crate::{ + types::{ObjId, Op, OpId}, + ObjType, +}; use std::collections::HashSet; pub(crate) const B: usize = 16; -#[allow(dead_code)] -pub(crate) type OpTree = OpTreeInternal; +#[derive(Debug, Clone)] +pub(crate) struct OpTree { + pub internal: OpTreeInternal, + pub objtype: ObjType, + /// The id of the parent object, root has no parent. 
+ pub parent: Option, +} + +impl OpTree { + pub fn new() -> Self { + Self { + internal: Default::default(), + objtype: ObjType::Map, + parent: None, + } + } +} #[derive(Clone, Debug)] pub(crate) struct OpTreeInternal { @@ -652,36 +670,36 @@ mod tests { #[test] fn insert() { - let mut t = OpTree::new(); + let mut t: OpTree<16> = OpTree::new(); - t.insert(0, op()); - t.insert(1, op()); - t.insert(0, op()); - t.insert(0, op()); - t.insert(0, op()); - t.insert(3, op()); - t.insert(4, op()); + t.internal.insert(0, op()); + t.internal.insert(1, op()); + t.internal.insert(0, op()); + t.internal.insert(0, op()); + t.internal.insert(0, op()); + t.internal.insert(3, op()); + t.internal.insert(4, op()); } #[test] fn insert_book() { - let mut t = OpTree::new(); + let mut t: OpTree<16> = OpTree::new(); for i in 0..100 { - t.insert(i % 2, op()); + t.internal.insert(i % 2, op()); } } #[test] fn insert_book_vec() { - let mut t = OpTree::new(); + let mut t: OpTree<16> = OpTree::new(); let mut v = Vec::new(); for i in 0..100 { - t.insert(i % 3, op()); + t.internal.insert(i % 3, op()); v.insert(i % 3, op()); - assert_eq!(v, t.iter().cloned().collect::>()) + assert_eq!(v, t.internal.iter().cloned().collect::>()) } } } diff --git a/automerge/src/query/opid.rs b/automerge/src/query/opid.rs index 1dbc76e5..bc0b605c 100644 --- a/automerge/src/query/opid.rs +++ b/automerge/src/query/opid.rs @@ -1,6 +1,6 @@ use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery}; -use crate::types::{Op, OpId}; +use crate::types::{Key, Op, OpId}; /// Search for an OpId in a tree. /// Returns the index of the operation in the tree. 
@@ -9,6 +9,7 @@ pub(crate) struct OpIdSearch { target: OpId, pos: usize, found: bool, + key: Option, } impl OpIdSearch { @@ -17,6 +18,7 @@ impl OpIdSearch { target, pos: 0, found: false, + key: None, } } @@ -28,6 +30,10 @@ impl OpIdSearch { None } } + + pub fn key(&self) -> &Option { + &self.key + } } impl<'a> TreeQuery<'a> for OpIdSearch { @@ -43,6 +49,7 @@ impl<'a> TreeQuery<'a> for OpIdSearch { fn query_element(&mut self, element: &Op) -> QueryResult { if element.id == self.target { self.found = true; + self.key = Some(element.key); QueryResult::Finish } else { self.pos += 1; diff --git a/automerge/src/visualisation.rs b/automerge/src/visualisation.rs index 74a93b1d..5e6dae6f 100644 --- a/automerge/src/visualisation.rs +++ b/automerge/src/visualisation.rs @@ -44,14 +44,14 @@ impl<'a> GraphVisualisation<'a> { pub(super) fn construct( trees: &'a HashMap< crate::types::ObjId, - (crate::types::ObjType, crate::op_tree::OpTreeInternal), + crate::op_tree::OpTree, BuildHasherDefault, >, metadata: &'a crate::op_set::OpSetMetadata, ) -> GraphVisualisation<'a> { let mut nodes = HashMap::new(); - for (obj_id, (_, tree)) in trees { - if let Some(root_node) = &tree.root_node { + for (obj_id, tree) in trees { + if let Some(root_node) = &tree.internal.root_node { let tree_id = Self::construct_nodes(root_node, obj_id, &mut nodes, metadata); let obj_tree_id = NodeId::default(); nodes.insert( From aeadedd5846fc101d53ce2e716818f997c2e831c Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 11 Mar 2022 11:20:48 +0000 Subject: [PATCH 243/730] Add watch example --- automerge/examples/watch.rs | 62 +++++++++++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 automerge/examples/watch.rs diff --git a/automerge/examples/watch.rs b/automerge/examples/watch.rs new file mode 100644 index 00000000..d21aad2d --- /dev/null +++ b/automerge/examples/watch.rs @@ -0,0 +1,62 @@ +use automerge::transaction::Transactable; +use automerge::Automerge; +use 
automerge::ChangeHash; +use automerge::ObjId; +use automerge::ROOT; + +fn main() { + let mut doc = Automerge::new(); + let heads1 = doc.get_heads(); + + // a simple scalar change in the root object + let mut tx = doc.transaction(); + tx.set(ROOT, "hello", "world").unwrap(); + let heads2 = tx.commit(); + get_changes(&heads1, &doc); + + let mut tx = doc.transaction(); + let map = tx + .set_object(ROOT, "my new map", automerge::ObjType::Map) + .unwrap(); + tx.set(&map, "blah", 1).unwrap(); + tx.set(&map, "blah2", 1).unwrap(); + let list = tx + .set_object(&map, "blaho", automerge::ObjType::List) + .unwrap(); + tx.insert(list, 0, "yay").unwrap(); + let _heads3 = tx.commit(); + get_changes(&[heads2], &doc); + + // now if a peer were to send us a change that added a key in map we wouldn't know the path to + // the change or we might not have a reference to the map objid. +} + +fn get_changes(heads: &[ChangeHash], doc: &Automerge) { + let changes = doc.get_changes(heads); + // changes should be in topological order + for change in changes { + let change = change.decode(); + for op in change.operations { + // get the object that it changed + let obj = doc.import(&op.obj.to_string()).unwrap(); + // get the prop too + let prop = op.key; + println!( + "changed {:?} in obj {:?}, path {:?}", + prop, + obj, + get_path_for_obj(doc, &obj) + ); + } + } +} + +fn get_path_for_obj(doc: &Automerge, obj: &ObjId) -> String { + let mut s = String::new(); + let mut obj = obj.clone(); + while let Some((parent, key)) = doc.parent_object(obj) { + s = format!("{:?}/{}", key, s); + obj = parent; + } + s +} From 12a4987ce770ac8d987ffe2420befbfe97c1731e Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 11 Mar 2022 12:44:10 +0000 Subject: [PATCH 244/730] Use prop rather than exposing legacy::Key --- automerge/src/automerge.rs | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 
856f6363..22c7f29d 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -287,7 +287,7 @@ impl Automerge { // PropAt::() // NthAt::() - pub fn parent_object>(&self, obj: O) -> Option<(ExId, crate::legacy::Key)> { + pub fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { if obj == ObjId::root() { // root has no parent @@ -295,20 +295,24 @@ impl Automerge { } else { self.ops .parent_object(&obj) - .map(|(id, key)| (self.id_to_exid(id.0), self.export_key(key))) + .map(|(id, key)| (self.id_to_exid(id.0), self.export_key(obj, key))) } } else { None } } - fn export_key(&self, key: Key) -> crate::legacy::Key { + fn export_key(&self, obj: ObjId, key: Key) -> Prop { match key { - Key::Map(m) => crate::legacy::Key::Map(self.ops.m.props.get(m).into()), - Key::Seq(ElemId(OpId(0, 0))) => crate::legacy::Key::Seq(crate::legacy::ElementId::Head), - Key::Seq(elem) => crate::legacy::Key::Seq(crate::legacy::ElementId::Id( - crate::legacy::OpId(elem.0 .0, self.ops.m.actors.get(elem.0 .1).clone()), - )), + Key::Map(m) => Prop::Map(self.ops.m.props.get(m).into()), + Key::Seq(opid) => { + let i = self + .ops + .search(obj, query::OpIdSearch::new(opid.0)) + .index() + .unwrap(); + Prop::Seq(i) + } } } From 9e71736b88086d74a396d7f20b3f41f290498624 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 21 Mar 2022 14:05:15 +0000 Subject: [PATCH 245/730] Fixup after rebase --- automerge/src/automerge.rs | 2 +- automerge/src/op_set.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 22c7f29d..e2cf97f6 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -308,7 +308,7 @@ impl Automerge { Key::Seq(opid) => { let i = self .ops - .search(obj, query::OpIdSearch::new(opid.0)) + .search(&obj, query::OpIdSearch::new(opid.0)) .index() .unwrap(); Prop::Seq(i) diff --git a/automerge/src/op_set.rs 
b/automerge/src/op_set.rs index 5f9e8205..283a4866 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -164,7 +164,7 @@ impl<'a> Iterator for Iter<'a> { let mut result = None; for obj in self.objs.iter().skip(self.index) { let tree = self.inner.trees.get(obj)?; - result = tree.internal.get(self.sub_index); + result = tree.internal.get(self.sub_index).map(|op| (*obj, op)); if result.is_some() { self.sub_index += 1; break; From bd2f252e0bd9418d9292f713c281d4de4be12061 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 22 Mar 2022 07:59:50 +0000 Subject: [PATCH 246/730] Try and fix parent object query --- automerge/examples/watch.rs | 17 +++++++--- automerge/src/automerge.rs | 4 +-- automerge/src/query.rs | 2 ++ automerge/src/query/elem_id_pos.rs | 53 ++++++++++++++++++++++++++++++ automerge/src/types.rs | 10 ++++++ 5 files changed, 79 insertions(+), 7 deletions(-) create mode 100644 automerge/src/query/elem_id_pos.rs diff --git a/automerge/examples/watch.rs b/automerge/examples/watch.rs index d21aad2d..0df766b8 100644 --- a/automerge/examples/watch.rs +++ b/automerge/examples/watch.rs @@ -21,9 +21,14 @@ fn main() { tx.set(&map, "blah", 1).unwrap(); tx.set(&map, "blah2", 1).unwrap(); let list = tx - .set_object(&map, "blaho", automerge::ObjType::List) + .set_object(&map, "my list", automerge::ObjType::List) .unwrap(); - tx.insert(list, 0, "yay").unwrap(); + // tx.insert(&list, 0, "yay").unwrap(); + let m = tx.insert_object(&list, 0, automerge::ObjType::Map).unwrap(); + tx.set(&m, "hi", 2).unwrap(); + tx.insert(&list, 1, "woo").unwrap(); + let m = tx.insert_object(&list, 2, automerge::ObjType::Map).unwrap(); + tx.set(&m, "hi", 2).unwrap(); let _heads3 = tx.commit(); get_changes(&[heads2], &doc); @@ -40,9 +45,11 @@ fn get_changes(heads: &[ChangeHash], doc: &Automerge) { // get the object that it changed let obj = doc.import(&op.obj.to_string()).unwrap(); // get the prop too - let prop = op.key; + let prop = format!("{:?}", op.key); + 
println!("{:?}", op); println!( - "changed {:?} in obj {:?}, path {:?}", + "{} {:?} in obj {:?}, object path {:?}", + if op.insert { "inserted" } else { "changed" }, prop, obj, get_path_for_obj(doc, &obj) @@ -55,7 +62,7 @@ fn get_path_for_obj(doc: &Automerge, obj: &ObjId) -> String { let mut s = String::new(); let mut obj = obj.clone(); while let Some((parent, key)) = doc.parent_object(obj) { - s = format!("{:?}/{}", key, s); + s = format!("{}/{}", key, s); obj = parent; } s diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index e2cf97f6..a8ba0c39 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -295,7 +295,7 @@ impl Automerge { } else { self.ops .parent_object(&obj) - .map(|(id, key)| (self.id_to_exid(id.0), self.export_key(obj, key))) + .map(|(id, key)| (self.id_to_exid(id.0), self.export_key(id, key))) } } else { None @@ -308,7 +308,7 @@ impl Automerge { Key::Seq(opid) => { let i = self .ops - .search(&obj, query::OpIdSearch::new(opid.0)) + .search(&obj, query::ElemIdPos::new(opid)) .index() .unwrap(); Prop::Seq(i) diff --git a/automerge/src/query.rs b/automerge/src/query.rs index d6a6166a..adb77566 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -5,6 +5,7 @@ use std::cmp::Ordering; use std::collections::{HashMap, HashSet}; use std::fmt::Debug; +mod elem_id_pos; mod insert; mod keys; mod keys_at; @@ -20,6 +21,7 @@ mod prop_at; mod seek_op; mod seek_op_with_patch; +pub(crate) use elem_id_pos::ElemIdPos; pub(crate) use insert::InsertNth; pub(crate) use keys::Keys; pub(crate) use keys_at::KeysAt; diff --git a/automerge/src/query/elem_id_pos.rs b/automerge/src/query/elem_id_pos.rs new file mode 100644 index 00000000..c0f361cf --- /dev/null +++ b/automerge/src/query/elem_id_pos.rs @@ -0,0 +1,53 @@ +use crate::{op_tree::OpTreeNode, types::ElemId}; + +use super::{QueryResult, TreeQuery}; + +pub(crate) struct ElemIdPos { + elemid: ElemId, + pos: usize, + found: bool, +} + +impl ElemIdPos { + pub fn 
new(elemid: ElemId) -> Self { + Self { + elemid, + pos: 0, + found: false, + } + } + + pub fn index(&self) -> Option { + if self.found { + Some(self.pos) + } else { + None + } + } +} + +impl TreeQuery for ElemIdPos { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + dbg!(child, &self.elemid); + // if index has our element then we can cont + if child.index.has(&Some(self.elemid)) { + // element is in this node somewhere + QueryResult::Descend + } else { + // not in this node, try the next one + self.pos += child.index.len; + QueryResult::Next + } + } + + fn query_element(&mut self, element: &crate::types::Op) -> QueryResult { + if element.elemid() == Some(self.elemid) { + // this is it + self.found = true; + return QueryResult::Finish; + } else if element.visible() { + self.pos += 1; + } + QueryResult::Next + } +} diff --git a/automerge/src/types.rs b/automerge/src/types.rs index a94aba76..246bcca7 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -5,6 +5,7 @@ use serde::{Deserialize, Serialize}; use std::borrow::Cow; use std::cmp::Eq; use std::fmt; +use std::fmt::Display; use std::str::FromStr; use tinyvec::{ArrayVec, TinyVec}; @@ -331,6 +332,15 @@ pub enum Prop { Seq(usize), } +impl Display for Prop { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Prop::Map(s) => write!(f, "{}", s), + Prop::Seq(i) => write!(f, "{}", i), + } + } +} + impl Key { pub fn elemid(&self) -> Option { match self { From ebb73738da11742c632c77d5133a8c131b4a2b41 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 7 Apr 2022 14:21:52 +0100 Subject: [PATCH 247/730] Remove B --- automerge/src/query/elem_id_pos.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automerge/src/query/elem_id_pos.rs b/automerge/src/query/elem_id_pos.rs index c0f361cf..690d8df1 100644 --- a/automerge/src/query/elem_id_pos.rs +++ b/automerge/src/query/elem_id_pos.rs @@ -26,8 +26,8 @@ impl ElemIdPos { } } -impl TreeQuery for 
ElemIdPos { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { +impl TreeQuery for ElemIdPos { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { dbg!(child, &self.elemid); // if index has our element then we can cont if child.index.has(&Some(self.elemid)) { From a88d49cf45ae87402301947322fa14fd88b208fa Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 7 Apr 2022 14:32:17 +0100 Subject: [PATCH 248/730] Fixup builds --- automerge/examples/watch.rs | 14 +++++++------- automerge/src/autocommit.rs | 4 ++++ automerge/src/op_tree.rs | 8 ++++---- automerge/src/query/elem_id_pos.rs | 6 +++--- automerge/src/transaction/manual_transaction.rs | 4 ++++ automerge/src/transaction/transactable.rs | 4 ++++ 6 files changed, 26 insertions(+), 14 deletions(-) diff --git a/automerge/examples/watch.rs b/automerge/examples/watch.rs index 0df766b8..aa93127a 100644 --- a/automerge/examples/watch.rs +++ b/automerge/examples/watch.rs @@ -10,25 +10,25 @@ fn main() { // a simple scalar change in the root object let mut tx = doc.transaction(); - tx.set(ROOT, "hello", "world").unwrap(); + tx.put(ROOT, "hello", "world").unwrap(); let heads2 = tx.commit(); get_changes(&heads1, &doc); let mut tx = doc.transaction(); let map = tx - .set_object(ROOT, "my new map", automerge::ObjType::Map) + .put_object(ROOT, "my new map", automerge::ObjType::Map) .unwrap(); - tx.set(&map, "blah", 1).unwrap(); - tx.set(&map, "blah2", 1).unwrap(); + tx.put(&map, "blah", 1).unwrap(); + tx.put(&map, "blah2", 1).unwrap(); let list = tx - .set_object(&map, "my list", automerge::ObjType::List) + .put_object(&map, "my list", automerge::ObjType::List) .unwrap(); // tx.insert(&list, 0, "yay").unwrap(); let m = tx.insert_object(&list, 0, automerge::ObjType::Map).unwrap(); - tx.set(&m, "hi", 2).unwrap(); + tx.put(&m, "hi", 2).unwrap(); tx.insert(&list, 1, "woo").unwrap(); let m = tx.insert_object(&list, 2, automerge::ObjType::Map).unwrap(); - tx.set(&m, "hi", 2).unwrap(); + tx.put(&m, "hi", 
2).unwrap(); let _heads3 = tx.commit(); get_changes(&[heads2], &doc); diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index a6d42d16..d5da3f56 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -388,4 +388,8 @@ impl Transactable for AutoCommit { ) -> Result, AutomergeError> { self.doc.values_at(obj, prop, heads) } + + fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { + self.doc.parent_object(obj) + } } diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index b8c9a69e..5d08df38 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -17,7 +17,7 @@ use std::collections::HashSet; pub(crate) const B: usize = 16; -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq)] pub(crate) struct OpTree { pub internal: OpTreeInternal, pub objtype: ObjType, @@ -670,7 +670,7 @@ mod tests { #[test] fn insert() { - let mut t: OpTree<16> = OpTree::new(); + let mut t: OpTree = OpTree::new(); t.internal.insert(0, op()); t.internal.insert(1, op()); @@ -683,7 +683,7 @@ mod tests { #[test] fn insert_book() { - let mut t: OpTree<16> = OpTree::new(); + let mut t: OpTree = OpTree::new(); for i in 0..100 { t.internal.insert(i % 2, op()); @@ -692,7 +692,7 @@ mod tests { #[test] fn insert_book_vec() { - let mut t: OpTree<16> = OpTree::new(); + let mut t: OpTree = OpTree::new(); let mut v = Vec::new(); for i in 0..100 { diff --git a/automerge/src/query/elem_id_pos.rs b/automerge/src/query/elem_id_pos.rs index 690d8df1..75adc559 100644 --- a/automerge/src/query/elem_id_pos.rs +++ b/automerge/src/query/elem_id_pos.rs @@ -26,16 +26,16 @@ impl ElemIdPos { } } -impl TreeQuery for ElemIdPos { +impl<'a> TreeQuery<'a> for ElemIdPos { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { dbg!(child, &self.elemid); // if index has our element then we can cont - if child.index.has(&Some(self.elemid)) { + if child.index.has_visible(&Some(self.elemid)) { // element is in this node somewhere 
QueryResult::Descend } else { // not in this node, try the next one - self.pos += child.index.len; + self.pos += child.index.visible_len(); QueryResult::Next } } diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 2cfc7073..844f61f0 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -236,6 +236,10 @@ impl<'a> Transactable for Transaction<'a> { ) -> Result, AutomergeError> { self.doc.values_at(obj, prop, heads) } + + fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { + self.doc.parent_object(obj) + } } // If a transaction is not commited or rolled back manually then it can leave the document in an diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index ed562828..1c64e664 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -145,4 +145,8 @@ pub trait Transactable { prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError>; + + /// Get the object id of the object that contains this object and the prop that this object is + /// at in that object. 
+ fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)>; } From e9adc3248653c1276cfe14c3d6fc764e17366eb0 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 7 Apr 2022 14:51:31 +0100 Subject: [PATCH 249/730] Fixup OpIdSearch's key extraction --- automerge/src/automerge.rs | 13 +++++++++++++ automerge/src/query/elem_id_pos.rs | 4 ++-- automerge/src/query/opid.rs | 8 ++++++-- automerge/src/types.rs | 2 +- 4 files changed, 22 insertions(+), 5 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index a8ba0c39..d952f509 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1680,4 +1680,17 @@ mod tests { assert!(doc1.value(&list, 1).unwrap().is_none()); assert!(doc2.value(&list, 1).unwrap().is_none()); } + + #[test] + fn get_parent_objects() { + let mut doc = AutoCommit::new(); + let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); + let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); + doc.insert(&list, 0, 2).unwrap(); + let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); + + assert_eq!(doc.parent_object(&map), Some((ROOT, Prop::Map("a".into())))); + assert_eq!(doc.parent_object(&list), Some((map, Prop::Seq(0)))); + assert_eq!(doc.parent_object(&text), Some((list, Prop::Seq(0)))); + } } diff --git a/automerge/src/query/elem_id_pos.rs b/automerge/src/query/elem_id_pos.rs index 75adc559..bce1584a 100644 --- a/automerge/src/query/elem_id_pos.rs +++ b/automerge/src/query/elem_id_pos.rs @@ -2,6 +2,7 @@ use crate::{op_tree::OpTreeNode, types::ElemId}; use super::{QueryResult, TreeQuery}; +/// Lookup the index in the list that this elemid occupies. 
pub(crate) struct ElemIdPos { elemid: ElemId, pos: usize, @@ -28,8 +29,7 @@ impl ElemIdPos { impl<'a> TreeQuery<'a> for ElemIdPos { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { - dbg!(child, &self.elemid); - // if index has our element then we can cont + // if index has our element then we can continue if child.index.has_visible(&Some(self.elemid)) { // element is in this node somewhere QueryResult::Descend diff --git a/automerge/src/query/opid.rs b/automerge/src/query/opid.rs index bc0b605c..873f854e 100644 --- a/automerge/src/query/opid.rs +++ b/automerge/src/query/opid.rs @@ -1,6 +1,6 @@ use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery}; -use crate::types::{Key, Op, OpId}; +use crate::types::{ElemId, Key, Op, OpId}; /// Search for an OpId in a tree. /// Returns the index of the operation in the tree. @@ -49,7 +49,11 @@ impl<'a> TreeQuery<'a> for OpIdSearch { fn query_element(&mut self, element: &Op) -> QueryResult { if element.id == self.target { self.found = true; - self.key = Some(element.key); + if element.insert { + self.key = Some(Key::Seq(ElemId(element.id))); + } else { + self.key = Some(element.key); + } QueryResult::Finish } else { self.pos += 1; diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 246bcca7..2221a9eb 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -357,7 +357,7 @@ pub(crate) struct OpId(pub u64, pub usize); pub(crate) struct ObjId(pub OpId); impl ObjId { - pub fn root() -> Self { + pub const fn root() -> Self { ObjId(OpId(0, 0)) } } From cc8134047ab4e9a4b5cbca79a88c47e3a30d7539 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 7 Apr 2022 14:52:25 +0100 Subject: [PATCH 250/730] Document parent_object --- automerge/src/automerge.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index d952f509..3538baab 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -287,6 +287,8 @@ 
impl Automerge { // PropAt::() // NthAt::() + /// Get the object id of the object that contains this object and the prop that this object is + /// at in that object. pub fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { if obj == ObjId::root() { @@ -302,6 +304,7 @@ impl Automerge { } } + /// Export a key to a prop. fn export_key(&self, obj: ObjId, key: Key) -> Prop { match key { Key::Map(m) => Prop::Map(self.ops.m.props.get(m).into()), From 06d2306d5408172e94abefa145a977bfe18ee3c6 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 7 Apr 2022 15:04:00 +0100 Subject: [PATCH 251/730] Add path_to_object --- automerge/src/autocommit.rs | 4 ++ automerge/src/automerge.rs | 37 +++++++++++++++++++ .../src/transaction/manual_transaction.rs | 4 ++ automerge/src/transaction/transactable.rs | 2 + 4 files changed, 47 insertions(+) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index d5da3f56..c9adf118 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -392,4 +392,8 @@ impl Transactable for AutoCommit { fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { self.doc.parent_object(obj) } + + fn path_to_object>(&self, obj: O) -> Vec<(ExId, Prop)> { + self.doc.path_to_object(obj) + } } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 3538baab..72b53b91 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -304,6 +304,17 @@ impl Automerge { } } + pub fn path_to_object>(&self, obj: O) -> Vec<(ExId, Prop)> { + let mut path = Vec::new(); + let mut obj = obj.as_ref().clone(); + while let Some(parent) = self.parent_object(obj) { + obj = parent.0.clone(); + path.push(parent); + } + path.reverse(); + path + } + /// Export a key to a prop. 
fn export_key(&self, obj: ObjId, key: Key) -> Prop { match key { @@ -1696,4 +1707,30 @@ mod tests { assert_eq!(doc.parent_object(&list), Some((map, Prop::Seq(0)))); assert_eq!(doc.parent_object(&text), Some((list, Prop::Seq(0)))); } + + #[test] + fn get_path_to_object() { + let mut doc = AutoCommit::new(); + let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); + let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); + doc.insert(&list, 0, 2).unwrap(); + let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); + + assert_eq!( + doc.path_to_object(&map), + vec![(ROOT, Prop::Map("a".into()))] + ); + assert_eq!( + doc.path_to_object(&list), + vec![(ROOT, Prop::Map("a".into())), (map.clone(), Prop::Seq(0)),] + ); + assert_eq!( + doc.path_to_object(&text), + vec![ + (ROOT, Prop::Map("a".into())), + (map, Prop::Seq(0)), + (list, Prop::Seq(0)), + ] + ); + } } diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 844f61f0..c2256368 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -240,6 +240,10 @@ impl<'a> Transactable for Transaction<'a> { fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { self.doc.parent_object(obj) } + + fn path_to_object>(&self, obj: O) -> Vec<(ExId, Prop)> { + self.doc.path_to_object(obj) + } } // If a transaction is not commited or rolled back manually then it can leave the document in an diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 1c64e664..e07b41cc 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -149,4 +149,6 @@ pub trait Transactable { /// Get the object id of the object that contains this object and the prop that this object is /// at in that object. 
fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)>; + + fn path_to_object>(&self, obj: O) -> Vec<(ExId, Prop)>; } From 6d9ed5cde456663be55d112138f88c80383c4b8f Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 7 Apr 2022 14:17:16 -0400 Subject: [PATCH 252/730] start at 0.0.1 --- automerge-wasm/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index e574118e..af2bfba9 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -6,7 +6,7 @@ ], "name": "automerge-wasm", "description": "wasm-bindgen bindings to the automerge rust implementation", - "version": "0.0.24", + "version": "0.0.1", "license": "MIT", "files": [ "README.md", From 9f3ae61b9167f677cf1dcfa414a0b689e25c4cc4 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 7 Apr 2022 14:24:12 -0400 Subject: [PATCH 253/730] use doc.text() in js toString() --- automerge-js/src/proxies.js | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js index 1086bcb0..65e6ae3e 100644 --- a/automerge-js/src/proxies.js +++ b/automerge-js/src/proxies.js @@ -574,7 +574,7 @@ function listMethods(target) { } function textMethods(target) { - const {context, objectId, path, readonly, frozen} = target + const {context, objectId, path, readonly, frozen, heads } = target const methods = { set (index, value) { return this[index] = value @@ -583,13 +583,7 @@ function textMethods(target) { return this[index] }, toString () { - let str = '' - let length = this.length - for (let i = 0; i < length; i++) { - const value = this.get(i) - if (typeof value === 'string') str += value - } - return str + return context.text(objectId, heads).replace(//g,'') }, toSpans () { let spans = [] From 37d90c5b8e80176cdf8826ca968502da933960a3 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 7 Apr 2022 14:43:56 -0400 Subject: [PATCH 254/730] optimize fork_at --- 
automerge-wasm/test/test.ts | 6 ++---- automerge/src/automerge.rs | 8 +++++++- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 30c830f9..d4d0e075 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -466,10 +466,8 @@ describe('Automerge', () => { A.merge(B) let heads2 = A.getHeads(); A.put("/", "key5","val5"); - let C = A.forkAt(heads1) - let D = A.forkAt(heads2) - assert.deepEqual(C.materialize("/"), A.materialize("/",heads1)) - assert.deepEqual(D.materialize("/"), A.materialize("/",heads2)) + assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/",heads1)) + assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/",heads2)) }) it('should handle merging text conflicts then saving & loading', () => { diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index f9356399..da2960c7 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -200,13 +200,19 @@ impl Automerge { /// Fork this document at the give heads pub fn fork_at(&self, heads: &[ChangeHash]) -> Result { + let mut seen = heads.iter().cloned().collect::>(); let mut heads = heads.to_vec(); let mut changes = vec![]; while let Some(hash) = heads.pop() { if let Some(idx) = self.history_index.get(&hash) { let change = &self.history[*idx]; - heads.extend(&change.deps); + for dep in &change.deps { + if !seen.contains(dep) { + heads.push(*dep); + } + } changes.push(change); + seen.insert(hash); } else { return Err(AutomergeError::InvalidHash(hash)); } From 9ca47924246eabe4065e7689c8dd400d6cdbb9f6 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 7 Apr 2022 14:53:14 -0400 Subject: [PATCH 255/730] fmt --- automerge/src/automerge.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index da2960c7..8e7fb7d9 100644 --- a/automerge/src/automerge.rs +++ 
b/automerge/src/automerge.rs @@ -207,9 +207,9 @@ impl Automerge { if let Some(idx) = self.history_index.get(&hash) { let change = &self.history[*idx]; for dep in &change.deps { - if !seen.contains(dep) { - heads.push(*dep); - } + if !seen.contains(dep) { + heads.push(*dep); + } } changes.push(change); seen.insert(hash); From 842797f3aa753a8028948cbea8a3f6c04b8d9715 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 7 Apr 2022 12:23:47 +0100 Subject: [PATCH 256/730] Use Unicode Scalars instead of graphemes in text --- automerge-wasm/Cargo.toml | 1 - automerge-wasm/src/interop.rs | 9 ++++----- automerge/Cargo.toml | 1 - automerge/src/transaction/transactable.rs | 4 +--- 4 files changed, 5 insertions(+), 10 deletions(-) diff --git a/automerge-wasm/Cargo.toml b/automerge-wasm/Cargo.toml index 81c5a2a3..a9fe8f34 100644 --- a/automerge-wasm/Cargo.toml +++ b/automerge-wasm/Cargo.toml @@ -30,7 +30,6 @@ getrandom = { version = "^0.2.2", features=["js"] } uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] } serde-wasm-bindgen = "0.1.3" serde_bytes = "0.11.5" -unicode-segmentation = "1.7.1" hex = "^0.4.3" regex = "^1.5" diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 69dd38f7..fc895e6f 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -4,7 +4,6 @@ use automerge::{Change, ChangeHash, Prop}; use js_sys::{Array, Object, Reflect, Uint8Array}; use std::collections::HashSet; use std::fmt::Display; -use unicode_segmentation::UnicodeSegmentation; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; @@ -286,9 +285,9 @@ pub(crate) fn to_objtype( Some("text") => { let text = value.as_string()?; let text = text - .graphemes(true) + .chars() .enumerate() - .map(|(i, ch)| (i.into(), ch.into())) + .map(|(i, ch)| (i.into(), ch.to_string().into())) .collect(); Some((am::ObjType::Text, text)) } @@ -311,9 +310,9 @@ pub(crate) fn to_objtype( Some((am::ObjType::Map, map)) } else if let Some(text) = 
value.as_string() { let text = text - .graphemes(true) + .chars() .enumerate() - .map(|(i, ch)| (i.into(), ch.into())) + .map(|(i, ch)| (i.into(), ch.to_string().into())) .collect(); Some((am::ObjType::Text, text)) } else { diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index 03f7d9c6..c6f86e65 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -24,7 +24,6 @@ smol_str = "^0.1.21" tracing = { version = "^0.1.29", features = ["log"] } fxhash = "^0.2.1" tinyvec = { version = "^1.5.1", features = ["alloc"] } -unicode-segmentation = "1.7.1" serde = { version = "^1.0", features=["derive"] } dot = { version = "0.1.4", optional = true } js-sys = { version = "^0.3", optional = true } diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index e07b41cc..dd966939 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -1,6 +1,5 @@ use crate::exid::ExId; use crate::{AutomergeError, ChangeHash, Keys, KeysAt, ObjType, Prop, ScalarValue, Value}; -use unicode_segmentation::UnicodeSegmentation; /// A way of mutating a document within a single change. pub trait Transactable { @@ -88,8 +87,7 @@ pub trait Transactable { del: usize, text: &str, ) -> Result<(), AutomergeError> { - let text = text.to_owned(); - let vals = text.graphemes(true).map(|c| c.into()); + let vals = text.chars().map(|c| c.into()); self.splice(obj, pos, del, vals) } From e4e9e9a691ab04a91d9bfab830865f2cb2de3213 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 7 Apr 2022 12:35:15 +0100 Subject: [PATCH 257/730] Add tests for inserting into text This ensures that we can still insert entire graphemes (small strings) and break them into chars automatically. 
--- automerge/src/automerge.rs | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 72b53b91..76f3ecdb 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1733,4 +1733,32 @@ mod tests { ] ); } + + #[test] + fn can_insert_a_grapheme_into_text() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); + let polar_bear = "🐻‍❄️"; + tx.insert(&text, 0, polar_bear).unwrap(); + tx.commit(); + let s = doc.text(&text).unwrap(); + assert_eq!(s, polar_bear); + let len = doc.length(&text); + assert_eq!(len, 1); // just one grapheme + } + + #[test] + fn splice_text_uses_unicode_scalars() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); + let polar_bear = "🐻‍❄️"; + tx.splice_text(&text, 0, 0, polar_bear).unwrap(); + tx.commit(); + let s = doc.text(&text).unwrap(); + assert_eq!(s, polar_bear); + let len = doc.length(&text); + assert_eq!(len, 4); // 4 chars + } } From 80ce447d72c51a4949b74e421ee8d30dc538c7a3 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 7 Apr 2022 12:48:22 +0100 Subject: [PATCH 258/730] Add conversion from &String for Value and ScalarValue --- automerge-cli/src/import.rs | 4 ++-- automerge/src/value.rs | 12 ++++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/automerge-cli/src/import.rs b/automerge-cli/src/import.rs index ecc184f4..a9556071 100644 --- a/automerge-cli/src/import.rs +++ b/automerge-cli/src/import.rs @@ -28,7 +28,7 @@ fn import_map( doc.put(obj, key, *b)?; } serde_json::Value::String(s) => { - doc.put(obj, key, s.as_ref())?; + doc.put(obj, key, s)?; } serde_json::Value::Array(vec) => { let id = doc.put_object(obj, key, am::ObjType::List)?; @@ -68,7 +68,7 @@ fn import_list( doc.insert(obj, i, *b)?; } serde_json::Value::String(s) => { - 
doc.insert(obj, i, s.as_ref())?; + doc.insert(obj, i, s)?; } serde_json::Value::Array(vec) => { let id = doc.insert_object(obj, i, am::ObjType::List)?; diff --git a/automerge/src/value.rs b/automerge/src/value.rs index ff32ddca..f378088e 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -246,6 +246,12 @@ impl<'a> From<&str> for Value<'a> { } } +impl<'a> From<&String> for Value<'a> { + fn from(s: &String) -> Self { + Value::Scalar(Cow::Owned(ScalarValue::Str(s.into()))) + } +} + impl<'a> From for Value<'a> { fn from(s: String) -> Self { Value::Scalar(Cow::Owned(ScalarValue::Str(s.into()))) @@ -626,6 +632,12 @@ impl From<&str> for ScalarValue { } } +impl From<&String> for ScalarValue { + fn from(s: &String) -> Self { + ScalarValue::Str(s.into()) + } +} + impl From for ScalarValue { fn from(s: String) -> Self { ScalarValue::Str(s.into()) From 1bbcd4c151d16d7094ed0229612ae6dceb8d66bf Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 7 Apr 2022 12:48:35 +0100 Subject: [PATCH 259/730] Test that we can insert long strings into text --- automerge/src/automerge.rs | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 76f3ecdb..0b457245 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1748,6 +1748,21 @@ mod tests { assert_eq!(len, 1); // just one grapheme } + #[test] + fn can_insert_long_string_into_text() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); + let polar_bear = "🐻‍❄️"; + let polar_bear_army = polar_bear.repeat(100); + tx.insert(&text, 0, &polar_bear_army).unwrap(); + tx.commit(); + let s = doc.text(&text).unwrap(); + assert_eq!(s, polar_bear_army); + let len = doc.length(&text); + assert_eq!(len, 1); // many graphemes + } + #[test] fn splice_text_uses_unicode_scalars() { let mut doc = Automerge::new(); From 94a122478de719324a6ec8565ecb32d9209b0dfd Mon 
Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 8 Apr 2022 10:13:52 +0100 Subject: [PATCH 260/730] Add object replacement character in text_at --- automerge/src/automerge.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 72b53b91..a0823c9a 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -448,6 +448,8 @@ impl Automerge { for q in &query.ops { if let OpType::Put(ScalarValue::Str(s)) = &q.action { buffer.push_str(s); + } else { + buffer.push('\u{fffc}'); } } Ok(buffer) From 4406a5b20869ab8fe4abdc8aeea747938adb9a81 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 10:09:26 +0100 Subject: [PATCH 261/730] Add range query This is a way of efficiently getting just the keys and values in a range. --- automerge/src/automerge.rs | 270 ++++++++++++++++++++++++++++++++- automerge/src/lib.rs | 2 + automerge/src/op_set.rs | 11 +- automerge/src/op_tree.rs | 13 +- automerge/src/query.rs | 2 + automerge/src/query/keys.rs | 13 +- automerge/src/query/keys_at.rs | 12 +- automerge/src/query/range.rs | 56 +++++++ automerge/src/range.rs | 26 ++++ automerge/src/types.rs | 8 +- 10 files changed, 396 insertions(+), 17 deletions(-) create mode 100644 automerge/src/query/range.rs create mode 100644 automerge/src/range.rs diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 41c82e8a..1711b908 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1,10 +1,12 @@ use std::collections::{HashMap, HashSet, VecDeque}; use std::num::NonZeroU64; +use std::ops::RangeBounds; use crate::change::encode_document; use crate::exid::ExId; use crate::keys::Keys; use crate::op_set::OpSet; +use crate::range::Range; use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner}; use crate::types::{ ActorId, AssignPatch, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, @@ -353,6 +355,19 @@ impl Automerge { 
} } + /// Iterate over the keys and values of the object `obj`. + /// + /// For a map the keys are the keys of the map. + /// For a list the keys are the element ids (opids) encoded as strings. + pub fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range { + if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + let iter_range = self.ops.range(obj, range); + Range::new(self, iter_range) + } else { + Range::new(self, None) + } + } + /// Get the length of the given object. pub fn length>(&self, obj: O) -> usize { if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { @@ -1358,7 +1373,7 @@ mod tests { } #[test] - fn keys_iter() { + fn keys_iter_map() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); tx.put(ROOT, "a", 3).unwrap(); @@ -1406,6 +1421,259 @@ mod tests { assert_eq!(keys.collect::>(), vec!["a", "b", "c", "d"]); } + #[test] + fn keys_iter_seq() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let list = tx.set_object(ROOT, "list", ObjType::List).unwrap(); + tx.insert(&list, 0, 3).unwrap(); + tx.insert(&list, 1, 4).unwrap(); + tx.insert(&list, 2, 5).unwrap(); + tx.insert(&list, 3, 6).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.set(&list, 0, 7).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.set(&list, 0, 8).unwrap(); + tx.set(&list, 3, 9).unwrap(); + tx.commit(); + let actor = doc.get_actor(); + assert_eq!(doc.keys(&list).count(), 4); + + let mut keys = doc.keys(&list); + assert_eq!(keys.next(), Some(format!("2@{}", actor))); + assert_eq!(keys.next(), Some(format!("3@{}", actor))); + assert_eq!(keys.next(), Some(format!("4@{}", actor))); + assert_eq!(keys.next(), Some(format!("5@{}", actor))); + assert_eq!(keys.next(), None); + + let mut keys = doc.keys(&list); + assert_eq!(keys.next_back(), Some(format!("5@{}", actor))); + assert_eq!(keys.next_back(), Some(format!("4@{}", actor))); + assert_eq!(keys.next_back(), Some(format!("3@{}", actor))); + assert_eq!(keys.next_back(), 
Some(format!("2@{}", actor))); + assert_eq!(keys.next_back(), None); + + let mut keys = doc.keys(&list); + assert_eq!(keys.next(), Some(format!("2@{}", actor))); + assert_eq!(keys.next_back(), Some(format!("5@{}", actor))); + assert_eq!(keys.next_back(), Some(format!("4@{}", actor))); + assert_eq!(keys.next_back(), Some(format!("3@{}", actor))); + assert_eq!(keys.next_back(), None); + + let mut keys = doc.keys(&list); + assert_eq!(keys.next_back(), Some(format!("5@{}", actor))); + assert_eq!(keys.next(), Some(format!("2@{}", actor))); + assert_eq!(keys.next(), Some(format!("3@{}", actor))); + assert_eq!(keys.next(), Some(format!("4@{}", actor))); + assert_eq!(keys.next(), None); + + let keys = doc.keys(&list); + assert_eq!( + keys.collect::>(), + vec![ + format!("2@{}", actor), + format!("3@{}", actor), + format!("4@{}", actor), + format!("5@{}", actor) + ] + ); + } + + #[test] + fn range_iter_map() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + tx.set(ROOT, "a", 3).unwrap(); + tx.set(ROOT, "b", 4).unwrap(); + tx.set(ROOT, "c", 5).unwrap(); + tx.set(ROOT, "d", 6).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.set(ROOT, "a", 7).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.set(ROOT, "a", 8).unwrap(); + tx.set(ROOT, "d", 9).unwrap(); + tx.commit(); + let actor = doc.get_actor(); + assert_eq!(doc.range(ROOT, ..).count(), 4); + + let mut range = doc.range(ROOT, Prop::Map("b".into()).."d".into()); + assert_eq!( + range.next(), + Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!(range.next(), None); + + let mut range = doc.range(ROOT, Prop::Map("b".into())..="d".into()); + assert_eq!( + range.next(), + Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + 
range.next(), + Some(("d".into(), 9.into(), ExId::Id(7, actor.clone(), 0))) + ); + assert_eq!(range.next(), None); + + let mut range = doc.range(ROOT, ..=Prop::Map("c".into())); + assert_eq!( + range.next(), + Some(("a".into(), 8.into(), ExId::Id(6, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!(range.next(), None); + + let range = doc.range(ROOT, Prop::Map("a".into())..); + assert_eq!( + range.collect::>(), + vec![ + ("a".into(), 8.into(), ExId::Id(6, actor.clone(), 0)), + ("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0)), + ("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0)), + ("d".into(), 9.into(), ExId::Id(7, actor.clone(), 0)), + ] + ); + } + + #[test] + fn range_iter_seq() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let list = tx.set_object(ROOT, "list", ObjType::List).unwrap(); + tx.insert(&list, 0, 3).unwrap(); + tx.insert(&list, 1, 4).unwrap(); + tx.insert(&list, 2, 5).unwrap(); + tx.insert(&list, 3, 6).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.set(&list, 0, 7).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.set(&list, 0, 8).unwrap(); + tx.set(&list, 3, 9).unwrap(); + tx.commit(); + let actor = doc.get_actor(); + assert_eq!(doc.range(&list, ..).count(), 4); + + let mut range = doc.range(&list, Prop::Seq(1)..3.into()); + assert_eq!( + range.next(), + Some(( + format!("3@{}", actor), + 4.into(), + ExId::Id(3, actor.clone(), 0) + )) + ); + assert_eq!( + range.next(), + Some(( + format!("4@{}", actor), + 5.into(), + ExId::Id(4, actor.clone(), 0) + )) + ); + assert_eq!(range.next(), None); + + let mut range = doc.range(&list, Prop::Seq(1)..=3.into()); + assert_eq!( + range.next(), + Some(( + format!("3@{}", actor), + 4.into(), + ExId::Id(3, actor.clone(), 0) + )) + ); + assert_eq!( + range.next(), + Some(( + 
format!("4@{}", actor), + 5.into(), + ExId::Id(4, actor.clone(), 0) + )) + ); + assert_eq!( + range.next(), + Some(( + format!("5@{}", actor), + 9.into(), + ExId::Id(8, actor.clone(), 0) + )) + ); + assert_eq!(range.next(), None); + + let mut range = doc.range(&list, ..Prop::Seq(3)); + assert_eq!( + range.next(), + Some(( + format!("2@{}", actor), + 8.into(), + ExId::Id(7, actor.clone(), 0) + )) + ); + assert_eq!( + range.next(), + Some(( + format!("3@{}", actor), + 4.into(), + ExId::Id(3, actor.clone(), 0) + )) + ); + assert_eq!( + range.next(), + Some(( + format!("4@{}", actor), + 5.into(), + ExId::Id(4, actor.clone(), 0) + )) + ); + assert_eq!(range.next(), None); + + let range = doc.range(&list, ..); + assert_eq!( + range.collect::>(), + vec![ + ( + format!("2@{}", actor), + 8.into(), + ExId::Id(7, actor.clone(), 0) + ), + ( + format!("3@{}", actor), + 4.into(), + ExId::Id(3, actor.clone(), 0) + ), + ( + format!("4@{}", actor), + 5.into(), + ExId::Id(4, actor.clone(), 0) + ), + ( + format!("5@{}", actor), + 9.into(), + ExId::Id(8, actor.clone(), 0) + ), + ] + ); + } + #[test] fn rolling_back_transaction_has_no_effect() { let mut doc = Automerge::new(); diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 6b605f3b..28eb91e0 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -40,6 +40,7 @@ mod legacy; mod op_set; mod op_tree; mod query; +mod range; pub mod sync; pub mod transaction; mod types; @@ -58,6 +59,7 @@ pub use exid::ExId as ObjId; pub use keys::Keys; pub use keys_at::KeysAt; pub use legacy::Change as ExpandedChange; +pub use range::Range; pub use types::{ActorId, AssignPatch, ChangeHash, ObjType, OpType, Patch, Prop}; pub use value::{ScalarValue, Value}; diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 283a4866..a6bcab9e 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -3,10 +3,11 @@ use crate::indexed_cache::IndexedCache; use crate::op_tree::OpTree; use crate::query::{self, OpIdSearch, 
TreeQuery}; use crate::types::{ActorId, Key, ObjId, Op, OpId, OpType}; -use crate::ObjType; +use crate::{ObjType, Prop}; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::HashMap; +use std::ops::RangeBounds; pub(crate) type OpSet = OpSetInternal; @@ -67,6 +68,14 @@ impl OpSetInternal { } } + pub fn range>(&self, obj: ObjId, range: R) -> Option> { + if let Some((_typ, tree)) = self.trees.get(&obj) { + tree.range(range, &self.m) + } else { + None + } + } + pub fn search<'a, 'b: 'a, Q>(&'b self, obj: &ObjId, query: Q) -> Q where Q: TreeQuery<'a>, diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 5d08df38..a6d52ef8 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -2,6 +2,7 @@ use std::{ cmp::{min, Ordering}, fmt::Debug, mem, + ops::RangeBounds, }; pub(crate) use crate::op_set::OpSetMetadata; @@ -11,7 +12,7 @@ use crate::{ }; use crate::{ types::{ObjId, Op, OpId}, - ObjType, + ObjType, Prop, }; use std::collections::HashSet; @@ -69,6 +70,16 @@ impl OpTreeInternal { .map(|root| query::KeysAt::new(root, clock)) } + pub fn range<'a, R: RangeBounds>( + &'a self, + range: R, + meta: &'a OpSetMetadata, + ) -> Option> { + self.root_node + .as_ref() + .map(|node| query::Range::new(range, node, meta)) + } + pub fn search<'a, 'b: 'a, Q>(&'b self, mut query: Q, m: &OpSetMetadata) -> Q where Q: TreeQuery<'a>, diff --git a/automerge/src/query.rs b/automerge/src/query.rs index adb77566..815e1299 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -18,6 +18,7 @@ mod nth_at; mod opid; mod prop; mod prop_at; +mod range; mod seek_op; mod seek_op_with_patch; @@ -34,6 +35,7 @@ pub(crate) use nth_at::NthAt; pub(crate) use opid::OpIdSearch; pub(crate) use prop::Prop; pub(crate) use prop_at::PropAt; +pub(crate) use range::Range; pub(crate) use seek_op::SeekOp; pub(crate) use seek_op_with_patch::SeekOpWithPatch; diff --git a/automerge/src/query/keys.rs b/automerge/src/query/keys.rs index f9e25727..e3224704 
100644 --- a/automerge/src/query/keys.rs +++ b/automerge/src/query/keys.rs @@ -30,9 +30,9 @@ impl<'a> Iterator for Keys<'a> { for i in self.index..self.index_back { let op = self.root_child.get(i)?; self.index += 1; - if Some(op.key) != self.last_key && op.visible() { - self.last_key = Some(op.key); - return Some(op.key); + if Some(op.elemid_or_key()) != self.last_key && op.visible() { + self.last_key = Some(op.elemid_or_key()); + return Some(op.elemid_or_key()); } } None @@ -43,10 +43,11 @@ impl<'a> DoubleEndedIterator for Keys<'a> { fn next_back(&mut self) -> Option { for i in (self.index..self.index_back).rev() { let op = self.root_child.get(i)?; + println!("{} {:?}", i, op); self.index_back -= 1; - if Some(op.key) != self.last_key_back && op.visible() { - self.last_key_back = Some(op.key); - return Some(op.key); + if Some(op.elemid_or_key()) != self.last_key_back && op.visible() { + self.last_key_back = Some(op.elemid_or_key()); + return Some(op.elemid_or_key()); } } None diff --git a/automerge/src/query/keys_at.rs b/automerge/src/query/keys_at.rs index 2fe04747..c55282b2 100644 --- a/automerge/src/query/keys_at.rs +++ b/automerge/src/query/keys_at.rs @@ -36,9 +36,9 @@ impl<'a> Iterator for KeysAt<'a> { let op = self.root_child.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; - if Some(op.key) != self.last_key && visible { - self.last_key = Some(op.key); - return Some(op.key); + if Some(op.elemid_or_key()) != self.last_key && visible { + self.last_key = Some(op.elemid_or_key()); + return Some(op.elemid_or_key()); } } None @@ -51,9 +51,9 @@ impl<'a> DoubleEndedIterator for KeysAt<'a> { let op = self.root_child.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index_back -= 1; - if Some(op.key) != self.last_key_back && visible { - self.last_key_back = Some(op.key); - return Some(op.key); + if Some(op.elemid_or_key()) != self.last_key_back && visible { + self.last_key_back = Some(op.elemid_or_key()); + 
return Some(op.elemid_or_key()); } } None diff --git a/automerge/src/query/range.rs b/automerge/src/query/range.rs new file mode 100644 index 00000000..f3d0835d --- /dev/null +++ b/automerge/src/query/range.rs @@ -0,0 +1,56 @@ +use crate::op_tree::{OpSetMetadata, OpTreeNode}; +use crate::types::{Key, OpId}; +use crate::{Prop, Value}; +use std::fmt::Debug; +use std::ops::RangeBounds; + +#[derive(Debug)] +pub(crate) struct Range<'a, R: RangeBounds> { + range: R, + index: usize, + /// number of visible elements seen. + seen: usize, + last_key: Option, + root_child: &'a OpTreeNode, + meta: &'a OpSetMetadata, +} + +impl<'a, R: RangeBounds> Range<'a, R> { + pub(crate) fn new(range: R, root_child: &'a OpTreeNode, meta: &'a OpSetMetadata) -> Self { + Self { + range, + index: 0, + seen: 0, + last_key: None, + root_child, + meta, + } + } +} + +impl<'a, 'm, R: RangeBounds> Iterator for Range<'a, R> { + type Item = (Key, Value, OpId); + + fn next(&mut self) -> Option { + for i in self.index..self.root_child.len() { + let op = self.root_child.get(i)?; + println!("{} {:?}", self.index, op); + self.index += 1; + if Some(op.elemid_or_key()) != self.last_key && op.visible() { + self.last_key = Some(op.elemid_or_key()); + let contains = match op.key { + Key::Map(m) => self + .range + .contains(&Prop::Map(self.meta.props.get(m).clone())), + Key::Seq(_) => self.range.contains(&Prop::Seq(self.seen)), + }; + println!("{} {}", self.seen, contains); + self.seen += 1; + if contains { + return Some((op.elemid_or_key(), op.value(), op.id)); + } + } + } + None + } +} diff --git a/automerge/src/range.rs b/automerge/src/range.rs new file mode 100644 index 00000000..94322794 --- /dev/null +++ b/automerge/src/range.rs @@ -0,0 +1,26 @@ +use crate::{exid::ExId, Value}; +use std::ops::RangeBounds; + +use crate::{query, Automerge, Prop}; + +pub struct Range<'a, 'k, R: RangeBounds> { + range: Option>, + doc: &'a Automerge, +} + +impl<'a, 'k, 'm, R: RangeBounds> Range<'a, 'k, R> { + pub(crate) fn 
new(doc: &'a Automerge, range: Option>) -> Self { + Self { range, doc } + } +} + +impl<'a, 'k, 'm, R: RangeBounds> Iterator for Range<'a, 'k, R> { + type Item = (String, Value, ExId); + + fn next(&mut self) -> Option { + self.range + .as_mut()? + .next() + .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + } +} diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 2221a9eb..ee3b2e1b 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -449,10 +449,14 @@ impl Op { } pub fn elemid(&self) -> Option { + self.elemid_or_key().elemid() + } + + pub fn elemid_or_key(&self) -> Key { if self.insert { - Some(ElemId(self.id)) + Key::Seq(ElemId(self.id)) } else { - self.key.elemid() + self.key } } From 1ca49cfa9b7f8455f6bb316972b97664efaca520 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 10:20:26 +0100 Subject: [PATCH 262/730] Add range to transactable and rename value to get Also changes values to get_conflicts for more clarity on what it does and opening up the name for iterating over values. 
--- automerge-cli/src/export.rs | 4 +- automerge-wasm/src/interop.rs | 4 +- automerge-wasm/src/lib.rs | 12 +- automerge/src/autocommit.rs | 24 ++-- automerge/src/automerge.rs | 122 +++++++++--------- automerge/src/transaction/inner.rs | 2 +- .../src/transaction/manual_transaction.rs | 24 ++-- automerge/src/transaction/transactable.rs | 14 +- automerge/tests/helpers/mod.rs | 4 +- automerge/tests/test.rs | 15 ++- 10 files changed, 122 insertions(+), 103 deletions(-) diff --git a/automerge-cli/src/export.rs b/automerge-cli/src/export.rs index 7b0be98e..937ba794 100644 --- a/automerge-cli/src/export.rs +++ b/automerge-cli/src/export.rs @@ -5,7 +5,7 @@ pub(crate) fn map_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::V let keys = doc.keys(obj); let mut map = serde_json::Map::new(); for k in keys { - let val = doc.value(obj, &k); + let val = doc.get(obj, &k); match val { Ok(Some((am::Value::Object(o), exid))) if o == am::ObjType::Map || o == am::ObjType::Table => @@ -28,7 +28,7 @@ fn list_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value { let len = doc.length(obj); let mut array = Vec::new(); for i in 0..len { - let val = doc.value(obj, i as usize); + let val = doc.get(obj, i as usize); match val { Ok(Some((am::Value::Object(o), exid))) if o == am::ObjType::Map || o == am::ObjType::Table => diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index fc895e6f..5e6c2e65 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -332,7 +332,7 @@ pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { let keys = doc.keys(obj); let map = Object::new(); for k in keys { - let val = doc.value(obj, &k); + let val = doc.get(obj, &k); match val { Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Map || o == am::ObjType::Table => @@ -358,7 +358,7 @@ pub(crate) fn list_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { let len = doc.length(obj); let array = Array::new(); for i in 
0..len { - let val = doc.value(obj, i as usize); + let val = doc.get(obj, i as usize); match val { Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Map || o == am::ObjType::Table => diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 2a153c37..300f6b74 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -287,9 +287,9 @@ impl Automerge { let heads = get_heads(heads); if let Ok(prop) = prop { let value = if let Some(h) = heads { - self.0.value_at(&obj, prop, &h)? + self.0.get_at(&obj, prop, &h)? } else { - self.0.value(&obj, prop)? + self.0.get(&obj, prop)? }; match value { Some((Value::Object(obj_type), obj_id)) => { @@ -320,9 +320,9 @@ impl Automerge { let prop = to_prop(arg); if let Ok(prop) = prop { let values = if let Some(heads) = get_heads(heads) { - self.0.values_at(&obj, prop, &heads) + self.0.get_conflicts_at(&obj, prop, &heads) } else { - self.0.values(&obj, prop) + self.0.get_conflicts(&obj, prop) } .map_err(to_js_err)?; for value in values { @@ -573,9 +573,9 @@ impl Automerge { break; } let val = if is_map { - self.0.value(obj, prop)? + self.0.get(obj, prop)? } else { - self.0.value(obj, am::Prop::Seq(prop.parse().unwrap()))? + self.0.get(obj, am::Prop::Seq(prop.parse().unwrap()))? 
}; match val { Some((am::Value::Object(am::ObjType::Map), id)) => { diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index c9adf118..3955bcfa 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -1,7 +1,9 @@ +use std::ops::RangeBounds; + use crate::exid::ExId; use crate::transaction::{CommitOptions, Transactable}; use crate::types::Patch; -use crate::{sync, Keys, KeysAt, ObjType, ScalarValue}; +use crate::{sync, Keys, KeysAt, ObjType, Range, ScalarValue}; use crate::{ transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, @@ -231,6 +233,10 @@ impl Transactable for AutoCommit { self.doc.keys_at(obj, heads) } + fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range { + self.doc.range(obj, range) + } + fn length>(&self, obj: O) -> usize { self.doc.length(obj) } @@ -355,38 +361,38 @@ impl Transactable for AutoCommit { // TODO - I need to return these OpId's here **only** to get // the legacy conflicts format of { [opid]: value } // Something better? 
- fn value, P: Into>( + fn get, P: Into>( &self, obj: O, prop: P, ) -> Result, AutomergeError> { - self.doc.value(obj, prop) + self.doc.get(obj, prop) } - fn value_at, P: Into>( + fn get_at, P: Into>( &self, obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { - self.doc.value_at(obj, prop, heads) + self.doc.get_at(obj, prop, heads) } - fn values, P: Into>( + fn get_conflicts, P: Into>( &self, obj: O, prop: P, ) -> Result, AutomergeError> { - self.doc.values(obj, prop) + self.doc.get_conflicts(obj, prop) } - fn values_at, P: Into>( + fn get_conflicts_at, P: Into>( &self, obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { - self.doc.values_at(obj, prop, heads) + self.doc.get_conflicts_at(obj, prop, heads) } fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 1711b908..4fccbf2e 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -477,29 +477,29 @@ impl Automerge { /// /// Returns both the value and the id of the operation that created it, useful for handling /// conflicts and serves as the object id if the value is an object. - pub fn value, P: Into>( + pub fn get, P: Into>( &self, obj: O, prop: P, ) -> Result, AutomergeError> { - Ok(self.values(obj, prop.into())?.last().cloned()) + Ok(self.get_conflicts(obj, prop.into())?.last().cloned()) } - /// Historical version of [`value`](Self::value). - pub fn value_at, P: Into>( + /// Historical version of [`get`](Self::get). + pub fn get_at, P: Into>( &self, obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { - Ok(self.values_at(obj, prop, heads)?.last().cloned()) + Ok(self.get_conflicts_at(obj, prop, heads)?.last().cloned()) } - /// Get all values out of the document at this prop that conflict. + /// Get all conflicting values out of the document at this prop that conflict. 
/// /// Returns both the value and the id of the operation that created it, useful for handling /// conflicts and serves as the object id if the value is an object. - pub fn values, P: Into>( + pub fn get_conflicts, P: Into>( &self, obj: O, prop: P, @@ -530,8 +530,8 @@ impl Automerge { Ok(result) } - /// Historical version of [`values`](Self::values). - pub fn values_at, P: Into>( + /// Historical version of [`get_conflicts`](Self::get_conflicts). + pub fn get_conflicts_at, P: Into>( &self, obj: O, prop: P, @@ -1086,7 +1086,7 @@ mod tests { doc.set_actor(ActorId::random()); let mut tx = doc.transaction(); tx.put(ROOT, "hello", "world")?; - tx.value(ROOT, "hello")?; + tx.get(ROOT, "hello")?; tx.commit(); Ok(()) } @@ -1110,8 +1110,8 @@ mod tests { tx.put_object(ROOT, "b", ObjType::Map)?; assert_eq!(tx.pending_ops(), 4); - let map = tx.value(ROOT, "b").unwrap().unwrap().1; - assert_eq!(tx.value(&map, "a")?, None); + let map = tx.get(ROOT, "b").unwrap().unwrap().1; + assert_eq!(tx.get(&map, "a")?, None); tx.commit(); Ok(()) @@ -1124,15 +1124,15 @@ mod tests { let mut tx = doc.transaction(); let list_id = tx.put_object(ROOT, "items", ObjType::List)?; tx.put(ROOT, "zzz", "zzzval")?; - assert!(tx.value(ROOT, "items")?.unwrap().1 == list_id); + assert!(tx.get(ROOT, "items")?.unwrap().1 == list_id); tx.insert(&list_id, 0, "a")?; tx.insert(&list_id, 0, "b")?; tx.insert(&list_id, 2, "c")?; tx.insert(&list_id, 1, "d")?; - assert!(tx.value(&list_id, 0)?.unwrap().0 == "b".into()); - assert!(tx.value(&list_id, 1)?.unwrap().0 == "d".into()); - assert!(tx.value(&list_id, 2)?.unwrap().0 == "a".into()); - assert!(tx.value(&list_id, 3)?.unwrap().0 == "c".into()); + assert!(tx.get(&list_id, 0)?.unwrap().0 == "b".into()); + assert!(tx.get(&list_id, 1)?.unwrap().0 == "d".into()); + assert!(tx.get(&list_id, 2)?.unwrap().0 == "a".into()); + assert!(tx.get(&list_id, 3)?.unwrap().0 == "c".into()); assert!(tx.length(&list_id) == 4); tx.commit(); doc.save(); @@ -1145,9 +1145,9 @@ mod tests { 
doc.set_actor(ActorId::random()); let mut tx = doc.transaction(); tx.put(ROOT, "xxx", "xxx")?; - assert!(!tx.values(ROOT, "xxx")?.is_empty()); + assert!(!tx.get(ROOT, "xxx")?.is_empty()); tx.delete(ROOT, "xxx")?; - assert!(tx.values(ROOT, "xxx")?.is_empty()); + assert!(tx.get(ROOT, "xxx")?.is_empty()); tx.commit(); Ok(()) } @@ -1157,11 +1157,11 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); tx.put(ROOT, "counter", ScalarValue::counter(10))?; - assert!(tx.value(ROOT, "counter")?.unwrap().0 == Value::counter(10)); + assert!(tx.get(ROOT, "counter")?.unwrap().0 == Value::counter(10)); tx.increment(ROOT, "counter", 10)?; - assert!(tx.value(ROOT, "counter")?.unwrap().0 == Value::counter(20)); + assert!(tx.get(ROOT, "counter")?.unwrap().0 == Value::counter(20)); tx.increment(ROOT, "counter", -5)?; - assert!(tx.value(ROOT, "counter")?.unwrap().0 == Value::counter(15)); + assert!(tx.get(ROOT, "counter")?.unwrap().0 == Value::counter(15)); tx.commit(); Ok(()) } @@ -1202,7 +1202,7 @@ mod tests { let mut doc_a = Automerge::load(&save_a)?; let mut doc_b = Automerge::load(&save_b)?; - assert!(doc_a.values(ROOT, "baz")? == doc_b.values(ROOT, "baz")?); + assert!(doc_a.get_conflicts(ROOT, "baz")? == doc_b.get_conflicts(ROOT, "baz")?); assert!(doc_a.save() == doc_b.save()); @@ -1264,30 +1264,30 @@ mod tests { let heads5 = doc.get_heads(); assert!(doc.keys_at(ROOT, &heads1).collect_vec() == vec!["prop1".to_owned()]); assert_eq!(doc.length_at(ROOT, &heads1), 1); - assert!(doc.value_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); - assert!(doc.value_at(ROOT, "prop2", &heads1)? == None); - assert!(doc.value_at(ROOT, "prop3", &heads1)? == None); + assert!(doc.get_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); + assert!(doc.get_at(ROOT, "prop2", &heads1)? == None); + assert!(doc.get_at(ROOT, "prop3", &heads1)? 
== None); assert!(doc.keys_at(ROOT, &heads2).collect_vec() == vec!["prop1".to_owned()]); assert_eq!(doc.length_at(ROOT, &heads2), 1); - assert!(doc.value_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); - assert!(doc.value_at(ROOT, "prop2", &heads2)? == None); - assert!(doc.value_at(ROOT, "prop3", &heads2)? == None); + assert!(doc.get_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); + assert!(doc.get_at(ROOT, "prop2", &heads2)? == None); + assert!(doc.get_at(ROOT, "prop3", &heads2)? == None); assert!( doc.keys_at(ROOT, &heads3).collect_vec() == vec!["prop1".to_owned(), "prop2".to_owned()] ); assert_eq!(doc.length_at(ROOT, &heads3), 2); - assert!(doc.value_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); - assert!(doc.value_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(ROOT, "prop3", &heads3)? == None); + assert!(doc.get_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); + assert!(doc.get_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); + assert!(doc.get_at(ROOT, "prop3", &heads3)? == None); assert!(doc.keys_at(ROOT, &heads4).collect_vec() == vec!["prop2".to_owned()]); assert_eq!(doc.length_at(ROOT, &heads4), 1); - assert!(doc.value_at(ROOT, "prop1", &heads4)? == None); - assert!(doc.value_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(ROOT, "prop3", &heads4)? == None); + assert!(doc.get_at(ROOT, "prop1", &heads4)? == None); + assert!(doc.get_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); + assert!(doc.get_at(ROOT, "prop3", &heads4)? == None); assert!( doc.keys_at(ROOT, &heads5).collect_vec() @@ -1295,15 +1295,15 @@ mod tests { ); assert_eq!(doc.length_at(ROOT, &heads5), 2); assert_eq!(doc.length(ROOT), 2); - assert!(doc.value_at(ROOT, "prop1", &heads5)? 
== None); - assert!(doc.value_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); + assert!(doc.get_at(ROOT, "prop1", &heads5)? == None); + assert!(doc.get_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); + assert!(doc.get_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); assert_eq!(doc.keys_at(ROOT, &[]).count(), 0); assert_eq!(doc.length_at(ROOT, &[]), 0); - assert!(doc.value_at(ROOT, "prop1", &[])? == None); - assert!(doc.value_at(ROOT, "prop2", &[])? == None); - assert!(doc.value_at(ROOT, "prop3", &[])? == None); + assert!(doc.get_at(ROOT, "prop1", &[])? == None); + assert!(doc.get_at(ROOT, "prop2", &[])? == None); + assert!(doc.get_at(ROOT, "prop3", &[])? == None); Ok(()) } @@ -1345,29 +1345,29 @@ mod tests { let heads6 = doc.get_heads(); assert!(doc.length_at(&list, &heads1) == 0); - assert!(doc.value_at(&list, 0, &heads1)?.is_none()); + assert!(doc.get_at(&list, 0, &heads1)?.is_none()); assert!(doc.length_at(&list, &heads2) == 1); - assert!(doc.value_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10)); + assert!(doc.get_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10)); assert!(doc.length_at(&list, &heads3) == 2); //doc.dump(); - log!("{:?}", doc.value_at(&list, 0, &heads3)?.unwrap().0); - assert!(doc.value_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30)); - assert!(doc.value_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20)); + log!("{:?}", doc.get_at(&list, 0, &heads3)?.unwrap().0); + assert!(doc.get_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30)); + assert!(doc.get_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20)); assert!(doc.length_at(&list, &heads4) == 3); - assert!(doc.value_at(&list, 0, &heads4)?.unwrap().0 == Value::int(30)); - assert!(doc.value_at(&list, 1, &heads4)?.unwrap().0 == Value::int(50)); - assert!(doc.value_at(&list, 2, &heads4)?.unwrap().0 == Value::int(40)); + assert!(doc.get_at(&list, 0, 
&heads4)?.unwrap().0 == Value::int(30)); + assert!(doc.get_at(&list, 1, &heads4)?.unwrap().0 == Value::int(50)); + assert!(doc.get_at(&list, 2, &heads4)?.unwrap().0 == Value::int(40)); assert!(doc.length_at(&list, &heads5) == 2); - assert!(doc.value_at(&list, 0, &heads5)?.unwrap().0 == Value::int(30)); - assert!(doc.value_at(&list, 1, &heads5)?.unwrap().0 == Value::int(50)); + assert!(doc.get_at(&list, 0, &heads5)?.unwrap().0 == Value::int(30)); + assert!(doc.get_at(&list, 1, &heads5)?.unwrap().0 == Value::int(50)); assert!(doc.length_at(&list, &heads6) == 1); assert!(doc.length(&list) == 1); - assert!(doc.value_at(&list, 0, &heads6)?.unwrap().0 == Value::int(50)); + assert!(doc.get_at(&list, 0, &heads6)?.unwrap().0 == Value::int(50)); Ok(()) } @@ -1700,22 +1700,22 @@ mod tests { tx.commit(); // we can get the new map by traversing the tree - let map = doc.value(&ROOT, "a").unwrap().unwrap().1; - assert_eq!(doc.value(&map, "b").unwrap(), None); + let map = doc.get(&ROOT, "a").unwrap().unwrap().1; + assert_eq!(doc.get(&map, "b").unwrap(), None); // and get values from it assert_eq!( - doc.value(&map, "c").unwrap().map(|s| s.0), + doc.get(&map, "c").unwrap().map(|s| s.0), Some(ScalarValue::Int(2).into()) ); // but we can still access the old one if we know the ID! - assert_eq!(doc.value(&map1, "b").unwrap().unwrap().0, Value::int(1)); + assert_eq!(doc.get(&map1, "b").unwrap().unwrap().0, Value::int(1)); // and even set new things in it! 
let mut tx = doc.transaction(); tx.put(&map1, "c", 3).unwrap(); tx.commit(); - assert_eq!(doc.value(&map1, "c").unwrap().unwrap().0, Value::int(3)); + assert_eq!(doc.get(&map1, "c").unwrap().unwrap().0, Value::int(3)); } #[test] @@ -1948,21 +1948,21 @@ mod tests { assert_eq!(doc1.length(&list), 1); assert_eq!(doc2.length(&list), 1); assert_eq!( - doc1.values(&list, 0).unwrap(), + doc1.get_conflicts(&list, 0).unwrap(), vec![ (max.into(), ExId::Id(max + 2, actor1.clone(), 0)), (max.into(), ExId::Id(max + 2, actor2.clone(), 1)) ] ); assert_eq!( - doc2.values(&list, 0).unwrap(), + doc2.get_conflicts(&list, 0).unwrap(), vec![ (max.into(), ExId::Id(max + 2, actor1, 0)), (max.into(), ExId::Id(max + 2, actor2, 1)) ] ); - assert!(doc1.value(&list, 1).unwrap().is_none()); - assert!(doc2.value(&list, 1).unwrap().is_none()); + assert!(doc1.get(&list, 1).unwrap().is_none()); + assert!(doc2.get(&list, 1).unwrap().is_none()); } #[test] diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 95b1e6c4..cb0c8c14 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -367,6 +367,6 @@ mod tests { let a = tx.put_object(ROOT, "a", ObjType::Map).unwrap(); tx.put(&a, "b", 1).unwrap(); - assert!(tx.value(&a, "b").unwrap().is_some()); + assert!(tx.get(&a, "b").unwrap().is_some()); } } diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index c2256368..491fc4f3 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -1,5 +1,7 @@ +use std::ops::RangeBounds; + use crate::exid::ExId; -use crate::{Automerge, ChangeHash, KeysAt, ObjType, Prop, ScalarValue, Value}; +use crate::{Automerge, ChangeHash, KeysAt, ObjType, Prop, Range, ScalarValue, Value}; use crate::{AutomergeError, Keys}; use super::{CommitOptions, Transactable, TransactionInner}; @@ -179,6 +181,10 @@ impl<'a> Transactable for Transaction<'a> { 
self.doc.keys_at(obj, heads) } + fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range { + self.doc.range(obj, range) + } + fn length>(&self, obj: O) -> usize { self.doc.length(obj) } @@ -203,38 +209,38 @@ impl<'a> Transactable for Transaction<'a> { self.doc.text_at(obj, heads) } - fn value, P: Into>( + fn get, P: Into>( &self, obj: O, prop: P, ) -> Result, AutomergeError> { - self.doc.value(obj, prop) + self.doc.get(obj, prop) } - fn value_at, P: Into>( + fn get_at, P: Into>( &self, obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { - self.doc.value_at(obj, prop, heads) + self.doc.get_at(obj, prop, heads) } - fn values, P: Into>( + fn get_conflicts, P: Into>( &self, obj: O, prop: P, ) -> Result, AutomergeError> { - self.doc.values(obj, prop) + self.doc.get_conflicts(obj, prop) } - fn values_at, P: Into>( + fn get_conflicts_at, P: Into>( &self, obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { - self.doc.values_at(obj, prop, heads) + self.doc.get_conflicts_at(obj, prop, heads) } fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index dd966939..3e820ee0 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -1,5 +1,7 @@ +use std::ops::RangeBounds; + use crate::exid::ExId; -use crate::{AutomergeError, ChangeHash, Keys, KeysAt, ObjType, Prop, ScalarValue, Value}; +use crate::{AutomergeError, ChangeHash, Keys, KeysAt, ObjType, Prop, Range, ScalarValue, Value}; /// A way of mutating a document within a single change. pub trait Transactable { @@ -97,6 +99,8 @@ pub trait Transactable { /// Get the keys of the given object at a point in history. fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt; + fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range; + /// Get the length of the given object. 
fn length>(&self, obj: O) -> usize; @@ -117,27 +121,27 @@ pub trait Transactable { ) -> Result; /// Get the value at this prop in the object. - fn value, P: Into>( + fn get, P: Into>( &self, obj: O, prop: P, ) -> Result, AutomergeError>; /// Get the value at this prop in the object at a point in history. - fn value_at, P: Into>( + fn get_at, P: Into>( &self, obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError>; - fn values, P: Into>( + fn get_conflicts, P: Into>( &self, obj: O, prop: P, ) -> Result, AutomergeError>; - fn values_at, P: Into>( + fn get_conflicts_at, P: Into>( &self, obj: O, prop: P, diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index 44b691a0..2fb5d9fb 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -316,7 +316,7 @@ pub fn realize_prop>( obj_id: &automerge::ObjId, prop: P, ) -> RealizedObject { - let (val, obj_id) = doc.value(obj_id, prop).unwrap().unwrap(); + let (val, obj_id) = doc.get(obj_id, prop).unwrap().unwrap(); match val { automerge::Value::Object(obj_type) => realize_obj(doc, &obj_id, obj_type), automerge::Value::Scalar(v) => RealizedObject::Value(OrdScalarValue::from(v.into_owned())), @@ -353,7 +353,7 @@ fn realize_values>( key: K, ) -> BTreeSet { let mut values = BTreeSet::new(); - for (value, objid) in doc.values(obj_id, key).unwrap() { + for (value, objid) in doc.get_conflicts(obj_id, key).unwrap() { let realized = match value { automerge::Value::Object(objtype) => realize_obj(doc, &objid, objtype), automerge::Value::Scalar(v) => { diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 5178c2c0..8b83e55c 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -29,7 +29,10 @@ fn repeated_map_assignment_which_resolves_conflict_not_ignored() { doc2.put(&automerge::ROOT, "field", 456).unwrap(); doc1.put(&automerge::ROOT, "field", 789).unwrap(); doc1.merge(&mut doc2).unwrap(); - assert_eq!(doc1.values(&automerge::ROOT, 
"field").unwrap().len(), 2); + assert_eq!( + doc1.get_conflicts(&automerge::ROOT, "field").unwrap().len(), + 2 + ); doc1.put(&automerge::ROOT, "field", 123).unwrap(); assert_doc!( @@ -94,7 +97,7 @@ fn merge_concurrent_map_prop_updates() { doc2.put(&automerge::ROOT, "hello", "world").unwrap(); doc1.merge(&mut doc2).unwrap(); assert_eq!( - doc1.value(&automerge::ROOT, "foo").unwrap().unwrap().0, + doc1.get(&automerge::ROOT, "foo").unwrap().unwrap().0, "bar".into() ); assert_doc!( @@ -855,13 +858,13 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc1.merge(&mut doc2).unwrap(); doc1.merge(&mut doc3).unwrap(); - let values = doc1.values(&list, 1)?; + let values = doc1.get_conflicts(&list, 1)?; assert_eq!(values.len(), 3); assert_eq!(&values[0].0, &Value::counter(1)); assert_eq!(&values[1].0, &Value::counter(10)); assert_eq!(&values[2].0, &Value::counter(100)); - let values = doc1.values(&list, 2)?; + let values = doc1.get_conflicts(&list, 2)?; assert_eq!(values.len(), 3); assert_eq!(&values[0].0, &Value::counter(1)); assert_eq!(&values[1].0, &Value::counter(10)); @@ -870,13 +873,13 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc1.increment(&list, 1, 1)?; doc1.increment(&list, 2, 1)?; - let values = doc1.values(&list, 1)?; + let values = doc1.get_conflicts(&list, 1)?; assert_eq!(values.len(), 3); assert_eq!(&values[0].0, &Value::counter(2)); assert_eq!(&values[1].0, &Value::counter(11)); assert_eq!(&values[2].0, &Value::counter(101)); - let values = doc1.values(&list, 2)?; + let values = doc1.get_conflicts(&list, 2)?; assert_eq!(values.len(), 2); assert_eq!(&values[0].0, &Value::counter(2)); assert_eq!(&values[1].0, &Value::counter(11)); From decd03a5d7c4c4f3c8353d3b8c219909061e5474 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 10:27:52 +0100 Subject: [PATCH 263/730] Add values iterator --- automerge/src/autocommit.rs | 6 ++++- automerge/src/automerge.rs | 17 ++++++++++-- automerge/src/lib.rs | 2 ++ 
.../src/transaction/manual_transaction.rs | 6 ++++- automerge/src/transaction/transactable.rs | 2 ++ automerge/src/values.rs | 26 +++++++++++++++++++ 6 files changed, 55 insertions(+), 4 deletions(-) create mode 100644 automerge/src/values.rs diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 3955bcfa..09ff65c6 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -3,7 +3,7 @@ use std::ops::RangeBounds; use crate::exid::ExId; use crate::transaction::{CommitOptions, Transactable}; use crate::types::Patch; -use crate::{sync, Keys, KeysAt, ObjType, Range, ScalarValue}; +use crate::{sync, Keys, KeysAt, ObjType, Range, ScalarValue, Values}; use crate::{ transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, @@ -237,6 +237,10 @@ impl Transactable for AutoCommit { self.doc.range(obj, range) } + fn values>(&self, obj: O) -> Values { + self.doc.values(obj) + } + fn length>(&self, obj: O) -> usize { self.doc.length(obj) } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 4fccbf2e..b25dcc34 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -12,9 +12,9 @@ use crate::types::{ ActorId, AssignPatch, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, ScalarValue, Value, }; -use crate::KeysAt; use crate::{legacy, query, types, ObjType}; use crate::{AutomergeError, Change, Prop}; +use crate::{KeysAt, Values}; use serde::Serialize; use std::cmp::Ordering; @@ -355,7 +355,7 @@ impl Automerge { } } - /// Iterate over the keys and values of the object `obj`. + /// Iterate over the keys and values of the object `obj` in the given range. /// /// For a map the keys are the keys of the map. /// For a list the keys are the element ids (opids) encoded as strings. @@ -368,6 +368,19 @@ impl Automerge { } } + /// Iterate over all the keys and values of the object `obj`. 
+ /// + /// For a map the keys are the keys of the map. + /// For a list the keys are the element ids (opids) encoded as strings. + pub fn values>(&self, obj: O) -> Values { + if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + let iter_range = self.ops.range(obj, ..); + Values::new(self, iter_range) + } else { + Values::new(self, None) + } + } + /// Get the length of the given object. pub fn length>(&self, obj: O) -> usize { if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 28eb91e0..ba5301f5 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -45,6 +45,7 @@ pub mod sync; pub mod transaction; mod types; mod value; +mod values; #[cfg(feature = "optree-visualisation")] mod visualisation; @@ -62,5 +63,6 @@ pub use legacy::Change as ExpandedChange; pub use range::Range; pub use types::{ActorId, AssignPatch, ChangeHash, ObjType, OpType, Patch, Prop}; pub use value::{ScalarValue, Value}; +pub use values::Values; pub const ROOT: ObjId = ObjId::Root; diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 491fc4f3..11f7fdca 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -1,7 +1,7 @@ use std::ops::RangeBounds; use crate::exid::ExId; -use crate::{Automerge, ChangeHash, KeysAt, ObjType, Prop, Range, ScalarValue, Value}; +use crate::{Automerge, ChangeHash, KeysAt, ObjType, Prop, Range, ScalarValue, Value, Values}; use crate::{AutomergeError, Keys}; use super::{CommitOptions, Transactable, TransactionInner}; @@ -185,6 +185,10 @@ impl<'a> Transactable for Transaction<'a> { self.doc.range(obj, range) } + fn values>(&self, obj: O) -> Values { + self.doc.values(obj) + } + fn length>(&self, obj: O) -> usize { self.doc.length(obj) } diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 3e820ee0..0c0e2abc 100644 --- 
a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -101,6 +101,8 @@ pub trait Transactable { fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range; + fn values>(&self, obj: O) -> Values; + /// Get the length of the given object. fn length>(&self, obj: O) -> usize; diff --git a/automerge/src/values.rs b/automerge/src/values.rs new file mode 100644 index 00000000..26be890e --- /dev/null +++ b/automerge/src/values.rs @@ -0,0 +1,26 @@ +use crate::{exid::ExId, Value}; +use std::ops::RangeFull; + +use crate::{query, Automerge}; + +pub struct Values<'a, 'k> { + range: Option>, + doc: &'a Automerge, +} + +impl<'a, 'k> Values<'a, 'k> { + pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { + Self { range, doc } + } +} + +impl<'a, 'k> Iterator for Values<'a, 'k> { + type Item = (String, Value, ExId); + + fn next(&mut self) -> Option { + self.range + .as_mut()? + .next() + .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + } +} From baa56b0b57adc60626e3368b291eee8b13875151 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 10:35:38 +0100 Subject: [PATCH 264/730] Add range_at --- automerge/src/automerge.rs | 18 ++++++++- automerge/src/lib.rs | 2 + automerge/src/op_set.rs | 13 +++++++ automerge/src/op_tree.rs | 11 ++++++ automerge/src/query.rs | 2 + automerge/src/query/range_at.rs | 68 +++++++++++++++++++++++++++++++++ automerge/src/range_at.rs | 26 +++++++++++++ 7 files changed, 139 insertions(+), 1 deletion(-) create mode 100644 automerge/src/query/range_at.rs create mode 100644 automerge/src/range_at.rs diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index b25dcc34..af8fdcf0 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -12,7 +12,7 @@ use crate::types::{ ActorId, AssignPatch, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, ScalarValue, Value, }; -use crate::{legacy, query, types, 
ObjType}; +use crate::{legacy, query, types, ObjType, RangeAt}; use crate::{AutomergeError, Change, Prop}; use crate::{KeysAt, Values}; use serde::Serialize; @@ -368,6 +368,22 @@ impl Automerge { } } + /// Historical version of [`range`](Self::range). + pub fn range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> RangeAt { + if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + let clock = self.clock_at(heads); + let iter_range = self.ops.range_at(obj, range, clock); + RangeAt::new(self, iter_range) + } else { + RangeAt::new(self, None) + } + } + /// Iterate over all the keys and values of the object `obj`. /// /// For a map the keys are the keys of the map. diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index ba5301f5..fade0787 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -41,6 +41,7 @@ mod op_set; mod op_tree; mod query; mod range; +mod range_at; pub mod sync; pub mod transaction; mod types; @@ -61,6 +62,7 @@ pub use keys::Keys; pub use keys_at::KeysAt; pub use legacy::Change as ExpandedChange; pub use range::Range; +pub use range_at::RangeAt; pub use types::{ActorId, AssignPatch, ChangeHash, ObjType, OpType, Patch, Prop}; pub use value::{ScalarValue, Value}; pub use values::Values; diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index a6bcab9e..a18453da 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -76,6 +76,19 @@ impl OpSetInternal { } } + pub fn range_at>( + &self, + obj: ObjId, + range: R, + clock: Clock, + ) -> Option> { + if let Some((_typ, tree)) = self.trees.get(&obj) { + tree.range_at(range, &self.m, clock) + } else { + None + } + } + pub fn search<'a, 'b: 'a, Q>(&'b self, obj: &ObjId, query: Q) -> Q where Q: TreeQuery<'a>, diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index a6d52ef8..2113246a 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -80,6 +80,17 @@ impl OpTreeInternal { .map(|node| 
query::Range::new(range, node, meta)) } + pub fn range_at<'a, R: RangeBounds>( + &'a self, + range: R, + meta: &'a OpSetMetadata, + clock: Clock, + ) -> Option> { + self.root_node + .as_ref() + .map(|node| query::RangeAt::new(range, node, meta, clock)) + } + pub fn search<'a, 'b: 'a, Q>(&'b self, mut query: Q, m: &OpSetMetadata) -> Q where Q: TreeQuery<'a>, diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 815e1299..7ee0f86b 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -19,6 +19,7 @@ mod opid; mod prop; mod prop_at; mod range; +mod range_at; mod seek_op; mod seek_op_with_patch; @@ -36,6 +37,7 @@ pub(crate) use opid::OpIdSearch; pub(crate) use prop::Prop; pub(crate) use prop_at::PropAt; pub(crate) use range::Range; +pub(crate) use range_at::RangeAt; pub(crate) use seek_op::SeekOp; pub(crate) use seek_op_with_patch::SeekOpWithPatch; diff --git a/automerge/src/query/range_at.rs b/automerge/src/query/range_at.rs new file mode 100644 index 00000000..6e9b17dd --- /dev/null +++ b/automerge/src/query/range_at.rs @@ -0,0 +1,68 @@ +use crate::clock::Clock; +use crate::op_tree::{OpSetMetadata, OpTreeNode}; +use crate::types::{Key, OpId}; +use crate::{Prop, Value}; +use std::fmt::Debug; +use std::ops::RangeBounds; + +use super::VisWindow; + +#[derive(Debug)] +pub(crate) struct RangeAt<'a, R: RangeBounds> { + clock: Clock, + window: VisWindow, + + range: R, + index: usize, + /// number of visible elements seen. 
+ seen: usize, + last_key: Option, + root_child: &'a OpTreeNode, + meta: &'a OpSetMetadata, +} + +impl<'a, R: RangeBounds> RangeAt<'a, R> { + pub(crate) fn new( + range: R, + root_child: &'a OpTreeNode, + meta: &'a OpSetMetadata, + clock: Clock, + ) -> Self { + Self { + clock, + window: VisWindow::default(), + range, + index: 0, + seen: 0, + last_key: None, + root_child, + meta, + } + } +} + +impl<'a, 'm, R: RangeBounds> Iterator for RangeAt<'a, R> { + type Item = (Key, Value, OpId); + + fn next(&mut self) -> Option { + for i in self.index..self.root_child.len() { + let op = self.root_child.get(i)?; + let visible = self.window.visible_at(op, i, &self.clock); + self.index += 1; + if Some(op.elemid_or_key()) != self.last_key && visible { + self.last_key = Some(op.elemid_or_key()); + let contains = match op.key { + Key::Map(m) => self + .range + .contains(&Prop::Map(self.meta.props.get(m).clone())), + Key::Seq(_) => self.range.contains(&Prop::Seq(self.seen)), + }; + self.seen += 1; + if contains { + return Some((op.elemid_or_key(), op.value(), op.id)); + } + } + } + None + } +} diff --git a/automerge/src/range_at.rs b/automerge/src/range_at.rs new file mode 100644 index 00000000..868a78af --- /dev/null +++ b/automerge/src/range_at.rs @@ -0,0 +1,26 @@ +use crate::{exid::ExId, Value}; +use std::ops::RangeBounds; + +use crate::{query, Automerge, Prop}; + +pub struct RangeAt<'a, 'k, R: RangeBounds> { + range: Option>, + doc: &'a Automerge, +} + +impl<'a, 'k, 'm, R: RangeBounds> RangeAt<'a, 'k, R> { + pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { + Self { range, doc } + } +} + +impl<'a, 'k, 'm, R: RangeBounds> Iterator for RangeAt<'a, 'k, R> { + type Item = (String, Value, ExId); + + fn next(&mut self) -> Option { + self.range + .as_mut()? 
+ .next() + .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + } +} From 89eb598858459620be59fa67f64ff00ce3012f04 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 10:35:43 +0100 Subject: [PATCH 265/730] Fix keys_at --- automerge/src/query/keys_at.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/query/keys_at.rs b/automerge/src/query/keys_at.rs index c55282b2..71da2927 100644 --- a/automerge/src/query/keys_at.rs +++ b/automerge/src/query/keys_at.rs @@ -32,7 +32,7 @@ impl<'a> Iterator for KeysAt<'a> { type Item = Key; fn next(&mut self) -> Option { - for i in self.index..self.root_child.len() { + for i in self.index..self.index_back { let op = self.root_child.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; From bcf191bea390452f754c4823a08152abb7eef2ca Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 10:37:40 +0100 Subject: [PATCH 266/730] Add values_at --- automerge/src/automerge.rs | 13 ++++++++++++- automerge/src/lib.rs | 2 ++ automerge/src/values_at.rs | 26 ++++++++++++++++++++++++++ 3 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 automerge/src/values_at.rs diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index af8fdcf0..02595adf 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -12,7 +12,7 @@ use crate::types::{ ActorId, AssignPatch, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, ScalarValue, Value, }; -use crate::{legacy, query, types, ObjType, RangeAt}; +use crate::{legacy, query, types, ObjType, RangeAt, ValuesAt}; use crate::{AutomergeError, Change, Prop}; use crate::{KeysAt, Values}; use serde::Serialize; @@ -397,6 +397,17 @@ impl Automerge { } } + /// Historical version of [`values`](Self::values). 
+ pub fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt { + if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + let clock = self.clock_at(heads); + let iter_range = self.ops.range_at(obj, .., clock); + ValuesAt::new(self, iter_range) + } else { + ValuesAt::new(self, None) + } + } + /// Get the length of the given object. pub fn length>(&self, obj: O) -> usize { if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index fade0787..70006ff7 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -47,6 +47,7 @@ pub mod transaction; mod types; mod value; mod values; +mod values_at; #[cfg(feature = "optree-visualisation")] mod visualisation; @@ -66,5 +67,6 @@ pub use range_at::RangeAt; pub use types::{ActorId, AssignPatch, ChangeHash, ObjType, OpType, Patch, Prop}; pub use value::{ScalarValue, Value}; pub use values::Values; +pub use values_at::ValuesAt; pub const ROOT: ObjId = ObjId::Root; diff --git a/automerge/src/values_at.rs b/automerge/src/values_at.rs new file mode 100644 index 00000000..41038975 --- /dev/null +++ b/automerge/src/values_at.rs @@ -0,0 +1,26 @@ +use crate::{exid::ExId, Value}; +use std::ops::RangeFull; + +use crate::{query, Automerge}; + +pub struct ValuesAt<'a, 'k> { + range: Option>, + doc: &'a Automerge, +} + +impl<'a, 'k> ValuesAt<'a, 'k> { + pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { + Self { range, doc } + } +} + +impl<'a, 'k> Iterator for ValuesAt<'a, 'k> { + type Item = (String, Value, ExId); + + fn next(&mut self) -> Option { + self.range + .as_mut()? 
+ .next() + .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + } +} From 679b3d20ced55bc8071b91993921f23186e243af Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 10:40:47 +0100 Subject: [PATCH 267/730] Add range_at and values_at to transactable --- automerge/src/autocommit.rs | 15 ++++++++++++++- .../src/transaction/manual_transaction.rs | 18 +++++++++++++++++- automerge/src/transaction/transactable.rs | 14 +++++++++++++- 3 files changed, 44 insertions(+), 3 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 09ff65c6..534560f1 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -3,7 +3,7 @@ use std::ops::RangeBounds; use crate::exid::ExId; use crate::transaction::{CommitOptions, Transactable}; use crate::types::Patch; -use crate::{sync, Keys, KeysAt, ObjType, Range, ScalarValue, Values}; +use crate::{sync, Keys, KeysAt, ObjType, Range, RangeAt, ScalarValue, Values, ValuesAt}; use crate::{ transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, @@ -237,10 +237,23 @@ impl Transactable for AutoCommit { self.doc.range(obj, range) } + fn range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> RangeAt { + self.doc.range_at(obj, range, heads) + } + fn values>(&self, obj: O) -> Values { self.doc.values(obj) } + fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt { + self.doc.values_at(obj, heads) + } + fn length>(&self, obj: O) -> usize { self.doc.length(obj) } diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 11f7fdca..222ceef8 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -1,7 +1,10 @@ use std::ops::RangeBounds; use crate::exid::ExId; -use crate::{Automerge, ChangeHash, KeysAt, ObjType, Prop, Range, ScalarValue, Value, Values}; 
+use crate::{ + Automerge, ChangeHash, KeysAt, ObjType, Prop, Range, RangeAt, ScalarValue, Value, Values, + ValuesAt, +}; use crate::{AutomergeError, Keys}; use super::{CommitOptions, Transactable, TransactionInner}; @@ -185,10 +188,23 @@ impl<'a> Transactable for Transaction<'a> { self.doc.range(obj, range) } + fn range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> RangeAt { + self.doc.range_at(obj, range, heads) + } + fn values>(&self, obj: O) -> Values { self.doc.values(obj) } + fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt { + self.doc.values_at(obj, heads) + } + fn length>(&self, obj: O) -> usize { self.doc.length(obj) } diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 0c0e2abc..17faf365 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -1,7 +1,10 @@ use std::ops::RangeBounds; use crate::exid::ExId; -use crate::{AutomergeError, ChangeHash, Keys, KeysAt, ObjType, Prop, Range, ScalarValue, Value}; +use crate::{ + AutomergeError, ChangeHash, Keys, KeysAt, ObjType, Prop, Range, RangeAt, ScalarValue, Value, + Values, ValuesAt, +}; /// A way of mutating a document within a single change. pub trait Transactable { @@ -101,8 +104,17 @@ pub trait Transactable { fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range; + fn range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> RangeAt; + fn values>(&self, obj: O) -> Values; + fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt; + /// Get the length of the given object. 
fn length>(&self, obj: O) -> usize; From 07553195fa74435a82ece872ab4c46f58d1c9e20 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 10:48:04 +0100 Subject: [PATCH 268/730] Update wasm and js with new names --- automerge-js/src/index.js | 2 +- automerge-js/src/proxies.js | 4 +- automerge-wasm/index.d.ts | 8 +- automerge-wasm/src/lib.rs | 10 +- automerge-wasm/test/test.ts | 162 +++++++++--------- automerge/src/autocommit.rs | 8 +- automerge/src/automerge.rs | 50 +++--- automerge/src/op_set.rs | 8 +- automerge/src/query/keys.rs | 1 - automerge/src/query/range.rs | 6 +- automerge/src/query/range_at.rs | 4 +- automerge/src/range.rs | 12 +- automerge/src/range_at.rs | 12 +- .../src/transaction/manual_transaction.rs | 8 +- automerge/src/transaction/transactable.rs | 4 +- automerge/src/values.rs | 12 +- automerge/src/values_at.rs | 12 +- automerge/tests/helpers/mod.rs | 2 +- automerge/tests/test.rs | 13 +- 19 files changed, 167 insertions(+), 171 deletions(-) diff --git a/automerge-js/src/index.js b/automerge-js/src/index.js index 326fc967..33d6d58c 100644 --- a/automerge-js/src/index.js +++ b/automerge-js/src/index.js @@ -127,7 +127,7 @@ function getActorId(doc) { } function conflictAt(context, objectId, prop) { - let values = context.values(objectId, prop) + let values = context.getAll(objectId, prop) if (values.length <= 1) { return } diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js index 1086bcb0..f19570fb 100644 --- a/automerge-js/src/proxies.js +++ b/automerge-js/src/proxies.js @@ -19,7 +19,7 @@ function parseListIndex(key) { function valueAt(target, prop) { const { context, objectId, path, readonly, heads} = target - let value = context.value(objectId, prop, heads) + let value = context.get(objectId, prop, heads) if (value === undefined) { return } @@ -293,7 +293,7 @@ const ListHandler = { deleteProperty (target, index) { const {context, objectId} = target index = parseListIndex(index) - if (context.value(objectId, index)[0] 
== "counter") { + if (context.get(objectId, index)[0] == "counter") { throw new TypeError('Unsupported operation: deleting a counter from a list') } context.delete(objectId, index) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index d694714d..9bbdfec9 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -103,9 +103,9 @@ export class Automerge { delete(obj: ObjID, prop: Prop): void; // returns a single value - if there is a conflict return the winner - value(obj: ObjID, prop: any, heads?: Heads): FullValue | null; + get(obj: ObjID, prop: any, heads?: Heads): FullValue | null; // return all values in case of a conflict - values(obj: ObjID, arg: any, heads?: Heads): FullValue[]; + getAll(obj: ObjID, arg: any, heads?: Heads): FullValue[]; keys(obj: ObjID, heads?: Heads): string[]; text(obj: ObjID, heads?: Heads): string; length(obj: ObjID, heads?: Heads): number; @@ -178,8 +178,8 @@ export interface InitOutput { readonly automerge_insert: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => void; readonly automerge_set: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => void; readonly automerge_inc: (a: number, b: number, c: number, d: number, e: number, f: number) => void; - readonly automerge_value: (a: number, b: number, c: number, d: number, e: number, f: number) => void; - readonly automerge_values: (a: number, b: number, c: number, d: number, e: number, f: number) => void; + readonly automerge_get: (a: number, b: number, c: number, d: number, e: number, f: number) => void; + readonly automerge_get_all: (a: number, b: number, c: number, d: number, e: number, f: number) => void; readonly automerge_length: (a: number, b: number, c: number, d: number, e: number) => void; readonly automerge_del: (a: number, b: number, c: number, d: number, e: number) => void; readonly automerge_save: (a: number, b: number) => void; diff --git 
a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 300f6b74..3b5471eb 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -275,7 +275,8 @@ impl Automerge { Ok(()) } - pub fn value( + #[wasm_bindgen(js_name = get)] + pub fn get( &mut self, obj: JsValue, prop: JsValue, @@ -309,7 +310,8 @@ impl Automerge { } } - pub fn values( + #[wasm_bindgen(js_name = getAll)] + pub fn get_all( &mut self, obj: JsValue, arg: JsValue, @@ -320,9 +322,9 @@ impl Automerge { let prop = to_prop(arg); if let Ok(prop) = prop { let values = if let Some(heads) = get_heads(heads) { - self.0.get_conflicts_at(&obj, prop, &heads) + self.0.get_all_at(&obj, prop, &heads) } else { - self.0.get_conflicts(&obj, prop) + self.0.get_all(&obj, prop) } .map_err(to_js_err)?; for value in values { diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index f9ff6da3..9b448705 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -45,7 +45,7 @@ describe('Automerge', () => { it('getting a nonexistant prop does not throw an error', () => { let doc = create() let root = "_root" - let result = doc.value(root,"hello") + let result = doc.get(root,"hello") assert.deepEqual(result,undefined) doc.free() }) @@ -67,42 +67,42 @@ describe('Automerge', () => { doc.putObject(root, "list", []); doc.put(root, "null", null) - result = doc.value(root,"hello") + result = doc.get(root,"hello") assert.deepEqual(result,["str","world"]) - result = doc.value(root,"number1") + result = doc.get(root,"number1") assert.deepEqual(result,["uint",5]) - result = doc.value(root,"number2") + result = doc.get(root,"number2") assert.deepEqual(result,["int",5]) - result = doc.value(root,"number3") + result = doc.get(root,"number3") assert.deepEqual(result,["f64",5.5]) - result = doc.value(root,"number4") + result = doc.get(root,"number4") assert.deepEqual(result,["f64",5.5]) - result = doc.value(root,"number5") + result = doc.get(root,"number5") 
assert.deepEqual(result,["int",5]) - result = doc.value(root,"bool") + result = doc.get(root,"bool") assert.deepEqual(result,["boolean",true]) doc.put(root, "bool", false, "boolean") - result = doc.value(root,"bool") + result = doc.get(root,"bool") assert.deepEqual(result,["boolean",false]) - result = doc.value(root,"time1") + result = doc.get(root,"time1") assert.deepEqual(result,["timestamp",new Date(1000)]) - result = doc.value(root,"time2") + result = doc.get(root,"time2") assert.deepEqual(result,["timestamp",new Date(1001)]) - result = doc.value(root,"list") + result = doc.get(root,"list") assert.deepEqual(result,["list","10@aabbcc"]); - result = doc.value(root,"null") + result = doc.get(root,"null") assert.deepEqual(result,["null",null]); doc.free() @@ -112,9 +112,9 @@ describe('Automerge', () => { let doc = create() doc.put("_root","data1", new Uint8Array([10,11,12])); doc.put("_root","data2", new Uint8Array([13,14,15]), "bytes"); - let value1 = doc.value("_root", "data1") + let value1 = doc.get("_root", "data1") assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]); - let value2 = doc.value("_root", "data2") + let value2 = doc.get("_root", "data2") assert.deepEqual(value2, ["bytes", new Uint8Array([13,14,15])]); doc.free() }) @@ -128,10 +128,10 @@ describe('Automerge', () => { doc.put(submap, "number", 6, "uint") assert.strictEqual(doc.pendingOps(),2) - result = doc.value(root,"submap") + result = doc.get(root,"submap") assert.deepEqual(result,["map",submap]) - result = doc.value(submap,"number") + result = doc.get(submap,"number") assert.deepEqual(result,["uint",6]) doc.free() }) @@ -146,15 +146,15 @@ describe('Automerge', () => { doc.insert(submap, 2, "c"); doc.insert(submap, 0, "z"); - assert.deepEqual(doc.value(submap, 0),["str","z"]) - assert.deepEqual(doc.value(submap, 1),["str","a"]) - assert.deepEqual(doc.value(submap, 2),["str","b"]) - assert.deepEqual(doc.value(submap, 3),["str","c"]) + assert.deepEqual(doc.get(submap, 0),["str","z"]) + 
assert.deepEqual(doc.get(submap, 1),["str","a"]) + assert.deepEqual(doc.get(submap, 2),["str","b"]) + assert.deepEqual(doc.get(submap, 3),["str","c"]) assert.deepEqual(doc.length(submap),4) doc.put(submap, 2, "b v2"); - assert.deepEqual(doc.value(submap, 2),["str","b v2"]) + assert.deepEqual(doc.get(submap, 2),["str","b v2"]) assert.deepEqual(doc.length(submap),4) doc.free() }) @@ -205,9 +205,9 @@ describe('Automerge', () => { let root = "_root" doc.put(root, "xxx", "xxx"); - assert.deepEqual(doc.value(root, "xxx"),["str","xxx"]) + assert.deepEqual(doc.get(root, "xxx"),["str","xxx"]) doc.delete(root, "xxx"); - assert.deepEqual(doc.value(root, "xxx"),undefined) + assert.deepEqual(doc.get(root, "xxx"),undefined) doc.free() }) @@ -216,11 +216,11 @@ describe('Automerge', () => { let root = "_root" doc.put(root, "counter", 10, "counter"); - assert.deepEqual(doc.value(root, "counter"),["counter",10]) + assert.deepEqual(doc.get(root, "counter"),["counter",10]) doc.increment(root, "counter", 10); - assert.deepEqual(doc.value(root, "counter"),["counter",20]) + assert.deepEqual(doc.get(root, "counter"),["counter",20]) doc.increment(root, "counter", -5); - assert.deepEqual(doc.value(root, "counter"),["counter",15]) + assert.deepEqual(doc.get(root, "counter"),["counter",15]) doc.free() }) @@ -233,12 +233,12 @@ describe('Automerge', () => { doc.splice(text, 0, 0, "hello ") doc.splice(text, 6, 0, ["w","o","r","l","d"]) doc.splice(text, 11, 0, ["!","?"]) - assert.deepEqual(doc.value(text, 0),["str","h"]) - assert.deepEqual(doc.value(text, 1),["str","e"]) - assert.deepEqual(doc.value(text, 9),["str","l"]) - assert.deepEqual(doc.value(text, 10),["str","d"]) - assert.deepEqual(doc.value(text, 11),["str","!"]) - assert.deepEqual(doc.value(text, 12),["str","?"]) + assert.deepEqual(doc.get(text, 0),["str","h"]) + assert.deepEqual(doc.get(text, 1),["str","e"]) + assert.deepEqual(doc.get(text, 9),["str","l"]) + assert.deepEqual(doc.get(text, 10),["str","d"]) + 
assert.deepEqual(doc.get(text, 11),["str","!"]) + assert.deepEqual(doc.get(text, 12),["str","?"]) doc.free() }) @@ -247,8 +247,8 @@ describe('Automerge', () => { let text = doc.putObject("/", "text", "Hello world"); let obj = doc.insertObject(text, 6, { hello: "world" }); assert.deepEqual(doc.text(text), "Hello \ufffcworld"); - assert.deepEqual(doc.value(text, 6), ["map", obj]); - assert.deepEqual(doc.value(obj, "hello"), ["str", "world"]); + assert.deepEqual(doc.get(text, 6), ["map", obj]); + assert.deepEqual(doc.get(obj, "hello"), ["str", "world"]); }) it('should be able save all or incrementally', () => { @@ -313,14 +313,14 @@ describe('Automerge', () => { doc3.put("_root", "cnt", 10, "counter") doc1.applyChanges(doc2.getChanges(doc1.getHeads())) doc1.applyChanges(doc3.getChanges(doc1.getHeads())) - let result = doc1.values("_root", "cnt") + let result = doc1.getAll("_root", "cnt") assert.deepEqual(result,[ ['int',20,'2@aaaa'], ['counter',0,'2@bbbb'], ['counter',10,'2@cccc'], ]) doc1.increment("_root", "cnt", 5) - result = doc1.values("_root", "cnt") + result = doc1.getAll("_root", "cnt") assert.deepEqual(result, [ [ 'counter', 5, '2@bbbb' ], [ 'counter', 15, '2@cccc' ], @@ -346,14 +346,14 @@ describe('Automerge', () => { doc3.put(seq, 0, 10, "counter") doc1.applyChanges(doc2.getChanges(doc1.getHeads())) doc1.applyChanges(doc3.getChanges(doc1.getHeads())) - let result = doc1.values(seq, 0) + let result = doc1.getAll(seq, 0) assert.deepEqual(result,[ ['int',20,'3@aaaa'], ['counter',0,'3@bbbb'], ['counter',10,'3@cccc'], ]) doc1.increment(seq, 0, 5) - result = doc1.values(seq, 0) + result = doc1.getAll(seq, 0) assert.deepEqual(result, [ [ 'counter', 5, '3@bbbb' ], [ 'counter', 15, '3@cccc' ], @@ -440,13 +440,13 @@ describe('Automerge', () => { let d = doc1.put(c,"d","dd"); let saved = doc1.save(); let doc2 = loadDoc(saved); - assert.deepEqual(doc2.value("_root","a"),["map",a]) + assert.deepEqual(doc2.get("_root","a"),["map",a]) assert.deepEqual(doc2.keys(a),[]) - 
assert.deepEqual(doc2.value("_root","b"),["map",b]) + assert.deepEqual(doc2.get("_root","b"),["map",b]) assert.deepEqual(doc2.keys(b),[]) - assert.deepEqual(doc2.value("_root","c"),["map",c]) + assert.deepEqual(doc2.get("_root","c"),["map",c]) assert.deepEqual(doc2.keys(c),["d"]) - assert.deepEqual(doc2.value(c,"d"),["str","dd"]) + assert.deepEqual(doc2.get(c,"d"),["str","dd"]) doc1.free() doc2.free() }) @@ -458,7 +458,7 @@ describe('Automerge', () => { let B = A.fork() - assert.deepEqual(B.value("_root","text"), [ "text", At]) + assert.deepEqual(B.get("_root","text"), [ "text", At]) B.splice(At, 4, 1) B.splice(At, 4, 0, '!') @@ -471,7 +471,7 @@ describe('Automerge', () => { let C = loadDoc(binary) - assert.deepEqual(C.value('_root', 'text'), ['text', '1@aabbcc']) + assert.deepEqual(C.get('_root', 'text'), ['text', '1@aabbcc']) assert.deepEqual(C.text(At), 'hell! world') }) }) @@ -556,8 +556,8 @@ describe('Automerge', () => { doc1.insert('1@aaaa', 1, 'Greenfinch') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc1.value('1@aaaa', 0), ['str', 'Chaffinch']) - assert.deepEqual(doc1.value('1@aaaa', 1), ['str', 'Greenfinch']) + assert.deepEqual(doc1.get('1@aaaa', 0), ['str', 'Chaffinch']) + assert.deepEqual(doc1.get('1@aaaa', 1), ['str', 'Greenfinch']) assert.deepEqual(doc2.popPatches(), [ {action: 'delete', obj: '1@aaaa', key: 0}, {action: 'insert', obj: '1@aaaa', key: 1, value: 'Greenfinch', datatype: 'str'} @@ -582,8 +582,8 @@ describe('Automerge', () => { doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) doc4.loadIncremental(change3); doc4.loadIncremental(change2) - assert.deepEqual([0, 1, 2, 3].map(i => (doc3.value('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) - assert.deepEqual([0, 1, 2, 3].map(i => (doc4.value('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) + assert.deepEqual([0, 1, 2, 3].map(i => (doc3.get('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) + assert.deepEqual([0, 1, 2, 
3].map(i => (doc4.get('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) assert.deepEqual(doc3.popPatches(), [ {action: 'insert', obj: '1@aaaa', key: 0, value: 'c', datatype: 'str'}, {action: 'insert', obj: '1@aaaa', key: 1, value: 'd', datatype: 'str'}, @@ -615,8 +615,8 @@ describe('Automerge', () => { doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) doc4.loadIncremental(change3); doc4.loadIncremental(change2) - assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc3.value('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) - assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc4.value('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) + assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc3.get('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) + assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc4.get('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) assert.deepEqual(doc3.popPatches(), [ {action: 'insert', obj: '1@aaaa', key: 2, value: 'e', datatype: 'str'}, {action: 'insert', obj: '1@aaaa', key: 3, value: 'f', datatype: 'str'}, @@ -641,10 +641,10 @@ describe('Automerge', () => { doc4.enablePatches(true) doc3.loadIncremental(change1); doc3.loadIncremental(change2) doc4.loadIncremental(change2); doc4.loadIncremental(change1) - assert.deepEqual(doc3.value('_root', 'bird'), ['str', 'Goldfinch']) - assert.deepEqual(doc3.values('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) - assert.deepEqual(doc4.value('_root', 'bird'), ['str', 'Goldfinch']) - assert.deepEqual(doc4.values('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) + assert.deepEqual(doc3.get('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) + assert.deepEqual(doc4.get('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc4.getAll('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], 
['str', 'Goldfinch', '1@bbbb']]) assert.deepEqual(doc3.popPatches(), [ {action: 'assign', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', conflict: false}, {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} @@ -668,16 +668,16 @@ describe('Automerge', () => { doc1.loadIncremental(change2); doc1.loadIncremental(change3) doc2.loadIncremental(change3); doc2.loadIncremental(change1) doc3.loadIncremental(change1); doc3.loadIncremental(change2) - assert.deepEqual(doc1.value('_root', 'bird'), ['str', 'Goldfinch']) - assert.deepEqual(doc1.values('_root', 'bird'), [ + assert.deepEqual(doc1.get('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc1.getAll('_root', 'bird'), [ ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] ]) - assert.deepEqual(doc2.value('_root', 'bird'), ['str', 'Goldfinch']) - assert.deepEqual(doc2.values('_root', 'bird'), [ + assert.deepEqual(doc2.get('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc2.getAll('_root', 'bird'), [ ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] ]) - assert.deepEqual(doc3.value('_root', 'bird'), ['str', 'Goldfinch']) - assert.deepEqual(doc3.values('_root', 'bird'), [ + assert.deepEqual(doc3.get('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc3.getAll('_root', 'bird'), [ ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] ]) assert.deepEqual(doc1.popPatches(), [ @@ -705,7 +705,7 @@ describe('Automerge', () => { doc2.loadIncremental(change1); doc3.loadIncremental(change2) doc1.put('_root', 'bird', 'Goldfinch') doc3.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc3.values('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) + assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc3.popPatches(), [ {action: 'assign', obj: '_root', key: 
'bird', value: 'Greenfinch', datatype: 'str', conflict: false}, {action: 'assign', obj: '_root', key: 'bird', value: 'Chaffinch', datatype: 'str', conflict: true}, @@ -725,10 +725,10 @@ describe('Automerge', () => { doc2.enablePatches(true) doc1.loadIncremental(change2) doc2.loadIncremental(change1) - assert.deepEqual(doc1.value('_root', 'bird'), ['str', 'Goldfinch']) - assert.deepEqual(doc1.values('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) - assert.deepEqual(doc2.value('_root', 'bird'), ['str', 'Goldfinch']) - assert.deepEqual(doc2.values('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) + assert.deepEqual(doc1.get('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc1.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) + assert.deepEqual(doc2.get('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc2.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc1.popPatches(), [ {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} ]) @@ -752,10 +752,10 @@ describe('Automerge', () => { doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) doc4.loadIncremental(change3); doc4.loadIncremental(change2) - assert.deepEqual(doc3.value('1@aaaa', 0), ['str', 'Redwing']) - assert.deepEqual(doc3.values('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) - assert.deepEqual(doc4.value('1@aaaa', 0), ['str', 'Redwing']) - assert.deepEqual(doc4.values('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) + assert.deepEqual(doc3.get('1@aaaa', 0), ['str', 'Redwing']) + assert.deepEqual(doc3.getAll('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) + assert.deepEqual(doc4.get('1@aaaa', 0), ['str', 'Redwing']) + assert.deepEqual(doc4.getAll('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) assert.deepEqual(doc3.popPatches(), [ {action: 
'assign', obj: '1@aaaa', key: 0, value: 'Song Thrush', datatype: 'str', conflict: false}, {action: 'assign', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: true} @@ -783,10 +783,10 @@ describe('Automerge', () => { doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) doc4.loadIncremental(change3); doc4.loadIncremental(change2) - assert.deepEqual(doc3.values('1@aaaa', 0), [['str', 'Ring-necked parakeet', '5@bbbb']]) - assert.deepEqual(doc3.values('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']]) - assert.deepEqual(doc4.values('1@aaaa', 0), [['str', 'Ring-necked parakeet', '5@bbbb']]) - assert.deepEqual(doc4.values('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']]) + assert.deepEqual(doc3.getAll('1@aaaa', 0), [['str', 'Ring-necked parakeet', '5@bbbb']]) + assert.deepEqual(doc3.getAll('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']]) + assert.deepEqual(doc4.getAll('1@aaaa', 0), [['str', 'Ring-necked parakeet', '5@bbbb']]) + assert.deepEqual(doc4.getAll('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']]) assert.deepEqual(doc3.popPatches(), [ {action: 'delete', obj: '1@aaaa', key: 0}, {action: 'assign', obj: '1@aaaa', key: 1, value: 'Song Thrush', datatype: 'str', conflict: false}, @@ -812,14 +812,14 @@ describe('Automerge', () => { doc3.enablePatches(true) doc3.loadIncremental(change1) doc3.loadIncremental(change2) - assert.deepEqual(doc3.values('_root', 'bird'), [['str', 'Robin', '1@aaaa'], ['str', 'Wren', '1@bbbb']]) + assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa'], ['str', 'Wren', '1@bbbb']]) assert.deepEqual(doc3.popPatches(), [ {action: 'assign', obj: '_root', key: 'bird', value: 'Robin', datatype: 'str', conflict: false}, {action: 'assign', obj: '_root', key: 'bird', value: 'Wren', datatype: 'str', conflict: true} ]) doc3.loadIncremental(change3) - 
assert.deepEqual(doc3.value('_root', 'bird'), ['str', 'Robin']) - assert.deepEqual(doc3.values('_root', 'bird'), [['str', 'Robin', '1@aaaa']]) + assert.deepEqual(doc3.get('_root', 'bird'), ['str', 'Robin']) + assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa']]) assert.deepEqual(doc3.popPatches(), [ {action: 'assign', obj: '_root', key: 'bird', value: 'Robin', datatype: 'str', conflict: false} ]) @@ -835,12 +835,12 @@ describe('Automerge', () => { doc2.enablePatches(true) doc1.loadIncremental(change2) doc2.loadIncremental(change1) - assert.deepEqual(doc1.values('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) + assert.deepEqual(doc1.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) assert.deepEqual(doc1.popPatches(), [ {action: 'assign', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true}, {action: 'assign', obj: '1@bbbb', key: 'Sparrowhawk', value: 1, datatype: 'int', conflict: false} ]) - assert.deepEqual(doc2.values('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) + assert.deepEqual(doc2.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) assert.deepEqual(doc2.popPatches(), [ {action: 'assign', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true}, {action: 'insert', obj: '1@aaaa', key: 0, value: 'Parakeet', datatype: 'str'} @@ -854,7 +854,7 @@ describe('Automerge', () => { doc1.put('_root', 'createdAt', now.getTime(), 'timestamp') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc2.value('_root', 'createdAt'), ['timestamp', now]) + assert.deepEqual(doc2.get('_root', 'createdAt'), ['timestamp', now]) assert.deepEqual(doc2.popPatches(), [ {action: 'assign', obj: '_root', key: 'createdAt', value: now, datatype: 'timestamp', conflict: false} ]) @@ -869,7 +869,7 @@ describe('Automerge', () => { doc1.increment('_root', 'starlings', 1) doc1.dump() doc2.loadIncremental(doc1.saveIncremental()) - 
assert.deepEqual(doc2.value('_root', 'starlings'), ['counter', 3]) + assert.deepEqual(doc2.get('_root', 'starlings'), ['counter', 3]) assert.deepEqual(doc2.popPatches(), [ {action: 'assign', obj: '_root', key: 'starlings', value: 2, datatype: 'counter', conflict: false}, {action: 'assign', obj: '_root', key: 'starlings', value: 3, datatype: 'counter', conflict: false} diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 534560f1..76c1f60e 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -395,21 +395,21 @@ impl Transactable for AutoCommit { self.doc.get_at(obj, prop, heads) } - fn get_conflicts, P: Into>( + fn get_all, P: Into>( &self, obj: O, prop: P, ) -> Result, AutomergeError> { - self.doc.get_conflicts(obj, prop) + self.doc.get_all(obj, prop) } - fn get_conflicts_at, P: Into>( + fn get_all_at, P: Into>( &self, obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { - self.doc.get_conflicts_at(obj, prop, heads) + self.doc.get_all_at(obj, prop, heads) } fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 02595adf..b76982b3 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -522,7 +522,7 @@ impl Automerge { obj: O, prop: P, ) -> Result, AutomergeError> { - Ok(self.get_conflicts(obj, prop.into())?.last().cloned()) + Ok(self.get_all(obj, prop.into())?.last().cloned()) } /// Historical version of [`get`](Self::get). @@ -532,14 +532,14 @@ impl Automerge { prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { - Ok(self.get_conflicts_at(obj, prop, heads)?.last().cloned()) + Ok(self.get_all_at(obj, prop, heads)?.last().cloned()) } /// Get all conflicting values out of the document at this prop that conflict. /// /// Returns both the value and the id of the operation that created it, useful for handling /// conflicts and serves as the object id if the value is an object. 
- pub fn get_conflicts, P: Into>( + pub fn get_all, P: Into>( &self, obj: O, prop: P, @@ -570,8 +570,8 @@ impl Automerge { Ok(result) } - /// Historical version of [`get_conflicts`](Self::get_conflicts). - pub fn get_conflicts_at, P: Into>( + /// Historical version of [`get_all`](Self::get_all). + pub fn get_all_at, P: Into>( &self, obj: O, prop: P, @@ -1185,9 +1185,9 @@ mod tests { doc.set_actor(ActorId::random()); let mut tx = doc.transaction(); tx.put(ROOT, "xxx", "xxx")?; - assert!(!tx.get(ROOT, "xxx")?.is_empty()); + assert!(tx.get(ROOT, "xxx")?.is_some()); tx.delete(ROOT, "xxx")?; - assert!(tx.get(ROOT, "xxx")?.is_empty()); + assert!(tx.get(ROOT, "xxx")?.is_none()); tx.commit(); Ok(()) } @@ -1242,7 +1242,7 @@ mod tests { let mut doc_a = Automerge::load(&save_a)?; let mut doc_b = Automerge::load(&save_b)?; - assert!(doc_a.get_conflicts(ROOT, "baz")? == doc_b.get_conflicts(ROOT, "baz")?); + assert!(doc_a.get_all(ROOT, "baz")? == doc_b.get_all(ROOT, "baz")?); assert!(doc_a.save() == doc_b.save()); @@ -1465,18 +1465,18 @@ mod tests { fn keys_iter_seq() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let list = tx.set_object(ROOT, "list", ObjType::List).unwrap(); + let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); tx.insert(&list, 0, 3).unwrap(); tx.insert(&list, 1, 4).unwrap(); tx.insert(&list, 2, 5).unwrap(); tx.insert(&list, 3, 6).unwrap(); tx.commit(); let mut tx = doc.transaction(); - tx.set(&list, 0, 7).unwrap(); + tx.put(&list, 0, 7).unwrap(); tx.commit(); let mut tx = doc.transaction(); - tx.set(&list, 0, 8).unwrap(); - tx.set(&list, 3, 9).unwrap(); + tx.put(&list, 0, 8).unwrap(); + tx.put(&list, 3, 9).unwrap(); tx.commit(); let actor = doc.get_actor(); assert_eq!(doc.keys(&list).count(), 4); @@ -1525,17 +1525,17 @@ mod tests { fn range_iter_map() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - tx.set(ROOT, "a", 3).unwrap(); - tx.set(ROOT, "b", 4).unwrap(); - tx.set(ROOT, "c", 5).unwrap(); - 
tx.set(ROOT, "d", 6).unwrap(); + tx.put(ROOT, "a", 3).unwrap(); + tx.put(ROOT, "b", 4).unwrap(); + tx.put(ROOT, "c", 5).unwrap(); + tx.put(ROOT, "d", 6).unwrap(); tx.commit(); let mut tx = doc.transaction(); - tx.set(ROOT, "a", 7).unwrap(); + tx.put(ROOT, "a", 7).unwrap(); tx.commit(); let mut tx = doc.transaction(); - tx.set(ROOT, "a", 8).unwrap(); - tx.set(ROOT, "d", 9).unwrap(); + tx.put(ROOT, "a", 8).unwrap(); + tx.put(ROOT, "d", 9).unwrap(); tx.commit(); let actor = doc.get_actor(); assert_eq!(doc.range(ROOT, ..).count(), 4); @@ -1597,18 +1597,18 @@ mod tests { fn range_iter_seq() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let list = tx.set_object(ROOT, "list", ObjType::List).unwrap(); + let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); tx.insert(&list, 0, 3).unwrap(); tx.insert(&list, 1, 4).unwrap(); tx.insert(&list, 2, 5).unwrap(); tx.insert(&list, 3, 6).unwrap(); tx.commit(); let mut tx = doc.transaction(); - tx.set(&list, 0, 7).unwrap(); + tx.put(&list, 0, 7).unwrap(); tx.commit(); let mut tx = doc.transaction(); - tx.set(&list, 0, 8).unwrap(); - tx.set(&list, 3, 9).unwrap(); + tx.put(&list, 0, 8).unwrap(); + tx.put(&list, 3, 9).unwrap(); tx.commit(); let actor = doc.get_actor(); assert_eq!(doc.range(&list, ..).count(), 4); @@ -1988,14 +1988,14 @@ mod tests { assert_eq!(doc1.length(&list), 1); assert_eq!(doc2.length(&list), 1); assert_eq!( - doc1.get_conflicts(&list, 0).unwrap(), + doc1.get_all(&list, 0).unwrap(), vec![ (max.into(), ExId::Id(max + 2, actor1.clone(), 0)), (max.into(), ExId::Id(max + 2, actor2.clone(), 1)) ] ); assert_eq!( - doc2.get_conflicts(&list, 0).unwrap(), + doc2.get_all(&list, 0).unwrap(), vec![ (max.into(), ExId::Id(max + 2, actor1, 0)), (max.into(), ExId::Id(max + 2, actor2, 1)) diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index a18453da..b6480a07 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -69,8 +69,8 @@ impl OpSetInternal { } pub fn range>(&self, 
obj: ObjId, range: R) -> Option> { - if let Some((_typ, tree)) = self.trees.get(&obj) { - tree.range(range, &self.m) + if let Some(tree) = self.trees.get(&obj) { + tree.internal.range(range, &self.m) } else { None } @@ -82,8 +82,8 @@ impl OpSetInternal { range: R, clock: Clock, ) -> Option> { - if let Some((_typ, tree)) = self.trees.get(&obj) { - tree.range_at(range, &self.m, clock) + if let Some(tree) = self.trees.get(&obj) { + tree.internal.range_at(range, &self.m, clock) } else { None } diff --git a/automerge/src/query/keys.rs b/automerge/src/query/keys.rs index e3224704..30436f31 100644 --- a/automerge/src/query/keys.rs +++ b/automerge/src/query/keys.rs @@ -43,7 +43,6 @@ impl<'a> DoubleEndedIterator for Keys<'a> { fn next_back(&mut self) -> Option { for i in (self.index..self.index_back).rev() { let op = self.root_child.get(i)?; - println!("{} {:?}", i, op); self.index_back -= 1; if Some(op.elemid_or_key()) != self.last_key_back && op.visible() { self.last_key_back = Some(op.elemid_or_key()); diff --git a/automerge/src/query/range.rs b/automerge/src/query/range.rs index f3d0835d..0b165999 100644 --- a/automerge/src/query/range.rs +++ b/automerge/src/query/range.rs @@ -28,13 +28,12 @@ impl<'a, R: RangeBounds> Range<'a, R> { } } -impl<'a, 'm, R: RangeBounds> Iterator for Range<'a, R> { - type Item = (Key, Value, OpId); +impl<'a, R: RangeBounds> Iterator for Range<'a, R> { + type Item = (Key, Value<'a>, OpId); fn next(&mut self) -> Option { for i in self.index..self.root_child.len() { let op = self.root_child.get(i)?; - println!("{} {:?}", self.index, op); self.index += 1; if Some(op.elemid_or_key()) != self.last_key && op.visible() { self.last_key = Some(op.elemid_or_key()); @@ -44,7 +43,6 @@ impl<'a, 'm, R: RangeBounds> Iterator for Range<'a, R> { .contains(&Prop::Map(self.meta.props.get(m).clone())), Key::Seq(_) => self.range.contains(&Prop::Seq(self.seen)), }; - println!("{} {}", self.seen, contains); self.seen += 1; if contains { return 
Some((op.elemid_or_key(), op.value(), op.id)); diff --git a/automerge/src/query/range_at.rs b/automerge/src/query/range_at.rs index 6e9b17dd..5d56ff52 100644 --- a/automerge/src/query/range_at.rs +++ b/automerge/src/query/range_at.rs @@ -41,8 +41,8 @@ impl<'a, R: RangeBounds> RangeAt<'a, R> { } } -impl<'a, 'm, R: RangeBounds> Iterator for RangeAt<'a, R> { - type Item = (Key, Value, OpId); +impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { + type Item = (Key, Value<'a>, OpId); fn next(&mut self) -> Option { for i in self.index..self.root_child.len() { diff --git a/automerge/src/range.rs b/automerge/src/range.rs index 94322794..b0127589 100644 --- a/automerge/src/range.rs +++ b/automerge/src/range.rs @@ -3,19 +3,19 @@ use std::ops::RangeBounds; use crate::{query, Automerge, Prop}; -pub struct Range<'a, 'k, R: RangeBounds> { - range: Option>, +pub struct Range<'a, R: RangeBounds> { + range: Option>, doc: &'a Automerge, } -impl<'a, 'k, 'm, R: RangeBounds> Range<'a, 'k, R> { - pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { +impl<'a, R: RangeBounds> Range<'a, R> { + pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { Self { range, doc } } } -impl<'a, 'k, 'm, R: RangeBounds> Iterator for Range<'a, 'k, R> { - type Item = (String, Value, ExId); +impl<'a, R: RangeBounds> Iterator for Range<'a, R> { + type Item = (String, Value<'a>, ExId); fn next(&mut self) -> Option { self.range diff --git a/automerge/src/range_at.rs b/automerge/src/range_at.rs index 868a78af..dc9c8493 100644 --- a/automerge/src/range_at.rs +++ b/automerge/src/range_at.rs @@ -3,19 +3,19 @@ use std::ops::RangeBounds; use crate::{query, Automerge, Prop}; -pub struct RangeAt<'a, 'k, R: RangeBounds> { - range: Option>, +pub struct RangeAt<'a, R: RangeBounds> { + range: Option>, doc: &'a Automerge, } -impl<'a, 'k, 'm, R: RangeBounds> RangeAt<'a, 'k, R> { - pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { +impl<'a, R: RangeBounds> RangeAt<'a, R> { + pub(crate) fn 
new(doc: &'a Automerge, range: Option>) -> Self { Self { range, doc } } } -impl<'a, 'k, 'm, R: RangeBounds> Iterator for RangeAt<'a, 'k, R> { - type Item = (String, Value, ExId); +impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { + type Item = (String, Value<'a>, ExId); fn next(&mut self) -> Option { self.range diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 222ceef8..da189e7a 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -246,21 +246,21 @@ impl<'a> Transactable for Transaction<'a> { self.doc.get_at(obj, prop, heads) } - fn get_conflicts, P: Into>( + fn get_all, P: Into>( &self, obj: O, prop: P, ) -> Result, AutomergeError> { - self.doc.get_conflicts(obj, prop) + self.doc.get_all(obj, prop) } - fn get_conflicts_at, P: Into>( + fn get_all_at, P: Into>( &self, obj: O, prop: P, heads: &[ChangeHash], ) -> Result, AutomergeError> { - self.doc.get_conflicts_at(obj, prop, heads) + self.doc.get_all_at(obj, prop, heads) } fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 17faf365..96f0bea5 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -149,13 +149,13 @@ pub trait Transactable { heads: &[ChangeHash], ) -> Result, AutomergeError>; - fn get_conflicts, P: Into>( + fn get_all, P: Into>( &self, obj: O, prop: P, ) -> Result, AutomergeError>; - fn get_conflicts_at, P: Into>( + fn get_all_at, P: Into>( &self, obj: O, prop: P, diff --git a/automerge/src/values.rs b/automerge/src/values.rs index 26be890e..4c529522 100644 --- a/automerge/src/values.rs +++ b/automerge/src/values.rs @@ -3,19 +3,19 @@ use std::ops::RangeFull; use crate::{query, Automerge}; -pub struct Values<'a, 'k> { - range: Option>, +pub struct Values<'a> { + range: Option>, doc: &'a Automerge, } 
-impl<'a, 'k> Values<'a, 'k> { - pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { +impl<'a> Values<'a> { + pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { Self { range, doc } } } -impl<'a, 'k> Iterator for Values<'a, 'k> { - type Item = (String, Value, ExId); +impl<'a> Iterator for Values<'a> { + type Item = (String, Value<'a>, ExId); fn next(&mut self) -> Option { self.range diff --git a/automerge/src/values_at.rs b/automerge/src/values_at.rs index 41038975..a57e626c 100644 --- a/automerge/src/values_at.rs +++ b/automerge/src/values_at.rs @@ -3,19 +3,19 @@ use std::ops::RangeFull; use crate::{query, Automerge}; -pub struct ValuesAt<'a, 'k> { - range: Option>, +pub struct ValuesAt<'a> { + range: Option>, doc: &'a Automerge, } -impl<'a, 'k> ValuesAt<'a, 'k> { - pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { +impl<'a> ValuesAt<'a> { + pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { Self { range, doc } } } -impl<'a, 'k> Iterator for ValuesAt<'a, 'k> { - type Item = (String, Value, ExId); +impl<'a> Iterator for ValuesAt<'a> { + type Item = (String, Value<'a>, ExId); fn next(&mut self) -> Option { self.range diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index 2fb5d9fb..864fd1cf 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -353,7 +353,7 @@ fn realize_values>( key: K, ) -> BTreeSet { let mut values = BTreeSet::new(); - for (value, objid) in doc.get_conflicts(obj_id, key).unwrap() { + for (value, objid) in doc.get_all(obj_id, key).unwrap() { let realized = match value { automerge::Value::Object(objtype) => realize_obj(doc, &objid, objtype), automerge::Value::Scalar(v) => { diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 8b83e55c..3f4db2ac 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -29,10 +29,7 @@ fn repeated_map_assignment_which_resolves_conflict_not_ignored() { doc2.put(&automerge::ROOT, 
"field", 456).unwrap(); doc1.put(&automerge::ROOT, "field", 789).unwrap(); doc1.merge(&mut doc2).unwrap(); - assert_eq!( - doc1.get_conflicts(&automerge::ROOT, "field").unwrap().len(), - 2 - ); + assert_eq!(doc1.get_all(&automerge::ROOT, "field").unwrap().len(), 2); doc1.put(&automerge::ROOT, "field", 123).unwrap(); assert_doc!( @@ -858,13 +855,13 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc1.merge(&mut doc2).unwrap(); doc1.merge(&mut doc3).unwrap(); - let values = doc1.get_conflicts(&list, 1)?; + let values = doc1.get_all(&list, 1)?; assert_eq!(values.len(), 3); assert_eq!(&values[0].0, &Value::counter(1)); assert_eq!(&values[1].0, &Value::counter(10)); assert_eq!(&values[2].0, &Value::counter(100)); - let values = doc1.get_conflicts(&list, 2)?; + let values = doc1.get_all(&list, 2)?; assert_eq!(values.len(), 3); assert_eq!(&values[0].0, &Value::counter(1)); assert_eq!(&values[1].0, &Value::counter(10)); @@ -873,13 +870,13 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc1.increment(&list, 1, 1)?; doc1.increment(&list, 2, 1)?; - let values = doc1.get_conflicts(&list, 1)?; + let values = doc1.get_all(&list, 1)?; assert_eq!(values.len(), 3); assert_eq!(&values[0].0, &Value::counter(2)); assert_eq!(&values[1].0, &Value::counter(11)); assert_eq!(&values[2].0, &Value::counter(101)); - let values = doc1.get_conflicts(&list, 2)?; + let values = doc1.get_all(&list, 2)?; assert_eq!(values.len(), 2); assert_eq!(&values[0].0, &Value::counter(2)); assert_eq!(&values[1].0, &Value::counter(11)); From 5555d50693e5a805ef4a217d5182f220c89f1cf9 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 8 Apr 2022 17:10:53 -0400 Subject: [PATCH 269/730] readme fixes --- automerge-wasm/README.md | 12 ++++++------ automerge-wasm/index.d.ts | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md index 1f517109..46ac7eb7 100644 --- a/automerge-wasm/README.md +++ 
b/automerge-wasm/README.md @@ -66,7 +66,7 @@ These are puts without a data type doc.free() ``` -Sets with a data type and examples of all the supported data types. +Put's with a data type and examples of all the supported data types. While int vs uint vs f64 matters little in javascript, Automerge is a cross platform library where these distinctions matter. @@ -131,7 +131,7 @@ You can access objects by passing the object id as the first parameter for a cal // get the id then use it - let id = doc.value("/", "config")[1] + let id = doc.value("/", "config") if (id && id[0] === 'map') { doc.put(id[1], "align", "right") } @@ -167,7 +167,7 @@ Maps are key/value stores. The root object is always a map. The keys are alway ### Lists -Lists are index addressable sets of values. These values can be any scalar or object type. You can manipulate lists with `insert()`, `put()`, `insertObject()`, `pubObject()`, `push()`, `pushObject()`, `splice()`, and `del()`. +Lists are index addressable sets of values. These values can be any scalar or object type. You can manipulate lists with `insert()`, `put()`, `insertObject()`, `pubObject()`, `push()`, `pushObject()`, `splice()`, and `delete()`. ```javascript let doc = create() @@ -175,7 +175,7 @@ Lists are index addressable sets of values. These values can be any scalar or o // init a new list with two elements doc.push(items, true) // push `true` to the end of the list doc.putObject(items, 0, { hello: "world" }) // overwrite the value 10 with an object with a key and value - doc.del(items, 1) // delete "box" + doc.delete(items, 1) // delete "box" doc.splice(items, 2, 0, ["bag", "brick"]) // splice in "bag" and "brick" at position 2 doc.insert(items, 0, "bat") // insert "bat" to the beginning of the list doc.insertObject(items, 1, [1,2]) // insert a list with 2 values at pos 1 @@ -245,10 +245,10 @@ Counters are 64 bit ints that support the increment operation. 
Frequently diffe let doc2 = doc1.fork("bbbbbb") doc2.put("_root", "number", 10) - doc2.inc("_root", "total", 11) + doc2.increment("_root", "total", 11) doc1.put("_root", "number", 20) - doc1.inc("_root", "total", 22) + doc1.increment("_root", "total", 22) doc1.merge(doc2) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 64c1a80b..2b84555a 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -109,7 +109,7 @@ export class Automerge { keys(obj: ObjID, heads?: Heads): string[]; text(obj: ObjID, heads?: Heads): string; length(obj: ObjID, heads?: Heads): number; - materialize(obj?: ObjID, heas?: Heads): any; + materialize(obj?: ObjID, heads?: Heads): any; // transactions commit(message?: string, time?: number): Hash; From a791714f74d3d09e5103ca78aa7a1677466e0f8c Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 8 Apr 2022 18:34:04 -0400 Subject: [PATCH 270/730] extend documentation --- automerge-wasm/README.md | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md index dff6a5fd..31670278 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -14,7 +14,7 @@ The term Actor, Object Id and Heads are used through this documentation. Detail An Actor is a unique id that distinguishes a single writer to a document. It can be any hex string. -An Object id uniquely identifies a Map, List or Text object within a document. This id comes as a string in the form of `{number}@{actor}` - so `"10@aabbcc"` for example. The string `"_root"` or `"/"` can also be used to refer to the document root. These strings are durable and can be used on any descendant or copy of the document that generated them. +An Object id uniquely identifies a Map, List or Text object within a document. It can be treated as an opaque string and can be used across documents. This id comes as a string in the form of `{number}@{actor}` - so `"10@aabbcc"` for example. 
The string `"_root"` or `"/"` can also be used to refer to the document root. These strings are durable and can be used on any descendant or copy of the document that generated them. Heads refers to a set of hashes that uniquely identifies a point in time in a document's history. Heads are useful for comparing documents state or retrieving past states from the document. @@ -89,7 +89,7 @@ While int vs uint vs f64 matters little in javascript, Automerge is a cross plat ### Automerge Object Types -Automerge WASM supports 3 object types. Maps, lists, and text. Maps are key value stores where the values can be any scalar type or any object type. Lists are numerically indexed sets of data that can hold any scalar or any object type. Text is numerically indexed sets of grapheme clusters. +Automerge WASM supports 3 object types. Maps, lists, and text. Maps are key value stores where the values can be any scalar type or any object type. Lists are numerically indexed sets of data that can hold any scalar or any object type. ```javascript import { create } from "automerge-wasm" @@ -187,7 +187,7 @@ Lists are index addressable sets of values. These values can be any scalar or o ### Text -Text is a specialized list type intended for modifying a text document. The primary way to interact with a text document is via the slice operation. Non text can be inserted into a text document and will be represented with the unicode object replacement character. +Text is a specialized list type intended for modifying a text document. The primary way to interact with a text document is via the `splice()` method. Spliced strings will be indexable by character (important to note for platforms that index by graphmeme cluster). Non text can be inserted into a text document and will be represented with the unicode object replacement character. 
```javascript let doc = create("aaaaaa") @@ -383,6 +383,22 @@ The `load()` function takes a `Uint8Array()` of bytes produced in this way and c doc1.free(); doc2.free(); doc3.free(); doc4.free() ``` +One interesting feature of automerge binary saves is that they can be concatenated together in any order and can still be loaded into a coherent merged document. + +```javascript +import { load } from "automerge-wasm" +import * as fs from "fs" + +let file1 = fs.readFileSync("automerge_save_1"); +let file2 = fs.readFileSync("automerge_save_2"); + +let docA = load(file1).merge(load(file2)) +let docB = load(Buffer.concat([ file1, file2 ])) + +assert.deepEqual(docA.materialize("/"), docB.materialize("/")) +assert.equal(docA.save(), docB.save()) +``` + ### Syncing When syncing a document the `generateSyncMessage()` and `receiveSyncMessage()` methods will produce and consume sync messages. A sync state object will need to be managed for the duration of the connection (created by the function `initSyncState()` and can be serialized to a Uint8Array() to preserve sync state with the `encodeSyncState()` and `decodeSyncState()` functions. 
From 99dc6e2314820590e54bfba6f06e8dcc3084a410 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 8 Apr 2022 18:55:53 -0400 Subject: [PATCH 271/730] fix smol_str dep --- automerge/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index c6f86e65..60a721ee 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -20,7 +20,7 @@ itertools = "^0.10.3" flate2 = "^1.0.22" nonzero_ext = "^0.2.0" uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] } -smol_str = "^0.1.21" +smol_str = { version = "^0.1.22", features=["serde"] } tracing = { version = "^0.1.29", features = ["log"] } fxhash = "^0.2.1" tinyvec = { version = "^1.5.1", features = ["alloc"] } From 93870d4127bca413cc37afbdfed8462957ebb1f9 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 8 Apr 2022 18:58:52 -0400 Subject: [PATCH 272/730] smol str issue --- automerge/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index 60a721ee..c3fda321 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -20,7 +20,7 @@ itertools = "^0.10.3" flate2 = "^1.0.22" nonzero_ext = "^0.2.0" uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] } -smol_str = { version = "^0.1.22", features=["serde"] } +smol_str = { version = "^0.1.21", features=["serde"] } tracing = { version = "^0.1.29", features = ["log"] } fxhash = "^0.2.1" tinyvec = { version = "^1.5.1", features = ["alloc"] } From cdfc2d056f719ee3a83b4745a8503ccdeff420c5 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 15 Apr 2022 14:39:44 +0100 Subject: [PATCH 273/730] Add double ended iterator for Range and Values --- automerge/src/automerge.rs | 72 +++++++++++++++++++++++++++++++++ automerge/src/query/range.rs | 28 ++++++++++++- automerge/src/query/range_at.rs | 30 +++++++++++++- automerge/src/range.rs | 9 +++++ automerge/src/range_at.rs | 9 +++++ automerge/src/values.rs | 9 
+++++ automerge/src/values_at.rs | 9 +++++ 7 files changed, 164 insertions(+), 2 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 1fb4ec76..dfb850f0 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1618,6 +1618,78 @@ mod tests { ); } + #[test] + fn range_iter_map_rev() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 3).unwrap(); + tx.put(ROOT, "b", 4).unwrap(); + tx.put(ROOT, "c", 5).unwrap(); + tx.put(ROOT, "d", 6).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 7).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 8).unwrap(); + tx.put(ROOT, "d", 9).unwrap(); + tx.commit(); + let actor = doc.get_actor(); + assert_eq!(doc.range(ROOT, ..).rev().count(), 4); + + let mut range = doc.range(ROOT, Prop::Map("b".into()).."d".into()).rev(); + assert_eq!( + range.next(), + Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!(range.next(), None); + + let mut range = doc.range(ROOT, Prop::Map("b".into())..="d".into()).rev(); + assert_eq!( + range.next(), + Some(("d".into(), 9.into(), ExId::Id(7, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!(range.next(), None); + + let mut range = doc.range(ROOT, ..=Prop::Map("c".into())).rev(); + assert_eq!( + range.next(), + Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("a".into(), 8.into(), ExId::Id(6, actor.clone(), 0))) + ); + assert_eq!(range.next(), None); + + let range = doc.range(ROOT, 
Prop::Map("a".into())..).rev(); + assert_eq!( + range.collect::>(), + vec![ + ("d".into(), 9.into(), ExId::Id(7, actor.clone(), 0)), + ("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0)), + ("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0)), + ("a".into(), 8.into(), ExId::Id(6, actor.clone(), 0)), + ] + ); + } + #[test] fn range_iter_seq() { let mut doc = Automerge::new(); diff --git a/automerge/src/query/range.rs b/automerge/src/query/range.rs index 0b165999..3bae5e19 100644 --- a/automerge/src/query/range.rs +++ b/automerge/src/query/range.rs @@ -11,6 +11,8 @@ pub(crate) struct Range<'a, R: RangeBounds> { /// number of visible elements seen. seen: usize, last_key: Option, + index_back: usize, + last_key_back: Option, root_child: &'a OpTreeNode, meta: &'a OpSetMetadata, } @@ -22,6 +24,8 @@ impl<'a, R: RangeBounds> Range<'a, R> { index: 0, seen: 0, last_key: None, + index_back: root_child.len(), + last_key_back: None, root_child, meta, } @@ -32,7 +36,7 @@ impl<'a, R: RangeBounds> Iterator for Range<'a, R> { type Item = (Key, Value<'a>, OpId); fn next(&mut self) -> Option { - for i in self.index..self.root_child.len() { + for i in self.index..self.index_back { let op = self.root_child.get(i)?; self.index += 1; if Some(op.elemid_or_key()) != self.last_key && op.visible() { @@ -52,3 +56,25 @@ impl<'a, R: RangeBounds> Iterator for Range<'a, R> { None } } + +impl<'a, R: RangeBounds> DoubleEndedIterator for Range<'a, R> { + fn next_back(&mut self) -> Option { + for i in (self.index..self.index_back).rev() { + let op = self.root_child.get(i)?; + self.index_back -= 1; + if Some(op.elemid_or_key()) != self.last_key_back && op.visible() { + self.last_key_back = Some(op.elemid_or_key()); + let contains = match op.key { + Key::Map(m) => self + .range + .contains(&Prop::Map(self.meta.props.get(m).clone())), + Key::Seq(_) => panic!("can't iterate through lists backwards"), + }; + if contains { + return Some((op.elemid_or_key(), op.value(), op.id)); + } + } + } + None + } +} 
diff --git a/automerge/src/query/range_at.rs b/automerge/src/query/range_at.rs index 5d56ff52..0a573604 100644 --- a/automerge/src/query/range_at.rs +++ b/automerge/src/query/range_at.rs @@ -17,6 +17,10 @@ pub(crate) struct RangeAt<'a, R: RangeBounds> { /// number of visible elements seen. seen: usize, last_key: Option, + + index_back: usize, + last_key_back: Option, + root_child: &'a OpTreeNode, meta: &'a OpSetMetadata, } @@ -35,6 +39,8 @@ impl<'a, R: RangeBounds> RangeAt<'a, R> { index: 0, seen: 0, last_key: None, + index_back: root_child.len(), + last_key_back: None, root_child, meta, } @@ -45,7 +51,7 @@ impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { type Item = (Key, Value<'a>, OpId); fn next(&mut self) -> Option { - for i in self.index..self.root_child.len() { + for i in self.index..self.index_back { let op = self.root_child.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; @@ -66,3 +72,25 @@ impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { None } } + +impl<'a, R: RangeBounds> DoubleEndedIterator for RangeAt<'a, R> { + fn next_back(&mut self) -> Option { + for i in (self.index..self.index_back).rev() { + let op = self.root_child.get(i)?; + self.index_back -= 1; + if Some(op.elemid_or_key()) != self.last_key_back && op.visible() { + self.last_key_back = Some(op.elemid_or_key()); + let contains = match op.key { + Key::Map(m) => self + .range + .contains(&Prop::Map(self.meta.props.get(m).clone())), + Key::Seq(_) => panic!("can't iterate through lists backwards"), + }; + if contains { + return Some((op.elemid_or_key(), op.value(), op.id)); + } + } + } + None + } +} diff --git a/automerge/src/range.rs b/automerge/src/range.rs index b0127589..9ac54d1d 100644 --- a/automerge/src/range.rs +++ b/automerge/src/range.rs @@ -24,3 +24,12 @@ impl<'a, R: RangeBounds> Iterator for Range<'a, R> { .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) } } + +impl<'a, R: RangeBounds> DoubleEndedIterator 
for Range<'a, R> { + fn next_back(&mut self) -> Option { + self.range + .as_mut()? + .next_back() + .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + } +} diff --git a/automerge/src/range_at.rs b/automerge/src/range_at.rs index dc9c8493..0fb8b16b 100644 --- a/automerge/src/range_at.rs +++ b/automerge/src/range_at.rs @@ -24,3 +24,12 @@ impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) } } + +impl<'a, R: RangeBounds> DoubleEndedIterator for RangeAt<'a, R> { + fn next_back(&mut self) -> Option { + self.range + .as_mut()? + .next_back() + .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + } +} diff --git a/automerge/src/values.rs b/automerge/src/values.rs index 4c529522..1b58cf94 100644 --- a/automerge/src/values.rs +++ b/automerge/src/values.rs @@ -24,3 +24,12 @@ impl<'a> Iterator for Values<'a> { .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) } } + +impl<'a> DoubleEndedIterator for Values<'a> { + fn next_back(&mut self) -> Option { + self.range + .as_mut()? + .next_back() + .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + } +} diff --git a/automerge/src/values_at.rs b/automerge/src/values_at.rs index a57e626c..0af4891e 100644 --- a/automerge/src/values_at.rs +++ b/automerge/src/values_at.rs @@ -24,3 +24,12 @@ impl<'a> Iterator for ValuesAt<'a> { .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) } } + +impl<'a> DoubleEndedIterator for ValuesAt<'a> { + fn next_back(&mut self) -> Option { + self.range + .as_mut()? 
+ .next_back() + .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + } +} From fb3b740a57212d09ec7df8ac22a3deced79e8e48 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 15 Apr 2022 14:59:52 +0100 Subject: [PATCH 274/730] Make range just be over maps --- automerge/src/autocommit.rs | 4 +- automerge/src/automerge.rs | 146 ++---------------- automerge/src/op_set.rs | 6 +- automerge/src/op_tree.rs | 6 +- automerge/src/query/range.rs | 30 ++-- automerge/src/query/range_at.rs | 24 +-- automerge/src/range.rs | 10 +- automerge/src/range_at.rs | 10 +- .../src/transaction/manual_transaction.rs | 4 +- automerge/src/transaction/transactable.rs | 4 +- 10 files changed, 52 insertions(+), 192 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index eea16999..27194a63 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -241,11 +241,11 @@ impl Transactable for AutoCommit { self.doc.keys_at(obj, heads) } - fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range { + fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range { self.doc.range(obj, range) } - fn range_at, R: RangeBounds>( + fn range_at, R: RangeBounds>( &self, obj: O, range: R, diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index dfb850f0..d9049621 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -380,11 +380,8 @@ impl Automerge { } } - /// Iterate over the keys and values of the object `obj` in the given range. - /// - /// For a map the keys are the keys of the map. - /// For a list the keys are the element ids (opids) encoded as strings. - pub fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range { + /// Iterate over the keys and values of the map `obj` in the given range. 
+ pub fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { let iter_range = self.ops.range(obj, range); Range::new(self, iter_range) @@ -394,7 +391,7 @@ impl Automerge { } /// Historical version of [`range`](Self::range). - pub fn range_at, R: RangeBounds>( + pub fn range_at, R: RangeBounds>( &self, obj: O, range: R, @@ -1565,7 +1562,7 @@ mod tests { let actor = doc.get_actor(); assert_eq!(doc.range(ROOT, ..).count(), 4); - let mut range = doc.range(ROOT, Prop::Map("b".into()).."d".into()); + let mut range = doc.range(ROOT, "b".to_owned().."d".into()); assert_eq!( range.next(), Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) @@ -1576,7 +1573,7 @@ mod tests { ); assert_eq!(range.next(), None); - let mut range = doc.range(ROOT, Prop::Map("b".into())..="d".into()); + let mut range = doc.range(ROOT, "b".to_owned()..="d".into()); assert_eq!( range.next(), Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) @@ -1591,7 +1588,7 @@ mod tests { ); assert_eq!(range.next(), None); - let mut range = doc.range(ROOT, ..=Prop::Map("c".into())); + let mut range = doc.range(ROOT, ..="c".to_owned()); assert_eq!( range.next(), Some(("a".into(), 8.into(), ExId::Id(6, actor.clone(), 0))) @@ -1606,7 +1603,7 @@ mod tests { ); assert_eq!(range.next(), None); - let range = doc.range(ROOT, Prop::Map("a".into())..); + let range = doc.range(ROOT, "a".to_owned()..); assert_eq!( range.collect::>(), vec![ @@ -1637,7 +1634,7 @@ mod tests { let actor = doc.get_actor(); assert_eq!(doc.range(ROOT, ..).rev().count(), 4); - let mut range = doc.range(ROOT, Prop::Map("b".into()).."d".into()).rev(); + let mut range = doc.range(ROOT, "b".to_owned().."d".into()).rev(); assert_eq!( range.next(), Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) @@ -1648,7 +1645,7 @@ mod tests { ); assert_eq!(range.next(), None); - let mut range = doc.range(ROOT, Prop::Map("b".into())..="d".into()).rev(); + let mut range = doc.range(ROOT, 
"b".to_owned()..="d".into()).rev(); assert_eq!( range.next(), Some(("d".into(), 9.into(), ExId::Id(7, actor.clone(), 0))) @@ -1663,7 +1660,7 @@ mod tests { ); assert_eq!(range.next(), None); - let mut range = doc.range(ROOT, ..=Prop::Map("c".into())).rev(); + let mut range = doc.range(ROOT, ..="c".to_owned()).rev(); assert_eq!( range.next(), Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) @@ -1678,7 +1675,7 @@ mod tests { ); assert_eq!(range.next(), None); - let range = doc.range(ROOT, Prop::Map("a".into())..).rev(); + let range = doc.range(ROOT, "a".to_owned()..).rev(); assert_eq!( range.collect::>(), vec![ @@ -1690,127 +1687,6 @@ mod tests { ); } - #[test] - fn range_iter_seq() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); - tx.insert(&list, 0, 3).unwrap(); - tx.insert(&list, 1, 4).unwrap(); - tx.insert(&list, 2, 5).unwrap(); - tx.insert(&list, 3, 6).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(&list, 0, 7).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(&list, 0, 8).unwrap(); - tx.put(&list, 3, 9).unwrap(); - tx.commit(); - let actor = doc.get_actor(); - assert_eq!(doc.range(&list, ..).count(), 4); - - let mut range = doc.range(&list, Prop::Seq(1)..3.into()); - assert_eq!( - range.next(), - Some(( - format!("3@{}", actor), - 4.into(), - ExId::Id(3, actor.clone(), 0) - )) - ); - assert_eq!( - range.next(), - Some(( - format!("4@{}", actor), - 5.into(), - ExId::Id(4, actor.clone(), 0) - )) - ); - assert_eq!(range.next(), None); - - let mut range = doc.range(&list, Prop::Seq(1)..=3.into()); - assert_eq!( - range.next(), - Some(( - format!("3@{}", actor), - 4.into(), - ExId::Id(3, actor.clone(), 0) - )) - ); - assert_eq!( - range.next(), - Some(( - format!("4@{}", actor), - 5.into(), - ExId::Id(4, actor.clone(), 0) - )) - ); - assert_eq!( - range.next(), - Some(( - format!("5@{}", actor), - 9.into(), - ExId::Id(8, 
actor.clone(), 0) - )) - ); - assert_eq!(range.next(), None); - - let mut range = doc.range(&list, ..Prop::Seq(3)); - assert_eq!( - range.next(), - Some(( - format!("2@{}", actor), - 8.into(), - ExId::Id(7, actor.clone(), 0) - )) - ); - assert_eq!( - range.next(), - Some(( - format!("3@{}", actor), - 4.into(), - ExId::Id(3, actor.clone(), 0) - )) - ); - assert_eq!( - range.next(), - Some(( - format!("4@{}", actor), - 5.into(), - ExId::Id(4, actor.clone(), 0) - )) - ); - assert_eq!(range.next(), None); - - let range = doc.range(&list, ..); - assert_eq!( - range.collect::>(), - vec![ - ( - format!("2@{}", actor), - 8.into(), - ExId::Id(7, actor.clone(), 0) - ), - ( - format!("3@{}", actor), - 4.into(), - ExId::Id(3, actor.clone(), 0) - ), - ( - format!("4@{}", actor), - 5.into(), - ExId::Id(4, actor.clone(), 0) - ), - ( - format!("5@{}", actor), - 9.into(), - ExId::Id(8, actor.clone(), 0) - ), - ] - ); - } - #[test] fn rolling_back_transaction_has_no_effect() { let mut doc = Automerge::new(); diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index b6480a07..b1e71fda 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -3,7 +3,7 @@ use crate::indexed_cache::IndexedCache; use crate::op_tree::OpTree; use crate::query::{self, OpIdSearch, TreeQuery}; use crate::types::{ActorId, Key, ObjId, Op, OpId, OpType}; -use crate::{ObjType, Prop}; +use crate::ObjType; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::HashMap; @@ -68,7 +68,7 @@ impl OpSetInternal { } } - pub fn range>(&self, obj: ObjId, range: R) -> Option> { + pub fn range>(&self, obj: ObjId, range: R) -> Option> { if let Some(tree) = self.trees.get(&obj) { tree.internal.range(range, &self.m) } else { @@ -76,7 +76,7 @@ impl OpSetInternal { } } - pub fn range_at>( + pub fn range_at>( &self, obj: ObjId, range: R, diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 2113246a..9f9a50c5 100644 --- a/automerge/src/op_tree.rs +++ 
b/automerge/src/op_tree.rs @@ -12,7 +12,7 @@ use crate::{ }; use crate::{ types::{ObjId, Op, OpId}, - ObjType, Prop, + ObjType, }; use std::collections::HashSet; @@ -70,7 +70,7 @@ impl OpTreeInternal { .map(|root| query::KeysAt::new(root, clock)) } - pub fn range<'a, R: RangeBounds>( + pub fn range<'a, R: RangeBounds>( &'a self, range: R, meta: &'a OpSetMetadata, @@ -80,7 +80,7 @@ impl OpTreeInternal { .map(|node| query::Range::new(range, node, meta)) } - pub fn range_at<'a, R: RangeBounds>( + pub fn range_at<'a, R: RangeBounds>( &'a self, range: R, meta: &'a OpSetMetadata, diff --git a/automerge/src/query/range.rs b/automerge/src/query/range.rs index 3bae5e19..08619290 100644 --- a/automerge/src/query/range.rs +++ b/automerge/src/query/range.rs @@ -1,15 +1,13 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::types::{Key, OpId}; -use crate::{Prop, Value}; +use crate::Value; use std::fmt::Debug; use std::ops::RangeBounds; #[derive(Debug)] -pub(crate) struct Range<'a, R: RangeBounds> { +pub(crate) struct Range<'a, R: RangeBounds> { range: R, index: usize, - /// number of visible elements seen. 
- seen: usize, last_key: Option, index_back: usize, last_key_back: Option, @@ -17,12 +15,11 @@ pub(crate) struct Range<'a, R: RangeBounds> { meta: &'a OpSetMetadata, } -impl<'a, R: RangeBounds> Range<'a, R> { +impl<'a, R: RangeBounds> Range<'a, R> { pub(crate) fn new(range: R, root_child: &'a OpTreeNode, meta: &'a OpSetMetadata) -> Self { Self { range, index: 0, - seen: 0, last_key: None, index_back: root_child.len(), last_key_back: None, @@ -32,24 +29,21 @@ impl<'a, R: RangeBounds> Range<'a, R> { } } -impl<'a, R: RangeBounds> Iterator for Range<'a, R> { +impl<'a, R: RangeBounds> Iterator for Range<'a, R> { type Item = (Key, Value<'a>, OpId); fn next(&mut self) -> Option { for i in self.index..self.index_back { let op = self.root_child.get(i)?; self.index += 1; - if Some(op.elemid_or_key()) != self.last_key && op.visible() { - self.last_key = Some(op.elemid_or_key()); + if Some(op.key) != self.last_key && op.visible() { + self.last_key = Some(op.key); let contains = match op.key { - Key::Map(m) => self - .range - .contains(&Prop::Map(self.meta.props.get(m).clone())), - Key::Seq(_) => self.range.contains(&Prop::Seq(self.seen)), + Key::Map(m) => self.range.contains(self.meta.props.get(m)), + Key::Seq(_) => panic!("found list op in range query"), }; - self.seen += 1; if contains { - return Some((op.elemid_or_key(), op.value(), op.id)); + return Some((op.key, op.value(), op.id)); } } } @@ -57,7 +51,7 @@ impl<'a, R: RangeBounds> Iterator for Range<'a, R> { } } -impl<'a, R: RangeBounds> DoubleEndedIterator for Range<'a, R> { +impl<'a, R: RangeBounds> DoubleEndedIterator for Range<'a, R> { fn next_back(&mut self) -> Option { for i in (self.index..self.index_back).rev() { let op = self.root_child.get(i)?; @@ -65,9 +59,7 @@ impl<'a, R: RangeBounds> DoubleEndedIterator for Range<'a, R> { if Some(op.elemid_or_key()) != self.last_key_back && op.visible() { self.last_key_back = Some(op.elemid_or_key()); let contains = match op.key { - Key::Map(m) => self - .range - 
.contains(&Prop::Map(self.meta.props.get(m).clone())), + Key::Map(m) => self.range.contains(self.meta.props.get(m)), Key::Seq(_) => panic!("can't iterate through lists backwards"), }; if contains { diff --git a/automerge/src/query/range_at.rs b/automerge/src/query/range_at.rs index 0a573604..75bbfd57 100644 --- a/automerge/src/query/range_at.rs +++ b/automerge/src/query/range_at.rs @@ -1,21 +1,19 @@ use crate::clock::Clock; use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::types::{Key, OpId}; -use crate::{Prop, Value}; +use crate::Value; use std::fmt::Debug; use std::ops::RangeBounds; use super::VisWindow; #[derive(Debug)] -pub(crate) struct RangeAt<'a, R: RangeBounds> { +pub(crate) struct RangeAt<'a, R: RangeBounds> { clock: Clock, window: VisWindow, range: R, index: usize, - /// number of visible elements seen. - seen: usize, last_key: Option, index_back: usize, @@ -25,7 +23,7 @@ pub(crate) struct RangeAt<'a, R: RangeBounds> { meta: &'a OpSetMetadata, } -impl<'a, R: RangeBounds> RangeAt<'a, R> { +impl<'a, R: RangeBounds> RangeAt<'a, R> { pub(crate) fn new( range: R, root_child: &'a OpTreeNode, @@ -37,7 +35,6 @@ impl<'a, R: RangeBounds> RangeAt<'a, R> { window: VisWindow::default(), range, index: 0, - seen: 0, last_key: None, index_back: root_child.len(), last_key_back: None, @@ -47,7 +44,7 @@ impl<'a, R: RangeBounds> RangeAt<'a, R> { } } -impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { +impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { type Item = (Key, Value<'a>, OpId); fn next(&mut self) -> Option { @@ -58,12 +55,9 @@ impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { if Some(op.elemid_or_key()) != self.last_key && visible { self.last_key = Some(op.elemid_or_key()); let contains = match op.key { - Key::Map(m) => self - .range - .contains(&Prop::Map(self.meta.props.get(m).clone())), - Key::Seq(_) => self.range.contains(&Prop::Seq(self.seen)), + Key::Map(m) => self.range.contains(self.meta.props.get(m)), + Key::Seq(_) => panic!("found 
list op in range query"), }; - self.seen += 1; if contains { return Some((op.elemid_or_key(), op.value(), op.id)); } @@ -73,7 +67,7 @@ impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { } } -impl<'a, R: RangeBounds> DoubleEndedIterator for RangeAt<'a, R> { +impl<'a, R: RangeBounds> DoubleEndedIterator for RangeAt<'a, R> { fn next_back(&mut self) -> Option { for i in (self.index..self.index_back).rev() { let op = self.root_child.get(i)?; @@ -81,9 +75,7 @@ impl<'a, R: RangeBounds> DoubleEndedIterator for RangeAt<'a, R> { if Some(op.elemid_or_key()) != self.last_key_back && op.visible() { self.last_key_back = Some(op.elemid_or_key()); let contains = match op.key { - Key::Map(m) => self - .range - .contains(&Prop::Map(self.meta.props.get(m).clone())), + Key::Map(m) => self.range.contains(self.meta.props.get(m)), Key::Seq(_) => panic!("can't iterate through lists backwards"), }; if contains { diff --git a/automerge/src/range.rs b/automerge/src/range.rs index 9ac54d1d..22c6884a 100644 --- a/automerge/src/range.rs +++ b/automerge/src/range.rs @@ -1,20 +1,20 @@ use crate::{exid::ExId, Value}; use std::ops::RangeBounds; -use crate::{query, Automerge, Prop}; +use crate::{query, Automerge}; -pub struct Range<'a, R: RangeBounds> { +pub struct Range<'a, R: RangeBounds> { range: Option>, doc: &'a Automerge, } -impl<'a, R: RangeBounds> Range<'a, R> { +impl<'a, R: RangeBounds> Range<'a, R> { pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { Self { range, doc } } } -impl<'a, R: RangeBounds> Iterator for Range<'a, R> { +impl<'a, R: RangeBounds> Iterator for Range<'a, R> { type Item = (String, Value<'a>, ExId); fn next(&mut self) -> Option { @@ -25,7 +25,7 @@ impl<'a, R: RangeBounds> Iterator for Range<'a, R> { } } -impl<'a, R: RangeBounds> DoubleEndedIterator for Range<'a, R> { +impl<'a, R: RangeBounds> DoubleEndedIterator for Range<'a, R> { fn next_back(&mut self) -> Option { self.range .as_mut()? 
diff --git a/automerge/src/range_at.rs b/automerge/src/range_at.rs index 0fb8b16b..6a0cacfe 100644 --- a/automerge/src/range_at.rs +++ b/automerge/src/range_at.rs @@ -1,20 +1,20 @@ use crate::{exid::ExId, Value}; use std::ops::RangeBounds; -use crate::{query, Automerge, Prop}; +use crate::{query, Automerge}; -pub struct RangeAt<'a, R: RangeBounds> { +pub struct RangeAt<'a, R: RangeBounds> { range: Option>, doc: &'a Automerge, } -impl<'a, R: RangeBounds> RangeAt<'a, R> { +impl<'a, R: RangeBounds> RangeAt<'a, R> { pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { Self { range, doc } } } -impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { +impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { type Item = (String, Value<'a>, ExId); fn next(&mut self) -> Option { @@ -25,7 +25,7 @@ impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { } } -impl<'a, R: RangeBounds> DoubleEndedIterator for RangeAt<'a, R> { +impl<'a, R: RangeBounds> DoubleEndedIterator for RangeAt<'a, R> { fn next_back(&mut self) -> Option { self.range .as_mut()? diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index da189e7a..3478dc3d 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -184,11 +184,11 @@ impl<'a> Transactable for Transaction<'a> { self.doc.keys_at(obj, heads) } - fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range { + fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range { self.doc.range(obj, range) } - fn range_at, R: RangeBounds>( + fn range_at, R: RangeBounds>( &self, obj: O, range: R, diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 96f0bea5..36851a4d 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -102,9 +102,9 @@ pub trait Transactable { /// Get the keys of the given object at a point in history. 
fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt; - fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range; + fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range; - fn range_at, R: RangeBounds>( + fn range_at, R: RangeBounds>( &self, obj: O, range: R, From 122b227101c3b404aa2557d6884d628ce11b49c6 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 15 Apr 2022 20:47:02 +0100 Subject: [PATCH 275/730] Borrow the key --- automerge/src/automerge.rs | 48 ++++++++++++++++----------------- automerge/src/query/range.rs | 22 +++++++-------- automerge/src/query/range_at.rs | 26 +++++++++--------- automerge/src/range.rs | 6 ++--- automerge/src/range_at.rs | 6 ++--- automerge/src/values.rs | 6 ++--- automerge/src/values_at.rs | 6 ++--- 7 files changed, 60 insertions(+), 60 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index d9049621..dc1549c7 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1565,41 +1565,41 @@ mod tests { let mut range = doc.range(ROOT, "b".to_owned().."d".into()); assert_eq!( range.next(), - Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) + Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) ); assert_eq!( range.next(), - Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) + Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) ); assert_eq!(range.next(), None); let mut range = doc.range(ROOT, "b".to_owned()..="d".into()); assert_eq!( range.next(), - Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) + Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) ); assert_eq!( range.next(), - Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) + Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) ); assert_eq!( range.next(), - Some(("d".into(), 9.into(), ExId::Id(7, actor.clone(), 0))) + Some(("d", 9.into(), ExId::Id(7, actor.clone(), 0))) ); assert_eq!(range.next(), None); let mut range = doc.range(ROOT, ..="c".to_owned()); assert_eq!( 
range.next(), - Some(("a".into(), 8.into(), ExId::Id(6, actor.clone(), 0))) + Some(("a", 8.into(), ExId::Id(6, actor.clone(), 0))) ); assert_eq!( range.next(), - Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) + Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) ); assert_eq!( range.next(), - Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) + Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) ); assert_eq!(range.next(), None); @@ -1607,10 +1607,10 @@ mod tests { assert_eq!( range.collect::>(), vec![ - ("a".into(), 8.into(), ExId::Id(6, actor.clone(), 0)), - ("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0)), - ("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0)), - ("d".into(), 9.into(), ExId::Id(7, actor.clone(), 0)), + ("a", 8.into(), ExId::Id(6, actor.clone(), 0)), + ("b", 4.into(), ExId::Id(2, actor.clone(), 0)), + ("c", 5.into(), ExId::Id(3, actor.clone(), 0)), + ("d", 9.into(), ExId::Id(7, actor.clone(), 0)), ] ); } @@ -1637,41 +1637,41 @@ mod tests { let mut range = doc.range(ROOT, "b".to_owned().."d".into()).rev(); assert_eq!( range.next(), - Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) + Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) ); assert_eq!( range.next(), - Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) + Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) ); assert_eq!(range.next(), None); let mut range = doc.range(ROOT, "b".to_owned()..="d".into()).rev(); assert_eq!( range.next(), - Some(("d".into(), 9.into(), ExId::Id(7, actor.clone(), 0))) + Some(("d", 9.into(), ExId::Id(7, actor.clone(), 0))) ); assert_eq!( range.next(), - Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) + Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) ); assert_eq!( range.next(), - Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) + Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) ); assert_eq!(range.next(), None); let mut range = doc.range(ROOT, ..="c".to_owned()).rev(); 
assert_eq!( range.next(), - Some(("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0))) + Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) ); assert_eq!( range.next(), - Some(("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0))) + Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) ); assert_eq!( range.next(), - Some(("a".into(), 8.into(), ExId::Id(6, actor.clone(), 0))) + Some(("a", 8.into(), ExId::Id(6, actor.clone(), 0))) ); assert_eq!(range.next(), None); @@ -1679,10 +1679,10 @@ mod tests { assert_eq!( range.collect::>(), vec![ - ("d".into(), 9.into(), ExId::Id(7, actor.clone(), 0)), - ("c".into(), 5.into(), ExId::Id(3, actor.clone(), 0)), - ("b".into(), 4.into(), ExId::Id(2, actor.clone(), 0)), - ("a".into(), 8.into(), ExId::Id(6, actor.clone(), 0)), + ("d", 9.into(), ExId::Id(7, actor.clone(), 0)), + ("c", 5.into(), ExId::Id(3, actor.clone(), 0)), + ("b", 4.into(), ExId::Id(2, actor.clone(), 0)), + ("a", 8.into(), ExId::Id(6, actor.clone(), 0)), ] ); } diff --git a/automerge/src/query/range.rs b/automerge/src/query/range.rs index 08619290..ed0c8612 100644 --- a/automerge/src/query/range.rs +++ b/automerge/src/query/range.rs @@ -30,7 +30,7 @@ impl<'a, R: RangeBounds> Range<'a, R> { } impl<'a, R: RangeBounds> Iterator for Range<'a, R> { - type Item = (Key, Value<'a>, OpId); + type Item = (&'a str, Value<'a>, OpId); fn next(&mut self) -> Option { for i in self.index..self.index_back { @@ -38,12 +38,12 @@ impl<'a, R: RangeBounds> Iterator for Range<'a, R> { self.index += 1; if Some(op.key) != self.last_key && op.visible() { self.last_key = Some(op.key); - let contains = match op.key { - Key::Map(m) => self.range.contains(self.meta.props.get(m)), + let prop = match op.key { + Key::Map(m) => self.meta.props.get(m), Key::Seq(_) => panic!("found list op in range query"), }; - if contains { - return Some((op.key, op.value(), op.id)); + if self.range.contains(prop) { + return Some((prop, op.value(), op.id)); } } } @@ -56,14 +56,14 @@ impl<'a, R: RangeBounds> 
DoubleEndedIterator for Range<'a, R> { for i in (self.index..self.index_back).rev() { let op = self.root_child.get(i)?; self.index_back -= 1; - if Some(op.elemid_or_key()) != self.last_key_back && op.visible() { - self.last_key_back = Some(op.elemid_or_key()); - let contains = match op.key { - Key::Map(m) => self.range.contains(self.meta.props.get(m)), + if Some(op.key) != self.last_key_back && op.visible() { + self.last_key_back = Some(op.key); + let prop = match op.key { + Key::Map(m) => self.meta.props.get(m), Key::Seq(_) => panic!("can't iterate through lists backwards"), }; - if contains { - return Some((op.elemid_or_key(), op.value(), op.id)); + if self.range.contains(prop) { + return Some((prop, op.value(), op.id)); } } } diff --git a/automerge/src/query/range_at.rs b/automerge/src/query/range_at.rs index 75bbfd57..fc65e265 100644 --- a/automerge/src/query/range_at.rs +++ b/automerge/src/query/range_at.rs @@ -45,21 +45,21 @@ impl<'a, R: RangeBounds> RangeAt<'a, R> { } impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { - type Item = (Key, Value<'a>, OpId); + type Item = (&'a str, Value<'a>, OpId); fn next(&mut self) -> Option { for i in self.index..self.index_back { let op = self.root_child.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; - if Some(op.elemid_or_key()) != self.last_key && visible { - self.last_key = Some(op.elemid_or_key()); - let contains = match op.key { - Key::Map(m) => self.range.contains(self.meta.props.get(m)), + if Some(op.key) != self.last_key && visible { + self.last_key = Some(op.key); + let prop = match op.key { + Key::Map(m) => self.meta.props.get(m), Key::Seq(_) => panic!("found list op in range query"), }; - if contains { - return Some((op.elemid_or_key(), op.value(), op.id)); + if self.range.contains(prop) { + return Some((prop, op.value(), op.id)); } } } @@ -72,14 +72,14 @@ impl<'a, R: RangeBounds> DoubleEndedIterator for RangeAt<'a, R> { for i in (self.index..self.index_back).rev() { let 
op = self.root_child.get(i)?; self.index_back -= 1; - if Some(op.elemid_or_key()) != self.last_key_back && op.visible() { - self.last_key_back = Some(op.elemid_or_key()); - let contains = match op.key { - Key::Map(m) => self.range.contains(self.meta.props.get(m)), + if Some(op.key) != self.last_key_back && op.visible() { + self.last_key_back = Some(op.key); + let prop = match op.key { + Key::Map(m) => self.meta.props.get(m), Key::Seq(_) => panic!("can't iterate through lists backwards"), }; - if contains { - return Some((op.elemid_or_key(), op.value(), op.id)); + if self.range.contains(prop) { + return Some((prop, op.value(), op.id)); } } } diff --git a/automerge/src/range.rs b/automerge/src/range.rs index 22c6884a..b0eff5ad 100644 --- a/automerge/src/range.rs +++ b/automerge/src/range.rs @@ -15,13 +15,13 @@ impl<'a, R: RangeBounds> Range<'a, R> { } impl<'a, R: RangeBounds> Iterator for Range<'a, R> { - type Item = (String, Value<'a>, ExId); + type Item = (&'a str, Value<'a>, ExId); fn next(&mut self) -> Option { self.range .as_mut()? .next() - .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) } } @@ -30,6 +30,6 @@ impl<'a, R: RangeBounds> DoubleEndedIterator for Range<'a, R> { self.range .as_mut()? .next_back() - .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) } } diff --git a/automerge/src/range_at.rs b/automerge/src/range_at.rs index 6a0cacfe..7862b4fb 100644 --- a/automerge/src/range_at.rs +++ b/automerge/src/range_at.rs @@ -15,13 +15,13 @@ impl<'a, R: RangeBounds> RangeAt<'a, R> { } impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { - type Item = (String, Value<'a>, ExId); + type Item = (&'a str, Value<'a>, ExId); fn next(&mut self) -> Option { self.range .as_mut()? 
.next() - .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) } } @@ -30,6 +30,6 @@ impl<'a, R: RangeBounds> DoubleEndedIterator for RangeAt<'a, R> { self.range .as_mut()? .next_back() - .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) } } diff --git a/automerge/src/values.rs b/automerge/src/values.rs index 1b58cf94..de195421 100644 --- a/automerge/src/values.rs +++ b/automerge/src/values.rs @@ -15,13 +15,13 @@ impl<'a> Values<'a> { } impl<'a> Iterator for Values<'a> { - type Item = (String, Value<'a>, ExId); + type Item = (&'a str, Value<'a>, ExId); fn next(&mut self) -> Option { self.range .as_mut()? .next() - .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) } } @@ -30,6 +30,6 @@ impl<'a> DoubleEndedIterator for Values<'a> { self.range .as_mut()? .next_back() - .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) } } diff --git a/automerge/src/values_at.rs b/automerge/src/values_at.rs index 0af4891e..438155d1 100644 --- a/automerge/src/values_at.rs +++ b/automerge/src/values_at.rs @@ -15,13 +15,13 @@ impl<'a> ValuesAt<'a> { } impl<'a> Iterator for ValuesAt<'a> { - type Item = (String, Value<'a>, ExId); + type Item = (&'a str, Value<'a>, ExId); fn next(&mut self) -> Option { self.range .as_mut()? .next() - .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) } } @@ -30,6 +30,6 @@ impl<'a> DoubleEndedIterator for ValuesAt<'a> { self.range .as_mut()? 
.next_back() - .map(|(key, value, id)| (self.doc.to_string(key), value, self.doc.id_to_exid(id))) + .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) } } From a65838076d31e50761c829be62527c31852cc8ef Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 18 Apr 2022 16:15:29 +0100 Subject: [PATCH 276/730] Add parents iterator This allows users to have the convenience of getting all of the parents of an object, whilst allowing them to terminate early when they have found what they need. --- automerge/src/autocommit.rs | 6 ++-- automerge/src/automerge.rs | 28 +++++++++++++++---- automerge/src/lib.rs | 2 ++ automerge/src/parents.rs | 19 +++++++++++++ .../src/transaction/manual_transaction.rs | 4 +-- automerge/src/transaction/transactable.rs | 12 ++++++-- 6 files changed, 57 insertions(+), 14 deletions(-) create mode 100644 automerge/src/parents.rs diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index eea16999..12518fe3 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -3,7 +3,7 @@ use std::ops::RangeBounds; use crate::exid::ExId; use crate::transaction::{CommitOptions, Transactable}; use crate::types::Patch; -use crate::{sync, Keys, KeysAt, ObjType, Range, RangeAt, ScalarValue, Values, ValuesAt}; +use crate::{sync, Keys, KeysAt, ObjType, Parents, Range, RangeAt, ScalarValue, Values, ValuesAt}; use crate::{ transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, @@ -424,7 +424,7 @@ impl Transactable for AutoCommit { self.doc.parent_object(obj) } - fn path_to_object>(&self, obj: O) -> Vec<(ExId, Prop)> { - self.doc.path_to_object(obj) + fn parents(&self, obj: ExId) -> Parents { + self.doc.parents(obj) } } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 1fb4ec76..e32d433f 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -6,6 +6,7 @@ use crate::change::encode_document; use crate::exid::ExId; use 
crate::keys::Keys; use crate::op_set::OpSet; +use crate::parents::Parents; use crate::range::Range; use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner}; use crate::types::{ @@ -331,13 +332,13 @@ impl Automerge { } } + /// Get an iterator over the parents of an object. + pub fn parents(&self, obj: ExId) -> Parents { + Parents { obj, doc: self } + } + pub fn path_to_object>(&self, obj: O) -> Vec<(ExId, Prop)> { - let mut path = Vec::new(); - let mut obj = obj.as_ref().clone(); - while let Some(parent) = self.parent_object(obj) { - obj = parent.0.clone(); - path.push(parent); - } + let mut path = self.parents(obj.as_ref().clone()).collect::>(); path.reverse(); path } @@ -2069,6 +2070,21 @@ mod tests { ); } + #[test] + fn parents_iterator() { + let mut doc = AutoCommit::new(); + let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); + let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); + doc.insert(&list, 0, 2).unwrap(); + let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); + + let mut parents = doc.parents(text); + assert_eq!(parents.next(), Some((list, Prop::Seq(0)))); + assert_eq!(parents.next(), Some((map, Prop::Seq(0)))); + assert_eq!(parents.next(), Some((ROOT, Prop::Map("a".into())))); + assert_eq!(parents.next(), None); + } + #[test] fn can_insert_a_grapheme_into_text() { let mut doc = Automerge::new(); diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 70006ff7..4b2e5c8b 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -39,6 +39,7 @@ mod keys_at; mod legacy; mod op_set; mod op_tree; +mod parents; mod query; mod range; mod range_at; @@ -62,6 +63,7 @@ pub use exid::ExId as ObjId; pub use keys::Keys; pub use keys_at::KeysAt; pub use legacy::Change as ExpandedChange; +pub use parents::Parents; pub use range::Range; pub use range_at::RangeAt; pub use types::{ActorId, AssignPatch, ChangeHash, ObjType, OpType, Patch, Prop}; diff --git a/automerge/src/parents.rs 
b/automerge/src/parents.rs new file mode 100644 index 00000000..6bc16b5c --- /dev/null +++ b/automerge/src/parents.rs @@ -0,0 +1,19 @@ +use crate::{exid::ExId, Automerge, Prop}; + +pub struct Parents<'a> { + pub(crate) obj: ExId, + pub(crate) doc: &'a Automerge, +} + +impl<'a> Iterator for Parents<'a> { + type Item = (ExId, Prop); + + fn next(&mut self) -> Option { + if let Some((obj, prop)) = self.doc.parent_object(&self.obj) { + self.obj = obj.clone(); + Some((obj, prop)) + } else { + None + } + } +} diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index da189e7a..99ce13d3 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -267,8 +267,8 @@ impl<'a> Transactable for Transaction<'a> { self.doc.parent_object(obj) } - fn path_to_object>(&self, obj: O) -> Vec<(ExId, Prop)> { - self.doc.path_to_object(obj) + fn parents(&self, obj: ExId) -> crate::Parents { + self.doc.parents(obj) } } diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 96f0bea5..c7c19470 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -2,8 +2,8 @@ use std::ops::RangeBounds; use crate::exid::ExId; use crate::{ - AutomergeError, ChangeHash, Keys, KeysAt, ObjType, Prop, Range, RangeAt, ScalarValue, Value, - Values, ValuesAt, + AutomergeError, ChangeHash, Keys, KeysAt, ObjType, Parents, Prop, Range, RangeAt, ScalarValue, + Value, Values, ValuesAt, }; /// A way of mutating a document within a single change. @@ -166,5 +166,11 @@ pub trait Transactable { /// at in that object. 
fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)>; - fn path_to_object>(&self, obj: O) -> Vec<(ExId, Prop)>; + fn parents(&self, obj: ExId) -> Parents; + + fn path_to_object>(&self, obj: O) -> Vec<(ExId, Prop)> { + let mut path = self.parents(obj.as_ref().clone()).collect::>(); + path.reverse(); + path + } } From 5923d67beac0cd68d821cee95a9f8c428e6b14e0 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 18 Apr 2022 16:31:13 -0400 Subject: [PATCH 277/730] duplicate changes in the queue could corrupt internal state --- automerge/src/automerge.rs | 4 +++- automerge/tests/test.rs | 20 ++++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 1fb4ec76..7b64d6e2 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -678,7 +678,9 @@ impl Automerge { } } while let Some(c) = self.pop_next_causally_ready_change() { - self.apply_change(c); + if !self.history_index.contains_key(&c.hash) { + self.apply_change(c); + } } Ok(()) } diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 3f4db2ac..5932b28f 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -819,6 +819,26 @@ fn save_restore_complex() { ); } +#[test] +fn handle_repeated_out_of_order_changes() -> Result<(), automerge::AutomergeError> { + let mut doc1 = new_doc(); + let list = doc1.put_object(ROOT, "list", ObjType::List)?; + doc1.insert(&list, 0, "a")?; + let mut doc2 = doc1.fork(); + doc1.insert(&list, 1, "b")?; + doc1.commit(); + doc1.insert(&list, 2, "c")?; + doc1.commit(); + doc1.insert(&list, 3, "d")?; + doc1.commit(); + let changes = doc1.get_changes(&[]); + doc2.apply_changes(changes[2..].iter().cloned().cloned().collect())?; + doc2.apply_changes(changes[2..].iter().cloned().cloned().collect())?; + doc2.apply_changes(changes.iter().cloned().cloned().collect())?; + assert_eq!(doc1.save(), doc2.save()); + Ok(()) +} + #[test] fn list_counter_del() -> Result<(), 
automerge::AutomergeError> { let mut v = vec![ActorId::random(), ActorId::random(), ActorId::random()]; From ab09a7aa5d0518d809fd0d699be5f9fa0427dd0d Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 18 Apr 2022 16:36:03 -0400 Subject: [PATCH 278/730] make test simpler --- automerge/tests/test.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 5932b28f..38a17df9 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -831,10 +831,10 @@ fn handle_repeated_out_of_order_changes() -> Result<(), automerge::AutomergeErro doc1.commit(); doc1.insert(&list, 3, "d")?; doc1.commit(); - let changes = doc1.get_changes(&[]); - doc2.apply_changes(changes[2..].iter().cloned().cloned().collect())?; - doc2.apply_changes(changes[2..].iter().cloned().cloned().collect())?; - doc2.apply_changes(changes.iter().cloned().cloned().collect())?; + let changes = doc1.get_changes(&[]).into_iter().cloned().collect::>(); + doc2.apply_changes(changes[2..].iter().cloned().collect())?; + doc2.apply_changes(changes[2..].iter().cloned().collect())?; + doc2.apply_changes(changes)?; assert_eq!(doc1.save(), doc2.save()); Ok(()) } From c66d8a5b54fe6c92c14ff5de94d89582ef6bce6e Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 18 Apr 2022 16:43:28 -0400 Subject: [PATCH 279/730] fmt --- automerge/tests/test.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 38a17df9..6c1087d2 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -831,7 +831,11 @@ fn handle_repeated_out_of_order_changes() -> Result<(), automerge::AutomergeErro doc1.commit(); doc1.insert(&list, 3, "d")?; doc1.commit(); - let changes = doc1.get_changes(&[]).into_iter().cloned().collect::>(); + let changes = doc1 + .get_changes(&[]) + .into_iter() + .cloned() + .collect::>(); doc2.apply_changes(changes[2..].iter().cloned().collect())?; 
doc2.apply_changes(changes[2..].iter().cloned().collect())?; doc2.apply_changes(changes)?; From 757f1f058aa2a15b81fe415ebe3dd705ffa1ae8a Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 18 Apr 2022 17:03:32 -0400 Subject: [PATCH 280/730] simplify test more --- automerge/tests/test.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 6c1087d2..4907334a 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -836,8 +836,8 @@ fn handle_repeated_out_of_order_changes() -> Result<(), automerge::AutomergeErro .into_iter() .cloned() .collect::>(); - doc2.apply_changes(changes[2..].iter().cloned().collect())?; - doc2.apply_changes(changes[2..].iter().cloned().collect())?; + doc2.apply_changes(changes[2..].to_vec())?; + doc2.apply_changes(changes[2..].to_vec())?; doc2.apply_changes(changes)?; assert_eq!(doc1.save(), doc2.save()); Ok(()) From 9d7798a8c4948b911020b78b486deefca820d651 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 18 Apr 2022 18:41:34 -0400 Subject: [PATCH 281/730] readme updates --- README.md | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index e7a277a8..e2d84bda 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,15 @@ -# Automerge - NEXT +# Automerge RS -This is pretty much a ground up rewrite of automerge-rs. The objective of this -rewrite is to radically simplify the API. The end goal being to produce a library -which is easy to work with both in Rust and from FFI. +This is a rust implementation of the [Automerge](https://github.com/automerge/automerge) file format and network protocol. + +## Status + +This project has 4 components: + +1. *automerge* - a rust implementation of the library. This project is the most mature and being used in a handful of small applications. +2. *automerge-wasm* - a js/wasm interface to the underlying rust library. 
This api is generally mature and in use in a handful of projects as well. +3. *automerge-js* - this is a javascript library using the wasm interface to export the same public api of the primary automerge project. Currently this project passes all of automerge's tests but has not been used in any real project or packaged as an NPM. Alpha testers welcome. +4. *automerge-c* - this is a c library intended to be an ffi integration point for all other languages. It is currently a work in progress and not yet ready for any testing. ## How? @@ -21,13 +28,10 @@ optree and producing state of some kind. User facing operations are exposed on an `Automerge` object, under the covers these operations typically instantiate some `TreeQuery` and run it over the `OpTree`. -## Status - -We have working code which passes all of the tests in the JS test suite. We're -now working on writing a bunch more tests and cleaning up the API. - ## Development +Please feel free to open issues and pull requests. + ### Running CI The steps CI will run are all defined in `./scripts/ci`. Obviously CI will run @@ -61,7 +65,6 @@ To build and test the wasm library: ## cutting a release or doing benchmarking $ yarn release - $ yarn opt ## or set `wasm-opt = false` in Cargo.toml on supported platforms (not arm64 osx) ``` And finally to test the js library. This is where most of the tests reside. 
From d4a904414d4b3b1e5d9ba46932917433451e83c3 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 19 Apr 2022 08:35:44 -0600 Subject: [PATCH 282/730] Squashed commit of the following: commit e1f8d769f44b48cdeb03855ac9e2b223fb229187 Author: Orion Henry Date: Thu Mar 10 08:53:07 2022 -0500 update authors commit 3e5525f1a6541664e5e30ad179b2f217ba7b7422 Merge: f4ba1770 1c21abc5 Author: Orion Henry Date: Wed Mar 9 14:36:29 2022 -0500 Merge pull request #304 from jkankiewicz/c_api_exp Fix "fmt" workflow step violations commit 1c21abc5a37941ce189ff48d994c1ce3dda4827f Author: Jason Kankiewicz Date: Wed Mar 9 11:13:01 2022 -0800 Fix CMake and Rust code formatting issues. commit f4ba1770a96c5200ae124706dffbab327a628c23 Merge: bf1ae609 f41b30d1 Author: Orion Henry Date: Wed Mar 9 12:05:58 2022 -0500 Merge pull request #300 from jkankiewicz/c_api_exp Add unit test suites for the `AMlistSet*` and `AMmapSet*` functions commit f41b30d118ef60965239a02afddbbfb14dc4392f Author: Jason Kankiewicz Date: Tue Mar 8 22:08:36 2022 -0800 Added a brief description of the `AmObjType` enum. Added the `AmStatus` enum to the enum docs page. commit af7386a482d2cb9cee2d188ef443498ddab40ddb Author: Jason Kankiewicz Date: Tue Mar 8 21:50:52 2022 -0800 Added a unit test suite for the `AMlistSet*` functions. commit 1eb70c6eee846b912806d058a43c6e04a9184a62 Author: Jason Kankiewicz Date: Tue Mar 8 21:42:42 2022 -0800 Added the rest of the `AMlistSet*` functions. Started the enum tags at `1` so they won't be inherently false. Alphabetized enum tags for the docs. Improved the docs. commit 6489cba13b49656aa0704d329a90c6e7e4a809e1 Author: Jason Kankiewicz Date: Tue Mar 8 18:01:46 2022 -0800 Alphabetize functions in the docs. commit 74c245b82d49971094d731ef4442b9af14e53c15 Author: Jason Kankiewicz Date: Tue Mar 8 07:54:25 2022 -0800 Fix a typo in `AMmapSetObject()`'s documentation. 
commit b2a879ba4e23c884f2bf43d77e950c1ad8a34c7e Author: Jason Kankiewicz Date: Tue Mar 8 06:24:22 2022 -0800 Append missing EOF linefeed. commit fbf0f29b66ad9109081490754872d149d5575ac6 Merge: c56d54b5 bf1ae609 Author: Jason Kankiewicz Date: Tue Mar 8 01:08:12 2022 -0800 Merge branch 'c_api_exp' of https://github.com/automerge/automerge-rs into c_api_exp commit c56d54b565451c0fe65e86b658e094178579fd25 Author: Jason Kankiewicz Date: Tue Mar 8 01:07:11 2022 -0800 Added unit test cases for the new `AMmapSet*` functions by @orionz. Moved the unit test cases for the `AMmapSet*` functions into their own unit test suite. commit 7e59b5576034bc0404fe793758088fea2eae6330 Author: Jason Kankiewicz Date: Tue Mar 8 01:01:47 2022 -0800 Edited the Doxygen documentation. commit bf1ae6091318d005c49407e10a20f4822e41adc1 Author: Orion Henry Date: Mon Mar 7 11:59:22 2022 -0500 fmt commit e82a7cc78e166cc05e90138df040cdcdb4d83c13 Merge: a44e69d2 965c2d56 Author: Orion Henry Date: Mon Mar 7 11:55:32 2022 -0500 Merge pull request #299 from jkankiewicz/c_api_exp Enable unit testing of the C API commit 965c2d56c32068d3af07705aff7d0a4386393004 Author: Jason Kankiewicz Date: Mon Mar 7 06:37:36 2022 -0800 Enable unit testing of the C API. commit a44e69d2c72ff3e712c1109e3cf2f0d00f2af0c0 Author: Orion Henry Date: Sun Mar 6 14:00:46 2022 -0500 remove datatype mapset commit 88153c44e767b47fa1bb0bdd0d4bc5d43f2b6a7a Merge: 41512e9c c6194e97 Author: Orion Henry Date: Sun Mar 6 10:32:39 2022 -0500 Merge pull request #298 from jkankiewicz/rebase_c_api_exp Rebase the "c_api_exp" branch on the "experiment" branch commit c6194e973235ac43bcdf97b19b0274653b2e6b99 Merge: a2d745c8 41512e9c Author: Jason Kankiewicz Date: Sun Mar 6 01:09:56 2022 -0800 Merge branch 'c_api_exp' into rebase_c_api_exp commit a2d745c8d9c649d444339dc5e1df733af0a5bfec Author: Jason Kankiewicz Date: Sun Mar 6 00:44:37 2022 -0800 Replace the `utils::import_value` function with the `utils::import_scalar` function. 
Exclude `# Safety` comments from the documentation. commit 0681e28b4063988fada6d632366b471996d6d66a Author: Orion Henry Date: Thu Mar 3 16:04:17 2022 -0500 support new as_ref api commit 916e23fcc20aae9a27f17e9b98f267fac364d5bb Author: Orion Henry Date: Thu Mar 3 15:56:27 2022 -0500 fmt commit 71cd6a1f18796ab8f502d07d1d80784d1e3b9cd2 Author: Orion Henry Date: Thu Mar 3 15:54:38 2022 -0500 lock data at 64 bit - no c_long commit e00bd4c20191e86b9f1bd11be2bab31e3a23fc97 Author: Orion Henry Date: Thu Mar 3 15:27:55 2022 -0500 verbose commit 39d157c554a71e00978b1d9244e1c1cfbe7c24f3 Author: Orion Henry Date: Thu Mar 3 14:56:23 2022 -0500 clippy cleanup commit 7f650fb8e0e7173c72d192c60506e46abb41f92f Author: Jason Kankiewicz Date: Wed Feb 23 02:14:06 2022 -0800 Added Doxygen documentation generation. Renamed `AMDatatype` to `AmDataType`. Reorganized the `AmDataType` tags. Renamed `AMfree()` to `AMdestroy()`. Renamed `AMclone()` to `AMdup()`. commit b0b803eef8d07b5171b9ab736eae3e2f13d6158b Author: Orion Henry Date: Tue Feb 22 11:30:42 2022 -0500 get simple test passing commit cab9017ffa369365afb73c37275063a0c174b12e Author: Orion Henry Date: Wed Feb 9 15:50:44 2022 -0500 rework to return a queriable result commit a557e848f39d21b69d4b6c56434c1600b1cf6d94 Author: Jason Kankiewicz Date: Mon Feb 14 14:38:00 2022 -0800 Add a CI step to run the CMake build of the C bindings for @alexjg. commit c8c0c72f3bfced414cc443255b6a912c56b9ecfc Author: Jason Kankiewicz Date: Mon Feb 14 14:09:58 2022 -0800 Add CMake instructions for @orionz. commit fb62c4b02a0971bf814272a58c500dd5a31e694a Author: Jason Kankiewicz Date: Thu Feb 10 23:28:54 2022 -0800 Add CMake support. commit 7bc3bb6850de84eff9ad5a66eed2363c12ad7738 Author: Jason Kankiewicz Date: Thu Feb 10 22:49:53 2022 -0800 Replace *intptr_t in C function signatures. 
commit 60395a2db04d37d549cd9b0e5178af5f52ceb5f3 Author: Orion Henry Date: Sun Feb 6 18:59:19 2022 -0500 am_pop and am_pop_value commit b1e88047d22c4382aa58fc3fc337eb3d8a36140a Author: Orion Henry Date: Thu Feb 3 19:43:36 2022 -0500 break the ground commit 41512e9c78a649186646bcaa5d78632724bd8403 Author: Orion Henry Date: Thu Mar 3 16:04:17 2022 -0500 support new as_ref api commit bcee6a9623c0ce1289271df180368a81d50508d6 Merge: cf98f78d 9a89db3f Author: Orion Henry Date: Thu Mar 3 15:58:19 2022 -0500 Merge remote-tracking branch 'origin/experiment' into c_api_exp commit cf98f78dd172f214d3f417913470956ff998d9d5 Author: Orion Henry Date: Thu Mar 3 15:56:27 2022 -0500 fmt commit 3c1f449c5c024892658b37abbc74e19b746413e2 Author: Orion Henry Date: Thu Mar 3 15:54:38 2022 -0500 lock data at 64 bit - no c_long commit 2c2ec0b0c5d74bbe950f629a701ea1209f8ad4b6 Author: Orion Henry Date: Thu Mar 3 15:27:55 2022 -0500 verbose commit b72b9c989a60ca4bbcd7c47e1e6bf89356346a2f Author: Orion Henry Date: Thu Mar 3 14:56:23 2022 -0500 clippy cleanup commit 3ba28f91ccb5a7499a5442804ffa91001b01a3ef Author: Jason Kankiewicz Date: Wed Feb 23 02:14:06 2022 -0800 Added Doxygen documentation generation. Renamed `AMDatatype` to `AmDataType`. Reorganized the `AmDataType` tags. Renamed `AMfree()` to `AMdestroy()`. Renamed `AMclone()` to `AMdup()`. commit 8564e5b7531fb4e5194cf845b3d48e52a16827bd Author: Orion Henry Date: Tue Feb 22 11:30:42 2022 -0500 get simple test passing commit 60835e6ae7361199e03225dd33a8aa9c4614a20e Author: Orion Henry Date: Wed Feb 9 15:50:44 2022 -0500 rework to return a queriable result commit 89466d9e8c77b67b3526d56beb5f263aadb7fca0 Author: Jason Kankiewicz Date: Mon Feb 14 14:38:00 2022 -0800 Add a CI step to run the CMake build of the C bindings for @alexjg. commit e2485bd5fda8b0e290038c579b2080faadbc5026 Author: Jason Kankiewicz Date: Mon Feb 14 14:09:58 2022 -0800 Add CMake instructions for @orionz. 
commit b5cc7dd63dd76c816be1f23a0b91ba1f7327a32c Author: Jason Kankiewicz Date: Thu Feb 10 23:28:54 2022 -0800 Add CMake support. commit 685536f0cf9808b10af5efc9341f85acc2490fdf Author: Jason Kankiewicz Date: Thu Feb 10 22:49:53 2022 -0800 Replace *intptr_t in C function signatures. commit c1c6e7bb6615d168bcdad41f1621b17ff8ea7725 Author: Orion Henry Date: Sun Feb 6 18:59:19 2022 -0500 am_pop and am_pop_value commit e68c8d347e5e45451b6b2193542b9f2bdaf283ce Author: Orion Henry Date: Thu Feb 3 19:43:36 2022 -0500 break the ground --- .github/workflows/ci.yaml | 19 + .gitignore | 1 + Cargo.toml | 3 +- README.md | 20 +- automerge-c/.gitignore | 3 + automerge-c/CMakeLists.txt | 135 +++ automerge-c/Cargo.toml | 19 + automerge-c/Makefile | 30 + automerge-c/README.md | 95 ++ automerge-c/automerge.c | 36 + automerge-c/build.rs | 25 + automerge-c/cbindgen.toml | 39 + automerge-c/cmake/automerge-c-config.cmake.in | 99 ++ automerge-c/cmake/config.h.in | 14 + automerge-c/img/brandmark.png | Bin 0 -> 1419 bytes automerge-c/src/CMakeLists.txt | 194 ++++ automerge-c/src/doc.rs | 33 + automerge-c/src/lib.rs | 869 ++++++++++++++++++ automerge-c/src/result.rs | 36 + automerge-c/src/utils.rs | 24 + automerge-c/test/CMakeLists.txt | 49 + automerge-c/test/amlistset_tests.c | 170 ++++ automerge-c/test/ammapset_tests.c | 97 ++ automerge-c/test/group_state.c | 18 + automerge-c/test/group_state.h | 15 + automerge-c/test/main.c | 31 + automerge/src/types.rs | 10 + deny.toml | 9 + scripts/ci/cmake-build | 18 + scripts/ci/run | 1 + 30 files changed, 2110 insertions(+), 2 deletions(-) create mode 100644 automerge-c/.gitignore create mode 100644 automerge-c/CMakeLists.txt create mode 100644 automerge-c/Cargo.toml create mode 100644 automerge-c/Makefile create mode 100644 automerge-c/README.md create mode 100644 automerge-c/automerge.c create mode 100644 automerge-c/build.rs create mode 100644 automerge-c/cbindgen.toml create mode 100644 automerge-c/cmake/automerge-c-config.cmake.in create mode 
100644 automerge-c/cmake/config.h.in create mode 100644 automerge-c/img/brandmark.png create mode 100644 automerge-c/src/CMakeLists.txt create mode 100644 automerge-c/src/doc.rs create mode 100644 automerge-c/src/lib.rs create mode 100644 automerge-c/src/result.rs create mode 100644 automerge-c/src/utils.rs create mode 100644 automerge-c/test/CMakeLists.txt create mode 100644 automerge-c/test/amlistset_tests.c create mode 100644 automerge-c/test/ammapset_tests.c create mode 100644 automerge-c/test/group_state.c create mode 100644 automerge-c/test/group_state.h create mode 100644 automerge-c/test/main.c create mode 100755 scripts/ci/cmake-build diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9a9753d0..d6bfc5d3 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -77,6 +77,25 @@ jobs: - name: run tests run: ./scripts/ci/js_tests + cmake_build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + - uses: Swatinem/rust-cache@v1 + - name: Install CMocka + run: sudo apt-get install -y libcmocka-dev + - name: Install/update CMake + uses: jwlawson/actions-setup-cmake@v1.12 + with: + cmake-version: latest + - name: Build and test C bindings + run: ./scripts/ci/cmake-build Release Static + shell: bash + linux: runs-on: ubuntu-latest strategy: diff --git a/.gitignore b/.gitignore index 95d3d639..eca9df3f 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ /.direnv perf.* /Cargo.lock +build/ diff --git a/Cargo.toml b/Cargo.toml index e1941120..7eb899e8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,8 +1,9 @@ [workspace] members = [ "automerge", - "automerge-wasm", + "automerge-c", "automerge-cli", + "automerge-wasm", "edit-trace", ] diff --git a/README.md b/README.md index e2d84bda..16284141 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,7 @@ To build and test the wasm library: $ yarn release ``` -And finally to test the js 
library. This is where most of the tests reside. +To test the js library. This is where most of the tests reside. ```shell ## setup @@ -79,6 +79,24 @@ And finally to test the js library. This is where most of the tests reside. $ yarn test ``` +And finally, to build and test the C bindings with CMake: + +```shell +## setup +$ cd automerge-c +$ mkdir -p build +$ cd build +$ cmake -S .. -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF +## building and testing +$ cmake --build . +``` +To add debugging symbols, replace `Release` with `Debug`. +To build a shared library instead of a static one, replace `OFF` with `ON`. + +The C bindings can be built and tested on any platform for which CMake is +available but the steps for doing so vary across platforms and are too numerous +to list here. + ## Benchmarking The `edit-trace` folder has the main code for running the edit trace benchmarking. diff --git a/automerge-c/.gitignore b/automerge-c/.gitignore new file mode 100644 index 00000000..cb544af0 --- /dev/null +++ b/automerge-c/.gitignore @@ -0,0 +1,3 @@ +automerge +automerge.h +automerge.o diff --git a/automerge-c/CMakeLists.txt b/automerge-c/CMakeLists.txt new file mode 100644 index 00000000..188780f9 --- /dev/null +++ b/automerge-c/CMakeLists.txt @@ -0,0 +1,135 @@ +cmake_minimum_required(VERSION 3.18 FATAL_ERROR) + +set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake") + +# Parse the library name, project name and project version out of Cargo's TOML file. 
+set(CARGO_LIB_SECTION OFF) + +set(LIBRARY_NAME "") + +set(CARGO_PKG_SECTION OFF) + +set(CARGO_PKG_NAME "") + +set(CARGO_PKG_VERSION "") + +file(READ Cargo.toml TOML_STRING) + +string(REPLACE ";" "\\\\;" TOML_STRING "${TOML_STRING}") + +string(REPLACE "\n" ";" TOML_LINES "${TOML_STRING}") + +foreach(TOML_LINE IN ITEMS ${TOML_LINES}) + string(REGEX MATCH "^\\[(lib|package)\\]$" _ ${TOML_LINE}) + + if(CMAKE_MATCH_1 STREQUAL "lib") + set(CARGO_LIB_SECTION ON) + + set(CARGO_PKG_SECTION OFF) + elseif(CMAKE_MATCH_1 STREQUAL "package") + set(CARGO_LIB_SECTION OFF) + + set(CARGO_PKG_SECTION ON) + endif() + + string(REGEX MATCH "^name += +\"([^\"]+)\"$" _ ${TOML_LINE}) + + if(CMAKE_MATCH_1 AND (CARGO_LIB_SECTION AND NOT CARGO_PKG_SECTION)) + set(LIBRARY_NAME "${CMAKE_MATCH_1}") + elseif(CMAKE_MATCH_1 AND (NOT CARGO_LIB_SECTION AND CARGO_PKG_SECTION)) + set(CARGO_PKG_NAME "${CMAKE_MATCH_1}") + endif() + + string(REGEX MATCH "^version += +\"([^\"]+)\"$" _ ${TOML_LINE}) + + if(CMAKE_MATCH_1 AND CARGO_PKG_SECTION) + set(CARGO_PKG_VERSION "${CMAKE_MATCH_1}") + endif() + + if(LIBRARY_NAME AND (CARGO_PKG_NAME AND CARGO_PKG_VERSION)) + break() + endif() +endforeach() + +project(${CARGO_PKG_NAME} VERSION ${CARGO_PKG_VERSION} LANGUAGES C DESCRIPTION "C bindings for the Automerge Rust backend.") + +include(CTest) + +option(BUILD_SHARED_LIBS "Enable the choice of a shared or static library.") + +include(CMakePackageConfigHelpers) + +include(GNUInstallDirs) + +string(MAKE_C_IDENTIFIER ${PROJECT_NAME} SYMBOL_PREFIX) + +string(TOUPPER ${SYMBOL_PREFIX} SYMBOL_PREFIX) + +set(CARGO_TARGET_DIR "${CMAKE_CURRENT_BINARY_DIR}/Cargo/target") + +add_subdirectory(src) + +# Generate and install the configuration header. 
+math(EXPR INTEGER_PROJECT_VERSION_MAJOR "${PROJECT_VERSION_MAJOR} * 100000") + +math(EXPR INTEGER_PROJECT_VERSION_MINOR "${PROJECT_VERSION_MINOR} * 100") + +math(EXPR INTEGER_PROJECT_VERSION_PATCH "${PROJECT_VERSION_PATCH}") + +math(EXPR INTEGER_PROJECT_VERSION "${INTEGER_PROJECT_VERSION_MAJOR} + ${INTEGER_PROJECT_VERSION_MINOR} + ${INTEGER_PROJECT_VERSION_PATCH}") + +configure_file( + ${CMAKE_MODULE_PATH}/config.h.in + config.h + @ONLY + NEWLINE_STYLE LF +) + +install( + FILES ${CMAKE_BINARY_DIR}/config.h + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME} +) + +if(BUILD_TESTING) + add_subdirectory(test) + + enable_testing() +endif() + +# Generate and install .cmake files +set(PROJECT_CONFIG_NAME "${PROJECT_NAME}-config") + +set(PROJECT_CONFIG_VERSION_NAME "${PROJECT_CONFIG_NAME}-version") + +write_basic_package_version_file( + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_VERSION_NAME}.cmake + VERSION ${PROJECT_VERSION} + COMPATIBILITY ExactVersion +) + +# The namespace label starts with the title-cased library name. +string(SUBSTRING ${LIBRARY_NAME} 0 1 NS_FIRST) + +string(SUBSTRING ${LIBRARY_NAME} 1 -1 NS_REST) + +string(TOUPPER ${NS_FIRST} NS_FIRST) + +string(TOLOWER ${NS_REST} NS_REST) + +string(CONCAT NAMESPACE ${NS_FIRST} ${NS_REST} "::") + +# \note CMake doesn't automate the exporting of an imported library's targets +# so the package configuration script must do it. 
+configure_package_config_file( + ${CMAKE_MODULE_PATH}/${PROJECT_CONFIG_NAME}.cmake.in + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_NAME}.cmake + INSTALL_DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME} +) + +install( + FILES + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_NAME}.cmake + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_VERSION_NAME}.cmake + DESTINATION + ${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME} +) diff --git a/automerge-c/Cargo.toml b/automerge-c/Cargo.toml new file mode 100644 index 00000000..bcb07ce5 --- /dev/null +++ b/automerge-c/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "automerge-c" +version = "0.1.0" +authors = ["Orion Henry ", "Jason Kankiewicz "] +edition = "2021" +license = "MIT" + +[lib] +name = "automerge" +crate-type = ["cdylib", "staticlib"] +bench = false +doc = false + +[dependencies] +automerge = { path = "../automerge" } +libc = "^0.2" + +[build-dependencies] +cbindgen = "^0.20" diff --git a/automerge-c/Makefile b/automerge-c/Makefile new file mode 100644 index 00000000..a5ab353b --- /dev/null +++ b/automerge-c/Makefile @@ -0,0 +1,30 @@ + +CC=gcc +CFLAGS=-I. +DEPS=automerge.h +LIBS=-lpthread -ldl -lm +LDIR=../target/release +LIB=../target/release/libautomerge.a +DEBUG_LIB=../target/debug/libautomerge.a + +all: $(DEBUG_LIB) automerge + +debug: LDIR=../target/debug +debug: automerge $(DEBUG_LIB) + +automerge: automerge.o $(LDIR)/libautomerge.a + $(CC) -o $@ automerge.o $(LDIR)/libautomerge.a $(LIBS) -L$(LDIR) + +$(DEBUG_LIB): src/*.rs + cargo build + +$(LIB): src/*.rs + cargo build --release + +%.o: %.c $(DEPS) + $(CC) -c -o $@ $< $(CFLAGS) + +.PHONY: clean + +clean: + rm -f *.o automerge $(LIB) $(DEBUG_LIB) diff --git a/automerge-c/README.md b/automerge-c/README.md new file mode 100644 index 00000000..d500f330 --- /dev/null +++ b/automerge-c/README.md @@ -0,0 +1,95 @@ + +## Methods we need to support + +### Basic management + + 1. `AMcreate()` + 1. `AMclone(doc)` + 1. `AMfree(doc)` + 1. 
`AMconfig(doc, key, val)` // set actor + 1. `actor = get_actor(doc)` + +### Transactions + + 1. `AMpendingOps(doc)` + 1. `AMcommit(doc, message, time)` + 1. `AMrollback(doc)` + +### Write + + 1. `AMset{Map|List}(doc, obj, prop, value)` + 1. `AMinsert(doc, obj, index, value)` + 1. `AMpush(doc, obj, value)` + 1. `AMdel{Map|List}(doc, obj, prop)` + 1. `AMinc{Map|List}(doc, obj, prop, value)` + 1. `AMspliceText(doc, obj, start, num_del, text)` + +### Read + + 1. `AMkeys(doc, obj, heads)` + 1. `AMlength(doc, obj, heads)` + 1. `AMvalues(doc, obj, heads)` + 1. `AMtext(doc, obj, heads)` + +### Sync + + 1. `AMgenerateSyncMessage(doc, state)` + 1. `AMreceiveSyncMessage(doc, state, message)` + 1. `AMinitSyncState()` + +### Save / Load + + 1. `AMload(data)` + 1. `AMloadIncremental(doc, data)` + 1. `AMsave(doc)` + 1. `AMsaveIncremental(doc)` + +### Low Level Access + + 1. `AMapplyChanges(doc, changes)` + 1. `AMgetChanges(doc, deps)` + 1. `AMgetChangesAdded(doc1, doc2)` + 1. `AMgetHeads(doc)` + 1. `AMgetLastLocalChange(doc)` + 1. `AMgetMissingDeps(doc, heads)` + +### Encode/Decode + + 1. `AMencodeChange(change)` + 1. `AMdecodeChange(change)` + 1. `AMencodeSyncMessage(change)` + 1. `AMdecodeSyncMessage(change)` + 1. `AMencodeSyncState(change)` + 1. `AMdecodeSyncState(change)` + +## Open Question - Memory management + +Most of these calls return one or more items of arbitrary length. Doing memory management in C is tricky. This is my proposed solution... 
+ +### + + ``` + // returns 1 or zero opids + n = automerge_set(doc, "_root", "hello", datatype, value); + if (n) { + automerge_pop(doc, &obj, len); + } + + // returns n values + n = automerge_values(doc, "_root", "hello"); + for (i = 0; i +#include +#include +#include +#include "automerge.h" + +#define MAX_BUFF_SIZE 4096 + +int main() { + int n = 0; + int data_type = 0; + char buff[MAX_BUFF_SIZE]; + char obj[MAX_BUFF_SIZE]; + AMresult* res = NULL; + + printf("begin\n"); + + AMdoc* doc = AMcreate(); + + printf("AMconfig()..."); + AMconfig(doc, "actor", "aabbcc"); + printf("pass!\n"); + + printf("AMmapSetStr()...\n"); + res = AMmapSetStr(doc, NULL, "string", "hello world"); + if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) + { + printf("AMmapSet() failed: %s\n", AMerrorMessage(res)); + return 1; + } + AMclear(res); + printf("pass!\n"); + + AMdestroy(doc); + printf("end\n"); +} diff --git a/automerge-c/build.rs b/automerge-c/build.rs new file mode 100644 index 00000000..e953527f --- /dev/null +++ b/automerge-c/build.rs @@ -0,0 +1,25 @@ +extern crate cbindgen; + +use std::{env, path::PathBuf}; + +fn main() { + let crate_dir = PathBuf::from( + env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR env var is not defined"), + ); + + let config = cbindgen::Config::from_file("cbindgen.toml") + .expect("Unable to find cbindgen.toml configuration file"); + + // let mut config: cbindgen::Config = Default::default(); + // config.language = cbindgen::Language::C; + + if let Ok(writer) = cbindgen::generate_with_config(&crate_dir, config) { + writer.write_to_file(crate_dir.join("automerge.h")); + + // Also write the generated header into the target directory when + // specified (necessary for an out-of-source build a la CMake). 
+ if let Ok(target_dir) = env::var("CARGO_TARGET_DIR") { + writer.write_to_file(PathBuf::from(target_dir).join("automerge.h")); + } + } +} diff --git a/automerge-c/cbindgen.toml b/automerge-c/cbindgen.toml new file mode 100644 index 00000000..649d3204 --- /dev/null +++ b/automerge-c/cbindgen.toml @@ -0,0 +1,39 @@ +header = """ +/** \\file + * All constants, functions and types in the Automerge library's C API. + */ + """ +include_guard = "automerge_h" +autogen_warning = "/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */" +language = "C" +includes = [] +sys_includes = ["stddef.h", "stdint.h", "stdbool.h"] +no_includes = true +line_length = 140 +documentation = true +documentation_style = "doxy" + +after_includes = """\n +/** + * \\defgroup enumerations Public Enumerations + Symbolic names for integer constants. + */ + +/** + * \\memberof AMdoc + * \\def AM_ROOT + * \\brief The root object of an `AMdoc` struct. + */ +#define AM_ROOT NULL +""" +usize_is_size_t = true + +[enum] +rename_variants = "ScreamingSnakeCase" +enum_class = true +prefix_with_name = true +derive_const_casts = true +must_use = "MUST_USE_ENUM" + +[export] +item_types = ["enums", "structs", "opaque", "constants", "functions"] diff --git a/automerge-c/cmake/automerge-c-config.cmake.in b/automerge-c/cmake/automerge-c-config.cmake.in new file mode 100644 index 00000000..fd39aee6 --- /dev/null +++ b/automerge-c/cmake/automerge-c-config.cmake.in @@ -0,0 +1,99 @@ +@PACKAGE_INIT@ + +include(CMakeFindDependencyMacro) + +set(CMAKE_THREAD_PREFER_PTHREAD TRUE) + +set(THREADS_PREFER_PTHREAD_FLAG TRUE) + +find_dependency(Threads) + +find_library(@SYMBOL_PREFIX@_IMPLIB_DEBUG @LIBRARY_NAME@${CMAKE_DEBUG_POSTFIX} PATHS "${PACKAGE_PREFIX_DIR}/debug/${CMAKE_INSTALL_LIBDIR}" "${PACKAGE_PREFIX_DIR}/${CMAKE_INSTALL_LIBDIR}" NO_DEFAULT_PATH) + +find_library(@SYMBOL_PREFIX@_IMPLIB_RELEASE @LIBRARY_NAME@${CMAKE_RELEASE_POSTFIX} PATHS "${PACKAGE_PREFIX_DIR}/${CMAKE_INSTALL_LIBDIR}" 
NO_DEFAULT_PATH) + +find_file(@SYMBOL_PREFIX@_LOCATION_DEBUG "${CMAKE_SHARED_LIBRARY_PREFIX}@LIBRARY_NAME@${CMAKE_DEBUG_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX}" PATHS "${PACKAGE_PREFIX_DIR}/debug/${CMAKE_INSTALL_BINDIR}" "${PACKAGE_PREFIX_DIR}/${CMAKE_INSTALL_LIBDIR}" NO_DEFAULT_PATH) + +find_file(@SYMBOL_PREFIX@_LOCATION_RELEASE "${CMAKE_SHARED_LIBRARY_PREFIX}@LIBRARY_NAME@${CMAKE_RELEASE_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX}" PATHS "${PACKAGE_PREFIX_DIR}/${CMAKE_INSTALL_BINDIR}" NO_DEFAULT_PATH) + +if(@BUILD_SHARED_LIBS@) + set(@SYMBOL_PREFIX@_DEFINE_SYMBOL "@SYMBOL_PREFIX@_EXPORTS") + + if(WIN32) + set(@SYMBOL_PREFIX@_NO_SONAME_DEBUG "TRUE") + + set(@SYMBOL_PREFIX@_NO_SONAME_RELEASE "TRUE") + + set(@SYMBOL_PREFIX@_SONAME_DEBUG "") + + set(@SYMBOL_PREFIX@_SONAME_RELEASE "") + else() + set(@SYMBOL_PREFIX@_NO_SONAME_DEBUG "FALSE") + + set(@SYMBOL_PREFIX@_NO_SONAME_RELEASE "FALSE") + + get_filename_component(@SYMBOL_PREFIX@_SONAME_DEBUG "${@SYMBOL_PREFIX@_LOCATION_DEBUG}" NAME) + + get_filename_component(@SYMBOL_PREFIX@_SONAME_RELEASE "${@SYMBOL_PREFIX@_LOCATION_RELEASE}" NAME) + endif() + + set(@SYMBOL_PREFIX@_TYPE "SHARED") +else() + set(@SYMBOL_PREFIX@_DEFINE_SYMBOL "") + + set(@SYMBOL_PREFIX@_LOCATION_DEBUG "${@SYMBOL_PREFIX@_IMPLIB_DEBUG}") + + set(@SYMBOL_PREFIX@_IMPLIB_DEBUG "") + + set(@SYMBOL_PREFIX@_LOCATION_RELEASE "${@SYMBOL_PREFIX@_IMPLIB_RELEASE}") + + set(@SYMBOL_PREFIX@_IMPLIB_RELEASE "") + + set(@SYMBOL_PREFIX@_NO_SONAME_DEBUG "TRUE") + + set(@SYMBOL_PREFIX@_NO_SONAME_RELEASE "TRUE") + + set(@SYMBOL_PREFIX@_SONAME_DEBUG "") + + set(@SYMBOL_PREFIX@_SONAME_RELEASE "") + + set(@SYMBOL_PREFIX@_TYPE "STATIC") +endif() + +add_library(@NAMESPACE@@PROJECT_NAME@ ${@SYMBOL_PREFIX@_TYPE} IMPORTED) + +set_target_properties( + @NAMESPACE@@PROJECT_NAME@ + PROPERTIES + # \note Cargo writes a debug build into a nested directory instead of + # decorating its name. 
+ DEBUG_POSTFIX "" + DEFINE_SYMBOL "${@SYMBOL_PREFIX@_DEFINE_SYMBOL}" + IMPORTED_CONFIGURATIONS "RELEASE;DEBUG" + IMPORTED_IMPLIB_DEBUG "${@SYMBOL_PREFIX@_IMPLIB_DEBUG}" + IMPORTED_IMPLIB_RELEASE "${@SYMBOL_PREFIX@_IMPLIB_RELEASE}" + IMPORTED_LOCATION_DEBUG "${@SYMBOL_PREFIX@_LOCATION_DEBUG}" + IMPORTED_LOCATION_RELEASE "${@SYMBOL_PREFIX@_LOCATION_RELEASE}" + IMPORTED_NO_SONAME_DEBUG "${@SYMBOL_PREFIX@_NO_SONAME_DEBUG}" + IMPORTED_NO_SONAME_RELEASE "${@SYMBOL_PREFIX@_NO_SONAME_RELEASE}" + IMPORTED_SONAME_DEBUG "${@SYMBOL_PREFIX@_SONAME_DEBUG}" + IMPORTED_SONAME_RELEASE "${@SYMBOL_PREFIX@_SONAME_RELEASE}" + INTERFACE_INCLUDE_DIRECTORIES "${PACKAGE_PREFIX_DIR}/${CMAKE_INSTALL_INCLUDEDIR}" + LINKER_LANGUAGE C + PUBLIC_HEADER "${PACKAGE_PREFIX_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/@PROJECT_NAME@/@LIBRARY_NAME@.h" + SOVERSION "@PROJECT_VERSION_MAJOR@" + VERSION "@PROJECT_VERSION@" + # \note Cargo exports all of the symbols automatically. + WINDOWS_EXPORT_ALL_SYMBOLS "TRUE" +) + +# Remove the variables that the find_* command calls cached. +unset(@SYMBOL_PREFIX@_IMPLIB_DEBUG CACHE) + +unset(@SYMBOL_PREFIX@_IMPLIB_RELEASE CACHE) + +unset(@SYMBOL_PREFIX@_LOCATION_DEBUG CACHE) + +unset(@SYMBOL_PREFIX@_LOCATION_RELEASE CACHE) + +check_required_components(@PROJECT_NAME@) diff --git a/automerge-c/cmake/config.h.in b/automerge-c/cmake/config.h.in new file mode 100644 index 00000000..08643fc5 --- /dev/null +++ b/automerge-c/cmake/config.h.in @@ -0,0 +1,14 @@ +#ifndef @SYMBOL_PREFIX@_CONFIG_INCLUDED +#define @SYMBOL_PREFIX@_CONFIG_INCLUDED + +/* This header is auto-generated by CMake. 
*/ + +#define @SYMBOL_PREFIX@_VERSION @INTEGER_PROJECT_VERSION@ + +#define @SYMBOL_PREFIX@_MAJOR_VERSION (@SYMBOL_PREFIX@_VERSION / 100000) + +#define @SYMBOL_PREFIX@_MINOR_VERSION ((@SYMBOL_PREFIX@_VERSION / 100) % 1000) + +#define @SYMBOL_PREFIX@_PATCH_VERSION (@SYMBOL_PREFIX@_VERSION % 100) + +#endif /* @SYMBOL_PREFIX@_CONFIG_INCLUDED */ diff --git a/automerge-c/img/brandmark.png b/automerge-c/img/brandmark.png new file mode 100644 index 0000000000000000000000000000000000000000..56e1c82d0d10e3317dd08c4c9901e4c4cd36b1f1 GIT binary patch literal 1419 zcmV;61$6p}P)`btzv-C5LJ#%AhX1l`U*kCPXtLd0!`?fGgy-F`|yP^#A|>4s=pZQvl$w zkU+p-pb*gTP#}N+aIC}t000SaNLh0L02U5`%_ z5CC9ycFsM(6Vy@P|1q~9tU{6weT8)YkwT=tA((VJGL>$;zco$s@~V7R&G=|s{4_6Y znhyh-SUiPRjZ>jb<6LOb=!Cm#U2nRbN#iKo*M00Q8fQY2Mo(za7zoW7r^3zg*x57A zgxjWR4%Uo;(3~+MG-iwnO&I~9AtNX>V@wE*7?VO1#s#4Px*j{k)z(Vo<;{Am%)e>p6hv>)-xi7yNyTu2Af*jh|O&-wI6U(>l(4bLm*5w zwWbj(y!*{Od;o50JtI1UZ6m_Ez9c@OWJC*ZQDI%5a~sjZEg(!bwT2NtPuWAnWK+wH zc;OKg!l|Q%5ibOrdH5(WOHeT4g|qA-3}}`hHWKE`T~fHv)O;gBI874cO`WCwNDv0u zLm1#JL13g*MZ)YM3~-jfHd2Isk{FMLlaP%RA)=Y<<))?@Nwu$d_7FxQb)*|fLMVF( zBau2%jkNk z%7u+T7P5%2Q9-z?Rfv#LK^UgCOk&)qAq?jSTw=_qA`Iu!Y~tM5KMU}#BE0)T0da18 zt*Rfx##JVaO!o!E(de?wm+I5)9QLKEL<GLO#1oE41yn?I zjpDYeKkij{6(h;5oVXc@o46!o8B>LBGI3eRFm6*7P%054BpcH!E<_?u zNHp%?r9V0mE2J4077Iv3ypUq_yBojNDMX48Z}dmzgP^8E6Jm{ihjb(O(j3 z1(h+n5M}f(Vt;-vkt3uVho3+&i98|I=nmi!xk94Rz0D%>g(TylVG|`nf^igbh%zDC z`1t7qohTI|jjpJ{BFcp*<76k5s39cgW<;WnkY=P2wS*)iiKr)}7%4>I(5j zJW*SSHlm68LaY%>%ppV?k;FVgobhm+M|=?{mr$V|o?WPA%qi3{<`rrfa|`9h{6eX5 z4WZ1qj!>WkLaMQZkY+3+q!>#H@y2pOtg)mJXDlnk z7)uM6jpc<)Mhn8U(S|T(v?7F!c7%}8k}z(xC5#!Z34P=FbT%jSjAPgm#*Eg4q0yeu zH(C@vjJy8#MOTx;hp}n?|Emad!imwW&@-A8&WuKdRrTqf1u!h^pOWgI`}q2;cfzWw Z{sEp`E%^as(r*9&002ovPDHLkV1h|7iLd|w literal 0 HcmV?d00001 diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt new file mode 100644 index 00000000..78f4e976 --- /dev/null +++ b/automerge-c/src/CMakeLists.txt @@ -0,0 +1,194 @@ 
+cmake_minimum_required(VERSION 3.18 FATAL_ERROR) + +find_program ( + CARGO_CMD + "cargo" + PATHS "$ENV{CARGO_HOME}/bin" + DOC "The Cargo command" +) + +if(NOT CARGO_CMD) + message(FATAL_ERROR "Cargo (Rust package manager) not found! Install it and/or set the CARGO_HOME environment variable.") +endif() + +string(TOLOWER "${CMAKE_BUILD_TYPE}" BUILD_TYPE_LOWER) + +if(BUILD_TYPE_LOWER STREQUAL debug) + set(CARGO_BUILD_TYPE "debug") + + set(CARGO_FLAG "") +else() + set(CARGO_BUILD_TYPE "release") + + set(CARGO_FLAG "--release") +endif() + +set(CARGO_CURRENT_BINARY_DIR "${CARGO_TARGET_DIR}/${CARGO_BUILD_TYPE}") + +set( + CARGO_OUTPUT + # \note cbindgen won't regenerate a missing header so it can't be cleaned. + #${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h + ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX} + ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX} +) + +if(WIN32) + # \note The basename of an import library output by Cargo is the filename + # of its corresponding shared library. + list(APPEND CARGO_OUTPUT ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}) +endif() + +add_custom_command( + OUTPUT ${CARGO_OUTPUT} + COMMAND + ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} ${CARGO_CMD} build ${CARGO_FLAG} + MAIN_DEPENDENCY + lib.rs + DEPENDS + ${CMAKE_SOURCE_DIR}/build.rs + ${CMAKE_SOURCE_DIR}/Cargo.toml + ${CMAKE_SOURCE_DIR}/cbindgen.toml + WORKING_DIRECTORY + ${CMAKE_SOURCE_DIR} + COMMENT + "Producing the library artifacts with Cargo..." + VERBATIM +) + +# \note This target is only necessary because cbindgen won't allow the +# generated header to be listed in the Cargo command's output, being +# another target's source file would've been enough otherwise. 
+add_custom_target( + ${LIBRARY_NAME}_artifacts + DEPENDS ${CARGO_OUTPUT} +) + +if(BUILD_SHARED_LIBS) + if(WIN32) + set(LIBRARY_DESTINATION "${CMAKE_INSTALL_BINDIR}") + else() + set(LIBRARY_DESTINATION "${CMAKE_INSTALL_LIBDIR}") + endif() + + set(LIBRARY_DEFINE_SYMBOL "${SYMBOL_PREFIX}_EXPORTS") + + # \note The basename of an import library output by Cargo is the filename + # of its corresponding shared library. + set(LIBRARY_IMPLIB "${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}") + + set(LIBRARY_LOCATION "${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}") + + set(LIBRARY_NO_SONAME "${WIN32}") + + set(LIBRARY_SONAME "${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX}") + + set(LIBRARY_TYPE "SHARED") +else() + set(LIBRARY_DEFINE_SYMBOL "") + + set(LIBRARY_DESTINATION "${CMAKE_INSTALL_LIBDIR}") + + set(LIBRARY_IMPLIB "") + + set(LIBRARY_LOCATION "${CARGO_CURRENT_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}") + + set(LIBRARY_NO_SONAME "TRUE") + + set(LIBRARY_SONAME "") + + set(LIBRARY_TYPE "STATIC") +endif() + +add_library(${LIBRARY_NAME} ${LIBRARY_TYPE} IMPORTED GLOBAL) + +add_dependencies(${LIBRARY_NAME} ${LIBRARY_NAME}_artifacts) + +set_target_properties( + ${LIBRARY_NAME} + PROPERTIES + # \note Cargo writes a debug build into a nested directory instead of + # decorating its name. + DEBUG_POSTFIX "" + DEFINE_SYMBOL "${LIBRARY_DEFINE_SYMBOL}" + IMPORTED_IMPLIB "${LIBRARY_IMPLIB}" + IMPORTED_LOCATION "${LIBRARY_LOCATION}" + IMPORTED_NO_SONAME "${LIBRARY_NO_SONAME}" + IMPORTED_SONAME "${LIBRARY_SONAME}" + LINKER_LANGUAGE C + PUBLIC_HEADER "${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h" + SOVERSION "${PROJECT_VERSION_MAJOR}" + VERSION "${PROJECT_VERSION}" + # \note Cargo exports all of the symbols automatically. 
+ WINDOWS_EXPORT_ALL_SYMBOLS "TRUE" +) + +target_compile_definitions(${LIBRARY_NAME} INTERFACE $) + +target_include_directories( + ${LIBRARY_NAME} + INTERFACE + "$" +) + +set(CMAKE_THREAD_PREFER_PTHREAD TRUE) + +set(THREADS_PREFER_PTHREAD_FLAG TRUE) + +find_package(Threads REQUIRED) + +set(LIBRARY_DEPENDENCIES Threads::Threads ${CMAKE_DL_LIBS}) + +if(WIN32) + list(APPEND LIBRARY_DEPENDENCIES Bcrypt userenv ws2_32) +else() + list(APPEND LIBRARY_DEPENDENCIES m) +endif() + +target_link_libraries(${LIBRARY_NAME} INTERFACE ${LIBRARY_DEPENDENCIES}) + +install( + FILES $ + TYPE LIB + # \note The basename of an import library output by Cargo is the filename + # of its corresponding shared library. + RENAME "${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}" + OPTIONAL +) + +set(LIBRARY_FILE_NAME "${CMAKE_${LIBRARY_TYPE}_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_${LIBRARY_TYPE}_LIBRARY_SUFFIX}") + +install( + FILES $ + RENAME "${LIBRARY_FILE_NAME}" + DESTINATION ${LIBRARY_DESTINATION} +) + +install( + FILES $ + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME} +) + +find_package(Doxygen OPTIONAL_COMPONENTS dot) + +if(DOXYGEN_FOUND) + set(DOXYGEN_GENERATE_LATEX YES) + + set(DOXYGEN_PDF_HYPERLINKS YES) + + set(DOXYGEN_PROJECT_LOGO "${CMAKE_SOURCE_DIR}/img/brandmark.png") + + set(DOXYGEN_SORT_BRIEF_DOCS YES) + + set(DOXYGEN_USE_MDFILE_AS_MAINPAGE "${CMAKE_SOURCE_DIR}/README.md") + + doxygen_add_docs( + ${LIBRARY_NAME}_docs + "${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h" + "${CMAKE_SOURCE_DIR}/README.md" + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + COMMENT "Producing documentation with Doxygen..." 
+ ) + + add_dependencies(${LIBRARY_NAME}_docs ${LIBRARY_NAME}_artifacts) +endif() diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs new file mode 100644 index 00000000..b8432271 --- /dev/null +++ b/automerge-c/src/doc.rs @@ -0,0 +1,33 @@ +use automerge as am; +use std::ops::{Deref, DerefMut}; + +/// \struct AMdoc +/// \brief A JSON-like CRDT. +#[derive(Clone)] +pub struct AMdoc(am::AutoCommit); + +impl AMdoc { + pub fn create(handle: am::AutoCommit) -> AMdoc { + AMdoc(handle) + } +} + +impl Deref for AMdoc { + type Target = am::AutoCommit; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for AMdoc { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl From for *mut AMdoc { + fn from(b: AMdoc) -> Self { + Box::into_raw(Box::new(b)) + } +} diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs new file mode 100644 index 00000000..a8fde724 --- /dev/null +++ b/automerge-c/src/lib.rs @@ -0,0 +1,869 @@ +use automerge as am; +use std::{ffi::CStr, os::raw::c_char}; + +mod doc; +mod result; +mod utils; + +use automerge::transaction::Transactable; +use doc::AMdoc; +use result::AMresult; + +/// \ingroup enumerations +/// \enum AmObjType +/// \brief The type of an object value. +#[repr(u8)] +pub enum AmObjType { + /// A list. + List = 1, + /// A key-value map. + Map, + /// A list of Unicode graphemes. + Text, +} + +impl From for am::ObjType { + fn from(o: AmObjType) -> Self { + match o { + AmObjType::Map => am::ObjType::Map, + AmObjType::List => am::ObjType::List, + AmObjType::Text => am::ObjType::Text, + } + } +} + +/// \ingroup enumerations +/// \enum AmStatus +/// \brief The status of an API call. +#[derive(Debug)] +#[repr(u8)] +pub enum AmStatus { + /// The result is one or more changes. + ChangesOk = 1, + /// The command was successful. + CommandOk, + /// The result was an error. + Error, + /// The result is invalid. + InvalidResult, + /// The result is an object ID. 
+ ObjOk, + /// The result is one or more values. + ValuesOk, +} + +unsafe fn to_str(c: *const c_char) -> String { + CStr::from_ptr(c).to_string_lossy().to_string() +} + +macro_rules! to_doc { + ($handle:expr) => {{ + let handle = $handle.as_mut(); + match handle { + Some(b) => b, + None => return AMresult::err("Invalid AMdoc pointer").into(), + } + }}; +} + +macro_rules! to_obj { + ($handle:expr) => {{ + match $handle.as_ref() { + Some(b) => b, + None => &AMobj(am::ObjId::Root), + } + }}; +} + +fn to_result>(r: R) -> *mut AMresult { + (r.into()).into() +} + +/// \struct AMobj +/// \brief An object's unique identifier. +#[derive(Clone)] +pub struct AMobj(am::ObjId); + +impl AsRef for AMobj { + fn as_ref(&self) -> &am::ObjId { + &self.0 + } +} + +/// \memberof AMdoc +/// \brief Allocates a new `AMdoc` struct and initializes it with defaults. +/// +/// \return A pointer to an `AMdoc` struct. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMdestroy()`. +#[no_mangle] +pub extern "C" fn AMcreate() -> *mut AMdoc { + AMdoc::create(am::AutoCommit::new()).into() +} + +/// \memberof AMdoc +/// \brief Deallocates the storage for an `AMdoc` struct previously +/// allocated by `AMcreate()` or `AMdup()`. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \pre \p doc must be a valid address. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMdestroy(doc: *mut AMdoc) { + if !doc.is_null() { + let doc: AMdoc = *Box::from_raw(doc); + drop(doc) + } +} + +/// \memberof AMdoc +/// \brief Allocates storage for an `AMdoc` struct and initializes it by +/// duplicating the `AMdoc` struct pointed to by \p doc. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMdoc` struct. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMdestroy()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMdup(doc: *mut AMdoc) -> *mut AMdoc { + let doc = *Box::from_raw(doc); + let copy = doc.clone(); + std::mem::forget(doc); + copy.into() +} + +/// \memberof AMdoc +/// \brief Set a configuration property of an `AMdoc` struct. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] key A configuration property's UTF-8 string key. +/// \param[in] value A configuration property's UTF-8 string value or `NULL`. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// key and value must be valid c strings +#[no_mangle] +pub unsafe extern "C" fn AMconfig( + doc: *mut AMdoc, + key: *const c_char, + value: *const c_char, +) -> *mut AMresult { + let doc = to_doc!(doc); + let key = to_str(key); + match key.as_str() { + "actor" => { + let actor = to_str(value); + if let Ok(actor) = actor.try_into() { + doc.set_actor(actor); + AMresult::Ok.into() + } else { + AMresult::err(&format!("Invalid actor '{}'", to_str(value))).into() + } + } + k => AMresult::err(&format!("Invalid config key '{}'", k)).into(), + } +} + +/// \memberof AMdoc +/// \brief Get an `AMdoc` struct's actor ID value as a hexadecimal string. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing a UTF-8 string value. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetActor(_doc: *mut AMdoc) -> *mut AMresult { + unimplemented!() +} + +/// \memberof AMresult +/// \brief Get the status code of an `AMresult` struct. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \return An `AmStatus` enum tag. +/// \pre \p result must be a valid address. +/// \internal +/// +/// # Safety +/// result must be a pointer to a valid AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultStatus(result: *mut AMresult) -> AmStatus { + match result.as_mut() { + Some(AMresult::Ok) => AmStatus::CommandOk, + Some(AMresult::Error(_)) => AmStatus::Error, + Some(AMresult::ObjId(_)) => AmStatus::ObjOk, + Some(AMresult::Values(_)) => AmStatus::ValuesOk, + Some(AMresult::Changes(_)) => AmStatus::ChangesOk, + None => AmStatus::InvalidResult, + } +} + +/// \memberof AMdoc +/// \brief Set a map object's key to a signed integer value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapSetInt( + doc: *mut AMdoc, + obj: *mut AMobj, + key: *const c_char, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put(to_obj!(obj), to_str(key), value)) +} + +/// \memberof AMdoc +/// \brief Set a map object's key to an unsigned integer value. 
+/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] value A 64-bit unsigned integer. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapSetUint( + doc: *mut AMdoc, + obj: *mut AMobj, + key: *const c_char, + value: u64, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put(to_obj!(obj), to_str(key), value)) +} + +/// \memberof AMdoc +/// \brief Set a map object's key to a UTF-8 string value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] value A UTF-8 string. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \pre \p value must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapSetStr( + doc: *mut AMdoc, + obj: *mut AMobj, + key: *const c_char, + value: *const c_char, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put(to_obj!(obj), to_str(key), to_str(value))) +} + +/// \memberof AMdoc +/// \brief Set a map object's key to a byte array value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] value A pointer to an array of bytes. +/// \param[in] count The number of bytes to copy from \p value. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \pre \p value must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p value. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +/// value must be a byte array of length `count` +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapSetBytes( + doc: *mut AMdoc, + obj: *mut AMobj, + key: *const c_char, + value: *const u8, + count: usize, +) -> *mut AMresult { + let doc = to_doc!(doc); + let slice = std::slice::from_raw_parts(value, count); + let mut vec = Vec::new(); + vec.extend_from_slice(slice); + to_result(doc.put(to_obj!(obj), to_str(key), vec)) +} + +/// \memberof AMdoc +/// \brief Set a map object's key to a float value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. 
+/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] value A 64-bit float. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapSetF64( + doc: *mut AMdoc, + obj: *mut AMobj, + key: *const c_char, + value: f64, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put(to_obj!(obj), to_str(key), value)) +} + +/// \memberof AMdoc +/// \brief Set a map object's key to a CRDT counter value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapSetCounter( + doc: *mut AMdoc, + obj: *mut AMobj, + key: *const c_char, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put( + to_obj!(obj), + to_str(key), + am::ScalarValue::Counter(value.into()), + )) +} + +/// \memberof AMdoc +/// \brief Set a map object's key to a Lamport timestamp value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. 
+/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapSetTimestamp( + doc: *mut AMdoc, + obj: *mut AMobj, + key: *const c_char, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put(to_obj!(obj), to_str(key), am::ScalarValue::Timestamp(value))) +} + +/// \memberof AMdoc +/// \brief Set a map object's key to a null value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapSetNull( + doc: *mut AMdoc, + obj: *mut AMobj, + key: *const c_char, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put(to_obj!(obj), to_str(key), ())) +} + +/// \memberof AMdoc +/// \brief Set a map object's key to an empty object value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. 
+/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] obj_type An `AmObjType` enum tag. +/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobj` struct. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapSetObject( + doc: *mut AMdoc, + obj: *mut AMobj, + key: *const c_char, + obj_type: AmObjType, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put_object(to_obj!(obj), to_str(key), obj_type.into())) +} + +/// \memberof AMdoc +/// \brief Set a list object's index to a byte array value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] value A pointer to an array of bytes. +/// \param[in] count The number of bytes to copy from \p value. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \pre \p value must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p value. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +/// value must be a byte array of length `count` +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMlistSetBytes( + doc: *mut AMdoc, + obj: *mut AMobj, + index: usize, + insert: bool, + value: *const u8, + count: usize, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj = to_obj!(obj); + let slice = std::slice::from_raw_parts(value, count); + let mut vec = Vec::new(); + vec.extend_from_slice(slice); + to_result(if insert { + doc.insert(obj, index, vec) + } else { + doc.put(obj, index, vec) + }) +} + +/// \memberof AMdoc +/// \brief Set a list object's index to a CRDT counter value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistSetCounter( + doc: *mut AMdoc, + obj: *mut AMobj, + index: usize, + insert: bool, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj = to_obj!(obj); + let value = am::ScalarValue::Counter(value.into()); + to_result(if insert { + doc.insert(obj, index, value) + } else { + doc.put(obj, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Set a list object's index to a float value. 
+/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] value A 64-bit float. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistSetF64( + doc: *mut AMdoc, + obj: *mut AMobj, + index: usize, + insert: bool, + value: f64, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj = to_obj!(obj); + to_result(if insert { + doc.insert(obj, index, value) + } else { + doc.put(obj, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Set a list object's index to a signed integer value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistSetInt( + doc: *mut AMdoc, + obj: *mut AMobj, + index: usize, + insert: bool, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj = to_obj!(obj); + to_result(if insert { + doc.insert(obj, index, value) + } else { + doc.put(obj, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Set a list object's index to a null value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistSetNull( + doc: *mut AMdoc, + obj: *mut AMobj, + index: usize, + insert: bool, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj = to_obj!(obj); + let value = (); + to_result(if insert { + doc.insert(obj, index, value) + } else { + doc.put(obj, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Set a list object's index to an empty object value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. 
+/// \param[in] obj_type An `AmObjType` enum tag. +/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobj` struct. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistSetObject( + doc: *mut AMdoc, + obj: *mut AMobj, + index: usize, + insert: bool, + obj_type: AmObjType, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj = to_obj!(obj); + let value = obj_type.into(); + to_result(if insert { + doc.insert_object(obj, index, value) + } else { + doc.put_object(obj, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Set a list object's index to a UTF-8 string value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] value A UTF-8 string. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \pre \p value must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +/// value must be a pointer to a valid address. 
+#[no_mangle] +pub unsafe extern "C" fn AMlistSetStr( + doc: *mut AMdoc, + obj: *mut AMobj, + index: usize, + insert: bool, + value: *const c_char, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj = to_obj!(obj); + let value = to_str(value); + to_result(if insert { + doc.insert(obj, index, value) + } else { + doc.put(obj, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Set a list object's index to a Lamport timestamp value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistSetTimestamp( + doc: *mut AMdoc, + obj: *mut AMobj, + index: usize, + insert: bool, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj = to_obj!(obj); + let value = am::ScalarValue::Timestamp(value); + to_result(if insert { + doc.insert(obj, index, value) + } else { + doc.put(obj, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Set a list object's index to an unsigned integer value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. 
+/// \param[in] value A 64-bit unsigned integer. +/// \return A pointer to an `AMresult` struct containing no value. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMclear()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobj or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistSetUint( + doc: *mut AMdoc, + obj: *mut AMobj, + index: usize, + insert: bool, + value: u64, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj = to_obj!(obj); + to_result(if insert { + doc.insert(obj, index, value) + } else { + doc.put(obj, index, value) + }) +} + +/// \memberof AMresult +/// \brief Get an `AMresult` struct's `AMobj` struct value. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \return A pointer to an `AMobj` struct. +/// \pre \p result must be a valid address. +/// \internal +/// +/// # Safety +/// result must be a pointer to a valid AMresult +#[no_mangle] +pub unsafe extern "C" fn AMgetObj(_result: *mut AMresult) -> *mut AMobj { + unimplemented!() +} + +/// \memberof AMresult +/// \brief Deallocates the storage for an `AMresult` struct. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \pre \p result must be a valid address. +/// \internal +/// +/// # Safety +/// result must be a pointer to a valid AMresult +#[no_mangle] +pub unsafe extern "C" fn AMclear(result: *mut AMresult) { + if !result.is_null() { + let result: AMresult = *Box::from_raw(result); + drop(result) + } +} + +/// \memberof AMresult +/// \brief Get an `AMresult` struct's error message string. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \return A UTF-8 string value or `NULL`. +/// \pre \p result must be a valid address. 
+/// \internal +/// +/// # Safety +/// result must be a pointer to a valid AMresult +#[no_mangle] +pub unsafe extern "C" fn AMerrorMessage(result: *mut AMresult) -> *const c_char { + match result.as_mut() { + Some(AMresult::Error(s)) => s.as_ptr(), + _ => std::ptr::null::(), + } +} diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs new file mode 100644 index 00000000..e51f251b --- /dev/null +++ b/automerge-c/src/result.rs @@ -0,0 +1,36 @@ +use automerge as am; +use std::ffi::CString; + +/// \struct AMresult +/// \brief A container of result codes, messages and values. +pub enum AMresult { + Ok, + ObjId(am::ObjId), + Values(Vec>), + Changes(Vec), + Error(CString), +} + +impl AMresult { + pub(crate) fn err(s: &str) -> Self { + AMresult::Error(CString::new(s).unwrap()) + } +} + +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(obj) => AMresult::ObjId(obj), + Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()), + } + } +} + +impl From> for AMresult { + fn from(maybe: Result<(), am::AutomergeError>) -> Self { + match maybe { + Ok(()) => AMresult::Ok, + Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()), + } + } +} diff --git a/automerge-c/src/utils.rs b/automerge-c/src/utils.rs new file mode 100644 index 00000000..e6b50074 --- /dev/null +++ b/automerge-c/src/utils.rs @@ -0,0 +1,24 @@ +use crate::{AMobj, AMresult}; +use automerge as am; +use std::ops::Deref; + +impl Deref for AMobj { + type Target = am::ObjId; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[allow(clippy::not_unsafe_ptr_arg_deref)] +impl From<*const AMobj> for AMobj { + fn from(obj: *const AMobj) -> Self { + unsafe { obj.as_ref().cloned().unwrap_or(AMobj(am::ROOT)) } + } +} + +impl From for *mut AMresult { + fn from(b: AMresult) -> Self { + Box::into_raw(Box::new(b)) + } +} diff --git a/automerge-c/test/CMakeLists.txt b/automerge-c/test/CMakeLists.txt new file mode 100644 index 00000000..f680043a --- /dev/null +++ 
b/automerge-c/test/CMakeLists.txt @@ -0,0 +1,49 @@ +cmake_minimum_required(VERSION 3.18 FATAL_ERROR) + +find_package(cmocka REQUIRED) + +add_executable( + test_${LIBRARY_NAME} + group_state.c + amlistset_tests.c + ammapset_tests.c + main.c +) + +set_target_properties(test_${LIBRARY_NAME} PROPERTIES LINKER_LANGUAGE C) + +# \note An imported library's INTERFACE_INCLUDE_DIRECTORIES property can't +# contain a non-existent path so its build-time include directory +# must be specified for all of its dependent targets instead. +target_include_directories( + test_${LIBRARY_NAME} + PRIVATE "$" +) + +target_link_libraries(test_${LIBRARY_NAME} PRIVATE cmocka ${LIBRARY_NAME}) + +add_dependencies(test_${LIBRARY_NAME} ${LIBRARY_NAME}_artifacts) + +if(BUILD_SHARED_LIBS AND WIN32) + add_custom_command( + TARGET test_${LIBRARY_NAME} + POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different + ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX} + ${CMAKE_CURRENT_BINARY_DIR} + COMMENT "Copying the DLL built by Cargo into the test directory..." + VERBATIM + ) +endif() + +add_test(NAME test_${LIBRARY_NAME} COMMAND test_${LIBRARY_NAME}) + +add_custom_command( + TARGET test_${LIBRARY_NAME} + POST_BUILD + COMMAND + ${CMAKE_CTEST_COMMAND} --config $ --output-on-failure + COMMENT + "Running the test(s)..." 
+ VERBATIM +) diff --git a/automerge-c/test/amlistset_tests.c b/automerge-c/test/amlistset_tests.c new file mode 100644 index 00000000..dcafb513 --- /dev/null +++ b/automerge-c/test/amlistset_tests.c @@ -0,0 +1,170 @@ +#include +#include +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include "group_state.h" + +#define test_AMlistSet(label, mode) test_AMlistSet ## label ## _ ## mode + +#define static_void_test_AMlistSet(label, mode, value) \ +static void test_AMlistSet ## label ## _ ## mode(void **state) { \ + GroupState* group_state = *state; \ + AMresult* res = AMlistSet ## label(group_state->doc, AM_ROOT, 0, !strcmp(#mode, "insert"), value); \ + if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ +} + +static_void_test_AMlistSet(Counter, insert, INT64_MAX) + +static_void_test_AMlistSet(Counter, update, INT64_MAX) + +static_void_test_AMlistSet(F64, insert, DBL_MAX) + +static_void_test_AMlistSet(F64, update, DBL_MAX) + +static_void_test_AMlistSet(Int, insert, INT64_MAX) + +static_void_test_AMlistSet(Int, update, INT64_MAX) + +static_void_test_AMlistSet(Str, insert, "Hello, world!") + +static_void_test_AMlistSet(Str, update, "Hello, world!") + +static_void_test_AMlistSet(Timestamp, insert, INT64_MAX) + +static_void_test_AMlistSet(Timestamp, update, INT64_MAX) + +static_void_test_AMlistSet(Uint, insert, UINT64_MAX) + +static_void_test_AMlistSet(Uint, update, UINT64_MAX) + +static void test_AMlistSetBytes_insert(void **state) { + static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; + + GroupState* group_state = *state; + AMresult* res = AMlistSetBytes( + group_state->doc, + AM_ROOT, + 0, + true, + BYTES_VALUE, + sizeof(BYTES_VALUE) / sizeof(uint8_t) + ); + if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { + fail_msg("%s", AMerrorMessage(res)); + } +} + +static void test_AMlistSetBytes_update(void **state) { + static uint8_t const BYTES_VALUE[] = 
{INT8_MIN, INT8_MAX / 2, INT8_MAX}; + + GroupState* group_state = *state; + AMresult* res = AMlistSetBytes( + group_state->doc, + AM_ROOT, + 0, + false, + BYTES_VALUE, + sizeof(BYTES_VALUE) / sizeof(uint8_t) + ); + if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { + fail_msg("%s", AMerrorMessage(res)); + } +} + + +static void test_AMlistSetNull_insert(void **state) { + GroupState* group_state = *state; + AMresult* res = AMlistSetNull(group_state->doc, AM_ROOT, 0, true); + if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { + fail_msg("%s", AMerrorMessage(res)); + } +} + +static void test_AMlistSetNull_update(void **state) { + GroupState* group_state = *state; + AMresult* res = AMlistSetNull(group_state->doc, AM_ROOT, 0, false); + if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { + fail_msg("would be consolidated into%s", AMerrorMessage(res)); + } +} + +static void test_AMlistSetObject_insert(void **state) { + static AmObjType const OBJ_TYPES[] = { + AM_OBJ_TYPE_LIST, + AM_OBJ_TYPE_MAP, + AM_OBJ_TYPE_TEXT, + }; + static AmObjType const* const end = OBJ_TYPES + sizeof(OBJ_TYPES) / sizeof(AmObjType); + + GroupState* group_state = *state; + for (AmObjType const* next = OBJ_TYPES; next != end; ++next) { + AMresult* res = AMlistSetObject( + group_state->doc, + AM_ROOT, + 0, + true, + *next + ); + if (AMresultStatus(res) != AM_STATUS_OBJ_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + } +} + +static void test_AMlistSetObject_update(void **state) { + static AmObjType const OBJ_TYPES[] = { + AM_OBJ_TYPE_LIST, + AM_OBJ_TYPE_MAP, + AM_OBJ_TYPE_TEXT, + }; + static AmObjType const* const end = OBJ_TYPES + sizeof(OBJ_TYPES) / sizeof(AmObjType); + + GroupState* group_state = *state; + for (AmObjType const* next = OBJ_TYPES; next != end; ++next) { + AMresult* res = AMlistSetObject( + group_state->doc, + AM_ROOT, + 0, + false, + *next + ); + if (AMresultStatus(res) != AM_STATUS_OBJ_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + } +} + +int run_AMlistSet_tests(void) { + 
const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMlistSetBytes_insert), + cmocka_unit_test(test_AMlistSetBytes_update), + cmocka_unit_test(test_AMlistSet(Counter, insert)), + cmocka_unit_test(test_AMlistSet(Counter, update)), + cmocka_unit_test(test_AMlistSet(F64, insert)), + cmocka_unit_test(test_AMlistSet(F64, update)), + cmocka_unit_test(test_AMlistSet(Int, insert)), + cmocka_unit_test(test_AMlistSet(Int, update)), + cmocka_unit_test(test_AMlistSetNull_insert), + cmocka_unit_test(test_AMlistSetNull_update), + cmocka_unit_test(test_AMlistSetObject_insert), + cmocka_unit_test(test_AMlistSetObject_update), + cmocka_unit_test(test_AMlistSet(Str, insert)), + cmocka_unit_test(test_AMlistSet(Str, update)), + cmocka_unit_test(test_AMlistSet(Timestamp, insert)), + cmocka_unit_test(test_AMlistSet(Timestamp, update)), + cmocka_unit_test(test_AMlistSet(Uint, insert)), + cmocka_unit_test(test_AMlistSet(Uint, update)), + }; + + return cmocka_run_group_tests(tests, group_setup, group_teardown); +} diff --git a/automerge-c/test/ammapset_tests.c b/automerge-c/test/ammapset_tests.c new file mode 100644 index 00000000..f2bb675c --- /dev/null +++ b/automerge-c/test/ammapset_tests.c @@ -0,0 +1,97 @@ +#include +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include "group_state.h" + +#define test_AMmapSet(label) test_AMmapSet ## label + +#define static_void_test_AMmapSet(label, value) \ +static void test_AMmapSet ## label(void **state) { \ + GroupState* group_state = *state; \ + AMresult* res = AMmapSet ## label(group_state->doc, AM_ROOT, #label, value); \ + if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ +} + +static_void_test_AMmapSet(Int, INT64_MAX) + +static_void_test_AMmapSet(Uint, UINT64_MAX) + +static_void_test_AMmapSet(Str, "Hello, world!") + +static_void_test_AMmapSet(F64, DBL_MAX) + +static_void_test_AMmapSet(Counter, INT64_MAX) + 
+static_void_test_AMmapSet(Timestamp, INT64_MAX) + +static void test_AMmapSetBytes(void **state) { + static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; + + GroupState* group_state = *state; + AMresult* res = AMmapSetBytes( + group_state->doc, + AM_ROOT, + "Bytes", + BYTES_VALUE, + sizeof(BYTES_VALUE) / sizeof(uint8_t) + ); + if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { + fail_msg("%s", AMerrorMessage(res)); + } +} + +static void test_AMmapSetNull(void **state) { + GroupState* group_state = *state; + AMresult* res = AMmapSetNull(group_state->doc, AM_ROOT, "Null"); + if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { + fail_msg("%s", AMerrorMessage(res)); + } +} + +static void test_AMmapSetObject(void **state) { + static AmObjType const OBJ_TYPES[] = { + AM_OBJ_TYPE_LIST, + AM_OBJ_TYPE_MAP, + AM_OBJ_TYPE_TEXT, + }; + static AmObjType const* const end = OBJ_TYPES + sizeof(OBJ_TYPES) / sizeof(AmObjType); + + GroupState* group_state = *state; + for (AmObjType const* next = OBJ_TYPES; next != end; ++next) { + AMresult* res = AMmapSetObject( + group_state->doc, + AM_ROOT, + "Object", + *next + ); + if (AMresultStatus(res) != AM_STATUS_OBJ_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + } +} + +int run_AMmapSet_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMmapSetBytes), + cmocka_unit_test(test_AMmapSet(Counter)), + cmocka_unit_test(test_AMmapSet(F64)), + cmocka_unit_test(test_AMmapSet(Int)), + cmocka_unit_test(test_AMmapSetNull), + cmocka_unit_test(test_AMmapSetObject), + cmocka_unit_test(test_AMmapSet(Str)), + cmocka_unit_test(test_AMmapSet(Timestamp)), + cmocka_unit_test(test_AMmapSet(Uint)), + }; + + return cmocka_run_group_tests(tests, group_setup, group_teardown); +} diff --git a/automerge-c/test/group_state.c b/automerge-c/test/group_state.c new file mode 100644 index 00000000..6c785907 --- /dev/null +++ b/automerge-c/test/group_state.c @@ -0,0 +1,18 @@ +#include + +/* local */ +#include "group_state.h" + 
+int group_setup(void** state) { + GroupState* group_state = calloc(1, sizeof(GroupState)); + group_state->doc = AMcreate(); + *state = group_state; + return 0; +} + +int group_teardown(void** state) { + GroupState* group_state = *state; + AMdestroy(group_state->doc); + free(group_state); + return 0; +} diff --git a/automerge-c/test/group_state.h b/automerge-c/test/group_state.h new file mode 100644 index 00000000..749209c2 --- /dev/null +++ b/automerge-c/test/group_state.h @@ -0,0 +1,15 @@ +#ifndef GROUP_STATE_INCLUDED +#define GROUP_STATE_INCLUDED + +/* local */ +#include "automerge.h" + +typedef struct { + AMdoc* doc; +} GroupState; + +int group_setup(void** state); + +int group_teardown(void** state); + +#endif diff --git a/automerge-c/test/main.c b/automerge-c/test/main.c new file mode 100644 index 00000000..b637434e --- /dev/null +++ b/automerge-c/test/main.c @@ -0,0 +1,31 @@ +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include "group_state.h" + +extern int run_AMlistSet_tests(void); + +extern int run_AMmapSet_tests(void); + +static void test_AMconfig(void **state) { + GroupState* group_state = *state; + AMconfig(group_state->doc, "actor", "aabbcc"); +} + +int main(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMconfig), + }; + + return ( + run_AMlistSet_tests() + + run_AMmapSet_tests() + + cmocka_run_group_tests(tests, group_setup, group_teardown) + ); +} diff --git a/automerge/src/types.rs b/automerge/src/types.rs index ee3b2e1b..1ee7d9b7 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -63,6 +63,16 @@ impl TryFrom<&str> for ActorId { } } +impl TryFrom for ActorId { + type Error = error::InvalidActorId; + + fn try_from(s: String) -> Result { + hex::decode(&s) + .map(ActorId::from) + .map_err(|_| error::InvalidActorId(s)) + } +} + impl From for ActorId { fn from(u: uuid::Uuid) -> Self { ActorId(TinyVec::from(*u.as_bytes())) diff --git a/deny.toml b/deny.toml index 
888b7c58..4246fa07 100644 --- a/deny.toml +++ b/deny.toml @@ -103,6 +103,11 @@ exceptions = [ # this is a LGPL like license in the CLI # since this is an application not a library people would link to it should be fine { allow = ["EPL-2.0"], name = "colored_json" }, + + # these are needed by cbindgen and its dependancies + # should be revied more fully before release + { allow = ["MPL-2.0"], name = "cbindgen" }, + { allow = ["BSD-3-Clause"], name = "instant" }, ] # Some crates don't have (easily) machine readable licensing information, @@ -169,6 +174,10 @@ skip = [ { name = "itoa", version = "0.4.8" }, { name = "textwrap", version = "0.11.0" }, { name = "clap", version = "2.34.0" }, + + # These are transitive depdendencies of cbindgen + { name = "strsim", version = "0.8.0" }, + { name = "heck", version = "0.3.3" }, ] # Similarly to `skip` allows you to skip certain crates during duplicate # detection. Unlike skip, it also includes the entire tree of transitive diff --git a/scripts/ci/cmake-build b/scripts/ci/cmake-build new file mode 100755 index 00000000..ac715859 --- /dev/null +++ b/scripts/ci/cmake-build @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +set -eoux pipefail + +THIS_SCRIPT=$(dirname "$0"); +# \note CMake's default build types are "Debug", "MinSizeRel", "Release" and +# "RelWithDebInfo" but custom ones can also be defined so we pass it verbatim. +BUILD_TYPE=$1; +LIB_TYPE=$2; +if [ "${LIB_TYPE,,}" == "shared" ]; then + SHARED_TOGGLE="ON" +else + SHARED_TOGGLE="OFF" +fi +C_PROJECT=$THIS_SCRIPT/../../automerge-c; +mkdir -p $C_PROJECT/build; +cd $C_PROJECT/build; +cmake --log-level=ERROR -B . -S .. 
-DCMAKE_BUILD_TYPE=$BUILD_TYPE -DBUILD_SHARED_LIBS=$SHARED_TOGGLE; +cmake --build .; diff --git a/scripts/ci/run b/scripts/ci/run index a0fa3616..3c3f2d6d 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -8,3 +8,4 @@ set -eou pipefail ./scripts/ci/advisory ./scripts/ci/wasm_tests ./scripts/ci/js_tests +./scripts/ci/cmake-build Release static From b6fd7ac26e0004aecf736ac2744c483bc79cc823 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 1 Apr 2022 16:55:59 +0100 Subject: [PATCH 283/730] Add op_observer to documents and transactions This replaces the built-in patches with a more generic mechanism, and includes a convenience observer which uses the old patches. --- automerge-cli/src/examine.rs | 2 +- automerge-cli/src/export.rs | 2 +- automerge-cli/src/merge.rs | 6 +- automerge-wasm/src/lib.rs | 251 +++++++++++------- automerge-wasm/test/test.ts | 190 ++++++++++--- automerge/examples/quickstart.rs | 13 +- automerge/src/autocommit.rs | 70 +++-- automerge/src/automerge.rs | 194 ++++---------- automerge/src/change.rs | 2 +- automerge/src/lib.rs | 7 +- automerge/src/op_observer.rs | 87 ++++++ automerge/src/op_set.rs | 94 ++++++- automerge/src/sync.rs | 8 +- automerge/src/transaction/commit.rs | 17 +- automerge/src/transaction/inner.rs | 133 +++++----- .../src/transaction/manual_transaction.rs | 31 ++- automerge/src/transaction/transactable.rs | 4 +- automerge/src/types.rs | 21 -- automerge/src/value.rs | 8 + automerge/tests/test.rs | 110 ++++---- edit-trace/benches/main.rs | 6 +- edit-trace/src/main.rs | 4 +- 22 files changed, 775 insertions(+), 485 deletions(-) create mode 100644 automerge/src/op_observer.rs diff --git a/automerge-cli/src/examine.rs b/automerge-cli/src/examine.rs index 010fa0f1..555eda82 100644 --- a/automerge-cli/src/examine.rs +++ b/automerge-cli/src/examine.rs @@ -29,7 +29,7 @@ pub fn examine( input .read_to_end(&mut buf) .map_err(|e| ExamineError::ReadingChanges { source: e })?; - let doc = am::Automerge::load(&buf) + let doc = 
am::Automerge::load(&buf, am::NULL_OBSERVER) .map_err(|e| ExamineError::ApplyingInitialChanges { source: e })?; let uncompressed_changes: Vec<_> = doc.get_changes(&[]).iter().map(|c| c.decode()).collect(); if is_tty { diff --git a/automerge-cli/src/export.rs b/automerge-cli/src/export.rs index 937ba794..e1e6cc32 100644 --- a/automerge-cli/src/export.rs +++ b/automerge-cli/src/export.rs @@ -68,7 +68,7 @@ fn scalar_to_json(val: &am::ScalarValue) -> serde_json::Value { } fn get_state_json(input_data: Vec) -> Result { - let doc = am::Automerge::load(&input_data).unwrap(); // FIXME + let doc = am::Automerge::load(&input_data, am::NULL_OBSERVER).unwrap(); // FIXME Ok(map_to_json(&doc, &am::ObjId::Root)) } diff --git a/automerge-cli/src/merge.rs b/automerge-cli/src/merge.rs index 936af246..a0ef6dd1 100644 --- a/automerge-cli/src/merge.rs +++ b/automerge-cli/src/merge.rs @@ -38,7 +38,7 @@ pub(super) fn merge(inputs: Inputs, mut output: W) -> Result< Inputs::Stdin => { let mut input = Vec::new(); std::io::stdin().read_to_end(&mut input)?; - backend.load_incremental(&input)?; + backend.load_incremental(&input, am::NULL_OBSERVER)?; } Inputs::Paths(paths) => { for path in paths { @@ -53,6 +53,8 @@ pub(super) fn merge(inputs: Inputs, mut output: W) -> Result< fn load_path(backend: &mut am::Automerge, path: &Path) -> Result<(), Box> { let input = std::fs::read(path).map_err(Box::new)?; - backend.load_incremental(&input).map_err(Box::new)?; + backend + .load_incremental(&input, am::NULL_OBSERVER) + .map_err(Box::new)?; Ok(()) } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 9c23685c..198d00ab 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -2,6 +2,8 @@ use am::transaction::CommitOptions; use am::transaction::Transactable; use automerge as am; +use automerge::Patch; +use automerge::VecOpObserver; use automerge::{Change, ObjId, Prop, Value, ROOT}; use js_sys::{Array, Object, Uint8Array}; use std::convert::TryInto; @@ -32,7 +34,10 
@@ static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[wasm_bindgen] #[derive(Debug)] -pub struct Automerge(automerge::AutoCommit); +pub struct Automerge { + doc: automerge::AutoCommit, + observer: Option, +} #[wasm_bindgen] impl Automerge { @@ -42,27 +47,45 @@ impl Automerge { let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec()); automerge.set_actor(a); } - Ok(Automerge(automerge)) + Ok(Automerge { + doc: automerge, + observer: None, + }) + } + + fn ensure_transaction_closed(&mut self) { + if self.doc.pending_ops() > 0 { + let mut opts = CommitOptions::default(); + if let Some(observer) = self.observer.as_mut() { + opts.set_op_observer(observer); + } + self.doc.commit_with(opts); + } } #[allow(clippy::should_implement_trait)] pub fn clone(&mut self, actor: Option) -> Result { - if self.0.pending_ops() > 0 { - self.0.commit(); - } - let mut automerge = Automerge(self.0.clone()); + self.ensure_transaction_closed(); + let mut automerge = Automerge { + doc: self.doc.clone(), + observer: None, + }; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); - automerge.0.set_actor(actor); + automerge.doc.set_actor(actor); } Ok(automerge) } pub fn fork(&mut self, actor: Option) -> Result { - let mut automerge = Automerge(self.0.fork()); + self.ensure_transaction_closed(); + let mut automerge = Automerge { + doc: self.doc.fork(), + observer: None, + }; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); - automerge.0.set_actor(actor); + automerge.doc.set_actor(actor); } Ok(automerge) } @@ -70,10 +93,13 @@ impl Automerge { #[wasm_bindgen(js_name = forkAt)] pub fn fork_at(&mut self, heads: JsValue, actor: Option) -> Result { let deps: Vec<_> = JS(heads).try_into()?; - let mut automerge = Automerge(self.0.fork_at(&deps)?); + let mut automerge = Automerge { + doc: self.doc.fork_at(&deps)?, + observer: None, + }; if let Some(s) = actor 
{ let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); - automerge.0.set_actor(actor); + automerge.doc.set_actor(actor); } Ok(automerge) } @@ -82,7 +108,7 @@ impl Automerge { #[wasm_bindgen(js_name = pendingOps)] pub fn pending_ops(&self) -> JsValue { - (self.0.pending_ops() as u32).into() + (self.doc.pending_ops() as u32).into() } pub fn commit(&mut self, message: Option, time: Option) -> JsValue { @@ -93,12 +119,16 @@ impl Automerge { if let Some(time) = time { commit_opts.set_time(time as i64); } - let hash = self.0.commit_with(commit_opts); + if let Some(observer) = self.observer.as_mut() { + commit_opts.set_op_observer(observer); + } + let hash = self.doc.commit_with(commit_opts); JsValue::from_str(&hex::encode(&hash.0)) } pub fn merge(&mut self, other: &mut Automerge) -> Result { - let heads = self.0.merge(&mut other.0)?; + self.ensure_transaction_closed(); + let heads = self.doc.merge(&mut other.doc, self.observer.as_mut())?; let heads: Array = heads .iter() .map(|h| JsValue::from_str(&hex::encode(&h.0))) @@ -107,28 +137,28 @@ impl Automerge { } pub fn rollback(&mut self) -> f64 { - self.0.rollback() as f64 + self.doc.rollback() as f64 } - pub fn keys(&mut self, obj: JsValue, heads: Option) -> Result { + pub fn keys(&self, obj: JsValue, heads: Option) -> Result { let obj = self.import(obj)?; let result = if let Some(heads) = get_heads(heads) { - self.0 + self.doc .keys_at(&obj, &heads) .map(|s| JsValue::from_str(&s)) .collect() } else { - self.0.keys(&obj).map(|s| JsValue::from_str(&s)).collect() + self.doc.keys(&obj).map(|s| JsValue::from_str(&s)).collect() }; Ok(result) } - pub fn text(&mut self, obj: JsValue, heads: Option) -> Result { + pub fn text(&self, obj: JsValue, heads: Option) -> Result { let obj = self.import(obj)?; if let Some(heads) = get_heads(heads) { - Ok(self.0.text_at(&obj, &heads)?) + Ok(self.doc.text_at(&obj, &heads)?) } else { - Ok(self.0.text(&obj)?) + Ok(self.doc.text(&obj)?) 
} } @@ -144,7 +174,7 @@ impl Automerge { let delete_count = delete_count as usize; let mut vals = vec![]; if let Some(t) = text.as_string() { - self.0.splice_text(&obj, start, delete_count, &t)?; + self.doc.splice_text(&obj, start, delete_count, &t)?; } else { if let Ok(array) = text.dyn_into::() { for i in array.iter() { @@ -154,7 +184,8 @@ impl Automerge { vals.push(value); } } - self.0.splice(&obj, start, delete_count, vals.into_iter())?; + self.doc + .splice(&obj, start, delete_count, vals.into_iter())?; } Ok(()) } @@ -164,8 +195,8 @@ impl Automerge { let value = self .import_scalar(&value, &datatype.as_string()) .ok_or_else(|| to_js_err("invalid scalar value"))?; - let index = self.0.length(&obj); - self.0.insert(&obj, index, value)?; + let index = self.doc.length(&obj); + self.doc.insert(&obj, index, value)?; Ok(()) } @@ -174,8 +205,8 @@ impl Automerge { let obj = self.import(obj)?; let (value, subvals) = to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; - let index = self.0.length(&obj); - let opid = self.0.insert_object(&obj, index, value)?; + let index = self.doc.length(&obj); + let opid = self.doc.insert_object(&obj, index, value)?; self.subset(&opid, subvals)?; Ok(opid.to_string().into()) } @@ -192,7 +223,7 @@ impl Automerge { let value = self .import_scalar(&value, &datatype.as_string()) .ok_or_else(|| to_js_err("expected scalar value"))?; - self.0.insert(&obj, index as usize, value)?; + self.doc.insert(&obj, index as usize, value)?; Ok(()) } @@ -207,7 +238,7 @@ impl Automerge { let index = index as f64; let (value, subvals) = to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; - let opid = self.0.insert_object(&obj, index as usize, value)?; + let opid = self.doc.insert_object(&obj, index as usize, value)?; self.subset(&opid, subvals)?; Ok(opid.to_string().into()) } @@ -224,7 +255,7 @@ impl Automerge { let value = self .import_scalar(&value, &datatype.as_string()) .ok_or_else(|| to_js_err("expected scalar 
value"))?; - self.0.put(&obj, prop, value)?; + self.doc.put(&obj, prop, value)?; Ok(()) } @@ -239,7 +270,7 @@ impl Automerge { let prop = self.import_prop(prop)?; let (value, subvals) = to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; - let opid = self.0.put_object(&obj, prop, value)?; + let opid = self.doc.put_object(&obj, prop, value)?; self.subset(&opid, subvals)?; Ok(opid.to_string().into()) } @@ -249,16 +280,18 @@ impl Automerge { let (value, subvals) = self.import_value(&v, None)?; //let opid = self.0.set(id, p, value)?; let opid = match (p, value) { - (Prop::Map(s), Value::Object(objtype)) => Some(self.0.put_object(obj, s, objtype)?), + (Prop::Map(s), Value::Object(objtype)) => { + Some(self.doc.put_object(obj, s, objtype)?) + } (Prop::Map(s), Value::Scalar(scalar)) => { - self.0.put(obj, s, scalar.into_owned())?; + self.doc.put(obj, s, scalar.into_owned())?; None } (Prop::Seq(i), Value::Object(objtype)) => { - Some(self.0.insert_object(obj, i, objtype)?) + Some(self.doc.insert_object(obj, i, objtype)?) } (Prop::Seq(i), Value::Scalar(scalar)) => { - self.0.insert(obj, i, scalar.into_owned())?; + self.doc.insert(obj, i, scalar.into_owned())?; None } }; @@ -280,13 +313,13 @@ impl Automerge { let value: f64 = value .as_f64() .ok_or_else(|| to_js_err("increment needs a numeric value"))?; - self.0.increment(&obj, prop, value as i64)?; + self.doc.increment(&obj, prop, value as i64)?; Ok(()) } #[wasm_bindgen(js_name = get)] pub fn get( - &mut self, + &self, obj: JsValue, prop: JsValue, heads: Option, @@ -297,9 +330,9 @@ impl Automerge { let heads = get_heads(heads); if let Ok(prop) = prop { let value = if let Some(h) = heads { - self.0.get_at(&obj, prop, &h)? + self.doc.get_at(&obj, prop, &h)? } else { - self.0.get(&obj, prop)? + self.doc.get(&obj, prop)? 
}; match value { Some((Value::Object(obj_type), obj_id)) => { @@ -321,7 +354,7 @@ impl Automerge { #[wasm_bindgen(js_name = getAll)] pub fn get_all( - &mut self, + &self, obj: JsValue, arg: JsValue, heads: Option, @@ -331,9 +364,9 @@ impl Automerge { let prop = to_prop(arg); if let Ok(prop) = prop { let values = if let Some(heads) = get_heads(heads) { - self.0.get_all_at(&obj, prop, &heads) + self.doc.get_all_at(&obj, prop, &heads) } else { - self.0.get_all(&obj, prop) + self.doc.get_all(&obj, prop) } .map_err(to_js_err)?; for value in values { @@ -362,24 +395,38 @@ impl Automerge { let enable = enable .as_bool() .ok_or_else(|| to_js_err("expected boolean"))?; - self.0.enable_patches(enable); + if enable { + if self.observer.is_none() { + self.observer = Some(VecOpObserver::default()); + } + } else { + self.observer = None; + } Ok(()) } #[wasm_bindgen(js_name = popPatches)] pub fn pop_patches(&mut self) -> Result { - let patches = self.0.pop_patches(); + // transactions send out observer updates as they occur, not waiting for them to be + // committed. + // If we pop the patches then we won't be able to revert them. 
+ self.ensure_transaction_closed(); + + let patches = self + .observer + .as_mut() + .map_or_else(Vec::new, |o| o.take_patches()); let result = Array::new(); for p in patches { let patch = Object::new(); match p { - am::Patch::Assign(am::AssignPatch { + Patch::Put { obj, key, value, conflict, - }) => { - js_set(&patch, "action", "assign")?; + } => { + js_set(&patch, "action", "put")?; js_set(&patch, "obj", obj.to_string())?; js_set(&patch, "key", key)?; match value { @@ -395,7 +442,7 @@ impl Automerge { js_set(&patch, "conflict", conflict)?; } - am::Patch::Insert(obj, index, value) => { + Patch::Insert(obj, index, value) => { js_set(&patch, "action", "insert")?; js_set(&patch, "obj", obj.to_string())?; js_set(&patch, "key", index as f64)?; @@ -411,7 +458,7 @@ impl Automerge { }; } - am::Patch::Delete(obj, key) => { + Patch::Delete(obj, key) => { js_set(&patch, "action", "delete")?; js_set(&patch, "obj", obj.to_string())?; js_set(&patch, "key", key)?; @@ -422,50 +469,60 @@ impl Automerge { Ok(result) } - pub fn length(&mut self, obj: JsValue, heads: Option) -> Result { + pub fn length(&self, obj: JsValue, heads: Option) -> Result { let obj = self.import(obj)?; if let Some(heads) = get_heads(heads) { - Ok(self.0.length_at(&obj, &heads) as f64) + Ok(self.doc.length_at(&obj, &heads) as f64) } else { - Ok(self.0.length(&obj) as f64) + Ok(self.doc.length(&obj) as f64) } } pub fn delete(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop = to_prop(prop)?; - self.0.delete(&obj, prop).map_err(to_js_err)?; + self.doc.delete(&obj, prop).map_err(to_js_err)?; Ok(()) } pub fn save(&mut self) -> Uint8Array { - Uint8Array::from(self.0.save().as_slice()) + self.ensure_transaction_closed(); + Uint8Array::from(self.doc.save().as_slice()) } #[wasm_bindgen(js_name = saveIncremental)] pub fn save_incremental(&mut self) -> Uint8Array { - let bytes = self.0.save_incremental(); + self.ensure_transaction_closed(); + let bytes = 
self.doc.save_incremental(); Uint8Array::from(bytes.as_slice()) } #[wasm_bindgen(js_name = loadIncremental)] pub fn load_incremental(&mut self, data: Uint8Array) -> Result { + self.ensure_transaction_closed(); let data = data.to_vec(); - let len = self.0.load_incremental(&data).map_err(to_js_err)?; + let len = self + .doc + .load_incremental(&data, self.observer.as_mut()) + .map_err(to_js_err)?; Ok(len as f64) } #[wasm_bindgen(js_name = applyChanges)] pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> { + self.ensure_transaction_closed(); let changes: Vec<_> = JS(changes).try_into()?; - self.0.apply_changes(changes).map_err(to_js_err)?; + self.doc + .apply_changes(changes, self.observer.as_mut()) + .map_err(to_js_err)?; Ok(()) } #[wasm_bindgen(js_name = getChanges)] pub fn get_changes(&mut self, have_deps: JsValue) -> Result { + self.ensure_transaction_closed(); let deps: Vec<_> = JS(have_deps).try_into()?; - let changes = self.0.get_changes(&deps); + let changes = self.doc.get_changes(&deps); let changes: Array = changes .iter() .map(|c| Uint8Array::from(c.raw_bytes())) @@ -475,8 +532,9 @@ impl Automerge { #[wasm_bindgen(js_name = getChangeByHash)] pub fn get_change_by_hash(&mut self, hash: JsValue) -> Result { + self.ensure_transaction_closed(); let hash = hash.into_serde().map_err(to_js_err)?; - let change = self.0.get_change_by_hash(&hash); + let change = self.doc.get_change_by_hash(&hash); if let Some(c) = change { Ok(Uint8Array::from(c.raw_bytes()).into()) } else { @@ -486,7 +544,8 @@ impl Automerge { #[wasm_bindgen(js_name = getChangesAdded)] pub fn get_changes_added(&mut self, other: &mut Automerge) -> Result { - let changes = self.0.get_changes_added(&mut other.0); + self.ensure_transaction_closed(); + let changes = self.doc.get_changes_added(&mut other.doc); let changes: Array = changes .iter() .map(|c| Uint8Array::from(c.raw_bytes())) @@ -496,7 +555,8 @@ impl Automerge { #[wasm_bindgen(js_name = getHeads)] pub fn get_heads(&mut 
self) -> Array { - let heads = self.0.get_heads(); + self.ensure_transaction_closed(); + let heads = self.doc.get_heads(); let heads: Array = heads .iter() .map(|h| JsValue::from_str(&hex::encode(&h.0))) @@ -505,28 +565,31 @@ impl Automerge { } #[wasm_bindgen(js_name = getActorId)] - pub fn get_actor_id(&mut self) -> String { - let actor = self.0.get_actor(); + pub fn get_actor_id(&self) -> String { + let actor = self.doc.get_actor(); actor.to_string() } #[wasm_bindgen(js_name = getLastLocalChange)] pub fn get_last_local_change(&mut self) -> Result { - if let Some(change) = self.0.get_last_local_change() { + self.ensure_transaction_closed(); + if let Some(change) = self.doc.get_last_local_change() { Ok(Uint8Array::from(change.raw_bytes())) } else { Err(to_js_err("no local changes")) } } - pub fn dump(&self) { - self.0.dump() + pub fn dump(&mut self) { + self.ensure_transaction_closed(); + self.doc.dump() } #[wasm_bindgen(js_name = getMissingDeps)] pub fn get_missing_deps(&mut self, heads: Option) -> Result { + self.ensure_transaction_closed(); let heads = get_heads(heads).unwrap_or_default(); - let deps = self.0.get_missing_deps(&heads); + let deps = self.doc.get_missing_deps(&heads); let deps: Array = deps .iter() .map(|h| JsValue::from_str(&hex::encode(&h.0))) @@ -540,17 +603,19 @@ impl Automerge { state: &mut SyncState, message: Uint8Array, ) -> Result<(), JsValue> { + self.ensure_transaction_closed(); let message = message.to_vec(); let message = am::sync::Message::decode(message.as_slice()).map_err(to_js_err)?; - self.0 - .receive_sync_message(&mut state.0, message) + self.doc + .receive_sync_message(&mut state.0, message, self.observer.as_mut()) .map_err(to_js_err)?; Ok(()) } #[wasm_bindgen(js_name = generateSyncMessage)] pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result { - if let Some(message) = self.0.generate_sync_message(&mut state.0) { + self.ensure_transaction_closed(); + if let Some(message) = self.doc.generate_sync_message(&mut 
state.0) { Ok(Uint8Array::from(message.encode().as_slice()).into()) } else { Ok(JsValue::null()) @@ -559,26 +624,26 @@ impl Automerge { #[wasm_bindgen(js_name = toJS)] pub fn to_js(&self) -> JsValue { - map_to_js(&self.0, &ROOT) + map_to_js(&self.doc, &ROOT) } pub fn materialize(&self, obj: JsValue, heads: Option) -> Result { let obj = self.import(obj).unwrap_or(ROOT); let heads = get_heads(heads); if let Some(heads) = heads { - match self.0.object_type(&obj) { - Some(am::ObjType::Map) => Ok(map_to_js_at(&self.0, &obj, heads.as_slice())), - Some(am::ObjType::List) => Ok(list_to_js_at(&self.0, &obj, heads.as_slice())), - Some(am::ObjType::Text) => Ok(self.0.text_at(&obj, heads.as_slice())?.into()), - Some(am::ObjType::Table) => Ok(map_to_js_at(&self.0, &obj, heads.as_slice())), + match self.doc.object_type(&obj) { + Some(am::ObjType::Map) => Ok(map_to_js_at(&self.doc, &obj, heads.as_slice())), + Some(am::ObjType::List) => Ok(list_to_js_at(&self.doc, &obj, heads.as_slice())), + Some(am::ObjType::Text) => Ok(self.doc.text_at(&obj, heads.as_slice())?.into()), + Some(am::ObjType::Table) => Ok(map_to_js_at(&self.doc, &obj, heads.as_slice())), None => Err(to_js_err(format!("invalid obj {}", obj))), } } else { - match self.0.object_type(&obj) { - Some(am::ObjType::Map) => Ok(map_to_js(&self.0, &obj)), - Some(am::ObjType::List) => Ok(list_to_js(&self.0, &obj)), - Some(am::ObjType::Text) => Ok(self.0.text(&obj)?.into()), - Some(am::ObjType::Table) => Ok(map_to_js(&self.0, &obj)), + match self.doc.object_type(&obj) { + Some(am::ObjType::Map) => Ok(map_to_js(&self.doc, &obj)), + Some(am::ObjType::List) => Ok(list_to_js(&self.doc, &obj)), + Some(am::ObjType::Text) => Ok(self.doc.text(&obj)?.into()), + Some(am::ObjType::Table) => Ok(map_to_js(&self.doc, &obj)), None => Err(to_js_err(format!("invalid obj {}", obj))), } } @@ -595,9 +660,9 @@ impl Automerge { break; } let val = if is_map { - self.0.get(obj, prop)? + self.doc.get(obj, prop)? 
} else { - self.0.get(obj, am::Prop::Seq(prop.parse().unwrap()))? + self.doc.get(obj, am::Prop::Seq(prop.parse().unwrap()))? }; match val { Some((am::Value::Object(am::ObjType::Map), id)) => { @@ -618,14 +683,14 @@ impl Automerge { } Ok(obj) } else { - Ok(self.0.import(&s)?) + Ok(self.doc.import(&s)?) } } else { Err(to_js_err("invalid objid")) } } - fn import_prop(&mut self, prop: JsValue) -> Result { + fn import_prop(&self, prop: JsValue) -> Result { if let Some(s) = prop.as_string() { Ok(s.into()) } else if let Some(n) = prop.as_f64() { @@ -635,11 +700,7 @@ impl Automerge { } } - fn import_scalar( - &mut self, - value: &JsValue, - datatype: &Option, - ) -> Option { + fn import_scalar(&self, value: &JsValue, datatype: &Option) -> Option { match datatype.as_deref() { Some("boolean") => value.as_bool().map(am::ScalarValue::Boolean), Some("int") => value.as_f64().map(|v| am::ScalarValue::Int(v as i64)), @@ -686,7 +747,7 @@ impl Automerge { } fn import_value( - &mut self, + &self, value: &JsValue, datatype: Option, ) -> Result<(Value<'static>, Vec<(Prop, JsValue)>), JsValue> { @@ -713,12 +774,16 @@ pub fn init(actor: Option) -> Result { #[wasm_bindgen(js_name = loadDoc)] pub fn load(data: Uint8Array, actor: Option) -> Result { let data = data.to_vec(); - let mut automerge = am::AutoCommit::load(&data).map_err(to_js_err)?; + let mut observer = None; + let mut automerge = am::AutoCommit::load(&data, observer.as_mut()).map_err(to_js_err)?; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); automerge.set_actor(actor); } - Ok(Automerge(automerge)) + Ok(Automerge { + doc: automerge, + observer, + }) } #[wasm_bindgen(js_name = encodeChange)] diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 1f824ee6..dd12a153 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -502,7 +502,7 @@ describe('Automerge', () => { doc2.enablePatches(true) 
doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - {action: 'assign', obj: '_root', key: 'hello', value: 'world', datatype: 'str', conflict: false} + {action: 'put', obj: '_root', key: 'hello', value: 'world', datatype: 'str', conflict: false} ]) doc1.free() doc2.free() @@ -514,9 +514,9 @@ describe('Automerge', () => { doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - {action: 'assign', obj: '_root', key: 'birds', value: '1@aaaa', datatype: 'map', conflict: false}, - {action: 'assign', obj: '1@aaaa', key: 'friday', value: '2@aaaa', datatype: 'map', conflict: false}, - {action: 'assign', obj: '2@aaaa', key: 'robins', value: 3, datatype: 'int', conflict: false} + {action: 'put', obj: '_root', key: 'birds', value: '1@aaaa', datatype: 'map', conflict: false}, + {action: 'put', obj: '1@aaaa', key: 'friday', value: '2@aaaa', datatype: 'map', conflict: false}, + {action: 'put', obj: '2@aaaa', key: 'robins', value: 3, datatype: 'int', conflict: false} ]) doc1.free() doc2.free() @@ -530,7 +530,7 @@ describe('Automerge', () => { doc1.delete('_root', 'favouriteBird') doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - {action: 'assign', obj: '_root', key: 'favouriteBird', value: 'Robin', datatype: 'str', conflict: false}, + {action: 'put', obj: '_root', key: 'favouriteBird', value: 'Robin', datatype: 'str', conflict: false}, {action: 'delete', obj: '_root', key: 'favouriteBird'} ]) doc1.free() @@ -543,7 +543,7 @@ describe('Automerge', () => { doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - {action: 'assign', obj: '_root', key: 'birds', value: '1@aaaa', datatype: 'list', conflict: false}, + {action: 'put', obj: '_root', key: 'birds', value: '1@aaaa', datatype: 'list', conflict: false}, {action: 'insert', obj: '1@aaaa', key: 0, value: 'Goldfinch', datatype: 'str'}, {action: 'insert', obj: 
'1@aaaa', key: 1, value: 'Chaffinch', datatype: 'str'} ]) @@ -560,8 +560,8 @@ describe('Automerge', () => { doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ {action: 'insert', obj: '1@aaaa', key: 0, value: '2@aaaa', datatype: 'map'}, - {action: 'assign', obj: '2@aaaa', key: 'species', value: 'Goldfinch', datatype: 'str', conflict: false}, - {action: 'assign', obj: '2@aaaa', key: 'count', value: 3, datatype: 'int', conflict: false} + {action: 'put', obj: '2@aaaa', key: 'species', value: 'Goldfinch', datatype: 'str', conflict: false}, + {action: 'put', obj: '2@aaaa', key: 'count', value: 3, datatype: 'int', conflict: false} ]) doc1.free() doc2.free() @@ -665,12 +665,12 @@ describe('Automerge', () => { assert.deepEqual(doc4.get('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc4.getAll('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - {action: 'assign', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', conflict: false}, - {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} + {action: 'put', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', conflict: false}, + {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} ]) assert.deepEqual(doc4.popPatches(), [ - {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false}, - {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} + {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false}, + {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) @@ -700,16 +700,16 @@ describe('Automerge', () => { ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 
'Goldfinch', '1@cccc'] ]) assert.deepEqual(doc1.popPatches(), [ - {action: 'assign', obj: '_root', key: 'bird', value: 'Chaffinch', datatype: 'str', conflict: true}, - {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} + {action: 'put', obj: '_root', key: 'bird', value: 'Chaffinch', datatype: 'str', conflict: true}, + {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} ]) assert.deepEqual(doc2.popPatches(), [ - {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true}, - {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} + {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true}, + {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} ]) assert.deepEqual(doc3.popPatches(), [ - {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true}, - {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} + {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true}, + {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} ]) doc1.free(); doc2.free(); doc3.free() }) @@ -726,9 +726,9 @@ describe('Automerge', () => { doc3.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc3.popPatches(), [ - {action: 'assign', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', conflict: false}, - {action: 'assign', obj: '_root', key: 'bird', value: 'Chaffinch', datatype: 'str', conflict: true}, - {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} + {action: 'put', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', 
conflict: false}, + {action: 'put', obj: '_root', key: 'bird', value: 'Chaffinch', datatype: 'str', conflict: true}, + {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} ]) doc1.free(); doc2.free(); doc3.free() }) @@ -749,10 +749,10 @@ describe('Automerge', () => { assert.deepEqual(doc2.get('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc2.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc1.popPatches(), [ - {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} + {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} ]) assert.deepEqual(doc2.popPatches(), [ - {action: 'assign', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} + {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} ]) doc1.free(); doc2.free() }) @@ -776,12 +776,12 @@ describe('Automerge', () => { assert.deepEqual(doc4.get('1@aaaa', 0), ['str', 'Redwing']) assert.deepEqual(doc4.getAll('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - {action: 'assign', obj: '1@aaaa', key: 0, value: 'Song Thrush', datatype: 'str', conflict: false}, - {action: 'assign', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: true} + {action: 'put', obj: '1@aaaa', key: 0, value: 'Song Thrush', datatype: 'str', conflict: false}, + {action: 'put', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: true} ]) assert.deepEqual(doc4.popPatches(), [ - {action: 'assign', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: false}, - {action: 'assign', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: true} + {action: 'put', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: false}, + {action: 'put', obj: '1@aaaa', key: 0, value: 
'Redwing', datatype: 'str', conflict: true} ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) @@ -808,15 +808,15 @@ describe('Automerge', () => { assert.deepEqual(doc4.getAll('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']]) assert.deepEqual(doc3.popPatches(), [ {action: 'delete', obj: '1@aaaa', key: 0}, - {action: 'assign', obj: '1@aaaa', key: 1, value: 'Song Thrush', datatype: 'str', conflict: false}, + {action: 'put', obj: '1@aaaa', key: 1, value: 'Song Thrush', datatype: 'str', conflict: false}, {action: 'insert', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str'}, - {action: 'assign', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: true} + {action: 'put', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: true} ]) assert.deepEqual(doc4.popPatches(), [ - {action: 'assign', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str', conflict: false}, - {action: 'assign', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: false}, - {action: 'assign', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str', conflict: false}, - {action: 'assign', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: true} + {action: 'put', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str', conflict: false}, + {action: 'put', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: false}, + {action: 'put', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str', conflict: false}, + {action: 'put', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: true} ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) @@ -833,14 +833,14 @@ describe('Automerge', () => { doc3.loadIncremental(change2) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa'], ['str', 'Wren', '1@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - {action: 'assign', obj: '_root', key: 
'bird', value: 'Robin', datatype: 'str', conflict: false}, - {action: 'assign', obj: '_root', key: 'bird', value: 'Wren', datatype: 'str', conflict: true} + {action: 'put', obj: '_root', key: 'bird', value: 'Robin', datatype: 'str', conflict: false}, + {action: 'put', obj: '_root', key: 'bird', value: 'Wren', datatype: 'str', conflict: true} ]) doc3.loadIncremental(change3) assert.deepEqual(doc3.get('_root', 'bird'), ['str', 'Robin']) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa']]) assert.deepEqual(doc3.popPatches(), [ - {action: 'assign', obj: '_root', key: 'bird', value: 'Robin', datatype: 'str', conflict: false} + {action: 'put', obj: '_root', key: 'bird', value: 'Robin', datatype: 'str', conflict: false} ]) doc1.free(); doc2.free(); doc3.free() }) @@ -856,12 +856,12 @@ describe('Automerge', () => { doc2.loadIncremental(change1) assert.deepEqual(doc1.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) assert.deepEqual(doc1.popPatches(), [ - {action: 'assign', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true}, - {action: 'assign', obj: '1@bbbb', key: 'Sparrowhawk', value: 1, datatype: 'int', conflict: false} + {action: 'put', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true}, + {action: 'put', obj: '1@bbbb', key: 'Sparrowhawk', value: 1, datatype: 'int', conflict: false} ]) assert.deepEqual(doc2.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) assert.deepEqual(doc2.popPatches(), [ - {action: 'assign', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true}, + {action: 'put', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true}, {action: 'insert', obj: '1@aaaa', key: 0, value: 'Parakeet', datatype: 'str'} ]) doc1.free(); doc2.free() @@ -875,11 +875,117 @@ describe('Automerge', () => { doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.get('_root', 'createdAt'), ['timestamp', now]) 
assert.deepEqual(doc2.popPatches(), [ - {action: 'assign', obj: '_root', key: 'createdAt', value: now, datatype: 'timestamp', conflict: false} + {action: 'put', obj: '_root', key: 'createdAt', value: now, datatype: 'timestamp', conflict: false} ]) doc1.free(); doc2.free() }) + it('should capture local put ops', () => { + let doc1 = create('aaaa') + doc1.enablePatches(true) + doc1.put('_root', 'key1', 1) + doc1.put('_root', 'key1', 2) + doc1.put('_root', 'key2', 3) + const map = doc1.putObject('_root', 'map', {}) + const list = doc1.putObject('_root', 'list', []) + + assert.deepEqual(doc1.popPatches(), [ + {action: 'put', obj: '_root', key: 'key1', value: 1, datatype: 'int', conflict: false}, + {action: 'put', obj: '_root', key: 'key1', value: 2, datatype: 'int', conflict: false}, + {action: 'put', obj: '_root', key: 'key2', value: 3, datatype: 'int', conflict: false}, + {action: 'put', obj: '_root', key: 'map', value: map, datatype: 'map', conflict: false}, + {action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false}, + ]) + doc1.free() + }) + + it('should capture local insert ops', () => { + let doc1 = create('aaaa') + doc1.enablePatches(true) + const list = doc1.putObject('_root', 'list', []) + doc1.insert(list, 0, 1) + doc1.insert(list, 0, 2) + doc1.insert(list, 2, 3) + const map = doc1.insertObject(list, 2, {}) + const list2 = doc1.insertObject(list, 2, []) + + assert.deepEqual(doc1.popPatches(), [ + {action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false}, + {action: 'insert', obj: list, key: 0, value: 1, datatype: 'int'}, + {action: 'insert', obj: list, key: 0, value: 2, datatype: 'int'}, + {action: 'insert', obj: list, key: 2, value: 3, datatype: 'int'}, + {action: 'insert', obj: list, key: 2, value: map, datatype: 'map'}, + {action: 'insert', obj: list, key: 2, value: list2, datatype: 'list'}, + ]) + doc1.free() + }) + + it('should capture local push ops', () => { + let doc1 = create('aaaa') 
+ doc1.enablePatches(true) + const list = doc1.putObject('_root', 'list', []) + doc1.push(list, 1) + const map = doc1.pushObject(list, {}) + const list2 = doc1.pushObject(list, []) + + assert.deepEqual(doc1.popPatches(), [ + {action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false}, + {action: 'insert', obj: list, key: 0, value: 1, datatype: 'int'}, + {action: 'insert', obj: list, key: 1, value: map, datatype: 'map'}, + {action: 'insert', obj: list, key: 2, value: list2, datatype: 'list'}, + ]) + doc1.free() + }) + + it('should capture local splice ops', () => { + let doc1 = create('aaaa') + doc1.enablePatches(true) + const list = doc1.putObject('_root', 'list', []) + doc1.splice(list, 0, 0, [1,2,3,4]) + doc1.splice(list, 1, 2) + + assert.deepEqual(doc1.popPatches(), [ + {action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false}, + {action: 'insert', obj: list, key: 0, value: 1, datatype: 'int'}, + {action: 'insert', obj: list, key: 1, value: 2, datatype: 'int'}, + {action: 'insert', obj: list, key: 2, value: 3, datatype: 'int'}, + {action: 'insert', obj: list, key: 3, value: 4, datatype: 'int'}, + {action: 'delete', obj: list, key: 1}, + {action: 'delete', obj: list, key: 1}, + ]) + doc1.free() + }) + + it.skip('should capture local increment ops', () => { + let doc1 = create('aaaa') + doc1.enablePatches(true) + doc1.put('_root', 'counter', 2, 'counter') + doc1.increment('_root', 'counter', 4) + + assert.deepEqual(doc1.popPatches(), [ + {action: 'put', obj: '_root', key: 'counter', value: 2, datatype: 'counter', conflict: false}, + {action: 'put', obj: '_root', key: 'counter', value: 6, datatype: 'counter', conflict: false}, + ]) + doc1.free() + }) + + + it('should capture local delete ops', () => { + let doc1 = create('aaaa') + doc1.enablePatches(true) + doc1.put('_root', 'key1', 1) + doc1.put('_root', 'key2', 2) + doc1.delete('_root', 'key1') + doc1.delete('_root', 'key2') + 
assert.deepEqual(doc1.popPatches(), [ + {action: 'put', obj: '_root', key: 'key1', value: 1, datatype: 'int', conflict: false}, + {action: 'put', obj: '_root', key: 'key2', value: 2, datatype: 'int', conflict: false}, + {action: 'delete', obj: '_root', key: 'key1'}, + {action: 'delete', obj: '_root', key: 'key2'}, + ]) + doc1.free() + }) + it.skip('should support counters in a map', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') doc2.enablePatches(true) @@ -890,8 +996,8 @@ describe('Automerge', () => { doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.get('_root', 'starlings'), ['counter', 3]) assert.deepEqual(doc2.popPatches(), [ - {action: 'assign', obj: '_root', key: 'starlings', value: 2, datatype: 'counter', conflict: false}, - {action: 'assign', obj: '_root', key: 'starlings', value: 3, datatype: 'counter', conflict: false} + {action: 'put', obj: '_root', key: 'starlings', value: 2, datatype: 'counter', conflict: false}, + {action: 'put', obj: '_root', key: 'starlings', value: 3, datatype: 'counter', conflict: false} ]) doc1.free(); doc2.free() }) diff --git a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs index 1b3a1a16..2afcd211 100644 --- a/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -2,13 +2,14 @@ use automerge::transaction::CommitOptions; use automerge::transaction::Transactable; use automerge::AutomergeError; use automerge::ObjType; +use automerge::NULL_OBSERVER; use automerge::{Automerge, ROOT}; // Based on https://automerge.github.io/docs/quickstart fn main() { let mut doc1 = Automerge::new(); let (cards, card1) = doc1 - .transact_with::<_, _, AutomergeError, _>( + .transact_with::<_, _, AutomergeError, _, ()>( |_| CommitOptions::default().with_message("Add card".to_owned()), |tx| { let cards = tx.put_object(ROOT, "cards", ObjType::List).unwrap(); @@ -25,12 +26,12 @@ fn main() { .result; let mut doc2 = Automerge::new(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut 
doc1, NULL_OBSERVER).unwrap(); let binary = doc1.save(); - let mut doc2 = Automerge::load(&binary).unwrap(); + let mut doc2 = Automerge::load(&binary, NULL_OBSERVER).unwrap(); - doc1.transact_with::<_, _, AutomergeError, _>( + doc1.transact_with::<_, _, AutomergeError, _, ()>( |_| CommitOptions::default().with_message("Mark card as done".to_owned()), |tx| { tx.put(&card1, "done", true)?; @@ -39,7 +40,7 @@ fn main() { ) .unwrap(); - doc2.transact_with::<_, _, AutomergeError, _>( + doc2.transact_with::<_, _, AutomergeError, _, ()>( |_| CommitOptions::default().with_message("Delete card".to_owned()), |tx| { tx.delete(&cards, 0)?; @@ -48,7 +49,7 @@ fn main() { ) .unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); for change in doc1.get_changes(&[]) { let length = doc1.length_at(&cards, &[change.hash]); diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index bc4add59..44a4126f 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -1,6 +1,7 @@ use std::ops::RangeBounds; use crate::exid::ExId; +use crate::op_observer::OpObserver; use crate::transaction::{CommitOptions, Transactable}; use crate::types::Patch; use crate::{sync, Keys, KeysAt, ObjType, Parents, Range, RangeAt, ScalarValue, Values, ValuesAt}; @@ -53,14 +54,6 @@ impl AutoCommit { self.doc.get_actor() } - pub fn enable_patches(&mut self, enable: bool) { - self.doc.enable_patches(enable) - } - - pub fn pop_patches(&mut self) -> Vec { - self.doc.pop_patches() - } - fn ensure_transaction_open(&mut self) { if self.transaction.is_none() { self.transaction = Some(self.doc.transaction_inner()); @@ -85,33 +78,48 @@ impl AutoCommit { fn ensure_transaction_closed(&mut self) { if let Some(tx) = self.transaction.take() { - tx.commit(&mut self.doc, None, None); + tx.commit::<()>(&mut self.doc, None, None, None); } } - pub fn load(data: &[u8]) -> Result { - let doc = Automerge::load(data)?; + pub fn load( + data: &[u8], + op_observer: 
Option<&mut Obs>, + ) -> Result { + let doc = Automerge::load(data, op_observer)?; Ok(Self { doc, transaction: None, }) } - pub fn load_incremental(&mut self, data: &[u8]) -> Result { + pub fn load_incremental( + &mut self, + data: &[u8], + op_observer: Option<&mut Obs>, + ) -> Result { self.ensure_transaction_closed(); - self.doc.load_incremental(data) + self.doc.load_incremental(data, op_observer) } - pub fn apply_changes(&mut self, changes: Vec) -> Result<(), AutomergeError> { + pub fn apply_changes( + &mut self, + changes: Vec, + op_observer: Option<&mut Obs>, + ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); - self.doc.apply_changes(changes) + self.doc.apply_changes(changes, op_observer) } /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { + pub fn merge( + &mut self, + other: &mut Self, + op_observer: Option<&mut Obs>, + ) -> Result, AutomergeError> { self.ensure_transaction_closed(); other.ensure_transaction_closed(); - self.doc.merge(&mut other.doc) + self.doc.merge(&mut other.doc, op_observer) } pub fn save(&mut self) -> Vec { @@ -155,7 +163,8 @@ impl AutoCommit { self.doc.import(s) } - pub fn dump(&self) { + pub fn dump(&mut self) { + self.ensure_transaction_closed(); self.doc.dump() } @@ -164,13 +173,15 @@ impl AutoCommit { self.doc.generate_sync_message(sync_state) } - pub fn receive_sync_message( + pub fn receive_sync_message( &mut self, sync_state: &mut sync::State, message: sync::Message, + op_observer: Option<&mut Obs>, ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); - self.doc.receive_sync_message(sync_state, message) + self.doc + .receive_sync_message(sync_state, message, op_observer) } #[cfg(feature = "optree-visualisation")] @@ -187,7 +198,7 @@ impl AutoCommit { } pub fn commit(&mut self) -> ChangeHash { - self.commit_with(CommitOptions::default()) + self.commit_with::<()>(CommitOptions::default()) } /// 
Commit the current operations with some options. @@ -203,13 +214,18 @@ impl AutoCommit { /// doc.put_object(&ROOT, "todos", ObjType::List).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; - /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); + /// doc.commit_with::<()>(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(&mut self, options: CommitOptions) -> ChangeHash { + pub fn commit_with(&mut self, options: CommitOptions) -> ChangeHash { // ensure that even no changes triggers a change self.ensure_transaction_open(); let tx = self.transaction.take().unwrap(); - tx.commit(&mut self.doc, options.message, options.time) + tx.commit( + &mut self.doc, + options.message, + options.time, + options.op_observer, + ) } pub fn rollback(&mut self) -> usize { @@ -325,15 +341,15 @@ impl Transactable for AutoCommit { tx.insert(&mut self.doc, obj.as_ref(), index, value) } - fn insert_object( + fn insert_object>( &mut self, - obj: &ExId, + obj: O, index: usize, value: ObjType, ) -> Result { self.ensure_transaction_open(); let tx = self.transaction.as_mut().unwrap(); - tx.insert_object(&mut self.doc, obj, index, value) + tx.insert_object(&mut self.doc, obj.as_ref(), index, value) } fn increment, P: Into>( diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 7e856313..2ec6465a 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1,23 +1,24 @@ use std::collections::{HashMap, HashSet, VecDeque}; +use std::fmt::Debug; use std::num::NonZeroU64; use std::ops::RangeBounds; use crate::change::encode_document; use crate::exid::ExId; use crate::keys::Keys; +use crate::op_observer::OpObserver; use crate::op_set::OpSet; use crate::parents::Parents; use crate::range::Range; use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner}; use crate::types::{ - 
ActorId, AssignPatch, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, - OpType, Patch, ScalarValue, Value, + ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, + ScalarValue, Value, }; -use crate::{legacy, query, types, ObjType, RangeAt, ValuesAt}; +use crate::{legacy, query, types, ObjType, RangeAt, ValuesAt, NULL_OBSERVER}; use crate::{AutomergeError, Change, Prop}; use crate::{KeysAt, Values}; use serde::Serialize; -use std::cmp::Ordering; #[derive(Debug, Clone, PartialEq)] pub(crate) enum Actor { @@ -46,7 +47,6 @@ pub struct Automerge { pub(crate) actor: Actor, /// The maximum operation counter this document has seen. pub(crate) max_op: u64, - pub(crate) patches: Option>, } impl Automerge { @@ -62,7 +62,6 @@ impl Automerge { saved: Default::default(), actor: Actor::Unused(ActorId::random()), max_op: 0, - patches: None, } } @@ -101,23 +100,6 @@ impl Automerge { } } - pub fn enable_patches(&mut self, enable: bool) { - match (enable, &self.patches) { - (true, None) => self.patches = Some(vec![]), - (false, Some(_)) => self.patches = None, - _ => (), - } - } - - pub fn pop_patches(&mut self) -> Vec { - if let Some(patches) = self.patches.take() { - self.patches = Some(Vec::new()); - patches - } else { - Vec::new() - } - } - /// Start a transaction. pub fn transaction(&mut self) -> Transaction { Transaction { @@ -172,20 +154,19 @@ impl Automerge { } /// Like [`Self::transact`] but with a function for generating the commit options. 
- pub fn transact_with(&mut self, c: C, f: F) -> transaction::Result + pub fn transact_with(&mut self, c: C, f: F) -> transaction::Result where F: FnOnce(&mut Transaction) -> Result, - C: FnOnce(&O) -> CommitOptions, + C: FnOnce(&O) -> CommitOptions, + Obs: OpObserver, { let mut tx = self.transaction(); let result = f(&mut tx); match result { Ok(result) => { let commit_options = c(&result); - Ok(Success { - result, - hash: tx.commit_with(commit_options), - }) + let hash = tx.commit_with(commit_options); + Ok(Success { result, hash }) } Err(error) => Err(Failure { error, @@ -222,94 +203,10 @@ impl Automerge { } let mut f = Self::new(); f.set_actor(ActorId::random()); - f.apply_changes(changes.into_iter().rev().cloned())?; + f.apply_changes(changes.into_iter().rev().cloned(), NULL_OBSERVER)?; Ok(f) } - fn insert_op(&mut self, obj: &ObjId, op: Op) { - let q = self.ops.search(obj, query::SeekOp::new(&op)); - - let succ = q.succ; - let pos = q.pos; - for i in succ { - self.ops.replace(obj, i, |old_op| old_op.add_succ(&op)); - } - - if !op.is_delete() { - self.ops.insert(pos, obj, op); - } - } - - fn insert_op_with_patch(&mut self, obj: &ObjId, op: Op) { - let q = self.ops.search(obj, query::SeekOpWithPatch::new(&op)); - - let query::SeekOpWithPatch { - pos, - succ, - seen, - values, - had_value_before, - .. 
- } = q; - - let ex_obj = self.id_to_exid(obj.0); - let key = match op.key { - Key::Map(index) => self.ops.m.props[index].clone().into(), - Key::Seq(_) => seen.into(), - }; - - let patch = if op.insert { - let value = (op.clone_value(), self.id_to_exid(op.id)); - Patch::Insert(ex_obj, seen, value) - } else if op.is_delete() { - if let Some(winner) = &values.last() { - let value = (winner.clone_value(), self.id_to_exid(winner.id)); - let conflict = values.len() > 1; - Patch::Assign(AssignPatch { - obj: ex_obj, - key, - value, - conflict, - }) - } else { - Patch::Delete(ex_obj, key) - } - } else { - let winner = if let Some(last_value) = values.last() { - if self.ops.m.lamport_cmp(op.id, last_value.id) == Ordering::Greater { - &op - } else { - last_value - } - } else { - &op - }; - let value = (winner.clone_value(), self.id_to_exid(winner.id)); - if op.is_list_op() && !had_value_before { - Patch::Insert(ex_obj, seen, value) - } else { - Patch::Assign(AssignPatch { - obj: ex_obj, - key, - value, - conflict: !values.is_empty(), - }) - } - }; - - if let Some(patches) = &mut self.patches { - patches.push(patch); - } - - for i in succ { - self.ops.replace(obj, i, |old_op| old_op.add_succ(&op)); - } - - if !op.is_delete() { - self.ops.insert(pos, obj, op); - } - } - // KeysAt::() // LenAt::() // PropAt::() @@ -491,11 +388,7 @@ impl Automerge { } pub(crate) fn id_to_exid(&self, id: OpId) -> ExId { - if id == types::ROOT { - ExId::Root - } else { - ExId::Id(id.0, self.ops.m.actors.cache[id.1].clone(), id.1) - } + self.ops.id_to_exid(id) } /// Get the string represented by the given text object. @@ -629,18 +522,25 @@ impl Automerge { } /// Load a document. - pub fn load(data: &[u8]) -> Result { + pub fn load( + data: &[u8], + op_observer: Option<&mut Obs>, + ) -> Result { let changes = Change::load_document(data)?; let mut doc = Self::new(); - doc.apply_changes(changes)?; + doc.apply_changes(changes, op_observer)?; Ok(doc) } /// Load an incremental save of a document. 
- pub fn load_incremental(&mut self, data: &[u8]) -> Result { + pub fn load_incremental( + &mut self, + data: &[u8], + op_observer: Option<&mut Obs>, + ) -> Result { let changes = Change::load_document(data)?; let start = self.ops.len(); - self.apply_changes(changes)?; + self.apply_changes(changes, op_observer)?; let delta = self.ops.len() - start; Ok(delta) } @@ -656,9 +556,10 @@ impl Automerge { } /// Apply changes to this document. - pub fn apply_changes( + pub fn apply_changes( &mut self, changes: impl IntoIterator, + mut observer: Option<&mut Obs>, ) -> Result<(), AutomergeError> { for c in changes { if !self.history_index.contains_key(&c.hash) { @@ -669,7 +570,7 @@ impl Automerge { )); } if self.is_causally_ready(&c) { - self.apply_change(c); + self.apply_change(c, &mut observer); } else { self.queue.push(c); } @@ -677,22 +578,22 @@ impl Automerge { } while let Some(c) = self.pop_next_causally_ready_change() { if !self.history_index.contains_key(&c.hash) { - self.apply_change(c); + self.apply_change(c, &mut observer); } } Ok(()) } - fn apply_change(&mut self, change: Change) { + fn apply_change(&mut self, change: Change, observer: &mut Option<&mut Obs>) { let ops = self.import_ops(&change); self.update_history(change, ops.len()); - if self.patches.is_some() { + if let Some(observer) = observer { for (obj, op) in ops { - self.insert_op_with_patch(&obj, op); + self.ops.insert_op_with_observer(&obj, op, *observer); } } else { for (obj, op) in ops { - self.insert_op(&obj, op); + self.ops.insert_op(&obj, op); } } } @@ -754,14 +655,18 @@ impl Automerge { } /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { + pub fn merge( + &mut self, + other: &mut Self, + op_observer: Option<&mut Obs>, + ) -> Result, AutomergeError> { // TODO: Make this fallible and figure out how to do this transactionally let changes = self .get_changes_added(other) .into_iter() .cloned() 
.collect::>(); - self.apply_changes(changes)?; + self.apply_changes(changes, op_observer)?; Ok(self.get_heads()) } @@ -1264,8 +1169,8 @@ mod tests { assert!(save_b.len() < save_a.len()); - let mut doc_a = Automerge::load(&save_a)?; - let mut doc_b = Automerge::load(&save_b)?; + let mut doc_a = Automerge::load(&save_a, NULL_OBSERVER)?; + let mut doc_b = Automerge::load(&save_b, NULL_OBSERVER)?; assert!(doc_a.get_all(ROOT, "baz")? == doc_b.get_all(ROOT, "baz")?); @@ -1739,13 +1644,13 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); // deleting a missing key in a map should just be a noop - assert!(tx.delete(ROOT, "a").is_ok()); + assert!(tx.delete(ROOT, "a",).is_ok()); tx.commit(); let last_change = doc.get_last_local_change().unwrap(); assert_eq!(last_change.len(), 0); let bytes = doc.save(); - assert!(Automerge::load(&bytes).is_ok()); + assert!(Automerge::load(&bytes, NULL_OBSERVER).is_ok()); let mut tx = doc.transaction(); tx.put(ROOT, "a", 1).unwrap(); @@ -1768,7 +1673,7 @@ mod tests { let mut doc = Automerge::new(); let mut tx = doc.transaction(); // deleting an element in a list that does not exist is an error - assert!(tx.delete(ROOT, 0).is_err()); + assert!(tx.delete(ROOT, 0,).is_err()); } #[test] @@ -1779,7 +1684,7 @@ mod tests { tx.commit(); let hash = doc.get_last_local_change().unwrap().hash; let bytes = doc.save(); - let doc = Automerge::load(&bytes).unwrap(); + let doc = Automerge::load(&bytes, NULL_OBSERVER).unwrap(); assert_eq!(doc.get_change_by_hash(&hash).unwrap().hash, hash); } @@ -1794,7 +1699,7 @@ mod tests { 157, 157, 157, 157, 157, 157, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 48, 254, 208, ]; - let _ = Automerge::load(bytes); + let _ = Automerge::load(bytes, NULL_OBSERVER); } #[test] @@ -1872,7 +1777,7 @@ mod tests { tx.commit(); let bytes = doc.save(); println!("doc2 time"); - let mut doc2 = Automerge::load(&bytes).unwrap(); + let mut doc2 = Automerge::load(&bytes, 
NULL_OBSERVER).unwrap(); let bytes2 = doc2.save(); assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); @@ -1925,7 +1830,7 @@ mod tests { tx.commit(); let bytes = doc.save(); println!("doc2 time"); - let mut doc2 = Automerge::load(&bytes).unwrap(); + let mut doc2 = Automerge::load(&bytes, NULL_OBSERVER).unwrap(); let bytes2 = doc2.save(); assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); @@ -1950,7 +1855,8 @@ mod tests { let mut doc2 = AutoCommit::new().with_actor(actor2.clone()); let list = doc1.put_object(ROOT, "list", ObjType::List).unwrap(); doc1.insert(&list, 0, 0).unwrap(); - doc2.load_incremental(&doc1.save_incremental()).unwrap(); + doc2.load_incremental(&doc1.save_incremental(), NULL_OBSERVER) + .unwrap(); for i in 1..=max { doc1.put(&list, 0, i).unwrap() } @@ -1959,8 +1865,8 @@ mod tests { } let change1 = doc1.save_incremental(); let change2 = doc2.save_incremental(); - doc2.load_incremental(&change1).unwrap(); - doc1.load_incremental(&change2).unwrap(); + doc2.load_incremental(&change1, NULL_OBSERVER).unwrap(); + doc1.load_incremental(&change2, NULL_OBSERVER).unwrap(); assert_eq!(doc1.length(&list), 1); assert_eq!(doc2.length(&list), 1); assert_eq!( diff --git a/automerge/src/change.rs b/automerge/src/change.rs index c33aba2c..2e420928 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -506,7 +506,7 @@ pub(crate) fn export_change( operations: change .operations .iter() - .map(|(obj, op)| export_op(op, obj, actors, props)) + .map(|(obj, _, op)| export_op(op, obj, actors, props)) .collect(), extra_bytes: change.extra_bytes, } diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 4b2e5c8b..4879f95a 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -37,6 +37,7 @@ mod indexed_cache; mod keys; mod keys_at; mod legacy; +mod op_observer; mod op_set; mod op_tree; mod parents; @@ -63,10 +64,14 @@ pub use exid::ExId as ObjId; pub use keys::Keys; pub use keys_at::KeysAt; pub use legacy::Change as 
ExpandedChange; +pub use op_observer::OpObserver; +pub use op_observer::Patch; +pub use op_observer::VecOpObserver; +pub use op_observer::NULL_OBSERVER; pub use parents::Parents; pub use range::Range; pub use range_at::RangeAt; -pub use types::{ActorId, AssignPatch, ChangeHash, ObjType, OpType, Patch, Prop}; +pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; pub use value::{ScalarValue, Value}; pub use values::Values; pub use values_at::ValuesAt; diff --git a/automerge/src/op_observer.rs b/automerge/src/op_observer.rs new file mode 100644 index 00000000..9aa1b8b4 --- /dev/null +++ b/automerge/src/op_observer.rs @@ -0,0 +1,87 @@ +use crate::exid::ExId; +use crate::Prop; +use crate::Value; + +/// An observer of operations applied to the document. +pub trait OpObserver { + /// A new value has been inserted into the given object. + /// + /// - `objid`: the object that has been inserted into. + /// - `index`: the index the new value has been inserted at. + /// - `tagged_value`: the value that has been inserted and the id of the operation that did the + /// insert. + fn insert(&mut self, objid: ExId, index: usize, tagged_value: (Value, ExId)); + + /// A new value has been put into the given object. + /// + /// - `objid`: the object that has been put into. + /// - `key`: the key that the value as been put at. + /// - `tagged_value`: the value that has been put into the object and the id of the operation + /// that did the put. + /// - `conflict`: whether this put conflicts with other operations. + fn put(&mut self, objid: ExId, key: Prop, tagged_value: (Value, ExId), conflict: bool); + + /// A value has beeen deleted. + /// + /// - `objid`: the object that has been deleted in. + /// - `key`: the key of the value that has been deleted. 
+ fn delete(&mut self, objid: ExId, key: Prop); +} + +impl OpObserver for () { + fn insert(&mut self, _objid: ExId, _index: usize, _tagged_value: (Value, ExId)) {} + + fn put(&mut self, _objid: ExId, _key: Prop, _tagged_value: (Value, ExId), _conflict: bool) {} + + fn delete(&mut self, _objid: ExId, _key: Prop) {} +} + +pub const NULL_OBSERVER: Option<&mut ()> = None; + +/// Capture operations and store them as patches. +#[derive(Default, Debug, Clone)] +pub struct VecOpObserver { + patches: Vec, +} + +impl VecOpObserver { + pub fn take_patches(&mut self) -> Vec { + std::mem::take(&mut self.patches) + } +} + +impl OpObserver for VecOpObserver { + fn insert(&mut self, obj_id: ExId, index: usize, (value, id): (Value, ExId)) { + self.patches + .push(Patch::Insert(obj_id, index, (value.into_owned(), id))); + } + + fn put(&mut self, objid: ExId, key: Prop, (value, id): (Value, ExId), conflict: bool) { + self.patches.push(Patch::Put { + obj: objid, + key, + value: (value.into_owned(), id), + conflict, + }); + } + + fn delete(&mut self, objid: ExId, key: Prop) { + self.patches.push(Patch::Delete(objid, key)) + } +} + +/// A notification to the application that something has changed in a document. 
+#[derive(Debug, Clone, PartialEq)] +pub enum Patch { + /// Associating a new value with a key in a map, or an existing list element + Put { + obj: ExId, + key: Prop, + value: (Value<'static>, ExId), + conflict: bool, + }, + /// Inserting a new element into a list/text + Insert(ExId, usize, (Value<'static>, ExId)), + /// Deleting an element from a list/text + Delete(ExId, Prop), +} diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index b1e71fda..b4db50ef 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -1,9 +1,11 @@ use crate::clock::Clock; +use crate::exid::ExId; use crate::indexed_cache::IndexedCache; use crate::op_tree::OpTree; use crate::query::{self, OpIdSearch, TreeQuery}; -use crate::types::{ActorId, Key, ObjId, Op, OpId, OpType}; -use crate::ObjType; +use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpType}; +use crate::Prop; +use crate::{ObjType, OpObserver}; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::HashMap; @@ -35,6 +37,14 @@ impl OpSetInternal { } } + pub(crate) fn id_to_exid(&self, id: OpId) -> ExId { + if id == types::ROOT { + ExId::Root + } else { + ExId::Id(id.0, self.m.actors.cache[id.1].clone(), id.1) + } + } + pub fn iter(&self) -> Iter<'_> { let mut objs: Vec<_> = self.trees.keys().collect(); objs.sort_by(|a, b| self.m.lamport_cmp(a.0, b.0)); @@ -143,6 +153,86 @@ impl OpSetInternal { } } + pub(crate) fn insert_op(&mut self, obj: &ObjId, op: Op) -> Op { + let q = self.search(obj, query::SeekOp::new(&op)); + + let succ = q.succ; + let pos = q.pos; + + for i in succ { + self.replace(obj, i, |old_op| old_op.add_succ(&op)); + } + + if !op.is_delete() { + self.insert(pos, obj, op.clone()); + } + op + } + + pub(crate) fn insert_op_with_observer( + &mut self, + obj: &ObjId, + op: Op, + observer: &mut Obs, + ) -> Op { + let q = self.search(obj, query::SeekOpWithPatch::new(&op)); + + let query::SeekOpWithPatch { + pos, + succ, + seen, + values, + had_value_before, + .. 
+ } = q; + + let ex_obj = self.id_to_exid(obj.0); + let key = match op.key { + Key::Map(index) => self.m.props[index].clone().into(), + Key::Seq(_) => seen.into(), + }; + + if op.insert { + let value = (op.value(), self.id_to_exid(op.id)); + observer.insert(ex_obj, seen, value); + } else if op.is_delete() { + if let Some(winner) = &values.last() { + let value = (winner.value(), self.id_to_exid(winner.id)); + let conflict = values.len() > 1; + observer.put(ex_obj, key, value, conflict); + } else { + observer.delete(ex_obj, key); + } + } else { + let winner = if let Some(last_value) = values.last() { + if self.m.lamport_cmp(op.id, last_value.id) == Ordering::Greater { + &op + } else { + last_value + } + } else { + &op + }; + let value = (winner.value(), self.id_to_exid(winner.id)); + if op.is_list_op() && !had_value_before { + observer.insert(ex_obj, seen, value); + } else { + let conflict = !values.is_empty(); + observer.put(ex_obj, key, value, conflict); + } + } + + for i in succ { + self.replace(obj, i, |old_op| old_op.add_succ(&op)); + } + + if !op.is_delete() { + self.insert(pos, obj, op.clone()); + } + + op + } + pub fn object_type(&self, id: &ObjId) -> Option { self.trees.get(id).map(|tree| tree.objtype) } diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 43801b9c..52164319 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -7,7 +7,8 @@ use std::{ }; use crate::{ - decoding, decoding::Decoder, encoding::Encodable, Automerge, AutomergeError, Change, ChangeHash, + decoding, decoding::Decoder, encoding::Encodable, Automerge, AutomergeError, Change, + ChangeHash, OpObserver, }; mod bloom; @@ -93,10 +94,11 @@ impl Automerge { Some(sync_message) } - pub fn receive_sync_message( + pub fn receive_sync_message( &mut self, sync_state: &mut State, message: Message, + op_observer: Option<&mut Obs>, ) -> Result<(), AutomergeError> { let before_heads = self.get_heads(); @@ -109,7 +111,7 @@ impl Automerge { let changes_is_empty = 
message_changes.is_empty(); if !changes_is_empty { - self.apply_changes(message_changes)?; + self.apply_changes(message_changes, op_observer)?; sync_state.shared_heads = advance_heads( &before_heads.iter().collect(), &self.get_heads().into_iter().collect(), diff --git a/automerge/src/transaction/commit.rs b/automerge/src/transaction/commit.rs index d4b12a97..2aaac57c 100644 --- a/automerge/src/transaction/commit.rs +++ b/automerge/src/transaction/commit.rs @@ -1,11 +1,12 @@ /// Optional metadata for a commit. -#[derive(Debug, Default, Clone)] -pub struct CommitOptions { +#[derive(Default)] +pub struct CommitOptions<'a, Obs> { pub message: Option, pub time: Option, + pub op_observer: Option<&'a mut Obs>, } -impl CommitOptions { +impl<'a, Obs> CommitOptions<'a, Obs> { /// Add a message to the commit. pub fn with_message>(mut self, message: S) -> Self { self.message = Some(message.into()); @@ -29,4 +30,14 @@ impl CommitOptions { self.time = Some(time); self } + + pub fn with_op_observer(mut self, op_observer: &'a mut Obs) -> Self { + self.op_observer = Some(op_observer); + self + } + + pub fn set_op_observer(&mut self, op_observer: &'a mut Obs) -> &mut Self { + self.op_observer = Some(op_observer); + self + } } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index cb0c8c14..e53a8fdf 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -5,7 +5,7 @@ use crate::exid::ExId; use crate::query::{self, OpIdSearch}; use crate::types::{Key, ObjId, OpId}; use crate::{change::export_change, types::Op, Automerge, ChangeHash, Prop}; -use crate::{AutomergeError, ObjType, OpType, ScalarValue}; +use crate::{AutomergeError, ObjType, OpObserver, OpType, ScalarValue}; #[derive(Debug, Clone)] pub struct TransactionInner { @@ -17,7 +17,7 @@ pub struct TransactionInner { pub(crate) extra_bytes: Vec, pub(crate) hash: Option, pub(crate) deps: Vec, - pub(crate) operations: Vec<(ObjId, Op)>, + pub(crate) operations: 
Vec<(ObjId, Prop, Op)>, } impl TransactionInner { @@ -27,11 +27,12 @@ impl TransactionInner { /// Commit the operations performed in this transaction, returning the hashes corresponding to /// the new heads. - pub fn commit( + pub fn commit( mut self, doc: &mut Automerge, message: Option, time: Option, + op_observer: Option<&mut Obs>, ) -> ChangeHash { if message.is_some() { self.message = message; @@ -41,6 +42,24 @@ impl TransactionInner { self.time = t; } + if let Some(observer) = op_observer { + for (obj, prop, op) in &self.operations { + let ex_obj = doc.ops.id_to_exid(obj.0); + if op.insert { + let value = (op.value(), doc.id_to_exid(op.id)); + match prop { + Prop::Map(_) => panic!("insert into a map"), + Prop::Seq(index) => observer.insert(ex_obj, *index, value), + } + } else if op.is_delete() { + observer.delete(ex_obj, prop.clone()); + } else { + let value = (op.value(), doc.ops.id_to_exid(op.id)); + observer.put(ex_obj, prop.clone(), value, false); + } + } + } + let num_ops = self.pending_ops(); let change = export_change(self, &doc.ops.m.actors, &doc.ops.m.props); let hash = change.hash; @@ -52,24 +71,25 @@ impl TransactionInner { /// Undo the operations added in this transaction, returning the number of cancelled /// operations. pub fn rollback(self, doc: &mut Automerge) -> usize { + let num = self.pending_ops(); + // remove in reverse order so sets are removed before makes etc... 
+ for (obj, _prop, op) in self.operations.into_iter().rev() { + for pred_id in &op.pred { + if let Some(p) = doc.ops.search(&obj, OpIdSearch::new(*pred_id)).index() { + doc.ops.replace(&obj, p, |o| o.remove_succ(&op)); + } + } + if let Some(pos) = doc.ops.search(&obj, OpIdSearch::new(op.id)).index() { + doc.ops.remove(&obj, pos); + } + } + // remove the actor from the cache so that it doesn't end up in the saved document if doc.states.get(&self.actor).is_none() { let actor = doc.ops.m.actors.remove_last(); doc.actor = Actor::Unused(actor); } - let num = self.pending_ops(); - // remove in reverse order so sets are removed before makes etc... - for (obj, op) in self.operations.iter().rev() { - for pred_id in &op.pred { - if let Some(p) = doc.ops.search(obj, OpIdSearch::new(*pred_id)).index() { - doc.ops.replace(obj, p, |o| o.remove_succ(op)); - } - } - if let Some(pos) = doc.ops.search(obj, OpIdSearch::new(op.id)).index() { - doc.ops.remove(obj, pos); - } - } num } @@ -89,13 +109,14 @@ impl TransactionInner { pub fn put, V: Into>( &mut self, doc: &mut Automerge, - obj: &ExId, + ex_obj: &ExId, prop: P, value: V, ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(obj)?; + let obj = doc.exid_to_obj(ex_obj)?; let value = value.into(); - self.local_op(doc, obj, prop.into(), value.into())?; + let prop = prop.into(); + self.local_op(doc, obj, prop, value.into())?; Ok(()) } @@ -115,13 +136,15 @@ impl TransactionInner { pub fn put_object>( &mut self, doc: &mut Automerge, - obj: &ExId, + ex_obj: &ExId, prop: P, value: ObjType, ) -> Result { - let obj = doc.exid_to_obj(obj)?; - let id = self.local_op(doc, obj, prop.into(), value.into())?.unwrap(); - Ok(doc.id_to_exid(id)) + let obj = doc.exid_to_obj(ex_obj)?; + let prop = prop.into(); + let id = self.local_op(doc, obj, prop, value.into())?.unwrap(); + let id = doc.id_to_exid(id); + Ok(id) } fn next_id(&mut self) -> OpId { @@ -131,6 +154,7 @@ impl TransactionInner { fn insert_local_op( &mut self, doc: &mut Automerge, 
+ prop: Prop, op: Op, pos: usize, obj: ObjId, @@ -146,17 +170,17 @@ impl TransactionInner { doc.ops.insert(pos, &obj, op.clone()); } - self.operations.push((obj, op)); + self.operations.push((obj, prop, op)); } pub fn insert>( &mut self, doc: &mut Automerge, - obj: &ExId, + ex_obj: &ExId, index: usize, value: V, ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(obj)?; + let obj = doc.exid_to_obj(ex_obj)?; let value = value.into(); self.do_insert(doc, obj, index, value.into())?; Ok(()) @@ -165,13 +189,14 @@ impl TransactionInner { pub fn insert_object( &mut self, doc: &mut Automerge, - obj: &ExId, + ex_obj: &ExId, index: usize, value: ObjType, ) -> Result { - let obj = doc.exid_to_obj(obj)?; - let id = self.do_insert(doc, obj, index, value.into())?.unwrap(); - Ok(doc.id_to_exid(id)) + let obj = doc.exid_to_obj(ex_obj)?; + let id = self.do_insert(doc, obj, index, value.into())?; + let id = doc.id_to_exid(id); + Ok(id) } fn do_insert( @@ -180,13 +205,12 @@ impl TransactionInner { obj: ObjId, index: usize, action: OpType, - ) -> Result, AutomergeError> { + ) -> Result { let id = self.next_id(); let query = doc.ops.search(&obj, query::InsertNth::new(index)); let key = query.key()?; - let is_make = matches!(&action, OpType::Make(_)); let op = Op { id, @@ -198,13 +222,9 @@ impl TransactionInner { }; doc.ops.insert(query.pos(), &obj, op.clone()); - self.operations.push((obj, op)); + self.operations.push((obj, Prop::Seq(index), op)); - if is_make { - Ok(Some(id)) - } else { - Ok(None) - } + Ok(id) } pub(crate) fn local_op( @@ -232,8 +252,8 @@ impl TransactionInner { } let id = self.next_id(); - let prop = doc.ops.m.props.cache(prop); - let query = doc.ops.search(&obj, query::Prop::new(prop)); + let prop_index = doc.ops.m.props.cache(prop.clone()); + let query = doc.ops.search(&obj, query::Prop::new(prop_index)); // no key present to delete if query.ops.is_empty() && action == OpType::Delete { @@ -244,14 +264,12 @@ impl TransactionInner { return Ok(None); } - let 
is_make = matches!(&action, OpType::Make(_)); - let pred = query.ops.iter().map(|op| op.id).collect(); let op = Op { id, action, - key: Key::Map(prop), + key: Key::Map(prop_index), succ: Default::default(), pred, insert: false, @@ -259,13 +277,9 @@ impl TransactionInner { let pos = query.pos; let ops_pos = query.ops_pos; - self.insert_local_op(doc, op, pos, obj, &ops_pos); + self.insert_local_op(doc, Prop::Map(prop), op, pos, obj, &ops_pos); - if is_make { - Ok(Some(id)) - } else { - Ok(None) - } + Ok(Some(id)) } fn local_list_op( @@ -285,8 +299,6 @@ impl TransactionInner { return Ok(None); } - let is_make = matches!(&action, OpType::Make(_)); - let op = Op { id, action, @@ -298,13 +310,9 @@ impl TransactionInner { let pos = query.pos; let ops_pos = query.ops_pos; - self.insert_local_op(doc, op, pos, obj, &ops_pos); + self.insert_local_op(doc, Prop::Seq(index), op, pos, obj, &ops_pos); - if is_make { - Ok(Some(id)) - } else { - Ok(None) - } + Ok(Some(id)) } pub fn increment>( @@ -322,11 +330,12 @@ impl TransactionInner { pub fn delete>( &mut self, doc: &mut Automerge, - obj: &ExId, + ex_obj: &ExId, prop: P, ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(obj)?; - self.local_op(doc, obj, prop.into(), OpType::Delete)?; + let obj = doc.exid_to_obj(ex_obj)?; + let prop = prop.into(); + self.local_op(doc, obj, prop, OpType::Delete)?; Ok(()) } @@ -335,19 +344,19 @@ impl TransactionInner { pub fn splice( &mut self, doc: &mut Automerge, - obj: &ExId, + ex_obj: &ExId, mut pos: usize, del: usize, vals: impl IntoIterator, ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(obj)?; + let obj = doc.exid_to_obj(ex_obj)?; for _ in 0..del { // del() self.local_op(doc, obj, pos.into(), OpType::Delete)?; } for v in vals { // insert() - self.do_insert(doc, obj, pos, v.into())?; + self.do_insert(doc, obj, pos, v.clone().into())?; pos += 1; } Ok(()) diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs 
index 2a983b3f..2015b263 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -1,11 +1,9 @@ use std::ops::RangeBounds; use crate::exid::ExId; -use crate::{ - Automerge, ChangeHash, KeysAt, ObjType, Prop, Range, RangeAt, ScalarValue, Value, Values, - ValuesAt, -}; +use crate::{Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ScalarValue, Value}; use crate::{AutomergeError, Keys}; +use crate::{Range, RangeAt, Values, ValuesAt}; use super::{CommitOptions, Transactable, TransactionInner}; @@ -38,7 +36,10 @@ impl<'a> Transaction<'a> { /// Commit the operations performed in this transaction, returning the hashes corresponding to /// the new heads. pub fn commit(mut self) -> ChangeHash { - self.inner.take().unwrap().commit(self.doc, None, None) + self.inner + .take() + .unwrap() + .commit::<()>(self.doc, None, None, None) } /// Commit the operations in this transaction with some options. @@ -55,13 +56,15 @@ impl<'a> Transaction<'a> { /// tx.put_object(ROOT, "todos", ObjType::List).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; - /// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); + /// tx.commit_with::<()>(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(mut self, options: CommitOptions) -> ChangeHash { - self.inner - .take() - .unwrap() - .commit(self.doc, options.message, options.time) + pub fn commit_with(mut self, options: CommitOptions) -> ChangeHash { + self.inner.take().unwrap().commit( + self.doc, + options.message, + options.time, + options.op_observer, + ) } /// Undo the operations added in this transaction, returning the number of cancelled @@ -126,16 +129,16 @@ impl<'a> Transactable for Transaction<'a> { .insert(self.doc, obj.as_ref(), index, value) } - fn insert_object( + fn insert_object>( &mut self, - obj: &ExId, + obj: O, 
index: usize, value: ObjType, ) -> Result { self.inner .as_mut() .unwrap() - .insert_object(self.doc, obj, index, value) + .insert_object(self.doc, obj.as_ref(), index, value) } fn increment, P: Into>( diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index ef019a5d..3df7784c 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -54,9 +54,9 @@ pub trait Transactable { ) -> Result<(), AutomergeError>; /// Insert an object into a list at the given index. - fn insert_object( + fn insert_object>( &mut self, - obj: &ExId, + obj: O, index: usize, object: ObjType, ) -> Result; diff --git a/automerge/src/types.rs b/automerge/src/types.rs index ee3b2e1b..26b4f3ce 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -1,5 +1,4 @@ use crate::error; -use crate::exid::ExId; use crate::legacy as amp; use serde::{Deserialize, Serialize}; use std::borrow::Cow; @@ -546,26 +545,6 @@ impl TryFrom<&[u8]> for ChangeHash { } } -/// Properties of `Patch::Assign` -#[derive(Debug, Clone, PartialEq)] -pub struct AssignPatch { - pub obj: ExId, - pub key: Prop, - pub value: (Value<'static>, ExId), - pub conflict: bool, -} - -/// A notification to the application that something has changed in a document. 
-#[derive(Debug, Clone, PartialEq)] -pub enum Patch { - /// Associating a new value with a key in a map, or an existing list element - Assign(AssignPatch), - /// Inserting a new element into a list/text - Insert(ExId, usize, (Value<'static>, ExId)), - /// Deleting an element from a list/text - Delete(ExId, Prop), -} - #[cfg(feature = "wasm")] impl From for wasm_bindgen::JsValue { fn from(prop: Prop) -> Self { diff --git a/automerge/src/value.rs b/automerge/src/value.rs index f378088e..eb6c268b 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -159,6 +159,14 @@ impl<'a> Value<'a> { } } + pub fn into_owned(self) -> Value<'static> { + match self { + Value::Object(o) => Value::Object(o), + Value::Scalar(Cow::Owned(s)) => Value::Scalar(Cow::Owned(s)), + Value::Scalar(Cow::Borrowed(s)) => Value::Scalar(Cow::Owned((*s).clone())), + } + } + pub fn to_owned(&self) -> Value<'static> { match self { Value::Object(o) => Value::Object(*o), diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 4907334a..4e6e8051 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,5 +1,5 @@ use automerge::transaction::Transactable; -use automerge::{ActorId, AutoCommit, Automerge, ObjType, ScalarValue, Value, ROOT}; +use automerge::{ActorId, AutoCommit, Automerge, ObjType, ScalarValue, Value, NULL_OBSERVER, ROOT}; mod helpers; #[allow(unused_imports)] @@ -25,10 +25,10 @@ fn repeated_map_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); doc1.put(&automerge::ROOT, "field", 123).unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc2.put(&automerge::ROOT, "field", 456).unwrap(); doc1.put(&automerge::ROOT, "field", 789).unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_eq!(doc1.get_all(&automerge::ROOT, "field").unwrap().len(), 2); doc1.put(&automerge::ROOT, "field", 123).unwrap(); @@ -48,9 +48,9 @@ fn 
repeated_list_assignment_which_resolves_conflict_not_ignored() { .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, 123).unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc2.put(&list_id, 0, 456).unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); doc1.put(&list_id, 0, 789).unwrap(); assert_doc!( @@ -92,7 +92,7 @@ fn merge_concurrent_map_prop_updates() { let mut doc2 = new_doc(); doc1.put(&automerge::ROOT, "foo", "bar").unwrap(); doc2.put(&automerge::ROOT, "hello", "world").unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_eq!( doc1.get(&automerge::ROOT, "foo").unwrap().unwrap().0, "bar".into() @@ -104,7 +104,7 @@ fn merge_concurrent_map_prop_updates() { "hello" => { "world" }, } ); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); assert_doc!( doc2.document(), map! { @@ -121,10 +121,10 @@ fn add_concurrent_increments_of_same_property() { let mut doc2 = new_doc(); doc1.put(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc1.increment(&automerge::ROOT, "counter", 1).unwrap(); doc2.increment(&automerge::ROOT, "counter", 2).unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), map! 
{ @@ -150,7 +150,7 @@ fn add_increments_only_to_preceeded_values() { doc2.increment(&automerge::ROOT, "counter", 3).unwrap(); // The two values should be conflicting rather than added - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -170,7 +170,7 @@ fn concurrent_updates_of_same_field() { doc1.put(&automerge::ROOT, "field", "one").unwrap(); doc2.put(&automerge::ROOT, "field", "two").unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -191,11 +191,11 @@ fn concurrent_updates_of_same_list_element() { .put_object(&automerge::ROOT, "birds", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, "finch").unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc1.put(&list_id, 0, "greenfinch").unwrap(); doc2.put(&list_id, 0, "goldfinch").unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -220,8 +220,8 @@ fn assignment_conflicts_of_different_types() { .unwrap(); doc3.put_object(&automerge::ROOT, "field", ObjType::Map) .unwrap(); - doc1.merge(&mut doc2).unwrap(); - doc1.merge(&mut doc3).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc3, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -244,7 +244,7 @@ fn changes_within_conflicting_map_field() { .put_object(&automerge::ROOT, "field", ObjType::Map) .unwrap(); doc2.put(&map_id, "innerKey", 42).unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -270,18 +270,18 @@ fn changes_within_conflicting_list_element() { .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, "hello").unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); let map_in_doc1 = doc1.put_object(&list_id, 0, ObjType::Map).unwrap(); 
doc1.put(&map_in_doc1, "map1", true).unwrap(); doc1.put(&map_in_doc1, "key", 1).unwrap(); let map_in_doc2 = doc2.put_object(&list_id, 0, ObjType::Map).unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); doc2.put(&map_in_doc2, "map2", true).unwrap(); doc2.put(&map_in_doc2, "key", 2).unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -319,7 +319,7 @@ fn concurrently_assigned_nested_maps_should_not_merge() { .unwrap(); doc2.put(&doc2_map_id, "logo_url", "logo.png").unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -349,11 +349,11 @@ fn concurrent_insertions_at_different_list_positions() { doc1.insert(&list_id, 0, "one").unwrap(); doc1.insert(&list_id, 1, "three").unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc1.splice(&list_id, 1, 0, vec!["two".into()]).unwrap(); doc2.insert(&list_id, 2, "four").unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -382,10 +382,10 @@ fn concurrent_insertions_at_same_list_position() { .unwrap(); doc1.insert(&list_id, 0, "parakeet").unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc1.insert(&list_id, 1, "starling").unwrap(); doc2.insert(&list_id, 1, "chaffinch").unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -412,11 +412,11 @@ fn concurrent_assignment_and_deletion_of_a_map_entry() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); doc1.put(&automerge::ROOT, "bestBird", "robin").unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc1.delete(&automerge::ROOT, "bestBird").unwrap(); doc2.put(&automerge::ROOT, "bestBird", "magpie").unwrap(); - doc1.merge(&mut 
doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -438,7 +438,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { doc1.insert(&list_id, 0, "blackbird").unwrap(); doc1.insert(&list_id, 1, "thrush").unwrap(); doc1.insert(&list_id, 2, "goldfinch").unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc1.put(&list_id, 1, "starling").unwrap(); doc2.delete(&list_id, 1).unwrap(); @@ -463,7 +463,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { } ); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -489,14 +489,14 @@ fn insertion_after_a_deleted_list_element() { doc1.insert(&list_id, 1, "thrush").unwrap(); doc1.insert(&list_id, 2, "goldfinch").unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc1.splice(&list_id, 1, 2, Vec::new()).unwrap(); doc2.splice(&list_id, 2, 0, vec!["starling".into()]) .unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -508,7 +508,7 @@ fn insertion_after_a_deleted_list_element() { } ); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); assert_doc!( doc2.document(), map! { @@ -532,13 +532,13 @@ fn concurrent_deletion_of_same_list_element() { doc1.insert(&list_id, 1, "buzzard").unwrap(); doc1.insert(&list_id, 2, "cormorant").unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc1.delete(&list_id, 1).unwrap(); doc2.delete(&list_id, 1).unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -550,7 +550,7 @@ fn concurrent_deletion_of_same_list_element() { } ); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); assert_doc!( doc2.document(), map! 
{ @@ -577,12 +577,12 @@ fn concurrent_updates_at_different_levels() { let mammals = doc1.put_object(&animals, "mammals", ObjType::List).unwrap(); doc1.insert(&mammals, 0, "badger").unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc1.put(&birds, "brown", "sparrow").unwrap(); doc2.delete(&animals, "birds").unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_obj!( doc1.document(), @@ -618,13 +618,13 @@ fn concurrent_updates_of_concurrently_deleted_objects() { let blackbird = doc1.put_object(&birds, "blackbird", ObjType::Map).unwrap(); doc1.put(&blackbird, "feathers", "black").unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc1.delete(&birds, "blackbird").unwrap(); doc2.put(&blackbird, "beak", "orange").unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -645,7 +645,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { let wisdom = doc1 .put_object(&automerge::ROOT, "wisdom", ObjType::List) .unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc1.splice( &wisdom, @@ -675,7 +675,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { ) .unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); assert_doc!( doc1.document(), @@ -707,7 +707,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id( .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list, 0, "two").unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc2.insert(&list, 0, "one").unwrap(); assert_doc!( @@ -732,7 +732,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list, 0, "two").unwrap(); - 
doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc2.insert(&list, 0, "one").unwrap(); assert_doc!( @@ -755,11 +755,11 @@ fn insertion_consistent_with_causality() { .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list, 0, "four").unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc2.insert(&list, 0, "three").unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); doc1.insert(&list, 0, "two").unwrap(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc2.insert(&list, 0, "one").unwrap(); assert_doc!( @@ -778,7 +778,7 @@ fn insertion_consistent_with_causality() { #[test] fn save_and_restore_empty() { let mut doc = new_doc(); - let loaded = Automerge::load(&doc.save()).unwrap(); + let loaded = Automerge::load(&doc.save(), NULL_OBSERVER).unwrap(); assert_doc!(&loaded, map! {}); } @@ -795,13 +795,13 @@ fn save_restore_complex() { doc1.put(&first_todo, "done", false).unwrap(); let mut doc2 = new_doc(); - doc2.merge(&mut doc1).unwrap(); + doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); doc2.put(&first_todo, "title", "weed plants").unwrap(); doc1.put(&first_todo, "title", "kill plants").unwrap(); - doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); - let reloaded = Automerge::load(&doc1.save()).unwrap(); + let reloaded = Automerge::load(&doc1.save(), NULL_OBSERVER).unwrap(); assert_doc!( &reloaded, @@ -859,10 +859,10 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc1.insert(&list, 1, "b")?; doc1.insert(&list, 2, "c")?; - let mut doc2 = AutoCommit::load(&doc1.save())?; + let mut doc2 = AutoCommit::load(&doc1.save(), NULL_OBSERVER)?; doc2.set_actor(actor2); - let mut doc3 = AutoCommit::load(&doc1.save())?; + let mut doc3 = AutoCommit::load(&doc1.save(), NULL_OBSERVER)?; doc3.set_actor(actor3); doc1.put(&list, 1, ScalarValue::counter(0))?; @@ -876,8 
+876,8 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc1.increment(&list, 1, 1)?; doc1.increment(&list, 2, 1)?; - doc1.merge(&mut doc2).unwrap(); - doc1.merge(&mut doc3).unwrap(); + doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc3, NULL_OBSERVER).unwrap(); let values = doc1.get_all(&list, 1)?; assert_eq!(values.len(), 3); @@ -911,7 +911,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(doc1.length(&list), 2); - let doc4 = AutoCommit::load(&doc1.save())?; + let doc4 = AutoCommit::load(&doc1.save(), NULL_OBSERVER)?; assert_eq!(doc4.length(&list), 2); @@ -919,7 +919,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(doc1.length(&list), 1); - let doc5 = AutoCommit::load(&doc1.save())?; + let doc5 = AutoCommit::load(&doc1.save(), NULL_OBSERVER)?; assert_eq!(doc5.length(&list), 1); diff --git a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs index 00028945..2d79722f 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -1,4 +1,4 @@ -use automerge::{transaction::Transactable, AutoCommit, Automerge, ObjType, ROOT}; +use automerge::{transaction::Transactable, AutoCommit, Automerge, ObjType, NULL_OBSERVER, ROOT}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; use std::fs; @@ -32,11 +32,11 @@ fn save_trace_autotx(mut doc: AutoCommit) { } fn load_trace(bytes: &[u8]) { - Automerge::load(bytes).unwrap(); + Automerge::load(bytes, NULL_OBSERVER).unwrap(); } fn load_trace_autotx(bytes: &[u8]) { - AutoCommit::load(bytes).unwrap(); + AutoCommit::load(bytes, NULL_OBSERVER).unwrap(); } fn bench(c: &mut Criterion) { diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index 72085fdb..558a2e6d 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -1,5 +1,5 @@ -use automerge::ObjType; use automerge::{transaction::Transactable, Automerge, AutomergeError, ROOT}; +use automerge::{ObjType, 
NULL_OBSERVER}; use std::time::Instant; fn main() -> Result<(), AutomergeError> { @@ -33,7 +33,7 @@ fn main() -> Result<(), AutomergeError> { println!("Saved in {} ms", save.elapsed().as_millis()); let load = Instant::now(); - let _ = Automerge::load(&bytes).unwrap(); + let _ = Automerge::load(&bytes, NULL_OBSERVER).unwrap(); println!("Loaded in {} ms", load.elapsed().as_millis()); println!("Done in {} ms", now.elapsed().as_millis()); From 702a0ec172d8349869810e083cdf2950b4aa085b Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 18 Apr 2022 15:44:06 +0100 Subject: [PATCH 284/730] Add lifetimes to transact_with and fixup watch example --- automerge-wasm/src/lib.rs | 4 +- automerge/examples/watch.rs | 88 ++++++++++++++++++++---------------- automerge/src/automerge.rs | 6 +-- automerge/src/op_observer.rs | 17 +++++-- 4 files changed, 65 insertions(+), 50 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 198d00ab..e816bbf1 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -442,7 +442,7 @@ impl Automerge { js_set(&patch, "conflict", conflict)?; } - Patch::Insert(obj, index, value) => { + Patch::Insert { obj, index, value } => { js_set(&patch, "action", "insert")?; js_set(&patch, "obj", obj.to_string())?; js_set(&patch, "key", index as f64)?; @@ -458,7 +458,7 @@ impl Automerge { }; } - Patch::Delete(obj, key) => { + Patch::Delete { obj, key } => { js_set(&patch, "action", "delete")?; js_set(&patch, "obj", obj.to_string())?; js_set(&patch, "key", key)?; diff --git a/automerge/examples/watch.rs b/automerge/examples/watch.rs index aa93127a..4ba65e61 100644 --- a/automerge/examples/watch.rs +++ b/automerge/examples/watch.rs @@ -1,18 +1,25 @@ +use automerge::transaction::CommitOptions; use automerge::transaction::Transactable; use automerge::Automerge; -use automerge::ChangeHash; -use automerge::ObjId; +use automerge::AutomergeError; +use automerge::Patch; +use automerge::VecOpObserver; use automerge::ROOT; fn 
main() { let mut doc = Automerge::new(); - let heads1 = doc.get_heads(); + let mut observer = VecOpObserver::default(); // a simple scalar change in the root object - let mut tx = doc.transaction(); - tx.put(ROOT, "hello", "world").unwrap(); - let heads2 = tx.commit(); - get_changes(&heads1, &doc); + doc.transact_with::<_, _, AutomergeError, _, _>( + |_result| CommitOptions::default().with_op_observer(&mut observer), + |tx| { + tx.put(ROOT, "hello", "world").unwrap(); + Ok(()) + }, + ) + .unwrap(); + get_changes(&doc, observer.take_patches()); let mut tx = doc.transaction(); let map = tx @@ -23,47 +30,48 @@ fn main() { let list = tx .put_object(&map, "my list", automerge::ObjType::List) .unwrap(); - // tx.insert(&list, 0, "yay").unwrap(); + tx.insert(&list, 0, "yay").unwrap(); let m = tx.insert_object(&list, 0, automerge::ObjType::Map).unwrap(); tx.put(&m, "hi", 2).unwrap(); tx.insert(&list, 1, "woo").unwrap(); let m = tx.insert_object(&list, 2, automerge::ObjType::Map).unwrap(); tx.put(&m, "hi", 2).unwrap(); - let _heads3 = tx.commit(); - get_changes(&[heads2], &doc); - - // now if a peer were to send us a change that added a key in map we wouldn't know the path to - // the change or we might not have a reference to the map objid. 
+ let _heads3 = tx.commit_with(CommitOptions::default().with_op_observer(&mut observer)); + get_changes(&doc, observer.take_patches()); } -fn get_changes(heads: &[ChangeHash], doc: &Automerge) { - let changes = doc.get_changes(heads); - // changes should be in topological order - for change in changes { - let change = change.decode(); - for op in change.operations { - // get the object that it changed - let obj = doc.import(&op.obj.to_string()).unwrap(); - // get the prop too - let prop = format!("{:?}", op.key); - println!("{:?}", op); - println!( - "{} {:?} in obj {:?}, object path {:?}", - if op.insert { "inserted" } else { "changed" }, - prop, +fn get_changes(doc: &Automerge, patches: Vec) { + for patch in patches { + match patch { + Patch::Put { obj, - get_path_for_obj(doc, &obj) - ); + key, + value, + conflict: _, + } => { + println!( + "put {:?} at {:?} in obj {:?}, object path {:?}", + value, + key, + obj, + doc.path_to_object(&obj) + ) + } + Patch::Insert { obj, index, value } => { + println!( + "insert {:?} at {:?} in obj {:?}, object path {:?}", + value, + index, + obj, + doc.path_to_object(&obj) + ) + } + Patch::Delete { obj, key } => println!( + "delete {:?} in obj {:?}, object path {:?}", + key, + obj, + doc.path_to_object(&obj) + ), } } } - -fn get_path_for_obj(doc: &Automerge, obj: &ObjId) -> String { - let mut s = String::new(); - let mut obj = obj.clone(); - while let Some((parent, key)) = doc.parent_object(obj) { - s = format!("{}/{}", key, s); - obj = parent; - } - s -} diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 2ec6465a..5a0cb48d 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -154,11 +154,11 @@ impl Automerge { } /// Like [`Self::transact`] but with a function for generating the commit options. 
- pub fn transact_with(&mut self, c: C, f: F) -> transaction::Result + pub fn transact_with<'a, F, O, E, C, Obs>(&mut self, c: C, f: F) -> transaction::Result where F: FnOnce(&mut Transaction) -> Result, - C: FnOnce(&O) -> CommitOptions, - Obs: OpObserver, + C: FnOnce(&O) -> CommitOptions<'a, Obs>, + Obs: 'a + OpObserver, { let mut tx = self.transaction(); let result = f(&mut tx); diff --git a/automerge/src/op_observer.rs b/automerge/src/op_observer.rs index 9aa1b8b4..5aaca4fa 100644 --- a/automerge/src/op_observer.rs +++ b/automerge/src/op_observer.rs @@ -52,8 +52,11 @@ impl VecOpObserver { impl OpObserver for VecOpObserver { fn insert(&mut self, obj_id: ExId, index: usize, (value, id): (Value, ExId)) { - self.patches - .push(Patch::Insert(obj_id, index, (value.into_owned(), id))); + self.patches.push(Patch::Insert { + obj: obj_id, + index, + value: (value.into_owned(), id), + }); } fn put(&mut self, objid: ExId, key: Prop, (value, id): (Value, ExId), conflict: bool) { @@ -66,7 +69,7 @@ impl OpObserver for VecOpObserver { } fn delete(&mut self, objid: ExId, key: Prop) { - self.patches.push(Patch::Delete(objid, key)) + self.patches.push(Patch::Delete { obj: objid, key }) } } @@ -81,7 +84,11 @@ pub enum Patch { conflict: bool, }, /// Inserting a new element into a list/text - Insert(ExId, usize, (Value<'static>, ExId)), + Insert { + obj: ExId, + index: usize, + value: (Value<'static>, ExId), + }, /// Deleting an element from a list/text - Delete(ExId, Prop), + Delete { obj: ExId, key: Prop }, } From e1283e781d9bd208c19fc9cc335c28aa54f2e489 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 18 Apr 2022 15:49:28 +0100 Subject: [PATCH 285/730] Add some more docs to the patches --- automerge/src/op_observer.rs | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/automerge/src/op_observer.rs b/automerge/src/op_observer.rs index 5aaca4fa..88adb861 100644 --- a/automerge/src/op_observer.rs +++ b/automerge/src/op_observer.rs @@ -38,13 
+38,15 @@ impl OpObserver for () { pub const NULL_OBSERVER: Option<&mut ()> = None; -/// Capture operations and store them as patches. +/// Capture operations into a [`Vec`] and store them as patches. #[derive(Default, Debug, Clone)] pub struct VecOpObserver { patches: Vec, } impl VecOpObserver { + /// Take the current list of patches, leaving the internal list empty and ready for new + /// patches. pub fn take_patches(&mut self) -> Vec { std::mem::take(&mut self.patches) } @@ -78,17 +80,29 @@ impl OpObserver for VecOpObserver { pub enum Patch { /// Associating a new value with a key in a map, or an existing list element Put { + /// The object that was put into. obj: ExId, + /// The key that the new value was put at. key: Prop, + /// The value that was put, and the id of the operation that put it there. value: (Value<'static>, ExId), + /// Whether this put conflicts with another. conflict: bool, }, /// Inserting a new element into a list/text Insert { + /// The object that was inserted into. obj: ExId, + /// The index that the new value was inserted at. index: usize, + /// The value that was inserted, and the id of the operation that inserted it there. value: (Value<'static>, ExId), }, /// Deleting an element from a list/text - Delete { obj: ExId, key: Prop }, + Delete { + /// The object that was deleted from. + obj: ExId, + /// The key that was deleted. 
+ key: Prop, + }, } From 76a19185b71e5b67e98aaee112966abceaaca967 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 19 Apr 2022 17:48:11 +0100 Subject: [PATCH 286/730] Add separate functions for with op_observer --- automerge-cli/src/examine.rs | 2 +- automerge-cli/src/export.rs | 2 +- automerge-cli/src/merge.rs | 6 +- automerge-wasm/src/lib.rs | 12 ++-- automerge/examples/quickstart.rs | 7 +- automerge/src/autocommit.rs | 61 +++++++++++++---- automerge/src/automerge.rs | 60 +++++++++++------ automerge/src/op_set.rs | 1 - automerge/src/sync.rs | 14 +++- automerge/tests/test.rs | 110 +++++++++++++++---------------- edit-trace/benches/main.rs | 6 +- edit-trace/src/main.rs | 4 +- 12 files changed, 173 insertions(+), 112 deletions(-) diff --git a/automerge-cli/src/examine.rs b/automerge-cli/src/examine.rs index 555eda82..010fa0f1 100644 --- a/automerge-cli/src/examine.rs +++ b/automerge-cli/src/examine.rs @@ -29,7 +29,7 @@ pub fn examine( input .read_to_end(&mut buf) .map_err(|e| ExamineError::ReadingChanges { source: e })?; - let doc = am::Automerge::load(&buf, am::NULL_OBSERVER) + let doc = am::Automerge::load(&buf) .map_err(|e| ExamineError::ApplyingInitialChanges { source: e })?; let uncompressed_changes: Vec<_> = doc.get_changes(&[]).iter().map(|c| c.decode()).collect(); if is_tty { diff --git a/automerge-cli/src/export.rs b/automerge-cli/src/export.rs index e1e6cc32..937ba794 100644 --- a/automerge-cli/src/export.rs +++ b/automerge-cli/src/export.rs @@ -68,7 +68,7 @@ fn scalar_to_json(val: &am::ScalarValue) -> serde_json::Value { } fn get_state_json(input_data: Vec) -> Result { - let doc = am::Automerge::load(&input_data, am::NULL_OBSERVER).unwrap(); // FIXME + let doc = am::Automerge::load(&input_data).unwrap(); // FIXME Ok(map_to_json(&doc, &am::ObjId::Root)) } diff --git a/automerge-cli/src/merge.rs b/automerge-cli/src/merge.rs index a0ef6dd1..936af246 100644 --- a/automerge-cli/src/merge.rs +++ b/automerge-cli/src/merge.rs @@ -38,7 +38,7 @@ pub(super) 
fn merge(inputs: Inputs, mut output: W) -> Result< Inputs::Stdin => { let mut input = Vec::new(); std::io::stdin().read_to_end(&mut input)?; - backend.load_incremental(&input, am::NULL_OBSERVER)?; + backend.load_incremental(&input)?; } Inputs::Paths(paths) => { for path in paths { @@ -53,8 +53,6 @@ pub(super) fn merge(inputs: Inputs, mut output: W) -> Result< fn load_path(backend: &mut am::Automerge, path: &Path) -> Result<(), Box> { let input = std::fs::read(path).map_err(Box::new)?; - backend - .load_incremental(&input, am::NULL_OBSERVER) - .map_err(Box::new)?; + backend.load_incremental(&input).map_err(Box::new)?; Ok(()) } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index e816bbf1..091a24de 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -128,7 +128,9 @@ impl Automerge { pub fn merge(&mut self, other: &mut Automerge) -> Result { self.ensure_transaction_closed(); - let heads = self.doc.merge(&mut other.doc, self.observer.as_mut())?; + let heads = self + .doc + .merge_with(&mut other.doc, self.observer.as_mut())?; let heads: Array = heads .iter() .map(|h| JsValue::from_str(&hex::encode(&h.0))) @@ -503,7 +505,7 @@ impl Automerge { let data = data.to_vec(); let len = self .doc - .load_incremental(&data, self.observer.as_mut()) + .load_incremental_with(&data, self.observer.as_mut()) .map_err(to_js_err)?; Ok(len as f64) } @@ -513,7 +515,7 @@ impl Automerge { self.ensure_transaction_closed(); let changes: Vec<_> = JS(changes).try_into()?; self.doc - .apply_changes(changes, self.observer.as_mut()) + .apply_changes_with(changes, self.observer.as_mut()) .map_err(to_js_err)?; Ok(()) } @@ -607,7 +609,7 @@ impl Automerge { let message = message.to_vec(); let message = am::sync::Message::decode(message.as_slice()).map_err(to_js_err)?; self.doc - .receive_sync_message(&mut state.0, message, self.observer.as_mut()) + .receive_sync_message_with(&mut state.0, message, self.observer.as_mut()) .map_err(to_js_err)?; Ok(()) } @@ -775,7 
+777,7 @@ pub fn init(actor: Option) -> Result { pub fn load(data: Uint8Array, actor: Option) -> Result { let data = data.to_vec(); let mut observer = None; - let mut automerge = am::AutoCommit::load(&data, observer.as_mut()).map_err(to_js_err)?; + let mut automerge = am::AutoCommit::load_with(&data, observer.as_mut()).map_err(to_js_err)?; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); automerge.set_actor(actor); diff --git a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs index 2afcd211..0ce5de72 100644 --- a/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -2,7 +2,6 @@ use automerge::transaction::CommitOptions; use automerge::transaction::Transactable; use automerge::AutomergeError; use automerge::ObjType; -use automerge::NULL_OBSERVER; use automerge::{Automerge, ROOT}; // Based on https://automerge.github.io/docs/quickstart @@ -26,10 +25,10 @@ fn main() { .result; let mut doc2 = Automerge::new(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); let binary = doc1.save(); - let mut doc2 = Automerge::load(&binary, NULL_OBSERVER).unwrap(); + let mut doc2 = Automerge::load(&binary).unwrap(); doc1.transact_with::<_, _, AutomergeError, _, ()>( |_| CommitOptions::default().with_message("Mark card as done".to_owned()), @@ -49,7 +48,7 @@ fn main() { ) .unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); for change in doc1.get_changes(&[]) { let length = doc1.length_at(&cards, &[change.hash]); diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 44a4126f..4ce7606e 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -3,7 +3,6 @@ use std::ops::RangeBounds; use crate::exid::ExId; use crate::op_observer::OpObserver; use crate::transaction::{CommitOptions, Transactable}; -use crate::types::Patch; use crate::{sync, Keys, KeysAt, ObjType, Parents, 
Range, RangeAt, ScalarValue, Values, ValuesAt}; use crate::{ transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, @@ -82,44 +81,69 @@ impl AutoCommit { } } - pub fn load( - data: &[u8], - op_observer: Option<&mut Obs>, - ) -> Result { - let doc = Automerge::load(data, op_observer)?; + pub fn load(data: &[u8]) -> Result { + let doc = Automerge::load(data)?; Ok(Self { doc, transaction: None, }) } - pub fn load_incremental( + pub fn load_with( + data: &[u8], + op_observer: Option<&mut Obs>, + ) -> Result { + let doc = Automerge::load_with(data, op_observer)?; + Ok(Self { + doc, + transaction: None, + }) + } + + pub fn load_incremental(&mut self, data: &[u8]) -> Result { + self.ensure_transaction_closed(); + self.doc.load_incremental(data) + } + + pub fn load_incremental_with( &mut self, data: &[u8], op_observer: Option<&mut Obs>, ) -> Result { self.ensure_transaction_closed(); - self.doc.load_incremental(data, op_observer) + self.doc.load_incremental_with(data, op_observer) } - pub fn apply_changes( + pub fn apply_changes(&mut self, changes: Vec) -> Result<(), AutomergeError> { + self.ensure_transaction_closed(); + self.doc.apply_changes(changes) + } + + pub fn apply_changes_with( &mut self, changes: Vec, op_observer: Option<&mut Obs>, ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); - self.doc.apply_changes(changes, op_observer) + self.doc.apply_changes_with(changes, op_observer) } /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge( + pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { + self.ensure_transaction_closed(); + other.ensure_transaction_closed(); + self.doc.merge(&mut other.doc) + } + + /// Takes all the changes in `other` which are not in `self` and applies them + pub fn merge_with( &mut self, other: &mut Self, op_observer: Option<&mut Obs>, ) -> Result, AutomergeError> { self.ensure_transaction_closed(); 
other.ensure_transaction_closed(); - self.doc.merge(&mut other.doc, op_observer) + self.doc.merge_with(&mut other.doc, op_observer) } pub fn save(&mut self) -> Vec { @@ -173,7 +197,16 @@ impl AutoCommit { self.doc.generate_sync_message(sync_state) } - pub fn receive_sync_message( + pub fn receive_sync_message( + &mut self, + sync_state: &mut sync::State, + message: sync::Message, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_closed(); + self.doc.receive_sync_message(sync_state, message) + } + + pub fn receive_sync_message_with( &mut self, sync_state: &mut sync::State, message: sync::Message, @@ -181,7 +214,7 @@ impl AutoCommit { ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); self.doc - .receive_sync_message(sync_state, message, op_observer) + .receive_sync_message_with(sync_state, message, op_observer) } #[cfg(feature = "optree-visualisation")] diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 5a0cb48d..c3652470 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -203,7 +203,7 @@ impl Automerge { } let mut f = Self::new(); f.set_actor(ActorId::random()); - f.apply_changes(changes.into_iter().rev().cloned(), NULL_OBSERVER)?; + f.apply_changes_with(changes.into_iter().rev().cloned(), NULL_OBSERVER)?; Ok(f) } @@ -522,25 +522,35 @@ impl Automerge { } /// Load a document. - pub fn load( + pub fn load(data: &[u8]) -> Result { + Self::load_with(data, NULL_OBSERVER) + } + + /// Load a document. + pub fn load_with( data: &[u8], op_observer: Option<&mut Obs>, ) -> Result { let changes = Change::load_document(data)?; let mut doc = Self::new(); - doc.apply_changes(changes, op_observer)?; + doc.apply_changes_with(changes, op_observer)?; Ok(doc) } /// Load an incremental save of a document. - pub fn load_incremental( + pub fn load_incremental(&mut self, data: &[u8]) -> Result { + self.load_incremental_with(data, NULL_OBSERVER) + } + + /// Load an incremental save of a document. 
+ pub fn load_incremental_with( &mut self, data: &[u8], op_observer: Option<&mut Obs>, ) -> Result { let changes = Change::load_document(data)?; let start = self.ops.len(); - self.apply_changes(changes, op_observer)?; + self.apply_changes_with(changes, op_observer)?; let delta = self.ops.len() - start; Ok(delta) } @@ -556,7 +566,15 @@ impl Automerge { } /// Apply changes to this document. - pub fn apply_changes( + pub fn apply_changes( + &mut self, + changes: impl IntoIterator, + ) -> Result<(), AutomergeError> { + self.apply_changes_with(changes, NULL_OBSERVER) + } + + /// Apply changes to this document. + pub fn apply_changes_with( &mut self, changes: impl IntoIterator, mut observer: Option<&mut Obs>, @@ -655,7 +673,12 @@ impl Automerge { } /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge( + pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { + self.merge_with(other, NULL_OBSERVER) + } + + /// Takes all the changes in `other` which are not in `self` and applies them + pub fn merge_with( &mut self, other: &mut Self, op_observer: Option<&mut Obs>, @@ -666,7 +689,7 @@ impl Automerge { .into_iter() .cloned() .collect::>(); - self.apply_changes(changes, op_observer)?; + self.apply_changes_with(changes, op_observer)?; Ok(self.get_heads()) } @@ -1169,8 +1192,8 @@ mod tests { assert!(save_b.len() < save_a.len()); - let mut doc_a = Automerge::load(&save_a, NULL_OBSERVER)?; - let mut doc_b = Automerge::load(&save_b, NULL_OBSERVER)?; + let mut doc_a = Automerge::load(&save_a)?; + let mut doc_b = Automerge::load(&save_b)?; assert!(doc_a.get_all(ROOT, "baz")? 
== doc_b.get_all(ROOT, "baz")?); @@ -1650,7 +1673,7 @@ mod tests { assert_eq!(last_change.len(), 0); let bytes = doc.save(); - assert!(Automerge::load(&bytes, NULL_OBSERVER).is_ok()); + assert!(Automerge::load(&bytes,).is_ok()); let mut tx = doc.transaction(); tx.put(ROOT, "a", 1).unwrap(); @@ -1684,7 +1707,7 @@ mod tests { tx.commit(); let hash = doc.get_last_local_change().unwrap().hash; let bytes = doc.save(); - let doc = Automerge::load(&bytes, NULL_OBSERVER).unwrap(); + let doc = Automerge::load(&bytes).unwrap(); assert_eq!(doc.get_change_by_hash(&hash).unwrap().hash, hash); } @@ -1699,7 +1722,7 @@ mod tests { 157, 157, 157, 157, 157, 157, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 48, 254, 208, ]; - let _ = Automerge::load(bytes, NULL_OBSERVER); + let _ = Automerge::load(bytes); } #[test] @@ -1777,7 +1800,7 @@ mod tests { tx.commit(); let bytes = doc.save(); println!("doc2 time"); - let mut doc2 = Automerge::load(&bytes, NULL_OBSERVER).unwrap(); + let mut doc2 = Automerge::load(&bytes).unwrap(); let bytes2 = doc2.save(); assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); @@ -1830,7 +1853,7 @@ mod tests { tx.commit(); let bytes = doc.save(); println!("doc2 time"); - let mut doc2 = Automerge::load(&bytes, NULL_OBSERVER).unwrap(); + let mut doc2 = Automerge::load(&bytes).unwrap(); let bytes2 = doc2.save(); assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); @@ -1855,8 +1878,7 @@ mod tests { let mut doc2 = AutoCommit::new().with_actor(actor2.clone()); let list = doc1.put_object(ROOT, "list", ObjType::List).unwrap(); doc1.insert(&list, 0, 0).unwrap(); - doc2.load_incremental(&doc1.save_incremental(), NULL_OBSERVER) - .unwrap(); + doc2.load_incremental(&doc1.save_incremental()).unwrap(); for i in 1..=max { doc1.put(&list, 0, i).unwrap() } @@ -1865,8 +1887,8 @@ mod tests { } let change1 = doc1.save_incremental(); let change2 = doc2.save_incremental(); - doc2.load_incremental(&change1, 
NULL_OBSERVER).unwrap(); - doc1.load_incremental(&change2, NULL_OBSERVER).unwrap(); + doc2.load_incremental(&change1).unwrap(); + doc1.load_incremental(&change2).unwrap(); assert_eq!(doc1.length(&list), 1); assert_eq!(doc2.length(&list), 1); assert_eq!( diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index b4db50ef..022750ca 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -4,7 +4,6 @@ use crate::indexed_cache::IndexedCache; use crate::op_tree::OpTree; use crate::query::{self, OpIdSearch, TreeQuery}; use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpType}; -use crate::Prop; use crate::{ObjType, OpObserver}; use fxhash::FxBuildHasher; use std::cmp::Ordering; diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 52164319..6c9a423e 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -8,7 +8,7 @@ use std::{ use crate::{ decoding, decoding::Decoder, encoding::Encodable, Automerge, AutomergeError, Change, - ChangeHash, OpObserver, + ChangeHash, OpObserver, NULL_OBSERVER, }; mod bloom; @@ -94,7 +94,15 @@ impl Automerge { Some(sync_message) } - pub fn receive_sync_message( + pub fn receive_sync_message( + &mut self, + sync_state: &mut State, + message: Message, + ) -> Result<(), AutomergeError> { + self.receive_sync_message_with(sync_state, message, NULL_OBSERVER) + } + + pub fn receive_sync_message_with( &mut self, sync_state: &mut State, message: Message, @@ -111,7 +119,7 @@ impl Automerge { let changes_is_empty = message_changes.is_empty(); if !changes_is_empty { - self.apply_changes(message_changes, op_observer)?; + self.apply_changes_with(message_changes, op_observer)?; sync_state.shared_heads = advance_heads( &before_heads.iter().collect(), &self.get_heads().into_iter().collect(), diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 4e6e8051..4907334a 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,5 +1,5 @@ use automerge::transaction::Transactable; -use 
automerge::{ActorId, AutoCommit, Automerge, ObjType, ScalarValue, Value, NULL_OBSERVER, ROOT}; +use automerge::{ActorId, AutoCommit, Automerge, ObjType, ScalarValue, Value, ROOT}; mod helpers; #[allow(unused_imports)] @@ -25,10 +25,10 @@ fn repeated_map_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); doc1.put(&automerge::ROOT, "field", 123).unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc2.put(&automerge::ROOT, "field", 456).unwrap(); doc1.put(&automerge::ROOT, "field", 789).unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_eq!(doc1.get_all(&automerge::ROOT, "field").unwrap().len(), 2); doc1.put(&automerge::ROOT, "field", 123).unwrap(); @@ -48,9 +48,9 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, 123).unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc2.put(&list_id, 0, 456).unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); doc1.put(&list_id, 0, 789).unwrap(); assert_doc!( @@ -92,7 +92,7 @@ fn merge_concurrent_map_prop_updates() { let mut doc2 = new_doc(); doc1.put(&automerge::ROOT, "foo", "bar").unwrap(); doc2.put(&automerge::ROOT, "hello", "world").unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_eq!( doc1.get(&automerge::ROOT, "foo").unwrap().unwrap().0, "bar".into() @@ -104,7 +104,7 @@ fn merge_concurrent_map_prop_updates() { "hello" => { "world" }, } ); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); assert_doc!( doc2.document(), map! 
{ @@ -121,10 +121,10 @@ fn add_concurrent_increments_of_same_property() { let mut doc2 = new_doc(); doc1.put(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc1.increment(&automerge::ROOT, "counter", 1).unwrap(); doc2.increment(&automerge::ROOT, "counter", 2).unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), map! { @@ -150,7 +150,7 @@ fn add_increments_only_to_preceeded_values() { doc2.increment(&automerge::ROOT, "counter", 3).unwrap(); // The two values should be conflicting rather than added - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -170,7 +170,7 @@ fn concurrent_updates_of_same_field() { doc1.put(&automerge::ROOT, "field", "one").unwrap(); doc2.put(&automerge::ROOT, "field", "two").unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -191,11 +191,11 @@ fn concurrent_updates_of_same_list_element() { .put_object(&automerge::ROOT, "birds", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, "finch").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc1.put(&list_id, 0, "greenfinch").unwrap(); doc2.put(&list_id, 0, "goldfinch").unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -220,8 +220,8 @@ fn assignment_conflicts_of_different_types() { .unwrap(); doc3.put_object(&automerge::ROOT, "field", ObjType::Map) .unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); - doc1.merge(&mut doc3, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc3).unwrap(); assert_doc!( doc1.document(), @@ -244,7 +244,7 @@ fn changes_within_conflicting_map_field() { .put_object(&automerge::ROOT, "field", ObjType::Map) .unwrap(); 
doc2.put(&map_id, "innerKey", 42).unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -270,18 +270,18 @@ fn changes_within_conflicting_list_element() { .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list_id, 0, "hello").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); let map_in_doc1 = doc1.put_object(&list_id, 0, ObjType::Map).unwrap(); doc1.put(&map_in_doc1, "map1", true).unwrap(); doc1.put(&map_in_doc1, "key", 1).unwrap(); let map_in_doc2 = doc2.put_object(&list_id, 0, ObjType::Map).unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); doc2.put(&map_in_doc2, "map2", true).unwrap(); doc2.put(&map_in_doc2, "key", 2).unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -319,7 +319,7 @@ fn concurrently_assigned_nested_maps_should_not_merge() { .unwrap(); doc2.put(&doc2_map_id, "logo_url", "logo.png").unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -349,11 +349,11 @@ fn concurrent_insertions_at_different_list_positions() { doc1.insert(&list_id, 0, "one").unwrap(); doc1.insert(&list_id, 1, "three").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc1.splice(&list_id, 1, 0, vec!["two".into()]).unwrap(); doc2.insert(&list_id, 2, "four").unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -382,10 +382,10 @@ fn concurrent_insertions_at_same_list_position() { .unwrap(); doc1.insert(&list_id, 0, "parakeet").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc1.insert(&list_id, 1, "starling").unwrap(); doc2.insert(&list_id, 1, "chaffinch").unwrap(); - doc1.merge(&mut doc2, 
NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -412,11 +412,11 @@ fn concurrent_assignment_and_deletion_of_a_map_entry() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); doc1.put(&automerge::ROOT, "bestBird", "robin").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc1.delete(&automerge::ROOT, "bestBird").unwrap(); doc2.put(&automerge::ROOT, "bestBird", "magpie").unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -438,7 +438,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { doc1.insert(&list_id, 0, "blackbird").unwrap(); doc1.insert(&list_id, 1, "thrush").unwrap(); doc1.insert(&list_id, 2, "goldfinch").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc1.put(&list_id, 1, "starling").unwrap(); doc2.delete(&list_id, 1).unwrap(); @@ -463,7 +463,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { } ); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -489,14 +489,14 @@ fn insertion_after_a_deleted_list_element() { doc1.insert(&list_id, 1, "thrush").unwrap(); doc1.insert(&list_id, 2, "goldfinch").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc1.splice(&list_id, 1, 2, Vec::new()).unwrap(); doc2.splice(&list_id, 2, 0, vec!["starling".into()]) .unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -508,7 +508,7 @@ fn insertion_after_a_deleted_list_element() { } ); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); assert_doc!( doc2.document(), map! 
{ @@ -532,13 +532,13 @@ fn concurrent_deletion_of_same_list_element() { doc1.insert(&list_id, 1, "buzzard").unwrap(); doc1.insert(&list_id, 2, "cormorant").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc1.delete(&list_id, 1).unwrap(); doc2.delete(&list_id, 1).unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -550,7 +550,7 @@ fn concurrent_deletion_of_same_list_element() { } ); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); assert_doc!( doc2.document(), map! { @@ -577,12 +577,12 @@ fn concurrent_updates_at_different_levels() { let mammals = doc1.put_object(&animals, "mammals", ObjType::List).unwrap(); doc1.insert(&mammals, 0, "badger").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc1.put(&birds, "brown", "sparrow").unwrap(); doc2.delete(&animals, "birds").unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_obj!( doc1.document(), @@ -618,13 +618,13 @@ fn concurrent_updates_of_concurrently_deleted_objects() { let blackbird = doc1.put_object(&birds, "blackbird", ObjType::Map).unwrap(); doc1.put(&blackbird, "feathers", "black").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc1.delete(&birds, "blackbird").unwrap(); doc2.put(&blackbird, "beak", "orange").unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -645,7 +645,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { let wisdom = doc1 .put_object(&automerge::ROOT, "wisdom", ObjType::List) .unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc1.splice( &wisdom, @@ -675,7 +675,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { ) .unwrap(); - doc1.merge(&mut doc2, 
NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); assert_doc!( doc1.document(), @@ -707,7 +707,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id( .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list, 0, "two").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc2.insert(&list, 0, "one").unwrap(); assert_doc!( @@ -732,7 +732,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list, 0, "two").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc2.insert(&list, 0, "one").unwrap(); assert_doc!( @@ -755,11 +755,11 @@ fn insertion_consistent_with_causality() { .put_object(&automerge::ROOT, "list", ObjType::List) .unwrap(); doc1.insert(&list, 0, "four").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc2.insert(&list, 0, "three").unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); doc1.insert(&list, 0, "two").unwrap(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc2.insert(&list, 0, "one").unwrap(); assert_doc!( @@ -778,7 +778,7 @@ fn insertion_consistent_with_causality() { #[test] fn save_and_restore_empty() { let mut doc = new_doc(); - let loaded = Automerge::load(&doc.save(), NULL_OBSERVER).unwrap(); + let loaded = Automerge::load(&doc.save()).unwrap(); assert_doc!(&loaded, map! 
{}); } @@ -795,13 +795,13 @@ fn save_restore_complex() { doc1.put(&first_todo, "done", false).unwrap(); let mut doc2 = new_doc(); - doc2.merge(&mut doc1, NULL_OBSERVER).unwrap(); + doc2.merge(&mut doc1).unwrap(); doc2.put(&first_todo, "title", "weed plants").unwrap(); doc1.put(&first_todo, "title", "kill plants").unwrap(); - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); - let reloaded = Automerge::load(&doc1.save(), NULL_OBSERVER).unwrap(); + let reloaded = Automerge::load(&doc1.save()).unwrap(); assert_doc!( &reloaded, @@ -859,10 +859,10 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc1.insert(&list, 1, "b")?; doc1.insert(&list, 2, "c")?; - let mut doc2 = AutoCommit::load(&doc1.save(), NULL_OBSERVER)?; + let mut doc2 = AutoCommit::load(&doc1.save())?; doc2.set_actor(actor2); - let mut doc3 = AutoCommit::load(&doc1.save(), NULL_OBSERVER)?; + let mut doc3 = AutoCommit::load(&doc1.save())?; doc3.set_actor(actor3); doc1.put(&list, 1, ScalarValue::counter(0))?; @@ -876,8 +876,8 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc1.increment(&list, 1, 1)?; doc1.increment(&list, 2, 1)?; - doc1.merge(&mut doc2, NULL_OBSERVER).unwrap(); - doc1.merge(&mut doc3, NULL_OBSERVER).unwrap(); + doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc3).unwrap(); let values = doc1.get_all(&list, 1)?; assert_eq!(values.len(), 3); @@ -911,7 +911,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(doc1.length(&list), 2); - let doc4 = AutoCommit::load(&doc1.save(), NULL_OBSERVER)?; + let doc4 = AutoCommit::load(&doc1.save())?; assert_eq!(doc4.length(&list), 2); @@ -919,7 +919,7 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { assert_eq!(doc1.length(&list), 1); - let doc5 = AutoCommit::load(&doc1.save(), NULL_OBSERVER)?; + let doc5 = AutoCommit::load(&doc1.save())?; assert_eq!(doc5.length(&list), 1); diff --git a/edit-trace/benches/main.rs 
b/edit-trace/benches/main.rs index 2d79722f..00028945 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -1,4 +1,4 @@ -use automerge::{transaction::Transactable, AutoCommit, Automerge, ObjType, NULL_OBSERVER, ROOT}; +use automerge::{transaction::Transactable, AutoCommit, Automerge, ObjType, ROOT}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; use std::fs; @@ -32,11 +32,11 @@ fn save_trace_autotx(mut doc: AutoCommit) { } fn load_trace(bytes: &[u8]) { - Automerge::load(bytes, NULL_OBSERVER).unwrap(); + Automerge::load(bytes).unwrap(); } fn load_trace_autotx(bytes: &[u8]) { - AutoCommit::load(bytes, NULL_OBSERVER).unwrap(); + AutoCommit::load(bytes).unwrap(); } fn bench(c: &mut Criterion) { diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index 558a2e6d..72085fdb 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -1,5 +1,5 @@ +use automerge::ObjType; use automerge::{transaction::Transactable, Automerge, AutomergeError, ROOT}; -use automerge::{ObjType, NULL_OBSERVER}; use std::time::Instant; fn main() -> Result<(), AutomergeError> { @@ -33,7 +33,7 @@ fn main() -> Result<(), AutomergeError> { println!("Saved in {} ms", save.elapsed().as_millis()); let load = Instant::now(); - let _ = Automerge::load(&bytes, NULL_OBSERVER).unwrap(); + let _ = Automerge::load(&bytes).unwrap(); println!("Loaded in {} ms", load.elapsed().as_millis()); println!("Done in {} ms", now.elapsed().as_millis()); From aa3c32cea3b34ecd6b0fb2413b195b207bc028fc Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 19 Apr 2022 18:15:15 +0100 Subject: [PATCH 287/730] Add ApplyOptions --- automerge-wasm/src/lib.rs | 36 ++++++++++++++++++++++++++++-------- automerge/src/autocommit.rs | 31 +++++++++++++++++-------------- automerge/src/automerge.rs | 36 ++++++++++++++++++------------------ automerge/src/lib.rs | 3 ++- automerge/src/op_observer.rs | 2 -- automerge/src/options.rs | 16 ++++++++++++++++ 
automerge/src/sync.rs | 12 ++++++------ 7 files changed, 87 insertions(+), 49 deletions(-) create mode 100644 automerge/src/options.rs diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 091a24de..c4f3faef 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -1,6 +1,7 @@ #![allow(clippy::unused_unit)] use am::transaction::CommitOptions; use am::transaction::Transactable; +use am::ApplyOptions; use automerge as am; use automerge::Patch; use automerge::VecOpObserver; @@ -128,9 +129,12 @@ impl Automerge { pub fn merge(&mut self, other: &mut Automerge) -> Result { self.ensure_transaction_closed(); - let heads = self - .doc - .merge_with(&mut other.doc, self.observer.as_mut())?; + let options = if let Some(observer) = self.observer.as_mut() { + ApplyOptions::default().with_op_observer(observer) + } else { + ApplyOptions::default() + }; + let heads = self.doc.merge_with(&mut other.doc, options)?; let heads: Array = heads .iter() .map(|h| JsValue::from_str(&hex::encode(&h.0))) @@ -503,9 +507,14 @@ impl Automerge { pub fn load_incremental(&mut self, data: Uint8Array) -> Result { self.ensure_transaction_closed(); let data = data.to_vec(); + let options = if let Some(observer) = self.observer.as_mut() { + ApplyOptions::default().with_op_observer(observer) + } else { + ApplyOptions::default() + }; let len = self .doc - .load_incremental_with(&data, self.observer.as_mut()) + .load_incremental_with(&data, options) .map_err(to_js_err)?; Ok(len as f64) } @@ -514,8 +523,13 @@ impl Automerge { pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> { self.ensure_transaction_closed(); let changes: Vec<_> = JS(changes).try_into()?; + let options = if let Some(observer) = self.observer.as_mut() { + ApplyOptions::default().with_op_observer(observer) + } else { + ApplyOptions::default() + }; self.doc - .apply_changes_with(changes, self.observer.as_mut()) + .apply_changes_with(changes, options) .map_err(to_js_err)?; Ok(()) } @@ 
-608,8 +622,13 @@ impl Automerge { self.ensure_transaction_closed(); let message = message.to_vec(); let message = am::sync::Message::decode(message.as_slice()).map_err(to_js_err)?; + let options = if let Some(observer) = self.observer.as_mut() { + ApplyOptions::default().with_op_observer(observer) + } else { + ApplyOptions::default() + }; self.doc - .receive_sync_message_with(&mut state.0, message, self.observer.as_mut()) + .receive_sync_message_with(&mut state.0, message, options) .map_err(to_js_err)?; Ok(()) } @@ -776,8 +795,9 @@ pub fn init(actor: Option) -> Result { #[wasm_bindgen(js_name = loadDoc)] pub fn load(data: Uint8Array, actor: Option) -> Result { let data = data.to_vec(); - let mut observer = None; - let mut automerge = am::AutoCommit::load_with(&data, observer.as_mut()).map_err(to_js_err)?; + let observer = None; + let options = ApplyOptions::<()>::default(); + let mut automerge = am::AutoCommit::load_with(&data, options).map_err(to_js_err)?; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); automerge.set_actor(actor); diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 4ce7606e..ce811bc9 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -3,7 +3,10 @@ use std::ops::RangeBounds; use crate::exid::ExId; use crate::op_observer::OpObserver; use crate::transaction::{CommitOptions, Transactable}; -use crate::{sync, Keys, KeysAt, ObjType, Parents, Range, RangeAt, ScalarValue, Values, ValuesAt}; +use crate::{ + sync, ApplyOptions, Keys, KeysAt, ObjType, Parents, Range, RangeAt, ScalarValue, Values, + ValuesAt, +}; use crate::{ transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, @@ -91,9 +94,9 @@ impl AutoCommit { pub fn load_with( data: &[u8], - op_observer: Option<&mut Obs>, + options: ApplyOptions, ) -> Result { - let doc = Automerge::load_with(data, op_observer)?; + let doc = 
Automerge::load_with(data, options)?; Ok(Self { doc, transaction: None, @@ -105,13 +108,13 @@ impl AutoCommit { self.doc.load_incremental(data) } - pub fn load_incremental_with( + pub fn load_incremental_with<'a, Obs: OpObserver>( &mut self, data: &[u8], - op_observer: Option<&mut Obs>, + options: ApplyOptions<'a, Obs>, ) -> Result { self.ensure_transaction_closed(); - self.doc.load_incremental_with(data, op_observer) + self.doc.load_incremental_with(data, options) } pub fn apply_changes(&mut self, changes: Vec) -> Result<(), AutomergeError> { @@ -122,10 +125,10 @@ impl AutoCommit { pub fn apply_changes_with( &mut self, changes: Vec, - op_observer: Option<&mut Obs>, + options: ApplyOptions, ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); - self.doc.apply_changes_with(changes, op_observer) + self.doc.apply_changes_with(changes, options) } /// Takes all the changes in `other` which are not in `self` and applies them @@ -136,14 +139,14 @@ impl AutoCommit { } /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge_with( + pub fn merge_with<'a, Obs: OpObserver>( &mut self, other: &mut Self, - op_observer: Option<&mut Obs>, + options: ApplyOptions<'a, Obs>, ) -> Result, AutomergeError> { self.ensure_transaction_closed(); other.ensure_transaction_closed(); - self.doc.merge_with(&mut other.doc, op_observer) + self.doc.merge_with(&mut other.doc, options) } pub fn save(&mut self) -> Vec { @@ -206,15 +209,15 @@ impl AutoCommit { self.doc.receive_sync_message(sync_state, message) } - pub fn receive_sync_message_with( + pub fn receive_sync_message_with<'a, Obs: OpObserver>( &mut self, sync_state: &mut sync::State, message: sync::Message, - op_observer: Option<&mut Obs>, + options: ApplyOptions<'a, Obs>, ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); self.doc - .receive_sync_message_with(sync_state, message, op_observer) + .receive_sync_message_with(sync_state, message, options) } #[cfg(feature = 
"optree-visualisation")] diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index c3652470..534913cc 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -15,7 +15,7 @@ use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, ScalarValue, Value, }; -use crate::{legacy, query, types, ObjType, RangeAt, ValuesAt, NULL_OBSERVER}; +use crate::{legacy, query, types, ApplyOptions, ObjType, RangeAt, ValuesAt}; use crate::{AutomergeError, Change, Prop}; use crate::{KeysAt, Values}; use serde::Serialize; @@ -203,7 +203,7 @@ impl Automerge { } let mut f = Self::new(); f.set_actor(ActorId::random()); - f.apply_changes_with(changes.into_iter().rev().cloned(), NULL_OBSERVER)?; + f.apply_changes(changes.into_iter().rev().cloned())?; Ok(f) } @@ -523,34 +523,34 @@ impl Automerge { /// Load a document. pub fn load(data: &[u8]) -> Result { - Self::load_with(data, NULL_OBSERVER) + Self::load_with::<()>(data, ApplyOptions::default()) } /// Load a document. pub fn load_with( data: &[u8], - op_observer: Option<&mut Obs>, + options: ApplyOptions, ) -> Result { let changes = Change::load_document(data)?; let mut doc = Self::new(); - doc.apply_changes_with(changes, op_observer)?; + doc.apply_changes_with(changes, options)?; Ok(doc) } /// Load an incremental save of a document. pub fn load_incremental(&mut self, data: &[u8]) -> Result { - self.load_incremental_with(data, NULL_OBSERVER) + self.load_incremental_with::<()>(data, ApplyOptions::default()) } /// Load an incremental save of a document. 
pub fn load_incremental_with( &mut self, data: &[u8], - op_observer: Option<&mut Obs>, + options: ApplyOptions, ) -> Result { let changes = Change::load_document(data)?; let start = self.ops.len(); - self.apply_changes_with(changes, op_observer)?; + self.apply_changes_with(changes, options)?; let delta = self.ops.len() - start; Ok(delta) } @@ -570,14 +570,14 @@ impl Automerge { &mut self, changes: impl IntoIterator, ) -> Result<(), AutomergeError> { - self.apply_changes_with(changes, NULL_OBSERVER) + self.apply_changes_with::<_, ()>(changes, ApplyOptions::default()) } /// Apply changes to this document. - pub fn apply_changes_with( + pub fn apply_changes_with, Obs: OpObserver>( &mut self, - changes: impl IntoIterator, - mut observer: Option<&mut Obs>, + changes: I, + mut options: ApplyOptions, ) -> Result<(), AutomergeError> { for c in changes { if !self.history_index.contains_key(&c.hash) { @@ -588,7 +588,7 @@ impl Automerge { )); } if self.is_causally_ready(&c) { - self.apply_change(c, &mut observer); + self.apply_change(c, &mut options.op_observer); } else { self.queue.push(c); } @@ -596,7 +596,7 @@ impl Automerge { } while let Some(c) = self.pop_next_causally_ready_change() { if !self.history_index.contains_key(&c.hash) { - self.apply_change(c, &mut observer); + self.apply_change(c, &mut options.op_observer); } } Ok(()) @@ -674,14 +674,14 @@ impl Automerge { /// Takes all the changes in `other` which are not in `self` and applies them pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { - self.merge_with(other, NULL_OBSERVER) + self.merge_with::<()>(other, ApplyOptions::default()) } /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge_with( + pub fn merge_with<'a, Obs: OpObserver>( &mut self, other: &mut Self, - op_observer: Option<&mut Obs>, + options: ApplyOptions<'a, Obs>, ) -> Result, AutomergeError> { // TODO: Make this fallible and figure out how to do this transactionally let changes = self @@ 
-689,7 +689,7 @@ impl Automerge { .into_iter() .cloned() .collect::>(); - self.apply_changes_with(changes, op_observer)?; + self.apply_changes_with(changes, options)?; Ok(self.get_heads()) } diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 4879f95a..a35b2110 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -40,6 +40,7 @@ mod legacy; mod op_observer; mod op_set; mod op_tree; +mod options; mod parents; mod query; mod range; @@ -67,7 +68,7 @@ pub use legacy::Change as ExpandedChange; pub use op_observer::OpObserver; pub use op_observer::Patch; pub use op_observer::VecOpObserver; -pub use op_observer::NULL_OBSERVER; +pub use options::ApplyOptions; pub use parents::Parents; pub use range::Range; pub use range_at::RangeAt; diff --git a/automerge/src/op_observer.rs b/automerge/src/op_observer.rs index 88adb861..30718835 100644 --- a/automerge/src/op_observer.rs +++ b/automerge/src/op_observer.rs @@ -36,8 +36,6 @@ impl OpObserver for () { fn delete(&mut self, _objid: ExId, _key: Prop) {} } -pub const NULL_OBSERVER: Option<&mut ()> = None; - /// Capture operations into a [`Vec`] and store them as patches. 
#[derive(Default, Debug, Clone)] pub struct VecOpObserver { diff --git a/automerge/src/options.rs b/automerge/src/options.rs new file mode 100644 index 00000000..622985e3 --- /dev/null +++ b/automerge/src/options.rs @@ -0,0 +1,16 @@ +#[derive(Default)] +pub struct ApplyOptions<'a, Obs> { + pub op_observer: Option<&'a mut Obs>, +} + +impl<'a, Obs> ApplyOptions<'a, Obs> { + pub fn with_op_observer(mut self, op_observer: &'a mut Obs) -> Self { + self.op_observer = Some(op_observer); + self + } + + pub fn set_op_observer(&mut self, op_observer: &'a mut Obs) -> &mut Self { + self.op_observer = Some(op_observer); + self + } +} diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 6c9a423e..fb0031aa 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -7,8 +7,8 @@ use std::{ }; use crate::{ - decoding, decoding::Decoder, encoding::Encodable, Automerge, AutomergeError, Change, - ChangeHash, OpObserver, NULL_OBSERVER, + decoding, decoding::Decoder, encoding::Encodable, ApplyOptions, Automerge, AutomergeError, + Change, ChangeHash, OpObserver, }; mod bloom; @@ -99,14 +99,14 @@ impl Automerge { sync_state: &mut State, message: Message, ) -> Result<(), AutomergeError> { - self.receive_sync_message_with(sync_state, message, NULL_OBSERVER) + self.receive_sync_message_with::<()>(sync_state, message, ApplyOptions::default()) } - pub fn receive_sync_message_with( + pub fn receive_sync_message_with<'a, Obs: OpObserver>( &mut self, sync_state: &mut State, message: Message, - op_observer: Option<&mut Obs>, + options: ApplyOptions<'a, Obs>, ) -> Result<(), AutomergeError> { let before_heads = self.get_heads(); @@ -119,7 +119,7 @@ impl Automerge { let changes_is_empty = message_changes.is_empty(); if !changes_is_empty { - self.apply_changes_with(message_changes, op_observer)?; + self.apply_changes_with(message_changes, options)?; sync_state.shared_heads = advance_heads( &before_heads.iter().collect(), &self.get_heads().into_iter().collect(), From 
e65200b1502cac1dcb2ca917a19826364ccf52e9 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 19 Apr 2022 19:08:28 +0100 Subject: [PATCH 288/730] Add docs workflow --- .github/workflows/docs.yaml | 38 +++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 .github/workflows/docs.yaml diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml new file mode 100644 index 00000000..6d107184 --- /dev/null +++ b/.github/workflows/docs.yaml @@ -0,0 +1,38 @@ +on: + push: + branches: + - main + +name: Documentation + +jobs: + deploy-docs: + concurrency: deploy-docs + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Toolchain + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + override: true + + - name: Cache + uses: Swatinem/rust-cache@v1 + + - name: Build docs + uses: actions-rs/cargo@v1 + with: + command: doc + args: --workspace --all-features --no-deps + + - name: Configure root page + run: echo '' > target/doc/index.html + + - name: Deploy docs + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./target/doc From 439b9104d6f38bb232c78f156e04ce6105d3719e Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 19 Apr 2022 14:48:33 -0400 Subject: [PATCH 289/730] touch up readme and package files --- README.md | 2 ++ automerge-wasm/Cargo.toml | 2 +- automerge-wasm/LICENSE | 10 +++++++++ .../{node-index.js => nodejs-index.js} | 0 automerge-wasm/package.json | 22 +++++++++++-------- automerge/Cargo.toml | 2 ++ automerge/src/lib.rs | 3 +++ 7 files changed, 31 insertions(+), 10 deletions(-) create mode 100644 automerge-wasm/LICENSE rename automerge-wasm/{node-index.js => nodejs-index.js} (100%) diff --git a/README.md b/README.md index e2d84bda..6d194bd9 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,8 @@ This is a rust implementation of the [Automerge](https://github.com/automerge/automerge) file format and network 
protocol. +If you are looking for the origional `automerge-rs` project that can be used as a wasm backend to the javascript implementation, it can be found [here](https://github.com/automerge/automerge-rs/tree/automerge-1.0). + ## Status This project has 4 components: diff --git a/automerge-wasm/Cargo.toml b/automerge-wasm/Cargo.toml index a9fe8f34..36b5c3b5 100644 --- a/automerge-wasm/Cargo.toml +++ b/automerge-wasm/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "automerge-wasm" description = "An js/wasm wrapper for the rust implementation of automerge-backend" -# repository = "https://github.com/automerge/automerge-rs" +repository = "https://github.com/automerge/automerge-rs" version = "0.1.0" authors = ["Alex Good ","Orion Henry ", "Martin Kleppmann"] categories = ["wasm"] diff --git a/automerge-wasm/LICENSE b/automerge-wasm/LICENSE new file mode 100644 index 00000000..63b21502 --- /dev/null +++ b/automerge-wasm/LICENSE @@ -0,0 +1,10 @@ +MIT License + +Copyright 2022, Ink & Switch LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/automerge-wasm/node-index.js b/automerge-wasm/nodejs-index.js similarity index 100% rename from automerge-wasm/node-index.js rename to automerge-wasm/nodejs-index.js diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index af2bfba9..4a678781 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -6,27 +6,30 @@ ], "name": "automerge-wasm", "description": "wasm-bindgen bindings to the automerge rust implementation", - "version": "0.0.1", + "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", + "repository": "github:automerge/automerge-rs", + "version": "0.1.0", "license": "MIT", "files": [ "README.md", + "LICENSE", "package.json", "index.d.ts", - "node/index.js", - "node/bindgen.js", - "node/bindgen_bg.wasm", + "nodejs/index.js", + "nodejs/bindgen.js", + "nodejs/bindgen_bg.wasm", "web/index.js", "web/bindgen.js", "web/bindgen_bg.wasm" ], "types": "index.d.ts", "module": "./web/index.js", - "main": "./node/index.js", + "main": "./nodejs/index.js", "scripts": { - "build": "rimraf ./node && wasm-pack build --target nodejs --dev --out-name bindgen -d node && cp node-index.js node/index.js", - "release-w": "rimraf ./web && wasm-pack build --target web --release --out-name bindgen -d web && cp web-index.js web/index.js", - "release-n": "rimraf ./node && wasm-pack build --target nodejs --release --out-name bindgen -d node && cp node-index.js node/index.js", - "release": "yarn release-w && yarn release-n", + "build": "cross-env PROFILE=dev TARGET=nodejs yarn target", + "release": "cross-env PROFILE=release yarn buildall", + "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=web yarn target", + "target": "rimraf 
./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET && cp $TARGET-index.js $TARGET/index.js", "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, "dependencies": {}, @@ -35,6 +38,7 @@ "@types/jest": "^27.4.0", "@types/mocha": "^9.1.0", "@types/node": "^17.0.13", + "cross-env": "^7.0.3", "fast-sha256": "^1.3.0", "mocha": "^9.1.3", "pako": "^2.0.4", diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index c3fda321..e7f205cf 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -3,6 +3,8 @@ name = "automerge" version = "0.1.0" edition = "2021" license = "MIT" +repository = "https://github.com/automerge/automerge-rs" +documentation = "https://automerge.org/automerge-rs/automerge/" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index a35b2110..d3d5a8da 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -1,3 +1,4 @@ +#[doc(hidden)] #[macro_export] macro_rules! log { ( $( $t:tt )* ) => { @@ -9,6 +10,7 @@ macro_rules! log { } #[cfg(all(feature = "wasm", target_family = "wasm"))] +#[doc(hidden)] #[macro_export] macro_rules! __log { ( $( $t:tt )* ) => { @@ -17,6 +19,7 @@ macro_rules! __log { } #[cfg(not(all(feature = "wasm", target_family = "wasm")))] +#[doc(hidden)] #[macro_export] macro_rules! __log { ( $( $t:tt )* ) => { From 8005f31a95cffa4f96049bac1406d51d39a7cc9f Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Wed, 20 Apr 2022 00:48:59 -0600 Subject: [PATCH 290/730] Squashed commit of the following: commit 97a36e728e43c9adbe21fd942a73472d0daa2e05 Author: Jason Kankiewicz Date: Fri Apr 8 03:24:46 2022 -0600 Updated the unit test suites. commit 56e2beb946e61d3262f58953d4f2e27cfd4c13b0 Author: Jason Kankiewicz Date: Fri Apr 8 03:22:30 2022 -0600 Tied the lifetime of an `AMobjId` struct to its owning `AMdoc` struct. 
commit e16c980b2eea39459b1b47045e2fbff69ad2965e Author: Jason Kankiewicz Date: Fri Apr 8 03:21:26 2022 -0600 Reverted the `AMobjId` struct to being an opaque type. Added `AMobjId::new()`to fix a compilation error. Tied the lifetime of an `AMobjId` struct to its owning `AMdoc` struct. Added the `AMvalue::ChangeHash` variant. commit 7c769b2cfe041747565c95aeffef311b45a5a19e Author: Jason Kankiewicz Date: Fri Apr 8 03:12:15 2022 -0600 Renamed the `AMobj` struct to `AMobjId` for clarity. Reverted the `AMobjId` struct to being an opaque type. Tied the lifetime of an `AMobjId` struct to its owning `AMdoc` struct. Renamed `AMcreate()` to `AMallocDoc()` for consistency with C's standard library functions. Renamed `AMdestroy()` to `AMfreeDoc()` for consistency with C's standard library functions. Renamed the `obj` function arguments to `obj_id` for clarity. Replaced the "set" verb in function names with the "put" verb for consistency iwth recent API changes. Renamed `AMclear()` to `AMfreeResult()` for consistency with C's standard library functions. Added `AMfreeObjId()` to enable dropping a persisted `AMojbId` struct. commit 8d1b3bfcf21485be833e66473ec04e5e12282934 Author: Jason Kankiewicz Date: Fri Apr 8 02:52:52 2022 -0600 Added a field for persisting `AMobjId` structs to the `AMdoc` struct. Renamed `AMdoc::create()` to `AMdoc::new()` to be more idiomatic. Added `AMdoc::insert_object()` and `AMdoc::set_object()` for persisting `AMobjId` structs. Added `AMdoc::drop_obj_id()` to enable dropping a persisted `AMobjId` struct. commit b9b0f96357275b522b5f5494dc8bbbee86bf9735 Author: Jason Kankiewicz Date: Thu Mar 17 15:17:08 2022 -0700 Ensure CMake targets can be built after a clean. commit d565db1ea8ada9246f6a0ece35968980da90eff9 Author: Jason Kankiewicz Date: Thu Mar 17 15:10:09 2022 -0700 Prevent unnecessary updating of the generated header file. 
commit d3647e75d3f01aa91e337ec0e98c4b3a9f5b57f2 Author: Jason Kankiewicz Date: Wed Mar 16 02:50:59 2022 -0700 Documented the `AMObj.ID` struct member. commit cc58cbf4bbbcef7baf074c20c3c22efcd56e668d Author: Jason Kankiewicz Date: Wed Mar 16 02:03:37 2022 -0700 Normalize the formatting of the `AMobjType_tag()` function. commit c2954dd2c7ff228509c5a129c93cdf9e47192643 Author: Jason Kankiewicz Date: Wed Mar 16 02:02:03 2022 -0700 Remove superfluous backslashes. commit bcb6e759a445c01f9bf5c0cfe53a8f3816311eae Author: Jason Kankiewicz Date: Wed Mar 16 02:01:33 2022 -0700 Removed the `AMconfig()` function. Implemented the `AMgetActor()` function. Added the `AMgetActorHex()` function. Added the `AMsetActor()` function. Added the `AMsetActorHex()` function. commit 9b2c566b9efb934cef99622736725abf5779b6af Author: Jason Kankiewicz Date: Wed Mar 16 01:50:31 2022 -0700 Added the "hex" and "smol_str" crate dependencies to the C API. commit 99e06e1f730283e9009d7ad5a3f77828b786df6a Author: Jason Kankiewicz Date: Wed Mar 16 01:06:15 2022 -0700 Corrected a spelling error. commit 629b19c71dcaf0373afeec6f629c8e836db81529 Author: Jason Kankiewicz Date: Sun Mar 13 20:30:54 2022 -0700 Align backslashes. commit 09d25a32b716fb10728c67d35f5eeec480a6ce7c Author: Jason Kankiewicz Date: Sun Mar 13 20:30:23 2022 -0700 Add EOF linefeed. commit 4ed14ee7481d45f9dbedf08f16f435e5608e49d1 Author: Jason Kankiewicz Date: Sun Mar 13 20:05:30 2022 -0700 Fix "fmt" CI job violations. commit f53b40625dc724726bfb739a64cc6913fea8ed99 Merge: 7d5538d8 e1f8d769 Author: Jason Kankiewicz Date: Sun Mar 13 16:34:11 2022 -0700 Merge branch 'c_api_exp' of https://github.com/automerge/automerge-rs into c_api_exp commit 7d5538d8a42b79485348fbbf0b40c6f0dadaa59b Author: Jason Kankiewicz Date: Sun Mar 13 16:31:22 2022 -0700 Updated the C API's unit test suite. commit 335cd1c85f099c431843bc930dafde1bb1174eaf Author: Jason Kankiewicz Date: Sun Mar 13 16:27:39 2022 -0700 Removed superfluous `AMobj` traits. 
commit 420f8cab64c4614e7fb5c9cfce5cfc59cf7039cb Author: Jason Kankiewicz Date: Sun Mar 13 16:25:48 2022 -0700 Moved the `AMobj` struct into the `result` module. Changed the `AMobj` struct into an enum. Added the `AMbyteSpan` struct. Added the `AMvalue` enum. Added the `AMresult::Nothing` variant. commit 4eca88ff01bcf70ee3002775a8b5faa71bca29f5 Author: Jason Kankiewicz Date: Sun Mar 13 15:56:06 2022 -0700 Normalized all type name prefixes to "AM". Reduced the `AMstatus` enum's `Ok` tags to a single `Ok` tag. Removed the `to_obj` macro. Added the `to_obj_id` macro. Moved the `AMobj` struct into the `result` module. Added the `AMresult::Nothing` variant. Added the `AMresultSize` function. Added the `AMresultValue` function. Added the `AMlistGet` function. Added the `AMmapGet` function. Removed the `AMgetObj` function. Added the `AMobjSize` function. commit 2f94c6fd906bf31a30a1db578334c2d1a41e16c0 Author: Jason Kankiewicz Date: Sun Mar 13 15:29:02 2022 -0700 Compensate for unconfigurable cbindgen behavior. Prevent Doxygen documentation regeneration. commit 5de00b799868339c3fd0f5f1d2c984980989714d Author: Jason Kankiewicz Date: Sun Mar 13 15:24:45 2022 -0700 Alphabetized the cbindgen settings. commit e1f8d769f44b48cdeb03855ac9e2b223fb229187 Author: Orion Henry Date: Thu Mar 10 08:53:07 2022 -0500 update authors commit 3e5525f1a6541664e5e30ad179b2f217ba7b7422 Merge: f4ba1770 1c21abc5 Author: Orion Henry Date: Wed Mar 9 14:36:29 2022 -0500 Merge pull request #304 from jkankiewicz/c_api_exp Fix "fmt" workflow step violations commit 1c21abc5a37941ce189ff48d994c1ce3dda4827f Author: Jason Kankiewicz Date: Wed Mar 9 11:13:01 2022 -0800 Fix CMake and Rust code formatting issues. 
commit f4ba1770a96c5200ae124706dffbab327a628c23 Merge: bf1ae609 f41b30d1 Author: Orion Henry Date: Wed Mar 9 12:05:58 2022 -0500 Merge pull request #300 from jkankiewicz/c_api_exp Add unit test suites for the `AMlistSet*` and `AMmapSet*` functions commit f41b30d118ef60965239a02afddbbfb14dc4392f Author: Jason Kankiewicz Date: Tue Mar 8 22:08:36 2022 -0800 Added a brief description of the `AmObjType` enum. Added the `AmStatus` enum to the enum docs page. commit af7386a482d2cb9cee2d188ef443498ddab40ddb Author: Jason Kankiewicz Date: Tue Mar 8 21:50:52 2022 -0800 Added a unit test suite for the `AMlistSet*` functions. commit 1eb70c6eee846b912806d058a43c6e04a9184a62 Author: Jason Kankiewicz Date: Tue Mar 8 21:42:42 2022 -0800 Added the rest of the `AMlistSet*` functions. Started the enum tags at `1` so they won't be inherently false. Alphabetized enum tags for the docs. Improved the docs. commit 6489cba13b49656aa0704d329a90c6e7e4a809e1 Author: Jason Kankiewicz Date: Tue Mar 8 18:01:46 2022 -0800 Alphabetize functions in the docs. commit 74c245b82d49971094d731ef4442b9af14e53c15 Author: Jason Kankiewicz Date: Tue Mar 8 07:54:25 2022 -0800 Fix a typo in `AMmapSetObject()`'s documentation. commit b2a879ba4e23c884f2bf43d77e950c1ad8a34c7e Author: Jason Kankiewicz Date: Tue Mar 8 06:24:22 2022 -0800 Append missing EOF linefeed. commit fbf0f29b66ad9109081490754872d149d5575ac6 Merge: c56d54b5 bf1ae609 Author: Jason Kankiewicz Date: Tue Mar 8 01:08:12 2022 -0800 Merge branch 'c_api_exp' of https://github.com/automerge/automerge-rs into c_api_exp commit c56d54b565451c0fe65e86b658e094178579fd25 Author: Jason Kankiewicz Date: Tue Mar 8 01:07:11 2022 -0800 Added unit test cases for the new `AMmapSet*` functions by @orionz. Moved the unit test cases for the `AMmapSet*` functions into their own unit test suite. commit 7e59b5576034bc0404fe793758088fea2eae6330 Author: Jason Kankiewicz Date: Tue Mar 8 01:01:47 2022 -0800 Edited the Doxygen documentation. 
commit bf1ae6091318d005c49407e10a20f4822e41adc1 Author: Orion Henry Date: Mon Mar 7 11:59:22 2022 -0500 fmt commit e82a7cc78e166cc05e90138df040cdcdb4d83c13 Merge: a44e69d2 965c2d56 Author: Orion Henry Date: Mon Mar 7 11:55:32 2022 -0500 Merge pull request #299 from jkankiewicz/c_api_exp Enable unit testing of the C API commit 965c2d56c32068d3af07705aff7d0a4386393004 Author: Jason Kankiewicz Date: Mon Mar 7 06:37:36 2022 -0800 Enable unit testing of the C API. commit a44e69d2c72ff3e712c1109e3cf2f0d00f2af0c0 Author: Orion Henry Date: Sun Mar 6 14:00:46 2022 -0500 remove datatype mapset commit 88153c44e767b47fa1bb0bdd0d4bc5d43f2b6a7a Merge: 41512e9c c6194e97 Author: Orion Henry Date: Sun Mar 6 10:32:39 2022 -0500 Merge pull request #298 from jkankiewicz/rebase_c_api_exp Rebase the "c_api_exp" branch on the "experiment" branch commit c6194e973235ac43bcdf97b19b0274653b2e6b99 Merge: a2d745c8 41512e9c Author: Jason Kankiewicz Date: Sun Mar 6 01:09:56 2022 -0800 Merge branch 'c_api_exp' into rebase_c_api_exp commit a2d745c8d9c649d444339dc5e1df733af0a5bfec Author: Jason Kankiewicz Date: Sun Mar 6 00:44:37 2022 -0800 Replace the `utils::import_value` function with the `utils::import_scalar` function. Exclude `# Safety` comments from the documentation. 
commit 0681e28b4063988fada6d632366b471996d6d66a Author: Orion Henry Date: Thu Mar 3 16:04:17 2022 -0500 support new as_ref api commit 916e23fcc20aae9a27f17e9b98f267fac364d5bb Author: Orion Henry Date: Thu Mar 3 15:56:27 2022 -0500 fmt commit 71cd6a1f18796ab8f502d07d1d80784d1e3b9cd2 Author: Orion Henry Date: Thu Mar 3 15:54:38 2022 -0500 lock data at 64 bit - no c_long commit e00bd4c20191e86b9f1bd11be2bab31e3a23fc97 Author: Orion Henry Date: Thu Mar 3 15:27:55 2022 -0500 verbose commit 39d157c554a71e00978b1d9244e1c1cfbe7c24f3 Author: Orion Henry Date: Thu Mar 3 14:56:23 2022 -0500 clippy cleanup commit 7f650fb8e0e7173c72d192c60506e46abb41f92f Author: Jason Kankiewicz Date: Wed Feb 23 02:14:06 2022 -0800 Added Doxygen documentation generation. Renamed `AMDatatype` to `AmDataType`. Reorganized the `AmDataType` tags. Renamed `AMfree()` to `AMdestroy()`. Renamed `AMclone()` to `AMdup()`. commit b0b803eef8d07b5171b9ab736eae3e2f13d6158b Author: Orion Henry Date: Tue Feb 22 11:30:42 2022 -0500 get simple test passing commit cab9017ffa369365afb73c37275063a0c174b12e Author: Orion Henry Date: Wed Feb 9 15:50:44 2022 -0500 rework to return a queriable result commit a557e848f39d21b69d4b6c56434c1600b1cf6d94 Author: Jason Kankiewicz Date: Mon Feb 14 14:38:00 2022 -0800 Add a CI step to run the CMake build of the C bindings for @alexjg. commit c8c0c72f3bfced414cc443255b6a912c56b9ecfc Author: Jason Kankiewicz Date: Mon Feb 14 14:09:58 2022 -0800 Add CMake instructions for @orionz. commit fb62c4b02a0971bf814272a58c500dd5a31e694a Author: Jason Kankiewicz Date: Thu Feb 10 23:28:54 2022 -0800 Add CMake support. commit 7bc3bb6850de84eff9ad5a66eed2363c12ad7738 Author: Jason Kankiewicz Date: Thu Feb 10 22:49:53 2022 -0800 Replace *intptr_t in C function signatures. 
commit 60395a2db04d37d549cd9b0e5178af5f52ceb5f3 Author: Orion Henry Date: Sun Feb 6 18:59:19 2022 -0500 am_pop and am_pop_value commit b1e88047d22c4382aa58fc3fc337eb3d8a36140a Author: Orion Henry Date: Thu Feb 3 19:43:36 2022 -0500 break the ground commit 41512e9c78a649186646bcaa5d78632724bd8403 Author: Orion Henry Date: Thu Mar 3 16:04:17 2022 -0500 support new as_ref api commit bcee6a9623c0ce1289271df180368a81d50508d6 Merge: cf98f78d 9a89db3f Author: Orion Henry Date: Thu Mar 3 15:58:19 2022 -0500 Merge remote-tracking branch 'origin/experiment' into c_api_exp commit cf98f78dd172f214d3f417913470956ff998d9d5 Author: Orion Henry Date: Thu Mar 3 15:56:27 2022 -0500 fmt commit 3c1f449c5c024892658b37abbc74e19b746413e2 Author: Orion Henry Date: Thu Mar 3 15:54:38 2022 -0500 lock data at 64 bit - no c_long commit 2c2ec0b0c5d74bbe950f629a701ea1209f8ad4b6 Author: Orion Henry Date: Thu Mar 3 15:27:55 2022 -0500 verbose commit b72b9c989a60ca4bbcd7c47e1e6bf89356346a2f Author: Orion Henry Date: Thu Mar 3 14:56:23 2022 -0500 clippy cleanup commit 3ba28f91ccb5a7499a5442804ffa91001b01a3ef Author: Jason Kankiewicz Date: Wed Feb 23 02:14:06 2022 -0800 Added Doxygen documentation generation. Renamed `AMDatatype` to `AmDataType`. Reorganized the `AmDataType` tags. Renamed `AMfree()` to `AMdestroy()`. Renamed `AMclone()` to `AMdup()`. commit 8564e5b7531fb4e5194cf845b3d48e52a16827bd Author: Orion Henry Date: Tue Feb 22 11:30:42 2022 -0500 get simple test passing commit 60835e6ae7361199e03225dd33a8aa9c4614a20e Author: Orion Henry Date: Wed Feb 9 15:50:44 2022 -0500 rework to return a queriable result commit 89466d9e8c77b67b3526d56beb5f263aadb7fca0 Author: Jason Kankiewicz Date: Mon Feb 14 14:38:00 2022 -0800 Add a CI step to run the CMake build of the C bindings for @alexjg. commit e2485bd5fda8b0e290038c579b2080faadbc5026 Author: Jason Kankiewicz Date: Mon Feb 14 14:09:58 2022 -0800 Add CMake instructions for @orionz. 
commit b5cc7dd63dd76c816be1f23a0b91ba1f7327a32c Author: Jason Kankiewicz Date: Thu Feb 10 23:28:54 2022 -0800 Add CMake support. commit 685536f0cf9808b10af5efc9341f85acc2490fdf Author: Jason Kankiewicz Date: Thu Feb 10 22:49:53 2022 -0800 Replace *intptr_t in C function signatures. commit c1c6e7bb6615d168bcdad41f1621b17ff8ea7725 Author: Orion Henry Date: Sun Feb 6 18:59:19 2022 -0500 am_pop and am_pop_value commit e68c8d347e5e45451b6b2193542b9f2bdaf283ce Author: Orion Henry Date: Thu Feb 3 19:43:36 2022 -0500 break the ground --- automerge-c/Cargo.toml | 2 + automerge-c/cbindgen.toml | 36 +- automerge-c/cmake/file_regex_replace.cmake | 31 + automerge-c/cmake/file_touch.cmake | 33 + automerge-c/src/CMakeLists.txt | 40 +- automerge-c/src/doc.rs | 62 +- automerge-c/src/lib.rs | 786 +++++++++++++-------- automerge-c/src/result.rs | 195 ++++- automerge-c/src/utils.rs | 14 +- automerge-c/test/CMakeLists.txt | 10 +- automerge-c/test/amdoc_property_tests.c | 110 +++ automerge-c/test/amlistset_tests.c | 331 +++++---- automerge-c/test/ammapset_tests.c | 205 ++++-- automerge-c/test/group_state.c | 4 +- automerge-c/test/macro_utils.c | 23 + automerge-c/test/macro_utils.h | 23 + automerge-c/test/main.c | 22 +- automerge/src/value.rs | 6 + 18 files changed, 1379 insertions(+), 554 deletions(-) create mode 100644 automerge-c/cmake/file_regex_replace.cmake create mode 100644 automerge-c/cmake/file_touch.cmake create mode 100644 automerge-c/test/amdoc_property_tests.c create mode 100644 automerge-c/test/macro_utils.c create mode 100644 automerge-c/test/macro_utils.h diff --git a/automerge-c/Cargo.toml b/automerge-c/Cargo.toml index bcb07ce5..ed6c846f 100644 --- a/automerge-c/Cargo.toml +++ b/automerge-c/Cargo.toml @@ -13,7 +13,9 @@ doc = false [dependencies] automerge = { path = "../automerge" } +hex = "^0.4.3" libc = "^0.2" +smol_str = "^0.1.21" [build-dependencies] cbindgen = "^0.20" diff --git a/automerge-c/cbindgen.toml b/automerge-c/cbindgen.toml index 649d3204..aad1850d 
100644 --- a/automerge-c/cbindgen.toml +++ b/automerge-c/cbindgen.toml @@ -1,18 +1,3 @@ -header = """ -/** \\file - * All constants, functions and types in the Automerge library's C API. - */ - """ -include_guard = "automerge_h" -autogen_warning = "/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */" -language = "C" -includes = [] -sys_includes = ["stddef.h", "stdint.h", "stdbool.h"] -no_includes = true -line_length = 140 -documentation = true -documentation_style = "doxy" - after_includes = """\n /** * \\defgroup enumerations Public Enumerations @@ -26,14 +11,29 @@ after_includes = """\n */ #define AM_ROOT NULL """ +autogen_warning = "/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */" +documentation = true +documentation_style = "doxy" +header = """ +/** \\file + * All constants, functions and types in the Automerge library's C API. + */ + """ +include_guard = "automerge_h" +includes = [] +language = "C" +line_length = 140 +no_includes = true +style = "both" +sys_includes = ["stdbool.h", "stddef.h", "stdint.h"] usize_is_size_t = true [enum] -rename_variants = "ScreamingSnakeCase" -enum_class = true -prefix_with_name = true derive_const_casts = true +enum_class = true must_use = "MUST_USE_ENUM" +prefix_with_name = true +rename_variants = "ScreamingSnakeCase" [export] item_types = ["enums", "structs", "opaque", "constants", "functions"] diff --git a/automerge-c/cmake/file_regex_replace.cmake b/automerge-c/cmake/file_regex_replace.cmake new file mode 100644 index 00000000..27306458 --- /dev/null +++ b/automerge-c/cmake/file_regex_replace.cmake @@ -0,0 +1,31 @@ +cmake_minimum_required(VERSION 3.18 FATAL_ERROR) + +if(NOT DEFINED MATCH_REGEX) + message(FATAL_ERROR "Variable \"MATCH_REGEX\" is not defined.") +elseif(NOT DEFINED REPLACE_EXPR) + message(FATAL_ERROR "Variable \"REPLACE_EXPR\" is not defined.") +elseif(${CMAKE_ARGC} LESS 7) + message(FATAL_ERROR "Too few arguments.") +elseif(${CMAKE_ARGC} 
GREATER 8) + message(FATAL_ERROR "Too many arguments.") +elseif(NOT EXISTS ${CMAKE_ARGV6}) + message(FATAL_ERROR "Input file \"${CMAKE_ARGV6}\" not found.") +endif() + +message(STATUS "Replacing \"${MATCH_REGEX}\" with \"${REPLACE_EXPR}\" in \"${CMAKE_ARGV6}\"...") + +file(READ ${CMAKE_ARGV6} INPUT_STRING) + +string(REGEX REPLACE "${MATCH_REGEX}" "${REPLACE_EXPR}" OUTPUT_STRING "${INPUT_STRING}") + +if(DEFINED CMAKE_ARGV7) + set(OUTPUT_FILE "${CMAKE_ARGV7}") +else() + set(OUTPUT_FILE "${CMAKE_ARGV6}") +endif() + +if(NOT "${OUTPUT_STRING}" STREQUAL "${INPUT_STRING}") + file(WRITE ${OUTPUT_FILE} "${OUTPUT_STRING}") + + message(STATUS "Created/updated \"${OUTPUT_FILE}\".") +endif() diff --git a/automerge-c/cmake/file_touch.cmake b/automerge-c/cmake/file_touch.cmake new file mode 100644 index 00000000..087d59b6 --- /dev/null +++ b/automerge-c/cmake/file_touch.cmake @@ -0,0 +1,33 @@ +cmake_minimum_required(VERSION 3.18 FATAL_ERROR) + +if(NOT DEFINED CONDITION) + message(FATAL_ERROR "Variable \"CONDITION\" is not defined.") +elseif(${CMAKE_ARGC} LESS 7) + message(FATAL_ERROR "Too few arguments.") +elseif(${CMAKE_ARGC} GREATER 7) + message(FATAL_ERROR "Too many arguments.") +elseif(NOT EXISTS ${CMAKE_ARGV6}) + message(FATAL_ERROR "File \"${CMAKE_ARGV6}\" not found.") +elseif(IS_DIRECTORY "${CMAKE_ARG6}") + message(FATAL_ERROR "Directory \"${CMAKE_ARG6}\" can't be touched.") +endif() + +message(STATUS "Touching \"${CMAKE_ARGV6}\" if ${CONDITION} \"${CMAKE_ARGV5}\"...") + +if(CONDITION STREQUAL "EXISTS") + if(EXISTS "${CMAKE_ARGV5}") + set(DO_IT TRUE) + endif() +elseif((CONDITION STREQUAL "NOT_EXISTS") OR (CONDITION STREQUAL "!EXISTS")) + if(NOT EXISTS "${CMAKE_ARGV5}") + set(DO_IT TRUE) + endif() +else() + message(FATAL_ERROR "Unexpected condition \"${CONDITION}\".") +endif() + +if(DO_IT) + file(TOUCH_NOCREATE "${CMAKE_ARGV6}") + + message(STATUS "Touched \"${CMAKE_ARGV6}\".") +endif() diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt index 
78f4e976..11cf5d96 100644 --- a/automerge-c/src/CMakeLists.txt +++ b/automerge-c/src/CMakeLists.txt @@ -27,8 +27,7 @@ set(CARGO_CURRENT_BINARY_DIR "${CARGO_TARGET_DIR}/${CARGO_BUILD_TYPE}") set( CARGO_OUTPUT - # \note cbindgen won't regenerate a missing header so it can't be cleaned. - #${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h + ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX} ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX} ) @@ -41,11 +40,19 @@ endif() add_custom_command( OUTPUT ${CARGO_OUTPUT} + COMMAND + # \note cbindgen won't regenerate its output header file after it's + # been removed but it will after its configuration file has been + # updated. + ${CMAKE_COMMAND} -DCONDITION=NOT_EXISTS -P ${CMAKE_SOURCE_DIR}/cmake/file_touch.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h ${CMAKE_SOURCE_DIR}/cbindgen.toml COMMAND ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} ${CARGO_CMD} build ${CARGO_FLAG} MAIN_DEPENDENCY lib.rs DEPENDS + doc.rs + result.rs + utils.rs ${CMAKE_SOURCE_DIR}/build.rs ${CMAKE_SOURCE_DIR}/Cargo.toml ${CMAKE_SOURCE_DIR}/cbindgen.toml @@ -56,14 +63,31 @@ add_custom_command( VERBATIM ) -# \note This target is only necessary because cbindgen won't allow the -# generated header to be listed in the Cargo command's output, being -# another target's source file would've been enough otherwise. add_custom_target( ${LIBRARY_NAME}_artifacts DEPENDS ${CARGO_OUTPUT} ) +# \note cbindgen's naming behavior isn't fully configurable. +add_custom_command( + TARGET ${LIBRARY_NAME}_artifacts + POST_BUILD + COMMAND + # Compensate for cbindgen's variant struct naming. + ${CMAKE_COMMAND} -DMATCH_REGEX=AM\([^_]+_[^_]+\)_Body -DREPLACE_EXPR=AM\\1 -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h + COMMAND + # Compensate for cbindgen's union tag enum type naming. 
+ ${CMAKE_COMMAND} -DMATCH_REGEX=AM\([^_]+\)_Tag -DREPLACE_EXPR=AM\\1Variant -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h + COMMAND + # Compensate for cbindgen's translation of consecutive uppercase letters to "ScreamingSnakeCase". + ${CMAKE_COMMAND} -DMATCH_REGEX=A_M\([^_]+\)_ -DREPLACE_EXPR=AM_\\1_ -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h + WORKING_DIRECTORY + ${CMAKE_SOURCE_DIR} + COMMENT + "Compensating for hard-coded cbindgen naming behaviors..." + VERBATIM +) + if(BUILD_SHARED_LIBS) if(WIN32) set(LIBRARY_DESTINATION "${CMAKE_INSTALL_BINDIR}") @@ -102,8 +126,6 @@ endif() add_library(${LIBRARY_NAME} ${LIBRARY_TYPE} IMPORTED GLOBAL) -add_dependencies(${LIBRARY_NAME} ${LIBRARY_NAME}_artifacts) - set_target_properties( ${LIBRARY_NAME} PROPERTIES @@ -186,9 +208,13 @@ if(DOXYGEN_FOUND) ${LIBRARY_NAME}_docs "${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h" "${CMAKE_SOURCE_DIR}/README.md" + USE_STAMP_FILE WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} COMMENT "Producing documentation with Doxygen..." ) + # \note A Doxygen input file isn't a file-level dependency so the Doxygen + # command must instead depend upon a target that outputs the file or + # it will just output an error message when it can't be found. add_dependencies(${LIBRARY_NAME}_docs ${LIBRARY_NAME}_artifacts) endif() diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index b8432271..ab38237a 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -1,14 +1,66 @@ use automerge as am; +use std::collections::BTreeSet; use std::ops::{Deref, DerefMut}; +use crate::result::AMobjId; +use automerge::transaction::Transactable; + /// \struct AMdoc /// \brief A JSON-like CRDT. 
#[derive(Clone)] -pub struct AMdoc(am::AutoCommit); +pub struct AMdoc { + body: am::AutoCommit, + obj_ids: BTreeSet, +} impl AMdoc { - pub fn create(handle: am::AutoCommit) -> AMdoc { - AMdoc(handle) + pub fn new(body: am::AutoCommit) -> Self { + Self { + body: body, + obj_ids: BTreeSet::new(), + } + } + + pub fn insert_object( + &mut self, + obj: &am::ObjId, + index: usize, + value: am::ObjType, + ) -> Result<&AMobjId, am::AutomergeError> { + match self.body.insert_object(obj, index, value) { + Ok(ex_id) => { + let obj_id = AMobjId::new(ex_id); + self.obj_ids.insert(obj_id.clone()); + match self.obj_ids.get(&obj_id) { + Some(obj_id) => Ok(obj_id), + None => Err(am::AutomergeError::Fail), + } + } + Err(e) => Err(e), + } + } + + pub fn set_object, P: Into>( + &mut self, + obj: O, + prop: P, + value: am::ObjType, + ) -> Result<&AMobjId, am::AutomergeError> { + match self.body.set_object(obj, prop, value) { + Ok(ex_id) => { + let obj_id = AMobjId::new(ex_id); + self.obj_ids.insert(obj_id.clone()); + match self.obj_ids.get(&obj_id) { + Some(obj_id) => Ok(obj_id), + None => Err(am::AutomergeError::Fail), + } + } + Err(e) => Err(e), + } + } + + pub fn drop_obj_id(&mut self, obj_id: &AMobjId) -> bool { + self.obj_ids.remove(obj_id) } } @@ -16,13 +68,13 @@ impl Deref for AMdoc { type Target = am::AutoCommit; fn deref(&self) -> &Self::Target { - &self.0 + &self.body } } impl DerefMut for AMdoc { fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 + &mut self.body } } diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs index a8fde724..50d1f9ee 100644 --- a/automerge-c/src/lib.rs +++ b/automerge-c/src/lib.rs @@ -1,5 +1,7 @@ use automerge as am; -use std::{ffi::CStr, os::raw::c_char}; +use hex; +use smol_str::SmolStr; +use std::{ffi::CStr, ffi::CString, os::raw::c_char}; mod doc; mod result; @@ -7,13 +9,13 @@ mod utils; use automerge::transaction::Transactable; use doc::AMdoc; -use result::AMresult; +use result::{AMobjId, AMresult, AMvalue}; /// \ingroup 
enumerations -/// \enum AmObjType +/// \enum AMobjType /// \brief The type of an object value. #[repr(u8)] -pub enum AmObjType { +pub enum AMobjType { /// A list. List = 1, /// A key-value map. @@ -22,34 +24,29 @@ pub enum AmObjType { Text, } -impl From for am::ObjType { - fn from(o: AmObjType) -> Self { +impl From for am::ObjType { + fn from(o: AMobjType) -> Self { match o { - AmObjType::Map => am::ObjType::Map, - AmObjType::List => am::ObjType::List, - AmObjType::Text => am::ObjType::Text, + AMobjType::Map => am::ObjType::Map, + AMobjType::List => am::ObjType::List, + AMobjType::Text => am::ObjType::Text, } } } /// \ingroup enumerations -/// \enum AmStatus +/// \enum AMstatus /// \brief The status of an API call. #[derive(Debug)] #[repr(u8)] -pub enum AmStatus { - /// The result is one or more changes. - ChangesOk = 1, - /// The command was successful. - CommandOk, - /// The result was an error. +pub enum AMstatus { + /// Success. + /// \note This tag is unalphabetized so that `0` indicates success. + Ok, + /// Failure due to an error. Error, - /// The result is invalid. + /// Failure due to an invalid result. InvalidResult, - /// The result is an object ID. - ObjOk, - /// The result is one or more values. - ValuesOk, } unsafe fn to_str(c: *const c_char) -> String { @@ -66,44 +63,33 @@ macro_rules! to_doc { }}; } -macro_rules! to_obj { +macro_rules! to_obj_id { ($handle:expr) => {{ match $handle.as_ref() { - Some(b) => b, - None => &AMobj(am::ObjId::Root), + Some(am_obj_id) => am_obj_id, + None => &am::ROOT, } }}; } -fn to_result>(r: R) -> *mut AMresult { +fn to_result<'a, R: Into>>(r: R) -> *mut AMresult<'a> { (r.into()).into() } -/// \struct AMobj -/// \brief An object's unique identifier. -#[derive(Clone)] -pub struct AMobj(am::ObjId); - -impl AsRef for AMobj { - fn as_ref(&self) -> &am::ObjId { - &self.0 - } -} - /// \memberof AMdoc /// \brief Allocates a new `AMdoc` struct and initializes it with defaults. /// /// \return A pointer to an `AMdoc` struct. 
/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMdestroy()`. +/// with `AMfreeDoc()`. #[no_mangle] -pub extern "C" fn AMcreate() -> *mut AMdoc { - AMdoc::create(am::AutoCommit::new()).into() +pub extern "C" fn AMallocDoc() -> *mut AMdoc { + AMdoc::new(am::AutoCommit::new()).into() } /// \memberof AMdoc /// \brief Deallocates the storage for an `AMdoc` struct previously -/// allocated by `AMcreate()` or `AMdup()`. +/// allocated by `AMallocDoc()` or `AMdup()`. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \pre \p doc must be a valid address. @@ -112,7 +98,7 @@ pub extern "C" fn AMcreate() -> *mut AMdoc { /// # Safety /// doc must be a pointer to a valid AMdoc #[no_mangle] -pub unsafe extern "C" fn AMdestroy(doc: *mut AMdoc) { +pub unsafe extern "C" fn AMfreeDoc(doc: *mut AMdoc) { if !doc.is_null() { let doc: AMdoc = *Box::from_raw(doc); drop(doc) @@ -127,7 +113,7 @@ pub unsafe extern "C" fn AMdestroy(doc: *mut AMdoc) { /// \return A pointer to an `AMdoc` struct. /// \pre \p doc must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMdestroy()`. +/// with `AMfreeDoc()`. /// \internal /// /// # Safety @@ -141,698 +127,876 @@ pub unsafe extern "C" fn AMdup(doc: *mut AMdoc) -> *mut AMdoc { } /// \memberof AMdoc -/// \brief Set a configuration property of an `AMdoc` struct. +/// \brief Gets an `AMdoc` struct's actor ID value as an array of bytes. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] key A configuration property's UTF-8 string key. -/// \param[in] value A configuration property's UTF-8 string value or `NULL`. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing an `AMbyteSpan`. /// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. 
/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. /// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// key and value must be valid c strings #[no_mangle] -pub unsafe extern "C" fn AMconfig( - doc: *mut AMdoc, - key: *const c_char, - value: *const c_char, -) -> *mut AMresult { +pub unsafe extern "C" fn AMgetActor<'a>(doc: *mut AMdoc) -> *mut AMresult<'a> { let doc = to_doc!(doc); - let key = to_str(key); - match key.as_str() { - "actor" => { - let actor = to_str(value); - if let Ok(actor) = actor.try_into() { - doc.set_actor(actor); - AMresult::Ok.into() - } else { - AMresult::err(&format!("Invalid actor '{}'", to_str(value))).into() - } - } - k => AMresult::err(&format!("Invalid config key '{}'", k)).into(), - } + to_result(Ok(doc.get_actor().clone())) } /// \memberof AMdoc -/// \brief Get an `AMdoc` struct's actor ID value as a hexadecimal string. +/// \brief Gets an `AMdoc` struct's actor ID value as a hexadecimal string. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing a UTF-8 string value. +/// \return A pointer to an `AMresult` struct containing a `char const*`. /// \pre \p doc must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. /// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc #[no_mangle] -pub unsafe extern "C" fn AMgetActor(_doc: *mut AMdoc) -> *mut AMresult { - unimplemented!() +pub unsafe extern "C" fn AMgetActorHex<'a>(doc: *mut AMdoc) -> *mut AMresult<'a> { + let doc = to_doc!(doc); + let hex_str = doc.get_actor().to_hex_string(); + let value = am::Value::Scalar(am::ScalarValue::Str(SmolStr::new(hex_str))); + to_result(Ok(value)) +} + +/// \memberof AMdoc +/// \brief Puts an array of bytes as the actor ID value of an `AMdoc` struct. . 
+/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] value A pointer to an array of bytes. +/// \param[in] count The number of bytes to copy from \p value. +/// \return A pointer to an `AMresult` struct containing nothing. +/// \pre \p doc must be a valid address. +/// \pre \p value must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p value. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeResult()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// value must be a byte array of length `count` +#[no_mangle] +pub unsafe extern "C" fn AMsetActor<'a>( + doc: *mut AMdoc, + value: *const u8, + count: usize, +) -> *mut AMresult<'a> { + let doc = to_doc!(doc); + let slice = std::slice::from_raw_parts(value, count); + doc.set_actor(am::ActorId::from(slice)); + to_result(Ok(())) +} + +/// \memberof AMdoc +/// \brief Puts a hexadecimal string as the actor ID value of an `AMdoc` struct. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] hex_str A string of hexadecimal characters. +/// \return A pointer to an `AMresult` struct containing nothing. +/// \pre \p doc must be a valid address. +/// \pre \p hex_str must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeResult()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// hex_str must be a null-terminated array of `c_char`. +#[no_mangle] +pub unsafe extern "C" fn AMsetActorHex<'a>( + doc: *mut AMdoc, + hex_str: *const c_char, +) -> *mut AMresult<'a> { + let doc = to_doc!(doc); + let slice = std::slice::from_raw_parts(hex_str as *const u8, libc::strlen(hex_str)); + to_result(match hex::decode(slice) { + Ok(vec) => { + doc.set_actor(vec.into()); + Ok(()) + } + Err(_) => Err(am::AutomergeError::Decoding), + }) } /// \memberof AMresult -/// \brief Get the status code of an `AMresult` struct. 
+/// \brief Gets the status code of an `AMresult` struct. /// /// \param[in] result A pointer to an `AMresult` struct. -/// \return An `AmStatus` enum tag. +/// \return An `AMstatus` enum tag. /// \pre \p result must be a valid address. /// \internal /// /// # Safety /// result must be a pointer to a valid AMresult #[no_mangle] -pub unsafe extern "C" fn AMresultStatus(result: *mut AMresult) -> AmStatus { +pub unsafe extern "C" fn AMresultStatus(result: *mut AMresult) -> AMstatus { match result.as_mut() { - Some(AMresult::Ok) => AmStatus::CommandOk, - Some(AMresult::Error(_)) => AmStatus::Error, - Some(AMresult::ObjId(_)) => AmStatus::ObjOk, - Some(AMresult::Values(_)) => AmStatus::ValuesOk, - Some(AMresult::Changes(_)) => AmStatus::ChangesOk, - None => AmStatus::InvalidResult, + Some(AMresult::Error(_)) => AMstatus::Error, + None => AMstatus::InvalidResult, + _ => AMstatus::Ok, } } +/// \memberof AMresult +/// \brief Gets the size of an `AMresult` struct. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \return The count of values in \p result. +/// \pre \p result must be a valid address. +#[no_mangle] +pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { + if let Some(result) = result.as_mut() { + match result { + AMresult::ActorId(_) | AMresult::ObjId(_) => 1, + AMresult::Changes(changes) => changes.len(), + AMresult::Error(_) | AMresult::Nothing => 0, + AMresult::Scalars(vec, _) => vec.len(), + } + } else { + 0 + } +} + +/// \memberof AMresult +/// \brief Gets a value from an `AMresult` struct. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \param[in] index The index of a value. +/// \return An `AMvalue` struct. +/// \pre \p result must be a valid address. +/// \pre `0 <=` \p index `<=` AMresultSize() for \p result. 
+/// \internal +/// +/// # Safety +/// result must be a pointer to a valid AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult<'a>, index: usize) -> AMvalue<'a> { + let mut value = AMvalue::Nothing; + if let Some(result) = result.as_mut() { + match result { + AMresult::ActorId(actor_id) => { + if index == 0 { + value = AMvalue::ActorId(actor_id.into()); + } + } + AMresult::Changes(_) => {} + AMresult::Error(_) => {} + AMresult::ObjId(obj_id) => { + if index == 0 { + value = AMvalue::ObjId(obj_id); + } + } + AMresult::Nothing => (), + AMresult::Scalars(vec, hosted_str) => { + if let Some(element) = vec.get(index) { + match element { + am::Value::Scalar(scalar) => match scalar { + am::ScalarValue::Boolean(flag) => { + value = AMvalue::Boolean(*flag as i8); + } + am::ScalarValue::Bytes(bytes) => { + value = AMvalue::Bytes(bytes.into()); + } + am::ScalarValue::Counter(counter) => { + value = AMvalue::Counter(counter.into()); + } + am::ScalarValue::F64(float) => { + value = AMvalue::F64(*float); + } + am::ScalarValue::Int(int) => { + value = AMvalue::Int(*int); + } + am::ScalarValue::Null => { + value = AMvalue::Null; + } + am::ScalarValue::Str(smol_str) => { + *hosted_str = CString::new(smol_str.to_string()).ok(); + if let Some(c_str) = hosted_str { + value = AMvalue::Str(c_str.as_ptr()); + } + } + am::ScalarValue::Timestamp(timestamp) => { + value = AMvalue::Timestamp(*timestamp); + } + am::ScalarValue::Uint(uint) => { + value = AMvalue::Uint(*uint); + } + }, + // \todo Confirm that an object value should be ignored + // when there's no object ID variant. + am::Value::Object(_) => (), + } + } + } + } + }; + value +} + /// \memberof AMdoc -/// \brief Set a map object's key to a signed integer value. +/// \brief Puts a signed integer as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. 
+/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. /// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapSetInt( +pub unsafe extern "C" fn AMmapPutInt<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, key: *const c_char, value: i64, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - to_result(doc.put(to_obj!(obj), to_str(key), value)) + to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) } /// \memberof AMdoc -/// \brief Set a map object's key to an unsigned integer value. +/// \brief Puts an unsigned integer as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj. /// \param[in] value A 64-bit unsigned integer. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapSetUint( +pub unsafe extern "C" fn AMmapPutUint<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, key: *const c_char, value: u64, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - to_result(doc.put(to_obj!(obj), to_str(key), value)) + to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) } /// \memberof AMdoc -/// \brief Set a map object's key to a UTF-8 string value. +/// \brief Puts a UTF-8 string as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj. /// \param[in] value A UTF-8 string. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \pre \p value must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapSetStr( +pub unsafe extern "C" fn AMmapPutStr<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, key: *const c_char, value: *const c_char, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - to_result(doc.put(to_obj!(obj), to_str(key), to_str(value))) + to_result(doc.put(to_obj_id!(obj_id), to_str(key), to_str(value))) } /// \memberof AMdoc -/// \brief Set a map object's key to a byte array value. +/// \brief Puts an array of bytes as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj. /// \param[in] value A pointer to an array of bytes. /// \param[in] count The number of bytes to copy from \p value. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \pre \p value must be a valid address. /// \pre `0 <=` \p count `<=` length of \p value. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL /// value must be a byte array of length `count` /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapSetBytes( +pub unsafe extern "C" fn AMmapPutBytes<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, key: *const c_char, value: *const u8, count: usize, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); let slice = std::slice::from_raw_parts(value, count); let mut vec = Vec::new(); vec.extend_from_slice(slice); - to_result(doc.put(to_obj!(obj), to_str(key), vec)) + to_result(doc.put(to_obj_id!(obj_id), to_str(key), vec)) } /// \memberof AMdoc -/// \brief Set a map object's key to a float value. +/// \brief Puts a float as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj. /// \param[in] value A 64-bit float. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapSetF64( +pub unsafe extern "C" fn AMmapPutF64<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, key: *const c_char, value: f64, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - to_result(doc.put(to_obj!(obj), to_str(key), value)) + to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) } /// \memberof AMdoc -/// \brief Set a map object's key to a CRDT counter value. +/// \brief Puts a CRDT counter as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapSetCounter( +pub unsafe extern "C" fn AMmapPutCounter<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, key: *const c_char, value: i64, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); to_result(doc.put( - to_obj!(obj), + to_obj_id!(obj_id), to_str(key), am::ScalarValue::Counter(value.into()), )) } /// \memberof AMdoc -/// \brief Set a map object's key to a Lamport timestamp value. +/// \brief Puts a Lamport timestamp as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapSetTimestamp( +pub unsafe extern "C" fn AMmapPutTimestamp<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, key: *const c_char, value: i64, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - to_result(doc.put(to_obj!(obj), to_str(key), am::ScalarValue::Timestamp(value))) + to_result(doc.put( + to_obj_id!(obj_id), + to_str(key), + am::ScalarValue::Timestamp(value), + )) } /// \memberof AMdoc -/// \brief Set a map object's key to a null value. +/// \brief Puts null as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// \warning To avoid a memory leak, the returned p ointer must be deallocated +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapSetNull( +pub unsafe extern "C" fn AMmapPutNull<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, key: *const c_char, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - to_result(doc.put(to_obj!(obj), to_str(key), ())) + to_result(doc.put(to_obj_id!(obj_id), to_str(key), ())) } /// \memberof AMdoc -/// \brief Set a map object's key to an empty object value. +/// \brief Puts an empty object as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj. -/// \param[in] obj_type An `AmObjType` enum tag. -/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobj` struct. +/// \param[in] obj_type An `AMobjIdType` enum tag. +/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapSetObject( +pub unsafe extern "C" fn AMmapPutObject<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, key: *const c_char, - obj_type: AmObjType, -) -> *mut AMresult { + obj_type: AMobjType, +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - to_result(doc.put_object(to_obj!(obj), to_str(key), obj_type.into())) + to_result(doc.put_object(to_obj_id!(obj_id), to_str(key), obj_type.into())) } /// \memberof AMdoc -/// \brief Set a list object's index to a byte array value. +/// \brief Gets the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. /// \param[in] index An index within the list object identified by \p obj. +/// \return A pointer to an `AMresult` struct. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeResult()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistGet<'a>( + doc: *mut AMdoc, + obj_id: *mut AMobjId, + index: usize, +) -> *mut AMresult<'a> { + let doc = to_doc!(doc); + to_result(doc.value(to_obj_id!(obj_id), index)) +} + +/// \memberof AMdoc +/// \brief Gets the value for a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj. 
+/// \return A pointer to an `AMresult` struct. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeResult()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapGet<'a>( + doc: *mut AMdoc, + obj_id: *mut AMobjId, + key: *const c_char, +) -> *mut AMresult<'a> { + let doc = to_doc!(doc); + to_result(doc.value(to_obj_id!(obj_id), to_str(key))) +} + +/// \memberof AMdoc +/// \brief Puts an array of bytes as the value at an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj. /// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. /// \param[in] value A pointer to an array of bytes. /// \param[in] count The number of bytes to copy from \p value. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. /// \pre \p value must be a valid address. /// \pre `0 <=` \p count `<=` length of \p value. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL /// value must be a byte array of length `count` /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMlistSetBytes( +pub unsafe extern "C" fn AMlistPutBytes<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, index: usize, insert: bool, value: *const u8, count: usize, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - let obj = to_obj!(obj); + let obj_id = to_obj_id!(obj_id); let slice = std::slice::from_raw_parts(value, count); let mut vec = Vec::new(); vec.extend_from_slice(slice); to_result(if insert { - doc.insert(obj, index, vec) + doc.insert(obj_id, index, vec) } else { - doc.put(obj, index, vec) + doc.put(obj_id, index, vec) }) } /// \memberof AMdoc -/// \brief Set a list object's index to a CRDT counter value. +/// \brief Puts a CRDT counter as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. -/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj. /// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistSetCounter( +pub unsafe extern "C" fn AMlistPutCounter<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, index: usize, insert: bool, value: i64, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - let obj = to_obj!(obj); + let obj_id = to_obj_id!(obj_id); let value = am::ScalarValue::Counter(value.into()); to_result(if insert { - doc.insert(obj, index, value) + doc.insert(obj_id, index, value) } else { - doc.put(obj, index, value) + doc.put(obj_id, index, value) }) } /// \memberof AMdoc -/// \brief Set a list object's index to a float value. +/// \brief Puts a float as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. -/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj. /// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. /// \param[in] value A 64-bit float. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistSetF64( +pub unsafe extern "C" fn AMlistPutF64<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, index: usize, insert: bool, value: f64, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - let obj = to_obj!(obj); + let obj_id = to_obj_id!(obj_id); to_result(if insert { - doc.insert(obj, index, value) + doc.insert(obj_id, index, value) } else { - doc.put(obj, index, value) + doc.put(obj_id, index, value) }) } /// \memberof AMdoc -/// \brief Set a list object's index to a signed integer value. +/// \brief Puts a signed integer as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. -/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj. /// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistSetInt( +pub unsafe extern "C" fn AMlistPutInt<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, index: usize, insert: bool, value: i64, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - let obj = to_obj!(obj); + let obj_id = to_obj_id!(obj_id); to_result(if insert { - doc.insert(obj, index, value) + doc.insert(obj_id, index, value) } else { - doc.put(obj, index, value) + doc.put(obj_id, index, value) }) } /// \memberof AMdoc -/// \brief Set a list object's index to a null value. +/// \brief Puts null as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. -/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj. /// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistSetNull( +pub unsafe extern "C" fn AMlistPutNull<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, index: usize, insert: bool, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - let obj = to_obj!(obj); + let obj_id = to_obj_id!(obj_id); let value = (); to_result(if insert { - doc.insert(obj, index, value) + doc.insert(obj_id, index, value) } else { - doc.put(obj, index, value) + doc.put(obj_id, index, value) }) } /// \memberof AMdoc -/// \brief Set a list object's index to an empty object value. +/// \brief Puts an empty object as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. -/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj. /// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. -/// \param[in] obj_type An `AmObjType` enum tag. -/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobj` struct. +/// \param[in] obj_type An `AMobjIdType` enum tag. +/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistSetObject( +pub unsafe extern "C" fn AMlistPutObject<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, index: usize, insert: bool, - obj_type: AmObjType, -) -> *mut AMresult { + obj_type: AMobjType, +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - let obj = to_obj!(obj); + let obj_id = to_obj_id!(obj_id); let value = obj_type.into(); to_result(if insert { - doc.insert_object(obj, index, value) + doc.insert_object(&obj_id, index, value) } else { - doc.put_object(obj, index, value) + doc.put_object(&obj_id, index, value) }) } /// \memberof AMdoc -/// \brief Set a list object's index to a UTF-8 string value. +/// \brief Puts a UTF-8 string as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. -/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj. /// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. /// \param[in] value A UTF-8 string. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. /// \pre \p value must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL /// value must be a pointer to a valid address. #[no_mangle] -pub unsafe extern "C" fn AMlistSetStr( +pub unsafe extern "C" fn AMlistPutStr<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, index: usize, insert: bool, value: *const c_char, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - let obj = to_obj!(obj); + let obj_id = to_obj_id!(obj_id); let value = to_str(value); to_result(if insert { - doc.insert(obj, index, value) + doc.insert(obj_id, index, value) } else { - doc.put(obj, index, value) + doc.put(obj_id, index, value) }) } /// \memberof AMdoc -/// \brief Set a list object's index to a Lamport timestamp value. +/// \brief Puts a Lamport timestamp as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. -/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj. /// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistSetTimestamp( +pub unsafe extern "C" fn AMlistPutTimestamp<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, index: usize, insert: bool, value: i64, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - let obj = to_obj!(obj); + let obj_id = to_obj_id!(obj_id); let value = am::ScalarValue::Timestamp(value); to_result(if insert { - doc.insert(obj, index, value) + doc.insert(obj_id, index, value) } else { - doc.put(obj, index, value) + doc.put(obj_id, index, value) }) } /// \memberof AMdoc -/// \brief Set a list object's index to an unsigned integer value. +/// \brief Puts an unsigned integer as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj A pointer to an `AMobj` struct or `NULL`. -/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj. /// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. /// \param[in] value A 64-bit unsigned integer. -/// \return A pointer to an `AMresult` struct containing no value. +/// \return A pointer to an `AMresult` struct containing nothing. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. /// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMclear()`. +/// with `AMfreeResult()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobj or NULL +/// obj must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistSetUint( +pub unsafe extern "C" fn AMlistPutUint<'a>( doc: *mut AMdoc, - obj: *mut AMobj, + obj_id: *mut AMobjId, index: usize, insert: bool, value: u64, -) -> *mut AMresult { +) -> *mut AMresult<'a> { let doc = to_doc!(doc); - let obj = to_obj!(obj); + let obj_id = to_obj_id!(obj_id); to_result(if insert { - doc.insert(obj, index, value) + doc.insert(obj_id, index, value) } else { - doc.put(obj, index, value) + doc.put(obj_id, index, value) }) } -/// \memberof AMresult -/// \brief Get an `AMresult` struct's `AMobj` struct value. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \return A pointer to an `AMobj` struct. -/// \pre \p result must be a valid address. -/// \internal -/// -/// # Safety -/// result must be a pointer to a valid AMresult -#[no_mangle] -pub unsafe extern "C" fn AMgetObj(_result: *mut AMresult) -> *mut AMobj { - unimplemented!() -} - /// \memberof AMresult /// \brief Deallocates the storage for an `AMresult` struct. /// @@ -843,7 +1007,7 @@ pub unsafe extern "C" fn AMgetObj(_result: *mut AMresult) -> *mut AMobj { /// # Safety /// result must be a pointer to a valid AMresult #[no_mangle] -pub unsafe extern "C" fn AMclear(result: *mut AMresult) { +pub unsafe extern "C" fn AMfreeResult(result: *mut AMresult) { if !result.is_null() { let result: AMresult = *Box::from_raw(result); drop(result) @@ -851,7 +1015,7 @@ pub unsafe extern "C" fn AMclear(result: *mut AMresult) { } /// \memberof AMresult -/// \brief Get an `AMresult` struct's error message string. +/// \brief Gets an `AMresult` struct's error message string. /// /// \param[in] result A pointer to an `AMresult` struct. /// \return A UTF-8 string value or `NULL`. 
@@ -867,3 +1031,43 @@ pub unsafe extern "C" fn AMerrorMessage(result: *mut AMresult) -> *const c_char _ => std::ptr::null::(), } } + +/// \memberof AMdoc +/// \brief Gets the size of an `AMobjId` struct. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \return The count of values in \p obj. +/// \pre \p doc must be a valid address. +#[no_mangle] +pub unsafe extern "C" fn AMobjSize(doc: *const AMdoc, obj_id: *const AMobjId) -> usize { + if let Some(doc) = doc.as_ref() { + doc.length(to_obj_id!(obj_id)) + } else { + 0 + } +} + +/// \memberof AMdoc +/// \brief Deallocates the storage for an `AMobjId` struct. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct. +/// \pre \p doc must be a valid address. +/// \pre \p obj_id must be a valid address. +/// \note An `AMobjId` struct is automatically deallocated along with its owning +/// `AMdoc` struct, this function just enables an `AMobjId` struct to be +/// deallocated sooner than that. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMresult +/// obj_id must be a pointer to a valid AMobjId +#[no_mangle] +pub unsafe extern "C" fn AMfreeObjId(doc: *mut AMdoc, obj_id: *const AMobjId) { + if let Some(doc) = doc.as_mut() { + if let Some(obj_id) = obj_id.as_ref() { + doc.drop_obj_id(obj_id); + }; + }; +} diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index e51f251b..a88cda15 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -1,35 +1,200 @@ use automerge as am; use std::ffi::CString; -/// \struct AMresult -/// \brief A container of result codes, messages and values. -pub enum AMresult { - Ok, - ObjId(am::ObjId), - Values(Vec>), - Changes(Vec), - Error(CString), +/// \struct AMobjId +/// \brief An object's unique identifier. 
+#[derive(Clone, Eq, Ord, PartialEq, PartialOrd)] +pub struct AMobjId(am::ObjId); + +impl AMobjId { + pub fn new(obj_id: am::ObjId) -> Self { + Self(obj_id) + } } -impl AMresult { +impl AsRef for AMobjId { + fn as_ref(&self) -> &am::ObjId { + &self.0 + } +} + +/// \memberof AMvalue +/// \struct AMbyteSpan +/// \brief A contiguous sequence of bytes. +/// +#[repr(C)] +pub struct AMbyteSpan { + /// A pointer to the byte at position zero. + /// \warning \p src is only valid until the `AMfreeResult()` function is called + /// on the `AMresult` struct hosting the array of bytes to which + /// it points. + src: *const u8, + /// The number of bytes in the sequence. + count: usize, +} + +impl From<&Vec> for AMbyteSpan { + fn from(v: &Vec) -> Self { + AMbyteSpan { + src: (*v).as_ptr(), + count: (*v).len(), + } + } +} + +impl From<&mut am::ActorId> for AMbyteSpan { + fn from(actor: &mut am::ActorId) -> Self { + let slice = actor.to_bytes(); + AMbyteSpan { + src: slice.as_ptr(), + count: slice.len(), + } + } +} + +/// \struct AMvalue +/// \brief A discriminated union of value type variants for an `AMresult` struct. +/// +/// \enum AMvalueVariant +/// \brief A value type discriminant. +/// +/// \var AMvalue::tag +/// The variant discriminator of an `AMvalue` struct. +/// +/// \var AMvalue::actor_id +/// An actor ID as an `AMbyteSpan` struct. +/// +/// \var AMvalue::boolean +/// A boolean. +/// +/// \var AMvalue::bytes +/// An array of bytes as an `AMbyteSpan` struct. +/// +/// \var AMvalue::counter +/// A CRDT counter. +/// +/// \var AMvalue::f64 +/// A 64-bit float. +/// +/// \var AMvalue::change_hash +/// A change hash as an `AMbyteSpan` struct. +/// +/// \var AMvalue::int_ +/// A 64-bit signed integer. +/// +/// \var AMvalue::obj_id +/// An object identifier. +/// +/// \var AMvalue::str +/// A UTF-8 string. +/// +/// \var AMvalue::timestamp +/// A Lamport timestamp. +/// +/// \var AMvalue::uint +/// A 64-bit unsigned integer. 
+#[repr(C)] +pub enum AMvalue<'a> { + /// An actor ID variant. + ActorId(AMbyteSpan), + /// A boolean variant. + Boolean(libc::c_char), + /// An array of bytes variant. + Bytes(AMbyteSpan), + /* + /// A changes variant. + Changes(_), + */ + /// A CRDT counter variant. + Counter(i64), + /// A 64-bit float variant. + F64(f64), + /// A change hash variant. + ChangeHash(AMbyteSpan), + /// A 64-bit signed integer variant. + Int(i64), + /* + /// A keys variant. + Keys(_), + */ + /// A nothing variant. + Nothing, + /// A null variant. + Null, + /// An object identifier variant. + ObjId(&'a AMobjId), + /// A UTF-8 string variant. + Str(*const libc::c_char), + /// A Lamport timestamp variant. + Timestamp(i64), + /* + /// A transaction variant. + Transaction(_), + */ + /// A 64-bit unsigned integer variant. + Uint(u64), +} + +/// \struct AMresult +/// \brief A discriminated union of result variants. +/// +pub enum AMresult<'a> { + ActorId(am::ActorId), + Changes(Vec), + Error(CString), + ObjId(&'a AMobjId), + Nothing, + Scalars(Vec>, Option), +} + +impl<'a> AMresult<'a> { pub(crate) fn err(s: &str) -> Self { AMresult::Error(CString::new(s).unwrap()) } } -impl From> for AMresult { - fn from(maybe: Result) -> Self { +impl<'a> From> for AMresult<'a> { + fn from(maybe: Result) -> Self { match maybe { - Ok(obj) => AMresult::ObjId(obj), + Ok(actor_id) => AMresult::ActorId(actor_id), Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()), } } } -impl From> for AMresult { - fn from(maybe: Result<(), am::AutomergeError>) -> Self { +impl<'a> From> for AMresult<'a> { + fn from(maybe: Result<&'a AMobjId, am::AutomergeError>) -> Self { match maybe { - Ok(()) => AMresult::Ok, + Ok(obj_id) => AMresult::ObjId(obj_id), + Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()), + } + } +} + +impl<'a> From> for AMresult<'a> { + fn from(maybe: Result<(), am::AutomergeError>) -> Self { + match maybe { + Ok(()) => AMresult::Nothing, + Err(e) => 
AMresult::Error(CString::new(e.to_string()).unwrap()), + } + } +} + +impl<'a> From, am::AutomergeError>> for AMresult<'a> { + fn from(maybe: Result, am::AutomergeError>) -> Self { + match maybe { + // \todo Ensure that it's alright to ignore the `am::ObjId` value. + Ok(Some((value, _))) => AMresult::Scalars(vec![value], None), + Ok(None) => AMresult::Nothing, + Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()), + } + } +} + +impl<'a> From> for AMresult<'a> { + fn from(maybe: Result) -> Self { + match maybe { + Ok(value) => AMresult::Scalars(vec![value], None), Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()), } } diff --git a/automerge-c/src/utils.rs b/automerge-c/src/utils.rs index e6b50074..b080c229 100644 --- a/automerge-c/src/utils.rs +++ b/automerge-c/src/utils.rs @@ -1,8 +1,8 @@ -use crate::{AMobj, AMresult}; +use crate::{AMobjId, AMresult}; use automerge as am; use std::ops::Deref; -impl Deref for AMobj { +impl Deref for AMobjId { type Target = am::ObjId; fn deref(&self) -> &Self::Target { @@ -11,14 +11,14 @@ impl Deref for AMobj { } #[allow(clippy::not_unsafe_ptr_arg_deref)] -impl From<*const AMobj> for AMobj { - fn from(obj: *const AMobj) -> Self { - unsafe { obj.as_ref().cloned().unwrap_or(AMobj(am::ROOT)) } +impl From<*const AMobjId> for AMobjId { + fn from(obj_id: *const AMobjId) -> Self { + unsafe { obj_id.as_ref().unwrap_or(AMobjId(am::ROOT)) } } } -impl From for *mut AMresult { - fn from(b: AMresult) -> Self { +impl<'a> From> for *mut AMresult<'a> { + fn from(b: AMresult<'a>) -> Self { Box::into_raw(Box::new(b)) } } diff --git a/automerge-c/test/CMakeLists.txt b/automerge-c/test/CMakeLists.txt index f680043a..aef36431 100644 --- a/automerge-c/test/CMakeLists.txt +++ b/automerge-c/test/CMakeLists.txt @@ -4,10 +4,12 @@ find_package(cmocka REQUIRED) add_executable( test_${LIBRARY_NAME} - group_state.c - amlistset_tests.c - ammapset_tests.c - main.c + group_state.c + amdoc_property_tests.c + amlistset_tests.c + 
ammapset_tests.c + macro_utils.c + main.c ) set_target_properties(test_${LIBRARY_NAME} PROPERTIES LINKER_LANGUAGE C) diff --git a/automerge-c/test/amdoc_property_tests.c b/automerge-c/test/amdoc_property_tests.c new file mode 100644 index 00000000..b9c87b52 --- /dev/null +++ b/automerge-c/test/amdoc_property_tests.c @@ -0,0 +1,110 @@ +#include +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include "group_state.h" + +typedef struct { + GroupState* group_state; + char const* actor_id_str; + uint8_t* actor_id_bytes; + size_t actor_id_size; +} TestState; + +static void hex_to_bytes(char const* hex_str, uint8_t* bytes, size_t const count) { + unsigned int byte; + char const* next = hex_str; + for (size_t index = 0; *next && index != count; next += 2, ++index) { + if (sscanf(next, "%02x", &byte) == 1) { + bytes[index] = (uint8_t)byte; + } + } +} + +static int setup(void** state) { + TestState* test_state = calloc(1, sizeof(TestState)); + group_setup((void**)&test_state->group_state); + test_state->actor_id_str = "000102030405060708090a0b0c0d0e0f"; + test_state->actor_id_size = strlen(test_state->actor_id_str) / 2; + test_state->actor_id_bytes = malloc(test_state->actor_id_size); + hex_to_bytes(test_state->actor_id_str, test_state->actor_id_bytes, test_state->actor_id_size); + *state = test_state; + return 0; +} + +static int teardown(void** state) { + TestState* test_state = *state; + group_teardown((void**)&test_state->group_state); + free(test_state->actor_id_bytes); + free(test_state); + return 0; +} + +static void test_AMputActor(void **state) { + TestState* test_state = *state; + GroupState* group_state = test_state->group_state; + AMresult* res = AMputActor( + group_state->doc, + test_state->actor_id_bytes, + test_state->actor_id_size + ); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 0); + AMvalue value = AMresultValue(res, 0); + 
assert_int_equal(value.tag, AM_VALUE_NOTHING); + AMfreeResult(res); + res = AMgetActor(group_state->doc); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 1); + value = AMresultValue(res, 0); + assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); + assert_int_equal(value.actor_id.count, test_state->actor_id_size); + assert_memory_equal(value.actor_id.src, test_state->actor_id_bytes, value.actor_id.count); + AMfreeResult(res); +} + +static void test_AMputActorHex(void **state) { + TestState* test_state = *state; + GroupState* group_state = test_state->group_state; + AMresult* res = AMputActorHex( + group_state->doc, + test_state->actor_id_str + ); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 0); + AMvalue value = AMresultValue(res, 0); + assert_int_equal(value.tag, AM_VALUE_NOTHING); + AMfreeResult(res); + res = AMgetActorHex(group_state->doc); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 1); + value = AMresultValue(res, 0); + assert_int_equal(value.tag, AM_VALUE_STR); + assert_int_equal(strlen(value.str), test_state->actor_id_size * 2); + assert_string_equal(value.str, test_state->actor_id_str); + AMfreeResult(res); +} + +int run_AMdoc_property_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test_setup_teardown(test_AMputActor, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMputActorHex, setup, teardown), + }; + + return cmocka_run_group_tests(tests, NULL, NULL); +} diff --git a/automerge-c/test/amlistset_tests.c b/automerge-c/test/amlistset_tests.c index dcafb513..27b4fae9 100644 --- a/automerge-c/test/amlistset_tests.c +++ b/automerge-c/test/amlistset_tests.c @@ -11,159 +11,224 @@ /* local */ #include "group_state.h" +#include "macro_utils.h" -#define test_AMlistSet(label, mode) test_AMlistSet ## label ## 
_ ## mode +#define test_AMlistPut(suffix, mode) test_AMlistPut ## suffix ## _ ## mode -#define static_void_test_AMlistSet(label, mode, value) \ -static void test_AMlistSet ## label ## _ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMresult* res = AMlistSet ## label(group_state->doc, AM_ROOT, 0, !strcmp(#mode, "insert"), value); \ - if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ +#define static_void_test_AMlistPut(suffix, mode, member, scalar_value) \ +static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ + GroupState* group_state = *state; \ + AMresult* res = AMlistPut ## suffix( \ + group_state->doc, AM_ROOT, 0, !strcmp(#mode, "insert"), scalar_value \ + ); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 0); \ + AMvalue value = AMresultValue(res, 0); \ + assert_int_equal(value.tag, AM_VALUE_NOTHING); \ + AMfreeResult(res); \ + res = AMlistGet(group_state->doc, AM_ROOT, 0); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + value = AMresultValue(res, 0); \ + assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ + assert_true(value.member == scalar_value); \ + AMfreeResult(res); \ } -static_void_test_AMlistSet(Counter, insert, INT64_MAX) +#define test_AMlistPutBytes(mode) test_AMlistPutBytes ## _ ## mode -static_void_test_AMlistSet(Counter, update, INT64_MAX) - -static_void_test_AMlistSet(F64, insert, DBL_MAX) - -static_void_test_AMlistSet(F64, update, DBL_MAX) - -static_void_test_AMlistSet(Int, insert, INT64_MAX) - -static_void_test_AMlistSet(Int, update, INT64_MAX) - -static_void_test_AMlistSet(Str, insert, "Hello, world!") - -static_void_test_AMlistSet(Str, update, "Hello, world!") - -static_void_test_AMlistSet(Timestamp, insert, INT64_MAX) - -static_void_test_AMlistSet(Timestamp, update, 
INT64_MAX) - -static_void_test_AMlistSet(Uint, insert, UINT64_MAX) - -static_void_test_AMlistSet(Uint, update, UINT64_MAX) - -static void test_AMlistSetBytes_insert(void **state) { - static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; - - GroupState* group_state = *state; - AMresult* res = AMlistSetBytes( - group_state->doc, - AM_ROOT, - 0, - true, - BYTES_VALUE, - sizeof(BYTES_VALUE) / sizeof(uint8_t) - ); - if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { - fail_msg("%s", AMerrorMessage(res)); - } +#define static_void_test_AMlistPutBytes(mode, bytes_value) \ +static void test_AMlistPutBytes_ ## mode(void **state) { \ + static size_t const BYTES_SIZE = sizeof(bytes_value) / sizeof(uint8_t); \ + \ + GroupState* group_state = *state; \ + AMresult* res = AMlistPutBytes( \ + group_state->doc, \ + AM_ROOT, \ + 0, \ + !strcmp(#mode, "insert"), \ + bytes_value, \ + BYTES_SIZE \ + ); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 0); \ + AMvalue value = AMresultValue(res, 0); \ + assert_int_equal(value.tag, AM_VALUE_NOTHING); \ + AMfreeResult(res); \ + res = AMlistGet(group_state->doc, AM_ROOT, 0); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + value = AMresultValue(res, 0); \ + assert_int_equal(value.tag, AM_VALUE_BYTES); \ + assert_int_equal(value.bytes.count, BYTES_SIZE); \ + assert_memory_equal(value.bytes.src, bytes_value, BYTES_SIZE); \ + AMfreeResult(res); \ } -static void test_AMlistSetBytes_update(void **state) { - static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; +#define test_AMlistPutNull(mode) test_AMlistPutNull_ ## mode - GroupState* group_state = *state; - AMresult* res = AMlistSetBytes( - group_state->doc, - AM_ROOT, - 0, - false, - BYTES_VALUE, - sizeof(BYTES_VALUE) / sizeof(uint8_t) - ); - if (AMresultStatus(res) != 
AM_STATUS_COMMAND_OK) { - fail_msg("%s", AMerrorMessage(res)); - } +#define static_void_test_AMlistPutNull(mode) \ +static void test_AMlistPutNull_ ## mode(void **state) { \ + GroupState* group_state = *state; \ + AMresult* res = AMlistPutNull( \ + group_state->doc, AM_ROOT, 0, !strcmp(#mode, "insert")); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 0); \ + AMvalue value = AMresultValue(res, 0); \ + assert_int_equal(value.tag, AM_VALUE_NOTHING); \ + AMfreeResult(res); \ + res = AMlistGet(group_state->doc, AM_ROOT, 0); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + value = AMresultValue(res, 0); \ + assert_int_equal(value.tag, AM_VALUE_NULL); \ + AMfreeResult(res); \ } +#define test_AMlistPutObject(label, mode) test_AMlistPutObject_ ## label ## _ ## mode -static void test_AMlistSetNull_insert(void **state) { - GroupState* group_state = *state; - AMresult* res = AMlistSetNull(group_state->doc, AM_ROOT, 0, true); - if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { - fail_msg("%s", AMerrorMessage(res)); - } +#define static_void_test_AMlistPutObject(label, mode) \ +static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ + GroupState* group_state = *state; \ + AMresult* res = AMlistPutObject( \ + group_state->doc, \ + AM_ROOT, \ + 0, \ + !strcmp(#mode, "insert"), \ + AMobjType_tag(#label) \ + ); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + AMvalue value = AMresultValue(res, 0); \ + assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ + /** \ + * \note The `AMresult` struct can be deallocated immediately when its \ + * value is a pointer to an opaque struct because its lifetime \ + * is tied to the `AMdoc` struct instead. 
\ + */ \ + AMfreeResult(res); \ + assert_non_null(value.obj_id); \ + assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ + AMfreeObjId(group_state->doc, value.obj_id); \ } -static void test_AMlistSetNull_update(void **state) { - GroupState* group_state = *state; - AMresult* res = AMlistSetNull(group_state->doc, AM_ROOT, 0, false); - if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { - fail_msg("would be consolidated into%s", AMerrorMessage(res)); - } +#define test_AMlistPutStr(mode) test_AMlistPutStr ## _ ## mode + +#define static_void_test_AMlistPutStr(mode, str_value) \ +static void test_AMlistPutStr_ ## mode(void **state) { \ + static size_t const STR_LEN = strlen(str_value); \ + \ + GroupState* group_state = *state; \ + AMresult* res = AMlistPutStr( \ + group_state->doc, \ + AM_ROOT, \ + 0, \ + !strcmp(#mode, "insert"), \ + str_value \ + ); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 0); \ + AMvalue value = AMresultValue(res, 0); \ + assert_int_equal(value.tag, AM_VALUE_NOTHING); \ + AMfreeResult(res); \ + res = AMlistGet(group_state->doc, AM_ROOT, 0); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + value = AMresultValue(res, 0); \ + assert_int_equal(value.tag, AM_VALUE_STR); \ + assert_int_equal(strlen(value.str), STR_LEN); \ + assert_memory_equal(value.str, str_value, STR_LEN + 1); \ + AMfreeResult(res); \ } -static void test_AMlistSetObject_insert(void **state) { - static AmObjType const OBJ_TYPES[] = { - AM_OBJ_TYPE_LIST, - AM_OBJ_TYPE_MAP, - AM_OBJ_TYPE_TEXT, - }; - static AmObjType const* const end = OBJ_TYPES + sizeof(OBJ_TYPES) / sizeof(AmObjType); +static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; - GroupState* group_state = *state; - for (AmObjType const* next = OBJ_TYPES; next != end; ++next) { - AMresult* res = AMlistSetObject( - 
group_state->doc, - AM_ROOT, - 0, - true, - *next - ); - if (AMresultStatus(res) != AM_STATUS_OBJ_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - } -} +static_void_test_AMlistPutBytes(insert, BYTES_VALUE) -static void test_AMlistSetObject_update(void **state) { - static AmObjType const OBJ_TYPES[] = { - AM_OBJ_TYPE_LIST, - AM_OBJ_TYPE_MAP, - AM_OBJ_TYPE_TEXT, - }; - static AmObjType const* const end = OBJ_TYPES + sizeof(OBJ_TYPES) / sizeof(AmObjType); +static_void_test_AMlistPutBytes(update, BYTES_VALUE) - GroupState* group_state = *state; - for (AmObjType const* next = OBJ_TYPES; next != end; ++next) { - AMresult* res = AMlistSetObject( - group_state->doc, - AM_ROOT, - 0, - false, - *next - ); - if (AMresultStatus(res) != AM_STATUS_OBJ_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - } -} +static_void_test_AMlistPut(Counter, insert, counter, INT64_MAX) -int run_AMlistSet_tests(void) { +static_void_test_AMlistPut(Counter, update, counter, INT64_MAX) + +static_void_test_AMlistPut(F64, insert, f64, DBL_MAX) + +static_void_test_AMlistPut(F64, update, f64, DBL_MAX) + +static_void_test_AMlistPut(Int, insert, int_, INT64_MAX) + +static_void_test_AMlistPut(Int, update, int_, INT64_MAX) + +static_void_test_AMlistPutNull(insert) + +static_void_test_AMlistPutNull(update) + +static_void_test_AMlistPutObject(List, insert) + +static_void_test_AMlistPutObject(List, update) + +static_void_test_AMlistPutObject(Map, insert) + +static_void_test_AMlistPutObject(Map, update) + +static_void_test_AMlistPutObject(Text, insert) + +static_void_test_AMlistPutObject(Text, update) + +static_void_test_AMlistPutStr(insert, "Hello, world!") + +static_void_test_AMlistPutStr(update, "Hello, world!") + +static_void_test_AMlistPut(Timestamp, insert, timestamp, INT64_MAX) + +static_void_test_AMlistPut(Timestamp, update, timestamp, INT64_MAX) + +static_void_test_AMlistPut(Uint, insert, uint, UINT64_MAX) + +static_void_test_AMlistPut(Uint, update, uint, UINT64_MAX) + +int run_AMlistPut_tests(void) 
{ const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMlistSetBytes_insert), - cmocka_unit_test(test_AMlistSetBytes_update), - cmocka_unit_test(test_AMlistSet(Counter, insert)), - cmocka_unit_test(test_AMlistSet(Counter, update)), - cmocka_unit_test(test_AMlistSet(F64, insert)), - cmocka_unit_test(test_AMlistSet(F64, update)), - cmocka_unit_test(test_AMlistSet(Int, insert)), - cmocka_unit_test(test_AMlistSet(Int, update)), - cmocka_unit_test(test_AMlistSetNull_insert), - cmocka_unit_test(test_AMlistSetNull_update), - cmocka_unit_test(test_AMlistSetObject_insert), - cmocka_unit_test(test_AMlistSetObject_update), - cmocka_unit_test(test_AMlistSet(Str, insert)), - cmocka_unit_test(test_AMlistSet(Str, update)), - cmocka_unit_test(test_AMlistSet(Timestamp, insert)), - cmocka_unit_test(test_AMlistSet(Timestamp, update)), - cmocka_unit_test(test_AMlistSet(Uint, insert)), - cmocka_unit_test(test_AMlistSet(Uint, update)), + cmocka_unit_test(test_AMlistPutBytes(insert)), + cmocka_unit_test(test_AMlistPutBytes(update)), + cmocka_unit_test(test_AMlistPut(Counter, insert)), + cmocka_unit_test(test_AMlistPut(Counter, update)), + cmocka_unit_test(test_AMlistPut(F64, insert)), + cmocka_unit_test(test_AMlistPut(F64, update)), + cmocka_unit_test(test_AMlistPut(Int, insert)), + cmocka_unit_test(test_AMlistPut(Int, update)), + cmocka_unit_test(test_AMlistPutNull(insert)), + cmocka_unit_test(test_AMlistPutNull(update)), + cmocka_unit_test(test_AMlistPutObject(List, insert)), + cmocka_unit_test(test_AMlistPutObject(List, update)), + cmocka_unit_test(test_AMlistPutObject(Map, insert)), + cmocka_unit_test(test_AMlistPutObject(Map, update)), + cmocka_unit_test(test_AMlistPutObject(Text, insert)), + cmocka_unit_test(test_AMlistPutObject(Text, update)), + cmocka_unit_test(test_AMlistPutStr(insert)), + cmocka_unit_test(test_AMlistPutStr(update)), + cmocka_unit_test(test_AMlistPut(Timestamp, insert)), + cmocka_unit_test(test_AMlistPut(Timestamp, update)), + 
cmocka_unit_test(test_AMlistPut(Uint, insert)), + cmocka_unit_test(test_AMlistPut(Uint, update)), }; return cmocka_run_group_tests(tests, group_setup, group_teardown); diff --git a/automerge-c/test/ammapset_tests.c b/automerge-c/test/ammapset_tests.c index f2bb675c..79b79f62 100644 --- a/automerge-c/test/ammapset_tests.c +++ b/automerge-c/test/ammapset_tests.c @@ -4,93 +4,186 @@ #include #include #include +#include /* third-party */ #include /* local */ #include "group_state.h" +#include "macro_utils.h" -#define test_AMmapSet(label) test_AMmapSet ## label +#define test_AMmapPut(suffix) test_AMmapPut ## suffix -#define static_void_test_AMmapSet(label, value) \ -static void test_AMmapSet ## label(void **state) { \ - GroupState* group_state = *state; \ - AMresult* res = AMmapSet ## label(group_state->doc, AM_ROOT, #label, value); \ - if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ +#define static_void_test_AMmapPut(suffix, member, scalar_value) \ +static void test_AMmapPut ## suffix(void **state) { \ + GroupState* group_state = *state; \ + AMresult* res = AMmapPut ## suffix( \ + group_state->doc, \ + AM_ROOT, \ + #suffix, \ + scalar_value \ + ); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 0); \ + AMvalue value = AMresultValue(res, 0); \ + assert_int_equal(value.tag, AM_VALUE_NOTHING); \ + AMfreeResult(res); \ + res = AMmapGet(group_state->doc, AM_ROOT, #suffix); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + value = AMresultValue(res, 0); \ + assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ + assert_true(value.member == scalar_value); \ + AMfreeResult(res); \ } -static_void_test_AMmapSet(Int, INT64_MAX) +#define test_AMmapPutObject(label) test_AMmapPutObject_ ## label -static_void_test_AMmapSet(Uint, UINT64_MAX) +#define 
static_void_test_AMmapPutObject(label) \ +static void test_AMmapPutObject_ ## label(void **state) { \ + GroupState* group_state = *state; \ + AMresult* res = AMmapPutObject( \ + group_state->doc, \ + AM_ROOT, \ + #label, \ + AMobjType_tag(#label) \ + ); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + AMvalue value = AMresultValue(res, 0); \ + assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ + /** \ + * \note The `AMresult` struct can be deallocated immediately when its \ + * value is a pointer to an opaque struct because its lifetime \ + * is tied to the `AMdoc` struct instead. \ + */ \ + AMfreeResult(res); \ + assert_non_null(value.obj_id); \ + assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ + AMfreeObjId(group_state->doc, value.obj_id); \ +} -static_void_test_AMmapSet(Str, "Hello, world!") - -static_void_test_AMmapSet(F64, DBL_MAX) - -static_void_test_AMmapSet(Counter, INT64_MAX) - -static_void_test_AMmapSet(Timestamp, INT64_MAX) - -static void test_AMmapSetBytes(void **state) { +static void test_AMmapPutBytes(void **state) { + static char const* const KEY = "Bytes"; static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; + static size_t const BYTES_SIZE = sizeof(BYTES_VALUE) / sizeof(uint8_t); GroupState* group_state = *state; - AMresult* res = AMmapSetBytes( + AMresult* res = AMmapPutBytes( group_state->doc, AM_ROOT, - "Bytes", + KEY, BYTES_VALUE, - sizeof(BYTES_VALUE) / sizeof(uint8_t) + BYTES_SIZE ); - if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { + if (AMresultStatus(res) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(res)); } -} - -static void test_AMmapSetNull(void **state) { - GroupState* group_state = *state; - AMresult* res = AMmapSetNull(group_state->doc, AM_ROOT, "Null"); - if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) { + assert_int_equal(AMresultSize(res), 0); + AMvalue value = AMresultValue(res, 0); + 
assert_int_equal(value.tag, AM_VALUE_NOTHING); + AMfreeResult(res); + res = AMmapGet(group_state->doc, AM_ROOT, KEY); + if (AMresultStatus(res) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(res)); } + assert_int_equal(AMresultSize(res), 1); + value = AMresultValue(res, 0); + assert_int_equal(value.tag, AM_VALUE_BYTES); + assert_int_equal(value.bytes.count, BYTES_SIZE); + assert_memory_equal(value.bytes.src, BYTES_VALUE, BYTES_SIZE); + AMfreeResult(res); } -static void test_AMmapSetObject(void **state) { - static AmObjType const OBJ_TYPES[] = { - AM_OBJ_TYPE_LIST, - AM_OBJ_TYPE_MAP, - AM_OBJ_TYPE_TEXT, - }; - static AmObjType const* const end = OBJ_TYPES + sizeof(OBJ_TYPES) / sizeof(AmObjType); +static_void_test_AMmapPut(Counter, counter, INT64_MAX) + +static_void_test_AMmapPut(F64, f64, DBL_MAX) + +static_void_test_AMmapPut(Int, int_, INT64_MAX) + +static void test_AMmapPutNull(void **state) { + static char const* const KEY = "Null"; GroupState* group_state = *state; - for (AmObjType const* next = OBJ_TYPES; next != end; ++next) { - AMresult* res = AMmapSetObject( - group_state->doc, - AM_ROOT, - "Object", - *next - ); - if (AMresultStatus(res) != AM_STATUS_OBJ_OK) { - fail_msg("%s", AMerrorMessage(res)); - } + AMresult* res = AMmapPutNull(group_state->doc, AM_ROOT, KEY); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); } + assert_int_equal(AMresultSize(res), 0); + AMvalue value = AMresultValue(res, 0); + assert_int_equal(value.tag, AM_VALUE_NOTHING); + AMfreeResult(res); + res = AMmapGet(group_state->doc, AM_ROOT, KEY); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 1); + value = AMresultValue(res, 0); + assert_int_equal(value.tag, AM_VALUE_NULL); + AMfreeResult(res); } -int run_AMmapSet_tests(void) { +static_void_test_AMmapPutObject(List) + +static_void_test_AMmapPutObject(Map) + +static_void_test_AMmapPutObject(Text) + +static void 
test_AMmapPutStr(void **state) { + static char const* const KEY = "Str"; + static char const* const STR_VALUE = "Hello, world!"; + size_t const STR_LEN = strlen(STR_VALUE); + + GroupState* group_state = *state; + AMresult* res = AMmapPutStr( + group_state->doc, + AM_ROOT, + KEY, + STR_VALUE + ); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 0); + AMvalue value = AMresultValue(res, 0); + assert_int_equal(value.tag, AM_VALUE_NOTHING); + AMfreeResult(res); + res = AMmapGet(group_state->doc, AM_ROOT, KEY); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 1); + value = AMresultValue(res, 0); + assert_int_equal(value.tag, AM_VALUE_STR); + assert_int_equal(strlen(value.str), STR_LEN); + assert_memory_equal(value.str, STR_VALUE, STR_LEN + 1); + AMfreeResult(res); +} + +static_void_test_AMmapPut(Timestamp, timestamp, INT64_MAX) + +static_void_test_AMmapPut(Uint, uint, UINT64_MAX) + +int run_AMmapPut_tests(void) { const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMmapSetBytes), - cmocka_unit_test(test_AMmapSet(Counter)), - cmocka_unit_test(test_AMmapSet(F64)), - cmocka_unit_test(test_AMmapSet(Int)), - cmocka_unit_test(test_AMmapSetNull), - cmocka_unit_test(test_AMmapSetObject), - cmocka_unit_test(test_AMmapSet(Str)), - cmocka_unit_test(test_AMmapSet(Timestamp)), - cmocka_unit_test(test_AMmapSet(Uint)), + cmocka_unit_test(test_AMmapPutBytes), + cmocka_unit_test(test_AMmapPut(Counter)), + cmocka_unit_test(test_AMmapPut(F64)), + cmocka_unit_test(test_AMmapPut(Int)), + cmocka_unit_test(test_AMmapPutNull), + cmocka_unit_test(test_AMmapPutObject(List)), + cmocka_unit_test(test_AMmapPutObject(Map)), + cmocka_unit_test(test_AMmapPutObject(Text)), + cmocka_unit_test(test_AMmapPutStr), + cmocka_unit_test(test_AMmapPut(Timestamp)), + cmocka_unit_test(test_AMmapPut(Uint)), }; return cmocka_run_group_tests(tests, 
group_setup, group_teardown); diff --git a/automerge-c/test/group_state.c b/automerge-c/test/group_state.c index 6c785907..a0a2a049 100644 --- a/automerge-c/test/group_state.c +++ b/automerge-c/test/group_state.c @@ -5,14 +5,14 @@ int group_setup(void** state) { GroupState* group_state = calloc(1, sizeof(GroupState)); - group_state->doc = AMcreate(); + group_state->doc = AMallocDoc(); *state = group_state; return 0; } int group_teardown(void** state) { GroupState* group_state = *state; - AMdestroy(group_state->doc); + AMfreeDoc(group_state->doc); free(group_state); return 0; } diff --git a/automerge-c/test/macro_utils.c b/automerge-c/test/macro_utils.c new file mode 100644 index 00000000..d4343bc0 --- /dev/null +++ b/automerge-c/test/macro_utils.c @@ -0,0 +1,23 @@ +#include + +/* local */ +#include "macro_utils.h" + +AMvalueVariant AMvalue_discriminant(char const* suffix) { + if (!strcmp(suffix, "Bytes")) return AM_VALUE_BYTES; + else if (!strcmp(suffix, "Counter")) return AM_VALUE_COUNTER; + else if (!strcmp(suffix, "F64")) return AM_VALUE_F64; + else if (!strcmp(suffix, "Int")) return AM_VALUE_INT; + else if (!strcmp(suffix, "Null")) return AM_VALUE_NULL; + else if (!strcmp(suffix, "Str")) return AM_VALUE_STR; + else if (!strcmp(suffix, "Timestamp")) return AM_VALUE_TIMESTAMP; + else if (!strcmp(suffix, "Uint")) return AM_VALUE_UINT; + else return AM_VALUE_NOTHING; +} + +AMobjType AMobjType_tag(char const* obj_type_label) { + if (!strcmp(obj_type_label, "List")) return AM_OBJ_TYPE_LIST; + else if (!strcmp(obj_type_label, "Map")) return AM_OBJ_TYPE_MAP; + else if (!strcmp(obj_type_label, "Text")) return AM_OBJ_TYPE_TEXT; + else return 0; +} diff --git a/automerge-c/test/macro_utils.h b/automerge-c/test/macro_utils.h new file mode 100644 index 00000000..5a74c562 --- /dev/null +++ b/automerge-c/test/macro_utils.h @@ -0,0 +1,23 @@ +#ifndef MACRO_UTILS_INCLUDED +#define MACRO_UTILS_INCLUDED + +/* local */ +#include "automerge.h" + +/** + * \brief Gets the `AMvalue` 
discriminant corresponding to a function name suffix. + * + * \param[in] suffix A string. + * \return An `AMvalue` variant discriminant enum tag. + */ +AMvalueVariant AMvalue_discriminant(char const* suffix); + +/** + * \brief Gets the `AMobjType` tag corresponding to a object type label. + * + * \param[in] obj_type_label A string. + * \return An `AMobjType` enum tag. + */ +AMobjType AMobjType_tag(char const* obj_type_label); + +#endif diff --git a/automerge-c/test/main.c b/automerge-c/test/main.c index b637434e..11a2e888 100644 --- a/automerge-c/test/main.c +++ b/automerge-c/test/main.c @@ -6,26 +6,16 @@ /* third-party */ #include -/* local */ -#include "group_state.h" +extern int run_AMdoc_property_tests(void); -extern int run_AMlistSet_tests(void); +extern int run_AMlistPut_tests(void); -extern int run_AMmapSet_tests(void); - -static void test_AMconfig(void **state) { - GroupState* group_state = *state; - AMconfig(group_state->doc, "actor", "aabbcc"); -} +extern int run_AMmapPut_tests(void); int main(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMconfig), - }; - return ( - run_AMlistSet_tests() + - run_AMmapSet_tests() + - cmocka_run_group_tests(tests, group_setup, group_teardown) + run_AMdoc_property_tests() + + run_AMlistPut_tests() + + run_AMmapPut_tests() ); } diff --git a/automerge/src/value.rs b/automerge/src/value.rs index f378088e..76fd0c03 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -686,6 +686,12 @@ impl From for ScalarValue { } } +impl From> for ScalarValue { + fn from(b: Vec) -> Self { + ScalarValue::Bytes(b) + } +} + impl From<()> for ScalarValue { fn from(_: ()) -> Self { ScalarValue::Null From aaa2f7489b969722d61ee1e29d9890b52593130d Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Wed, 20 Apr 2022 00:57:52 -0600 Subject: [PATCH 291/730] Fixed the compilation errors caused by merging PR #310 into the "experiment" branch. 
--- automerge-c/src/doc.rs | 4 ++-- automerge-c/src/lib.rs | 14 +++++++------- automerge-c/src/result.rs | 17 +++++++++++++---- automerge-c/src/utils.rs | 19 +------------------ automerge-c/test/amdoc_property_tests.c | 4 ++-- automerge/src/error.rs | 2 ++ automerge/src/value.rs | 6 ------ 7 files changed, 27 insertions(+), 39 deletions(-) diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index ab38237a..221e93ee 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -40,13 +40,13 @@ impl AMdoc { } } - pub fn set_object, P: Into>( + pub fn put_object, P: Into>( &mut self, obj: O, prop: P, value: am::ObjType, ) -> Result<&AMobjId, am::AutomergeError> { - match self.body.set_object(obj, prop, value) { + match self.body.put_object(obj, prop, value) { Ok(ex_id) => { let obj_id = AMobjId::new(ex_id); self.obj_ids.insert(obj_id.clone()); diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs index 50d1f9ee..dd019ed9 100644 --- a/automerge-c/src/lib.rs +++ b/automerge-c/src/lib.rs @@ -1,7 +1,7 @@ use automerge as am; use hex; use smol_str::SmolStr; -use std::{ffi::CStr, ffi::CString, os::raw::c_char}; +use std::{borrow::Cow, ffi::CStr, ffi::CString, os::raw::c_char}; mod doc; mod result; @@ -66,7 +66,7 @@ macro_rules! to_doc { macro_rules! 
to_obj_id { ($handle:expr) => {{ match $handle.as_ref() { - Some(am_obj_id) => am_obj_id, + Some(obj_id) => obj_id, None => &am::ROOT, } }}; @@ -160,7 +160,7 @@ pub unsafe extern "C" fn AMgetActor<'a>(doc: *mut AMdoc) -> *mut AMresult<'a> { pub unsafe extern "C" fn AMgetActorHex<'a>(doc: *mut AMdoc) -> *mut AMresult<'a> { let doc = to_doc!(doc); let hex_str = doc.get_actor().to_hex_string(); - let value = am::Value::Scalar(am::ScalarValue::Str(SmolStr::new(hex_str))); + let value = am::Value::Scalar(Cow::Owned(am::ScalarValue::Str(SmolStr::new(hex_str)))); to_result(Ok(value)) } @@ -220,7 +220,7 @@ pub unsafe extern "C" fn AMsetActorHex<'a>( doc.set_actor(vec.into()); Ok(()) } - Err(_) => Err(am::AutomergeError::Decoding), + Err(error) => Err(am::AutomergeError::HexDecode(error)), }) } @@ -296,7 +296,7 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult<'a>, index: usi AMresult::Scalars(vec, hosted_str) => { if let Some(element) = vec.get(index) { match element { - am::Value::Scalar(scalar) => match scalar { + am::Value::Scalar(scalar) => match scalar.as_ref() { am::ScalarValue::Boolean(flag) => { value = AMvalue::Boolean(*flag as i8); } @@ -638,7 +638,7 @@ pub unsafe extern "C" fn AMlistGet<'a>( index: usize, ) -> *mut AMresult<'a> { let doc = to_doc!(doc); - to_result(doc.value(to_obj_id!(obj_id), index)) + to_result(doc.get(to_obj_id!(obj_id), index)) } /// \memberof AMdoc @@ -665,7 +665,7 @@ pub unsafe extern "C" fn AMmapGet<'a>( key: *const c_char, ) -> *mut AMresult<'a> { let doc = to_doc!(doc); - to_result(doc.value(to_obj_id!(obj_id), to_str(key))) + to_result(doc.get(to_obj_id!(obj_id), to_str(key))) } /// \memberof AMdoc diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index a88cda15..04bcf433 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -1,5 +1,6 @@ use automerge as am; use std::ffi::CString; +use std::ops::Deref; /// \struct AMobjId /// \brief An object's unique identifier. 
@@ -18,6 +19,14 @@ impl AsRef for AMobjId { } } +impl Deref for AMobjId { + type Target = am::ObjId; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + /// \memberof AMvalue /// \struct AMbyteSpan /// \brief A contiguous sequence of bytes. @@ -180,8 +189,8 @@ impl<'a> From> for AMresult<'a> { } } -impl<'a> From, am::AutomergeError>> for AMresult<'a> { - fn from(maybe: Result, am::AutomergeError>) -> Self { +impl<'a> From, am::ObjId)>, am::AutomergeError>> for AMresult<'a> { + fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { match maybe { // \todo Ensure that it's alright to ignore the `am::ObjId` value. Ok(Some((value, _))) => AMresult::Scalars(vec![value], None), @@ -191,8 +200,8 @@ impl<'a> From, am::AutomergeError>> for AM } } -impl<'a> From> for AMresult<'a> { - fn from(maybe: Result) -> Self { +impl<'a> From, am::AutomergeError>> for AMresult<'a> { + fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { Ok(value) => AMresult::Scalars(vec![value], None), Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()), diff --git a/automerge-c/src/utils.rs b/automerge-c/src/utils.rs index b080c229..70d2471a 100644 --- a/automerge-c/src/utils.rs +++ b/automerge-c/src/utils.rs @@ -1,21 +1,4 @@ -use crate::{AMobjId, AMresult}; -use automerge as am; -use std::ops::Deref; - -impl Deref for AMobjId { - type Target = am::ObjId; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -#[allow(clippy::not_unsafe_ptr_arg_deref)] -impl From<*const AMobjId> for AMobjId { - fn from(obj_id: *const AMobjId) -> Self { - unsafe { obj_id.as_ref().unwrap_or(AMobjId(am::ROOT)) } - } -} +use crate::AMresult; impl<'a> From> for *mut AMresult<'a> { fn from(b: AMresult<'a>) -> Self { diff --git a/automerge-c/test/amdoc_property_tests.c b/automerge-c/test/amdoc_property_tests.c index b9c87b52..4b2b3d2a 100644 --- a/automerge-c/test/amdoc_property_tests.c +++ b/automerge-c/test/amdoc_property_tests.c @@ -50,7 +50,7 @@ static int teardown(void** 
state) { static void test_AMputActor(void **state) { TestState* test_state = *state; GroupState* group_state = test_state->group_state; - AMresult* res = AMputActor( + AMresult* res = AMsetActor( group_state->doc, test_state->actor_id_bytes, test_state->actor_id_size @@ -77,7 +77,7 @@ static void test_AMputActor(void **state) { static void test_AMputActorHex(void **state) { TestState* test_state = *state; GroupState* group_state = test_state->group_state; - AMresult* res = AMputActorHex( + AMresult* res = AMsetActorHex( group_state->doc, test_state->actor_id_str ); diff --git a/automerge/src/error.rs b/automerge/src/error.rs index 6d06e749..db55489f 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -25,6 +25,8 @@ pub enum AutomergeError { InvalidHash(ChangeHash), #[error("general failure")] Fail, + #[error(transparent)] + HexDecode(#[from] hex::FromHexError), } #[cfg(feature = "wasm")] diff --git a/automerge/src/value.rs b/automerge/src/value.rs index 76fd0c03..f378088e 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -686,12 +686,6 @@ impl From for ScalarValue { } } -impl From> for ScalarValue { - fn from(b: Vec) -> Self { - ScalarValue::Bytes(b) - } -} - impl From<()> for ScalarValue { fn from(_: ()) -> Self { ScalarValue::Null From 5128d1926d2dff720b95257c48419a9cf6a24f6c Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Wed, 20 Apr 2022 01:00:49 -0600 Subject: [PATCH 292/730] Replaced the verb "set" with the verb "put" within the names of the source files for unit test suites. 
--- automerge-c/test/CMakeLists.txt | 4 ++-- automerge-c/test/{amlistset_tests.c => amlistput_tests.c} | 0 automerge-c/test/{ammapset_tests.c => ammapput_tests.c} | 0 3 files changed, 2 insertions(+), 2 deletions(-) rename automerge-c/test/{amlistset_tests.c => amlistput_tests.c} (100%) rename automerge-c/test/{ammapset_tests.c => ammapput_tests.c} (100%) diff --git a/automerge-c/test/CMakeLists.txt b/automerge-c/test/CMakeLists.txt index aef36431..3da6051e 100644 --- a/automerge-c/test/CMakeLists.txt +++ b/automerge-c/test/CMakeLists.txt @@ -6,8 +6,8 @@ add_executable( test_${LIBRARY_NAME} group_state.c amdoc_property_tests.c - amlistset_tests.c - ammapset_tests.c + amlistput_tests.c + ammapput_tests.c macro_utils.c main.c ) diff --git a/automerge-c/test/amlistset_tests.c b/automerge-c/test/amlistput_tests.c similarity index 100% rename from automerge-c/test/amlistset_tests.c rename to automerge-c/test/amlistput_tests.c diff --git a/automerge-c/test/ammapset_tests.c b/automerge-c/test/ammapput_tests.c similarity index 100% rename from automerge-c/test/ammapset_tests.c rename to automerge-c/test/ammapput_tests.c From dad2fd4928d118f4916ccaeaf372f1389f800fe6 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Wed, 20 Apr 2022 01:04:35 -0600 Subject: [PATCH 293/730] Fixed a formatting violation. --- automerge-c/src/result.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 04bcf433..07395fb1 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -189,7 +189,9 @@ impl<'a> From> for AMresult<'a> { } } -impl<'a> From, am::ObjId)>, am::AutomergeError>> for AMresult<'a> { +impl<'a> From, am::ObjId)>, am::AutomergeError>> + for AMresult<'a> +{ fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { match maybe { // \todo Ensure that it's alright to ignore the `am::ObjId` value. 
From bc012674257c52a2f5aa1da8b17129cb7bb82fc9 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Wed, 20 Apr 2022 01:54:57 -0600 Subject: [PATCH 294/730] Fixed the clippy errors whose resolutions don't cause compilation errors. --- automerge-c/src/doc.rs | 2 +- automerge-c/src/lib.rs | 64 ++++++++++++++++++++++++------------------ 2 files changed, 37 insertions(+), 29 deletions(-) diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 221e93ee..4de2524a 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -16,7 +16,7 @@ pub struct AMdoc { impl AMdoc { pub fn new(body: am::AutoCommit) -> Self { Self { - body: body, + body, obj_ids: BTreeSet::new(), } } diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs index dd019ed9..2feb032e 100644 --- a/automerge-c/src/lib.rs +++ b/automerge-c/src/lib.rs @@ -1,5 +1,4 @@ use automerge as am; -use hex; use smol_str::SmolStr; use std::{borrow::Cow, ffi::CStr, ffi::CString, os::raw::c_char}; @@ -207,7 +206,7 @@ pub unsafe extern "C" fn AMsetActor<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// hex_str must be a null-terminated array of `c_char`. +/// hex_str must be a null-terminated array of `c_char` #[no_mangle] pub unsafe extern "C" fn AMsetActorHex<'a>( doc: *mut AMdoc, @@ -249,6 +248,10 @@ pub unsafe extern "C" fn AMresultStatus(result: *mut AMresult) -> AMstatus { /// \param[in] result A pointer to an `AMresult` struct. /// \return The count of values in \p result. /// \pre \p result must be a valid address. 
+/// \internal +/// +/// # Safety +/// result must be a pointer to a valid AMresult #[no_mangle] pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { if let Some(result) = result.as_mut() { @@ -355,7 +358,7 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult<'a>, index: usi /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutInt<'a>( @@ -384,7 +387,7 @@ pub unsafe extern "C" fn AMmapPutInt<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutUint<'a>( @@ -414,8 +417,9 @@ pub unsafe extern "C" fn AMmapPutUint<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used +/// value must be a null-terminated array of `c_char` #[no_mangle] pub unsafe extern "C" fn AMmapPutStr<'a>( doc: *mut AMdoc, @@ -446,9 +450,9 @@ pub unsafe extern "C" fn AMmapPutStr<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL -/// value must be a byte array of length `count` +/// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used +/// value must be a byte array of length `count` #[no_mangle] pub unsafe extern "C" fn AMmapPutBytes<'a>( doc: *mut AMdoc, @@ -480,7 +484,7 @@ pub unsafe extern "C" fn AMmapPutBytes<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must 
be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutF64<'a>( @@ -509,7 +513,7 @@ pub unsafe extern "C" fn AMmapPutF64<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutCounter<'a>( @@ -542,7 +546,7 @@ pub unsafe extern "C" fn AMmapPutCounter<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutTimestamp<'a>( @@ -574,7 +578,7 @@ pub unsafe extern "C" fn AMmapPutTimestamp<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutNull<'a>( @@ -602,7 +606,7 @@ pub unsafe extern "C" fn AMmapPutNull<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutObject<'a>( @@ -630,7 +634,7 @@ pub unsafe extern "C" fn AMmapPutObject<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistGet<'a>( doc: *mut AMdoc, @@ -656,7 +660,7 @@ pub unsafe extern "C" fn AMlistGet<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a 
valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapGet<'a>( @@ -688,9 +692,8 @@ pub unsafe extern "C" fn AMmapGet<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL /// value must be a byte array of length `count` -/// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMlistPutBytes<'a>( doc: *mut AMdoc, @@ -729,7 +732,7 @@ pub unsafe extern "C" fn AMlistPutBytes<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutCounter<'a>( doc: *mut AMdoc, @@ -765,7 +768,7 @@ pub unsafe extern "C" fn AMlistPutCounter<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutF64<'a>( doc: *mut AMdoc, @@ -800,7 +803,7 @@ pub unsafe extern "C" fn AMlistPutF64<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutInt<'a>( doc: *mut AMdoc, @@ -834,7 +837,7 @@ pub unsafe extern "C" fn AMlistPutInt<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutNull<'a>( doc: *mut AMdoc, @@ -869,7 +872,7 @@ pub unsafe extern "C" fn AMlistPutNull<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid 
AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutObject<'a>( doc: *mut AMdoc, @@ -882,7 +885,7 @@ pub unsafe extern "C" fn AMlistPutObject<'a>( let obj_id = to_obj_id!(obj_id); let value = obj_type.into(); to_result(if insert { - doc.insert_object(&obj_id, index, value) + doc.insert_object(obj_id, index, value) } else { doc.put_object(&obj_id, index, value) }) @@ -906,8 +909,8 @@ pub unsafe extern "C" fn AMlistPutObject<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL -/// value must be a pointer to a valid address. +/// obj_id must be a pointer to a valid AMobjId or NULL +/// value must be a null-terminated array of `c_char` #[no_mangle] pub unsafe extern "C" fn AMlistPutStr<'a>( doc: *mut AMdoc, @@ -943,7 +946,7 @@ pub unsafe extern "C" fn AMlistPutStr<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutTimestamp<'a>( doc: *mut AMdoc, @@ -979,7 +982,7 @@ pub unsafe extern "C" fn AMlistPutTimestamp<'a>( /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// obj must be a pointer to a valid AMobjId or NULL +/// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutUint<'a>( doc: *mut AMdoc, @@ -1039,6 +1042,11 @@ pub unsafe extern "C" fn AMerrorMessage(result: *mut AMresult) -> *const c_char /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. /// \return The count of values in \p obj. /// \pre \p doc must be a valid address. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMobjSize(doc: *const AMdoc, obj_id: *const AMobjId) -> usize { if let Some(doc) = doc.as_ref() { @@ -1061,8 +1069,8 @@ pub unsafe extern "C" fn AMobjSize(doc: *const AMdoc, obj_id: *const AMobjId) -> /// \internal /// /// # Safety -/// doc must be a pointer to a valid AMresult -/// obj_id must be a pointer to a valid AMobjId +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMfreeObjId(doc: *mut AMdoc, obj_id: *const AMobjId) { if let Some(doc) = doc.as_mut() { From bfe737896871b8a05e366fee040d690d5648d746 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 20 Apr 2022 10:57:52 +0100 Subject: [PATCH 295/730] Prevent increment on non-counter --- automerge/src/decoding.rs | 55 ++++++++++++++++++++++- automerge/src/encoding.rs | 8 ++++ automerge/src/error.rs | 4 +- automerge/src/transaction/inner.rs | 12 +++++ automerge/tests/test.rs | 70 +++++++++++++++++++++++++++++- 5 files changed, 146 insertions(+), 3 deletions(-) diff --git a/automerge/src/decoding.rs b/automerge/src/decoding.rs index 739e856d..5b90651f 100644 --- a/automerge/src/decoding.rs +++ b/automerge/src/decoding.rs @@ -52,7 +52,60 @@ pub enum Error { Io(#[from] io::Error), } -#[derive(thiserror::Error, Debug)] +impl PartialEq for Error { + fn eq(&self, other: &Error) -> bool { + match (self, other) { + ( + Self::WrongType { + expected_one_of: l_expected_one_of, + found: l_found, + }, + Self::WrongType { + expected_one_of: r_expected_one_of, + found: r_found, + }, + ) => l_expected_one_of == r_expected_one_of && l_found == r_found, + (Self::BadChangeFormat(l0), Self::BadChangeFormat(r0)) => l0 == r0, + ( + Self::WrongByteLength { + expected: l_expected, + found: l_found, + }, + Self::WrongByteLength { + expected: r_expected, + found: r_found, 
+ }, + ) => l_expected == r_expected && l_found == r_found, + ( + Self::ColumnsNotInAscendingOrder { + last: l_last, + found: l_found, + }, + Self::ColumnsNotInAscendingOrder { + last: r_last, + found: r_found, + }, + ) => l_last == r_last && l_found == r_found, + ( + Self::InvalidChecksum { + found: l_found, + calculated: l_calculated, + }, + Self::InvalidChecksum { + found: r_found, + calculated: r_calculated, + }, + ) => l_found == r_found && l_calculated == r_calculated, + (Self::InvalidChange(l0), Self::InvalidChange(r0)) => l0 == r0, + (Self::ChangeDecompressFailed(l0), Self::ChangeDecompressFailed(r0)) => l0 == r0, + (Self::Leb128(_l0), Self::Leb128(_r0)) => true, + (Self::Io(l0), Self::Io(r0)) => l0.kind() == r0.kind(), + _ => core::mem::discriminant(self) == core::mem::discriminant(other), + } + } +} + +#[derive(thiserror::Error, PartialEq, Debug)] pub enum InvalidChangeError { #[error("Change contained an operation with action 'set' which did not have a 'value'")] SetOpWithoutValue, diff --git a/automerge/src/encoding.rs b/automerge/src/encoding.rs index c5aa6fa2..113fd158 100644 --- a/automerge/src/encoding.rs +++ b/automerge/src/encoding.rs @@ -21,6 +21,14 @@ pub enum Error { Io(#[from] io::Error), } +impl PartialEq for Error { + fn eq(&self, other: &Error) -> bool { + match (self, other) { + (Self::Io(error1), Self::Io(error2)) => error1.kind() == error2.kind(), + } + } +} + /// Encodes booleans by storing the count of the same value. 
/// /// The sequence of numbers describes the count of false values on even indices (0-indexed) and the diff --git a/automerge/src/error.rs b/automerge/src/error.rs index 6d06e749..0b58c5ae 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -3,7 +3,7 @@ use crate::value::DataType; use crate::{decoding, encoding, ChangeHash}; use thiserror::Error; -#[derive(Error, Debug)] +#[derive(Error, Debug, PartialEq)] pub enum AutomergeError { #[error("invalid obj id format `{0}`")] InvalidObjIdFormat(String), @@ -23,6 +23,8 @@ pub enum AutomergeError { DuplicateSeqNumber(u64, ActorId), #[error("invalid hash {0}")] InvalidHash(ChangeHash), + #[error("increment operations must be against a counter value")] + MissingCounter, #[error("general failure")] Fail, } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index e53a8fdf..4b4ad60d 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -264,6 +264,12 @@ impl TransactionInner { return Ok(None); } + // increment operations are only valid against counter values. + // if there are multiple values (from conflicts) then we just need one of them to be a counter. + if matches!(action, OpType::Increment(_)) && query.ops.iter().all(|op| !op.is_counter()) { + return Err(AutomergeError::MissingCounter); + } + let pred = query.ops.iter().map(|op| op.id).collect(); let op = Op { @@ -299,6 +305,12 @@ impl TransactionInner { return Ok(None); } + // increment operations are only valid against counter values. + // if there are multiple values (from conflicts) then we just need one of them to be a counter. 
+ if matches!(action, OpType::Increment(_)) && query.ops.iter().all(|op| !op.is_counter()) { + return Err(AutomergeError::MissingCounter); + } + let op = Op { id, action, diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 4907334a..2bc46b74 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,5 +1,7 @@ use automerge::transaction::Transactable; -use automerge::{ActorId, AutoCommit, Automerge, ObjType, ScalarValue, Value, ROOT}; +use automerge::{ + ActorId, AutoCommit, Automerge, AutomergeError, ObjType, ScalarValue, Value, ROOT, +}; mod helpers; #[allow(unused_imports)] @@ -7,6 +9,8 @@ use helpers::{ mk_counter, new_doc, new_doc_with_actor, pretty_print, realize, realize_obj, sorted_actors, RealizedObject, }; +use pretty_assertions::assert_eq; + #[test] fn no_conflict_on_repeated_assignment() { let mut doc = AutoCommit::new(); @@ -925,3 +929,67 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { Ok(()) } + +#[test] +fn increment_non_counter_map() { + let mut doc = AutoCommit::new(); + // can't increment nothing + assert_eq!( + doc.increment(ROOT, "nothing", 2), + Err(AutomergeError::MissingCounter) + ); + + // can't increment a non-counter + doc.put(ROOT, "non-counter", "mystring").unwrap(); + assert_eq!( + doc.increment(ROOT, "non-counter", 2), + Err(AutomergeError::MissingCounter) + ); + + // can increment a counter still + doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); + assert_eq!(doc.increment(ROOT, "counter", 2), Ok(())); + + // can increment a counter that is part of a conflict + let mut doc1 = AutoCommit::new(); + doc1.set_actor(ActorId::from([1])); + let mut doc2 = AutoCommit::new(); + doc2.set_actor(ActorId::from([2])); + + doc1.put(ROOT, "key", ScalarValue::counter(1)).unwrap(); + doc2.put(ROOT, "key", "mystring").unwrap(); + doc1.merge(&mut doc2).unwrap(); + + assert_eq!(doc1.increment(ROOT, "key", 2), Ok(())); +} + +#[test] +fn increment_non_counter_list() { + let mut doc = 
AutoCommit::new(); + let list = doc.put_object(ROOT, "list", ObjType::List).unwrap(); + + // can't increment a non-counter + doc.insert(&list, 0, "mystring").unwrap(); + assert_eq!( + doc.increment(&list, 0, 2), + Err(AutomergeError::MissingCounter) + ); + + // can increment a counter + doc.insert(&list, 0, ScalarValue::counter(1)).unwrap(); + assert_eq!(doc.increment(&list, 0, 2), Ok(())); + + // can increment a counter that is part of a conflict + let mut doc1 = AutoCommit::new(); + doc1.set_actor(ActorId::from([1])); + let list = doc1.put_object(ROOT, "list", ObjType::List).unwrap(); + doc1.insert(&list, 0, ()).unwrap(); + let mut doc2 = doc1.fork(); + doc2.set_actor(ActorId::from([2])); + + doc1.put(&list, 0, ScalarValue::counter(1)).unwrap(); + doc2.put(&list, 0, "mystring").unwrap(); + doc1.merge(&mut doc2).unwrap(); + + assert_eq!(doc1.increment(&list, 0, 2), Ok(())); +} From d667552a9831d520f44b66fd457f45796afbc4f3 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 20 Apr 2022 08:10:06 +0100 Subject: [PATCH 296/730] Add increment observation for observer --- automerge-wasm/src/lib.rs | 7 ++ automerge-wasm/test/test.ts | 30 +++++-- automerge/examples/watch.rs | 9 +++ automerge/src/automerge.rs | 96 ++++++++++++++++++++++- automerge/src/op_observer.rs | 28 +++++++ automerge/src/op_set.rs | 7 ++ automerge/src/query/seek_op_with_patch.rs | 16 ++++ automerge/src/transaction/inner.rs | 2 + automerge/src/types.rs | 8 ++ automerge/tests/test.rs | 21 ++++- 10 files changed, 215 insertions(+), 9 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index c4f3faef..e0cc32f2 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -464,6 +464,13 @@ impl Automerge { }; } + Patch::Increment { obj, key, value } => { + js_set(&patch, "action", "increment")?; + js_set(&patch, "obj", obj.to_string())?; + js_set(&patch, "key", key)?; + js_set(&patch, "value", value.0)?; + } + Patch::Delete { obj, key } => { js_set(&patch, 
"action", "delete")?; js_set(&patch, "obj", obj.to_string())?; diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index dd12a153..6e5201ec 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -956,7 +956,7 @@ describe('Automerge', () => { doc1.free() }) - it.skip('should capture local increment ops', () => { + it('should capture local increment ops', () => { let doc1 = create('aaaa') doc1.enablePatches(true) doc1.put('_root', 'counter', 2, 'counter') @@ -964,7 +964,7 @@ describe('Automerge', () => { assert.deepEqual(doc1.popPatches(), [ {action: 'put', obj: '_root', key: 'counter', value: 2, datatype: 'counter', conflict: false}, - {action: 'put', obj: '_root', key: 'counter', value: 6, datatype: 'counter', conflict: false}, + {action: 'increment', obj: '_root', key: 'counter', value: 4}, ]) doc1.free() }) @@ -986,23 +986,41 @@ describe('Automerge', () => { doc1.free() }) - it.skip('should support counters in a map', () => { + it('should support counters in a map', () => { let doc1 = create('aaaa'), doc2 = create('bbbb') doc2.enablePatches(true) doc1.put('_root', 'starlings', 2, 'counter') doc2.loadIncremental(doc1.saveIncremental()) doc1.increment('_root', 'starlings', 1) - doc1.dump() doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.get('_root', 'starlings'), ['counter', 3]) assert.deepEqual(doc2.popPatches(), [ {action: 'put', obj: '_root', key: 'starlings', value: 2, datatype: 'counter', conflict: false}, - {action: 'put', obj: '_root', key: 'starlings', value: 3, datatype: 'counter', conflict: false} + {action: 'increment', obj: '_root', key: 'starlings', value: 1} ]) doc1.free(); doc2.free() }) - it('should support counters in a list') // TODO + it('should support counters in a list', () => { + let doc1 = create('aaaa'), doc2 = create('bbbb') + doc2.enablePatches(true) + const list = doc1.putObject('_root', 'list', []) + doc2.loadIncremental(doc1.saveIncremental()) + doc1.insert(list, 0, 1, 
'counter') + doc2.loadIncremental(doc1.saveIncremental()) + doc1.increment(list, 0, 2) + doc2.loadIncremental(doc1.saveIncremental()) + doc1.increment(list, 0, -5) + doc2.loadIncremental(doc1.saveIncremental()) + + assert.deepEqual(doc2.popPatches(), [ + {action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false}, + {action: 'insert', obj: list, key: 0, value: 1, datatype: 'counter'}, + {action: 'increment', obj: list, key: 0, value: 2}, + {action: 'increment', obj: list, key: 0, value: -5}, + ]) + doc1.free(); doc2.free() + }) it('should delete a counter from a map') // TODO }) diff --git a/automerge/examples/watch.rs b/automerge/examples/watch.rs index 4ba65e61..d9668497 100644 --- a/automerge/examples/watch.rs +++ b/automerge/examples/watch.rs @@ -66,6 +66,15 @@ fn get_changes(doc: &Automerge, patches: Vec) { doc.path_to_object(&obj) ) } + Patch::Increment { obj, key, value } => { + println!( + "increment {:?} in obj {:?} by {:?}, object path {:?}", + key, + obj, + value, + doc.path_to_object(&obj) + ) + } Patch::Delete { obj, key } => println!( "delete {:?} in obj {:?}, object path {:?}", key, diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 534913cc..d871ee41 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1004,7 +1004,7 @@ impl Automerge { pub fn dump(&self) { log!( - " {:12} {:12} {:12} {} {} {}", + " {:12} {:12} {:12} {:12} {:12} {:12}", "id", "obj", "key", @@ -1028,7 +1028,7 @@ impl Automerge { let pred: Vec<_> = op.pred.iter().map(|id| self.to_string(*id)).collect(); let succ: Vec<_> = op.succ.iter().map(|id| self.to_string(*id)).collect(); log!( - " {:12} {:12} {:12} {} {:?} {:?}", + " {:12} {:12} {:12} {:12} {:12?} {:12?}", id, obj, key, @@ -2005,4 +2005,96 @@ mod tests { let len = doc.length(&text); assert_eq!(len, 4); // 4 chars } + + #[test] + fn observe_counter_change_application_overwrite() { + let mut doc1 = AutoCommit::new(); + 
doc1.set_actor(ActorId::from([1])); + doc1.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); + doc1.commit(); + + let mut doc2 = doc1.fork(); + doc2.set_actor(ActorId::from([2])); + doc2.put(ROOT, "counter", "mystring").unwrap(); + doc2.commit(); + + doc1.increment(ROOT, "counter", 2).unwrap(); + doc1.commit(); + doc1.increment(ROOT, "counter", 5).unwrap(); + doc1.commit(); + + let mut observer = VecOpObserver::default(); + let mut doc3 = doc1.clone(); + doc3.merge_with( + &mut doc2, + ApplyOptions::default().with_op_observer(&mut observer), + ) + .unwrap(); + + assert_eq!( + observer.take_patches(), + vec![Patch::Put { + obj: ExId::Root, + key: Prop::Map("counter".into()), + value: ( + ScalarValue::Str("mystring".into()).into(), + ExId::Id(2, doc2.get_actor().clone(), 1) + ), + conflict: false + }] + ); + + let mut observer = VecOpObserver::default(); + let mut doc4 = doc2.clone(); + doc4.merge_with( + &mut doc1, + ApplyOptions::default().with_op_observer(&mut observer), + ) + .unwrap(); + + // no patches as the increments operate on an invisible counter + assert_eq!(observer.take_patches(), vec![]); + } + + #[test] + fn observe_counter_change_application() { + let mut doc = AutoCommit::new(); + doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); + doc.increment(ROOT, "counter", 2).unwrap(); + doc.increment(ROOT, "counter", 5).unwrap(); + let changes = doc.get_changes(&[]).into_iter().cloned().collect(); + + let mut new_doc = AutoCommit::new(); + let mut observer = VecOpObserver::default(); + new_doc + .apply_changes_with( + changes, + ApplyOptions::default().with_op_observer(&mut observer), + ) + .unwrap(); + assert_eq!( + observer.take_patches(), + vec![ + Patch::Put { + obj: ExId::Root, + key: Prop::Map("counter".into()), + value: ( + ScalarValue::counter(1).into(), + ExId::Id(1, doc.get_actor().clone(), 0) + ), + conflict: false + }, + Patch::Increment { + obj: ExId::Root, + key: Prop::Map("counter".into()), + value: (2, ExId::Id(2, 
doc.get_actor().clone(), 0)), + }, + Patch::Increment { + obj: ExId::Root, + key: Prop::Map("counter".into()), + value: (5, ExId::Id(3, doc.get_actor().clone(), 0)), + } + ] + ); + } } diff --git a/automerge/src/op_observer.rs b/automerge/src/op_observer.rs index 30718835..6a665242 100644 --- a/automerge/src/op_observer.rs +++ b/automerge/src/op_observer.rs @@ -21,6 +21,14 @@ pub trait OpObserver { /// - `conflict`: whether this put conflicts with other operations. fn put(&mut self, objid: ExId, key: Prop, tagged_value: (Value, ExId), conflict: bool); + /// A counter has been incremented. + /// + /// - `objid`: the object that contains the counter. + /// - `key`: they key that the chounter is at. + /// - `tagged_value`: the amount the counter has been incremented by, and the the id of the + /// increment operation. + fn increment(&mut self, objid: ExId, key: Prop, tagged_value: (i64, ExId)); + /// A value has beeen deleted. /// /// - `objid`: the object that has been deleted in. @@ -33,6 +41,8 @@ impl OpObserver for () { fn put(&mut self, _objid: ExId, _key: Prop, _tagged_value: (Value, ExId), _conflict: bool) {} + fn increment(&mut self, _objid: ExId, _key: Prop, _tagged_value: (i64, ExId)) {} + fn delete(&mut self, _objid: ExId, _key: Prop) {} } @@ -68,6 +78,14 @@ impl OpObserver for VecOpObserver { }); } + fn increment(&mut self, objid: ExId, key: Prop, tagged_value: (i64, ExId)) { + self.patches.push(Patch::Increment { + obj: objid, + key, + value: tagged_value, + }); + } + fn delete(&mut self, objid: ExId, key: Prop) { self.patches.push(Patch::Delete { obj: objid, key }) } @@ -96,6 +114,16 @@ pub enum Patch { /// The value that was inserted, and the id of the operation that inserted it there. value: (Value<'static>, ExId), }, + /// Incrementing a counter. + Increment { + /// The object that was incremented in. + obj: ExId, + /// The key that was incremented. 
+ key: Prop, + /// The amount that the counter was incremented by, and the id of the operation that + /// did the increment. + value: (i64, ExId), + }, /// Deleting an element from a list/text Delete { /// The object that was deleted from. diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 022750ca..f16822da 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -202,6 +202,13 @@ impl OpSetInternal { } else { observer.delete(ex_obj, key); } + } else if let Some(value) = op.get_increment_value() { + // only observe this increment if the counter is visible, i.e. the counter's + // create op is in the values + if values.iter().any(|value| op.pred.contains(&value.id)) { + // we have observed the value + observer.increment(ex_obj, key, (value, self.id_to_exid(op.id))); + } } else { let winner = if let Some(last_value) = values.last() { if self.m.lamport_cmp(op.id, last_value.id) == Ordering::Greater { diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index d25d8dcc..fab58a0d 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -135,6 +135,11 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // Keep track of any ops we're overwriting and any conflicts on this key if self.op.overwrites(op) { + // when we encounter an increment op we also want to find the counter for + // it. + if self.op.is_inc() && op.is_counter() && op.visible() { + self.values.push(op); + } self.succ.push(self.pos); } else if op.visible() { self.values.push(op); @@ -145,6 +150,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { if m.lamport_cmp(op.id, self.op.id) == Ordering::Greater { break; } + self.pos += 1; } @@ -178,6 +184,11 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { if self.is_target_insert(e) { self.found = true; if self.op.overwrites(e) { + // when we encounter an increment op we also want to find the counter for + // it. 
+ if self.op.is_inc() && e.is_counter() && e.visible() { + self.values.push(e); + } self.succ.push(self.pos); } if e.visible() { @@ -190,6 +201,11 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // Once we've found the reference element, keep track of any ops that we're overwriting let overwritten = self.op.overwrites(e); if overwritten { + // when we encounter an increment op we also want to find the counter for + // it. + if self.op.is_inc() && e.is_counter() && e.visible() { + self.values.push(e); + } self.succ.push(self.pos); } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index e53a8fdf..82dc6526 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -53,6 +53,8 @@ impl TransactionInner { } } else if op.is_delete() { observer.delete(ex_obj, prop.clone()); + } else if let Some(value) = op.get_increment_value() { + observer.increment(ex_obj, prop.clone(), (value, doc.id_to_exid(op.id))); } else { let value = (op.value(), doc.ops.id_to_exid(op.id)); observer.put(ex_obj, prop.clone(), value, false); diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 26b4f3ce..ee5adf01 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -459,6 +459,14 @@ impl Op { } } + pub fn get_increment_value(&self) -> Option { + if let OpType::Increment(i) = self.action { + Some(i) + } else { + None + } + } + pub fn value(&self) -> Value { match &self.action { OpType::Make(obj_type) => Value::Object(*obj_type), diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 4907334a..8d5cfe3e 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,5 +1,7 @@ use automerge::transaction::Transactable; -use automerge::{ActorId, AutoCommit, Automerge, ObjType, ScalarValue, Value, ROOT}; +use automerge::{ + ActorId, ApplyOptions, AutoCommit, Automerge, ObjType, ScalarValue, Value, VecOpObserver, ROOT, +}; mod helpers; #[allow(unused_imports)] @@ -925,3 +927,20 @@ fn 
list_counter_del() -> Result<(), automerge::AutomergeError> { Ok(()) } + +#[test] +fn observe_counter_change_application() { + let mut doc = AutoCommit::new(); + doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); + doc.increment(ROOT, "counter", 2).unwrap(); + doc.increment(ROOT, "counter", 5).unwrap(); + let changes = doc.get_changes(&[]).into_iter().cloned().collect(); + + let mut doc = AutoCommit::new(); + let mut observer = VecOpObserver::default(); + doc.apply_changes_with( + changes, + ApplyOptions::default().with_op_observer(&mut observer), + ) + .unwrap(); +} From d099d553cc3bff2102e5667453f41f562cf28d1b Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Wed, 20 Apr 2022 10:21:56 -0600 Subject: [PATCH 297/730] Apply patch from @orionz for the "needless_lifetimes" clippy violation. --- automerge-c/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs index 2feb032e..a880d588 100644 --- a/automerge-c/src/lib.rs +++ b/automerge-c/src/lib.rs @@ -279,7 +279,7 @@ pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { /// # Safety /// result must be a pointer to a valid AMresult #[no_mangle] -pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult<'a>, index: usize) -> AMvalue<'a> { +pub unsafe extern "C" fn AMresultValue(result: *mut AMresult, index: usize) -> AMvalue { let mut value = AMvalue::Nothing; if let Some(result) = result.as_mut() { match result { From e41c5ae021e67d2fb6641aaeacb5b869421a9d04 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 20 Apr 2022 15:52:25 -0400 Subject: [PATCH 298/730] typescript bugfix --- automerge-wasm/index.d.ts | 2 +- automerge-wasm/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 2f57580e..f6b58bfe 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -161,4 +161,4 @@ export class SyncState { readonly 
sharedHeads: any; } -export default function init (): Promise<()>; +export default function init (): Promise; diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 4a678781..a7243e3e 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.0", + "version": "0.1.2", "license": "MIT", "files": [ "README.md", From 4f187859e7b66b9195170c7bd9634cee115f1c62 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 22 Apr 2022 14:51:54 +0100 Subject: [PATCH 299/730] Make web-sys optional and behind the wasm feature --- automerge/Cargo.toml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index e7f205cf..ae95fa4e 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -10,7 +10,7 @@ documentation = "https://automerge.org/automerge-rs/automerge/" [features] optree-visualisation = ["dot"] -wasm = ["js-sys", "wasm-bindgen"] +wasm = ["js-sys", "wasm-bindgen", "web-sys"] [dependencies] hex = "^0.4.3" @@ -27,6 +27,7 @@ tracing = { version = "^0.1.29", features = ["log"] } fxhash = "^0.2.1" tinyvec = { version = "^1.5.1", features = ["alloc"] } serde = { version = "^1.0", features=["derive"] } +# optional deps dot = { version = "0.1.4", optional = true } js-sys = { version = "^0.3", optional = true } wasm-bindgen = { version = "^0.2", optional = true } @@ -34,6 +35,7 @@ wasm-bindgen = { version = "^0.2", optional = true } [dependencies.web-sys] version = "^0.3.55" features = ["console"] +optional = true [dev-dependencies] pretty_assertions = "1.0.0" From 070608ddf2c7149fe3db4f39738d22a69cb8b381 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 22 Apr 2022 17:51:01 +0100 Subject: [PATCH 300/730] Update CI to run on main branch 
--- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9a9753d0..6fc3f3f1 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -2,10 +2,10 @@ name: ci on: push: branches: - - experiment + - main pull_request: branches: - - experiment + - main jobs: fmt: runs-on: ubuntu-latest From 64363d7da203977609553644db088500d9fefaf6 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 09:22:25 +0100 Subject: [PATCH 301/730] Add docs badge --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 6d194bd9..7c9139d9 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ # Automerge RS +[![main docs](https://img.shields.io/badge/docs--main-published-brightgreen)](https://automerge.org/automerge-rs/automerge/) + This is a rust implementation of the [Automerge](https://github.com/automerge/automerge) file format and network protocol. If you are looking for the origional `automerge-rs` project that can be used as a wasm backend to the javascript implementation, it can be found [here](https://github.com/automerge/automerge-rs/tree/automerge-1.0). 
From 23786bc746211f63134ddba6caa7392acd9a9b2a Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 09:26:05 +0100 Subject: [PATCH 302/730] Rename workflows --- .github/workflows/advisory-cron.yaml | 2 +- .github/workflows/ci.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/advisory-cron.yaml b/.github/workflows/advisory-cron.yaml index 90923191..31bac5a3 100644 --- a/.github/workflows/advisory-cron.yaml +++ b/.github/workflows/advisory-cron.yaml @@ -1,4 +1,4 @@ -name: ci +name: Advisories on: schedule: - cron: '0 18 * * *' diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 6fc3f3f1..3ad9d8e3 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1,4 +1,4 @@ -name: ci +name: CI on: push: branches: From e3864e8fbd9a90b020bec1bb34de81bb35a65925 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 09:26:13 +0100 Subject: [PATCH 303/730] Add ci badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 7c9139d9..9466037f 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ # Automerge RS [![main docs](https://img.shields.io/badge/docs--main-published-brightgreen)](https://automerge.org/automerge-rs/automerge/) +[![ci](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml) This is a rust implementation of the [Automerge](https://github.com/automerge/automerge) file format and network protocol. 
From 78ef6e3a2dc248da66fe0a336d11c04f099b77ea Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 09:27:01 +0100 Subject: [PATCH 304/730] Fix formatting --- automerge/tests/test.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index d837a59d..e661886a 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,6 +1,7 @@ use automerge::transaction::Transactable; use automerge::{ - ActorId, ApplyOptions, AutoCommit, Automerge, AutomergeError, ObjType, ScalarValue, Value, VecOpObserver, ROOT, + ActorId, ApplyOptions, AutoCommit, Automerge, AutomergeError, ObjType, ScalarValue, Value, + VecOpObserver, ROOT, }; mod helpers; From afb1957d19a1ae3914dc93a8a34fd5a215e791d6 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 09:31:28 +0100 Subject: [PATCH 305/730] Add homepage badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 9466037f..afde562f 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,7 @@ [![main docs](https://img.shields.io/badge/docs--main-published-brightgreen)](https://automerge.org/automerge-rs/automerge/) [![ci](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml) +[![homepage](https://img.shields.io/badge/homepage-published-informational)](https://automerge.org/) This is a rust implementation of the [Automerge](https://github.com/automerge/automerge) file format and network protocol. 
From a033ffa02bbddfe14a9023709b0951fc6239549e Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 09:32:24 +0100 Subject: [PATCH 306/730] Update docs badge --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index afde562f..86df8c4f 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Automerge RS -[![main docs](https://img.shields.io/badge/docs--main-published-brightgreen)](https://automerge.org/automerge-rs/automerge/) +[![main docs](https://img.shields.io/badge/docs--main-published-informational)](https://automerge.org/automerge-rs/automerge/) [![ci](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml) [![homepage](https://img.shields.io/badge/homepage-published-informational)](https://automerge.org/) From 64c575fa850152f6df66eabcb8ca6bcc14450993 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 10:44:16 +0100 Subject: [PATCH 307/730] Add image assets and sign to readme --- README.md | 2 ++ img/brandmark.png | Bin 0 -> 1419 bytes img/brandmark.svg | 1 + img/lockup.png | Bin 0 -> 5885 bytes img/lockup.svg | 1 + img/sign.png | Bin 0 -> 7854 bytes img/sign.svg | 1 + 7 files changed, 5 insertions(+) create mode 100644 img/brandmark.png create mode 100644 img/brandmark.svg create mode 100644 img/lockup.png create mode 100644 img/lockup.svg create mode 100644 img/sign.png create mode 100644 img/sign.svg diff --git a/README.md b/README.md index 86df8c4f..15b274db 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ # Automerge RS +Automerge logo + [![main docs](https://img.shields.io/badge/docs--main-published-informational)](https://automerge.org/automerge-rs/automerge/) [![ci](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml) 
[![homepage](https://img.shields.io/badge/homepage-published-informational)](https://automerge.org/) diff --git a/img/brandmark.png b/img/brandmark.png new file mode 100644 index 0000000000000000000000000000000000000000..56e1c82d0d10e3317dd08c4c9901e4c4cd36b1f1 GIT binary patch literal 1419 zcmV;61$6p}P)`btzv-C5LJ#%AhX1l`U*kCPXtLd0!`?fGgy-F`|yP^#A|>4s=pZQvl$w zkU+p-pb*gTP#}N+aIC}t000SaNLh0L02U5`%_ z5CC9ycFsM(6Vy@P|1q~9tU{6weT8)YkwT=tA((VJGL>$;zco$s@~V7R&G=|s{4_6Y znhyh-SUiPRjZ>jb<6LOb=!Cm#U2nRbN#iKo*M00Q8fQY2Mo(za7zoW7r^3zg*x57A zgxjWR4%Uo;(3~+MG-iwnO&I~9AtNX>V@wE*7?VO1#s#4Px*j{k)z(Vo<;{Am%)e>p6hv>)-xi7yNyTu2Af*jh|O&-wI6U(>l(4bLm*5w zwWbj(y!*{Od;o50JtI1UZ6m_Ez9c@OWJC*ZQDI%5a~sjZEg(!bwT2NtPuWAnWK+wH zc;OKg!l|Q%5ibOrdH5(WOHeT4g|qA-3}}`hHWKE`T~fHv)O;gBI874cO`WCwNDv0u zLm1#JL13g*MZ)YM3~-jfHd2Isk{FMLlaP%RA)=Y<<))?@Nwu$d_7FxQb)*|fLMVF( zBau2%jkNk z%7u+T7P5%2Q9-z?Rfv#LK^UgCOk&)qAq?jSTw=_qA`Iu!Y~tM5KMU}#BE0)T0da18 zt*Rfx##JVaO!o!E(de?wm+I5)9QLKEL<GLO#1oE41yn?I zjpDYeKkij{6(h;5oVXc@o46!o8B>LBGI3eRFm6*7P%054BpcH!E<_?u zNHp%?r9V0mE2J4077Iv3ypUq_yBojNDMX48Z}dmzgP^8E6Jm{ihjb(O(j3 z1(h+n5M}f(Vt;-vkt3uVho3+&i98|I=nmi!xk94Rz0D%>g(TylVG|`nf^igbh%zDC z`1t7qohTI|jjpJ{BFcp*<76k5s39cgW<;WnkY=P2wS*)iiKr)}7%4>I(5j zJW*SSHlm68LaY%>%ppV?k;FVgobhm+M|=?{mr$V|o?WPA%qi3{<`rrfa|`9h{6eX5 z4WZ1qj!>WkLaMQZkY+3+q!>#H@y2pOtg)mJXDlnk z7)uM6jpc<)Mhn8U(S|T(v?7F!c7%}8k}z(xC5#!Z34P=FbT%jSjAPgm#*Eg4q0yeu zH(C@vjJy8#MOTx;hp}n?|Emad!imwW&@-A8&WuKdRrTqf1u!h^pOWgI`}q2;cfzWw Z{sEp`E%^as(r*9&002ovPDHLkV1h|7iLd|w literal 0 HcmV?d00001 diff --git a/img/brandmark.svg b/img/brandmark.svg new file mode 100644 index 00000000..1347dfac --- /dev/null +++ b/img/brandmark.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/img/lockup.png b/img/lockup.png new file mode 100644 index 0000000000000000000000000000000000000000..94e63a4860ece6006a7d81e011231bcc2f45740b GIT binary patch literal 5885 zcmZ`-c{G$!|F>p_#Gn|vnITJ-L5S>wk$tizOSU3p&8{KK41*bjvWv2B*-M1cP_pkK 
zJ0)9LL+|K4zjJ>7yw5q$z2D{Y`F!tl&%Mt*&x18K)M2FOq^F>uVAMq-P!tr@P_ms5 zpeDDA+4yF1mtd@Ks-@$lPd5L1{rxQSf9C(}$(aAb{%icNmdq!oM0xq;g$2gTBzHeZ zo2bC5;;yCoi`eN3#<~gj=1IJJChl%3gph#**1t$Zmct_Ysrl)5O`HU-7ypSlQj z)4;5s*&!iZC6~IxCTs48&g{*8>3jU5L3i5V&#CHc&_)dp3?^V7qwCoEH%poE!iB>K z3d)zBd*ghuo%36BN{ORR!Sct;1K;h;i-cH<#2~SqlIAKxDODDTi!3Qvd^|?oK{_ZB z!vPawMa7dAKkXcDpFF^4=v%TQRCtY!&rKu+=xLWzhsHlpYV0|q*#{JKcyct!4}XrP zfbd<4?Yv5c_$&!Hknxp(bM19A7~}%B$M5pfb3(lX2#(Z$MZldqw9Mx=1v$Mwdl}Ki z51(mdBa6QZNuwuM@@qpu$TFK|Ze*F^Fj1=WSW=wEIW8A+E_3ke!M~IU<$22J7C8k@ zxbjvW54=q&2=O_&Z<&f2-E)DY9o0^=6_k3+!FyptA;g4`%V`VWX zfeI=ofvkAbki2DMGg+JZNh$43wbEwNdX>@Vxn2$FXq;S9+XVvb(K+LY|i3@ZrO+;Z0F4Fso0{NV!_WZ z8f$iwcyt>jcSGOGQ^E)S$zxpkr*otvN-^&pw~g|f`8c)OUGSu2+&DRU6$RPd1ME2?k#aPUKfAWk z-J~!imb6Iiu*5euB}&l~<=}Mp-FqD2Eq-kHU>X$P*l--{qzy;S@)0?j>0CiaO9Bup z>o84eL6FE+k^}B*L>=a&J#FlW?p^(;4D8_x!orzwH|bCMF>;hOg0ypJaHy4wHQ>%g^V zn}6RQSM|YR#>^k#DbnWzGuqhy#T$QPLR~=fNy)!~y40`M?R`)amj}!;W~X=U8JB&uhd$ z>5Iy$xtqEAMMk#zSJS!sR}79}r1hvDT!7!^U164Bdcv>?78nQY;AV>8sH{{WM> z6VVn(Yg95XQ_Vm1wRt(XPbsx%O6MmVqdx9S?u%rRWydhL#$76`MK6QfYPwH?f}O?6 zY9qvP*=%uk(flUoEeP|@U2s?pc_t%hnoe?*WbEBVPE0Pi6uKSq_0iN~u6e@~?50&G zXQ%2+Bn;>*e z(Y6ja4O9udE1PyfBcmt|EMQpijYUtej?Lf+3T~?!$6+iYBk}k>Q9CQ=t(#2=7<8Pk zyn#Rx`6HPU2Ivt(BCrvZWHG5)wKoq(BWg6e7-5w>^@u@N>$yXsMmSLKF>{OC=PB{( zR4`)yaMdUA| z_se1&5!2l_kPaM^i+i_GV()bXDsu#gY6T87RyQLinXcFb+!;{t726C01sx1LTWcH@ zgnj$o%*B}Zk>U8|#coXZfWwmw=Mt`(u`!K%59$lBV^Y%bf3adMnH#eOD<@Ghb4z0po58&Hei~TAg*2b5s%KYnKxRNxU z&Y)eCQ+(kE)DZYoLUAccni(2yla!fKHd8PQdaU3IXX$fpXniqy{Y}Ud+e*yV6Gr{1JIUJM>Y8ja}G;TgPjs{t+IDf}^sn9*5YMJLMHojhEU~ zl$FEU0{2n4tap*QxjFAIhZJyz7XFFI8>2cCp2XqRHB=J9S-tK6=EL({DgK;}iPH>C zznls2P40LD8A549tPF%#Y~PW%H_L+i{9@ee^#ukcnGy87i|9JDCL5?F&?830Gd2$F z@6^*D!Hdv#NM!^HQw|JADl%^HF?9k;m%z6srUzdg2PA>s#pqPhe*+opw19$}JNT~R z?*rlZLtME@l`^e1y!RP9^aHm^Ky~V=U?Z*UTHdpacqBW{iM-Y*9`K=az=R^Tw6!fj6SFl1P+qi(v3%I_#3KH0UfwmIhk=)Ulz;_|5K{ z7*1e4>8JnP2wlhi&jaCaTVKCRJxEd+5QYjI&?z*)KOK-#f2%OQz~~zR%NER8dH8n& 
zH=+pFkgz$e!)1r)iyr`W5ly46B(zcR+Ey1=;BN@un%*g#Vgh>CMJy&b3 zN8hLB-D=9U<5m|&Fi;;vC?RfAbxuZE1nk;Xm16sUP=WrEJVl}Fc(iTD4}^LGmhmx# zwnjJO^XsbIPEm1V}Ns(W7D?c#6Sq??=UyAaNcZ_J2EB!3-7|70HC}u^R zC^aO5Bk(6;#({}ylJoU^&PK8qp?Pyx5+goDbp4=HOUs~9m+UE`9R~S|rNVx4_<@{Y ze!5X(VrJz^YVW^!%-=XY9R)&Er66FO8Zw+Ib=Dxy7TaPcqW1uxjb4{nHi!W_VSjv6 zpI&Nl!V8?}G{OX5)7J0{KsBz}CJ(uz^jR`ksHXFjloPz3kWyQvwGIS0V)0W3bSW!X8rxu7P0Ua=y$zqK}5EJ=1J-ID!(s?VC&g9^rLW7)^Sex~(pxm*r!MK3D?iSdC2 zSK|qfE#3Yq8HZX>(vWPaGCm$p15#8Nr<1oCp@iHGKAZXL|9`4R3W0?x8eqVgBJCYN>}wvYuQac^dpJni*y5W%6_ zu+N{x3CDq$Eje2XL&rPjmMo`^NR8!3fW0B>OHjTqnfsEff5*P)pr0G;dzRXasdJe4 z|Ey5cT2#1fg$mDM1E|TR!}?A}`)spUjny6u-a4|)w>%5;#Qt>jC<)mZ*%Joi>M&gu z4_)E6=e8d=P>eGT7adTsNH{3fcze*vf= z-eLjz;x2n^p1C5A!iPq!6_7eO%)L2TC@Ma|0YN{(3T-<}0|HN*G^C-rm}0|nO~?59 zKe5g*zgs?%FzXo55k%KqRR%hVALzF%l1`4o>`F=072fWhG%>;6^nPHZahPO7w{8k#FNF};B$^7Gs9;6~{BeO*W zR|wItX=Sr}NT?5J^xZG4O*_yj;LzQhG;~@T^)ZTCiq?ch6PyqQkG3BY1H&e!9!WlI z6Pe^s(Vx-Ock03l0$JRiDnBP93InWR@H62@FF^L`^Gw41fbeA#<6CkC)Meno)#-XU z7JdS+Ce8)MLm0Un(oI`%6DpAItn;XyU2tVg%61)xyySr@w>Oi4j}PKQ@i}W)XBaie z?KfNL8gzcxH~qS7X4*9AV5y`R{Qy9Z++Ot813 zi|Zesxp-K3wLZV}GG`{T2_|C?d37fSw3p3Pu)slC#@T_ZNKxX`7}AJrty6LSdXO0l z@HP*}F143+eeFa;hfi>w{{t6P z5**;LDiE3YZA{%efOn22yR9k0W^>TG1RODK9m-DE8H8qcfbEKJ;oJxEqlvO`^X1cI zdRP9VICNldle`Km=;wMzrv2ip7IkJde_A66=zy1_Y61OCOJfXy)c8)O)|)k}X6M7Q zrTT4M+eyxKe)UdLNyKaEajt!GHOvy_*VC>Is>gA|EVX}SK8$%ir-ha``R^d z1C81`qKTl1M>WnsC6J`Ky^j+kY!H9*=BqZ<~1L=`CLP05)jf<>@FGGYY{w>Y#T zooY!fGJ9t|^*4@&&*)cf48-LY`KaESfLU1+aP5lu7AR&2qFi(+H7>I7)RzT%geiVx zw-m0ykLJ9l4)Uy|*QVmtAX%V;Ke=DIh>8Qb|Ayl?V8IZudRe;Z{iZI@knnP^JDoS9 zW$z@K`91?6Rd|xYJvxZ6#TK!N8RoqvdN#q$YEvBPWTDYlNWq#nEsD_+2THYONhW5! 
zzM2F2iao3dEDZ|gINnRanCvUk+%n_gTde@<{sTdXNqp3yqxQBB`(T0E# zz{Ual!mCbf0CV2%B~{}UOSrZdFyYj*Fibo%7M2E5pb4C7<2S5lt|_o|Im@aHBfj8r z$Vh$YPYB%_WSH}EFdziaFjc}9+^{Jb?aqoQ3hypc-xVOtjE z>p@<{2upt6r5P-6Ij-5u*S-3iVQ$KSc#Y$k6C!uIJjFfnCW8V@(Y&6){Q%zuA7X@5 zn;@FzX-c`Qa|iyHliXBqG`J?Vmeapm_PWmv*osd>uUgnLSSkp&#<0MNvn zVgvD;qpuZPNXPa=^GOYO>$qOcj~k)2!q8)1rTN>6y)*Afx0%0szH1R10t*F`KQzcR zC+qGk00j{dTuoCUPP~E0S6l-|w{;r{ewJuXbw)7gS~R4&0E}DPH$CK}yU2-LI00h!c{`^AS&VYw8im>@`aY}( zs2S8KxL+uP_*N*;I}W~c9cJsJZkU=VbQgBllRf`?CWD#93&NoE#Dw{C5q|75X6h%EgyKn5I&(FQj7nyMM?18v1z+ zy{xqYNghiAUc5o67IfG&!X|UX&zrp?EDh~8-n2oFTaC@>hDYiB)jXHl@w_@wjzRNN{{fdH!r(eoXdhyy@v?Yu;%G% zQc2-t((8%aN|oe{B)oBC%(mHqrhuDUOilb*7G0I^xc5kS$?5JsA6MI5mti}d2#Lku zu6$bx-l%%GT=n{4pyh&jq@MrxYEYlhaJ7|XzSF^C#R{fKNH1@qDXW;Ob>(qv-%t?i zZa|zKrK`;I2$t4o34i+OCA~QMCy(x@1ee+FXJVE4QHGxkhO<0%X5}(RxbA;<35mtuu*h$$@5?$P+VSb8!~(I%XVpNaPi)1L|L6t? zekx0K$)`lV0anIA8hMLQsN!k^FBlMN^>icG2`usB)B+M_tV?RCZVutUDSGS~j z|L~9T;U|N_7ZZ$uii(WsOw!*yL~JAA213zYgTmQ)<0GS&lEgqclxKf z$T=|FEJ)&R$@pYILEw4S+&lU;l4->s`?#6db8?dbcj?;M0y_6{Ex>bLZzo7UzS+ZtaZsFlsZ1O*1Zbk$mOx%2H zNX-G05zH2}82B|?RaxvJR>0xA*;&H1`G@dcQJQnlVokzq^?R&56nw7PXxqqi|CbBe zF_vEUe7v*Qe=ez{gcCK%GWz$u$swU(Em?BC9zRxEGF|${zbO;q2lR|f+A)Gsfb?oU zg#Q;muP7~`PJY;wo?+VLkB>es?hu*b&qHnaZ*cQU{(VT+HI_e5W3QmellMII#(5Fe xBD=p*2z?=|C#OLQ+~I$co+YnmKak}-<&zk+hqUqg?VkUz*3~jZRBG5q{s)@#&7A-M literal 0 HcmV?d00001 diff --git a/img/lockup.svg b/img/lockup.svg new file mode 100644 index 00000000..34297ecf --- /dev/null +++ b/img/lockup.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/img/sign.png b/img/sign.png new file mode 100644 index 0000000000000000000000000000000000000000..772396cb8c19e245e609a22e8d2c8ce127adb258 GIT binary patch literal 7854 zcma)h2Uk-|)ONV2NVgzJ6GVs@AV?9UNE4;^1PE1u(2Is9UEqROKq(=HDk4ai5^4xd zrGx}Tnh>f;LJKuC>0kWb_XoUZt#i)o_UvcRtXXT;?ANBoI_xZbEC2w2T~Alr8~|V> z(8uE!8R_41IxDR7U!$oZjAu zH`?#gzQ_&~-4WuwV5TOjD1K8K$UIgq?Pz#4FG_e8BjpF5t$EW_Z&Gy{TPO4^6hZcN*X+#^X%p 
z2ZY>JL-h}rg+n78MdP+RtGz``iG1@v#(P!KsB)g8LD$0>X00%O- zQi2WK9o7}8#R|U{`;jls<|Zct9tph%gaX&!aS`G9;r5YtD+w+xA^@y8idbfK*lZ@L z?#@L>bS1F^@F`{2TEs1i65yk+BVas)tFoTw0r*tUbPsbLB03Q7iai`K6pS_#2Vh)_ z2~(K8<2A0fgF3s^D#J!lIxC$*h=qIt1+msv+;do;^|;2pu809ZlJ%{7r!MF<&Qq*W z^?(Hqh-8D**i}lH2Zh_SU5IUtY}LwV>q!36botEeXLY%fnWz3p9k`YO0JCU_FNU+s zmLHg*i#Mqux8lzMQtpAkwE~Kf1$;ykc@L+Z5~LIBY|YV^GAwN2<=DC#N- zYN0gk6TaIsIy$<@gVzC^8+k)f8Qp+_l}#h7bSI-uRw$#h&OFuU2?OuLZ?KpKf zRq?SnctTqoi0Xg?w7sb|0oXIz2aXQk&g9O;Cp7Ay`;Lm;1&yzTmIqlDA-N0S8%FU& z0N&abIwQ(6%SoOlZNz=rd|U$uJ;));5MvuRhAdb_)FwAYWlCMfJ-4}z@>@pWo|fRFPja` zP@VRMa0NLh3s69q!Ce1>XaPY6fAemV^D$F&SAYRG=v4+AnzuUkB3_Gf7deOd9vU6d zs=)zzmBof;p8v;oCt4B`lzF`6c)IcI0DphWlOoKYR=z5GFiI?M|6^i~v%92FnpR_` z@}CAg#R2ZNc3AzP6<&~82QJSoHd!4ibZSrr0zk1NAQhv?uyOZ=a>}iz?{-~kQFI{q#6iAGeHVk-0x`COjk z9Z#HjQ}xNTAxMaFV(!HfHt|x( zUdHiW!X|FF;~aRv2x;N%z)DY$O1ObMYaaSD5b7R|Zp`0g6H*~WdbK-+dkr2 zena2hXkn(qM-$YLz4sw+9{wx=n}nbWnEbTh8vj*cfW$#wzrdKvsVl z(WhY$x|enQPVr!cfML_rQ$=TMW=}4tJN-}8XUrJcN&zXT>3G>5i2S%9x-Nm(1VS$S ziwA`nDfHxP-!dPx^Zi*>EYGL%KZYhyV$h3PXWRH+fBG+irAu-BQI9$Pvb_X_xA(5n zd2JJA?0{XH4%x8FiZ8Y$QHn>sMrMYuhCf;xb-T61F5sYMY_AeOj<0)1{F$F_DB-X% znou>}IrrHDNBW#VdDbMEur?t$DCg*x=px8yC8-{kMpHhTMC$>7t)B)uV5ty{B2 z65C&o_#<}KflDYwTmIT_iMa!=3*dpE9UbY4+a&MhDx0-6x4>%0W%clywZIP9kA#@r zN-6q^RbDQ=4px-I%A-|{95=_GNNRkDyARU(hXcF6NOaH3OljdNVMWAHF3E( zhZp*CWv1^Tq~W#dDs;ge#A+Cn^l-GvXpICn=RlojEPcuvqc~jV9cC`k0De#>DnpeP z*!a37d6jyzK_vBSdSSoZ{R?MxIMqcH7vPt@Q1dE}Qo^31wVq>*b`69(W0mh!S&j5V z)1fM2e@R%X1Q=@^MYTI1ql}B{ls5Mn=)rjn|YQLKcEe?y|vmB zIF+Ai^>Jgwnr6J$CIAjCaqcmN)lpY;N_*QJR!{AK^#lvQd! 
zJnW**QdyeLyjz>R`GZBo{;P6SNF~Va|G5|^WI@%0Pfq_%5?A~JKmFduAqpC%O+C=> z>7WSit6AR|>TkWR><8Yj#QY%76UL<5+aC16N1)(=ohs!{!L3hA-{7|L7fYKDLB_8k zt_fHt%#GyztG6nLES;X+kScGYZgp;zS+9?52y7&I%AF0}ol!}&c(FgF%7nH~BoG&l zUY58dSs67RWr_{5fH!t;-{#5|*FOW8_wMZ9xPcz{sK4iNWxgfuaz?C`x}#;Kt{PoG!#FG` z=Ud0Juj#b{=gh-UJ{f$>$l#rl(&ztj=MV2<)EeK1P+sTIlGX4N2t^8>#Kj zqbff5-G0by1fRVQt{G`~`R@WIH5lW{jU`U6yAS0N+%gOsU3{u&UnuI@f-{l2^(Iv@ zshqh+AGkos5tx;5fxY?y>+7<<;CA|{xa!jLQ7)Zy>%&ml^dFGD&b1$kDfQ=Oo1CkA z2QYiaSLblkgN2Hy0ht4nX>tQ7cu}_ZpZSAZobTjb$VC(Cx;=&d$%lI1@`1o-IE-%2 zufYg}sH=V@xjHOKnDbt*S>8$oav61nf~7%NGxKmGO5VM9S&Y+PCX` ze^f%ZXp@57p`D3wEPUB z4vGr!{kRef``LEVOPJ1jP=4*tmXUtZJz|63UKR%wsONv7+H~1|pv?PsYk%-qrmm;` zD1VyubO-_+;l-EyiAiPqyLIAd{P=MINN68SkAMF!BB_ zH>_M*XAqH=Y&B%JA_b*TZ=_^J``>R0`EYvFdo8?vymjY^+sx}KUC=Z8y)bx*Y~z?A z6i_3`xYbJMqs%7o2;{>juh>++_-NGr)A33R1Qh$!V%Tww+N{9njO)b zf zj4Nl&w#zTLLuLR~*)4P6lkbMEd{HZy&6v*rc*3cgu{x0n`)6y!oXm3cdcJ%$$tVAQ z_ULnnFSbK7+eHAIdPB0(2)`^lQ(pC)@)Nx66<%)`CrhfGsuVKcs&m(WTNCWoX+_$_ zzY@rq;g=@JPhO7z(l+4FxFk1r;=$=ukpfZ?W z2flR}&Q&ty#ulW4mg4WIH+-lFL;?#(FVHU8ge44@t@8Ke{C~eph6|?Hu}m``yN1s= z68QXdMnz!P_}hO6`?LI6Y|4aQ@^y zT{H*{8c?~|Bndfv4dnNh_b2a7MD@ zLfj#C2TT_)D_#C2%TrCVp?o(CT4i4E92(B8C&-P}C?!j%Fe*kQS9@1MOC*Cy*fZND zC{7ORqx6(F%2;5H;&A;t=@GXhWzPZ-3t1PvklnIrTYMyWI4h^Wgux>4yr5urL`Rzk zml{Ztdfl6*T4aWWX*W0?mbDUm+#aVkK53Nj(0^TxEWpF5cOm>J6?s@&e-VdJ3$G)b zRMHeuatcZ)z=XS|CkmtD%3os#^k@DERzamjyH5%KpdCXL^f{Xy(70*+nT1c2Yq?CT?KHSI@j@f1MmN!xpgeJ$F@jWu$=uW#(q?Cdln ztzmBP(P$_i3rmLSELE0iwsvN#sCT0aOHM7vGQZm#NUEHzk!udmhl3Kfp+E*DpQGfj z5MXJ*S9P7_77AgB65EouEl79cV`=V9(yW2}%&BSBSd`F-7A(Oi z9K7+;?cXV_T!498^Yzf1@urX(Tkn}c_nN~GtK$gM*@k%+=Zb_`XM>B{1QdZxTRc~! 
zWU787hRnl8jTSHuvIsVwA?VxRZOlIq4y6>~+5AY)jM^cqyBo9@gSOFvR`6uvuFf22 z{|TlfQU8^KVP=|Ld5Khg(!Gi+r4`VT-B*vIRHlCPcvp-uHhKEUqjpDTGK`bZ>jKPB zn60>LSvxi3PEsGNzE-kd@oSOfq|f=iOl?cjGGadV3F_!B7OHdMf)V3OSJEN3gd18kCM^-?bN5y~bOJ`3!40iq^8kd&|8>nY5^ReHQR1>-#0*v9Mx z)PgNfnwSHF{QEGGc5gpph?MTUhtTzJE?a7*Tv>UBMZBEdPAZnK-ZZX~*-w@03NzM6 z?kG^R?`%&zT?m$He07a!Y)XuDkhuj>u)vMyI)KSHNZn;IP=zr+91N{6uxG3GD_N!d za<#gBE=TqT-{@xtySJ4X@9)kZ2(1~0w)K?{x|!=#(wDVrB#+$kV!9mNs)QQE6AFAE zSMu6>fd4Q_eI(d<7I!hU?mp#R;j|>lGJJnNHeQR6b3((v)~mDFdTDlA!N`aV!bygq z{pEd?p_Q9FUwM@M%n^bK%T66EEEcE5k0DCJ^)19X5Yxa*=K(@Mfc4TB$~lmM9fvtq z283c&rq?&9b0a>iN&)48weKEdGM=} zDP*sFj)`6Sp0UA2$G2k$!K1@yZ@ahvtfuV?PY8;V*jT+W&J7~6LKa~ZaIjiSjyAlV z$N<`R!MM6~SPMrmJxjJRC62DVX^roHrQ1so^!gBDhx+tMbhgqjAETi)*y8GYF-FB| zKQ-3_veztWg_TBn4D!l*z2R=2njG67=MwR&5JSWFkcB@z^JK5U;3Py{UqxOq8lljLT6jHH*Co%?sf>d;qD`p`gYljDM z+d2)FSdL23kdGiZga6=Tuf)E@Hf`fF8VS7bPIEVWx60Cb67WTu`qck{CjwjUN%I=q z=UiR7!yTg(FpqK=&#k0tq>CaZy%)Iz1DyN{gRK9g@duqJJSL`h?Y)Rum_6AhC-ptZ zDXU_BThIy0TSnvob)4HK%hp%mHTI3!CklD#TS>zo$16W2>(65pkr8S24vg0~EI}^* zW?Yl@XH-|8pDXKziP`q^Z_^s}@F5%_wOL7FZT)&sH6rDV75OEZ@%0*)Bq`BmLp{e`gWn?ogd?ySBiH;)9+Qs=w^{Q>zVsKODSnNki8~0q^9I* zHk3@F+#?vli~n-_HUwGsh-)>*8^8xn=?d)_N4XaCBR`yo9P0C5ylbvp1YFuv8-xYv z_uq*bY#cAMq7qT75ZAwqWNW*X^M;*<7R@sSQ}YN(e<6ye5WlL;7fzXeyhtx5#M4y( zX{E9XEE|$x6ZwhINvHMlr7F~y&W9l-rIc`+UN=Al0mUMCfsDepY*CzGbcYMz9Jw4p zp{LY{F@f(lOrFpVw->9l_@8Z0`k5{Z*9v$B#9O z;sBfe1r1qKg6m(_zRcZ@C4P&VNixpMuVL%p>}UXo%nAs!2~4RUFGgh25VXLz*;@8FEKS!=YL z+_&5fWrsK)SY#faPoQ@V&O!1kY`*0|EJE(Suo|(Ka zYFT|tUq}=?AQIt9d~ASS-3dMY0D+wkFLlCB46ZM@wZi~pgZj9NmAp2~e20XdH%_pw z6f&+<5}>XEIxR>bLgHxi`f#XZyM%fvVZCL)n(-?w!Y{0h zZPlM(M6TpW6 literal 0 HcmV?d00001 diff --git a/img/sign.svg b/img/sign.svg new file mode 100644 index 00000000..df31316e --- /dev/null +++ b/img/sign.svg @@ -0,0 +1 @@ + \ No newline at end of file From 5b0ce54229147330fbd33692b018a63866852aaf Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 10:44:30 +0100 Subject: [PATCH 308/730] Add logo to docs --- 
automerge-wasm/src/lib.rs | 3 +++ automerge/src/lib.rs | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index e0cc32f2..95f26e98 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -1,3 +1,6 @@ +#![doc( + html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg" +)] #![allow(clippy::unused_unit)] use am::transaction::CommitOptions; use am::transaction::Transactable; diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index d3d5a8da..ed53fe60 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -1,3 +1,7 @@ +#![doc( + html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg" +)] + #[doc(hidden)] #[macro_export] macro_rules! log { From e7a871843428525fa178db2d99c50d3c710c4589 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 10:47:21 +0100 Subject: [PATCH 309/730] Update badges --- README.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 15b274db..31d27e58 100644 --- a/README.md +++ b/README.md @@ -2,9 +2,9 @@ Automerge logo -[![main docs](https://img.shields.io/badge/docs--main-published-informational)](https://automerge.org/automerge-rs/automerge/) -[![ci](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml) [![homepage](https://img.shields.io/badge/homepage-published-informational)](https://automerge.org/) +[![main docs](https://img.shields.io/badge/docs-main-informational)](https://automerge.org/automerge-rs/automerge/) +[![ci](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml) This is a rust implementation of the [Automerge](https://github.com/automerge/automerge) file format and network protocol. 
@@ -14,10 +14,10 @@ If you are looking for the origional `automerge-rs` project that can be used as This project has 4 components: -1. *automerge* - a rust implementation of the library. This project is the most mature and being used in a handful of small applications. -2. *automerge-wasm* - a js/wasm interface to the underlying rust library. This api is generally mature and in use in a handful of projects as well. -3. *automerge-js* - this is a javascript library using the wasm interface to export the same public api of the primary automerge project. Currently this project passes all of automerge's tests but has not been used in any real project or packaged as an NPM. Alpha testers welcome. -4. *automerge-c* - this is a c library intended to be an ffi integration point for all other languages. It is currently a work in progress and not yet ready for any testing. +1. _automerge_ - a rust implementation of the library. This project is the most mature and being used in a handful of small applications. +2. _automerge-wasm_ - a js/wasm interface to the underlying rust library. This api is generally mature and in use in a handful of projects as well. +3. _automerge-js_ - this is a javascript library using the wasm interface to export the same public api of the primary automerge project. Currently this project passes all of automerge's tests but has not been used in any real project or packaged as an NPM. Alpha testers welcome. +4. _automerge-c_ - this is a c library intended to be an ffi integration point for all other languages. It is currently a work in progress and not yet ready for any testing. ## How? 
From 48e397e82f88c7b527637f6beb2476affcd3a383 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 11:05:43 +0100 Subject: [PATCH 310/730] Add lints --- automerge-wasm/src/interop.rs | 4 +- automerge-wasm/src/lib.rs | 23 ++++++++++ automerge-wasm/src/value.rs | 2 +- automerge/src/autocommit.rs | 28 ++++++------ automerge/src/lib.rs | 22 +++++++++ automerge/src/op_tree.rs | 38 +++++++-------- automerge/src/query.rs | 20 ++++---- automerge/src/query/elem_id_pos.rs | 4 +- automerge/src/query/insert.rs | 6 +-- automerge/src/query/len.rs | 4 +- automerge/src/query/len_at.rs | 4 +- automerge/src/query/list_vals.rs | 4 +- automerge/src/query/list_vals_at.rs | 4 +- automerge/src/query/nth.rs | 10 ++-- automerge/src/query/nth_at.rs | 8 ++-- automerge/src/query/opid.rs | 6 +-- automerge/src/query/prop.rs | 8 ++-- automerge/src/query/prop_at.rs | 8 ++-- automerge/src/query/seek_op.rs | 6 +-- automerge/src/query/seek_op_with_patch.rs | 12 ++--- automerge/src/transaction/inner.rs | 22 ++++----- automerge/src/types.rs | 56 +++++++++++------------ 22 files changed, 172 insertions(+), 127 deletions(-) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 6475e37d..be0927f7 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -9,8 +9,8 @@ use wasm_bindgen::JsCast; use crate::{ObjId, ScalarValue, Value}; -pub(crate) struct JS(pub JsValue); -pub(crate) struct AR(pub Array); +pub(crate) struct JS(pub(crate) JsValue); +pub(crate) struct AR(pub(crate) Array); impl From for JsValue { fn from(ar: AR) -> Self { diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 95f26e98..9eceece8 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -1,6 +1,29 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg" )] +#![warn( + missing_debug_implementations, + // missing_docs, // TODO: add documentation! 
+ rust_2021_compatibility, + rust_2018_idioms, + unreachable_pub, + bad_style, + const_err, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + private_in_public, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true +)] #![allow(clippy::unused_unit)] use am::transaction::CommitOptions; use am::transaction::Transactable; diff --git a/automerge-wasm/src/value.rs b/automerge-wasm/src/value.rs index 84e7b376..5b20cc20 100644 --- a/automerge-wasm/src/value.rs +++ b/automerge-wasm/src/value.rs @@ -8,7 +8,7 @@ use wasm_bindgen::prelude::*; pub struct ScalarValue<'a>(pub(crate) Cow<'a, am::ScalarValue>); impl<'a> From> for JsValue { - fn from(val: ScalarValue) -> Self { + fn from(val: ScalarValue<'a>) -> Self { match &*val.0 { am::ScalarValue::Bytes(v) => Uint8Array::from(v.as_slice()).into(), am::ScalarValue::Str(v) => v.to_string().into(), diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index ce811bc9..27b4e9af 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -94,7 +94,7 @@ impl AutoCommit { pub fn load_with( data: &[u8], - options: ApplyOptions, + options: ApplyOptions<'_, Obs>, ) -> Result { let doc = Automerge::load_with(data, options)?; Ok(Self { @@ -125,7 +125,7 @@ impl AutoCommit { pub fn apply_changes_with( &mut self, changes: Vec, - options: ApplyOptions, + options: ApplyOptions<'_, Obs>, ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); self.doc.apply_changes_with(changes, options) @@ -252,7 +252,7 @@ impl AutoCommit { /// i64; /// doc.commit_with::<()>(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(&mut self, options: CommitOptions) -> ChangeHash { + pub fn commit_with(&mut self, options: CommitOptions<'_, Obs>) -> ChangeHash { // ensure that even no changes 
triggers a change self.ensure_transaction_open(); let tx = self.transaction.take().unwrap(); @@ -285,15 +285,15 @@ impl Transactable for AutoCommit { // PropAt::() // NthAt::() - fn keys>(&self, obj: O) -> Keys { + fn keys>(&self, obj: O) -> Keys<'_, '_> { self.doc.keys(obj) } - fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> { self.doc.keys_at(obj, heads) } - fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range { + fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range<'_, R> { self.doc.range(obj, range) } @@ -302,15 +302,15 @@ impl Transactable for AutoCommit { obj: O, range: R, heads: &[ChangeHash], - ) -> RangeAt { + ) -> RangeAt<'_, R> { self.doc.range_at(obj, range, heads) } - fn values>(&self, obj: O) -> Values { + fn values>(&self, obj: O) -> Values<'_> { self.doc.values(obj) } - fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt { + fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt<'_> { self.doc.values_at(obj, heads) } @@ -442,7 +442,7 @@ impl Transactable for AutoCommit { &self, obj: O, prop: P, - ) -> Result, AutomergeError> { + ) -> Result, ExId)>, AutomergeError> { self.doc.get(obj, prop) } @@ -451,7 +451,7 @@ impl Transactable for AutoCommit { obj: O, prop: P, heads: &[ChangeHash], - ) -> Result, AutomergeError> { + ) -> Result, ExId)>, AutomergeError> { self.doc.get_at(obj, prop, heads) } @@ -459,7 +459,7 @@ impl Transactable for AutoCommit { &self, obj: O, prop: P, - ) -> Result, AutomergeError> { + ) -> Result, ExId)>, AutomergeError> { self.doc.get_all(obj, prop) } @@ -468,7 +468,7 @@ impl Transactable for AutoCommit { obj: O, prop: P, heads: &[ChangeHash], - ) -> Result, AutomergeError> { + ) -> Result, ExId)>, AutomergeError> { self.doc.get_all_at(obj, prop, heads) } @@ -476,7 +476,7 @@ impl Transactable for AutoCommit { self.doc.parent_object(obj) } - fn parents(&self, obj: ExId) -> Parents { + fn parents(&self, obj: ExId) -> 
Parents<'_> { self.doc.parents(obj) } } diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index ed53fe60..b2d84d50 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -1,6 +1,28 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg" )] +#![warn( + missing_debug_implementations, + // missing_docs, // TODO: add documentation! + rust_2018_idioms, + unreachable_pub, + bad_style, + const_err, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + private_in_public, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true +)] #[doc(hidden)] #[macro_export] diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 9f9a50c5..a685de88 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -20,14 +20,14 @@ pub(crate) const B: usize = 16; #[derive(Debug, Clone, PartialEq)] pub(crate) struct OpTree { - pub internal: OpTreeInternal, - pub objtype: ObjType, + pub(crate) internal: OpTreeInternal, + pub(crate) objtype: ObjType, /// The id of the parent object, root has no parent. - pub parent: Option, + pub(crate) parent: Option, } impl OpTree { - pub fn new() -> Self { + pub(crate) fn new() -> Self { Self { internal: Default::default(), objtype: ObjType::Map, @@ -43,34 +43,34 @@ pub(crate) struct OpTreeInternal { #[derive(Clone, Debug)] pub(crate) struct OpTreeNode { - pub(crate) elements: Vec, pub(crate) children: Vec, - pub index: Index, + pub(crate) elements: Vec, + pub(crate) index: Index, length: usize, } impl OpTreeInternal { /// Construct a new, empty, sequence. - pub fn new() -> Self { + pub(crate) fn new() -> Self { Self { root_node: None } } /// Get the length of the sequence. 
- pub fn len(&self) -> usize { + pub(crate) fn len(&self) -> usize { self.root_node.as_ref().map_or(0, |n| n.len()) } - pub fn keys(&self) -> Option { + pub(crate) fn keys(&self) -> Option> { self.root_node.as_ref().map(query::Keys::new) } - pub fn keys_at(&self, clock: Clock) -> Option { + pub fn keys_at(&self, clock: Clock) -> Option> { self.root_node .as_ref() .map(|root| query::KeysAt::new(root, clock)) } - pub fn range<'a, R: RangeBounds>( + pub(crate) fn range<'a, R: RangeBounds>( &'a self, range: R, meta: &'a OpSetMetadata, @@ -80,7 +80,7 @@ impl OpTreeInternal { .map(|node| query::Range::new(range, node, meta)) } - pub fn range_at<'a, R: RangeBounds>( + pub(crate) fn range_at<'a, R: RangeBounds>( &'a self, range: R, meta: &'a OpSetMetadata, @@ -91,7 +91,7 @@ impl OpTreeInternal { .map(|node| query::RangeAt::new(range, node, meta, clock)) } - pub fn search<'a, 'b: 'a, Q>(&'b self, mut query: Q, m: &OpSetMetadata) -> Q + pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, mut query: Q, m: &OpSetMetadata) -> Q where Q: TreeQuery<'a>, { @@ -105,7 +105,7 @@ impl OpTreeInternal { } /// Create an iterator through the sequence. - pub fn iter(&self) -> Iter { + pub fn iter(&self) -> Iter<'_> { Iter { inner: self, index: 0, @@ -117,7 +117,7 @@ impl OpTreeInternal { /// # Panics /// /// Panics if `index > len`. - pub fn insert(&mut self, index: usize, element: Op) { + pub(crate) fn insert(&mut self, index: usize, element: Op) { let old_len = self.len(); if let Some(root) = self.root_node.as_mut() { #[cfg(debug_assertions)] @@ -160,12 +160,12 @@ impl OpTreeInternal { } /// Get the `element` at `index` in the sequence. 
- pub fn get(&self, index: usize) -> Option<&Op> { + pub(crate) fn get(&self, index: usize) -> Option<&Op> { self.root_node.as_ref().and_then(|n| n.get(index)) } // this replaces get_mut() because it allows the indexes to update correctly - pub fn update(&mut self, index: usize, f: F) + pub(crate) fn update(&mut self, index: usize, f: F) where F: FnMut(&mut Op), { @@ -179,7 +179,7 @@ impl OpTreeInternal { /// # Panics /// /// Panics if `index` is out of bounds. - pub fn remove(&mut self, index: usize) -> Op { + pub(crate) fn remove(&mut self, index: usize) -> Op { if let Some(root) = self.root_node.as_mut() { #[cfg(debug_assertions)] let len = root.check(); @@ -212,7 +212,7 @@ impl OpTreeNode { } } - pub fn search<'a, 'b: 'a, Q>(&'b self, query: &mut Q, m: &OpSetMetadata) -> bool + pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, query: &mut Q, m: &OpSetMetadata) -> bool where Q: TreeQuery<'a>, { diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 7ee0f86b..06225885 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -83,13 +83,13 @@ pub(crate) enum QueryResult { #[derive(Clone, Debug, PartialEq)] pub(crate) struct Index { /// The map of visible elements to the number of operations targetting them. - pub visible: HashMap, + pub(crate) visible: HashMap, /// Set of opids found in this node and below. - pub ops: HashSet, + pub(crate) ops: HashSet, } impl Index { - pub fn new() -> Self { + pub(crate) fn new() -> Self { Index { visible: Default::default(), ops: Default::default(), @@ -97,11 +97,11 @@ impl Index { } /// Get the number of visible elements in this index. 
- pub fn visible_len(&self) -> usize { + pub(crate) fn visible_len(&self) -> usize { self.visible.len() } - pub fn has_visible(&self, e: &Option) -> bool { + pub(crate) fn has_visible(&self, e: &Option) -> bool { if let Some(seen) = e { self.visible.contains_key(seen) } else { @@ -109,7 +109,7 @@ impl Index { } } - pub fn replace(&mut self, old: &Op, new: &Op) { + pub(crate) fn replace(&mut self, old: &Op, new: &Op) { if old.id != new.id { self.ops.remove(&old.id); self.ops.insert(new.id); @@ -132,7 +132,7 @@ impl Index { } } - pub fn insert(&mut self, op: &Op) { + pub(crate) fn insert(&mut self, op: &Op) { self.ops.insert(op.id); if op.visible() { if let Some(elem) = op.elemid() { @@ -141,7 +141,7 @@ impl Index { } } - pub fn remove(&mut self, op: &Op) { + pub(crate) fn remove(&mut self, op: &Op) { self.ops.remove(&op.id); if op.visible() { if let Some(elem) = op.elemid() { @@ -158,7 +158,7 @@ impl Index { } } - pub fn merge(&mut self, other: &Index) { + pub(crate) fn merge(&mut self, other: &Index) { for id in &other.ops { self.ops.insert(*id); } @@ -223,7 +223,7 @@ impl VisWindow { visible } - pub fn seen_op(&self, op: &Op, pos: usize) -> Vec<(usize, Op)> { + pub(crate) fn seen_op(&self, op: &Op, pos: usize) -> Vec<(usize, Op)> { let mut result = vec![]; for pred in &op.pred { if let Some(entry) = self.counters.get(pred) { diff --git a/automerge/src/query/elem_id_pos.rs b/automerge/src/query/elem_id_pos.rs index bce1584a..214a197a 100644 --- a/automerge/src/query/elem_id_pos.rs +++ b/automerge/src/query/elem_id_pos.rs @@ -10,7 +10,7 @@ pub(crate) struct ElemIdPos { } impl ElemIdPos { - pub fn new(elemid: ElemId) -> Self { + pub(crate) fn new(elemid: ElemId) -> Self { Self { elemid, pos: 0, @@ -18,7 +18,7 @@ impl ElemIdPos { } } - pub fn index(&self) -> Option { + pub(crate) fn index(&self) -> Option { if self.found { Some(self.pos) } else { diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index afb773db..6f69474c 100644 --- 
a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -22,7 +22,7 @@ pub(crate) struct InsertNth { } impl InsertNth { - pub fn new(target: usize) -> Self { + pub(crate) fn new(target: usize) -> Self { let (valid, last_valid_insert) = if target == 0 { (Some(0), Some(HEAD)) } else { @@ -39,11 +39,11 @@ impl InsertNth { } } - pub fn pos(&self) -> usize { + pub(crate) fn pos(&self) -> usize { self.valid.unwrap_or(self.n) } - pub fn key(&self) -> Result { + pub(crate) fn key(&self) -> Result { Ok(self .last_valid_insert .ok_or(AutomergeError::InvalidIndex(self.target))? diff --git a/automerge/src/query/len.rs b/automerge/src/query/len.rs index ea678414..697d0430 100644 --- a/automerge/src/query/len.rs +++ b/automerge/src/query/len.rs @@ -4,11 +4,11 @@ use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct Len { - pub len: usize, + pub(crate) len: usize, } impl Len { - pub fn new() -> Self { + pub(crate) fn new() -> Self { Len { len: 0 } } } diff --git a/automerge/src/query/len_at.rs b/automerge/src/query/len_at.rs index 25ed342a..46744c84 100644 --- a/automerge/src/query/len_at.rs +++ b/automerge/src/query/len_at.rs @@ -4,7 +4,7 @@ use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct LenAt { - pub len: usize, + pub(crate) len: usize, clock: Clock, pos: usize, last: Option, @@ -12,7 +12,7 @@ pub(crate) struct LenAt { } impl LenAt { - pub fn new(clock: Clock) -> Self { + pub(crate) fn new(clock: Clock) -> Self { LenAt { clock, pos: 0, diff --git a/automerge/src/query/list_vals.rs b/automerge/src/query/list_vals.rs index a1a5d738..4ad2f47b 100644 --- a/automerge/src/query/list_vals.rs +++ b/automerge/src/query/list_vals.rs @@ -6,11 +6,11 @@ use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct ListVals { last_elem: Option, - pub ops: Vec, + pub(crate) ops: Vec, } impl ListVals { - pub fn new() -> Self { + pub(crate) fn new() -> Self { ListVals { last_elem: None, ops: vec![], diff --git 
a/automerge/src/query/list_vals_at.rs b/automerge/src/query/list_vals_at.rs index 185194fc..57c7596b 100644 --- a/automerge/src/query/list_vals_at.rs +++ b/automerge/src/query/list_vals_at.rs @@ -6,13 +6,13 @@ use std::fmt::Debug; pub(crate) struct ListValsAt { clock: Clock, last_elem: Option, - pub ops: Vec, + pub(crate) ops: Vec, window: VisWindow, pos: usize, } impl ListValsAt { - pub fn new(clock: Clock) -> Self { + pub(crate) fn new(clock: Clock) -> Self { ListValsAt { clock, last_elem: None, diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index 25eaf7b9..3924fc62 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -11,13 +11,13 @@ pub(crate) struct Nth<'a> { /// last_seen is the target elemid of the last `seen` operation. /// It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes. last_seen: Option, - pub ops: Vec<&'a Op>, - pub ops_pos: Vec, - pub pos: usize, + pub(crate) ops: Vec<&'a Op>, + pub(crate) ops_pos: Vec, + pub(crate) pos: usize, } impl<'a> Nth<'a> { - pub fn new(target: usize) -> Self { + pub(crate) fn new(target: usize) -> Self { Nth { target, seen: 0, @@ -29,7 +29,7 @@ impl<'a> Nth<'a> { } /// Get the key - pub fn key(&self) -> Result { + pub(crate) fn key(&self) -> Result { // the query collects the ops so we can use that to get the key they all use if let Some(e) = self.ops.first().and_then(|op| op.elemid()) { Ok(Key::Seq(e)) diff --git a/automerge/src/query/nth_at.rs b/automerge/src/query/nth_at.rs index cb7db8d5..10851e7c 100644 --- a/automerge/src/query/nth_at.rs +++ b/automerge/src/query/nth_at.rs @@ -9,13 +9,13 @@ pub(crate) struct NthAt { seen: usize, last_seen: Option, window: VisWindow, - pub ops: Vec, - pub ops_pos: Vec, - pub pos: usize, + pub(crate) ops: Vec, + pub(crate) ops_pos: Vec, + pub(crate) pos: usize, } impl NthAt { - pub fn new(target: usize, clock: Clock) -> Self { + pub(crate) fn new(target: usize, clock: Clock) -> 
Self { NthAt { clock, target, diff --git a/automerge/src/query/opid.rs b/automerge/src/query/opid.rs index 873f854e..6c29dcf6 100644 --- a/automerge/src/query/opid.rs +++ b/automerge/src/query/opid.rs @@ -13,7 +13,7 @@ pub(crate) struct OpIdSearch { } impl OpIdSearch { - pub fn new(target: OpId) -> Self { + pub(crate) fn new(target: OpId) -> Self { OpIdSearch { target, pos: 0, @@ -23,7 +23,7 @@ impl OpIdSearch { } /// Get the index of the operation, if found. - pub fn index(&self) -> Option { + pub(crate) fn index(&self) -> Option { if self.found { Some(self.pos) } else { @@ -31,7 +31,7 @@ impl OpIdSearch { } } - pub fn key(&self) -> &Option { + pub(crate) fn key(&self) -> &Option { &self.key } } diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs index 1c37f600..7fcb8559 100644 --- a/automerge/src/query/prop.rs +++ b/automerge/src/query/prop.rs @@ -6,13 +6,13 @@ use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct Prop<'a> { key: Key, - pub ops: Vec<&'a Op>, - pub ops_pos: Vec, - pub pos: usize, + pub(crate) ops: Vec<&'a Op>, + pub(crate) ops_pos: Vec, + pub(crate) pos: usize, } impl<'a> Prop<'a> { - pub fn new(prop: usize) -> Self { + pub(crate) fn new(prop: usize) -> Self { Prop { key: Key::Map(prop), ops: vec![], diff --git a/automerge/src/query/prop_at.rs b/automerge/src/query/prop_at.rs index aeec0bf2..08b1cb59 100644 --- a/automerge/src/query/prop_at.rs +++ b/automerge/src/query/prop_at.rs @@ -7,13 +7,13 @@ use std::fmt::Debug; pub(crate) struct PropAt { clock: Clock, key: Key, - pub ops: Vec, - pub ops_pos: Vec, - pub pos: usize, + pub(crate) ops: Vec, + pub(crate) ops_pos: Vec, + pub(crate) pos: usize, } impl PropAt { - pub fn new(prop: usize, clock: Clock) -> Self { + pub(crate) fn new(prop: usize, clock: Clock) -> Self { PropAt { clock, key: Key::Map(prop), diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs index 0fe5c50e..9f271c26 100644 --- a/automerge/src/query/seek_op.rs +++ 
b/automerge/src/query/seek_op.rs @@ -9,15 +9,15 @@ pub(crate) struct SeekOp<'a> { /// the op we are looking for op: &'a Op, /// The position to insert at - pub pos: usize, + pub(crate) pos: usize, /// The indices of ops that this op overwrites - pub succ: Vec, + pub(crate) succ: Vec, /// whether a position has been found found: bool, } impl<'a> SeekOp<'a> { - pub fn new(op: &'a Op) -> Self { + pub(crate) fn new(op: &'a Op) -> Self { SeekOp { op, succ: vec![], diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index fab58a0d..c0431de8 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -7,17 +7,17 @@ use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct SeekOpWithPatch<'a> { op: Op, - pub pos: usize, - pub succ: Vec, + pub(crate) pos: usize, + pub(crate) succ: Vec, found: bool, - pub seen: usize, + pub(crate) seen: usize, last_seen: Option, - pub values: Vec<&'a Op>, - pub had_value_before: bool, + pub(crate) values: Vec<&'a Op>, + pub(crate) had_value_before: bool, } impl<'a> SeekOpWithPatch<'a> { - pub fn new(op: &Op) -> Self { + pub(crate) fn new(op: &Op) -> Self { SeekOpWithPatch { op: op.clone(), succ: vec![], diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 4f291da9..ebfb20ce 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -8,7 +8,7 @@ use crate::{change::export_change, types::Op, Automerge, ChangeHash, Prop}; use crate::{AutomergeError, ObjType, OpObserver, OpType, ScalarValue}; #[derive(Debug, Clone)] -pub struct TransactionInner { +pub(crate) struct TransactionInner { pub(crate) actor: usize, pub(crate) seq: u64, pub(crate) start_op: NonZeroU64, @@ -21,13 +21,13 @@ pub struct TransactionInner { } impl TransactionInner { - pub fn pending_ops(&self) -> usize { + pub(crate) fn pending_ops(&self) -> usize { self.operations.len() } /// Commit the operations 
performed in this transaction, returning the hashes corresponding to /// the new heads. - pub fn commit( + pub(crate) fn commit( mut self, doc: &mut Automerge, message: Option, @@ -72,7 +72,7 @@ impl TransactionInner { /// Undo the operations added in this transaction, returning the number of cancelled /// operations. - pub fn rollback(self, doc: &mut Automerge) -> usize { + pub(crate) fn rollback(self, doc: &mut Automerge) -> usize { let num = self.pending_ops(); // remove in reverse order so sets are removed before makes etc... for (obj, _prop, op) in self.operations.into_iter().rev() { @@ -108,7 +108,7 @@ impl TransactionInner { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - pub fn put, V: Into>( + pub(crate) fn put, V: Into>( &mut self, doc: &mut Automerge, ex_obj: &ExId, @@ -135,7 +135,7 @@ impl TransactionInner { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - pub fn put_object>( + pub(crate) fn put_object>( &mut self, doc: &mut Automerge, ex_obj: &ExId, @@ -175,7 +175,7 @@ impl TransactionInner { self.operations.push((obj, prop, op)); } - pub fn insert>( + pub(crate) fn insert>( &mut self, doc: &mut Automerge, ex_obj: &ExId, @@ -188,7 +188,7 @@ impl TransactionInner { Ok(()) } - pub fn insert_object( + pub(crate) fn insert_object( &mut self, doc: &mut Automerge, ex_obj: &ExId, @@ -329,7 +329,7 @@ impl TransactionInner { Ok(Some(id)) } - pub fn increment>( + pub(crate) fn increment>( &mut self, doc: &mut Automerge, obj: &ExId, @@ -341,7 +341,7 @@ impl TransactionInner { Ok(()) } - pub fn delete>( + pub(crate) fn delete>( &mut self, doc: &mut Automerge, ex_obj: &ExId, @@ -355,7 +355,7 @@ impl TransactionInner { /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert /// the new elements - pub fn splice( + pub(crate) fn splice( &mut self, doc: &mut Automerge, ex_obj: &ExId, diff --git a/automerge/src/types.rs b/automerge/src/types.rs index ee5adf01..48d3574f 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -199,11 +199,11 @@ pub(crate) trait Exportable { impl OpId { #[inline] - pub fn counter(&self) -> u64 { + pub(crate) fn counter(&self) -> u64 { self.0 } #[inline] - pub fn actor(&self) -> usize { + pub(crate) fn actor(&self) -> usize { self.1 } } @@ -341,7 +341,7 @@ impl Display for Prop { } impl Key { - pub fn elemid(&self) -> Option { + pub(crate) fn elemid(&self) -> Option { match self { Key::Map(_) => None, Key::Seq(id) => Some(*id), @@ -350,28 +350,28 @@ impl Key { } #[derive(Debug, Clone, PartialOrd, Ord, Eq, PartialEq, Copy, Hash, Default)] -pub(crate) struct OpId(pub u64, pub usize); +pub(crate) struct OpId(pub(crate) u64, pub(crate) usize); #[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] -pub(crate) struct ObjId(pub OpId); +pub(crate) struct ObjId(pub(crate) OpId); impl ObjId { - pub const fn root() -> Self { + pub(crate) const fn root() -> Self { ObjId(OpId(0, 0)) } } #[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] -pub(crate) struct ElemId(pub OpId); +pub(crate) struct ElemId(pub(crate) OpId); #[derive(Debug, Clone, PartialEq)] pub(crate) struct Op { - pub id: OpId, - pub action: OpType, - pub key: Key, - pub succ: Vec, - pub pred: Vec, - pub insert: bool, + pub(crate) id: OpId, + pub(crate) action: OpType, + pub(crate) key: Key, + pub(crate) succ: Vec, + pub(crate) pred: Vec, + pub(crate) insert: bool, } impl Op { @@ -405,7 +405,7 @@ impl Op { } } - pub fn visible(&self) -> bool { + pub(crate) fn visible(&self) -> bool { if self.is_inc() { false } else if self.is_counter() { @@ -415,7 +415,7 @@ impl Op { } } - pub fn incs(&self) -> usize { + pub(crate) fn incs(&self) -> usize { if let 
OpType::Put(ScalarValue::Counter(Counter { increments, .. })) = &self.action { *increments } else { @@ -423,35 +423,35 @@ impl Op { } } - pub fn is_delete(&self) -> bool { + pub(crate) fn is_delete(&self) -> bool { matches!(&self.action, OpType::Delete) } - pub fn is_inc(&self) -> bool { + pub(crate) fn is_inc(&self) -> bool { matches!(&self.action, OpType::Increment(_)) } - pub fn is_counter(&self) -> bool { + pub(crate) fn is_counter(&self) -> bool { matches!(&self.action, OpType::Put(ScalarValue::Counter(_))) } - pub fn is_noop(&self, action: &OpType) -> bool { + pub(crate) fn is_noop(&self, action: &OpType) -> bool { matches!((&self.action, action), (OpType::Put(n), OpType::Put(m)) if n == m) } - pub fn is_list_op(&self) -> bool { + pub(crate) fn is_list_op(&self) -> bool { matches!(&self.key, Key::Seq(_)) } - pub fn overwrites(&self, other: &Op) -> bool { + pub(crate) fn overwrites(&self, other: &Op) -> bool { self.pred.iter().any(|i| i == &other.id) } - pub fn elemid(&self) -> Option { + pub(crate) fn elemid(&self) -> Option { self.elemid_or_key().elemid() } - pub fn elemid_or_key(&self) -> Key { + pub(crate) fn elemid_or_key(&self) -> Key { if self.insert { Key::Seq(ElemId(self.id)) } else { @@ -459,7 +459,7 @@ impl Op { } } - pub fn get_increment_value(&self) -> Option { + pub(crate) fn get_increment_value(&self) -> Option { if let OpType::Increment(i) = self.action { Some(i) } else { @@ -467,7 +467,7 @@ impl Op { } } - pub fn value(&self) -> Value { + pub(crate) fn value(&self) -> Value<'_> { match &self.action { OpType::Make(obj_type) => Value::Object(*obj_type), OpType::Put(scalar) => Value::Scalar(Cow::Borrowed(scalar)), @@ -475,7 +475,7 @@ impl Op { } } - pub fn clone_value(&self) -> Value<'static> { + pub(crate) fn clone_value(&self) -> Value<'static> { match &self.action { OpType::Make(obj_type) => Value::Object(*obj_type), OpType::Put(scalar) => Value::Scalar(Cow::Owned(scalar.clone())), @@ -484,7 +484,7 @@ impl Op { } #[allow(dead_code)] - pub fn 
dump(&self) -> String { + pub(crate) fn dump(&self) -> String { match &self.action { OpType::Put(value) if self.insert => format!("i:{}", value), OpType::Put(value) => format!("s:{}", value), @@ -496,7 +496,7 @@ impl Op { } #[derive(Debug, Clone)] -pub struct Peer {} +pub(crate) struct Peer {} /// The sha256 hash of a change. #[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord, Copy)] From af951f324a71bb0af67ffd07f3416e85c11bca96 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 11:06:39 +0100 Subject: [PATCH 311/730] Run cargo fix --- automerge/src/automerge.rs | 48 +++++++++---------- automerge/src/change.rs | 2 +- automerge/src/clock.rs | 6 +-- automerge/src/columnar.rs | 34 ++++++------- automerge/src/decoding.rs | 14 +++--- automerge/src/encoding.rs | 32 ++++++------- automerge/src/error.rs | 8 ++-- automerge/src/indexed_cache.rs | 18 +++---- automerge/src/legacy/serde_impls/op.rs | 2 +- .../src/legacy/serde_impls/scalar_value.rs | 2 +- automerge/src/op_observer.rs | 12 ++--- automerge/src/op_set.rs | 38 +++++++-------- automerge/src/op_tree.rs | 14 +++--- automerge/src/sync.rs | 2 +- .../src/transaction/manual_transaction.rs | 24 +++++----- automerge/src/transaction/transactable.rs | 22 ++++----- 16 files changed, 139 insertions(+), 139 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index d871ee41..da9004d6 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -101,7 +101,7 @@ impl Automerge { } /// Start a transaction. - pub fn transaction(&mut self) -> Transaction { + pub fn transaction(&mut self) -> Transaction<'_> { Transaction { inner: Some(self.transaction_inner()), doc: self, @@ -137,7 +137,7 @@ impl Automerge { /// afterwards. 
pub fn transact(&mut self, f: F) -> transaction::Result where - F: FnOnce(&mut Transaction) -> Result, + F: FnOnce(&mut Transaction<'_>) -> Result, { let mut tx = self.transaction(); let result = f(&mut tx); @@ -156,7 +156,7 @@ impl Automerge { /// Like [`Self::transact`] but with a function for generating the commit options. pub fn transact_with<'a, F, O, E, C, Obs>(&mut self, c: C, f: F) -> transaction::Result where - F: FnOnce(&mut Transaction) -> Result, + F: FnOnce(&mut Transaction<'_>) -> Result, C: FnOnce(&O) -> CommitOptions<'a, Obs>, Obs: 'a + OpObserver, { @@ -230,7 +230,7 @@ impl Automerge { } /// Get an iterator over the parents of an object. - pub fn parents(&self, obj: ExId) -> Parents { + pub fn parents(&self, obj: ExId) -> Parents<'_> { Parents { obj, doc: self } } @@ -259,7 +259,7 @@ impl Automerge { /// /// For a map this returns the keys of the map. /// For a list this returns the element ids (opids) encoded as strings. - pub fn keys>(&self, obj: O) -> Keys { + pub fn keys>(&self, obj: O) -> Keys<'_, '_> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { let iter_keys = self.ops.keys(obj); Keys::new(self, iter_keys) @@ -269,7 +269,7 @@ impl Automerge { } /// Historical version of [`keys`](Self::keys). - pub fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { + pub fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { let clock = self.clock_at(heads); KeysAt::new(self, self.ops.keys_at(obj, clock)) @@ -279,7 +279,7 @@ impl Automerge { } /// Iterate over the keys and values of the map `obj` in the given range. 
- pub fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range { + pub fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range<'_, R> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { let iter_range = self.ops.range(obj, range); Range::new(self, iter_range) @@ -294,7 +294,7 @@ impl Automerge { obj: O, range: R, heads: &[ChangeHash], - ) -> RangeAt { + ) -> RangeAt<'_, R> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { let clock = self.clock_at(heads); let iter_range = self.ops.range_at(obj, range, clock); @@ -308,7 +308,7 @@ impl Automerge { /// /// For a map the keys are the keys of the map. /// For a list the keys are the element ids (opids) encoded as strings. - pub fn values>(&self, obj: O) -> Values { + pub fn values>(&self, obj: O) -> Values<'_> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { let iter_range = self.ops.range(obj, ..); Values::new(self, iter_range) @@ -318,7 +318,7 @@ impl Automerge { } /// Historical version of [`values`](Self::values). - pub fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt { + pub fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt<'_> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { let clock = self.clock_at(heads); let iter_range = self.ops.range_at(obj, .., clock); @@ -437,7 +437,7 @@ impl Automerge { &self, obj: O, prop: P, - ) -> Result, AutomergeError> { + ) -> Result, ExId)>, AutomergeError> { Ok(self.get_all(obj, prop.into())?.last().cloned()) } @@ -447,7 +447,7 @@ impl Automerge { obj: O, prop: P, heads: &[ChangeHash], - ) -> Result, AutomergeError> { + ) -> Result, ExId)>, AutomergeError> { Ok(self.get_all_at(obj, prop, heads)?.last().cloned()) } @@ -459,7 +459,7 @@ impl Automerge { &self, obj: O, prop: P, - ) -> Result, AutomergeError> { + ) -> Result, ExId)>, AutomergeError> { let obj = self.exid_to_obj(obj.as_ref())?; let result = match prop.into() { Prop::Map(p) => { @@ -492,7 +492,7 @@ impl Automerge { obj: O, prop: P, heads: &[ChangeHash], - ) -> Result, 
AutomergeError> { + ) -> Result, ExId)>, AutomergeError> { let prop = prop.into(); let obj = self.exid_to_obj(obj.as_ref())?; let clock = self.clock_at(heads); @@ -529,7 +529,7 @@ impl Automerge { /// Load a document. pub fn load_with( data: &[u8], - options: ApplyOptions, + options: ApplyOptions<'_, Obs>, ) -> Result { let changes = Change::load_document(data)?; let mut doc = Self::new(); @@ -546,7 +546,7 @@ impl Automerge { pub fn load_incremental_with( &mut self, data: &[u8], - options: ApplyOptions, + options: ApplyOptions<'_, Obs>, ) -> Result { let changes = Change::load_document(data)?; let start = self.ops.len(); @@ -577,7 +577,7 @@ impl Automerge { pub fn apply_changes_with, Obs: OpObserver>( &mut self, changes: I, - mut options: ApplyOptions, + mut options: ApplyOptions<'_, Obs>, ) -> Result<(), AutomergeError> { for c in changes { if !self.history_index.contains_key(&c.hash) { @@ -1052,14 +1052,14 @@ impl Default for Automerge { } #[derive(Serialize, Debug, Clone, PartialEq)] -pub struct SpanInfo { - pub id: ExId, - pub time: i64, - pub start: usize, - pub end: usize, +pub(crate) struct SpanInfo { + pub(crate) id: ExId, + pub(crate) time: i64, + pub(crate) start: usize, + pub(crate) end: usize, #[serde(rename = "type")] - pub span_type: String, - pub value: ScalarValue, + pub(crate) span_type: String, + pub(crate) value: ScalarValue, } #[cfg(test)] diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 2e420928..c2bbe66c 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -393,7 +393,7 @@ impl Change { } } - pub(crate) fn iter_ops(&self) -> OperationIterator { + pub(crate) fn iter_ops(&self) -> OperationIterator<'_> { OperationIterator::new(self.bytes.uncompressed(), self.actors.as_slice(), &self.ops) } diff --git a/automerge/src/clock.rs b/automerge/src/clock.rs index d01c7748..d80d091b 100644 --- a/automerge/src/clock.rs +++ b/automerge/src/clock.rs @@ -7,18 +7,18 @@ use std::collections::HashMap; pub(crate) struct 
Clock(HashMap); impl Clock { - pub fn new() -> Self { + pub(crate) fn new() -> Self { Clock(Default::default()) } - pub fn include(&mut self, key: usize, n: u64) { + pub(crate) fn include(&mut self, key: usize, n: u64) { self.0 .entry(key) .and_modify(|m| *m = cmp::max(n, *m)) .or_insert(n); } - pub fn covers(&self, id: &OpId) -> bool { + pub(crate) fn covers(&self, id: &OpId) -> bool { if let Some(val) = self.0.get(&id.1) { val >= &id.0 } else { diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index e0a007e0..3613e31f 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -289,7 +289,7 @@ pub(crate) struct DepsIterator<'a> { } impl<'a> DepsIterator<'a> { - pub fn new(bytes: &'a [u8], ops: &'a HashMap>) -> Self { + pub(crate) fn new(bytes: &'a [u8], ops: &'a HashMap>) -> Self { Self { num: col_iter(bytes, ops, DOC_DEPS_NUM), dep: col_iter(bytes, ops, DOC_DEPS_INDEX), @@ -491,25 +491,25 @@ impl<'a> Iterator for ObjIterator<'a> { #[derive(PartialEq, Debug, Clone)] pub(crate) struct DocChange { - pub actor: usize, - pub seq: u64, - pub max_op: u64, - pub time: i64, - pub message: Option, - pub extra_bytes: Vec, - pub ops: Vec, + pub(crate) actor: usize, + pub(crate) seq: u64, + pub(crate) max_op: u64, + pub(crate) time: i64, + pub(crate) message: Option, + pub(crate) extra_bytes: Vec, + pub(crate) ops: Vec, } #[derive(Debug, Clone)] pub(crate) struct DocOp { - pub actor: usize, - pub ctr: u64, - pub action: OpType, - pub obj: amp::ObjectId, - pub key: amp::Key, - pub succ: Vec<(u64, usize)>, - pub pred: Vec<(u64, usize)>, - pub insert: bool, + pub(crate) actor: usize, + pub(crate) ctr: u64, + pub(crate) action: OpType, + pub(crate) obj: amp::ObjectId, + pub(crate) key: amp::Key, + pub(crate) succ: Vec<(u64, usize)>, + pub(crate) pred: Vec<(u64, usize)>, + pub(crate) insert: bool, } impl Ord for DocOp { @@ -1121,7 +1121,7 @@ pub(crate) struct ColumnEncoder { } impl ColumnEncoder { - pub fn encode_ops<'a, I>(ops: I, actors: 
&[ActorId]) -> (Vec, HashMap>) + pub(crate) fn encode_ops<'a, I>(ops: I, actors: &[ActorId]) -> (Vec, HashMap>) where I: IntoIterator, { diff --git a/automerge/src/decoding.rs b/automerge/src/decoding.rs index 5b90651f..cd938a3c 100644 --- a/automerge/src/decoding.rs +++ b/automerge/src/decoding.rs @@ -125,13 +125,13 @@ pub enum InvalidChangeError { #[derive(Clone, Debug)] pub(crate) struct Decoder<'a> { - pub offset: usize, - pub last_read: usize, + pub(crate) offset: usize, + pub(crate) last_read: usize, data: Cow<'a, [u8]>, } impl<'a> Decoder<'a> { - pub fn new(data: Cow<'a, [u8]>) -> Self { + pub(crate) fn new(data: Cow<'a, [u8]>) -> Self { Decoder { offset: 0, last_read: 0, @@ -139,7 +139,7 @@ impl<'a> Decoder<'a> { } } - pub fn read(&mut self) -> Result { + pub(crate) fn read(&mut self) -> Result { let mut buf = &self.data[self.offset..]; let init_len = buf.len(); let val = T::decode::<&[u8]>(&mut buf).ok_or(Error::NoDecodedValue)?; @@ -153,7 +153,7 @@ impl<'a> Decoder<'a> { } } - pub fn read_bytes(&mut self, index: usize) -> Result<&[u8], Error> { + pub(crate) fn read_bytes(&mut self, index: usize) -> Result<&[u8], Error> { if self.offset + index > self.data.len() { Err(Error::TryingToReadPastEnd) } else { @@ -164,7 +164,7 @@ impl<'a> Decoder<'a> { } } - pub fn done(&self) -> bool { + pub(crate) fn done(&self) -> bool { self.offset >= self.data.len() } } @@ -212,7 +212,7 @@ impl<'a> Iterator for BooleanDecoder<'a> { /// See discussion on [`crate::encoding::RleEncoder`] for the format data is stored in. 
#[derive(Debug)] pub(crate) struct RleDecoder<'a, T> { - pub decoder: Decoder<'a>, + pub(crate) decoder: Decoder<'a>, last_value: Option, count: isize, literal: bool, diff --git a/automerge/src/encoding.rs b/automerge/src/encoding.rs index 113fd158..3b8b470c 100644 --- a/automerge/src/encoding.rs +++ b/automerge/src/encoding.rs @@ -42,7 +42,7 @@ pub(crate) struct BooleanEncoder { } impl BooleanEncoder { - pub fn new() -> BooleanEncoder { + pub(crate) fn new() -> BooleanEncoder { BooleanEncoder { buf: Vec::new(), last: false, @@ -50,7 +50,7 @@ impl BooleanEncoder { } } - pub fn append(&mut self, value: bool) { + pub(crate) fn append(&mut self, value: bool) { if value == self.last { self.count += 1; } else { @@ -60,7 +60,7 @@ impl BooleanEncoder { } } - pub fn finish(mut self, col: u32) -> ColData { + pub(crate) fn finish(mut self, col: u32) -> ColData { if self.count > 0 { self.count.encode(&mut self.buf).ok(); } @@ -79,24 +79,24 @@ pub(crate) struct DeltaEncoder { } impl DeltaEncoder { - pub fn new() -> DeltaEncoder { + pub(crate) fn new() -> DeltaEncoder { DeltaEncoder { rle: RleEncoder::new(), absolute_value: 0, } } - pub fn append_value(&mut self, value: u64) { + pub(crate) fn append_value(&mut self, value: u64) { self.rle .append_value(value as i64 - self.absolute_value as i64); self.absolute_value = value; } - pub fn append_null(&mut self) { + pub(crate) fn append_null(&mut self) { self.rle.append_null(); } - pub fn finish(self, col: u32) -> ColData { + pub(crate) fn finish(self, col: u32) -> ColData { self.rle.finish(col) } } @@ -135,14 +135,14 @@ impl RleEncoder where T: Encodable + PartialEq + Clone, { - pub fn new() -> RleEncoder { + pub(crate) fn new() -> RleEncoder { RleEncoder { buf: Vec::new(), state: RleState::Empty, } } - pub fn finish(mut self, col: u32) -> ColData { + pub(crate) fn finish(mut self, col: u32) -> ColData { match self.take_state() { // this covers `only_nulls` RleState::NullRun(size) => { @@ -184,7 +184,7 @@ where state } - pub fn 
append_null(&mut self) { + pub(crate) fn append_null(&mut self) { self.state = match self.take_state() { RleState::Empty => RleState::NullRun(1), RleState::NullRun(size) => RleState::NullRun(size + 1), @@ -204,7 +204,7 @@ where } } - pub fn append_value(&mut self, value: T) { + pub(crate) fn append_value(&mut self, value: T) { self.state = match self.take_state() { RleState::Empty => RleState::LoneVal(value), RleState::LoneVal(other) => { @@ -348,14 +348,14 @@ impl Encodable for i32 { #[derive(Debug)] pub(crate) struct ColData { - pub col: u32, - pub data: Vec, + pub(crate) col: u32, + pub(crate) data: Vec, #[cfg(debug_assertions)] has_been_deflated: bool, } impl ColData { - pub fn new(col_id: u32, data: Vec) -> ColData { + pub(crate) fn new(col_id: u32, data: Vec) -> ColData { ColData { col: col_id, data, @@ -364,7 +364,7 @@ impl ColData { } } - pub fn encode_col_len(&self, buf: &mut R) -> io::Result { + pub(crate) fn encode_col_len(&self, buf: &mut R) -> io::Result { let mut len = 0; if !self.data.is_empty() { len += self.col.encode(buf)?; @@ -373,7 +373,7 @@ impl ColData { Ok(len) } - pub fn deflate(&mut self) { + pub(crate) fn deflate(&mut self) { #[cfg(debug_assertions)] { debug_assert!(!self.has_been_deflated); diff --git a/automerge/src/error.rs b/automerge/src/error.rs index 0b58c5ae..77293370 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -43,10 +43,10 @@ pub struct InvalidActorId(pub String); #[derive(Error, Debug, PartialEq)] #[error("Invalid scalar value, expected {expected} but received {unexpected}")] pub(crate) struct InvalidScalarValue { - pub raw_value: ScalarValue, - pub datatype: DataType, - pub unexpected: String, - pub expected: String, + pub(crate) raw_value: ScalarValue, + pub(crate) datatype: DataType, + pub(crate) unexpected: String, + pub(crate) expected: String, } #[derive(Error, Debug, PartialEq)] diff --git a/automerge/src/indexed_cache.rs b/automerge/src/indexed_cache.rs index 6d760637..1bf92a02 100644 --- 
a/automerge/src/indexed_cache.rs +++ b/automerge/src/indexed_cache.rs @@ -5,7 +5,7 @@ use std::ops::Index; #[derive(Debug, Clone)] pub(crate) struct IndexedCache { - pub cache: Vec, + pub(crate) cache: Vec, lookup: HashMap, } @@ -22,14 +22,14 @@ impl IndexedCache where T: Clone + Eq + Hash + Ord, { - pub fn new() -> Self { + pub(crate) fn new() -> Self { IndexedCache { cache: Default::default(), lookup: Default::default(), } } - pub fn cache(&mut self, item: T) -> usize { + pub(crate) fn cache(&mut self, item: T) -> usize { if let Some(n) = self.lookup.get(&item) { *n } else { @@ -40,15 +40,15 @@ where } } - pub fn lookup(&self, item: &T) -> Option { + pub(crate) fn lookup(&self, item: &T) -> Option { self.lookup.get(item).cloned() } - pub fn len(&self) -> usize { + pub(crate) fn len(&self) -> usize { self.cache.len() } - pub fn get(&self, index: usize) -> &T { + pub(crate) fn get(&self, index: usize) -> &T { &self.cache[index] } @@ -58,14 +58,14 @@ where /// # Panics /// /// Panics on an empty cache. 
- pub fn remove_last(&mut self) -> T { + pub(crate) fn remove_last(&mut self) -> T { let last = self.cache.len() - 1; let t = self.cache.remove(last); self.lookup.remove(&t); t } - pub fn sorted(&self) -> IndexedCache { + pub(crate) fn sorted(&self) -> IndexedCache { let mut sorted = Self::new(); self.cache.iter().sorted().cloned().for_each(|item| { let n = sorted.cache.len(); @@ -75,7 +75,7 @@ where sorted } - pub fn encode_index(&self) -> Vec { + pub(crate) fn encode_index(&self) -> Vec { let sorted: Vec<_> = self.cache.iter().sorted().cloned().collect(); self.cache .iter() diff --git a/automerge/src/legacy/serde_impls/op.rs b/automerge/src/legacy/serde_impls/op.rs index feaccfb8..9e9472d8 100644 --- a/automerge/src/legacy/serde_impls/op.rs +++ b/automerge/src/legacy/serde_impls/op.rs @@ -131,7 +131,7 @@ impl<'de> Deserialize<'de> for Op { impl<'de> Visitor<'de> for OperationVisitor { type Value = Op; - fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { formatter.write_str("An operation object") } diff --git a/automerge/src/legacy/serde_impls/scalar_value.rs b/automerge/src/legacy/serde_impls/scalar_value.rs index 7a08f697..b2a559ea 100644 --- a/automerge/src/legacy/serde_impls/scalar_value.rs +++ b/automerge/src/legacy/serde_impls/scalar_value.rs @@ -12,7 +12,7 @@ impl<'de> Deserialize<'de> for ScalarValue { impl<'de> de::Visitor<'de> for ValueVisitor { type Value = ScalarValue; - fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { formatter.write_str("a number, string, bool, or null") } diff --git a/automerge/src/op_observer.rs b/automerge/src/op_observer.rs index 6a665242..4a3092f8 100644 --- a/automerge/src/op_observer.rs +++ b/automerge/src/op_observer.rs @@ -10,7 +10,7 @@ pub trait OpObserver { /// - `index`: the index the new value has 
been inserted at. /// - `tagged_value`: the value that has been inserted and the id of the operation that did the /// insert. - fn insert(&mut self, objid: ExId, index: usize, tagged_value: (Value, ExId)); + fn insert(&mut self, objid: ExId, index: usize, tagged_value: (Value<'_>, ExId)); /// A new value has been put into the given object. /// @@ -19,7 +19,7 @@ pub trait OpObserver { /// - `tagged_value`: the value that has been put into the object and the id of the operation /// that did the put. /// - `conflict`: whether this put conflicts with other operations. - fn put(&mut self, objid: ExId, key: Prop, tagged_value: (Value, ExId), conflict: bool); + fn put(&mut self, objid: ExId, key: Prop, tagged_value: (Value<'_>, ExId), conflict: bool); /// A counter has been incremented. /// @@ -37,9 +37,9 @@ pub trait OpObserver { } impl OpObserver for () { - fn insert(&mut self, _objid: ExId, _index: usize, _tagged_value: (Value, ExId)) {} + fn insert(&mut self, _objid: ExId, _index: usize, _tagged_value: (Value<'_>, ExId)) {} - fn put(&mut self, _objid: ExId, _key: Prop, _tagged_value: (Value, ExId), _conflict: bool) {} + fn put(&mut self, _objid: ExId, _key: Prop, _tagged_value: (Value<'_>, ExId), _conflict: bool) {} fn increment(&mut self, _objid: ExId, _key: Prop, _tagged_value: (i64, ExId)) {} @@ -61,7 +61,7 @@ impl VecOpObserver { } impl OpObserver for VecOpObserver { - fn insert(&mut self, obj_id: ExId, index: usize, (value, id): (Value, ExId)) { + fn insert(&mut self, obj_id: ExId, index: usize, (value, id): (Value<'_>, ExId)) { self.patches.push(Patch::Insert { obj: obj_id, index, @@ -69,7 +69,7 @@ impl OpObserver for VecOpObserver { }); } - fn put(&mut self, objid: ExId, key: Prop, (value, id): (Value, ExId), conflict: bool) { + fn put(&mut self, objid: ExId, key: Prop, (value, id): (Value<'_>, ExId), conflict: bool) { self.patches.push(Patch::Put { obj: objid, key, diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index f16822da..236f318a 100644 
--- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -19,11 +19,11 @@ pub(crate) struct OpSetInternal { /// The number of operations in the opset. length: usize, /// Metadata about the operations in this opset. - pub m: OpSetMetadata, + pub(crate) m: OpSetMetadata, } impl OpSetInternal { - pub fn new() -> Self { + pub(crate) fn new() -> Self { let mut trees: HashMap<_, _, _> = Default::default(); trees.insert(ObjId::root(), OpTree::new()); OpSetInternal { @@ -44,7 +44,7 @@ impl OpSetInternal { } } - pub fn iter(&self) -> Iter<'_> { + pub(crate) fn iter(&self) -> Iter<'_> { let mut objs: Vec<_> = self.trees.keys().collect(); objs.sort_by(|a, b| self.m.lamport_cmp(a.0, b.0)); Iter { @@ -55,13 +55,13 @@ impl OpSetInternal { } } - pub fn parent_object(&self, obj: &ObjId) -> Option<(ObjId, Key)> { + pub(crate) fn parent_object(&self, obj: &ObjId) -> Option<(ObjId, Key)> { let parent = self.trees.get(obj)?.parent?; let key = self.search(&parent, OpIdSearch::new(obj.0)).key().unwrap(); Some((parent, key)) } - pub fn keys(&self, obj: ObjId) -> Option { + pub(crate) fn keys(&self, obj: ObjId) -> Option> { if let Some(tree) = self.trees.get(&obj) { tree.internal.keys() } else { @@ -69,7 +69,7 @@ impl OpSetInternal { } } - pub fn keys_at(&self, obj: ObjId, clock: Clock) -> Option { + pub(crate) fn keys_at(&self, obj: ObjId, clock: Clock) -> Option> { if let Some(tree) = self.trees.get(&obj) { tree.internal.keys_at(clock) } else { @@ -77,7 +77,7 @@ impl OpSetInternal { } } - pub fn range>(&self, obj: ObjId, range: R) -> Option> { + pub(crate) fn range>(&self, obj: ObjId, range: R) -> Option> { if let Some(tree) = self.trees.get(&obj) { tree.internal.range(range, &self.m) } else { @@ -85,12 +85,12 @@ impl OpSetInternal { } } - pub fn range_at>( + pub(crate) fn range_at>( &self, obj: ObjId, range: R, clock: Clock, - ) -> Option> { + ) -> Option> { if let Some(tree) = self.trees.get(&obj) { tree.internal.range_at(range, &self.m, clock) } else { @@ -98,7 +98,7 @@ impl 
OpSetInternal { } } - pub fn search<'a, 'b: 'a, Q>(&'b self, obj: &ObjId, query: Q) -> Q + pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, obj: &ObjId, query: Q) -> Q where Q: TreeQuery<'a>, { @@ -109,7 +109,7 @@ impl OpSetInternal { } } - pub fn replace(&mut self, obj: &ObjId, index: usize, f: F) + pub(crate) fn replace(&mut self, obj: &ObjId, index: usize, f: F) where F: FnMut(&mut Op), { @@ -118,7 +118,7 @@ impl OpSetInternal { } } - pub fn remove(&mut self, obj: &ObjId, index: usize) -> Op { + pub(crate) fn remove(&mut self, obj: &ObjId, index: usize) -> Op { // this happens on rollback - be sure to go back to the old state let tree = self.trees.get_mut(obj).unwrap(); self.length -= 1; @@ -129,11 +129,11 @@ impl OpSetInternal { op } - pub fn len(&self) -> usize { + pub(crate) fn len(&self) -> usize { self.length } - pub fn insert(&mut self, index: usize, obj: &ObjId, element: Op) { + pub(crate) fn insert(&mut self, index: usize, obj: &ObjId, element: Op) { if let OpType::Make(typ) = element.action { self.trees.insert( element.id.into(), @@ -239,7 +239,7 @@ impl OpSetInternal { op } - pub fn object_type(&self, id: &ObjId) -> Option { + pub(crate) fn object_type(&self, id: &ObjId) -> Option { self.trees.get(id).map(|tree| tree.objtype) } @@ -297,19 +297,19 @@ impl<'a> Iterator for Iter<'a> { #[derive(Clone, Debug, PartialEq)] pub(crate) struct OpSetMetadata { - pub actors: IndexedCache, - pub props: IndexedCache, + pub(crate) actors: IndexedCache, + pub(crate) props: IndexedCache, } impl OpSetMetadata { - pub fn key_cmp(&self, left: &Key, right: &Key) -> Ordering { + pub(crate) fn key_cmp(&self, left: &Key, right: &Key) -> Ordering { match (left, right) { (Key::Map(a), Key::Map(b)) => self.props[*a].cmp(&self.props[*b]), _ => panic!("can only compare map keys"), } } - pub fn lamport_cmp(&self, left: OpId, right: OpId) -> Ordering { + pub(crate) fn lamport_cmp(&self, left: OpId, right: OpId) -> Ordering { match (left, right) { (OpId(0, _), OpId(0, _)) => 
Ordering::Equal, (OpId(0, _), OpId(_, _)) => Ordering::Less, diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index a685de88..908522d5 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -64,7 +64,7 @@ impl OpTreeInternal { self.root_node.as_ref().map(query::Keys::new) } - pub fn keys_at(&self, clock: Clock) -> Option> { + pub(crate) fn keys_at(&self, clock: Clock) -> Option> { self.root_node .as_ref() .map(|root| query::KeysAt::new(root, clock)) @@ -105,7 +105,7 @@ impl OpTreeInternal { } /// Create an iterator through the sequence. - pub fn iter(&self) -> Iter<'_> { + pub(crate) fn iter(&self) -> Iter<'_> { Iter { inner: self, index: 0, @@ -244,7 +244,7 @@ impl OpTreeNode { } } - pub fn len(&self) -> usize { + pub(crate) fn len(&self) -> usize { self.length } @@ -493,7 +493,7 @@ impl OpTreeNode { l } - pub fn remove(&mut self, index: usize) -> Op { + pub(crate) fn remove(&mut self, index: usize) -> Op { let original_len = self.len(); if self.is_leaf() { let v = self.remove_from_leaf(index); @@ -552,7 +552,7 @@ impl OpTreeNode { /// Update the operation at the given index using the provided function. /// /// This handles updating the indices after the update. 
- pub fn update(&mut self, index: usize, f: F) -> (Op, &Op) + pub(crate) fn update(&mut self, index: usize, f: F) -> (Op, &Op) where F: FnOnce(&mut Op), { @@ -588,7 +588,7 @@ impl OpTreeNode { } } - pub fn last(&self) -> &Op { + pub(crate) fn last(&self) -> &Op { if self.is_leaf() { // node is never empty so this is safe self.elements.last().unwrap() @@ -598,7 +598,7 @@ impl OpTreeNode { } } - pub fn get(&self, index: usize) -> Option<&Op> { + pub(crate) fn get(&self, index: usize) -> Option<&Op> { if self.is_leaf() { return self.elements.get(index); } else { diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index fb0031aa..85db6cce 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -338,7 +338,7 @@ impl Encodable for &[ChangeHash] { } } -fn decode_hashes(decoder: &mut Decoder) -> Result, decoding::Error> { +fn decode_hashes(decoder: &mut Decoder<'_>) -> Result, decoding::Error> { let length = decoder.read::()?; let mut hashes = Vec::with_capacity(length as usize); diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 2015b263..949ff437 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -58,7 +58,7 @@ impl<'a> Transaction<'a> { /// i64; /// tx.commit_with::<()>(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(mut self, options: CommitOptions) -> ChangeHash { + pub fn commit_with(mut self, options: CommitOptions<'_, Obs>) -> ChangeHash { self.inner.take().unwrap().commit( self.doc, options.message, @@ -179,15 +179,15 @@ impl<'a> Transactable for Transaction<'a> { .splice(self.doc, obj.as_ref(), pos, del, vals) } - fn keys>(&self, obj: O) -> Keys { + fn keys>(&self, obj: O) -> Keys<'_, '_> { self.doc.keys(obj) } - fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> { 
self.doc.keys_at(obj, heads) } - fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range { + fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range<'_, R> { self.doc.range(obj, range) } @@ -196,15 +196,15 @@ impl<'a> Transactable for Transaction<'a> { obj: O, range: R, heads: &[ChangeHash], - ) -> RangeAt { + ) -> RangeAt<'_, R> { self.doc.range_at(obj, range, heads) } - fn values>(&self, obj: O) -> Values { + fn values>(&self, obj: O) -> Values<'_> { self.doc.values(obj) } - fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt { + fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt<'_> { self.doc.values_at(obj, heads) } @@ -236,7 +236,7 @@ impl<'a> Transactable for Transaction<'a> { &self, obj: O, prop: P, - ) -> Result, AutomergeError> { + ) -> Result, ExId)>, AutomergeError> { self.doc.get(obj, prop) } @@ -245,7 +245,7 @@ impl<'a> Transactable for Transaction<'a> { obj: O, prop: P, heads: &[ChangeHash], - ) -> Result, AutomergeError> { + ) -> Result, ExId)>, AutomergeError> { self.doc.get_at(obj, prop, heads) } @@ -253,7 +253,7 @@ impl<'a> Transactable for Transaction<'a> { &self, obj: O, prop: P, - ) -> Result, AutomergeError> { + ) -> Result, ExId)>, AutomergeError> { self.doc.get_all(obj, prop) } @@ -262,7 +262,7 @@ impl<'a> Transactable for Transaction<'a> { obj: O, prop: P, heads: &[ChangeHash], - ) -> Result, AutomergeError> { + ) -> Result, ExId)>, AutomergeError> { self.doc.get_all_at(obj, prop, heads) } @@ -270,7 +270,7 @@ impl<'a> Transactable for Transaction<'a> { self.doc.parent_object(obj) } - fn parents(&self, obj: ExId) -> crate::Parents { + fn parents(&self, obj: ExId) -> crate::Parents<'_> { self.doc.parents(obj) } } diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 3df7784c..44a3e53b 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -97,23 +97,23 @@ pub trait Transactable { } /// Get the keys of the given 
object, it should be a map. - fn keys>(&self, obj: O) -> Keys; + fn keys>(&self, obj: O) -> Keys<'_, '_>; /// Get the keys of the given object at a point in history. - fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt; + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_>; - fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range; + fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range<'_, R>; fn range_at, R: RangeBounds>( &self, obj: O, range: R, heads: &[ChangeHash], - ) -> RangeAt; + ) -> RangeAt<'_, R>; - fn values>(&self, obj: O) -> Values; + fn values>(&self, obj: O) -> Values<'_>; - fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt; + fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt<'_>; /// Get the length of the given object. fn length>(&self, obj: O) -> usize; @@ -139,7 +139,7 @@ pub trait Transactable { &self, obj: O, prop: P, - ) -> Result, AutomergeError>; + ) -> Result, ExId)>, AutomergeError>; /// Get the value at this prop in the object at a point in history. fn get_at, P: Into>( @@ -147,26 +147,26 @@ pub trait Transactable { obj: O, prop: P, heads: &[ChangeHash], - ) -> Result, AutomergeError>; + ) -> Result, ExId)>, AutomergeError>; fn get_all, P: Into>( &self, obj: O, prop: P, - ) -> Result, AutomergeError>; + ) -> Result, ExId)>, AutomergeError>; fn get_all_at, P: Into>( &self, obj: O, prop: P, heads: &[ChangeHash], - ) -> Result, AutomergeError>; + ) -> Result, ExId)>, AutomergeError>; /// Get the object id of the object that contains this object and the prop that this object is /// at in that object. 
fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)>; - fn parents(&self, obj: ExId) -> Parents; + fn parents(&self, obj: ExId) -> Parents<'_>; fn path_to_object>(&self, obj: O) -> Vec<(ExId, Prop)> { let mut path = self.parents(obj.as_ref().clone()).collect::>(); From 9788cd881dee9738b517434e5d062ad19b5739c1 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 11:14:07 +0100 Subject: [PATCH 312/730] Add debug impls --- automerge/src/keys.rs | 1 + automerge/src/keys_at.rs | 1 + automerge/src/options.rs | 2 +- automerge/src/parents.rs | 1 + automerge/src/range.rs | 1 + automerge/src/range_at.rs | 1 + automerge/src/transaction/commit.rs | 2 +- automerge/src/values.rs | 1 + automerge/src/values_at.rs | 1 + 9 files changed, 9 insertions(+), 2 deletions(-) diff --git a/automerge/src/keys.rs b/automerge/src/keys.rs index 109586c7..f8e0c676 100644 --- a/automerge/src/keys.rs +++ b/automerge/src/keys.rs @@ -1,5 +1,6 @@ use crate::{query, Automerge}; +#[derive(Debug)] pub struct Keys<'a, 'k> { keys: Option>, doc: &'a Automerge, diff --git a/automerge/src/keys_at.rs b/automerge/src/keys_at.rs index 0d0beb35..c957e175 100644 --- a/automerge/src/keys_at.rs +++ b/automerge/src/keys_at.rs @@ -1,5 +1,6 @@ use crate::{query, Automerge}; +#[derive(Debug)] pub struct KeysAt<'a, 'k> { keys: Option>, doc: &'a Automerge, diff --git a/automerge/src/options.rs b/automerge/src/options.rs index 622985e3..e0fd991f 100644 --- a/automerge/src/options.rs +++ b/automerge/src/options.rs @@ -1,4 +1,4 @@ -#[derive(Default)] +#[derive(Debug, Default)] pub struct ApplyOptions<'a, Obs> { pub op_observer: Option<&'a mut Obs>, } diff --git a/automerge/src/parents.rs b/automerge/src/parents.rs index 6bc16b5c..a6c891bd 100644 --- a/automerge/src/parents.rs +++ b/automerge/src/parents.rs @@ -1,5 +1,6 @@ use crate::{exid::ExId, Automerge, Prop}; +#[derive(Debug)] pub struct Parents<'a> { pub(crate) obj: ExId, pub(crate) doc: &'a Automerge, diff --git a/automerge/src/range.rs 
b/automerge/src/range.rs index b0eff5ad..0bfca1ea 100644 --- a/automerge/src/range.rs +++ b/automerge/src/range.rs @@ -3,6 +3,7 @@ use std::ops::RangeBounds; use crate::{query, Automerge}; +#[derive(Debug)] pub struct Range<'a, R: RangeBounds> { range: Option>, doc: &'a Automerge, diff --git a/automerge/src/range_at.rs b/automerge/src/range_at.rs index 7862b4fb..321972a8 100644 --- a/automerge/src/range_at.rs +++ b/automerge/src/range_at.rs @@ -3,6 +3,7 @@ use std::ops::RangeBounds; use crate::{query, Automerge}; +#[derive(Debug)] pub struct RangeAt<'a, R: RangeBounds> { range: Option>, doc: &'a Automerge, diff --git a/automerge/src/transaction/commit.rs b/automerge/src/transaction/commit.rs index 2aaac57c..f9e6f3c2 100644 --- a/automerge/src/transaction/commit.rs +++ b/automerge/src/transaction/commit.rs @@ -1,5 +1,5 @@ /// Optional metadata for a commit. -#[derive(Default)] +#[derive(Debug, Default)] pub struct CommitOptions<'a, Obs> { pub message: Option, pub time: Option, diff --git a/automerge/src/values.rs b/automerge/src/values.rs index de195421..7601dde1 100644 --- a/automerge/src/values.rs +++ b/automerge/src/values.rs @@ -3,6 +3,7 @@ use std::ops::RangeFull; use crate::{query, Automerge}; +#[derive(Debug)] pub struct Values<'a> { range: Option>, doc: &'a Automerge, diff --git a/automerge/src/values_at.rs b/automerge/src/values_at.rs index 438155d1..77a52c96 100644 --- a/automerge/src/values_at.rs +++ b/automerge/src/values_at.rs @@ -3,6 +3,7 @@ use std::ops::RangeFull; use crate::{query, Automerge}; +#[derive(Debug)] pub struct ValuesAt<'a> { range: Option>, doc: &'a Automerge, From 67da930a40f5b9a0b2982876fa8a2adbd4e64c73 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 11:15:15 +0100 Subject: [PATCH 313/730] Add missing lints --- automerge/src/columnar.rs | 5 ++++- automerge/src/op_observer.rs | 3 ++- automerge/src/op_set.rs | 8 ++++++-- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/automerge/src/columnar.rs 
b/automerge/src/columnar.rs index 3613e31f..8744ee77 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -1121,7 +1121,10 @@ pub(crate) struct ColumnEncoder { } impl ColumnEncoder { - pub(crate) fn encode_ops<'a, I>(ops: I, actors: &[ActorId]) -> (Vec, HashMap>) + pub(crate) fn encode_ops<'a, I>( + ops: I, + actors: &[ActorId], + ) -> (Vec, HashMap>) where I: IntoIterator, { diff --git a/automerge/src/op_observer.rs b/automerge/src/op_observer.rs index 4a3092f8..96139bab 100644 --- a/automerge/src/op_observer.rs +++ b/automerge/src/op_observer.rs @@ -39,7 +39,8 @@ pub trait OpObserver { impl OpObserver for () { fn insert(&mut self, _objid: ExId, _index: usize, _tagged_value: (Value<'_>, ExId)) {} - fn put(&mut self, _objid: ExId, _key: Prop, _tagged_value: (Value<'_>, ExId), _conflict: bool) {} + fn put(&mut self, _objid: ExId, _key: Prop, _tagged_value: (Value<'_>, ExId), _conflict: bool) { + } fn increment(&mut self, _objid: ExId, _key: Prop, _tagged_value: (i64, ExId)) {} diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 236f318a..7928fce9 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -77,7 +77,11 @@ impl OpSetInternal { } } - pub(crate) fn range>(&self, obj: ObjId, range: R) -> Option> { + pub(crate) fn range>( + &self, + obj: ObjId, + range: R, + ) -> Option> { if let Some(tree) = self.trees.get(&obj) { tree.internal.range(range, &self.m) } else { @@ -244,7 +248,7 @@ impl OpSetInternal { } #[cfg(feature = "optree-visualisation")] - pub fn visualise(&self) -> String { + pub(crate) fn visualise(&self) -> String { let mut out = Vec::new(); let graph = super::visualisation::GraphVisualisation::construct(&self.trees, &self.m); dot::render(&graph, &mut out).unwrap(); From ec446f4839a18757603f5042cee7dc16deee3c4e Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 11:28:36 +0100 Subject: [PATCH 314/730] Add favicon --- automerge-wasm/src/lib.rs | 3 ++- automerge/src/lib.rs | 3 ++- 
img/favicon.ico | Bin 0 -> 260098 bytes 3 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 img/favicon.ico diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 9eceece8..4429c0c8 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -1,5 +1,6 @@ #![doc( - html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg" + html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg", + html_favicon_url = "https:///raw.githubusercontent.com/automerge/automerge-rs/main/img/favicon.ico" )] #![warn( missing_debug_implementations, diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index b2d84d50..bb39de25 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -1,5 +1,6 @@ #![doc( - html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg" + html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg", + html_favicon_url = "https:///raw.githubusercontent.com/automerge/automerge-rs/main/img/favicon.ico" )] #![warn( missing_debug_implementations, diff --git a/img/favicon.ico b/img/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..90486824c230836fff38cc096efce554b46b73d6 GIT binary patch literal 260098 zcmeI5d$c4)eaDC3Ih><85Dq!!B!6&c?tAaOGxy%P%lk1XNP-f;2N2<)2!12rLqo_bXvRd2W`ST^5#o0FfgebAoXajYN+cc?pI`rK)>s`Y~PA-P4ch zs`@Uc>AC&u>aO3nexI)D>h7viX$Sh>TW^)<|I4Lcy|H8I{iRZAk}h;p>32KW+v(5J zDL>k=GS$isB~B3*vN*7aw|Lv(%Wo?FOnW+(wKp@5zW z9DO(0k@9>l<@sg05Xr*q=adNf13i_1S5QD-1@?NH?A)ohWsG}lY=4$cUsd246wpfn z#w)F-H%sbq597Z5wpX7gyXvI~ynzDxDUgtJehmQc`h^BO(nx{Y_sRQc|Jw#%JoE3!m)y^tE)g9&gbs@0U0^qC{o^is%IJA z>n}*{rY&Kdb31rfH)N1=1r>l&Qy>oKtRJpCGAF%1AKCYCGI7E=WbDx6rTba!`4#A|(=MRkP$3ma zmGhx#GX3{Ak_Gyloxd+%UA2*vCzcAC2k(Up3uH<&1>$fX+TYGNUw&Jcan3qh*KU25 zRL1v^^N#*`&7uqL6h(o&IOoiR_o9?M@?nw!d2u60?>BEp|GXI?3NKSoKvmAcdkTe*Ll~og>YRgjV?q^70``y 
z@UDHx!ofZi(3NxW-bXMXiM9&p&Uu&j)!WcNuWb~=u`mTv=lsGu`udKA^O*H|7QBar z1ne3rU>fKAI>LCbj4xn(8x3O`F7+y4D(B$6Hx{r(UjvZHoH=k1H{NlT`=x^s7 zM_t#zyKZP`f+;t`FcmU(UO{ui8W^=&w^39K~`D&IiLe zcvlP*#k0#er!lvl$DM2)I*%gy+xvdj5T-4@k5e490cSr=&aGI)bG5!%XV+NpK42g; zJ$>q&UykSJvNlmv8+g|gxB#s``X|s|Hy}22IxRRaHs}000N!5Sl9EzZHaPW0vH3piJkbMT%YBRZTZbE$3jD^olWDM#k%RNgLkbEj^vDqoP%|zeS`kGNSG+Ulc}78b%%AUe!qSL z`sbCWS?qwxoP%{Q>)>6?iRu>9IS1>#tb=#esqW1`IJe^*eRJTwmkxclfO99l!MdXb ztJB>}^|}`B-xbC_ZO&)D`CU(ZxMM|y-(cO#x4+E5yPB|r^WL}y>%E$QcQyIuxd!LK zF%8y(b?M>#sf^6x^dF|F`$@KEu689B=_2{fK z_N%Dh1*8pjbLyOb>wAgW^4Asp{9U{4IZ_$lgOrD+$@DkwO_V3F9*=eKK3JxM&aOS@ zopB`CxC+%PAL*NSupXav@E%0mV78gUd3W5H;aO{Lqkh^5V7))q!TVs?cAr&S&guQR z)ovGz3so7b6V|qm%R5-_pLM47%vFz}f8GsXPQOj%oY(Ke)BBRS`(BL2J6KP~I(W|s zZr5qj=lsD~%*3rvqwYVudK)QEEEPW2<(%{N@cakJ{39<#&O2C7&N_H^ftc}6T{&mZ z)nx5<&bNO(vfp2ldh6v#yo2?0SO@PJLrxE`E9bMfJkuBFocqzkK1J%A-wfp)tf$91 z)<_o(80N!b6&qsE?W-Pec!4w9@ojk}&Y{~v|1je0#h?!kLBo<)15&iTrM^tW60 z=jn59T^GtTqP0rbO$f9~K>-#v_@n`RTA=a~W&T-WB@NA>!**jRzo^|jp=Gt#D zfpdNhu;wJFh}(Qoxfbm=df>;%!V|A|>vyo859=21^c$tJq-;&^uQQ2r&inMG|CCu< zo?QdRk3N+wY!<*MRjLp1tw%bg$Vr$f(~P z&Z#!|`-1i8tn17>IFAiR%DKAoAuamn!Fhk>&+B!zu0Qn#8U5ffh36ft z_g~hk^PW2A*Q`_e`MI0lxWDHg`vqa+#t!m+JnYiH%6^89 zmsaHOM-DuyaJ+-{bjzRd&c?J5^^**0yo=O1zxHus`{9E4_HUcL z@81cV!%-9mgV+w%(`B8NdC_tZRPnUL@YZ`CvHb>`xqjR$+KY zy`Ehx47z>?=h>AlXE|(L-}nj{S$b$8cn9m*#>32|_mMIk|6g=}meu_`IM23R!x(ql zt+|{F@Qn;u&psyd<{g}8U#4!Bqx_ycWl?y&7u5r4ZwKr7#6@1bPrpF^kjCD|DDf9M zJ{!|aKJ1_zl*RmtB(hpT6K$X_>R}t|A=&QM%6yVf#&;b+zzZqX&H=%6YI) zg8JNDK7;rc4y+HBbv8%m$On%WGB~+xc@(~WLgVNqcPs_{o?LdJ@I2$koEGIg*e4~{ z=O6kR{U)*|ERQVed9XfU)@kfaoYEAQ`S`Ja6NRr=Dce5`;@iDVHs2^4KZ$*d>E0i| zvyBZ_wplPOIwn}pwmhToGWMb4gk?VY*{?<6??35z!td8_+$=l~ix2Buow>lHGM;kkoI{mD0?aL3-EiS~T`_J0kXn`rV2UkC3g z-Is^O#nkyXNw2;A*5N_v!1`cVk22pgd%rC#R;c|o`mXGsbl#^qGs6DX+)J`+42u)+ zo=;rNKk^bO=kyI6SkI?S-3*3t&c1=2x#XTGoRg*7$Rgw1`bXb~r|tQB-RBbhu3p!Y zzbDIhM|(a}8HhBFx_$KEV{@t7!Fs-|hqc?;HGX>e?NMWF-9hWgdpPQB9p^hY@Sbnk 
zi`bv~>J4B$r+X$*8Kk{EWzM%(pC`MzIp@D=zTZoi$D`yQybqrDS=xsZbbp=pHq`6+ zly%CSKey)=vKwbRXbs-;Dc301)3AN&Tizz+iKUtI4%V}e3G2IM**jA4+U=p7bM`yD zuMdkgW=H*=eH_TjVeIge!Mft@_(5^bd8gmNy^qEicrSPyO`dugSTA@P$*$Fd<{Z2a zwyu`NhTk@G@!eouQPzvXIe1rt_dMR0#r=Bvu~0P5!F&2;?5~%T{%yTeBY9UkjXrs> zo^Bizm2>c(ZaF7q+n>I^={27s5opbPBVBYEYV6r(wP_GxK>{Q_#ycZ{C{B?WQ z&)Ct1PhR$_aSq;-m${!)_B|pSM{ePX*Q37Q+_PV$x__qa_#~{exhzo-(B|j z=GRNbzg6WNyr;uEn`eFeqkrvun@q>nA3Nd{GJWA4Wd70ri52rjV}0(ypOWe3YBKhb z&v^WsIOcT1Y%22FZ=O}>9K0vvosH>T8LNxyV){NL`-W}mE7y~`H9rnzW8kdQK7-ji zo+nGrSV>0qKT_-$eO^$R+>6ZK{{2v&`emy~>2%>7y!XfZ{QCbTaoJ>T=q0DEAhWkU zM;0ho{&)gi)-UHEfyo-tB&P>FJoBhhfo@D%!XHf1IQah&m`206(caI%w6hPd1w7ooFS9ftnD9GKdNRog>!yA0q^p5{q%X?0qe9a$L1we zjb&MWP2(KA%Q+6241?#BKgReB;J(&0E)%4k;L3MkTK6@!r`@7$~fb(F^`Bv~A%=^UgXBol! zF zJ9Tg_BDcTo;N8pnk~6=pMO%K!8CNRTR(JOqgY*7)4od~z9p2e|jCswgo>d!R^uUji zg{S{dS=PaM7_rIh;N9VU?*9KEl~KtURSEB-vtryjE*&>-?%sa`=XQUjG5zgv=?Cu) z@6(GnE7_KxI`0N>4$dQ?58gYx)9+)(j{2NZypMk9I8xvGwvw!)-5v?g)OP~!Ht%fg z=*raIO7PCc#hrhk4bHa{wA*7|5x~36`^<{_m0+EXYwzyIR^>P9^O#^}v^#jWd1vEC zX4;>Yu&;0I@RP{`z2{JcZ=O$Zo)Nfx9u~Y4YrLkoW3cCSZD3=}T60t>_8u%a?-R(3 zlYw_aYBxMt$nRrR9OoUJXAEq@!!zEOZX+9b`%AXg^?O)0&T?M$b(Hgrf8tAMw{Ic4 zB@AsO4^Y5#-s_uwM~3%4tPs4j@ucS0{Yoj;*_<_H`kub8I|2Q`%Rp9E;v^U8KrY;H1G7g zl92;HIuPEi@uXTWE5kb4?Mh?b&AiFH&)xSTsf^6$%R9ZFt*zPw&btwaA9*TZGVkm= zlQ?bfr29^O{vvRmXK>?OJqpC(o%JbcGd}IscgWabpU#E%kv;#MEIjeL60D=$-h+r% z8%*VW?xCL&HV=E!I$N5ajs-h=%QN6yD`M1k##G*CF1aUD)|Z^I5}d1zhL*YC>uItx zeWz}-?7cc})2Tf_e#~i^@IJcV5rmC#%QY+N8try1aSnCLMBdq)h{M$b(&c@4mZ&hE z6gUq>LhBt&$x>Q-8I(HT7bFC@S$2k*upZL^QQsG^N@uX0n>%(dqo|wkFH5cT< zA&Gcr<4Mgu@RLH<>EJvKI`lh+X}r(gyq%Qkb2903G|7AO)5~uM=lWq_j7O&NzJ$(K z-4E+yho4mF@uX0n8-r{jM~&ir^Y6*X0Y}B+z3Agff%8P+Z(L%Nc%QxJhonrOn?Xmv?ZkJu*r=2;S4-9h@tTfial}@5y)v=fOF6SMb8;gdPj)V7L-od$+;cmQB z;N8nRI5!>%tpxz@Ht*nE>oA9N9R*D19h?INlN2zOcW`b}l*3o9fN8vgb6)aTo2Y;( zyn}NSqZ>Z^70{h`aPBX0{B5!Vy7CUrO^#~#E>a*q?^Vw@?*2I(Ch2_33v|49dfb65 z;9OMF*bW7{^G!q=&&vKju9&<>;{1YJqh5nBzZ&4sQUTTLcW|y{Si-4{6zK52B})6d 
zNcB57&nO^pSgjRMjdyUabvVMgj1^FYcW|C@AmH#uC{T3X!MPD(2q$w=ps2iq^PBEaYnz^gzk-mUo}C3W@g zQ*y?copFxuQoV(Mzo0-)3gpH+IM1m>aT+KfRv>fU!MRv`SfD`83d}u6{)jT4+V}6> z@vXpl&LxS{L4j}uV)1VE&)4Ml=;ayow};D)J)poq6o|w7?CNb~WRF7wc^{tt0I6NO z9?#VVQi?b!6o{rk9NrhUyhW;4uO*XT_&S+5?o2Yd>_RfL@J4xnO>XQ31qP}> zEZ+McV;bYV4z!eUawuRckTLJz9DxP}3Zwwzz5Wc@O8e-ew#lRH2hIyr#>fg3;0mP7 zJ2>aUVhsffs6a}*gYyEGEwTg!{0b!G9i00G#oth%zzS5KC%e*n_jNJv-`BBp1$GNr zg91<>K!JnSllRd6x(g}quZJ+t>=&^80}g{tPyh-jqQLyK~EKLlG@)a?I^YXwzPxP-Y%7- z_ExFX^z7iv-(soMklCq_LlXPD)b16)d;GVl-7A3C{wB411@PK`OYL3(y!JM=mn8XX zF?-X!gE##mvp2+cX7&o;NzdH(3E;6a_k99*?96?i03JJY-zR{_-nQ%|Vg6gTz3JM) zlYY^*H$-;Z>=D47-ge(BfZJ}n?-jspx83&&;I`ZDdj)XY+nxNC1o>}qd(+v$oqmzq z8^LyNb_?K2&)xS3;Ieb~Jp#Dw+I|6jm zyWMvQ(6zhWcL~t7yWMvQ(6zhWcL~t7w>|kQ_2j?hwKqFEbki?-?G3-(Ywilb(|g@_ z1mJeB`;GwI?seZ0fZM(9I|6Wf+n2wRFaIsSy=m{j(=Yn%4X@pA<^pum``zyf(6Rg7 z?+Vbd``zyf(6Rg7?+Vbdw|nwe^5nnOYj0XRbkZ;O+8b_ruel?Doxaz7E`V+Cb)O4h z+k4&T0@(Im_qhPJy&aIhk}LnMAbXSTV5eUUvNs%ikl7Z%N+0BYM*zzn?oBKIu;s9of~B>=UH+_wavc9Hv*0MyV zbD!P|v}Nwodx5sN^x5%uX^Tsr*>`Ho+@}Jx#ih^E@6c|umH1z~CAKlkiHmZ3Im|9k z4r`cxVfL0>!V}waduaTG#a~$bhQ)svKcW0d1)%&&1)%)P0AkkqiBNKpKzhnZi z>z_;jcKwtI&|r3n08M6>2tbYD^@msh+7(07iv`%pvWo?vT|qRxSOB~J6bsO)e}w`x z?c)mtpyy`i7exT;Nn&=90IVm9*+l|S_bt0f0M--6(u)Kb;`YT-;J*g9w*t*pN0xQ` zcCei$x6dCefYp&^r4JUs>WH)K!2&S%?c)avV08pq>4OCr>ZT8lfQDla6rkzY0|j7a zJAZ)@VD}^1`3n-j?uWGPK>}Frca9$;k{C)&F?sx5e z0Xpt??S26|?sx5e0Xpt??S27%PnG7NG0C%kCDS>%PnG7NG0C z%kCCnC?J1s0UAMemjKNmyGwws*?oRT0LOi|-4VcX-)(mUaNKv>9RVEo-F8QSp}_n( z0yKi{T>+ZG_O1Yq*>ip_fXjW4oeSV{-(%+jxZL;Hxd1NrJ$5d@kRX3tfQHE45uhou zcLZ>mz2~?rGAz}V(0UBbvB|uYbw*+vTedlKa zc-;5dnE)R5eRd{*$9-{VXF@!P(~pMHDT^W(7R&tcE6 M!=8VWp6BQP56Gi@%>V!Z literal 0 HcmV?d00001 From adf8a5db1289cafc5ca034fc31b0c98c7996268c Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 11:32:01 +0100 Subject: [PATCH 315/730] Don't document edit-trace bin --- edit-trace/Cargo.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/edit-trace/Cargo.toml 
b/edit-trace/Cargo.toml index 7514e626..ce54bed5 100644 --- a/edit-trace/Cargo.toml +++ b/edit-trace/Cargo.toml @@ -12,6 +12,10 @@ criterion = "0.3.5" json = "0.12.4" rand = "^0.8" +[[bin]] +name = "edit-trace" +doc = false + [[bench]] name = "main" harness = false From 37e29e447303144f389231734b05c484f0c8abc3 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 11:41:39 +0100 Subject: [PATCH 316/730] Remove docs cache The docs aren't built with deps so it should be relatively quick to do without a cache. The cache is also messing with keeping things from previous versions (e.g. edit-trace). --- .github/workflows/docs.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 6d107184..f8a3e378 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -19,9 +19,6 @@ jobs: toolchain: stable override: true - - name: Cache - uses: Swatinem/rust-cache@v1 - - name: Build docs uses: actions-rs/cargo@v1 with: From 8e6306b54671971834814dfa52073747ab40ee3d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Sat, 23 Apr 2022 11:44:12 +0100 Subject: [PATCH 317/730] Re-add caching and just clean docs dir from cache --- .github/workflows/docs.yaml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index f8a3e378..1e928e6e 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -19,6 +19,15 @@ jobs: toolchain: stable override: true + - name: Cache + uses: Swatinem/rust-cache@v1 + + - name: Clean docs dir + uses: actions-rs/cargo@v1 + with: + command: clean + args: --doc + - name: Build docs uses: actions-rs/cargo@v1 with: From 1f86a92ca13dd1c0cebbef9d08c6f640e1be3dab Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 25 Apr 2022 12:47:07 -0400 Subject: [PATCH 318/730] typo --- automerge-wasm/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-wasm/README.md 
b/automerge-wasm/README.md index 31670278..0e37fcf7 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -167,7 +167,7 @@ Maps are key/value stores. The root object is always a map. The keys are alway ### Lists -Lists are index addressable sets of values. These values can be any scalar or object type. You can manipulate lists with `insert()`, `put()`, `insertObject()`, `pubObject()`, `push()`, `pushObject()`, `splice()`, and `delete()`. +Lists are index addressable sets of values. These values can be any scalar or object type. You can manipulate lists with `insert()`, `put()`, `insertObject()`, `putObject()`, `push()`, `pushObject()`, `splice()`, and `delete()`. ```javascript let doc = create() From ca8a2a07628479b60e2c95be97dc37a6755c60c7 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 28 Apr 2022 14:13:36 +0100 Subject: [PATCH 319/730] Add cmake deps to nix flake --- flake.nix | 3 +++ 1 file changed, 3 insertions(+) diff --git a/flake.nix b/flake.nix index ea17d00b..cc1b420f 100644 --- a/flake.nix +++ b/flake.nix @@ -52,6 +52,9 @@ nodejs yarn + cmake + cmocka + rnix-lsp nixpkgs-fmt ]; From a388ffbf195dae6e32dd59af8036586d14c0bd2e Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 22 Apr 2022 15:53:41 +0100 Subject: [PATCH 320/730] Add some benches --- automerge/Cargo.toml | 9 ++++ automerge/benches/map.rs | 84 ++++++++++++++++++++++++++++++++++++++ automerge/benches/range.rs | 52 +++++++++++++++++++++++ 3 files changed, 145 insertions(+) create mode 100644 automerge/benches/map.rs create mode 100644 automerge/benches/range.rs diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index ae95fa4e..e3f1ec23 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -43,3 +43,12 @@ proptest = { version = "^1.0.0", default-features = false, features = ["std"] } serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-features=true } maplit = { version = "^1.0" } decorum = "0.3.1" +criterion = "0.3.5" + 
+[[bench]] +name = "range" +harness = false + +[[bench]] +name = "map" +harness = false diff --git a/automerge/benches/map.rs b/automerge/benches/map.rs new file mode 100644 index 00000000..cc7c6426 --- /dev/null +++ b/automerge/benches/map.rs @@ -0,0 +1,84 @@ +use automerge::{transaction::Transactable, Automerge, ScalarValue, ROOT}; +use criterion::{black_box, criterion_group, criterion_main, Criterion}; + +fn repeated_increment(n: u64) -> Automerge { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + tx.put(ROOT, "counter", ScalarValue::counter(0)).unwrap(); + for _ in 0..n { + tx.increment(ROOT, "counter", 1).unwrap(); + } + tx.commit(); + doc +} + +fn repeated_put(n: u64) -> Automerge { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + for i in 0..n { + tx.put(ROOT, "0", i).unwrap(); + } + tx.commit(); + doc +} + +fn increasing_put(n: u64) -> Automerge { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + for i in 0..n { + tx.put(ROOT, i.to_string(), i).unwrap(); + } + tx.commit(); + doc +} + +fn decreasing_put(n: u64) -> Automerge { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + for i in (0..n).rev() { + tx.put(ROOT, i.to_string(), i).unwrap(); + } + tx.commit(); + doc +} + +fn criterion_benchmark(c: &mut Criterion) { + let small = 1_000; + + c.bench_function(&format!("repeated increment {}", small), |b| { + b.iter(|| repeated_increment(black_box(small))) + }); + + c.bench_function(&format!("repeated put {}", small), |b| { + b.iter(|| repeated_put(black_box(small))) + }); + + c.bench_function(&format!("increasing put {}", small), |b| { + b.iter(|| increasing_put(black_box(small))) + }); + + c.bench_function(&format!("decreasing put {}", small), |b| { + b.iter(|| decreasing_put(black_box(small))) + }); + + let large = 10_000; + + c.bench_function(&format!("repeated increment {}", large), |b| { + b.iter(|| repeated_increment(black_box(large))) + }); + + 
c.bench_function(&format!("repeated put {}", large), |b| { + b.iter(|| repeated_put(black_box(large))) + }); + + c.bench_function(&format!("increasing put {}", large), |b| { + b.iter(|| increasing_put(black_box(large))) + }); + + c.bench_function(&format!("decreasing put {}", large), |b| { + b.iter(|| decreasing_put(black_box(large))) + }); +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); diff --git a/automerge/benches/range.rs b/automerge/benches/range.rs new file mode 100644 index 00000000..aec5c293 --- /dev/null +++ b/automerge/benches/range.rs @@ -0,0 +1,52 @@ +use automerge::{transaction::Transactable, Automerge, ROOT}; +use criterion::{black_box, criterion_group, criterion_main, Criterion}; + +fn doc(n: u64) -> Automerge { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + for i in 0..n { + tx.put(ROOT, i.to_string(), i.to_string()).unwrap(); + } + tx.commit(); + doc +} + +fn range(doc: &Automerge) { + let range = doc.values(ROOT); + range.for_each(drop); +} + +fn range_rev(doc: &Automerge) { + let range = doc.values(ROOT).rev(); + range.for_each(drop); +} + +fn range_at(doc: &Automerge) { + let range = doc.values_at(ROOT, &doc.get_heads()); + range.for_each(drop); +} + +fn range_at_rev(doc: &Automerge) { + let range = doc.values_at(ROOT, &doc.get_heads()).rev(); + range.for_each(drop); +} + +fn criterion_benchmark(c: &mut Criterion) { + let n = 100_000; + let doc = doc(n); + c.bench_function(&format!("range {}", n), |b| { + b.iter(|| range(black_box(&doc))) + }); + c.bench_function(&format!("range rev {}", n), |b| { + b.iter(|| range_rev(black_box(&doc))) + }); + c.bench_function(&format!("range_at {}", n), |b| { + b.iter(|| range_at(black_box(&doc))) + }); + c.bench_function(&format!("range_at rev {}", n), |b| { + b.iter(|| range_at_rev(black_box(&doc))) + }); +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); From bdacaa1703cab89da398ac90df2f5ae508a6496f Mon Sep 17 00:00:00 
2001 From: Andrew Jeffery Date: Fri, 22 Apr 2022 15:59:22 +0100 Subject: [PATCH 321/730] Use treequery rather than repeated gets --- automerge/src/query/prop.rs | 36 ++++++++++++++++++++++++------------ 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs index 7fcb8559..3325f12f 100644 --- a/automerge/src/query/prop.rs +++ b/automerge/src/query/prop.rs @@ -9,6 +9,7 @@ pub(crate) struct Prop<'a> { pub(crate) ops: Vec<&'a Op>, pub(crate) ops_pos: Vec, pub(crate) pos: usize, + start: Option, } impl<'a> Prop<'a> { @@ -18,6 +19,7 @@ impl<'a> Prop<'a> { ops: vec![], ops_pos: vec![], pos: 0, + start: None, } } } @@ -28,19 +30,29 @@ impl<'a> TreeQuery<'a> for Prop<'a> { child: &'a OpTreeNode, m: &OpSetMetadata, ) -> QueryResult { - let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); - self.pos = start; - for pos in start..child.len() { - let op = child.get(pos).unwrap(); - if op.key != self.key { - break; - } - if op.visible() { - self.ops.push(op); - self.ops_pos.push(pos); - } + // in the root node find the first op position for the key + if self.start.is_none() { + let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); + self.start = Some(start); + }; + QueryResult::Descend + } + + fn query_element(&mut self, op: &'a Op) -> QueryResult { + // skip to our start + if self.pos < self.start.unwrap() { self.pos += 1; + return QueryResult::Next; } - QueryResult::Finish + // don't bother looking at things past our key + if op.key != self.key { + return QueryResult::Finish; + } + if op.visible() { + self.ops.push(op); + self.ops_pos.push(self.pos); + } + self.pos += 1; + QueryResult::Next } } From bb4727ac349b22fbcbbfaa260f536ba4f6f11114 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 22 Apr 2022 16:14:51 +0100 Subject: [PATCH 322/730] Skip empty nodes in prop query --- automerge/src/query/prop.rs | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) 
diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs index 3325f12f..87084b58 100644 --- a/automerge/src/query/prop.rs +++ b/automerge/src/query/prop.rs @@ -34,8 +34,16 @@ impl<'a> TreeQuery<'a> for Prop<'a> { if self.start.is_none() { let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); self.start = Some(start); - }; - QueryResult::Descend + QueryResult::Descend + } else { + // skip empty nodes + if child.index.visible_len() == 0 { + self.pos += child.len(); + QueryResult::Next + } else { + QueryResult::Descend + } + } } fn query_element(&mut self, op: &'a Op) -> QueryResult { From 7dfe311aae8b1730d80ac768a271a484893bb117 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 22 Apr 2022 16:17:18 +0100 Subject: [PATCH 323/730] Store keys as well as elemids in visible index --- automerge/src/query.rs | 43 ++++++++++------------- automerge/src/query/elem_id_pos.rs | 7 ++-- automerge/src/query/insert.rs | 24 ++++++------- automerge/src/query/nth.rs | 16 +++++---- automerge/src/query/seek_op_with_patch.rs | 14 ++++---- 5 files changed, 52 insertions(+), 52 deletions(-) diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 06225885..fc21d4fd 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -1,5 +1,5 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::types::{Clock, Counter, ElemId, Op, OpId, OpType, ScalarValue}; +use crate::types::{Clock, Counter, Key, Op, OpId, OpType, ScalarValue}; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::{HashMap, HashSet}; @@ -83,7 +83,7 @@ pub(crate) enum QueryResult { #[derive(Clone, Debug, PartialEq)] pub(crate) struct Index { /// The map of visible elements to the number of operations targetting them. - pub(crate) visible: HashMap, + pub(crate) visible: HashMap, /// Set of opids found in this node and below. 
pub(crate) ops: HashSet, } @@ -101,12 +101,8 @@ impl Index { self.visible.len() } - pub(crate) fn has_visible(&self, e: &Option) -> bool { - if let Some(seen) = e { - self.visible.contains_key(seen) - } else { - false - } + pub(crate) fn has_visible(&self, seen: &Key) -> bool { + self.visible.contains_key(seen) } pub(crate) fn replace(&mut self, old: &Op, new: &Op) { @@ -117,17 +113,17 @@ impl Index { assert!(new.key == old.key); - match (new.visible(), old.visible(), new.elemid()) { - (false, true, Some(elem)) => match self.visible.get(&elem).copied() { + match (new.visible(), old.visible(), new.elemid_or_key()) { + (false, true, key) => match self.visible.get(&key).copied() { Some(n) if n == 1 => { - self.visible.remove(&elem); + self.visible.remove(&key); } Some(n) => { - self.visible.insert(elem, n - 1); + self.visible.insert(key, n - 1); } None => panic!("remove overun in index"), }, - (true, false, Some(elem)) => *self.visible.entry(elem).or_default() += 1, + (true, false, key) => *self.visible.entry(key).or_default() += 1, _ => {} } } @@ -135,25 +131,22 @@ impl Index { pub(crate) fn insert(&mut self, op: &Op) { self.ops.insert(op.id); if op.visible() { - if let Some(elem) = op.elemid() { - *self.visible.entry(elem).or_default() += 1; - } + *self.visible.entry(op.elemid_or_key()).or_default() += 1; } } pub(crate) fn remove(&mut self, op: &Op) { self.ops.remove(&op.id); if op.visible() { - if let Some(elem) = op.elemid() { - match self.visible.get(&elem).copied() { - Some(n) if n == 1 => { - self.visible.remove(&elem); - } - Some(n) => { - self.visible.insert(elem, n - 1); - } - None => panic!("remove overun in index"), + let key = op.elemid_or_key(); + match self.visible.get(&key).copied() { + Some(n) if n == 1 => { + self.visible.remove(&key); } + Some(n) => { + self.visible.insert(key, n - 1); + } + None => panic!("remove overun in index"), } } } diff --git a/automerge/src/query/elem_id_pos.rs b/automerge/src/query/elem_id_pos.rs index 214a197a..809b6061 
100644 --- a/automerge/src/query/elem_id_pos.rs +++ b/automerge/src/query/elem_id_pos.rs @@ -1,4 +1,7 @@ -use crate::{op_tree::OpTreeNode, types::ElemId}; +use crate::{ + op_tree::OpTreeNode, + types::{ElemId, Key}, +}; use super::{QueryResult, TreeQuery}; @@ -30,7 +33,7 @@ impl ElemIdPos { impl<'a> TreeQuery<'a> for ElemIdPos { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { // if index has our element then we can continue - if child.index.has_visible(&Some(self.elemid)) { + if child.index.has_visible(&Key::Seq(self.elemid)) { // element is in this node somewhere QueryResult::Descend } else { diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index 6f69474c..9e495c49 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -16,15 +16,15 @@ pub(crate) struct InsertNth { valid: Option, /// last_seen is the target elemid of the last `seen` operation. /// It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes. - last_seen: Option, + last_seen: Option, last_insert: Option, - last_valid_insert: Option, + last_valid_insert: Option, } impl InsertNth { pub(crate) fn new(target: usize) -> Self { let (valid, last_valid_insert) = if target == 0 { - (Some(0), Some(HEAD)) + (Some(0), Some(Key::Seq(HEAD))) } else { (None, None) }; @@ -44,10 +44,8 @@ impl InsertNth { } pub(crate) fn key(&self) -> Result { - Ok(self - .last_valid_insert - .ok_or(AutomergeError::InvalidIndex(self.target))? 
- .into()) + self.last_valid_insert + .ok_or(AutomergeError::InvalidIndex(self.target)) //if self.target == 0 { /* if self.last_insert.is_none() { @@ -65,8 +63,10 @@ impl<'a> TreeQuery<'a> for InsertNth { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { // if this node has some visible elements then we may find our target within let mut num_vis = child.index.visible_len(); - if child.index.has_visible(&self.last_seen) { - num_vis -= 1; + if let Some(last_seen) = self.last_seen { + if child.index.has_visible(&last_seen) { + num_vis -= 1; + } } if self.seen + num_vis >= self.target { @@ -83,9 +83,9 @@ impl<'a> TreeQuery<'a> for InsertNth { // - the insert was at a previous node and this is a long run of overwrites so last_seen should already be set correctly // - the visible op is in this node and the elemid references it so it can be set here // - the visible op is in a future node and so it will be counted as seen there - let last_elemid = child.last().elemid(); + let last_elemid = child.last().elemid_or_key(); if child.index.has_visible(&last_elemid) { - self.last_seen = last_elemid; + self.last_seen = Some(last_elemid); } QueryResult::Next } @@ -104,7 +104,7 @@ impl<'a> TreeQuery<'a> for InsertNth { return QueryResult::Finish; } self.seen += 1; - self.last_seen = element.elemid(); + self.last_seen = Some(element.elemid_or_key()); self.last_valid_insert = self.last_seen } self.n += 1; diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index 3924fc62..f73f2a10 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -1,7 +1,7 @@ use crate::error::AutomergeError; use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery}; -use crate::types::{ElemId, Key, Op}; +use crate::types::{Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -10,7 +10,7 @@ pub(crate) struct Nth<'a> { seen: usize, /// last_seen is the target elemid of the last `seen` operation. 
/// It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes. - last_seen: Option, + last_seen: Option, pub(crate) ops: Vec<&'a Op>, pub(crate) ops_pos: Vec, pub(crate) pos: usize, @@ -42,8 +42,10 @@ impl<'a> Nth<'a> { impl<'a> TreeQuery<'a> for Nth<'a> { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { let mut num_vis = child.index.visible_len(); - if child.index.has_visible(&self.last_seen) { - num_vis -= 1; + if let Some(last_seen) = self.last_seen { + if child.index.has_visible(&last_seen) { + num_vis -= 1; + } } if self.seen + num_vis > self.target { @@ -59,9 +61,9 @@ impl<'a> TreeQuery<'a> for Nth<'a> { // - the insert was at a previous node and this is a long run of overwrites so last_seen should already be set correctly // - the visible op is in this node and the elemid references it so it can be set here // - the visible op is in a future node and so it will be counted as seen there - let last_elemid = child.last().elemid(); + let last_elemid = child.last().elemid_or_key(); if child.index.has_visible(&last_elemid) { - self.last_seen = last_elemid; + self.last_seen = Some(last_elemid); } QueryResult::Next } @@ -79,7 +81,7 @@ impl<'a> TreeQuery<'a> for Nth<'a> { if visible && self.last_seen.is_none() { self.seen += 1; // we have a new visible element - self.last_seen = element.elemid() + self.last_seen = Some(element.elemid_or_key()) } if self.seen == self.target + 1 && visible { self.ops.push(element); diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index c0431de8..6958a972 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::types::{ElemId, Key, Op, HEAD}; +use crate::types::{Key, Op, HEAD}; use std::cmp::Ordering; use std::fmt::Debug; @@ -11,7 +11,7 @@ 
pub(crate) struct SeekOpWithPatch<'a> { pub(crate) succ: Vec, found: bool, pub(crate) seen: usize, - last_seen: Option, + last_seen: Option, pub(crate) values: Vec<&'a Op>, pub(crate) had_value_before: bool, } @@ -55,7 +55,7 @@ impl<'a> SeekOpWithPatch<'a> { } if e.visible() && self.last_seen.is_none() { self.seen += 1; - self.last_seen = e.elemid() + self.last_seen = Some(e.elemid_or_key()) } } } @@ -104,8 +104,10 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // subtree contains a *visible* (i.e. empty succs) operation for the list // element with elemId `last_seen`; this will subtract one even if all // values for this list element have been deleted in this subtree. - if child.index.has_visible(&self.last_seen) { - num_vis -= 1; + if let Some(last_seen) = self.last_seen { + if child.index.has_visible(&last_seen) { + num_vis -= 1; + } } self.seen += num_vis; @@ -114,7 +116,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // the last operation's elemId regardless of whether it's visible or not. // This will lead to incorrect counting if `last_seen` is not visible: it's // not counted towards `num_vis`, so we shouldn't be subtracting 1. 
- self.last_seen = child.last().elemid(); + self.last_seen = Some(child.last().elemid_or_key()); } QueryResult::Next } From db280c3d1d831569b1134e477c729bc4fb703702 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 22 Apr 2022 16:52:46 +0100 Subject: [PATCH 324/730] prop: Skip over nodes --- automerge/src/query/prop.rs | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs index 87084b58..96a47614 100644 --- a/automerge/src/query/prop.rs +++ b/automerge/src/query/prop.rs @@ -30,19 +30,24 @@ impl<'a> TreeQuery<'a> for Prop<'a> { child: &'a OpTreeNode, m: &OpSetMetadata, ) -> QueryResult { - // in the root node find the first op position for the key - if self.start.is_none() { + if let Some(start) = self.start { + if self.pos + child.len() >= start { + // skip empty nodes + if child.index.visible_len() == 0 { + self.pos += child.len(); + QueryResult::Next + } else { + QueryResult::Descend + } + } else { + self.pos += child.len(); + QueryResult::Next + } + } else { + // in the root node find the first op position for the key let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); self.start = Some(start); QueryResult::Descend - } else { - // skip empty nodes - if child.index.visible_len() == 0 { - self.pos += child.len(); - QueryResult::Next - } else { - QueryResult::Descend - } } } From c38b49609f9c0c12f8b20b2d919a8be9c3979b8e Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 22 Apr 2022 17:13:47 +0100 Subject: [PATCH 325/730] Remove clone from update The cloning of the op was eating up a significant part of the increment operation's time. This makes it zero-clone and just extracts the fields needed. 
--- automerge/src/op_tree.rs | 38 +++++++++++++++++++++++++++----------- automerge/src/query.rs | 39 ++++++++++++++++++++++++++++----------- 2 files changed, 55 insertions(+), 22 deletions(-) diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 908522d5..2ec8f6a9 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -8,7 +8,7 @@ use std::{ pub(crate) use crate::op_set::OpSetMetadata; use crate::{ clock::Clock, - query::{self, Index, QueryResult, TreeQuery}, + query::{self, Index, QueryResult, ReplaceArgs, TreeQuery}, }; use crate::{ types::{ObjId, Op, OpId}, @@ -552,16 +552,24 @@ impl OpTreeNode { /// Update the operation at the given index using the provided function. /// /// This handles updating the indices after the update. - pub(crate) fn update(&mut self, index: usize, f: F) -> (Op, &Op) + pub(crate) fn update(&mut self, index: usize, f: F) -> ReplaceArgs where F: FnOnce(&mut Op), { if self.is_leaf() { let new_element = self.elements.get_mut(index).unwrap(); - let old_element = new_element.clone(); + let old_id = new_element.id; + let old_visible = new_element.visible(); f(new_element); - self.index.replace(&old_element, new_element); - (old_element, new_element) + let replace_args = ReplaceArgs { + old_id, + new_id: new_element.id, + old_visible, + new_visible: new_element.visible(), + new_key: new_element.elemid_or_key(), + }; + self.index.replace(&replace_args); + replace_args } else { let mut cumulative_len = 0; let len = self.len(); @@ -572,15 +580,23 @@ impl OpTreeNode { } Ordering::Equal => { let new_element = self.elements.get_mut(child_index).unwrap(); - let old_element = new_element.clone(); + let old_id = new_element.id; + let old_visible = new_element.visible(); f(new_element); - self.index.replace(&old_element, new_element); - return (old_element, new_element); + let replace_args = ReplaceArgs { + old_id, + new_id: new_element.id, + old_visible, + new_visible: new_element.visible(), + new_key: 
new_element.elemid_or_key(), + }; + self.index.replace(&replace_args); + return replace_args; } Ordering::Greater => { - let (old_element, new_element) = child.update(index - cumulative_len, f); - self.index.replace(&old_element, new_element); - return (old_element, new_element); + let replace_args = child.update(index - cumulative_len, f); + self.index.replace(&replace_args); + return replace_args; } } } diff --git a/automerge/src/query.rs b/automerge/src/query.rs index fc21d4fd..f39b0b9a 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -41,6 +41,16 @@ pub(crate) use range_at::RangeAt; pub(crate) use seek_op::SeekOp; pub(crate) use seek_op_with_patch::SeekOpWithPatch; +// use a struct for the args for clarity as they are passed up the update chain in the optree +#[derive(Debug, Clone)] +pub(crate) struct ReplaceArgs { + pub(crate) old_id: OpId, + pub(crate) new_id: OpId, + pub(crate) old_visible: bool, + pub(crate) new_visible: bool, + pub(crate) new_key: Key, +} + #[derive(Debug, Clone, PartialEq)] pub(crate) struct CounterData { pos: usize, @@ -105,25 +115,32 @@ impl Index { self.visible.contains_key(seen) } - pub(crate) fn replace(&mut self, old: &Op, new: &Op) { - if old.id != new.id { - self.ops.remove(&old.id); - self.ops.insert(new.id); + pub(crate) fn replace( + &mut self, + ReplaceArgs { + old_id, + new_id, + old_visible, + new_visible, + new_key, + }: &ReplaceArgs, + ) { + if old_id != new_id { + self.ops.remove(old_id); + self.ops.insert(*new_id); } - assert!(new.key == old.key); - - match (new.visible(), old.visible(), new.elemid_or_key()) { - (false, true, key) => match self.visible.get(&key).copied() { + match (new_visible, old_visible, new_key) { + (false, true, key) => match self.visible.get(key).copied() { Some(n) if n == 1 => { - self.visible.remove(&key); + self.visible.remove(key); } Some(n) => { - self.visible.insert(key, n - 1); + self.visible.insert(*key, n - 1); } None => panic!("remove overun in index"), }, - (true, 
false, key) => *self.visible.entry(key).or_default() += 1, + (true, false, key) => *self.visible.entry(*key).or_default() += 1, _ => {} } } From 7de0cff2c9b13eecd986c86bc28d7fdefb495b43 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 22 Apr 2022 18:13:44 +0100 Subject: [PATCH 326/730] Rework benchmarks to be in a group --- automerge/benches/map.rs | 60 ++++++++++++++++++---------------------- 1 file changed, 27 insertions(+), 33 deletions(-) diff --git a/automerge/benches/map.rs b/automerge/benches/map.rs index cc7c6426..747593e4 100644 --- a/automerge/benches/map.rs +++ b/automerge/benches/map.rs @@ -1,5 +1,5 @@ use automerge::{transaction::Transactable, Automerge, ScalarValue, ROOT}; -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; fn repeated_increment(n: u64) -> Automerge { let mut doc = Automerge::new(); @@ -43,41 +43,35 @@ fn decreasing_put(n: u64) -> Automerge { } fn criterion_benchmark(c: &mut Criterion) { - let small = 1_000; + let sizes = [100, 1_000, 10_000]; - c.bench_function(&format!("repeated increment {}", small), |b| { - b.iter(|| repeated_increment(black_box(small))) - }); + let mut group = c.benchmark_group("map"); + for size in &sizes { + group.throughput(criterion::Throughput::Elements(*size)); + group.bench_with_input(BenchmarkId::new("repeated put", size), size, |b, &size| { + b.iter(|| repeated_put(size)) + }); + group.bench_with_input( + BenchmarkId::new("repeated increment", size), + size, + |b, &size| b.iter(|| repeated_increment(size)), + ); - c.bench_function(&format!("repeated put {}", small), |b| { - b.iter(|| repeated_put(black_box(small))) - }); + group.throughput(criterion::Throughput::Elements(*size)); + group.bench_with_input( + BenchmarkId::new("increasing put", size), + size, + |b, &size| b.iter(|| increasing_put(size)), + ); - c.bench_function(&format!("increasing put {}", small), |b| { - b.iter(|| 
increasing_put(black_box(small))) - }); - - c.bench_function(&format!("decreasing put {}", small), |b| { - b.iter(|| decreasing_put(black_box(small))) - }); - - let large = 10_000; - - c.bench_function(&format!("repeated increment {}", large), |b| { - b.iter(|| repeated_increment(black_box(large))) - }); - - c.bench_function(&format!("repeated put {}", large), |b| { - b.iter(|| repeated_put(black_box(large))) - }); - - c.bench_function(&format!("increasing put {}", large), |b| { - b.iter(|| increasing_put(black_box(large))) - }); - - c.bench_function(&format!("decreasing put {}", large), |b| { - b.iter(|| decreasing_put(black_box(large))) - }); + group.throughput(criterion::Throughput::Elements(*size)); + group.bench_with_input( + BenchmarkId::new("decreasing put", size), + size, + |b, &size| b.iter(|| decreasing_put(size)), + ); + } + group.finish(); } criterion_group!(benches, criterion_benchmark); From 8baacb281bc3bc0f75877105fe05abd74c8cc557 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 26 Apr 2022 17:57:35 +0100 Subject: [PATCH 327/730] Add save and load map benchmarks --- automerge/benches/map.rs | 100 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 100 insertions(+) diff --git a/automerge/benches/map.rs b/automerge/benches/map.rs index 747593e4..9aad6941 100644 --- a/automerge/benches/map.rs +++ b/automerge/benches/map.rs @@ -72,6 +72,106 @@ fn criterion_benchmark(c: &mut Criterion) { ); } group.finish(); + + let mut group = c.benchmark_group("map save"); + for size in &sizes { + group.throughput(criterion::Throughput::Elements(*size)); + group.bench_with_input(BenchmarkId::new("repeated put", size), size, |b, &size| { + b.iter_batched( + || repeated_put(size), + |mut doc| doc.save(), + criterion::BatchSize::LargeInput, + ) + }); + group.bench_with_input( + BenchmarkId::new("repeated increment", size), + size, + |b, &size| { + b.iter_batched( + || repeated_increment(size), + |mut doc| doc.save(), + criterion::BatchSize::LargeInput, + ) + 
}, + ); + + group.throughput(criterion::Throughput::Elements(*size)); + group.bench_with_input( + BenchmarkId::new("increasing put", size), + size, + |b, &size| { + b.iter_batched( + || increasing_put(size), + |mut doc| doc.save(), + criterion::BatchSize::LargeInput, + ) + }, + ); + + group.throughput(criterion::Throughput::Elements(*size)); + group.bench_with_input( + BenchmarkId::new("decreasing put", size), + size, + |b, &size| { + b.iter_batched( + || decreasing_put(size), + |mut doc| doc.save(), + criterion::BatchSize::LargeInput, + ) + }, + ); + } + group.finish(); + + let mut group = c.benchmark_group("map load"); + for size in &sizes { + group.throughput(criterion::Throughput::Elements(*size)); + group.bench_with_input(BenchmarkId::new("repeated put", size), size, |b, &size| { + b.iter_batched( + || repeated_put(size).save(), + |bytes| Automerge::load(&bytes).unwrap(), + criterion::BatchSize::LargeInput, + ) + }); + group.bench_with_input( + BenchmarkId::new("repeated increment", size), + size, + |b, &size| { + b.iter_batched( + || repeated_increment(size).save(), + |bytes| Automerge::load(&bytes).unwrap(), + criterion::BatchSize::LargeInput, + ) + }, + ); + + group.throughput(criterion::Throughput::Elements(*size)); + group.bench_with_input( + BenchmarkId::new("increasing put", size), + size, + |b, &size| { + b.iter_batched( + || increasing_put(size).save(), + |bytes| Automerge::load(&bytes).unwrap(), + criterion::BatchSize::LargeInput, + ) + }, + ); + + group.throughput(criterion::Throughput::Elements(*size)); + group.bench_with_input( + BenchmarkId::new("decreasing put", size), + size, + |b, &size| { + b.iter_batched( + || decreasing_put(size).save(), + |bytes| Automerge::load(&bytes).unwrap(), + criterion::BatchSize::LargeInput, + ) + }, + ); + } + group.finish(); } criterion_group!(benches, criterion_benchmark); From 6bf03e006c5924ddb4dd69691ff785fd02d4e6d3 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 27 Apr 2022 19:09:52 +0100 Subject: 
[PATCH 328/730] Add ability to skip in tree searches --- automerge/src/op_tree.rs | 58 ++++++++++++++++++++++++++++--------- automerge/src/query.rs | 2 ++ automerge/src/query/prop.rs | 8 ++--- 3 files changed, 49 insertions(+), 19 deletions(-) diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 2ec8f6a9..42be72c3 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -98,7 +98,8 @@ impl OpTreeInternal { self.root_node .as_ref() .map(|root| match query.query_node_with_metadata(root, m) { - QueryResult::Descend => root.search(&mut query, m), + QueryResult::Descend => root.search(&mut query, m, None), + QueryResult::Skip(skip) => root.search(&mut query, m, Some(skip)), _ => true, }); query @@ -212,31 +213,62 @@ impl OpTreeNode { } } - pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, query: &mut Q, m: &OpSetMetadata) -> bool + pub(crate) fn search<'a, 'b: 'a, Q>( + &'b self, + query: &mut Q, + m: &OpSetMetadata, + skip: Option, + ) -> bool where Q: TreeQuery<'a>, { if self.is_leaf() { - for e in &self.elements { + let skip = skip.unwrap_or(0); + for e in self.elements.iter().skip(skip) { if query.query_element_with_metadata(e, m) == QueryResult::Finish { return true; } } false } else { + let mut skip = skip.unwrap_or(0); for (child_index, child) in self.children.iter().enumerate() { - match query.query_node_with_metadata(child, m) { - QueryResult::Descend => { - if child.search(query, m) { - return true; + match skip.cmp(&child.len()) { + Ordering::Greater => { + // not in this child at all + // take off the number of elements in the child as well as the next element + skip -= child.len() + 1; + } + Ordering::Equal => { + // just try the element + skip -= child.len(); + if let Some(e) = self.elements.get(child_index) { + if query.query_element_with_metadata(e, m) == QueryResult::Finish { + return true; + } } } - QueryResult::Finish => return true, - QueryResult::Next => (), - } - if let Some(e) = self.elements.get(child_index) { - if 
query.query_element_with_metadata(e, m) == QueryResult::Finish { - return true; + Ordering::Less => { + // descend and try find it + match query.query_node_with_metadata(child, m) { + QueryResult::Descend => { + // search in the child node, passing in the number of items left to + // skip + if child.search(query, m, Some(skip)) { + return true; + } + } + QueryResult::Finish => return true, + QueryResult::Next => (), + QueryResult::Skip(_) => panic!("had skip from non-root node"), + } + if let Some(e) = self.elements.get(child_index) { + if query.query_element_with_metadata(e, m) == QueryResult::Finish { + return true; + } + } + // reset the skip to zero so we continue iterating normally + skip = 0; } } } diff --git a/automerge/src/query.rs b/automerge/src/query.rs index f39b0b9a..83f5f045 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -86,6 +86,8 @@ pub(crate) trait TreeQuery<'a> { #[derive(Debug, Clone, PartialEq)] pub(crate) enum QueryResult { Next, + /// Skip this many elements, only allowed from the root node. 
+ Skip(usize), Descend, Finish, } diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs index 96a47614..105b268f 100644 --- a/automerge/src/query/prop.rs +++ b/automerge/src/query/prop.rs @@ -47,16 +47,12 @@ impl<'a> TreeQuery<'a> for Prop<'a> { // in the root node find the first op position for the key let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); self.start = Some(start); - QueryResult::Descend + self.pos = start; + QueryResult::Skip(start) } } fn query_element(&mut self, op: &'a Op) -> QueryResult { - // skip to our start - if self.pos < self.start.unwrap() { - self.pos += 1; - return QueryResult::Next; - } // don't bother looking at things past our key if op.key != self.key { return QueryResult::Finish; From 9e6044c128c89a26f621cc288fb4757f258c75fc Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 29 Apr 2022 15:11:07 -0400 Subject: [PATCH 329/730] fixed panic in doc.values() - fixed concurrency bugs in range --- automerge/src/automerge.rs | 101 ++++++++++++++++++++++++++- automerge/src/op_set.rs | 25 +++++++ automerge/src/op_tree.rs | 19 +++++ automerge/src/query.rs | 4 ++ automerge/src/query/list_range.rs | 55 +++++++++++++++ automerge/src/query/list_range_at.rs | 62 ++++++++++++++++ automerge/src/query/range.rs | 19 +++-- automerge/src/query/range_at.rs | 22 ++++-- automerge/src/values.rs | 17 ++--- automerge/src/values_at.rs | 20 ++---- 10 files changed, 303 insertions(+), 41 deletions(-) create mode 100644 automerge/src/query/list_range.rs create mode 100644 automerge/src/query/list_range_at.rs diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index da9004d6..abeafa24 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -310,7 +310,7 @@ impl Automerge { /// For a list the keys are the element ids (opids) encoded as strings. 
pub fn values>(&self, obj: O) -> Values<'_> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - let iter_range = self.ops.range(obj, ..); + let iter_range = self.ops.list_range(obj, ..); Values::new(self, iter_range) } else { Values::new(self, None) @@ -321,7 +321,7 @@ impl Automerge { pub fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt<'_> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { let clock = self.clock_at(heads); - let iter_range = self.ops.range_at(obj, .., clock); + let iter_range = self.ops.list_range_at(obj, .., clock); ValuesAt::new(self, iter_range) } else { ValuesAt::new(self, None) @@ -1546,6 +1546,103 @@ mod tests { ); } + #[test] + fn get_list_values() -> Result<(), AutomergeError> { + let mut doc1 = Automerge::new(); + let mut tx = doc1.transaction(); + let list = tx.put_object(ROOT, "list", ObjType::List)?; + + // insert elements + tx.insert(&list, 0, "First")?; + tx.insert(&list, 1, "Second")?; + tx.insert(&list, 2, "Third")?; + tx.commit(); + + let v1 = doc1.get_heads(); + let mut doc2 = doc1.fork(); + + let mut tx = doc1.transaction(); + tx.put(&list, 2, "Third V2")?; + tx.commit(); + + let mut tx = doc2.transaction(); + tx.put(&list, 2, "Third V3")?; + tx.commit(); + + doc1.merge(&mut doc2)?; + + assert_eq!(doc1.values(&list).count(), 3); + + for (i, val1) in doc1.values(&list).enumerate() { + let val2 = doc1.get(&list, i)?; + assert_eq!(Some(val1), val2); + } + + assert_eq!(doc1.values_at(&list, &v1).count(), 3); + for (i, val1) in doc1.values_at(&list, &v1).enumerate() { + let val2 = doc1.get_at(&list, i, &v1)?; + assert_eq!(Some(val1), val2); + } + + Ok(()) + } + + #[test] + fn get_range_values() -> Result<(), AutomergeError> { + let mut doc1 = Automerge::new(); + let mut tx = doc1.transaction(); + tx.put(ROOT, "aa", "aaa")?; + tx.put(ROOT, "bb", "bbb")?; + tx.put(ROOT, "cc", "ccc")?; + tx.put(ROOT, "dd", "ddd")?; + tx.commit(); + + let v1 = doc1.get_heads(); + let mut doc2 = doc1.fork(); + + let mut tx = 
doc1.transaction(); + tx.put(ROOT, "cc", "ccc V2")?; + tx.commit(); + + let mut tx = doc2.transaction(); + tx.put(ROOT, "cc", "ccc V3")?; + tx.commit(); + + doc1.merge(&mut doc2)?; + + let range = "b".to_string().."d".to_string(); + + assert_eq!(doc1.range(ROOT, range.clone()).count(), 2); + + for (key, val1, id) in doc1.range(ROOT, range.clone()) { + let val2 = doc1.get(ROOT, key)?; + assert_eq!(Some((val1, id)), val2); + } + + assert_eq!(doc1.range(ROOT, range.clone()).rev().count(), 2); + + for (key, val1, id) in doc1.range(ROOT, range.clone()).rev() { + let val2 = doc1.get(ROOT, key)?; + assert_eq!(Some((val1, id)), val2); + } + + assert_eq!(doc1.range_at(ROOT, range.clone(), &v1).count(), 2); + + for (key, val1, id) in doc1.range_at(ROOT, range.clone(), &v1) { + let val2 = doc1.get_at(ROOT, key, &v1)?; + assert_eq!(Some((val1, id)), val2); + } + + assert_eq!(doc1.range_at(ROOT, range.clone(), &v1).rev().count(), 2); + + for (key, val1, id) in doc1.range_at(ROOT, range, &v1).rev() { + let val2 = doc1.get_at(ROOT, key, &v1)?; + assert_eq!(Some((val1, id)), val2); + } + + Ok(()) + } + #[test] fn range_iter_map_rev() { let mut doc = Automerge::new(); diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 7928fce9..4c2065ed 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -102,6 +102,31 @@ impl OpSetInternal { } } + pub(crate) fn list_range>( + &self, + obj: ObjId, + range: R, + ) -> Option> { + if let Some(tree) = self.trees.get(&obj) { + tree.internal.list_range(range) + } else { + None + } + } + + pub(crate) fn list_range_at>( + &self, + obj: ObjId, + range: R, + clock: Clock, + ) -> Option> { + if let Some(tree) = self.trees.get(&obj) { + tree.internal.list_range_at(range, clock) + } else { + None + } + } + pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, obj: &ObjId, query: Q) -> Q where Q: TreeQuery<'a>, diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 908522d5..220b3b86 100644 --- 
a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -91,6 +91,25 @@ impl OpTreeInternal { .map(|node| query::RangeAt::new(range, node, meta, clock)) } + pub(crate) fn list_range>( + &self, + range: R, + ) -> Option> { + self.root_node + .as_ref() + .map(|node| query::ListRange::new(range, node)) + } + + pub(crate) fn list_range_at>( + &self, + range: R, + clock: Clock, + ) -> Option> { + self.root_node + .as_ref() + .map(|node| query::ListRangeAt::new(range, clock, node)) + } + pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, mut query: Q, m: &OpSetMetadata) -> Q where Q: TreeQuery<'a>, diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 06225885..124e1232 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -11,6 +11,8 @@ mod keys; mod keys_at; mod len; mod len_at; +mod list_range; +mod list_range_at; mod list_vals; mod list_vals_at; mod nth; @@ -29,6 +31,8 @@ pub(crate) use keys::Keys; pub(crate) use keys_at::KeysAt; pub(crate) use len::Len; pub(crate) use len_at::LenAt; +pub(crate) use list_range::ListRange; +pub(crate) use list_range_at::ListRangeAt; pub(crate) use list_vals::ListVals; pub(crate) use list_vals_at::ListValsAt; pub(crate) use nth::Nth; diff --git a/automerge/src/query/list_range.rs b/automerge/src/query/list_range.rs new file mode 100644 index 00000000..62031cf2 --- /dev/null +++ b/automerge/src/query/list_range.rs @@ -0,0 +1,55 @@ +use crate::op_tree::OpTreeNode; +use crate::types::{Key, OpId}; +use crate::Value; +use std::fmt::Debug; +use std::ops::RangeBounds; + +#[derive(Debug)] +pub(crate) struct ListRange<'a, R: RangeBounds> { + range: R, + index: usize, + pos: usize, + last_key: Option, + next_result: Option<(usize, Value<'a>, OpId)>, + index_back: usize, + root_child: &'a OpTreeNode, +} + +impl<'a, R: RangeBounds> ListRange<'a, R> { + pub(crate) fn new(range: R, root_child: &'a OpTreeNode) -> Self { + Self { + range, + index: 0, // FIXME root_child.seek_to_pos(range.start) + pos: 0, // FIXME range.start 
+ last_key: None, + next_result: None, + index_back: root_child.len(), + root_child, + } + } +} + +impl<'a, R: RangeBounds> Iterator for ListRange<'a, R> { + type Item = (usize, Value<'a>, OpId); + + fn next(&mut self) -> Option { + let mut result = None; + for i in self.index..self.index_back { + let op = self.root_child.get(i)?; + self.index += 1; + if op.visible() { + if self.range.contains(&self.pos) { + result = self.next_result.replace((self.pos, op.value(), op.id)); + } + if Some(op.key) != self.last_key { + self.last_key = Some(op.key); + self.pos += 1; + if result.is_some() { + return result; + } + } + } + } + self.next_result.take() + } +} diff --git a/automerge/src/query/list_range_at.rs b/automerge/src/query/list_range_at.rs new file mode 100644 index 00000000..e6d6f4bc --- /dev/null +++ b/automerge/src/query/list_range_at.rs @@ -0,0 +1,62 @@ +use super::VisWindow; +use crate::op_tree::OpTreeNode; +use crate::types::{Clock, Key, OpId}; +use crate::Value; +use std::fmt::Debug; +use std::ops::RangeBounds; + +#[derive(Debug)] +pub(crate) struct ListRangeAt<'a, R: RangeBounds> { + range: R, + index: usize, + pos: usize, + last_key: Option, + next_result: Option<(usize, Value<'a>, OpId)>, + index_back: usize, + root_child: &'a OpTreeNode, + clock: Clock, + window: VisWindow, +} + +impl<'a, R: RangeBounds> ListRangeAt<'a, R> { + pub(crate) fn new(range: R, clock: Clock, root_child: &'a OpTreeNode) -> Self { + Self { + range, + index: 0, // FIXME root_child.seek_to_pos(range.start) + pos: 0, // FIXME range.start + last_key: None, + next_result: None, + index_back: root_child.len(), + root_child, + clock, + window: VisWindow::default(), + } + } +} + +impl<'a, R: RangeBounds> Iterator for ListRangeAt<'a, R> { + type Item = (usize, Value<'a>, OpId); + + fn next(&mut self) -> Option { + // FIXME if self.pos > range.end { return None } + let mut result = None; + for i in self.index..self.index_back { + let op = self.root_child.get(i)?; + let visible = 
self.window.visible_at(op, i, &self.clock); + self.index += 1; + if visible { + if self.range.contains(&self.pos) { + result = self.next_result.replace((self.pos, op.value(), op.id)); + } + if Some(op.key) != self.last_key { + self.last_key = Some(op.key); + self.pos += 1; + if result.is_some() { + return result; + } + } + } + } + self.next_result.take() + } +} diff --git a/automerge/src/query/range.rs b/automerge/src/query/range.rs index ed0c8612..6f0fe094 100644 --- a/automerge/src/query/range.rs +++ b/automerge/src/query/range.rs @@ -9,6 +9,7 @@ pub(crate) struct Range<'a, R: RangeBounds> { range: R, index: usize, last_key: Option, + next_result: Option<(&'a str, Value<'a>, OpId)>, index_back: usize, last_key_back: Option, root_child: &'a OpTreeNode, @@ -21,6 +22,7 @@ impl<'a, R: RangeBounds> Range<'a, R> { range, index: 0, last_key: None, + next_result: None, index_back: root_child.len(), last_key_back: None, root_child, @@ -36,18 +38,23 @@ impl<'a, R: RangeBounds> Iterator for Range<'a, R> { for i in self.index..self.index_back { let op = self.root_child.get(i)?; self.index += 1; - if Some(op.key) != self.last_key && op.visible() { - self.last_key = Some(op.key); + if op.visible() { let prop = match op.key { Key::Map(m) => self.meta.props.get(m), - Key::Seq(_) => panic!("found list op in range query"), + Key::Seq(_) => return None, // this is a list }; if self.range.contains(prop) { - return Some((prop, op.value(), op.id)); + let result = self.next_result.replace((prop, op.value(), op.id)); + if Some(op.key) != self.last_key { + self.last_key = Some(op.key); + if result.is_some() { + return result; + } + } } } } - None + self.next_result.take() } } @@ -60,7 +67,7 @@ impl<'a, R: RangeBounds> DoubleEndedIterator for Range<'a, R> { self.last_key_back = Some(op.key); let prop = match op.key { Key::Map(m) => self.meta.props.get(m), - Key::Seq(_) => panic!("can't iterate through lists backwards"), + Key::Seq(_) => return None, // this is a list }; if 
self.range.contains(prop) { return Some((prop, op.value(), op.id)); diff --git a/automerge/src/query/range_at.rs b/automerge/src/query/range_at.rs index fc65e265..a8d0d959 100644 --- a/automerge/src/query/range_at.rs +++ b/automerge/src/query/range_at.rs @@ -15,6 +15,7 @@ pub(crate) struct RangeAt<'a, R: RangeBounds> { range: R, index: usize, last_key: Option, + next_result: Option<(&'a str, Value<'a>, OpId)>, index_back: usize, last_key_back: Option, @@ -36,6 +37,7 @@ impl<'a, R: RangeBounds> RangeAt<'a, R> { range, index: 0, last_key: None, + next_result: None, index_back: root_child.len(), last_key_back: None, root_child, @@ -52,18 +54,23 @@ impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { let op = self.root_child.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; - if Some(op.key) != self.last_key && visible { - self.last_key = Some(op.key); + if visible { let prop = match op.key { Key::Map(m) => self.meta.props.get(m), - Key::Seq(_) => panic!("found list op in range query"), + Key::Seq(_) => return None, // this is a list }; if self.range.contains(prop) { - return Some((prop, op.value(), op.id)); + let result = self.next_result.replace((prop, op.value(), op.id)); + if Some(op.key) != self.last_key { + self.last_key = Some(op.key); + if result.is_some() { + return result; + } + } } } } - None + self.next_result.take() } } @@ -71,12 +78,13 @@ impl<'a, R: RangeBounds> DoubleEndedIterator for RangeAt<'a, R> { fn next_back(&mut self) -> Option { for i in (self.index..self.index_back).rev() { let op = self.root_child.get(i)?; + let visible = self.window.visible_at(op, i, &self.clock); self.index_back -= 1; - if Some(op.key) != self.last_key_back && op.visible() { + if Some(op.key) != self.last_key_back && visible { self.last_key_back = Some(op.key); let prop = match op.key { Key::Map(m) => self.meta.props.get(m), - Key::Seq(_) => panic!("can't iterate through lists backwards"), + Key::Seq(_) => return None, // this is a list }; 
if self.range.contains(prop) { return Some((prop, op.value(), op.id)); diff --git a/automerge/src/values.rs b/automerge/src/values.rs index 7601dde1..eb4a227a 100644 --- a/automerge/src/values.rs +++ b/automerge/src/values.rs @@ -5,32 +5,23 @@ use crate::{query, Automerge}; #[derive(Debug)] pub struct Values<'a> { - range: Option>, + range: Option>, doc: &'a Automerge, } impl<'a> Values<'a> { - pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { + pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { Self { range, doc } } } impl<'a> Iterator for Values<'a> { - type Item = (&'a str, Value<'a>, ExId); + type Item = (Value<'a>, ExId); fn next(&mut self) -> Option { self.range .as_mut()? .next() - .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) - } -} - -impl<'a> DoubleEndedIterator for Values<'a> { - fn next_back(&mut self) -> Option { - self.range - .as_mut()? - .next_back() - .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) + .map(|(_idx, value, id)| (value, self.doc.id_to_exid(id))) } } diff --git a/automerge/src/values_at.rs b/automerge/src/values_at.rs index 77a52c96..07099de3 100644 --- a/automerge/src/values_at.rs +++ b/automerge/src/values_at.rs @@ -5,32 +5,26 @@ use crate::{query, Automerge}; #[derive(Debug)] pub struct ValuesAt<'a> { - range: Option>, + range: Option>, doc: &'a Automerge, } impl<'a> ValuesAt<'a> { - pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { + pub(crate) fn new( + doc: &'a Automerge, + range: Option>, + ) -> Self { Self { range, doc } } } impl<'a> Iterator for ValuesAt<'a> { - type Item = (&'a str, Value<'a>, ExId); + type Item = (Value<'a>, ExId); fn next(&mut self) -> Option { self.range .as_mut()? .next() - .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) - } -} - -impl<'a> DoubleEndedIterator for ValuesAt<'a> { - fn next_back(&mut self) -> Option { - self.range - .as_mut()? 
- .next_back() - .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) + .map(|(_idx, value, id)| (value, self.doc.id_to_exid(id))) } } From 7f4460f2009c2ad2607ee1c17e612d78dd5651a4 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sat, 30 Apr 2022 23:57:43 +0100 Subject: [PATCH 330/730] Make the OpSet iterator faster The opset iterator was using `OpTreeInternal::get(index)` to fetch each successive element of the OpSet. This is pretty slow. We make this much faster by implementing an iterator which is aware of the internal structure of the OpTree. This speeds up the save benchmark by about 10%. Signed-off-by: Alex Good --- automerge/src/op_set.rs | 45 ++++--- automerge/src/op_tree.rs | 14 ++- automerge/src/op_tree/iter.rs | 230 ++++++++++++++++++++++++++++++++++ 3 files changed, 261 insertions(+), 28 deletions(-) create mode 100644 automerge/src/op_tree/iter.rs diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 7928fce9..7d4322b6 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -1,7 +1,7 @@ use crate::clock::Clock; use crate::exid::ExId; use crate::indexed_cache::IndexedCache; -use crate::op_tree::OpTree; +use crate::op_tree::{self, OpTree}; use crate::query::{self, OpIdSearch, TreeQuery}; use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpType}; use crate::{ObjType, OpObserver}; @@ -45,13 +45,11 @@ impl OpSetInternal { } pub(crate) fn iter(&self) -> Iter<'_> { - let mut objs: Vec<_> = self.trees.keys().collect(); - objs.sort_by(|a, b| self.m.lamport_cmp(a.0, b.0)); + let mut objs: Vec<_> = self.trees.iter().collect(); + objs.sort_by(|a, b| self.m.lamport_cmp((a.0).0, (b.0).0)); Iter { - inner: self, - index: 0, - sub_index: 0, - objs, + trees: objs.into_iter(), + current: None, } } @@ -272,30 +270,31 @@ impl<'a> IntoIterator for &'a OpSetInternal { } } +#[derive(Clone)] pub(crate) struct Iter<'a> { - inner: &'a OpSetInternal, - index: usize, - objs: Vec<&'a ObjId>, - sub_index: usize, + trees: 
std::vec::IntoIter<(&'a ObjId, &'a op_tree::OpTree)>, + current: Option<(&'a ObjId, op_tree::OpTreeIter<'a>)>, } - impl<'a> Iterator for Iter<'a> { type Item = (&'a ObjId, &'a Op); fn next(&mut self) -> Option { - let mut result = None; - for obj in self.objs.iter().skip(self.index) { - let tree = self.inner.trees.get(obj)?; - result = tree.internal.get(self.sub_index).map(|op| (*obj, op)); - if result.is_some() { - self.sub_index += 1; - break; - } else { - self.index += 1; - self.sub_index = 0; + if let Some((id, tree)) = &mut self.current { + if let Some(next) = tree.next() { + return Some((id, next)); + } + } + + loop { + self.current = self.trees.next().map(|o| (o.0, o.1.iter())); + if let Some((obj, tree)) = &mut self.current { + if let Some(next) = tree.next() { + return Some((obj, next)); + } + } else { + return None; } } - result } } diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 908522d5..bd0b63ef 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -18,6 +18,9 @@ use std::collections::HashSet; pub(crate) const B: usize = 16; +mod iter; +pub(crate) use iter::OpTreeIter; + #[derive(Debug, Clone, PartialEq)] pub(crate) struct OpTree { pub(crate) internal: OpTreeInternal, @@ -34,6 +37,10 @@ impl OpTree { parent: None, } } + + pub(crate) fn iter(&self) -> OpTreeIter<'_> { + self.internal.iter() + } } #[derive(Clone, Debug)] @@ -105,11 +112,8 @@ impl OpTreeInternal { } /// Create an iterator through the sequence. - pub(crate) fn iter(&self) -> Iter<'_> { - Iter { - inner: self, - index: 0, - } + pub(crate) fn iter(&self) -> OpTreeIter<'_> { + iter::OpTreeIter::new(self) } /// Insert the `element` into the sequence at `index`. 
diff --git a/automerge/src/op_tree/iter.rs b/automerge/src/op_tree/iter.rs new file mode 100644 index 00000000..48406444 --- /dev/null +++ b/automerge/src/op_tree/iter.rs @@ -0,0 +1,230 @@ +use crate::types::Op; + +use super::{OpTreeInternal, OpTreeNode}; + +#[derive(Clone)] +pub(crate) enum OpTreeIter<'a> { + Empty, + NonEmpty { + // A stack of (OpTreeNode, index) where `index` is the index in the elements of the optree node + // at which we descended into a child + ancestors: Vec<(&'a OpTreeNode, usize)>, + current: &'a OpTreeNode, + index: usize, + tree: &'a OpTreeInternal, + }, +} + +impl<'a> OpTreeIter<'a> { + pub(crate) fn new(tree: &'a OpTreeInternal) -> OpTreeIter<'a> { + tree.root_node + .as_ref() + .map(|root| OpTreeIter::NonEmpty { + // This is a guess at the average depth of an OpTree + ancestors: Vec::with_capacity(6), + current: root, + index: 0, + tree, + }) + .unwrap_or(OpTreeIter::Empty) + } +} + +impl<'a> Iterator for OpTreeIter<'a> { + type Item = &'a Op; + + fn next(&mut self) -> Option { + match self { + OpTreeIter::Empty => None, + OpTreeIter::NonEmpty { + ancestors, + current, + index, + .. + } => { + if current.is_leaf() { + // If we're in a leaf node and we haven't exhausted it yet we just return the elements + // of the leaf node + if *index < current.len() { + let result = ¤t.elements[*index]; + *index += 1; + Some(result) + } else { + // We've exhausted the leaf node, we must find the nearest non-exhausted parent (lol) + let (parent, parent_index) = loop { + if let Some((parent, parent_index)) = ancestors.pop() { + // We've exhausted this parent + if parent_index >= parent.elements.len() { + continue; + } else { + // This parent still has elements to process, let's use it! 
+ break (parent, parent_index); + } + } else { + // No parents left, we're done + return None; + } + }; + // if we've finished the elements in a leaf node and there's a parent node then we + // return the element from the parent node which is one after the index at which we + // descended into the child + *index = parent_index + 1; + *current = parent; + let result = ¤t.elements[parent_index]; + Some(result) + } + } else { + // If we're in a non-leaf node then the last iteration returned an element from the + // current nodes `elements`, so we must now descend into a leaf child + ancestors.push((current, *index)); + loop { + let child = ¤t.children[*index]; + *index = 0; + if !child.is_leaf() { + ancestors.push((child, 0)); + *current = child + } else { + *current = child; + break; + } + } + self.next() + } + } + } + } + + fn nth(&mut self, n: usize) -> Option { + match self { + Self::Empty => None, + Self::NonEmpty { tree, .. } => tree.get(n), + } + } +} + +#[cfg(test)] +mod tests { + use super::super::OpTreeInternal; + use crate::types::{Key, Op, OpId, OpType, ScalarValue}; + use proptest::prelude::*; + + #[derive(Debug, Clone)] + enum Action { + Insert(usize, Op), + Delete(usize), + } + + fn op(counter: u64) -> Op { + Op { + action: OpType::Put(ScalarValue::Uint(counter)), + id: OpId(counter, 0), + key: Key::Map(0), + succ: Vec::new(), + pred: Vec::new(), + insert: false, + } + } + + /// A model for a property based test of the OpTreeIter. We generate a set of actions, each + /// action pertaining to a `model` - which is just a `Vec`. As we generate each action we + /// apply it to the model and record the action we took. In the property test we replay the + /// same actions against an `OpTree` and check that the iterator returns the same result as the + /// `model`. 
+ #[derive(Debug, Clone)] + struct Model { + actions: Vec, + model: Vec, + } + + impl Model { + fn insert(&self, index: usize, next_op_counter: u64) -> Self { + let mut actions = self.actions.clone(); + let op = op(next_op_counter); + actions.push(Action::Insert(index, op.clone())); + let mut model = self.model.clone(); + model.insert(index, op); + Self { actions, model } + } + + fn delete(&self, index: usize) -> Self { + let mut actions = self.actions.clone(); + actions.push(Action::Delete(index)); + let mut model = self.model.clone(); + model.remove(index); + Self { actions, model } + } + + fn next(self, next_op_counter: u64) -> impl Strategy { + if self.model.is_empty() { + Just(self.insert(0, next_op_counter)).boxed() + } else { + // Note that we have to feed `self` through the `prop_flat_map` using `Just` to + // appease the borrow checker, this is annoying because it does obscure the meaning + // of the code heere which is basically "decide whether the next action should be + // insert, if it is insert choose an index between 0..model.len() + 1 and generate + // an op to insert, otherwise choose an index between 0..model.len() and generate a + // delete action". + // + // 95% chance of inserting to make sure we deal with large lists + (proptest::bool::weighted(0.95), Just(self)) + .prop_flat_map(move |(insert, model)| { + if insert { + (0..model.model.len() + 1, Just(model)) + .prop_map(move |(index, model)| { + model.insert(index, next_op_counter) + }) + .boxed() + } else { + ((0..model.model.len()), Just(model)) + .prop_map(move |(index, model)| model.delete(index)) + .boxed() + } + }) + .boxed() + } + } + } + + fn scenario() -> impl Strategy { + (0_u64..150).prop_flat_map(|num_steps| { + let mut strat = Just(( + 0, + Model { + actions: Vec::new(), + model: Vec::new(), + }, + )) + .boxed(); + for _ in 0..num_steps { + strat = strat + // Note the counter, which we feed through each `prop_flat_map`, incrementing + // it by one each time. 
This mean that the generated ops have ascending (but + // not necessarily consecutive because not every `Action` is an `Insert`) + // counters. This makes it easier to debug failures - if we just used a random + // counter it would be much harder to see where things are out of order. + .prop_flat_map(|(counter, model)| { + let next_counter = counter + 1; + model.next(counter).prop_map(move |m| (next_counter, m)) + }) + .boxed(); + } + strat.prop_map(|(_, model)| model) + }) + } + + proptest! { + #[test] + fn optree_iter_proptest(Model{actions, model} in scenario()) { + let mut optree = OpTreeInternal::new(); + for action in actions { + match action { + Action::Insert(index, op) => optree.insert(index, op), + Action::Delete(index) => { optree.remove(index); }, + } + } + let iter = super::OpTreeIter::new(&optree); + let iterated = iter.cloned().collect::>(); + assert_eq!(model, iterated) + } + } +} From 0d3eb07f3fc3f377857bc005617ab2383892e566 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 2 May 2022 13:30:59 -0400 Subject: [PATCH 331/730] fix key/elemid bug and rename range to map_range --- automerge/src/automerge.rs | 13 +++++++++++++ automerge/src/op_set.rs | 4 ++-- automerge/src/op_tree.rs | 8 ++++---- automerge/src/query.rs | 8 ++++---- automerge/src/query/list_range.rs | 10 +++++----- automerge/src/query/list_range_at.rs | 10 +++++----- automerge/src/query/{range.rs => map_range.rs} | 8 ++++---- .../src/query/{range_at.rs => map_range_at.rs} | 8 ++++---- automerge/src/range.rs | 4 ++-- automerge/src/range_at.rs | 4 ++-- 10 files changed, 45 insertions(+), 32 deletions(-) rename automerge/src/query/{range.rs => map_range.rs} (90%) rename automerge/src/query/{range_at.rs => map_range_at.rs} (91%) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index abeafa24..298228b7 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1546,6 +1546,19 @@ mod tests { ); } + #[test] + fn insert_at_index() { + let mut doc = 
AutoCommit::new(); + + let list = &doc.put_object(ROOT, "list", ObjType::List).unwrap(); + doc.insert(list, 0, 0).unwrap(); + doc.insert(list, 0, 1).unwrap(); // both inserts at the same index + + assert_eq!(doc.length(list), 2); + assert_eq!(doc.keys(list).count(), 2); + assert_eq!(doc.values(list).count(), 2); // it's just 1! + } + #[test] fn get_list_values() -> Result<(), AutomergeError> { let mut doc1 = Automerge::new(); diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 4c2065ed..797fdede 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -81,7 +81,7 @@ impl OpSetInternal { &self, obj: ObjId, range: R, - ) -> Option> { + ) -> Option> { if let Some(tree) = self.trees.get(&obj) { tree.internal.range(range, &self.m) } else { @@ -94,7 +94,7 @@ impl OpSetInternal { obj: ObjId, range: R, clock: Clock, - ) -> Option> { + ) -> Option> { if let Some(tree) = self.trees.get(&obj) { tree.internal.range_at(range, &self.m, clock) } else { diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 220b3b86..14e81e5a 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -74,10 +74,10 @@ impl OpTreeInternal { &'a self, range: R, meta: &'a OpSetMetadata, - ) -> Option> { + ) -> Option> { self.root_node .as_ref() - .map(|node| query::Range::new(range, node, meta)) + .map(|node| query::MapRange::new(range, node, meta)) } pub(crate) fn range_at<'a, R: RangeBounds>( @@ -85,10 +85,10 @@ impl OpTreeInternal { range: R, meta: &'a OpSetMetadata, clock: Clock, - ) -> Option> { + ) -> Option> { self.root_node .as_ref() - .map(|node| query::RangeAt::new(range, node, meta, clock)) + .map(|node| query::MapRangeAt::new(range, node, meta, clock)) } pub(crate) fn list_range>( diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 124e1232..fe6c8848 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -20,8 +20,8 @@ mod nth_at; mod opid; mod prop; mod prop_at; -mod range; -mod range_at; +mod 
map_range; +mod map_range_at; mod seek_op; mod seek_op_with_patch; @@ -40,8 +40,8 @@ pub(crate) use nth_at::NthAt; pub(crate) use opid::OpIdSearch; pub(crate) use prop::Prop; pub(crate) use prop_at::PropAt; -pub(crate) use range::Range; -pub(crate) use range_at::RangeAt; +pub(crate) use map_range::MapRange; +pub(crate) use map_range_at::MapRangeAt; pub(crate) use seek_op::SeekOp; pub(crate) use seek_op_with_patch::SeekOpWithPatch; diff --git a/automerge/src/query/list_range.rs b/automerge/src/query/list_range.rs index 62031cf2..261e09a2 100644 --- a/automerge/src/query/list_range.rs +++ b/automerge/src/query/list_range.rs @@ -1,5 +1,5 @@ use crate::op_tree::OpTreeNode; -use crate::types::{Key, OpId}; +use crate::types::{ElemId, OpId}; use crate::Value; use std::fmt::Debug; use std::ops::RangeBounds; @@ -9,7 +9,7 @@ pub(crate) struct ListRange<'a, R: RangeBounds> { range: R, index: usize, pos: usize, - last_key: Option, + last_elemid: Option, next_result: Option<(usize, Value<'a>, OpId)>, index_back: usize, root_child: &'a OpTreeNode, @@ -21,7 +21,7 @@ impl<'a, R: RangeBounds> ListRange<'a, R> { range, index: 0, // FIXME root_child.seek_to_pos(range.start) pos: 0, // FIXME range.start - last_key: None, + last_elemid: None, next_result: None, index_back: root_child.len(), root_child, @@ -41,8 +41,8 @@ impl<'a, R: RangeBounds> Iterator for ListRange<'a, R> { if self.range.contains(&self.pos) { result = self.next_result.replace((self.pos, op.value(), op.id)); } - if Some(op.key) != self.last_key { - self.last_key = Some(op.key); + if op.elemid() != self.last_elemid { + self.last_elemid = op.elemid(); self.pos += 1; if result.is_some() { return result; diff --git a/automerge/src/query/list_range_at.rs b/automerge/src/query/list_range_at.rs index e6d6f4bc..a5c37167 100644 --- a/automerge/src/query/list_range_at.rs +++ b/automerge/src/query/list_range_at.rs @@ -1,6 +1,6 @@ use super::VisWindow; use crate::op_tree::OpTreeNode; -use crate::types::{Clock, Key, OpId}; +use 
crate::types::{Clock, ElemId, OpId}; use crate::Value; use std::fmt::Debug; use std::ops::RangeBounds; @@ -10,7 +10,7 @@ pub(crate) struct ListRangeAt<'a, R: RangeBounds> { range: R, index: usize, pos: usize, - last_key: Option, + last_elemid: Option, next_result: Option<(usize, Value<'a>, OpId)>, index_back: usize, root_child: &'a OpTreeNode, @@ -24,7 +24,7 @@ impl<'a, R: RangeBounds> ListRangeAt<'a, R> { range, index: 0, // FIXME root_child.seek_to_pos(range.start) pos: 0, // FIXME range.start - last_key: None, + last_elemid: None, next_result: None, index_back: root_child.len(), root_child, @@ -48,8 +48,8 @@ impl<'a, R: RangeBounds> Iterator for ListRangeAt<'a, R> { if self.range.contains(&self.pos) { result = self.next_result.replace((self.pos, op.value(), op.id)); } - if Some(op.key) != self.last_key { - self.last_key = Some(op.key); + if op.elemid() != self.last_elemid { + self.last_elemid = op.elemid(); self.pos += 1; if result.is_some() { return result; diff --git a/automerge/src/query/range.rs b/automerge/src/query/map_range.rs similarity index 90% rename from automerge/src/query/range.rs rename to automerge/src/query/map_range.rs index 6f0fe094..b611951d 100644 --- a/automerge/src/query/range.rs +++ b/automerge/src/query/map_range.rs @@ -5,7 +5,7 @@ use std::fmt::Debug; use std::ops::RangeBounds; #[derive(Debug)] -pub(crate) struct Range<'a, R: RangeBounds> { +pub(crate) struct MapRange<'a, R: RangeBounds> { range: R, index: usize, last_key: Option, @@ -16,7 +16,7 @@ pub(crate) struct Range<'a, R: RangeBounds> { meta: &'a OpSetMetadata, } -impl<'a, R: RangeBounds> Range<'a, R> { +impl<'a, R: RangeBounds> MapRange<'a, R> { pub(crate) fn new(range: R, root_child: &'a OpTreeNode, meta: &'a OpSetMetadata) -> Self { Self { range, @@ -31,7 +31,7 @@ impl<'a, R: RangeBounds> Range<'a, R> { } } -impl<'a, R: RangeBounds> Iterator for Range<'a, R> { +impl<'a, R: RangeBounds> Iterator for MapRange<'a, R> { type Item = (&'a str, Value<'a>, OpId); fn next(&mut self) -> 
Option { @@ -58,7 +58,7 @@ impl<'a, R: RangeBounds> Iterator for Range<'a, R> { } } -impl<'a, R: RangeBounds> DoubleEndedIterator for Range<'a, R> { +impl<'a, R: RangeBounds> DoubleEndedIterator for MapRange<'a, R> { fn next_back(&mut self) -> Option { for i in (self.index..self.index_back).rev() { let op = self.root_child.get(i)?; diff --git a/automerge/src/query/range_at.rs b/automerge/src/query/map_range_at.rs similarity index 91% rename from automerge/src/query/range_at.rs rename to automerge/src/query/map_range_at.rs index a8d0d959..4f10b217 100644 --- a/automerge/src/query/range_at.rs +++ b/automerge/src/query/map_range_at.rs @@ -8,7 +8,7 @@ use std::ops::RangeBounds; use super::VisWindow; #[derive(Debug)] -pub(crate) struct RangeAt<'a, R: RangeBounds> { +pub(crate) struct MapRangeAt<'a, R: RangeBounds> { clock: Clock, window: VisWindow, @@ -24,7 +24,7 @@ pub(crate) struct RangeAt<'a, R: RangeBounds> { meta: &'a OpSetMetadata, } -impl<'a, R: RangeBounds> RangeAt<'a, R> { +impl<'a, R: RangeBounds> MapRangeAt<'a, R> { pub(crate) fn new( range: R, root_child: &'a OpTreeNode, @@ -46,7 +46,7 @@ impl<'a, R: RangeBounds> RangeAt<'a, R> { } } -impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { +impl<'a, R: RangeBounds> Iterator for MapRangeAt<'a, R> { type Item = (&'a str, Value<'a>, OpId); fn next(&mut self) -> Option { @@ -74,7 +74,7 @@ impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { } } -impl<'a, R: RangeBounds> DoubleEndedIterator for RangeAt<'a, R> { +impl<'a, R: RangeBounds> DoubleEndedIterator for MapRangeAt<'a, R> { fn next_back(&mut self) -> Option { for i in (self.index..self.index_back).rev() { let op = self.root_child.get(i)?; diff --git a/automerge/src/range.rs b/automerge/src/range.rs index 0bfca1ea..a20c1cf9 100644 --- a/automerge/src/range.rs +++ b/automerge/src/range.rs @@ -5,12 +5,12 @@ use crate::{query, Automerge}; #[derive(Debug)] pub struct Range<'a, R: RangeBounds> { - range: Option>, + range: Option>, doc: &'a Automerge, } impl<'a, 
R: RangeBounds> Range<'a, R> { - pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { + pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { Self { range, doc } } } diff --git a/automerge/src/range_at.rs b/automerge/src/range_at.rs index 321972a8..1d802a78 100644 --- a/automerge/src/range_at.rs +++ b/automerge/src/range_at.rs @@ -5,12 +5,12 @@ use crate::{query, Automerge}; #[derive(Debug)] pub struct RangeAt<'a, R: RangeBounds> { - range: Option>, + range: Option>, doc: &'a Automerge, } impl<'a, R: RangeBounds> RangeAt<'a, R> { - pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { + pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { Self { range, doc } } } From bcdc8a27520882bb48f5a4b1bc9eaccc7013bfab Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 2 May 2022 13:32:59 -0400 Subject: [PATCH 332/730] fmt --- automerge/src/query.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/automerge/src/query.rs b/automerge/src/query.rs index fe6c8848..2160d316 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -15,13 +15,13 @@ mod list_range; mod list_range_at; mod list_vals; mod list_vals_at; +mod map_range; +mod map_range_at; mod nth; mod nth_at; mod opid; mod prop; mod prop_at; -mod map_range; -mod map_range_at; mod seek_op; mod seek_op_with_patch; @@ -35,13 +35,13 @@ pub(crate) use list_range::ListRange; pub(crate) use list_range_at::ListRangeAt; pub(crate) use list_vals::ListVals; pub(crate) use list_vals_at::ListValsAt; +pub(crate) use map_range::MapRange; +pub(crate) use map_range_at::MapRangeAt; pub(crate) use nth::Nth; pub(crate) use nth_at::NthAt; pub(crate) use opid::OpIdSearch; pub(crate) use prop::Prop; pub(crate) use prop_at::PropAt; -pub(crate) use map_range::MapRange; -pub(crate) use map_range_at::MapRangeAt; pub(crate) use seek_op::SeekOp; pub(crate) use seek_op_with_patch::SeekOpWithPatch; From 3ec1127b507a0d9324eafa5edf07ec79ed659142 Mon Sep 17 00:00:00 2001 From: 
Andrew Jeffery Date: Mon, 2 May 2022 21:05:10 +0100 Subject: [PATCH 333/730] Try 1.57.0 as msrv --- .github/workflows/ci.yaml | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 08133091..2ddae3e0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -14,7 +14,8 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: stable + toolchain: 1.57.0 + default: true components: rustfmt - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/fmt @@ -27,7 +28,8 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: stable + toolchain: 1.57.0 + default: true components: clippy - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/lint @@ -40,7 +42,8 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: stable + toolchain: 1.57.0 + default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/docs shell: bash @@ -84,7 +87,8 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: stable + toolchain: 1.57.0 + default: true - uses: Swatinem/rust-cache@v1 - name: Install CMocka run: sudo apt-get install -y libcmocka-dev @@ -101,7 +105,7 @@ jobs: strategy: matrix: toolchain: - - stable + - 1.57.0 - nightly continue-on-error: ${{ matrix.toolchain == 'nightly' }} steps: @@ -110,6 +114,7 @@ jobs: with: profile: minimal toolchain: ${{ matrix.toolchain }} + default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test shell: bash @@ -121,7 +126,8 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: stable + toolchain: 1.57.0 + default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test shell: bash @@ -133,7 +139,8 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: stable + toolchain: 1.57.0 + default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test shell: bash From 
0aab13a99063d4ae0211f4cced6abe94edc184aa Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 2 May 2022 21:06:43 +0100 Subject: [PATCH 334/730] Set rust-version in cargo.tomls --- automerge-c/Cargo.toml | 1 + automerge-cli/Cargo.toml | 1 + automerge-wasm/Cargo.toml | 1 + automerge/Cargo.toml | 3 +-- edit-trace/Cargo.toml | 3 +-- 5 files changed, 5 insertions(+), 4 deletions(-) diff --git a/automerge-c/Cargo.toml b/automerge-c/Cargo.toml index ed6c846f..851a3470 100644 --- a/automerge-c/Cargo.toml +++ b/automerge-c/Cargo.toml @@ -4,6 +4,7 @@ version = "0.1.0" authors = ["Orion Henry ", "Jason Kankiewicz "] edition = "2021" license = "MIT" +rust-version = "1.57.0" [lib] name = "automerge" diff --git a/automerge-cli/Cargo.toml b/automerge-cli/Cargo.toml index 38dec0e6..f434bc69 100644 --- a/automerge-cli/Cargo.toml +++ b/automerge-cli/Cargo.toml @@ -4,6 +4,7 @@ version = "0.1.0" authors = ["Alex Good "] edition = "2018" license = "MIT" +rust-version = "1.57.0" [[bin]] name = "automerge" diff --git a/automerge-wasm/Cargo.toml b/automerge-wasm/Cargo.toml index 36b5c3b5..f7668bfa 100644 --- a/automerge-wasm/Cargo.toml +++ b/automerge-wasm/Cargo.toml @@ -9,6 +9,7 @@ categories = ["wasm"] readme = "README.md" edition = "2021" license = "MIT" +rust-version = "1.57.0" [lib] crate-type = ["cdylib","rlib"] diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index ae95fa4e..e07a4538 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -5,8 +5,7 @@ edition = "2021" license = "MIT" repository = "https://github.com/automerge/automerge-rs" documentation = "https://automerge.org/automerge-rs/automerge/" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +rust-version = "1.57.0" [features] optree-visualisation = ["dot"] diff --git a/edit-trace/Cargo.toml b/edit-trace/Cargo.toml index ce54bed5..217e686e 100644 --- a/edit-trace/Cargo.toml +++ b/edit-trace/Cargo.toml @@ -3,8 +3,7 @@ name = "edit-trace" version = 
"0.1.0" edition = "2021" license = "MIT" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +rust-version = "1.57.0" [dependencies] automerge = { path = "../automerge" } From a728b8216b8d202b49f36ff9d0d195e1c4a43e74 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 3 May 2022 19:27:10 -0400 Subject: [PATCH 335/730] range -> map_range(), added list_range() values() works on both --- automerge/src/autocommit.rs | 39 +++- automerge/src/automerge.rs | 197 +++++++++++++----- automerge/src/lib.rs | 14 +- automerge/src/list_range.rs | 27 +++ automerge/src/list_range_at.rs | 27 +++ automerge/src/{range.rs => map_range.rs} | 8 +- .../src/{range_at.rs => map_range_at.rs} | 8 +- automerge/src/op_set.rs | 4 +- automerge/src/query/list_range.rs | 23 +- automerge/src/query/list_range_at.rs | 24 ++- automerge/src/query/map_range.rs | 10 +- automerge/src/query/map_range_at.rs | 10 +- .../src/transaction/manual_transaction.rs | 37 +++- automerge/src/transaction/transactable.rs | 29 ++- automerge/src/values.rs | 51 +++-- automerge/src/values_at.rs | 30 --- 16 files changed, 387 insertions(+), 151 deletions(-) create mode 100644 automerge/src/list_range.rs create mode 100644 automerge/src/list_range_at.rs rename automerge/src/{range.rs => map_range.rs} (75%) rename automerge/src/{range_at.rs => map_range_at.rs} (75%) delete mode 100644 automerge/src/values_at.rs diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 27b4e9af..b0b56709 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -4,12 +4,12 @@ use crate::exid::ExId; use crate::op_observer::OpObserver; use crate::transaction::{CommitOptions, Transactable}; use crate::{ - sync, ApplyOptions, Keys, KeysAt, ObjType, Parents, Range, RangeAt, ScalarValue, Values, - ValuesAt, + sync, ApplyOptions, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, + Parents, ScalarValue, }; use crate::{ transaction::TransactionInner, 
ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, - Value, + Value, Values, }; /// An automerge document that automatically manages transactions. @@ -293,24 +293,45 @@ impl Transactable for AutoCommit { self.doc.keys_at(obj, heads) } - fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range<'_, R> { - self.doc.range(obj, range) + fn map_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> MapRange<'_, R> { + self.doc.map_range(obj, range) } - fn range_at, R: RangeBounds>( + fn map_range_at, R: RangeBounds>( &self, obj: O, range: R, heads: &[ChangeHash], - ) -> RangeAt<'_, R> { - self.doc.range_at(obj, range, heads) + ) -> MapRangeAt<'_, R> { + self.doc.map_range_at(obj, range, heads) + } + + fn list_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> ListRange<'_, R> { + self.doc.list_range(obj, range) + } + + fn list_range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> ListRangeAt<'_, R> { + self.doc.list_range_at(obj, range, heads) } fn values>(&self, obj: O) -> Values<'_> { self.doc.values(obj) } - fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt<'_> { + fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> { self.doc.values_at(obj, heads) } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 298228b7..f62ce9bb 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -9,15 +9,17 @@ use crate::keys::Keys; use crate::op_observer::OpObserver; use crate::op_set::OpSet; use crate::parents::Parents; -use crate::range::Range; use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner}; use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, ScalarValue, Value, }; -use crate::{legacy, query, types, ApplyOptions, ObjType, RangeAt, ValuesAt}; +use crate::KeysAt; +use crate::{ + legacy, query, types, ApplyOptions, ListRange, ListRangeAt, MapRange, 
MapRangeAt, ObjType, + Values, +}; use crate::{AutomergeError, Change, Prop}; -use crate::{KeysAt, Values}; use serde::Serialize; #[derive(Debug, Clone, PartialEq)] @@ -279,52 +281,91 @@ impl Automerge { } /// Iterate over the keys and values of the map `obj` in the given range. - pub fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range<'_, R> { + pub fn map_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> MapRange<'_, R> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - let iter_range = self.ops.range(obj, range); - Range::new(self, iter_range) + MapRange::new(self, self.ops.map_range(obj, range)) } else { - Range::new(self, None) + MapRange::new(self, None) } } - /// Historical version of [`range`](Self::range). - pub fn range_at, R: RangeBounds>( + /// Historical version of [`map_range`](Self::map_range). + pub fn map_range_at, R: RangeBounds>( &self, obj: O, range: R, heads: &[ChangeHash], - ) -> RangeAt<'_, R> { + ) -> MapRangeAt<'_, R> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { let clock = self.clock_at(heads); - let iter_range = self.ops.range_at(obj, range, clock); - RangeAt::new(self, iter_range) + let iter_range = self.ops.map_range_at(obj, range, clock); + MapRangeAt::new(self, iter_range) } else { - RangeAt::new(self, None) + MapRangeAt::new(self, None) + } + } + + /// Iterate over the indexes and values of the list `obj` in the given range. + pub fn list_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> ListRange<'_, R> { + if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + ListRange::new(self, self.ops.list_range(obj, range)) + } else { + ListRange::new(self, None) + } + } + + /// Historical version of [`range`](Self::list_range_at). 
+ pub fn list_range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> ListRangeAt<'_, R> { + if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + let clock = self.clock_at(heads); + let iter_range = self.ops.list_range_at(obj, range, clock); + ListRangeAt::new(self, iter_range) + } else { + ListRangeAt::new(self, None) } } - /// Iterate over all the keys and values of the object `obj`. - /// - /// For a map the keys are the keys of the map. - /// For a list the keys are the element ids (opids) encoded as strings. pub fn values>(&self, obj: O) -> Values<'_> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - let iter_range = self.ops.list_range(obj, ..); - Values::new(self, iter_range) + match self.ops.object_type(&obj) { + Some(t) if t.is_sequence() => Values::new(self, self.ops.list_range(obj, ..)), + Some(_) => Values::new(self, self.ops.map_range(obj, ..)), + None => Values::empty(self), + } } else { - Values::new(self, None) + Values::empty(self) } } - /// Historical version of [`values`](Self::values). 
- pub fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt<'_> { + pub fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { let clock = self.clock_at(heads); - let iter_range = self.ops.list_range_at(obj, .., clock); - ValuesAt::new(self, iter_range) + match self.ops.object_type(&obj) { + Some(ObjType::Map) | Some(ObjType::Table) => { + let iter_range = self.ops.map_range_at(obj, .., clock); + Values::new(self, iter_range) + } + Some(ObjType::List) | Some(ObjType::Text) => { + let iter_range = self.ops.list_range_at(obj, .., clock); + Values::new(self, iter_range) + } + None => Values::empty(self), + } } else { - ValuesAt::new(self, None) + Values::empty(self) } } @@ -1344,8 +1385,6 @@ mod tests { assert!(doc.get_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10)); assert!(doc.length_at(&list, &heads3) == 2); - //doc.dump(); - log!("{:?}", doc.get_at(&list, 0, &heads3)?.unwrap().0); assert!(doc.get_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30)); assert!(doc.get_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20)); @@ -1491,9 +1530,9 @@ mod tests { tx.put(ROOT, "d", 9).unwrap(); tx.commit(); let actor = doc.get_actor(); - assert_eq!(doc.range(ROOT, ..).count(), 4); + assert_eq!(doc.map_range(ROOT, ..).count(), 4); - let mut range = doc.range(ROOT, "b".to_owned().."d".into()); + let mut range = doc.map_range(ROOT, "b".to_owned().."d".into()); assert_eq!( range.next(), Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) @@ -1504,7 +1543,7 @@ mod tests { ); assert_eq!(range.next(), None); - let mut range = doc.range(ROOT, "b".to_owned()..="d".into()); + let mut range = doc.map_range(ROOT, "b".to_owned()..="d".into()); assert_eq!( range.next(), Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) @@ -1519,7 +1558,7 @@ mod tests { ); assert_eq!(range.next(), None); - let mut range = doc.range(ROOT, ..="c".to_owned()); + let mut range = doc.map_range(ROOT, ..="c".to_owned()); assert_eq!( 
range.next(), Some(("a", 8.into(), ExId::Id(6, actor.clone(), 0))) @@ -1534,7 +1573,7 @@ mod tests { ); assert_eq!(range.next(), None); - let range = doc.range(ROOT, "a".to_owned()..); + let range = doc.map_range(ROOT, "a".to_owned()..); assert_eq!( range.collect::>(), vec![ @@ -1556,7 +1595,7 @@ mod tests { assert_eq!(doc.length(list), 2); assert_eq!(doc.keys(list).count(), 2); - assert_eq!(doc.values(list).count(), 2); // it's just 1! + assert_eq!(doc.list_range(list, ..).count(), 2); } #[test] @@ -1569,6 +1608,11 @@ mod tests { tx.insert(&list, 0, "First")?; tx.insert(&list, 1, "Second")?; tx.insert(&list, 2, "Third")?; + tx.insert(&list, 3, "Forth")?; + tx.insert(&list, 4, "Fith")?; + tx.insert(&list, 5, "Sixth")?; + tx.insert(&list, 6, "Seventh")?; + tx.insert(&list, 7, "Eights")?; tx.commit(); let v1 = doc1.get_heads(); @@ -1584,19 +1628,52 @@ mod tests { doc1.merge(&mut doc2)?; - assert_eq!(doc1.values(&list).count(), 3); + assert_eq!(doc1.list_range(&list, ..).count(), 8); - for (i, val1) in doc1.values(&list).enumerate() { + for (i, val1, id) in doc1.list_range(&list, ..) 
{ let val2 = doc1.get(&list, i)?; - assert_eq!(Some(val1), val2); + assert_eq!(Some((val1, id)), val2); } - assert_eq!(doc1.values_at(&list, &v1).count(), 3); - for (i, val1) in doc1.values_at(&list, &v1).enumerate() { - let val2 = doc1.get_at(&list, i, &v1)?; - assert_eq!(Some(val1), val2); + assert_eq!(doc1.list_range(&list, 3..6).count(), 3); + assert_eq!(doc1.list_range(&list, 3..6).next().unwrap().0, 3); + assert_eq!(doc1.list_range(&list, 3..6).last().unwrap().0, 5); + + for (i, val1, id) in doc1.list_range(&list, 3..6) { + let val2 = doc1.get(&list, i)?; + assert_eq!(Some((val1, id)), val2); } + assert_eq!(doc1.list_range_at(&list, .., &v1).count(), 8); + for (i, val1, id) in doc1.list_range_at(&list, .., &v1) { + let val2 = doc1.get_at(&list, i, &v1)?; + assert_eq!(Some((val1, id)), val2); + } + + assert_eq!(doc1.list_range_at(&list, 3..6, &v1).count(), 3); + assert_eq!(doc1.list_range_at(&list, 3..6, &v1).next().unwrap().0, 3); + assert_eq!(doc1.list_range_at(&list, 3..6, &v1).last().unwrap().0, 5); + + for (i, val1, id) in doc1.list_range_at(&list, 3..6, &v1) { + let val2 = doc1.get_at(&list, i, &v1)?; + assert_eq!(Some((val1, id)), val2); + } + + let range: Vec<_> = doc1 + .list_range(&list, ..) 
+ .map(|(_, val, id)| (val, id)) + .collect(); + let values = doc1.values(&list); + let values: Vec<_> = values.collect(); + assert_eq!(range, values); + + let range: Vec<_> = doc1 + .list_range_at(&list, .., &v1) + .map(|(_, val, id)| (val, id)) + .collect(); + let values: Vec<_> = doc1.values_at(&list, &v1).collect(); + assert_eq!(range, values); + Ok(()) } @@ -1625,34 +1702,48 @@ mod tests { let range = "b".to_string().."d".to_string(); - assert_eq!(doc1.range(ROOT, range.clone()).count(), 2); + assert_eq!(doc1.map_range(ROOT, range.clone()).count(), 2); - for (key, val1, id) in doc1.range(ROOT, range.clone()) { + for (key, val1, id) in doc1.map_range(ROOT, range.clone()) { let val2 = doc1.get(ROOT, key)?; assert_eq!(Some((val1, id)), val2); } - assert_eq!(doc1.range(ROOT, range.clone()).rev().count(), 2); + assert_eq!(doc1.map_range(ROOT, range.clone()).rev().count(), 2); - for (key, val1, id) in doc1.range(ROOT, range.clone()).rev() { + for (key, val1, id) in doc1.map_range(ROOT, range.clone()).rev() { let val2 = doc1.get(ROOT, key)?; assert_eq!(Some((val1, id)), val2); } - assert_eq!(doc1.range_at(ROOT, range.clone(), &v1).count(), 2); + assert_eq!(doc1.map_range_at(ROOT, range.clone(), &v1).count(), 2); - for (key, val1, id) in doc1.range_at(ROOT, range.clone(), &v1) { + for (key, val1, id) in doc1.map_range_at(ROOT, range.clone(), &v1) { let val2 = doc1.get_at(ROOT, key, &v1)?; assert_eq!(Some((val1, id)), val2); } - assert_eq!(doc1.range_at(ROOT, range.clone(), &v1).rev().count(), 2); + assert_eq!(doc1.map_range_at(ROOT, range.clone(), &v1).rev().count(), 2); - for (key, val1, id) in doc1.range_at(ROOT, range, &v1).rev() { + for (key, val1, id) in doc1.map_range_at(ROOT, range, &v1).rev() { let val2 = doc1.get_at(ROOT, key, &v1)?; assert_eq!(Some((val1, id)), val2); } + let range: Vec<_> = doc1 + .map_range(ROOT, ..) 
+ .map(|(_, val, id)| (val, id)) + .collect(); + let values: Vec<_> = doc1.values(ROOT).collect(); + assert_eq!(range, values); + + let range: Vec<_> = doc1 + .map_range_at(ROOT, .., &v1) + .map(|(_, val, id)| (val, id)) + .collect(); + let values: Vec<_> = doc1.values_at(ROOT, &v1).collect(); + assert_eq!(range, values); + Ok(()) } @@ -1673,9 +1764,9 @@ mod tests { tx.put(ROOT, "d", 9).unwrap(); tx.commit(); let actor = doc.get_actor(); - assert_eq!(doc.range(ROOT, ..).rev().count(), 4); + assert_eq!(doc.map_range(ROOT, ..).rev().count(), 4); - let mut range = doc.range(ROOT, "b".to_owned().."d".into()).rev(); + let mut range = doc.map_range(ROOT, "b".to_owned().."d".into()).rev(); assert_eq!( range.next(), Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) @@ -1686,7 +1777,7 @@ mod tests { ); assert_eq!(range.next(), None); - let mut range = doc.range(ROOT, "b".to_owned()..="d".into()).rev(); + let mut range = doc.map_range(ROOT, "b".to_owned()..="d".into()).rev(); assert_eq!( range.next(), Some(("d", 9.into(), ExId::Id(7, actor.clone(), 0))) @@ -1701,7 +1792,7 @@ mod tests { ); assert_eq!(range.next(), None); - let mut range = doc.range(ROOT, ..="c".to_owned()).rev(); + let mut range = doc.map_range(ROOT, ..="c".to_owned()).rev(); assert_eq!( range.next(), Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) @@ -1716,7 +1807,7 @@ mod tests { ); assert_eq!(range.next(), None); - let range = doc.range(ROOT, "a".to_owned()..).rev(); + let range = doc.map_range(ROOT, "a".to_owned()..).rev(); assert_eq!( range.collect::>(), vec![ diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index bb39de25..c011d2de 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -67,20 +67,21 @@ mod indexed_cache; mod keys; mod keys_at; mod legacy; +mod list_range; +mod list_range_at; +mod map_range; +mod map_range_at; mod op_observer; mod op_set; mod op_tree; mod options; mod parents; mod query; -mod range; -mod range_at; pub mod sync; pub mod transaction; mod types; 
mod value; mod values; -mod values_at; #[cfg(feature = "optree-visualisation")] mod visualisation; @@ -95,16 +96,17 @@ pub use exid::ExId as ObjId; pub use keys::Keys; pub use keys_at::KeysAt; pub use legacy::Change as ExpandedChange; +pub use list_range::ListRange; +pub use list_range_at::ListRangeAt; +pub use map_range::MapRange; +pub use map_range_at::MapRangeAt; pub use op_observer::OpObserver; pub use op_observer::Patch; pub use op_observer::VecOpObserver; pub use options::ApplyOptions; pub use parents::Parents; -pub use range::Range; -pub use range_at::RangeAt; pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; pub use value::{ScalarValue, Value}; pub use values::Values; -pub use values_at::ValuesAt; pub const ROOT: ObjId = ObjId::Root; diff --git a/automerge/src/list_range.rs b/automerge/src/list_range.rs new file mode 100644 index 00000000..ae7b2aa5 --- /dev/null +++ b/automerge/src/list_range.rs @@ -0,0 +1,27 @@ +use crate::{exid::ExId, Value}; + +use crate::{query, Automerge}; +use std::ops::RangeBounds; + +#[derive(Debug)] +pub struct ListRange<'a, R: RangeBounds> { + range: Option>, + doc: &'a Automerge, +} + +impl<'a, R: RangeBounds> ListRange<'a, R> { + pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { + Self { range, doc } + } +} + +impl<'a, R: RangeBounds> Iterator for ListRange<'a, R> { + type Item = (usize, Value<'a>, ExId); + + fn next(&mut self) -> Option { + self.range + .as_mut()? 
+ .next() + .map(|(idx, value, id)| (idx, value, self.doc.id_to_exid(id))) + } +} diff --git a/automerge/src/list_range_at.rs b/automerge/src/list_range_at.rs new file mode 100644 index 00000000..37db9677 --- /dev/null +++ b/automerge/src/list_range_at.rs @@ -0,0 +1,27 @@ +use crate::{exid::ExId, Value}; +use std::ops::RangeBounds; + +use crate::{query, Automerge}; + +#[derive(Debug)] +pub struct ListRangeAt<'a, R: RangeBounds> { + range: Option>, + doc: &'a Automerge, +} + +impl<'a, R: RangeBounds> ListRangeAt<'a, R> { + pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { + Self { range, doc } + } +} + +impl<'a, R: RangeBounds> Iterator for ListRangeAt<'a, R> { + type Item = (usize, Value<'a>, ExId); + + fn next(&mut self) -> Option { + self.range + .as_mut()? + .next() + .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) + } +} diff --git a/automerge/src/range.rs b/automerge/src/map_range.rs similarity index 75% rename from automerge/src/range.rs rename to automerge/src/map_range.rs index a20c1cf9..8029b84d 100644 --- a/automerge/src/range.rs +++ b/automerge/src/map_range.rs @@ -4,18 +4,18 @@ use std::ops::RangeBounds; use crate::{query, Automerge}; #[derive(Debug)] -pub struct Range<'a, R: RangeBounds> { +pub struct MapRange<'a, R: RangeBounds> { range: Option>, doc: &'a Automerge, } -impl<'a, R: RangeBounds> Range<'a, R> { +impl<'a, R: RangeBounds> MapRange<'a, R> { pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { Self { range, doc } } } -impl<'a, R: RangeBounds> Iterator for Range<'a, R> { +impl<'a, R: RangeBounds> Iterator for MapRange<'a, R> { type Item = (&'a str, Value<'a>, ExId); fn next(&mut self) -> Option { @@ -26,7 +26,7 @@ impl<'a, R: RangeBounds> Iterator for Range<'a, R> { } } -impl<'a, R: RangeBounds> DoubleEndedIterator for Range<'a, R> { +impl<'a, R: RangeBounds> DoubleEndedIterator for MapRange<'a, R> { fn next_back(&mut self) -> Option { self.range .as_mut()? 
diff --git a/automerge/src/range_at.rs b/automerge/src/map_range_at.rs similarity index 75% rename from automerge/src/range_at.rs rename to automerge/src/map_range_at.rs index 1d802a78..b2eb3fb2 100644 --- a/automerge/src/range_at.rs +++ b/automerge/src/map_range_at.rs @@ -4,18 +4,18 @@ use std::ops::RangeBounds; use crate::{query, Automerge}; #[derive(Debug)] -pub struct RangeAt<'a, R: RangeBounds> { +pub struct MapRangeAt<'a, R: RangeBounds> { range: Option>, doc: &'a Automerge, } -impl<'a, R: RangeBounds> RangeAt<'a, R> { +impl<'a, R: RangeBounds> MapRangeAt<'a, R> { pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { Self { range, doc } } } -impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { +impl<'a, R: RangeBounds> Iterator for MapRangeAt<'a, R> { type Item = (&'a str, Value<'a>, ExId); fn next(&mut self) -> Option { @@ -26,7 +26,7 @@ impl<'a, R: RangeBounds> Iterator for RangeAt<'a, R> { } } -impl<'a, R: RangeBounds> DoubleEndedIterator for RangeAt<'a, R> { +impl<'a, R: RangeBounds> DoubleEndedIterator for MapRangeAt<'a, R> { fn next_back(&mut self) -> Option { self.range .as_mut()? 
diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 797fdede..8462c63e 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -77,7 +77,7 @@ impl OpSetInternal { } } - pub(crate) fn range>( + pub(crate) fn map_range>( &self, obj: ObjId, range: R, @@ -89,7 +89,7 @@ impl OpSetInternal { } } - pub(crate) fn range_at>( + pub(crate) fn map_range_at>( &self, obj: ObjId, range: R, diff --git a/automerge/src/query/list_range.rs b/automerge/src/query/list_range.rs index 261e09a2..e663b2e6 100644 --- a/automerge/src/query/list_range.rs +++ b/automerge/src/query/list_range.rs @@ -1,6 +1,8 @@ +use crate::exid::ExId; use crate::op_tree::OpTreeNode; use crate::types::{ElemId, OpId}; -use crate::Value; +use crate::values::ValueIter; +use crate::{Automerge, Value}; use std::fmt::Debug; use std::ops::RangeBounds; @@ -29,24 +31,31 @@ impl<'a, R: RangeBounds> ListRange<'a, R> { } } +impl<'a, R: RangeBounds> ValueIter<'a> for ListRange<'a, R> { + fn next_value(&mut self, doc: &'a Automerge) -> Option<(Value<'a>, ExId)> { + self.next().map(|(_, val, id)| (val, doc.id_to_exid(id))) + } +} + impl<'a, R: RangeBounds> Iterator for ListRange<'a, R> { type Item = (usize, Value<'a>, OpId); fn next(&mut self) -> Option { - let mut result = None; for i in self.index..self.index_back { let op = self.root_child.get(i)?; self.index += 1; if op.visible() { - if self.range.contains(&self.pos) { - result = self.next_result.replace((self.pos, op.value(), op.id)); - } if op.elemid() != self.last_elemid { self.last_elemid = op.elemid(); self.pos += 1; - if result.is_some() { - return result; + if self.range.contains(&(self.pos - 1)) { + let result = self.next_result.replace((self.pos - 1, op.value(), op.id)); + if result.is_some() { + return result; + } } + } else if self.pos > 0 && self.range.contains(&(self.pos - 1)) { + self.next_result = Some((self.pos - 1, op.value(), op.id)); } } } diff --git a/automerge/src/query/list_range_at.rs 
b/automerge/src/query/list_range_at.rs index a5c37167..5c7257af 100644 --- a/automerge/src/query/list_range_at.rs +++ b/automerge/src/query/list_range_at.rs @@ -1,7 +1,9 @@ use super::VisWindow; +use crate::exid::ExId; use crate::op_tree::OpTreeNode; use crate::types::{Clock, ElemId, OpId}; -use crate::Value; +use crate::values::ValueIter; +use crate::{Automerge, Value}; use std::fmt::Debug; use std::ops::RangeBounds; @@ -18,6 +20,12 @@ pub(crate) struct ListRangeAt<'a, R: RangeBounds> { window: VisWindow, } +impl<'a, R: RangeBounds> ValueIter<'a> for ListRangeAt<'a, R> { + fn next_value(&mut self, doc: &'a Automerge) -> Option<(Value<'a>, ExId)> { + self.next().map(|(_, val, id)| (val, doc.id_to_exid(id))) + } +} + impl<'a, R: RangeBounds> ListRangeAt<'a, R> { pub(crate) fn new(range: R, clock: Clock, root_child: &'a OpTreeNode) -> Self { Self { @@ -38,22 +46,22 @@ impl<'a, R: RangeBounds> Iterator for ListRangeAt<'a, R> { type Item = (usize, Value<'a>, OpId); fn next(&mut self) -> Option { - // FIXME if self.pos > range.end { return None } - let mut result = None; for i in self.index..self.index_back { let op = self.root_child.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; if visible { - if self.range.contains(&self.pos) { - result = self.next_result.replace((self.pos, op.value(), op.id)); - } if op.elemid() != self.last_elemid { self.last_elemid = op.elemid(); self.pos += 1; - if result.is_some() { - return result; + if self.range.contains(&(self.pos - 1)) { + let result = self.next_result.replace((self.pos - 1, op.value(), op.id)); + if result.is_some() { + return result; + } } + } else if self.pos > 0 && self.range.contains(&(self.pos - 1)) { + self.next_result = Some((self.pos - 1, op.value(), op.id)); } } } diff --git a/automerge/src/query/map_range.rs b/automerge/src/query/map_range.rs index b611951d..c5060e6a 100644 --- a/automerge/src/query/map_range.rs +++ b/automerge/src/query/map_range.rs @@ -1,6 +1,8 @@ +use 
crate::exid::ExId; use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::types::{Key, OpId}; -use crate::Value; +use crate::values::ValueIter; +use crate::{Automerge, Value}; use std::fmt::Debug; use std::ops::RangeBounds; @@ -16,6 +18,12 @@ pub(crate) struct MapRange<'a, R: RangeBounds> { meta: &'a OpSetMetadata, } +impl<'a, R: RangeBounds> ValueIter<'a> for MapRange<'a, R> { + fn next_value(&mut self, doc: &'a Automerge) -> Option<(Value<'a>, ExId)> { + self.next().map(|(_, val, id)| (val, doc.id_to_exid(id))) + } +} + impl<'a, R: RangeBounds> MapRange<'a, R> { pub(crate) fn new(range: R, root_child: &'a OpTreeNode, meta: &'a OpSetMetadata) -> Self { Self { diff --git a/automerge/src/query/map_range_at.rs b/automerge/src/query/map_range_at.rs index 4f10b217..cbd52b29 100644 --- a/automerge/src/query/map_range_at.rs +++ b/automerge/src/query/map_range_at.rs @@ -1,7 +1,9 @@ use crate::clock::Clock; +use crate::exid::ExId; use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::types::{Key, OpId}; -use crate::Value; +use crate::values::ValueIter; +use crate::{Automerge, Value}; use std::fmt::Debug; use std::ops::RangeBounds; @@ -24,6 +26,12 @@ pub(crate) struct MapRangeAt<'a, R: RangeBounds> { meta: &'a OpSetMetadata, } +impl<'a, R: RangeBounds> ValueIter<'a> for MapRangeAt<'a, R> { + fn next_value(&mut self, doc: &'a Automerge) -> Option<(Value<'a>, ExId)> { + self.next().map(|(_, val, id)| (val, doc.id_to_exid(id))) + } +} + impl<'a, R: RangeBounds> MapRangeAt<'a, R> { pub(crate) fn new( range: R, diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 949ff437..7be7932e 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -1,9 +1,9 @@ use std::ops::RangeBounds; use crate::exid::ExId; -use crate::{Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ScalarValue, Value}; +use crate::{Automerge, ChangeHash, KeysAt, ObjType, 
OpObserver, Prop, ScalarValue, Value, Values}; use crate::{AutomergeError, Keys}; -use crate::{Range, RangeAt, Values, ValuesAt}; +use crate::{ListRange, ListRangeAt, MapRange, MapRangeAt}; use super::{CommitOptions, Transactable, TransactionInner}; @@ -187,24 +187,45 @@ impl<'a> Transactable for Transaction<'a> { self.doc.keys_at(obj, heads) } - fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range<'_, R> { - self.doc.range(obj, range) + fn map_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> MapRange<'_, R> { + self.doc.map_range(obj, range) } - fn range_at, R: RangeBounds>( + fn map_range_at, R: RangeBounds>( &self, obj: O, range: R, heads: &[ChangeHash], - ) -> RangeAt<'_, R> { - self.doc.range_at(obj, range, heads) + ) -> MapRangeAt<'_, R> { + self.doc.map_range_at(obj, range, heads) + } + + fn list_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> ListRange<'_, R> { + self.doc.list_range(obj, range) + } + + fn list_range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> ListRangeAt<'_, R> { + self.doc.list_range_at(obj, range, heads) } fn values>(&self, obj: O) -> Values<'_> { self.doc.values(obj) } - fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt<'_> { + fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> { self.doc.values_at(obj, heads) } diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 44a3e53b..209da3c9 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -2,8 +2,8 @@ use std::ops::RangeBounds; use crate::exid::ExId; use crate::{ - AutomergeError, ChangeHash, Keys, KeysAt, ObjType, Parents, Prop, Range, RangeAt, ScalarValue, - Value, Values, ValuesAt, + AutomergeError, ChangeHash, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, + ObjType, Parents, Prop, ScalarValue, Value, Values, }; /// A way of mutating a document within a single change. 
@@ -102,18 +102,35 @@ pub trait Transactable { /// Get the keys of the given object at a point in history. fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_>; - fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range<'_, R>; + fn map_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> MapRange<'_, R>; - fn range_at, R: RangeBounds>( + fn map_range_at, R: RangeBounds>( &self, obj: O, range: R, heads: &[ChangeHash], - ) -> RangeAt<'_, R>; + ) -> MapRangeAt<'_, R>; + + fn list_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> ListRange<'_, R>; + + fn list_range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> ListRangeAt<'_, R>; fn values>(&self, obj: O) -> Values<'_>; - fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt<'_>; + fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_>; /// Get the length of the given object. fn length>(&self, obj: O) -> usize; diff --git a/automerge/src/values.rs b/automerge/src/values.rs index eb4a227a..d713d9af 100644 --- a/automerge/src/values.rs +++ b/automerge/src/values.rs @@ -1,17 +1,47 @@ -use crate::{exid::ExId, Value}; -use std::ops::RangeFull; +use crate::exid::ExId; +use crate::{Automerge, Value}; +use std::fmt; -use crate::{query, Automerge}; - -#[derive(Debug)] pub struct Values<'a> { - range: Option>, + range: Box>, doc: &'a Automerge, } +impl<'a> fmt::Debug for Values<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Values").finish() + } +} + +pub(crate) trait ValueIter<'a> { + fn next_value(&mut self, doc: &'a Automerge) -> Option<(Value<'a>, ExId)>; +} + +pub(crate) struct NoValues {} + +impl<'a> ValueIter<'a> for NoValues { + fn next_value(&mut self, _doc: &'a Automerge) -> Option<(Value<'a>, ExId)> { + None + } +} + impl<'a> Values<'a> { - pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { - Self { range, doc } + pub(crate) fn new>(doc: &'a Automerge, range: Option) -> Self 
{ + if let Some(range) = range { + Self { + range: Box::new(range), + doc, + } + } else { + Self::empty(doc) + } + } + + pub(crate) fn empty(doc: &'a Automerge) -> Self { + Self { + range: Box::new(NoValues {}), + doc, + } } } @@ -19,9 +49,6 @@ impl<'a> Iterator for Values<'a> { type Item = (Value<'a>, ExId); fn next(&mut self) -> Option { - self.range - .as_mut()? - .next() - .map(|(_idx, value, id)| (value, self.doc.id_to_exid(id))) + self.range.next_value(self.doc) } } diff --git a/automerge/src/values_at.rs b/automerge/src/values_at.rs deleted file mode 100644 index 07099de3..00000000 --- a/automerge/src/values_at.rs +++ /dev/null @@ -1,30 +0,0 @@ -use crate::{exid::ExId, Value}; -use std::ops::RangeFull; - -use crate::{query, Automerge}; - -#[derive(Debug)] -pub struct ValuesAt<'a> { - range: Option>, - doc: &'a Automerge, -} - -impl<'a> ValuesAt<'a> { - pub(crate) fn new( - doc: &'a Automerge, - range: Option>, - ) -> Self { - Self { range, doc } - } -} - -impl<'a> Iterator for ValuesAt<'a> { - type Item = (Value<'a>, ExId); - - fn next(&mut self) -> Option { - self.range - .as_mut()? - .next() - .map(|(_idx, value, id)| (value, self.doc.id_to_exid(id))) - } -} From bf6ee85c58c1f54a920623b1e40b5e282b09bd13 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 3 May 2022 22:44:58 -0500 Subject: [PATCH 336/730] Added the `time_t` header. 
--- automerge-c/cbindgen.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-c/cbindgen.toml b/automerge-c/cbindgen.toml index aad1850d..e1a7b6aa 100644 --- a/automerge-c/cbindgen.toml +++ b/automerge-c/cbindgen.toml @@ -25,7 +25,7 @@ language = "C" line_length = 140 no_includes = true style = "both" -sys_includes = ["stdbool.h", "stddef.h", "stdint.h"] +sys_includes = ["stdbool.h", "stddef.h", "stdint.h", "time.h"] usize_is_size_t = true [enum] From 30b220d9b7dbd222e71732765f7cca256a7c0693 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 3 May 2022 23:18:42 -0500 Subject: [PATCH 337/730] Added a port of the Rust quickstart example. --- automerge-c/CMakeLists.txt | 2 + automerge-c/examples/CMakeLists.txt | 42 ++++++++++++++ automerge-c/examples/README.md | 9 +++ automerge-c/examples/quickstart.c | 86 +++++++++++++++++++++++++++++ 4 files changed, 139 insertions(+) create mode 100644 automerge-c/examples/CMakeLists.txt create mode 100644 automerge-c/examples/README.md create mode 100644 automerge-c/examples/quickstart.c diff --git a/automerge-c/CMakeLists.txt b/automerge-c/CMakeLists.txt index 188780f9..4ffca094 100644 --- a/automerge-c/CMakeLists.txt +++ b/automerge-c/CMakeLists.txt @@ -96,6 +96,8 @@ if(BUILD_TESTING) enable_testing() endif() +add_subdirectory(examples EXCLUDE_FROM_ALL) + # Generate and install .cmake files set(PROJECT_CONFIG_NAME "${PROJECT_NAME}-config") diff --git a/automerge-c/examples/CMakeLists.txt b/automerge-c/examples/CMakeLists.txt new file mode 100644 index 00000000..09ddeb70 --- /dev/null +++ b/automerge-c/examples/CMakeLists.txt @@ -0,0 +1,42 @@ +cmake_minimum_required(VERSION 3.18 FATAL_ERROR) + +add_executable( + example_quickstart + quickstart.c +) + +set_target_properties(example_quickstart PROPERTIES LINKER_LANGUAGE C) + +# \note An imported library's INTERFACE_INCLUDE_DIRECTORIES property can't +# contain a non-existent path so its build-time include directory +# must be specified for all of 
its dependent targets instead. +target_include_directories( + example_quickstart + PRIVATE "$" +) + +target_link_libraries(example_quickstart PRIVATE ${LIBRARY_NAME}) + +add_dependencies(example_quickstart ${LIBRARY_NAME}_artifacts) + +if(BUILD_SHARED_LIBS AND WIN32) + add_custom_command( + TARGET example_quickstart + POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different + ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX} + ${CMAKE_CURRENT_BINARY_DIR} + COMMENT "Copying the DLL built by Cargo into the examples directory..." + VERBATIM + ) +endif() + +add_custom_command( + TARGET example_quickstart + POST_BUILD + COMMAND + example_quickstart + COMMENT + "Running the example quickstart..." + VERBATIM +) diff --git a/automerge-c/examples/README.md b/automerge-c/examples/README.md new file mode 100644 index 00000000..17aa2227 --- /dev/null +++ b/automerge-c/examples/README.md @@ -0,0 +1,9 @@ +# Automerge C examples + +## Quickstart + +```shell +cmake -E make_directory automerge-c/build +cmake -S automerge-c -B automerge-c/build +cmake --build automerge-c/build --target example_quickstart +``` diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c new file mode 100644 index 00000000..56b8eaa7 --- /dev/null +++ b/automerge-c/examples/quickstart.c @@ -0,0 +1,86 @@ +#include +#include + +#include + +AMvalue test(AMresult* result, AMvalueVariant const value_tag) { + if (result == NULL) { + fprintf(stderr, "Invalid AMresult struct."); + exit(-1); + } + AMstatus const status = AMresultStatus(result); + if (status != AM_STATUS_OK) { + fprintf(stderr, "Unexpected AMstatus enum tag %d.", status); + exit(-2); + } + AMvalue const value = AMresultValue(result, 0); + if (value.tag != value_tag) { + fprintf(stderr, "Unexpected AMvalueVariant enum tag %d.", value.tag); + exit(-3); + } + return value; +} + +/* + * Based on 
https://automerge.github.io/docs/quickstart + */ +int main(int argc, char** argv) { + AMdoc* const doc1 = AMalloc(); + AMresult* const cards_result = AMmapPutObject(doc1, AM_ROOT, "cards", AM_OBJ_TYPE_LIST); + AMvalue value = test(cards_result, AM_VALUE_OBJ_ID); + AMobjId const* const cards = value.obj_id; + AMresult* const card1_result = AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP); + value = test(card1_result, AM_VALUE_OBJ_ID); + AMobjId const* const card1 = value.obj_id; + AMresult* result = AMmapPutStr(doc1, card1, "title", "Rewrite everything in Clojure"); + AMfreeResult(result); + result = AMmapPutBool(doc1, card1, "done", false); + AMfreeResult(result); + AMresult* const card2_result = AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP); + value = test(card2_result, AM_VALUE_OBJ_ID); + AMobjId const* const card2 = value.obj_id; + result = AMmapPutStr(doc1, card2, "title", "Rewrite everything in Haskell"); + AMfreeResult(result); + result = AMmapPutBool(doc1, card2, "done", false); + AMfreeResult(result); + AMfreeResult(card2_result); + result = AMcommit(doc1, "Add card", NULL); + AMfreeResult(result); + + AMdoc* doc2 = AMalloc(); + result = AMmerge(doc2, doc1); + AMfreeResult(result); + AMfreeDoc(doc2); + + AMresult* save_result = AMsave(doc1); + AMvalue save_value = test(save_result, AM_VALUE_BYTES); + AMbyteSpan binary = save_value.bytes; + doc2 = AMalloc(); + AMresult* load_result = AMload(doc2, binary.src, binary.count); + AMfreeResult(load_result); + AMfreeResult(save_result); + + result = AMmapPutBool(doc1, card1, "done", true); + AMfreeResult(result); + AMcommit(doc1, "Mark card as done", NULL); + AMfreeResult(card1_result); + + result = AMlistDelete(doc2, cards, 0); + AMfreeResult(result); + AMcommit(doc2, "Delete card", NULL); + + result = AMmerge(doc1, doc2); + AMfreeResult(result); + AMfreeDoc(doc2); + + result = AMgetChanges(doc1, NULL); + value = test(result, AM_VALUE_CHANGES); + AMchange const* change = NULL; + while 
(value.changes.ptr && (change = AMnextChange(&value.changes, 1))) { + size_t const size = AMobjSizeAt(doc1, cards, change); + printf("%s %ld\n", AMgetMessage(change), size); + } + AMfreeResult(result); + AMfreeResult(cards_result); + AMfreeDoc(doc1); +} From c6e7f993fd1817cd3988bcc5d2066a599c8e9587 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 3 May 2022 23:54:54 -0500 Subject: [PATCH 338/730] Moved the `AMbyteSpan` struct into its own source file. Added the `AMchangeHashes` struct. Added the `AMchange` and `AMchanges` structs. Added `ChangeHashes` and `Changes` variants to the `AMresult` struct. Renamed the `AMvalue::Nothing` variant to `AMvalue::Void`. Tied the lifetime of an `AMobjId` struct to the `AMresult` struct that it's returned through so that it can be used to reach equivalent objects within multiple `AMdoc` structs. Consolidated the `AMresult` struct's related trait implementations. --- automerge-c/src/CMakeLists.txt | 4 +- automerge-c/src/byte_span.rs | 61 +++++++++++ automerge-c/src/change_hashes.rs | 159 +++++++++++++++++++++++++++ automerge-c/src/changes.rs | 181 +++++++++++++++++++++++++++++++ automerge-c/src/doc.rs | 60 +--------- automerge-c/src/result.rs | 168 +++++++++++++++------------- automerge-c/src/utils.rs | 7 -- 7 files changed, 502 insertions(+), 138 deletions(-) create mode 100644 automerge-c/src/byte_span.rs create mode 100644 automerge-c/src/change_hashes.rs create mode 100644 automerge-c/src/changes.rs delete mode 100644 automerge-c/src/utils.rs diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt index 11cf5d96..5df9dd74 100644 --- a/automerge-c/src/CMakeLists.txt +++ b/automerge-c/src/CMakeLists.txt @@ -50,9 +50,11 @@ add_custom_command( MAIN_DEPENDENCY lib.rs DEPENDS + byte_span.rs + change_hashes.rs + changes.rs doc.rs result.rs - utils.rs ${CMAKE_SOURCE_DIR}/build.rs ${CMAKE_SOURCE_DIR}/Cargo.toml ${CMAKE_SOURCE_DIR}/cbindgen.toml diff --git a/automerge-c/src/byte_span.rs 
b/automerge-c/src/byte_span.rs new file mode 100644 index 00000000..68bfea00 --- /dev/null +++ b/automerge-c/src/byte_span.rs @@ -0,0 +1,61 @@ +use automerge as am; + +use crate::AMchange; + +/// \struct AMbyteSpan +/// \brief A contiguous sequence of bytes. +/// +#[repr(C)] +pub struct AMbyteSpan { + /// A pointer to an array of bytes. + /// \warning \p src is only valid until the `AMfreeResult()` function is called + /// on the `AMresult` struct hosting the array of bytes to which + /// it points. + src: *const u8, + /// The number of bytes in the array. + count: usize, +} + +impl Default for AMbyteSpan { + fn default() -> Self { + Self { + src: std::ptr::null(), + count: 0, + } + } +} + +impl From<&AMchange> for AMbyteSpan { + fn from(change: &AMchange) -> Self { + let change_hash = &(change.as_ref()).hash; + change_hash.into() + } +} + +impl From<&mut am::ActorId> for AMbyteSpan { + fn from(actor: &mut am::ActorId) -> Self { + let slice = actor.to_bytes(); + Self { + src: slice.as_ptr(), + count: slice.len(), + } + } +} + +impl From<&am::ChangeHash> for AMbyteSpan { + fn from(change_hash: &am::ChangeHash) -> Self { + Self { + src: change_hash.0.as_ptr(), + count: change_hash.0.len(), + } + } +} + +impl From<&Vec> for AMbyteSpan { + fn from(v: &Vec) -> Self { + Self { + src: (*v).as_ptr(), + count: (*v).len(), + } + } +} diff --git a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs new file mode 100644 index 00000000..ad29b490 --- /dev/null +++ b/automerge-c/src/change_hashes.rs @@ -0,0 +1,159 @@ +use automerge as am; +use std::ffi::c_void; + +use crate::AMbyteSpan; + +/// \struct AMchangeHashes +/// \brief A bidirectional iterator over a sequence of `AMbyteSpan` structs. 
+#[repr(C)] +pub struct AMchangeHashes { + len: usize, + offset: isize, + ptr: *const c_void, +} + +impl AsRef<[am::ChangeHash]> for AMchangeHashes { + fn as_ref(&self) -> &[am::ChangeHash] { + unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) } + } +} + +impl AMchangeHashes { + pub fn new(change_hashes: &[am::ChangeHash]) -> Self { + Self { + len: change_hashes.len(), + offset: 0, + ptr: change_hashes.as_ptr() as *const c_void, + } + } + + pub fn advance(&mut self, n: isize) { + let len = self.len as isize; + if n != 0 && self.offset >= -len && self.offset < len { + // It's being advanced and it's hasn't stopped. + self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); + }; + } + + pub fn next(&mut self, n: isize) -> Option<&am::ChangeHash> { + let len = self.len as isize; + if self.offset < -len || self.offset == len { + // It's stopped. + None + } else { + let slice = + unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) }; + let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; + let element = Some(&slice[index]); + self.advance(n); + element + } + } + + pub fn prev(&mut self, n: isize) -> Option<&am::ChangeHash> { + self.advance(n); + let len = self.len as isize; + if self.offset < -len || self.offset == len { + // It's stopped. + None + } else { + let slice = + unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) }; + let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; + Some(&slice[index]) + } + } +} + +/// \memberof AMchangeHashes +/// \brief Advances/rewinds an `AMchangeHashes` struct by at most \p |n| +/// positions. +/// +/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. +/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum +/// number of positions to advance/rewind. +/// \pre \p change_hashes must be a valid address. 
+/// \internal +/// +/// #Safety +/// change_hashes must be a pointer to a valid AMchangeHashes +#[no_mangle] +pub unsafe extern "C" fn AMadvanceChangeHashes(change_hashes: *mut AMchangeHashes, n: isize) { + if let Some(change_hashes) = change_hashes.as_mut() { + change_hashes.advance(n); + }; +} + +/// \memberof AMchangeHashes +/// \brief Gets the size of an `AMchangeHashes` struct. +/// +/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. +/// \return The count of values in \p change_hashes. +/// \pre \p change_hashes must be a valid address. +/// \internal +/// +/// #Safety +/// change_hashes must be a pointer to a valid AMchangeHashes +#[no_mangle] +pub unsafe extern "C" fn AMchangeHashesSize(change_hashes: *const AMchangeHashes) -> usize { + if let Some(change_hashes) = change_hashes.as_ref() { + change_hashes.len + } else { + 0 + } +} + +/// \memberof AMchangeHashes +/// \brief Gets the `AMbyteSpan` struct at the current position of an +/// `AMchangeHashes`struct and then advances/rewinds it by at most \p |n| +/// positions. +/// +/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. +/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum +/// number of positions to advance/rewind. +/// \return An `AMbyteSpan` struct that's invalid when \p change_hashes was +/// previously advanced/rewound past its forward/backward limit. +/// \pre \p change_hashes must be a valid address. 
+/// \internal +/// +/// #Safety +/// change_hashes must be a pointer to a valid AMchangeHashes +#[no_mangle] +pub unsafe extern "C" fn AMnextChangeHash( + change_hashes: *mut AMchangeHashes, + n: isize, +) -> AMbyteSpan { + if let Some(change_hashes) = change_hashes.as_mut() { + if let Some(change_hash) = change_hashes.next(n) { + return change_hash.into(); + } + } + AMbyteSpan::default() +} + +/// \memberof AMchangeHashes +/// \brief Advances/rewinds an `AMchangeHashes` struct by at most \p |n| +/// positions and then gets the `AMbyteSpan` struct at its current position. +/// +/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. +/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum +/// number of positions to advance/rewind. +/// \return An `AMbyteSpan` struct that's invalid when \p change_hashes is +/// presently advanced/rewound past its forward/backward limit. +/// \pre \p change_hashes must be a valid address. +/// \internal +/// +/// #Safety +/// change_hashes must be a pointer to a valid AMchangeHashes +#[no_mangle] +pub unsafe extern "C" fn AMprevChangeHash( + change_hashes: *mut AMchangeHashes, + n: isize, +) -> AMbyteSpan { + if let Some(change_hashes) = change_hashes.as_mut() { + if let Some(change_hash) = change_hashes.prev(n) { + return change_hash.into(); + } + } + AMbyteSpan::default() +} diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs new file mode 100644 index 00000000..755d2510 --- /dev/null +++ b/automerge-c/src/changes.rs @@ -0,0 +1,181 @@ +use automerge as am; +use std::ffi::{c_void, CString}; + +/// \struct AMchange +/// \brief A group of operations performed by an actor. 
+pub struct AMchange { + body: am::Change, + c_message: Option, +} + +impl AMchange { + pub fn new(change: am::Change) -> Self { + let c_message = match change.message() { + Some(c_message) => CString::new(c_message).ok(), + None => None, + }; + Self { + body: change, + c_message, + } + } + + pub fn c_message(&self) -> Option<&CString> { + self.c_message.as_ref() + } +} + +impl AsRef for AMchange { + fn as_ref(&self) -> &am::Change { + &self.body + } +} + +/// \struct AMchanges +/// \brief A bidirectional iterator over a sequence of `AMchange` structs. +#[repr(C)] +pub struct AMchanges { + len: usize, + offset: isize, + ptr: *const c_void, +} + +impl AsRef<[AMchange]> for AMchanges { + fn as_ref(&self) -> &[AMchange] { + unsafe { std::slice::from_raw_parts(self.ptr as *const AMchange, self.len) } + } +} + +impl AMchanges { + pub fn new(changes: &[AMchange]) -> Self { + Self { + len: changes.len(), + offset: 0, + ptr: changes.as_ptr() as *const c_void, + } + } + + pub fn advance(&mut self, n: isize) { + let len = self.len as isize; + if n != 0 && self.offset >= -len && self.offset < len { + // It's being advanced and it hasn't stopped. + self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); + }; + } + + pub fn next(&mut self, n: isize) -> Option<&AMchange> { + let len = self.len as isize; + if self.offset < -len || self.offset == len { + // It's stopped. + None + } else { + let slice = + unsafe { std::slice::from_raw_parts(self.ptr as *const AMchange, self.len) }; + let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; + let element = Some(&slice[index]); + self.advance(n); + element + } + } + + pub fn prev(&mut self, n: isize) -> Option<&AMchange> { + self.advance(n); + let len = self.len as isize; + if self.offset < -len || self.offset == len { + // It's stopped. 
+ None + } else { + let slice = + unsafe { std::slice::from_raw_parts(self.ptr as *const AMchange, self.len) }; + let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; + Some(&slice[index]) + } + } +} + +/// \memberof AMchanges +/// \brief Advances/rewinds an `AMchanges` struct by at most \p |n| +/// positions. +/// +/// \param[in] changes A pointer to an `AMchanges` struct. +/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum +/// number of positions to advance/rewind. +/// \pre \p changes must be a valid address. +/// \internal +/// +/// #Safety +/// changes must be a pointer to a valid AMchanges +#[no_mangle] +pub unsafe extern "C" fn AMadvanceChanges(changes: *mut AMchanges, n: isize) { + if let Some(changes) = changes.as_mut() { + changes.advance(n); + }; +} + +/// \memberof AMchanges +/// \brief Gets the size of an `AMchanges` struct. +/// +/// \param[in] changes A pointer to an `AMchanges` struct. +/// \return The count of values in \p changes. +/// \pre \p changes must be a valid address. +/// \internal +/// +/// #Safety +/// changes must be a pointer to a valid AMchanges +#[no_mangle] +pub unsafe extern "C" fn AMchangesSize(changes: *const AMchanges) -> usize { + if let Some(changes) = changes.as_ref() { + changes.len + } else { + 0 + } +} + +/// \memberof AMchanges +/// \brief Gets the `AMchange` struct at the current position of an +/// `AMchanges`struct and then advances/rewinds it by at most \p |n| +/// positions. +/// +/// \param[in] changes A pointer to an `AMchanges` struct. +/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum +/// number of positions to advance/rewind. +/// \return A pointer to an `AMchange` struct that's invalid when \p changes was +/// previously advanced/rewound past its forward/backward limit. +/// \pre \p changes must be a valid address. 
+/// \internal +/// +/// #Safety +/// changes must be a pointer to a valid AMchanges +#[no_mangle] +pub unsafe extern "C" fn AMnextChange(changes: *mut AMchanges, n: isize) -> *const AMchange { + if let Some(changes) = changes.as_mut() { + if let Some(change) = changes.next(n) { + return change; + } + } + std::ptr::null() +} + +/// \memberof AMchanges +/// \brief Advances/rewinds an `AMchanges` struct by at most \p |n| +/// positions and then gets the `AMchange` struct at its current position. +/// +/// \param[in] changes A pointer to an `AMchanges` struct. +/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum +/// number of positions to advance/rewind. +/// \return A pointer to an `AMchange` struct that's invalid when \p changes is +/// presently advanced/rewound past its forward/backward limit. +/// \pre \p changes must be a valid address. +/// \internal +/// +/// #Safety +/// changes must be a pointer to a valid AMchanges +#[no_mangle] +pub unsafe extern "C" fn AMprevChange(changes: *mut AMchanges, n: isize) -> *const AMchange { + if let Some(changes) = changes.as_mut() { + if let Some(change) = changes.prev(n) { + return change; + } + } + std::ptr::null() +} diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 4de2524a..9a63042f 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -1,66 +1,14 @@ use automerge as am; -use std::collections::BTreeSet; use std::ops::{Deref, DerefMut}; -use crate::result::AMobjId; -use automerge::transaction::Transactable; - /// \struct AMdoc /// \brief A JSON-like CRDT. 
#[derive(Clone)] -pub struct AMdoc { - body: am::AutoCommit, - obj_ids: BTreeSet, -} +pub struct AMdoc(am::AutoCommit); impl AMdoc { pub fn new(body: am::AutoCommit) -> Self { - Self { - body, - obj_ids: BTreeSet::new(), - } - } - - pub fn insert_object( - &mut self, - obj: &am::ObjId, - index: usize, - value: am::ObjType, - ) -> Result<&AMobjId, am::AutomergeError> { - match self.body.insert_object(obj, index, value) { - Ok(ex_id) => { - let obj_id = AMobjId::new(ex_id); - self.obj_ids.insert(obj_id.clone()); - match self.obj_ids.get(&obj_id) { - Some(obj_id) => Ok(obj_id), - None => Err(am::AutomergeError::Fail), - } - } - Err(e) => Err(e), - } - } - - pub fn put_object, P: Into>( - &mut self, - obj: O, - prop: P, - value: am::ObjType, - ) -> Result<&AMobjId, am::AutomergeError> { - match self.body.put_object(obj, prop, value) { - Ok(ex_id) => { - let obj_id = AMobjId::new(ex_id); - self.obj_ids.insert(obj_id.clone()); - match self.obj_ids.get(&obj_id) { - Some(obj_id) => Ok(obj_id), - None => Err(am::AutomergeError::Fail), - } - } - Err(e) => Err(e), - } - } - - pub fn drop_obj_id(&mut self, obj_id: &AMobjId) -> bool { - self.obj_ids.remove(obj_id) + Self(body) } } @@ -68,13 +16,13 @@ impl Deref for AMdoc { type Target = am::AutoCommit; fn deref(&self) -> &Self::Target { - &self.body + &self.0 } } impl DerefMut for AMdoc { fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.body + &mut self.0 } } diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 07395fb1..0c8845b1 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -2,9 +2,12 @@ use automerge as am; use std::ffi::CString; use std::ops::Deref; +use crate::AMbyteSpan; +use crate::AMchangeHashes; +use crate::{AMchange, AMchanges}; + /// \struct AMobjId /// \brief An object's unique identifier. 
-#[derive(Clone, Eq, Ord, PartialEq, PartialOrd)] pub struct AMobjId(am::ObjId); impl AMobjId { @@ -27,40 +30,6 @@ impl Deref for AMobjId { } } -/// \memberof AMvalue -/// \struct AMbyteSpan -/// \brief A contiguous sequence of bytes. -/// -#[repr(C)] -pub struct AMbyteSpan { - /// A pointer to the byte at position zero. - /// \warning \p src is only valid until the `AMfreeResult()` function is called - /// on the `AMresult` struct hosting the array of bytes to which - /// it points. - src: *const u8, - /// The number of bytes in the sequence. - count: usize, -} - -impl From<&Vec> for AMbyteSpan { - fn from(v: &Vec) -> Self { - AMbyteSpan { - src: (*v).as_ptr(), - count: (*v).len(), - } - } -} - -impl From<&mut am::ActorId> for AMbyteSpan { - fn from(actor: &mut am::ActorId) -> Self { - let slice = actor.to_bytes(); - AMbyteSpan { - src: slice.as_ptr(), - count: slice.len(), - } - } -} - /// \struct AMvalue /// \brief A discriminated union of value type variants for an `AMresult` struct. /// @@ -79,15 +48,18 @@ impl From<&mut am::ActorId> for AMbyteSpan { /// \var AMvalue::bytes /// An array of bytes as an `AMbyteSpan` struct. /// +/// \var AMvalue::change_hashes +/// A sequence of change hashes as an `AMchangeHashes` struct. +/// +/// \var AMvalue::changes +/// A sequence of changes as an `AMchanges` struct. +/// /// \var AMvalue::counter /// A CRDT counter. /// /// \var AMvalue::f64 /// A 64-bit float. /// -/// \var AMvalue::change_hash -/// A change hash as an `AMbyteSpan` struct. -/// /// \var AMvalue::int_ /// A 64-bit signed integer. /// @@ -107,27 +79,23 @@ pub enum AMvalue<'a> { /// An actor ID variant. ActorId(AMbyteSpan), /// A boolean variant. - Boolean(libc::c_char), + Boolean(bool), /// An array of bytes variant. Bytes(AMbyteSpan), - /* + /// A change hashes variant. + ChangeHashes(AMchangeHashes), /// A changes variant. - Changes(_), - */ + Changes(AMchanges), /// A CRDT counter variant. Counter(i64), /// A 64-bit float variant. 
F64(f64), - /// A change hash variant. - ChangeHash(AMbyteSpan), /// A 64-bit signed integer variant. Int(i64), /* /// A keys variant. Keys(_), */ - /// A nothing variant. - Nothing, /// A null variant. Null, /// An object identifier variant. @@ -142,71 +110,123 @@ pub enum AMvalue<'a> { */ /// A 64-bit unsigned integer variant. Uint(u64), + /// A void variant. + Void, } /// \struct AMresult /// \brief A discriminated union of result variants. -/// -pub enum AMresult<'a> { +pub enum AMresult { ActorId(am::ActorId), - Changes(Vec), + ChangeHashes(Vec), + Changes(Vec), Error(CString), - ObjId(&'a AMobjId), - Nothing, + ObjId(AMobjId), Scalars(Vec>, Option), + Void, } -impl<'a> AMresult<'a> { +impl AMresult { pub(crate) fn err(s: &str) -> Self { AMresult::Error(CString::new(s).unwrap()) } } -impl<'a> From> for AMresult<'a> { +impl From for AMresult { + fn from(change_hash: am::ChangeHash) -> Self { + AMresult::ChangeHashes(vec![change_hash]) + } +} + +impl From> for AMresult { + fn from(maybe: Result<(), am::AutomergeError>) -> Self { + match maybe { + Ok(()) => AMresult::Void, + Err(e) => AMresult::err(&e.to_string()), + } + } +} +impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { Ok(actor_id) => AMresult::ActorId(actor_id), - Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()), + Err(e) => AMresult::err(&e.to_string()), } } } -impl<'a> From> for AMresult<'a> { - fn from(maybe: Result<&'a AMobjId, am::AutomergeError>) -> Self { +impl From> for AMresult { + fn from(maybe: Result) -> Self { match maybe { - Ok(obj_id) => AMresult::ObjId(obj_id), - Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()), + Ok(obj_id) => AMresult::ObjId(AMobjId::new(obj_id)), + Err(e) => AMresult::err(&e.to_string()), } } } -impl<'a> From> for AMresult<'a> { - fn from(maybe: Result<(), am::AutomergeError>) -> Self { - match maybe { - Ok(()) => AMresult::Nothing, - Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()), - } - } -} - 
-impl<'a> From, am::ObjId)>, am::AutomergeError>> - for AMresult<'a> -{ +impl From, am::ObjId)>, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { match maybe { // \todo Ensure that it's alright to ignore the `am::ObjId` value. Ok(Some((value, _))) => AMresult::Scalars(vec![value], None), - Ok(None) => AMresult::Nothing, - Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()), + Ok(None) => AMresult::Void, + Err(e) => AMresult::err(&e.to_string()), } } } -impl<'a> From, am::AutomergeError>> for AMresult<'a> { +impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { Ok(value) => AMresult::Scalars(vec![value], None), - Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()), + Err(e) => AMresult::err(&e.to_string()), } } } + +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(size) => AMresult::Scalars(vec![am::Value::uint(size as u64)], None), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + +impl From, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::AutomergeError>) -> Self { + match maybe { + Ok(changes) => AMresult::Changes( + changes + .iter() + .map(|&change| AMchange::new(change.clone())) + .collect(), + ), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + +impl From, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::AutomergeError>) -> Self { + match maybe { + Ok(change_hashes) => AMresult::ChangeHashes(change_hashes), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + +impl From, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::AutomergeError>) -> Self { + match maybe { + Ok(bytes) => AMresult::Scalars(vec![am::Value::bytes(bytes)], None), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + +impl From for *mut AMresult { + fn from(b: AMresult) -> Self { + Box::into_raw(Box::new(b)) + } +} diff --git a/automerge-c/src/utils.rs 
b/automerge-c/src/utils.rs deleted file mode 100644 index 70d2471a..00000000 --- a/automerge-c/src/utils.rs +++ /dev/null @@ -1,7 +0,0 @@ -use crate::AMresult; - -impl<'a> From> for *mut AMresult<'a> { - fn from(b: AMresult<'a>) -> Self { - Box::into_raw(Box::new(b)) - } -} From 58e0ce5efb2ffdcc169703bb912ea491a59f1f7d Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Wed, 4 May 2022 00:01:02 -0500 Subject: [PATCH 339/730] Renamed the `AMvalue::Nothing` variant to `AMvalue::Void`. Tied the lifetime of an `AMobjId` struct to the `AMresult` struct that it's returned through so that it can be used to reach equivalent objects within multiple `AMdoc` structs. Added test cases for the `AMlistPutBool()` function. Added a test case for the `AMmapPutBool()` function. --- automerge-c/test/amdoc_property_tests.c | 4 ++-- automerge-c/test/amlistput_tests.c | 22 +++++++++++----------- automerge-c/test/ammapput_tests.c | 19 ++++++++----------- automerge-c/test/macro_utils.c | 5 +++-- 4 files changed, 24 insertions(+), 26 deletions(-) diff --git a/automerge-c/test/amdoc_property_tests.c b/automerge-c/test/amdoc_property_tests.c index 4b2b3d2a..cce016b9 100644 --- a/automerge-c/test/amdoc_property_tests.c +++ b/automerge-c/test/amdoc_property_tests.c @@ -60,7 +60,7 @@ static void test_AMputActor(void **state) { } assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); - assert_int_equal(value.tag, AM_VALUE_NOTHING); + assert_int_equal(value.tag, AM_VALUE_VOID); AMfreeResult(res); res = AMgetActor(group_state->doc); if (AMresultStatus(res) != AM_STATUS_OK) { @@ -86,7 +86,7 @@ static void test_AMputActorHex(void **state) { } assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); - assert_int_equal(value.tag, AM_VALUE_NOTHING); + assert_int_equal(value.tag, AM_VALUE_VOID); AMfreeResult(res); res = AMgetActorHex(group_state->doc); if (AMresultStatus(res) != AM_STATUS_OK) { diff --git a/automerge-c/test/amlistput_tests.c 
b/automerge-c/test/amlistput_tests.c index 27b4fae9..c1da771a 100644 --- a/automerge-c/test/amlistput_tests.c +++ b/automerge-c/test/amlistput_tests.c @@ -26,7 +26,7 @@ static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ } \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ - assert_int_equal(value.tag, AM_VALUE_NOTHING); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ AMfreeResult(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ @@ -59,7 +59,7 @@ static void test_AMlistPutBytes_ ## mode(void **state) { \ } \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ - assert_int_equal(value.tag, AM_VALUE_NOTHING); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ AMfreeResult(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ @@ -85,7 +85,7 @@ static void test_AMlistPutNull_ ## mode(void **state) { \ } \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ - assert_int_equal(value.tag, AM_VALUE_NOTHING); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ AMfreeResult(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ @@ -115,15 +115,9 @@ static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ assert_int_equal(AMresultSize(res), 1); \ AMvalue value = AMresultValue(res, 0); \ assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ - /** \ - * \note The `AMresult` struct can be deallocated immediately when its \ - * value is a pointer to an opaque struct because its lifetime \ - * is tied to the `AMdoc` struct instead. 
\ - */ \ - AMfreeResult(res); \ assert_non_null(value.obj_id); \ assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ - AMfreeObjId(group_state->doc, value.obj_id); \ + AMfreeResult(res); \ } #define test_AMlistPutStr(mode) test_AMlistPutStr ## _ ## mode @@ -145,7 +139,7 @@ static void test_AMlistPutStr_ ## mode(void **state) { \ } \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ - assert_int_equal(value.tag, AM_VALUE_NOTHING); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ AMfreeResult(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ @@ -159,6 +153,10 @@ static void test_AMlistPutStr_ ## mode(void **state) { \ AMfreeResult(res); \ } +static_void_test_AMlistPut(Bool, insert, boolean, true) + +static_void_test_AMlistPut(Bool, update, boolean, true) + static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; static_void_test_AMlistPutBytes(insert, BYTES_VALUE) @@ -207,6 +205,8 @@ static_void_test_AMlistPut(Uint, update, uint, UINT64_MAX) int run_AMlistPut_tests(void) { const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMlistPut(Bool, insert)), + cmocka_unit_test(test_AMlistPut(Bool, update)), cmocka_unit_test(test_AMlistPutBytes(insert)), cmocka_unit_test(test_AMlistPutBytes(update)), cmocka_unit_test(test_AMlistPut(Counter, insert)), diff --git a/automerge-c/test/ammapput_tests.c b/automerge-c/test/ammapput_tests.c index 79b79f62..1e24438d 100644 --- a/automerge-c/test/ammapput_tests.c +++ b/automerge-c/test/ammapput_tests.c @@ -29,7 +29,7 @@ static void test_AMmapPut ## suffix(void **state) { \ } \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ - assert_int_equal(value.tag, AM_VALUE_NOTHING); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ AMfreeResult(res); \ res = AMmapGet(group_state->doc, AM_ROOT, #suffix); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ @@ -59,17 +59,13 @@ static void 
test_AMmapPutObject_ ## label(void **state) { \ assert_int_equal(AMresultSize(res), 1); \ AMvalue value = AMresultValue(res, 0); \ assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ - /** \ - * \note The `AMresult` struct can be deallocated immediately when its \ - * value is a pointer to an opaque struct because its lifetime \ - * is tied to the `AMdoc` struct instead. \ - */ \ - AMfreeResult(res); \ assert_non_null(value.obj_id); \ assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ - AMfreeObjId(group_state->doc, value.obj_id); \ + AMfreeResult(res); \ } +static_void_test_AMmapPut(Bool, boolean, true) + static void test_AMmapPutBytes(void **state) { static char const* const KEY = "Bytes"; static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; @@ -88,7 +84,7 @@ static void test_AMmapPutBytes(void **state) { } assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); - assert_int_equal(value.tag, AM_VALUE_NOTHING); + assert_int_equal(value.tag, AM_VALUE_VOID); AMfreeResult(res); res = AMmapGet(group_state->doc, AM_ROOT, KEY); if (AMresultStatus(res) != AM_STATUS_OK) { @@ -118,7 +114,7 @@ static void test_AMmapPutNull(void **state) { } assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); - assert_int_equal(value.tag, AM_VALUE_NOTHING); + assert_int_equal(value.tag, AM_VALUE_VOID); AMfreeResult(res); res = AMmapGet(group_state->doc, AM_ROOT, KEY); if (AMresultStatus(res) != AM_STATUS_OK) { @@ -153,7 +149,7 @@ static void test_AMmapPutStr(void **state) { } assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); - assert_int_equal(value.tag, AM_VALUE_NOTHING); + assert_int_equal(value.tag, AM_VALUE_VOID); AMfreeResult(res); res = AMmapGet(group_state->doc, AM_ROOT, KEY); if (AMresultStatus(res) != AM_STATUS_OK) { @@ -173,6 +169,7 @@ static_void_test_AMmapPut(Uint, uint, UINT64_MAX) int run_AMmapPut_tests(void) { const struct CMUnitTest tests[] = { + 
cmocka_unit_test(test_AMmapPut(Bool)), cmocka_unit_test(test_AMmapPutBytes), cmocka_unit_test(test_AMmapPut(Counter)), cmocka_unit_test(test_AMmapPut(F64)), diff --git a/automerge-c/test/macro_utils.c b/automerge-c/test/macro_utils.c index d4343bc0..35c55b85 100644 --- a/automerge-c/test/macro_utils.c +++ b/automerge-c/test/macro_utils.c @@ -4,7 +4,8 @@ #include "macro_utils.h" AMvalueVariant AMvalue_discriminant(char const* suffix) { - if (!strcmp(suffix, "Bytes")) return AM_VALUE_BYTES; + if (!strcmp(suffix, "Bool")) return AM_VALUE_BOOLEAN; + else if (!strcmp(suffix, "Bytes")) return AM_VALUE_BYTES; else if (!strcmp(suffix, "Counter")) return AM_VALUE_COUNTER; else if (!strcmp(suffix, "F64")) return AM_VALUE_F64; else if (!strcmp(suffix, "Int")) return AM_VALUE_INT; @@ -12,7 +13,7 @@ AMvalueVariant AMvalue_discriminant(char const* suffix) { else if (!strcmp(suffix, "Str")) return AM_VALUE_STR; else if (!strcmp(suffix, "Timestamp")) return AM_VALUE_TIMESTAMP; else if (!strcmp(suffix, "Uint")) return AM_VALUE_UINT; - else return AM_VALUE_NOTHING; + else return AM_VALUE_VOID; } AMobjType AMobjType_tag(char const* obj_type_label) { From 069c33a13e07bc0d5728df9198891079738a01ce Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Wed, 4 May 2022 01:02:27 -0500 Subject: [PATCH 340/730] Moved the `AMbyteSpan` struct into its own source file. Added the `AMchangeHashes` struct. Added the `AMchange` and `AMchanges` structs. Tied the lifetime of an `AMobjId` struct to the `AMresult` struct that it's returned through so that it can be used to reach equivalent objects within multiple `AMdoc` structs. Removed the `AMfreeObjId()` function. Renamed `AMallocDoc()` to `AMalloc()`. Added the `AMcommit()` function. Added the `AMgetChangeHash()` function. Added the `AMgetChanges()` function. Added the `AMgetMessage()` function. Added the `AMlistDelete()` function. Added the `AMlistPutBool()` function. Added the `AMmapDelete()` function. Added the `AMmapPutBool()` function. 
Added the `AMobjSizeAt()` function. Added the `AMsave()` function. Renamed the `AMvalue::Nothing` variant to `AMvalue::Void`. Changed all `AMobjId` struct function arguments to be immutable. --- automerge-c/src/lib.rs | 638 ++++++++++++++++++++++++--------- automerge-c/test/group_state.c | 2 +- 2 files changed, 474 insertions(+), 166 deletions(-) diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs index a880d588..e441fac6 100644 --- a/automerge-c/src/lib.rs +++ b/automerge-c/src/lib.rs @@ -2,11 +2,17 @@ use automerge as am; use smol_str::SmolStr; use std::{borrow::Cow, ffi::CStr, ffi::CString, os::raw::c_char}; +mod byte_span; +mod change_hashes; +mod changes; mod doc; mod result; -mod utils; -use automerge::transaction::Transactable; +use automerge::transaction::{CommitOptions, Transactable}; + +use byte_span::AMbyteSpan; +use change_hashes::AMchangeHashes; +use changes::{AMchange, AMchanges}; use doc::AMdoc; use result::{AMobjId, AMresult, AMvalue}; @@ -71,7 +77,7 @@ macro_rules! to_obj_id { }}; } -fn to_result<'a, R: Into>>(r: R) -> *mut AMresult<'a> { +fn to_result>(r: R) -> *mut AMresult { (r.into()).into() } @@ -82,26 +88,41 @@ fn to_result<'a, R: Into>>(r: R) -> *mut AMresult<'a> { /// \warning To avoid a memory leak, the returned pointer must be deallocated /// with `AMfreeDoc()`. #[no_mangle] -pub extern "C" fn AMallocDoc() -> *mut AMdoc { +pub extern "C" fn AMalloc() -> *mut AMdoc { AMdoc::new(am::AutoCommit::new()).into() } /// \memberof AMdoc -/// \brief Deallocates the storage for an `AMdoc` struct previously -/// allocated by `AMallocDoc()` or `AMdup()`. +/// \brief Commits the current operations on \p doc with an optional message +/// and/or time override as seconds since the epoch. /// /// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] message A UTF-8 string or `NULL`. +/// \param[in] time A pointer to a `time_t` value or `NULL`. 
+/// \return A pointer to an `AMresult` struct containing a change hash as an +/// `AMbyteSpan` struct. /// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeResult()`. /// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc #[no_mangle] -pub unsafe extern "C" fn AMfreeDoc(doc: *mut AMdoc) { - if !doc.is_null() { - let doc: AMdoc = *Box::from_raw(doc); - drop(doc) +pub unsafe extern "C" fn AMcommit( + doc: *mut AMdoc, + message: *const c_char, + time: *const libc::time_t, +) -> *mut AMresult { + let doc = to_doc!(doc); + let mut options = CommitOptions::default(); + if !message.is_null() { + options.set_message(to_str(message)); } + if let Some(time) = time.as_ref() { + options.set_time(*time); + } + to_result(doc.commit_with::<()>(options)) } /// \memberof AMdoc @@ -126,10 +147,79 @@ pub unsafe extern "C" fn AMdup(doc: *mut AMdoc) -> *mut AMdoc { } /// \memberof AMdoc -/// \brief Gets an `AMdoc` struct's actor ID value as an array of bytes. +/// \brief Deallocates the storage for an `AMdoc` struct previously +/// allocated by `AMalloc()` or `AMdup()`. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an `AMbyteSpan`. +/// \pre \p doc must be a valid address. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMfreeDoc(doc: *mut AMdoc) { + if !doc.is_null() { + let doc: AMdoc = *Box::from_raw(doc); + drop(doc) + } +} + +/// \memberof AMdoc +/// \brief Loads the compact form of an incremental save of an `AMdoc` struct +/// into \p doc. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The number of bytes in \p src to load. +/// \return A pointer to an `AMresult` struct containing the number of +/// operations loaded from \p src. 
+/// \pre \p doc must be a valid address. +/// \pre \p src must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p src. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeResult()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMload(doc: *mut AMdoc, src: *const u8, count: usize) -> *mut AMresult { + let doc = to_doc!(doc); + let mut data = Vec::new(); + data.extend_from_slice(std::slice::from_raw_parts(src, count)); + to_result(doc.load_incremental(&data)) +} + +/// \memberof AMdoc +/// \brief Applies all of the changes in \p src which are not in \p dest to +/// \p dest. +/// +/// \param[in] dest A pointer to an `AMdoc` struct. +/// \param[in] src A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` +/// struct. +/// \pre \p dest must be a valid address. +/// \pre \p src must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeResult()`. +/// \internal +/// +/// # Safety +/// dest must be a pointer to a valid AMdoc +/// src must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMresult { + let dest = to_doc!(dest); + to_result(dest.merge(to_doc!(src))) +} + +/// \memberof AMdoc +/// \brief Saves the entirety of \p doc into a compact form. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing an array of bytes as +/// an `AMbyteSpan` struct. /// \pre \p doc must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated /// with `AMfreeResult()`. 
@@ -138,7 +228,25 @@ pub unsafe extern "C" fn AMdup(doc: *mut AMdoc) -> *mut AMdoc { /// # Safety /// doc must be a pointer to a valid AMdoc #[no_mangle] -pub unsafe extern "C" fn AMgetActor<'a>(doc: *mut AMdoc) -> *mut AMresult<'a> { +pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(Ok(doc.save())) +} +/// \memberof AMdoc +/// \brief Gets an `AMdoc` struct's actor ID value as an array of bytes. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing an actor ID as an +/// `AMbyteSpan` struct. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeResult()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetActor(doc: *mut AMdoc) -> *mut AMresult { let doc = to_doc!(doc); to_result(Ok(doc.get_actor().clone())) } @@ -156,7 +264,7 @@ pub unsafe extern "C" fn AMgetActor<'a>(doc: *mut AMdoc) -> *mut AMresult<'a> { /// # Safety /// doc must be a pointer to a valid AMdoc #[no_mangle] -pub unsafe extern "C" fn AMgetActorHex<'a>(doc: *mut AMdoc) -> *mut AMresult<'a> { +pub unsafe extern "C" fn AMgetActorHex(doc: *mut AMdoc) -> *mut AMresult { let doc = to_doc!(doc); let hex_str = doc.get_actor().to_hex_string(); let value = am::Value::Scalar(Cow::Owned(am::ScalarValue::Str(SmolStr::new(hex_str)))); @@ -169,7 +277,7 @@ pub unsafe extern "C" fn AMgetActorHex<'a>(doc: *mut AMdoc) -> *mut AMresult<'a> /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] value A pointer to an array of bytes. /// \param[in] count The number of bytes to copy from \p value. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p value must be a valid address. 
/// \pre `0 <=` \p count `<=` length of \p value. @@ -179,13 +287,13 @@ pub unsafe extern "C" fn AMgetActorHex<'a>(doc: *mut AMdoc) -> *mut AMresult<'a> /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// value must be a byte array of length `count` +/// value must be a byte array of length `>= count` #[no_mangle] -pub unsafe extern "C" fn AMsetActor<'a>( +pub unsafe extern "C" fn AMsetActor( doc: *mut AMdoc, value: *const u8, count: usize, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); let slice = std::slice::from_raw_parts(value, count); doc.set_actor(am::ActorId::from(slice)); @@ -197,7 +305,7 @@ pub unsafe extern "C" fn AMsetActor<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] hex_str A string of hexadecimal characters. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p hex_str must be a valid address. 
/// \warning To avoid a memory leak, the returned pointer must be deallocated @@ -208,10 +316,7 @@ pub unsafe extern "C" fn AMsetActor<'a>( /// doc must be a pointer to a valid AMdoc /// hex_str must be a null-terminated array of `c_char` #[no_mangle] -pub unsafe extern "C" fn AMsetActorHex<'a>( - doc: *mut AMdoc, - hex_str: *const c_char, -) -> *mut AMresult<'a> { +pub unsafe extern "C" fn AMsetActorHex(doc: *mut AMdoc, hex_str: *const c_char) -> *mut AMresult { let doc = to_doc!(doc); let slice = std::slice::from_raw_parts(hex_str as *const u8, libc::strlen(hex_str)); to_result(match hex::decode(slice) { @@ -257,8 +362,9 @@ pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { if let Some(result) = result.as_mut() { match result { AMresult::ActorId(_) | AMresult::ObjId(_) => 1, + AMresult::ChangeHashes(change_hashes) => change_hashes.len(), AMresult::Changes(changes) => changes.len(), - AMresult::Error(_) | AMresult::Nothing => 0, + AMresult::Error(_) | AMresult::Void => 0, AMresult::Scalars(vec, _) => vec.len(), } } else { @@ -279,8 +385,8 @@ pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { /// # Safety /// result must be a pointer to a valid AMresult #[no_mangle] -pub unsafe extern "C" fn AMresultValue(result: *mut AMresult, index: usize) -> AMvalue { - let mut value = AMvalue::Nothing; +pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult, index: usize) -> AMvalue<'a> { + let mut value = AMvalue::Void; if let Some(result) = result.as_mut() { match result { AMresult::ActorId(actor_id) => { @@ -288,20 +394,24 @@ pub unsafe extern "C" fn AMresultValue(result: *mut AMresult, index: usize) -> A value = AMvalue::ActorId(actor_id.into()); } } - AMresult::Changes(_) => {} + AMresult::ChangeHashes(change_hashes) => { + value = AMvalue::ChangeHashes(AMchangeHashes::new(change_hashes)); + } + AMresult::Changes(changes) => { + value = AMvalue::Changes(AMchanges::new(changes)); + } AMresult::Error(_) => {} 
AMresult::ObjId(obj_id) => { if index == 0 { value = AMvalue::ObjId(obj_id); } } - AMresult::Nothing => (), AMresult::Scalars(vec, hosted_str) => { if let Some(element) = vec.get(index) { match element { am::Value::Scalar(scalar) => match scalar.as_ref() { am::ScalarValue::Boolean(flag) => { - value = AMvalue::Boolean(*flag as i8); + value = AMvalue::Boolean(*flag); } am::ScalarValue::Bytes(bytes) => { value = AMvalue::Bytes(bytes.into()); @@ -337,19 +447,19 @@ pub unsafe extern "C" fn AMresultValue(result: *mut AMresult, index: usize) -> A } } } + AMresult::Void => (), } }; value } /// \memberof AMdoc -/// \brief Puts a signed integer as the value of a key in a map object. +/// \brief Deletes a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated @@ -361,12 +471,69 @@ pub unsafe extern "C" fn AMresultValue(result: *mut AMresult, index: usize) -> A /// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapPutInt<'a>( +pub unsafe extern "C" fn AMmapDelete( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, + key: *const c_char, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.delete(to_obj_id!(obj_id), to_str(key))) +} + +/// \memberof AMdoc +/// \brief Puts a boolean as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. 
+/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] value A boolean. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeResult()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapPutBool( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, + value: bool, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) +} + +/// \memberof AMdoc +/// \brief Puts a signed integer as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeResult()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapPutInt( + doc: *mut AMdoc, + obj_id: *const AMobjId, key: *const c_char, value: i64, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) } @@ -376,9 +543,9 @@ pub unsafe extern "C" fn AMmapPutInt<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit unsigned integer. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated @@ -390,12 +557,12 @@ pub unsafe extern "C" fn AMmapPutInt<'a>( /// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapPutUint<'a>( +pub unsafe extern "C" fn AMmapPutUint( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, key: *const c_char, value: u64, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) } @@ -405,9 +572,9 @@ pub unsafe extern "C" fn AMmapPutUint<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. 
/// \param[in] value A UTF-8 string. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \pre \p value must be a valid address. @@ -421,12 +588,12 @@ pub unsafe extern "C" fn AMmapPutUint<'a>( /// key must be a c string of the map key to be used /// value must be a null-terminated array of `c_char` #[no_mangle] -pub unsafe extern "C" fn AMmapPutStr<'a>( +pub unsafe extern "C" fn AMmapPutStr( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, key: *const c_char, value: *const c_char, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); to_result(doc.put(to_obj_id!(obj_id), to_str(key), to_str(value))) } @@ -436,10 +603,10 @@ pub unsafe extern "C" fn AMmapPutStr<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A pointer to an array of bytes. /// \param[in] count The number of bytes to copy from \p value. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \pre \p value must be a valid address. 
@@ -452,19 +619,18 @@ pub unsafe extern "C" fn AMmapPutStr<'a>( /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used -/// value must be a byte array of length `count` +/// value must be a byte array of length `>= count` #[no_mangle] -pub unsafe extern "C" fn AMmapPutBytes<'a>( +pub unsafe extern "C" fn AMmapPutBytes( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, key: *const c_char, value: *const u8, count: usize, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); - let slice = std::slice::from_raw_parts(value, count); let mut vec = Vec::new(); - vec.extend_from_slice(slice); + vec.extend_from_slice(std::slice::from_raw_parts(value, count)); to_result(doc.put(to_obj_id!(obj_id), to_str(key), vec)) } @@ -473,9 +639,9 @@ pub unsafe extern "C" fn AMmapPutBytes<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit float. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. 
/// \warning To avoid a memory leak, the returned pointer must be deallocated @@ -487,12 +653,12 @@ pub unsafe extern "C" fn AMmapPutBytes<'a>( /// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapPutF64<'a>( +pub unsafe extern "C" fn AMmapPutF64( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, key: *const c_char, value: f64, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) } @@ -502,9 +668,9 @@ pub unsafe extern "C" fn AMmapPutF64<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated @@ -516,12 +682,12 @@ pub unsafe extern "C" fn AMmapPutF64<'a>( /// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapPutCounter<'a>( +pub unsafe extern "C" fn AMmapPutCounter( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, key: *const c_char, value: i64, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); to_result(doc.put( to_obj_id!(obj_id), @@ -535,9 +701,9 @@ pub unsafe extern "C" fn AMmapPutCounter<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. 
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated @@ -549,12 +715,12 @@ pub unsafe extern "C" fn AMmapPutCounter<'a>( /// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapPutTimestamp<'a>( +pub unsafe extern "C" fn AMmapPutTimestamp( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, key: *const c_char, value: i64, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); to_result(doc.put( to_obj_id!(obj_id), @@ -568,8 +734,8 @@ pub unsafe extern "C" fn AMmapPutTimestamp<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. 
/// \warning To avoid a memory leak, the returned p ointer must be deallocated @@ -581,11 +747,11 @@ pub unsafe extern "C" fn AMmapPutTimestamp<'a>( /// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapPutNull<'a>( +pub unsafe extern "C" fn AMmapPutNull( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, key: *const c_char, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); to_result(doc.put(to_obj_id!(obj_id), to_str(key), ())) } @@ -595,7 +761,7 @@ pub unsafe extern "C" fn AMmapPutNull<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] obj_type An `AMobjIdType` enum tag. /// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct. /// \pre \p doc must be a valid address. @@ -609,12 +775,12 @@ pub unsafe extern "C" fn AMmapPutNull<'a>( /// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapPutObject<'a>( +pub unsafe extern "C" fn AMmapPutObject( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, key: *const c_char, obj_type: AMobjType, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); to_result(doc.put_object(to_obj_id!(obj_id), to_str(key), obj_type.into())) } @@ -624,10 +790,10 @@ pub unsafe extern "C" fn AMmapPutObject<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index within the list object identified by \p obj. +/// \param[in] index An index within the list object identified by \p obj_id. 
/// \return A pointer to an `AMresult` struct. /// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned pointer must be deallocated /// with `AMfreeResult()`. /// \internal @@ -636,11 +802,11 @@ pub unsafe extern "C" fn AMmapPutObject<'a>( /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistGet<'a>( +pub unsafe extern "C" fn AMlistGet( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, index: usize, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); to_result(doc.get(to_obj_id!(obj_id), index)) } @@ -650,7 +816,7 @@ pub unsafe extern "C" fn AMlistGet<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \return A pointer to an `AMresult` struct. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. @@ -663,27 +829,90 @@ pub unsafe extern "C" fn AMlistGet<'a>( /// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] -pub unsafe extern "C" fn AMmapGet<'a>( +pub unsafe extern "C" fn AMmapGet( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, key: *const c_char, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); to_result(doc.get(to_obj_id!(obj_id), to_str(key))) } +/// \memberof AMdoc +/// \brief Deletes an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. 
+/// \param[in] index An index in the list object identified by \p obj_id. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeResult()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistDelete( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.delete(to_obj_id!(obj_id), index)) +} + +/// \memberof AMdoc +/// \brief Puts a boolean as the value at an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. +/// \param[in] value A boolean. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeResult()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistPutBool( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, + insert: bool, + value: bool, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let value = am::ScalarValue::Boolean(value); + to_result(if insert { + doc.insert(obj_id, index, value) + } else { + doc.put(obj_id, index, value) + }) +} + /// \memberof AMdoc /// \brief Puts an array of bytes as the value at an index in a list object. 
/// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj. -/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. /// \param[in] value A pointer to an array of bytes. /// \param[in] count The number of bytes to copy from \p value. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \pre \p value must be a valid address. /// \pre `0 <=` \p count `<=` length of \p value. 
/// \warning To avoid a memory leak, the returned pointer must be deallocated @@ -693,21 +922,20 @@ pub unsafe extern "C" fn AMmapGet<'a>( /// # Safety /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL -/// value must be a byte array of length `count` +/// value must be a byte array of length `>= count` #[no_mangle] -pub unsafe extern "C" fn AMlistPutBytes<'a>( +pub unsafe extern "C" fn AMlistPutBytes( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, index: usize, insert: bool, value: *const u8, count: usize, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); - let slice = std::slice::from_raw_parts(value, count); let mut vec = Vec::new(); - vec.extend_from_slice(slice); + vec.extend_from_slice(std::slice::from_raw_parts(value, count)); to_result(if insert { doc.insert(obj_id, index, vec) } else { @@ -720,12 +948,13 @@ pub unsafe extern "C" fn AMlistPutBytes<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj. -/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned pointer must be deallocated /// with `AMfreeResult()`. 
/// \internal @@ -734,13 +963,13 @@ pub unsafe extern "C" fn AMlistPutBytes<'a>( /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistPutCounter<'a>( +pub unsafe extern "C" fn AMlistPutCounter( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, index: usize, insert: bool, value: i64, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); let value = am::ScalarValue::Counter(value.into()); @@ -756,12 +985,13 @@ pub unsafe extern "C" fn AMlistPutCounter<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj. -/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. /// \param[in] value A 64-bit float. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned pointer must be deallocated /// with `AMfreeResult()`. 
/// \internal @@ -770,13 +1000,13 @@ pub unsafe extern "C" fn AMlistPutCounter<'a>( /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistPutF64<'a>( +pub unsafe extern "C" fn AMlistPutF64( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, index: usize, insert: bool, value: f64, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); to_result(if insert { @@ -791,12 +1021,13 @@ pub unsafe extern "C" fn AMlistPutF64<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj. -/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned pointer must be deallocated /// with `AMfreeResult()`. 
/// \internal @@ -805,13 +1036,13 @@ pub unsafe extern "C" fn AMlistPutF64<'a>( /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistPutInt<'a>( +pub unsafe extern "C" fn AMlistPutInt( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, index: usize, insert: bool, value: i64, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); to_result(if insert { @@ -826,11 +1057,12 @@ pub unsafe extern "C" fn AMlistPutInt<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj. -/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned pointer must be deallocated /// with `AMfreeResult()`. 
/// \internal @@ -839,12 +1071,12 @@ pub unsafe extern "C" fn AMlistPutInt<'a>( /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistPutNull<'a>( +pub unsafe extern "C" fn AMlistPutNull( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, index: usize, insert: bool, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); let value = (); @@ -860,12 +1092,13 @@ pub unsafe extern "C" fn AMlistPutNull<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj. -/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. /// \param[in] obj_type An `AMobjIdType` enum tag. /// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct. /// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned pointer must be deallocated /// with `AMfreeResult()`. 
/// \internal @@ -874,13 +1107,13 @@ pub unsafe extern "C" fn AMlistPutNull<'a>( /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistPutObject<'a>( +pub unsafe extern "C" fn AMlistPutObject( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, index: usize, insert: bool, obj_type: AMobjType, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); let value = obj_type.into(); @@ -896,12 +1129,13 @@ pub unsafe extern "C" fn AMlistPutObject<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj. -/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. /// \param[in] value A UTF-8 string. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \pre \p value must be a valid address. /// \warning To avoid a memory leak, the returned pointer must be deallocated /// with `AMfreeResult()`. 
@@ -912,13 +1146,13 @@ pub unsafe extern "C" fn AMlistPutObject<'a>( /// obj_id must be a pointer to a valid AMobjId or NULL /// value must be a null-terminated array of `c_char` #[no_mangle] -pub unsafe extern "C" fn AMlistPutStr<'a>( +pub unsafe extern "C" fn AMlistPutStr( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, index: usize, insert: bool, value: *const c_char, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); let value = to_str(value); @@ -934,12 +1168,13 @@ pub unsafe extern "C" fn AMlistPutStr<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj. -/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned pointer must be deallocated /// with `AMfreeResult()`. 
/// \internal @@ -948,13 +1183,13 @@ pub unsafe extern "C" fn AMlistPutStr<'a>( /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistPutTimestamp<'a>( +pub unsafe extern "C" fn AMlistPutTimestamp( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, index: usize, insert: bool, value: i64, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); let value = am::ScalarValue::Timestamp(value); @@ -970,12 +1205,13 @@ pub unsafe extern "C" fn AMlistPutTimestamp<'a>( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj. -/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. /// \param[in] value A 64-bit unsigned integer. -/// \return A pointer to an `AMresult` struct containing nothing. +/// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned pointer must be deallocated /// with `AMfreeResult()`. 
/// \internal @@ -984,13 +1220,13 @@ pub unsafe extern "C" fn AMlistPutTimestamp<'a>( /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] -pub unsafe extern "C" fn AMlistPutUint<'a>( +pub unsafe extern "C" fn AMlistPutUint( doc: *mut AMdoc, - obj_id: *mut AMobjId, + obj_id: *const AMobjId, index: usize, insert: bool, value: u64, -) -> *mut AMresult<'a> { +) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); to_result(if insert { @@ -1036,11 +1272,11 @@ pub unsafe extern "C" fn AMerrorMessage(result: *mut AMresult) -> *const c_char } /// \memberof AMdoc -/// \brief Gets the size of an `AMobjId` struct. +/// \brief Gets the size of an object. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \return The count of values in \p obj. +/// \return The count of values in the object identified by \p obj_id. /// \pre \p doc must be a valid address. /// \internal /// @@ -1057,25 +1293,97 @@ pub unsafe extern "C" fn AMobjSize(doc: *const AMdoc, obj_id: *const AMobjId) -> } /// \memberof AMdoc -/// \brief Deallocates the storage for an `AMobjId` struct. +/// \brief Gets the historical size of an object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] change A pointer to an `AMchange` struct or `NULL`. +/// \return The count of values in the object identified by \p obj_id at +/// \p change. /// \pre \p doc must be a valid address. -/// \pre \p obj_id must be a valid address. -/// \note An `AMobjId` struct is automatically deallocated along with its owning -/// `AMdoc` struct, this function just enables an `AMobjId` struct to be -/// deallocated sooner than that. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL +/// change must be a pointer to a valid AMchange or NULL #[no_mangle] -pub unsafe extern "C" fn AMfreeObjId(doc: *mut AMdoc, obj_id: *const AMobjId) { - if let Some(doc) = doc.as_mut() { - if let Some(obj_id) = obj_id.as_ref() { - doc.drop_obj_id(obj_id); - }; +pub unsafe extern "C" fn AMobjSizeAt( + doc: *const AMdoc, + obj_id: *const AMobjId, + change: *const AMchange, +) -> usize { + if let Some(doc) = doc.as_ref() { + if let Some(change) = change.as_ref() { + let change: &am::Change = change.as_ref(); + let change_hashes = vec![change.hash]; + return doc.length_at(to_obj_id!(obj_id), &change_hashes); + } }; + 0 +} + +/// \memberof AMchange +/// \brief Gets the change hash within an `AMchange` struct. +/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \return A change hash as an `AMbyteSpan` struct. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMgetChangeHash(change: *const AMchange) -> AMbyteSpan { + match change.as_ref() { + Some(change) => change.into(), + None => AMbyteSpan::default(), + } +} + +/// \memberof AMdoc +/// \brief Gets the changes added to \p doc by their respective hashes. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] have_deps A pointer to an `AMchangeHashes` struct or `NULL`. +/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeResult()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetChanges( + doc: *mut AMdoc, + have_deps: *const AMchangeHashes, +) -> *mut AMresult { + let doc = to_doc!(doc); + let empty_deps = Vec::::new(); + let have_deps = match have_deps.as_ref() { + Some(have_deps) => have_deps.as_ref(), + None => &empty_deps, + }; + to_result(Ok(doc.get_changes(have_deps))) +} + +/// \memberof AMchange +/// \brief Gets the message within an `AMchange` struct. +/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \return A UTF-8 string or `NULL`. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMgetMessage(change: *const AMchange) -> *const c_char { + if let Some(change) = change.as_ref() { + if let Some(c_message) = change.c_message() { + return c_message.as_ptr(); + } + } + std::ptr::null::() } diff --git a/automerge-c/test/group_state.c b/automerge-c/test/group_state.c index a0a2a049..d59b6b7f 100644 --- a/automerge-c/test/group_state.c +++ b/automerge-c/test/group_state.c @@ -5,7 +5,7 @@ int group_setup(void** state) { GroupState* group_state = calloc(1, sizeof(GroupState)); - group_state->doc = AMallocDoc(); + group_state->doc = AMalloc(); *state = group_state; return 0; } From 3cf990eabf4298596606bfd5e17f66f956815f96 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Wed, 4 May 2022 07:45:05 -0500 Subject: [PATCH 341/730] Fixed some minor inconsistencies in `quickstart.c`. 
--- automerge-c/examples/quickstart.c | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index 56b8eaa7..d5a2d9a8 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -52,11 +52,11 @@ int main(int argc, char** argv) { AMfreeResult(result); AMfreeDoc(doc2); - AMresult* save_result = AMsave(doc1); - AMvalue save_value = test(save_result, AM_VALUE_BYTES); - AMbyteSpan binary = save_value.bytes; + AMresult* const save_result = AMsave(doc1); + value = test(save_result, AM_VALUE_BYTES); + AMbyteSpan binary = value.bytes; doc2 = AMalloc(); - AMresult* load_result = AMload(doc2, binary.src, binary.count); + AMresult* const load_result = AMload(doc2, binary.src, binary.count); AMfreeResult(load_result); AMfreeResult(save_result); From 729752dac2386df52c8b7433db90918cbedf4e16 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Wed, 4 May 2022 08:27:15 -0500 Subject: [PATCH 342/730] De-emphasized the `AMload()` call's result. 
--- automerge-c/examples/quickstart.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index d5a2d9a8..afc3e317 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -56,8 +56,8 @@ int main(int argc, char** argv) { value = test(save_result, AM_VALUE_BYTES); AMbyteSpan binary = value.bytes; doc2 = AMalloc(); - AMresult* const load_result = AMload(doc2, binary.src, binary.count); - AMfreeResult(load_result); + result = AMload(doc2, binary.src, binary.count); + AMfreeResult(result); AMfreeResult(save_result); result = AMmapPutBool(doc1, card1, "done", true); From 54042bcf963b9fd4f4ade9aecdf1adddf5d5dc85 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 4 May 2022 09:50:27 -0400 Subject: [PATCH 343/730] and unimplemented double ended iterator --- automerge/src/values.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/automerge/src/values.rs b/automerge/src/values.rs index d713d9af..90f596f3 100644 --- a/automerge/src/values.rs +++ b/automerge/src/values.rs @@ -52,3 +52,9 @@ impl<'a> Iterator for Values<'a> { self.range.next_value(self.doc) } } + +impl<'a> DoubleEndedIterator for Values<'a> { + fn next_back(&mut self) -> Option { + unimplemented!() + } +} From fb8f3e5d4eff60fe81d60898602ee3e0970b4835 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 4 May 2022 10:09:50 -0400 Subject: [PATCH 344/730] fixme: performance --- automerge/src/query/list_range.rs | 3 +++ automerge/src/query/map_range.rs | 3 +++ 2 files changed, 6 insertions(+) diff --git a/automerge/src/query/list_range.rs b/automerge/src/query/list_range.rs index e663b2e6..d3206af3 100644 --- a/automerge/src/query/list_range.rs +++ b/automerge/src/query/list_range.rs @@ -40,6 +40,9 @@ impl<'a, R: RangeBounds> ValueIter<'a> for ListRange<'a, R> { impl<'a, R: RangeBounds> Iterator for ListRange<'a, R> { type Item = (usize, Value<'a>, OpId); + // FIXME: this is fine if 
we're scanning everything (see values()) but could be much more efficient + // if we're scanning a narrow range on a large sequence ... we should be able to seek to the starting + // point and stop at the end point and not needless scan all the ops before and after the range fn next(&mut self) -> Option { for i in self.index..self.index_back { let op = self.root_child.get(i)?; diff --git a/automerge/src/query/map_range.rs b/automerge/src/query/map_range.rs index c5060e6a..8120dc55 100644 --- a/automerge/src/query/map_range.rs +++ b/automerge/src/query/map_range.rs @@ -42,6 +42,9 @@ impl<'a, R: RangeBounds> MapRange<'a, R> { impl<'a, R: RangeBounds> Iterator for MapRange<'a, R> { type Item = (&'a str, Value<'a>, OpId); + // FIXME: this is fine if we're scanning everything (see values()) but could be much more efficient + // if we're scanning a narrow range on a map with many keys... we should be able to seek to the starting + // point and stop at the end point and not needless scan all the ops before and after the range fn next(&mut self) -> Option { for i in self.index..self.index_back { let op = self.root_child.get(i)?; From 5b15a045164ae2fa76ef4cfb595389ce44cab727 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 5 May 2022 14:52:01 +0100 Subject: [PATCH 345/730] Some tidies --- automerge/src/automerge.rs | 2 +- automerge/src/op_set.rs | 4 ++-- automerge/src/op_tree.rs | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index f62ce9bb..48efcde2 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -322,7 +322,7 @@ impl Automerge { } } - /// Historical version of [`range`](Self::list_range_at). + /// Historical version of [`list_range`](Self::list_range). 
pub fn list_range_at, R: RangeBounds>( &self, obj: O, diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index c03870c5..e1fe7501 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -81,7 +81,7 @@ impl OpSetInternal { range: R, ) -> Option> { if let Some(tree) = self.trees.get(&obj) { - tree.internal.range(range, &self.m) + tree.internal.map_range(range, &self.m) } else { None } @@ -94,7 +94,7 @@ impl OpSetInternal { clock: Clock, ) -> Option> { if let Some(tree) = self.trees.get(&obj) { - tree.internal.range_at(range, &self.m, clock) + tree.internal.map_range_at(range, &self.m, clock) } else { None } diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 2553c9e9..ea77c311 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -77,7 +77,7 @@ impl OpTreeInternal { .map(|root| query::KeysAt::new(root, clock)) } - pub(crate) fn range<'a, R: RangeBounds>( + pub(crate) fn map_range<'a, R: RangeBounds>( &'a self, range: R, meta: &'a OpSetMetadata, @@ -87,7 +87,7 @@ impl OpTreeInternal { .map(|node| query::MapRange::new(range, node, meta)) } - pub(crate) fn range_at<'a, R: RangeBounds>( + pub(crate) fn map_range_at<'a, R: RangeBounds>( &'a self, range: R, meta: &'a OpSetMetadata, From 7d5eaa0b7f3441d9c981dbd57894ef1f4c13d2c1 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 5 May 2022 14:58:22 +0100 Subject: [PATCH 346/730] Move automerge unit tests to new file for clarity --- automerge/src/automerge.rs | 1200 +----------------------------- automerge/src/automerge/tests.rs | 1191 +++++++++++++++++++++++++++++ 2 files changed, 1194 insertions(+), 1197 deletions(-) create mode 100644 automerge/src/automerge/tests.rs diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 48efcde2..f0963ec4 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -22,6 +22,9 @@ use crate::{ use crate::{AutomergeError, Change, Prop}; use serde::Serialize; +#[cfg(test)] +mod tests; + 
#[derive(Debug, Clone, PartialEq)] pub(crate) enum Actor { Unused(ActorId), @@ -1102,1200 +1105,3 @@ pub(crate) struct SpanInfo { pub(crate) span_type: String, pub(crate) value: ScalarValue, } - -#[cfg(test)] -mod tests { - use itertools::Itertools; - use pretty_assertions::assert_eq; - - use super::*; - use crate::op_tree::B; - use crate::transaction::Transactable; - use crate::*; - use std::convert::TryInto; - - #[test] - fn insert_op() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor(ActorId::random()); - let mut tx = doc.transaction(); - tx.put(ROOT, "hello", "world")?; - tx.get(ROOT, "hello")?; - tx.commit(); - Ok(()) - } - - #[test] - fn test_set() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - // setting a scalar value shouldn't return an opid as no object was created. - tx.put(ROOT, "a", 1)?; - - // setting the same value shouldn't return an opid as there is no change. - tx.put(ROOT, "a", 1)?; - - assert_eq!(tx.pending_ops(), 1); - - let map = tx.put_object(ROOT, "b", ObjType::Map)?; - // object already exists at b but setting a map again overwrites it so we get an opid. 
- tx.put(map, "a", 2)?; - - tx.put_object(ROOT, "b", ObjType::Map)?; - - assert_eq!(tx.pending_ops(), 4); - let map = tx.get(ROOT, "b").unwrap().unwrap().1; - assert_eq!(tx.get(&map, "a")?, None); - - tx.commit(); - Ok(()) - } - - #[test] - fn test_list() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor(ActorId::random()); - let mut tx = doc.transaction(); - let list_id = tx.put_object(ROOT, "items", ObjType::List)?; - tx.put(ROOT, "zzz", "zzzval")?; - assert!(tx.get(ROOT, "items")?.unwrap().1 == list_id); - tx.insert(&list_id, 0, "a")?; - tx.insert(&list_id, 0, "b")?; - tx.insert(&list_id, 2, "c")?; - tx.insert(&list_id, 1, "d")?; - assert!(tx.get(&list_id, 0)?.unwrap().0 == "b".into()); - assert!(tx.get(&list_id, 1)?.unwrap().0 == "d".into()); - assert!(tx.get(&list_id, 2)?.unwrap().0 == "a".into()); - assert!(tx.get(&list_id, 3)?.unwrap().0 == "c".into()); - assert!(tx.length(&list_id) == 4); - tx.commit(); - doc.save(); - Ok(()) - } - - #[test] - fn test_del() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor(ActorId::random()); - let mut tx = doc.transaction(); - tx.put(ROOT, "xxx", "xxx")?; - assert!(tx.get(ROOT, "xxx")?.is_some()); - tx.delete(ROOT, "xxx")?; - assert!(tx.get(ROOT, "xxx")?.is_none()); - tx.commit(); - Ok(()) - } - - #[test] - fn test_inc() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - tx.put(ROOT, "counter", ScalarValue::counter(10))?; - assert!(tx.get(ROOT, "counter")?.unwrap().0 == Value::counter(10)); - tx.increment(ROOT, "counter", 10)?; - assert!(tx.get(ROOT, "counter")?.unwrap().0 == Value::counter(20)); - tx.increment(ROOT, "counter", -5)?; - assert!(tx.get(ROOT, "counter")?.unwrap().0 == Value::counter(15)); - tx.commit(); - Ok(()) - } - - #[test] - fn test_save_incremental() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - - let mut tx = doc.transaction(); - tx.put(ROOT, "foo", 1)?; - 
tx.commit(); - - let save1 = doc.save(); - - let mut tx = doc.transaction(); - tx.put(ROOT, "bar", 2)?; - tx.commit(); - - let save2 = doc.save_incremental(); - - let mut tx = doc.transaction(); - tx.put(ROOT, "baz", 3)?; - tx.commit(); - - let save3 = doc.save_incremental(); - - let mut save_a: Vec = vec![]; - save_a.extend(&save1); - save_a.extend(&save2); - save_a.extend(&save3); - - assert!(doc.save_incremental().is_empty()); - - let save_b = doc.save(); - - assert!(save_b.len() < save_a.len()); - - let mut doc_a = Automerge::load(&save_a)?; - let mut doc_b = Automerge::load(&save_b)?; - - assert!(doc_a.get_all(ROOT, "baz")? == doc_b.get_all(ROOT, "baz")?); - - assert!(doc_a.save() == doc_b.save()); - - Ok(()) - } - - #[test] - fn test_save_text() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let text = tx.put_object(ROOT, "text", ObjType::Text)?; - tx.commit(); - let heads1 = doc.get_heads(); - let mut tx = doc.transaction(); - tx.splice_text(&text, 0, 0, "hello world")?; - tx.commit(); - let heads2 = doc.get_heads(); - let mut tx = doc.transaction(); - tx.splice_text(&text, 6, 0, "big bad ")?; - tx.commit(); - let heads3 = doc.get_heads(); - - assert!(&doc.text(&text)? == "hello big bad world"); - assert!(&doc.text_at(&text, &heads1)?.is_empty()); - assert!(&doc.text_at(&text, &heads2)? == "hello world"); - assert!(&doc.text_at(&text, &heads3)? 
== "hello big bad world"); - - Ok(()) - } - - #[test] - fn test_props_vals_at() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor("aaaa".try_into().unwrap()); - let mut tx = doc.transaction(); - tx.put(ROOT, "prop1", "val1")?; - tx.commit(); - doc.get_heads(); - let heads1 = doc.get_heads(); - let mut tx = doc.transaction(); - tx.put(ROOT, "prop1", "val2")?; - tx.commit(); - doc.get_heads(); - let heads2 = doc.get_heads(); - let mut tx = doc.transaction(); - tx.put(ROOT, "prop2", "val3")?; - tx.commit(); - doc.get_heads(); - let heads3 = doc.get_heads(); - let mut tx = doc.transaction(); - tx.delete(ROOT, "prop1")?; - tx.commit(); - doc.get_heads(); - let heads4 = doc.get_heads(); - let mut tx = doc.transaction(); - tx.put(ROOT, "prop3", "val4")?; - tx.commit(); - doc.get_heads(); - let heads5 = doc.get_heads(); - assert!(doc.keys_at(ROOT, &heads1).collect_vec() == vec!["prop1".to_owned()]); - assert_eq!(doc.length_at(ROOT, &heads1), 1); - assert!(doc.get_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); - assert!(doc.get_at(ROOT, "prop2", &heads1)? == None); - assert!(doc.get_at(ROOT, "prop3", &heads1)? == None); - - assert!(doc.keys_at(ROOT, &heads2).collect_vec() == vec!["prop1".to_owned()]); - assert_eq!(doc.length_at(ROOT, &heads2), 1); - assert!(doc.get_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); - assert!(doc.get_at(ROOT, "prop2", &heads2)? == None); - assert!(doc.get_at(ROOT, "prop3", &heads2)? == None); - - assert!( - doc.keys_at(ROOT, &heads3).collect_vec() - == vec!["prop1".to_owned(), "prop2".to_owned()] - ); - assert_eq!(doc.length_at(ROOT, &heads3), 2); - assert!(doc.get_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); - assert!(doc.get_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); - assert!(doc.get_at(ROOT, "prop3", &heads3)? 
== None); - - assert!(doc.keys_at(ROOT, &heads4).collect_vec() == vec!["prop2".to_owned()]); - assert_eq!(doc.length_at(ROOT, &heads4), 1); - assert!(doc.get_at(ROOT, "prop1", &heads4)? == None); - assert!(doc.get_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); - assert!(doc.get_at(ROOT, "prop3", &heads4)? == None); - - assert!( - doc.keys_at(ROOT, &heads5).collect_vec() - == vec!["prop2".to_owned(), "prop3".to_owned()] - ); - assert_eq!(doc.length_at(ROOT, &heads5), 2); - assert_eq!(doc.length(ROOT), 2); - assert!(doc.get_at(ROOT, "prop1", &heads5)? == None); - assert!(doc.get_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); - assert!(doc.get_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); - - assert_eq!(doc.keys_at(ROOT, &[]).count(), 0); - assert_eq!(doc.length_at(ROOT, &[]), 0); - assert!(doc.get_at(ROOT, "prop1", &[])? == None); - assert!(doc.get_at(ROOT, "prop2", &[])? == None); - assert!(doc.get_at(ROOT, "prop3", &[])? == None); - Ok(()) - } - - #[test] - fn test_len_at() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor("aaaa".try_into().unwrap()); - - let mut tx = doc.transaction(); - let list = tx.put_object(ROOT, "list", ObjType::List)?; - tx.commit(); - let heads1 = doc.get_heads(); - - let mut tx = doc.transaction(); - tx.insert(&list, 0, 10)?; - tx.commit(); - let heads2 = doc.get_heads(); - - let mut tx = doc.transaction(); - tx.put(&list, 0, 20)?; - tx.insert(&list, 0, 30)?; - tx.commit(); - let heads3 = doc.get_heads(); - - let mut tx = doc.transaction(); - tx.put(&list, 1, 40)?; - tx.insert(&list, 1, 50)?; - tx.commit(); - let heads4 = doc.get_heads(); - - let mut tx = doc.transaction(); - tx.delete(&list, 2)?; - tx.commit(); - let heads5 = doc.get_heads(); - - let mut tx = doc.transaction(); - tx.delete(&list, 0)?; - tx.commit(); - let heads6 = doc.get_heads(); - - assert!(doc.length_at(&list, &heads1) == 0); - assert!(doc.get_at(&list, 0, &heads1)?.is_none()); - - 
assert!(doc.length_at(&list, &heads2) == 1); - assert!(doc.get_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10)); - - assert!(doc.length_at(&list, &heads3) == 2); - assert!(doc.get_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30)); - assert!(doc.get_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20)); - - assert!(doc.length_at(&list, &heads4) == 3); - assert!(doc.get_at(&list, 0, &heads4)?.unwrap().0 == Value::int(30)); - assert!(doc.get_at(&list, 1, &heads4)?.unwrap().0 == Value::int(50)); - assert!(doc.get_at(&list, 2, &heads4)?.unwrap().0 == Value::int(40)); - - assert!(doc.length_at(&list, &heads5) == 2); - assert!(doc.get_at(&list, 0, &heads5)?.unwrap().0 == Value::int(30)); - assert!(doc.get_at(&list, 1, &heads5)?.unwrap().0 == Value::int(50)); - - assert!(doc.length_at(&list, &heads6) == 1); - assert!(doc.length(&list) == 1); - assert!(doc.get_at(&list, 0, &heads6)?.unwrap().0 == Value::int(50)); - - Ok(()) - } - - #[test] - fn keys_iter_map() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 3).unwrap(); - tx.put(ROOT, "b", 4).unwrap(); - tx.put(ROOT, "c", 5).unwrap(); - tx.put(ROOT, "d", 6).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 7).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 8).unwrap(); - tx.put(ROOT, "d", 9).unwrap(); - tx.commit(); - assert_eq!(doc.keys(ROOT).count(), 4); - - let mut keys = doc.keys(ROOT); - assert_eq!(keys.next(), Some("a".into())); - assert_eq!(keys.next(), Some("b".into())); - assert_eq!(keys.next(), Some("c".into())); - assert_eq!(keys.next(), Some("d".into())); - assert_eq!(keys.next(), None); - - let mut keys = doc.keys(ROOT); - assert_eq!(keys.next_back(), Some("d".into())); - assert_eq!(keys.next_back(), Some("c".into())); - assert_eq!(keys.next_back(), Some("b".into())); - assert_eq!(keys.next_back(), Some("a".into())); - assert_eq!(keys.next_back(), None); - - let mut keys = doc.keys(ROOT); - 
assert_eq!(keys.next(), Some("a".into())); - assert_eq!(keys.next_back(), Some("d".into())); - assert_eq!(keys.next_back(), Some("c".into())); - assert_eq!(keys.next_back(), Some("b".into())); - assert_eq!(keys.next_back(), None); - - let mut keys = doc.keys(ROOT); - assert_eq!(keys.next_back(), Some("d".into())); - assert_eq!(keys.next(), Some("a".into())); - assert_eq!(keys.next(), Some("b".into())); - assert_eq!(keys.next(), Some("c".into())); - assert_eq!(keys.next(), None); - let keys = doc.keys(ROOT); - assert_eq!(keys.collect::>(), vec!["a", "b", "c", "d"]); - } - - #[test] - fn keys_iter_seq() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); - tx.insert(&list, 0, 3).unwrap(); - tx.insert(&list, 1, 4).unwrap(); - tx.insert(&list, 2, 5).unwrap(); - tx.insert(&list, 3, 6).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(&list, 0, 7).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(&list, 0, 8).unwrap(); - tx.put(&list, 3, 9).unwrap(); - tx.commit(); - let actor = doc.get_actor(); - assert_eq!(doc.keys(&list).count(), 4); - - let mut keys = doc.keys(&list); - assert_eq!(keys.next(), Some(format!("2@{}", actor))); - assert_eq!(keys.next(), Some(format!("3@{}", actor))); - assert_eq!(keys.next(), Some(format!("4@{}", actor))); - assert_eq!(keys.next(), Some(format!("5@{}", actor))); - assert_eq!(keys.next(), None); - - let mut keys = doc.keys(&list); - assert_eq!(keys.next_back(), Some(format!("5@{}", actor))); - assert_eq!(keys.next_back(), Some(format!("4@{}", actor))); - assert_eq!(keys.next_back(), Some(format!("3@{}", actor))); - assert_eq!(keys.next_back(), Some(format!("2@{}", actor))); - assert_eq!(keys.next_back(), None); - - let mut keys = doc.keys(&list); - assert_eq!(keys.next(), Some(format!("2@{}", actor))); - assert_eq!(keys.next_back(), Some(format!("5@{}", actor))); - assert_eq!(keys.next_back(), Some(format!("4@{}", 
actor))); - assert_eq!(keys.next_back(), Some(format!("3@{}", actor))); - assert_eq!(keys.next_back(), None); - - let mut keys = doc.keys(&list); - assert_eq!(keys.next_back(), Some(format!("5@{}", actor))); - assert_eq!(keys.next(), Some(format!("2@{}", actor))); - assert_eq!(keys.next(), Some(format!("3@{}", actor))); - assert_eq!(keys.next(), Some(format!("4@{}", actor))); - assert_eq!(keys.next(), None); - - let keys = doc.keys(&list); - assert_eq!( - keys.collect::>(), - vec![ - format!("2@{}", actor), - format!("3@{}", actor), - format!("4@{}", actor), - format!("5@{}", actor) - ] - ); - } - - #[test] - fn range_iter_map() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 3).unwrap(); - tx.put(ROOT, "b", 4).unwrap(); - tx.put(ROOT, "c", 5).unwrap(); - tx.put(ROOT, "d", 6).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 7).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 8).unwrap(); - tx.put(ROOT, "d", 9).unwrap(); - tx.commit(); - let actor = doc.get_actor(); - assert_eq!(doc.map_range(ROOT, ..).count(), 4); - - let mut range = doc.map_range(ROOT, "b".to_owned().."d".into()); - assert_eq!( - range.next(), - Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!(range.next(), None); - - let mut range = doc.map_range(ROOT, "b".to_owned()..="d".into()); - assert_eq!( - range.next(), - Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("d", 9.into(), ExId::Id(7, actor.clone(), 0))) - ); - assert_eq!(range.next(), None); - - let mut range = doc.map_range(ROOT, ..="c".to_owned()); - assert_eq!( - range.next(), - Some(("a", 8.into(), ExId::Id(6, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("b", 4.into(), 
ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!(range.next(), None); - - let range = doc.map_range(ROOT, "a".to_owned()..); - assert_eq!( - range.collect::>(), - vec![ - ("a", 8.into(), ExId::Id(6, actor.clone(), 0)), - ("b", 4.into(), ExId::Id(2, actor.clone(), 0)), - ("c", 5.into(), ExId::Id(3, actor.clone(), 0)), - ("d", 9.into(), ExId::Id(7, actor.clone(), 0)), - ] - ); - } - - #[test] - fn insert_at_index() { - let mut doc = AutoCommit::new(); - - let list = &doc.put_object(ROOT, "list", ObjType::List).unwrap(); - doc.insert(list, 0, 0).unwrap(); - doc.insert(list, 0, 1).unwrap(); // both inserts at the same index - - assert_eq!(doc.length(list), 2); - assert_eq!(doc.keys(list).count(), 2); - assert_eq!(doc.list_range(list, ..).count(), 2); - } - - #[test] - fn get_list_values() -> Result<(), AutomergeError> { - let mut doc1 = Automerge::new(); - let mut tx = doc1.transaction(); - let list = tx.put_object(ROOT, "list", ObjType::List)?; - - // insert elements - tx.insert(&list, 0, "First")?; - tx.insert(&list, 1, "Second")?; - tx.insert(&list, 2, "Third")?; - tx.insert(&list, 3, "Forth")?; - tx.insert(&list, 4, "Fith")?; - tx.insert(&list, 5, "Sixth")?; - tx.insert(&list, 6, "Seventh")?; - tx.insert(&list, 7, "Eights")?; - tx.commit(); - - let v1 = doc1.get_heads(); - let mut doc2 = doc1.fork(); - - let mut tx = doc1.transaction(); - tx.put(&list, 2, "Third V2")?; - tx.commit(); - - let mut tx = doc2.transaction(); - tx.put(&list, 2, "Third V3")?; - tx.commit(); - - doc1.merge(&mut doc2)?; - - assert_eq!(doc1.list_range(&list, ..).count(), 8); - - for (i, val1, id) in doc1.list_range(&list, ..) 
{ - let val2 = doc1.get(&list, i)?; - assert_eq!(Some((val1, id)), val2); - } - - assert_eq!(doc1.list_range(&list, 3..6).count(), 3); - assert_eq!(doc1.list_range(&list, 3..6).next().unwrap().0, 3); - assert_eq!(doc1.list_range(&list, 3..6).last().unwrap().0, 5); - - for (i, val1, id) in doc1.list_range(&list, 3..6) { - let val2 = doc1.get(&list, i)?; - assert_eq!(Some((val1, id)), val2); - } - - assert_eq!(doc1.list_range_at(&list, .., &v1).count(), 8); - for (i, val1, id) in doc1.list_range_at(&list, .., &v1) { - let val2 = doc1.get_at(&list, i, &v1)?; - assert_eq!(Some((val1, id)), val2); - } - - assert_eq!(doc1.list_range_at(&list, 3..6, &v1).count(), 3); - assert_eq!(doc1.list_range_at(&list, 3..6, &v1).next().unwrap().0, 3); - assert_eq!(doc1.list_range_at(&list, 3..6, &v1).last().unwrap().0, 5); - - for (i, val1, id) in doc1.list_range_at(&list, 3..6, &v1) { - let val2 = doc1.get_at(&list, i, &v1)?; - assert_eq!(Some((val1, id)), val2); - } - - let range: Vec<_> = doc1 - .list_range(&list, ..) 
- .map(|(_, val, id)| (val, id)) - .collect(); - let values = doc1.values(&list); - let values: Vec<_> = values.collect(); - assert_eq!(range, values); - - let range: Vec<_> = doc1 - .list_range_at(&list, .., &v1) - .map(|(_, val, id)| (val, id)) - .collect(); - let values: Vec<_> = doc1.values_at(&list, &v1).collect(); - assert_eq!(range, values); - - Ok(()) - } - - #[test] - fn get_range_values() -> Result<(), AutomergeError> { - let mut doc1 = Automerge::new(); - let mut tx = doc1.transaction(); - tx.put(ROOT, "aa", "aaa")?; - tx.put(ROOT, "bb", "bbb")?; - tx.put(ROOT, "cc", "ccc")?; - tx.put(ROOT, "dd", "ddd")?; - tx.commit(); - - let v1 = doc1.get_heads(); - let mut doc2 = doc1.fork(); - - let mut tx = doc1.transaction(); - tx.put(ROOT, "cc", "ccc V2")?; - tx.commit(); - - let mut tx = doc2.transaction(); - tx.put(ROOT, "cc", "ccc V3")?; - tx.commit(); - - doc1.merge(&mut doc2)?; - - let range = "b".to_string().."d".to_string(); - - assert_eq!(doc1.map_range(ROOT, range.clone()).count(), 2); - - for (key, val1, id) in doc1.map_range(ROOT, range.clone()) { - let val2 = doc1.get(ROOT, key)?; - assert_eq!(Some((val1, id)), val2); - } - - assert_eq!(doc1.map_range(ROOT, range.clone()).rev().count(), 2); - - for (key, val1, id) in doc1.map_range(ROOT, range.clone()).rev() { - let val2 = doc1.get(ROOT, key)?; - assert_eq!(Some((val1, id)), val2); - } - - assert_eq!(doc1.map_range_at(ROOT, range.clone(), &v1).count(), 2); - - for (key, val1, id) in doc1.map_range_at(ROOT, range.clone(), &v1) { - let val2 = doc1.get_at(ROOT, key, &v1)?; - assert_eq!(Some((val1, id)), val2); - } - - assert_eq!(doc1.map_range_at(ROOT, range.clone(), &v1).rev().count(), 2); - - for (key, val1, id) in doc1.map_range_at(ROOT, range, &v1).rev() { - let val2 = doc1.get_at(ROOT, key, &v1)?; - assert_eq!(Some((val1, id)), val2); - } - - let range: Vec<_> = doc1 - .map_range(ROOT, ..) 
- .map(|(_, val, id)| (val, id)) - .collect(); - let values: Vec<_> = doc1.values(ROOT).collect(); - assert_eq!(range, values); - - let range: Vec<_> = doc1 - .map_range_at(ROOT, .., &v1) - .map(|(_, val, id)| (val, id)) - .collect(); - let values: Vec<_> = doc1.values_at(ROOT, &v1).collect(); - assert_eq!(range, values); - - Ok(()) - } - - #[test] - fn range_iter_map_rev() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 3).unwrap(); - tx.put(ROOT, "b", 4).unwrap(); - tx.put(ROOT, "c", 5).unwrap(); - tx.put(ROOT, "d", 6).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 7).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 8).unwrap(); - tx.put(ROOT, "d", 9).unwrap(); - tx.commit(); - let actor = doc.get_actor(); - assert_eq!(doc.map_range(ROOT, ..).rev().count(), 4); - - let mut range = doc.map_range(ROOT, "b".to_owned().."d".into()).rev(); - assert_eq!( - range.next(), - Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!(range.next(), None); - - let mut range = doc.map_range(ROOT, "b".to_owned()..="d".into()).rev(); - assert_eq!( - range.next(), - Some(("d", 9.into(), ExId::Id(7, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!(range.next(), None); - - let mut range = doc.map_range(ROOT, ..="c".to_owned()).rev(); - assert_eq!( - range.next(), - Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("a", 8.into(), ExId::Id(6, actor.clone(), 0))) - ); - assert_eq!(range.next(), None); - - let range = doc.map_range(ROOT, "a".to_owned()..).rev(); - assert_eq!( - 
range.collect::>(), - vec![ - ("d", 9.into(), ExId::Id(7, actor.clone(), 0)), - ("c", 5.into(), ExId::Id(3, actor.clone(), 0)), - ("b", 4.into(), ExId::Id(2, actor.clone(), 0)), - ("a", 8.into(), ExId::Id(6, actor.clone(), 0)), - ] - ); - } - - #[test] - fn rolling_back_transaction_has_no_effect() { - let mut doc = Automerge::new(); - let old_states = doc.states.clone(); - let bytes = doc.save(); - let tx = doc.transaction(); - tx.rollback(); - let new_states = doc.states.clone(); - assert_eq!(old_states, new_states); - let new_bytes = doc.save(); - assert_eq!(bytes, new_bytes); - } - - #[test] - fn mutate_old_objects() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - // create a map - let map1 = tx.put_object(ROOT, "a", ObjType::Map).unwrap(); - tx.put(&map1, "b", 1).unwrap(); - // overwrite the first map with a new one - let map2 = tx.put_object(ROOT, "a", ObjType::Map).unwrap(); - tx.put(&map2, "c", 2).unwrap(); - tx.commit(); - - // we can get the new map by traversing the tree - let map = doc.get(&ROOT, "a").unwrap().unwrap().1; - assert_eq!(doc.get(&map, "b").unwrap(), None); - // and get values from it - assert_eq!( - doc.get(&map, "c").unwrap().map(|s| s.0), - Some(ScalarValue::Int(2).into()) - ); - - // but we can still access the old one if we know the ID! - assert_eq!(doc.get(&map1, "b").unwrap().unwrap().0, Value::int(1)); - // and even set new things in it! 
- let mut tx = doc.transaction(); - tx.put(&map1, "c", 3).unwrap(); - tx.commit(); - - assert_eq!(doc.get(&map1, "c").unwrap().unwrap().0, Value::int(3)); - } - - #[test] - fn delete_nothing_in_map_is_noop() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - // deleting a missing key in a map should just be a noop - assert!(tx.delete(ROOT, "a",).is_ok()); - tx.commit(); - let last_change = doc.get_last_local_change().unwrap(); - assert_eq!(last_change.len(), 0); - - let bytes = doc.save(); - assert!(Automerge::load(&bytes,).is_ok()); - - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 1).unwrap(); - tx.commit(); - let last_change = doc.get_last_local_change().unwrap(); - assert_eq!(last_change.len(), 1); - - let mut tx = doc.transaction(); - // a real op - tx.delete(ROOT, "a").unwrap(); - // a no-op - tx.delete(ROOT, "a").unwrap(); - tx.commit(); - let last_change = doc.get_last_local_change().unwrap(); - assert_eq!(last_change.len(), 1); - } - - #[test] - fn delete_nothing_in_list_returns_error() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - // deleting an element in a list that does not exist is an error - assert!(tx.delete(ROOT, 0,).is_err()); - } - - #[test] - fn loaded_doc_changes_have_hash() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 1).unwrap(); - tx.commit(); - let hash = doc.get_last_local_change().unwrap().hash; - let bytes = doc.save(); - let doc = Automerge::load(&bytes).unwrap(); - assert_eq!(doc.get_change_by_hash(&hash).unwrap().hash, hash); - } - - #[test] - fn load_change_with_zero_start_op() { - let bytes = &[ - 133, 111, 74, 131, 202, 50, 52, 158, 2, 96, 163, 163, 83, 255, 255, 255, 50, 50, 50, - 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 255, 255, 245, 53, 1, 0, 0, 0, 0, 0, 0, 4, - 233, 245, 239, 255, 1, 0, 0, 0, 133, 111, 74, 131, 163, 96, 0, 0, 2, 10, 202, 144, 125, - 19, 48, 89, 133, 49, 10, 10, 67, 91, 111, 10, 74, 131, 96, 0, 163, 131, 255, 
255, 255, - 255, 255, 255, 255, 255, 255, 1, 153, 0, 0, 246, 255, 255, 255, 157, 157, 157, 157, - 157, 157, 157, 157, 157, 157, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255, 255, 255, 48, 254, 208, - ]; - let _ = Automerge::load(bytes); - } - - #[test] - fn load_broken_list() { - enum Action { - InsertText(usize, char), - DelText(usize), - } - use Action::*; - let actions = [ - InsertText(0, 'a'), - InsertText(0, 'b'), - DelText(1), - InsertText(0, 'c'), - DelText(1), - DelText(0), - InsertText(0, 'd'), - InsertText(0, 'e'), - InsertText(1, 'f'), - DelText(2), - DelText(1), - InsertText(0, 'g'), - DelText(1), - DelText(0), - InsertText(0, 'h'), - InsertText(1, 'i'), - DelText(1), - DelText(0), - InsertText(0, 'j'), - InsertText(0, 'k'), - DelText(1), - DelText(0), - InsertText(0, 'l'), - DelText(0), - InsertText(0, 'm'), - InsertText(0, 'n'), - DelText(1), - DelText(0), - InsertText(0, 'o'), - DelText(0), - InsertText(0, 'p'), - InsertText(1, 'q'), - InsertText(1, 'r'), - InsertText(1, 's'), - InsertText(3, 't'), - InsertText(5, 'u'), - InsertText(0, 'v'), - InsertText(3, 'w'), - InsertText(4, 'x'), - InsertText(0, 'y'), - InsertText(6, 'z'), - InsertText(11, '1'), - InsertText(0, '2'), - InsertText(0, '3'), - InsertText(0, '4'), - InsertText(13, '5'), - InsertText(11, '6'), - InsertText(17, '7'), - ]; - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); - for action in actions { - match action { - Action::InsertText(index, c) => { - println!("inserting {} at {}", c, index); - tx.insert(&list, index, c).unwrap(); - } - Action::DelText(index) => { - println!("deleting at {} ", index); - tx.delete(&list, index).unwrap(); - } - } - } - tx.commit(); - let bytes = doc.save(); - println!("doc2 time"); - let mut doc2 = Automerge::load(&bytes).unwrap(); - let bytes2 = doc2.save(); - assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); - - 
assert_eq!(doc.queue, doc2.queue); - assert_eq!(doc.history, doc2.history); - assert_eq!(doc.history_index, doc2.history_index); - assert_eq!(doc.states, doc2.states); - assert_eq!(doc.deps, doc2.deps); - assert_eq!(doc.saved, doc2.saved); - assert_eq!(doc.ops, doc2.ops); - assert_eq!(doc.max_op, doc2.max_op); - - assert_eq!(bytes, bytes2); - } - - #[test] - fn load_broken_list_short() { - // breaks when the B constant in OpSet is 3 - enum Action { - InsertText(usize, char), - DelText(usize), - } - use Action::*; - let actions = [ - InsertText(0, 'a'), - InsertText(1, 'b'), - DelText(1), - InsertText(1, 'c'), - InsertText(2, 'd'), - InsertText(2, 'e'), - InsertText(0, 'f'), - DelText(4), - InsertText(4, 'g'), - ]; - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); - for action in actions { - match action { - Action::InsertText(index, c) => { - println!("inserting {} at {}", c, index); - tx.insert(&list, index, c).unwrap(); - } - Action::DelText(index) => { - println!("deleting at {} ", index); - tx.delete(&list, index).unwrap(); - } - } - } - tx.commit(); - let bytes = doc.save(); - println!("doc2 time"); - let mut doc2 = Automerge::load(&bytes).unwrap(); - let bytes2 = doc2.save(); - assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); - - assert_eq!(doc.queue, doc2.queue); - assert_eq!(doc.history, doc2.history); - assert_eq!(doc.history_index, doc2.history_index); - assert_eq!(doc.states, doc2.states); - assert_eq!(doc.deps, doc2.deps); - assert_eq!(doc.saved, doc2.saved); - assert_eq!(doc.ops, doc2.ops); - assert_eq!(doc.max_op, doc2.max_op); - - assert_eq!(bytes, bytes2); - } - - #[test] - fn compute_list_indexes_correctly_when_list_element_is_split_across_tree_nodes() { - let max = B as u64 * 2; - let actor1 = ActorId::from(b"aaaa"); - let mut doc1 = AutoCommit::new().with_actor(actor1.clone()); - let actor2 = ActorId::from(b"bbbb"); - let mut doc2 = 
AutoCommit::new().with_actor(actor2.clone()); - let list = doc1.put_object(ROOT, "list", ObjType::List).unwrap(); - doc1.insert(&list, 0, 0).unwrap(); - doc2.load_incremental(&doc1.save_incremental()).unwrap(); - for i in 1..=max { - doc1.put(&list, 0, i).unwrap() - } - for i in 1..=max { - doc2.put(&list, 0, i).unwrap() - } - let change1 = doc1.save_incremental(); - let change2 = doc2.save_incremental(); - doc2.load_incremental(&change1).unwrap(); - doc1.load_incremental(&change2).unwrap(); - assert_eq!(doc1.length(&list), 1); - assert_eq!(doc2.length(&list), 1); - assert_eq!( - doc1.get_all(&list, 0).unwrap(), - vec![ - (max.into(), ExId::Id(max + 2, actor1.clone(), 0)), - (max.into(), ExId::Id(max + 2, actor2.clone(), 1)) - ] - ); - assert_eq!( - doc2.get_all(&list, 0).unwrap(), - vec![ - (max.into(), ExId::Id(max + 2, actor1, 0)), - (max.into(), ExId::Id(max + 2, actor2, 1)) - ] - ); - assert!(doc1.get(&list, 1).unwrap().is_none()); - assert!(doc2.get(&list, 1).unwrap().is_none()); - } - - #[test] - fn get_parent_objects() { - let mut doc = AutoCommit::new(); - let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); - let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); - doc.insert(&list, 0, 2).unwrap(); - let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); - - assert_eq!(doc.parent_object(&map), Some((ROOT, Prop::Map("a".into())))); - assert_eq!(doc.parent_object(&list), Some((map, Prop::Seq(0)))); - assert_eq!(doc.parent_object(&text), Some((list, Prop::Seq(0)))); - } - - #[test] - fn get_path_to_object() { - let mut doc = AutoCommit::new(); - let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); - let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); - doc.insert(&list, 0, 2).unwrap(); - let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); - - assert_eq!( - doc.path_to_object(&map), - vec![(ROOT, Prop::Map("a".into()))] - ); - assert_eq!( - doc.path_to_object(&list), - vec![(ROOT, Prop::Map("a".into())), 
(map.clone(), Prop::Seq(0)),] - ); - assert_eq!( - doc.path_to_object(&text), - vec![ - (ROOT, Prop::Map("a".into())), - (map, Prop::Seq(0)), - (list, Prop::Seq(0)), - ] - ); - } - - #[test] - fn parents_iterator() { - let mut doc = AutoCommit::new(); - let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); - let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); - doc.insert(&list, 0, 2).unwrap(); - let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); - - let mut parents = doc.parents(text); - assert_eq!(parents.next(), Some((list, Prop::Seq(0)))); - assert_eq!(parents.next(), Some((map, Prop::Seq(0)))); - assert_eq!(parents.next(), Some((ROOT, Prop::Map("a".into())))); - assert_eq!(parents.next(), None); - } - - #[test] - fn can_insert_a_grapheme_into_text() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); - let polar_bear = "🐻‍❄️"; - tx.insert(&text, 0, polar_bear).unwrap(); - tx.commit(); - let s = doc.text(&text).unwrap(); - assert_eq!(s, polar_bear); - let len = doc.length(&text); - assert_eq!(len, 1); // just one grapheme - } - - #[test] - fn can_insert_long_string_into_text() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); - let polar_bear = "🐻‍❄️"; - let polar_bear_army = polar_bear.repeat(100); - tx.insert(&text, 0, &polar_bear_army).unwrap(); - tx.commit(); - let s = doc.text(&text).unwrap(); - assert_eq!(s, polar_bear_army); - let len = doc.length(&text); - assert_eq!(len, 1); // many graphemes - } - - #[test] - fn splice_text_uses_unicode_scalars() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); - let polar_bear = "🐻‍❄️"; - tx.splice_text(&text, 0, 0, polar_bear).unwrap(); - tx.commit(); - let s = doc.text(&text).unwrap(); - assert_eq!(s, polar_bear); - let len = 
doc.length(&text); - assert_eq!(len, 4); // 4 chars - } - - #[test] - fn observe_counter_change_application_overwrite() { - let mut doc1 = AutoCommit::new(); - doc1.set_actor(ActorId::from([1])); - doc1.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); - doc1.commit(); - - let mut doc2 = doc1.fork(); - doc2.set_actor(ActorId::from([2])); - doc2.put(ROOT, "counter", "mystring").unwrap(); - doc2.commit(); - - doc1.increment(ROOT, "counter", 2).unwrap(); - doc1.commit(); - doc1.increment(ROOT, "counter", 5).unwrap(); - doc1.commit(); - - let mut observer = VecOpObserver::default(); - let mut doc3 = doc1.clone(); - doc3.merge_with( - &mut doc2, - ApplyOptions::default().with_op_observer(&mut observer), - ) - .unwrap(); - - assert_eq!( - observer.take_patches(), - vec![Patch::Put { - obj: ExId::Root, - key: Prop::Map("counter".into()), - value: ( - ScalarValue::Str("mystring".into()).into(), - ExId::Id(2, doc2.get_actor().clone(), 1) - ), - conflict: false - }] - ); - - let mut observer = VecOpObserver::default(); - let mut doc4 = doc2.clone(); - doc4.merge_with( - &mut doc1, - ApplyOptions::default().with_op_observer(&mut observer), - ) - .unwrap(); - - // no patches as the increments operate on an invisible counter - assert_eq!(observer.take_patches(), vec![]); - } - - #[test] - fn observe_counter_change_application() { - let mut doc = AutoCommit::new(); - doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); - doc.increment(ROOT, "counter", 2).unwrap(); - doc.increment(ROOT, "counter", 5).unwrap(); - let changes = doc.get_changes(&[]).into_iter().cloned().collect(); - - let mut new_doc = AutoCommit::new(); - let mut observer = VecOpObserver::default(); - new_doc - .apply_changes_with( - changes, - ApplyOptions::default().with_op_observer(&mut observer), - ) - .unwrap(); - assert_eq!( - observer.take_patches(), - vec![ - Patch::Put { - obj: ExId::Root, - key: Prop::Map("counter".into()), - value: ( - ScalarValue::counter(1).into(), - ExId::Id(1, 
doc.get_actor().clone(), 0) - ), - conflict: false - }, - Patch::Increment { - obj: ExId::Root, - key: Prop::Map("counter".into()), - value: (2, ExId::Id(2, doc.get_actor().clone(), 0)), - }, - Patch::Increment { - obj: ExId::Root, - key: Prop::Map("counter".into()), - value: (5, ExId::Id(3, doc.get_actor().clone(), 0)), - } - ] - ); - } -} diff --git a/automerge/src/automerge/tests.rs b/automerge/src/automerge/tests.rs new file mode 100644 index 00000000..ff8a554f --- /dev/null +++ b/automerge/src/automerge/tests.rs @@ -0,0 +1,1191 @@ +use itertools::Itertools; +use pretty_assertions::assert_eq; + +use super::*; +use crate::op_tree::B; +use crate::transaction::Transactable; +use crate::*; +use std::convert::TryInto; + +#[test] +fn insert_op() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor(ActorId::random()); + let mut tx = doc.transaction(); + tx.put(ROOT, "hello", "world")?; + tx.get(ROOT, "hello")?; + tx.commit(); + Ok(()) +} + +#[test] +fn test_set() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + // setting a scalar value shouldn't return an opid as no object was created. + tx.put(ROOT, "a", 1)?; + + // setting the same value shouldn't return an opid as there is no change. + tx.put(ROOT, "a", 1)?; + + assert_eq!(tx.pending_ops(), 1); + + let map = tx.put_object(ROOT, "b", ObjType::Map)?; + // object already exists at b but setting a map again overwrites it so we get an opid. 
+ tx.put(map, "a", 2)?; + + tx.put_object(ROOT, "b", ObjType::Map)?; + + assert_eq!(tx.pending_ops(), 4); + let map = tx.get(ROOT, "b").unwrap().unwrap().1; + assert_eq!(tx.get(&map, "a")?, None); + + tx.commit(); + Ok(()) +} + +#[test] +fn test_list() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor(ActorId::random()); + let mut tx = doc.transaction(); + let list_id = tx.put_object(ROOT, "items", ObjType::List)?; + tx.put(ROOT, "zzz", "zzzval")?; + assert!(tx.get(ROOT, "items")?.unwrap().1 == list_id); + tx.insert(&list_id, 0, "a")?; + tx.insert(&list_id, 0, "b")?; + tx.insert(&list_id, 2, "c")?; + tx.insert(&list_id, 1, "d")?; + assert!(tx.get(&list_id, 0)?.unwrap().0 == "b".into()); + assert!(tx.get(&list_id, 1)?.unwrap().0 == "d".into()); + assert!(tx.get(&list_id, 2)?.unwrap().0 == "a".into()); + assert!(tx.get(&list_id, 3)?.unwrap().0 == "c".into()); + assert!(tx.length(&list_id) == 4); + tx.commit(); + doc.save(); + Ok(()) +} + +#[test] +fn test_del() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor(ActorId::random()); + let mut tx = doc.transaction(); + tx.put(ROOT, "xxx", "xxx")?; + assert!(tx.get(ROOT, "xxx")?.is_some()); + tx.delete(ROOT, "xxx")?; + assert!(tx.get(ROOT, "xxx")?.is_none()); + tx.commit(); + Ok(()) +} + +#[test] +fn test_inc() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + tx.put(ROOT, "counter", ScalarValue::counter(10))?; + assert!(tx.get(ROOT, "counter")?.unwrap().0 == Value::counter(10)); + tx.increment(ROOT, "counter", 10)?; + assert!(tx.get(ROOT, "counter")?.unwrap().0 == Value::counter(20)); + tx.increment(ROOT, "counter", -5)?; + assert!(tx.get(ROOT, "counter")?.unwrap().0 == Value::counter(15)); + tx.commit(); + Ok(()) +} + +#[test] +fn test_save_incremental() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + + let mut tx = doc.transaction(); + tx.put(ROOT, "foo", 1)?; + tx.commit(); + + let 
save1 = doc.save(); + + let mut tx = doc.transaction(); + tx.put(ROOT, "bar", 2)?; + tx.commit(); + + let save2 = doc.save_incremental(); + + let mut tx = doc.transaction(); + tx.put(ROOT, "baz", 3)?; + tx.commit(); + + let save3 = doc.save_incremental(); + + let mut save_a: Vec = vec![]; + save_a.extend(&save1); + save_a.extend(&save2); + save_a.extend(&save3); + + assert!(doc.save_incremental().is_empty()); + + let save_b = doc.save(); + + assert!(save_b.len() < save_a.len()); + + let mut doc_a = Automerge::load(&save_a)?; + let mut doc_b = Automerge::load(&save_b)?; + + assert!(doc_a.get_all(ROOT, "baz")? == doc_b.get_all(ROOT, "baz")?); + + assert!(doc_a.save() == doc_b.save()); + + Ok(()) +} + +#[test] +fn test_save_text() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let text = tx.put_object(ROOT, "text", ObjType::Text)?; + tx.commit(); + let heads1 = doc.get_heads(); + let mut tx = doc.transaction(); + tx.splice_text(&text, 0, 0, "hello world")?; + tx.commit(); + let heads2 = doc.get_heads(); + let mut tx = doc.transaction(); + tx.splice_text(&text, 6, 0, "big bad ")?; + tx.commit(); + let heads3 = doc.get_heads(); + + assert!(&doc.text(&text)? == "hello big bad world"); + assert!(&doc.text_at(&text, &heads1)?.is_empty()); + assert!(&doc.text_at(&text, &heads2)? == "hello world"); + assert!(&doc.text_at(&text, &heads3)? 
== "hello big bad world"); + + Ok(()) +} + +#[test] +fn test_props_vals_at() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor("aaaa".try_into().unwrap()); + let mut tx = doc.transaction(); + tx.put(ROOT, "prop1", "val1")?; + tx.commit(); + doc.get_heads(); + let heads1 = doc.get_heads(); + let mut tx = doc.transaction(); + tx.put(ROOT, "prop1", "val2")?; + tx.commit(); + doc.get_heads(); + let heads2 = doc.get_heads(); + let mut tx = doc.transaction(); + tx.put(ROOT, "prop2", "val3")?; + tx.commit(); + doc.get_heads(); + let heads3 = doc.get_heads(); + let mut tx = doc.transaction(); + tx.delete(ROOT, "prop1")?; + tx.commit(); + doc.get_heads(); + let heads4 = doc.get_heads(); + let mut tx = doc.transaction(); + tx.put(ROOT, "prop3", "val4")?; + tx.commit(); + doc.get_heads(); + let heads5 = doc.get_heads(); + assert!(doc.keys_at(ROOT, &heads1).collect_vec() == vec!["prop1".to_owned()]); + assert_eq!(doc.length_at(ROOT, &heads1), 1); + assert!(doc.get_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); + assert!(doc.get_at(ROOT, "prop2", &heads1)? == None); + assert!(doc.get_at(ROOT, "prop3", &heads1)? == None); + + assert!(doc.keys_at(ROOT, &heads2).collect_vec() == vec!["prop1".to_owned()]); + assert_eq!(doc.length_at(ROOT, &heads2), 1); + assert!(doc.get_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); + assert!(doc.get_at(ROOT, "prop2", &heads2)? == None); + assert!(doc.get_at(ROOT, "prop3", &heads2)? == None); + + assert!( + doc.keys_at(ROOT, &heads3).collect_vec() == vec!["prop1".to_owned(), "prop2".to_owned()] + ); + assert_eq!(doc.length_at(ROOT, &heads3), 2); + assert!(doc.get_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); + assert!(doc.get_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); + assert!(doc.get_at(ROOT, "prop3", &heads3)? 
== None); + + assert!(doc.keys_at(ROOT, &heads4).collect_vec() == vec!["prop2".to_owned()]); + assert_eq!(doc.length_at(ROOT, &heads4), 1); + assert!(doc.get_at(ROOT, "prop1", &heads4)? == None); + assert!(doc.get_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); + assert!(doc.get_at(ROOT, "prop3", &heads4)? == None); + + assert!( + doc.keys_at(ROOT, &heads5).collect_vec() == vec!["prop2".to_owned(), "prop3".to_owned()] + ); + assert_eq!(doc.length_at(ROOT, &heads5), 2); + assert_eq!(doc.length(ROOT), 2); + assert!(doc.get_at(ROOT, "prop1", &heads5)? == None); + assert!(doc.get_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); + assert!(doc.get_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); + + assert_eq!(doc.keys_at(ROOT, &[]).count(), 0); + assert_eq!(doc.length_at(ROOT, &[]), 0); + assert!(doc.get_at(ROOT, "prop1", &[])? == None); + assert!(doc.get_at(ROOT, "prop2", &[])? == None); + assert!(doc.get_at(ROOT, "prop3", &[])? == None); + Ok(()) +} + +#[test] +fn test_len_at() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor("aaaa".try_into().unwrap()); + + let mut tx = doc.transaction(); + let list = tx.put_object(ROOT, "list", ObjType::List)?; + tx.commit(); + let heads1 = doc.get_heads(); + + let mut tx = doc.transaction(); + tx.insert(&list, 0, 10)?; + tx.commit(); + let heads2 = doc.get_heads(); + + let mut tx = doc.transaction(); + tx.put(&list, 0, 20)?; + tx.insert(&list, 0, 30)?; + tx.commit(); + let heads3 = doc.get_heads(); + + let mut tx = doc.transaction(); + tx.put(&list, 1, 40)?; + tx.insert(&list, 1, 50)?; + tx.commit(); + let heads4 = doc.get_heads(); + + let mut tx = doc.transaction(); + tx.delete(&list, 2)?; + tx.commit(); + let heads5 = doc.get_heads(); + + let mut tx = doc.transaction(); + tx.delete(&list, 0)?; + tx.commit(); + let heads6 = doc.get_heads(); + + assert!(doc.length_at(&list, &heads1) == 0); + assert!(doc.get_at(&list, 0, &heads1)?.is_none()); + + 
assert!(doc.length_at(&list, &heads2) == 1); + assert!(doc.get_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10)); + + assert!(doc.length_at(&list, &heads3) == 2); + assert!(doc.get_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30)); + assert!(doc.get_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20)); + + assert!(doc.length_at(&list, &heads4) == 3); + assert!(doc.get_at(&list, 0, &heads4)?.unwrap().0 == Value::int(30)); + assert!(doc.get_at(&list, 1, &heads4)?.unwrap().0 == Value::int(50)); + assert!(doc.get_at(&list, 2, &heads4)?.unwrap().0 == Value::int(40)); + + assert!(doc.length_at(&list, &heads5) == 2); + assert!(doc.get_at(&list, 0, &heads5)?.unwrap().0 == Value::int(30)); + assert!(doc.get_at(&list, 1, &heads5)?.unwrap().0 == Value::int(50)); + + assert!(doc.length_at(&list, &heads6) == 1); + assert!(doc.length(&list) == 1); + assert!(doc.get_at(&list, 0, &heads6)?.unwrap().0 == Value::int(50)); + + Ok(()) +} + +#[test] +fn keys_iter_map() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 3).unwrap(); + tx.put(ROOT, "b", 4).unwrap(); + tx.put(ROOT, "c", 5).unwrap(); + tx.put(ROOT, "d", 6).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 7).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 8).unwrap(); + tx.put(ROOT, "d", 9).unwrap(); + tx.commit(); + assert_eq!(doc.keys(ROOT).count(), 4); + + let mut keys = doc.keys(ROOT); + assert_eq!(keys.next(), Some("a".into())); + assert_eq!(keys.next(), Some("b".into())); + assert_eq!(keys.next(), Some("c".into())); + assert_eq!(keys.next(), Some("d".into())); + assert_eq!(keys.next(), None); + + let mut keys = doc.keys(ROOT); + assert_eq!(keys.next_back(), Some("d".into())); + assert_eq!(keys.next_back(), Some("c".into())); + assert_eq!(keys.next_back(), Some("b".into())); + assert_eq!(keys.next_back(), Some("a".into())); + assert_eq!(keys.next_back(), None); + + let mut keys = doc.keys(ROOT); + 
assert_eq!(keys.next(), Some("a".into())); + assert_eq!(keys.next_back(), Some("d".into())); + assert_eq!(keys.next_back(), Some("c".into())); + assert_eq!(keys.next_back(), Some("b".into())); + assert_eq!(keys.next_back(), None); + + let mut keys = doc.keys(ROOT); + assert_eq!(keys.next_back(), Some("d".into())); + assert_eq!(keys.next(), Some("a".into())); + assert_eq!(keys.next(), Some("b".into())); + assert_eq!(keys.next(), Some("c".into())); + assert_eq!(keys.next(), None); + let keys = doc.keys(ROOT); + assert_eq!(keys.collect::>(), vec!["a", "b", "c", "d"]); +} + +#[test] +fn keys_iter_seq() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); + tx.insert(&list, 0, 3).unwrap(); + tx.insert(&list, 1, 4).unwrap(); + tx.insert(&list, 2, 5).unwrap(); + tx.insert(&list, 3, 6).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.put(&list, 0, 7).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.put(&list, 0, 8).unwrap(); + tx.put(&list, 3, 9).unwrap(); + tx.commit(); + let actor = doc.get_actor(); + assert_eq!(doc.keys(&list).count(), 4); + + let mut keys = doc.keys(&list); + assert_eq!(keys.next(), Some(format!("2@{}", actor))); + assert_eq!(keys.next(), Some(format!("3@{}", actor))); + assert_eq!(keys.next(), Some(format!("4@{}", actor))); + assert_eq!(keys.next(), Some(format!("5@{}", actor))); + assert_eq!(keys.next(), None); + + let mut keys = doc.keys(&list); + assert_eq!(keys.next_back(), Some(format!("5@{}", actor))); + assert_eq!(keys.next_back(), Some(format!("4@{}", actor))); + assert_eq!(keys.next_back(), Some(format!("3@{}", actor))); + assert_eq!(keys.next_back(), Some(format!("2@{}", actor))); + assert_eq!(keys.next_back(), None); + + let mut keys = doc.keys(&list); + assert_eq!(keys.next(), Some(format!("2@{}", actor))); + assert_eq!(keys.next_back(), Some(format!("5@{}", actor))); + assert_eq!(keys.next_back(), Some(format!("4@{}", actor))); 
+ assert_eq!(keys.next_back(), Some(format!("3@{}", actor))); + assert_eq!(keys.next_back(), None); + + let mut keys = doc.keys(&list); + assert_eq!(keys.next_back(), Some(format!("5@{}", actor))); + assert_eq!(keys.next(), Some(format!("2@{}", actor))); + assert_eq!(keys.next(), Some(format!("3@{}", actor))); + assert_eq!(keys.next(), Some(format!("4@{}", actor))); + assert_eq!(keys.next(), None); + + let keys = doc.keys(&list); + assert_eq!( + keys.collect::>(), + vec![ + format!("2@{}", actor), + format!("3@{}", actor), + format!("4@{}", actor), + format!("5@{}", actor) + ] + ); +} + +#[test] +fn range_iter_map() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 3).unwrap(); + tx.put(ROOT, "b", 4).unwrap(); + tx.put(ROOT, "c", 5).unwrap(); + tx.put(ROOT, "d", 6).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 7).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 8).unwrap(); + tx.put(ROOT, "d", 9).unwrap(); + tx.commit(); + let actor = doc.get_actor(); + assert_eq!(doc.map_range(ROOT, ..).count(), 4); + + let mut range = doc.map_range(ROOT, "b".to_owned().."d".into()); + assert_eq!( + range.next(), + Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!(range.next(), None); + + let mut range = doc.map_range(ROOT, "b".to_owned()..="d".into()); + assert_eq!( + range.next(), + Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("d", 9.into(), ExId::Id(7, actor.clone(), 0))) + ); + assert_eq!(range.next(), None); + + let mut range = doc.map_range(ROOT, ..="c".to_owned()); + assert_eq!( + range.next(), + Some(("a", 8.into(), ExId::Id(6, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("b", 4.into(), ExId::Id(2, 
actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!(range.next(), None); + + let range = doc.map_range(ROOT, "a".to_owned()..); + assert_eq!( + range.collect::>(), + vec![ + ("a", 8.into(), ExId::Id(6, actor.clone(), 0)), + ("b", 4.into(), ExId::Id(2, actor.clone(), 0)), + ("c", 5.into(), ExId::Id(3, actor.clone(), 0)), + ("d", 9.into(), ExId::Id(7, actor.clone(), 0)), + ] + ); +} + +#[test] +fn insert_at_index() { + let mut doc = AutoCommit::new(); + + let list = &doc.put_object(ROOT, "list", ObjType::List).unwrap(); + doc.insert(list, 0, 0).unwrap(); + doc.insert(list, 0, 1).unwrap(); // both inserts at the same index + + assert_eq!(doc.length(list), 2); + assert_eq!(doc.keys(list).count(), 2); + assert_eq!(doc.list_range(list, ..).count(), 2); +} + +#[test] +fn get_list_values() -> Result<(), AutomergeError> { + let mut doc1 = Automerge::new(); + let mut tx = doc1.transaction(); + let list = tx.put_object(ROOT, "list", ObjType::List)?; + + // insert elements + tx.insert(&list, 0, "First")?; + tx.insert(&list, 1, "Second")?; + tx.insert(&list, 2, "Third")?; + tx.insert(&list, 3, "Forth")?; + tx.insert(&list, 4, "Fith")?; + tx.insert(&list, 5, "Sixth")?; + tx.insert(&list, 6, "Seventh")?; + tx.insert(&list, 7, "Eights")?; + tx.commit(); + + let v1 = doc1.get_heads(); + let mut doc2 = doc1.fork(); + + let mut tx = doc1.transaction(); + tx.put(&list, 2, "Third V2")?; + tx.commit(); + + let mut tx = doc2.transaction(); + tx.put(&list, 2, "Third V3")?; + tx.commit(); + + doc1.merge(&mut doc2)?; + + assert_eq!(doc1.list_range(&list, ..).count(), 8); + + for (i, val1, id) in doc1.list_range(&list, ..) 
{ + let val2 = doc1.get(&list, i)?; + assert_eq!(Some((val1, id)), val2); + } + + assert_eq!(doc1.list_range(&list, 3..6).count(), 3); + assert_eq!(doc1.list_range(&list, 3..6).next().unwrap().0, 3); + assert_eq!(doc1.list_range(&list, 3..6).last().unwrap().0, 5); + + for (i, val1, id) in doc1.list_range(&list, 3..6) { + let val2 = doc1.get(&list, i)?; + assert_eq!(Some((val1, id)), val2); + } + + assert_eq!(doc1.list_range_at(&list, .., &v1).count(), 8); + for (i, val1, id) in doc1.list_range_at(&list, .., &v1) { + let val2 = doc1.get_at(&list, i, &v1)?; + assert_eq!(Some((val1, id)), val2); + } + + assert_eq!(doc1.list_range_at(&list, 3..6, &v1).count(), 3); + assert_eq!(doc1.list_range_at(&list, 3..6, &v1).next().unwrap().0, 3); + assert_eq!(doc1.list_range_at(&list, 3..6, &v1).last().unwrap().0, 5); + + for (i, val1, id) in doc1.list_range_at(&list, 3..6, &v1) { + let val2 = doc1.get_at(&list, i, &v1)?; + assert_eq!(Some((val1, id)), val2); + } + + let range: Vec<_> = doc1 + .list_range(&list, ..) 
+ .map(|(_, val, id)| (val, id)) + .collect(); + let values = doc1.values(&list); + let values: Vec<_> = values.collect(); + assert_eq!(range, values); + + let range: Vec<_> = doc1 + .list_range_at(&list, .., &v1) + .map(|(_, val, id)| (val, id)) + .collect(); + let values: Vec<_> = doc1.values_at(&list, &v1).collect(); + assert_eq!(range, values); + + Ok(()) +} + +#[test] +fn get_range_values() -> Result<(), AutomergeError> { + let mut doc1 = Automerge::new(); + let mut tx = doc1.transaction(); + tx.put(ROOT, "aa", "aaa")?; + tx.put(ROOT, "bb", "bbb")?; + tx.put(ROOT, "cc", "ccc")?; + tx.put(ROOT, "dd", "ddd")?; + tx.commit(); + + let v1 = doc1.get_heads(); + let mut doc2 = doc1.fork(); + + let mut tx = doc1.transaction(); + tx.put(ROOT, "cc", "ccc V2")?; + tx.commit(); + + let mut tx = doc2.transaction(); + tx.put(ROOT, "cc", "ccc V3")?; + tx.commit(); + + doc1.merge(&mut doc2)?; + + let range = "b".to_string().."d".to_string(); + + assert_eq!(doc1.map_range(ROOT, range.clone()).count(), 2); + + for (key, val1, id) in doc1.map_range(ROOT, range.clone()) { + let val2 = doc1.get(ROOT, key)?; + assert_eq!(Some((val1, id)), val2); + } + + assert_eq!(doc1.map_range(ROOT, range.clone()).rev().count(), 2); + + for (key, val1, id) in doc1.map_range(ROOT, range.clone()).rev() { + let val2 = doc1.get(ROOT, key)?; + assert_eq!(Some((val1, id)), val2); + } + + assert_eq!(doc1.map_range_at(ROOT, range.clone(), &v1).count(), 2); + + for (key, val1, id) in doc1.map_range_at(ROOT, range.clone(), &v1) { + let val2 = doc1.get_at(ROOT, key, &v1)?; + assert_eq!(Some((val1, id)), val2); + } + + assert_eq!(doc1.map_range_at(ROOT, range.clone(), &v1).rev().count(), 2); + + for (key, val1, id) in doc1.map_range_at(ROOT, range, &v1).rev() { + let val2 = doc1.get_at(ROOT, key, &v1)?; + assert_eq!(Some((val1, id)), val2); + } + + let range: Vec<_> = doc1 + .map_range(ROOT, ..) 
+ .map(|(_, val, id)| (val, id)) + .collect(); + let values: Vec<_> = doc1.values(ROOT).collect(); + assert_eq!(range, values); + + let range: Vec<_> = doc1 + .map_range_at(ROOT, .., &v1) + .map(|(_, val, id)| (val, id)) + .collect(); + let values: Vec<_> = doc1.values_at(ROOT, &v1).collect(); + assert_eq!(range, values); + + Ok(()) +} + +#[test] +fn range_iter_map_rev() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 3).unwrap(); + tx.put(ROOT, "b", 4).unwrap(); + tx.put(ROOT, "c", 5).unwrap(); + tx.put(ROOT, "d", 6).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 7).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 8).unwrap(); + tx.put(ROOT, "d", 9).unwrap(); + tx.commit(); + let actor = doc.get_actor(); + assert_eq!(doc.map_range(ROOT, ..).rev().count(), 4); + + let mut range = doc.map_range(ROOT, "b".to_owned().."d".into()).rev(); + assert_eq!( + range.next(), + Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!(range.next(), None); + + let mut range = doc.map_range(ROOT, "b".to_owned()..="d".into()).rev(); + assert_eq!( + range.next(), + Some(("d", 9.into(), ExId::Id(7, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!(range.next(), None); + + let mut range = doc.map_range(ROOT, ..="c".to_owned()).rev(); + assert_eq!( + range.next(), + Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!( + range.next(), + Some(("a", 8.into(), ExId::Id(6, actor.clone(), 0))) + ); + assert_eq!(range.next(), None); + + let range = doc.map_range(ROOT, "a".to_owned()..).rev(); + assert_eq!( + 
range.collect::>(), + vec![ + ("d", 9.into(), ExId::Id(7, actor.clone(), 0)), + ("c", 5.into(), ExId::Id(3, actor.clone(), 0)), + ("b", 4.into(), ExId::Id(2, actor.clone(), 0)), + ("a", 8.into(), ExId::Id(6, actor.clone(), 0)), + ] + ); +} + +#[test] +fn rolling_back_transaction_has_no_effect() { + let mut doc = Automerge::new(); + let old_states = doc.states.clone(); + let bytes = doc.save(); + let tx = doc.transaction(); + tx.rollback(); + let new_states = doc.states.clone(); + assert_eq!(old_states, new_states); + let new_bytes = doc.save(); + assert_eq!(bytes, new_bytes); +} + +#[test] +fn mutate_old_objects() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + // create a map + let map1 = tx.put_object(ROOT, "a", ObjType::Map).unwrap(); + tx.put(&map1, "b", 1).unwrap(); + // overwrite the first map with a new one + let map2 = tx.put_object(ROOT, "a", ObjType::Map).unwrap(); + tx.put(&map2, "c", 2).unwrap(); + tx.commit(); + + // we can get the new map by traversing the tree + let map = doc.get(&ROOT, "a").unwrap().unwrap().1; + assert_eq!(doc.get(&map, "b").unwrap(), None); + // and get values from it + assert_eq!( + doc.get(&map, "c").unwrap().map(|s| s.0), + Some(ScalarValue::Int(2).into()) + ); + + // but we can still access the old one if we know the ID! + assert_eq!(doc.get(&map1, "b").unwrap().unwrap().0, Value::int(1)); + // and even set new things in it! 
+ let mut tx = doc.transaction(); + tx.put(&map1, "c", 3).unwrap(); + tx.commit(); + + assert_eq!(doc.get(&map1, "c").unwrap().unwrap().0, Value::int(3)); +} + +#[test] +fn delete_nothing_in_map_is_noop() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + // deleting a missing key in a map should just be a noop + assert!(tx.delete(ROOT, "a",).is_ok()); + tx.commit(); + let last_change = doc.get_last_local_change().unwrap(); + assert_eq!(last_change.len(), 0); + + let bytes = doc.save(); + assert!(Automerge::load(&bytes,).is_ok()); + + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 1).unwrap(); + tx.commit(); + let last_change = doc.get_last_local_change().unwrap(); + assert_eq!(last_change.len(), 1); + + let mut tx = doc.transaction(); + // a real op + tx.delete(ROOT, "a").unwrap(); + // a no-op + tx.delete(ROOT, "a").unwrap(); + tx.commit(); + let last_change = doc.get_last_local_change().unwrap(); + assert_eq!(last_change.len(), 1); +} + +#[test] +fn delete_nothing_in_list_returns_error() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + // deleting an element in a list that does not exist is an error + assert!(tx.delete(ROOT, 0,).is_err()); +} + +#[test] +fn loaded_doc_changes_have_hash() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + tx.put(ROOT, "a", 1).unwrap(); + tx.commit(); + let hash = doc.get_last_local_change().unwrap().hash; + let bytes = doc.save(); + let doc = Automerge::load(&bytes).unwrap(); + assert_eq!(doc.get_change_by_hash(&hash).unwrap().hash, hash); +} + +#[test] +fn load_change_with_zero_start_op() { + let bytes = &[ + 133, 111, 74, 131, 202, 50, 52, 158, 2, 96, 163, 163, 83, 255, 255, 255, 50, 50, 50, 50, + 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 255, 255, 245, 53, 1, 0, 0, 0, 0, 0, 0, 4, 233, + 245, 239, 255, 1, 0, 0, 0, 133, 111, 74, 131, 163, 96, 0, 0, 2, 10, 202, 144, 125, 19, 48, + 89, 133, 49, 10, 10, 67, 91, 111, 10, 74, 131, 96, 0, 163, 131, 255, 255, 255, 255, 
255, + 255, 255, 255, 255, 1, 153, 0, 0, 246, 255, 255, 255, 157, 157, 157, 157, 157, 157, 157, + 157, 157, 157, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 48, 254, 208, + ]; + let _ = Automerge::load(bytes); +} + +#[test] +fn load_broken_list() { + enum Action { + InsertText(usize, char), + DelText(usize), + } + use Action::*; + let actions = [ + InsertText(0, 'a'), + InsertText(0, 'b'), + DelText(1), + InsertText(0, 'c'), + DelText(1), + DelText(0), + InsertText(0, 'd'), + InsertText(0, 'e'), + InsertText(1, 'f'), + DelText(2), + DelText(1), + InsertText(0, 'g'), + DelText(1), + DelText(0), + InsertText(0, 'h'), + InsertText(1, 'i'), + DelText(1), + DelText(0), + InsertText(0, 'j'), + InsertText(0, 'k'), + DelText(1), + DelText(0), + InsertText(0, 'l'), + DelText(0), + InsertText(0, 'm'), + InsertText(0, 'n'), + DelText(1), + DelText(0), + InsertText(0, 'o'), + DelText(0), + InsertText(0, 'p'), + InsertText(1, 'q'), + InsertText(1, 'r'), + InsertText(1, 's'), + InsertText(3, 't'), + InsertText(5, 'u'), + InsertText(0, 'v'), + InsertText(3, 'w'), + InsertText(4, 'x'), + InsertText(0, 'y'), + InsertText(6, 'z'), + InsertText(11, '1'), + InsertText(0, '2'), + InsertText(0, '3'), + InsertText(0, '4'), + InsertText(13, '5'), + InsertText(11, '6'), + InsertText(17, '7'), + ]; + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); + for action in actions { + match action { + Action::InsertText(index, c) => { + println!("inserting {} at {}", c, index); + tx.insert(&list, index, c).unwrap(); + } + Action::DelText(index) => { + println!("deleting at {} ", index); + tx.delete(&list, index).unwrap(); + } + } + } + tx.commit(); + let bytes = doc.save(); + println!("doc2 time"); + let mut doc2 = Automerge::load(&bytes).unwrap(); + let bytes2 = doc2.save(); + assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); + + assert_eq!(doc.queue, 
doc2.queue); + assert_eq!(doc.history, doc2.history); + assert_eq!(doc.history_index, doc2.history_index); + assert_eq!(doc.states, doc2.states); + assert_eq!(doc.deps, doc2.deps); + assert_eq!(doc.saved, doc2.saved); + assert_eq!(doc.ops, doc2.ops); + assert_eq!(doc.max_op, doc2.max_op); + + assert_eq!(bytes, bytes2); +} + +#[test] +fn load_broken_list_short() { + // breaks when the B constant in OpSet is 3 + enum Action { + InsertText(usize, char), + DelText(usize), + } + use Action::*; + let actions = [ + InsertText(0, 'a'), + InsertText(1, 'b'), + DelText(1), + InsertText(1, 'c'), + InsertText(2, 'd'), + InsertText(2, 'e'), + InsertText(0, 'f'), + DelText(4), + InsertText(4, 'g'), + ]; + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); + for action in actions { + match action { + Action::InsertText(index, c) => { + println!("inserting {} at {}", c, index); + tx.insert(&list, index, c).unwrap(); + } + Action::DelText(index) => { + println!("deleting at {} ", index); + tx.delete(&list, index).unwrap(); + } + } + } + tx.commit(); + let bytes = doc.save(); + println!("doc2 time"); + let mut doc2 = Automerge::load(&bytes).unwrap(); + let bytes2 = doc2.save(); + assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); + + assert_eq!(doc.queue, doc2.queue); + assert_eq!(doc.history, doc2.history); + assert_eq!(doc.history_index, doc2.history_index); + assert_eq!(doc.states, doc2.states); + assert_eq!(doc.deps, doc2.deps); + assert_eq!(doc.saved, doc2.saved); + assert_eq!(doc.ops, doc2.ops); + assert_eq!(doc.max_op, doc2.max_op); + + assert_eq!(bytes, bytes2); +} + +#[test] +fn compute_list_indexes_correctly_when_list_element_is_split_across_tree_nodes() { + let max = B as u64 * 2; + let actor1 = ActorId::from(b"aaaa"); + let mut doc1 = AutoCommit::new().with_actor(actor1.clone()); + let actor2 = ActorId::from(b"bbbb"); + let mut doc2 = 
AutoCommit::new().with_actor(actor2.clone()); + let list = doc1.put_object(ROOT, "list", ObjType::List).unwrap(); + doc1.insert(&list, 0, 0).unwrap(); + doc2.load_incremental(&doc1.save_incremental()).unwrap(); + for i in 1..=max { + doc1.put(&list, 0, i).unwrap() + } + for i in 1..=max { + doc2.put(&list, 0, i).unwrap() + } + let change1 = doc1.save_incremental(); + let change2 = doc2.save_incremental(); + doc2.load_incremental(&change1).unwrap(); + doc1.load_incremental(&change2).unwrap(); + assert_eq!(doc1.length(&list), 1); + assert_eq!(doc2.length(&list), 1); + assert_eq!( + doc1.get_all(&list, 0).unwrap(), + vec![ + (max.into(), ExId::Id(max + 2, actor1.clone(), 0)), + (max.into(), ExId::Id(max + 2, actor2.clone(), 1)) + ] + ); + assert_eq!( + doc2.get_all(&list, 0).unwrap(), + vec![ + (max.into(), ExId::Id(max + 2, actor1, 0)), + (max.into(), ExId::Id(max + 2, actor2, 1)) + ] + ); + assert!(doc1.get(&list, 1).unwrap().is_none()); + assert!(doc2.get(&list, 1).unwrap().is_none()); +} + +#[test] +fn get_parent_objects() { + let mut doc = AutoCommit::new(); + let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); + let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); + doc.insert(&list, 0, 2).unwrap(); + let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); + + assert_eq!(doc.parent_object(&map), Some((ROOT, Prop::Map("a".into())))); + assert_eq!(doc.parent_object(&list), Some((map, Prop::Seq(0)))); + assert_eq!(doc.parent_object(&text), Some((list, Prop::Seq(0)))); +} + +#[test] +fn get_path_to_object() { + let mut doc = AutoCommit::new(); + let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); + let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); + doc.insert(&list, 0, 2).unwrap(); + let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); + + assert_eq!( + doc.path_to_object(&map), + vec![(ROOT, Prop::Map("a".into()))] + ); + assert_eq!( + doc.path_to_object(&list), + vec![(ROOT, Prop::Map("a".into())), (map.clone(), 
Prop::Seq(0)),] + ); + assert_eq!( + doc.path_to_object(&text), + vec![ + (ROOT, Prop::Map("a".into())), + (map, Prop::Seq(0)), + (list, Prop::Seq(0)), + ] + ); +} + +#[test] +fn parents_iterator() { + let mut doc = AutoCommit::new(); + let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); + let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); + doc.insert(&list, 0, 2).unwrap(); + let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); + + let mut parents = doc.parents(text); + assert_eq!(parents.next(), Some((list, Prop::Seq(0)))); + assert_eq!(parents.next(), Some((map, Prop::Seq(0)))); + assert_eq!(parents.next(), Some((ROOT, Prop::Map("a".into())))); + assert_eq!(parents.next(), None); +} + +#[test] +fn can_insert_a_grapheme_into_text() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); + let polar_bear = "🐻‍❄️"; + tx.insert(&text, 0, polar_bear).unwrap(); + tx.commit(); + let s = doc.text(&text).unwrap(); + assert_eq!(s, polar_bear); + let len = doc.length(&text); + assert_eq!(len, 1); // just one grapheme +} + +#[test] +fn can_insert_long_string_into_text() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); + let polar_bear = "🐻‍❄️"; + let polar_bear_army = polar_bear.repeat(100); + tx.insert(&text, 0, &polar_bear_army).unwrap(); + tx.commit(); + let s = doc.text(&text).unwrap(); + assert_eq!(s, polar_bear_army); + let len = doc.length(&text); + assert_eq!(len, 1); // many graphemes +} + +#[test] +fn splice_text_uses_unicode_scalars() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); + let polar_bear = "🐻‍❄️"; + tx.splice_text(&text, 0, 0, polar_bear).unwrap(); + tx.commit(); + let s = doc.text(&text).unwrap(); + assert_eq!(s, polar_bear); + let len = doc.length(&text); + assert_eq!(len, 
4); // 4 chars +} + +#[test] +fn observe_counter_change_application_overwrite() { + let mut doc1 = AutoCommit::new(); + doc1.set_actor(ActorId::from([1])); + doc1.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); + doc1.commit(); + + let mut doc2 = doc1.fork(); + doc2.set_actor(ActorId::from([2])); + doc2.put(ROOT, "counter", "mystring").unwrap(); + doc2.commit(); + + doc1.increment(ROOT, "counter", 2).unwrap(); + doc1.commit(); + doc1.increment(ROOT, "counter", 5).unwrap(); + doc1.commit(); + + let mut observer = VecOpObserver::default(); + let mut doc3 = doc1.clone(); + doc3.merge_with( + &mut doc2, + ApplyOptions::default().with_op_observer(&mut observer), + ) + .unwrap(); + + assert_eq!( + observer.take_patches(), + vec![Patch::Put { + obj: ExId::Root, + key: Prop::Map("counter".into()), + value: ( + ScalarValue::Str("mystring".into()).into(), + ExId::Id(2, doc2.get_actor().clone(), 1) + ), + conflict: false + }] + ); + + let mut observer = VecOpObserver::default(); + let mut doc4 = doc2.clone(); + doc4.merge_with( + &mut doc1, + ApplyOptions::default().with_op_observer(&mut observer), + ) + .unwrap(); + + // no patches as the increments operate on an invisible counter + assert_eq!(observer.take_patches(), vec![]); +} + +#[test] +fn observe_counter_change_application() { + let mut doc = AutoCommit::new(); + doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); + doc.increment(ROOT, "counter", 2).unwrap(); + doc.increment(ROOT, "counter", 5).unwrap(); + let changes = doc.get_changes(&[]).into_iter().cloned().collect(); + + let mut new_doc = AutoCommit::new(); + let mut observer = VecOpObserver::default(); + new_doc + .apply_changes_with( + changes, + ApplyOptions::default().with_op_observer(&mut observer), + ) + .unwrap(); + assert_eq!( + observer.take_patches(), + vec![ + Patch::Put { + obj: ExId::Root, + key: Prop::Map("counter".into()), + value: ( + ScalarValue::counter(1).into(), + ExId::Id(1, doc.get_actor().clone(), 0) + ), + conflict: false + 
}, + Patch::Increment { + obj: ExId::Root, + key: Prop::Map("counter".into()), + value: (2, ExId::Id(2, doc.get_actor().clone(), 0)), + }, + Patch::Increment { + obj: ExId::Root, + key: Prop::Map("counter".into()), + value: (5, ExId::Id(3, doc.get_actor().clone(), 0)), + } + ] + ); +} From 944e5d8001d65c3accd5395708384b4207010902 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Thu, 5 May 2022 08:29:37 -0500 Subject: [PATCH 347/730] Trap and report all errors. --- automerge-c/examples/quickstart.c | 51 ++++++++++++++++++++++++++++--- 1 file changed, 46 insertions(+), 5 deletions(-) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index afc3e317..8e811853 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -4,18 +4,45 @@ #include AMvalue test(AMresult* result, AMvalueVariant const value_tag) { + static char prelude[64]; + if (result == NULL) { - fprintf(stderr, "Invalid AMresult struct."); + fprintf(stderr, "NULL AMresult struct pointer."); exit(-1); } AMstatus const status = AMresultStatus(result); if (status != AM_STATUS_OK) { - fprintf(stderr, "Unexpected AMstatus enum tag %d.", status); + switch (status) { + case AM_STATUS_ERROR: sprintf(prelude, "Error"); break; + case AM_STATUS_INVALID_RESULT: sprintf(prelude, "Invalid result"); break; + default: sprintf(prelude, "Unknown status code %d", status); + } + fprintf(stderr, "%s; %s.", prelude, AMerrorMessage(result)); + AMfreeResult(result); exit(-2); } AMvalue const value = AMresultValue(result, 0); if (value.tag != value_tag) { - fprintf(stderr, "Unexpected AMvalueVariant enum tag %d.", value.tag); + char const* label = NULL; + switch (value.tag) { + case AM_VALUE_ACTOR_ID: label = "AM_VALUE_ACTOR_ID"; break; + case AM_VALUE_BOOLEAN: label = "AM_VALUE_BOOLEAN"; break; + case AM_VALUE_BYTES: label = "AM_VALUE_BYTES"; break; + case AM_VALUE_CHANGE_HASHES: label = "AM_VALUE_CHANGE_HASHES"; break; + case AM_VALUE_CHANGES: label = 
"AM_VALUE_CHANGES"; break; + case AM_VALUE_COUNTER: label = "AM_VALUE_COUNTER"; break; + case AM_VALUE_F64: label = "AM_VALUE_F64"; break; + case AM_VALUE_INT: label = "AM_VALUE_INT"; break; + case AM_VALUE_VOID: label = "AM_VALUE_VOID"; break; + case AM_VALUE_NULL: label = "AM_VALUE_NULL"; break; + case AM_VALUE_OBJ_ID: label = "AM_VALUE_OBJ_ID"; break; + case AM_VALUE_STR: label = "AM_VALUE_STR"; break; + case AM_VALUE_TIMESTAMP: label = "AM_VALUE_TIMESTAMP"; break; + case AM_VALUE_UINT: label = "AM_VALUE_UINT"; break; + default: label = "unknown"; + } + fprintf(stderr, "Unexpected %s variant (%d).", label, value.tag); + AMfreeResult(result); exit(-3); } return value; @@ -33,22 +60,28 @@ int main(int argc, char** argv) { value = test(card1_result, AM_VALUE_OBJ_ID); AMobjId const* const card1 = value.obj_id; AMresult* result = AMmapPutStr(doc1, card1, "title", "Rewrite everything in Clojure"); + test(result, AM_VALUE_VOID); AMfreeResult(result); result = AMmapPutBool(doc1, card1, "done", false); + test(result, AM_VALUE_VOID); AMfreeResult(result); AMresult* const card2_result = AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP); value = test(card2_result, AM_VALUE_OBJ_ID); AMobjId const* const card2 = value.obj_id; result = AMmapPutStr(doc1, card2, "title", "Rewrite everything in Haskell"); + test(result, AM_VALUE_VOID); AMfreeResult(result); result = AMmapPutBool(doc1, card2, "done", false); + test(result, AM_VALUE_VOID); AMfreeResult(result); AMfreeResult(card2_result); result = AMcommit(doc1, "Add card", NULL); + test(result, AM_VALUE_CHANGE_HASHES); AMfreeResult(result); AMdoc* doc2 = AMalloc(); result = AMmerge(doc2, doc1); + test(result, AM_VALUE_CHANGE_HASHES); AMfreeResult(result); AMfreeDoc(doc2); @@ -57,19 +90,27 @@ int main(int argc, char** argv) { AMbyteSpan binary = value.bytes; doc2 = AMalloc(); result = AMload(doc2, binary.src, binary.count); + test(result, AM_VALUE_UINT); AMfreeResult(result); AMfreeResult(save_result); result = 
AMmapPutBool(doc1, card1, "done", true); + test(result, AM_VALUE_VOID); + AMfreeResult(result); + result = AMcommit(doc1, "Mark card as done", NULL); + test(result, AM_VALUE_CHANGE_HASHES); AMfreeResult(result); - AMcommit(doc1, "Mark card as done", NULL); AMfreeResult(card1_result); result = AMlistDelete(doc2, cards, 0); + test(result, AM_VALUE_VOID); + AMfreeResult(result); + result = AMcommit(doc2, "Delete card", NULL); + test(result, AM_VALUE_CHANGE_HASHES); AMfreeResult(result); - AMcommit(doc2, "Delete card", NULL); result = AMmerge(doc1, doc2); + test(result, AM_VALUE_CHANGE_HASHES); AMfreeResult(result); AMfreeDoc(doc2); From 28a61f2dcd1af2e1d5131762acd9667b7b1a3938 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 5 May 2022 16:51:16 +0100 Subject: [PATCH 348/730] Add tests and fixes for double ended map range iterator --- automerge/src/automerge/tests.rs | 319 ++++++++++++++++++++++++++++ automerge/src/query.rs | 2 +- automerge/src/query/map_range.rs | 16 ++ automerge/src/query/map_range_at.rs | 15 ++ 4 files changed, 351 insertions(+), 1 deletion(-) diff --git a/automerge/src/automerge/tests.rs b/automerge/src/automerge/tests.rs index ff8a554f..430916d8 100644 --- a/automerge/src/automerge/tests.rs +++ b/automerge/src/automerge/tests.rs @@ -476,6 +476,325 @@ fn range_iter_map() { ); } +#[test] +fn map_range_back_and_forth_single() { + let mut doc = AutoCommit::new(); + let actor = doc.get_actor().clone(); + + doc.put(ROOT, "1", "a").unwrap(); + doc.put(ROOT, "2", "b").unwrap(); + doc.put(ROOT, "3", "c").unwrap(); + + let mut range_all = doc.map_range(ROOT, ..); + assert_eq!( + range_all.next(), + Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0))) + ); + assert_eq!( + range_all.next_back(), + Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + range_all.next_back(), + Some(("2", "b".into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); + + let mut 
range_all = doc.map_range(ROOT, ..); + assert_eq!( + range_all.next(), + Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0))) + ); + assert_eq!( + range_all.next_back(), + Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + range_all.next(), + Some(("2", Value::str("b"), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); + + let mut range_all = doc.map_range(ROOT, ..); + assert_eq!( + range_all.next(), + Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0))) + ); + assert_eq!( + range_all.next(), + Some(("2", "b".into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!( + range_all.next(), + Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); + + let mut range_all = doc.map_range(ROOT, ..); + assert_eq!( + range_all.next_back(), + Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + range_all.next_back(), + Some(("2", "b".into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!( + range_all.next_back(), + Some(("1", "a".into(), ExId::Id(1, actor, 0))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); +} + +#[test] +fn map_range_back_and_forth_double() { + let mut doc1 = AutoCommit::new(); + doc1.set_actor(ActorId::from([0])); + + doc1.put(ROOT, "1", "a").unwrap(); + doc1.put(ROOT, "2", "b").unwrap(); + doc1.put(ROOT, "3", "c").unwrap(); + + // actor 2 should win in all conflicts here + let mut doc2 = AutoCommit::new(); + doc1.set_actor(ActorId::from([1])); + let actor2 = doc2.get_actor().clone(); + doc2.put(ROOT, "1", "aa").unwrap(); + doc2.put(ROOT, "2", "bb").unwrap(); + doc2.put(ROOT, "3", "cc").unwrap(); + + doc1.merge(&mut doc2).unwrap(); + + let mut range_all = doc1.map_range(ROOT, ..); + assert_eq!( + range_all.next(), + Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next_back(), 
+ Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next_back(), + Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); + + let mut range_all = doc1.map_range(ROOT, ..); + assert_eq!( + range_all.next(), + Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next_back(), + Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next(), + Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); + + let mut range_all = doc1.map_range(ROOT, ..); + assert_eq!( + range_all.next(), + Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next(), + Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next(), + Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); + + let mut range_all = doc1.map_range(ROOT, ..); + assert_eq!( + range_all.next_back(), + Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next_back(), + Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next_back(), + Some(("1", "aa".into(), ExId::Id(1, actor2, 1))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); +} + +#[test] +fn map_range_at_back_and_forth_single() { + let mut doc = AutoCommit::new(); + let actor = doc.get_actor().clone(); + + doc.put(ROOT, "1", "a").unwrap(); + doc.put(ROOT, "2", "b").unwrap(); + doc.put(ROOT, "3", "c").unwrap(); + + let heads = doc.get_heads(); + + let mut range_all = doc.map_range_at(ROOT, .., &heads); + assert_eq!( + range_all.next(), + Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0))) + ); + assert_eq!( + 
range_all.next_back(), + Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + range_all.next_back(), + Some(("2", "b".into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); + + let mut range_all = doc.map_range_at(ROOT, .., &heads); + assert_eq!( + range_all.next(), + Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0))) + ); + assert_eq!( + range_all.next_back(), + Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + range_all.next(), + Some(("2", Value::str("b"), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); + + let mut range_all = doc.map_range_at(ROOT, .., &heads); + assert_eq!( + range_all.next(), + Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0))) + ); + assert_eq!( + range_all.next(), + Some(("2", "b".into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!( + range_all.next(), + Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); + + let mut range_all = doc.map_range_at(ROOT, .., &heads); + assert_eq!( + range_all.next_back(), + Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) + ); + assert_eq!( + range_all.next_back(), + Some(("2", "b".into(), ExId::Id(2, actor.clone(), 0))) + ); + assert_eq!( + range_all.next_back(), + Some(("1", "a".into(), ExId::Id(1, actor, 0))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); +} + +#[test] +fn map_range_at_back_and_forth_double() { + let mut doc1 = AutoCommit::new(); + doc1.set_actor(ActorId::from([0])); + + doc1.put(ROOT, "1", "a").unwrap(); + doc1.put(ROOT, "2", "b").unwrap(); + doc1.put(ROOT, "3", "c").unwrap(); + + // actor 2 should win in all conflicts here + let mut doc2 = AutoCommit::new(); + doc1.set_actor(ActorId::from([1])); + let actor2 = doc2.get_actor().clone(); + doc2.put(ROOT, "1", 
"aa").unwrap(); + doc2.put(ROOT, "2", "bb").unwrap(); + doc2.put(ROOT, "3", "cc").unwrap(); + + doc1.merge(&mut doc2).unwrap(); + let heads = doc1.get_heads(); + + let mut range_all = doc1.map_range_at(ROOT, .., &heads); + assert_eq!( + range_all.next(), + Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next_back(), + Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next_back(), + Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); + + let mut range_all = doc1.map_range_at(ROOT, .., &heads); + assert_eq!( + range_all.next(), + Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next_back(), + Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next(), + Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); + + let mut range_all = doc1.map_range_at(ROOT, .., &heads); + assert_eq!( + range_all.next(), + Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next(), + Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next(), + Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); + + let mut range_all = doc1.map_range_at(ROOT, .., &heads); + assert_eq!( + range_all.next_back(), + Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next_back(), + Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) + ); + assert_eq!( + range_all.next_back(), + Some(("1", "aa".into(), ExId::Id(1, actor2, 1))) + ); + assert_eq!(range_all.next_back(), None); + assert_eq!(range_all.next(), None); +} + #[test] fn insert_at_index() { let mut doc = AutoCommit::new(); diff 
--git a/automerge/src/query.rs b/automerge/src/query.rs index ab670438..e3d2f372 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -98,7 +98,7 @@ pub(crate) enum QueryResult { #[derive(Clone, Debug, PartialEq)] pub(crate) struct Index { - /// The map of visible elements to the number of operations targetting them. + /// The map of visible keys to the number of visible operations for that key. pub(crate) visible: HashMap, /// Set of opids found in this node and below. pub(crate) ops: HashSet, diff --git a/automerge/src/query/map_range.rs b/automerge/src/query/map_range.rs index 8120dc55..81334ca4 100644 --- a/automerge/src/query/map_range.rs +++ b/automerge/src/query/map_range.rs @@ -74,6 +74,7 @@ impl<'a, R: RangeBounds> DoubleEndedIterator for MapRange<'a, R> { for i in (self.index..self.index_back).rev() { let op = self.root_child.get(i)?; self.index_back -= 1; + if Some(op.key) != self.last_key_back && op.visible() { self.last_key_back = Some(op.key); let prop = match op.key { @@ -85,6 +86,21 @@ impl<'a, R: RangeBounds> DoubleEndedIterator for MapRange<'a, R> { } } } + + // we're now overlapping the index and index_back so try and take the result from the next query + if let Some((prop, a, b)) = self.next_result.take() { + let last_prop = match self.last_key_back { + None => None, + Some(Key::Map(u)) => Some(self.meta.props.get(u).as_str()), + Some(Key::Seq(_)) => None, + }; + + // we can only use this result if we haven't ended in the prop's state (to account for + // conflicts). 
+ if Some(prop) != last_prop { + return Some((prop, a, b)); + } + } None } } diff --git a/automerge/src/query/map_range_at.rs b/automerge/src/query/map_range_at.rs index cbd52b29..84453955 100644 --- a/automerge/src/query/map_range_at.rs +++ b/automerge/src/query/map_range_at.rs @@ -99,6 +99,21 @@ impl<'a, R: RangeBounds> DoubleEndedIterator for MapRangeAt<'a, R> { } } } + + // we're now overlapping the index and index_back so try and take the result from the next query + if let Some((prop, a, b)) = self.next_result.take() { + let last_prop = match self.last_key_back { + None => None, + Some(Key::Map(u)) => Some(self.meta.props.get(u).as_str()), + Some(Key::Seq(_)) => None, + }; + + // we can only use this result if we haven't ended in the prop's state (to account for + // conflicts). + if Some(prop) != last_prop { + return Some((prop, a, b)); + } + } None } } From eb3155e49b7a9aeafc81d96ca5d153fd80a800c1 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Fri, 6 May 2022 04:50:02 -0500 Subject: [PATCH 349/730] Sorted `main()` to the top. Documented `test()`. 
--- automerge-c/examples/quickstart.c | 98 +++++++++++++++++-------------- 1 file changed, 54 insertions(+), 44 deletions(-) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index 8e811853..c8e19ce2 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -3,50 +3,7 @@ #include -AMvalue test(AMresult* result, AMvalueVariant const value_tag) { - static char prelude[64]; - - if (result == NULL) { - fprintf(stderr, "NULL AMresult struct pointer."); - exit(-1); - } - AMstatus const status = AMresultStatus(result); - if (status != AM_STATUS_OK) { - switch (status) { - case AM_STATUS_ERROR: sprintf(prelude, "Error"); break; - case AM_STATUS_INVALID_RESULT: sprintf(prelude, "Invalid result"); break; - default: sprintf(prelude, "Unknown status code %d", status); - } - fprintf(stderr, "%s; %s.", prelude, AMerrorMessage(result)); - AMfreeResult(result); - exit(-2); - } - AMvalue const value = AMresultValue(result, 0); - if (value.tag != value_tag) { - char const* label = NULL; - switch (value.tag) { - case AM_VALUE_ACTOR_ID: label = "AM_VALUE_ACTOR_ID"; break; - case AM_VALUE_BOOLEAN: label = "AM_VALUE_BOOLEAN"; break; - case AM_VALUE_BYTES: label = "AM_VALUE_BYTES"; break; - case AM_VALUE_CHANGE_HASHES: label = "AM_VALUE_CHANGE_HASHES"; break; - case AM_VALUE_CHANGES: label = "AM_VALUE_CHANGES"; break; - case AM_VALUE_COUNTER: label = "AM_VALUE_COUNTER"; break; - case AM_VALUE_F64: label = "AM_VALUE_F64"; break; - case AM_VALUE_INT: label = "AM_VALUE_INT"; break; - case AM_VALUE_VOID: label = "AM_VALUE_VOID"; break; - case AM_VALUE_NULL: label = "AM_VALUE_NULL"; break; - case AM_VALUE_OBJ_ID: label = "AM_VALUE_OBJ_ID"; break; - case AM_VALUE_STR: label = "AM_VALUE_STR"; break; - case AM_VALUE_TIMESTAMP: label = "AM_VALUE_TIMESTAMP"; break; - case AM_VALUE_UINT: label = "AM_VALUE_UINT"; break; - default: label = "unknown"; - } - fprintf(stderr, "Unexpected %s variant (%d).", label, value.tag); - 
AMfreeResult(result); - exit(-3); - } - return value; -} +AMvalue test(AMresult*, AMvalueVariant const); /* * Based on https://automerge.github.io/docs/quickstart @@ -125,3 +82,56 @@ int main(int argc, char** argv) { AMfreeResult(cards_result); AMfreeDoc(doc1); } + +/// \brief Extracts an `AMvalue` struct with discriminant \p value_tag +/// from \p result or writes a message to `stderr`, frees \p result +/// and terminates the program. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \param[in] value_tag An `AMvalue` struct discriminant. +/// \return An `AMvalue` struct. +/// \pre \p result must be a valid address. +AMvalue test(AMresult* result, AMvalueVariant const value_tag) { + static char prelude[64]; + + if (result == NULL) { + fprintf(stderr, "NULL `AMresult` struct pointer."); + exit(EXIT_FAILURE); + } + AMstatus const status = AMresultStatus(result); + if (status != AM_STATUS_OK) { + switch (status) { + case AM_STATUS_ERROR: sprintf(prelude, "Error"); break; + case AM_STATUS_INVALID_RESULT: sprintf(prelude, "Invalid result"); break; + default: sprintf(prelude, "Unknown `AMstatus` tag %d", status); + } + fprintf(stderr, "%s; %s.", prelude, AMerrorMessage(result)); + AMfreeResult(result); + exit(EXIT_FAILURE); + } + AMvalue const value = AMresultValue(result, 0); + if (value.tag != value_tag) { + char const* label = NULL; + switch (value.tag) { + case AM_VALUE_ACTOR_ID: label = "AM_VALUE_ACTOR_ID"; break; + case AM_VALUE_BOOLEAN: label = "AM_VALUE_BOOLEAN"; break; + case AM_VALUE_BYTES: label = "AM_VALUE_BYTES"; break; + case AM_VALUE_CHANGE_HASHES: label = "AM_VALUE_CHANGE_HASHES"; break; + case AM_VALUE_CHANGES: label = "AM_VALUE_CHANGES"; break; + case AM_VALUE_COUNTER: label = "AM_VALUE_COUNTER"; break; + case AM_VALUE_F64: label = "AM_VALUE_F64"; break; + case AM_VALUE_INT: label = "AM_VALUE_INT"; break; + case AM_VALUE_VOID: label = "AM_VALUE_VOID"; break; + case AM_VALUE_NULL: label = "AM_VALUE_NULL"; break; + case AM_VALUE_OBJ_ID: 
label = "AM_VALUE_OBJ_ID"; break; + case AM_VALUE_STR: label = "AM_VALUE_STR"; break; + case AM_VALUE_TIMESTAMP: label = "AM_VALUE_TIMESTAMP"; break; + case AM_VALUE_UINT: label = "AM_VALUE_UINT"; break; + default: label = ""; + } + fprintf(stderr, "Unexpected `AMvalueVariant` tag `%s` (%d).", label, value.tag); + AMfreeResult(result); + exit(EXIT_FAILURE); + } + return value; +} From bb3d75604ad21bb97ef10ce0756eecc8df3d0fa8 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Fri, 6 May 2022 04:51:44 -0500 Subject: [PATCH 350/730] Improved the documentation slightly. --- automerge-c/test/macro_utils.h | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/automerge-c/test/macro_utils.h b/automerge-c/test/macro_utils.h index 5a74c562..334557eb 100644 --- a/automerge-c/test/macro_utils.h +++ b/automerge-c/test/macro_utils.h @@ -5,15 +5,16 @@ #include "automerge.h" /** - * \brief Gets the `AMvalue` discriminant corresponding to a function name suffix. + * \brief Gets the `AMvalue` struct discriminant corresponding to a function + * name suffix. * * \param[in] suffix A string. - * \return An `AMvalue` variant discriminant enum tag. + * \return An `AMvalue` struct discriminant. */ AMvalueVariant AMvalue_discriminant(char const* suffix); /** - * \brief Gets the `AMobjType` tag corresponding to a object type label. + * \brief Gets the `AMobjType` enum tag corresponding to an object type label. * * \param[in] obj_type_label A string. * \return An `AMobjType` enum tag. From b56464c2e78da1cfa500b65ad1f55cd654addb9c Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Fri, 6 May 2022 04:59:47 -0500 Subject: [PATCH 351/730] Switched to C comment delimiting. 
--- automerge-c/examples/quickstart.c | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index c8e19ce2..4abfd0b9 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -83,14 +83,16 @@ int main(int argc, char** argv) { AMfreeDoc(doc1); } -/// \brief Extracts an `AMvalue` struct with discriminant \p value_tag -/// from \p result or writes a message to `stderr`, frees \p result -/// and terminates the program. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \param[in] value_tag An `AMvalue` struct discriminant. -/// \return An `AMvalue` struct. -/// \pre \p result must be a valid address. +/** + * \brief Extracts an `AMvalue` struct with discriminant \p value_tag + * from \p result or writes a message to `stderr`, frees \p result + * and terminates the program. + * +.* \param[in] result A pointer to an `AMresult` struct. + * \param[in] value_tag An `AMvalue` struct discriminant. + * \return An `AMvalue` struct. + * \pre \p result must be a valid address. + */ AMvalue test(AMresult* result, AMvalueVariant const value_tag) { static char prelude[64]; From fdd3880bd3c8b8b4b81742173b6c0c3cbe2662c5 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 7 May 2022 09:55:05 -0500 Subject: [PATCH 352/730] Renamed `AMalloc()` to `AMcreate()`. Renamed `AMload()` to `AMloadIncremental()`. Added the `AMload()` function. 
--- automerge-c/examples/quickstart.c | 27 +++++++++++++++++----- automerge-c/src/lib.rs | 38 +++++++++++++++++++++++++++---- automerge-c/test/group_state.c | 2 +- 3 files changed, 56 insertions(+), 11 deletions(-) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index 4abfd0b9..43915c7a 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -9,7 +9,11 @@ AMvalue test(AMresult*, AMvalueVariant const); * Based on https://automerge.github.io/docs/quickstart */ int main(int argc, char** argv) { - AMdoc* const doc1 = AMalloc(); + AMdoc* const doc1 = AMcreate(); + if (doc1 == NULL) { + fprintf(stderr, "`AMcreate()` failure."); + exit(EXIT_FAILURE); + } AMresult* const cards_result = AMmapPutObject(doc1, AM_ROOT, "cards", AM_OBJ_TYPE_LIST); AMvalue value = test(cards_result, AM_VALUE_OBJ_ID); AMobjId const* const cards = value.obj_id; @@ -36,7 +40,14 @@ int main(int argc, char** argv) { test(result, AM_VALUE_CHANGE_HASHES); AMfreeResult(result); - AMdoc* doc2 = AMalloc(); + AMdoc* doc2 = AMcreate(); + if (doc2 == NULL) { + fprintf(stderr, "`AMcreate()` failure."); + AMfreeResult(card1_result); + AMfreeResult(cards_result); + AMfreeDoc(doc1); + exit(EXIT_FAILURE); + } result = AMmerge(doc2, doc1); test(result, AM_VALUE_CHANGE_HASHES); AMfreeResult(result); @@ -45,11 +56,15 @@ int main(int argc, char** argv) { AMresult* const save_result = AMsave(doc1); value = test(save_result, AM_VALUE_BYTES); AMbyteSpan binary = value.bytes; - doc2 = AMalloc(); - result = AMload(doc2, binary.src, binary.count); - test(result, AM_VALUE_UINT); - AMfreeResult(result); + doc2 = AMload(binary.src, binary.count); AMfreeResult(save_result); + if (doc2 == NULL) { + fprintf(stderr, "`AMload()` failure."); + AMfreeResult(card1_result); + AMfreeResult(cards_result); + AMfreeDoc(doc1); + exit(EXIT_FAILURE); + } result = AMmapPutBool(doc1, card1, "done", true); test(result, AM_VALUE_VOID); diff --git a/automerge-c/src/lib.rs 
b/automerge-c/src/lib.rs index e441fac6..7bdcae23 100644 --- a/automerge-c/src/lib.rs +++ b/automerge-c/src/lib.rs @@ -88,7 +88,7 @@ fn to_result>(r: R) -> *mut AMresult { /// \warning To avoid a memory leak, the returned pointer must be deallocated /// with `AMfreeDoc()`. #[no_mangle] -pub extern "C" fn AMalloc() -> *mut AMdoc { +pub extern "C" fn AMcreate() -> *mut AMdoc { AMdoc::new(am::AutoCommit::new()).into() } @@ -148,7 +148,7 @@ pub unsafe extern "C" fn AMdup(doc: *mut AMdoc) -> *mut AMdoc { /// \memberof AMdoc /// \brief Deallocates the storage for an `AMdoc` struct previously -/// allocated by `AMalloc()` or `AMdup()`. +/// allocated by `AMcreate()`, `AMdup()` or `AMload()`. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \pre \p doc must be a valid address. @@ -165,7 +165,33 @@ pub unsafe extern "C" fn AMfreeDoc(doc: *mut AMdoc) { } /// \memberof AMdoc -/// \brief Loads the compact form of an incremental save of an `AMdoc` struct +/// \brief Allocates storage for an `AMdoc` struct and initializes it with the +/// compact form of an incremental save pointed to by \p src. +/// +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The number of bytes in \p src to load. +/// \return A pointer to an `AMdoc` struct. +/// \pre \p src must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p src. +/// \warning To avoid a memory leak, the returned pointer must be deallocated +/// with `AMfreeDoc()`. +/// \internal +/// +/// # Safety +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMdoc { + let mut data = Vec::new(); + data.extend_from_slice(std::slice::from_raw_parts(src, count)); + if let Ok(auto_commit) = am::AutoCommit::load(&data) { + AMdoc::new(auto_commit).into() + } else { + std::ptr::null_mut::() + } +} + +/// \memberof AMdoc +/// \brief Loads the compact form of an incremental save pointed to by \p src /// into \p doc. 
/// /// \param[in] doc A pointer to an `AMdoc` struct. @@ -184,7 +210,11 @@ pub unsafe extern "C" fn AMfreeDoc(doc: *mut AMdoc) { /// doc must be a pointer to a valid AMdoc /// src must be a byte array of length `>= count` #[no_mangle] -pub unsafe extern "C" fn AMload(doc: *mut AMdoc, src: *const u8, count: usize) -> *mut AMresult { +pub unsafe extern "C" fn AMloadIncremental( + doc: *mut AMdoc, + src: *const u8, + count: usize, +) -> *mut AMresult { let doc = to_doc!(doc); let mut data = Vec::new(); data.extend_from_slice(std::slice::from_raw_parts(src, count)); diff --git a/automerge-c/test/group_state.c b/automerge-c/test/group_state.c index d59b6b7f..66676ef0 100644 --- a/automerge-c/test/group_state.c +++ b/automerge-c/test/group_state.c @@ -5,7 +5,7 @@ int group_setup(void** state) { GroupState* group_state = calloc(1, sizeof(GroupState)); - group_state->doc = AMalloc(); + group_state->doc = AMcreate(); *state = group_state; return 0; } From 004d1a0cf2fbc24218123f66ee7e3ed82bf2a5b1 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 9 May 2022 22:53:39 +0100 Subject: [PATCH 353/730] Update CI toolchain to 1.60 --- .github/workflows/ci.yaml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2ddae3e0..3039687d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -14,7 +14,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.57.0 + toolchain: 1.60.0 default: true components: rustfmt - uses: Swatinem/rust-cache@v1 @@ -28,7 +28,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.57.0 + toolchain: 1.60.0 default: true components: clippy - uses: Swatinem/rust-cache@v1 @@ -42,7 +42,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.57.0 + toolchain: 1.60.0 default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/docs @@ -87,7 +87,7 @@ jobs: - uses: 
actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.57.0 + toolchain: 1.60.0 default: true - uses: Swatinem/rust-cache@v1 - name: Install CMocka @@ -105,7 +105,7 @@ jobs: strategy: matrix: toolchain: - - 1.57.0 + - 1.60.0 - nightly continue-on-error: ${{ matrix.toolchain == 'nightly' }} steps: @@ -126,7 +126,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.57.0 + toolchain: 1.60.0 default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test @@ -139,7 +139,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.57.0 + toolchain: 1.60.0 default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test From 4e431c00a182a5cdeee0737d9b14a93d9a902b7e Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sat, 7 May 2022 21:39:17 +0100 Subject: [PATCH 354/730] Implement OpTreeIter::nth correctly The previous implementation of nth was incorrect, it returned the nth element of the optree but it did not modify the internal state of the iterator such that future calls to `next()` were after the nth element. This commit fixes that. 
Signed-off-by: Alex Good --- automerge/src/op_tree/iter.rs | 314 ++++++++++++++++++++++++++++------ 1 file changed, 261 insertions(+), 53 deletions(-) diff --git a/automerge/src/op_tree/iter.rs b/automerge/src/op_tree/iter.rs index 48406444..8a24a0a6 100644 --- a/automerge/src/op_tree/iter.rs +++ b/automerge/src/op_tree/iter.rs @@ -1,64 +1,108 @@ +use std::cmp::Ordering; + use crate::types::Op; use super::{OpTreeInternal, OpTreeNode}; #[derive(Clone)] -pub(crate) enum OpTreeIter<'a> { - Empty, - NonEmpty { - // A stack of (OpTreeNode, index) where `index` is the index in the elements of the optree node - // at which we descended into a child - ancestors: Vec<(&'a OpTreeNode, usize)>, - current: &'a OpTreeNode, - index: usize, - tree: &'a OpTreeInternal, - }, -} +pub(crate) struct OpTreeIter<'a>(Inner<'a>); impl<'a> OpTreeIter<'a> { pub(crate) fn new(tree: &'a OpTreeInternal) -> OpTreeIter<'a> { - tree.root_node - .as_ref() - .map(|root| OpTreeIter::NonEmpty { - // This is a guess at the average depth of an OpTree - ancestors: Vec::with_capacity(6), - current: root, - index: 0, - tree, - }) - .unwrap_or(OpTreeIter::Empty) + Self( + tree.root_node + .as_ref() + .map(|root| Inner::NonEmpty { + // This is a guess at the average depth of an OpTree + ancestors: Vec::with_capacity(6), + current: NodeIter { + node: root, + index: 0, + }, + cumulative_index: 0, + root_node: root, + }) + .unwrap_or(Inner::Empty), + ) } } impl<'a> Iterator for OpTreeIter<'a> { type Item = &'a Op; + fn next(&mut self) -> Option { + self.0.next() + } + + fn nth(&mut self, n: usize) -> Option { + self.0.nth(n) + } +} + +#[derive(Clone)] +enum Inner<'a> { + Empty, + NonEmpty { + // A stack of nodes in the optree which we have descended in to to get to the current + // element. 
+ ancestors: Vec>, + current: NodeIter<'a>, + // How far through the whole optree we are + cumulative_index: usize, + root_node: &'a OpTreeNode, + }, +} + +/// A node in the op tree which we are iterating over +#[derive(Clone)] +struct NodeIter<'a> { + /// The node itself + node: &'a OpTreeNode, + /// The index of the next element we will pull from the node. This means something different + /// depending on whether the node is a leaf node or not. If the node is a leaf node then this + /// index is the index in `node.elements` which will be returned on the next call to `next()`. + /// If the node is not an internal node then this index is the index of `children` which we are + /// currently iterating as well as being the index of the next element of `elements` which we + /// will return once we have finished iterating over the child node. + index: usize, +} + +impl<'a> Iterator for Inner<'a> { + type Item = &'a Op; + fn next(&mut self) -> Option { match self { - OpTreeIter::Empty => None, - OpTreeIter::NonEmpty { + Inner::Empty => None, + Inner::NonEmpty { ancestors, current, - index, + cumulative_index, .. 
} => { - if current.is_leaf() { + if current.node.is_leaf() { // If we're in a leaf node and we haven't exhausted it yet we just return the elements // of the leaf node - if *index < current.len() { - let result = ¤t.elements[*index]; - *index += 1; + if current.index < current.node.len() { + let result = ¤t.node.elements[current.index]; + current.index += 1; + *cumulative_index += 1; Some(result) } else { // We've exhausted the leaf node, we must find the nearest non-exhausted parent (lol) - let (parent, parent_index) = loop { - if let Some((parent, parent_index)) = ancestors.pop() { + let node_iter = loop { + if let Some( + node_iter @ NodeIter { + node: parent, + index: parent_index, + }, + ) = ancestors.pop() + { // We've exhausted this parent if parent_index >= parent.elements.len() { continue; } else { // This parent still has elements to process, let's use it! - break (parent, parent_index); + break node_iter; } } else { // No parents left, we're done @@ -68,23 +112,27 @@ impl<'a> Iterator for OpTreeIter<'a> { // if we've finished the elements in a leaf node and there's a parent node then we // return the element from the parent node which is one after the index at which we // descended into the child - *index = parent_index + 1; - *current = parent; - let result = ¤t.elements[parent_index]; + *current = node_iter; + let result = ¤t.node.elements[current.index]; + current.index += 1; + *cumulative_index += 1; Some(result) } } else { // If we're in a non-leaf node then the last iteration returned an element from the // current nodes `elements`, so we must now descend into a leaf child - ancestors.push((current, *index)); + ancestors.push(current.clone()); loop { - let child = ¤t.children[*index]; - *index = 0; + let child = ¤t.node.children[current.index]; + current.index = 0; if !child.is_leaf() { - ancestors.push((child, 0)); - *current = child + ancestors.push(NodeIter { + node: child, + index: 0, + }); + current.node = child } else { - *current = child; + 
current.node = child; break; } } @@ -97,7 +145,61 @@ impl<'a> Iterator for OpTreeIter<'a> { fn nth(&mut self, n: usize) -> Option { match self { Self::Empty => None, - Self::NonEmpty { tree, .. } => tree.get(n), + Self::NonEmpty { + root_node, + cumulative_index, + current, + ancestors, + .. + } => { + // Make sure that we don't rewind when calling nth more than once + if n < *cumulative_index { + None + } else if n >= root_node.len() { + *cumulative_index = root_node.len() - 1; + None + } else { + // rather than trying to go back up through the ancestors to find the right + // node we just start at the root. + *current = NodeIter { + node: root_node, + index: n, + }; + *cumulative_index = 0; + ancestors.clear(); + while !current.node.is_leaf() { + for (child_index, child) in current.node.children.iter().enumerate() { + match (*cumulative_index + child.len()).cmp(&n) { + Ordering::Less => { + *cumulative_index += child.len() + 1; + current.index = child_index + 1; + } + Ordering::Equal => { + *cumulative_index += child.len() + 1; + current.index = child_index + 1; + return Some(¤t.node.elements[child_index]); + } + Ordering::Greater => { + current.index = child_index; + let old = std::mem::replace( + current, + NodeIter { + node: child, + index: 0, + }, + ); + ancestors.push(old); + break; + } + } + } + } + // we're in a leaf node and we kept track of the cumulative index as we went, + let index_in_this_node = n.saturating_sub(*cumulative_index); + current.index = index_in_this_node + 1; + Some(¤t.node.elements[index_in_this_node]) + } + } } } } @@ -108,12 +210,51 @@ mod tests { use crate::types::{Key, Op, OpId, OpType, ScalarValue}; use proptest::prelude::*; - #[derive(Debug, Clone)] + #[derive(Clone)] enum Action { Insert(usize, Op), Delete(usize), } + impl std::fmt::Debug for Action { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Insert(index, ..) 
=> write!(f, "Insert({})", index), + Self::Delete(index) => write!(f, "Delete({})", index), + } + } + } + + // A struct which impls Debug by only printing the counters of the IDs of the ops it wraps. + // This is useful because the only difference between the ops that we generate is the counter + // of their IDs. Wrapping a Vec in DebugOps will result in output from assert! etc. which + // only shows the counters. For example, the output of a failing assert_eq! like this + // + // assert_eq!(DebugOps(&ops1), DebugOps(&ops2)) + // + // Might look like this + // + // left: `[0,1,2,3] + // right: `[0,1,2,3,4] + // + // i.e. all the other details of the ops are elided + #[derive(PartialEq)] + struct DebugOps<'a>(&'a [Op]); + + impl<'a> std::fmt::Debug for DebugOps<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "[")?; + for (index, op) in self.0.iter().enumerate() { + if index < self.0.len() - 1 { + write!(f, "{},", op.id.counter())?; + } else { + write!(f, "{}]", op.id.counter())? + } + } + Ok(()) + } + } + fn op(counter: u64) -> Op { Op { action: OpType::Put(ScalarValue::Uint(counter)), @@ -130,12 +271,21 @@ mod tests { /// apply it to the model and record the action we took. In the property test we replay the /// same actions against an `OpTree` and check that the iterator returns the same result as the /// `model`. 
- #[derive(Debug, Clone)] + #[derive(Clone)] struct Model { actions: Vec, model: Vec, } + impl std::fmt::Debug for Model { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Model") + .field("actions", &self.actions) + .field("model", &DebugOps(&self.model)) + .finish() + } + } + impl Model { fn insert(&self, index: usize, next_op_counter: u64) -> Self { let mut actions = self.actions.clone(); @@ -185,7 +335,7 @@ mod tests { } } - fn scenario() -> impl Strategy { + fn model() -> impl Strategy { (0_u64..150).prop_flat_map(|num_steps| { let mut strat = Just(( 0, @@ -212,19 +362,77 @@ mod tests { }) } - proptest! { - #[test] - fn optree_iter_proptest(Model{actions, model} in scenario()) { - let mut optree = OpTreeInternal::new(); - for action in actions { - match action { - Action::Insert(index, op) => optree.insert(index, op), - Action::Delete(index) => { optree.remove(index); }, + fn make_optree(actions: &[Action]) -> super::OpTreeInternal { + let mut optree = OpTreeInternal::new(); + for action in actions { + match action { + Action::Insert(index, op) => optree.insert(*index, op.clone()), + Action::Delete(index) => { + optree.remove(*index); } } + } + optree + } + + /// A model for calls to `nth`. 
`NthModel::n` is guarnateed to be in `(0..model.len())` + #[derive(Clone)] + struct NthModel { + model: Vec, + actions: Vec, + n: usize, + } + + impl std::fmt::Debug for NthModel { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Model") + .field("actions", &self.actions) + .field("model", &DebugOps(&self.model)) + .field("n", &self.n) + .finish() + } + } + + fn nth_model() -> impl Strategy { + model().prop_flat_map(|model| { + if model.model.is_empty() { + Just(NthModel { + model: model.model, + actions: model.actions, + n: 0, + }) + .boxed() + } else { + (0..model.model.len(), Just(model)) + .prop_map(|(index, model)| NthModel { + model: model.model, + actions: model.actions, + n: index, + }) + .boxed() + } + }) + } + + proptest! { + #[test] + fn optree_iter_proptest(model in model()) { + let optree = make_optree(&model.actions); let iter = super::OpTreeIter::new(&optree); let iterated = iter.cloned().collect::>(); - assert_eq!(model, iterated) + assert_eq!(DebugOps(&model.model), DebugOps(&iterated)) + } + + #[test] + fn optree_iter_nth(model in nth_model()) { + let optree = make_optree(&model.actions); + let mut iter = super::OpTreeIter::new(&optree); + let mut model_iter = model.model.iter(); + assert_eq!(model_iter.nth(model.n), iter.nth(model.n)); + + let tail = iter.cloned().collect::>(); + let expected_tail = model_iter.cloned().collect::>(); + assert_eq!(DebugOps(tail.as_slice()), DebugOps(expected_tail.as_slice())); } } } From 8f71ac30a486914301af08279c353db1f99bc783 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 11 May 2022 20:25:17 +0100 Subject: [PATCH 355/730] Add index info to op_tree panic message --- automerge/src/op_tree.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index ea77c311..4df22939 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -333,7 +333,7 @@ impl OpTreeNode { cumulative_len += 
child.len() + 1; } } - panic!("index not found in node") + panic!("index {} not found in node with len {}", index, self.len()) } fn insert_into_non_full_node(&mut self, index: usize, element: Op) { From f373deba6b99186b249705afa22db3d950700184 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 11 May 2022 21:15:50 +0100 Subject: [PATCH 356/730] Add length assertion --- automerge/src/op_tree.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 4df22939..c338c145 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -142,6 +142,13 @@ impl OpTreeInternal { /// /// Panics if `index > len`. pub(crate) fn insert(&mut self, index: usize, element: Op) { + assert!( + index <= self.len(), + "tried to insert at {} but len is {}", + index, + self.len() + ); + let old_len = self.len(); if let Some(root) = self.root_node.as_mut() { #[cfg(debug_assertions)] From aa5a03a0c41404a57487590b615509b0c7d86403 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sun, 15 May 2022 11:53:04 -0400 Subject: [PATCH 357/730] webpack example config --- examples/webpack/.gitignore | 4 ++++ examples/webpack/dist/index.html | 10 +++++++++ examples/webpack/package.json | 18 +++++++++++++++ examples/webpack/src/index.js | 20 +++++++++++++++++ examples/webpack/webpack.config.js | 35 ++++++++++++++++++++++++++++++ 5 files changed, 87 insertions(+) create mode 100644 examples/webpack/.gitignore create mode 100644 examples/webpack/dist/index.html create mode 100644 examples/webpack/package.json create mode 100644 examples/webpack/src/index.js create mode 100644 examples/webpack/webpack.config.js diff --git a/examples/webpack/.gitignore b/examples/webpack/.gitignore new file mode 100644 index 00000000..92cad834 --- /dev/null +++ b/examples/webpack/.gitignore @@ -0,0 +1,4 @@ +yarn.lock +node_modules +dist/*.wasm +dist/*.js diff --git a/examples/webpack/dist/index.html b/examples/webpack/dist/index.html new file mode 100644 
index 00000000..7727bff4 --- /dev/null +++ b/examples/webpack/dist/index.html @@ -0,0 +1,10 @@ + + + + + Simple Webpack for automerge-wasm + + + + + diff --git a/examples/webpack/package.json b/examples/webpack/package.json new file mode 100644 index 00000000..b0274d0b --- /dev/null +++ b/examples/webpack/package.json @@ -0,0 +1,18 @@ +{ + "name": "webpack-automerge-example", + "version": "0.1.0", + "description": "", + "private": true, + "scripts": { + "build": "webpack" + }, + "author": "", + "dependencies": { + "automerge-wasm": "^0.1.2" + }, + "devDependencies": { + "webpack": "^5.72.1", + "webpack-cli": "^4.9.2", + "webpack-node-externals": "^3.0.0" + } +} diff --git a/examples/webpack/src/index.js b/examples/webpack/src/index.js new file mode 100644 index 00000000..8394af50 --- /dev/null +++ b/examples/webpack/src/index.js @@ -0,0 +1,20 @@ +import init, { create } from "automerge-wasm" + +// hello world code that will run correctly on web or node + +init().then(_ => { + const doc = create() + doc.put("/", "hello", "world") + const result = doc.materialize("/") + + if (typeof document !== 'undefined') { + // browser + const element = document.createElement('div'); + element.innerHTML = JSON.stringify(result) + document.body.appendChild(element); + } else { + // server + console.log("node:", result) + } +}) + diff --git a/examples/webpack/webpack.config.js b/examples/webpack/webpack.config.js new file mode 100644 index 00000000..3d4bc98e --- /dev/null +++ b/examples/webpack/webpack.config.js @@ -0,0 +1,35 @@ +const path = require('path'); +const nodeExternals = require('webpack-node-externals'); + +// the most basic webpack config for node or web targets for automerge-wasm + +const serverConfig = { + // basic setup for bundling a node package + target: 'node', + externals: [nodeExternals()], + externalsPresets: { node: true }, + + entry: './src/index.js', + output: { + filename: 'server.js', + path: path.resolve(__dirname, 'dist'), + }, + mode: "development", 
// or production +}; + +const clientConfig = { + target: 'web', + entry: './src/index.js', + output: { + filename: 'browser.js', + path: path.resolve(__dirname, 'dist'), + }, + mode: "development", // or production + performance: { // we dont want the wasm blob to generate warnings + hints: false, + maxEntrypointSize: 512000, + maxAssetSize: 512000 + } +}; + +module.exports = [serverConfig, clientConfig]; From d01e7ceb0ee1a1f85eb300b716f7c3700848d891 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sun, 15 May 2022 11:53:55 -0400 Subject: [PATCH 358/730] add webpack example and move into wasm folder --- .../examples}/cra/.gitignore | 0 {examples => automerge-wasm/examples}/cra/README.md | 0 .../examples}/cra/package.json | 0 .../examples}/cra/public/favicon.ico | Bin .../examples}/cra/public/index.html | 0 .../examples}/cra/public/logo192.png | Bin .../examples}/cra/public/logo512.png | Bin .../examples}/cra/public/manifest.json | 0 .../examples}/cra/public/robots.txt | 0 .../examples}/cra/src/App.css | 0 .../examples}/cra/src/App.test.tsx | 0 .../examples}/cra/src/App.tsx | 0 .../examples}/cra/src/index.css | 0 .../examples}/cra/src/index.tsx | 0 .../examples}/cra/src/logo.svg | 0 .../examples}/cra/src/react-app-env.d.ts | 0 .../examples}/cra/src/reportWebVitals.ts | 0 .../examples}/cra/src/setupTests.ts | 0 .../examples}/cra/tsconfig.json | 0 .../examples}/webpack/.gitignore | 0 .../examples}/webpack/dist/index.html | 0 .../examples}/webpack/package.json | 0 .../examples}/webpack/src/index.js | 0 .../examples}/webpack/webpack.config.js | 0 24 files changed, 0 insertions(+), 0 deletions(-) rename {examples => automerge-wasm/examples}/cra/.gitignore (100%) rename {examples => automerge-wasm/examples}/cra/README.md (100%) rename {examples => automerge-wasm/examples}/cra/package.json (100%) rename {examples => automerge-wasm/examples}/cra/public/favicon.ico (100%) rename {examples => automerge-wasm/examples}/cra/public/index.html (100%) rename {examples => 
automerge-wasm/examples}/cra/public/logo192.png (100%) rename {examples => automerge-wasm/examples}/cra/public/logo512.png (100%) rename {examples => automerge-wasm/examples}/cra/public/manifest.json (100%) rename {examples => automerge-wasm/examples}/cra/public/robots.txt (100%) rename {examples => automerge-wasm/examples}/cra/src/App.css (100%) rename {examples => automerge-wasm/examples}/cra/src/App.test.tsx (100%) rename {examples => automerge-wasm/examples}/cra/src/App.tsx (100%) rename {examples => automerge-wasm/examples}/cra/src/index.css (100%) rename {examples => automerge-wasm/examples}/cra/src/index.tsx (100%) rename {examples => automerge-wasm/examples}/cra/src/logo.svg (100%) rename {examples => automerge-wasm/examples}/cra/src/react-app-env.d.ts (100%) rename {examples => automerge-wasm/examples}/cra/src/reportWebVitals.ts (100%) rename {examples => automerge-wasm/examples}/cra/src/setupTests.ts (100%) rename {examples => automerge-wasm/examples}/cra/tsconfig.json (100%) rename {examples => automerge-wasm/examples}/webpack/.gitignore (100%) rename {examples => automerge-wasm/examples}/webpack/dist/index.html (100%) rename {examples => automerge-wasm/examples}/webpack/package.json (100%) rename {examples => automerge-wasm/examples}/webpack/src/index.js (100%) rename {examples => automerge-wasm/examples}/webpack/webpack.config.js (100%) diff --git a/examples/cra/.gitignore b/automerge-wasm/examples/cra/.gitignore similarity index 100% rename from examples/cra/.gitignore rename to automerge-wasm/examples/cra/.gitignore diff --git a/examples/cra/README.md b/automerge-wasm/examples/cra/README.md similarity index 100% rename from examples/cra/README.md rename to automerge-wasm/examples/cra/README.md diff --git a/examples/cra/package.json b/automerge-wasm/examples/cra/package.json similarity index 100% rename from examples/cra/package.json rename to automerge-wasm/examples/cra/package.json diff --git a/examples/cra/public/favicon.ico 
b/automerge-wasm/examples/cra/public/favicon.ico similarity index 100% rename from examples/cra/public/favicon.ico rename to automerge-wasm/examples/cra/public/favicon.ico diff --git a/examples/cra/public/index.html b/automerge-wasm/examples/cra/public/index.html similarity index 100% rename from examples/cra/public/index.html rename to automerge-wasm/examples/cra/public/index.html diff --git a/examples/cra/public/logo192.png b/automerge-wasm/examples/cra/public/logo192.png similarity index 100% rename from examples/cra/public/logo192.png rename to automerge-wasm/examples/cra/public/logo192.png diff --git a/examples/cra/public/logo512.png b/automerge-wasm/examples/cra/public/logo512.png similarity index 100% rename from examples/cra/public/logo512.png rename to automerge-wasm/examples/cra/public/logo512.png diff --git a/examples/cra/public/manifest.json b/automerge-wasm/examples/cra/public/manifest.json similarity index 100% rename from examples/cra/public/manifest.json rename to automerge-wasm/examples/cra/public/manifest.json diff --git a/examples/cra/public/robots.txt b/automerge-wasm/examples/cra/public/robots.txt similarity index 100% rename from examples/cra/public/robots.txt rename to automerge-wasm/examples/cra/public/robots.txt diff --git a/examples/cra/src/App.css b/automerge-wasm/examples/cra/src/App.css similarity index 100% rename from examples/cra/src/App.css rename to automerge-wasm/examples/cra/src/App.css diff --git a/examples/cra/src/App.test.tsx b/automerge-wasm/examples/cra/src/App.test.tsx similarity index 100% rename from examples/cra/src/App.test.tsx rename to automerge-wasm/examples/cra/src/App.test.tsx diff --git a/examples/cra/src/App.tsx b/automerge-wasm/examples/cra/src/App.tsx similarity index 100% rename from examples/cra/src/App.tsx rename to automerge-wasm/examples/cra/src/App.tsx diff --git a/examples/cra/src/index.css b/automerge-wasm/examples/cra/src/index.css similarity index 100% rename from examples/cra/src/index.css rename to 
automerge-wasm/examples/cra/src/index.css diff --git a/examples/cra/src/index.tsx b/automerge-wasm/examples/cra/src/index.tsx similarity index 100% rename from examples/cra/src/index.tsx rename to automerge-wasm/examples/cra/src/index.tsx diff --git a/examples/cra/src/logo.svg b/automerge-wasm/examples/cra/src/logo.svg similarity index 100% rename from examples/cra/src/logo.svg rename to automerge-wasm/examples/cra/src/logo.svg diff --git a/examples/cra/src/react-app-env.d.ts b/automerge-wasm/examples/cra/src/react-app-env.d.ts similarity index 100% rename from examples/cra/src/react-app-env.d.ts rename to automerge-wasm/examples/cra/src/react-app-env.d.ts diff --git a/examples/cra/src/reportWebVitals.ts b/automerge-wasm/examples/cra/src/reportWebVitals.ts similarity index 100% rename from examples/cra/src/reportWebVitals.ts rename to automerge-wasm/examples/cra/src/reportWebVitals.ts diff --git a/examples/cra/src/setupTests.ts b/automerge-wasm/examples/cra/src/setupTests.ts similarity index 100% rename from examples/cra/src/setupTests.ts rename to automerge-wasm/examples/cra/src/setupTests.ts diff --git a/examples/cra/tsconfig.json b/automerge-wasm/examples/cra/tsconfig.json similarity index 100% rename from examples/cra/tsconfig.json rename to automerge-wasm/examples/cra/tsconfig.json diff --git a/examples/webpack/.gitignore b/automerge-wasm/examples/webpack/.gitignore similarity index 100% rename from examples/webpack/.gitignore rename to automerge-wasm/examples/webpack/.gitignore diff --git a/examples/webpack/dist/index.html b/automerge-wasm/examples/webpack/dist/index.html similarity index 100% rename from examples/webpack/dist/index.html rename to automerge-wasm/examples/webpack/dist/index.html diff --git a/examples/webpack/package.json b/automerge-wasm/examples/webpack/package.json similarity index 100% rename from examples/webpack/package.json rename to automerge-wasm/examples/webpack/package.json diff --git a/examples/webpack/src/index.js 
b/automerge-wasm/examples/webpack/src/index.js similarity index 100% rename from examples/webpack/src/index.js rename to automerge-wasm/examples/webpack/src/index.js diff --git a/examples/webpack/webpack.config.js b/automerge-wasm/examples/webpack/webpack.config.js similarity index 100% rename from examples/webpack/webpack.config.js rename to automerge-wasm/examples/webpack/webpack.config.js From 7acb9ed0e2a6a01ebb7f667e830f0d7fe320f632 Mon Sep 17 00:00:00 2001 From: Jerome Gravel-Niquet Date: Mon, 16 May 2022 10:56:10 -0400 Subject: [PATCH 359/730] don't remove last actor when there are none --- automerge/src/transaction/inner.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index ebfb20ce..6969e317 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -87,7 +87,7 @@ impl TransactionInner { } // remove the actor from the cache so that it doesn't end up in the saved document - if doc.states.get(&self.actor).is_none() { + if doc.states.get(&self.actor).is_none() && doc.ops.m.actors.len() > 0 { let actor = doc.ops.m.actors.remove_last(); doc.actor = Actor::Unused(actor); } From 81dd1a56ebeef28f583b18c14c7190434d89c6f0 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 16 May 2022 11:33:08 -0400 Subject: [PATCH 360/730] add start script - split up outputs --- automerge-wasm/examples/webpack/.gitignore | 5 +++-- automerge-wasm/examples/webpack/package.json | 7 +++++-- .../examples/webpack/{dist => public}/index.html | 2 +- automerge-wasm/examples/webpack/webpack.config.js | 6 +++--- 4 files changed, 12 insertions(+), 8 deletions(-) rename automerge-wasm/examples/webpack/{dist => public}/index.html (79%) diff --git a/automerge-wasm/examples/webpack/.gitignore b/automerge-wasm/examples/webpack/.gitignore index 92cad834..da9d3ff5 100644 --- a/automerge-wasm/examples/webpack/.gitignore +++ b/automerge-wasm/examples/webpack/.gitignore @@ -1,4 
+1,5 @@ yarn.lock node_modules -dist/*.wasm -dist/*.js +public/*.wasm +public/main.js +dist diff --git a/automerge-wasm/examples/webpack/package.json b/automerge-wasm/examples/webpack/package.json index b0274d0b..5c90319c 100644 --- a/automerge-wasm/examples/webpack/package.json +++ b/automerge-wasm/examples/webpack/package.json @@ -4,7 +4,9 @@ "description": "", "private": true, "scripts": { - "build": "webpack" + "build": "webpack", + "start": "serve public", + "test": "node dist/node.js" }, "author": "", "dependencies": { @@ -13,6 +15,7 @@ "devDependencies": { "webpack": "^5.72.1", "webpack-cli": "^4.9.2", - "webpack-node-externals": "^3.0.0" + "webpack-node-externals": "^3.0.0", + "serve": "^13.0.2" } } diff --git a/automerge-wasm/examples/webpack/dist/index.html b/automerge-wasm/examples/webpack/public/index.html similarity index 79% rename from automerge-wasm/examples/webpack/dist/index.html rename to automerge-wasm/examples/webpack/public/index.html index 7727bff4..5003393a 100644 --- a/automerge-wasm/examples/webpack/dist/index.html +++ b/automerge-wasm/examples/webpack/public/index.html @@ -5,6 +5,6 @@ Simple Webpack for automerge-wasm - + diff --git a/automerge-wasm/examples/webpack/webpack.config.js b/automerge-wasm/examples/webpack/webpack.config.js index 3d4bc98e..3ab0e798 100644 --- a/automerge-wasm/examples/webpack/webpack.config.js +++ b/automerge-wasm/examples/webpack/webpack.config.js @@ -11,7 +11,7 @@ const serverConfig = { entry: './src/index.js', output: { - filename: 'server.js', + filename: 'node.js', path: path.resolve(__dirname, 'dist'), }, mode: "development", // or production @@ -21,8 +21,8 @@ const clientConfig = { target: 'web', entry: './src/index.js', output: { - filename: 'browser.js', - path: path.resolve(__dirname, 'dist'), + filename: 'main.js', + path: path.resolve(__dirname, 'public'), }, mode: "development", // or production performance: { // we dont want the wasm blob to generate warnings From 
e1f3ecfcf5989355023f21b330e598b8c4486ffe Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 16 May 2022 15:09:55 -0400 Subject: [PATCH 361/730] typescript implicit any --- automerge-wasm/index.d.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index f6b58bfe..bf23948f 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -7,7 +7,7 @@ export type Prop = string | number; export type Hash = string; export type Heads = Hash[]; export type Value = string | number | boolean | null | Date | Uint8Array -export type ObjType = string | Array | Object +export type ObjType = string | Array | Object export type FullValue = ["str", string] | ["int", number] | From 531e434bf635a353b0f6be71d201fcfc6ffcc6be Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 16 May 2022 22:32:27 +0100 Subject: [PATCH 362/730] Optimise seek op --- automerge/src/query/seek_op.rs | 95 ++++++++++++++++++++++------------ 1 file changed, 62 insertions(+), 33 deletions(-) diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs index 9f271c26..023c431a 100644 --- a/automerge/src/query/seek_op.rs +++ b/automerge/src/query/seek_op.rs @@ -14,6 +14,8 @@ pub(crate) struct SeekOp<'a> { pub(crate) succ: Vec, /// whether a position has been found found: bool, + /// The found start position of the key if there is one yet (for map objects). 
+ start: Option, } impl<'a> SeekOp<'a> { @@ -23,6 +25,7 @@ impl<'a> SeekOp<'a> { succ: vec![], pos: 0, found: false, + start: None, } } @@ -64,53 +67,79 @@ impl<'a> TreeQuery<'a> for SeekOp<'a> { } } Key::Map(_) => { - self.pos = binary_search_by(child, |op| m.key_cmp(&op.key, &self.op.key)); - while self.pos < child.len() { - let op = child.get(self.pos).unwrap(); - if op.key != self.op.key { - break; + if let Some(start) = self.start { + if self.pos + child.len() >= start { + // skip empty nodes + if child.index.visible_len() == 0 { + self.pos += child.len(); + QueryResult::Next + } else { + QueryResult::Descend + } + } else { + self.pos += child.len(); + QueryResult::Next } - if self.op.overwrites(op) { - self.succ.push(self.pos); - } - if m.lamport_cmp(op.id, self.op.id) == Ordering::Greater { - break; - } - self.pos += 1; + } else { + // in the root node find the first op position for the key + let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.op.key)); + self.start = Some(start); + self.pos = start; + QueryResult::Skip(start) } - QueryResult::Finish } } } fn query_element_with_metadata(&mut self, e: &Op, m: &OpSetMetadata) -> QueryResult { - if !self.found { - if self.is_target_insert(e) { - self.found = true; + match self.op.key { + Key::Map(_) => { + // don't bother looking at things past our key + if e.key != self.op.key { + return QueryResult::Finish; + } + if self.op.overwrites(e) { self.succ.push(self.pos); } - } - self.pos += 1; - QueryResult::Next - } else { - // we have already found the target - if self.op.overwrites(e) { - self.succ.push(self.pos); - } - if self.op.insert { - if self.lesser_insert(e, m) { - QueryResult::Finish - } else { - self.pos += 1; - QueryResult::Next + + if m.lamport_cmp(e.id, self.op.id) == Ordering::Greater { + return QueryResult::Finish; } - } else if e.insert || self.greater_opid(e, m) { - QueryResult::Finish - } else { + self.pos += 1; QueryResult::Next } + Key::Seq(_) => { + if !self.found { + if 
self.is_target_insert(e) { + self.found = true; + if self.op.overwrites(e) { + self.succ.push(self.pos); + } + } + self.pos += 1; + QueryResult::Next + } else { + // we have already found the target + if self.op.overwrites(e) { + self.succ.push(self.pos); + } + if self.op.insert { + if self.lesser_insert(e, m) { + QueryResult::Finish + } else { + self.pos += 1; + QueryResult::Next + } + } else if e.insert || self.greater_opid(e, m) { + QueryResult::Finish + } else { + self.pos += 1; + QueryResult::Next + } + } + } } } } From 43c4ce76fb6743d3b6c7bd21ee780504f35b1fff Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 16 May 2022 23:07:45 +0100 Subject: [PATCH 363/730] Optimise seek op with patch --- automerge/src/query/seek_op_with_patch.rs | 245 ++++++++++++---------- 1 file changed, 137 insertions(+), 108 deletions(-) diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index 6958a972..e8ebded8 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -8,12 +8,16 @@ use std::fmt::Debug; pub(crate) struct SeekOpWithPatch<'a> { op: Op, pub(crate) pos: usize, + /// A position counter for after we find the insert position to record conflicts. + later_pos: usize, pub(crate) succ: Vec, found: bool, pub(crate) seen: usize, last_seen: Option, pub(crate) values: Vec<&'a Op>, pub(crate) had_value_before: bool, + /// The found start position of the key if there is one yet (for map objects). + start: Option, } impl<'a> SeekOpWithPatch<'a> { @@ -22,11 +26,13 @@ impl<'a> SeekOpWithPatch<'a> { op: op.clone(), succ: vec![], pos: 0, + later_pos: 0, found: false, seen: 0, last_seen: None, values: vec![], had_value_before: false, + start: None, } } @@ -124,55 +130,28 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // Updating a map: operations appear in sorted order by key Key::Map(_) => { - // Search for the place where we need to insert the new operation. 
First find the - // first op with a key >= the key we're updating - self.pos = binary_search_by(child, |op| m.key_cmp(&op.key, &self.op.key)); - while self.pos < child.len() { - // Iterate over any existing operations for the same key; stop when we reach an - // operation with a different key - let op = child.get(self.pos).unwrap(); - if op.key != self.op.key { - break; - } - - // Keep track of any ops we're overwriting and any conflicts on this key - if self.op.overwrites(op) { - // when we encounter an increment op we also want to find the counter for - // it. - if self.op.is_inc() && op.is_counter() && op.visible() { - self.values.push(op); + if let Some(start) = self.start { + if self.pos + child.len() >= start { + // skip empty nodes + if child.index.visible_len() == 0 { + self.pos += child.len(); + QueryResult::Next + } else { + QueryResult::Descend } - self.succ.push(self.pos); - } else if op.visible() { - self.values.push(op); + } else { + self.pos += child.len(); + QueryResult::Next } - - // Ops for the same key should be in ascending order of opId, so we break when - // we reach an op with an opId greater than that of the new operation - if m.lamport_cmp(op.id, self.op.id) == Ordering::Greater { - break; - } - - self.pos += 1; + } else { + // in the root node find the first op position for the key + // Search for the place where we need to insert the new operation. 
First find the + // first op with a key >= the key we're updating + let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.op.key)); + self.start = Some(start); + self.pos = start; + QueryResult::Skip(start) } - - // For the purpose of reporting conflicts, we also need to take into account any - // ops for the same key that appear after the new operation - let mut later_pos = self.pos; - while later_pos < child.len() { - let op = child.get(later_pos).unwrap(); - if op.key != self.op.key { - break; - } - // No need to check if `self.op.overwrites(op)` because an operation's `preds` - // must always have lower Lamport timestamps than that op itself, and the ops - // here all have greater opIds than the new op - if op.visible() { - self.values.push(op); - } - later_pos += 1; - } - QueryResult::Finish } } } @@ -180,78 +159,128 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // Only called when operating on a sequence (list/text) object, since updates of a map are // handled in `query_node_with_metadata`. fn query_element_with_metadata(&mut self, e: &'a Op, m: &OpSetMetadata) -> QueryResult { - let result = if !self.found { - // First search for the referenced list element (i.e. the element we're updating, or - // after which we're inserting) - if self.is_target_insert(e) { - self.found = true; - if self.op.overwrites(e) { - // when we encounter an increment op we also want to find the counter for - // it. - if self.op.is_inc() && e.is_counter() && e.visible() { + match self.op.key { + Key::Map(_) => { + if !self.found { + // Iterate over any existing operations for the same key; stop when we reach an + // operation with a different key + if e.key != self.op.key { + return QueryResult::Finish; + } + + // Keep track of any ops we're overwriting and any conflicts on this key + if self.op.overwrites(e) { + // when we encounter an increment op we also want to find the counter for + // it. 
+ if self.op.is_inc() && e.is_counter() && e.visible() { + self.values.push(e); + } + self.succ.push(self.pos); + } else if e.visible() { self.values.push(e); } - self.succ.push(self.pos); - } - if e.visible() { - self.had_value_before = true; - } - } - self.pos += 1; - QueryResult::Next - } else { - // Once we've found the reference element, keep track of any ops that we're overwriting - let overwritten = self.op.overwrites(e); - if overwritten { - // when we encounter an increment op we also want to find the counter for - // it. - if self.op.is_inc() && e.is_counter() && e.visible() { - self.values.push(e); - } - self.succ.push(self.pos); - } - // If the new op is an insertion, skip over any existing list elements whose elemId is - // greater than the ID of the new insertion - if self.op.insert { - if self.lesser_insert(e, m) { - // Insert before the first existing list element whose elemId is less than that - // of the new insertion - QueryResult::Finish + // Ops for the same key should be in ascending order of opId, so we break when + // we reach an op with an opId greater than that of the new operation + if m.lamport_cmp(e.id, self.op.id) == Ordering::Greater { + self.found = true; + self.later_pos = self.pos + 1; + return QueryResult::Next; + } + + self.pos += 1; } else { - self.pos += 1; - QueryResult::Next - } - } else if e.insert { - // If the new op is an update of an existing list element, the first insertion op - // we encounter after the reference element indicates the end of the reference elem - QueryResult::Finish - } else { - // When updating an existing list element, keep track of any conflicts on this list - // element. We also need to remember if the list element had any visible elements - // prior to applying the new operation: if not, the new operation is resurrecting - // a deleted list element, so it looks like an insertion in the patch. 
- if e.visible() { - self.had_value_before = true; - if !overwritten { + // For the purpose of reporting conflicts, we also need to take into account any + // ops for the same key that appear after the new operation + + if e.key != self.op.key { + return QueryResult::Finish; + } + // No need to check if `self.op.overwrites(op)` because an operation's `preds` + // must always have lower Lamport timestamps than that op itself, and the ops + // here all have greater opIds than the new op + if e.visible() { self.values.push(e); } - } - - // We now need to put the ops for the same list element into ascending order, so we - // skip over any ops whose ID is less than that of the new operation. - if !self.greater_opid(e, m) { - self.pos += 1; + self.later_pos += 1; } QueryResult::Next } - }; + Key::Seq(_) => { + let result = if !self.found { + // First search for the referenced list element (i.e. the element we're updating, or + // after which we're inserting) + if self.is_target_insert(e) { + self.found = true; + if self.op.overwrites(e) { + // when we encounter an increment op we also want to find the counter for + // it. + if self.op.is_inc() && e.is_counter() && e.visible() { + self.values.push(e); + } + self.succ.push(self.pos); + } + if e.visible() { + self.had_value_before = true; + } + } + self.pos += 1; + QueryResult::Next + } else { + // Once we've found the reference element, keep track of any ops that we're overwriting + let overwritten = self.op.overwrites(e); + if overwritten { + // when we encounter an increment op we also want to find the counter for + // it. 
+ if self.op.is_inc() && e.is_counter() && e.visible() { + self.values.push(e); + } + self.succ.push(self.pos); + } - // The patch needs to know the list index of each operation, so we count the number of - // visible list elements up to the insertion position of the new operation - if result == QueryResult::Next { - self.count_visible(e); + // If the new op is an insertion, skip over any existing list elements whose elemId is + // greater than the ID of the new insertion + if self.op.insert { + if self.lesser_insert(e, m) { + // Insert before the first existing list element whose elemId is less than that + // of the new insertion + QueryResult::Finish + } else { + self.pos += 1; + QueryResult::Next + } + } else if e.insert { + // If the new op is an update of an existing list element, the first insertion op + // we encounter after the reference element indicates the end of the reference elem + QueryResult::Finish + } else { + // When updating an existing list element, keep track of any conflicts on this list + // element. We also need to remember if the list element had any visible elements + // prior to applying the new operation: if not, the new operation is resurrecting + // a deleted list element, so it looks like an insertion in the patch. + if e.visible() { + self.had_value_before = true; + if !overwritten { + self.values.push(e); + } + } + + // We now need to put the ops for the same list element into ascending order, so we + // skip over any ops whose ID is less than that of the new operation. 
+ if !self.greater_opid(e, m) { + self.pos += 1; + } + QueryResult::Next + } + }; + + // The patch needs to know the list index of each operation, so we count the number of + // visible list elements up to the insertion position of the new operation + if result == QueryResult::Next { + self.count_visible(e); + } + result + } } - result } } From d89669fcaa490bdd4d87fbbbf98c7a55eb0d605f Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 16 May 2022 23:13:35 +0100 Subject: [PATCH 364/730] Add apply benchmarks --- automerge/benches/map.rs | 86 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 86 insertions(+) diff --git a/automerge/benches/map.rs b/automerge/benches/map.rs index 9aad6941..cd94fde3 100644 --- a/automerge/benches/map.rs +++ b/automerge/benches/map.rs @@ -172,6 +172,92 @@ fn criterion_benchmark(c: &mut Criterion) { ); } group.finish(); + + let mut group = c.benchmark_group("map apply"); + for size in &sizes { + group.throughput(criterion::Throughput::Elements(*size)); + group.bench_with_input(BenchmarkId::new("repeated put", size), size, |b, &size| { + b.iter_batched( + || { + repeated_put(size) + .get_changes(&[]) + .into_iter() + .cloned() + .collect::>() + }, + |changes| { + let mut doc = Automerge::new(); + doc.apply_changes(changes) + }, + criterion::BatchSize::LargeInput, + ) + }); + group.bench_with_input( + BenchmarkId::new("repeated increment", size), + size, + |b, &size| { + b.iter_batched( + || { + repeated_increment(size) + .get_changes(&[]) + .into_iter() + .cloned() + .collect::>() + }, + |changes| { + let mut doc = Automerge::new(); + doc.apply_changes(changes) + }, + criterion::BatchSize::LargeInput, + ) + }, + ); + + group.throughput(criterion::Throughput::Elements(*size)); + group.bench_with_input( + BenchmarkId::new("increasing put", size), + size, + |b, &size| { + b.iter_batched( + || { + increasing_put(size) + .get_changes(&[]) + .into_iter() + .cloned() + .collect::>() + }, + |changes| { + let mut doc = Automerge::new(); 
+ doc.apply_changes(changes) + }, + criterion::BatchSize::LargeInput, + ) + }, + ); + + group.throughput(criterion::Throughput::Elements(*size)); + group.bench_with_input( + BenchmarkId::new("decreasing put", size), + size, + |b, &size| { + b.iter_batched( + || { + decreasing_put(size) + .get_changes(&[]) + .into_iter() + .cloned() + .collect::>() + }, + |changes| { + let mut doc = Automerge::new(); + doc.apply_changes(changes) + }, + criterion::BatchSize::LargeInput, + ) + }, + ); + } + group.finish(); } criterion_group!(benches, criterion_benchmark); From 6bce8bf4fd174bd33df9bff5145de9660cd41d58 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 10:40:44 +0100 Subject: [PATCH 365/730] Use vec with capacity when calculating bloom probes --- automerge/src/sync/bloom.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/automerge/src/sync/bloom.rs b/automerge/src/sync/bloom.rs index d20df5fd..ce0a79d3 100644 --- a/automerge/src/sync/bloom.rs +++ b/automerge/src/sync/bloom.rs @@ -41,7 +41,8 @@ impl BloomFilter { let z = u32::from_le_bytes([hash_bytes[8], hash_bytes[9], hash_bytes[10], hash_bytes[11]]) % modulo; - let mut probes = vec![x]; + let mut probes = Vec::with_capacity(self.num_probes as usize); + probes.push(x); for _ in 1..self.num_probes { x = (x + y) % modulo; y = (y + z) % modulo; From e8e42b2d16b5366e1973c33d46b64116549da7ab Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 10:41:23 +0100 Subject: [PATCH 366/730] Remove need to collect hashes when building bloom filter --- automerge/src/sync.rs | 7 ++----- automerge/src/sync/bloom.rs | 14 ++++++-------- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 85db6cce..4219172f 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -165,13 +165,10 @@ impl Automerge { fn make_bloom_filter(&self, last_sync: Vec) -> Have { let new_changes = self.get_changes(&last_sync); - let hashes = 
new_changes - .into_iter() - .map(|change| change.hash) - .collect::>(); + let hashes = new_changes.into_iter().map(|change| &change.hash); Have { last_sync, - bloom: BloomFilter::from(&hashes[..]), + bloom: BloomFilter::from_hashes(hashes), } } diff --git a/automerge/src/sync/bloom.rs b/automerge/src/sync/bloom.rs index ce0a79d3..55edef62 100644 --- a/automerge/src/sync/bloom.rs +++ b/automerge/src/sync/bloom.rs @@ -83,15 +83,8 @@ impl BloomFilter { true } } -} -fn bits_capacity(num_entries: u32, num_bits_per_entry: u32) -> usize { - let f = ((f64::from(num_entries) * f64::from(num_bits_per_entry)) / 8_f64).ceil(); - f as usize -} - -impl From<&[ChangeHash]> for BloomFilter { - fn from(hashes: &[ChangeHash]) -> Self { + pub fn from_hashes<'a>(hashes: impl ExactSizeIterator) -> Self { let num_entries = hashes.len() as u32; let num_bits_per_entry = BITS_PER_ENTRY; let num_probes = NUM_PROBES; @@ -109,6 +102,11 @@ impl From<&[ChangeHash]> for BloomFilter { } } +fn bits_capacity(num_entries: u32, num_bits_per_entry: u32) -> usize { + let f = ((f64::from(num_entries) * f64::from(num_bits_per_entry)) / 8_f64).ceil(); + f as usize +} + impl TryFrom<&[u8]> for BloomFilter { type Error = decoding::Error; From 11fbde47bbe9a23915ff603cef0fab2d76422414 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 20 May 2022 10:04:32 +0100 Subject: [PATCH 367/730] Use HASH_SIZE const in ChangeHash definition --- automerge/src/sync.rs | 5 ++--- automerge/src/types.rs | 5 ++++- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 4219172f..57848a66 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -7,8 +7,8 @@ use std::{ }; use crate::{ - decoding, decoding::Decoder, encoding::Encodable, ApplyOptions, Automerge, AutomergeError, - Change, ChangeHash, OpObserver, + decoding, decoding::Decoder, encoding::Encodable, types::HASH_SIZE, ApplyOptions, Automerge, + AutomergeError, Change, ChangeHash, OpObserver, }; 
mod bloom; @@ -17,7 +17,6 @@ mod state; pub use bloom::BloomFilter; pub use state::{Have, State}; -const HASH_SIZE: usize = 32; // 256 bits = 32 bytes const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identification impl Automerge { diff --git a/automerge/src/types.rs b/automerge/src/types.rs index a135f21a..d3230ec3 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -508,9 +508,12 @@ impl Op { #[derive(Debug, Clone)] pub(crate) struct Peer {} +/// The number of bytes in a change hash. +pub(crate) const HASH_SIZE: usize = 32; // 256 bits = 32 bytes + /// The sha256 hash of a change. #[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord, Copy)] -pub struct ChangeHash(pub [u8; 32]); +pub struct ChangeHash(pub [u8; HASH_SIZE]); impl fmt::Debug for ChangeHash { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { From b9a6b3129f54e47f9b2412faec4ffe2397e40f4a Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 14:28:14 +0100 Subject: [PATCH 368/730] Add method to get changes by clock --- automerge/src/automerge.rs | 54 ++++++++++++++++++++++++++++++++------ automerge/src/clock.rs | 5 ++++ 2 files changed, 51 insertions(+), 8 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index f0963ec4..18762d4c 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -903,6 +903,52 @@ impl Automerge { .collect() } + /// Get the changes since `have_deps` in this document using a clock internally. 
+ fn get_changes_clock(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { + // get the clock for the given deps + let clock = self.clock_at(have_deps); + + // get the documents current clock + + let mut changes = Vec::new(); + // walk the state from the given deps clock and add them into the vec + for (actor_index, actor_changes) in &self.states { + if let Some(max_op) = clock.get_for_actor(actor_index) { + // find the change in this actors sequence of changes that corresponds to the max_op + // recorded for them in the clock + let clock_start = actor_changes + .binary_search_by_key(max_op, |change_index| { + self.history[*change_index].max_op() + }) + .expect("Clock index should always correspond to a value in the actor's state"); + changes.extend( + actor_changes[clock_start..] + .iter() + .map(|change_index| &self.history[*change_index]), + ); + } else { + changes.extend( + actor_changes + .iter() + .map(|change_index| &self.history[*change_index]), + ); + } + } + + // ensure the changes are still in sorted order + changes + } + + pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { + let changes = if let Some(changes) = self.get_changes_fast(have_deps) { + changes + } else { + self.get_changes_slow(have_deps) + }; + assert_eq!(changes, self.get_changes_clock(have_deps)); + changes + } + /// Get the last change this actor made to the document. 
pub fn get_last_local_change(&self) -> Option<&Change> { return self @@ -912,14 +958,6 @@ impl Automerge { .find(|c| c.actor_id() == self.get_actor()); } - pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { - if let Some(changes) = self.get_changes_fast(have_deps) { - changes - } else { - self.get_changes_slow(have_deps) - } - } - fn clock_at(&self, heads: &[ChangeHash]) -> Clock { let mut clock = Clock::new(); let mut seen = HashSet::new(); diff --git a/automerge/src/clock.rs b/automerge/src/clock.rs index d80d091b..69bd08c2 100644 --- a/automerge/src/clock.rs +++ b/automerge/src/clock.rs @@ -25,6 +25,11 @@ impl Clock { false } } + + /// Get the max_op recorded in this clock for the actor. + pub(crate) fn get_for_actor(&self, actor_index: &usize) -> Option<&u64> { + self.0.get(actor_index) + } } #[cfg(test)] From 0de37d292d69996e5d8560dcc5b116d526296112 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 14:47:04 +0100 Subject: [PATCH 369/730] Sort change results from clock search --- automerge/src/automerge.rs | 31 ++++++++++++++++++------------- automerge/src/automerge/tests.rs | 11 +++++++++++ 2 files changed, 29 insertions(+), 13 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 18762d4c..0874d6ca 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -910,7 +910,7 @@ impl Automerge { // get the documents current clock - let mut changes = Vec::new(); + let mut change_indexes: Vec = Vec::new(); // walk the state from the given deps clock and add them into the vec for (actor_index, actor_changes) in &self.states { if let Some(max_op) = clock.get_for_actor(actor_index) { @@ -921,22 +921,20 @@ impl Automerge { self.history[*change_index].max_op() }) .expect("Clock index should always correspond to a value in the actor's state"); - changes.extend( - actor_changes[clock_start..] 
- .iter() - .map(|change_index| &self.history[*change_index]), - ); + let first_unseen_change = clock_start + 1; + change_indexes.extend(&actor_changes[first_unseen_change..]); } else { - changes.extend( - actor_changes - .iter() - .map(|change_index| &self.history[*change_index]), - ); + change_indexes.extend(&actor_changes[..]); } } // ensure the changes are still in sorted order - changes + change_indexes.sort_unstable(); + + change_indexes + .into_iter() + .map(|i| &self.history[i]) + .collect() } pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { @@ -945,7 +943,14 @@ impl Automerge { } else { self.get_changes_slow(have_deps) }; - assert_eq!(changes, self.get_changes_clock(have_deps)); + let clock_changes = self.get_changes_clock(have_deps); + assert_eq!( + changes, + clock_changes, + "{:#?} {:#?}", + changes.iter().map(|c| c.hash).collect::>(), + clock_changes.iter().map(|c| c.hash).collect::>() + ); changes } diff --git a/automerge/src/automerge/tests.rs b/automerge/src/automerge/tests.rs index 430916d8..84253299 100644 --- a/automerge/src/automerge/tests.rs +++ b/automerge/src/automerge/tests.rs @@ -1508,3 +1508,14 @@ fn observe_counter_change_application() { ] ); } + +#[test] +fn get_changes_heads_empty() { + let mut doc = AutoCommit::new(); + doc.put(ROOT, "key1", 1).unwrap(); + doc.commit(); + doc.put(ROOT, "key2", 1).unwrap(); + doc.commit(); + let heads = doc.get_heads(); + assert_eq!(doc.get_changes(&heads), Vec::<&Change>::new()); +} From 1b348925853f8a39d544f6f8719ab2b49efb7219 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 15:08:30 +0100 Subject: [PATCH 370/730] Add num_ops to change to quickly get the len --- automerge/src/change.rs | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/automerge/src/change.rs b/automerge/src/change.rs index c2bbe66c..be9b4aff 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -200,6 +200,7 @@ fn encode(change: &::Change) -> 
Change { deps, ops: chunk.ops, extra_bytes: chunk.extra_bytes, + num_ops: change.operations.len(), } } @@ -332,6 +333,7 @@ pub struct Change { pub deps: Vec, ops: HashMap>, extra_bytes: Range, + num_ops: usize, } impl Change { @@ -353,8 +355,7 @@ impl Change { } pub fn len(&self) -> usize { - // TODO - this could be a lot more efficient - self.iter_ops().count() + self.num_ops } pub fn max_op(&self) -> u64 { @@ -550,7 +551,7 @@ impl TryFrom> for Change { let ops_info = decode_column_info(bytes.uncompressed(), &mut cursor, false)?; let ops = decode_columns(&mut cursor, &ops_info); - Ok(Change { + let mut change = Change { bytes, body_start, hash, @@ -562,7 +563,13 @@ impl TryFrom> for Change { deps, ops, extra_bytes: cursor, - }) + num_ops: 0, // filled in below + }; + + let len = change.iter_ops().count(); + change.num_ops = len; + + Ok(change) } } From 5e088ee9e0435f0578a8d1a99ba34f18243c1f40 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 15:59:29 +0100 Subject: [PATCH 371/730] Document clock module and add merge function --- automerge/src/clock.rs | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/automerge/src/clock.rs b/automerge/src/clock.rs index 69bd08c2..62fa7b56 100644 --- a/automerge/src/clock.rs +++ b/automerge/src/clock.rs @@ -3,7 +3,8 @@ use fxhash::FxBuildHasher; use std::cmp; use std::collections::HashMap; -#[derive(Debug, Clone, PartialEq)] +/// Vector clock mapping actor indices to the max op counter of the changes created by that actor. 
+#[derive(Default, Debug, Clone, PartialEq)] pub(crate) struct Clock(HashMap); impl Clock { @@ -11,25 +12,31 @@ impl Clock { Clock(Default::default()) } - pub(crate) fn include(&mut self, key: usize, n: u64) { + pub(crate) fn include(&mut self, actor_index: usize, max_op: u64) { self.0 - .entry(key) - .and_modify(|m| *m = cmp::max(n, *m)) - .or_insert(n); + .entry(actor_index) + .and_modify(|m| *m = cmp::max(max_op, *m)) + .or_insert(max_op); } pub(crate) fn covers(&self, id: &OpId) -> bool { - if let Some(val) = self.0.get(&id.1) { - val >= &id.0 + if let Some(max_op) = self.0.get(&id.1) { + max_op >= &id.0 } else { false } } - /// Get the max_op recorded in this clock for the actor. + /// Get the max_op counter recorded in this clock for the actor. pub(crate) fn get_for_actor(&self, actor_index: &usize) -> Option<&u64> { self.0.get(actor_index) } + + pub(crate) fn merge(&mut self, other: &Self) { + for (actor, max_op) in &other.0 { + self.include(*actor, *max_op); + } + } } #[cfg(test)] From c2765885fde0358dd50b60f9d839179385027b99 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 16:00:00 +0100 Subject: [PATCH 372/730] Maintain incremental clocks --- automerge/src/automerge.rs | 40 +++++++++++++++++++++++--------------- 1 file changed, 24 insertions(+), 16 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 0874d6ca..84b43823 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -40,6 +40,8 @@ pub struct Automerge { pub(crate) history: Vec, /// Mapping from change hash to index into the history list. pub(crate) history_index: HashMap, + /// Mapping from change hash to vector clock at this state. + pub(crate) clocks: HashMap, /// Mapping from actor index to list of seqs seen for them. pub(crate) states: HashMap>, /// Current dependencies of this document (heads hashes). 
@@ -61,6 +63,7 @@ impl Automerge { queue: vec![], history: vec![], history_index: HashMap::new(), + clocks: HashMap::new(), states: HashMap::new(), ops: Default::default(), deps: Default::default(), @@ -947,9 +950,13 @@ impl Automerge { assert_eq!( changes, clock_changes, - "{:#?} {:#?}", + "{:#?} {:#?} {:#?}", changes.iter().map(|c| c.hash).collect::>(), - clock_changes.iter().map(|c| c.hash).collect::>() + clock_changes + .iter() + .map(|c| (c.hash, c.actor_id())) + .collect::>(), + self.clock_at(have_deps), ); changes } @@ -965,20 +972,12 @@ impl Automerge { fn clock_at(&self, heads: &[ChangeHash]) -> Clock { let mut clock = Clock::new(); - let mut seen = HashSet::new(); - let mut to_see = heads.to_vec(); - // FIXME - faster - while let Some(hash) = to_see.pop() { - if let Some(c) = self.get_change_by_hash(&hash) { - for h in &c.deps { - if !seen.contains(h) { - to_see.push(*h); - } - } - let actor = self.ops.m.actors.lookup(c.actor_id()).unwrap(); - clock.include(actor, c.max_op()); - seen.insert(hash); - } + for hash in heads { + let c = self + .clocks + .get(hash) + .expect("Asked for change that isn't in this document"); + clock.merge(c); } clock } @@ -1038,11 +1037,20 @@ impl Automerge { let history_index = self.history.len(); + let actor_index = self.ops.m.actors.cache(change.actor_id().clone()); self.states .entry(self.ops.m.actors.cache(change.actor_id().clone())) .or_default() .push(history_index); + let mut clock = Clock::new(); + for hash in &change.deps { + let c = self.clocks.get(hash).unwrap(); + clock.merge(c); + } + clock.include(actor_index, change.max_op()); + self.clocks.insert(change.hash, clock); + self.history_index.insert(change.hash, history_index); self.history.push(change); From 933bf5ee0785f0fe37e816ab857cdca843ceeb5d Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 16:18:07 +0100 Subject: [PATCH 373/730] Return an error when getting clock for missing hash --- automerge-c/src/lib.rs | 2 +- 
automerge-cli/src/examine.rs | 7 ++- automerge-wasm/src/lib.rs | 2 +- automerge/benches/map.rs | 4 ++ automerge/examples/quickstart.rs | 2 +- automerge/src/autocommit.rs | 5 +- automerge/src/automerge.rs | 100 ++++++++++++++++--------------- automerge/src/automerge/tests.rs | 4 +- automerge/src/error.rs | 2 + automerge/src/sync.rs | 20 +++++-- automerge/tests/test.rs | 3 +- 11 files changed, 88 insertions(+), 63 deletions(-) diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs index 7bdcae23..4c4dd0c2 100644 --- a/automerge-c/src/lib.rs +++ b/automerge-c/src/lib.rs @@ -1395,7 +1395,7 @@ pub unsafe extern "C" fn AMgetChanges( Some(have_deps) => have_deps.as_ref(), None => &empty_deps, }; - to_result(Ok(doc.get_changes(have_deps))) + to_result(doc.get_changes(have_deps)) } /// \memberof AMchange diff --git a/automerge-cli/src/examine.rs b/automerge-cli/src/examine.rs index 010fa0f1..847abd4f 100644 --- a/automerge-cli/src/examine.rs +++ b/automerge-cli/src/examine.rs @@ -31,7 +31,12 @@ pub fn examine( .map_err(|e| ExamineError::ReadingChanges { source: e })?; let doc = am::Automerge::load(&buf) .map_err(|e| ExamineError::ApplyingInitialChanges { source: e })?; - let uncompressed_changes: Vec<_> = doc.get_changes(&[]).iter().map(|c| c.decode()).collect(); + let uncompressed_changes: Vec<_> = doc + .get_changes(&[]) + .unwrap() + .iter() + .map(|c| c.decode()) + .collect(); if is_tty { let json_changes = serde_json::to_value(uncompressed_changes).unwrap(); colored_json::write_colored_json(&json_changes, &mut output).unwrap(); diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 4429c0c8..db948704 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -572,7 +572,7 @@ impl Automerge { pub fn get_changes(&mut self, have_deps: JsValue) -> Result { self.ensure_transaction_closed(); let deps: Vec<_> = JS(have_deps).try_into()?; - let changes = self.doc.get_changes(&deps); + let changes = self.doc.get_changes(&deps)?; let 
changes: Array = changes .iter() .map(|c| Uint8Array::from(c.raw_bytes())) diff --git a/automerge/benches/map.rs b/automerge/benches/map.rs index cd94fde3..fcf3bfa3 100644 --- a/automerge/benches/map.rs +++ b/automerge/benches/map.rs @@ -181,6 +181,7 @@ fn criterion_benchmark(c: &mut Criterion) { || { repeated_put(size) .get_changes(&[]) + .unwrap() .into_iter() .cloned() .collect::>() @@ -200,6 +201,7 @@ fn criterion_benchmark(c: &mut Criterion) { || { repeated_increment(size) .get_changes(&[]) + .unwrap() .into_iter() .cloned() .collect::>() @@ -222,6 +224,7 @@ fn criterion_benchmark(c: &mut Criterion) { || { increasing_put(size) .get_changes(&[]) + .unwrap() .into_iter() .cloned() .collect::>() @@ -244,6 +247,7 @@ fn criterion_benchmark(c: &mut Criterion) { || { decreasing_put(size) .get_changes(&[]) + .unwrap() .into_iter() .cloned() .collect::>() diff --git a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs index 0ce5de72..a041730c 100644 --- a/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -50,7 +50,7 @@ fn main() { doc1.merge(&mut doc2).unwrap(); - for change in doc1.get_changes(&[]) { + for change in doc1.get_changes(&[]).unwrap() { let length = doc1.length_at(&cards, &[change.hash]); println!("{} {}", change.message().unwrap(), length); } diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index b0b56709..22efd155 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -170,7 +170,10 @@ impl AutoCommit { self.doc.get_last_local_change() } - pub fn get_changes(&mut self, have_deps: &[ChangeHash]) -> Vec<&Change> { + pub fn get_changes( + &mut self, + have_deps: &[ChangeHash], + ) -> Result, AutomergeError> { self.ensure_transaction_closed(); self.doc.get_changes(have_deps) } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 84b43823..d7f18654 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -279,11 +279,11 @@ impl 
Automerge { /// Historical version of [`keys`](Self::keys). pub fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - let clock = self.clock_at(heads); - KeysAt::new(self, self.ops.keys_at(obj, clock)) - } else { - KeysAt::new(self, None) + if let Ok(clock) = self.clock_at(heads) { + return KeysAt::new(self, self.ops.keys_at(obj, clock)); + } } + KeysAt::new(self, None) } /// Iterate over the keys and values of the map `obj` in the given range. @@ -307,12 +307,12 @@ impl Automerge { heads: &[ChangeHash], ) -> MapRangeAt<'_, R> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - let clock = self.clock_at(heads); - let iter_range = self.ops.map_range_at(obj, range, clock); - MapRangeAt::new(self, iter_range) - } else { - MapRangeAt::new(self, None) + if let Ok(clock) = self.clock_at(heads) { + let iter_range = self.ops.map_range_at(obj, range, clock); + return MapRangeAt::new(self, iter_range); + } } + MapRangeAt::new(self, None) } /// Iterate over the indexes and values of the list `obj` in the given range. 
@@ -336,12 +336,12 @@ impl Automerge { heads: &[ChangeHash], ) -> ListRangeAt<'_, R> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - let clock = self.clock_at(heads); - let iter_range = self.ops.list_range_at(obj, range, clock); - ListRangeAt::new(self, iter_range) - } else { - ListRangeAt::new(self, None) + if let Ok(clock) = self.clock_at(heads) { + let iter_range = self.ops.list_range_at(obj, range, clock); + return ListRangeAt::new(self, iter_range); + } } + ListRangeAt::new(self, None) } pub fn values>(&self, obj: O) -> Values<'_> { @@ -358,21 +358,21 @@ impl Automerge { pub fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> { if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - let clock = self.clock_at(heads); - match self.ops.object_type(&obj) { - Some(ObjType::Map) | Some(ObjType::Table) => { - let iter_range = self.ops.map_range_at(obj, .., clock); - Values::new(self, iter_range) - } - Some(ObjType::List) | Some(ObjType::Text) => { - let iter_range = self.ops.list_range_at(obj, .., clock); - Values::new(self, iter_range) - } - None => Values::empty(self), + if let Ok(clock) = self.clock_at(heads) { + return match self.ops.object_type(&obj) { + Some(ObjType::Map) | Some(ObjType::Table) => { + let iter_range = self.ops.map_range_at(obj, .., clock); + Values::new(self, iter_range) + } + Some(ObjType::List) | Some(ObjType::Text) => { + let iter_range = self.ops.list_range_at(obj, .., clock); + Values::new(self, iter_range) + } + None => Values::empty(self), + }; } - } else { - Values::empty(self) } + Values::empty(self) } /// Get the length of the given object. @@ -393,17 +393,17 @@ impl Automerge { /// Historical version of [`length`](Self::length). 
pub fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { - let clock = self.clock_at(heads); - match self.ops.object_type(&inner_obj) { - Some(ObjType::Map) | Some(ObjType::Table) => self.keys_at(obj, heads).count(), - Some(ObjType::List) | Some(ObjType::Text) => { - self.ops.search(&inner_obj, query::LenAt::new(clock)).len - } - None => 0, + if let Ok(clock) = self.clock_at(heads) { + return match self.ops.object_type(&inner_obj) { + Some(ObjType::Map) | Some(ObjType::Table) => self.keys_at(obj, heads).count(), + Some(ObjType::List) | Some(ObjType::Text) => { + self.ops.search(&inner_obj, query::LenAt::new(clock)).len + } + None => 0, + }; } - } else { - 0 } + 0 } /// Get the type of this object, if it is an object. @@ -460,7 +460,7 @@ impl Automerge { heads: &[ChangeHash], ) -> Result { let obj = self.exid_to_obj(obj.as_ref())?; - let clock = self.clock_at(heads); + let clock = self.clock_at(heads)?; let query = self.ops.search(&obj, query::ListValsAt::new(clock)); let mut buffer = String::new(); for q in &query.ops { @@ -542,7 +542,7 @@ impl Automerge { ) -> Result, ExId)>, AutomergeError> { let prop = prop.into(); let obj = self.exid_to_obj(obj.as_ref())?; - let clock = self.clock_at(heads); + let clock = self.clock_at(heads)?; let result = match prop { Prop::Map(p) => { let prop = self.ops.m.props.lookup(&p); @@ -752,7 +752,9 @@ impl Automerge { /// Save the changes since last save in a compact form. pub fn save_incremental(&mut self) -> Vec { - let changes = self.get_changes(self.saved.as_slice()); + let changes = self + .get_changes(self.saved.as_slice()) + .expect("Should only be getting changes using previously saved heads"); let mut bytes = vec![]; for c in changes { bytes.extend(c.raw_bytes()); @@ -907,9 +909,9 @@ impl Automerge { } /// Get the changes since `have_deps` in this document using a clock internally. 
- fn get_changes_clock(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { + fn get_changes_clock(&self, have_deps: &[ChangeHash]) -> Result, AutomergeError> { // get the clock for the given deps - let clock = self.clock_at(have_deps); + let clock = self.clock_at(have_deps)?; // get the documents current clock @@ -934,19 +936,19 @@ impl Automerge { // ensure the changes are still in sorted order change_indexes.sort_unstable(); - change_indexes + Ok(change_indexes .into_iter() .map(|i| &self.history[i]) - .collect() + .collect()) } - pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { + pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Result, AutomergeError> { let changes = if let Some(changes) = self.get_changes_fast(have_deps) { changes } else { self.get_changes_slow(have_deps) }; - let clock_changes = self.get_changes_clock(have_deps); + let clock_changes = self.get_changes_clock(have_deps)?; assert_eq!( changes, clock_changes, @@ -958,7 +960,7 @@ impl Automerge { .collect::>(), self.clock_at(have_deps), ); - changes + Ok(changes) } /// Get the last change this actor made to the document. @@ -970,16 +972,16 @@ impl Automerge { .find(|c| c.actor_id() == self.get_actor()); } - fn clock_at(&self, heads: &[ChangeHash]) -> Clock { + fn clock_at(&self, heads: &[ChangeHash]) -> Result { let mut clock = Clock::new(); for hash in heads { let c = self .clocks .get(hash) - .expect("Asked for change that isn't in this document"); + .ok_or(AutomergeError::MissingHash(*hash))?; clock.merge(c); } - clock + Ok(clock) } /// Get a change by its hash. 
diff --git a/automerge/src/automerge/tests.rs b/automerge/src/automerge/tests.rs index 84253299..dc4204e1 100644 --- a/automerge/src/automerge/tests.rs +++ b/automerge/src/automerge/tests.rs @@ -1473,7 +1473,7 @@ fn observe_counter_change_application() { doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); doc.increment(ROOT, "counter", 2).unwrap(); doc.increment(ROOT, "counter", 5).unwrap(); - let changes = doc.get_changes(&[]).into_iter().cloned().collect(); + let changes = doc.get_changes(&[]).unwrap().into_iter().cloned().collect(); let mut new_doc = AutoCommit::new(); let mut observer = VecOpObserver::default(); @@ -1517,5 +1517,5 @@ fn get_changes_heads_empty() { doc.put(ROOT, "key2", 1).unwrap(); doc.commit(); let heads = doc.get_heads(); - assert_eq!(doc.get_changes(&heads), Vec::<&Change>::new()); + assert_eq!(doc.get_changes(&heads).unwrap(), Vec::<&Change>::new()); } diff --git a/automerge/src/error.rs b/automerge/src/error.rs index 3498ed40..cc76d7ef 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -23,6 +23,8 @@ pub enum AutomergeError { DuplicateSeqNumber(u64, ActorId), #[error("invalid hash {0}")] InvalidHash(ChangeHash), + #[error("hash {0} does not correspond to a change in this document")] + MissingHash(ChangeHash), #[error("increment operations must be against a counter value")] MissingCounter, #[error("general failure")] diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 57848a66..99961397 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -59,6 +59,7 @@ impl Automerge { sync_state.their_need.as_ref(), ) { self.get_changes_to_send(their_have.clone(), their_need) + .expect("Should have only used hashes that are in the document") } else { Vec::new() }; @@ -163,7 +164,9 @@ impl Automerge { } fn make_bloom_filter(&self, last_sync: Vec) -> Have { - let new_changes = self.get_changes(&last_sync); + let new_changes = self + .get_changes(&last_sync) + .expect("Should have only used hashes that are 
in the document"); let hashes = new_changes.into_iter().map(|change| &change.hash); Have { last_sync, @@ -171,11 +174,16 @@ impl Automerge { } } - fn get_changes_to_send(&self, have: Vec, need: &[ChangeHash]) -> Vec<&Change> { + fn get_changes_to_send( + &self, + have: Vec, + need: &[ChangeHash], + ) -> Result, AutomergeError> { if have.is_empty() { - need.iter() + Ok(need + .iter() .filter_map(|hash| self.get_change_by_hash(hash)) - .collect() + .collect()) } else { let mut last_sync_hashes = HashSet::new(); let mut bloom_filters = Vec::with_capacity(have.len()); @@ -189,7 +197,7 @@ impl Automerge { } let last_sync_hashes = last_sync_hashes.into_iter().collect::>(); - let changes = self.get_changes(&last_sync_hashes); + let changes = self.get_changes(&last_sync_hashes)?; let mut change_hashes = HashSet::with_capacity(changes.len()); let mut dependents: HashMap> = HashMap::new(); @@ -237,7 +245,7 @@ impl Automerge { changes_to_send.push(change); } } - changes_to_send + Ok(changes_to_send) } } } diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index e661886a..f13bcd2b 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -838,6 +838,7 @@ fn handle_repeated_out_of_order_changes() -> Result<(), automerge::AutomergeErro doc1.commit(); let changes = doc1 .get_changes(&[]) + .unwrap() .into_iter() .cloned() .collect::>(); @@ -937,7 +938,7 @@ fn observe_counter_change_application() { doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); doc.increment(ROOT, "counter", 2).unwrap(); doc.increment(ROOT, "counter", 5).unwrap(); - let changes = doc.get_changes(&[]).into_iter().cloned().collect(); + let changes = doc.get_changes(&[]).unwrap().into_iter().cloned().collect(); let mut doc = AutoCommit::new(); let mut observer = VecOpObserver::default(); From 16f13043452dda48802f3065e40c5d861109b749 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 16:22:15 +0100 Subject: [PATCH 374/730] Fix wasm test calling getChanges with 
wrong heads --- automerge-wasm/test/test.ts | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 6e5201ec..e02dde26 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -312,11 +312,12 @@ describe('Automerge', () => { doc1.put("_root", "hello", "world") let doc2 = load(doc1.save(), "bbbb"); let doc3 = load(doc1.save(), "cccc"); + let heads = doc1.getHeads() doc1.put("_root", "cnt", 20) doc2.put("_root", "cnt", 0, "counter") doc3.put("_root", "cnt", 10, "counter") - doc1.applyChanges(doc2.getChanges(doc1.getHeads())) - doc1.applyChanges(doc3.getChanges(doc1.getHeads())) + doc1.applyChanges(doc2.getChanges(heads)) + doc1.applyChanges(doc3.getChanges(heads)) let result = doc1.getAll("_root", "cnt") assert.deepEqual(result,[ ['int',20,'2@aaaa'], @@ -345,11 +346,12 @@ describe('Automerge', () => { doc1.insert(seq, 0, "hello") let doc2 = load(doc1.save(), "bbbb"); let doc3 = load(doc1.save(), "cccc"); + let heads = doc1.getHeads() doc1.put(seq, 0, 20) doc2.put(seq, 0, 0, "counter") doc3.put(seq, 0, 10, "counter") - doc1.applyChanges(doc2.getChanges(doc1.getHeads())) - doc1.applyChanges(doc3.getChanges(doc1.getHeads())) + doc1.applyChanges(doc2.getChanges(heads)) + doc1.applyChanges(doc3.getChanges(heads)) let result = doc1.getAll(seq, 0) assert.deepEqual(result,[ ['int',20,'3@aaaa'], From b7c50e47b95a81dfd460d2d877f2f137ff2b9979 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 16:26:28 +0100 Subject: [PATCH 375/730] Just use get_changes_clock --- automerge/src/automerge.rs | 19 +------------------ 1 file changed, 1 insertion(+), 18 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index d7f18654..8f002159 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -943,24 +943,7 @@ impl Automerge { } pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Result, AutomergeError> { - let changes 
= if let Some(changes) = self.get_changes_fast(have_deps) { - changes - } else { - self.get_changes_slow(have_deps) - }; - let clock_changes = self.get_changes_clock(have_deps)?; - assert_eq!( - changes, - clock_changes, - "{:#?} {:#?} {:#?}", - changes.iter().map(|c| c.hash).collect::>(), - clock_changes - .iter() - .map(|c| (c.hash, c.actor_id())) - .collect::>(), - self.clock_at(have_deps), - ); - Ok(changes) + self.get_changes_clock(have_deps) } /// Get the last change this actor made to the document. From 36857e0f6ba6c04d8717b1976fdbe1a5da31f936 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 16:47:08 +0100 Subject: [PATCH 376/730] Store seq in clock to remove binary_search_by_key --- automerge/src/automerge.rs | 43 ++++++++++++++++++++++++-------------- automerge/src/clock.rs | 35 ++++++++++++++++++++----------- 2 files changed, 50 insertions(+), 28 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 8f002159..ba02b58d 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -4,6 +4,7 @@ use std::num::NonZeroU64; use std::ops::RangeBounds; use crate::change::encode_document; +use crate::clock::ClockData; use crate::exid::ExId; use crate::keys::Keys; use crate::op_observer::OpObserver; @@ -918,16 +919,10 @@ impl Automerge { let mut change_indexes: Vec = Vec::new(); // walk the state from the given deps clock and add them into the vec for (actor_index, actor_changes) in &self.states { - if let Some(max_op) = clock.get_for_actor(actor_index) { + if let Some(clock_data) = clock.get_for_actor(actor_index) { // find the change in this actors sequence of changes that corresponds to the max_op // recorded for them in the clock - let clock_start = actor_changes - .binary_search_by_key(max_op, |change_index| { - self.history[*change_index].max_op() - }) - .expect("Clock index should always correspond to a value in the actor's state"); - let first_unseen_change = clock_start + 1; - 
change_indexes.extend(&actor_changes[first_unseen_change..]); + change_indexes.extend(&actor_changes[clock_data.seq as usize..]); } else { change_indexes.extend(&actor_changes[..]); } @@ -956,15 +951,25 @@ impl Automerge { } fn clock_at(&self, heads: &[ChangeHash]) -> Result { - let mut clock = Clock::new(); - for hash in heads { - let c = self + if let Some(first_hash) = heads.first() { + let mut clock = self .clocks - .get(hash) - .ok_or(AutomergeError::MissingHash(*hash))?; - clock.merge(c); + .get(first_hash) + .ok_or(AutomergeError::MissingHash(*first_hash))? + .clone(); + + for hash in &heads[1..] { + let c = self + .clocks + .get(hash) + .ok_or(AutomergeError::MissingHash(*hash))?; + clock.merge(c); + } + + Ok(clock) + } else { + Ok(Clock::new()) } - Ok(clock) } /// Get a change by its hash. @@ -1033,7 +1038,13 @@ impl Automerge { let c = self.clocks.get(hash).unwrap(); clock.merge(c); } - clock.include(actor_index, change.max_op()); + clock.include( + actor_index, + ClockData { + max_op: change.max_op(), + seq: change.seq, + }, + ); self.clocks.insert(change.hash, clock); self.history_index.insert(change.hash, history_index); diff --git a/automerge/src/clock.rs b/automerge/src/clock.rs index 62fa7b56..bbe376fa 100644 --- a/automerge/src/clock.rs +++ b/automerge/src/clock.rs @@ -1,40 +1,51 @@ use crate::types::OpId; use fxhash::FxBuildHasher; -use std::cmp; use std::collections::HashMap; +#[derive(Default, Debug, Clone, Copy, PartialEq)] +pub(crate) struct ClockData { + /// Maximum operation counter of the actor at the point in time. + pub(crate) max_op: u64, + /// Sequence number of the change from this actor. + pub(crate) seq: u64, +} + /// Vector clock mapping actor indices to the max op counter of the changes created by that actor. 
#[derive(Default, Debug, Clone, PartialEq)] -pub(crate) struct Clock(HashMap); +pub(crate) struct Clock(HashMap); impl Clock { pub(crate) fn new() -> Self { Clock(Default::default()) } - pub(crate) fn include(&mut self, actor_index: usize, max_op: u64) { + pub(crate) fn include(&mut self, actor_index: usize, data: ClockData) { self.0 .entry(actor_index) - .and_modify(|m| *m = cmp::max(max_op, *m)) - .or_insert(max_op); + .and_modify(|d| { + if data.max_op > d.max_op { + *d = data; + } + }) + .or_insert(data); } pub(crate) fn covers(&self, id: &OpId) -> bool { - if let Some(max_op) = self.0.get(&id.1) { - max_op >= &id.0 + if let Some(data) = self.0.get(&id.1) { + data.max_op >= id.0 } else { false } } /// Get the max_op counter recorded in this clock for the actor. - pub(crate) fn get_for_actor(&self, actor_index: &usize) -> Option<&u64> { + pub(crate) fn get_for_actor(&self, actor_index: &usize) -> Option<&ClockData> { self.0.get(actor_index) } pub(crate) fn merge(&mut self, other: &Self) { - for (actor, max_op) in &other.0 { - self.include(*actor, *max_op); + for (actor, data) in &other.0 { + self.include(*actor, *data); } } } @@ -47,8 +58,8 @@ mod tests { fn covers() { let mut clock = Clock::new(); - clock.include(1, 20); - clock.include(2, 10); + clock.include(1, ClockData { max_op: 20, seq: 1 }); + clock.include(2, ClockData { max_op: 10, seq: 2 }); assert!(clock.covers(&OpId(10, 1))); assert!(clock.covers(&OpId(20, 1))); From 4b344ac30830f3f54c7a3ba7749e69686f2d7fa9 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 16:53:25 +0100 Subject: [PATCH 377/730] Add sync benchmark --- automerge/Cargo.toml | 4 ++ automerge/benches/sync.rs | 95 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 99 insertions(+) create mode 100644 automerge/benches/sync.rs diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index 6a907e7b..64283ca2 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -51,3 +51,7 @@ harness = false [[bench]] name 
= "map" harness = false + +[[bench]] +name = "sync" +harness = false diff --git a/automerge/benches/sync.rs b/automerge/benches/sync.rs new file mode 100644 index 00000000..9798c803 --- /dev/null +++ b/automerge/benches/sync.rs @@ -0,0 +1,95 @@ +use automerge::{sync, transaction::Transactable, Automerge, ROOT}; +use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; + +#[derive(Default)] +struct DocWithSync { + doc: Automerge, + peer_state: sync::State, +} + +impl From for DocWithSync { + fn from(doc: Automerge) -> Self { + Self { + doc, + peer_state: sync::State::default(), + } + } +} + +fn increasing_put(n: u64) -> Automerge { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + for i in 0..n { + tx.put(ROOT, i.to_string(), i).unwrap(); + } + tx.commit(); + doc +} + +// keep syncing until doc1 no longer generates a sync message for doc2. +fn sync(doc1: &mut DocWithSync, doc2: &mut DocWithSync) { + loop { + if let Some(message1) = doc1.doc.generate_sync_message(&mut doc1.peer_state) { + doc2.doc + .receive_sync_message(&mut doc2.peer_state, message1) + .unwrap() + } else { + break; + } + + if let Some(message2) = doc2.doc.generate_sync_message(&mut doc2.peer_state) { + doc1.doc + .receive_sync_message(&mut doc1.peer_state, message2) + .unwrap() + } + } +} + +fn criterion_benchmark(c: &mut Criterion) { + let sizes = [100, 1_000, 10_000]; + + let mut group = c.benchmark_group("sync unidirectional"); + for size in &sizes { + group.throughput(criterion::Throughput::Elements(*size)); + + group.bench_with_input( + BenchmarkId::new("increasing put", size), + size, + |b, &size| { + b.iter_batched( + || (increasing_put(size), DocWithSync::default()), + |(doc1, mut doc2)| sync(&mut doc1.into(), &mut doc2), + criterion::BatchSize::LargeInput, + ) + }, + ); + } + group.finish(); + + let mut group = c.benchmark_group("sync unidirectional every change"); + for size in &sizes { + group.throughput(criterion::Throughput::Elements(*size)); + + 
group.bench_with_input( + BenchmarkId::new("increasing put", size), + size, + |b, &size| { + b.iter(|| { + let mut doc1 = DocWithSync::default(); + let mut doc2 = DocWithSync::default(); + + for i in 0..size { + let mut tx = doc1.doc.transaction(); + tx.put(ROOT, i.to_string(), i).unwrap(); + tx.commit(); + sync(&mut doc1, &mut doc2); + } + }) + }, + ); + } + group.finish(); +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); From e5b527e17d4c17ec173ba35506bef47c42302850 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 16:54:52 +0100 Subject: [PATCH 378/730] Remove old functions --- automerge/src/automerge.rs | 55 -------------------------------------- 1 file changed, 55 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index ba02b58d..8bee574f 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -854,61 +854,6 @@ impl Automerge { missing } - fn get_changes_fast(&self, have_deps: &[ChangeHash]) -> Option> { - if have_deps.is_empty() { - return Some(self.history.iter().collect()); - } - - let lowest_idx = have_deps - .iter() - .filter_map(|h| self.history_index.get(h)) - .min()? - + 1; - - let mut missing_changes = vec![]; - let mut has_seen: HashSet<_> = have_deps.iter().collect(); - for change in &self.history[lowest_idx..] 
{ - let deps_seen = change.deps.iter().filter(|h| has_seen.contains(h)).count(); - if deps_seen > 0 { - if deps_seen != change.deps.len() { - // future change depends on something we haven't seen - fast path cant work - return None; - } - missing_changes.push(change); - has_seen.insert(&change.hash); - } - } - - // if we get to the end and there is a head we haven't seen then fast path cant work - if self.get_heads().iter().all(|h| has_seen.contains(h)) { - Some(missing_changes) - } else { - None - } - } - - fn get_changes_slow(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { - let mut stack: Vec<_> = have_deps.iter().collect(); - let mut has_seen = HashSet::new(); - while let Some(hash) = stack.pop() { - if has_seen.contains(&hash) { - continue; - } - if let Some(change) = self - .history_index - .get(hash) - .and_then(|i| self.history.get(*i)) - { - stack.extend(change.deps.iter()); - } - has_seen.insert(hash); - } - self.history - .iter() - .filter(|change| !has_seen.contains(&change.hash)) - .collect() - } - /// Get the changes since `have_deps` in this document using a clock internally. 
fn get_changes_clock(&self, have_deps: &[ChangeHash]) -> Result, AutomergeError> { // get the clock for the given deps From 1355a024a7bd8020d17a65ff8ee7ae49a65d713c Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 16:58:43 +0100 Subject: [PATCH 379/730] Use actor_index to get state in update_history --- automerge/src/automerge.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 8bee574f..6a5c6568 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -974,7 +974,7 @@ impl Automerge { let actor_index = self.ops.m.actors.cache(change.actor_id().clone()); self.states - .entry(self.ops.m.actors.cache(change.actor_id().clone())) + .entry(actor_index) .or_default() .push(history_index); From 3a8e833187e4313b93c7c80cc011accdbe0854aa Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 19 May 2022 17:07:31 +0100 Subject: [PATCH 380/730] Document num_ops on change --- automerge/src/change.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/automerge/src/change.rs b/automerge/src/change.rs index be9b4aff..1cf55de0 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -333,6 +333,7 @@ pub struct Change { pub deps: Vec, ops: HashMap>, extra_bytes: Range, + /// The number of operations in this change. 
num_ops: usize, } From 8b1c3c73cd9dc5fd17d2a0edcaa264d626de1c62 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 20 May 2022 16:13:10 +0100 Subject: [PATCH 381/730] Use BTreeSet for sync::State to allow deriving Hash --- automerge-wasm/src/interop.rs | 26 +++++++++++++++++++++++++- automerge-wasm/src/sync.rs | 4 ++-- automerge/src/automerge.rs | 4 ++-- automerge/src/sync/bloom.rs | 2 +- automerge/src/sync/state.rs | 10 +++++----- 5 files changed, 35 insertions(+), 11 deletions(-) diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index be0927f7..bc17c018 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -2,7 +2,7 @@ use automerge as am; use automerge::transaction::Transactable; use automerge::{Change, ChangeHash, Prop}; use js_sys::{Array, Object, Reflect, Uint8Array}; -use std::collections::HashSet; +use std::collections::{BTreeSet, HashSet}; use std::fmt::Display; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; @@ -68,6 +68,16 @@ impl From> for JS { } } +impl From> for JS { + fn from(heads: BTreeSet) -> Self { + let result: JsValue = Object::new().into(); + for key in &heads { + Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap(); + } + JS(result) + } +} + impl From>> for JS { fn from(heads: Option>) -> Self { if let Some(v) = heads { @@ -96,6 +106,20 @@ impl TryFrom for HashSet { } } +impl TryFrom for BTreeSet { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let mut result = BTreeSet::new(); + for key in Reflect::own_keys(&value.0)?.iter() { + if let Some(true) = Reflect::get(&value.0, &key)?.as_bool() { + result.insert(key.into_serde().map_err(to_js_err)?); + } + } + Ok(result) + } +} + impl TryFrom for Vec { type Error = JsValue; diff --git a/automerge-wasm/src/sync.rs b/automerge-wasm/src/sync.rs index 5a24a28c..f76eae84 100644 --- a/automerge-wasm/src/sync.rs +++ b/automerge-wasm/src/sync.rs @@ -1,7 +1,7 @@ use automerge as am; use 
automerge::ChangeHash; use js_sys::Uint8Array; -use std::collections::{HashMap, HashSet}; +use std::collections::{BTreeSet, HashMap}; use std::convert::TryInto; use wasm_bindgen::prelude::*; @@ -33,7 +33,7 @@ impl SyncState { #[wasm_bindgen(setter, js_name = sentHashes)] pub fn set_sent_hashes(&mut self, hashes: JsValue) -> Result<(), JsValue> { let hashes_map: HashMap = hashes.into_serde().map_err(to_js_err)?; - let hashes_set: HashSet = hashes_map.keys().cloned().collect(); + let hashes_set: BTreeSet = hashes_map.keys().cloned().collect(); self.0.sent_hashes = hashes_set; Ok(()) } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index f0963ec4..a893b794 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1,4 +1,4 @@ -use std::collections::{HashMap, HashSet, VecDeque}; +use std::collections::{BTreeSet, HashMap, HashSet, VecDeque}; use std::fmt::Debug; use std::num::NonZeroU64; use std::ops::RangeBounds; @@ -763,7 +763,7 @@ impl Automerge { /// Filter the changes down to those that are not transitive dependencies of the heads. /// /// Thus a graph with these heads has not seen the remaining changes. - pub(crate) fn filter_changes(&self, heads: &[ChangeHash], changes: &mut HashSet) { + pub(crate) fn filter_changes(&self, heads: &[ChangeHash], changes: &mut BTreeSet) { // Reduce the working set to find to those which we may be able to find. // This filters out those hashes that are successors of or concurrent with all of the // heads. 
diff --git a/automerge/src/sync/bloom.rs b/automerge/src/sync/bloom.rs index 55edef62..0ed1332f 100644 --- a/automerge/src/sync/bloom.rs +++ b/automerge/src/sync/bloom.rs @@ -8,7 +8,7 @@ use crate::{decoding, decoding::Decoder, encoding::Encodable, ChangeHash}; const BITS_PER_ENTRY: u32 = 10; const NUM_PROBES: u32 = 7; -#[derive(Default, Debug, Clone)] +#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)] pub struct BloomFilter { num_entries: u32, num_bits_per_entry: u32, diff --git a/automerge/src/sync/state.rs b/automerge/src/sync/state.rs index 9828060c..2ca5216f 100644 --- a/automerge/src/sync/state.rs +++ b/automerge/src/sync/state.rs @@ -1,4 +1,4 @@ -use std::{borrow::Cow, collections::HashSet}; +use std::{borrow::Cow, collections::BTreeSet}; use super::{decode_hashes, encode_hashes, BloomFilter}; use crate::{decoding, decoding::Decoder, ChangeHash}; @@ -6,20 +6,20 @@ use crate::{decoding, decoding::Decoder, ChangeHash}; const SYNC_STATE_TYPE: u8 = 0x43; // first byte of an encoded sync state, for identification /// The state of synchronisation with a peer. -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, PartialEq, Eq, Hash)] pub struct State { pub shared_heads: Vec, pub last_sent_heads: Vec, pub their_heads: Option>, pub their_need: Option>, pub their_have: Option>, - pub sent_hashes: HashSet, + pub sent_hashes: BTreeSet, } /// A summary of the changes that the sender of the message already has. /// This is implicitly a request to the recipient to send all changes that the /// sender does not already have. -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, PartialEq, Eq, Hash)] pub struct Have { /// The heads at the time of the last successful sync with this recipient. 
pub last_sync: Vec, @@ -57,7 +57,7 @@ impl State { their_heads: None, their_need: None, their_have: Some(Vec::new()), - sent_hashes: HashSet::new(), + sent_hashes: BTreeSet::new(), }) } } From 2c1a71e1436d48e132beb3399fe5560cb6da2c6f Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 20 May 2022 18:01:46 +0100 Subject: [PATCH 382/730] Use expect for getting clock --- automerge/src/automerge.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 6a5c6568..49dee479 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -980,7 +980,10 @@ impl Automerge { let mut clock = Clock::new(); for hash in &change.deps { - let c = self.clocks.get(hash).unwrap(); + let c = self + .clocks + .get(hash) + .expect("Change's deps should already be in the document"); clock.merge(c); } clock.include( From 551f6e13434d9e730bc7a90debde97206c7228a8 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 17 May 2022 13:22:18 -0400 Subject: [PATCH 383/730] convert automerge-js to typescript --- automerge-js/.gitignore | 1 + automerge-js/package.json | 15 +- automerge-js/src/{columnar.js => columnar.ts} | 128 ++--- automerge-js/src/{common.js => common.ts} | 19 +- automerge-js/src/constants.js | 18 - automerge-js/src/constants.ts | 15 + automerge-js/src/{counter.js => counter.ts} | 43 +- automerge-js/src/{encoding.js => encoding.ts} | 69 ++- automerge-js/src/index.js | 372 ------------- automerge-js/src/index.ts | 496 ++++++++++++++++++ automerge-js/src/{numbers.js => numbers.ts} | 19 +- automerge-js/src/{proxies.js => proxies.ts} | 79 +-- automerge-js/src/{sync.js => sync.ts} | 48 +- automerge-js/src/{text.js => text.ts} | 43 +- automerge-js/src/uuid.js | 16 - automerge-js/src/uuid.ts | 16 + .../test/{basic_test.js => basic_test.ts} | 7 +- .../{columnar_test.js => columnar_test.ts} | 8 +- automerge-js/test/{helpers.js => helpers.ts} | 4 +- .../test/{legacy_tests.js => legacy_tests.ts} | 
13 +- .../test/{sync_test.js => sync_test.ts} | 11 +- .../test/{text_test.js => text_test.ts} | 6 +- .../test/{uuid_test.js => uuid_test.ts} | 4 +- automerge-js/tsconfig.json | 16 + automerge-js/tslint.json | 3 + automerge-wasm/index.d.ts | 5 + 26 files changed, 845 insertions(+), 629 deletions(-) rename automerge-js/src/{columnar.js => columnar.ts} (94%) rename automerge-js/src/{common.js => common.ts} (78%) delete mode 100644 automerge-js/src/constants.js create mode 100644 automerge-js/src/constants.ts rename automerge-js/src/{counter.js => counter.ts} (72%) rename automerge-js/src/{encoding.js => encoding.ts} (97%) delete mode 100644 automerge-js/src/index.js create mode 100644 automerge-js/src/index.ts rename automerge-js/src/{numbers.js => numbers.ts} (76%) rename automerge-js/src/{proxies.js => proxies.ts} (90%) rename automerge-js/src/{sync.js => sync.ts} (94%) rename automerge-js/src/{text.js => text.ts} (82%) delete mode 100644 automerge-js/src/uuid.js create mode 100644 automerge-js/src/uuid.ts rename automerge-js/test/{basic_test.js => basic_test.ts} (98%) rename automerge-js/test/{columnar_test.js => columnar_test.ts} (96%) rename automerge-js/test/{helpers.js => helpers.ts} (93%) rename automerge-js/test/{legacy_tests.js => legacy_tests.ts} (99%) rename automerge-js/test/{sync_test.js => sync_test.ts} (99%) rename automerge-js/test/{text_test.js => text_test.ts} (99%) rename automerge-js/test/{uuid_test.js => uuid_test.ts} (89%) create mode 100644 automerge-js/tsconfig.json create mode 100644 automerge-js/tslint.json diff --git a/automerge-js/.gitignore b/automerge-js/.gitignore index 5add9449..05065cf0 100644 --- a/automerge-js/.gitignore +++ b/automerge-js/.gitignore @@ -1,2 +1,3 @@ /node_modules /yarn.lock +dist diff --git a/automerge-js/package.json b/automerge-js/package.json index 17018429..4b3b2b55 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,10 +4,21 @@ "main": "src/index.js", "license": "MIT", "scripts": { - 
"test": "mocha --bail --full-trace" + "lint": "tslint --project tsconfig.json", + "test": "ts-mocha -p tsconfig.json test/**/*.ts" + }, + "directories": { + "src": "./src", + "test": "./test" }, "devDependencies": { - "mocha": "^9.1.1" + "@types/expect": "^24.3.0", + "@types/mocha": "^9.1.1", + "@types/uuid": "^8.3.4", + "mocha": "^10.0.0", + "ts-mocha": "^10.0.0", + "tslint": "^6.1.3", + "typescript": "^4.6.4" }, "dependencies": { "automerge-wasm": "file:../automerge-wasm", diff --git a/automerge-js/src/columnar.js b/automerge-js/src/columnar.ts similarity index 94% rename from automerge-js/src/columnar.js rename to automerge-js/src/columnar.ts index 8d266f5b..fd203333 100644 --- a/automerge-js/src/columnar.js +++ b/automerge-js/src/columnar.ts @@ -1,9 +1,9 @@ -const pako = require('pako') -const { copyObject, parseOpId, equalBytes } = require('./common') -const { +import * as pako from 'pako' +import { copyObject, parseOpId, equalBytes } from './common' +import { utf8ToString, hexStringToBytes, bytesToHexString, Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder -} = require('./encoding') +} from './encoding' // Maybe we should be using the platform's built-in hash implementation? // Node has the crypto module: https://nodejs.org/api/crypto.html and browsers have @@ -18,7 +18,7 @@ const { // - It does not need a secure source of random bits and does not need to be // constant-time; // - I have reviewed the source code and it seems pretty reasonable. 
-const { Hash } = require('fast-sha256') +import { Hash } from 'fast-sha256' // These bytes don't mean anything, they were generated randomly const MAGIC_BYTES = new Uint8Array([0x85, 0x6f, 0x4a, 0x83]) @@ -32,7 +32,7 @@ const CHUNK_TYPE_DEFLATE = 2 // like CHUNK_TYPE_CHANGE but with DEFLATE compress const DEFLATE_MIN_SIZE = 256 // The least-significant 3 bits of a columnId indicate its datatype -const COLUMN_TYPE = { +export const COLUMN_TYPE = { GROUP_CARD: 0, ACTOR_ID: 1, INT_RLE: 2, INT_DELTA: 3, BOOLEAN: 4, STRING_RLE: 5, VALUE_LEN: 6, VALUE_RAW: 7 } @@ -43,15 +43,15 @@ const COLUMN_TYPE_DEFLATE = 8 // In the values in a column of type VALUE_LEN, the bottom four bits indicate the type of the value, // one of the following types in VALUE_TYPE. The higher bits indicate the length of the value in the // associated VALUE_RAW column (in bytes). -const VALUE_TYPE = { +export const VALUE_TYPE = { NULL: 0, FALSE: 1, TRUE: 2, LEB128_UINT: 3, LEB128_INT: 4, IEEE754: 5, UTF8: 6, BYTES: 7, COUNTER: 8, TIMESTAMP: 9, MIN_UNKNOWN: 10, MAX_UNKNOWN: 15 } // make* actions must be at even-numbered indexes in this list -const ACTIONS = ['makeMap', 'set', 'makeList', 'del', 'makeText', 'inc', 'makeTable', 'link'] +export const ACTIONS = ['makeMap', 'set', 'makeList', 'del', 'makeText', 'inc', 'makeTable', 'link'] -const OBJECT_TYPE = {makeMap: 'map', makeList: 'list', makeText: 'text', makeTable: 'table'} +export const OBJECT_TYPE = {makeMap: 'map', makeList: 'list', makeText: 'text', makeTable: 'table'} const COMMON_COLUMNS = [ {columnName: 'objActor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID}, @@ -69,13 +69,13 @@ const COMMON_COLUMNS = [ {columnName: 'chldCtr', columnId: 6 << 4 | COLUMN_TYPE.INT_DELTA} ] -const CHANGE_COLUMNS = COMMON_COLUMNS.concat([ +export const CHANGE_COLUMNS = COMMON_COLUMNS.concat([ {columnName: 'predNum', columnId: 7 << 4 | COLUMN_TYPE.GROUP_CARD}, {columnName: 'predActor', columnId: 7 << 4 | COLUMN_TYPE.ACTOR_ID}, {columnName: 'predCtr', columnId: 7 << 4 
| COLUMN_TYPE.INT_DELTA} ]) -const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([ +export const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([ {columnName: 'succNum', columnId: 8 << 4 | COLUMN_TYPE.GROUP_CARD}, {columnName: 'succActor', columnId: 8 << 4 | COLUMN_TYPE.ACTOR_ID}, {columnName: 'succCtr', columnId: 8 << 4 | COLUMN_TYPE.INT_DELTA} @@ -131,7 +131,7 @@ function compareParsedOpIds(id1, id2) { * false. */ function parseAllOpIds(changes, single) { - const actors = {}, newChanges = [] + const actors : any = {}, newChanges : any = [] for (let change of changes) { change = copyObject(change) actors[change.actor] = true @@ -294,7 +294,7 @@ function encodeValue(op, columns) { * form `{value: value, datatype: datatypeTag}` where `value` is a JavaScript primitive datatype * corresponding to the value, and `datatypeTag` is a datatype annotation such as 'counter'. */ -function decodeValue(sizeTag, bytes) { +export function decodeValue(sizeTag, bytes) { if (sizeTag === VALUE_TYPE.NULL) { return {value: null} } else if (sizeTag === VALUE_TYPE.FALSE) { @@ -367,7 +367,7 @@ function decodeValueColumns(columns, colIndex, actorIds, result) { * objects. */ function encodeOps(ops, forDocument) { - const columns = { + const columns : any = { objActor : new RLEEncoder('uint'), objCtr : new RLEEncoder('uint'), keyActor : new RLEEncoder('uint'), @@ -427,7 +427,7 @@ function encodeOps(ops, forDocument) { } } - let columnList = [] + let columnList : any = [] for (let {columnName, columnId} of forDocument ? 
DOC_OPS_COLUMNS : CHANGE_COLUMNS) { if (columns[columnName]) columnList.push({id: columnId, name: columnName, encoder: columns[columnName]}) } @@ -436,7 +436,7 @@ function encodeOps(ops, forDocument) { function expandMultiOps(ops, startOp, actor) { let opNum = startOp - let expandedOps = [] + let expandedOps : any = [] for (const op of ops) { if (op.action === 'set' && op.values && op.insert) { if (op.pred.length !== 0) throw new RangeError('multi-insert pred must be empty') @@ -470,12 +470,12 @@ function expandMultiOps(ops, startOp, actor) { * individual change. */ function decodeOps(ops, forDocument) { - const newOps = [] + const newOps : any = [] for (let op of ops) { const obj = (op.objCtr === null) ? '_root' : `${op.objCtr}@${op.objActor}` const elemId = op.keyStr ? undefined : (op.keyCtr === 0 ? '_head' : `${op.keyCtr}@${op.keyActor}`) const action = ACTIONS[op.action] || op.action - const newOp = elemId ? {obj, elemId, action} : {obj, key: op.keyStr, action} + const newOp : any = elemId ? 
{obj, elemId, action} : {obj, key: op.keyStr, action} newOp.insert = !!op.insert if (ACTIONS[op.action] === 'set' || ACTIONS[op.action] === 'inc') { newOp.value = op.valLen @@ -511,7 +511,7 @@ function checkSortedOpIds(opIds) { } } -function encoderByColumnId(columnId) { +export function encoderByColumnId(columnId) { if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { return new DeltaEncoder() } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { @@ -525,7 +525,7 @@ function encoderByColumnId(columnId) { } } -function decoderByColumnId(columnId, buffer) { +export function decoderByColumnId(columnId, buffer) { if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { return new DeltaDecoder(buffer) } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { @@ -539,9 +539,9 @@ function decoderByColumnId(columnId, buffer) { } } -function makeDecoders(columns, columnSpec) { +export function makeDecoders(columns, columnSpec) { const emptyBuf = new Uint8Array(0) - let decoders = [], columnIndex = 0, specIndex = 0 + let decoders : any = [], columnIndex = 0, specIndex = 0 while (columnIndex < columns.length || specIndex < columnSpec.length) { if (columnIndex === columns.length || @@ -565,7 +565,7 @@ function makeDecoders(columns, columnSpec) { function decodeColumns(columns, actorIds, columnSpec) { columns = makeDecoders(columns, columnSpec) - let parsedRows = [] + let parsedRows : any = [] while (columns.some(col => !col.decoder.done)) { let row = {}, col = 0 while (col < columns.length) { @@ -576,7 +576,7 @@ function decodeColumns(columns, actorIds, columnSpec) { } if (columnId % 8 === COLUMN_TYPE.GROUP_CARD) { - const values = [], count = columns[col].decoder.readValue() + const values : any = [], count = columns[col].decoder.readValue() for (let i = 0; i < count; i++) { let value = {} for (let colOffset = 1; colOffset < groupCols; colOffset++) { @@ -600,7 +600,7 @@ function decodeColumnInfo(decoder) { // deflate-compressed. We ignore this bit when checking whether columns are sorted by ID. 
const COLUMN_ID_MASK = (-1 ^ COLUMN_TYPE_DEFLATE) >>> 0 - let lastColumnId = -1, columns = [], numColumns = decoder.readUint53() + let lastColumnId = -1, columns : any = [], numColumns = decoder.readUint53() for (let i = 0; i < numColumns; i++) { const columnId = decoder.readUint53(), bufferLen = decoder.readUint53() if ((columnId & COLUMN_ID_MASK) <= (lastColumnId & COLUMN_ID_MASK)) { @@ -622,11 +622,11 @@ function encodeColumnInfo(encoder, columns) { } function decodeChangeHeader(decoder) { - const numDeps = decoder.readUint53(), deps = [] + const numDeps = decoder.readUint53(), deps : any = [] for (let i = 0; i < numDeps; i++) { deps.push(bytesToHexString(decoder.readRawBytes(32))) } - let change = { + let change : any = { actor: decoder.readHexString(), seq: decoder.readUint53(), startOp: decoder.readUint53(), @@ -682,7 +682,7 @@ function decodeContainerHeader(decoder, computeHash) { const hashStartOffset = decoder.offset const chunkType = decoder.readByte() const chunkLength = decoder.readUint53() - const header = {chunkType, chunkLength, chunkData: decoder.readRawBytes(chunkLength)} + const header : any = {chunkType, chunkLength, chunkData: decoder.readRawBytes(chunkLength)} if (computeHash) { const sha256 = new Hash() @@ -699,7 +699,7 @@ function decodeContainerHeader(decoder, computeHash) { /** * Returns the checksum of a change (bytes 4 to 7) as a 32-bit unsigned integer. 
*/ -function getChangeChecksum(change) { +export function getChangeChecksum(change) { if (change[0] !== MAGIC_BYTES[0] || change[1] !== MAGIC_BYTES[1] || change[2] !== MAGIC_BYTES[2] || change[3] !== MAGIC_BYTES[3]) { throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83') @@ -707,9 +707,9 @@ function getChangeChecksum(change) { return ((change[4] << 24) | (change[5] << 16) | (change[6] << 8) | change[7]) >>> 0 } -function encodeChange(changeObj) { +export function encodeChange(changeObj) { const { changes, actorIds } = parseAllOpIds([changeObj], true) - const change = changes[0] + const change : any = changes[0] const { hash, bytes } = encodeContainer(CHUNK_TYPE_CHANGE, encoder => { if (!Array.isArray(change.deps)) throw new TypeError('deps is not an array') @@ -725,7 +725,7 @@ function encodeChange(changeObj) { encoder.appendUint53(actorIds.length - 1) for (let actor of actorIds.slice(1)) encoder.appendHexString(actor) - const columns = encodeOps(change.ops, false) + const columns : any = encodeOps(change.ops, false) encodeColumnInfo(encoder, columns) for (let column of columns) encoder.appendRawBytes(column.encoder.buffer) if (change.extraBytes) encoder.appendRawBytes(change.extraBytes) @@ -738,16 +738,16 @@ function encodeChange(changeObj) { return (bytes.byteLength >= DEFLATE_MIN_SIZE) ? 
deflateChange(bytes) : bytes } -function decodeChangeColumns(buffer) { +export function decodeChangeColumns(buffer) { if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) const decoder = new Decoder(buffer) - const header = decodeContainerHeader(decoder, true) + const header : any = decodeContainerHeader(decoder, true) const chunkDecoder = new Decoder(header.chunkData) if (!decoder.done) throw new RangeError('Encoded change has trailing data') if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - const change = decodeChangeHeader(chunkDecoder) - const columns = decodeColumnInfo(chunkDecoder) + const change : any = decodeChangeHeader(chunkDecoder) + const columns : any = decodeColumnInfo(chunkDecoder) for (let i = 0; i < columns.length; i++) { if ((columns[i].columnId & COLUMN_TYPE_DEFLATE) !== 0) { throw new RangeError('change must not contain deflated columns') @@ -767,8 +767,8 @@ function decodeChangeColumns(buffer) { /** * Decodes one change in binary format into its JS object representation. */ -function decodeChange(buffer) { - const change = decodeChangeColumns(buffer) +export function decodeChange(buffer) { + const change : any = decodeChangeColumns(buffer) change.ops = decodeOps(decodeColumns(change.columns, change.actorIds, CHANGE_COLUMNS), false) delete change.actorIds delete change.columns @@ -780,13 +780,13 @@ function decodeChange(buffer) { * the operations. Saves work when we only need to inspect the headers. Only * computes the hash of the change if `computeHash` is true. 
*/ -function decodeChangeMeta(buffer, computeHash) { +export function decodeChangeMeta(buffer, computeHash) : any { if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) - const header = decodeContainerHeader(new Decoder(buffer), computeHash) + const header : any = decodeContainerHeader(new Decoder(buffer), computeHash) if (header.chunkType !== CHUNK_TYPE_CHANGE) { throw new RangeError('Buffer chunk type is not a change') } - const meta = decodeChangeHeader(new Decoder(header.chunkData)) + const meta : any = decodeChangeHeader(new Decoder(header.chunkData)) meta.change = buffer if (computeHash) meta.hash = header.hash return meta @@ -826,8 +826,8 @@ function inflateChange(buffer) { * Takes an Uint8Array that may contain multiple concatenated changes, and * returns an array of subarrays, each subarray containing one change. */ -function splitContainers(buffer) { - let decoder = new Decoder(buffer), chunks = [], startOffset = 0 +export function splitContainers(buffer) { + let decoder = new Decoder(buffer), chunks : any = [], startOffset = 0 while (!decoder.done) { decodeContainerHeader(decoder, false) chunks.push(buffer.subarray(startOffset, decoder.offset)) @@ -840,8 +840,8 @@ function splitContainers(buffer) { * Decodes a list of changes from the binary format into JS objects. * `binaryChanges` is an array of `Uint8Array` objects. 
*/ -function decodeChanges(binaryChanges) { - let decoded = [] +export function decodeChanges(binaryChanges) { + let decoded : any = [] for (let binaryChange of binaryChanges) { for (let chunk of splitContainers(binaryChange)) { if (chunk[8] === CHUNK_TYPE_DOCUMENT) { @@ -914,11 +914,11 @@ function groupDocumentOps(changes) { let ops = [] for (let objectId of Object.keys(byObjectId).sort(sortOpIds)) { - let keys = [] + let keys : string[] = [] if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { let stack = ['_head'] while (stack.length > 0) { - const key = stack.pop() + const key : any = stack.pop() if (key !== '_head') keys.push(key) for (let opId of byReference[objectId][key].sort(sortOpIds)) stack.push(opId) } @@ -931,6 +931,7 @@ function groupDocumentOps(changes) { for (let key of keys) { for (let opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) { const op = byObjectId[objectId][key][opId] + // @ts-ignore if (op.action !== 'del') ops.push(op) } } @@ -976,6 +977,7 @@ function groupChangeOps(changes, ops) { delete op.succ } for (let op of Object.values(opsById)) { + // @ts-ignore if (op.action === 'del') ops.push(op) } @@ -1055,7 +1057,7 @@ function encodeDocumentChanges(changes) { } } - let changesColumns = [] + let changesColumns : any = [] for (let {columnName, columnId} of DOCUMENT_COLUMNS) { changesColumns.push({id: columnId, name: columnName, encoder: columns[columnName]}) } @@ -1104,7 +1106,7 @@ function decodeDocumentChanges(changes, expectedHeads) { /** * Transforms a list of changes into a binary representation of the document state. 
*/ -function encodeDocument(binaryChanges) { +export function encodeDocument(binaryChanges) { const { changes, actorIds } = parseAllOpIds(decodeChanges(binaryChanges), false) const { changesColumns, heads } = encodeDocumentChanges(changes) const opsColumns = encodeOps(groupDocumentOps(changes), true) @@ -1122,29 +1124,31 @@ function encodeDocument(binaryChanges) { } encodeColumnInfo(encoder, changesColumns) encodeColumnInfo(encoder, opsColumns) + // @ts-ignore for (let column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) + // @ts-ignore for (let column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) }).bytes } -function decodeDocumentHeader(buffer) { +export function decodeDocumentHeader(buffer) { const documentDecoder = new Decoder(buffer) const header = decodeContainerHeader(documentDecoder, true) const decoder = new Decoder(header.chunkData) if (!documentDecoder.done) throw new RangeError('Encoded document has trailing data') if (header.chunkType !== CHUNK_TYPE_DOCUMENT) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - const actorIds = [], numActors = decoder.readUint53() + const actorIds : string[] = [], numActors = decoder.readUint53() for (let i = 0; i < numActors; i++) { actorIds.push(decoder.readHexString()) } - const heads = [], numHeads = decoder.readUint53() + const heads : string[] = [], numHeads = decoder.readUint53() for (let i = 0; i < numHeads; i++) { heads.push(bytesToHexString(decoder.readRawBytes(32))) } - const changesColumns = decodeColumnInfo(decoder) - const opsColumns = decodeColumnInfo(decoder) + const changesColumns : any = decodeColumnInfo(decoder) + const opsColumns : any = decodeColumnInfo(decoder) for (let i = 0; i < changesColumns.length; i++) { changesColumns[i].buffer = decoder.readRawBytes(changesColumns[i].bufferLen) inflateColumn(changesColumns[i]) @@ -1158,7 +1162,7 @@ function decodeDocumentHeader(buffer) { return { changesColumns, opsColumns, actorIds, heads, extraBytes } } 
-function decodeDocument(buffer) { +export function decodeDocument(buffer) { const { changesColumns, opsColumns, actorIds, heads } = decodeDocumentHeader(buffer) const changes = decodeColumns(changesColumns, actorIds, DOCUMENT_COLUMNS) const ops = decodeOps(decodeColumns(opsColumns, actorIds, DOC_OPS_COLUMNS), true) @@ -1196,7 +1200,7 @@ function inflateColumn(column) { * or false if the property has been deleted. */ function addPatchProperty(objects, property) { - let values = {}, counter = null + let values : any = {}, counter : any = null for (let op of property.ops) { // Apply counters and their increments regardless of the number of successor operations if (op.actionName === 'set' && op.value.datatype === 'counter') { @@ -1290,7 +1294,7 @@ function condenseEdits(diff) { * Appends a list edit operation (insert, update, remove) to an array of existing operations. If the * last existing operation can be extended (as a multi-op), we do that. */ -function appendEdit(existingEdits, nextEdit) { +export function appendEdit(existingEdits, nextEdit) { if (existingEdits.length === 0) { existingEdits.push(nextEdit) return @@ -1336,13 +1340,13 @@ function opIdDelta(id1, id2, delta = 1) { * and returns a patch that can be sent to the frontend to instantiate the * current state of that document. 
*/ -function constructPatch(documentBuffer) { +export function constructPatch(documentBuffer) { const { opsColumns, actorIds } = decodeDocumentHeader(documentBuffer) - const col = makeDecoders(opsColumns, DOC_OPS_COLUMNS).reduce( - (acc, col) => Object.assign(acc, {[col.columnName]: col.decoder}), {}) + const col : any = makeDecoders(opsColumns, DOC_OPS_COLUMNS).reduce( + (acc, col: any) => Object.assign(acc, {[col.columnName]: col.decoder}), {}) let objects = {_root: {objectId: '_root', type: 'map', props: {}}} - let property = null + let property : any = null while (!col.idActor.done) { const opId = `${col.idCtr.readValue()}@${actorIds[col.idActor.readValue()]}` @@ -1369,7 +1373,7 @@ function constructPatch(documentBuffer) { const rawValue = col.valRaw.readRawBytes(sizeTag >> 4) const value = decodeValue(sizeTag, rawValue) const succNum = col.succNum.readValue() - let succ = [] + let succ : string[] = [] for (let i = 0; i < succNum; i++) { succ.push(`${col.succCtr.readValue()}@${actorIds[col.succActor.readValue()]}`) } diff --git a/automerge-js/src/common.js b/automerge-js/src/common.ts similarity index 78% rename from automerge-js/src/common.js rename to automerge-js/src/common.ts index b41cadc8..5f1b53d1 100644 --- a/automerge-js/src/common.js +++ b/automerge-js/src/common.ts @@ -1,4 +1,4 @@ -function isObject(obj) { +export function isObject(obj: any) : boolean { return typeof obj === 'object' && obj !== null } @@ -6,9 +6,9 @@ function isObject(obj) { * Returns a shallow copy of the object `obj`. Faster than `Object.assign({}, obj)`. 
* https://jsperf.com/cloning-large-objects/1 */ -function copyObject(obj) { +export function copyObject(obj: any) : any { if (!isObject(obj)) return {} - let copy = {} + let copy : any = {} for (let key of Object.keys(obj)) { copy[key] = obj[key] } @@ -19,7 +19,13 @@ function copyObject(obj) { * Takes a string in the form that is used to identify operations (a counter concatenated * with an actor ID, separated by an `@` sign) and returns an object `{counter, actorId}`. */ -function parseOpId(opId) { + +interface OpIdObj { + counter: number, + actorId: string +} + +export function parseOpId(opId: string) : OpIdObj { const match = /^(\d+)@(.*)$/.exec(opId || '') if (!match) { throw new RangeError(`Not a valid opId: ${opId}`) @@ -30,7 +36,7 @@ function parseOpId(opId) { /** * Returns true if the two byte arrays contain the same data, false if not. */ -function equalBytes(array1, array2) { +export function equalBytes(array1: Uint8Array, array2: Uint8Array) : boolean { if (!(array1 instanceof Uint8Array) || !(array2 instanceof Uint8Array)) { throw new TypeError('equalBytes can only compare Uint8Arrays') } @@ -41,6 +47,3 @@ function equalBytes(array1, array2) { return true } -module.exports = { - isObject, copyObject, parseOpId, equalBytes -} diff --git a/automerge-js/src/constants.js b/automerge-js/src/constants.js deleted file mode 100644 index ea92228c..00000000 --- a/automerge-js/src/constants.js +++ /dev/null @@ -1,18 +0,0 @@ -// Properties of the document root object -//const OPTIONS = Symbol('_options') // object containing options passed to init() -//const CACHE = Symbol('_cache') // map from objectId to immutable object -const STATE = Symbol('_state') // object containing metadata about current state (e.g. sequence numbers) -const HEADS = Symbol('_heads') // object containing metadata about current state (e.g. sequence numbers) -const OBJECT_ID = Symbol('_objectId') // object containing metadata about current state (e.g. 
sequence numbers) -const READ_ONLY = Symbol('_readOnly') // object containing metadata about current state (e.g. sequence numbers) -const FROZEN = Symbol('_frozen') // object containing metadata about current state (e.g. sequence numbers) - -// Properties of all Automerge objects -//const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string) -//const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts -//const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback -//const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element - -module.exports = { - STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN -} diff --git a/automerge-js/src/constants.ts b/automerge-js/src/constants.ts new file mode 100644 index 00000000..597bfa1c --- /dev/null +++ b/automerge-js/src/constants.ts @@ -0,0 +1,15 @@ +// Properties of the document root object +//const OPTIONS = Symbol('_options') // object containing options passed to init() +//const CACHE = Symbol('_cache') // map from objectId to immutable object +export const STATE = Symbol('_state') // object containing metadata about current state (e.g. sequence numbers) +export const HEADS = Symbol('_heads') // object containing metadata about current state (e.g. sequence numbers) +export const OBJECT_ID = Symbol('_objectId') // object containing metadata about current state (e.g. sequence numbers) +export const READ_ONLY = Symbol('_readOnly') // object containing metadata about current state (e.g. sequence numbers) +export const FROZEN = Symbol('_frozen') // object containing metadata about current state (e.g. 
sequence numbers) + +// Properties of all Automerge objects +//const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string) +//const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts +//const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback +//const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element + diff --git a/automerge-js/src/counter.js b/automerge-js/src/counter.ts similarity index 72% rename from automerge-js/src/counter.js rename to automerge-js/src/counter.ts index 6ca54f6d..fba2d8d0 100644 --- a/automerge-js/src/counter.js +++ b/automerge-js/src/counter.ts @@ -1,12 +1,14 @@ +import { Automerge, ObjID, Prop } from "automerge-wasm" /** * The most basic CRDT: an integer value that can be changed only by * incrementing and decrementing. Since addition of integers is commutative, * the value trivially converges. */ -class Counter { - constructor(value) { +export class Counter { + value : number; + + constructor(value?: number) { this.value = value || 0 - Object.freeze(this) } /** @@ -17,7 +19,7 @@ class Counter { * concatenating it with another string, as in `x + ''`. * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf */ - valueOf() { + valueOf() : number { return this.value } @@ -26,7 +28,7 @@ class Counter { * this method is called e.g. when you do `['value: ', x].join('')` or when * you use string interpolation: `value: ${x}`. */ - toString() { + toString() : string { return this.valueOf().toString() } @@ -34,7 +36,7 @@ class Counter { * Returns the counter value, so that a JSON serialization of an Automerge * document represents the counter simply as an integer. */ - toJSON() { + toJSON() : number { return this.value } } @@ -44,11 +46,24 @@ class Counter { * callback. 
*/ class WriteableCounter extends Counter { + context: Automerge + path: string[] + objectId: ObjID + key: Prop + + constructor(value: number, context: Automerge, path: string[], objectId: ObjID, key: Prop) { + super(value) + this.context = context + this.path = path + this.objectId = objectId + this.key = key + } + /** * Increases the value of the counter by `delta`. If `delta` is not given, * increases the value of the counter by 1. */ - increment(delta) { + increment(delta: number) : number { delta = typeof delta === 'number' ? delta : 1 this.context.increment(this.objectId, this.key, delta) this.value += delta @@ -59,7 +74,7 @@ class WriteableCounter extends Counter { * Decreases the value of the counter by `delta`. If `delta` is not given, * decreases the value of the counter by 1. */ - decrement(delta) { + decrement(delta: number) : number { return this.increment(typeof delta === 'number' ? -delta : -1) } } @@ -71,14 +86,8 @@ class WriteableCounter extends Counter { * the property name (key in map, or index in list) where the counter is * located. 
*/ -function getWriteableCounter(value, context, path, objectId, key) { - const instance = Object.create(WriteableCounter.prototype) - instance.value = value - instance.context = context - instance.path = path - instance.objectId = objectId - instance.key = key - return instance +export function getWriteableCounter(value: number, context: Automerge, path: string[], objectId: ObjID, key: Prop) { + return new WriteableCounter(value, context, path, objectId, key) } -module.exports = { Counter, getWriteableCounter } +//module.exports = { Counter, getWriteableCounter } diff --git a/automerge-js/src/encoding.js b/automerge-js/src/encoding.ts similarity index 97% rename from automerge-js/src/encoding.js rename to automerge-js/src/encoding.ts index 92b62df6..55ba679d 100644 --- a/automerge-js/src/encoding.js +++ b/automerge-js/src/encoding.ts @@ -8,18 +8,18 @@ const utf8encoder = new TextEncoder() const utf8decoder = new TextDecoder('utf-8') -function stringToUtf8(string) { - return utf8encoder.encode(string) +export function stringToUtf8(s: string) : BufferSource { + return utf8encoder.encode(s) } -function utf8ToString(buffer) { +export function utf8ToString(buffer: BufferSource) : string { return utf8decoder.decode(buffer) } /** * Converts a string consisting of hexadecimal digits into an Uint8Array. */ -function hexStringToBytes(value) { +export function hexStringToBytes(value: string) : Uint8Array { if (typeof value !== 'string') { throw new TypeError('value is not a string') } @@ -29,6 +29,7 @@ function hexStringToBytes(value) { if (value === '') { return new Uint8Array(0) } else { + // @ts-ignore return new Uint8Array(value.match(/../g).map(b => parseInt(b, 16))) } } @@ -42,7 +43,7 @@ for (let i = 0; i < 256; i++) { /** * Converts a Uint8Array into the equivalent hexadecimal string. 
*/ -function bytesToHexString(bytes) { +export function bytesToHexString(bytes: Uint8Array) : string { let hex = '', len = bytes.byteLength for (let i = 0; i < len; i++) { hex += BYTE_TO_HEX[bytes[i]] @@ -54,7 +55,10 @@ function bytesToHexString(bytes) { * Wrapper around an Uint8Array that allows values to be appended to the buffer, * and that automatically grows the buffer when space runs out. */ -class Encoder { +export class Encoder { + buf: Uint8Array; + offset: number; + constructor() { this.buf = new Uint8Array(16) this.offset = 0 @@ -290,7 +294,10 @@ class Encoder { * the current decoding position, and allows values to be incrementally read by * decoding the bytes at the current position. */ -class Decoder { +export class Decoder { + buf: Uint8Array; + offset: number; + constructor(buffer) { if (!(buffer instanceof Uint8Array)) { throw new TypeError(`Not a byte array: ${buffer}`) @@ -555,7 +562,13 @@ class Decoder { * After one of these three has completed, the process repeats, starting again * with a repetition count, until we reach the end of the buffer. */ -class RLEEncoder extends Encoder { +export class RLEEncoder extends Encoder { + type: any + state: string + lastValue: any + count: number + literal: any + constructor(type) { super() this.type = type @@ -664,7 +677,7 @@ class RLEEncoder extends Encoder { * Returns an object of the form `{nonNullValues, sum}` where `nonNullValues` is the number of * non-null values copied, and `sum` is the sum (only if the `sumValues` option is set). 
*/ - copyFrom(decoder, options = {}) { + copyFrom(decoder, options: any = {}) : any { const { count, sumValues, sumShift } = options if (!(decoder instanceof RLEDecoder) || (decoder.type !== this.type)) { throw new TypeError('incompatible type of decoder') @@ -707,7 +720,7 @@ class RLEEncoder extends Encoder { nonNullValues += numValues for (let i = 0; i < numValues; i++) { if (decoder.done) throw new RangeError('incomplete literal') - const value = decoder.readRawValue() + const value : any = decoder.readRawValue() if (value === decoder.lastValue) throw new RangeError('Repetition of values is not allowed in literal') decoder.lastValue = value this._appendValue(value) @@ -786,7 +799,12 @@ class RLEEncoder extends Encoder { * Counterpart to RLEEncoder: reads values from an RLE-compressed sequence, * returning nulls and repeated values as required. */ -class RLEDecoder extends Decoder { +export class RLEDecoder extends Decoder { + type: any; + lastValue: any; + count: number; + state: any; + constructor(type, buffer) { super(buffer) this.type = type @@ -929,7 +947,9 @@ class RLEDecoder extends Decoder { * * Null values are also allowed, as with RLEEncoder. */ -class DeltaEncoder extends RLEEncoder { +export class DeltaEncoder extends RLEEncoder { + absoluteValue: number + constructor() { super('int') this.absoluteValue = 0 @@ -955,7 +975,7 @@ class DeltaEncoder extends RLEEncoder { * contain the key `count`, indicating the number of values to copy. If not specified, copies * all remaining values in the decoder. 
*/ - copyFrom(decoder, options = {}) { + copyFrom(decoder, options: any = {}) : any { if (options.sumValues) { throw new RangeError('unsupported options for DeltaEncoder.copyFrom()') } @@ -991,7 +1011,9 @@ class DeltaEncoder extends RLEEncoder { if (remaining !== undefined) remaining -= nulls + 1 const { nonNullValues, sum } = super.copyFrom(decoder, {count: remaining, sumValues: true}) if (nonNullValues > 0) { + // @ts-ignore this.absoluteValue = sum + // @ts-ignore decoder.absoluteValue = sum } } @@ -1001,7 +1023,9 @@ class DeltaEncoder extends RLEEncoder { * Counterpart to DeltaEncoder: reads values from a delta-compressed sequence of * numbers (may include null values). */ -class DeltaDecoder extends RLEDecoder { +export class DeltaDecoder extends RLEDecoder { + absoluteValue : number; + constructor(buffer) { super('int', buffer) this.absoluteValue = 0 @@ -1058,7 +1082,10 @@ class DeltaDecoder extends RLEDecoder { * only encode the repetition count but not the actual value, since the values * just alternate between false and true (starting with false). */ -class BooleanEncoder extends Encoder { +export class BooleanEncoder extends Encoder { + lastValue: boolean; + count: number; + constructor() { super() this.lastValue = false @@ -1088,7 +1115,7 @@ class BooleanEncoder extends Encoder { * contain the key `count`, indicating the number of values to copy. If not specified, copies * all remaining values in the decoder. */ - copyFrom(decoder, options = {}) { + copyFrom(decoder, options: any = {}) : any { if (!(decoder instanceof BooleanDecoder)) { throw new TypeError('incompatible type of decoder') } @@ -1138,7 +1165,11 @@ class BooleanEncoder extends Encoder { * Counterpart to BooleanEncoder: reads boolean values from a runlength-encoded * sequence. 
*/ -class BooleanDecoder extends Decoder { +export class BooleanDecoder extends Decoder { + lastValue: boolean; + firstRun: boolean; + count: number; + constructor(buffer) { super(buffer) this.lastValue = true // is negated the first time we read a count @@ -1203,7 +1234,3 @@ class BooleanDecoder extends Decoder { } } -module.exports = { - stringToUtf8, utf8ToString, hexStringToBytes, bytesToHexString, - Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder -} diff --git a/automerge-js/src/index.js b/automerge-js/src/index.js deleted file mode 100644 index 04cee89b..00000000 --- a/automerge-js/src/index.js +++ /dev/null @@ -1,372 +0,0 @@ -const AutomergeWASM = require("automerge-wasm") -const uuid = require('./uuid') - -let { rootProxy, listProxy, textProxy, mapProxy } = require("./proxies") -let { Counter } = require("./counter") -let { Text } = require("./text") -let { Int, Uint, Float64 } = require("./numbers") -let { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } = require("./constants") - -function init(actor) { - if (typeof actor != 'string') { - actor = null - } - const state = AutomergeWASM.create(actor) - return rootProxy(state, true); -} - -function clone(doc) { - const state = doc[STATE].clone() - return rootProxy(state, true); -} - -function free(doc) { - return doc[STATE].free() -} - -function from(data, actor) { - let doc1 = init(actor) - let doc2 = change(doc1, (d) => Object.assign(d, data)) - return doc2 -} - -function change(doc, options, callback) { - if (callback === undefined) { - // FIXME implement options - callback = options - options = {} - } - if (typeof options === "string") { - options = { message: options } - } - if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") { - throw new RangeError("must be the document root"); - } - if (doc[FROZEN] === true) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (!!doc[HEADS] === true) { - 
throw new RangeError("Attempting to change an out of date document"); - } - if (doc[READ_ONLY] === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const state = doc[STATE] - const heads = state.getHeads() - try { - doc[HEADS] = heads - doc[FROZEN] = true - let root = rootProxy(state); - callback(root) - if (state.pendingOps() === 0) { - doc[FROZEN] = false - doc[HEADS] = undefined - return doc - } else { - state.commit(options.message, options.time) - return rootProxy(state, true); - } - } catch (e) { - //console.log("ERROR: ",e) - doc[FROZEN] = false - doc[HEADS] = undefined - state.rollback() - throw e - } -} - -function emptyChange(doc, options) { - if (options === undefined) { - options = {} - } - if (typeof options === "string") { - options = { message: options } - } - - if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") { - throw new RangeError("must be the document root"); - } - if (doc[FROZEN] === true) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (doc[READ_ONLY] === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - - const state = doc[STATE] - state.commit(options.message, options.time) - return rootProxy(state, true); -} - -function load(data, actor) { - const state = AutomergeWASM.load(data, actor) - return rootProxy(state, true); -} - -function save(doc) { - const state = doc[STATE] - return state.save() -} - -function merge(local, remote) { - if (local[HEADS] === true) { - throw new RangeError("Attempting to change an out of date document"); - } - const localState = local[STATE] - const heads = localState.getHeads() - const remoteState = remote[STATE] - const changes = localState.getChangesAdded(remoteState) - localState.applyChanges(changes) - local[HEADS] = heads - return rootProxy(localState, true) -} - -function getActorId(doc) { - const state = doc[STATE] - return state.getActorId() -} - -function 
conflictAt(context, objectId, prop) { - let values = context.getAll(objectId, prop) - if (values.length <= 1) { - return - } - let result = {} - for (const conflict of values) { - const datatype = conflict[0] - const value = conflict[1] - switch (datatype) { - case "map": - result[value] = mapProxy(context, value, [ prop ], true) - break; - case "list": - result[value] = listProxy(context, value, [ prop ], true) - break; - case "text": - result[value] = textProxy(context, value, [ prop ], true) - break; - //case "table": - //case "cursor": - case "str": - case "uint": - case "int": - case "f64": - case "boolean": - case "bytes": - case "null": - result[conflict[2]] = value - break; - case "counter": - result[conflict[2]] = new Counter(value) - break; - case "timestamp": - result[conflict[2]] = new Date(value) - break; - default: - throw RangeError(`datatype ${datatype} unimplemented`) - } - } - return result -} - -function getConflicts(doc, prop) { - const state = doc[STATE] - const objectId = doc[OBJECT_ID] - return conflictAt(state, objectId, prop) -} - -function getLastLocalChange(doc) { - const state = doc[STATE] - try { - return state.getLastLocalChange() - } catch (e) { - return - } -} - -function getObjectId(doc) { - return doc[OBJECT_ID] -} - -function getChanges(oldState, newState) { - const o = oldState[STATE] - const n = newState[STATE] - const heads = oldState[HEADS] - return n.getChanges(heads || o.getHeads()) -} - -function getAllChanges(doc) { - const state = doc[STATE] - return state.getChanges([]) -} - -function applyChanges(doc, changes) { - if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") { - throw new RangeError("must be the document root"); - } - if (doc[FROZEN] === true) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (doc[READ_ONLY] === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const state = doc[STATE] - const heads = 
state.getHeads() - state.applyChanges(changes) - doc[HEADS] = heads - return [rootProxy(state, true)]; -} - -function getHistory(doc) { - const actor = getActorId(doc) - const history = getAllChanges(doc) - return history.map((change, index) => ({ - get change () { - return decodeChange(change) - }, - get snapshot () { - const [state] = applyChanges(init(), history.slice(0, index + 1)) - return state - } - }) - ) -} - -function equals() { - if (!isObject(val1) || !isObject(val2)) return val1 === val2 - const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort() - if (keys1.length !== keys2.length) return false - for (let i = 0; i < keys1.length; i++) { - if (keys1[i] !== keys2[i]) return false - if (!equals(val1[keys1[i]], val2[keys2[i]])) return false - } - return true -} - -function encodeSyncMessage(msg) { - return AutomergeWASM.encodeSyncMessage(msg) -} - -function decodeSyncMessage(msg) { - return AutomergeWASM.decodeSyncMessage(msg) -} - -function encodeSyncState(state) { - return AutomergeWASM.encodeSyncState(AutomergeWASM.importSyncState(state)) -} - -function decodeSyncState(state) { - return AutomergeWASM.exportSyncState(AutomergeWASM.decodeSyncState(state)) -} - -function generateSyncMessage(doc, inState) { - const state = doc[STATE] - const syncState = AutomergeWASM.importSyncState(inState) - const message = state.generateSyncMessage(syncState) - const outState = AutomergeWASM.exportSyncState(syncState) - return [ outState, message ] -} - -function receiveSyncMessage(doc, inState, message) { - const syncState = AutomergeWASM.importSyncState(inState) - if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") { - throw new RangeError("must be the document root"); - } - if (doc[FROZEN] === true) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (!!doc[HEADS] === true) { - throw new RangeError("Attempting to change an out of date document"); - } - if (doc[READ_ONLY] === false) { - 
throw new RangeError("Calls to Automerge.change cannot be nested") - } - const state = doc[STATE] - const heads = state.getHeads() - state.receiveSyncMessage(syncState, message) - const outState = AutomergeWASM.exportSyncState(syncState) - doc[HEADS] = heads - return [rootProxy(state, true), outState, null]; -} - -function initSyncState() { - return AutomergeWASM.exportSyncState(AutomergeWASM.initSyncState(change)) -} - -function encodeChange(change) { - return AutomergeWASM.encodeChange(change) -} - -function decodeChange(data) { - return AutomergeWASM.decodeChange(data) -} - -function encodeSyncMessage(change) { - return AutomergeWASM.encodeSyncMessage(change) -} - -function decodeSyncMessage(data) { - return AutomergeWASM.decodeSyncMessage(data) -} - -function getMissingDeps(doc, heads) { - const state = doc[STATE] - return state.getMissingDeps(heads) -} - -function getHeads(doc) { - const state = doc[STATE] - return doc[HEADS] || state.getHeads() -} - -function dump(doc) { - const state = doc[STATE] - state.dump() -} - -function toJS(doc) { - if (typeof doc === "object") { - if (doc instanceof Uint8Array) { - return doc - } - if (doc === null) { - return doc - } - if (doc instanceof Array) { - return doc.map((a) => toJS(a)) - } - if (doc instanceof Text) { - return doc.map((a) => toJS(a)) - } - let tmp = {} - for (index in doc) { - tmp[index] = toJS(doc[index]) - } - return tmp - } else { - return doc - } -} - -module.exports = { - init, from, change, emptyChange, clone, free, - load, save, merge, getChanges, getAllChanges, applyChanges, - getLastLocalChange, getObjectId, getActorId, getConflicts, - encodeChange, decodeChange, equals, getHistory, getHeads, uuid, - generateSyncMessage, receiveSyncMessage, initSyncState, - decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, - getMissingDeps, - dump, Text, Counter, Int, Uint, Float64, toJS, -} - -// depricated -// Frontend, setDefaultBackend, Backend - -// more... 
-/* -for (let name of ['getObjectId', 'getObjectById', - 'setActorId', - 'Text', 'Table', 'Counter', 'Observable' ]) { - module.exports[name] = Frontend[name] -} -*/ diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts new file mode 100644 index 00000000..1f86580e --- /dev/null +++ b/automerge-js/src/index.ts @@ -0,0 +1,496 @@ +import * as AutomergeWASM from "automerge-wasm" + +import { uuid } from './uuid' +export { uuid } from './uuid' + +import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" +import { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" +import { Counter } from "./counter" +//@ts-ignore +import { Text } from "./text" +import { Int, Uint, Float64 } from "./numbers" +import { isObject } from "./common" + +import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "automerge-wasm" +import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm" + +export { Counter } from "./counter" +export { Int, Uint, Float64 } from "./numbers" +//@ts-ignore +export { Text } from "./text" + +function _state(doc: Doc) : Automerge { + let state = (doc)[STATE] + if (state == undefined) { + throw new RangeError("must be the document root") + } + return state +} + +function _frozen(doc: Doc) : boolean { + return (doc)[FROZEN] === true +} + +function _heads(doc: Doc) : Heads | undefined { + return (doc)[HEADS] +} + +function _obj(doc: Doc) : ObjID { + return (doc)[OBJECT_ID] +} + +function _readonly(doc: Doc) : boolean { + return (doc)[READ_ONLY] === true +} + +export function init(actor?: ActorId) : Doc{ + if (typeof actor !== "string") { + actor = undefined + } + const state = AutomergeWASM.create(actor) + return rootProxy(state, true); +} + +export function clone(doc: Doc) : Doc { + const state = _state(doc).clone() + return rootProxy(state, true); +} + +export function free(doc: Doc) { + return _state(doc).free() +} + +export function from(initialState: T | Doc, actor?: 
ActorId): Doc { + return change(init(actor), (d) => Object.assign(d, initialState)) +} + +export function change>(doc: D, options: ChangeOptions | ChangeFn, callback?: ChangeFn): D { + + if (typeof options === 'function') { + callback = options + options = {} + } + + if (typeof options === "string") { + options = { message: options } + } + + if (typeof callback !== "function") { + throw new RangeError("invalid change function"); + } + + if (doc === undefined || _state(doc) === undefined || _obj(doc) !== "_root") { + throw new RangeError("must be the document root"); + } + if (_frozen(doc) === true) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (!!_heads(doc) === true) { + throw new RangeError("Attempting to change an out of date document"); + } + if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const state = _state(doc) + const heads = state.getHeads() + try { + //@ts-ignore + doc[HEADS] = heads + //Object.defineProperty(doc, HEADS, { value: heads, configurable: true, writable: true }) + //@ts-ignore + doc[FROZEN] = true + let root = rootProxy(state); + callback(root) + if (state.pendingOps() === 0) { + //@ts-ignore + doc[FROZEN] = false + //@ts-ignore + doc[HEADS] = undefined + return doc + } else { + state.commit(options.message, options.time) + return rootProxy(state, true); + } + } catch (e) { + //console.log("ERROR: ",e) + //@ts-ignore + doc[FROZEN] = false + //@ts-ignore + doc[HEADS] = undefined + state.rollback() + throw e + } +} + +export function emptyChange(doc: Doc, options: ChangeOptions) { + if (options === undefined) { + options = {} + } + if (typeof options === "string") { + options = { message: options } + } + + if (doc === undefined || _state(doc) === undefined || _obj(doc) !== "_root") { + throw new RangeError("must be the document root"); + } + if (_frozen(doc) === true) { + throw new RangeError("Attempting to use an outdated Automerge document") + } 
+ if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + + const state = _state(doc) + state.commit(options.message, options.time) + return rootProxy(state, true); +} + +export function load(data: Uint8Array, actor: ActorId) : Doc { + const state = AutomergeWASM.load(data, actor) + return rootProxy(state, true); +} + +export function save(doc: Doc) : Uint8Array { + const state = _state(doc) + return state.save() +} + +export function merge(local: Doc, remote: Doc) : Doc { + if (!!_heads(local) === true) { + throw new RangeError("Attempting to change an out of date document"); + } + const localState = _state(local) + const heads = localState.getHeads() + const remoteState = _state(remote) + const changes = localState.getChangesAdded(remoteState) + localState.applyChanges(changes) + //@ts-ignore + local[HEADS] = heads + return rootProxy(localState, true) +} + +export function getActorId(doc: Doc) : ActorId { + const state = _state(doc) + return state.getActorId() +} + +function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : any { + let values = context.getAll(objectId, prop) + if (values.length <= 1) { + return + } + let result = {} + for (const conflict of values) { + const datatype = conflict[0] + const value = conflict[1] + switch (datatype) { + case "map": + //@ts-ignore + result[value] = mapProxy(context, value, [ prop ], true) + break; + case "list": + //@ts-ignore + result[value] = listProxy(context, value, [ prop ], true) + break; + case "text": + //@ts-ignore + result[value] = textProxy(context, value, [ prop ], true) + break; + //case "table": + //case "cursor": + case "str": + case "uint": + case "int": + case "f64": + case "boolean": + case "bytes": + case "null": + //@ts-ignore + result[conflict[2]] = value + break; + case "counter": + //@ts-ignore + result[conflict[2]] = new Counter(value) + break; + case "timestamp": + //@ts-ignore + result[conflict[2]] = new Date(value) + break; + 
default: + throw RangeError(`datatype ${datatype} unimplemented`) + } + } + return result +} + +export function getConflicts(doc: Doc, prop: Prop) : any { + const state = _state(doc) + const objectId = _obj(doc) + return conflictAt(state, objectId, prop) +} + +export function getLastLocalChange(doc: Doc) : Change | undefined { + const state = _state(doc) + try { + return state.getLastLocalChange() + } catch (e) { + return + } +} + +export function getObjectId(doc: Doc) : ObjID { + return _obj(doc) +} + +export function getChanges(oldState: Doc, newState: Doc) : Change[] { + const o = _state(oldState) + const n = _state(newState) + const heads = _heads(oldState) + return n.getChanges(heads || o.getHeads()) +} + +export function getAllChanges(doc: Doc) : Change[] { + const state = _state(doc) + return state.getChanges([]) +} + +export function applyChanges(doc: Doc, changes: Change[]) : [Doc] { + if (doc === undefined || _obj(doc) !== "_root") { + throw new RangeError("must be the document root"); + } + if (_frozen(doc) === true) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const state = _state(doc) + const heads = state.getHeads() + state.applyChanges(changes) + //@ts-ignore + doc[HEADS] = heads + return [rootProxy(state, true)]; +} + +export function getHistory(doc: Doc) : State[] { + const actor = getActorId(doc) + const history = getAllChanges(doc) + return history.map((change, index) => ({ + get change () { + return decodeChange(change) + }, + get snapshot () { + const [state] = applyChanges(init(), history.slice(0, index + 1)) + return state + } + }) + ) +} + +// FIXME : no tests +export function equals(val1: any, val2: any) : boolean { + if (!isObject(val1) || !isObject(val2)) return val1 === val2 + const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort() + if (keys1.length !== keys2.length) return 
false + for (let i = 0; i < keys1.length; i++) { + if (keys1[i] !== keys2[i]) return false + if (!equals(val1[keys1[i]], val2[keys2[i]])) return false + } + return true +} + +export function encodeSyncState(state: SyncState) : Uint8Array { + return AutomergeWASM.encodeSyncState(AutomergeWASM.importSyncState(state)) +} + +export function decodeSyncState(state: Uint8Array) : SyncState { + return AutomergeWASM.exportSyncState(AutomergeWASM.decodeSyncState(state)) +} + +export function generateSyncMessage(doc: Doc, inState: SyncState) : [ SyncState, SyncMessage | null ] { + const state = _state(doc) + const syncState = AutomergeWASM.importSyncState(inState) + const message = state.generateSyncMessage(syncState) + const outState = AutomergeWASM.exportSyncState(syncState) + return [ outState, message ] +} + +export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage) : [ Doc, SyncState, null ] { + const syncState = AutomergeWASM.importSyncState(inState) + if (doc === undefined || _obj(doc) !== "_root") { + throw new RangeError("must be the document root"); + } + if (_frozen(doc) === true) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (!!_heads(doc) === true) { + throw new RangeError("Attempting to change an out of date document"); + } + if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const state = _state(doc) + const heads = state.getHeads() + state.receiveSyncMessage(syncState, message) + //@ts-ignore + doc[HEADS] = heads; + const outState = AutomergeWASM.exportSyncState(syncState) + return [rootProxy(state, true), outState, null]; +} + +export function initSyncState() : SyncState { + return AutomergeWASM.exportSyncState(AutomergeWASM.initSyncState()) +} + +export function encodeChange(change: DecodedChange) : Change { + return AutomergeWASM.encodeChange(change) +} + +export function decodeChange(data: Change) : DecodedChange { + return 
AutomergeWASM.decodeChange(data) +} + +export function encodeSyncMessage(message: DecodedSyncMessage) : SyncMessage { + return AutomergeWASM.encodeSyncMessage(message) +} + +export function decodeSyncMessage(message: SyncMessage) : DecodedSyncMessage { + return AutomergeWASM.decodeSyncMessage(message) +} + +export function getMissingDeps(doc: Doc, heads: Heads) : Heads { + const state = _state(doc) + return state.getMissingDeps(heads) +} + +export function getHeads(doc: Doc) : Heads { + const state = _state(doc) + return _heads(doc) || state.getHeads() +} + +export function dump(doc: Doc) { + const state = _state(doc) + state.dump() +} + +export function toJS(doc: any) : any { + if (typeof doc === "object") { + if (doc instanceof Uint8Array) { + return doc + } + if (doc === null) { + return doc + } + if (doc instanceof Array) { + return doc.map((a) => toJS(a)) + } + if (doc instanceof Text) { + //@ts-ignore + return doc.map((a: any) => toJS(a)) + } + let tmp : any = {} + for (let index in doc) { + tmp[index] = toJS(doc[index]) + } + return tmp + } else { + return doc + } +} + +type ChangeOptions = + | string // = message + | { + message?: string + time?: number + } + +type Doc = FreezeObject + +/** + * The argument pased to the callback of a `change` function is a mutable proxy of the original + * type. `Proxy` is the inverse of `Doc`: `Proxy>` is `T`, and `Doc>` is `D`. + */ +type Proxy = D extends Doc ? 
T : never + +type ChangeFn = (doc: T) => void + +interface State { + change: DecodedChange + snapshot: T +} + +// custom CRDT types + +/* + class TableRow { + readonly id: UUID + } + + class Table { + constructor() + add(item: T): UUID + byId(id: UUID): T & TableRow + count: number + ids: UUID[] + remove(id: UUID): void + rows: (T & TableRow)[] + } +*/ + + class List extends Array { + insertAt?(index: number, ...args: T[]): List + deleteAt?(index: number, numDelete?: number): List + } + +/* + + class Text extends List { + constructor(text?: string | string[]) + get(index: number): string + toSpans(): (string | T)[] + } + + // Note that until https://github.com/Microsoft/TypeScript/issues/2361 is addressed, we + // can't treat a Counter like a literal number without force-casting it as a number. + // This won't compile: + // `assert.strictEqual(c + 10, 13) // Operator '+' cannot be applied to types 'Counter' and '10'.ts(2365)` + // But this will: + // `assert.strictEqual(c as unknown as number + 10, 13)` + class Counter extends Number { + constructor(value?: number) + increment(delta?: number): void + decrement(delta?: number): void + toString(): string + valueOf(): number + value: number + } + + class Int { constructor(value: number) } + class Uint { constructor(value: number) } + class Float64 { constructor(value: number) } + +*/ + + // Readonly variants + + //type ReadonlyTable = ReadonlyArray & Table + type ReadonlyList = ReadonlyArray & List + type ReadonlyText = ReadonlyList & Text + +// prettier-ignore +type Freeze = + T extends Function ? T + : T extends Text ? ReadonlyText +// : T extends Table ? FreezeTable + : T extends List ? FreezeList + : T extends Array ? FreezeArray + : T extends Map ? FreezeMap + : T extends string & infer O ? 
string & O + : FreezeObject + +//interface FreezeTable extends ReadonlyTable> {} +interface FreezeList extends ReadonlyList> {} +interface FreezeArray extends ReadonlyArray> {} +interface FreezeMap extends ReadonlyMap, Freeze> {} +type FreezeObject = { readonly [P in keyof T]: Freeze } diff --git a/automerge-js/src/numbers.js b/automerge-js/src/numbers.ts similarity index 76% rename from automerge-js/src/numbers.js rename to automerge-js/src/numbers.ts index 1ee22dee..dbc26669 100644 --- a/automerge-js/src/numbers.js +++ b/automerge-js/src/numbers.ts @@ -1,7 +1,9 @@ // Convience classes to allow users to stricly specify the number type they want -class Int { - constructor(value) { +export class Int { + value: number; + + constructor(value: number) { if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= Number.MIN_SAFE_INTEGER)) { throw new RangeError(`Value ${value} cannot be a uint`) } @@ -10,8 +12,10 @@ class Int { } } -class Uint { - constructor(value) { +export class Uint { + value: number; + + constructor(value: number) { if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= 0)) { throw new RangeError(`Value ${value} cannot be a uint`) } @@ -20,8 +24,10 @@ class Uint { } } -class Float64 { - constructor(value) { +export class Float64 { + value: number; + + constructor(value: number) { if (typeof value !== 'number') { throw new RangeError(`Value ${value} cannot be a float64`) } @@ -30,4 +36,3 @@ class Float64 { } } -module.exports = { Int, Uint, Float64 } diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.ts similarity index 90% rename from automerge-js/src/proxies.js rename to automerge-js/src/proxies.ts index 3bf2fbd2..4e91b2e2 100644 --- a/automerge-js/src/proxies.js +++ b/automerge-js/src/proxies.ts @@ -1,11 +1,15 @@ -const AutomergeWASM = require("automerge-wasm") -const { Int, Uint, Float64 } = require("./numbers"); -const { Counter, getWriteableCounter } = require("./counter"); -const { 
Text } = require("./text"); -const { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } = require("./constants") +import AutomergeWASM from "automerge-wasm" +import { Automerge, Heads, ObjID } from "automerge-wasm" +// @ts-ignore +import { Int, Uint, Float64 } from "./numbers" +// @ts-ignore +import { Counter, getWriteableCounter } from "./counter" +// @ts-ignore +import { Text } from "./text" +import { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } from "./constants" -function parseListIndex(key) { +export function parseListIndex(key) { if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) if (typeof key !== 'number') { // throw new TypeError('A list index must be a number, but you passed ' + JSON.stringify(key)) @@ -17,7 +21,7 @@ function parseListIndex(key) { return key } -function valueAt(target, prop) { +function valueAt(target, prop) : any { const { context, objectId, path, readonly, heads} = target let value = context.get(objectId, prop, heads) if (value === undefined) { @@ -96,8 +100,8 @@ function import_value(value) { } } -const MapHandler = { - get (target, key) { +export const MapHandler = { + get (target, key) : any { const { context, objectId, path, readonly, frozen, heads, cache } = target if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (key === OBJECT_ID) return objectId @@ -119,11 +123,11 @@ const MapHandler = { } if (key === FROZEN) { target.frozen = val - return + return true } if (key === HEADS) { target.heads = val - return + return true } let [ value, datatype ] = import_value(val) if (frozen) { @@ -192,10 +196,11 @@ const MapHandler = { } -const ListHandler = { +export const ListHandler = { get (target, index) { const {context, objectId, path, readonly, frozen, heads } = target index = parseListIndex(index) + // @ts-ignore if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } } if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (index === OBJECT_ID) 
return objectId @@ -231,11 +236,11 @@ const ListHandler = { } if (index === FROZEN) { target.frozen = val - return + return true } if (index === HEADS) { target.heads = val - return + return true } if (typeof index == "string") { throw new RangeError('list index must be a number') @@ -322,9 +327,9 @@ const ListHandler = { }, getPrototypeOf(target) { return Object.getPrototypeOf([]) }, - ownKeys (target) { + ownKeys (target) : string[] { const {context, objectId, heads } = target - let keys = [] + let keys : string[] = [] // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array // but not uncommenting it causes for (i in list) {} to not enumerate values properly //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) } @@ -333,12 +338,13 @@ const ListHandler = { } } -const TextHandler = Object.assign({}, ListHandler, { +export const TextHandler = Object.assign({}, ListHandler, { get (target, index) { // FIXME this is a one line change from ListHandler.get() const {context, objectId, path, readonly, frozen, heads } = target index = parseListIndex(index) if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } + // @ts-ignore if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } } if (index === OBJECT_ID) return objectId if (index === READ_ONLY) return readonly @@ -368,24 +374,24 @@ const TextHandler = Object.assign({}, ListHandler, { }, }) -function mapProxy(context, objectId, path, readonly, heads) { +export function mapProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler) } -function listProxy(context, objectId, path, readonly, heads) { +export function listProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { let target = [] Object.assign(target, 
{context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, ListHandler) } -function textProxy(context, objectId, path, readonly, heads) { +export function textProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { let target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, TextHandler) } -function rootProxy(context, readonly) { - return mapProxy(context, "_root", [], readonly) +export function rootProxy(context: Automerge, readonly?: boolean) : any { + return mapProxy(context, "_root", [], !!readonly) } function listMethods(target) { @@ -400,18 +406,20 @@ function listMethods(target) { return this }, - fill(val, start, end) { - // FIXME - let list = context.getObject(objectId) - let [value, datatype] = valueAt(target, index) - for (let index = parseListIndex(start || 0); index < parseListIndex(end || list.length); index++) { - context.put(objectId, index, value, datatype) + fill(val: any, start: number, end: number) { + // FIXME needs tests + const [value, datatype] = import_value(val) + start = parseListIndex(start || 0) + end = parseListIndex(end || context.length(objectId)) + for (let i = start; i < end; i++) { + context.put(objectId, i, value, datatype) } return this }, indexOf(o, start = 0) { // FIXME + /* const id = o[OBJECT_ID] if (id) { const list = context.getObject(objectId) @@ -424,6 +432,7 @@ function listMethods(target) { } else { return context.indexOf(objectId, o, start) } + */ }, insertAt(index, ...values) { @@ -468,7 +477,7 @@ function listMethods(target) { if (readonly) { throw new RangeError("Sequence object cannot be modified outside of a change block") } - let result = [] + let result : any = [] for (let i = 0; i < del; i++) { let value = valueAt(target, index) result.push(value) @@ -527,7 +536,7 @@ function listMethods(target) { let len = context.length(objectId, 
heads) const iterator = { next: () => { - let value = undefined + let value : undefined | number = undefined if (i < len) { value = i; i++ } return { value, done: true } } @@ -557,7 +566,7 @@ function listMethods(target) { 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight', 'slice', 'some', 'toLocaleString', 'toString']) { methods[method] = (...args) => { - const list = [] + const list : any = [] while (true) { let value = valueAt(target, list.length) if (value == undefined) { @@ -575,7 +584,7 @@ function listMethods(target) { function textMethods(target) { const {context, objectId, path, readonly, frozen, heads } = target - const methods = { + const methods : any = { set (index, value) { return this[index] = value }, @@ -585,8 +594,8 @@ function textMethods(target) { toString () { return context.text(objectId, heads).replace(//g,'') }, - toSpans () { - let spans = [] + toSpans () : any[] { + let spans : any[] = [] let chars = '' let length = this.length for (let i = 0; i < length; i++) { @@ -614,4 +623,4 @@ function textMethods(target) { } -module.exports = { rootProxy, textProxy, listProxy, mapProxy, MapHandler, ListHandler, TextHandler } +//module.exports = { rootProxy, textProxy, listProxy, mapProxy, MapHandler, ListHandler, TextHandler } diff --git a/automerge-js/src/sync.js b/automerge-js/src/sync.ts similarity index 94% rename from automerge-js/src/sync.js rename to automerge-js/src/sync.ts index 2ae3f4e4..fd40e343 100644 --- a/automerge-js/src/sync.js +++ b/automerge-js/src/sync.ts @@ -16,11 +16,10 @@ * last sync to disk), and we fall back to sending the entire document in this case. 
*/ -//const Backend = require('./backend') -const Backend = {} //require('./backend') -const { hexStringToBytes, bytesToHexString, Encoder, Decoder } = require('./encoding') -const { decodeChangeMeta } = require('./columnar') -const { copyObject } = require('../src/common') +const Backend : any = {} //require('./backend') +import { hexStringToBytes, bytesToHexString, Encoder, Decoder } from './encoding' +import { decodeChangeMeta } from './columnar' +import { copyObject } from './common' const HASH_SIZE = 32 // 256 bits = 32 bytes const MESSAGE_TYPE_SYNC = 0x42 // first byte of a sync message, for identification @@ -36,7 +35,12 @@ const BITS_PER_ENTRY = 10, NUM_PROBES = 7 * over a network. The entries that are added are assumed to already be SHA-256 hashes, * so this implementation does not perform its own hashing. */ -class BloomFilter { +export class BloomFilter { + numEntries: number; + numBitsPerEntry: number; + numProbes: number; + bits: Uint8Array; + constructor (arg) { if (Array.isArray(arg)) { // arg is an array of SHA256 hashes in hexadecimal encoding @@ -143,8 +147,8 @@ function encodeHashes(encoder, hashes) { * Decodes a byte array in the format returned by encodeHashes(), and returns its content as an * array of hex strings. */ -function decodeHashes(decoder) { - let length = decoder.readUint32(), hashes = [] +function decodeHashes(decoder) : string[] { + let length = decoder.readUint32(), hashes : string[] = [] for (let i = 0; i < length; i++) { hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE))) } @@ -155,7 +159,7 @@ function decodeHashes(decoder) { * Takes a sync message of the form `{heads, need, have, changes}` and encodes it as a byte array for * transmission. 
*/ -function encodeSyncMessage(message) { +export function encodeSyncMessage(message) { const encoder = new Encoder() encoder.appendByte(MESSAGE_TYPE_SYNC) encodeHashes(encoder, message.heads) @@ -175,7 +179,7 @@ function encodeSyncMessage(message) { /** * Takes a binary-encoded sync message and decodes it into the form `{heads, need, have, changes}`. */ -function decodeSyncMessage(bytes) { +export function decodeSyncMessage(bytes) { const decoder = new Decoder(bytes) const messageType = decoder.readByte() if (messageType !== MESSAGE_TYPE_SYNC) { @@ -187,12 +191,14 @@ function decodeSyncMessage(bytes) { let message = {heads, need, have: [], changes: []} for (let i = 0; i < haveCount; i++) { const lastSync = decodeHashes(decoder) - const bloom = decoder.readPrefixedBytes(decoder) + const bloom = decoder.readPrefixedBytes() + // @ts-ignore message.have.push({lastSync, bloom}) } const changeCount = decoder.readUint32() for (let i = 0; i < changeCount; i++) { const change = decoder.readPrefixedBytes() + // @ts-ignore message.changes.push(change) } // Ignore any trailing bytes -- they can be used for extensions by future versions of the protocol @@ -204,7 +210,7 @@ function decodeSyncMessage(bytes) { * an application restart or disconnect and reconnect. The ephemeral parts of the state that should * be cleared on reconnect are not encoded. */ -function encodeSyncState(syncState) { +export function encodeSyncState(syncState) { const encoder = new Encoder() encoder.appendByte(PEER_STATE_TYPE) encodeHashes(encoder, syncState.sharedHeads) @@ -215,7 +221,7 @@ function encodeSyncState(syncState) { * Takes a persisted peer state as encoded by `encodeSyncState` and decodes it into a SyncState * object. The parts of the peer state that were not encoded are initialised with default values. 
*/ -function decodeSyncState(bytes) { +export function decodeSyncState(bytes) { const decoder = new Decoder(bytes) const recordType = decoder.readByte() if (recordType !== PEER_STATE_TYPE) { @@ -249,7 +255,7 @@ function getChangesToSend(backend, have, need) { return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined) } - let lastSyncHashes = {}, bloomFilters = [] + let lastSyncHashes : any = {}, bloomFilters : BloomFilter[] = [] for (let h of have) { for (let hash of h.lastSync) lastSyncHashes[hash] = true bloomFilters.push(new BloomFilter(h.bloom)) @@ -259,7 +265,7 @@ function getChangesToSend(backend, have, need) { const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)) .map(change => decodeChangeMeta(change, true)) - let changeHashes = {}, dependents = {}, hashesToSend = {} + let changeHashes : any = {}, dependents : any = {}, hashesToSend : any = {} for (let change of changes) { changeHashes[change.hash] = true @@ -278,7 +284,7 @@ function getChangesToSend(backend, have, need) { // Include any changes that depend on a Bloom-negative change let stack = Object.keys(hashesToSend) while (stack.length > 0) { - const hash = stack.pop() + const hash : any = stack.pop() if (dependents[hash]) { for (let dep of dependents[hash]) { if (!hashesToSend[dep]) { @@ -290,7 +296,7 @@ function getChangesToSend(backend, have, need) { } // Include any explicitly requested changes - let changesToSend = [] + let changesToSend : any = [] for (let hash of need) { hashesToSend[hash] = true if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()? 
@@ -306,7 +312,7 @@ function getChangesToSend(backend, have, need) { return changesToSend } -function initSyncState() { +export function initSyncState() { return { sharedHeads: [], lastSentHeads: [], @@ -325,7 +331,7 @@ function compareArrays(a, b) { * Given a backend and what we believe to be the state of our peer, generate a message which tells * them about we have and includes any changes we believe they need */ -function generateSyncMessage(backend, syncState) { +export function generateSyncMessage(backend, syncState) { if (!backend) { throw new Error("generateSyncMessage called with no Automerge document") } @@ -345,7 +351,7 @@ function generateSyncMessage(backend, syncState) { // because they (intentionally) only sent us a subset of changes. In case 1, we leave the `have` // field of the message empty because we just want to fill in the missing dependencies for now. // In case 2, or if ourNeed is empty, we send a Bloom filter to request any unsent changes. - let ourHave = [] + let ourHave : any = [] if (!theirHeads || ourNeed.every(hash => theirHeads.includes(hash))) { ourHave = [makeBloomFilter(backend, sharedHeads)] } @@ -418,7 +424,7 @@ function advanceHeads(myOldHeads, myNewHeads, ourOldSharedHeads) { * Given a backend, a message message and the state of our peer, apply any changes, update what * we believe about the peer, and (if there were applied changes) produce a patch for the frontend */ -function receiveSyncMessage(backend, oldSyncState, binaryMessage) { +export function receiveSyncMessage(backend, oldSyncState, binaryMessage) { if (!backend) { throw new Error("generateSyncMessage called with no Automerge document") } diff --git a/automerge-js/src/text.js b/automerge-js/src/text.ts similarity index 82% rename from automerge-js/src/text.js rename to automerge-js/src/text.ts index a7f442fe..02aac54d 100644 --- a/automerge-js/src/text.js +++ b/automerge-js/src/text.ts @@ -1,39 +1,37 @@ -const { OBJECT_ID } = require('./constants') -const { isObject } 
= require('../src/common') +import { OBJECT_ID } from './constants' +import { isObject } from '../src/common' -class Text { - constructor (text) { - const instance = Object.create(Text.prototype) +export class Text { + elems: any[] + + constructor (text?: string | string[]) { + //const instance = Object.create(Text.prototype) if (typeof text === 'string') { - instance.elems = [...text] + this.elems = [...text] } else if (Array.isArray(text)) { - instance.elems = text + this.elems = text } else if (text === undefined) { - instance.elems = [] + this.elems = [] } else { throw new TypeError(`Unsupported initial value for Text: ${text}`) } - return instance } - get length () { + get length () : number { return this.elems.length } - get (index) { + get (index) : any { return this.elems[index] } - getElemId (index) { - return undefined - } - /** * Iterates over the text elements character by character, including any * inline objects. */ [Symbol.iterator] () { - let elems = this.elems, index = -1 + const elems = this.elems + let index = -1 return { next () { index += 1 @@ -50,7 +48,7 @@ class Text { * Returns the content of the Text object as a simple string, ignoring any * non-character elements. */ - toString() { + toString() : string { // Concatting to a string is faster than creating an array and then // .join()ing for small (<100KB) arrays. // https://jsperf.com/join-vs-loop-w-type-test @@ -68,8 +66,8 @@ class Text { * For example, the value ['a', 'b', {x: 3}, 'c', 'd'] has spans: * => ['ab', {x: 3}, 'cd'] */ - toSpans() { - let spans = [] + toSpans() : any[] { + const spans : any = [] let chars = '' for (const elem of this.elems) { if (typeof elem === 'string') { @@ -92,21 +90,21 @@ class Text { * Returns the content of the Text object as a simple string, so that the * JSON serialization of an Automerge document represents text nicely. 
*/ - toJSON() { + toJSON() : string { return this.toString() } /** * Updates the list item at position `index` to a new value `value`. */ - set (index, value) { + set (index: number, value: any) { this.elems[index] = value } /** * Inserts new list items `values` starting at position `index`. */ - insertAt(index, ...values) { + insertAt(index: number, ...values) { this.elems.splice(index, 0, ... values) } @@ -129,4 +127,3 @@ for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', } } -module.exports = { Text } diff --git a/automerge-js/src/uuid.js b/automerge-js/src/uuid.js deleted file mode 100644 index 42a8cc6e..00000000 --- a/automerge-js/src/uuid.js +++ /dev/null @@ -1,16 +0,0 @@ -const { v4: uuid } = require('uuid') - -function defaultFactory() { - return uuid().replace(/-/g, '') -} - -let factory = defaultFactory - -function makeUuid() { - return factory() -} - -makeUuid.setFactory = newFactory => { factory = newFactory } -makeUuid.reset = () => { factory = defaultFactory } - -module.exports = makeUuid diff --git a/automerge-js/src/uuid.ts b/automerge-js/src/uuid.ts new file mode 100644 index 00000000..bc6c4bb1 --- /dev/null +++ b/automerge-js/src/uuid.ts @@ -0,0 +1,16 @@ +import { v4 } from 'uuid' + +function defaultFactory() { + return v4().replace(/-/g, '') +} + +let factory = defaultFactory + +export function uuid() { + return factory() +} + +// @ts-ignore +uuid.setFactory = newFactory => { factory = newFactory } +// @ts-ignore +uuid.reset = () => { factory = defaultFactory } diff --git a/automerge-js/test/basic_test.js b/automerge-js/test/basic_test.ts similarity index 98% rename from automerge-js/test/basic_test.js rename to automerge-js/test/basic_test.ts index 68d2fecf..5aff21b0 100644 --- a/automerge-js/test/basic_test.js +++ b/automerge-js/test/basic_test.ts @@ -1,7 +1,6 @@ - -const assert = require('assert') -const util = require('util') -const Automerge = require('..') +import * as assert from 'assert' +import * as util 
from 'util' +import * as Automerge from '../src' describe('Automerge', () => { describe('basics', () => { diff --git a/automerge-js/test/columnar_test.js b/automerge-js/test/columnar_test.ts similarity index 96% rename from automerge-js/test/columnar_test.js rename to automerge-js/test/columnar_test.ts index 8cbe1482..ca670377 100644 --- a/automerge-js/test/columnar_test.js +++ b/automerge-js/test/columnar_test.ts @@ -1,7 +1,7 @@ -const assert = require('assert') -const { checkEncoded } = require('./helpers') -const Automerge = require('..') -const { encodeChange, decodeChange } = Automerge +import * as assert from 'assert' +import { checkEncoded } from './helpers' +import * as Automerge from '../src' +import { encodeChange, decodeChange } from '../src' describe('change encoding', () => { it('should encode text edits', () => { diff --git a/automerge-js/test/helpers.js b/automerge-js/test/helpers.ts similarity index 93% rename from automerge-js/test/helpers.js rename to automerge-js/test/helpers.ts index c3fc52ae..76cae7d6 100644 --- a/automerge-js/test/helpers.js +++ b/automerge-js/test/helpers.ts @@ -1,5 +1,5 @@ -const assert = require('assert') -const { Encoder } = require('../src/encoding') +import * as assert from 'assert' +import { Encoder } from '../src/encoding' // Assertion that succeeds if the first argument deepStrictEquals at least one of the // subsequent arguments (but we don't care which one) diff --git a/automerge-js/test/legacy_tests.js b/automerge-js/test/legacy_tests.ts similarity index 99% rename from automerge-js/test/legacy_tests.js rename to automerge-js/test/legacy_tests.ts index 76348d06..4034ca25 100644 --- a/automerge-js/test/legacy_tests.js +++ b/automerge-js/test/legacy_tests.ts @@ -1,9 +1,7 @@ -const assert = require('assert') -//const Automerge = process.env.TEST_DIST === '1' ? 
require('../dist/automerge') : require('../src/automerge') -const Automerge = require('../src') -const { assertEqualsOneOf } = require('./helpers') -const { decodeChange } = require('../src/columnar') -//const { decodeChange } = Automerge +import * as assert from 'assert' +import * as Automerge from '../src' +import { assertEqualsOneOf } from './helpers' +import { decodeChange } from '../src/columnar' const UUID_PATTERN = /^[0-9a-f]{32}$/ const OPID_PATTERN = /^[0-9]+@[0-9a-f]{32}$/ @@ -810,11 +808,12 @@ describe('Automerge', () => { }) describe('concurrent use', () => { - let s1, s2, s3 + let s1, s2, s3, s4 beforeEach(() => { s1 = Automerge.init() s2 = Automerge.init() s3 = Automerge.init() + s4 = Automerge.init() }) it('should merge concurrent updates of different properties', () => { diff --git a/automerge-js/test/sync_test.js b/automerge-js/test/sync_test.ts similarity index 99% rename from automerge-js/test/sync_test.js rename to automerge-js/test/sync_test.ts index 86c3b3fd..c7f8015b 100644 --- a/automerge-js/test/sync_test.js +++ b/automerge-js/test/sync_test.ts @@ -1,8 +1,8 @@ -const assert = require('assert') -const Automerge = require('..'); -const { BloomFilter } = require('../src/sync') -const { decodeChangeMeta } = require('../src/columnar') -const { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } = Automerge +import * as assert from 'assert' +import * as Automerge from '../src' +import { BloomFilter } from '../src/sync' +import { decodeChangeMeta } from '../src/columnar' +import { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } from "../src" function inspect(a) { const util = require("util"); @@ -240,6 +240,7 @@ describe('Data sync protocol', () => { it('should assume sent changes were recieved until we hear otherwise', () => { let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') let s1 = initSyncState(), message = null + let s2 n1 = Automerge.change(n1, 
{time: 0}, doc => doc.items = []) ;[n1, n2, s1, s2 ] = sync(n1, n2) diff --git a/automerge-js/test/text_test.js b/automerge-js/test/text_test.ts similarity index 99% rename from automerge-js/test/text_test.js rename to automerge-js/test/text_test.ts index 57e8884e..8dbfc93c 100644 --- a/automerge-js/test/text_test.js +++ b/automerge-js/test/text_test.ts @@ -1,6 +1,6 @@ -const assert = require('assert') -const Automerge = require('..') -const { assertEqualsOneOf } = require('./helpers') +import * as assert from 'assert' +import * as Automerge from '../src' +import { assertEqualsOneOf } from './helpers' function attributeStateToAttributes(accumulatedAttributes) { const attributes = {} diff --git a/automerge-js/test/uuid_test.js b/automerge-js/test/uuid_test.ts similarity index 89% rename from automerge-js/test/uuid_test.js rename to automerge-js/test/uuid_test.ts index a0f83df1..4182a8c4 100644 --- a/automerge-js/test/uuid_test.js +++ b/automerge-js/test/uuid_test.ts @@ -1,5 +1,5 @@ -const assert = require('assert') -const Automerge = require('..') +import * as assert from 'assert' +import * as Automerge from '../src' const uuid = Automerge.uuid diff --git a/automerge-js/tsconfig.json b/automerge-js/tsconfig.json new file mode 100644 index 00000000..987f9d37 --- /dev/null +++ b/automerge-js/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "noImplicitAny": false, + "strict": true, + "allowJs": false, + "baseUrl": ".", + "lib": ["dom", "esnext.asynciterable", "es2017", "es2016", "es2015"], + "module": "commonjs", + "moduleResolution": "node", + "target": "es2016", + "skipLibCheck": true, + "outDir": "./dist" + }, + "include": [ "src/**/*" ], + "exclude": ["dist/**/*"] +} diff --git a/automerge-js/tslint.json b/automerge-js/tslint.json new file mode 100644 index 00000000..f7bb7a71 --- /dev/null +++ b/automerge-js/tslint.json @@ -0,0 +1,3 @@ +{ + "extends": "tslint:recommended" +} diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 
bf23948f..47f32deb 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -89,6 +89,8 @@ export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; export function encodeSyncState(state: SyncState): Uint8Array; export function decodeSyncState(data: Uint8Array): SyncState; +export function exportSyncState(state: SyncState): JsSyncState; +export function importSyncState(state: JsSyncState): SyncState; export class Automerge { // change state @@ -153,6 +155,9 @@ export class Automerge { toJS(): any; } +export class JsSyncState { +} + export class SyncState { free(): void; clone(): SyncState; From 4f898b67b3102df2962c99938e68317032d0e2b2 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 17 May 2022 16:53:17 -0400 Subject: [PATCH 384/730] able to build npm package --- automerge-js/.gitignore | 1 + automerge-js/LICENSE | 10 ++++++++ automerge-js/README.md | 8 ++++++ automerge-js/config/cjs.json | 8 ++++++ automerge-js/config/mjs.json | 8 ++++++ automerge-js/config/types.json | 10 ++++++++ automerge-js/package.json | 47 +++++++++++++++++++++++++++++----- automerge-js/src/index.ts | 14 +++++----- automerge-js/tsconfig.json | 34 ++++++++++++++---------- automerge-wasm/web-index.js | 2 ++ 10 files changed, 116 insertions(+), 26 deletions(-) create mode 100644 automerge-js/LICENSE create mode 100644 automerge-js/README.md create mode 100644 automerge-js/config/cjs.json create mode 100644 automerge-js/config/mjs.json create mode 100644 automerge-js/config/types.json diff --git a/automerge-js/.gitignore b/automerge-js/.gitignore index 05065cf0..cfe564d7 100644 --- a/automerge-js/.gitignore +++ b/automerge-js/.gitignore @@ -1,3 +1,4 @@ /node_modules /yarn.lock dist +index.d.ts diff --git a/automerge-js/LICENSE b/automerge-js/LICENSE new file mode 100644 index 00000000..63b21502 --- /dev/null +++ b/automerge-js/LICENSE @@ -0,0 +1,10 @@ +MIT License + +Copyright 2022, 
Ink & Switch LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/automerge-js/README.md b/automerge-js/README.md new file mode 100644 index 00000000..3875e2b1 --- /dev/null +++ b/automerge-js/README.md @@ -0,0 +1,8 @@ + +## Todo + +1. write a readme +1. final name for package - to distinguish it from the old one +1. get a index.d.ts you like +1. 
publish package + diff --git a/automerge-js/config/cjs.json b/automerge-js/config/cjs.json new file mode 100644 index 00000000..890a0422 --- /dev/null +++ b/automerge-js/config/cjs.json @@ -0,0 +1,8 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "target": "es2016", + "module": "commonjs", + "outDir": "../dist/cjs" + } +} diff --git a/automerge-js/config/mjs.json b/automerge-js/config/mjs.json new file mode 100644 index 00000000..8f964400 --- /dev/null +++ b/automerge-js/config/mjs.json @@ -0,0 +1,8 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "target": "es6", + "module": "es6", + "outDir": "../dist/mjs" + } +} diff --git a/automerge-js/config/types.json b/automerge-js/config/types.json new file mode 100644 index 00000000..3e7cde18 --- /dev/null +++ b/automerge-js/config/types.json @@ -0,0 +1,10 @@ + +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "declaration": true, + "emitDeclarationOnly": true, + "outFile": "../index.d.ts" + }, + "include": [ "../src/index.ts" ] +} diff --git a/automerge-js/package.json b/automerge-js/package.json index 4b3b2b55..2bdafd6b 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -1,16 +1,51 @@ { "name": "automerge-js", + "collaborators": [ + "Orion Henry ", + "Martin Kleppmann" + ], "version": "0.1.0", - "main": "src/index.js", + "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", + "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", + "repository": "github:automerge/automerge-rs", + "files": [ + "README.md", + "LICENSE", + "package.json", + "index.d.ts", + "package.json", + "index.d.ts", + "dist/mjs/constants.js", + "dist/mjs/numbers.js", + "dist/mjs/sync.js", + "dist/mjs/index.js", + "dist/mjs/encoding.js", + "dist/mjs/columnar.js", + "dist/mjs/uuid.js", + "dist/mjs/counter.js", + "dist/mjs/common.js", + "dist/mjs/text.js", + "dist/mjs/proxies.js", + "dist/cjs/constants.js", + "dist/cjs/numbers.js", + 
"dist/cjs/sync.js", + "dist/cjs/index.js", + "dist/cjs/encoding.js", + "dist/cjs/columnar.js", + "dist/cjs/uuid.js", + "dist/cjs/counter.js", + "dist/cjs/common.js", + "dist/cjs/text.js", + "dist/cjs/proxies.js" + ], + "module": "./dist/mjs/index.js", + "main": "./dist/cjs/index.js", "license": "MIT", "scripts": { "lint": "tslint --project tsconfig.json", + "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc -p config/types.json", "test": "ts-mocha -p tsconfig.json test/**/*.ts" }, - "directories": { - "src": "./src", - "test": "./test" - }, "devDependencies": { "@types/expect": "^24.3.0", "@types/mocha": "^9.1.1", @@ -21,7 +56,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "automerge-wasm": "file:../automerge-wasm", + "automerge-wasm": "^0.1.2", "fast-sha256": "^1.3.0", "pako": "^2.0.4", "uuid": "^8.3" diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 1f86580e..2b81d70a 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -1,23 +1,25 @@ import * as AutomergeWASM from "automerge-wasm" import { uuid } from './uuid' + +import _init from "automerge-wasm" +export default _init + export { uuid } from './uuid' import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" import { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" import { Counter } from "./counter" -//@ts-ignore import { Text } from "./text" import { Int, Uint, Float64 } from "./numbers" import { isObject } from "./common" -import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "automerge-wasm" -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm" - +export { Text } from "./text" export { Counter } from "./counter" export { Int, Uint, Float64 } from "./numbers" -//@ts-ignore -export { Text } from "./text" + +import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "automerge-wasm" +import { JsSyncState as SyncState, 
SyncMessage, DecodedSyncMessage } from "automerge-wasm" function _state(doc: Doc) : Automerge { let state = (doc)[STATE] diff --git a/automerge-js/tsconfig.json b/automerge-js/tsconfig.json index 987f9d37..b0e2620c 100644 --- a/automerge-js/tsconfig.json +++ b/automerge-js/tsconfig.json @@ -1,16 +1,22 @@ { - "compilerOptions": { - "noImplicitAny": false, - "strict": true, - "allowJs": false, - "baseUrl": ".", - "lib": ["dom", "esnext.asynciterable", "es2017", "es2016", "es2015"], - "module": "commonjs", - "moduleResolution": "node", - "target": "es2016", - "skipLibCheck": true, - "outDir": "./dist" - }, - "include": [ "src/**/*" ], - "exclude": ["dist/**/*"] + "compilerOptions": { + "target": "es2016", + "sourceMap": false, + "declaration": false, + "resolveJsonModule": true, + "module": "commonjs", + "moduleResolution": "node", + "noImplicitAny": false, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "strict": true, + "noFallthroughCasesInSwitch": true, + "skipLibCheck": true, + "outDir": "./dist/cjs" + }, + "include": [ "src/**/*" ], + "exclude": [ + "./dist/**/*", + "./node_modules" + ] } diff --git a/automerge-wasm/web-index.js b/automerge-wasm/web-index.js index ab9e8a1d..80057798 100644 --- a/automerge-wasm/web-index.js +++ b/automerge-wasm/web-index.js @@ -8,6 +8,8 @@ export { decodeSyncMessage, encodeSyncState, decodeSyncState, + exportSyncState, + importSyncState, } from "./bindgen.js" import init from "./bindgen.js" export default init; From 1eec70f11632a3800f65350e3e9a61fb1eaf724b Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 17 May 2022 17:01:06 -0400 Subject: [PATCH 385/730] example webpack for js --- automerge-js/examples/webpack/.gitignore | 5 +++ automerge-js/examples/webpack/package.json | 21 +++++++++++ .../examples/webpack/public/index.html | 10 ++++++ automerge-js/examples/webpack/src/index.js | 20 +++++++++++ .../examples/webpack/webpack.config.js | 35 +++++++++++++++++++ automerge-js/package.json | 
2 +- automerge-wasm/package.json | 2 +- 7 files changed, 93 insertions(+), 2 deletions(-) create mode 100644 automerge-js/examples/webpack/.gitignore create mode 100644 automerge-js/examples/webpack/package.json create mode 100644 automerge-js/examples/webpack/public/index.html create mode 100644 automerge-js/examples/webpack/src/index.js create mode 100644 automerge-js/examples/webpack/webpack.config.js diff --git a/automerge-js/examples/webpack/.gitignore b/automerge-js/examples/webpack/.gitignore new file mode 100644 index 00000000..da9d3ff5 --- /dev/null +++ b/automerge-js/examples/webpack/.gitignore @@ -0,0 +1,5 @@ +yarn.lock +node_modules +public/*.wasm +public/main.js +dist diff --git a/automerge-js/examples/webpack/package.json b/automerge-js/examples/webpack/package.json new file mode 100644 index 00000000..474d9904 --- /dev/null +++ b/automerge-js/examples/webpack/package.json @@ -0,0 +1,21 @@ +{ + "name": "webpack-automerge-example", + "version": "0.1.0", + "description": "", + "private": true, + "scripts": { + "build": "webpack", + "start": "serve public", + "test": "node dist/node.js" + }, + "author": "", + "dependencies": { + "automerge-js": "file:automerge-js-0.1.0.tgz" + }, + "devDependencies": { + "serve": "^13.0.2", + "webpack": "^5.72.1", + "webpack-cli": "^4.9.2", + "webpack-node-externals": "^3.0.0" + } +} diff --git a/automerge-js/examples/webpack/public/index.html b/automerge-js/examples/webpack/public/index.html new file mode 100644 index 00000000..5003393a --- /dev/null +++ b/automerge-js/examples/webpack/public/index.html @@ -0,0 +1,10 @@ + + + + + Simple Webpack for automerge-wasm + + + + + diff --git a/automerge-js/examples/webpack/src/index.js b/automerge-js/examples/webpack/src/index.js new file mode 100644 index 00000000..7d0b8371 --- /dev/null +++ b/automerge-js/examples/webpack/src/index.js @@ -0,0 +1,20 @@ +import init, * as Automerge from "automerge-js" + +// hello world code that will run correctly on web or node + +init().then(_ 
=> { + let doc = Automerge.init() + doc = Automerge.change(doc, (d) => d.hello = "from automerge-js") + const result = JSON.stringify(doc) + + if (typeof document !== 'undefined') { + // browser + const element = document.createElement('div'); + element.innerHTML = JSON.stringify(result) + document.body.appendChild(element); + } else { + // server + console.log("node:", result) + } +}) + diff --git a/automerge-js/examples/webpack/webpack.config.js b/automerge-js/examples/webpack/webpack.config.js new file mode 100644 index 00000000..3ab0e798 --- /dev/null +++ b/automerge-js/examples/webpack/webpack.config.js @@ -0,0 +1,35 @@ +const path = require('path'); +const nodeExternals = require('webpack-node-externals'); + +// the most basic webpack config for node or web targets for automerge-wasm + +const serverConfig = { + // basic setup for bundling a node package + target: 'node', + externals: [nodeExternals()], + externalsPresets: { node: true }, + + entry: './src/index.js', + output: { + filename: 'node.js', + path: path.resolve(__dirname, 'dist'), + }, + mode: "development", // or production +}; + +const clientConfig = { + target: 'web', + entry: './src/index.js', + output: { + filename: 'main.js', + path: path.resolve(__dirname, 'public'), + }, + mode: "development", // or production + performance: { // we dont want the wasm blob to generate warnings + hints: false, + maxEntrypointSize: 512000, + maxAssetSize: 512000 + } +}; + +module.exports = [serverConfig, clientConfig]; diff --git a/automerge-js/package.json b/automerge-js/package.json index 2bdafd6b..508f1351 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -56,7 +56,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "automerge-wasm": "^0.1.2", + "automerge-wasm": "^0.1.3", "fast-sha256": "^1.3.0", "pako": "^2.0.4", "uuid": "^8.3" diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index a7243e3e..7029688c 100644 --- a/automerge-wasm/package.json +++ 
b/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.2", + "version": "0.1.3", "license": "MIT", "files": [ "README.md", From 226bbeb023b0b1c48f6653a7e7bcc233ec047c34 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 17 May 2022 17:16:38 -0400 Subject: [PATCH 386/730] tslint to eslint --- automerge-js/.eslintignore | 2 ++ automerge-js/.eslintrc.cjs | 11 +++++++++++ automerge-js/package.json | 6 ++++-- automerge-js/src/index.ts | 1 + 4 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 automerge-js/.eslintignore create mode 100644 automerge-js/.eslintrc.cjs diff --git a/automerge-js/.eslintignore b/automerge-js/.eslintignore new file mode 100644 index 00000000..4d6880d3 --- /dev/null +++ b/automerge-js/.eslintignore @@ -0,0 +1,2 @@ +dist +examples diff --git a/automerge-js/.eslintrc.cjs b/automerge-js/.eslintrc.cjs new file mode 100644 index 00000000..80e08d55 --- /dev/null +++ b/automerge-js/.eslintrc.cjs @@ -0,0 +1,11 @@ +module.exports = { + root: true, + parser: '@typescript-eslint/parser', + plugins: [ + '@typescript-eslint', + ], + extends: [ + 'eslint:recommended', + 'plugin:@typescript-eslint/recommended', + ], +}; diff --git a/automerge-js/package.json b/automerge-js/package.json index 508f1351..7bfbca15 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -42,7 +42,7 @@ "main": "./dist/cjs/index.js", "license": "MIT", "scripts": { - "lint": "tslint --project tsconfig.json", + "lint": "eslint src", "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc -p config/types.json", "test": "ts-mocha -p tsconfig.json test/**/*.ts" }, @@ -50,9 +50,11 @@ "@types/expect": "^24.3.0", "@types/mocha": "^9.1.1", "@types/uuid": "^8.3.4", + "@typescript-eslint/eslint-plugin": "^5.25.0", + 
"@typescript-eslint/parser": "^5.25.0", + "eslint": "^8.15.0", "mocha": "^10.0.0", "ts-mocha": "^10.0.0", - "tslint": "^6.1.3", "typescript": "^4.6.4" }, "dependencies": { diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 2b81d70a..9b856833 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -3,6 +3,7 @@ import * as AutomergeWASM from "automerge-wasm" import { uuid } from './uuid' import _init from "automerge-wasm" + export default _init export { uuid } from './uuid' From 1cf8f80ba4cd25ace693fcd2f0c3bb1e36964b88 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 18 May 2022 15:38:52 -0400 Subject: [PATCH 387/730] pull wasm out of deps --- automerge-js/package.json | 2 +- automerge-js/src/counter.ts | 2 +- automerge-js/src/index.ts | 166 +++++++------------------ automerge-js/src/low_level_api.ts | 190 +++++++++++++++++++++++++++++ automerge-js/src/proxies.ts | 16 +-- automerge-js/test/basic_test.ts | 3 + automerge-js/test/columnar_test.ts | 3 + automerge-js/test/legacy_tests.ts | 3 + automerge-js/test/sync_test.ts | 3 + automerge-js/test/text_test.ts | 3 + automerge-js/test/uuid_test.ts | 3 + 11 files changed, 258 insertions(+), 136 deletions(-) create mode 100644 automerge-js/src/low_level_api.ts diff --git a/automerge-js/package.json b/automerge-js/package.json index 7bfbca15..ac6c5c5a 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -55,10 +55,10 @@ "eslint": "^8.15.0", "mocha": "^10.0.0", "ts-mocha": "^10.0.0", + "automerge-wasm": "^0.1.3", "typescript": "^4.6.4" }, "dependencies": { - "automerge-wasm": "^0.1.3", "fast-sha256": "^1.3.0", "pako": "^2.0.4", "uuid": "^8.3" diff --git a/automerge-js/src/counter.ts b/automerge-js/src/counter.ts index fba2d8d0..0539af39 100644 --- a/automerge-js/src/counter.ts +++ b/automerge-js/src/counter.ts @@ -1,4 +1,4 @@ -import { Automerge, ObjID, Prop } from "automerge-wasm" +import { Automerge, ObjID, Prop } from "./low_level_api" /** * The most basic 
CRDT: an integer value that can be changed only by * incrementing and decrementing. Since addition of integers is commutative, diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 9b856833..a1cc4968 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -1,11 +1,6 @@ -import * as AutomergeWASM from "automerge-wasm" import { uuid } from './uuid' -import _init from "automerge-wasm" - -export default _init - export { uuid } from './uuid' import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" @@ -19,8 +14,24 @@ export { Text } from "./text" export { Counter } from "./counter" export { Int, Uint, Float64 } from "./numbers" -import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "automerge-wasm" -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm" +import { ApiHandler, LowLevelApi, UseApi } from "./low_level_api" +import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "./low_level_api" +import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "./low_level_api" + +export type ChangeOptions = { message?: string, time?: number } + +export type Doc = { readonly [P in keyof T]: Doc } + +export type ChangeFn = (doc: T) => void + +export interface State { + change: DecodedChange + snapshot: T +} + +export function use(api: LowLevelApi) { + UseApi(api) +} function _state(doc: Doc) : Automerge { let state = (doc)[STATE] @@ -50,7 +61,7 @@ export function init(actor?: ActorId) : Doc{ if (typeof actor !== "string") { actor = undefined } - const state = AutomergeWASM.create(actor) + const state = ApiHandler.create(actor) return rootProxy(state, true); } @@ -67,16 +78,21 @@ export function from(initialState: T | Doc, actor?: ActorId): Doc { return change(init(actor), (d) => Object.assign(d, initialState)) } -export function change>(doc: D, options: ChangeOptions | ChangeFn, callback?: ChangeFn): D { - +export 
function change(doc: Doc, options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc { if (typeof options === 'function') { - callback = options - options = {} + return _change(doc, {}, options) + } else if (typeof callback === 'function') { + if (typeof options === "string") { + options = { message: options } + } + return _change(doc, options, callback) + } else { + throw RangeError("Invalid args for change") } +} + +function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): Doc { - if (typeof options === "string") { - options = { message: options } - } if (typeof callback !== "function") { throw new RangeError("invalid change function"); @@ -149,7 +165,7 @@ export function emptyChange(doc: Doc, options: ChangeOptions) { } export function load(data: Uint8Array, actor: ActorId) : Doc { - const state = AutomergeWASM.load(data, actor) + const state = ApiHandler.load(data, actor) return rootProxy(state, true); } @@ -303,23 +319,23 @@ export function equals(val1: any, val2: any) : boolean { } export function encodeSyncState(state: SyncState) : Uint8Array { - return AutomergeWASM.encodeSyncState(AutomergeWASM.importSyncState(state)) + return ApiHandler.encodeSyncState(ApiHandler.importSyncState(state)) } export function decodeSyncState(state: Uint8Array) : SyncState { - return AutomergeWASM.exportSyncState(AutomergeWASM.decodeSyncState(state)) + return ApiHandler.exportSyncState(ApiHandler.decodeSyncState(state)) } export function generateSyncMessage(doc: Doc, inState: SyncState) : [ SyncState, SyncMessage | null ] { const state = _state(doc) - const syncState = AutomergeWASM.importSyncState(inState) + const syncState = ApiHandler.importSyncState(inState) const message = state.generateSyncMessage(syncState) - const outState = AutomergeWASM.exportSyncState(syncState) + const outState = ApiHandler.exportSyncState(syncState) return [ outState, message ] } export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage) : [ Doc, 
SyncState, null ] { - const syncState = AutomergeWASM.importSyncState(inState) + const syncState = ApiHandler.importSyncState(inState) if (doc === undefined || _obj(doc) !== "_root") { throw new RangeError("must be the document root"); } @@ -337,28 +353,28 @@ export function receiveSyncMessage(doc: Doc, inState: SyncState, message: state.receiveSyncMessage(syncState, message) //@ts-ignore doc[HEADS] = heads; - const outState = AutomergeWASM.exportSyncState(syncState) + const outState = ApiHandler.exportSyncState(syncState) return [rootProxy(state, true), outState, null]; } export function initSyncState() : SyncState { - return AutomergeWASM.exportSyncState(AutomergeWASM.initSyncState()) + return ApiHandler.exportSyncState(ApiHandler.initSyncState()) } export function encodeChange(change: DecodedChange) : Change { - return AutomergeWASM.encodeChange(change) + return ApiHandler.encodeChange(change) } export function decodeChange(data: Change) : DecodedChange { - return AutomergeWASM.decodeChange(data) + return ApiHandler.decodeChange(data) } export function encodeSyncMessage(message: DecodedSyncMessage) : SyncMessage { - return AutomergeWASM.encodeSyncMessage(message) + return ApiHandler.encodeSyncMessage(message) } export function decodeSyncMessage(message: SyncMessage) : DecodedSyncMessage { - return AutomergeWASM.decodeSyncMessage(message) + return ApiHandler.decodeSyncMessage(message) } export function getMissingDeps(doc: Doc, heads: Heads) : Heads { @@ -401,99 +417,3 @@ export function toJS(doc: any) : any { } } -type ChangeOptions = - | string // = message - | { - message?: string - time?: number - } - -type Doc = FreezeObject - -/** - * The argument pased to the callback of a `change` function is a mutable proxy of the original - * type. `Proxy` is the inverse of `Doc`: `Proxy>` is `T`, and `Doc>` is `D`. - */ -type Proxy = D extends Doc ? 
T : never - -type ChangeFn = (doc: T) => void - -interface State { - change: DecodedChange - snapshot: T -} - -// custom CRDT types - -/* - class TableRow { - readonly id: UUID - } - - class Table { - constructor() - add(item: T): UUID - byId(id: UUID): T & TableRow - count: number - ids: UUID[] - remove(id: UUID): void - rows: (T & TableRow)[] - } -*/ - - class List extends Array { - insertAt?(index: number, ...args: T[]): List - deleteAt?(index: number, numDelete?: number): List - } - -/* - - class Text extends List { - constructor(text?: string | string[]) - get(index: number): string - toSpans(): (string | T)[] - } - - // Note that until https://github.com/Microsoft/TypeScript/issues/2361 is addressed, we - // can't treat a Counter like a literal number without force-casting it as a number. - // This won't compile: - // `assert.strictEqual(c + 10, 13) // Operator '+' cannot be applied to types 'Counter' and '10'.ts(2365)` - // But this will: - // `assert.strictEqual(c as unknown as number + 10, 13)` - class Counter extends Number { - constructor(value?: number) - increment(delta?: number): void - decrement(delta?: number): void - toString(): string - valueOf(): number - value: number - } - - class Int { constructor(value: number) } - class Uint { constructor(value: number) } - class Float64 { constructor(value: number) } - -*/ - - // Readonly variants - - //type ReadonlyTable = ReadonlyArray & Table - type ReadonlyList = ReadonlyArray & List - type ReadonlyText = ReadonlyList & Text - -// prettier-ignore -type Freeze = - T extends Function ? T - : T extends Text ? ReadonlyText -// : T extends Table ? FreezeTable - : T extends List ? FreezeList - : T extends Array ? FreezeArray - : T extends Map ? FreezeMap - : T extends string & infer O ? 
string & O - : FreezeObject - -//interface FreezeTable extends ReadonlyTable> {} -interface FreezeList extends ReadonlyList> {} -interface FreezeArray extends ReadonlyArray> {} -interface FreezeMap extends ReadonlyMap, Freeze> {} -type FreezeObject = { readonly [P in keyof T]: Freeze } diff --git a/automerge-js/src/low_level_api.ts b/automerge-js/src/low_level_api.ts new file mode 100644 index 00000000..4f01a18b --- /dev/null +++ b/automerge-js/src/low_level_api.ts @@ -0,0 +1,190 @@ + +export type Actor = string; +export type ObjID = string; +export type Change = Uint8Array; +export type SyncMessage = Uint8Array; +export type Prop = string | number; +export type Hash = string; +export type Heads = Hash[]; +export type Value = string | number | boolean | null | Date | Uint8Array +export type ObjType = string | Array | Object +export type FullValue = + ["str", string] | + ["int", number] | + ["uint", number] | + ["f64", number] | + ["boolean", boolean] | + ["timestamp", Date] | + ["counter", number] | + ["bytes", Uint8Array] | + ["null", Uint8Array] | + ["map", ObjID] | + ["list", ObjID] | + ["text", ObjID] | + ["table", ObjID] + +export enum ObjTypeName { + list = "list", + map = "map", + table = "table", + text = "text", +} + +export type Datatype = + "boolean" | + "str" | + "int" | + "uint" | + "f64" | + "null" | + "timestamp" | + "counter" | + "bytes" | + "map" | + "text" | + "list"; + +export type DecodedSyncMessage = { + heads: Heads, + need: Heads, + have: any[] + changes: Change[] +} + +export type DecodedChange = { + actor: Actor, + seq: number + startOp: number, + time: number, + message: string | null, + deps: Heads, + hash: Hash, + ops: Op[] +} + +export type Op = { + action: string, + obj: ObjID, + key: string, + value?: string | number | boolean, + datatype?: string, + pred: string[], +} + +export type Patch = { + obj: ObjID + action: 'assign' | 'insert' | 'delete' + key: Prop + value: Value + datatype: Datatype + conflict: boolean +} + +export 
interface LowLevelApi { + create(actor?: Actor): Automerge; + load(data: Uint8Array, actor?: Actor): Automerge; + encodeChange(change: DecodedChange): Change; + decodeChange(change: Change): DecodedChange; + initSyncState(): SyncState; + encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; + decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; + encodeSyncState(state: SyncState): Uint8Array; + decodeSyncState(data: Uint8Array): SyncState; + exportSyncState(state: SyncState): JsSyncState; + importSyncState(state: JsSyncState): SyncState; +} + +export function UseApi(api: LowLevelApi) { + for (let k in api) { + ApiHandler[k] = api[k] + } +} + +export let ApiHandler : LowLevelApi = { + create(actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, + load(data: Uint8Array, actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, + encodeChange(change: DecodedChange): Change { throw new RangeError("Automerge.use() not called") }, + decodeChange(change: Change): DecodedChange { throw new RangeError("Automerge.use() not called") }, + initSyncState(): SyncState { throw new RangeError("Automerge.use() not called") }, + encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { throw new RangeError("Automerge.use() not called") }, + decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage { throw new RangeError("Automerge.use() not called") }, + encodeSyncState(state: SyncState): Uint8Array { throw new RangeError("Automerge.use() not called") }, + decodeSyncState(data: Uint8Array): SyncState { throw new RangeError("Automerge.use() not called") }, + exportSyncState(state: SyncState): JsSyncState { throw new RangeError("Automerge.use() not called") }, + importSyncState(state: JsSyncState): SyncState { throw new RangeError("Automerge.use() not called") }, +} + +export interface Automerge { + // change state + put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined; + putObject(obj: ObjID, prop: 
Prop, value: ObjType): ObjID; + insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): undefined; + insertObject(obj: ObjID, index: number, value: ObjType): ObjID; + push(obj: ObjID, value: Value, datatype?: Datatype): undefined; + pushObject(obj: ObjID, value: ObjType): ObjID; + splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; + increment(obj: ObjID, prop: Prop, value: number): void; + delete(obj: ObjID, prop: Prop): void; + + // returns a single value - if there is a conflict return the winner + get(obj: ObjID, prop: any, heads?: Heads): FullValue | null; + // return all values in case of a conflict + getAll(obj: ObjID, arg: any, heads?: Heads): FullValue[]; + keys(obj: ObjID, heads?: Heads): string[]; + text(obj: ObjID, heads?: Heads): string; + length(obj: ObjID, heads?: Heads): number; + materialize(obj?: ObjID, heads?: Heads): any; + + // transactions + commit(message?: string, time?: number): Hash; + merge(other: Automerge): Heads; + getActorId(): Actor; + pendingOps(): number; + rollback(): number; + + // patches + enablePatches(enable: boolean): void; + popPatches(): Patch[]; + + // save and load to local store + save(): Uint8Array; + saveIncremental(): Uint8Array; + loadIncremental(data: Uint8Array): number; + + // sync over network + receiveSyncMessage(state: SyncState, message: SyncMessage): void; + generateSyncMessage(state: SyncState): SyncMessage | null; + + // low level change functions + applyChanges(changes: Change[]): void; + getChanges(have_deps: Heads): Change[]; + getChangeByHash(hash: Hash): Change | null; + getChangesAdded(other: Automerge): Change[]; + getHeads(): Heads; + getLastLocalChange(): Change; + getMissingDeps(heads?: Heads): Heads; + + // memory management + free(): void; + clone(actor?: string): Automerge; + fork(actor?: string): Automerge; + forkAt(heads: Heads, actor?: string): Automerge; + + // dump internal state to console.log + dump(): void; + + // dump 
internal state to a JS object + toJS(): any; +} + +export interface JsSyncState { + lastSentHeads: any; + sentHashes: any; + readonly sharedHeads: any; +} + +export interface SyncState extends JsSyncState { + free(): void; + clone(): SyncState; +} + diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index 4e91b2e2..82171218 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -1,15 +1,11 @@ -import AutomergeWASM from "automerge-wasm" -import { Automerge, Heads, ObjID } from "automerge-wasm" -// @ts-ignore +import { Automerge, Heads, ObjID } from "./low_level_api" import { Int, Uint, Float64 } from "./numbers" -// @ts-ignore import { Counter, getWriteableCounter } from "./counter" -// @ts-ignore import { Text } from "./text" import { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } from "./constants" -export function parseListIndex(key) { +function parseListIndex(key) { if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) if (typeof key !== 'number') { // throw new TypeError('A list index must be a number, but you passed ' + JSON.stringify(key)) @@ -100,7 +96,7 @@ function import_value(value) { } } -export const MapHandler = { +const MapHandler = { get (target, key) : any { const { context, objectId, path, readonly, frozen, heads, cache } = target if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } @@ -196,7 +192,7 @@ export const MapHandler = { } -export const ListHandler = { +const ListHandler = { get (target, index) { const {context, objectId, path, readonly, frozen, heads } = target index = parseListIndex(index) @@ -338,7 +334,7 @@ export const ListHandler = { } } -export const TextHandler = Object.assign({}, ListHandler, { +const TextHandler = Object.assign({}, ListHandler, { get (target, index) { // FIXME this is a one line change from ListHandler.get() const {context, objectId, path, readonly, frozen, heads } = target @@ -622,5 +618,3 @@ function textMethods(target) { return 
methods } - -//module.exports = { rootProxy, textProxy, listProxy, mapProxy, MapHandler, ListHandler, TextHandler } diff --git a/automerge-js/test/basic_test.ts b/automerge-js/test/basic_test.ts index 5aff21b0..9508f3d3 100644 --- a/automerge-js/test/basic_test.ts +++ b/automerge-js/test/basic_test.ts @@ -1,6 +1,9 @@ import * as assert from 'assert' import * as util from 'util' import * as Automerge from '../src' +import * as AutomergeWASM from "automerge-wasm" + +Automerge.use(AutomergeWASM) describe('Automerge', () => { describe('basics', () => { diff --git a/automerge-js/test/columnar_test.ts b/automerge-js/test/columnar_test.ts index ca670377..fc01741b 100644 --- a/automerge-js/test/columnar_test.ts +++ b/automerge-js/test/columnar_test.ts @@ -2,6 +2,9 @@ import * as assert from 'assert' import { checkEncoded } from './helpers' import * as Automerge from '../src' import { encodeChange, decodeChange } from '../src' +import * as AutomergeWASM from "automerge-wasm" + +Automerge.use(AutomergeWASM) describe('change encoding', () => { it('should encode text edits', () => { diff --git a/automerge-js/test/legacy_tests.ts b/automerge-js/test/legacy_tests.ts index 4034ca25..044b7eef 100644 --- a/automerge-js/test/legacy_tests.ts +++ b/automerge-js/test/legacy_tests.ts @@ -2,6 +2,9 @@ import * as assert from 'assert' import * as Automerge from '../src' import { assertEqualsOneOf } from './helpers' import { decodeChange } from '../src/columnar' +import * as AutomergeWASM from "automerge-wasm" + +Automerge.use(AutomergeWASM) const UUID_PATTERN = /^[0-9a-f]{32}$/ const OPID_PATTERN = /^[0-9]+@[0-9a-f]{32}$/ diff --git a/automerge-js/test/sync_test.ts b/automerge-js/test/sync_test.ts index c7f8015b..db5c3bb9 100644 --- a/automerge-js/test/sync_test.ts +++ b/automerge-js/test/sync_test.ts @@ -3,6 +3,9 @@ import * as Automerge from '../src' import { BloomFilter } from '../src/sync' import { decodeChangeMeta } from '../src/columnar' import { decodeSyncMessage, encodeSyncMessage, 
decodeSyncState, encodeSyncState, initSyncState } from "../src" +import * as AutomergeWASM from "automerge-wasm" + +Automerge.use(AutomergeWASM) function inspect(a) { const util = require("util"); diff --git a/automerge-js/test/text_test.ts b/automerge-js/test/text_test.ts index 8dbfc93c..51424c91 100644 --- a/automerge-js/test/text_test.ts +++ b/automerge-js/test/text_test.ts @@ -1,6 +1,9 @@ import * as assert from 'assert' import * as Automerge from '../src' import { assertEqualsOneOf } from './helpers' +import * as AutomergeWASM from "automerge-wasm" + +Automerge.use(AutomergeWASM) function attributeStateToAttributes(accumulatedAttributes) { const attributes = {} diff --git a/automerge-js/test/uuid_test.ts b/automerge-js/test/uuid_test.ts index 4182a8c4..1bed4f49 100644 --- a/automerge-js/test/uuid_test.ts +++ b/automerge-js/test/uuid_test.ts @@ -1,5 +1,8 @@ import * as assert from 'assert' import * as Automerge from '../src' +import * as AutomergeWASM from "automerge-wasm" + +Automerge.use(AutomergeWASM) const uuid = Automerge.uuid From 5e1bdb79eddc70044b83a17f77650c491e06869a Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 18 May 2022 15:39:42 -0400 Subject: [PATCH 388/730] eslint --fix --- automerge-js/src/columnar.ts | 116 +++++++++++++++--------------- automerge-js/src/common.ts | 4 +- automerge-js/src/encoding.ts | 2 +- automerge-js/src/index.ts | 12 ++-- automerge-js/src/low_level_api.ts | 4 +- automerge-js/src/proxies.ts | 48 ++++++------- automerge-js/src/sync.ts | 38 +++++----- automerge-js/src/text.ts | 2 +- 8 files changed, 113 insertions(+), 113 deletions(-) diff --git a/automerge-js/src/columnar.ts b/automerge-js/src/columnar.ts index fd203333..54847e12 100644 --- a/automerge-js/src/columnar.ts +++ b/automerge-js/src/columnar.ts @@ -145,7 +145,7 @@ function parseAllOpIds(changes, single) { if (op.obj.actorId) actors[op.obj.actorId] = true if (op.elemId && op.elemId.actorId) actors[op.elemId.actorId] = true if (op.child && op.child.actorId) 
actors[op.child.actorId] = true - for (let pred of op.pred) actors[pred.actorId] = true + for (const pred of op.pred) actors[pred.actorId] = true return op }) newChanges.push(change) @@ -155,10 +155,10 @@ function parseAllOpIds(changes, single) { if (single) { actorIds = [changes[0].actor].concat(actorIds.filter(actor => actor !== changes[0].actor)) } - for (let change of newChanges) { + for (const change of newChanges) { change.actorNum = actorIds.indexOf(change.actor) for (let i = 0; i < change.ops.length; i++) { - let op = change.ops[i] + const op = change.ops[i] op.id = {counter: change.startOp + i, actorNum: change.actorNum, actorId: change.actor} op.obj = actorIdToActorNum(op.obj, actorIds) op.elemId = actorIdToActorNum(op.elemId, actorIds) @@ -393,7 +393,7 @@ function encodeOps(ops, forDocument) { columns.predActor = new RLEEncoder('uint') } - for (let op of ops) { + for (const op of ops) { encodeObjectId(op, columns) encodeOperationKey(op, columns) columns.insert.appendValue(!!op.insert) @@ -427,8 +427,8 @@ function encodeOps(ops, forDocument) { } } - let columnList : any = [] - for (let {columnName, columnId} of forDocument ? DOC_OPS_COLUMNS : CHANGE_COLUMNS) { + const columnList : any = [] + for (const {columnName, columnId} of forDocument ? 
DOC_OPS_COLUMNS : CHANGE_COLUMNS) { if (columns[columnName]) columnList.push({id: columnId, name: columnName, encoder: columns[columnName]}) } return columnList.sort((a, b) => a.id - b.id) @@ -436,7 +436,7 @@ function encodeOps(ops, forDocument) { function expandMultiOps(ops, startOp, actor) { let opNum = startOp - let expandedOps : any = [] + const expandedOps : any = [] for (const op of ops) { if (op.action === 'set' && op.values && op.insert) { if (op.pred.length !== 0) throw new RangeError('multi-insert pred must be empty') @@ -471,7 +471,7 @@ function expandMultiOps(ops, startOp, actor) { */ function decodeOps(ops, forDocument) { const newOps : any = [] - for (let op of ops) { + for (const op of ops) { const obj = (op.objCtr === null) ? '_root' : `${op.objCtr}@${op.objActor}` const elemId = op.keyStr ? undefined : (op.keyCtr === 0 ? '_head' : `${op.keyCtr}@${op.keyActor}`) const action = ACTIONS[op.action] || op.action @@ -503,7 +503,7 @@ function decodeOps(ops, forDocument) { */ function checkSortedOpIds(opIds) { let last = null - for (let opId of opIds) { + for (const opId of opIds) { if (last && compareParsedOpIds(last, opId) !== -1) { throw new RangeError('operation IDs are not in ascending order') } @@ -565,7 +565,7 @@ export function makeDecoders(columns, columnSpec) { function decodeColumns(columns, actorIds, columnSpec) { columns = makeDecoders(columns, columnSpec) - let parsedRows : any = [] + const parsedRows : any = [] while (columns.some(col => !col.decoder.done)) { let row = {}, col = 0 while (col < columns.length) { @@ -578,7 +578,7 @@ function decodeColumns(columns, actorIds, columnSpec) { if (columnId % 8 === COLUMN_TYPE.GROUP_CARD) { const values : any = [], count = columns[col].decoder.readValue() for (let i = 0; i < count; i++) { - let value = {} + const value = {} for (let colOffset = 1; colOffset < groupCols; colOffset++) { decodeValueColumns(columns, col + colOffset, actorIds, value) } @@ -615,7 +615,7 @@ function 
decodeColumnInfo(decoder) { function encodeColumnInfo(encoder, columns) { const nonEmptyColumns = columns.filter(column => column.encoder.buffer.byteLength > 0) encoder.appendUint53(nonEmptyColumns.length) - for (let column of nonEmptyColumns) { + for (const column of nonEmptyColumns) { encoder.appendUint53(column.id) encoder.appendUint53(column.encoder.buffer.byteLength) } @@ -626,7 +626,7 @@ function decodeChangeHeader(decoder) { for (let i = 0; i < numDeps; i++) { deps.push(bytesToHexString(decoder.readRawBytes(32))) } - let change : any = { + const change : any = { actor: decoder.readHexString(), seq: decoder.readUint53(), startOp: decoder.readUint53(), @@ -714,7 +714,7 @@ export function encodeChange(changeObj) { const { hash, bytes } = encodeContainer(CHUNK_TYPE_CHANGE, encoder => { if (!Array.isArray(change.deps)) throw new TypeError('deps is not an array') encoder.appendUint53(change.deps.length) - for (let hash of change.deps.slice().sort()) { + for (const hash of change.deps.slice().sort()) { encoder.appendRawBytes(hexStringToBytes(hash)) } encoder.appendHexString(change.actor) @@ -723,11 +723,11 @@ export function encodeChange(changeObj) { encoder.appendInt53(change.time) encoder.appendPrefixedString(change.message || '') encoder.appendUint53(actorIds.length - 1) - for (let actor of actorIds.slice(1)) encoder.appendHexString(actor) + for (const actor of actorIds.slice(1)) encoder.appendHexString(actor) const columns : any = encodeOps(change.ops, false) encodeColumnInfo(encoder, columns) - for (let column of columns) encoder.appendRawBytes(column.encoder.buffer) + for (const column of columns) encoder.appendRawBytes(column.encoder.buffer) if (change.extraBytes) encoder.appendRawBytes(change.extraBytes) }) @@ -842,8 +842,8 @@ export function splitContainers(buffer) { */ export function decodeChanges(binaryChanges) { let decoded : any = [] - for (let binaryChange of binaryChanges) { - for (let chunk of splitContainers(binaryChange)) { + for (const 
binaryChange of binaryChanges) { + for (const chunk of splitContainers(binaryChange)) { if (chunk[8] === CHUNK_TYPE_DOCUMENT) { decoded = decoded.concat(decodeDocument(chunk)) } else if (chunk[8] === CHUNK_TYPE_CHANGE || chunk[8] === CHUNK_TYPE_DEFLATE) { @@ -869,8 +869,8 @@ function sortOpIds(a, b) { } function groupDocumentOps(changes) { - let byObjectId = {}, byReference = {}, objectType = {} - for (let change of changes) { + const byObjectId = {}, byReference = {}, objectType = {} + for (const change of changes) { for (let i = 0; i < change.ops.length; i++) { const op = change.ops[i], opId = `${op.id.counter}@${op.id.actorId}` const objectId = (op.obj === '_root') ? '_root' : `${op.obj.counter}@${op.obj.actorId}` @@ -902,7 +902,7 @@ function groupDocumentOps(changes) { byObjectId[objectId][key][opId] = op op.succ = [] - for (let pred of op.pred) { + for (const pred of op.pred) { const predId = `${pred.counter}@${pred.actorId}` if (!byObjectId[objectId][key][predId]) { throw new RangeError(`No predecessor operation ${predId}`) @@ -912,15 +912,15 @@ function groupDocumentOps(changes) { } } - let ops = [] - for (let objectId of Object.keys(byObjectId).sort(sortOpIds)) { + const ops = [] + for (const objectId of Object.keys(byObjectId).sort(sortOpIds)) { let keys : string[] = [] if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { - let stack = ['_head'] + const stack = ['_head'] while (stack.length > 0) { const key : any = stack.pop() if (key !== '_head') keys.push(key) - for (let opId of byReference[objectId][key].sort(sortOpIds)) stack.push(opId) + for (const opId of byReference[objectId][key].sort(sortOpIds)) stack.push(opId) } } else { // FIXME JavaScript sorts based on UTF-16 encoding. 
We should change this to use the UTF-8 @@ -928,8 +928,8 @@ function groupDocumentOps(changes) { keys = Object.keys(byObjectId[objectId]).sort() } - for (let key of keys) { - for (let opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) { + for (const key of keys) { + for (const opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) { const op = byObjectId[objectId][key][opId] // @ts-ignore if (op.action !== 'del') ops.push(op) @@ -945,8 +945,8 @@ function groupDocumentOps(changes) { * Does not return anything, only mutates `changes`. */ function groupChangeOps(changes, ops) { - let changesByActor = {} // map from actorId to array of changes by that actor - for (let change of changes) { + const changesByActor = {} // map from actorId to array of changes by that actor + for (const change of changes) { change.ops = [] if (!changesByActor[change.actor]) changesByActor[change.actor] = [] if (change.seq !== changesByActor[change.actor].length + 1) { @@ -958,12 +958,12 @@ function groupChangeOps(changes, ops) { changesByActor[change.actor].push(change) } - let opsById = {} - for (let op of ops) { + const opsById = {} + for (const op of ops) { if (op.action === 'del') throw new RangeError('document should not contain del operations') op.pred = opsById[op.id] ? opsById[op.id].pred : [] opsById[op.id] = op - for (let succ of op.succ) { + for (const succ of op.succ) { if (!opsById[succ]) { if (op.elemId) { const elemId = op.insert ? 
op.id : op.elemId @@ -976,12 +976,12 @@ function groupChangeOps(changes, ops) { } delete op.succ } - for (let op of Object.values(opsById)) { + for (const op of Object.values(opsById)) { // @ts-ignore if (op.action === 'del') ops.push(op) } - for (let op of ops) { + for (const op of ops) { const { counter, actorId } = parseOpId(op.id) const actorChanges = changesByActor[actorId] // Binary search to find the change that should contain this operation @@ -1000,7 +1000,7 @@ function groupChangeOps(changes, ops) { actorChanges[left].ops.push(op) } - for (let change of changes) { + for (const change of changes) { change.ops.sort((op1, op2) => sortOpIds(op1.id, op2.id)) change.startOp = change.maxOp - change.ops.length + 1 delete change.maxOp @@ -1026,8 +1026,8 @@ function encodeDocumentChanges(changes) { extraLen : new RLEEncoder('uint'), extraRaw : new Encoder() } - let indexByHash = {} // map from change hash to its index in the changes array - let heads = {} // change hashes that are not a dependency of any other change + const indexByHash = {} // map from change hash to its index in the changes array + const heads = {} // change hashes that are not a dependency of any other change for (let i = 0; i < changes.length; i++) { const change = changes[i] @@ -1041,7 +1041,7 @@ function encodeDocumentChanges(changes) { columns.message.appendValue(change.message) columns.depsNum.appendValue(change.deps.length) - for (let dep of change.deps) { + for (const dep of change.deps) { if (typeof indexByHash[dep] !== 'number') { throw new RangeError(`Unknown dependency hash: ${dep}`) } @@ -1057,8 +1057,8 @@ function encodeDocumentChanges(changes) { } } - let changesColumns : any = [] - for (let {columnName, columnId} of DOCUMENT_COLUMNS) { + const changesColumns : any = [] + for (const {columnName, columnId} of DOCUMENT_COLUMNS) { changesColumns.push({id: columnId, name: columnName, encoder: columns[columnName]}) } changesColumns.sort((a, b) => a.id - b.id) @@ -1066,11 +1066,11 @@ 
function encodeDocumentChanges(changes) { } function decodeDocumentChanges(changes, expectedHeads) { - let heads = {} // change hashes that are not a dependency of any other change + const heads = {} // change hashes that are not a dependency of any other change for (let i = 0; i < changes.length; i++) { - let change = changes[i] + const change = changes[i] change.deps = [] - for (let index of change.depsNum.map(d => d.depsIndex)) { + for (const index of change.depsNum.map(d => d.depsIndex)) { if (!changes[index] || !changes[index].hash) { throw new RangeError(`No hash for index ${index} while processing index ${i}`) } @@ -1110,24 +1110,24 @@ export function encodeDocument(binaryChanges) { const { changes, actorIds } = parseAllOpIds(decodeChanges(binaryChanges), false) const { changesColumns, heads } = encodeDocumentChanges(changes) const opsColumns = encodeOps(groupDocumentOps(changes), true) - for (let column of changesColumns) deflateColumn(column) - for (let column of opsColumns) deflateColumn(column) + for (const column of changesColumns) deflateColumn(column) + for (const column of opsColumns) deflateColumn(column) return encodeContainer(CHUNK_TYPE_DOCUMENT, encoder => { encoder.appendUint53(actorIds.length) - for (let actor of actorIds) { + for (const actor of actorIds) { encoder.appendHexString(actor) } encoder.appendUint53(heads.length) - for (let head of heads.sort()) { + for (const head of heads.sort()) { encoder.appendRawBytes(hexStringToBytes(head)) } encodeColumnInfo(encoder, changesColumns) encodeColumnInfo(encoder, opsColumns) // @ts-ignore - for (let column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) + for (const column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) // @ts-ignore - for (let column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) + for (const column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) }).bytes } @@ -1201,17 +1201,17 @@ function inflateColumn(column) { */ 
function addPatchProperty(objects, property) { let values : any = {}, counter : any = null - for (let op of property.ops) { + for (const op of property.ops) { // Apply counters and their increments regardless of the number of successor operations if (op.actionName === 'set' && op.value.datatype === 'counter') { if (!counter) counter = {opId: op.opId, value: 0, succ: {}} counter.value += op.value.value - for (let succId of op.succ) counter.succ[succId] = true + for (const succId of op.succ) counter.succ[succId] = true } else if (op.actionName === 'inc') { if (!counter) throw new RangeError(`inc operation ${op.opId} without a counter`) counter.value += op.value.value delete counter.succ[op.opId] - for (let succId of op.succ) counter.succ[succId] = true + for (const succId of op.succ) counter.succ[succId] = true } else if (op.succ.length === 0) { // Ignore any ops that have been overwritten if (op.actionName.startsWith('make')) { @@ -1240,7 +1240,7 @@ function addPatchProperty(objects, property) { } if (Object.keys(values).length > 0) { - let obj = objects[property.objId] + const obj = objects[property.objId] if (obj.type === 'map' || obj.type === 'table') { obj.props[property.key] = values } else if (obj.type === 'list' || obj.type === 'text') { @@ -1278,7 +1278,7 @@ function makeListEdits(list, values, elemId, index) { function condenseEdits(diff) { if (diff.type === 'list' || diff.type === 'text') { diff.edits.forEach(e => condenseEdits(e.value)) - let newEdits = diff.edits + const newEdits = diff.edits diff.edits = [] for (const edit of newEdits) appendEdit(diff.edits, edit) } else if (diff.type === 'map' || diff.type === 'table') { @@ -1300,7 +1300,7 @@ export function appendEdit(existingEdits, nextEdit) { return } - let lastEdit = existingEdits[existingEdits.length - 1] + const lastEdit = existingEdits[existingEdits.length - 1] if (lastEdit.action === 'insert' && nextEdit.action === 'insert' && lastEdit.index === nextEdit.index - 1 && lastEdit.value.type === 
'value' && nextEdit.value.type === 'value' && @@ -1345,7 +1345,7 @@ export function constructPatch(documentBuffer) { const col : any = makeDecoders(opsColumns, DOC_OPS_COLUMNS).reduce( (acc, col: any) => Object.assign(acc, {[col.columnName]: col.decoder}), {}) - let objects = {_root: {objectId: '_root', type: 'map', props: {}}} + const objects = {_root: {objectId: '_root', type: 'map', props: {}}} let property : any = null while (!col.idActor.done) { @@ -1362,7 +1362,7 @@ export function constructPatch(documentBuffer) { const objActor = col.objActor.readValue(), objCtr = col.objCtr.readValue() const objId = objActor === null ? '_root' : `${objCtr}@${actorIds[objActor]}` - let obj = objects[objId] + const obj = objects[objId] if (!obj) throw new RangeError(`Operation for nonexistent object: ${objId}`) const keyActor = col.keyActor.readValue(), keyCtr = col.keyCtr.readValue() @@ -1373,7 +1373,7 @@ export function constructPatch(documentBuffer) { const rawValue = col.valRaw.readRawBytes(sizeTag >> 4) const value = decodeValue(sizeTag, rawValue) const succNum = col.succNum.readValue() - let succ : string[] = [] + const succ : string[] = [] for (let i = 0; i < succNum; i++) { succ.push(`${col.succCtr.readValue()}@${actorIds[col.succActor.readValue()]}`) } diff --git a/automerge-js/src/common.ts b/automerge-js/src/common.ts index 5f1b53d1..f8abe8ea 100644 --- a/automerge-js/src/common.ts +++ b/automerge-js/src/common.ts @@ -8,8 +8,8 @@ export function isObject(obj: any) : boolean { */ export function copyObject(obj: any) : any { if (!isObject(obj)) return {} - let copy : any = {} - for (let key of Object.keys(obj)) { + const copy : any = {} + for (const key of Object.keys(obj)) { copy[key] = obj[key] } return copy diff --git a/automerge-js/src/encoding.ts b/automerge-js/src/encoding.ts index 55ba679d..e31312ce 100644 --- a/automerge-js/src/encoding.ts +++ b/automerge-js/src/encoding.ts @@ -761,7 +761,7 @@ export class RLEEncoder extends Encoder { 
this.appendRawValue(this.lastValue) } else if (this.state === 'literal') { this.appendInt53(-this.literal.length) - for (let v of this.literal) this.appendRawValue(v) + for (const v of this.literal) this.appendRawValue(v) } else if (this.state === 'nulls') { this.appendInt32(0) this.appendUint53(this.count) diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index a1cc4968..cf207200 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -34,7 +34,7 @@ export function use(api: LowLevelApi) { } function _state(doc: Doc) : Automerge { - let state = (doc)[STATE] + const state = (doc)[STATE] if (state == undefined) { throw new RangeError("must be the document root") } @@ -118,7 +118,7 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn(doc: Doc) : ActorId { } function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : any { - let values = context.getAll(objectId, prop) + const values = context.getAll(objectId, prop) if (values.length <= 1) { return } - let result = {} + const result = {} for (const conflict of values) { const datatype = conflict[0] const value = conflict[1] @@ -407,8 +407,8 @@ export function toJS(doc: any) : any { //@ts-ignore return doc.map((a: any) => toJS(a)) } - let tmp : any = {} - for (let index in doc) { + const tmp : any = {} + for (const index in doc) { tmp[index] = toJS(doc[index]) } return tmp diff --git a/automerge-js/src/low_level_api.ts b/automerge-js/src/low_level_api.ts index 4f01a18b..813839fa 100644 --- a/automerge-js/src/low_level_api.ts +++ b/automerge-js/src/low_level_api.ts @@ -95,12 +95,12 @@ export interface LowLevelApi { } export function UseApi(api: LowLevelApi) { - for (let k in api) { + for (const k in api) { ApiHandler[k] = api[k] } } -export let ApiHandler : LowLevelApi = { +export const ApiHandler : LowLevelApi = { create(actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, load(data: Uint8Array, actor?: Actor): Automerge { 
throw new RangeError("Automerge.use() not called") }, encodeChange(change: DecodedChange): Change { throw new RangeError("Automerge.use() not called") }, diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index 82171218..1733ab4b 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -19,7 +19,7 @@ function parseListIndex(key) { function valueAt(target, prop) : any { const { context, objectId, path, readonly, heads} = target - let value = context.get(objectId, prop, heads) + const value = context.get(objectId, prop, heads) if (value === undefined) { return } @@ -112,7 +112,7 @@ const MapHandler = { }, set (target, key, val) { - let { context, objectId, path, readonly, frozen} = target + const { context, objectId, path, readonly, frozen} = target target.cache = {} // reset cache on set if (val && val[OBJECT_ID]) { throw new RangeError('Cannot create a reference to an existing document object') @@ -125,7 +125,7 @@ const MapHandler = { target.heads = val return true } - let [ value, datatype ] = import_value(val) + const [ value, datatype ] = import_value(val) if (frozen) { throw new RangeError("Attempting to use an outdated Automerge document") } @@ -225,7 +225,7 @@ const ListHandler = { }, set (target, index, val) { - let {context, objectId, path, readonly, frozen } = target + const {context, objectId, path, readonly, frozen } = target index = parseListIndex(index) if (val && val[OBJECT_ID]) { throw new RangeError('Cannot create a reference to an existing document object') @@ -318,14 +318,14 @@ const ListHandler = { index = parseListIndex(index) - let value = valueAt(target, index) + const value = valueAt(target, index) return { configurable: true, enumerable: true, value } }, getPrototypeOf(target) { return Object.getPrototypeOf([]) }, ownKeys (target) : string[] { const {context, objectId, heads } = target - let keys : string[] = [] + const keys : string[] = [] // uncommenting this causes assert.deepEqual() to fail when 
comparing to a pojo array // but not uncommenting it causes for (i in list) {} to not enumerate values properly //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) } @@ -375,13 +375,13 @@ export function mapProxy(context: Automerge, objectId: ObjID, path?: string[], r } export function listProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { - let target = [] + const target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, ListHandler) } export function textProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { - let target = [] + const target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, TextHandler) } @@ -437,17 +437,17 @@ function listMethods(target) { }, pop() { - let length = context.length(objectId) + const length = context.length(objectId) if (length == 0) { return undefined } - let last = valueAt(target, length - 1) + const last = valueAt(target, length - 1) context.delete(objectId, length - 1) return last }, push(...values) { - let len = context.length(objectId) + const len = context.length(objectId) this.splice(len, 0, ...values) return context.length(objectId) }, @@ -462,7 +462,7 @@ function listMethods(target) { splice(index, del, ...vals) { index = parseListIndex(index) del = parseListIndex(del) - for (let val of vals) { + for (const val of vals) { if (val && val[OBJECT_ID]) { throw new RangeError('Cannot create a reference to an existing document object') } @@ -473,14 +473,14 @@ function listMethods(target) { if (readonly) { throw new RangeError("Sequence object cannot be modified outside of a change block") } - let result : any = [] + const result : any = [] for (let i = 0; i < del; i++) { - let value = valueAt(target, index) + const value = 
valueAt(target, index) result.push(value) context.delete(objectId, index) } const values = vals.map((val) => import_value(val)) - for (let [value,datatype] of values) { + for (const [value,datatype] of values) { switch (datatype) { case "list": const list = context.insertObject(objectId, index, []) @@ -513,10 +513,10 @@ function listMethods(target) { }, entries() { - let i = 0; + const i = 0; const iterator = { next: () => { - let value = valueAt(target, i) + const value = valueAt(target, i) if (value === undefined) { return { value: undefined, done: true } } else { @@ -529,7 +529,7 @@ function listMethods(target) { keys() { let i = 0; - let len = context.length(objectId, heads) + const len = context.length(objectId, heads) const iterator = { next: () => { let value : undefined | number = undefined @@ -541,10 +541,10 @@ function listMethods(target) { }, values() { - let i = 0; + const i = 0; const iterator = { next: () => { - let value = valueAt(target, i) + const value = valueAt(target, i) if (value === undefined) { return { value: undefined, done: true } } else { @@ -558,13 +558,13 @@ function listMethods(target) { // Read-only methods that can delegate to the JavaScript built-in implementations // FIXME - super slow - for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes', + for (const method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes', 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight', 'slice', 'some', 'toLocaleString', 'toString']) { methods[method] = (...args) => { const list : any = [] while (true) { - let value = valueAt(target, list.length) + const value = valueAt(target, list.length) if (value == undefined) { break } @@ -591,9 +591,9 @@ function textMethods(target) { return context.text(objectId, heads).replace(//g,'') }, toSpans () : any[] { - let spans : any[] = [] + const spans : any[] = [] let chars = '' - let length = this.length + const length = this.length for (let i = 0; i < 
length; i++) { const value = this[i] if (typeof value === 'string') { diff --git a/automerge-js/src/sync.ts b/automerge-js/src/sync.ts index fd40e343..cf90d5cf 100644 --- a/automerge-js/src/sync.ts +++ b/automerge-js/src/sync.ts @@ -48,7 +48,7 @@ export class BloomFilter { this.numBitsPerEntry = BITS_PER_ENTRY this.numProbes = NUM_PROBES this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) - for (let hash of arg) this.addHash(hash) + for (const hash of arg) this.addHash(hash) } else if (arg instanceof Uint8Array) { if (arg.byteLength === 0) { this.numEntries = 0 @@ -96,7 +96,7 @@ export class BloomFilter { // on the next three lines, the right shift means interpret value as unsigned let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo - let z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo + const z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo const probes = [x] for (let i = 1; i < this.numProbes; i++) { x = (x + y) % modulo @@ -110,7 +110,7 @@ export class BloomFilter { * Sets the Bloom filter bits corresponding to a given SHA-256 hash (given as hex string). */ addHash(hash) { - for (let probe of this.getProbes(hash)) { + for (const probe of this.getProbes(hash)) { this.bits[probe >>> 3] |= 1 << (probe & 7) } } @@ -120,7 +120,7 @@ export class BloomFilter { */ containsHash(hash) { if (this.numEntries === 0) return false - for (let probe of this.getProbes(hash)) { + for (const probe of this.getProbes(hash)) { if ((this.bits[probe >>> 3] & (1 << (probe & 7))) === 0) { return false } @@ -148,7 +148,7 @@ function encodeHashes(encoder, hashes) { * array of hex strings. 
*/ function decodeHashes(decoder) : string[] { - let length = decoder.readUint32(), hashes : string[] = [] + const length = decoder.readUint32(), hashes : string[] = [] for (let i = 0; i < length; i++) { hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE))) } @@ -165,12 +165,12 @@ export function encodeSyncMessage(message) { encodeHashes(encoder, message.heads) encodeHashes(encoder, message.need) encoder.appendUint32(message.have.length) - for (let have of message.have) { + for (const have of message.have) { encodeHashes(encoder, have.lastSync) encoder.appendPrefixedBytes(have.bloom) } encoder.appendUint32(message.changes.length) - for (let change of message.changes) { + for (const change of message.changes) { encoder.appendPrefixedBytes(change) } return encoder.buffer @@ -188,7 +188,7 @@ export function decodeSyncMessage(bytes) { const heads = decodeHashes(decoder) const need = decodeHashes(decoder) const haveCount = decoder.readUint32() - let message = {heads, need, have: [], changes: []} + const message = {heads, need, have: [], changes: []} for (let i = 0; i < haveCount; i++) { const lastSync = decodeHashes(decoder) const bloom = decoder.readPrefixedBytes() @@ -255,9 +255,9 @@ function getChangesToSend(backend, have, need) { return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined) } - let lastSyncHashes : any = {}, bloomFilters : BloomFilter[] = [] - for (let h of have) { - for (let hash of h.lastSync) lastSyncHashes[hash] = true + const lastSyncHashes : any = {}, bloomFilters : BloomFilter[] = [] + for (const h of have) { + for (const hash of h.lastSync) lastSyncHashes[hash] = true bloomFilters.push(new BloomFilter(h.bloom)) } @@ -265,12 +265,12 @@ function getChangesToSend(backend, have, need) { const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)) .map(change => decodeChangeMeta(change, true)) - let changeHashes : any = {}, dependents : any = {}, hashesToSend : any = {} - for (let 
change of changes) { + const changeHashes : any = {}, dependents : any = {}, hashesToSend : any = {} + for (const change of changes) { changeHashes[change.hash] = true // For each change, make a list of changes that depend on it - for (let dep of change.deps) { + for (const dep of change.deps) { if (!dependents[dep]) dependents[dep] = [] dependents[dep].push(change.hash) } @@ -282,11 +282,11 @@ function getChangesToSend(backend, have, need) { } // Include any changes that depend on a Bloom-negative change - let stack = Object.keys(hashesToSend) + const stack = Object.keys(hashesToSend) while (stack.length > 0) { const hash : any = stack.pop() if (dependents[hash]) { - for (let dep of dependents[hash]) { + for (const dep of dependents[hash]) { if (!hashesToSend[dep]) { hashesToSend[dep] = true stack.push(dep) @@ -296,8 +296,8 @@ function getChangesToSend(backend, have, need) { } // Include any explicitly requested changes - let changesToSend : any = [] - for (let hash of need) { + const changesToSend : any = [] + for (const hash of need) { hashesToSend[hash] = true if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()? 
const change = Backend.getChangeByHash(backend, hash) @@ -306,7 +306,7 @@ function getChangesToSend(backend, have, need) { } // Return changes in the order they were returned by getMissingChanges() - for (let change of changes) { + for (const change of changes) { if (hashesToSend[change.hash]) changesToSend.push(change.change) } return changesToSend diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index 02aac54d..738289a4 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -118,7 +118,7 @@ export class Text { } // Read-only methods that can delegate to the JavaScript built-in array -for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes', +for (const method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes', 'indexOf', 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight', 'slice', 'some', 'toLocaleString']) { Text.prototype[method] = function (...args) { From 515a2eb94b80e891029413b7ab80ac198acdf655 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 18 May 2022 16:16:29 -0400 Subject: [PATCH 389/730] removing some ts errors --- automerge-js/src/bloom.ts | 124 ++++++++ automerge-js/src/index.ts | 2 +- automerge-js/src/low_level_api.ts | 7 +- automerge-js/src/proxies.ts | 16 +- automerge-js/src/sync.ts | 487 ------------------------------ automerge-js/src/text.ts | 13 +- automerge-js/src/uuid.ts | 11 +- automerge-js/test/sync_test.ts | 2 +- automerge-wasm/index.d.ts | 7 +- 9 files changed, 160 insertions(+), 509 deletions(-) create mode 100644 automerge-js/src/bloom.ts delete mode 100644 automerge-js/src/sync.ts diff --git a/automerge-js/src/bloom.ts b/automerge-js/src/bloom.ts new file mode 100644 index 00000000..cb66466a --- /dev/null +++ b/automerge-js/src/bloom.ts @@ -0,0 +1,124 @@ +/** + * Implementation of the data synchronisation protocol that brings a local and a remote document + * into the same state. 
This is typically used when two nodes have been disconnected for some time, + * and need to exchange any changes that happened while they were disconnected. The two nodes that + * are syncing could be client and server, or server and client, or two peers with symmetric roles. + * + * The protocol is based on this paper: Martin Kleppmann and Heidi Howard. Byzantine Eventual + * Consistency and the Fundamental Limits of Peer-to-Peer Databases. https://arxiv.org/abs/2012.00472 + * + * The protocol assumes that every time a node successfully syncs with another node, it remembers + * the current heads (as returned by `Backend.getHeads()`) after the last sync with that node. The + * next time we try to sync with the same node, we start from the assumption that the other node's + * document version is no older than the outcome of the last sync, so we only need to exchange any + * changes that are more recent than the last sync. This assumption may not be true if the other + * node did not correctly persist its state (perhaps it crashed before writing the result of the + * last sync to disk), and we fall back to sending the entire document in this case. + */ + +import { hexStringToBytes, Encoder, Decoder } from './encoding' + +// These constants correspond to a 1% false positive rate. The values can be changed without +// breaking compatibility of the network protocol, since the parameters used for a particular +// Bloom filter are encoded in the wire format. +const BITS_PER_ENTRY = 10, NUM_PROBES = 7 + +/** + * A Bloom filter implementation that can be serialised to a byte array for transmission + * over a network. The entries that are added are assumed to already be SHA-256 hashes, + * so this implementation does not perform its own hashing. 
+ */ +export class BloomFilter { + numEntries: number; + numBitsPerEntry: number; + numProbes: number; + bits: Uint8Array; + + constructor (arg) { + if (Array.isArray(arg)) { + // arg is an array of SHA256 hashes in hexadecimal encoding + this.numEntries = arg.length + this.numBitsPerEntry = BITS_PER_ENTRY + this.numProbes = NUM_PROBES + this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) + for (const hash of arg) this.addHash(hash) + } else if (arg instanceof Uint8Array) { + if (arg.byteLength === 0) { + this.numEntries = 0 + this.numBitsPerEntry = 0 + this.numProbes = 0 + this.bits = arg + } else { + const decoder = new Decoder(arg) + this.numEntries = decoder.readUint32() + this.numBitsPerEntry = decoder.readUint32() + this.numProbes = decoder.readUint32() + this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) + } + } else { + throw new TypeError('invalid argument') + } + } + + /** + * Returns the Bloom filter state, encoded as a byte array. + */ + get bytes() { + if (this.numEntries === 0) return new Uint8Array(0) + const encoder = new Encoder() + encoder.appendUint32(this.numEntries) + encoder.appendUint32(this.numBitsPerEntry) + encoder.appendUint32(this.numProbes) + encoder.appendRawBytes(this.bits) + return encoder.buffer + } + + /** + * Given a SHA-256 hash (as hex string), returns an array of probe indexes indicating which bits + * in the Bloom filter need to be tested or set for this particular entry. We do this by + * interpreting the first 12 bytes of the hash as three little-endian 32-bit unsigned integers, + * and then using triple hashing to compute the probe indexes. The algorithm comes from: + * + * Peter C. Dillinger and Panagiotis Manolios. Bloom Filters in Probabilistic Verification. + * 5th International Conference on Formal Methods in Computer-Aided Design (FMCAD), November 2004. 
+ * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf + */ + getProbes(hash) { + const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength + if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`) + // on the next three lines, the right shift means interpret value as unsigned + let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo + let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo + const z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo + const probes = [x] + for (let i = 1; i < this.numProbes; i++) { + x = (x + y) % modulo + y = (y + z) % modulo + probes.push(x) + } + return probes + } + + /** + * Sets the Bloom filter bits corresponding to a given SHA-256 hash (given as hex string). + */ + addHash(hash) { + for (const probe of this.getProbes(hash)) { + this.bits[probe >>> 3] |= 1 << (probe & 7) + } + } + + /** + * Tests whether a given SHA-256 hash (given as hex string) is contained in the Bloom filter. 
+ */ + containsHash(hash) { + if (this.numEntries === 0) return false + for (const probe of this.getProbes(hash)) { + if ((this.bits[probe >>> 3] & (1 << (probe & 7))) === 0) { + return false + } + } + return true + } +} + diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index cf207200..2885531c 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -118,7 +118,7 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : T { return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler) } -export function listProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { +export function listProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : Array { const target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, ListHandler) } -export function textProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { +export function textProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : Array { const target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, TextHandler) } -export function rootProxy(context: Automerge, readonly?: boolean) : any { +export function rootProxy(context: Automerge, readonly?: boolean) : T { return mapProxy(context, "_root", [], !!readonly) } @@ -494,7 +494,7 @@ function listMethods(target) { break; case "map": const map = context.insertObject(objectId, index, {}) - const proxyMap = mapProxy(context, map, [ ... path, index ], readonly); + const proxyMap : any = mapProxy(context, map, [ ... 
path, index ], readonly); for (const key in value) { proxyMap[key] = value[key] } @@ -578,7 +578,7 @@ function listMethods(target) { return methods } -function textMethods(target) { +function textMethods(target) : any { const {context, objectId, path, readonly, frozen, heads } = target const methods : any = { set (index, value) { diff --git a/automerge-js/src/sync.ts b/automerge-js/src/sync.ts deleted file mode 100644 index cf90d5cf..00000000 --- a/automerge-js/src/sync.ts +++ /dev/null @@ -1,487 +0,0 @@ -/** - * Implementation of the data synchronisation protocol that brings a local and a remote document - * into the same state. This is typically used when two nodes have been disconnected for some time, - * and need to exchange any changes that happened while they were disconnected. The two nodes that - * are syncing could be client and server, or server and client, or two peers with symmetric roles. - * - * The protocol is based on this paper: Martin Kleppmann and Heidi Howard. Byzantine Eventual - * Consistency and the Fundamental Limits of Peer-to-Peer Databases. https://arxiv.org/abs/2012.00472 - * - * The protocol assumes that every time a node successfully syncs with another node, it remembers - * the current heads (as returned by `Backend.getHeads()`) after the last sync with that node. The - * next time we try to sync with the same node, we start from the assumption that the other node's - * document version is no older than the outcome of the last sync, so we only need to exchange any - * changes that are more recent than the last sync. This assumption may not be true if the other - * node did not correctly persist its state (perhaps it crashed before writing the result of the - * last sync to disk), and we fall back to sending the entire document in this case. 
- */ - -const Backend : any = {} //require('./backend') -import { hexStringToBytes, bytesToHexString, Encoder, Decoder } from './encoding' -import { decodeChangeMeta } from './columnar' -import { copyObject } from './common' - -const HASH_SIZE = 32 // 256 bits = 32 bytes -const MESSAGE_TYPE_SYNC = 0x42 // first byte of a sync message, for identification -const PEER_STATE_TYPE = 0x43 // first byte of an encoded peer state, for identification - -// These constants correspond to a 1% false positive rate. The values can be changed without -// breaking compatibility of the network protocol, since the parameters used for a particular -// Bloom filter are encoded in the wire format. -const BITS_PER_ENTRY = 10, NUM_PROBES = 7 - -/** - * A Bloom filter implementation that can be serialised to a byte array for transmission - * over a network. The entries that are added are assumed to already be SHA-256 hashes, - * so this implementation does not perform its own hashing. - */ -export class BloomFilter { - numEntries: number; - numBitsPerEntry: number; - numProbes: number; - bits: Uint8Array; - - constructor (arg) { - if (Array.isArray(arg)) { - // arg is an array of SHA256 hashes in hexadecimal encoding - this.numEntries = arg.length - this.numBitsPerEntry = BITS_PER_ENTRY - this.numProbes = NUM_PROBES - this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) - for (const hash of arg) this.addHash(hash) - } else if (arg instanceof Uint8Array) { - if (arg.byteLength === 0) { - this.numEntries = 0 - this.numBitsPerEntry = 0 - this.numProbes = 0 - this.bits = arg - } else { - const decoder = new Decoder(arg) - this.numEntries = decoder.readUint32() - this.numBitsPerEntry = decoder.readUint32() - this.numProbes = decoder.readUint32() - this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) - } - } else { - throw new TypeError('invalid argument') - } - } - - /** - * Returns the Bloom filter state, encoded as a byte array. 
- */ - get bytes() { - if (this.numEntries === 0) return new Uint8Array(0) - const encoder = new Encoder() - encoder.appendUint32(this.numEntries) - encoder.appendUint32(this.numBitsPerEntry) - encoder.appendUint32(this.numProbes) - encoder.appendRawBytes(this.bits) - return encoder.buffer - } - - /** - * Given a SHA-256 hash (as hex string), returns an array of probe indexes indicating which bits - * in the Bloom filter need to be tested or set for this particular entry. We do this by - * interpreting the first 12 bytes of the hash as three little-endian 32-bit unsigned integers, - * and then using triple hashing to compute the probe indexes. The algorithm comes from: - * - * Peter C. Dillinger and Panagiotis Manolios. Bloom Filters in Probabilistic Verification. - * 5th International Conference on Formal Methods in Computer-Aided Design (FMCAD), November 2004. - * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf - */ - getProbes(hash) { - const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength - if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`) - // on the next three lines, the right shift means interpret value as unsigned - let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo - let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo - const z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo - const probes = [x] - for (let i = 1; i < this.numProbes; i++) { - x = (x + y) % modulo - y = (y + z) % modulo - probes.push(x) - } - return probes - } - - /** - * Sets the Bloom filter bits corresponding to a given SHA-256 hash (given as hex string). 
- */ - addHash(hash) { - for (const probe of this.getProbes(hash)) { - this.bits[probe >>> 3] |= 1 << (probe & 7) - } - } - - /** - * Tests whether a given SHA-256 hash (given as hex string) is contained in the Bloom filter. - */ - containsHash(hash) { - if (this.numEntries === 0) return false - for (const probe of this.getProbes(hash)) { - if ((this.bits[probe >>> 3] & (1 << (probe & 7))) === 0) { - return false - } - } - return true - } -} - -/** - * Encodes a sorted array of SHA-256 hashes (as hexadecimal strings) into a byte array. - */ -function encodeHashes(encoder, hashes) { - if (!Array.isArray(hashes)) throw new TypeError('hashes must be an array') - encoder.appendUint32(hashes.length) - for (let i = 0; i < hashes.length; i++) { - if (i > 0 && hashes[i - 1] >= hashes[i]) throw new RangeError('hashes must be sorted') - const bytes = hexStringToBytes(hashes[i]) - if (bytes.byteLength !== HASH_SIZE) throw new TypeError('heads hashes must be 256 bits') - encoder.appendRawBytes(bytes) - } -} - -/** - * Decodes a byte array in the format returned by encodeHashes(), and returns its content as an - * array of hex strings. - */ -function decodeHashes(decoder) : string[] { - const length = decoder.readUint32(), hashes : string[] = [] - for (let i = 0; i < length; i++) { - hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE))) - } - return hashes -} - -/** - * Takes a sync message of the form `{heads, need, have, changes}` and encodes it as a byte array for - * transmission. 
- */ -export function encodeSyncMessage(message) { - const encoder = new Encoder() - encoder.appendByte(MESSAGE_TYPE_SYNC) - encodeHashes(encoder, message.heads) - encodeHashes(encoder, message.need) - encoder.appendUint32(message.have.length) - for (const have of message.have) { - encodeHashes(encoder, have.lastSync) - encoder.appendPrefixedBytes(have.bloom) - } - encoder.appendUint32(message.changes.length) - for (const change of message.changes) { - encoder.appendPrefixedBytes(change) - } - return encoder.buffer -} - -/** - * Takes a binary-encoded sync message and decodes it into the form `{heads, need, have, changes}`. - */ -export function decodeSyncMessage(bytes) { - const decoder = new Decoder(bytes) - const messageType = decoder.readByte() - if (messageType !== MESSAGE_TYPE_SYNC) { - throw new RangeError(`Unexpected message type: ${messageType}`) - } - const heads = decodeHashes(decoder) - const need = decodeHashes(decoder) - const haveCount = decoder.readUint32() - const message = {heads, need, have: [], changes: []} - for (let i = 0; i < haveCount; i++) { - const lastSync = decodeHashes(decoder) - const bloom = decoder.readPrefixedBytes() - // @ts-ignore - message.have.push({lastSync, bloom}) - } - const changeCount = decoder.readUint32() - for (let i = 0; i < changeCount; i++) { - const change = decoder.readPrefixedBytes() - // @ts-ignore - message.changes.push(change) - } - // Ignore any trailing bytes -- they can be used for extensions by future versions of the protocol - return message -} - -/** - * Takes a SyncState and encodes as a byte array those parts of the state that should persist across - * an application restart or disconnect and reconnect. The ephemeral parts of the state that should - * be cleared on reconnect are not encoded. 
- */ -export function encodeSyncState(syncState) { - const encoder = new Encoder() - encoder.appendByte(PEER_STATE_TYPE) - encodeHashes(encoder, syncState.sharedHeads) - return encoder.buffer -} - -/** - * Takes a persisted peer state as encoded by `encodeSyncState` and decodes it into a SyncState - * object. The parts of the peer state that were not encoded are initialised with default values. - */ -export function decodeSyncState(bytes) { - const decoder = new Decoder(bytes) - const recordType = decoder.readByte() - if (recordType !== PEER_STATE_TYPE) { - throw new RangeError(`Unexpected record type: ${recordType}`) - } - const sharedHeads = decodeHashes(decoder) - return Object.assign(initSyncState(), { sharedHeads }) -} - -/** - * Constructs a Bloom filter containing all changes that are not one of the hashes in - * `lastSync` or its transitive dependencies. In other words, the filter contains those - * changes that have been applied since the version identified by `lastSync`. Returns - * an object of the form `{lastSync, bloom}` as required for the `have` field of a sync - * message. - */ -function makeBloomFilter(backend, lastSync) { - const newChanges = Backend.getChanges(backend, lastSync) - const hashes = newChanges.map(change => decodeChangeMeta(change, true).hash) - return {lastSync, bloom: new BloomFilter(hashes).bytes} -} - -/** - * Call this function when a sync message is received from another node. The `message` argument - * needs to already have been decoded using `decodeSyncMessage()`. This function determines the - * changes that we need to send to the other node in response. Returns an array of changes (as - * byte arrays). 
- */ -function getChangesToSend(backend, have, need) { - if (have.length === 0) { - return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined) - } - - const lastSyncHashes : any = {}, bloomFilters : BloomFilter[] = [] - for (const h of have) { - for (const hash of h.lastSync) lastSyncHashes[hash] = true - bloomFilters.push(new BloomFilter(h.bloom)) - } - - // Get all changes that were added since the last sync - const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)) - .map(change => decodeChangeMeta(change, true)) - - const changeHashes : any = {}, dependents : any = {}, hashesToSend : any = {} - for (const change of changes) { - changeHashes[change.hash] = true - - // For each change, make a list of changes that depend on it - for (const dep of change.deps) { - if (!dependents[dep]) dependents[dep] = [] - dependents[dep].push(change.hash) - } - - // Exclude any change hashes contained in one or more Bloom filters - if (bloomFilters.every(bloom => !bloom.containsHash(change.hash))) { - hashesToSend[change.hash] = true - } - } - - // Include any changes that depend on a Bloom-negative change - const stack = Object.keys(hashesToSend) - while (stack.length > 0) { - const hash : any = stack.pop() - if (dependents[hash]) { - for (const dep of dependents[hash]) { - if (!hashesToSend[dep]) { - hashesToSend[dep] = true - stack.push(dep) - } - } - } - } - - // Include any explicitly requested changes - const changesToSend : any = [] - for (const hash of need) { - hashesToSend[hash] = true - if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()? 
- const change = Backend.getChangeByHash(backend, hash) - if (change) changesToSend.push(change) - } - } - - // Return changes in the order they were returned by getMissingChanges() - for (const change of changes) { - if (hashesToSend[change.hash]) changesToSend.push(change.change) - } - return changesToSend -} - -export function initSyncState() { - return { - sharedHeads: [], - lastSentHeads: [], - theirHeads: null, - theirNeed: null, - theirHave: null, - sentHashes: {}, - } -} - -function compareArrays(a, b) { - return (a.length === b.length) && a.every((v, i) => v === b[i]) -} - -/** - * Given a backend and what we believe to be the state of our peer, generate a message which tells - * them about we have and includes any changes we believe they need - */ -export function generateSyncMessage(backend, syncState) { - if (!backend) { - throw new Error("generateSyncMessage called with no Automerge document") - } - if (!syncState) { - throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()") - } - - let { sharedHeads, lastSentHeads, theirHeads, theirNeed, theirHave, sentHashes } = syncState - const ourHeads = Backend.getHeads(backend) - - // Hashes to explicitly request from the remote peer: any missing dependencies of unapplied - // changes, and any of the remote peer's heads that we don't know about - const ourNeed = Backend.getMissingDeps(backend, theirHeads || []) - - // There are two reasons why ourNeed may be nonempty: 1. we might be missing dependencies due to - // Bloom filter false positives; 2. we might be missing heads that the other peer mentioned - // because they (intentionally) only sent us a subset of changes. In case 1, we leave the `have` - // field of the message empty because we just want to fill in the missing dependencies for now. - // In case 2, or if ourNeed is empty, we send a Bloom filter to request any unsent changes. 
- let ourHave : any = [] - if (!theirHeads || ourNeed.every(hash => theirHeads.includes(hash))) { - ourHave = [makeBloomFilter(backend, sharedHeads)] - } - - // Fall back to a full re-sync if the sender's last sync state includes hashes - // that we don't know. This could happen if we crashed after the last sync and - // failed to persist changes that the other node already sent us. - if (theirHave && theirHave.length > 0) { - const lastSync = theirHave[0].lastSync - if (!lastSync.every(hash => Backend.getChangeByHash(backend, hash))) { - // we need to queue them to send us a fresh sync message, the one they sent is uninteligible so we don't know what they need - const resetMsg = {heads: ourHeads, need: [], have: [{ lastSync: [], bloom: new Uint8Array(0) }], changes: []} - return [syncState, encodeSyncMessage(resetMsg)] - } - } - - // XXX: we should limit ourselves to only sending a subset of all the messages, probably limited by a total message size - // these changes should ideally be RLE encoded but we haven't implemented that yet. - let changesToSend = Array.isArray(theirHave) && Array.isArray(theirNeed) ? getChangesToSend(backend, theirHave, theirNeed) : [] - - // If the heads are equal, we're in sync and don't need to do anything further - const headsUnchanged = Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads) - const headsEqual = Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads) - if (headsUnchanged && headsEqual && changesToSend.length === 0) { - // no need to send a sync message if we know we're synced! - return [syncState, null] - } - - // TODO: this recomputes the SHA-256 hash of each change; we should restructure this to avoid the - // unnecessary recomputation - changesToSend = changesToSend.filter(change => !sentHashes[decodeChangeMeta(change, true).hash]) - - // Regular response to a sync message: send any changes that the other node - // doesn't have. 
We leave the "have" field empty because the previous message - // generated by `syncStart` already indicated what changes we have. - const syncMessage = {heads: ourHeads, have: ourHave, need: ourNeed, changes: changesToSend} - if (changesToSend.length > 0) { - sentHashes = copyObject(sentHashes) - for (const change of changesToSend) { - sentHashes[decodeChangeMeta(change, true).hash] = true - } - } - - syncState = Object.assign({}, syncState, {lastSentHeads: ourHeads, sentHashes}) - return [syncState, encodeSyncMessage(syncMessage)] -} - -/** - * Computes the heads that we share with a peer after we have just received some changes from that - * peer and applied them. This may not be sufficient to bring our heads in sync with the other - * peer's heads, since they may have only sent us a subset of their outstanding changes. - * - * `myOldHeads` are the local heads before the most recent changes were applied, `myNewHeads` are - * the local heads after those changes were applied, and `ourOldSharedHeads` is the previous set of - * shared heads. Applying the changes will have replaced some heads with others, but some heads may - * have remained unchanged (because they are for branches on which no changes have been added). Any - * such unchanged heads remain in the sharedHeads. Any sharedHeads that were replaced by applying - * changes are also replaced as sharedHeads. This is safe because if we received some changes from - * another peer, that means that peer had those changes, and therefore we now both know about them. 
- */ -function advanceHeads(myOldHeads, myNewHeads, ourOldSharedHeads) { - const newHeads = myNewHeads.filter((head) => !myOldHeads.includes(head)) - const commonHeads = ourOldSharedHeads.filter((head) => myNewHeads.includes(head)) - const advancedHeads = [...new Set([...newHeads, ...commonHeads])].sort() - return advancedHeads -} - - -/** - * Given a backend, a message message and the state of our peer, apply any changes, update what - * we believe about the peer, and (if there were applied changes) produce a patch for the frontend - */ -export function receiveSyncMessage(backend, oldSyncState, binaryMessage) { - if (!backend) { - throw new Error("generateSyncMessage called with no Automerge document") - } - if (!oldSyncState) { - throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()") - } - - let { sharedHeads, lastSentHeads, sentHashes } = oldSyncState, patch = null - const message = decodeSyncMessage(binaryMessage) - const beforeHeads = Backend.getHeads(backend) - - // If we received changes, we try to apply them to the document. There may still be missing - // dependencies due to Bloom filter false positives, in which case the backend will enqueue the - // changes without applying them. The set of changes may also be incomplete if the sender decided - // to break a large set of changes into chunks. - if (message.changes.length > 0) { - [backend, patch] = Backend.applyChanges(backend, message.changes) - sharedHeads = advanceHeads(beforeHeads, Backend.getHeads(backend), sharedHeads) - } - - // If heads are equal, indicate we don't need to send a response message - if (message.changes.length === 0 && compareArrays(message.heads, beforeHeads)) { - lastSentHeads = message.heads - } - - // If all of the remote heads are known to us, that means either our heads are equal, or we are - // ahead of the remote peer. In this case, take the remote heads to be our shared heads. 
- const knownHeads = message.heads.filter(head => Backend.getChangeByHash(backend, head)) - if (knownHeads.length === message.heads.length) { - sharedHeads = message.heads - // If the remote peer has lost all its data, reset our state to perform a full resync - if (message.heads.length === 0) { - lastSentHeads = [] - sentHashes = [] - } - } else { - // If some remote heads are unknown to us, we add all the remote heads we know to - // sharedHeads, but don't remove anything from sharedHeads. This might cause sharedHeads to - // contain some redundant hashes (where one hash is actually a transitive dependency of - // another), but this will be cleared up as soon as we know all the remote heads. - sharedHeads = [...new Set(knownHeads.concat(sharedHeads))].sort() - } - - const syncState = { - sharedHeads, // what we have in common to generate an efficient bloom filter - lastSentHeads, - theirHave: message.have, // the information we need to calculate the changes they need - theirHeads: message.heads, - theirNeed: message.need, - sentHashes - } - return [backend, syncState, patch] -} - -module.exports = { - receiveSyncMessage, generateSyncMessage, - encodeSyncMessage, decodeSyncMessage, - initSyncState, encodeSyncState, decodeSyncState, - BloomFilter // BloomFilter is a private API, exported only for testing purposes -} diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index 738289a4..e31f979c 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -1,8 +1,7 @@ -import { OBJECT_ID } from './constants' -import { isObject } from '../src/common' +import { Value } from "./low_level_api" export class Text { - elems: any[] + elems: Value[] constructor (text?: string | string[]) { //const instance = Object.create(Text.prototype) @@ -21,7 +20,7 @@ export class Text { return this.elems.length } - get (index) : any { + get (index) : Value { return this.elems[index] } @@ -66,8 +65,8 @@ export class Text { * For example, the value ['a', 'b', {x: 3}, 
'c', 'd'] has spans: * => ['ab', {x: 3}, 'cd'] */ - toSpans() : any[] { - const spans : any = [] + toSpans() : Value[] { + const spans : Value[] = [] let chars = '' for (const elem of this.elems) { if (typeof elem === 'string') { @@ -97,7 +96,7 @@ export class Text { /** * Updates the list item at position `index` to a new value `value`. */ - set (index: number, value: any) { + set (index: number, value: Value) { this.elems[index] = value } diff --git a/automerge-js/src/uuid.ts b/automerge-js/src/uuid.ts index bc6c4bb1..549b0fc5 100644 --- a/automerge-js/src/uuid.ts +++ b/automerge-js/src/uuid.ts @@ -6,11 +6,16 @@ function defaultFactory() { let factory = defaultFactory -export function uuid() { +interface UUIDFactory extends Function { + setFactory(f: typeof factory); + reset(); +} + +export const uuid : UUIDFactory = () => { return factory() } -// @ts-ignore uuid.setFactory = newFactory => { factory = newFactory } -// @ts-ignore + uuid.reset = () => { factory = defaultFactory } + diff --git a/automerge-js/test/sync_test.ts b/automerge-js/test/sync_test.ts index db5c3bb9..0118776c 100644 --- a/automerge-js/test/sync_test.ts +++ b/automerge-js/test/sync_test.ts @@ -1,6 +1,6 @@ import * as assert from 'assert' import * as Automerge from '../src' -import { BloomFilter } from '../src/sync' +import { BloomFilter } from '../src/bloom' import { decodeChangeMeta } from '../src/columnar' import { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } from "../src" import * as AutomergeWASM from "automerge-wasm" diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 47f32deb..e4701a62 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -44,10 +44,15 @@ export type Datatype = "text" | "list"; +export type SyncHave { + lastSync: Heads, + bloom: Uint8Array, +} + export type DecodedSyncMessage = { heads: Heads, need: Heads, - have: any[] + have: SyncHave[] changes: Change[] } From 
fd02585d2ad22d74a959150dce88d66a8696713c Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 18 May 2022 17:36:09 -0400 Subject: [PATCH 390/730] removed a bunch of lint errors --- automerge-js/src/columnar.ts | 23 ++++--- automerge-js/src/counter.ts | 2 +- automerge-js/src/encoding.ts | 14 ++-- automerge-js/src/index.ts | 34 ++++----- .../src/{low_level_api.ts => low_level.ts} | 0 automerge-js/src/proxies.ts | 69 ++++++++++--------- automerge-js/src/text.ts | 2 +- 7 files changed, 73 insertions(+), 71 deletions(-) rename automerge-js/src/{low_level_api.ts => low_level.ts} (100%) diff --git a/automerge-js/src/columnar.ts b/automerge-js/src/columnar.ts index 54847e12..2560380b 100644 --- a/automerge-js/src/columnar.ts +++ b/automerge-js/src/columnar.ts @@ -541,7 +541,8 @@ export function decoderByColumnId(columnId, buffer) { export function makeDecoders(columns, columnSpec) { const emptyBuf = new Uint8Array(0) - let decoders : any = [], columnIndex = 0, specIndex = 0 + const decoders : any = [] + let columnIndex = 0, specIndex = 0 while (columnIndex < columns.length || specIndex < columnSpec.length) { if (columnIndex === columns.length || @@ -567,10 +568,12 @@ function decodeColumns(columns, actorIds, columnSpec) { columns = makeDecoders(columns, columnSpec) const parsedRows : any = [] while (columns.some(col => !col.decoder.done)) { - let row = {}, col = 0 + const row = {} + let col = 0 while (col < columns.length) { const columnId = columns[col].columnId - let groupId = columnId >> 4, groupCols = 1 + const groupId = columnId >> 4 + let groupCols = 1 while (col + groupCols < columns.length && columns[col + groupCols].columnId >> 4 === groupId) { groupCols++ } @@ -600,7 +603,8 @@ function decodeColumnInfo(decoder) { // deflate-compressed. We ignore this bit when checking whether columns are sorted by ID. 
const COLUMN_ID_MASK = (-1 ^ COLUMN_TYPE_DEFLATE) >>> 0 - let lastColumnId = -1, columns : any = [], numColumns = decoder.readUint53() + let lastColumnId = -1 + const columns : any = [], numColumns = decoder.readUint53() for (let i = 0; i < numColumns; i++) { const columnId = decoder.readUint53(), bufferLen = decoder.readUint53() if ((columnId & COLUMN_ID_MASK) <= (lastColumnId & COLUMN_ID_MASK)) { @@ -827,7 +831,8 @@ function inflateChange(buffer) { * returns an array of subarrays, each subarray containing one change. */ export function splitContainers(buffer) { - let decoder = new Decoder(buffer), chunks : any = [], startOffset = 0 + const decoder = new Decoder(buffer), chunks : any = [] + let startOffset = 0 while (!decoder.done) { decodeContainerHeader(decoder, false) chunks.push(buffer.subarray(startOffset, decoder.offset)) @@ -912,7 +917,7 @@ function groupDocumentOps(changes) { } } - const ops = [] + const ops : any[] = [] for (const objectId of Object.keys(byObjectId).sort(sortOpIds)) { let keys : string[] = [] if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { @@ -930,8 +935,7 @@ function groupDocumentOps(changes) { for (const key of keys) { for (const opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) { - const op = byObjectId[objectId][key][opId] - // @ts-ignore + const op : any = byObjectId[objectId][key][opId] if (op.action !== 'del') ops.push(op) } } @@ -1200,7 +1204,8 @@ function inflateColumn(column) { * or false if the property has been deleted. 
*/ function addPatchProperty(objects, property) { - let values : any = {}, counter : any = null + const values : any = {} + let counter : any = null for (const op of property.ops) { // Apply counters and their increments regardless of the number of successor operations if (op.actionName === 'set' && op.value.datatype === 'counter') { diff --git a/automerge-js/src/counter.ts b/automerge-js/src/counter.ts index 0539af39..34ce211b 100644 --- a/automerge-js/src/counter.ts +++ b/automerge-js/src/counter.ts @@ -1,4 +1,4 @@ -import { Automerge, ObjID, Prop } from "./low_level_api" +import { Automerge, ObjID, Prop } from "./low_level" /** * The most basic CRDT: an integer value that can be changed only by * incrementing and decrementing. Since addition of integers is commutative, diff --git a/automerge-js/src/encoding.ts b/automerge-js/src/encoding.ts index e31312ce..773c3288 100644 --- a/automerge-js/src/encoding.ts +++ b/automerge-js/src/encoding.ts @@ -26,11 +26,11 @@ export function hexStringToBytes(value: string) : Uint8Array { if (!/^([0-9a-f][0-9a-f])*$/.test(value)) { throw new RangeError('value is not hexadecimal') } - if (value === '') { + const match = value.match(/../g) + if (match === null) { return new Uint8Array(0) } else { - // @ts-ignore - return new Uint8Array(value.match(/../g).map(b => parseInt(b, 16))) + return new Uint8Array(match.map(b => parseInt(b, 16))) } } @@ -44,7 +44,8 @@ for (let i = 0; i < 256; i++) { * Converts a Uint8Array into the equivalent hexadecimal string. */ export function bytesToHexString(bytes: Uint8Array) : string { - let hex = '', len = bytes.byteLength + let hex = '' + const len = bytes.byteLength for (let i = 0; i < len; i++) { hex += BYTE_TO_HEX[bytes[i]] } @@ -989,7 +990,8 @@ export class DeltaEncoder extends RLEEncoder { // Copy any null values, and the first non-null value, so that appendValue() computes the // difference between the encoder's last value and the decoder's first (absolute) value. 
- let value = decoder.readValue(), nulls = 0 + const value = decoder.readValue() + let nulls = 0 this.appendValue(value) if (value === null) { nulls = decoder.count + 1 @@ -1011,9 +1013,7 @@ export class DeltaEncoder extends RLEEncoder { if (remaining !== undefined) remaining -= nulls + 1 const { nonNullValues, sum } = super.copyFrom(decoder, {count: remaining, sumValues: true}) if (nonNullValues > 0) { - // @ts-ignore this.absoluteValue = sum - // @ts-ignore decoder.absoluteValue = sum } } diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 2885531c..e4fc5e4b 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -14,9 +14,9 @@ export { Text } from "./text" export { Counter } from "./counter" export { Int, Uint, Float64 } from "./numbers" -import { ApiHandler, LowLevelApi, UseApi } from "./low_level_api" -import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "./low_level_api" -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "./low_level_api" +import { ApiHandler, LowLevelApi, UseApi } from "./low_level" +import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "./low_level" +import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "./low_level" export type ChangeOptions = { message?: string, time?: number } @@ -113,18 +113,13 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn(doc: Doc, options: ChangeOptions, callback: ChangeFn(local: Doc, remote: Doc) : Doc { const remoteState = _state(remote) const changes = localState.getChangesAdded(remoteState) localState.applyChanges(changes) - //@ts-ignore - local[HEADS] = heads + Reflect.set(local,HEADS,heads) return rootProxy(localState, true) } @@ -286,8 +278,7 @@ export function applyChanges(doc: Doc, changes: Change[]) : [Doc] { const state = _state(doc) const heads = state.getHeads() state.applyChanges(changes) - //@ts-ignore - doc[HEADS] = heads + 
Reflect.set(doc,HEADS,heads) return [rootProxy(state, true)]; } @@ -351,8 +342,7 @@ export function receiveSyncMessage(doc: Doc, inState: SyncState, message: const state = _state(doc) const heads = state.getHeads() state.receiveSyncMessage(syncState, message) - //@ts-ignore - doc[HEADS] = heads; + Reflect.set(doc,HEADS,heads) const outState = ApiHandler.exportSyncState(syncState) return [rootProxy(state, true), outState, null]; } diff --git a/automerge-js/src/low_level_api.ts b/automerge-js/src/low_level.ts similarity index 100% rename from automerge-js/src/low_level_api.ts rename to automerge-js/src/low_level.ts diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index 38efd7d2..e936af64 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -1,5 +1,5 @@ -import { Automerge, Heads, ObjID } from "./low_level_api" +import { Automerge, Heads, ObjID } from "./low_level" import { Int, Uint, Float64 } from "./numbers" import { Counter, getWriteableCounter } from "./counter" import { Text } from "./text" @@ -98,7 +98,7 @@ function import_value(value) { const MapHandler = { get (target, key) : any { - const { context, objectId, path, readonly, frozen, heads, cache } = target + const { context, objectId, readonly, frozen, heads, cache } = target if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (key === OBJECT_ID) return objectId if (key === READ_ONLY) return readonly @@ -133,27 +133,30 @@ const MapHandler = { throw new RangeError(`Object property "${key}" cannot be modified`) } switch (datatype) { - case "list": + case "list": { const list = context.putObject(objectId, key, []) const proxyList = listProxy(context, list, [ ... path, key ], readonly ); for (let i = 0; i < value.length; i++) { proxyList[i] = value[i] } - break; - case "text": + break + } + case "text": { const text = context.putObject(objectId, key, "", "text") const proxyText = textProxy(context, text, [ ... 
path, key ], readonly ); for (let i = 0; i < value.length; i++) { proxyText[i] = value.get(i) } - break; - case "map": + break + } + case "map": { const map = context.putObject(objectId, key, {}) const proxyMap : any = mapProxy(context, map, [ ... path, key ], readonly ); for (const key in value) { proxyMap[key] = value[key] } break; + } default: context.put(objectId, key, value, datatype) } @@ -161,7 +164,7 @@ const MapHandler = { }, deleteProperty (target, key) { - const { context, objectId, path, readonly, frozen } = target + const { context, objectId, readonly } = target target.cache = {} // reset cache on delete if (readonly) { throw new RangeError(`Object property "${key}" cannot be modified`) @@ -176,7 +179,7 @@ const MapHandler = { }, getOwnPropertyDescriptor (target, key) { - const { context, objectId } = target + // const { context, objectId } = target const value = this.get(target, key) if (typeof value !== 'undefined') { return { @@ -194,10 +197,9 @@ const MapHandler = { const ListHandler = { get (target, index) { - const {context, objectId, path, readonly, frozen, heads } = target + const {context, objectId, readonly, frozen, heads } = target index = parseListIndex(index) - // @ts-ignore - if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } } + if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } } if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (index === OBJECT_ID) return objectId if (index === READ_ONLY) return readonly @@ -249,7 +251,7 @@ const ListHandler = { throw new RangeError(`Object property "${index}" cannot be modified`) } switch (datatype) { - case "list": + case "list": { let list if (index >= context.length(objectId)) { list = context.insertObject(objectId, index, []) @@ -259,7 +261,8 @@ const ListHandler = { const proxyList = listProxy(context, list, [ ... 
path, index ], readonly); proxyList.splice(0,0,...value) break; - case "text": + } + case "text": { let text if (index >= context.length(objectId)) { text = context.insertObject(objectId, index, "", "text") @@ -269,7 +272,8 @@ const ListHandler = { const proxyText = textProxy(context, text, [ ... path, index ], readonly); proxyText.splice(0,0,...value) break; - case "map": + } + case "map": { let map if (index >= context.length(objectId)) { map = context.insertObject(objectId, index, {}) @@ -281,6 +285,7 @@ const ListHandler = { proxyMap[key] = value[key] } break; + } default: if (index >= context.length(objectId)) { context.insert(objectId, index, value, datatype) @@ -311,7 +316,7 @@ const ListHandler = { }, getOwnPropertyDescriptor (target, index) { - const {context, objectId, path, readonly, frozen, heads} = target + const {context, objectId, heads} = target if (index === 'length') return {writable: true, value: context.length(objectId, heads) } if (index === OBJECT_ID) return {configurable: false, enumerable: false, value: objectId} @@ -322,12 +327,12 @@ const ListHandler = { return { configurable: true, enumerable: true, value } }, - getPrototypeOf(target) { return Object.getPrototypeOf([]) }, + getPrototypeOf(target) { return Object.getPrototypeOf(target) }, ownKeys (target) : string[] { - const {context, objectId, heads } = target const keys : string[] = [] // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array // but not uncommenting it causes for (i in list) {} to not enumerate values properly + //const {context, objectId, heads } = target //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) } keys.push("length"); return keys @@ -337,11 +342,10 @@ const ListHandler = { const TextHandler = Object.assign({}, ListHandler, { get (target, index) { // FIXME this is a one line change from ListHandler.get() - const {context, objectId, path, readonly, frozen, heads } = target + const {context, 
objectId, readonly, frozen, heads } = target index = parseListIndex(index) if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } - // @ts-ignore - if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } } + if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } } if (index === OBJECT_ID) return objectId if (index === READ_ONLY) return readonly if (index === FROZEN) return frozen @@ -482,23 +486,26 @@ function listMethods(target) { const values = vals.map((val) => import_value(val)) for (const [value,datatype] of values) { switch (datatype) { - case "list": + case "list": { const list = context.insertObject(objectId, index, []) const proxyList = listProxy(context, list, [ ... path, index ], readonly); proxyList.splice(0,0,...value) break; - case "text": + } + case "text": { const text = context.insertObject(objectId, index, "", "text") const proxyText = textProxy(context, text, [ ... path, index ], readonly); proxyText.splice(0,0,...value) break; - case "map": + } + case "map": { const map = context.insertObject(objectId, index, {}) const proxyMap : any = mapProxy(context, map, [ ... 
path, index ], readonly); for (const key in value) { proxyMap[key] = value[key] } break; + } default: context.insert(objectId, index, value, datatype) } @@ -563,13 +570,13 @@ function listMethods(target) { 'slice', 'some', 'toLocaleString', 'toString']) { methods[method] = (...args) => { const list : any = [] - while (true) { - const value = valueAt(target, list.length) - if (value == undefined) { - break + let value + do { + value = valueAt(target, list.length) + if (value !== undefined) { + list.push(value) } - list.push(value) - } + } while (value !== undefined) return list[method](...args) } @@ -579,7 +586,7 @@ function listMethods(target) { } function textMethods(target) : any { - const {context, objectId, path, readonly, frozen, heads } = target + const {context, objectId, heads } = target const methods : any = { set (index, value) { return this[index] = value diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index e31f979c..2d568e1c 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -1,4 +1,4 @@ -import { Value } from "./low_level_api" +import { Value } from "./low_level" export class Text { elems: Value[] From d2fba6bf048169d2757fd786675ddfe3eac11234 Mon Sep 17 00:00:00 2001 From: Scott Trinh Date: Thu, 19 May 2022 09:13:56 -0400 Subject: [PATCH 391/730] Use an `UnknownObject` type alias --- automerge-js/src/common.ts | 8 +++++--- automerge-js/src/types.ts | 2 ++ 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 automerge-js/src/types.ts diff --git a/automerge-js/src/common.ts b/automerge-js/src/common.ts index f8abe8ea..6fc45c7c 100644 --- a/automerge-js/src/common.ts +++ b/automerge-js/src/common.ts @@ -1,4 +1,6 @@ -export function isObject(obj: any) : boolean { +import { UnknownObject } from './types'; + +export function isObject(obj: unknown) : obj is UnknownObject { return typeof obj === 'object' && obj !== null } @@ -6,9 +8,9 @@ export function isObject(obj: any) : boolean { * Returns a shallow 
copy of the object `obj`. Faster than `Object.assign({}, obj)`. * https://jsperf.com/cloning-large-objects/1 */ -export function copyObject(obj: any) : any { +export function copyObject(obj: T) : T { if (!isObject(obj)) return {} - const copy : any = {} + const copy = {} for (const key of Object.keys(obj)) { copy[key] = obj[key] } diff --git a/automerge-js/src/types.ts b/automerge-js/src/types.ts new file mode 100644 index 00000000..37443332 --- /dev/null +++ b/automerge-js/src/types.ts @@ -0,0 +1,2 @@ +export type UnknownObject = Record; +export type Dictionary = Record; From bd35361354deedbca245120a64534eeb5da69539 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sun, 22 May 2022 13:28:41 -0400 Subject: [PATCH 392/730] fixed typescript errors, pull wasm dep (mostly) out --- automerge-js/package.json | 6 +- automerge-js/src/columnar.ts | 19 +- automerge-js/src/common.ts | 6 +- automerge-js/src/counter.ts | 2 +- automerge-js/src/encoding.ts | 1 + automerge-js/src/index.ts | 56 +++--- automerge-js/src/low_level.ts | 179 +------------------ automerge-js/src/proxies.ts | 54 +++--- automerge-js/src/text.ts | 10 +- automerge-js/src/types.ts | 16 ++ automerge-js/tsconfig.json | 2 +- automerge-wasm/examples/webpack/package.json | 6 +- automerge-wasm/examples/webpack/src/index.js | 7 +- automerge-wasm/index.d.ts | 65 +++++-- automerge-wasm/nodejs-index.js | 2 +- automerge-wasm/web-index.js | 34 +++- 16 files changed, 197 insertions(+), 268 deletions(-) diff --git a/automerge-js/package.json b/automerge-js/package.json index ac6c5c5a..30dc689a 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -43,7 +43,9 @@ "license": "MIT", "scripts": { "lint": "eslint src", - "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc -p config/types.json", + "build": "yarn build-cjs", + "build-cjs": "tsc -p config/cjs.json && tsc -p config/types.json", + "build-mjs": "tsc -p config/mjs.json && tsc -p config/types.json", "test": "ts-mocha -p tsconfig.json 
test/**/*.ts" }, "devDependencies": { @@ -55,10 +57,10 @@ "eslint": "^8.15.0", "mocha": "^10.0.0", "ts-mocha": "^10.0.0", - "automerge-wasm": "^0.1.3", "typescript": "^4.6.4" }, "dependencies": { + "automerge-wasm": "file:../automerge-wasm", "fast-sha256": "^1.3.0", "pako": "^2.0.4", "uuid": "^8.3" diff --git a/automerge-js/src/columnar.ts b/automerge-js/src/columnar.ts index 2560380b..b1776910 100644 --- a/automerge-js/src/columnar.ts +++ b/automerge-js/src/columnar.ts @@ -1,10 +1,20 @@ import * as pako from 'pako' -import { copyObject, parseOpId, equalBytes } from './common' +import { parseOpId, equalBytes } from './common' import { utf8ToString, hexStringToBytes, bytesToHexString, Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder } from './encoding' + +interface Op { + id: string; + action: string; + obj: string; + elemId?: string; + key?: string; + pred: string[]; +} + // Maybe we should be using the platform's built-in hash implementation? // Node has the crypto module: https://nodejs.org/api/crypto.html and browsers have // https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest @@ -133,11 +143,11 @@ function compareParsedOpIds(id1, id2) { function parseAllOpIds(changes, single) { const actors : any = {}, newChanges : any = [] for (let change of changes) { - change = copyObject(change) + change = { ... change } actors[change.actor] = true change.ops = expandMultiOps(change.ops, change.startOp, change.actor) change.ops = change.ops.map(op => { - op = copyObject(op) + op = { ... 
op } if (op.obj !== '_root') op.obj = parseOpId(op.obj) if (op.elemId && op.elemId !== '_head') op.elemId = parseOpId(op.elemId) if (op.child) op.child = parseOpId(op.child) @@ -962,7 +972,7 @@ function groupChangeOps(changes, ops) { changesByActor[change.actor].push(change) } - const opsById = {} + const opsById : { [key:string]: Op } = {} for (const op of ops) { if (op.action === 'del') throw new RangeError('document should not contain del operations') op.pred = opsById[op.id] ? opsById[op.id].pred : [] @@ -981,7 +991,6 @@ function groupChangeOps(changes, ops) { delete op.succ } for (const op of Object.values(opsById)) { - // @ts-ignore if (op.action === 'del') ops.push(op) } diff --git a/automerge-js/src/common.ts b/automerge-js/src/common.ts index 6fc45c7c..9b5a7299 100644 --- a/automerge-js/src/common.ts +++ b/automerge-js/src/common.ts @@ -8,14 +8,16 @@ export function isObject(obj: unknown) : obj is UnknownObject { * Returns a shallow copy of the object `obj`. Faster than `Object.assign({}, obj)`. * https://jsperf.com/cloning-large-objects/1 */ +/* export function copyObject(obj: T) : T { - if (!isObject(obj)) return {} - const copy = {} + if (!isObject(obj)) throw RangeError(`Cannot copy object '${obj}'`) //return {} + const copy : UnknownObject = {} for (const key of Object.keys(obj)) { copy[key] = obj[key] } return copy } +*/ /** * Takes a string in the form that is used to identify operations (a counter concatenated diff --git a/automerge-js/src/counter.ts b/automerge-js/src/counter.ts index 34ce211b..97372381 100644 --- a/automerge-js/src/counter.ts +++ b/automerge-js/src/counter.ts @@ -1,4 +1,4 @@ -import { Automerge, ObjID, Prop } from "./low_level" +import { Automerge, ObjID, Prop } from "./types" /** * The most basic CRDT: an integer value that can be changed only by * incrementing and decrementing. 
Since addition of integers is commutative, diff --git a/automerge-js/src/encoding.ts b/automerge-js/src/encoding.ts index 773c3288..dac447ec 100644 --- a/automerge-js/src/encoding.ts +++ b/automerge-js/src/encoding.ts @@ -287,6 +287,7 @@ export class Encoder { * the buffer constructed by this Encoder. */ finish() { + return } } diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index e4fc5e4b..e20f32a2 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -1,24 +1,20 @@ -import { uuid } from './uuid' - export { uuid } from './uuid' import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" import { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" -import { Counter } from "./counter" -import { Text } from "./text" -import { Int, Uint, Float64 } from "./numbers" + import { isObject } from "./common" -export { Text } from "./text" -export { Counter } from "./counter" -export { Int, Uint, Float64 } from "./numbers" +import { Text, Counter } from "./types" +export { Text, Counter, Int, Uint, Float64 } from "./types" import { ApiHandler, LowLevelApi, UseApi } from "./low_level" -import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "./low_level" -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "./low_level" -export type ChangeOptions = { message?: string, time?: number } +import { ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "./types" +import { SyncState, SyncMessage, DecodedSyncMessage, AutomergeValue } from "./types" + +export type ChangeOptions = { message?: string, time?: number } export type Doc = { readonly [P in keyof T]: Doc } @@ -78,7 +74,7 @@ export function from(initialState: T | Doc, actor?: ActorId): Doc { return change(init(actor), (d) => Object.assign(d, initialState)) } -export function change(doc: Doc, options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc { +export function change(doc: Doc, 
options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc { if (typeof options === 'function') { return _change(doc, {}, options) } else if (typeof callback === 'function') { @@ -91,7 +87,7 @@ export function change(doc: Doc, options: string | ChangeOptions | Chan } } -function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): Doc { +function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): Doc { if (typeof callback !== "function") { @@ -134,7 +130,7 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn(doc: Doc, options: ChangeOptions) { +export function emptyChange(doc: Doc, options: ChangeOptions) { if (options === undefined) { options = {} } @@ -190,22 +186,20 @@ function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : any { if (values.length <= 1) { return } - const result = {} - for (const conflict of values) { - const datatype = conflict[0] - const value = conflict[1] - switch (datatype) { + const result : { [key: ObjID]: AutomergeValue } = {} + for (const fullVal of values) { + //const datatype = fullVal[0] + //const value = fullVal[1] + //switch (datatype) { + switch (fullVal[0]) { case "map": - //@ts-ignore - result[value] = mapProxy(context, value, [ prop ], true) + result[fullVal[1]] = mapProxy(context, fullVal[1], [ prop ], true) break; case "list": - //@ts-ignore - result[value] = listProxy(context, value, [ prop ], true) + result[fullVal[1]] = listProxy(context, fullVal[1], [ prop ], true) break; case "text": - //@ts-ignore - result[value] = textProxy(context, value, [ prop ], true) + result[fullVal[1]] = textProxy(context, fullVal[1], [ prop ], true) break; //case "table": //case "cursor": @@ -216,19 +210,16 @@ function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : any { case "boolean": case "bytes": case "null": - //@ts-ignore - result[conflict[2]] = value + result[fullVal[2]] = fullVal[1] break; case "counter": - //@ts-ignore - result[conflict[2]] = new 
Counter(value) + result[fullVal[2]] = new Counter(fullVal[1]) break; case "timestamp": - //@ts-ignore - result[conflict[2]] = new Date(value) + result[fullVal[2]] = new Date(fullVal[1]) break; default: - throw RangeError(`datatype ${datatype} unimplemented`) + throw RangeError(`datatype ${fullVal[0]} unimplemented`) } } return result @@ -394,7 +385,6 @@ export function toJS(doc: any) : any { return doc.map((a) => toJS(a)) } if (doc instanceof Text) { - //@ts-ignore return doc.map((a: any) => toJS(a)) } const tmp : any = {} diff --git a/automerge-js/src/low_level.ts b/automerge-js/src/low_level.ts index 27c18c56..5a1277fd 100644 --- a/automerge-js/src/low_level.ts +++ b/automerge-js/src/low_level.ts @@ -1,103 +1,7 @@ -export type Actor = string; -export type ObjID = string; -export type Change = Uint8Array; -export type SyncMessage = Uint8Array; -export type Prop = string | number; -export type Hash = string; -export type Heads = Hash[]; -export type Value = string | number | boolean | null | Date | Uint8Array -export type ObjType = string | Array | Object -export type FullValue = - ["str", string] | - ["int", number] | - ["uint", number] | - ["f64", number] | - ["boolean", boolean] | - ["timestamp", Date] | - ["counter", number] | - ["bytes", Uint8Array] | - ["null", Uint8Array] | - ["map", ObjID] | - ["list", ObjID] | - ["text", ObjID] | - ["table", ObjID] - -export enum ObjTypeName { - list = "list", - map = "map", - table = "table", - text = "text", -} - -export type Datatype = - "boolean" | - "str" | - "int" | - "uint" | - "f64" | - "null" | - "timestamp" | - "counter" | - "bytes" | - "map" | - "text" | - "list"; - -export type SyncHave = { - lastSync: Heads, - bloom: Uint8Array, -} - -export type DecodedSyncMessage = { - heads: Heads, - need: Heads, - have: SyncHave[] - changes: Change[] -} - -export type DecodedChange = { - actor: Actor, - seq: number - startOp: number, - time: number, - message: string | null, - deps: Heads, - hash: Hash, - ops: Op[] -} - 
-export type Op = { - action: string, - obj: ObjID, - key: string, - value?: string | number | boolean, - datatype?: string, - pred: string[], -} - -export type Patch = { - obj: ObjID - action: 'assign' | 'insert' | 'delete' - key: Prop - value: Value - datatype: Datatype - conflict: boolean -} - -export interface LowLevelApi { - create(actor?: Actor): Automerge; - load(data: Uint8Array, actor?: Actor): Automerge; - encodeChange(change: DecodedChange): Change; - decodeChange(change: Change): DecodedChange; - initSyncState(): SyncState; - encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; - decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; - encodeSyncState(state: SyncState): Uint8Array; - decodeSyncState(data: Uint8Array): SyncState; - exportSyncState(state: SyncState): JsSyncState; - importSyncState(state: JsSyncState): SyncState; -} +import { Automerge, Change, DecodedChange, Actor, SyncState, SyncMessage, JsSyncState, DecodedSyncMessage } from "automerge-wasm" +import { API as LowLevelApi } from "automerge-wasm" +export { API as LowLevelApi } from "automerge-wasm" export function UseApi(api: LowLevelApi) { for (const k in api) { @@ -105,6 +9,7 @@ export function UseApi(api: LowLevelApi) { } } +/* eslint-disable */ export const ApiHandler : LowLevelApi = { create(actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, load(data: Uint8Array, actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, @@ -118,78 +23,4 @@ export const ApiHandler : LowLevelApi = { exportSyncState(state: SyncState): JsSyncState { throw new RangeError("Automerge.use() not called") }, importSyncState(state: JsSyncState): SyncState { throw new RangeError("Automerge.use() not called") }, } - -export interface Automerge { - // change state - put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined; - putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID; - insert(obj: ObjID, index: number, value: Value, 
datatype?: Datatype): undefined; - insertObject(obj: ObjID, index: number, value: ObjType): ObjID; - push(obj: ObjID, value: Value, datatype?: Datatype): undefined; - pushObject(obj: ObjID, value: ObjType): ObjID; - splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; - increment(obj: ObjID, prop: Prop, value: number): void; - delete(obj: ObjID, prop: Prop): void; - - // returns a single value - if there is a conflict return the winner - get(obj: ObjID, prop: any, heads?: Heads): FullValue | null; - // return all values in case of a conflict - getAll(obj: ObjID, arg: any, heads?: Heads): FullValue[]; - keys(obj: ObjID, heads?: Heads): string[]; - text(obj: ObjID, heads?: Heads): string; - length(obj: ObjID, heads?: Heads): number; - materialize(obj?: ObjID, heads?: Heads): any; - - // transactions - commit(message?: string, time?: number): Hash; - merge(other: Automerge): Heads; - getActorId(): Actor; - pendingOps(): number; - rollback(): number; - - // patches - enablePatches(enable: boolean): void; - popPatches(): Patch[]; - - // save and load to local store - save(): Uint8Array; - saveIncremental(): Uint8Array; - loadIncremental(data: Uint8Array): number; - - // sync over network - receiveSyncMessage(state: SyncState, message: SyncMessage): void; - generateSyncMessage(state: SyncState): SyncMessage | null; - - // low level change functions - applyChanges(changes: Change[]): void; - getChanges(have_deps: Heads): Change[]; - getChangeByHash(hash: Hash): Change | null; - getChangesAdded(other: Automerge): Change[]; - getHeads(): Heads; - getLastLocalChange(): Change; - getMissingDeps(heads?: Heads): Heads; - - // memory management - free(): void; - clone(actor?: string): Automerge; - fork(actor?: string): Automerge; - forkAt(heads: Heads, actor?: string): Automerge; - - // dump internal state to console.log - dump(): void; - - // dump internal state to a JS object - toJS(): any; -} - -export interface JsSyncState { - 
lastSentHeads: any; - sentHashes: any; - readonly sharedHeads: any; -} - -export interface SyncState extends JsSyncState { - free(): void; - clone(): SyncState; -} - +/* eslint-enable */ diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index e936af64..05ac2873 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -1,9 +1,10 @@ -import { Automerge, Heads, ObjID } from "./low_level" +import { Automerge, Heads, ObjID } from "./types" import { Int, Uint, Float64 } from "./numbers" import { Counter, getWriteableCounter } from "./counter" import { Text } from "./text" import { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } from "./constants" +import { AutomergeValue, ScalarValue, MapValue, ListValue, TextValue, Prop } from "./types" function parseListIndex(key) { if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) @@ -17,7 +18,7 @@ function parseListIndex(key) { return key } -function valueAt(target, prop) : any { +function valueAt(target, prop: Prop) : AutomergeValue | undefined { const { context, objectId, path, readonly, heads} = target const value = context.get(objectId, prop, heads) if (value === undefined) { @@ -97,7 +98,7 @@ function import_value(value) { } const MapHandler = { - get (target, key) : any { + get (target, key) : AutomergeValue { const { context, objectId, readonly, frozen, heads, cache } = target if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (key === OBJECT_ID) return objectId @@ -151,7 +152,7 @@ const MapHandler = { } case "map": { const map = context.putObject(objectId, key, {}) - const proxyMap : any = mapProxy(context, map, [ ... path, key ], readonly ); + const proxyMap = mapProxy(context, map, [ ... path, key ], readonly ); for (const key in value) { proxyMap[key] = value[key] } @@ -280,7 +281,7 @@ const ListHandler = { } else { map = context.putObject(objectId, index, {}) } - const proxyMap : any = mapProxy(context, map, [ ... 
path, index ], readonly); + const proxyMap = mapProxy(context, map, [ ... path, index ], readonly); for (const key in value) { proxyMap[key] = value[key] } @@ -328,7 +329,7 @@ const ListHandler = { }, getPrototypeOf(target) { return Object.getPrototypeOf(target) }, - ownKeys (target) : string[] { + ownKeys (/*target*/) : string[] { const keys : string[] = [] // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array // but not uncommenting it causes for (i in list) {} to not enumerate values properly @@ -369,29 +370,30 @@ const TextHandler = Object.assign({}, ListHandler, { return textMethods(target)[index] || listMethods(target)[index] } }, - getPrototypeOf(target) { + getPrototypeOf(/*target*/) { return Object.getPrototypeOf(new Text()) }, }) -export function mapProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : T { +export function mapProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : MapValue { return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler) } -export function listProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : Array { +export function listProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : ListValue { const target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, ListHandler) } -export function textProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : Array { +export function textProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : TextValue { const target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, TextHandler) } export function 
rootProxy(context: Automerge, readonly?: boolean) : T { - return mapProxy(context, "_root", [], !!readonly) + /* eslint-disable-next-line */ + return mapProxy(context, "_root", [], !!readonly) } function listMethods(target) { @@ -406,7 +408,7 @@ function listMethods(target) { return this }, - fill(val: any, start: number, end: number) { + fill(val: ScalarValue, start: number, end: number) { // FIXME needs tests const [value, datatype] = import_value(val) start = parseListIndex(start || 0) @@ -417,7 +419,7 @@ function listMethods(target) { return this }, - indexOf(o, start = 0) { + indexOf(/*o, start = 0*/) { // FIXME /* const id = o[OBJECT_ID] @@ -477,10 +479,12 @@ function listMethods(target) { if (readonly) { throw new RangeError("Sequence object cannot be modified outside of a change block") } - const result : any = [] + const result : AutomergeValue[] = [] for (let i = 0; i < del; i++) { const value = valueAt(target, index) - result.push(value) + if (value !== undefined) { + result.push(value) + } context.delete(objectId, index) } const values = vals.map((val) => import_value(val)) @@ -500,7 +504,7 @@ function listMethods(target) { } case "map": { const map = context.insertObject(objectId, index, {}) - const proxyMap : any = mapProxy(context, map, [ ... path, index ], readonly); + const proxyMap = mapProxy(context, map, [ ... 
path, index ], readonly); for (const key in value) { proxyMap[key] = value[key] } @@ -569,7 +573,7 @@ function listMethods(target) { 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight', 'slice', 'some', 'toLocaleString', 'toString']) { methods[method] = (...args) => { - const list : any = [] + const list : AutomergeValue = [] let value do { value = valueAt(target, list.length) @@ -585,22 +589,22 @@ function listMethods(target) { return methods } -function textMethods(target) : any { +function textMethods(target) { const {context, objectId, heads } = target - const methods : any = { + const methods = { set (index, value) { return this[index] = value }, - get (index) { + get (index) : AutomergeValue { return this[index] }, - toString () { + toString () : string { return context.text(objectId, heads).replace(//g,'') }, - toSpans () : any[] { - const spans : any[] = [] + toSpans () : AutomergeValue[] { + const spans : AutomergeValue[] = [] let chars = '' - const length = this.length + const length = context.length(objectId) for (let i = 0; i < length; i++) { const value = this[i] if (typeof value === 'string') { @@ -618,7 +622,7 @@ function textMethods(target) : any { } return spans }, - toJSON () { + toJSON () : string { return this.toString() } } diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index 2d568e1c..c58c1efa 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -1,4 +1,4 @@ -import { Value } from "./low_level" +import { Value } from "./types" export class Text { elems: Value[] @@ -114,11 +114,17 @@ export class Text { deleteAt(index, numDelete = 1) { this.elems.splice(index, numDelete) } + + map(callback, thisArg?) 
{ + this.elems.map(callback, thisArg) + } + + } // Read-only methods that can delegate to the JavaScript built-in array for (const method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes', - 'indexOf', 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight', + 'indexOf', 'join', 'lastIndexOf', 'reduce', 'reduceRight', 'slice', 'some', 'toLocaleString']) { Text.prototype[method] = function (...args) { const array = [...this] diff --git a/automerge-js/src/types.ts b/automerge-js/src/types.ts index 37443332..609c71e7 100644 --- a/automerge-js/src/types.ts +++ b/automerge-js/src/types.ts @@ -1,2 +1,18 @@ + +export { Actor as ActorId, Value, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "automerge-wasm" +export { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm" + +export { Text } from "./text" +export { Counter } from "./counter" +export { Int, Uint, Float64 } from "./numbers" + export type UnknownObject = Record; export type Dictionary = Record; + +import { Counter } from "./counter" + +export type AutomergeValue = ScalarValue | { [key: string]: AutomergeValue } | Array +export type MapValue = { [key: string]: AutomergeValue } +export type ListValue = Array +export type TextValue = Array +export type ScalarValue = string | number | null | boolean | Date | Counter | Uint8Array diff --git a/automerge-js/tsconfig.json b/automerge-js/tsconfig.json index b0e2620c..26fa7e8f 100644 --- a/automerge-js/tsconfig.json +++ b/automerge-js/tsconfig.json @@ -2,7 +2,7 @@ "compilerOptions": { "target": "es2016", "sourceMap": false, - "declaration": false, + "declaration": true, "resolveJsonModule": true, "module": "commonjs", "moduleResolution": "node", diff --git a/automerge-wasm/examples/webpack/package.json b/automerge-wasm/examples/webpack/package.json index 5c90319c..2ba64736 100644 --- a/automerge-wasm/examples/webpack/package.json +++ b/automerge-wasm/examples/webpack/package.json @@ -10,12 +10,12 @@ }, 
"author": "", "dependencies": { - "automerge-wasm": "^0.1.2" + "automerge-wasm": "file:automerge-wasm-0.1.3.tgz" }, "devDependencies": { + "serve": "^13.0.2", "webpack": "^5.72.1", "webpack-cli": "^4.9.2", - "webpack-node-externals": "^3.0.0", - "serve": "^13.0.2" + "webpack-node-externals": "^3.0.0" } } diff --git a/automerge-wasm/examples/webpack/src/index.js b/automerge-wasm/examples/webpack/src/index.js index 8394af50..bab417f5 100644 --- a/automerge-wasm/examples/webpack/src/index.js +++ b/automerge-wasm/examples/webpack/src/index.js @@ -2,10 +2,13 @@ import init, { create } from "automerge-wasm" // hello world code that will run correctly on web or node -init().then(_ => { - const doc = create() +init().then((Automerge) => { + console.log("Automerge=", Automerge) + console.log("create=", create) + const doc = Automerge.create() doc.put("/", "hello", "world") const result = doc.materialize("/") + //const result = xxx if (typeof document !== 'undefined') { // browser diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index e4701a62..ba5cf07d 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -7,7 +7,8 @@ export type Prop = string | number; export type Hash = string; export type Heads = Hash[]; export type Value = string | number | boolean | null | Date | Uint8Array -export type ObjType = string | Array | Object +export type MaterializeValue = Record | Array | Value +export type ObjType = string | Array | Record export type FullValue = ["str", string] | ["int", number] | @@ -17,12 +18,27 @@ export type FullValue = ["timestamp", Date] | ["counter", number] | ["bytes", Uint8Array] | - ["null", Uint8Array] | + ["null", null] | ["map", ObjID] | ["list", ObjID] | ["text", ObjID] | ["table", ObjID] +export type FullValueWithId = + ["str", string, ObjID ] | + ["int", number, ObjID ] | + ["uint", number, ObjID ] | + ["f64", number, ObjID ] | + ["boolean", boolean, ObjID ] | + ["timestamp", Date, ObjID ] | + ["counter", number, 
ObjID ] | + ["bytes", Uint8Array, ObjID ] | + ["null", null, ObjID ] | + ["map", ObjID ] | + ["list", ObjID] | + ["text", ObjID] | + ["table", ObjID] + export enum ObjTypeName { list = "list", map = "map", @@ -44,7 +60,7 @@ export type Datatype = "text" | "list"; -export type SyncHave { +export type SyncHave = { lastSync: Heads, bloom: Uint8Array, } @@ -97,26 +113,40 @@ export function decodeSyncState(data: Uint8Array): SyncState; export function exportSyncState(state: SyncState): JsSyncState; export function importSyncState(state: JsSyncState): SyncState; +export class API { + create(actor?: Actor): Automerge; + load(data: Uint8Array, actor?: Actor): Automerge; + encodeChange(change: DecodedChange): Change; + decodeChange(change: Change): DecodedChange; + initSyncState(): SyncState; + encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; + decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; + encodeSyncState(state: SyncState): Uint8Array; + decodeSyncState(data: Uint8Array): SyncState; + exportSyncState(state: SyncState): JsSyncState; + importSyncState(state: JsSyncState): SyncState; +} + export class Automerge { // change state - put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined; + put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): void; putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID; - insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): undefined; + insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): void; insertObject(obj: ObjID, index: number, value: ObjType): ObjID; - push(obj: ObjID, value: Value, datatype?: Datatype): undefined; + push(obj: ObjID, value: Value, datatype?: Datatype): void; pushObject(obj: ObjID, value: ObjType): ObjID; splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; increment(obj: ObjID, prop: Prop, value: number): void; delete(obj: ObjID, prop: Prop): void; // returns a single value - if there 
is a conflict return the winner - get(obj: ObjID, prop: any, heads?: Heads): FullValue | null; + get(obj: ObjID, prop: Prop, heads?: Heads): FullValue | null; // return all values in case of a conflict - getAll(obj: ObjID, arg: any, heads?: Heads): FullValue[]; + getAll(obj: ObjID, arg: Prop, heads?: Heads): FullValueWithId[]; keys(obj: ObjID, heads?: Heads): string[]; text(obj: ObjID, heads?: Heads): string; length(obj: ObjID, heads?: Heads): number; - materialize(obj?: ObjID, heads?: Heads): any; + materialize(obj?: ObjID, heads?: Heads): MaterializeValue; // transactions commit(message?: string, time?: number): Hash; @@ -155,20 +185,23 @@ export class Automerge { // dump internal state to console.log dump(): void; - - // dump internal state to a JS object - toJS(): any; } export class JsSyncState { + sharedHeads: Heads; + lastSentHeads: Heads; + theirHeads: Heads | undefined; + theirHeed: Heads | undefined; + theirHave: SyncHave[] | undefined; + sentHashes: Heads; } export class SyncState { free(): void; clone(): SyncState; - lastSentHeads: any; - sentHashes: any; - readonly sharedHeads: any; + lastSentHeads: Heads; + sentHashes: Heads; + readonly sharedHeads: Heads; } -export default function init (): Promise; +export default function init (): Promise; diff --git a/automerge-wasm/nodejs-index.js b/automerge-wasm/nodejs-index.js index a8b9b1cd..58eddd76 100644 --- a/automerge-wasm/nodejs-index.js +++ b/automerge-wasm/nodejs-index.js @@ -3,4 +3,4 @@ module.exports = wasm module.exports.load = module.exports.loadDoc delete module.exports.loadDoc Object.defineProperty(module.exports, "__esModule", { value: true }); -module.exports.default = () => (new Promise((resolve,reject) => { resolve() })) +module.exports.default = () => (new Promise((resolve,reject) => { resolve(module.exports) })) diff --git a/automerge-wasm/web-index.js b/automerge-wasm/web-index.js index 80057798..1ce280b3 100644 --- a/automerge-wasm/web-index.js +++ b/automerge-wasm/web-index.js @@ -11,5 
+11,37 @@ export { exportSyncState, importSyncState, } from "./bindgen.js" +import { + loadDoc as load, + create, + encodeChange, + decodeChange, + initSyncState, + encodeSyncMessage, + decodeSyncMessage, + encodeSyncState, + decodeSyncState, + exportSyncState, + importSyncState, +} from "./bindgen.js" + +let api = { + load, + create, + encodeChange, + decodeChange, + initSyncState, + encodeSyncMessage, + decodeSyncMessage, + encodeSyncState, + decodeSyncState, + exportSyncState, + importSyncState +} + import init from "./bindgen.js" -export default init; +export default function() { + return new Promise((resolve,reject) => init().then(() => { + resolve({ ... api, load, create, foo: "bar" }) + })) +} From d638a41a6c960a43c2568fb884ae1dd449d2a69c Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sun, 22 May 2022 13:41:01 -0400 Subject: [PATCH 393/730] record type --- automerge-wasm/index.d.ts | 2 +- automerge-wasm/package.json | 9 +++++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index ba5cf07d..ff94d279 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -8,7 +8,7 @@ export type Hash = string; export type Heads = Hash[]; export type Value = string | number | boolean | null | Date | Uint8Array export type MaterializeValue = Record | Array | Value -export type ObjType = string | Array | Record +export type ObjType = string | Array | Record export type FullValue = ["str", string] | ["int", number] | diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 7029688c..b214fa81 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -26,16 +26,20 @@ "module": "./web/index.js", "main": "./nodejs/index.js", "scripts": { + "lint": "eslint test", "build": "cross-env PROFILE=dev TARGET=nodejs yarn target", "release": "cross-env PROFILE=release yarn buildall", "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=web yarn 
target", "target": "rimraf ./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET && cp $TARGET-index.js $TARGET/index.js", "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, - "dependencies": {}, + "dependencies": { + }, "devDependencies": { "@types/expect": "^24.3.0", "@types/jest": "^27.4.0", + "@typescript-eslint/eslint-plugin": "^5.25.0", + "@typescript-eslint/parser": "^5.25.0", "@types/mocha": "^9.1.0", "@types/node": "^17.0.13", "cross-env": "^7.0.3", @@ -44,6 +48,7 @@ "pako": "^2.0.4", "rimraf": "^3.0.2", "ts-mocha": "^9.0.2", - "typescript": "^4.5.5" + "typescript": "^4.5.5", + "eslint": "^8.15.0" } } From 07f5678a2bc578e10f7c6e506742a8fdb8c8b090 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sun, 22 May 2022 13:54:59 -0400 Subject: [PATCH 394/730] linting in wasm --- automerge-wasm/.eslintignore | 3 + automerge-wasm/.eslintrc.cjs | 11 + automerge-wasm/index.d.ts | 4 +- automerge-wasm/package.json | 12 +- automerge-wasm/test/readme.ts | 80 ++++---- automerge-wasm/test/test.ts | 373 +++++++++++++++++----------------- automerge-wasm/tsconfig.json | 3 +- 7 files changed, 251 insertions(+), 235 deletions(-) create mode 100644 automerge-wasm/.eslintignore create mode 100644 automerge-wasm/.eslintrc.cjs diff --git a/automerge-wasm/.eslintignore b/automerge-wasm/.eslintignore new file mode 100644 index 00000000..7cd573e3 --- /dev/null +++ b/automerge-wasm/.eslintignore @@ -0,0 +1,3 @@ +web +nodejs +examples diff --git a/automerge-wasm/.eslintrc.cjs b/automerge-wasm/.eslintrc.cjs new file mode 100644 index 00000000..80e08d55 --- /dev/null +++ b/automerge-wasm/.eslintrc.cjs @@ -0,0 +1,11 @@ +module.exports = { + root: true, + parser: '@typescript-eslint/parser', + plugins: [ + '@typescript-eslint', + ], + extends: [ + 'eslint:recommended', + 'plugin:@typescript-eslint/recommended', + ], +}; diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index ff94d279..cfecd081 100644 --- 
a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -7,8 +7,8 @@ export type Prop = string | number; export type Hash = string; export type Heads = Hash[]; export type Value = string | number | boolean | null | Date | Uint8Array -export type MaterializeValue = Record | Array | Value -export type ObjType = string | Array | Record +export type MaterializeValue = { [key:string]: MaterializeValue } | Array | Value +export type ObjType = string | Array | { [key: string]: ObjType | Value } export type FullValue = ["str", string] | ["int", number] | diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index b214fa81..f1077fe2 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -26,29 +26,27 @@ "module": "./web/index.js", "main": "./nodejs/index.js", "scripts": { - "lint": "eslint test", + "lint": "eslint test/*.ts", "build": "cross-env PROFILE=dev TARGET=nodejs yarn target", "release": "cross-env PROFILE=release yarn buildall", "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=web yarn target", "target": "rimraf ./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET && cp $TARGET-index.js $TARGET/index.js", "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, - "dependencies": { - }, "devDependencies": { "@types/expect": "^24.3.0", "@types/jest": "^27.4.0", - "@typescript-eslint/eslint-plugin": "^5.25.0", - "@typescript-eslint/parser": "^5.25.0", "@types/mocha": "^9.1.0", "@types/node": "^17.0.13", + "@typescript-eslint/eslint-plugin": "^5.25.0", + "@typescript-eslint/parser": "^5.25.0", "cross-env": "^7.0.3", + "eslint": "^8.16.0", "fast-sha256": "^1.3.0", "mocha": "^9.1.3", "pako": "^2.0.4", "rimraf": "^3.0.2", "ts-mocha": "^9.0.2", - "typescript": "^4.5.5", - "eslint": "^8.15.0" + "typescript": "^4.6.4" } } diff --git a/automerge-wasm/test/readme.ts b/automerge-wasm/test/readme.ts index 5b7ddaf2..d06df0fb 100644 --- 
a/automerge-wasm/test/readme.ts +++ b/automerge-wasm/test/readme.ts @@ -7,18 +7,18 @@ import init, { create, load } from '..' describe('Automerge', () => { describe('Readme Examples', () => { it('Using the Library and Creating a Document (1)', () => { - let doc = create() + const doc = create() doc.free() }) it('Using the Library and Creating a Document (2)', (done) => { init().then((_:any) => { - let doc = create() + const doc = create() doc.free() done() }) }) it('Automerge Scalar Types (1)', () => { - let doc = create() + const doc = create() doc.put("/", "prop1", 100) // int doc.put("/", "prop2", 3.14) // f64 doc.put("/", "prop3", "hello world") @@ -40,7 +40,7 @@ describe('Automerge', () => { doc.free() }) it('Automerge Scalar Types (2)', () => { - let doc = create() + const doc = create() doc.put("/", "prop1", 100, "int") doc.put("/", "prop2", 100, "uint") doc.put("/", "prop3", 100.5, "f64") @@ -54,37 +54,37 @@ describe('Automerge', () => { doc.free() }) it('Automerge Object Types (1)', () => { - let doc = create() + const doc = create() // you can create an object by passing in the inital state - if blank pass in `{}` // the return value is the Object Id // these functions all return an object id - let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) - let token = doc.putObject("/", "tokens", {}) + const config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) + const token = doc.putObject("/", "tokens", {}) // lists can be made with javascript arrays - let birds = doc.putObject("/", "birds", ["bluejay", "penguin", "puffin"]) - let bots = doc.putObject("/", "bots", []) + const birds = doc.putObject("/", "birds", ["bluejay", "penguin", "puffin"]) + const bots = doc.putObject("/", "bots", []) // text is initialized with a string - let notes = doc.putObject("/", "notes", "Hello world!") + const notes = doc.putObject("/", "notes", "Hello world!") doc.free() }) it('Automerge 
Object Types (2)', () => { - let doc = create() + const doc = create() - let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) + const config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) doc.put(config, "align", "right") // Anywhere Object Ids are being used a path can also be used. // The following two statements are equivalent: - let id = doc.get("/", "config") + const id = doc.get("/", "config") if (id && id[0] === 'map') { doc.put(id[1], "align", "right") } @@ -98,14 +98,14 @@ describe('Automerge', () => { doc.free() }) it('Maps (1)', () => { - let doc = create() - let mymap = doc.putObject("_root", "mymap", { foo: "bar"}) + const doc = create() + const mymap = doc.putObject("_root", "mymap", { foo: "bar"}) // make a new map with the foo key doc.put(mymap, "bytes", new Uint8Array([1,2,3])) // assign a byte array to key `bytes` of the mymap object - let submap = doc.putObject(mymap, "sub", {}) + const submap = doc.putObject(mymap, "sub", {}) // make a new empty object and assign it to the key `sub` of mymap assert.deepEqual(doc.keys(mymap),["bytes","foo","sub"]) @@ -114,8 +114,8 @@ describe('Automerge', () => { doc.free() }) it('Lists (1)', () => { - let doc = create() - let items = doc.putObject("_root", "items", [10,"box"]) + const doc = create() + const items = doc.putObject("_root", "items", [10,"box"]) // init a new list with two elements doc.push(items, true) // push `true` to the end of the list doc.putObject(items, 0, { hello: "world" }) // overwrite the value 10 with an object with a key and value @@ -130,13 +130,13 @@ describe('Automerge', () => { doc.free() }) it('Text (1)', () => { - let doc = create("aaaaaa") - let notes = doc.putObject("_root", "notes", "Hello world") + const doc = create("aaaaaa") + const notes = doc.putObject("_root", "notes", "Hello world") doc.splice(notes, 6, 5, "everyone") assert.deepEqual(doc.text(notes), "Hello everyone") - let obj = 
doc.insertObject(notes, 6, { hi: "there" }) + const obj = doc.insertObject(notes, 6, { hi: "there" }) assert.deepEqual(doc.text(notes), "Hello \ufffceveryone") assert.deepEqual(doc.get(notes, 6), ["map", obj]) @@ -145,15 +145,15 @@ describe('Automerge', () => { doc.free() }) it('Querying Data (1)', () => { - let doc1 = create("aabbcc") + const doc1 = create("aabbcc") doc1.put("_root", "key1", "val1") - let key2 = doc1.putObject("_root", "key2", []) + const key2 = doc1.putObject("_root", "key2", []) assert.deepEqual(doc1.get("_root", "key1"), ["str", "val1"]) assert.deepEqual(doc1.get("_root", "key2"), ["list", "2@aabbcc"]) assert.deepEqual(doc1.keys("_root"), ["key1", "key2"]) - let doc2 = doc1.fork("ffaaff") + const doc2 = doc1.fork("ffaaff") // set a value concurrently doc1.put("_root","key3","doc1val") @@ -167,11 +167,11 @@ describe('Automerge', () => { doc1.free(); doc2.free() }) it('Counters (1)', () => { - let doc1 = create("aaaaaa") + const doc1 = create("aaaaaa") doc1.put("_root", "number", 0) doc1.put("_root", "total", 0, "counter") - let doc2 = doc1.fork("bbbbbb") + const doc2 = doc1.fork("bbbbbb") doc2.put("_root", "number", 10) doc2.increment("_root", "total", 11) @@ -185,7 +185,7 @@ describe('Automerge', () => { doc1.free(); doc2.free() }) it('Transactions (1)', () => { - let doc = create() + const doc = create() doc.put("_root", "key", "val1") @@ -209,13 +209,13 @@ describe('Automerge', () => { doc.free() }) it('Viewing Old Versions of the Document (1)', () => { - let doc = create() + const doc = create() doc.put("_root", "key", "val1") - let heads1 = doc.getHeads() + const heads1 = doc.getHeads() doc.put("_root", "key", "val2") - let heads2 = doc.getHeads() + const heads2 = doc.getHeads() doc.put("_root", "key", "val3") @@ -227,10 +227,10 @@ describe('Automerge', () => { doc.free() }) it('Forking And Merging (1)', () => { - let doc1 = create() + const doc1 = create() doc1.put("_root", "key1", "val1") - let doc2 = doc1.fork() + const doc2 = 
doc1.fork() doc1.put("_root", "key2", "val2") doc2.put("_root", "key3", "val3") @@ -243,31 +243,31 @@ describe('Automerge', () => { doc1.free(); doc2.free() }) it('Saving And Loading (1)', () => { - let doc1 = create() + const doc1 = create() doc1.put("_root", "key1", "value1") - let save1 = doc1.save() + const save1 = doc1.save() - let doc2 = load(save1) + const doc2 = load(save1) doc2.materialize("_root") // returns { key1: "value1" } doc1.put("_root", "key2", "value2") - let saveIncremental = doc1.saveIncremental() + const saveIncremental = doc1.saveIncremental() - let save2 = doc1.save() + const save2 = doc1.save() - let save3 = new Uint8Array([... save1, ... saveIncremental]) + const save3 = new Uint8Array([... save1, ... saveIncremental]) // save2 has fewer bytes than save3 but contains the same ops doc2.loadIncremental(saveIncremental) - let doc3 = load(save2) + const doc3 = load(save2) - let doc4 = load(save3) + const doc4 = load(save3) assert.deepEqual(doc1.materialize("_root"), { key1: "value1", key2: "value2" }) assert.deepEqual(doc2.materialize("_root"), { key1: "value1", key2: "value2" }) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index e02dde26..ce04d930 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -33,29 +33,29 @@ describe('Automerge', () => { }) it('should create, clone and free', () => { - let doc1 = create() - let doc2 = doc1.clone() + const doc1 = create() + const doc2 = doc1.clone() doc1.free() doc2.free() }) it('should be able to start and commit', () => { - let doc = create() + const doc = create() doc.commit() doc.free() }) it('getting a nonexistant prop does not throw an error', () => { - let doc = create() - let root = "_root" - let result = doc.get(root,"hello") + const doc = create() + const root = "_root" + const result = doc.get(root,"hello") assert.deepEqual(result,undefined) doc.free() }) it('should be able to set and get a simple value', () => { - let doc : Automerge = 
create("aabbcc") - let root = "_root" + const doc : Automerge = create("aabbcc") + const root = "_root" let result doc.put(root, "hello", "world") @@ -112,22 +112,22 @@ describe('Automerge', () => { }) it('should be able to use bytes', () => { - let doc = create() + const doc = create() doc.put("_root","data1", new Uint8Array([10,11,12])); doc.put("_root","data2", new Uint8Array([13,14,15]), "bytes"); - let value1 = doc.get("_root", "data1") + const value1 = doc.get("_root", "data1") assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]); - let value2 = doc.get("_root", "data2") + const value2 = doc.get("_root", "data2") assert.deepEqual(value2, ["bytes", new Uint8Array([13,14,15])]); doc.free() }) it('should be able to make sub objects', () => { - let doc = create() - let root = "_root" + const doc = create() + const root = "_root" let result - let submap = doc.putObject(root, "submap", {}) + const submap = doc.putObject(root, "submap", {}) doc.put(submap, "number", 6, "uint") assert.strictEqual(doc.pendingOps(),2) @@ -140,10 +140,10 @@ describe('Automerge', () => { }) it('should be able to make lists', () => { - let doc = create() - let root = "_root" + const doc = create() + const root = "_root" - let submap = doc.putObject(root, "numbers", []) + const submap = doc.putObject(root, "numbers", []) doc.insert(submap, 0, "a"); doc.insert(submap, 1, "b"); doc.insert(submap, 2, "c"); @@ -163,15 +163,15 @@ describe('Automerge', () => { }) it('lists have insert, set, splice, and push ops', () => { - let doc = create() - let root = "_root" + const doc = create() + const root = "_root" - let submap = doc.putObject(root, "letters", []) + const submap = doc.putObject(root, "letters", []) doc.insert(submap, 0, "a"); doc.insert(submap, 0, "b"); assert.deepEqual(doc.materialize(), { letters: ["b", "a" ] }) doc.push(submap, "c"); - let heads = doc.getHeads() + const heads = doc.getHeads() assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c" ] }) 
doc.push(submap, 3, "timestamp"); assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3) ] }) @@ -187,17 +187,17 @@ describe('Automerge', () => { }) it('should be able delete non-existant props', () => { - let doc = create() + const doc = create() doc.put("_root", "foo","bar") doc.put("_root", "bip","bap") - let hash1 = doc.commit() + const hash1 = doc.commit() assert.deepEqual(doc.keys("_root"),["bip","foo"]) doc.delete("_root", "foo") doc.delete("_root", "baz") - let hash2 = doc.commit() + const hash2 = doc.commit() assert.deepEqual(doc.keys("_root"),["bip"]) assert.deepEqual(doc.keys("_root", [hash1]),["bip", "foo"]) @@ -206,8 +206,8 @@ describe('Automerge', () => { }) it('should be able to del', () => { - let doc = create() - let root = "_root" + const doc = create() + const root = "_root" doc.put(root, "xxx", "xxx"); assert.deepEqual(doc.get(root, "xxx"),["str","xxx"]) @@ -217,8 +217,8 @@ describe('Automerge', () => { }) it('should be able to use counters', () => { - let doc = create() - let root = "_root" + const doc = create() + const root = "_root" doc.put(root, "counter", 10, "counter"); assert.deepEqual(doc.get(root, "counter"),["counter",10]) @@ -230,10 +230,10 @@ describe('Automerge', () => { }) it('should be able to splice text', () => { - let doc = create() - let root = "_root"; + const doc = create() + const root = "_root"; - let text = doc.putObject(root, "text", ""); + const text = doc.putObject(root, "text", ""); doc.splice(text, 0, 0, "hello ") doc.splice(text, 6, 0, ["w","o","r","l","d"]) doc.splice(text, 11, 0, ["!","?"]) @@ -247,39 +247,39 @@ describe('Automerge', () => { }) it('should be able to insert objects into text', () => { - let doc = create() - let text = doc.putObject("/", "text", "Hello world"); - let obj = doc.insertObject(text, 6, { hello: "world" }); + const doc = create() + const text = doc.putObject("/", "text", "Hello world"); + const obj = doc.insertObject(text, 6, { hello: "world" }); 
assert.deepEqual(doc.text(text), "Hello \ufffcworld"); assert.deepEqual(doc.get(text, 6), ["map", obj]); assert.deepEqual(doc.get(obj, "hello"), ["str", "world"]); }) it('should be able save all or incrementally', () => { - let doc = create() + const doc = create() doc.put("_root", "foo", 1) - let save1 = doc.save() + const save1 = doc.save() doc.put("_root", "bar", 2) - let saveMidway = doc.clone().save(); + const saveMidway = doc.clone().save(); - let save2 = doc.saveIncremental(); + const save2 = doc.saveIncremental(); doc.put("_root", "baz", 3); - let save3 = doc.saveIncremental(); + const save3 = doc.saveIncremental(); - let saveA = doc.save(); - let saveB = new Uint8Array([... save1, ...save2, ...save3]); + const saveA = doc.save(); + const saveB = new Uint8Array([... save1, ...save2, ...save3]); assert.notDeepEqual(saveA, saveB); - let docA = load(saveA); - let docB = load(saveB); - let docC = load(saveMidway) + const docA = load(saveA); + const docB = load(saveB); + const docC = load(saveMidway) docC.loadIncremental(save3) assert.deepEqual(docA.keys("_root"), docB.keys("_root")); @@ -292,12 +292,12 @@ describe('Automerge', () => { }) it('should be able to splice text', () => { - let doc = create() - let text = doc.putObject("_root", "text", ""); + const doc = create() + const text = doc.putObject("_root", "text", ""); doc.splice(text, 0, 0, "hello world"); - let hash1 = doc.commit(); + const hash1 = doc.commit(); doc.splice(text, 6, 0, "big bad "); - let hash2 = doc.commit(); + const hash2 = doc.commit(); assert.strictEqual(doc.text(text), "hello big bad world") assert.strictEqual(doc.length(text), 19) assert.strictEqual(doc.text(text, [ hash1 ]), "hello world") @@ -308,10 +308,10 @@ describe('Automerge', () => { }) it('local inc increments all visible counters in a map', () => { - let doc1 = create("aaaa") + const doc1 = create("aaaa") doc1.put("_root", "hello", "world") - let doc2 = load(doc1.save(), "bbbb"); - let doc3 = load(doc1.save(), "cccc"); + 
const doc2 = load(doc1.save(), "bbbb"); + const doc3 = load(doc1.save(), "cccc"); let heads = doc1.getHeads() doc1.put("_root", "cnt", 20) doc2.put("_root", "cnt", 0, "counter") @@ -331,8 +331,8 @@ describe('Automerge', () => { [ 'counter', 15, '2@cccc' ], ]) - let save1 = doc1.save() - let doc4 = load(save1) + const save1 = doc1.save() + const doc4 = load(save1) assert.deepEqual(doc4.save(), save1); doc1.free() doc2.free() @@ -341,11 +341,11 @@ describe('Automerge', () => { }) it('local inc increments all visible counters in a sequence', () => { - let doc1 = create("aaaa") - let seq = doc1.putObject("_root", "seq", []) + const doc1 = create("aaaa") + const seq = doc1.putObject("_root", "seq", []) doc1.insert(seq, 0, "hello") - let doc2 = load(doc1.save(), "bbbb"); - let doc3 = load(doc1.save(), "cccc"); + const doc2 = load(doc1.save(), "bbbb"); + const doc3 = load(doc1.save(), "cccc"); let heads = doc1.getHeads() doc1.put(seq, 0, 20) doc2.put(seq, 0, 0, "counter") @@ -365,8 +365,8 @@ describe('Automerge', () => { [ 'counter', 15, '3@cccc' ], ]) - let save = doc1.save() - let doc4 = load(save) + const save = doc1.save() + const doc4 = load(save) assert.deepEqual(doc4.save(), save); doc1.free() doc2.free() @@ -375,7 +375,7 @@ describe('Automerge', () => { }) it('paths can be used instead of objids', () => { - let doc = create("aaaa") + const doc = create("aaaa") doc.putObject("_root","list",[{ foo: "bar"}, [1,2,3]]) assert.deepEqual(doc.materialize("/"), { list: [{ foo: "bar"}, [1,2,3]] }) assert.deepEqual(doc.materialize("/list"), [{ foo: "bar"}, [1,2,3]]) @@ -383,26 +383,26 @@ describe('Automerge', () => { }) it('should be able to fetch changes by hash', () => { - let doc1 = create("aaaa") - let doc2 = create("bbbb") + const doc1 = create("aaaa") + const doc2 = create("bbbb") doc1.put("/","a","b") doc2.put("/","b","c") - let head1 = doc1.getHeads() - let head2 = doc2.getHeads() - let change1 = doc1.getChangeByHash(head1[0]) - let change2 = 
doc1.getChangeByHash(head2[0]) + const head1 = doc1.getHeads() + const head2 = doc2.getHeads() + const change1 = doc1.getChangeByHash(head1[0]) + const change2 = doc1.getChangeByHash(head2[0]) assert.deepEqual(change2, null) if (change1 === null) { throw new RangeError("change1 should not be null") } assert.deepEqual(decodeChange(change1).hash, head1[0]) }) it('recursive sets are possible', () => { - let doc = create("aaaa") - let l1 = doc.putObject("_root","list",[{ foo: "bar"}, [1,2,3]]) - let l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) - let l3 = doc.putObject("_root","info1","hello world") // 'text' object + const doc = create("aaaa") + const l1 = doc.putObject("_root","list",[{ foo: "bar"}, [1,2,3]]) + const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) + const l3 = doc.putObject("_root","info1","hello world") // 'text' object doc.put("_root","info2","hello world") // 'str' - let l4 = doc.putObject("_root","info3","hello world") + const l4 = doc.putObject("_root","info3","hello world") assert.deepEqual(doc.materialize(), { "list": [ { zip: ["a", "b"] }, { foo: "bar"}, [ 1,2,3]], "info1": "hello world", @@ -416,15 +416,15 @@ describe('Automerge', () => { }) it('only returns an object id when objects are created', () => { - let doc = create("aaaa") - let r1 = doc.put("_root","foo","bar") - let r2 = doc.putObject("_root","list",[]) - let r3 = doc.put("_root","counter",10, "counter") - let r4 = doc.increment("_root","counter",1) - let r5 = doc.delete("_root","counter") - let r6 = doc.insert(r2,0,10); - let r7 = doc.insertObject(r2,0,{}); - let r8 = doc.splice(r2,1,0,["a","b","c"]); + const doc = create("aaaa") + const r1 = doc.put("_root","foo","bar") + const r2 = doc.putObject("_root","list",[]) + const r3 = doc.put("_root","counter",10, "counter") + const r4 = doc.increment("_root","counter",1) + const r5 = doc.delete("_root","counter") + const r6 = doc.insert(r2,0,10); + const r7 = doc.insertObject(r2,0,{}); + const r8 = 
doc.splice(r2,1,0,["a","b","c"]); //let r9 = doc.splice(r2,1,0,["a",[],{},"d"]); assert.deepEqual(r1,null); assert.deepEqual(r2,"2@aaaa"); @@ -439,13 +439,13 @@ describe('Automerge', () => { }) it('objects without properties are preserved', () => { - let doc1 = create("aaaa") - let a = doc1.putObject("_root","a",{}); - let b = doc1.putObject("_root","b",{}); - let c = doc1.putObject("_root","c",{}); - let d = doc1.put(c,"d","dd"); - let saved = doc1.save(); - let doc2 = load(saved); + const doc1 = create("aaaa") + const a = doc1.putObject("_root","a",{}); + const b = doc1.putObject("_root","b",{}); + const c = doc1.putObject("_root","c",{}); + const d = doc1.put(c,"d","dd"); + const saved = doc1.save(); + const doc2 = load(saved); assert.deepEqual(doc2.get("_root","a"),["map",a]) assert.deepEqual(doc2.keys(a),[]) assert.deepEqual(doc2.get("_root","b"),["map",b]) @@ -458,26 +458,26 @@ describe('Automerge', () => { }) it('should allow you to forkAt a heads', () => { - let A = create("aaaaaa") + const A = create("aaaaaa") A.put("/", "key1","val1"); A.put("/", "key2","val2"); - let heads1 = A.getHeads(); - let B = A.fork("bbbbbb") + const heads1 = A.getHeads(); + const B = A.fork("bbbbbb") A.put("/", "key3","val3"); B.put("/", "key4","val4"); A.merge(B) - let heads2 = A.getHeads(); + const heads2 = A.getHeads(); A.put("/", "key5","val5"); assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/",heads1)) assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/",heads2)) }) it('should handle merging text conflicts then saving & loading', () => { - let A = create("aabbcc") - let At = A.putObject('_root', 'text', "") + const A = create("aabbcc") + const At = A.putObject('_root', 'text', "") A.splice(At, 0, 0, 'hello') - let B = A.fork() + const B = A.fork() assert.deepEqual(B.get("_root","text"), [ "text", At]) @@ -488,9 +488,9 @@ describe('Automerge', () => { A.merge(B) - let binary = A.save() + const binary = A.save() - let C = load(binary) + 
const C = load(binary) assert.deepEqual(C.get('_root', 'text'), ['text', '1@aabbcc']) assert.deepEqual(C.text(At), 'hell! world') @@ -499,7 +499,7 @@ describe('Automerge', () => { describe('patch generation', () => { it('should include root object key updates', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.put('_root', 'hello', 'world') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -511,7 +511,7 @@ describe('Automerge', () => { }) it('should include nested object creation', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.putObject('_root', 'birds', {friday: {robins: 3}}) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -525,7 +525,7 @@ describe('Automerge', () => { }) it('should delete map keys', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.put('_root', 'favouriteBird', 'Robin') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -540,7 +540,7 @@ describe('Automerge', () => { }) it('should include list element insertion', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -554,7 +554,7 @@ describe('Automerge', () => { }) it('should insert nested maps into a list', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.putObject('_root', 'birds', []) doc2.loadIncremental(doc1.saveIncremental()) doc1.insertObject('1@aaaa', 0, {species: 'Goldfinch', count: 3}) @@ -570,7 +570,7 @@ describe('Automerge', () => { }) it('should calculate list indexes based on visible elements', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + 
const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) doc2.loadIncremental(doc1.saveIncremental()) doc1.delete('1@aaaa', 0) @@ -588,9 +588,9 @@ describe('Automerge', () => { }) it('should handle concurrent insertions at the head of a list', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') doc1.putObject('_root', 'values', []) - let change1 = doc1.saveIncremental() + const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) doc4.loadIncremental(change1) @@ -598,7 +598,7 @@ describe('Automerge', () => { doc1.insert('1@aaaa', 1, 'd') doc2.insert('1@aaaa', 0, 'a') doc2.insert('1@aaaa', 1, 'b') - let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() + const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() doc3.enablePatches(true) doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) @@ -621,9 +621,9 @@ describe('Automerge', () => { }) it('should handle concurrent insertions beyond the head', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') doc1.putObject('_root', 'values', ['a', 'b']) - let change1 = doc1.saveIncremental() + const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) doc4.loadIncremental(change1) @@ -631,7 +631,7 @@ describe('Automerge', () => { doc1.insert('1@aaaa', 3, 'f') doc2.insert('1@aaaa', 2, 'c') doc2.insert('1@aaaa', 3, 'd') - let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() + const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() doc3.enablePatches(true) doc4.enablePatches(true) 
doc3.loadIncremental(change2); doc3.loadIncremental(change3) @@ -654,10 +654,10 @@ describe('Automerge', () => { }) it('should handle conflicts on root object keys', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') doc1.put('_root', 'bird', 'Greenfinch') doc2.put('_root', 'bird', 'Goldfinch') - let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc3.enablePatches(true) doc4.enablePatches(true) doc3.loadIncremental(change1); doc3.loadIncremental(change2) @@ -678,11 +678,11 @@ describe('Automerge', () => { }) it('should handle three-way conflicts', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') doc1.put('_root', 'bird', 'Greenfinch') doc2.put('_root', 'bird', 'Chaffinch') doc3.put('_root', 'bird', 'Goldfinch') - let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental(), change3 = doc3.saveIncremental() + const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental(), change3 = doc3.saveIncremental() doc1.enablePatches(true) doc2.enablePatches(true) doc3.enablePatches(true) @@ -717,11 +717,11 @@ describe('Automerge', () => { }) it('should allow a conflict to be resolved', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') doc1.put('_root', 'bird', 'Greenfinch') doc2.put('_root', 'bird', 'Chaffinch') doc3.enablePatches(true) - let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc1.loadIncremental(change2); doc3.loadIncremental(change1) doc2.loadIncremental(change1); 
doc3.loadIncremental(change2) doc1.put('_root', 'bird', 'Goldfinch') @@ -736,12 +736,12 @@ describe('Automerge', () => { }) it('should handle a concurrent map key overwrite and delete', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.put('_root', 'bird', 'Greenfinch') doc2.loadIncremental(doc1.saveIncremental()) doc1.put('_root', 'bird', 'Goldfinch') doc2.delete('_root', 'bird') - let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc1.enablePatches(true) doc2.enablePatches(true) doc1.loadIncremental(change2) @@ -760,15 +760,15 @@ describe('Automerge', () => { }) it('should handle a conflict on a list element', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') doc1.putObject('_root', 'birds', ['Thrush', 'Magpie']) - let change1 = doc1.saveIncremental() + const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) doc4.loadIncremental(change1) doc1.put('1@aaaa', 0, 'Song Thrush') doc2.put('1@aaaa', 0, 'Redwing') - let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() + const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() doc3.enablePatches(true) doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) @@ -789,9 +789,9 @@ describe('Automerge', () => { }) it('should handle a concurrent list element overwrite and delete', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') doc1.putObject('_root', 'birds', ['Parakeet', 'Magpie', 'Thrush']) - let change1 = doc1.saveIncremental() + const change1 = 
doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) doc4.loadIncremental(change1) @@ -799,7 +799,7 @@ describe('Automerge', () => { doc1.put('1@aaaa', 1, 'Song Thrush') doc2.put('1@aaaa', 0, 'Ring-necked parakeet') doc2.put('1@aaaa', 2, 'Redwing') - let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() + const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() doc3.enablePatches(true) doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) @@ -824,12 +824,12 @@ describe('Automerge', () => { }) it('should handle deletion of a conflict value', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') doc1.put('_root', 'bird', 'Robin') doc2.put('_root', 'bird', 'Wren') - let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc2.delete('_root', 'bird') - let change3 = doc2.saveIncremental() + const change3 = doc2.saveIncremental() doc3.enablePatches(true) doc3.loadIncremental(change1) doc3.loadIncremental(change2) @@ -848,10 +848,10 @@ describe('Automerge', () => { }) it('should handle conflicting nested objects', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.putObject('_root', 'birds', ['Parakeet']) doc2.putObject('_root', 'birds', {'Sparrowhawk': 1}) - let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc1.enablePatches(true) doc2.enablePatches(true) doc1.loadIncremental(change2) @@ -871,7 +871,7 @@ describe('Automerge', () => { it('should support date objects', () => { // FIXME: either use Date objects or use numbers consistently - let doc1 = create('aaaa'), doc2 = create('bbbb'), now = new Date() + const doc1 = 
create('aaaa'), doc2 = create('bbbb'), now = new Date() doc1.put('_root', 'createdAt', now.getTime(), 'timestamp') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -883,7 +883,7 @@ describe('Automerge', () => { }) it('should capture local put ops', () => { - let doc1 = create('aaaa') + const doc1 = create('aaaa') doc1.enablePatches(true) doc1.put('_root', 'key1', 1) doc1.put('_root', 'key1', 2) @@ -902,7 +902,7 @@ describe('Automerge', () => { }) it('should capture local insert ops', () => { - let doc1 = create('aaaa') + const doc1 = create('aaaa') doc1.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc1.insert(list, 0, 1) @@ -923,7 +923,7 @@ describe('Automerge', () => { }) it('should capture local push ops', () => { - let doc1 = create('aaaa') + const doc1 = create('aaaa') doc1.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc1.push(list, 1) @@ -940,7 +940,7 @@ describe('Automerge', () => { }) it('should capture local splice ops', () => { - let doc1 = create('aaaa') + const doc1 = create('aaaa') doc1.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc1.splice(list, 0, 0, [1,2,3,4]) @@ -959,7 +959,7 @@ describe('Automerge', () => { }) it('should capture local increment ops', () => { - let doc1 = create('aaaa') + const doc1 = create('aaaa') doc1.enablePatches(true) doc1.put('_root', 'counter', 2, 'counter') doc1.increment('_root', 'counter', 4) @@ -973,7 +973,7 @@ describe('Automerge', () => { it('should capture local delete ops', () => { - let doc1 = create('aaaa') + const doc1 = create('aaaa') doc1.enablePatches(true) doc1.put('_root', 'key1', 1) doc1.put('_root', 'key2', 2) @@ -989,7 +989,7 @@ describe('Automerge', () => { }) it('should support counters in a map', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc2.enablePatches(true) doc1.put('_root', 'starlings', 2, 'counter') 
doc2.loadIncremental(doc1.saveIncremental()) @@ -1004,7 +1004,7 @@ describe('Automerge', () => { }) it('should support counters in a list', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc2.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc2.loadIncremental(doc1.saveIncremental()) @@ -1029,9 +1029,9 @@ describe('Automerge', () => { describe('sync', () => { it('should send a sync message implying no local data', () => { - let doc = create() - let s1 = initSyncState() - let m1 = doc.generateSyncMessage(s1) + const doc = create() + const s1 = initSyncState() + const m1 = doc.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") } const message: DecodedSyncMessage = decodeSyncMessage(m1) assert.deepStrictEqual(message.heads, []) @@ -1043,21 +1043,21 @@ describe('Automerge', () => { }) it('should not reply if we have no data as well', () => { - let n1 = create(), n2 = create() - let s1 = initSyncState(), s2 = initSyncState() - let m1 = n1.generateSyncMessage(s1) + const n1 = create(), n2 = create() + const s1 = initSyncState(), s2 = initSyncState() + const m1 = n1.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") } n2.receiveSyncMessage(s2, m1) - let m2 = n2.generateSyncMessage(s2) + const m2 = n2.generateSyncMessage(s2) assert.deepStrictEqual(m2, null) }) it('repos with equal heads do not need a reply message', () => { - let n1 = create(), n2 = create() - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create(), n2 = create() + const s1 = initSyncState(), s2 = initSyncState() // make two nodes with the same changes - let list = n1.putObject("_root","n", []) + const list = n1.putObject("_root","n", []) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list,i,i) @@ -1067,21 +1067,21 @@ describe('Automerge', () => { assert.deepStrictEqual(n1.materialize(), n2.materialize()) // 
generate a naive sync message - let m1 = n1.generateSyncMessage(s1) + const m1 = n1.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(s1.lastSentHeads, n1.getHeads()) // heads are equal so this message should be null n2.receiveSyncMessage(s2, m1) - let m2 = n2.generateSyncMessage(s2) + const m2 = n2.generateSyncMessage(s2) assert.strictEqual(m2, null) }) it('n1 should offer all changes to n2 when starting from nothing', () => { - let n1 = create(), n2 = create() + const n1 = create(), n2 = create() // make changes for n1 that n2 should request - let list = n1.putObject("_root","n",[]) + const list = n1.putObject("_root","n",[]) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list, i, i) @@ -1094,10 +1094,10 @@ describe('Automerge', () => { }) it('should sync peers where one has commits the other does not', () => { - let n1 = create(), n2 = create() + const n1 = create(), n2 = create() // make changes for n1 that n2 should request - let list = n1.putObject("_root","n",[]) + const list = n1.putObject("_root","n",[]) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list,i,i) @@ -1111,8 +1111,8 @@ describe('Automerge', () => { it('should work with prior sync state', () => { // create & synchronize two nodes - let n1 = create(), n2 = create() - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create(), n2 = create() + const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { n1.put("_root","x",i) @@ -1134,8 +1134,8 @@ describe('Automerge', () => { it('should not generate messages once synced', () => { // create & synchronize two nodes - let n1 = create('abc123'), n2 = create('def456') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('abc123'), n2 = create('def456') + const s1 = initSyncState(), s2 = initSyncState() let message, patch for (let i = 0; i < 5; i++) { @@ -1182,8 +1182,8 @@ describe('Automerge', () => { it('should allow 
simultaneous messages during synchronization', () => { // create & synchronize two nodes - let n1 = create('abc123'), n2 = create('def456') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('abc123'), n2 = create('def456') + const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { n1.put("_root", "x", i) @@ -1261,10 +1261,11 @@ describe('Automerge', () => { }) it('should assume sent changes were recieved until we hear otherwise', () => { - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState(), message = null + const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() + let message = null - let items = n1.putObject("_root", "items", []) + const items = n1.putObject("_root", "items", []) n1.commit("",0) sync(n1, n2, s1, s2) @@ -1291,8 +1292,8 @@ describe('Automerge', () => { it('should work regardless of who initiates the exchange', () => { // create & synchronize two nodes - let n1 = create(), n2 = create() - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create(), n2 = create() + const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { n1.put("_root", "x", i) @@ -1319,8 +1320,8 @@ describe('Automerge', () => { // lastSync is undefined. // create two peers both with divergent commits - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { n1.put("_root","x",i) @@ -1352,7 +1353,7 @@ describe('Automerge', () => { // lastSync is c9. 
// create two peers both with divergent commits - let n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -1381,8 +1382,8 @@ describe('Automerge', () => { }) it('should ensure non-empty state after sync', () => { - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 3; i++) { n1.put("_root","x",i) @@ -1400,8 +1401,9 @@ describe('Automerge', () => { // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. // we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState() + const s2 = initSyncState() // n1 makes three changes, which we sync to n2 for (let i = 0; i < 3; i++) { @@ -1412,7 +1414,8 @@ describe('Automerge', () => { sync(n1, n2, s1, s2) // save a copy of n2 as "r" to simulate recovering from crash - let r, rSyncState + let r + let rSyncState ;[r, rSyncState] = [n2.clone(), s2.clone()] // sync another few commits @@ -1446,8 +1449,8 @@ describe('Automerge', () => { }) it('should resync after one node experiences data loss without disconnecting', () => { - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() // n1 makes three changes, which we sync to n2 for (let i = 0; i < 3; i++) { @@ -1460,7 +1463,7 @@ describe('Automerge', () => { assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) 
assert.deepStrictEqual(n1.materialize(), n2.materialize()) - let n2AfterDataLoss = create('89abcdef') + const n2AfterDataLoss = create('89abcdef') // "n2" now has no data, but n1 still thinks it does. Note we don't do // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting @@ -1470,8 +1473,8 @@ describe('Automerge', () => { }) it('should handle changes concurrent to the last sync heads', () => { - let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') - let s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') + const s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() // Change 1 is known to all three nodes //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) @@ -1505,7 +1508,7 @@ describe('Automerge', () => { }) it('should handle histories with lots of branching and merging', () => { - let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') + const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') n1.put("_root","x",0); n1.commit("",0) n2.applyChanges([n1.getLastLocalChange()]) n3.applyChanges([n1.getLastLocalChange()]) @@ -1526,7 +1529,7 @@ describe('Automerge', () => { n2.applyChanges([change1]) } - let s1 = initSyncState(), s2 = initSyncState() + const s1 = initSyncState(), s2 = initSyncState() sync(n1, n2, s1, s2) // Having n3's last change concurrent to the last sync heads forces us into the slower code path @@ -1652,7 +1655,7 @@ describe('Automerge', () => { assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent // n3 is a node that doesn't have the missing change. 
Nevertheless n1 is going to ask n3 for it - let n3 = create('fedcba98'), s13 = initSyncState(), s31 = initSyncState() + const n3 = create('fedcba98'), s13 = initSyncState(), s31 = initSyncState() sync(n1, n3, s13, s31) assert.deepStrictEqual(n1.getHeads(), [n1hash2]) assert.deepStrictEqual(n3.getHeads(), [n1hash2]) @@ -1819,7 +1822,7 @@ describe('Automerge', () => { // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. - let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') + const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState() let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() let message1, message2, message3 @@ -1889,8 +1892,8 @@ describe('Automerge', () => { }) it('should allow any change to be requested', () => { - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() let message = null for (let i = 0; i < 3; i++) { @@ -1917,8 +1920,8 @@ describe('Automerge', () => { }) it('should ignore requests for a nonexistent change', () => { - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() let message = null for (let i = 0; i < 3; i++) { @@ -1940,7 +1943,7 @@ describe('Automerge', () => { // ,-- c1 <-- c2 // c0 <-+ // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') + const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') let s1 = initSyncState(), s2 = initSyncState() let msg, decodedMsg diff --git 
a/automerge-wasm/tsconfig.json b/automerge-wasm/tsconfig.json index 69ca846b..2627c69b 100644 --- a/automerge-wasm/tsconfig.json +++ b/automerge-wasm/tsconfig.json @@ -13,5 +13,6 @@ "target": "es2016", "typeRoots": ["./index.d.ts"] }, - "exclude": ["dist/**/*"] + "include": ["test/**/*.ts"], + "exclude": ["dist/**/*", "examples/**/*"] } From 3a44ccd52dd7ae08701adb8b02a886ef20439394 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 23 May 2022 18:49:29 +0200 Subject: [PATCH 395/730] clean up lint, simplify package, hand write an index.d.ts --- automerge-js/README.md | 4 +- automerge-js/config/cjs.json | 8 - automerge-js/config/types.json | 10 - automerge-js/examples/webpack/src/index.js | 6 +- automerge-js/package.json | 43 +- automerge-js/src/bloom.ts | 124 ---- automerge-js/src/index.ts | 61 +- automerge-js/src/proxies.ts | 4 +- automerge-js/src/text.ts | 10 +- automerge-js/src/types.ts | 2 +- automerge-js/src/uuid.ts | 4 +- automerge-js/test/helpers.ts | 2 +- .../columnar.ts => test/legacy/columnar.js} | 665 ++++-------------- .../{src/common.ts => test/legacy/common.js} | 36 +- .../encoding.ts => test/legacy/encoding.js} | 80 +-- automerge-js/test/legacy/sync.js | 480 +++++++++++++ automerge-js/test/legacy_tests.ts | 2 +- automerge-js/test/sync_test.ts | 4 +- automerge-js/test/text_test.ts | 3 +- automerge-js/tsconfig.json | 4 +- 20 files changed, 736 insertions(+), 816 deletions(-) delete mode 100644 automerge-js/config/cjs.json delete mode 100644 automerge-js/config/types.json delete mode 100644 automerge-js/src/bloom.ts rename automerge-js/{src/columnar.ts => test/legacy/columnar.js} (62%) rename automerge-js/{src/common.ts => test/legacy/common.js} (66%) rename automerge-js/{src/encoding.ts => test/legacy/encoding.js} (96%) create mode 100644 automerge-js/test/legacy/sync.js diff --git a/automerge-js/README.md b/automerge-js/README.md index 3875e2b1..3c5cde33 100644 --- a/automerge-js/README.md +++ b/automerge-js/README.md @@ -2,7 +2,5 @@ ## Todo 1. 
write a readme -1. final name for package - to distinguish it from the old one -1. get a index.d.ts you like 1. publish package - +1. make sure the example code works with published packages diff --git a/automerge-js/config/cjs.json b/automerge-js/config/cjs.json deleted file mode 100644 index 890a0422..00000000 --- a/automerge-js/config/cjs.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "target": "es2016", - "module": "commonjs", - "outDir": "../dist/cjs" - } -} diff --git a/automerge-js/config/types.json b/automerge-js/config/types.json deleted file mode 100644 index 3e7cde18..00000000 --- a/automerge-js/config/types.json +++ /dev/null @@ -1,10 +0,0 @@ - -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "declaration": true, - "emitDeclarationOnly": true, - "outFile": "../index.d.ts" - }, - "include": [ "../src/index.ts" ] -} diff --git a/automerge-js/examples/webpack/src/index.js b/automerge-js/examples/webpack/src/index.js index 7d0b8371..876c1940 100644 --- a/automerge-js/examples/webpack/src/index.js +++ b/automerge-js/examples/webpack/src/index.js @@ -1,8 +1,10 @@ -import init, * as Automerge from "automerge-js" +import * as Automerge from "automerge-js" +import init from "automerge-wasm" // hello world code that will run correctly on web or node -init().then(_ => { +init().then((api) => { + Automerge.use(api) let doc = Automerge.init() doc = Automerge.change(doc, (d) => d.hello = "from automerge-js") const result = JSON.stringify(doc) diff --git a/automerge-js/package.json b/automerge-js/package.json index 30dc689a..728ff970 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -13,40 +13,23 @@ "LICENSE", "package.json", "index.d.ts", - "package.json", - "index.d.ts", - "dist/mjs/constants.js", - "dist/mjs/numbers.js", - "dist/mjs/sync.js", - "dist/mjs/index.js", - "dist/mjs/encoding.js", - "dist/mjs/columnar.js", - "dist/mjs/uuid.js", - "dist/mjs/counter.js", - 
"dist/mjs/common.js", - "dist/mjs/text.js", - "dist/mjs/proxies.js", - "dist/cjs/constants.js", - "dist/cjs/numbers.js", - "dist/cjs/sync.js", - "dist/cjs/index.js", - "dist/cjs/encoding.js", - "dist/cjs/columnar.js", - "dist/cjs/uuid.js", - "dist/cjs/counter.js", - "dist/cjs/common.js", - "dist/cjs/text.js", - "dist/cjs/proxies.js" + "dist/constants.js", + "dist/types.js", + "dist/numbers.js", + "dist/index.js", + "dist/uuid.js", + "dist/counter.js", + "dist/low_level.js", + "dist/text.js", + "dist/proxies.js" ], - "module": "./dist/mjs/index.js", - "main": "./dist/cjs/index.js", + "types": "index.d.ts", + "main": "./dist/index.js", "license": "MIT", "scripts": { "lint": "eslint src", - "build": "yarn build-cjs", - "build-cjs": "tsc -p config/cjs.json && tsc -p config/types.json", - "build-mjs": "tsc -p config/mjs.json && tsc -p config/types.json", - "test": "ts-mocha -p tsconfig.json test/**/*.ts" + "build": "tsc", + "test": "ts-mocha test/*.ts" }, "devDependencies": { "@types/expect": "^24.3.0", diff --git a/automerge-js/src/bloom.ts b/automerge-js/src/bloom.ts deleted file mode 100644 index cb66466a..00000000 --- a/automerge-js/src/bloom.ts +++ /dev/null @@ -1,124 +0,0 @@ -/** - * Implementation of the data synchronisation protocol that brings a local and a remote document - * into the same state. This is typically used when two nodes have been disconnected for some time, - * and need to exchange any changes that happened while they were disconnected. The two nodes that - * are syncing could be client and server, or server and client, or two peers with symmetric roles. - * - * The protocol is based on this paper: Martin Kleppmann and Heidi Howard. Byzantine Eventual - * Consistency and the Fundamental Limits of Peer-to-Peer Databases. 
https://arxiv.org/abs/2012.00472 - * - * The protocol assumes that every time a node successfully syncs with another node, it remembers - * the current heads (as returned by `Backend.getHeads()`) after the last sync with that node. The - * next time we try to sync with the same node, we start from the assumption that the other node's - * document version is no older than the outcome of the last sync, so we only need to exchange any - * changes that are more recent than the last sync. This assumption may not be true if the other - * node did not correctly persist its state (perhaps it crashed before writing the result of the - * last sync to disk), and we fall back to sending the entire document in this case. - */ - -import { hexStringToBytes, Encoder, Decoder } from './encoding' - -// These constants correspond to a 1% false positive rate. The values can be changed without -// breaking compatibility of the network protocol, since the parameters used for a particular -// Bloom filter are encoded in the wire format. -const BITS_PER_ENTRY = 10, NUM_PROBES = 7 - -/** - * A Bloom filter implementation that can be serialised to a byte array for transmission - * over a network. The entries that are added are assumed to already be SHA-256 hashes, - * so this implementation does not perform its own hashing. 
- */ -export class BloomFilter { - numEntries: number; - numBitsPerEntry: number; - numProbes: number; - bits: Uint8Array; - - constructor (arg) { - if (Array.isArray(arg)) { - // arg is an array of SHA256 hashes in hexadecimal encoding - this.numEntries = arg.length - this.numBitsPerEntry = BITS_PER_ENTRY - this.numProbes = NUM_PROBES - this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) - for (const hash of arg) this.addHash(hash) - } else if (arg instanceof Uint8Array) { - if (arg.byteLength === 0) { - this.numEntries = 0 - this.numBitsPerEntry = 0 - this.numProbes = 0 - this.bits = arg - } else { - const decoder = new Decoder(arg) - this.numEntries = decoder.readUint32() - this.numBitsPerEntry = decoder.readUint32() - this.numProbes = decoder.readUint32() - this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) - } - } else { - throw new TypeError('invalid argument') - } - } - - /** - * Returns the Bloom filter state, encoded as a byte array. - */ - get bytes() { - if (this.numEntries === 0) return new Uint8Array(0) - const encoder = new Encoder() - encoder.appendUint32(this.numEntries) - encoder.appendUint32(this.numBitsPerEntry) - encoder.appendUint32(this.numProbes) - encoder.appendRawBytes(this.bits) - return encoder.buffer - } - - /** - * Given a SHA-256 hash (as hex string), returns an array of probe indexes indicating which bits - * in the Bloom filter need to be tested or set for this particular entry. We do this by - * interpreting the first 12 bytes of the hash as three little-endian 32-bit unsigned integers, - * and then using triple hashing to compute the probe indexes. The algorithm comes from: - * - * Peter C. Dillinger and Panagiotis Manolios. Bloom Filters in Probabilistic Verification. - * 5th International Conference on Formal Methods in Computer-Aided Design (FMCAD), November 2004. 
- * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf - */ - getProbes(hash) { - const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength - if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`) - // on the next three lines, the right shift means interpret value as unsigned - let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo - let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo - const z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo - const probes = [x] - for (let i = 1; i < this.numProbes; i++) { - x = (x + y) % modulo - y = (y + z) % modulo - probes.push(x) - } - return probes - } - - /** - * Sets the Bloom filter bits corresponding to a given SHA-256 hash (given as hex string). - */ - addHash(hash) { - for (const probe of this.getProbes(hash)) { - this.bits[probe >>> 3] |= 1 << (probe & 7) - } - } - - /** - * Tests whether a given SHA-256 hash (given as hex string) is contained in the Bloom filter. 
- */ - containsHash(hash) { - if (this.numEntries === 0) return false - for (const probe of this.getProbes(hash)) { - if ((this.bits[probe >>> 3] & (1 << (probe & 7))) === 0) { - return false - } - } - return true - } -} - diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index e20f32a2..02f864b1 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -4,14 +4,12 @@ export { uuid } from './uuid' import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" import { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" -import { isObject } from "./common" - -import { Text, Counter } from "./types" +import { Counter } from "./types" export { Text, Counter, Int, Uint, Float64 } from "./types" import { ApiHandler, LowLevelApi, UseApi } from "./low_level" -import { ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "./types" +import { ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "./types" import { SyncState, SyncMessage, DecodedSyncMessage, AutomergeValue } from "./types" export type ChangeOptions = { message?: string, time?: number } @@ -30,7 +28,7 @@ export function use(api: LowLevelApi) { } function _state(doc: Doc) : Automerge { - const state = (doc)[STATE] + const state = Reflect.get(doc,STATE) if (state == undefined) { throw new RangeError("must be the document root") } @@ -38,19 +36,19 @@ function _state(doc: Doc) : Automerge { } function _frozen(doc: Doc) : boolean { - return (doc)[FROZEN] === true + return Reflect.get(doc,FROZEN) === true } function _heads(doc: Doc) : Heads | undefined { - return (doc)[HEADS] + return Reflect.get(doc,HEADS) } function _obj(doc: Doc) : ObjID { - return (doc)[OBJECT_ID] + return Reflect.get(doc,OBJECT_ID) } function _readonly(doc: Doc) : boolean { - return (doc)[READ_ONLY] === true + return Reflect.get(doc,READ_ONLY) === true } export function init(actor?: ActorId) : Doc{ @@ -181,16 +179,15 @@ export function 
getActorId(doc: Doc) : ActorId { return state.getActorId() } -function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : any { +type Conflicts = { [key: string]: AutomergeValue } + +function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : Conflicts | undefined { const values = context.getAll(objectId, prop) if (values.length <= 1) { return } - const result : { [key: ObjID]: AutomergeValue } = {} + const result : Conflicts = {} for (const fullVal of values) { - //const datatype = fullVal[0] - //const value = fullVal[1] - //switch (datatype) { switch (fullVal[0]) { case "map": result[fullVal[1]] = mapProxy(context, fullVal[1], [ prop ], true) @@ -225,7 +222,7 @@ function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : any { return result } -export function getConflicts(doc: Doc, prop: Prop) : any { +export function getConflicts(doc: Doc, prop: Prop) : Conflicts | undefined { const state = _state(doc) const objectId = _obj(doc) return conflictAt(state, objectId, prop) @@ -274,7 +271,6 @@ export function applyChanges(doc: Doc, changes: Change[]) : [Doc] { } export function getHistory(doc: Doc) : State[] { - const actor = getActorId(doc) const history = getAllChanges(doc) return history.map((change, index) => ({ get change () { @@ -289,7 +285,7 @@ export function getHistory(doc: Doc) : State[] { } // FIXME : no tests -export function equals(val1: any, val2: any) : boolean { +export function equals(val1: unknown, val2: unknown) : boolean { if (!isObject(val1) || !isObject(val2)) return val1 === val2 const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort() if (keys1.length !== keys2.length) return false @@ -373,27 +369,14 @@ export function dump(doc: Doc) { state.dump() } -export function toJS(doc: any) : any { - if (typeof doc === "object") { - if (doc instanceof Uint8Array) { - return doc - } - if (doc === null) { - return doc - } - if (doc instanceof Array) { - return doc.map((a) => toJS(a)) - } - if (doc 
instanceof Text) { - return doc.map((a: any) => toJS(a)) - } - const tmp : any = {} - for (const index in doc) { - tmp[index] = toJS(doc[index]) - } - return tmp - } else { - return doc - } +// FIXME - return T? +export function toJS(doc: Doc) : MaterializeValue { + let state = _state(doc) + let heads = _heads(doc) + return state.materialize("_root", heads) } + +function isObject(obj: unknown) : obj is Record { + return typeof obj === 'object' && obj !== null +} diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index 05ac2873..fbb044a6 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -592,10 +592,10 @@ function listMethods(target) { function textMethods(target) { const {context, objectId, heads } = target const methods = { - set (index, value) { + set (index: number, value) { return this[index] = value }, - get (index) : AutomergeValue { + get (index: number) : AutomergeValue { return this[index] }, toString () : string { diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index c58c1efa..26f4a861 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -20,7 +20,7 @@ export class Text { return this.elems.length } - get (index) : Value { + get (index: number) : Value { return this.elems[index] } @@ -103,7 +103,7 @@ export class Text { /** * Inserts new list items `values` starting at position `index`. */ - insertAt(index: number, ...values) { + insertAt(index: number, ...values: Value[]) { this.elems.splice(index, 0, ... values) } @@ -111,12 +111,12 @@ export class Text { * Deletes `numDelete` list items starting at position `index`. * if `numDelete` is not given, one item is deleted. */ - deleteAt(index, numDelete = 1) { + deleteAt(index: number, numDelete = 1) { this.elems.splice(index, numDelete) } - map(callback, thisArg?) 
{ - this.elems.map(callback, thisArg) + map(callback: (e: Value) => T) { + this.elems.map(callback) } diff --git a/automerge-js/src/types.ts b/automerge-js/src/types.ts index 609c71e7..5fb63abd 100644 --- a/automerge-js/src/types.ts +++ b/automerge-js/src/types.ts @@ -1,5 +1,5 @@ -export { Actor as ActorId, Value, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "automerge-wasm" +export { Actor as ActorId, Value, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "automerge-wasm" export { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm" export { Text } from "./text" diff --git a/automerge-js/src/uuid.ts b/automerge-js/src/uuid.ts index 549b0fc5..5ddb5ae6 100644 --- a/automerge-js/src/uuid.ts +++ b/automerge-js/src/uuid.ts @@ -7,8 +7,8 @@ function defaultFactory() { let factory = defaultFactory interface UUIDFactory extends Function { - setFactory(f: typeof factory); - reset(); + setFactory(f: typeof factory): void; + reset(): void; } export const uuid : UUIDFactory = () => { diff --git a/automerge-js/test/helpers.ts b/automerge-js/test/helpers.ts index 76cae7d6..d5292130 100644 --- a/automerge-js/test/helpers.ts +++ b/automerge-js/test/helpers.ts @@ -1,5 +1,5 @@ import * as assert from 'assert' -import { Encoder } from '../src/encoding' +import { Encoder } from './legacy/encoding' // Assertion that succeeds if the first argument deepStrictEquals at least one of the // subsequent arguments (but we don't care which one) diff --git a/automerge-js/src/columnar.ts b/automerge-js/test/legacy/columnar.js similarity index 62% rename from automerge-js/src/columnar.ts rename to automerge-js/test/legacy/columnar.js index b1776910..b97e6275 100644 --- a/automerge-js/src/columnar.ts +++ b/automerge-js/test/legacy/columnar.js @@ -1,19 +1,9 @@ -import * as pako from 'pako' -import { parseOpId, equalBytes } from './common' -import { +const pako = require('pako') +const { copyObject, parseOpId, equalBytes } = 
require('./common') +const { utf8ToString, hexStringToBytes, bytesToHexString, Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder -} from './encoding' - - -interface Op { - id: string; - action: string; - obj: string; - elemId?: string; - key?: string; - pred: string[]; -} +} = require('./encoding') // Maybe we should be using the platform's built-in hash implementation? // Node has the crypto module: https://nodejs.org/api/crypto.html and browsers have @@ -28,7 +18,7 @@ interface Op { // - It does not need a secure source of random bits and does not need to be // constant-time; // - I have reviewed the source code and it seems pretty reasonable. -import { Hash } from 'fast-sha256' +const { Hash } = require('fast-sha256') // These bytes don't mean anything, they were generated randomly const MAGIC_BYTES = new Uint8Array([0x85, 0x6f, 0x4a, 0x83]) @@ -42,7 +32,7 @@ const CHUNK_TYPE_DEFLATE = 2 // like CHUNK_TYPE_CHANGE but with DEFLATE compress const DEFLATE_MIN_SIZE = 256 // The least-significant 3 bits of a columnId indicate its datatype -export const COLUMN_TYPE = { +const COLUMN_TYPE = { GROUP_CARD: 0, ACTOR_ID: 1, INT_RLE: 2, INT_DELTA: 3, BOOLEAN: 4, STRING_RLE: 5, VALUE_LEN: 6, VALUE_RAW: 7 } @@ -53,15 +43,15 @@ const COLUMN_TYPE_DEFLATE = 8 // In the values in a column of type VALUE_LEN, the bottom four bits indicate the type of the value, // one of the following types in VALUE_TYPE. The higher bits indicate the length of the value in the // associated VALUE_RAW column (in bytes). 
-export const VALUE_TYPE = { +const VALUE_TYPE = { NULL: 0, FALSE: 1, TRUE: 2, LEB128_UINT: 3, LEB128_INT: 4, IEEE754: 5, UTF8: 6, BYTES: 7, COUNTER: 8, TIMESTAMP: 9, MIN_UNKNOWN: 10, MAX_UNKNOWN: 15 } // make* actions must be at even-numbered indexes in this list -export const ACTIONS = ['makeMap', 'set', 'makeList', 'del', 'makeText', 'inc', 'makeTable', 'link'] +const ACTIONS = ['makeMap', 'set', 'makeList', 'del', 'makeText', 'inc', 'makeTable', 'link'] -export const OBJECT_TYPE = {makeMap: 'map', makeList: 'list', makeText: 'text', makeTable: 'table'} +const OBJECT_TYPE = {makeMap: 'map', makeList: 'list', makeText: 'text', makeTable: 'table'} const COMMON_COLUMNS = [ {columnName: 'objActor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID}, @@ -79,13 +69,13 @@ const COMMON_COLUMNS = [ {columnName: 'chldCtr', columnId: 6 << 4 | COLUMN_TYPE.INT_DELTA} ] -export const CHANGE_COLUMNS = COMMON_COLUMNS.concat([ +const CHANGE_COLUMNS = COMMON_COLUMNS.concat([ {columnName: 'predNum', columnId: 7 << 4 | COLUMN_TYPE.GROUP_CARD}, {columnName: 'predActor', columnId: 7 << 4 | COLUMN_TYPE.ACTOR_ID}, {columnName: 'predCtr', columnId: 7 << 4 | COLUMN_TYPE.INT_DELTA} ]) -export const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([ +const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([ {columnName: 'succNum', columnId: 8 << 4 | COLUMN_TYPE.GROUP_CARD}, {columnName: 'succActor', columnId: 8 << 4 | COLUMN_TYPE.ACTOR_ID}, {columnName: 'succCtr', columnId: 8 << 4 | COLUMN_TYPE.INT_DELTA} @@ -141,13 +131,13 @@ function compareParsedOpIds(id1, id2) { * false. */ function parseAllOpIds(changes, single) { - const actors : any = {}, newChanges : any = [] + const actors = {}, newChanges = [] for (let change of changes) { - change = { ... change } + change = copyObject(change) actors[change.actor] = true change.ops = expandMultiOps(change.ops, change.startOp, change.actor) change.ops = change.ops.map(op => { - op = { ... 
op } + op = copyObject(op) if (op.obj !== '_root') op.obj = parseOpId(op.obj) if (op.elemId && op.elemId !== '_head') op.elemId = parseOpId(op.elemId) if (op.child) op.child = parseOpId(op.child) @@ -155,7 +145,7 @@ function parseAllOpIds(changes, single) { if (op.obj.actorId) actors[op.obj.actorId] = true if (op.elemId && op.elemId.actorId) actors[op.elemId.actorId] = true if (op.child && op.child.actorId) actors[op.child.actorId] = true - for (const pred of op.pred) actors[pred.actorId] = true + for (let pred of op.pred) actors[pred.actorId] = true return op }) newChanges.push(change) @@ -165,10 +155,10 @@ function parseAllOpIds(changes, single) { if (single) { actorIds = [changes[0].actor].concat(actorIds.filter(actor => actor !== changes[0].actor)) } - for (const change of newChanges) { + for (let change of newChanges) { change.actorNum = actorIds.indexOf(change.actor) for (let i = 0; i < change.ops.length; i++) { - const op = change.ops[i] + let op = change.ops[i] op.id = {counter: change.startOp + i, actorNum: change.actorNum, actorId: change.actor} op.obj = actorIdToActorNum(op.obj, actorIds) op.elemId = actorIdToActorNum(op.elemId, actorIds) @@ -232,21 +222,34 @@ function encodeOperationAction(op, columns) { } /** - * Encodes the integer `value` into the two columns `valLen` and `valRaw`, - * with the datatype tag set to `typeTag`. If `typeTag` is zero, it is set - * automatically to signed or unsigned depending on the sign of the value. - * Values with non-zero type tags are always encoded as signed integers. 
+ * Given the datatype for a number, determine the typeTag and the value to encode + * otherwise guess */ -function encodeInteger(value, typeTag, columns) { - let numBytes - if (value < 0 || typeTag > 0) { - numBytes = columns.valRaw.appendInt53(value) - if (!typeTag) typeTag = VALUE_TYPE.LEB128_INT - } else { - numBytes = columns.valRaw.appendUint53(value) - typeTag = VALUE_TYPE.LEB128_UINT +function getNumberTypeAndValue(op) { + switch (op.datatype) { + case "counter": + return [ VALUE_TYPE.COUNTER, op.value ] + case "timestamp": + return [ VALUE_TYPE.TIMESTAMP, op.value ] + case "uint": + return [ VALUE_TYPE.LEB128_UINT, op.value ] + case "int": + return [ VALUE_TYPE.LEB128_INT, op.value ] + case "float64": { + const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64) + view64.setFloat64(0, op.value, true) + return [ VALUE_TYPE.IEEE754, new Uint8Array(buf64) ] + } + default: + // increment operators get resolved here ... + if (Number.isInteger(op.value) && op.value <= Number.MAX_SAFE_INTEGER && op.value >= Number.MIN_SAFE_INTEGER) { + return [ VALUE_TYPE.LEB128_INT, op.value ] + } else { + const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64) + view64.setFloat64(0, op.value, true) + return [ VALUE_TYPE.IEEE754, new Uint8Array(buf64) ] + } } - columns.valLen.appendValue(numBytes << 4 | typeTag) } /** @@ -266,33 +269,23 @@ function encodeValue(op, columns) { } else if (ArrayBuffer.isView(op.value)) { const numBytes = columns.valRaw.appendRawBytes(new Uint8Array(op.value.buffer)) columns.valLen.appendValue(numBytes << 4 | VALUE_TYPE.BYTES) - } else if (op.datatype === 'counter' && typeof op.value === 'number') { - encodeInteger(op.value, VALUE_TYPE.COUNTER, columns) - } else if (op.datatype === 'timestamp' && typeof op.value === 'number') { - encodeInteger(op.value, VALUE_TYPE.TIMESTAMP, columns) + } else if (typeof op.value === 'number') { + let [typeTag, value] = getNumberTypeAndValue(op) + let numBytes + if (typeTag === VALUE_TYPE.LEB128_UINT) { + 
numBytes = columns.valRaw.appendUint53(value) + } else if (typeTag === VALUE_TYPE.IEEE754) { + numBytes = columns.valRaw.appendRawBytes(value) + } else { + numBytes = columns.valRaw.appendInt53(value) + } + columns.valLen.appendValue(numBytes << 4 | typeTag) } else if (typeof op.datatype === 'number' && op.datatype >= VALUE_TYPE.MIN_UNKNOWN && op.datatype <= VALUE_TYPE.MAX_UNKNOWN && op.value instanceof Uint8Array) { const numBytes = columns.valRaw.appendRawBytes(op.value) columns.valLen.appendValue(numBytes << 4 | op.datatype) } else if (op.datatype) { throw new RangeError(`Unknown datatype ${op.datatype} for value ${op.value}`) - } else if (typeof op.value === 'number') { - if (Number.isInteger(op.value) && op.value <= Number.MAX_SAFE_INTEGER && op.value >= Number.MIN_SAFE_INTEGER) { - encodeInteger(op.value, 0, columns) - } else { - // Encode number in 32-bit float if this can be done without loss of precision - const buf32 = new ArrayBuffer(4), view32 = new DataView(buf32) - view32.setFloat32(0, op.value, true) // true means little-endian - if (view32.getFloat32(0, true) === op.value) { - columns.valRaw.appendRawBytes(new Uint8Array(buf32)) - columns.valLen.appendValue(4 << 4 | VALUE_TYPE.IEEE754) - } else { - const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64) - view64.setFloat64(0, op.value, true) // true means little-endian - columns.valRaw.appendRawBytes(new Uint8Array(buf64)) - columns.valLen.appendValue(8 << 4 | VALUE_TYPE.IEEE754) - } - } } else { throw new RangeError(`Unsupported value in operation: ${op.value}`) } @@ -304,7 +297,7 @@ function encodeValue(op, columns) { * form `{value: value, datatype: datatypeTag}` where `value` is a JavaScript primitive datatype * corresponding to the value, and `datatypeTag` is a datatype annotation such as 'counter'. 
*/ -export function decodeValue(sizeTag, bytes) { +function decodeValue(sizeTag, bytes) { if (sizeTag === VALUE_TYPE.NULL) { return {value: null} } else if (sizeTag === VALUE_TYPE.FALSE) { @@ -315,15 +308,13 @@ export function decodeValue(sizeTag, bytes) { return {value: utf8ToString(bytes)} } else { if (sizeTag % 16 === VALUE_TYPE.LEB128_UINT) { - return {value: new Decoder(bytes).readUint53()} + return {value: new Decoder(bytes).readUint53(), datatype: "uint"} } else if (sizeTag % 16 === VALUE_TYPE.LEB128_INT) { - return {value: new Decoder(bytes).readInt53()} + return {value: new Decoder(bytes).readInt53(), datatype: "int"} } else if (sizeTag % 16 === VALUE_TYPE.IEEE754) { const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength) - if (bytes.byteLength === 4) { - return {value: view.getFloat32(0, true)} // true means little-endian - } else if (bytes.byteLength === 8) { - return {value: view.getFloat64(0, true)} + if (bytes.byteLength === 8) { + return {value: view.getFloat64(0, true), datatype: "float64"} } else { throw new RangeError(`Invalid length for floating point number: ${bytes.byteLength}`) } @@ -373,11 +364,11 @@ function decodeValueColumns(columns, colIndex, actorIds, result) { * Encodes an array of operations in a set of columns. The operations need to * be parsed with `parseAllOpIds()` beforehand. If `forDocument` is true, we use * the column structure of a whole document, otherwise we use the column - * structure for an individual change. Returns an array of `{id, name, encoder}` - * objects. + * structure for an individual change. Returns an array of + * `{columnId, columnName, encoder}` objects. 
*/ function encodeOps(ops, forDocument) { - const columns : any = { + const columns = { objActor : new RLEEncoder('uint'), objCtr : new RLEEncoder('uint'), keyActor : new RLEEncoder('uint'), @@ -403,7 +394,7 @@ function encodeOps(ops, forDocument) { columns.predActor = new RLEEncoder('uint') } - for (const op of ops) { + for (let op of ops) { encodeObjectId(op, columns) encodeOperationKey(op, columns) columns.insert.appendValue(!!op.insert) @@ -437,22 +428,32 @@ function encodeOps(ops, forDocument) { } } - const columnList : any = [] - for (const {columnName, columnId} of forDocument ? DOC_OPS_COLUMNS : CHANGE_COLUMNS) { - if (columns[columnName]) columnList.push({id: columnId, name: columnName, encoder: columns[columnName]}) + let columnList = [] + for (let {columnName, columnId} of forDocument ? DOC_OPS_COLUMNS : CHANGE_COLUMNS) { + if (columns[columnName]) columnList.push({columnId, columnName, encoder: columns[columnName]}) + } + return columnList.sort((a, b) => a.columnId - b.columnId) +} + +function validDatatype(value, datatype) { + if (datatype === undefined) { + return (typeof value === 'string' || typeof value === 'boolean' || value === null) + } else { + return typeof value === 'number' } - return columnList.sort((a, b) => a.id - b.id) } function expandMultiOps(ops, startOp, actor) { let opNum = startOp - const expandedOps : any = [] + let expandedOps = [] for (const op of ops) { if (op.action === 'set' && op.values && op.insert) { if (op.pred.length !== 0) throw new RangeError('multi-insert pred must be empty') let lastElemId = op.elemId + const datatype = op.datatype for (const value of op.values) { - expandedOps.push({action: 'set', obj: op.obj, elemId: lastElemId, value, pred: [], insert: true}) + if (!validDatatype(value, datatype)) throw new RangeError(`Decode failed: bad value/datatype association (${value},${datatype})`) + expandedOps.push({action: 'set', obj: op.obj, elemId: lastElemId, datatype, value, pred: [], insert: true}) lastElemId = 
`${opNum}@${actor}` opNum += 1 } @@ -480,12 +481,12 @@ function expandMultiOps(ops, startOp, actor) { * individual change. */ function decodeOps(ops, forDocument) { - const newOps : any = [] - for (const op of ops) { + const newOps = [] + for (let op of ops) { const obj = (op.objCtr === null) ? '_root' : `${op.objCtr}@${op.objActor}` const elemId = op.keyStr ? undefined : (op.keyCtr === 0 ? '_head' : `${op.keyCtr}@${op.keyActor}`) const action = ACTIONS[op.action] || op.action - const newOp : any = elemId ? {obj, elemId, action} : {obj, key: op.keyStr, action} + const newOp = elemId ? {obj, elemId, action} : {obj, key: op.keyStr, action} newOp.insert = !!op.insert if (ACTIONS[op.action] === 'set' || ACTIONS[op.action] === 'inc') { newOp.value = op.valLen @@ -513,7 +514,7 @@ function decodeOps(ops, forDocument) { */ function checkSortedOpIds(opIds) { let last = null - for (const opId of opIds) { + for (let opId of opIds) { if (last && compareParsedOpIds(last, opId) !== -1) { throw new RangeError('operation IDs are not in ascending order') } @@ -521,7 +522,7 @@ function checkSortedOpIds(opIds) { } } -export function encoderByColumnId(columnId) { +function encoderByColumnId(columnId) { if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { return new DeltaEncoder() } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { @@ -535,7 +536,7 @@ export function encoderByColumnId(columnId) { } } -export function decoderByColumnId(columnId, buffer) { +function decoderByColumnId(columnId, buffer) { if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { return new DeltaDecoder(buffer) } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { @@ -549,10 +550,9 @@ export function decoderByColumnId(columnId, buffer) { } } -export function makeDecoders(columns, columnSpec) { +function makeDecoders(columns, columnSpec) { const emptyBuf = new Uint8Array(0) - const decoders : any = [] - let columnIndex = 0, specIndex = 0 + let decoders = [], columnIndex = 0, specIndex = 0 while (columnIndex < 
columns.length || specIndex < columnSpec.length) { if (columnIndex === columns.length || @@ -576,22 +576,20 @@ export function makeDecoders(columns, columnSpec) { function decodeColumns(columns, actorIds, columnSpec) { columns = makeDecoders(columns, columnSpec) - const parsedRows : any = [] + let parsedRows = [] while (columns.some(col => !col.decoder.done)) { - const row = {} - let col = 0 + let row = {}, col = 0 while (col < columns.length) { const columnId = columns[col].columnId - const groupId = columnId >> 4 - let groupCols = 1 + let groupId = columnId >> 4, groupCols = 1 while (col + groupCols < columns.length && columns[col + groupCols].columnId >> 4 === groupId) { groupCols++ } if (columnId % 8 === COLUMN_TYPE.GROUP_CARD) { - const values : any = [], count = columns[col].decoder.readValue() + const values = [], count = columns[col].decoder.readValue() for (let i = 0; i < count; i++) { - const value = {} + let value = {} for (let colOffset = 1; colOffset < groupCols; colOffset++) { decodeValueColumns(columns, col + colOffset, actorIds, value) } @@ -613,8 +611,7 @@ function decodeColumnInfo(decoder) { // deflate-compressed. We ignore this bit when checking whether columns are sorted by ID. 
const COLUMN_ID_MASK = (-1 ^ COLUMN_TYPE_DEFLATE) >>> 0 - let lastColumnId = -1 - const columns : any = [], numColumns = decoder.readUint53() + let lastColumnId = -1, columns = [], numColumns = decoder.readUint53() for (let i = 0; i < numColumns; i++) { const columnId = decoder.readUint53(), bufferLen = decoder.readUint53() if ((columnId & COLUMN_ID_MASK) <= (lastColumnId & COLUMN_ID_MASK)) { @@ -629,18 +626,18 @@ function decodeColumnInfo(decoder) { function encodeColumnInfo(encoder, columns) { const nonEmptyColumns = columns.filter(column => column.encoder.buffer.byteLength > 0) encoder.appendUint53(nonEmptyColumns.length) - for (const column of nonEmptyColumns) { - encoder.appendUint53(column.id) + for (let column of nonEmptyColumns) { + encoder.appendUint53(column.columnId) encoder.appendUint53(column.encoder.buffer.byteLength) } } function decodeChangeHeader(decoder) { - const numDeps = decoder.readUint53(), deps : any = [] + const numDeps = decoder.readUint53(), deps = [] for (let i = 0; i < numDeps; i++) { deps.push(bytesToHexString(decoder.readRawBytes(32))) } - const change : any = { + let change = { actor: decoder.readHexString(), seq: decoder.readUint53(), startOp: decoder.readUint53(), @@ -696,7 +693,7 @@ function decodeContainerHeader(decoder, computeHash) { const hashStartOffset = decoder.offset const chunkType = decoder.readByte() const chunkLength = decoder.readUint53() - const header : any = {chunkType, chunkLength, chunkData: decoder.readRawBytes(chunkLength)} + const header = {chunkType, chunkLength, chunkData: decoder.readRawBytes(chunkLength)} if (computeHash) { const sha256 = new Hash() @@ -710,25 +707,14 @@ function decodeContainerHeader(decoder, computeHash) { return header } -/** - * Returns the checksum of a change (bytes 4 to 7) as a 32-bit unsigned integer. 
- */ -export function getChangeChecksum(change) { - if (change[0] !== MAGIC_BYTES[0] || change[1] !== MAGIC_BYTES[1] || - change[2] !== MAGIC_BYTES[2] || change[3] !== MAGIC_BYTES[3]) { - throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83') - } - return ((change[4] << 24) | (change[5] << 16) | (change[6] << 8) | change[7]) >>> 0 -} - -export function encodeChange(changeObj) { +function encodeChange(changeObj) { const { changes, actorIds } = parseAllOpIds([changeObj], true) - const change : any = changes[0] + const change = changes[0] const { hash, bytes } = encodeContainer(CHUNK_TYPE_CHANGE, encoder => { if (!Array.isArray(change.deps)) throw new TypeError('deps is not an array') encoder.appendUint53(change.deps.length) - for (const hash of change.deps.slice().sort()) { + for (let hash of change.deps.slice().sort()) { encoder.appendRawBytes(hexStringToBytes(hash)) } encoder.appendHexString(change.actor) @@ -737,11 +723,11 @@ export function encodeChange(changeObj) { encoder.appendInt53(change.time) encoder.appendPrefixedString(change.message || '') encoder.appendUint53(actorIds.length - 1) - for (const actor of actorIds.slice(1)) encoder.appendHexString(actor) + for (let actor of actorIds.slice(1)) encoder.appendHexString(actor) - const columns : any = encodeOps(change.ops, false) + const columns = encodeOps(change.ops, false) encodeColumnInfo(encoder, columns) - for (const column of columns) encoder.appendRawBytes(column.encoder.buffer) + for (let column of columns) encoder.appendRawBytes(column.encoder.buffer) if (change.extraBytes) encoder.appendRawBytes(change.extraBytes) }) @@ -752,16 +738,16 @@ export function encodeChange(changeObj) { return (bytes.byteLength >= DEFLATE_MIN_SIZE) ? 
deflateChange(bytes) : bytes } -export function decodeChangeColumns(buffer) { +function decodeChangeColumns(buffer) { if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) const decoder = new Decoder(buffer) - const header : any = decodeContainerHeader(decoder, true) + const header = decodeContainerHeader(decoder, true) const chunkDecoder = new Decoder(header.chunkData) if (!decoder.done) throw new RangeError('Encoded change has trailing data') if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - const change : any = decodeChangeHeader(chunkDecoder) - const columns : any = decodeColumnInfo(chunkDecoder) + const change = decodeChangeHeader(chunkDecoder) + const columns = decodeColumnInfo(chunkDecoder) for (let i = 0; i < columns.length; i++) { if ((columns[i].columnId & COLUMN_TYPE_DEFLATE) !== 0) { throw new RangeError('change must not contain deflated columns') @@ -781,8 +767,8 @@ export function decodeChangeColumns(buffer) { /** * Decodes one change in binary format into its JS object representation. */ -export function decodeChange(buffer) { - const change : any = decodeChangeColumns(buffer) +function decodeChange(buffer) { + const change = decodeChangeColumns(buffer) change.ops = decodeOps(decodeColumns(change.columns, change.actorIds, CHANGE_COLUMNS), false) delete change.actorIds delete change.columns @@ -794,13 +780,13 @@ export function decodeChange(buffer) { * the operations. Saves work when we only need to inspect the headers. Only * computes the hash of the change if `computeHash` is true. 
*/ -export function decodeChangeMeta(buffer, computeHash) : any { +function decodeChangeMeta(buffer, computeHash) { if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) - const header : any = decodeContainerHeader(new Decoder(buffer), computeHash) + const header = decodeContainerHeader(new Decoder(buffer), computeHash) if (header.chunkType !== CHUNK_TYPE_CHANGE) { throw new RangeError('Buffer chunk type is not a change') } - const meta : any = decodeChangeHeader(new Decoder(header.chunkData)) + const meta = decodeChangeHeader(new Decoder(header.chunkData)) meta.change = buffer if (computeHash) meta.hash = header.hash return meta @@ -840,9 +826,8 @@ function inflateChange(buffer) { * Takes an Uint8Array that may contain multiple concatenated changes, and * returns an array of subarrays, each subarray containing one change. */ -export function splitContainers(buffer) { - const decoder = new Decoder(buffer), chunks : any = [] - let startOffset = 0 +function splitContainers(buffer) { + let decoder = new Decoder(buffer), chunks = [], startOffset = 0 while (!decoder.done) { decodeContainerHeader(decoder, false) chunks.push(buffer.subarray(startOffset, decoder.offset)) @@ -855,10 +840,10 @@ export function splitContainers(buffer) { * Decodes a list of changes from the binary format into JS objects. * `binaryChanges` is an array of `Uint8Array` objects. 
*/ -export function decodeChanges(binaryChanges) { - let decoded : any = [] - for (const binaryChange of binaryChanges) { - for (const chunk of splitContainers(binaryChange)) { +function decodeChanges(binaryChanges) { + let decoded = [] + for (let binaryChange of binaryChanges) { + for (let chunk of splitContainers(binaryChange)) { if (chunk[8] === CHUNK_TYPE_DOCUMENT) { decoded = decoded.concat(decodeDocument(chunk)) } else if (chunk[8] === CHUNK_TYPE_CHANGE || chunk[8] === CHUNK_TYPE_DEFLATE) { @@ -883,84 +868,14 @@ function sortOpIds(a, b) { return 0 } -function groupDocumentOps(changes) { - const byObjectId = {}, byReference = {}, objectType = {} - for (const change of changes) { - for (let i = 0; i < change.ops.length; i++) { - const op = change.ops[i], opId = `${op.id.counter}@${op.id.actorId}` - const objectId = (op.obj === '_root') ? '_root' : `${op.obj.counter}@${op.obj.actorId}` - if (op.action.startsWith('make')) { - objectType[opId] = op.action - if (op.action === 'makeList' || op.action === 'makeText') { - byReference[opId] = {'_head': []} - } - } - - let key - if (objectId === '_root' || objectType[objectId] === 'makeMap' || objectType[objectId] === 'makeTable') { - key = op.key - } else if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { - if (op.insert) { - key = opId - const ref = (op.elemId === '_head') ? 
'_head' : `${op.elemId.counter}@${op.elemId.actorId}` - byReference[objectId][ref].push(opId) - byReference[objectId][opId] = [] - } else { - key = `${op.elemId.counter}@${op.elemId.actorId}` - } - } else { - throw new RangeError(`Unknown object type for object ${objectId}`) - } - - if (!byObjectId[objectId]) byObjectId[objectId] = {} - if (!byObjectId[objectId][key]) byObjectId[objectId][key] = {} - byObjectId[objectId][key][opId] = op - op.succ = [] - - for (const pred of op.pred) { - const predId = `${pred.counter}@${pred.actorId}` - if (!byObjectId[objectId][key][predId]) { - throw new RangeError(`No predecessor operation ${predId}`) - } - byObjectId[objectId][key][predId].succ.push(op.id) - } - } - } - - const ops : any[] = [] - for (const objectId of Object.keys(byObjectId).sort(sortOpIds)) { - let keys : string[] = [] - if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { - const stack = ['_head'] - while (stack.length > 0) { - const key : any = stack.pop() - if (key !== '_head') keys.push(key) - for (const opId of byReference[objectId][key].sort(sortOpIds)) stack.push(opId) - } - } else { - // FIXME JavaScript sorts based on UTF-16 encoding. We should change this to use the UTF-8 - // encoding instead (the sort order will be different beyond the basic multilingual plane) - keys = Object.keys(byObjectId[objectId]).sort() - } - - for (const key of keys) { - for (const opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) { - const op : any = byObjectId[objectId][key][opId] - if (op.action !== 'del') ops.push(op) - } - } - } - return ops -} - /** * Takes a set of operations `ops` loaded from an encoded document, and * reconstructs the changes that they originally came from. * Does not return anything, only mutates `changes`. 
*/ function groupChangeOps(changes, ops) { - const changesByActor = {} // map from actorId to array of changes by that actor - for (const change of changes) { + let changesByActor = {} // map from actorId to array of changes by that actor + for (let change of changes) { change.ops = [] if (!changesByActor[change.actor]) changesByActor[change.actor] = [] if (change.seq !== changesByActor[change.actor].length + 1) { @@ -972,12 +887,12 @@ function groupChangeOps(changes, ops) { changesByActor[change.actor].push(change) } - const opsById : { [key:string]: Op } = {} - for (const op of ops) { + let opsById = {} + for (let op of ops) { if (op.action === 'del') throw new RangeError('document should not contain del operations') op.pred = opsById[op.id] ? opsById[op.id].pred : [] opsById[op.id] = op - for (const succ of op.succ) { + for (let succ of op.succ) { if (!opsById[succ]) { if (op.elemId) { const elemId = op.insert ? op.id : op.elemId @@ -990,11 +905,11 @@ function groupChangeOps(changes, ops) { } delete op.succ } - for (const op of Object.values(opsById)) { + for (let op of Object.values(opsById)) { if (op.action === 'del') ops.push(op) } - for (const op of ops) { + for (let op of ops) { const { counter, actorId } = parseOpId(op.id) const actorChanges = changesByActor[actorId] // Binary search to find the change that should contain this operation @@ -1013,7 +928,7 @@ function groupChangeOps(changes, ops) { actorChanges[left].ops.push(op) } - for (const change of changes) { + for (let change of changes) { change.ops.sort((op1, op2) => sortOpIds(op1.id, op2.id)) change.startOp = change.maxOp - change.ops.length + 1 delete change.maxOp @@ -1027,63 +942,12 @@ function groupChangeOps(changes, ops) { } } -function encodeDocumentChanges(changes) { - const columns = { // see DOCUMENT_COLUMNS - actor : new RLEEncoder('uint'), - seq : new DeltaEncoder(), - maxOp : new DeltaEncoder(), - time : new DeltaEncoder(), - message : new RLEEncoder('utf8'), - depsNum : new 
RLEEncoder('uint'), - depsIndex : new DeltaEncoder(), - extraLen : new RLEEncoder('uint'), - extraRaw : new Encoder() - } - const indexByHash = {} // map from change hash to its index in the changes array - const heads = {} // change hashes that are not a dependency of any other change - - for (let i = 0; i < changes.length; i++) { - const change = changes[i] - indexByHash[change.hash] = i - heads[change.hash] = true - - columns.actor.appendValue(change.actorNum) - columns.seq.appendValue(change.seq) - columns.maxOp.appendValue(change.startOp + change.ops.length - 1) - columns.time.appendValue(change.time) - columns.message.appendValue(change.message) - columns.depsNum.appendValue(change.deps.length) - - for (const dep of change.deps) { - if (typeof indexByHash[dep] !== 'number') { - throw new RangeError(`Unknown dependency hash: ${dep}`) - } - columns.depsIndex.appendValue(indexByHash[dep]) - if (heads[dep]) delete heads[dep] - } - - if (change.extraBytes) { - columns.extraLen.appendValue(change.extraBytes.byteLength << 4 | VALUE_TYPE.BYTES) - columns.extraRaw.appendRawBytes(change.extraBytes) - } else { - columns.extraLen.appendValue(VALUE_TYPE.BYTES) // zero-length byte array - } - } - - const changesColumns : any = [] - for (const {columnName, columnId} of DOCUMENT_COLUMNS) { - changesColumns.push({id: columnId, name: columnName, encoder: columns[columnName]}) - } - changesColumns.sort((a, b) => a.id - b.id) - return { changesColumns, heads: Object.keys(heads).sort() } -} - function decodeDocumentChanges(changes, expectedHeads) { - const heads = {} // change hashes that are not a dependency of any other change + let heads = {} // change hashes that are not a dependency of any other change for (let i = 0; i < changes.length; i++) { - const change = changes[i] + let change = changes[i] change.deps = [] - for (const index of change.depsNum.map(d => d.depsIndex)) { + for (let index of change.depsNum.map(d => d.depsIndex)) { if (!changes[index] || 
!changes[index].hash) { throw new RangeError(`No hash for index ${index} while processing index ${i}`) } @@ -1116,52 +980,47 @@ function decodeDocumentChanges(changes, expectedHeads) { } } -/** - * Transforms a list of changes into a binary representation of the document state. - */ -export function encodeDocument(binaryChanges) { - const { changes, actorIds } = parseAllOpIds(decodeChanges(binaryChanges), false) - const { changesColumns, heads } = encodeDocumentChanges(changes) - const opsColumns = encodeOps(groupDocumentOps(changes), true) - for (const column of changesColumns) deflateColumn(column) - for (const column of opsColumns) deflateColumn(column) +function encodeDocumentHeader(doc) { + const { changesColumns, opsColumns, actorIds, heads, headsIndexes, extraBytes } = doc + for (let column of changesColumns) deflateColumn(column) + for (let column of opsColumns) deflateColumn(column) return encodeContainer(CHUNK_TYPE_DOCUMENT, encoder => { encoder.appendUint53(actorIds.length) - for (const actor of actorIds) { + for (let actor of actorIds) { encoder.appendHexString(actor) } encoder.appendUint53(heads.length) - for (const head of heads.sort()) { + for (let head of heads.sort()) { encoder.appendRawBytes(hexStringToBytes(head)) } encodeColumnInfo(encoder, changesColumns) encodeColumnInfo(encoder, opsColumns) - // @ts-ignore - for (const column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) - // @ts-ignore - for (const column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) + for (let column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) + for (let column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) + for (let index of headsIndexes) encoder.appendUint53(index) + if (extraBytes) encoder.appendRawBytes(extraBytes) }).bytes } -export function decodeDocumentHeader(buffer) { +function decodeDocumentHeader(buffer) { const documentDecoder = new Decoder(buffer) const header = 
decodeContainerHeader(documentDecoder, true) const decoder = new Decoder(header.chunkData) if (!documentDecoder.done) throw new RangeError('Encoded document has trailing data') if (header.chunkType !== CHUNK_TYPE_DOCUMENT) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - const actorIds : string[] = [], numActors = decoder.readUint53() + const actorIds = [], numActors = decoder.readUint53() for (let i = 0; i < numActors; i++) { actorIds.push(decoder.readHexString()) } - const heads : string[] = [], numHeads = decoder.readUint53() + const heads = [], headsIndexes = [], numHeads = decoder.readUint53() for (let i = 0; i < numHeads; i++) { heads.push(bytesToHexString(decoder.readRawBytes(32))) } - const changesColumns : any = decodeColumnInfo(decoder) - const opsColumns : any = decodeColumnInfo(decoder) + const changesColumns = decodeColumnInfo(decoder) + const opsColumns = decodeColumnInfo(decoder) for (let i = 0; i < changesColumns.length; i++) { changesColumns[i].buffer = decoder.readRawBytes(changesColumns[i].bufferLen) inflateColumn(changesColumns[i]) @@ -1170,12 +1029,15 @@ export function decodeDocumentHeader(buffer) { opsColumns[i].buffer = decoder.readRawBytes(opsColumns[i].bufferLen) inflateColumn(opsColumns[i]) } + if (!decoder.done) { + for (let i = 0; i < numHeads; i++) headsIndexes.push(decoder.readUint53()) + } const extraBytes = decoder.readRawBytes(decoder.buf.byteLength - decoder.offset) - return { changesColumns, opsColumns, actorIds, heads, extraBytes } + return { changesColumns, opsColumns, actorIds, heads, headsIndexes, extraBytes } } -export function decodeDocument(buffer) { +function decodeDocument(buffer) { const { changesColumns, opsColumns, actorIds, heads } = decodeDocumentHeader(buffer) const changes = decodeColumns(changesColumns, actorIds, DOCUMENT_COLUMNS) const ops = decodeOps(decodeColumns(opsColumns, actorIds, DOC_OPS_COLUMNS), true) @@ -1190,7 +1052,7 @@ export function decodeDocument(buffer) { function 
deflateColumn(column) { if (column.encoder.buffer.byteLength >= DEFLATE_MIN_SIZE) { column.encoder = {buffer: pako.deflateRaw(column.encoder.buffer)} - column.id |= COLUMN_TYPE_DEFLATE + column.columnId |= COLUMN_TYPE_DEFLATE } } @@ -1204,230 +1066,9 @@ function inflateColumn(column) { } } -/** - * Takes all the operations for the same property (i.e. the same key in a map, or the same list - * element) and mutates the object patch to reflect the current value(s) of that property. There - * might be multiple values in the case of a conflict. `objects` is a map from objectId to the - * patch for that object. `property` contains `objId`, `key`, a list of `ops`, and `index` (the - * current list index if the object is a list). Returns true if one or more values are present, - * or false if the property has been deleted. - */ -function addPatchProperty(objects, property) { - const values : any = {} - let counter : any = null - for (const op of property.ops) { - // Apply counters and their increments regardless of the number of successor operations - if (op.actionName === 'set' && op.value.datatype === 'counter') { - if (!counter) counter = {opId: op.opId, value: 0, succ: {}} - counter.value += op.value.value - for (const succId of op.succ) counter.succ[succId] = true - } else if (op.actionName === 'inc') { - if (!counter) throw new RangeError(`inc operation ${op.opId} without a counter`) - counter.value += op.value.value - delete counter.succ[op.opId] - for (const succId of op.succ) counter.succ[succId] = true - - } else if (op.succ.length === 0) { // Ignore any ops that have been overwritten - if (op.actionName.startsWith('make')) { - values[op.opId] = objects[op.opId] - } else if (op.actionName === 'set') { - values[op.opId] = {value: op.value.value, type: 'value'} - if (op.value.datatype) { - values[op.opId].datatype = op.value.datatype - } - } else if (op.actionName === 'link') { - // NB. 
This assumes that the ID of the child object is greater than the ID of the current - // object. This is true as long as link operations are only used to redo undone make* - // operations, but it will cease to be true once subtree moves are allowed. - if (!op.childId) throw new RangeError(`link operation ${op.opId} without a childId`) - values[op.opId] = objects[op.childId] - } else { - throw new RangeError(`Unexpected action type: ${op.actionName}`) - } - } - } - - // If the counter had any successor operation that was not an increment, that means the counter - // must have been deleted, so we omit it from the patch. - if (counter && Object.keys(counter.succ).length === 0) { - values[counter.opId] = {type: 'value', value: counter.value, datatype: 'counter'} - } - - if (Object.keys(values).length > 0) { - const obj = objects[property.objId] - if (obj.type === 'map' || obj.type === 'table') { - obj.props[property.key] = values - } else if (obj.type === 'list' || obj.type === 'text') { - makeListEdits(obj, values, property.key, property.index) - } - return true - } else { - return false - } -} - -/** - * When constructing a patch to instantiate a loaded document, this function adds the edits to - * insert one list element. Usually there is one value, but in the case of a conflict there may be - * several values. `elemId` is the ID of the list element, and `index` is the list index at which - * the value(s) should be placed. 
- */ -function makeListEdits(list, values, elemId, index) { - let firstValue = true - const opIds = Object.keys(values).sort((id1, id2) => compareParsedOpIds(parseOpId(id1), parseOpId(id2))) - for (const opId of opIds) { - if (firstValue) { - list.edits.push({action: 'insert', value: values[opId], elemId, opId, index}) - } else { - list.edits.push({action: 'update', value: values[opId], opId, index}) - } - firstValue = false - } -} - -/** - * Recursively walks the patch tree, calling appendEdit on every list edit in order to consense - * consecutive sequences of insertions into multi-inserts. - */ -function condenseEdits(diff) { - if (diff.type === 'list' || diff.type === 'text') { - diff.edits.forEach(e => condenseEdits(e.value)) - const newEdits = diff.edits - diff.edits = [] - for (const edit of newEdits) appendEdit(diff.edits, edit) - } else if (diff.type === 'map' || diff.type === 'table') { - for (const prop of Object.keys(diff.props)) { - for (const opId of Object.keys(diff.props[prop])) { - condenseEdits(diff.props[prop][opId]) - } - } - } -} - -/** - * Appends a list edit operation (insert, update, remove) to an array of existing operations. If the - * last existing operation can be extended (as a multi-op), we do that. 
- */ -export function appendEdit(existingEdits, nextEdit) { - if (existingEdits.length === 0) { - existingEdits.push(nextEdit) - return - } - - const lastEdit = existingEdits[existingEdits.length - 1] - if (lastEdit.action === 'insert' && nextEdit.action === 'insert' && - lastEdit.index === nextEdit.index - 1 && - lastEdit.value.type === 'value' && nextEdit.value.type === 'value' && - lastEdit.elemId === lastEdit.opId && nextEdit.elemId === nextEdit.opId && - opIdDelta(lastEdit.elemId, nextEdit.elemId, 1)) { - lastEdit.action = 'multi-insert' - lastEdit.values = [lastEdit.value.value, nextEdit.value.value] - delete lastEdit.value - delete lastEdit.opId - - } else if (lastEdit.action === 'multi-insert' && nextEdit.action === 'insert' && - lastEdit.index + lastEdit.values.length === nextEdit.index && - nextEdit.value.type === 'value' && nextEdit.elemId === nextEdit.opId && - opIdDelta(lastEdit.elemId, nextEdit.elemId, lastEdit.values.length)) { - lastEdit.values.push(nextEdit.value.value) - - } else if (lastEdit.action === 'remove' && nextEdit.action === 'remove' && - lastEdit.index === nextEdit.index) { - lastEdit.count += nextEdit.count - - } else { - existingEdits.push(nextEdit) - } -} - -/** - * Returns true if the two given operation IDs have the same actor ID, and the counter of `id2` is - * exactly `delta` greater than the counter of `id1`. - */ -function opIdDelta(id1, id2, delta = 1) { - const parsed1 = parseOpId(id1), parsed2 = parseOpId(id2) - return parsed1.actorId === parsed2.actorId && parsed1.counter + delta === parsed2.counter -} - -/** - * Parses the document (in compressed binary format) given as `documentBuffer` - * and returns a patch that can be sent to the frontend to instantiate the - * current state of that document. 
- */ -export function constructPatch(documentBuffer) { - const { opsColumns, actorIds } = decodeDocumentHeader(documentBuffer) - const col : any = makeDecoders(opsColumns, DOC_OPS_COLUMNS).reduce( - (acc, col: any) => Object.assign(acc, {[col.columnName]: col.decoder}), {}) - - const objects = {_root: {objectId: '_root', type: 'map', props: {}}} - let property : any = null - - while (!col.idActor.done) { - const opId = `${col.idCtr.readValue()}@${actorIds[col.idActor.readValue()]}` - const action = col.action.readValue(), actionName = ACTIONS[action] - if (action % 2 === 0) { // even-numbered actions are object creation - const type = OBJECT_TYPE[actionName] || 'unknown' - if (type === 'list' || type === 'text') { - objects[opId] = {objectId: opId, type, edits: []} - } else { - objects[opId] = {objectId: opId, type, props: {}} - } - } - - const objActor = col.objActor.readValue(), objCtr = col.objCtr.readValue() - const objId = objActor === null ? '_root' : `${objCtr}@${actorIds[objActor]}` - const obj = objects[objId] - if (!obj) throw new RangeError(`Operation for nonexistent object: ${objId}`) - - const keyActor = col.keyActor.readValue(), keyCtr = col.keyCtr.readValue() - const keyStr = col.keyStr.readValue(), insert = !!col.insert.readValue() - const chldActor = col.chldActor.readValue(), chldCtr = col.chldCtr.readValue() - const childId = chldActor === null ? 
null : `${chldCtr}@${actorIds[chldActor]}` - const sizeTag = col.valLen.readValue() - const rawValue = col.valRaw.readRawBytes(sizeTag >> 4) - const value = decodeValue(sizeTag, rawValue) - const succNum = col.succNum.readValue() - const succ : string[] = [] - for (let i = 0; i < succNum; i++) { - succ.push(`${col.succCtr.readValue()}@${actorIds[col.succActor.readValue()]}`) - } - - if (!actionName || obj.type === 'unknown') continue - - let key - if (obj.type === 'list' || obj.type === 'text') { - if (keyCtr === null || (keyCtr === 0 && !insert)) { - throw new RangeError(`Operation ${opId} on ${obj.type} object has no key`) - } - key = insert ? opId : `${keyCtr}@${actorIds[keyActor]}` - } else { - if (keyStr === null) { - throw new RangeError(`Operation ${opId} on ${obj.type} object has no key`) - } - key = keyStr - } - - if (!property || property.objId !== objId || property.key !== key) { - let index = 0 - if (property) { - index = property.index - if (addPatchProperty(objects, property)) index += 1 - if (property.objId !== objId) index = 0 - } - property = {objId, key, index, ops: []} - } - property.ops.push({opId, actionName, value, childId, succ}) - } - - if (property) addPatchProperty(objects, property) - condenseEdits(objects._root) - return objects._root -} - module.exports = { - COLUMN_TYPE, VALUE_TYPE, ACTIONS, OBJECT_TYPE, DOC_OPS_COLUMNS, CHANGE_COLUMNS, + COLUMN_TYPE, VALUE_TYPE, ACTIONS, OBJECT_TYPE, DOC_OPS_COLUMNS, CHANGE_COLUMNS, DOCUMENT_COLUMNS, encoderByColumnId, decoderByColumnId, makeDecoders, decodeValue, splitContainers, encodeChange, decodeChangeColumns, decodeChange, decodeChangeMeta, decodeChanges, - decodeDocumentHeader, encodeDocument, decodeDocument, - getChangeChecksum, appendEdit, constructPatch + encodeDocumentHeader, decodeDocumentHeader, decodeDocument } diff --git a/automerge-js/src/common.ts b/automerge-js/test/legacy/common.js similarity index 66% rename from automerge-js/src/common.ts rename to 
automerge-js/test/legacy/common.js index 9b5a7299..02e91392 100644 --- a/automerge-js/src/common.ts +++ b/automerge-js/test/legacy/common.js @@ -1,6 +1,4 @@ -import { UnknownObject } from './types'; - -export function isObject(obj: unknown) : obj is UnknownObject { +function isObject(obj) { return typeof obj === 'object' && obj !== null } @@ -8,28 +6,20 @@ export function isObject(obj: unknown) : obj is UnknownObject { * Returns a shallow copy of the object `obj`. Faster than `Object.assign({}, obj)`. * https://jsperf.com/cloning-large-objects/1 */ -/* -export function copyObject(obj: T) : T { - if (!isObject(obj)) throw RangeError(`Cannot copy object '${obj}'`) //return {} - const copy : UnknownObject = {} - for (const key of Object.keys(obj)) { +function copyObject(obj) { + if (!isObject(obj)) return {} + let copy = {} + for (let key of Object.keys(obj)) { copy[key] = obj[key] } return copy } -*/ /** * Takes a string in the form that is used to identify operations (a counter concatenated * with an actor ID, separated by an `@` sign) and returns an object `{counter, actorId}`. */ - -interface OpIdObj { - counter: number, - actorId: string -} - -export function parseOpId(opId: string) : OpIdObj { +function parseOpId(opId) { const match = /^(\d+)@(.*)$/.exec(opId || '') if (!match) { throw new RangeError(`Not a valid opId: ${opId}`) @@ -40,7 +30,7 @@ export function parseOpId(opId: string) : OpIdObj { /** * Returns true if the two byte arrays contain the same data, false if not. */ -export function equalBytes(array1: Uint8Array, array2: Uint8Array) : boolean { +function equalBytes(array1, array2) { if (!(array1 instanceof Uint8Array) || !(array2 instanceof Uint8Array)) { throw new TypeError('equalBytes can only compare Uint8Arrays') } @@ -51,3 +41,15 @@ export function equalBytes(array1: Uint8Array, array2: Uint8Array) : boolean { return true } +/** + * Creates an array containing the value `null` repeated `length` times. 
+ */ +function createArrayOfNulls(length) { + const array = new Array(length) + for (let i = 0; i < length; i++) array[i] = null + return array +} + +module.exports = { + isObject, copyObject, parseOpId, equalBytes, createArrayOfNulls +} diff --git a/automerge-js/src/encoding.ts b/automerge-js/test/legacy/encoding.js similarity index 96% rename from automerge-js/src/encoding.ts rename to automerge-js/test/legacy/encoding.js index dac447ec..92b62df6 100644 --- a/automerge-js/src/encoding.ts +++ b/automerge-js/test/legacy/encoding.js @@ -8,29 +8,28 @@ const utf8encoder = new TextEncoder() const utf8decoder = new TextDecoder('utf-8') -export function stringToUtf8(s: string) : BufferSource { - return utf8encoder.encode(s) +function stringToUtf8(string) { + return utf8encoder.encode(string) } -export function utf8ToString(buffer: BufferSource) : string { +function utf8ToString(buffer) { return utf8decoder.decode(buffer) } /** * Converts a string consisting of hexadecimal digits into an Uint8Array. */ -export function hexStringToBytes(value: string) : Uint8Array { +function hexStringToBytes(value) { if (typeof value !== 'string') { throw new TypeError('value is not a string') } if (!/^([0-9a-f][0-9a-f])*$/.test(value)) { throw new RangeError('value is not hexadecimal') } - const match = value.match(/../g) - if (match === null) { + if (value === '') { return new Uint8Array(0) } else { - return new Uint8Array(match.map(b => parseInt(b, 16))) + return new Uint8Array(value.match(/../g).map(b => parseInt(b, 16))) } } @@ -43,9 +42,8 @@ for (let i = 0; i < 256; i++) { /** * Converts a Uint8Array into the equivalent hexadecimal string. 
*/ -export function bytesToHexString(bytes: Uint8Array) : string { - let hex = '' - const len = bytes.byteLength +function bytesToHexString(bytes) { + let hex = '', len = bytes.byteLength for (let i = 0; i < len; i++) { hex += BYTE_TO_HEX[bytes[i]] } @@ -56,10 +54,7 @@ export function bytesToHexString(bytes: Uint8Array) : string { * Wrapper around an Uint8Array that allows values to be appended to the buffer, * and that automatically grows the buffer when space runs out. */ -export class Encoder { - buf: Uint8Array; - offset: number; - +class Encoder { constructor() { this.buf = new Uint8Array(16) this.offset = 0 @@ -287,7 +282,6 @@ export class Encoder { * the buffer constructed by this Encoder. */ finish() { - return } } @@ -296,10 +290,7 @@ export class Encoder { * the current decoding position, and allows values to be incrementally read by * decoding the bytes at the current position. */ -export class Decoder { - buf: Uint8Array; - offset: number; - +class Decoder { constructor(buffer) { if (!(buffer instanceof Uint8Array)) { throw new TypeError(`Not a byte array: ${buffer}`) @@ -564,13 +555,7 @@ export class Decoder { * After one of these three has completed, the process repeats, starting again * with a repetition count, until we reach the end of the buffer. */ -export class RLEEncoder extends Encoder { - type: any - state: string - lastValue: any - count: number - literal: any - +class RLEEncoder extends Encoder { constructor(type) { super() this.type = type @@ -679,7 +664,7 @@ export class RLEEncoder extends Encoder { * Returns an object of the form `{nonNullValues, sum}` where `nonNullValues` is the number of * non-null values copied, and `sum` is the sum (only if the `sumValues` option is set). 
*/ - copyFrom(decoder, options: any = {}) : any { + copyFrom(decoder, options = {}) { const { count, sumValues, sumShift } = options if (!(decoder instanceof RLEDecoder) || (decoder.type !== this.type)) { throw new TypeError('incompatible type of decoder') @@ -722,7 +707,7 @@ export class RLEEncoder extends Encoder { nonNullValues += numValues for (let i = 0; i < numValues; i++) { if (decoder.done) throw new RangeError('incomplete literal') - const value : any = decoder.readRawValue() + const value = decoder.readRawValue() if (value === decoder.lastValue) throw new RangeError('Repetition of values is not allowed in literal') decoder.lastValue = value this._appendValue(value) @@ -763,7 +748,7 @@ export class RLEEncoder extends Encoder { this.appendRawValue(this.lastValue) } else if (this.state === 'literal') { this.appendInt53(-this.literal.length) - for (const v of this.literal) this.appendRawValue(v) + for (let v of this.literal) this.appendRawValue(v) } else if (this.state === 'nulls') { this.appendInt32(0) this.appendUint53(this.count) @@ -801,12 +786,7 @@ export class RLEEncoder extends Encoder { * Counterpart to RLEEncoder: reads values from an RLE-compressed sequence, * returning nulls and repeated values as required. */ -export class RLEDecoder extends Decoder { - type: any; - lastValue: any; - count: number; - state: any; - +class RLEDecoder extends Decoder { constructor(type, buffer) { super(buffer) this.type = type @@ -949,9 +929,7 @@ export class RLEDecoder extends Decoder { * * Null values are also allowed, as with RLEEncoder. */ -export class DeltaEncoder extends RLEEncoder { - absoluteValue: number - +class DeltaEncoder extends RLEEncoder { constructor() { super('int') this.absoluteValue = 0 @@ -977,7 +955,7 @@ export class DeltaEncoder extends RLEEncoder { * contain the key `count`, indicating the number of values to copy. If not specified, copies * all remaining values in the decoder. 
*/ - copyFrom(decoder, options: any = {}) : any { + copyFrom(decoder, options = {}) { if (options.sumValues) { throw new RangeError('unsupported options for DeltaEncoder.copyFrom()') } @@ -991,8 +969,7 @@ export class DeltaEncoder extends RLEEncoder { // Copy any null values, and the first non-null value, so that appendValue() computes the // difference between the encoder's last value and the decoder's first (absolute) value. - const value = decoder.readValue() - let nulls = 0 + let value = decoder.readValue(), nulls = 0 this.appendValue(value) if (value === null) { nulls = decoder.count + 1 @@ -1024,9 +1001,7 @@ export class DeltaEncoder extends RLEEncoder { * Counterpart to DeltaEncoder: reads values from a delta-compressed sequence of * numbers (may include null values). */ -export class DeltaDecoder extends RLEDecoder { - absoluteValue : number; - +class DeltaDecoder extends RLEDecoder { constructor(buffer) { super('int', buffer) this.absoluteValue = 0 @@ -1083,10 +1058,7 @@ export class DeltaDecoder extends RLEDecoder { * only encode the repetition count but not the actual value, since the values * just alternate between false and true (starting with false). */ -export class BooleanEncoder extends Encoder { - lastValue: boolean; - count: number; - +class BooleanEncoder extends Encoder { constructor() { super() this.lastValue = false @@ -1116,7 +1088,7 @@ export class BooleanEncoder extends Encoder { * contain the key `count`, indicating the number of values to copy. If not specified, copies * all remaining values in the decoder. */ - copyFrom(decoder, options: any = {}) : any { + copyFrom(decoder, options = {}) { if (!(decoder instanceof BooleanDecoder)) { throw new TypeError('incompatible type of decoder') } @@ -1166,11 +1138,7 @@ export class BooleanEncoder extends Encoder { * Counterpart to BooleanEncoder: reads boolean values from a runlength-encoded * sequence. 
*/ -export class BooleanDecoder extends Decoder { - lastValue: boolean; - firstRun: boolean; - count: number; - +class BooleanDecoder extends Decoder { constructor(buffer) { super(buffer) this.lastValue = true // is negated the first time we read a count @@ -1235,3 +1203,7 @@ export class BooleanDecoder extends Decoder { } } +module.exports = { + stringToUtf8, utf8ToString, hexStringToBytes, bytesToHexString, + Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder +} diff --git a/automerge-js/test/legacy/sync.js b/automerge-js/test/legacy/sync.js new file mode 100644 index 00000000..3bb1571d --- /dev/null +++ b/automerge-js/test/legacy/sync.js @@ -0,0 +1,480 @@ +/** + * Implementation of the data synchronisation protocol that brings a local and a remote document + * into the same state. This is typically used when two nodes have been disconnected for some time, + * and need to exchange any changes that happened while they were disconnected. The two nodes that + * are syncing could be client and server, or server and client, or two peers with symmetric roles. + * + * The protocol is based on this paper: Martin Kleppmann and Heidi Howard. Byzantine Eventual + * Consistency and the Fundamental Limits of Peer-to-Peer Databases. https://arxiv.org/abs/2012.00472 + * + * The protocol assumes that every time a node successfully syncs with another node, it remembers + * the current heads (as returned by `Backend.getHeads()`) after the last sync with that node. The + * next time we try to sync with the same node, we start from the assumption that the other node's + * document version is no older than the outcome of the last sync, so we only need to exchange any + * changes that are more recent than the last sync. 
This assumption may not be true if the other + * node did not correctly persist its state (perhaps it crashed before writing the result of the + * last sync to disk), and we fall back to sending the entire document in this case. + */ + +const Backend = null //require('./backend') +const { hexStringToBytes, bytesToHexString, Encoder, Decoder } = require('./encoding') +const { decodeChangeMeta } = require('./columnar') +const { copyObject } = require('./common') + +const HASH_SIZE = 32 // 256 bits = 32 bytes +const MESSAGE_TYPE_SYNC = 0x42 // first byte of a sync message, for identification +const PEER_STATE_TYPE = 0x43 // first byte of an encoded peer state, for identification + +// These constants correspond to a 1% false positive rate. The values can be changed without +// breaking compatibility of the network protocol, since the parameters used for a particular +// Bloom filter are encoded in the wire format. +const BITS_PER_ENTRY = 10, NUM_PROBES = 7 + +/** + * A Bloom filter implementation that can be serialised to a byte array for transmission + * over a network. The entries that are added are assumed to already be SHA-256 hashes, + * so this implementation does not perform its own hashing. 
+ */ +class BloomFilter { + constructor (arg) { + if (Array.isArray(arg)) { + // arg is an array of SHA256 hashes in hexadecimal encoding + this.numEntries = arg.length + this.numBitsPerEntry = BITS_PER_ENTRY + this.numProbes = NUM_PROBES + this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) + for (let hash of arg) this.addHash(hash) + } else if (arg instanceof Uint8Array) { + if (arg.byteLength === 0) { + this.numEntries = 0 + this.numBitsPerEntry = 0 + this.numProbes = 0 + this.bits = arg + } else { + const decoder = new Decoder(arg) + this.numEntries = decoder.readUint32() + this.numBitsPerEntry = decoder.readUint32() + this.numProbes = decoder.readUint32() + this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) + } + } else { + throw new TypeError('invalid argument') + } + } + + /** + * Returns the Bloom filter state, encoded as a byte array. + */ + get bytes() { + if (this.numEntries === 0) return new Uint8Array(0) + const encoder = new Encoder() + encoder.appendUint32(this.numEntries) + encoder.appendUint32(this.numBitsPerEntry) + encoder.appendUint32(this.numProbes) + encoder.appendRawBytes(this.bits) + return encoder.buffer + } + + /** + * Given a SHA-256 hash (as hex string), returns an array of probe indexes indicating which bits + * in the Bloom filter need to be tested or set for this particular entry. We do this by + * interpreting the first 12 bytes of the hash as three little-endian 32-bit unsigned integers, + * and then using triple hashing to compute the probe indexes. The algorithm comes from: + * + * Peter C. Dillinger and Panagiotis Manolios. Bloom Filters in Probabilistic Verification. + * 5th International Conference on Formal Methods in Computer-Aided Design (FMCAD), November 2004. 
+ * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf + */ + getProbes(hash) { + const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength + if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`) + // on the next three lines, the right shift means interpret value as unsigned + let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo + let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo + let z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo + const probes = [x] + for (let i = 1; i < this.numProbes; i++) { + x = (x + y) % modulo + y = (y + z) % modulo + probes.push(x) + } + return probes + } + + /** + * Sets the Bloom filter bits corresponding to a given SHA-256 hash (given as hex string). + */ + addHash(hash) { + for (let probe of this.getProbes(hash)) { + this.bits[probe >>> 3] |= 1 << (probe & 7) + } + } + + /** + * Tests whether a given SHA-256 hash (given as hex string) is contained in the Bloom filter. + */ + containsHash(hash) { + if (this.numEntries === 0) return false + for (let probe of this.getProbes(hash)) { + if ((this.bits[probe >>> 3] & (1 << (probe & 7))) === 0) { + return false + } + } + return true + } +} + +/** + * Encodes a sorted array of SHA-256 hashes (as hexadecimal strings) into a byte array. 
+ */ +function encodeHashes(encoder, hashes) { + if (!Array.isArray(hashes)) throw new TypeError('hashes must be an array') + encoder.appendUint32(hashes.length) + for (let i = 0; i < hashes.length; i++) { + if (i > 0 && hashes[i - 1] >= hashes[i]) throw new RangeError('hashes must be sorted') + const bytes = hexStringToBytes(hashes[i]) + if (bytes.byteLength !== HASH_SIZE) throw new TypeError('heads hashes must be 256 bits') + encoder.appendRawBytes(bytes) + } +} + +/** + * Decodes a byte array in the format returned by encodeHashes(), and returns its content as an + * array of hex strings. + */ +function decodeHashes(decoder) { + let length = decoder.readUint32(), hashes = [] + for (let i = 0; i < length; i++) { + hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE))) + } + return hashes +} + +/** + * Takes a sync message of the form `{heads, need, have, changes}` and encodes it as a byte array for + * transmission. + */ +function encodeSyncMessage(message) { + const encoder = new Encoder() + encoder.appendByte(MESSAGE_TYPE_SYNC) + encodeHashes(encoder, message.heads) + encodeHashes(encoder, message.need) + encoder.appendUint32(message.have.length) + for (let have of message.have) { + encodeHashes(encoder, have.lastSync) + encoder.appendPrefixedBytes(have.bloom) + } + encoder.appendUint32(message.changes.length) + for (let change of message.changes) { + encoder.appendPrefixedBytes(change) + } + return encoder.buffer +} + +/** + * Takes a binary-encoded sync message and decodes it into the form `{heads, need, have, changes}`. 
+ */ +function decodeSyncMessage(bytes) { + const decoder = new Decoder(bytes) + const messageType = decoder.readByte() + if (messageType !== MESSAGE_TYPE_SYNC) { + throw new RangeError(`Unexpected message type: ${messageType}`) + } + const heads = decodeHashes(decoder) + const need = decodeHashes(decoder) + const haveCount = decoder.readUint32() + let message = {heads, need, have: [], changes: []} + for (let i = 0; i < haveCount; i++) { + const lastSync = decodeHashes(decoder) + const bloom = decoder.readPrefixedBytes(decoder) + message.have.push({lastSync, bloom}) + } + const changeCount = decoder.readUint32() + for (let i = 0; i < changeCount; i++) { + const change = decoder.readPrefixedBytes() + message.changes.push(change) + } + // Ignore any trailing bytes -- they can be used for extensions by future versions of the protocol + return message +} + +/** + * Takes a SyncState and encodes as a byte array those parts of the state that should persist across + * an application restart or disconnect and reconnect. The ephemeral parts of the state that should + * be cleared on reconnect are not encoded. + */ +function encodeSyncState(syncState) { + const encoder = new Encoder() + encoder.appendByte(PEER_STATE_TYPE) + encodeHashes(encoder, syncState.sharedHeads) + return encoder.buffer +} + +/** + * Takes a persisted peer state as encoded by `encodeSyncState` and decodes it into a SyncState + * object. The parts of the peer state that were not encoded are initialised with default values. + */ +function decodeSyncState(bytes) { + const decoder = new Decoder(bytes) + const recordType = decoder.readByte() + if (recordType !== PEER_STATE_TYPE) { + throw new RangeError(`Unexpected record type: ${recordType}`) + } + const sharedHeads = decodeHashes(decoder) + return Object.assign(initSyncState(), { sharedHeads }) +} + +/** + * Constructs a Bloom filter containing all changes that are not one of the hashes in + * `lastSync` or its transitive dependencies. 
In other words, the filter contains those + * changes that have been applied since the version identified by `lastSync`. Returns + * an object of the form `{lastSync, bloom}` as required for the `have` field of a sync + * message. + */ +function makeBloomFilter(backend, lastSync) { + const newChanges = Backend.getChanges(backend, lastSync) + const hashes = newChanges.map(change => decodeChangeMeta(change, true).hash) + return {lastSync, bloom: new BloomFilter(hashes).bytes} +} + +/** + * Call this function when a sync message is received from another node. The `message` argument + * needs to already have been decoded using `decodeSyncMessage()`. This function determines the + * changes that we need to send to the other node in response. Returns an array of changes (as + * byte arrays). + */ +function getChangesToSend(backend, have, need) { + if (have.length === 0) { + return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined) + } + + let lastSyncHashes = {}, bloomFilters = [] + for (let h of have) { + for (let hash of h.lastSync) lastSyncHashes[hash] = true + bloomFilters.push(new BloomFilter(h.bloom)) + } + + // Get all changes that were added since the last sync + const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)) + .map(change => decodeChangeMeta(change, true)) + + let changeHashes = {}, dependents = {}, hashesToSend = {} + for (let change of changes) { + changeHashes[change.hash] = true + + // For each change, make a list of changes that depend on it + for (let dep of change.deps) { + if (!dependents[dep]) dependents[dep] = [] + dependents[dep].push(change.hash) + } + + // Exclude any change hashes contained in one or more Bloom filters + if (bloomFilters.every(bloom => !bloom.containsHash(change.hash))) { + hashesToSend[change.hash] = true + } + } + + // Include any changes that depend on a Bloom-negative change + let stack = Object.keys(hashesToSend) + while (stack.length > 0) { + const hash = 
stack.pop() + if (dependents[hash]) { + for (let dep of dependents[hash]) { + if (!hashesToSend[dep]) { + hashesToSend[dep] = true + stack.push(dep) + } + } + } + } + + // Include any explicitly requested changes + let changesToSend = [] + for (let hash of need) { + hashesToSend[hash] = true + if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()? + const change = Backend.getChangeByHash(backend, hash) + if (change) changesToSend.push(change) + } + } + + // Return changes in the order they were returned by getMissingChanges() + for (let change of changes) { + if (hashesToSend[change.hash]) changesToSend.push(change.change) + } + return changesToSend +} + +function initSyncState() { + return { + sharedHeads: [], + lastSentHeads: [], + theirHeads: null, + theirNeed: null, + theirHave: null, + sentHashes: {}, + } +} + +function compareArrays(a, b) { + return (a.length === b.length) && a.every((v, i) => v === b[i]) +} + +/** + * Given a backend and what we believe to be the state of our peer, generate a message which tells + * them about we have and includes any changes we believe they need + */ +function generateSyncMessage(backend, syncState) { + if (!backend) { + throw new Error("generateSyncMessage called with no Automerge document") + } + if (!syncState) { + throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()") + } + + let { sharedHeads, lastSentHeads, theirHeads, theirNeed, theirHave, sentHashes } = syncState + const ourHeads = Backend.getHeads(backend) + + // Hashes to explicitly request from the remote peer: any missing dependencies of unapplied + // changes, and any of the remote peer's heads that we don't know about + const ourNeed = Backend.getMissingDeps(backend, theirHeads || []) + + // There are two reasons why ourNeed may be nonempty: 1. we might be missing dependencies due to + // Bloom filter false positives; 2. 
we might be missing heads that the other peer mentioned + // because they (intentionally) only sent us a subset of changes. In case 1, we leave the `have` + // field of the message empty because we just want to fill in the missing dependencies for now. + // In case 2, or if ourNeed is empty, we send a Bloom filter to request any unsent changes. + let ourHave = [] + if (!theirHeads || ourNeed.every(hash => theirHeads.includes(hash))) { + ourHave = [makeBloomFilter(backend, sharedHeads)] + } + + // Fall back to a full re-sync if the sender's last sync state includes hashes + // that we don't know. This could happen if we crashed after the last sync and + // failed to persist changes that the other node already sent us. + if (theirHave && theirHave.length > 0) { + const lastSync = theirHave[0].lastSync + if (!lastSync.every(hash => Backend.getChangeByHash(backend, hash))) { + // we need to queue them to send us a fresh sync message, the one they sent is uninteligible so we don't know what they need + const resetMsg = {heads: ourHeads, need: [], have: [{ lastSync: [], bloom: new Uint8Array(0) }], changes: []} + return [syncState, encodeSyncMessage(resetMsg)] + } + } + + // XXX: we should limit ourselves to only sending a subset of all the messages, probably limited by a total message size + // these changes should ideally be RLE encoded but we haven't implemented that yet. + let changesToSend = Array.isArray(theirHave) && Array.isArray(theirNeed) ? getChangesToSend(backend, theirHave, theirNeed) : [] + + // If the heads are equal, we're in sync and don't need to do anything further + const headsUnchanged = Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads) + const headsEqual = Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads) + if (headsUnchanged && headsEqual && changesToSend.length === 0) { + // no need to send a sync message if we know we're synced! 
+ return [syncState, null] + } + + // TODO: this recomputes the SHA-256 hash of each change; we should restructure this to avoid the + // unnecessary recomputation + changesToSend = changesToSend.filter(change => !sentHashes[decodeChangeMeta(change, true).hash]) + + // Regular response to a sync message: send any changes that the other node + // doesn't have. We leave the "have" field empty because the previous message + // generated by `syncStart` already indicated what changes we have. + const syncMessage = {heads: ourHeads, have: ourHave, need: ourNeed, changes: changesToSend} + if (changesToSend.length > 0) { + sentHashes = copyObject(sentHashes) + for (const change of changesToSend) { + sentHashes[decodeChangeMeta(change, true).hash] = true + } + } + + syncState = Object.assign({}, syncState, {lastSentHeads: ourHeads, sentHashes}) + return [syncState, encodeSyncMessage(syncMessage)] +} + +/** + * Computes the heads that we share with a peer after we have just received some changes from that + * peer and applied them. This may not be sufficient to bring our heads in sync with the other + * peer's heads, since they may have only sent us a subset of their outstanding changes. + * + * `myOldHeads` are the local heads before the most recent changes were applied, `myNewHeads` are + * the local heads after those changes were applied, and `ourOldSharedHeads` is the previous set of + * shared heads. Applying the changes will have replaced some heads with others, but some heads may + * have remained unchanged (because they are for branches on which no changes have been added). Any + * such unchanged heads remain in the sharedHeads. Any sharedHeads that were replaced by applying + * changes are also replaced as sharedHeads. This is safe because if we received some changes from + * another peer, that means that peer had those changes, and therefore we now both know about them. 
+ */ +function advanceHeads(myOldHeads, myNewHeads, ourOldSharedHeads) { + const newHeads = myNewHeads.filter((head) => !myOldHeads.includes(head)) + const commonHeads = ourOldSharedHeads.filter((head) => myNewHeads.includes(head)) + const advancedHeads = [...new Set([...newHeads, ...commonHeads])].sort() + return advancedHeads +} + + +/** + * Given a backend, a message message and the state of our peer, apply any changes, update what + * we believe about the peer, and (if there were applied changes) produce a patch for the frontend + */ +function receiveSyncMessage(backend, oldSyncState, binaryMessage) { + if (!backend) { + throw new Error("generateSyncMessage called with no Automerge document") + } + if (!oldSyncState) { + throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()") + } + + let { sharedHeads, lastSentHeads, sentHashes } = oldSyncState, patch = null + const message = decodeSyncMessage(binaryMessage) + const beforeHeads = Backend.getHeads(backend) + + // If we received changes, we try to apply them to the document. There may still be missing + // dependencies due to Bloom filter false positives, in which case the backend will enqueue the + // changes without applying them. The set of changes may also be incomplete if the sender decided + // to break a large set of changes into chunks. + if (message.changes.length > 0) { + [backend, patch] = Backend.applyChanges(backend, message.changes) + sharedHeads = advanceHeads(beforeHeads, Backend.getHeads(backend), sharedHeads) + } + + // If heads are equal, indicate we don't need to send a response message + if (message.changes.length === 0 && compareArrays(message.heads, beforeHeads)) { + lastSentHeads = message.heads + } + + // If all of the remote heads are known to us, that means either our heads are equal, or we are + // ahead of the remote peer. In this case, take the remote heads to be our shared heads. 
+ const knownHeads = message.heads.filter(head => Backend.getChangeByHash(backend, head)) + if (knownHeads.length === message.heads.length) { + sharedHeads = message.heads + // If the remote peer has lost all its data, reset our state to perform a full resync + if (message.heads.length === 0) { + lastSentHeads = [] + sentHashes = [] + } + } else { + // If some remote heads are unknown to us, we add all the remote heads we know to + // sharedHeads, but don't remove anything from sharedHeads. This might cause sharedHeads to + // contain some redundant hashes (where one hash is actually a transitive dependency of + // another), but this will be cleared up as soon as we know all the remote heads. + sharedHeads = [...new Set(knownHeads.concat(sharedHeads))].sort() + } + + const syncState = { + sharedHeads, // what we have in common to generate an efficient bloom filter + lastSentHeads, + theirHave: message.have, // the information we need to calculate the changes they need + theirHeads: message.heads, + theirNeed: message.need, + sentHashes + } + return [backend, syncState, patch] +} + +module.exports = { + receiveSyncMessage, generateSyncMessage, + encodeSyncMessage, decodeSyncMessage, + initSyncState, encodeSyncState, decodeSyncState, + BloomFilter // BloomFilter is a private API, exported only for testing purposes +} diff --git a/automerge-js/test/legacy_tests.ts b/automerge-js/test/legacy_tests.ts index 044b7eef..50cecbc4 100644 --- a/automerge-js/test/legacy_tests.ts +++ b/automerge-js/test/legacy_tests.ts @@ -1,7 +1,7 @@ import * as assert from 'assert' import * as Automerge from '../src' import { assertEqualsOneOf } from './helpers' -import { decodeChange } from '../src/columnar' +import { decodeChange } from './legacy/columnar' import * as AutomergeWASM from "automerge-wasm" Automerge.use(AutomergeWASM) diff --git a/automerge-js/test/sync_test.ts b/automerge-js/test/sync_test.ts index 0118776c..7b1e52ef 100644 --- a/automerge-js/test/sync_test.ts +++ 
b/automerge-js/test/sync_test.ts @@ -1,7 +1,7 @@ import * as assert from 'assert' import * as Automerge from '../src' -import { BloomFilter } from '../src/bloom' -import { decodeChangeMeta } from '../src/columnar' +import { BloomFilter } from './legacy/sync' +import { decodeChangeMeta } from './legacy/columnar' import { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } from "../src" import * as AutomergeWASM from "automerge-wasm" diff --git a/automerge-js/test/text_test.ts b/automerge-js/test/text_test.ts index 51424c91..e55287ce 100644 --- a/automerge-js/test/text_test.ts +++ b/automerge-js/test/text_test.ts @@ -603,7 +603,8 @@ describe('Automerge.Text', () => { applyDeltaDocToAutomergeText(delta, doc) }) - assert.strictEqual(s2.text.join(''), 'Hello reader') + //assert.strictEqual(s2.text.join(''), 'Hello reader') + assert.strictEqual(s2.text.toString(), 'Hello reader') }) it('should apply an insert with control characters', () => { diff --git a/automerge-js/tsconfig.json b/automerge-js/tsconfig.json index 26fa7e8f..01500ed5 100644 --- a/automerge-js/tsconfig.json +++ b/automerge-js/tsconfig.json @@ -2,7 +2,7 @@ "compilerOptions": { "target": "es2016", "sourceMap": false, - "declaration": true, + "declaration": false, "resolveJsonModule": true, "module": "commonjs", "moduleResolution": "node", @@ -12,7 +12,7 @@ "strict": true, "noFallthroughCasesInSwitch": true, "skipLibCheck": true, - "outDir": "./dist/cjs" + "outDir": "./dist" }, "include": [ "src/**/*" ], "exclude": [ From df8cae8a2be9a5796f94b82d3d49d1c90a9e714c Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 23 May 2022 19:25:23 +0200 Subject: [PATCH 396/730] README --- automerge-js/README.md | 29 +++++++++++++++++++++++++---- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/automerge-js/README.md b/automerge-js/README.md index 3c5cde33..7b8da950 100644 --- a/automerge-js/README.md +++ b/automerge-js/README.md @@ -1,6 +1,27 @@ -## Todo +## Automerge JS 
+ +This is a reimplementation of Automerge as a JavaScript wrapper around the "automerge-wasm". + +This package is in alpha and feedback in welcome. + +The primary differences between using this package and "automerge" are as follows: + +1. The low level api needs to plugged in via the use function. The only current implementation of "automerge-wasm" but another could used in theory. + +```js +import * as Automerge from "automerge-js" +import * as wasm_api from "automerge-wasm" + +// browsers require an async wasm load - see automerge-wasm docs +Automerge.use(wasm_api) +``` + +2. There is no front-end back-end split, and no patch format or patch observer. These concepts don't make sense with the wasm implementation. + +3. The basic `Doc` object is now a Proxy object and will behave differently in a repl environment. + +4. The 'Text' class is currently very slow and needs to be re-worked. + +Beyond this please refer to the Automerge [README](http://github.com/automerge/automerge/) for further information. -1. write a readme -1. publish package -1. 
make sure the example code works with published packages From 587adf7418690dfd89ed4d8f411f8fb4b0d55fc7 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 24 May 2022 09:48:55 +0100 Subject: [PATCH 397/730] Add Eq to ObjType --- automerge/src/types.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/types.rs b/automerge/src/types.rs index d3230ec3..1c67afe2 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -132,7 +132,7 @@ impl fmt::Display for ActorId { } } -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Copy, Hash)] +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Copy, Hash)] #[serde(rename_all = "camelCase", untagged)] pub enum ObjType { Map, From dae6509e13a96b513099f9c1c3f4dc57008e7658 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 26 May 2022 08:59:43 +0100 Subject: [PATCH 398/730] Update autocommit's apply_changes to take an iterator --- automerge/src/autocommit.rs | 9 ++++++--- automerge/src/automerge/tests.rs | 2 +- automerge/tests/test.rs | 2 +- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 22efd155..86601aa5 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -117,14 +117,17 @@ impl AutoCommit { self.doc.load_incremental_with(data, options) } - pub fn apply_changes(&mut self, changes: Vec) -> Result<(), AutomergeError> { + pub fn apply_changes( + &mut self, + changes: impl IntoIterator, + ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); self.doc.apply_changes(changes) } - pub fn apply_changes_with( + pub fn apply_changes_with, Obs: OpObserver>( &mut self, - changes: Vec, + changes: I, options: ApplyOptions<'_, Obs>, ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); diff --git a/automerge/src/automerge/tests.rs b/automerge/src/automerge/tests.rs index dc4204e1..94f06099 100644 --- a/automerge/src/automerge/tests.rs +++ 
b/automerge/src/automerge/tests.rs @@ -1473,7 +1473,7 @@ fn observe_counter_change_application() { doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); doc.increment(ROOT, "counter", 2).unwrap(); doc.increment(ROOT, "counter", 5).unwrap(); - let changes = doc.get_changes(&[]).unwrap().into_iter().cloned().collect(); + let changes = doc.get_changes(&[]).unwrap().into_iter().cloned(); let mut new_doc = AutoCommit::new(); let mut observer = VecOpObserver::default(); diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index f13bcd2b..8124ec12 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -938,7 +938,7 @@ fn observe_counter_change_application() { doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); doc.increment(ROOT, "counter", 2).unwrap(); doc.increment(ROOT, "counter", 5).unwrap(); - let changes = doc.get_changes(&[]).unwrap().into_iter().cloned().collect(); + let changes = doc.get_changes(&[]).unwrap().into_iter().cloned(); let mut doc = AutoCommit::new(); let mut observer = VecOpObserver::default(); From 03289510d60bc2539f6300853615a1fb74c7994a Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 20 May 2022 10:14:04 +0100 Subject: [PATCH 399/730] Remove cloning their_have in sync --- automerge/src/sync.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 99961397..ff604ccf 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -58,7 +58,7 @@ impl Automerge { sync_state.their_have.as_ref(), sync_state.their_need.as_ref(), ) { - self.get_changes_to_send(their_have.clone(), their_need) + self.get_changes_to_send(their_have, their_need) .expect("Should have only used hashes that are in the document") } else { Vec::new() @@ -176,7 +176,7 @@ impl Automerge { fn get_changes_to_send( &self, - have: Vec, + have: &[Have], need: &[ChangeHash], ) -> Result, AutomergeError> { if have.is_empty() { @@ -195,7 +195,7 @@ impl Automerge { } 
bloom_filters.push(bloom); } - let last_sync_hashes = last_sync_hashes.into_iter().collect::>(); + let last_sync_hashes = last_sync_hashes.into_iter().copied().collect::>(); let changes = self.get_changes(&last_sync_hashes)?; From 97a5144d59c1146a8cd2a93de583f8e4accfe553 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 20 May 2022 10:22:13 +0100 Subject: [PATCH 400/730] Reduce the amount of shuffling data for changes_to_send --- automerge/src/sync.rs | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index ff604ccf..8301a990 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -54,7 +54,7 @@ impl Automerge { } } - let mut changes_to_send = if let (Some(their_have), Some(their_need)) = ( + let changes_to_send = if let (Some(their_have), Some(their_need)) = ( sync_state.their_have.as_ref(), sync_state.their_need.as_ref(), ) { @@ -76,8 +76,17 @@ impl Automerge { return None; } - // deduplicate the changes to send with those we have already sent - changes_to_send.retain(|change| !sync_state.sent_hashes.contains(&change.hash)); + // deduplicate the changes to send with those we have already sent and clone it now + let changes_to_send = changes_to_send + .into_iter() + .filter_map(|change| { + if !sync_state.sent_hashes.contains(&change.hash) { + Some(change.clone()) + } else { + None + } + }) + .collect::>(); sync_state.last_sent_heads = our_heads.clone(); sync_state @@ -88,7 +97,7 @@ impl Automerge { heads: our_heads, have: our_have, need: our_need, - changes: changes_to_send.into_iter().cloned().collect(), + changes: changes_to_send, }; Some(sync_message) From 03a635a92695ba1fe2b5af6364e6d48e007c1ff7 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 20 May 2022 11:43:58 +0100 Subject: [PATCH 401/730] Extend last_sync_hashes --- automerge/src/sync.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs 
index 8301a990..2e66c328 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -199,9 +199,7 @@ impl Automerge { for h in have { let Have { last_sync, bloom } = h; - for hash in last_sync { - last_sync_hashes.insert(hash); - } + last_sync_hashes.extend(last_sync); bloom_filters.push(bloom); } let last_sync_hashes = last_sync_hashes.into_iter().copied().collect::>(); From a569611d837f6028957ced94a37bf117d4791988 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Fri, 20 May 2022 11:44:11 +0100 Subject: [PATCH 402/730] Use clock_at for filter_changes --- automerge/src/automerge.rs | 73 ++++++++--------------------- automerge/src/clock.rs | 94 +++++++++++++++++++++++++++++++++++++- automerge/src/sync.rs | 2 +- 3 files changed, 114 insertions(+), 55 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 41ac9579..35552658 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1,4 +1,5 @@ -use std::collections::{BTreeSet, HashMap, HashSet, VecDeque}; +use std::cmp::Ordering; +use std::collections::{BTreeSet, HashMap, HashSet}; use std::fmt::Debug; use std::num::NonZeroU64; use std::ops::RangeBounds; @@ -769,62 +770,28 @@ impl Automerge { /// Filter the changes down to those that are not transitive dependencies of the heads. /// /// Thus a graph with these heads has not seen the remaining changes. - pub(crate) fn filter_changes(&self, heads: &[ChangeHash], changes: &mut BTreeSet) { - // Reduce the working set to find to those which we may be able to find. - // This filters out those hashes that are successors of or concurrent with all of the - // heads. - // This can help in avoiding traversing the entire graph back to the roots when we try to - // search for a hash we can know won't be found there. 
- let max_head_index = heads + pub(crate) fn filter_changes( + &self, + heads: &[ChangeHash], + changes: &mut BTreeSet, + ) -> Result<(), AutomergeError> { + let heads = heads .iter() - .map(|h| self.history_index.get(h).unwrap_or(&0)) - .max() - .unwrap_or(&0); - let mut may_find: HashSet = changes - .iter() - .filter(|hash| { - let change_index = self.history_index.get(hash).unwrap_or(&0); - change_index <= max_head_index - }) + .filter(|hash| self.history_index.contains_key(hash)) .copied() - .collect(); + .collect::>(); + let heads_clock = self.clock_at(&heads)?; - if may_find.is_empty() { - return; - } - - let mut queue: VecDeque<_> = heads.iter().collect(); - let mut seen = HashSet::new(); - while let Some(hash) = queue.pop_front() { - if seen.contains(hash) { - continue; - } - seen.insert(hash); - - let removed = may_find.remove(hash); - changes.remove(hash); - if may_find.is_empty() { - break; - } - - for dep in self - .history_index + // keep the hashes that are concurrent or after the heads + changes.retain(|hash| { + self.clocks .get(hash) - .and_then(|i| self.history.get(*i)) - .map(|c| c.deps.as_slice()) - .unwrap_or_default() - { - // if we just removed something from our hashes then it is likely there is more - // down here so do a quick inspection on the children. - // When we don't remove anything it is less likely that there is something down - // that chain so delay it. 
- if removed { - queue.push_front(dep); - } else { - queue.push_back(dep); - } - } - } + .unwrap() + .partial_cmp(&heads_clock) + .map_or(true, |o| o == Ordering::Greater) + }); + + Ok(()) } /// Get the hashes of the changes in this document that aren't transitive dependencies of the diff --git a/automerge/src/clock.rs b/automerge/src/clock.rs index bbe376fa..11890ffb 100644 --- a/automerge/src/clock.rs +++ b/automerge/src/clock.rs @@ -1,6 +1,6 @@ use crate::types::OpId; use fxhash::FxBuildHasher; -use std::collections::HashMap; +use std::{cmp::Ordering, collections::HashMap}; #[derive(Default, Debug, Clone, Copy, PartialEq)] pub(crate) struct ClockData { @@ -10,10 +10,38 @@ pub(crate) struct ClockData { pub(crate) seq: u64, } +// a clock for the same actor is ahead of another if it has a higher max_op +impl PartialOrd for ClockData { + fn partial_cmp(&self, other: &Self) -> Option { + self.max_op.partial_cmp(&other.max_op) + } +} + /// Vector clock mapping actor indices to the max op counter of the changes created by that actor. #[derive(Default, Debug, Clone, PartialEq)] pub(crate) struct Clock(HashMap); +// A general clock is greater if it has one element the other does not or has a counter higher than +// the other for a given actor. +// +// It is equal with another clock if it has the same entries everywhere. +// +// It is less than another clock otherwise. 
+impl PartialOrd for Clock { + fn partial_cmp(&self, other: &Self) -> Option { + if self.0 == other.0 { + Some(Ordering::Equal) + } else if self.is_greater(other) { + Some(Ordering::Greater) + } else if other.is_greater(self) { + Some(Ordering::Less) + } else { + // concurrent + None + } + } +} + impl Clock { pub(crate) fn new() -> Self { Clock(Default::default()) @@ -48,6 +76,40 @@ impl Clock { self.include(*actor, *data); } } + + fn is_greater(&self, other: &Self) -> bool { + let mut has_greater = false; + + let mut others_found = 0; + + for (actor, data) in &self.0 { + if let Some(other_data) = other.0.get(actor) { + if data < other_data { + // may be concurrent or less + return false; + } else if data > other_data { + has_greater = true; + } + others_found += 1; + } else { + // other doesn't have this so effectively has a greater element + has_greater = true; + } + } + + if has_greater { + // if they are equal then we have seen every key in the other clock and have at least + // one greater element so our clock is greater + // + // If they aren't the same then we haven't seen every key but have a greater element + // anyway so are concurrent + others_found == other.0.len() + } else { + // our clock doesn't have anything greater than the other clock so can't be greater but + // could still be concurrent + false + } + } } #[cfg(test)] @@ -72,4 +134,34 @@ mod tests { assert!(!clock.covers(&OpId(1, 3))); assert!(!clock.covers(&OpId(100, 3))); } + + #[test] + fn comparison() { + let mut base_clock = Clock::new(); + base_clock.include(1, ClockData { max_op: 1, seq: 1 }); + base_clock.include(2, ClockData { max_op: 1, seq: 1 }); + + let mut after_clock = base_clock.clone(); + after_clock.include(1, ClockData { max_op: 2, seq: 2 }); + + assert!(after_clock > base_clock); + assert!(base_clock < after_clock); + + assert!(base_clock == base_clock); + + let mut new_actor_clock = base_clock.clone(); + new_actor_clock.include(3, ClockData { max_op: 1, seq: 1 }); + + 
assert_eq!( + base_clock.partial_cmp(&new_actor_clock), + Some(Ordering::Less) + ); + assert_eq!( + new_actor_clock.partial_cmp(&base_clock), + Some(Ordering::Greater) + ); + + assert_eq!(after_clock.partial_cmp(&new_actor_clock), None); + assert_eq!(new_actor_clock.partial_cmp(&after_clock), None); + } } diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 2e66c328..1a3a4ed2 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -137,7 +137,7 @@ impl Automerge { } // trim down the sent hashes to those that we know they haven't seen - self.filter_changes(&message_heads, &mut sync_state.sent_hashes); + self.filter_changes(&message_heads, &mut sync_state.sent_hashes)?; if changes_is_empty && message_heads == before_heads { sync_state.last_sent_heads = message_heads.clone(); From 210c6d2045735cfcd04af1802ab6333d05a626ba Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 27 May 2022 10:23:51 -0700 Subject: [PATCH 403/730] move types to their own package --- automerge-js/examples/webpack/package.json | 3 +- automerge-js/package.json | 8 +- automerge-js/src/counter.ts | 2 +- automerge-js/src/index.ts | 17 +- automerge-js/src/low_level.ts | 9 +- automerge-js/src/proxies.ts | 5 +- automerge-js/src/text.ts | 2 +- automerge-js/src/types.ts | 6 - automerge-js/test/basic_test.ts | 1 + automerge-wasm/examples/webpack/package.json | 2 +- automerge-wasm/index.d.ts | 208 +----------------- automerge-wasm/package.json | 6 +- automerge-wasm/test/readme.ts | 3 +- automerge-wasm/tsconfig.json | 1 + automerge-wasm/types/LICENSE | 10 + .../types/automerge-types-0.1.1.tgz | Bin 0 -> 2566 bytes automerge-wasm/types/index.d.ts | 207 +++++++++++++++++ automerge-wasm/types/package.json | 18 ++ 18 files changed, 269 insertions(+), 239 deletions(-) create mode 100644 automerge-wasm/types/LICENSE create mode 100644 automerge-wasm/types/automerge-types-0.1.1.tgz create mode 100644 automerge-wasm/types/index.d.ts create mode 100644 automerge-wasm/types/package.json diff 
--git a/automerge-js/examples/webpack/package.json b/automerge-js/examples/webpack/package.json index 474d9904..fb74fb82 100644 --- a/automerge-js/examples/webpack/package.json +++ b/automerge-js/examples/webpack/package.json @@ -10,7 +10,8 @@ }, "author": "", "dependencies": { - "automerge-js": "file:automerge-js-0.1.0.tgz" + "automerge-js": "file:automerge-js-0.1.0.tgz", + "automerge-wasm": "file:automerge-wasm-0.1.3.tgz" }, "devDependencies": { "serve": "^13.0.2", diff --git a/automerge-js/package.json b/automerge-js/package.json index 728ff970..d2ba317f 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -28,7 +28,7 @@ "license": "MIT", "scripts": { "lint": "eslint src", - "build": "tsc", + "build": "tsc -p config/mjs.json", "test": "ts-mocha test/*.ts" }, "devDependencies": { @@ -40,12 +40,12 @@ "eslint": "^8.15.0", "mocha": "^10.0.0", "ts-mocha": "^10.0.0", + "fast-sha256": "^1.3.0", + "pako": "^2.0.4", "typescript": "^4.6.4" }, "dependencies": { - "automerge-wasm": "file:../automerge-wasm", - "fast-sha256": "^1.3.0", - "pako": "^2.0.4", + "automerge-types": "^0.1.1", "uuid": "^8.3" } } diff --git a/automerge-js/src/counter.ts b/automerge-js/src/counter.ts index 97372381..50c885d6 100644 --- a/automerge-js/src/counter.ts +++ b/automerge-js/src/counter.ts @@ -1,4 +1,4 @@ -import { Automerge, ObjID, Prop } from "./types" +import { Automerge, ObjID, Prop } from "automerge-types" /** * The most basic CRDT: an integer value that can be changed only by * incrementing and decrementing. 
Since addition of integers is commutative, diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 02f864b1..27d73377 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -4,13 +4,14 @@ export { uuid } from './uuid' import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" import { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" -import { Counter } from "./types" -export { Text, Counter, Int, Uint, Float64 } from "./types" +import { AutomergeValue, Counter } from "./types" +export { AutomergeValue, Text, Counter, Int, Uint, Float64 } from "./types" -import { ApiHandler, LowLevelApi, UseApi } from "./low_level" +import { API } from "automerge-types"; +import { ApiHandler, UseApi } from "./low_level" -import { ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "./types" -import { SyncState, SyncMessage, DecodedSyncMessage, AutomergeValue } from "./types" +import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "automerge-types" +import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-types" export type ChangeOptions = { message?: string, time?: number } @@ -23,7 +24,7 @@ export interface State { snapshot: T } -export function use(api: LowLevelApi) { +export function use(api: API) { UseApi(api) } @@ -371,8 +372,8 @@ export function dump(doc: Doc) { // FIXME - return T? 
export function toJS(doc: Doc) : MaterializeValue { - let state = _state(doc) - let heads = _heads(doc) + const state = _state(doc) + const heads = _heads(doc) return state.materialize("_root", heads) } diff --git a/automerge-js/src/low_level.ts b/automerge-js/src/low_level.ts index 5a1277fd..cf0695d9 100644 --- a/automerge-js/src/low_level.ts +++ b/automerge-js/src/low_level.ts @@ -1,16 +1,15 @@ -import { Automerge, Change, DecodedChange, Actor, SyncState, SyncMessage, JsSyncState, DecodedSyncMessage } from "automerge-wasm" -import { API as LowLevelApi } from "automerge-wasm" -export { API as LowLevelApi } from "automerge-wasm" +import { Automerge, Change, DecodedChange, Actor, SyncState, SyncMessage, JsSyncState, DecodedSyncMessage } from "automerge-types" +import { API } from "automerge-types" -export function UseApi(api: LowLevelApi) { +export function UseApi(api: API) { for (const k in api) { ApiHandler[k] = api[k] } } /* eslint-disable */ -export const ApiHandler : LowLevelApi = { +export const ApiHandler : API = { create(actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, load(data: Uint8Array, actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, encodeChange(change: DecodedChange): Change { throw new RangeError("Automerge.use() not called") }, diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index fbb044a6..50542716 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -1,10 +1,11 @@ -import { Automerge, Heads, ObjID } from "./types" +import { Automerge, Heads, ObjID } from "automerge-types" +import { Prop } from "automerge-types" +import { AutomergeValue, ScalarValue, MapValue, ListValue, TextValue } from "./types" import { Int, Uint, Float64 } from "./numbers" import { Counter, getWriteableCounter } from "./counter" import { Text } from "./text" import { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } from "./constants" -import { AutomergeValue, 
ScalarValue, MapValue, ListValue, TextValue, Prop } from "./types" function parseListIndex(key) { if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index 26f4a861..7aa2cac4 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -1,4 +1,4 @@ -import { Value } from "./types" +import { Value } from "automerge-types" export class Text { elems: Value[] diff --git a/automerge-js/src/types.ts b/automerge-js/src/types.ts index 5fb63abd..e75a3854 100644 --- a/automerge-js/src/types.ts +++ b/automerge-js/src/types.ts @@ -1,14 +1,8 @@ -export { Actor as ActorId, Value, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "automerge-wasm" -export { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm" - export { Text } from "./text" export { Counter } from "./counter" export { Int, Uint, Float64 } from "./numbers" -export type UnknownObject = Record; -export type Dictionary = Record; - import { Counter } from "./counter" export type AutomergeValue = ScalarValue | { [key: string]: AutomergeValue } | Array diff --git a/automerge-js/test/basic_test.ts b/automerge-js/test/basic_test.ts index 9508f3d3..058a9072 100644 --- a/automerge-js/test/basic_test.ts +++ b/automerge-js/test/basic_test.ts @@ -1,3 +1,4 @@ +import * as tt from "automerge-types" import * as assert from 'assert' import * as util from 'util' import * as Automerge from '../src' diff --git a/automerge-wasm/examples/webpack/package.json b/automerge-wasm/examples/webpack/package.json index 2ba64736..4abcd1c6 100644 --- a/automerge-wasm/examples/webpack/package.json +++ b/automerge-wasm/examples/webpack/package.json @@ -10,7 +10,7 @@ }, "author": "", "dependencies": { - "automerge-wasm": "file:automerge-wasm-0.1.3.tgz" + "automerge-wasm": "file:automerge-wasm-0.1.4.tgz" }, "devDependencies": { "serve": "^13.0.2", diff --git a/automerge-wasm/index.d.ts 
b/automerge-wasm/index.d.ts index cfecd081..28a4b5b4 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -1,207 +1 @@ - -export type Actor = string; -export type ObjID = string; -export type Change = Uint8Array; -export type SyncMessage = Uint8Array; -export type Prop = string | number; -export type Hash = string; -export type Heads = Hash[]; -export type Value = string | number | boolean | null | Date | Uint8Array -export type MaterializeValue = { [key:string]: MaterializeValue } | Array | Value -export type ObjType = string | Array | { [key: string]: ObjType | Value } -export type FullValue = - ["str", string] | - ["int", number] | - ["uint", number] | - ["f64", number] | - ["boolean", boolean] | - ["timestamp", Date] | - ["counter", number] | - ["bytes", Uint8Array] | - ["null", null] | - ["map", ObjID] | - ["list", ObjID] | - ["text", ObjID] | - ["table", ObjID] - -export type FullValueWithId = - ["str", string, ObjID ] | - ["int", number, ObjID ] | - ["uint", number, ObjID ] | - ["f64", number, ObjID ] | - ["boolean", boolean, ObjID ] | - ["timestamp", Date, ObjID ] | - ["counter", number, ObjID ] | - ["bytes", Uint8Array, ObjID ] | - ["null", null, ObjID ] | - ["map", ObjID ] | - ["list", ObjID] | - ["text", ObjID] | - ["table", ObjID] - -export enum ObjTypeName { - list = "list", - map = "map", - table = "table", - text = "text", -} - -export type Datatype = - "boolean" | - "str" | - "int" | - "uint" | - "f64" | - "null" | - "timestamp" | - "counter" | - "bytes" | - "map" | - "text" | - "list"; - -export type SyncHave = { - lastSync: Heads, - bloom: Uint8Array, -} - -export type DecodedSyncMessage = { - heads: Heads, - need: Heads, - have: SyncHave[] - changes: Change[] -} - -export type DecodedChange = { - actor: Actor, - seq: number - startOp: number, - time: number, - message: string | null, - deps: Heads, - hash: Hash, - ops: Op[] -} - -export type Op = { - action: string, - obj: ObjID, - key: string, - value?: string | number | 
boolean, - datatype?: string, - pred: string[], -} - -export type Patch = { - obj: ObjID - action: 'assign' | 'insert' | 'delete' - key: Prop - value: Value - datatype: Datatype - conflict: boolean -} - -export function create(actor?: Actor): Automerge; -export function load(data: Uint8Array, actor?: Actor): Automerge; -export function encodeChange(change: DecodedChange): Change; -export function decodeChange(change: Change): DecodedChange; -export function initSyncState(): SyncState; -export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; -export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; -export function encodeSyncState(state: SyncState): Uint8Array; -export function decodeSyncState(data: Uint8Array): SyncState; -export function exportSyncState(state: SyncState): JsSyncState; -export function importSyncState(state: JsSyncState): SyncState; - -export class API { - create(actor?: Actor): Automerge; - load(data: Uint8Array, actor?: Actor): Automerge; - encodeChange(change: DecodedChange): Change; - decodeChange(change: Change): DecodedChange; - initSyncState(): SyncState; - encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; - decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; - encodeSyncState(state: SyncState): Uint8Array; - decodeSyncState(data: Uint8Array): SyncState; - exportSyncState(state: SyncState): JsSyncState; - importSyncState(state: JsSyncState): SyncState; -} - -export class Automerge { - // change state - put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): void; - putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID; - insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): void; - insertObject(obj: ObjID, index: number, value: ObjType): ObjID; - push(obj: ObjID, value: Value, datatype?: Datatype): void; - pushObject(obj: ObjID, value: ObjType): ObjID; - splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; - 
increment(obj: ObjID, prop: Prop, value: number): void; - delete(obj: ObjID, prop: Prop): void; - - // returns a single value - if there is a conflict return the winner - get(obj: ObjID, prop: Prop, heads?: Heads): FullValue | null; - // return all values in case of a conflict - getAll(obj: ObjID, arg: Prop, heads?: Heads): FullValueWithId[]; - keys(obj: ObjID, heads?: Heads): string[]; - text(obj: ObjID, heads?: Heads): string; - length(obj: ObjID, heads?: Heads): number; - materialize(obj?: ObjID, heads?: Heads): MaterializeValue; - - // transactions - commit(message?: string, time?: number): Hash; - merge(other: Automerge): Heads; - getActorId(): Actor; - pendingOps(): number; - rollback(): number; - - // patches - enablePatches(enable: boolean): void; - popPatches(): Patch[]; - - // save and load to local store - save(): Uint8Array; - saveIncremental(): Uint8Array; - loadIncremental(data: Uint8Array): number; - - // sync over network - receiveSyncMessage(state: SyncState, message: SyncMessage): void; - generateSyncMessage(state: SyncState): SyncMessage | null; - - // low level change functions - applyChanges(changes: Change[]): void; - getChanges(have_deps: Heads): Change[]; - getChangeByHash(hash: Hash): Change | null; - getChangesAdded(other: Automerge): Change[]; - getHeads(): Heads; - getLastLocalChange(): Change; - getMissingDeps(heads?: Heads): Heads; - - // memory management - free(): void; - clone(actor?: string): Automerge; - fork(actor?: string): Automerge; - forkAt(heads: Heads, actor?: string): Automerge; - - // dump internal state to console.log - dump(): void; -} - -export class JsSyncState { - sharedHeads: Heads; - lastSentHeads: Heads; - theirHeads: Heads | undefined; - theirHeed: Heads | undefined; - theirHave: SyncHave[] | undefined; - sentHashes: Heads; -} - -export class SyncState { - free(): void; - clone(): SyncState; - lastSentHeads: Heads; - sentHashes: Heads; - readonly sharedHeads: Heads; -} - -export default function init (): Promise; 
+export * from "automerge-types" diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index f1077fe2..2d024c10 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.3", + "version": "0.1.4", "license": "MIT", "files": [ "README.md", @@ -38,6 +38,7 @@ "@types/jest": "^27.4.0", "@types/mocha": "^9.1.0", "@types/node": "^17.0.13", + "@types/uuid": "^8.3.4", "@typescript-eslint/eslint-plugin": "^5.25.0", "@typescript-eslint/parser": "^5.25.0", "cross-env": "^7.0.3", @@ -48,5 +49,8 @@ "rimraf": "^3.0.2", "ts-mocha": "^9.0.2", "typescript": "^4.6.4" + }, + "dependencies": { + "automerge-types": "^0.1.1" } } diff --git a/automerge-wasm/test/readme.ts b/automerge-wasm/test/readme.ts index d06df0fb..ff5c94ac 100644 --- a/automerge-wasm/test/readme.ts +++ b/automerge-wasm/test/readme.ts @@ -1,6 +1,5 @@ import { describe, it } from 'mocha'; -//@ts-ignore -import assert from 'assert' +import * as assert from 'assert' //@ts-ignore import init, { create, load } from '..' 
diff --git a/automerge-wasm/tsconfig.json b/automerge-wasm/tsconfig.json index 2627c69b..339eab93 100644 --- a/automerge-wasm/tsconfig.json +++ b/automerge-wasm/tsconfig.json @@ -11,6 +11,7 @@ "paths": { "dev": ["*"]}, "rootDir": "", "target": "es2016", + "types": ["mocha", "node"], "typeRoots": ["./index.d.ts"] }, "include": ["test/**/*.ts"], diff --git a/automerge-wasm/types/LICENSE b/automerge-wasm/types/LICENSE new file mode 100644 index 00000000..63b21502 --- /dev/null +++ b/automerge-wasm/types/LICENSE @@ -0,0 +1,10 @@ +MIT License + +Copyright 2022, Ink & Switch LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/automerge-wasm/types/automerge-types-0.1.1.tgz b/automerge-wasm/types/automerge-types-0.1.1.tgz new file mode 100644 index 0000000000000000000000000000000000000000..cc3415bdd139e56e4db44456ca0d058bcc3db3dd GIT binary patch literal 2566 zcmV+h3i5j`TKw5xv=Ii zbZs6;-e~khxQX1=I);j^ zbEgfrx$DTVDa zCvM3! 
zTLg|v7Rhd-F~=pg5I0=eqIWkCF{-LnpbZF%T}_wdnxXCso;nQWqI+4%oQTBZ(FtM~ zeIJTQ$*Y@ri1G&}%*N;Q_r}x&YX+0)_?^`^`_MFIIB&M$y)_?-x|GzH%K=cV+dOq$jEd)1`?REvdkGV zyD+D{0ZxrK*3g=NXv4WR9}&-UWNN^~n9i-<<wo8E=cHc$kDot#{!ssa#`8x5plM_2z#0*;SVXefg)0OicpOnh81Nw4z$-zMf4IRl zRt>2-fIQYkh1_5x|#tNMIK^+67@QcL)kq#T44!K7No{ePEm}WMDi%chUCb{uA>CLWzyF? zBOVIrqRkuPs=0M{bJ7GiSfx`7I)}%H#~PZJeS%^U(lFJm6rN1GYqf^c@?pzhZs73G zht6Rv3zupKb}Nf&flMNJv+=d@W6pme;V1Y1$4{RgZ}tB)dN}_-<7x2E)V!%<2!@S^ z0e%7MYJ=5jWo^9pWcBx~_0}xF!}<$cxP*?LU^173WWXb7wF^N17C%b6-F1h8ez-p z{?0R=f56o>-*h$WkKJvQugFeudR1R~jVo$UclDg!C3Q7-lG#p{tK^ZLB1zf&(wgpSwiSTrkEyS*m7cu6CxI`|_;I>Hc z@)&J1+Qd{mO8bzl&^{@2&&xnQBep6;GZ5a;;yNtmpSNLb;qe^U*zK+NSQ{*7htEb|Y7*L}PtV9VEajFQjmOC{gK4{|<7HM2k-VOs&zr4Ua% z5-D#4pkSIS`8ZX)2#_)*5Q#gKMOkCbEd zu-FAX&%CZ@+Q)iKDyKB0nI0;;P8(Z!bHHv?mLV4~855|ha~+wu@PzQY<(=CyliLC> z@YNJd+}6aS#46z2DNWZXx0`El^$|g~%JWF06F$Or%gXBkiD6V!y1KRyU&(*#rg2C! ztiJI$%phvl(2wQ0ueI)$lQ)JTt~flD6^-S(tI|Ks1g(w3Z6(KHp&52_8DB-S_|(TU z-&tFHx<%m$=Cnv)B_QD*9C$zhq~oWh!>wD5s)H>0wuv3SgE*(EaM>ny16Q4`SxiPB z=*U--eOi`IR4b3s?bDXBN@x4Dt}v6U3eCYTws&czZIJYD$&Q2WuFh+R9-6$qkKb^K zn%^n>pYjfR*WXEBqN&Qy7}%H-B^VP+ot|%pEc}Hah5Lmi+%FK}&d`JZEZE?$hZo#8 zpx~Y%1osXaxObR*tp`f}%L0YV(NS8`K=GkWoy4tD5x~qXLY-OLmBiXEC;BY0qUamp zI;Wb?Sp7QxEpBO-bCBZbaG(rdAyl{Xye8gD6U}XhGP`;1&?ijfx}z zYq5tc$J0R;@i>VB2@E7Ehh3^-3QxdQpG)*79+p?-6kmNwxpjj;HA*YKUq{u+ou!=+ z$}Vnsx>q_atD!#QC

a)UlaV-|PzUTA1N^6?rULeN&viCehtnwIs5_piY&wL#Jt@ z{vF6Fee+-yukS?8em9^iLVn*SinF~W^^1A7b8*B1soOBA`X=9Z<17IeJ)Y`W&q`53 zO{h|Ua!pInCYMUeK`Qt(bv2b7%c0D#hU(n#fId@>L)ofW6ba8;F#Ea;)vAWnMRKX~ zHr;?Gdfw8rywfY6O%Y~D2vtMc4$V%|wS&G8QoW>ajGnNWhX#v?5+yCSwh912t?WoK zZySM_l@WGSh^kc*b2*3`Y_kI%--_s(e86q)-c)K@y>hgRTSK+dWwx(49xFt=i%GSZ z=4VMOR{UC>mXNHKp&ejh=xua?m#wW^Q$>w9L!)=RN2EgKdUH|gLR5PJ^cfwmeuI)KrwXnv^5})_72GE`Due44D@VyM>-#Wj z_xEiyNg7}P@j`{VNmYY(?^wO7+h^PkadeiMBLZ);(^Z8ws5eWz+I0<7+KPnlO8yG3 c0IyFUzE3|q56{E%@O | Value +export type ObjType = string | Array | { [key: string]: ObjType | Value } +export type FullValue = + ["str", string] | + ["int", number] | + ["uint", number] | + ["f64", number] | + ["boolean", boolean] | + ["timestamp", Date] | + ["counter", number] | + ["bytes", Uint8Array] | + ["null", null] | + ["map", ObjID] | + ["list", ObjID] | + ["text", ObjID] | + ["table", ObjID] + +export type FullValueWithId = + ["str", string, ObjID ] | + ["int", number, ObjID ] | + ["uint", number, ObjID ] | + ["f64", number, ObjID ] | + ["boolean", boolean, ObjID ] | + ["timestamp", Date, ObjID ] | + ["counter", number, ObjID ] | + ["bytes", Uint8Array, ObjID ] | + ["null", null, ObjID ] | + ["map", ObjID ] | + ["list", ObjID] | + ["text", ObjID] | + ["table", ObjID] + +export enum ObjTypeName { + list = "list", + map = "map", + table = "table", + text = "text", +} + +export type Datatype = + "boolean" | + "str" | + "int" | + "uint" | + "f64" | + "null" | + "timestamp" | + "counter" | + "bytes" | + "map" | + "text" | + "list"; + +export type SyncHave = { + lastSync: Heads, + bloom: Uint8Array, +} + +export type DecodedSyncMessage = { + heads: Heads, + need: Heads, + have: SyncHave[] + changes: Change[] +} + +export type DecodedChange = { + actor: Actor, + seq: number + startOp: number, + time: number, + message: string | null, + deps: Heads, + hash: Hash, + ops: Op[] +} + +export type Op = { + action: string, + obj: ObjID, + key: string, + value?: string | number | boolean, + datatype?: 
string, + pred: string[], +} + +export type Patch = { + obj: ObjID + action: 'assign' | 'insert' | 'delete' + key: Prop + value: Value + datatype: Datatype + conflict: boolean +} + +export function create(actor?: Actor): Automerge; +export function load(data: Uint8Array, actor?: Actor): Automerge; +export function encodeChange(change: DecodedChange): Change; +export function decodeChange(change: Change): DecodedChange; +export function initSyncState(): SyncState; +export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; +export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; +export function encodeSyncState(state: SyncState): Uint8Array; +export function decodeSyncState(data: Uint8Array): SyncState; +export function exportSyncState(state: SyncState): JsSyncState; +export function importSyncState(state: JsSyncState): SyncState; + +export class API { + create(actor?: Actor): Automerge; + load(data: Uint8Array, actor?: Actor): Automerge; + encodeChange(change: DecodedChange): Change; + decodeChange(change: Change): DecodedChange; + initSyncState(): SyncState; + encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; + decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; + encodeSyncState(state: SyncState): Uint8Array; + decodeSyncState(data: Uint8Array): SyncState; + exportSyncState(state: SyncState): JsSyncState; + importSyncState(state: JsSyncState): SyncState; +} + +export class Automerge { + // change state + put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): void; + putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID; + insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): void; + insertObject(obj: ObjID, index: number, value: ObjType): ObjID; + push(obj: ObjID, value: Value, datatype?: Datatype): void; + pushObject(obj: ObjID, value: ObjType): ObjID; + splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; + increment(obj: ObjID, prop: 
Prop, value: number): void; + delete(obj: ObjID, prop: Prop): void; + + // returns a single value - if there is a conflict return the winner + get(obj: ObjID, prop: Prop, heads?: Heads): FullValue | null; + // return all values in case of a conflict + getAll(obj: ObjID, arg: Prop, heads?: Heads): FullValueWithId[]; + keys(obj: ObjID, heads?: Heads): string[]; + text(obj: ObjID, heads?: Heads): string; + length(obj: ObjID, heads?: Heads): number; + materialize(obj?: ObjID, heads?: Heads): MaterializeValue; + + // transactions + commit(message?: string, time?: number): Hash; + merge(other: Automerge): Heads; + getActorId(): Actor; + pendingOps(): number; + rollback(): number; + + // patches + enablePatches(enable: boolean): void; + popPatches(): Patch[]; + + // save and load to local store + save(): Uint8Array; + saveIncremental(): Uint8Array; + loadIncremental(data: Uint8Array): number; + + // sync over network + receiveSyncMessage(state: SyncState, message: SyncMessage): void; + generateSyncMessage(state: SyncState): SyncMessage | null; + + // low level change functions + applyChanges(changes: Change[]): void; + getChanges(have_deps: Heads): Change[]; + getChangeByHash(hash: Hash): Change | null; + getChangesAdded(other: Automerge): Change[]; + getHeads(): Heads; + getLastLocalChange(): Change; + getMissingDeps(heads?: Heads): Heads; + + // memory management + free(): void; + clone(actor?: string): Automerge; + fork(actor?: string): Automerge; + forkAt(heads: Heads, actor?: string): Automerge; + + // dump internal state to console.log + dump(): void; +} + +export class JsSyncState { + sharedHeads: Heads; + lastSentHeads: Heads; + theirHeads: Heads | undefined; + theirHeed: Heads | undefined; + theirHave: SyncHave[] | undefined; + sentHashes: Heads; +} + +export class SyncState { + free(): void; + clone(): SyncState; + lastSentHeads: Heads; + sentHashes: Heads; + readonly sharedHeads: Heads; +} + +export default function init (): Promise; diff --git 
a/automerge-wasm/types/package.json b/automerge-wasm/types/package.json new file mode 100644 index 00000000..e28e2f1c --- /dev/null +++ b/automerge-wasm/types/package.json @@ -0,0 +1,18 @@ +{ + "collaborators": [ + "Orion Henry " + ], + "name": "automerge-types", + "description": "typescript types for low level automerge api", + "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", + "repository": "github:automerge/automerge-rs", + "version": "0.1.1", + "license": "MIT", + "files": [ + "LICENSE", + "package.json", + "index.d.ts" + ], + "types": "index.d.ts", + "main": "" +} From 4bed03f008db9d6bbce3f8fce566bcd7bb0b2c0f Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 9 May 2022 11:59:24 -0400 Subject: [PATCH 404/730] Added the `AMsyncMessage` struct. Added the `AMsyncState` struct. Added the `AMfreeSyncState()` function. Added the `AMgenerateSyncMessage()` function. Added the `AMinitSyncState()` function. Added the `AMreceiveSyncMessage()` function. 
--- automerge-c/src/CMakeLists.txt | 1 + automerge-c/src/lib.rs | 258 ++++++++++++++++++++++++--------- automerge-c/src/result.rs | 13 ++ automerge-c/src/sync.rs | 39 +++++ 4 files changed, 239 insertions(+), 72 deletions(-) create mode 100644 automerge-c/src/sync.rs diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt index 5df9dd74..50c8b3da 100644 --- a/automerge-c/src/CMakeLists.txt +++ b/automerge-c/src/CMakeLists.txt @@ -55,6 +55,7 @@ add_custom_command( changes.rs doc.rs result.rs + sync.rs ${CMAKE_SOURCE_DIR}/build.rs ${CMAKE_SOURCE_DIR}/Cargo.toml ${CMAKE_SOURCE_DIR}/cbindgen.toml diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs index 4c4dd0c2..520a7411 100644 --- a/automerge-c/src/lib.rs +++ b/automerge-c/src/lib.rs @@ -7,6 +7,7 @@ mod change_hashes; mod changes; mod doc; mod result; +mod sync; use automerge::transaction::{CommitOptions, Transactable}; @@ -15,6 +16,7 @@ use change_hashes::AMchangeHashes; use changes::{AMchange, AMchanges}; use doc::AMdoc; use result::{AMobjId, AMresult, AMvalue}; +use sync::{AMsyncMessage, AMsyncState}; /// \ingroup enumerations /// \enum AMobjType @@ -77,6 +79,26 @@ macro_rules! to_obj_id { }}; } +macro_rules! to_sync_message { + ($handle:expr) => {{ + let handle = $handle.as_ref(); + match handle { + Some(b) => b, + None => return AMresult::err("Invalid AMsyncMessage pointer").into(), + } + }}; +} + +macro_rules! to_sync_state { + ($handle:expr) => {{ + let handle = $handle.as_mut(); + match handle { + Some(b) => b, + None => return AMresult::err("Invalid AMsyncState pointer").into(), + } + }}; +} + fn to_result>(r: R) -> *mut AMresult { (r.into()).into() } @@ -85,8 +107,8 @@ fn to_result>(r: R) -> *mut AMresult { /// \brief Allocates a new `AMdoc` struct and initializes it with defaults. /// /// \return A pointer to an `AMdoc` struct. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeDoc()`. 
+/// \warning To avoid a memory leak, the returned `AMdoc` struct must be +/// deallocated with `AMfreeDoc()`. #[no_mangle] pub extern "C" fn AMcreate() -> *mut AMdoc { AMdoc::new(am::AutoCommit::new()).into() @@ -102,8 +124,8 @@ pub extern "C" fn AMcreate() -> *mut AMdoc { /// \return A pointer to an `AMresult` struct containing a change hash as an /// `AMbyteSpan` struct. /// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -132,8 +154,8 @@ pub unsafe extern "C" fn AMcommit( /// \param[in] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMdoc` struct. /// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeDoc()`. +/// \warning To avoid a memory leak, the returned `AMdoc` struct must be +/// deallocated with `AMfreeDoc()`. /// \internal /// /// # Safety @@ -164,6 +186,45 @@ pub unsafe extern "C" fn AMfreeDoc(doc: *mut AMdoc) { } } +/// \memberof AMdoc +/// \brief Generates a synchronization message for a peer based upon the +/// synchronization state \p sync_state. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \return A pointer to an `AMresult` struct containing an `AMsyncMessage` +/// struct. +/// \pre \p doc must b e a valid address. +/// \pre \p sync_state must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// sync_state must be a pointer to a valid AMsyncState +#[no_mangle] +pub unsafe extern "C" fn AMgenerateSyncMessage( + doc: *mut AMdoc, + sync_state: *mut AMsyncState, +) -> *mut AMresult { + let doc = to_doc!(doc); + let sync_state = to_sync_state!(sync_state); + to_result(doc.generate_sync_message(sync_state.as_mut())) +} + +/// \memberof AMsyncState +/// \brief Allocates a new `AMsyncState` struct and initializes it with +/// defaults. +/// +/// \return A pointer to an `AMsyncState` struct. +/// \warning To avoid a memory leak, the returned `AMsyncState` struct must be +/// deallocated with `AMfreeSyncState()`. +#[no_mangle] +pub extern "C" fn AMinitSyncState() -> *mut AMsyncState { + AMsyncState::new(am::sync::State::new()).into() +} + /// \memberof AMdoc /// \brief Allocates storage for an `AMdoc` struct and initializes it with the /// compact form of an incremental save pointed to by \p src. @@ -173,8 +234,8 @@ pub unsafe extern "C" fn AMfreeDoc(doc: *mut AMdoc) { /// \return A pointer to an `AMdoc` struct. /// \pre \p src must be a valid address. /// \pre `0 <=` \p count `<=` length of \p src. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeDoc()`. +/// \warning To avoid a memory leak, the returned `AMdoc` struct must be +/// deallocated with `AMfreeDoc()`. /// \internal /// /// # Safety @@ -202,8 +263,8 @@ pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMdoc { /// \pre \p doc must be a valid address. /// \pre \p src must be a valid address. /// \pre `0 <=` \p count `<=` length of \p src. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. 
/// \internal /// /// # Safety @@ -231,8 +292,8 @@ pub unsafe extern "C" fn AMloadIncremental( /// struct. /// \pre \p dest must be a valid address. /// \pre \p src must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -244,6 +305,35 @@ pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMre to_result(dest.merge(to_doc!(src))) } +/// \memberof AMdoc +/// \brief Receives a synchronization message \p sync_message from a peer based +/// upon the synchronization state \p sync_state. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p sync_state must be a valid address. +/// \pre \p sync_message must be a valid address. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// sync_state must be a pointer to a valid AMsyncState +/// sync_message must be a pointer to a valid AMsyncMessage +#[no_mangle] +pub unsafe extern "C" fn AMreceiveSyncMessage( + doc: *mut AMdoc, + sync_state: *mut AMsyncState, + sync_message: *const AMsyncMessage, +) -> *mut AMresult { + let doc = to_doc!(doc); + let sync_state = to_sync_state!(sync_state); + let sync_message = to_sync_message!(sync_message); + to_result(doc.receive_sync_message(sync_state.as_mut(), sync_message.as_ref().clone())) +} + /// \memberof AMdoc /// \brief Saves the entirety of \p doc into a compact form. 
/// @@ -251,8 +341,8 @@ pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMre /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. /// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -269,8 +359,8 @@ pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { /// \return A pointer to an `AMresult` struct containing an actor ID as an /// `AMbyteSpan` struct. /// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -287,8 +377,8 @@ pub unsafe extern "C" fn AMgetActor(doc: *mut AMdoc) -> *mut AMresult { /// \param[in] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing a `char const*`. /// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -311,8 +401,8 @@ pub unsafe extern "C" fn AMgetActorHex(doc: *mut AMdoc) -> *mut AMresult { /// \pre \p doc must be a valid address. /// \pre \p value must be a valid address. /// \pre `0 <=` \p count `<=` length of \p value. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. 
/// \internal /// /// # Safety @@ -338,8 +428,8 @@ pub unsafe extern "C" fn AMsetActor( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p hex_str must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -396,6 +486,7 @@ pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { AMresult::Changes(changes) => changes.len(), AMresult::Error(_) | AMresult::Void => 0, AMresult::Scalars(vec, _) => vec.len(), + AMresult::SyncMessage(_) => 1, } } else { 0 @@ -477,6 +568,11 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult, index: usize) } } } + AMresult::SyncMessage(sync_message) => { + if index == 0 { + value = AMvalue::SyncMessage(sync_message); + } + } AMresult::Void => (), } }; @@ -492,8 +588,8 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult, index: usize) /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -520,8 +616,8 @@ pub unsafe extern "C" fn AMmapDelete( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. 
/// \internal /// /// # Safety @@ -549,8 +645,8 @@ pub unsafe extern "C" fn AMmapPutBool( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -578,8 +674,8 @@ pub unsafe extern "C" fn AMmapPutInt( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -608,8 +704,8 @@ pub unsafe extern "C" fn AMmapPutUint( /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \pre \p value must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -641,8 +737,8 @@ pub unsafe extern "C" fn AMmapPutStr( /// \pre \p key must be a valid address. /// \pre \p value must be a valid address. /// \pre `0 <=` \p count `<=` length of \p value. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -674,8 +770,8 @@ pub unsafe extern "C" fn AMmapPutBytes( /// \return A pointer to an `AMresult` struct containing a void. 
/// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -703,8 +799,8 @@ pub unsafe extern "C" fn AMmapPutF64( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -736,8 +832,8 @@ pub unsafe extern "C" fn AMmapPutCounter( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -768,8 +864,8 @@ pub unsafe extern "C" fn AMmapPutTimestamp( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned p ointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -796,8 +892,8 @@ pub unsafe extern "C" fn AMmapPutNull( /// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. 
-/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -824,8 +920,8 @@ pub unsafe extern "C" fn AMmapPutObject( /// \return A pointer to an `AMresult` struct. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -850,8 +946,8 @@ pub unsafe extern "C" fn AMlistGet( /// \return A pointer to an `AMresult` struct. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -877,8 +973,8 @@ pub unsafe extern "C" fn AMmapGet( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -905,8 +1001,8 @@ pub unsafe extern "C" fn AMlistDelete( /// \param[in] value A boolean. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. 
+/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -945,8 +1041,8 @@ pub unsafe extern "C" fn AMlistPutBool( /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \pre \p value must be a valid address. /// \pre `0 <=` \p count `<=` length of \p value. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -985,8 +1081,8 @@ pub unsafe extern "C" fn AMlistPutBytes( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -1022,8 +1118,8 @@ pub unsafe extern "C" fn AMlistPutCounter( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -1058,8 +1154,8 @@ pub unsafe extern "C" fn AMlistPutF64( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. 
-/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -1093,8 +1189,8 @@ pub unsafe extern "C" fn AMlistPutInt( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -1129,8 +1225,8 @@ pub unsafe extern "C" fn AMlistPutNull( /// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -1167,8 +1263,8 @@ pub unsafe extern "C" fn AMlistPutObject( /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \pre \p value must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -1205,8 +1301,8 @@ pub unsafe extern "C" fn AMlistPutStr( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. 
/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -1242,8 +1338,8 @@ pub unsafe extern "C" fn AMlistPutTimestamp( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety @@ -1283,6 +1379,24 @@ pub unsafe extern "C" fn AMfreeResult(result: *mut AMresult) { } } +/// \memberof AMsyncState +/// \brief Deallocates the storage for an `AMsyncState` struct previously +/// allocated by `AMinitSyncState()`. +/// +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \pre \p sync_state must be a valid address. +/// \internal +/// +/// # Safety +/// sync_state must be a pointer to a valid AMsyncState +#[no_mangle] +pub unsafe extern "C" fn AMfreeSyncState(sync_state: *mut AMsyncState) { + if !sync_state.is_null() { + let sync_state: AMsyncState = *Box::from_raw(sync_state); + drop(sync_state) + } +} + /// \memberof AMresult /// \brief Gets an `AMresult` struct's error message string. /// @@ -1378,8 +1492,8 @@ pub unsafe extern "C" fn AMgetChangeHash(change: *const AMchange) -> AMbyteSpan /// \param[in] have_deps A pointer to an `AMchangeHashes` struct or `NULL`. /// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. /// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned pointer must be deallocated -/// with `AMfreeResult()`. 
+/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfreeResult()`. /// \internal /// /// # Safety diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 0c8845b1..8948b577 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -4,6 +4,7 @@ use std::ops::Deref; use crate::AMbyteSpan; use crate::AMchangeHashes; +use crate::AMsyncMessage; use crate::{AMchange, AMchanges}; /// \struct AMobjId @@ -110,6 +111,8 @@ pub enum AMvalue<'a> { */ /// A 64-bit unsigned integer variant. Uint(u64), + /// A synchronization message variant. + SyncMessage(&'a AMsyncMessage), /// A void variant. Void, } @@ -123,6 +126,7 @@ pub enum AMresult { Error(CString), ObjId(AMobjId), Scalars(Vec>, Option), + SyncMessage(AMsyncMessage), Void, } @@ -138,6 +142,15 @@ impl From for AMresult { } } +impl From> for AMresult { + fn from(maybe: Option) -> Self { + match maybe { + Some(message) => AMresult::SyncMessage(AMsyncMessage::new(message)), + None => AMresult::Void, + } + } +} + impl From> for AMresult { fn from(maybe: Result<(), am::AutomergeError>) -> Self { match maybe { diff --git a/automerge-c/src/sync.rs b/automerge-c/src/sync.rs new file mode 100644 index 00000000..ad15ce51 --- /dev/null +++ b/automerge-c/src/sync.rs @@ -0,0 +1,39 @@ +use automerge as am; + +/// \struct AMsyncMessage +/// \brief A synchronization message for a peer. +pub struct AMsyncMessage(am::sync::Message); + +impl AMsyncMessage { + pub fn new(message: am::sync::Message) -> Self { + Self(message) + } +} + +impl AsRef for AMsyncMessage { + fn as_ref(&self) -> &am::sync::Message { + &self.0 + } +} + +/// \struct AMsyncState +/// \brief The state of synchronization with a peer. 
+pub struct AMsyncState(am::sync::State); + +impl AMsyncState { + pub fn new(state: am::sync::State) -> Self { + Self(state) + } +} + +impl AsMut for AMsyncState { + fn as_mut(&mut self) -> &mut am::sync::State { + &mut self.0 + } +} + +impl From for *mut AMsyncState { + fn from(b: AMsyncState) -> Self { + Box::into_raw(Box::new(b)) + } +} From 5765fea77109d04c82e341f6e4fd0d1e7abb194c Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:06:22 -0700 Subject: [PATCH 405/730] Renamed `AMfreeResult()` to `AMresultFree()`. Remove the `&AMchange` conversion for `AMbyteSpan`. Add a `&automerge::ActorId` conversion to for `AMbyteSpan`. Remove the `&Vec` conversion for `AMbyteSpan`. Add a `&[u8]` conversion for `AMbyteSpan`. --- automerge-c/src/byte_span.rs | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/automerge-c/src/byte_span.rs b/automerge-c/src/byte_span.rs index 68bfea00..d3543604 100644 --- a/automerge-c/src/byte_span.rs +++ b/automerge-c/src/byte_span.rs @@ -1,16 +1,14 @@ use automerge as am; -use crate::AMchange; - /// \struct AMbyteSpan /// \brief A contiguous sequence of bytes. /// #[repr(C)] pub struct AMbyteSpan { /// A pointer to an array of bytes. - /// \warning \p src is only valid until the `AMfreeResult()` function is called - /// on the `AMresult` struct hosting the array of bytes to which - /// it points. + /// \warning \p src is only valid until the `AMresultFree()` function is + /// called on the `AMresult` struct hosting the array of bytes to + /// which it points. src: *const u8, /// The number of bytes in the array. 
count: usize, @@ -25,10 +23,13 @@ impl Default for AMbyteSpan { } } -impl From<&AMchange> for AMbyteSpan { - fn from(change: &AMchange) -> Self { - let change_hash = &(change.as_ref()).hash; - change_hash.into() +impl From<&am::ActorId> for AMbyteSpan { + fn from(actor: &am::ActorId) -> Self { + let slice = actor.to_bytes(); + Self { + src: slice.as_ptr(), + count: slice.len(), + } } } @@ -51,11 +52,11 @@ impl From<&am::ChangeHash> for AMbyteSpan { } } -impl From<&Vec> for AMbyteSpan { - fn from(v: &Vec) -> Self { +impl From<&[u8]> for AMbyteSpan { + fn from(slice: &[u8]) -> Self { Self { - src: (*v).as_ptr(), - count: (*v).len(), + src: slice.as_ptr(), + count: slice.len(), } } } From a31a65033ffa99b33fef551f49482d4019286421 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:08:27 -0700 Subject: [PATCH 406/730] Renamed `AMfreeResult()` to `AMresultFree()`. Renamed `AMfreeDoc()` to `AMfree()`. Renamed `AMnextChange()` to `AMchangesNext()`. Renamed `AMgetMessage()` to `AMchangeMessage()`. 
--- automerge-c/examples/quickstart.c | 58 +++++++++++++++---------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index 43915c7a..66edd378 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -22,80 +22,80 @@ int main(int argc, char** argv) { AMobjId const* const card1 = value.obj_id; AMresult* result = AMmapPutStr(doc1, card1, "title", "Rewrite everything in Clojure"); test(result, AM_VALUE_VOID); - AMfreeResult(result); + AMresultFree(result); result = AMmapPutBool(doc1, card1, "done", false); test(result, AM_VALUE_VOID); - AMfreeResult(result); + AMresultFree(result); AMresult* const card2_result = AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP); value = test(card2_result, AM_VALUE_OBJ_ID); AMobjId const* const card2 = value.obj_id; result = AMmapPutStr(doc1, card2, "title", "Rewrite everything in Haskell"); test(result, AM_VALUE_VOID); - AMfreeResult(result); + AMresultFree(result); result = AMmapPutBool(doc1, card2, "done", false); test(result, AM_VALUE_VOID); - AMfreeResult(result); - AMfreeResult(card2_result); + AMresultFree(result); + AMresultFree(card2_result); result = AMcommit(doc1, "Add card", NULL); test(result, AM_VALUE_CHANGE_HASHES); - AMfreeResult(result); + AMresultFree(result); AMdoc* doc2 = AMcreate(); if (doc2 == NULL) { fprintf(stderr, "`AMcreate()` failure."); - AMfreeResult(card1_result); - AMfreeResult(cards_result); - AMfreeDoc(doc1); + AMresultFree(card1_result); + AMresultFree(cards_result); + AMfree(doc1); exit(EXIT_FAILURE); } result = AMmerge(doc2, doc1); test(result, AM_VALUE_CHANGE_HASHES); - AMfreeResult(result); - AMfreeDoc(doc2); + AMresultFree(result); + AMfree(doc2); AMresult* const save_result = AMsave(doc1); value = test(save_result, AM_VALUE_BYTES); AMbyteSpan binary = value.bytes; doc2 = AMload(binary.src, binary.count); - AMfreeResult(save_result); + AMresultFree(save_result); if (doc2 
== NULL) { fprintf(stderr, "`AMload()` failure."); - AMfreeResult(card1_result); - AMfreeResult(cards_result); - AMfreeDoc(doc1); + AMresultFree(card1_result); + AMresultFree(cards_result); + AMfree(doc1); exit(EXIT_FAILURE); } result = AMmapPutBool(doc1, card1, "done", true); test(result, AM_VALUE_VOID); - AMfreeResult(result); + AMresultFree(result); result = AMcommit(doc1, "Mark card as done", NULL); test(result, AM_VALUE_CHANGE_HASHES); - AMfreeResult(result); - AMfreeResult(card1_result); + AMresultFree(result); + AMresultFree(card1_result); result = AMlistDelete(doc2, cards, 0); test(result, AM_VALUE_VOID); - AMfreeResult(result); + AMresultFree(result); result = AMcommit(doc2, "Delete card", NULL); test(result, AM_VALUE_CHANGE_HASHES); - AMfreeResult(result); + AMresultFree(result); result = AMmerge(doc1, doc2); test(result, AM_VALUE_CHANGE_HASHES); - AMfreeResult(result); - AMfreeDoc(doc2); + AMresultFree(result); + AMfree(doc2); result = AMgetChanges(doc1, NULL); value = test(result, AM_VALUE_CHANGES); AMchange const* change = NULL; - while (value.changes.ptr && (change = AMnextChange(&value.changes, 1))) { + while (value.changes.ptr && (change = AMchangesNext(&value.changes, 1))) { size_t const size = AMobjSizeAt(doc1, cards, change); - printf("%s %ld\n", AMgetMessage(change), size); + printf("%s %ld\n", AMchangeMessage(change), size); } - AMfreeResult(result); - AMfreeResult(cards_result); - AMfreeDoc(doc1); + AMresultFree(result); + AMresultFree(cards_result); + AMfree(doc1); } /** @@ -123,7 +123,7 @@ AMvalue test(AMresult* result, AMvalueVariant const value_tag) { default: sprintf(prelude, "Unknown `AMstatus` tag %d", status); } fprintf(stderr, "%s; %s.", prelude, AMerrorMessage(result)); - AMfreeResult(result); + AMresultFree(result); exit(EXIT_FAILURE); } AMvalue const value = AMresultValue(result, 0); @@ -147,7 +147,7 @@ AMvalue test(AMresult* result, AMvalueVariant const value_tag) { default: label = ""; } fprintf(stderr, "Unexpected 
`AMvalueVariant` tag `%s` (%d).", label, value.tag); - AMfreeResult(result); + AMresultFree(result); exit(EXIT_FAILURE); } return value; From fb0ea2c7a4789fe654b904f9bbd388acebaf7d47 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:12:03 -0700 Subject: [PATCH 407/730] Renamed `AMadvanceChangeHashes()` to `AMchangeHashesAdvance()`. Added `AMchangeHashesCmp()`. Renamed `AMnextChangeHash()` to `AMchangeHashesNext()`. --- automerge-c/src/change_hashes.rs | 179 ++++++++++++++++++++----------- 1 file changed, 114 insertions(+), 65 deletions(-) diff --git a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs index ad29b490..926c37eb 100644 --- a/automerge-c/src/change_hashes.rs +++ b/automerge-c/src/change_hashes.rs @@ -1,23 +1,22 @@ use automerge as am; +use std::cmp::Ordering; use std::ffi::c_void; -use crate::AMbyteSpan; +use crate::byte_span::AMbyteSpan; /// \struct AMchangeHashes -/// \brief A bidirectional iterator over a sequence of `AMbyteSpan` structs. +/// \brief A bidirectional iterator over a sequence of change hashes. #[repr(C)] pub struct AMchangeHashes { + /// The length of the sequence. len: usize, + /// The offset from \p ptr, \p +offset -> forward direction, + /// \p -offset -> reverse direction. offset: isize, + /// A pointer to the first change hash or `NULL`. 
ptr: *const c_void, } -impl AsRef<[am::ChangeHash]> for AMchangeHashes { - fn as_ref(&self) -> &[am::ChangeHash] { - unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) } - } -} - impl AMchangeHashes { pub fn new(change_hashes: &[am::ChangeHash]) -> Self { Self { @@ -65,25 +64,130 @@ impl AMchangeHashes { } } +impl AsRef<[am::ChangeHash]> for AMchangeHashes { + fn as_ref(&self) -> &[am::ChangeHash] { + unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) } + } +} + +impl Default for AMchangeHashes { + fn default() -> Self { + Self { + len: 0, + offset: 0, + ptr: std::ptr::null(), + } + } +} + /// \memberof AMchangeHashes /// \brief Advances/rewinds an `AMchangeHashes` struct by at most \p |n| /// positions. /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. /// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. +/// number of positions to advance/rewind. /// \pre \p change_hashes must be a valid address. /// \internal /// /// #Safety /// change_hashes must be a pointer to a valid AMchangeHashes #[no_mangle] -pub unsafe extern "C" fn AMadvanceChangeHashes(change_hashes: *mut AMchangeHashes, n: isize) { +pub unsafe extern "C" fn AMchangeHashesAdvance(change_hashes: *mut AMchangeHashes, n: isize) { if let Some(change_hashes) = change_hashes.as_mut() { change_hashes.advance(n); }; } +/// \memberof AMchangeHashes +/// \brief Compares two change hash sequences. +/// +/// \param[in] change_hashes1 A pointer to an `AMchangeHashes` struct. +/// \param[in] change_hashes2 A pointer to an `AMchangeHashes` struct. +/// \return `-1` if \p change_hashes1 `<` \p change_hashes2, `0` if +/// \p change_hashes1 `==` \p change_hashes2 and `1` if +/// \p change_hashes1 `>` \p change_hashes2. +/// \pre \p change_hashes1 must be a valid address. +/// \pre \p change_hashes2 must be a valid address. 
+/// \internal +/// +/// #Safety +/// change_hashes1 must be a pointer to a valid AMchangeHashes +/// change_hashes2 must be a pointer to a valid AMchangeHashes +#[no_mangle] +pub unsafe extern "C" fn AMchangeHashesCmp( + change_hashes1: *const AMchangeHashes, + change_hashes2: *const AMchangeHashes, +) -> isize { + match (change_hashes1.as_ref(), change_hashes2.as_ref()) { + (Some(change_hashes1), Some(change_hashes2)) => { + match change_hashes1.as_ref().cmp(change_hashes2.as_ref()) { + Ordering::Less => -1, + Ordering::Equal => 0, + Ordering::Greater => 1, + } + } + (None, Some(_)) => -1, + (Some(_), None) => 1, + (None, None) => 0, + } +} + +/// \memberof AMchangeHashes +/// \brief Gets the `AMbyteSpan` struct at the current position of an +/// `AMchangeHashes`struct and then advances/rewinds it by at most \p |n| +/// positions. +/// +/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. +/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum +/// number of positions to advance/rewind. +/// \return An `AMbyteSpan` struct that's `NULL` when \p change_hashes was +/// previously advanced/rewound past its forward/backward limit. +/// \pre \p change_hashes must be a valid address. +/// \internal +/// +/// #Safety +/// change_hashes must be a pointer to a valid AMchangeHashes +#[no_mangle] +pub unsafe extern "C" fn AMchangeHashesNext( + change_hashes: *mut AMchangeHashes, + n: isize, +) -> AMbyteSpan { + if let Some(change_hashes) = change_hashes.as_mut() { + if let Some(change_hash) = change_hashes.next(n) { + return change_hash.into(); + } + } + AMbyteSpan::default() +} + +/// \memberof AMchangeHashes +/// \brief Advances/rewinds an `AMchangeHashes` struct by at most \p |n| +/// positions and then gets the `AMbyteSpan` struct at its current position. +/// +/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. 
+/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum +/// number of positions to advance/rewind. +/// \return An `AMbyteSpan` struct that's `NULL` when \p change_hashes is +/// presently advanced/rewound past its forward/backward limit. +/// \pre \p change_hashes must be a valid address. +/// \internal +/// +/// #Safety +/// change_hashes must be a pointer to a valid AMchangeHashes +#[no_mangle] +pub unsafe extern "C" fn AMchangeHashesPrev( + change_hashes: *mut AMchangeHashes, + n: isize, +) -> AMbyteSpan { + if let Some(change_hashes) = change_hashes.as_mut() { + if let Some(change_hash) = change_hashes.prev(n) { + return change_hash.into(); + } + } + AMbyteSpan::default() +} + /// \memberof AMchangeHashes /// \brief Gets the size of an `AMchangeHashes` struct. /// @@ -102,58 +206,3 @@ pub unsafe extern "C" fn AMchangeHashesSize(change_hashes: *const AMchangeHashes 0 } } - -/// \memberof AMchangeHashes -/// \brief Gets the `AMbyteSpan` struct at the current position of an -/// `AMchangeHashes`struct and then advances/rewinds it by at most \p |n| -/// positions. -/// -/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. -/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. -/// \return An `AMbyteSpan` struct that's invalid when \p change_hashes was -/// previously advanced/rewound past its forward/backward limit. -/// \pre \p change_hashes must be a valid address. 
-/// \internal -/// -/// #Safety -/// change_hashes must be a pointer to a valid AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMnextChangeHash( - change_hashes: *mut AMchangeHashes, - n: isize, -) -> AMbyteSpan { - if let Some(change_hashes) = change_hashes.as_mut() { - if let Some(change_hash) = change_hashes.next(n) { - return change_hash.into(); - } - } - AMbyteSpan::default() -} - -/// \memberof AMchangeHashes -/// \brief Advances/rewinds an `AMchangeHashes` struct by at most \p |n| -/// positions and then gets the `AMbyteSpan` struct at its current position. -/// -/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. -/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. -/// \return An `AMbyteSpan` struct that's invalid when \p change_hashes is -/// presently advanced/rewound past its forward/backward limit. -/// \pre \p change_hashes must be a valid address. -/// \internal -/// -/// #Safety -/// change_hashes must be a pointer to a valid AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMprevChangeHash( - change_hashes: *mut AMchangeHashes, - n: isize, -) -> AMbyteSpan { - if let Some(change_hashes) = change_hashes.as_mut() { - if let Some(change_hash) = change_hashes.prev(n) { - return change_hash.into(); - } - } - AMbyteSpan::default() -} From 3ad979a1782eb9f74cc87f75a6bd56cbe4734321 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:19:54 -0700 Subject: [PATCH 408/730] Added `AMchangeActorId()`. Added `AMchangeCompress()`. Added `AMchangeDeps()`. Added `AMchangeExtraBytes()`. Added `AMchangeFromBytes()`. Added `AMchangeHash()`. Added `AMchangeIsEmpty()`. Added `AMchangeMaxOp()`. Added `AMchangeMessage()`. Added `AMchangeSeq()`. Added `AMchangeSize()`. Added `AMchangeStartOp()`. Added `AMchangeTime()`. Added `AMchangeRawBytes()`. Added `AMchangeLoadDocument()`. 
--- automerge-c/src/change.rs | 331 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 331 insertions(+) create mode 100644 automerge-c/src/change.rs diff --git a/automerge-c/src/change.rs b/automerge-c/src/change.rs new file mode 100644 index 00000000..b7b0e03d --- /dev/null +++ b/automerge-c/src/change.rs @@ -0,0 +1,331 @@ +use automerge as am; +use std::ffi::CString; +use std::os::raw::c_char; + +use crate::byte_span::AMbyteSpan; +use crate::change_hashes::AMchangeHashes; +use crate::result::{to_result, AMresult}; + +/// \struct AMchange +/// \brief A group of operations performed by an actor. +pub struct AMchange { + body: *mut am::Change, + c_message: Option, +} + +impl AMchange { + pub fn new(change: &mut am::Change) -> Self { + let c_message = match change.message() { + Some(c_message) => CString::new(c_message).ok(), + None => None, + }; + Self { + body: change, + c_message, + } + } + + pub fn c_message(&self) -> Option<&CString> { + self.c_message.as_ref() + } +} + +impl AsMut for AMchange { + fn as_mut(&mut self) -> &mut am::Change { + unsafe { &mut *self.body } + } +} + +impl AsRef for AMchange { + fn as_ref(&self) -> &am::Change { + unsafe { &*self.body } + } +} + +/// \memberof AMchange +/// \brief Gets the first referenced actor ID in a change. +/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \return An actor ID as an `AMbyteSpan` struct. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMchangeActorId(change: *const AMchange) -> AMbyteSpan { + match change.as_ref() { + Some(change) => change.as_ref().actor_id().into(), + None => AMbyteSpan::default(), + } +} + +/// \memberof AMchange +/// \brief Compresses the raw bytes within an `AMchange` struct. +/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \pre \p change must be a valid address. 
+/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMchangeCompress(change: *mut AMchange) { + if let Some(change) = change.as_mut() { + change.as_mut().compress(); + }; +} + +/// \memberof AMchange +/// \brief Gets the dependencies of a change. +/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \return A pointer to an `AMchangeHashes` struct or `NULL`. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMchangeDeps(change: *const AMchange) -> AMchangeHashes { + match change.as_ref() { + Some(change) => AMchangeHashes::new(&change.as_ref().deps), + None => AMchangeHashes::default(), + } +} + +/// \memberof AMchange +/// \brief Gets the extra bytes of a change. +/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \return An `AMbyteSpan` struct. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMchangeExtraBytes(change: *const AMchange) -> AMbyteSpan { + if let Some(change) = change.as_ref() { + change.as_ref().extra_bytes().into() + } else { + AMbyteSpan::default() + } +} + +/// \memberof AMchange +/// \brief Loads a change as bytes into an `AMchange` struct. +/// +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The number of bytes in \p src to load. +/// \return A pointer to an `AMresult` struct containing an `AMchange` struct. +/// \pre \p src must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p src. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMchangeFromBytes(src: *const u8, count: usize) -> *mut AMresult { + let mut data = Vec::new(); + data.extend_from_slice(std::slice::from_raw_parts(src, count)); + to_result(am::Change::from_bytes(data)) +} + +/// \memberof AMchange +/// \brief Gets the hash of a change. +/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \return A change hash as an `AMbyteSpan` struct. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMchangeHash(change: *const AMchange) -> AMbyteSpan { + match change.as_ref() { + Some(change) => { + let hash: &am::ChangeHash = &change.as_ref().hash; + hash.into() + } + None => AMbyteSpan::default(), + } +} + +/// \memberof AMchange +/// \brief Gets the emptiness of a change. +/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \return A boolean. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMchangeIsEmpty(change: *const AMchange) -> bool { + if let Some(change) = change.as_ref() { + change.as_ref().is_empty() + } else { + true + } +} + +/// \memberof AMchange +/// \brief Gets the maximum operation index of a change. +/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \return A 64-bit unsigned integer. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMchangeMaxOp(change: *const AMchange) -> u64 { + if let Some(change) = change.as_ref() { + change.as_ref().max_op() + } else { + u64::MAX + } +} + +/// \memberof AMchange +/// \brief Gets the message of a change. +/// +/// \param[in] change A pointer to an `AMchange` struct. 
+/// \return A UTF-8 string or `NULL`. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMchangeMessage(change: *const AMchange) -> *const c_char { + if let Some(change) = change.as_ref() { + if let Some(c_message) = change.c_message() { + return c_message.as_ptr(); + } + } + std::ptr::null::() +} + +/// \memberof AMchange +/// \brief Gets the index of a change in the changes from an actor. +/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \return A 64-bit unsigned integer. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMchangeSeq(change: *const AMchange) -> u64 { + if let Some(change) = change.as_ref() { + change.as_ref().seq + } else { + u64::MAX + } +} + +/// \memberof AMchange +/// \brief Gets the size of a change. +/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \return A 64-bit unsigned integer. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMchangeSize(change: *const AMchange) -> usize { + if let Some(change) = change.as_ref() { + change.as_ref().len() + } else { + 0 + } +} + +/// \memberof AMchange +/// \brief Gets the start operation index of a change. +/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \return A 64-bit unsigned integer. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMchangeStartOp(change: *const AMchange) -> u64 { + if let Some(change) = change.as_ref() { + u64::from(change.as_ref().start_op) + } else { + u64::MAX + } +} + +/// \memberof AMchange +/// \brief Gets the commit time of a change. 
+/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \return A 64-bit signed integer. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMchangeTime(change: *const AMchange) -> i64 { + if let Some(change) = change.as_ref() { + change.as_ref().time + } else { + i64::MAX + } +} + +/// \memberof AMchange +/// \brief Gets the raw bytes of a change. +/// +/// \param[in] change A pointer to an `AMchange` struct. +/// \return An `AMbyteSpan` struct. +/// \pre \p change must be a valid address. +/// \internal +/// +/// # Safety +/// change must be a pointer to a valid AMchange +#[no_mangle] +pub unsafe extern "C" fn AMchangeRawBytes(change: *const AMchange) -> AMbyteSpan { + if let Some(change) = change.as_ref() { + change.as_ref().raw_bytes().into() + } else { + AMbyteSpan::default() + } +} + +/// \memberof AMchange +/// \brief Loads a document into a sequence of `AMchange` structs. +/// +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The number of bytes in \p src to load. +/// \return A pointer to an `AMresult` struct containing a sequence of +/// `AMchange` structs. +/// \pre \p src must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p src. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMchangeLoadDocument(src: *const u8, count: usize) -> *mut AMresult { + let mut data = Vec::new(); + data.extend_from_slice(std::slice::from_raw_parts(src, count)); + to_result(am::Change::load_document(&data)) +} From e8f1f07f21338fa82290cd96bbe4c5dfd3309b04 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:24:53 -0700 Subject: [PATCH 409/730] Changed `AMchanges` to lazily create `AMchange` structs. 
Renamed `AMadvanceChanges()` to `AMchangesAdvance()`. Added `AMchangesEqual()`. Renamed `AMnextChange()` to `AMchangesNext()`. Renamed `AMprevChange()` to `AMchangesPrev()`. --- automerge-c/src/changes.rs | 227 +++++++++++++++++++++---------------- 1 file changed, 129 insertions(+), 98 deletions(-) diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs index 755d2510..86bdec42 100644 --- a/automerge-c/src/changes.rs +++ b/automerge-c/src/changes.rs @@ -1,57 +1,32 @@ use automerge as am; -use std::ffi::{c_void, CString}; +use std::collections::BTreeMap; +use std::ffi::c_void; -/// \struct AMchange -/// \brief A group of operations performed by an actor. -pub struct AMchange { - body: am::Change, - c_message: Option, -} - -impl AMchange { - pub fn new(change: am::Change) -> Self { - let c_message = match change.message() { - Some(c_message) => CString::new(c_message).ok(), - None => None, - }; - Self { - body: change, - c_message, - } - } - - pub fn c_message(&self) -> Option<&CString> { - self.c_message.as_ref() - } -} - -impl AsRef for AMchange { - fn as_ref(&self) -> &am::Change { - &self.body - } -} +use crate::change::AMchange; /// \struct AMchanges -/// \brief A bidirectional iterator over a sequence of `AMchange` structs. +/// \brief A bidirectional iterator over a sequence of changes. #[repr(C)] pub struct AMchanges { + /// The length of the sequence. len: usize, + /// The offset from \p ptr, \p +offset -> forward direction, + /// \p -offset -> reverse direction. offset: isize, + /// A pointer to the first change or `NULL`. ptr: *const c_void, -} - -impl AsRef<[AMchange]> for AMchanges { - fn as_ref(&self) -> &[AMchange] { - unsafe { std::slice::from_raw_parts(self.ptr as *const AMchange, self.len) } - } + /// Reserved. 
+ storage: *mut c_void, } impl AMchanges { - pub fn new(changes: &[AMchange]) -> Self { + pub fn new(changes: &[am::Change], storage: &mut BTreeMap) -> Self { + let storage: *mut BTreeMap = storage; Self { len: changes.len(), offset: 0, ptr: changes.as_ptr() as *const c_void, + storage: storage as *mut c_void, } } @@ -63,32 +38,63 @@ impl AMchanges { }; } - pub fn next(&mut self, n: isize) -> Option<&AMchange> { + pub fn next(&mut self, n: isize) -> Option<*const AMchange> { let len = self.len as isize; if self.offset < -len || self.offset == len { // It's stopped. None } else { - let slice = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMchange, self.len) }; + let slice: &mut [am::Change] = + unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, self.len) }; let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; - let element = Some(&slice[index]); + let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + let value = match storage.get_mut(&index) { + Some(value) => value, + None => { + storage.insert(index, AMchange::new(&mut slice[index])); + storage.get_mut(&index).unwrap() + } + }; self.advance(n); - element + Some(value) } } - pub fn prev(&mut self, n: isize) -> Option<&AMchange> { + pub fn prev(&mut self, n: isize) -> Option<*const AMchange> { self.advance(n); let len = self.len as isize; if self.offset < -len || self.offset == len { // It's stopped. 
None } else { - let slice = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMchange, self.len) }; + let slice: &mut [am::Change] = + unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, self.len) }; let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; - Some(&slice[index]) + let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + Some(match storage.get_mut(&index) { + Some(value) => value, + None => { + storage.insert(index, AMchange::new(&mut slice[index])); + storage.get_mut(&index).unwrap() + } + }) + } + } +} + +impl AsRef<[am::Change]> for AMchanges { + fn as_ref(&self) -> &[am::Change] { + unsafe { std::slice::from_raw_parts(self.ptr as *const am::Change, self.len) } + } +} + +impl Default for AMchanges { + fn default() -> Self { + Self { + len: 0, + offset: 0, + ptr: std::ptr::null(), + storage: std::ptr::null_mut(), } } } @@ -99,19 +105,93 @@ impl AMchanges { /// /// \param[in] changes A pointer to an `AMchanges` struct. /// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. +/// number of positions to advance/rewind. /// \pre \p changes must be a valid address. /// \internal /// /// #Safety /// changes must be a pointer to a valid AMchanges #[no_mangle] -pub unsafe extern "C" fn AMadvanceChanges(changes: *mut AMchanges, n: isize) { +pub unsafe extern "C" fn AMchangesAdvance(changes: *mut AMchanges, n: isize) { if let Some(changes) = changes.as_mut() { changes.advance(n); }; } +/// \memberof AMchanges +/// \brief Compares two change sequences for equality. +/// +/// \param[in] changes1 A pointer to an `AMchanges` struct. +/// \param[in] changes2 A pointer to an `AMchanges` struct. +/// \return `true` if \p changes1 `==` \p changes2 and `false` otherwise. +/// \pre \p changes1 must be a valid address. +/// \pre \p changes2 must be a valid address. 
+/// \internal +/// +/// #Safety +/// changes1 must be a pointer to a valid AMchanges +/// changes2 must be a pointer to a valid AMchanges +#[no_mangle] +pub unsafe extern "C" fn AMchangesEqual( + changes1: *const AMchanges, + changes2: *const AMchanges, +) -> bool { + match (changes1.as_ref(), changes2.as_ref()) { + (Some(changes1), Some(changes2)) => changes1.as_ref() == changes2.as_ref(), + (None, Some(_)) | (Some(_), None) | (None, None) => false, + } +} + +/// \memberof AMchanges +/// \brief Gets a pointer to the `AMchange` struct at the current position of +/// an `AMchanges`struct and then advances/rewinds it by at most \p |n| +/// positions. +/// +/// \param[in] changes A pointer to an `AMchanges` struct. +/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum +/// number of positions to advance/rewind. +/// \return A pointer to an `AMchange` struct that's `NULL` when \p changes was +/// previously advanced/rewound past its forward/backward limit. +/// \pre \p changes must be a valid address. +/// \internal +/// +/// #Safety +/// changes must be a pointer to a valid AMchanges +#[no_mangle] +pub unsafe extern "C" fn AMchangesNext(changes: *mut AMchanges, n: isize) -> *const AMchange { + if let Some(changes) = changes.as_mut() { + if let Some(change) = changes.next(n) { + return change; + } + } + std::ptr::null() +} + +/// \memberof AMchanges +/// \brief Advances/rewinds an `AMchanges` struct by at most \p |n| +/// positions and then gets a pointer to the `AMchange` struct at its +/// current position. +/// +/// \param[in] changes A pointer to an `AMchanges` struct. +/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum +/// number of positions to advance/rewind. +/// \return A pointer to an `AMchange` struct that's `NULL` when \p changes is +/// presently advanced/rewound past its forward/backward limit. +/// \pre \p changes must be a valid address. 
+/// \internal +/// +/// #Safety +/// changes must be a pointer to a valid AMchanges +#[no_mangle] +pub unsafe extern "C" fn AMchangesPrev(changes: *mut AMchanges, n: isize) -> *const AMchange { + if let Some(changes) = changes.as_mut() { + if let Some(change) = changes.prev(n) { + return change; + } + } + std::ptr::null() +} + /// \memberof AMchanges /// \brief Gets the size of an `AMchanges` struct. /// @@ -130,52 +210,3 @@ pub unsafe extern "C" fn AMchangesSize(changes: *const AMchanges) -> usize { 0 } } - -/// \memberof AMchanges -/// \brief Gets the `AMchange` struct at the current position of an -/// `AMchanges`struct and then advances/rewinds it by at most \p |n| -/// positions. -/// -/// \param[in] changes A pointer to an `AMchanges` struct. -/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. -/// \return A pointer to an `AMchange` struct that's invalid when \p changes was -/// previously advanced/rewound past its forward/backward limit. -/// \pre \p changes must be a valid address. -/// \internal -/// -/// #Safety -/// changes must be a pointer to a valid AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMnextChange(changes: *mut AMchanges, n: isize) -> *const AMchange { - if let Some(changes) = changes.as_mut() { - if let Some(change) = changes.next(n) { - return change; - } - } - std::ptr::null() -} - -/// \memberof AMchanges -/// \brief Advances/rewinds an `AMchanges` struct by at most \p |n| -/// positions and then gets the `AMchange` struct at its current position. -/// -/// \param[in] changes A pointer to an `AMchanges` struct. -/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. -/// \return A pointer to an `AMchange` struct that's invalid when \p changes is -/// presently advanced/rewound past its forward/backward limit. -/// \pre \p changes must be a valid address. 
-/// \internal -/// -/// #Safety -/// changes must be a pointer to a valid AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMprevChange(changes: *mut AMchanges, n: isize) -> *const AMchange { - if let Some(changes) = changes.as_mut() { - if let Some(change) = changes.prev(n) { - return change; - } - } - std::ptr::null() -} From 007253d6aefa48d0c09f3f95efe6d421937666bf Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:27:14 -0700 Subject: [PATCH 410/730] Updated the file dependencies of the CMake custom command for Cargo. --- automerge-c/src/CMakeLists.txt | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt index 50c8b3da..4ecd9f1c 100644 --- a/automerge-c/src/CMakeLists.txt +++ b/automerge-c/src/CMakeLists.txt @@ -52,10 +52,19 @@ add_custom_command( DEPENDS byte_span.rs change_hashes.rs + change.rs changes.rs doc.rs + doc/list.rs + doc/map.rs + doc/utils.rs + obj.rs result.rs sync.rs + sync/have.rs + sync/haves.rs + sync/message.rs + sync/state.rs ${CMAKE_SOURCE_DIR}/build.rs ${CMAKE_SOURCE_DIR}/Cargo.toml ${CMAKE_SOURCE_DIR}/cbindgen.toml From e56fe64a18e3799da45c9ea40e17da5307c684fd Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:34:01 -0700 Subject: [PATCH 411/730] Added `AMapplyChanges()`. Fixed `AMdup()`. Added `AMequal()`. Renamed `AMfreeDoc()` to `AMfree()`. Added `AMgetHeads()`. Added `AMgetMissingDeps()`. Added `AMgetLastLocalChange()`. 
--- automerge-c/src/doc.rs | 565 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 565 insertions(+) diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 9a63042f..828a38b9 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -1,5 +1,43 @@ use automerge as am; +use automerge::transaction::{CommitOptions, Transactable}; +use smol_str::SmolStr; +use std::borrow::Cow; use std::ops::{Deref, DerefMut}; +use std::os::raw::c_char; + +use crate::change::AMchange; +use crate::change_hashes::AMchangeHashes; +use crate::obj::AMobjId; +use crate::result::{to_result, AMresult}; +use crate::sync::{to_sync_message, AMsyncMessage, AMsyncState}; + +mod list; +mod map; +mod utils; + +use crate::changes::AMchanges; +use crate::doc::utils::to_str; +use crate::doc::utils::{to_doc, to_obj_id}; + +macro_rules! to_changes { + ($handle:expr) => {{ + let handle = $handle.as_ref(); + match handle { + Some(b) => b, + None => return AMresult::err("Invalid AMchanges pointer").into(), + } + }}; +} + +macro_rules! to_sync_state_mut { + ($handle:expr) => {{ + let handle = $handle.as_mut(); + match handle { + Some(b) => b, + None => return AMresult::err("Invalid AMsyncState pointer").into(), + } + }}; +} /// \struct AMdoc /// \brief A JSON-like CRDT. @@ -31,3 +69,530 @@ impl From for *mut AMdoc { Box::into_raw(Box::new(b)) } } + +/// \memberof AMdoc +/// \brief Applies a sequence of changes to a document. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] changes A pointer to an `AMchanges` struct. +/// \ +/// \pre \p doc must be a valid address. +/// \pre \p changes must be a valid address. +/// \return A pointer to an `AMresult` struct containing a void. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// changes must be a pointer to a valid AMchanges. 
+#[no_mangle] +pub unsafe extern "C" fn AMapplyChanges( + doc: *mut AMdoc, + changes: *const AMchanges, +) -> *mut AMresult { + let doc = to_doc!(doc); + let changes = to_changes!(changes); + to_result(doc.apply_changes(changes.as_ref().to_vec())) +} + +/// \memberof AMdoc +/// \brief Allocates a new `AMdoc` struct and initializes it with defaults. +/// +/// \return A pointer to an `AMdoc` struct. +/// \warning To avoid a memory leak, the returned `AMdoc` struct must be +/// deallocated with `AMfree()`. +#[no_mangle] +pub extern "C" fn AMcreate() -> *mut AMdoc { + AMdoc::new(am::AutoCommit::new()).into() +} + +/// \memberof AMdoc +/// \brief Commits the current operations on \p doc with an optional message +/// and/or time override as seconds since the epoch. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] message A UTF-8 string or `NULL`. +/// \param[in] time A pointer to a `time_t` value or `NULL`. +/// \return A pointer to an `AMresult` struct containing a change hash as an +/// `AMbyteSpan` struct. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMcommit( + doc: *mut AMdoc, + message: *const c_char, + time: *const libc::time_t, +) -> *mut AMresult { + let doc = to_doc!(doc); + let mut options = CommitOptions::default(); + if !message.is_null() { + options.set_message(to_str(message)); + } + if let Some(time) = time.as_ref() { + options.set_time(*time); + } + to_result(doc.commit_with::<()>(options)) +} + +/// \memberof AMdoc +/// \brief Allocates storage for an `AMdoc` struct and initializes it by +/// duplicating the given `AMdoc` struct. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMdoc` struct. +/// \pre \p doc must be a valid address. 
+/// \warning To avoid a memory leak, the returned `AMdoc` struct must be +/// deallocated with `AMfree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMdup(doc: *mut AMdoc) -> *mut AMdoc { + match doc.as_mut() { + Some(doc) => doc.clone().into(), + None => std::ptr::null_mut::(), + } +} + +/// \memberof AMdoc +/// \brief Compares two documents for equality after closing their respective +/// transactions. +/// +/// \param[in] doc1 An `AMdoc` struct. +/// \param[in] doc2 An `AMdoc` struct. +/// \return `true` if \p doc1 `==` \p doc2 and `false` otherwise. +/// \pre \p doc1 must be a valid address. +/// \pre \p doc2 must be a valid address. +/// \internal +/// +/// #Safety +/// doc1 must be a pointer to a valid AMdoc +/// doc2 must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { + match (doc1.as_mut(), doc2.as_mut()) { + (Some(doc1), Some(doc2)) => doc1.document().get_heads() == doc2.document().get_heads(), + (None, Some(_)) | (Some(_), None) | (None, None) => false, + } +} + +/// \memberof AMdoc +/// \brief Deallocates the storage for an `AMdoc` struct previously +/// allocated by `AMcreate()`, `AMdup()` or `AMload()`. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \pre \p doc must be a valid address. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMfree(doc: *mut AMdoc) { + if !doc.is_null() { + let doc: AMdoc = *Box::from_raw(doc); + drop(doc) + } +} + +/// \memberof AMdoc +/// \brief Generates a synchronization message for a peer based upon the +/// synchronization state \p sync_state. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \return A pointer to an `AMresult` struct containing either a pointer to an +/// `AMsyncMessage` struct or a void. 
+/// \pre \p doc must b e a valid address. +/// \pre \p sync_state must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// sync_state must be a pointer to a valid AMsyncState +#[no_mangle] +pub unsafe extern "C" fn AMgenerateSyncMessage( + doc: *mut AMdoc, + sync_state: *mut AMsyncState, +) -> *mut AMresult { + let doc = to_doc!(doc); + let sync_state = to_sync_state_mut!(sync_state); + to_result(doc.generate_sync_message(sync_state.as_mut())) +} + +/// \memberof AMdoc +/// \brief Gets an `AMdoc` struct's actor ID value as an array of bytes. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing an actor ID as an +/// `AMbyteSpan` struct. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetActor(doc: *mut AMdoc) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(Ok(doc.get_actor().clone())) +} + +/// \memberof AMdoc +/// \brief Gets an `AMdoc` struct's actor ID value as a hexadecimal string. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing a `char const*`. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetActorHex(doc: *mut AMdoc) -> *mut AMresult { + let doc = to_doc!(doc); + let hex_str = doc.get_actor().to_hex_string(); + let value = am::Value::Scalar(Cow::Owned(am::ScalarValue::Str(SmolStr::new(hex_str)))); + to_result(Ok(value)) +} + +/// \memberof AMdoc +/// \brief Gets the changes added to a document by their respective hashes. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] have_deps A pointer to an `AMchangeHashes` struct or `NULL`. +/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetChanges( + doc: *mut AMdoc, + have_deps: *const AMchangeHashes, +) -> *mut AMresult { + let doc = to_doc!(doc); + let empty_deps = Vec::::new(); + let have_deps = match have_deps.as_ref() { + Some(have_deps) => have_deps.as_ref(), + None => &empty_deps, + }; + to_result(doc.get_changes(have_deps)) +} + +/// \memberof AMdoc +/// \brief Gets the current heads of a document. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` +/// struct. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(Ok(doc.get_heads())) +} + +/// \memberof AMdoc +/// \brief Gets the hashes of the changes in a document that aren't transitive +/// dependencies of the given hashes of changes. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] heads A pointer to an `AMchangeHashes` struct or `NULL`. +/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` +/// struct. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetMissingDeps( + doc: *mut AMdoc, + heads: *const AMchangeHashes, +) -> *mut AMresult { + let doc = to_doc!(doc); + let empty_heads = Vec::::new(); + let heads = match heads.as_ref() { + Some(heads) => heads.as_ref(), + None => &empty_heads, + }; + to_result(doc.get_missing_deps(heads)) +} + +/// \memberof AMdoc +/// \brief Gets the last change made to a document. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing either an `AMchange` +/// struct or a void. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.get_last_local_change()) +} + +/// \memberof AMdoc +/// \brief Allocates storage for an `AMdoc` struct and initializes it with the +/// compact form of an incremental save pointed to by \p src. 
+/// +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The number of bytes in \p src to load. +/// \return A pointer to an `AMdoc` struct. +/// \pre \p src must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p src. +/// \warning To avoid a memory leak, the returned `AMdoc` struct must be +/// deallocated with `AMfree()`. +/// \internal +/// +/// # Safety +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMdoc { + let mut data = Vec::new(); + data.extend_from_slice(std::slice::from_raw_parts(src, count)); + if let Ok(auto_commit) = am::AutoCommit::load(&data) { + AMdoc::new(auto_commit).into() + } else { + std::ptr::null_mut::() + } +} + +/// \memberof AMdoc +/// \brief Loads the compact form of an incremental save pointed to by \p src +/// into \p doc. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The number of bytes in \p src to load. +/// \return A pointer to an `AMresult` struct containing the number of +/// operations loaded from \p src. +/// \pre \p doc must be a valid address. +/// \pre \p src must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p src. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMloadIncremental( + doc: *mut AMdoc, + src: *const u8, + count: usize, +) -> *mut AMresult { + let doc = to_doc!(doc); + let mut data = Vec::new(); + data.extend_from_slice(std::slice::from_raw_parts(src, count)); + to_result(doc.load_incremental(&data)) +} + +/// \memberof AMdoc +/// \brief Applies a sequence of changes to a document. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. 
+/// \param[in] changes A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` +/// struct. +/// \pre \p dest must be a valid address. +/// \pre \p src must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// dest must be a pointer to a valid AMdoc +/// src must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMresult { + let dest = to_doc!(dest); + to_result(dest.merge(to_doc!(src))) +} + +/// \memberof AMdoc +/// \brief Gets the size of an object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \return The count of values in the object identified by \p obj_id. +/// \pre \p doc must be a valid address. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMobjSize(doc: *const AMdoc, obj_id: *const AMobjId) -> usize { + if let Some(doc) = doc.as_ref() { + doc.length(to_obj_id!(obj_id)) + } else { + 0 + } +} + +/// \memberof AMdoc +/// \brief Gets the historical size of an object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] change A pointer to an `AMchange` struct or `NULL`. +/// \return The count of values in the object identified by \p obj_id at +/// \p change. +/// \pre \p doc must be a valid address. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// change must be a pointer to a valid AMchange or NULL +#[no_mangle] +pub unsafe extern "C" fn AMobjSizeAt( + doc: *const AMdoc, + obj_id: *const AMobjId, + change: *const AMchange, +) -> usize { + if let Some(doc) = doc.as_ref() { + if let Some(change) = change.as_ref() { + let change: &am::Change = change.as_ref(); + let change_hashes = vec![change.hash]; + return doc.length_at(to_obj_id!(obj_id), &change_hashes); + } + }; + 0 +} + +/// \memberof AMdoc +/// \brief Receives a synchronization message from a peer based upon a given +/// synchronization state. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p sync_state must be a valid address. +/// \pre \p sync_message must be a valid address. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// sync_state must be a pointer to a valid AMsyncState +/// sync_message must be a pointer to a valid AMsyncMessage +#[no_mangle] +pub unsafe extern "C" fn AMreceiveSyncMessage( + doc: *mut AMdoc, + sync_state: *mut AMsyncState, + sync_message: *const AMsyncMessage, +) -> *mut AMresult { + let doc = to_doc!(doc); + let sync_state = to_sync_state_mut!(sync_state); + let sync_message = to_sync_message!(sync_message); + to_result(doc.receive_sync_message(sync_state.as_mut(), sync_message.as_ref().clone())) +} + +/// \memberof AMdoc +/// \brief Saves the entirety of \p doc into a compact form. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing an array of bytes as +/// an `AMbyteSpan` struct. +/// \pre \p doc must be a valid address. 
+/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(Ok(doc.save())) +} + +/// \memberof AMdoc +/// \brief Puts an array of bytes as the actor ID value of an `AMdoc` struct. . +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] value A pointer to an array of bytes. +/// \param[in] count The number of bytes to copy from \p value. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p value must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p value. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// value must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMsetActor( + doc: *mut AMdoc, + value: *const u8, + count: usize, +) -> *mut AMresult { + let doc = to_doc!(doc); + let slice = std::slice::from_raw_parts(value, count); + doc.set_actor(am::ActorId::from(slice)); + to_result(Ok(())) +} + +/// \memberof AMdoc +/// \brief Puts a hexadecimal string as the actor ID value of an `AMdoc` struct. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] hex_str A string of hexadecimal characters. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p hex_str must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// hex_str must be a null-terminated array of `c_char` +#[no_mangle] +pub unsafe extern "C" fn AMsetActorHex(doc: *mut AMdoc, hex_str: *const c_char) -> *mut AMresult { + let doc = to_doc!(doc); + let slice = std::slice::from_raw_parts(hex_str as *const u8, libc::strlen(hex_str)); + to_result(match hex::decode(slice) { + Ok(vec) => { + doc.set_actor(vec.into()); + Ok(()) + } + Err(error) => Err(am::AutomergeError::HexDecode(error)), + }) +} From c3554199f385dc2177087288a809e9ef2a38ec23 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:36:26 -0700 Subject: [PATCH 412/730] Grouped related `AM*()` functions into separate source files. --- automerge-c/src/lib.rs | 1529 +--------------------------------------- 1 file changed, 2 insertions(+), 1527 deletions(-) diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs index 520a7411..0c01c0d3 100644 --- a/automerge-c/src/lib.rs +++ b/automerge-c/src/lib.rs @@ -1,1533 +1,8 @@ -use automerge as am; -use smol_str::SmolStr; -use std::{borrow::Cow, ffi::CStr, ffi::CString, os::raw::c_char}; - mod byte_span; +mod change; mod change_hashes; mod changes; mod doc; +mod obj; mod result; mod sync; - -use automerge::transaction::{CommitOptions, Transactable}; - -use byte_span::AMbyteSpan; -use change_hashes::AMchangeHashes; -use changes::{AMchange, AMchanges}; -use doc::AMdoc; -use result::{AMobjId, AMresult, AMvalue}; -use sync::{AMsyncMessage, AMsyncState}; - -/// \ingroup enumerations -/// \enum AMobjType -/// \brief The type of an object value. -#[repr(u8)] -pub enum AMobjType { - /// A list. - List = 1, - /// A key-value map. - Map, - /// A list of Unicode graphemes. 
- Text, -} - -impl From for am::ObjType { - fn from(o: AMobjType) -> Self { - match o { - AMobjType::Map => am::ObjType::Map, - AMobjType::List => am::ObjType::List, - AMobjType::Text => am::ObjType::Text, - } - } -} - -/// \ingroup enumerations -/// \enum AMstatus -/// \brief The status of an API call. -#[derive(Debug)] -#[repr(u8)] -pub enum AMstatus { - /// Success. - /// \note This tag is unalphabetized so that `0` indicates success. - Ok, - /// Failure due to an error. - Error, - /// Failure due to an invalid result. - InvalidResult, -} - -unsafe fn to_str(c: *const c_char) -> String { - CStr::from_ptr(c).to_string_lossy().to_string() -} - -macro_rules! to_doc { - ($handle:expr) => {{ - let handle = $handle.as_mut(); - match handle { - Some(b) => b, - None => return AMresult::err("Invalid AMdoc pointer").into(), - } - }}; -} - -macro_rules! to_obj_id { - ($handle:expr) => {{ - match $handle.as_ref() { - Some(obj_id) => obj_id, - None => &am::ROOT, - } - }}; -} - -macro_rules! to_sync_message { - ($handle:expr) => {{ - let handle = $handle.as_ref(); - match handle { - Some(b) => b, - None => return AMresult::err("Invalid AMsyncMessage pointer").into(), - } - }}; -} - -macro_rules! to_sync_state { - ($handle:expr) => {{ - let handle = $handle.as_mut(); - match handle { - Some(b) => b, - None => return AMresult::err("Invalid AMsyncState pointer").into(), - } - }}; -} - -fn to_result>(r: R) -> *mut AMresult { - (r.into()).into() -} - -/// \memberof AMdoc -/// \brief Allocates a new `AMdoc` struct and initializes it with defaults. -/// -/// \return A pointer to an `AMdoc` struct. -/// \warning To avoid a memory leak, the returned `AMdoc` struct must be -/// deallocated with `AMfreeDoc()`. -#[no_mangle] -pub extern "C" fn AMcreate() -> *mut AMdoc { - AMdoc::new(am::AutoCommit::new()).into() -} - -/// \memberof AMdoc -/// \brief Commits the current operations on \p doc with an optional message -/// and/or time override as seconds since the epoch. 
-/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] message A UTF-8 string or `NULL`. -/// \param[in] time A pointer to a `time_t` value or `NULL`. -/// \return A pointer to an `AMresult` struct containing a change hash as an -/// `AMbyteSpan` struct. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMcommit( - doc: *mut AMdoc, - message: *const c_char, - time: *const libc::time_t, -) -> *mut AMresult { - let doc = to_doc!(doc); - let mut options = CommitOptions::default(); - if !message.is_null() { - options.set_message(to_str(message)); - } - if let Some(time) = time.as_ref() { - options.set_time(*time); - } - to_result(doc.commit_with::<()>(options)) -} - -/// \memberof AMdoc -/// \brief Allocates storage for an `AMdoc` struct and initializes it by -/// duplicating the `AMdoc` struct pointed to by \p doc. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMdoc` struct. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMdoc` struct must be -/// deallocated with `AMfreeDoc()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMdup(doc: *mut AMdoc) -> *mut AMdoc { - let doc = *Box::from_raw(doc); - let copy = doc.clone(); - std::mem::forget(doc); - copy.into() -} - -/// \memberof AMdoc -/// \brief Deallocates the storage for an `AMdoc` struct previously -/// allocated by `AMcreate()`, `AMdup()` or `AMload()`. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \pre \p doc must be a valid address. 
-/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMfreeDoc(doc: *mut AMdoc) { - if !doc.is_null() { - let doc: AMdoc = *Box::from_raw(doc); - drop(doc) - } -} - -/// \memberof AMdoc -/// \brief Generates a synchronization message for a peer based upon the -/// synchronization state \p sync_state. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \return A pointer to an `AMresult` struct containing an `AMsyncMessage` -/// struct. -/// \pre \p doc must b e a valid address. -/// \pre \p sync_state must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// sync_state must be a pointer to a valid AMsyncState -#[no_mangle] -pub unsafe extern "C" fn AMgenerateSyncMessage( - doc: *mut AMdoc, - sync_state: *mut AMsyncState, -) -> *mut AMresult { - let doc = to_doc!(doc); - let sync_state = to_sync_state!(sync_state); - to_result(doc.generate_sync_message(sync_state.as_mut())) -} - -/// \memberof AMsyncState -/// \brief Allocates a new `AMsyncState` struct and initializes it with -/// defaults. -/// -/// \return A pointer to an `AMsyncState` struct. -/// \warning To avoid a memory leak, the returned `AMsyncState` struct must be -/// deallocated with `AMfreeSyncState()`. -#[no_mangle] -pub extern "C" fn AMinitSyncState() -> *mut AMsyncState { - AMsyncState::new(am::sync::State::new()).into() -} - -/// \memberof AMdoc -/// \brief Allocates storage for an `AMdoc` struct and initializes it with the -/// compact form of an incremental save pointed to by \p src. -/// -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to load. -/// \return A pointer to an `AMdoc` struct. -/// \pre \p src must be a valid address. 
-/// \pre `0 <=` \p count `<=` length of \p src. -/// \warning To avoid a memory leak, the returned `AMdoc` struct must be -/// deallocated with `AMfreeDoc()`. -/// \internal -/// -/// # Safety -/// src must be a byte array of length `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMdoc { - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - if let Ok(auto_commit) = am::AutoCommit::load(&data) { - AMdoc::new(auto_commit).into() - } else { - std::ptr::null_mut::() - } -} - -/// \memberof AMdoc -/// \brief Loads the compact form of an incremental save pointed to by \p src -/// into \p doc. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to load. -/// \return A pointer to an `AMresult` struct containing the number of -/// operations loaded from \p src. -/// \pre \p doc must be a valid address. -/// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` length of \p src. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// src must be a byte array of length `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMloadIncremental( - doc: *mut AMdoc, - src: *const u8, - count: usize, -) -> *mut AMresult { - let doc = to_doc!(doc); - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(doc.load_incremental(&data)) -} - -/// \memberof AMdoc -/// \brief Applies all of the changes in \p src which are not in \p dest to -/// \p dest. -/// -/// \param[in] dest A pointer to an `AMdoc` struct. -/// \param[in] src A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` -/// struct. -/// \pre \p dest must be a valid address. 
-/// \pre \p src must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// dest must be a pointer to a valid AMdoc -/// src must be a pointer to a valid AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMresult { - let dest = to_doc!(dest); - to_result(dest.merge(to_doc!(src))) -} - -/// \memberof AMdoc -/// \brief Receives a synchronization message \p sync_message from a peer based -/// upon the synchronization state \p sync_state. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p sync_state must be a valid address. -/// \pre \p sync_message must be a valid address. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// sync_state must be a pointer to a valid AMsyncState -/// sync_message must be a pointer to a valid AMsyncMessage -#[no_mangle] -pub unsafe extern "C" fn AMreceiveSyncMessage( - doc: *mut AMdoc, - sync_state: *mut AMsyncState, - sync_message: *const AMsyncMessage, -) -> *mut AMresult { - let doc = to_doc!(doc); - let sync_state = to_sync_state!(sync_state); - let sync_message = to_sync_message!(sync_message); - to_result(doc.receive_sync_message(sync_state.as_mut(), sync_message.as_ref().clone())) -} - -/// \memberof AMdoc -/// \brief Saves the entirety of \p doc into a compact form. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an array of bytes as -/// an `AMbyteSpan` struct. -/// \pre \p doc must be a valid address. 
-/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(Ok(doc.save())) -} -/// \memberof AMdoc -/// \brief Gets an `AMdoc` struct's actor ID value as an array of bytes. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an actor ID as an -/// `AMbyteSpan` struct. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetActor(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(Ok(doc.get_actor().clone())) -} - -/// \memberof AMdoc -/// \brief Gets an `AMdoc` struct's actor ID value as a hexadecimal string. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing a `char const*`. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetActorHex(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); - let hex_str = doc.get_actor().to_hex_string(); - let value = am::Value::Scalar(Cow::Owned(am::ScalarValue::Str(SmolStr::new(hex_str)))); - to_result(Ok(value)) -} - -/// \memberof AMdoc -/// \brief Puts an array of bytes as the actor ID value of an `AMdoc` struct. . -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] value A pointer to an array of bytes. 
-/// \param[in] count The number of bytes to copy from \p value. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p value must be a valid address. -/// \pre `0 <=` \p count `<=` length of \p value. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// value must be a byte array of length `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMsetActor( - doc: *mut AMdoc, - value: *const u8, - count: usize, -) -> *mut AMresult { - let doc = to_doc!(doc); - let slice = std::slice::from_raw_parts(value, count); - doc.set_actor(am::ActorId::from(slice)); - to_result(Ok(())) -} - -/// \memberof AMdoc -/// \brief Puts a hexadecimal string as the actor ID value of an `AMdoc` struct. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] hex_str A string of hexadecimal characters. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p hex_str must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// hex_str must be a null-terminated array of `c_char` -#[no_mangle] -pub unsafe extern "C" fn AMsetActorHex(doc: *mut AMdoc, hex_str: *const c_char) -> *mut AMresult { - let doc = to_doc!(doc); - let slice = std::slice::from_raw_parts(hex_str as *const u8, libc::strlen(hex_str)); - to_result(match hex::decode(slice) { - Ok(vec) => { - doc.set_actor(vec.into()); - Ok(()) - } - Err(error) => Err(am::AutomergeError::HexDecode(error)), - }) -} - -/// \memberof AMresult -/// \brief Gets the status code of an `AMresult` struct. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \return An `AMstatus` enum tag. 
-/// \pre \p result must be a valid address. -/// \internal -/// -/// # Safety -/// result must be a pointer to a valid AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultStatus(result: *mut AMresult) -> AMstatus { - match result.as_mut() { - Some(AMresult::Error(_)) => AMstatus::Error, - None => AMstatus::InvalidResult, - _ => AMstatus::Ok, - } -} - -/// \memberof AMresult -/// \brief Gets the size of an `AMresult` struct. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \return The count of values in \p result. -/// \pre \p result must be a valid address. -/// \internal -/// -/// # Safety -/// result must be a pointer to a valid AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { - if let Some(result) = result.as_mut() { - match result { - AMresult::ActorId(_) | AMresult::ObjId(_) => 1, - AMresult::ChangeHashes(change_hashes) => change_hashes.len(), - AMresult::Changes(changes) => changes.len(), - AMresult::Error(_) | AMresult::Void => 0, - AMresult::Scalars(vec, _) => vec.len(), - AMresult::SyncMessage(_) => 1, - } - } else { - 0 - } -} - -/// \memberof AMresult -/// \brief Gets a value from an `AMresult` struct. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \param[in] index The index of a value. -/// \return An `AMvalue` struct. -/// \pre \p result must be a valid address. -/// \pre `0 <=` \p index `<=` AMresultSize() for \p result. 
-/// \internal -/// -/// # Safety -/// result must be a pointer to a valid AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult, index: usize) -> AMvalue<'a> { - let mut value = AMvalue::Void; - if let Some(result) = result.as_mut() { - match result { - AMresult::ActorId(actor_id) => { - if index == 0 { - value = AMvalue::ActorId(actor_id.into()); - } - } - AMresult::ChangeHashes(change_hashes) => { - value = AMvalue::ChangeHashes(AMchangeHashes::new(change_hashes)); - } - AMresult::Changes(changes) => { - value = AMvalue::Changes(AMchanges::new(changes)); - } - AMresult::Error(_) => {} - AMresult::ObjId(obj_id) => { - if index == 0 { - value = AMvalue::ObjId(obj_id); - } - } - AMresult::Scalars(vec, hosted_str) => { - if let Some(element) = vec.get(index) { - match element { - am::Value::Scalar(scalar) => match scalar.as_ref() { - am::ScalarValue::Boolean(flag) => { - value = AMvalue::Boolean(*flag); - } - am::ScalarValue::Bytes(bytes) => { - value = AMvalue::Bytes(bytes.into()); - } - am::ScalarValue::Counter(counter) => { - value = AMvalue::Counter(counter.into()); - } - am::ScalarValue::F64(float) => { - value = AMvalue::F64(*float); - } - am::ScalarValue::Int(int) => { - value = AMvalue::Int(*int); - } - am::ScalarValue::Null => { - value = AMvalue::Null; - } - am::ScalarValue::Str(smol_str) => { - *hosted_str = CString::new(smol_str.to_string()).ok(); - if let Some(c_str) = hosted_str { - value = AMvalue::Str(c_str.as_ptr()); - } - } - am::ScalarValue::Timestamp(timestamp) => { - value = AMvalue::Timestamp(*timestamp); - } - am::ScalarValue::Uint(uint) => { - value = AMvalue::Uint(*uint); - } - }, - // \todo Confirm that an object value should be ignored - // when there's no object ID variant. 
- am::Value::Object(_) => (), - } - } - } - AMresult::SyncMessage(sync_message) => { - if index == 0 { - value = AMvalue::SyncMessage(sync_message); - } - } - AMresult::Void => (), - } - }; - value -} - -/// \memberof AMdoc -/// \brief Deletes a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapDelete( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, -) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.delete(to_obj_id!(obj_id), to_str(key))) -} - -/// \memberof AMdoc -/// \brief Puts a boolean as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A boolean. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. 
-/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutBool( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: bool, -) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) -} - -/// \memberof AMdoc -/// \brief Puts a signed integer as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutInt( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: i64, -) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) -} - -/// \memberof AMdoc -/// \brief Puts an unsigned integer as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A 64-bit unsigned integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. 
-/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutUint( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: u64, -) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) -} - -/// \memberof AMdoc -/// \brief Puts a UTF-8 string as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A UTF-8 string. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \pre \p value must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// key must be a c string of the map key to be used -/// value must be a null-terminated array of `c_char` -#[no_mangle] -pub unsafe extern "C" fn AMmapPutStr( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: *const c_char, -) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), to_str(value))) -} - -/// \memberof AMdoc -/// \brief Puts an array of bytes as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. 
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A pointer to an array of bytes. -/// \param[in] count The number of bytes to copy from \p value. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \pre \p value must be a valid address. -/// \pre `0 <=` \p count `<=` length of \p value. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// key must be a c string of the map key to be used -/// value must be a byte array of length `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMmapPutBytes( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: *const u8, - count: usize, -) -> *mut AMresult { - let doc = to_doc!(doc); - let mut vec = Vec::new(); - vec.extend_from_slice(std::slice::from_raw_parts(value, count)); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), vec)) -} - -/// \memberof AMdoc -/// \brief Puts a float as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A 64-bit float. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. 
-/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutF64( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: f64, -) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) -} - -/// \memberof AMdoc -/// \brief Puts a CRDT counter as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutCounter( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: i64, -) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.put( - to_obj_id!(obj_id), - to_str(key), - am::ScalarValue::Counter(value.into()), - )) -} - -/// \memberof AMdoc -/// \brief Puts a Lamport timestamp as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. 
-/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutTimestamp( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: i64, -) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.put( - to_obj_id!(obj_id), - to_str(key), - am::ScalarValue::Timestamp(value), - )) -} - -/// \memberof AMdoc -/// \brief Puts null as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutNull( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, -) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), ())) -} - -/// \memberof AMdoc -/// \brief Puts an empty object as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] obj_type An `AMobjIdType` enum tag. 
-/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutObject( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - obj_type: AMobjType, -) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.put_object(to_obj_id!(obj_id), to_str(key), obj_type.into())) -} - -/// \memberof AMdoc -/// \brief Gets the value at an index in a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index within the list object identified by \p obj_id. -/// \return A pointer to an `AMresult` struct. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -#[no_mangle] -pub unsafe extern "C" fn AMlistGet( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, -) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.get(to_obj_id!(obj_id), index)) -} - -/// \memberof AMdoc -/// \brief Gets the value for a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. 
-/// \return A pointer to an `AMresult` struct. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapGet( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, -) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.get(to_obj_id!(obj_id), to_str(key))) -} - -/// \memberof AMdoc -/// \brief Deletes an index in a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj_id. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -#[no_mangle] -pub unsafe extern "C" fn AMlistDelete( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, -) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.delete(to_obj_id!(obj_id), index)) -} - -/// \memberof AMdoc -/// \brief Puts a boolean as the value at an index in a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj_id. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. 
-/// \param[in] value A boolean. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -#[no_mangle] -pub unsafe extern "C" fn AMlistPutBool( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: bool, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let value = am::ScalarValue::Boolean(value); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts an array of bytes as the value at an index in a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj_id. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A pointer to an array of bytes. -/// \param[in] count The number of bytes to copy from \p value. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \pre \p value must be a valid address. -/// \pre `0 <=` \p count `<=` length of \p value. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. 
-/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// value must be a byte array of length `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMlistPutBytes( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: *const u8, - count: usize, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let mut vec = Vec::new(); - vec.extend_from_slice(std::slice::from_raw_parts(value, count)); - to_result(if insert { - doc.insert(obj_id, index, vec) - } else { - doc.put(obj_id, index, vec) - }) -} - -/// \memberof AMdoc -/// \brief Puts a CRDT counter as the value at an index in a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj_id. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. 
-/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -#[no_mangle] -pub unsafe extern "C" fn AMlistPutCounter( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: i64, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let value = am::ScalarValue::Counter(value.into()); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a float as the value at an index in a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj_id. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A 64-bit float. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -#[no_mangle] -pub unsafe extern "C" fn AMlistPutF64( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: f64, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a signed integer as the value at an index in a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. 
-/// \param[in] index An index in the list object identified by \p obj_id. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -#[no_mangle] -pub unsafe extern "C" fn AMlistPutInt( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: i64, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts null as the value at an index in a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj_id. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. 
-/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -#[no_mangle] -pub unsafe extern "C" fn AMlistPutNull( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let value = (); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts an empty object as the value at an index in a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj_id. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] obj_type An `AMobjIdType` enum tag. -/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -#[no_mangle] -pub unsafe extern "C" fn AMlistPutObject( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - obj_type: AMobjType, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let value = obj_type.into(); - to_result(if insert { - doc.insert_object(obj_id, index, value) - } else { - doc.put_object(&obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a UTF-8 string as the value at an index in a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. 
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj_id. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A UTF-8 string. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \pre \p value must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// value must be a null-terminated array of `c_char` -#[no_mangle] -pub unsafe extern "C" fn AMlistPutStr( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: *const c_char, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let value = to_str(value); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a Lamport timestamp as the value at an index in a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj_id. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. 
-/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -#[no_mangle] -pub unsafe extern "C" fn AMlistPutTimestamp( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: i64, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let value = am::ScalarValue::Timestamp(value); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts an unsigned integer as the value at an index in a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] index An index in the list object identified by \p obj_id. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A 64-bit unsigned integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -#[no_mangle] -pub unsafe extern "C" fn AMlistPutUint( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: u64, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMresult -/// \brief Deallocates the storage for an `AMresult` struct. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \pre \p result must be a valid address. 
-/// \internal -/// -/// # Safety -/// result must be a pointer to a valid AMresult -#[no_mangle] -pub unsafe extern "C" fn AMfreeResult(result: *mut AMresult) { - if !result.is_null() { - let result: AMresult = *Box::from_raw(result); - drop(result) - } -} - -/// \memberof AMsyncState -/// \brief Deallocates the storage for an `AMsyncState` struct previously -/// allocated by `AMinitSyncState()`. -/// -/// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \pre \p sync_state must be a valid address. -/// \internal -/// -/// # Safety -/// sync_state must be a pointer to a valid AMsyncState -#[no_mangle] -pub unsafe extern "C" fn AMfreeSyncState(sync_state: *mut AMsyncState) { - if !sync_state.is_null() { - let sync_state: AMsyncState = *Box::from_raw(sync_state); - drop(sync_state) - } -} - -/// \memberof AMresult -/// \brief Gets an `AMresult` struct's error message string. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \return A UTF-8 string value or `NULL`. -/// \pre \p result must be a valid address. -/// \internal -/// -/// # Safety -/// result must be a pointer to a valid AMresult -#[no_mangle] -pub unsafe extern "C" fn AMerrorMessage(result: *mut AMresult) -> *const c_char { - match result.as_mut() { - Some(AMresult::Error(s)) => s.as_ptr(), - _ => std::ptr::null::(), - } -} - -/// \memberof AMdoc -/// \brief Gets the size of an object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \return The count of values in the object identified by \p obj_id. -/// \pre \p doc must be a valid address. 
-/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -#[no_mangle] -pub unsafe extern "C" fn AMobjSize(doc: *const AMdoc, obj_id: *const AMobjId) -> usize { - if let Some(doc) = doc.as_ref() { - doc.length(to_obj_id!(obj_id)) - } else { - 0 - } -} - -/// \memberof AMdoc -/// \brief Gets the historical size of an object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] change A pointer to an `AMchange` struct or `NULL`. -/// \return The count of values in the object identified by \p obj_id at -/// \p change. -/// \pre \p doc must be a valid address. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// change must be a pointer to a valid AMchange or NULL -#[no_mangle] -pub unsafe extern "C" fn AMobjSizeAt( - doc: *const AMdoc, - obj_id: *const AMobjId, - change: *const AMchange, -) -> usize { - if let Some(doc) = doc.as_ref() { - if let Some(change) = change.as_ref() { - let change: &am::Change = change.as_ref(); - let change_hashes = vec![change.hash]; - return doc.length_at(to_obj_id!(obj_id), &change_hashes); - } - }; - 0 -} - -/// \memberof AMchange -/// \brief Gets the change hash within an `AMchange` struct. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return A change hash as an `AMbyteSpan` struct. -/// \pre \p change must be a valid address. -/// \internal -/// -/// # Safety -/// change must be a pointer to a valid AMchange -#[no_mangle] -pub unsafe extern "C" fn AMgetChangeHash(change: *const AMchange) -> AMbyteSpan { - match change.as_ref() { - Some(change) => change.into(), - None => AMbyteSpan::default(), - } -} - -/// \memberof AMdoc -/// \brief Gets the changes added to \p doc by their respective hashes. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. 
-/// \param[in] have_deps A pointer to an `AMchangeHashes` struct or `NULL`. -/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfreeResult()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetChanges( - doc: *mut AMdoc, - have_deps: *const AMchangeHashes, -) -> *mut AMresult { - let doc = to_doc!(doc); - let empty_deps = Vec::::new(); - let have_deps = match have_deps.as_ref() { - Some(have_deps) => have_deps.as_ref(), - None => &empty_deps, - }; - to_result(doc.get_changes(have_deps)) -} - -/// \memberof AMchange -/// \brief Gets the message within an `AMchange` struct. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return A UTF-8 string or `NULL`. -/// \pre \p change must be a valid address. -/// \internal -/// -/// # Safety -/// change must be a pointer to a valid AMchange -#[no_mangle] -pub unsafe extern "C" fn AMgetMessage(change: *const AMchange) -> *const c_char { - if let Some(change) = change.as_ref() { - if let Some(c_message) = change.c_message() { - return c_message.as_ptr(); - } - } - std::ptr::null::() -} From bb0b023c9af9feab6f7f1c42218a8790d12a4130 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:37:22 -0700 Subject: [PATCH 413/730] Moved `AMobjId` into its own source file. --- automerge-c/src/obj.rs | 49 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 automerge-c/src/obj.rs diff --git a/automerge-c/src/obj.rs b/automerge-c/src/obj.rs new file mode 100644 index 00000000..f038a8e7 --- /dev/null +++ b/automerge-c/src/obj.rs @@ -0,0 +1,49 @@ +use automerge as am; +use std::ops::Deref; + +/// \struct AMobjId +/// \brief An object's unique identifier. 
+pub struct AMobjId(am::ObjId); + +impl AMobjId { + pub fn new(obj_id: am::ObjId) -> Self { + Self(obj_id) + } +} + +impl AsRef for AMobjId { + fn as_ref(&self) -> &am::ObjId { + &self.0 + } +} + +impl Deref for AMobjId { + type Target = am::ObjId; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +/// \ingroup enumerations +/// \enum AMobjType +/// \brief The type of an object value. +#[repr(u8)] +pub enum AMobjType { + /// A list. + List = 1, + /// A key-value map. + Map, + /// A list of Unicode graphemes. + Text, +} + +impl From for am::ObjType { + fn from(o: AMobjType) -> Self { + match o { + AMobjType::Map => am::ObjType::Map, + AMobjType::List => am::ObjType::List, + AMobjType::Text => am::ObjType::Text, + } + } +} From 546b6ccbbdd3c622602080e8a1d497e2fe1686c7 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:49:23 -0700 Subject: [PATCH 414/730] Moved `AMobjId` into its own source file. Added the `AMvalue::SyncState` variant. Enabled `AMchange` structs to be lazily created. Added the `AMresult::SyncState` variant. Added an `Option<&automerge::Change>` conversion for `AMresult`. Added a `Result` conversion for `AMresult`. Added a `Result` conversion for `AMresult`. Added a `Result` conversion for `AMresult`. Moved `AMerrorMessage()` and `AMresult*()` into the source file for `AMresult`. --- automerge-c/src/result.rs | 323 ++++++++++++++++++++++++++++++++------ 1 file changed, 276 insertions(+), 47 deletions(-) diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 8948b577..75136566 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -1,35 +1,14 @@ use automerge as am; +use std::collections::BTreeMap; use std::ffi::CString; -use std::ops::Deref; +use std::os::raw::c_char; -use crate::AMbyteSpan; -use crate::AMchangeHashes; -use crate::AMsyncMessage; -use crate::{AMchange, AMchanges}; - -/// \struct AMobjId -/// \brief An object's unique identifier. 
-pub struct AMobjId(am::ObjId); - -impl AMobjId { - pub fn new(obj_id: am::ObjId) -> Self { - Self(obj_id) - } -} - -impl AsRef for AMobjId { - fn as_ref(&self) -> &am::ObjId { - &self.0 - } -} - -impl Deref for AMobjId { - type Target = am::ObjId; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} +use crate::byte_span::AMbyteSpan; +use crate::change::AMchange; +use crate::change_hashes::AMchangeHashes; +use crate::changes::AMchanges; +use crate::obj::AMobjId; +use crate::sync::{AMsyncMessage, AMsyncState}; /// \struct AMvalue /// \brief A discriminated union of value type variants for an `AMresult` struct. @@ -47,7 +26,7 @@ impl Deref for AMobjId { /// A boolean. /// /// \var AMvalue::bytes -/// An array of bytes as an `AMbyteSpan` struct. +/// A sequence of bytes as an `AMbyteSpan` struct. /// /// \var AMvalue::change_hashes /// A sequence of change hashes as an `AMchangeHashes` struct. @@ -81,7 +60,7 @@ pub enum AMvalue<'a> { ActorId(AMbyteSpan), /// A boolean variant. Boolean(bool), - /// An array of bytes variant. + /// A byte array variant. Bytes(AMbyteSpan), /// A change hashes variant. ChangeHashes(AMchangeHashes), @@ -113,6 +92,8 @@ pub enum AMvalue<'a> { Uint(u64), /// A synchronization message variant. SyncMessage(&'a AMsyncMessage), + /// A synchronization state variant. + SyncState(&'a mut AMsyncState), /// A void variant. 
Void, } @@ -122,11 +103,12 @@ pub enum AMvalue<'a> { pub enum AMresult { ActorId(am::ActorId), ChangeHashes(Vec), - Changes(Vec), + Changes(Vec, BTreeMap), Error(CString), ObjId(AMobjId), Scalars(Vec>, Option), SyncMessage(AMsyncMessage), + SyncState(AMsyncState), Void, } @@ -142,6 +124,15 @@ impl From for AMresult { } } +impl From> for AMresult { + fn from(maybe: Option<&am::Change>) -> Self { + match maybe { + Some(change) => AMresult::Changes(vec![change.clone()], BTreeMap::new()), + None => AMresult::Void, + } + } +} + impl From> for AMresult { fn from(maybe: Option) -> Self { match maybe { @@ -168,6 +159,15 @@ impl From> for AMresult { } } +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(change) => AMresult::Changes(vec![change], BTreeMap::new()), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { @@ -177,6 +177,33 @@ impl From> for AMresult { } } +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(message) => AMresult::SyncMessage(AMsyncMessage::new(message)), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(state) => AMresult::SyncState(AMsyncState::new(state)), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + +impl From, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::AutomergeError>) -> Self { + match maybe { + Ok(value) => AMresult::Scalars(vec![value], None), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + impl From, am::ObjId)>, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { match maybe { @@ -188,15 +215,6 @@ impl From, am::ObjId)>, am::AutomergeError>> f } } -impl From, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::AutomergeError>) -> Self { - match maybe { - Ok(value) => AMresult::Scalars(vec![value], None), - Err(e) 
=> AMresult::err(&e.to_string()), - } - } -} - impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { @@ -206,15 +224,23 @@ impl From> for AMresult { } } +impl From, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::AutomergeError>) -> Self { + match maybe { + Ok(changes) => AMresult::Changes(changes, BTreeMap::new()), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(changes) => AMresult::Changes( - changes - .iter() - .map(|&change| AMchange::new(change.clone())) - .collect(), - ), + Ok(changes) => { + let changes: Vec = + changes.iter().map(|&change| change.clone()).collect(); + AMresult::Changes(changes, BTreeMap::new()) + } Err(e) => AMresult::err(&e.to_string()), } } @@ -238,8 +264,211 @@ impl From, am::AutomergeError>> for AMresult { } } +impl From> for AMresult { + fn from(bytes: Vec) -> Self { + AMresult::Scalars(vec![am::Value::bytes(bytes)], None) + } +} + +impl From> for AMresult { + fn from(change_hashes: Vec) -> Self { + AMresult::ChangeHashes(change_hashes) + } +} + impl From for *mut AMresult { fn from(b: AMresult) -> Self { Box::into_raw(Box::new(b)) } } + +pub fn to_result>(r: R) -> *mut AMresult { + (r.into()).into() +} + +/// \ingroup enumerations +/// \enum AMstatus +/// \brief The status of an API call. +#[derive(Debug)] +#[repr(u8)] +pub enum AMstatus { + /// Success. + /// \note This tag is unalphabetized so that `0` indicates success. + Ok, + /// Failure due to an error. + Error, + /// Failure due to an invalid result. + InvalidResult, +} + +/// \memberof AMresult +/// \brief Gets an `AMresult` struct's error message string. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \return A UTF-8 string value or `NULL`. +/// \pre \p result must be a valid address. 
+/// \internal +/// +/// # Safety +/// result must be a pointer to a valid AMresult +#[no_mangle] +pub unsafe extern "C" fn AMerrorMessage(result: *mut AMresult) -> *const c_char { + match result.as_mut() { + Some(AMresult::Error(s)) => s.as_ptr(), + _ => std::ptr::null::(), + } +} + +/// \memberof AMresult +/// \brief Deallocates the storage for an `AMresult` struct. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \pre \p result must be a valid address. +/// \internal +/// +/// # Safety +/// result must be a pointer to a valid AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultFree(result: *mut AMresult) { + if !result.is_null() { + let result: AMresult = *Box::from_raw(result); + drop(result) + } +} + +/// \memberof AMresult +/// \brief Gets the size of an `AMresult` struct. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \return The count of values in \p result. +/// \pre \p result must be a valid address. +/// \internal +/// +/// # Safety +/// result must be a pointer to a valid AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { + if let Some(result) = result.as_mut() { + match result { + AMresult::ActorId(_) | AMresult::ObjId(_) => 1, + AMresult::ChangeHashes(change_hashes) => change_hashes.len(), + AMresult::Changes(changes, _) => changes.len(), + AMresult::Error(_) | AMresult::Void => 0, + AMresult::Scalars(vec, _) => vec.len(), + AMresult::SyncMessage(_) => 1, + AMresult::SyncState(_) => 1, + } + } else { + 0 + } +} + +/// \memberof AMresult +/// \brief Gets the status code of an `AMresult` struct. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \return An `AMstatus` enum tag. +/// \pre \p result must be a valid address. 
+/// \internal +/// +/// # Safety +/// result must be a pointer to a valid AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultStatus(result: *mut AMresult) -> AMstatus { + match result.as_mut() { + Some(AMresult::Error(_)) => AMstatus::Error, + None => AMstatus::InvalidResult, + _ => AMstatus::Ok, + } +} + +/// \memberof AMresult +/// \brief Gets a value from an `AMresult` struct. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \param[in] index The index of a value. +/// \return An `AMvalue` struct. +/// \pre \p result must be a valid address. +/// \pre `0 <=` \p index `<=` AMresultSize() for \p result. +/// \internal +/// +/// # Safety +/// result must be a pointer to a valid AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult, index: usize) -> AMvalue<'a> { + let mut value = AMvalue::Void; + if let Some(result) = result.as_mut() { + match result { + AMresult::ActorId(actor_id) => { + if index == 0 { + value = AMvalue::ActorId(actor_id.into()); + } + } + AMresult::ChangeHashes(change_hashes) => { + value = AMvalue::ChangeHashes(AMchangeHashes::new(change_hashes)); + } + AMresult::Changes(changes, storage) => { + value = AMvalue::Changes(AMchanges::new(changes, storage)); + } + AMresult::Error(_) => {} + AMresult::ObjId(obj_id) => { + if index == 0 { + value = AMvalue::ObjId(obj_id); + } + } + AMresult::Scalars(vec, hosted_str) => { + if let Some(element) = vec.get(index) { + match element { + am::Value::Scalar(scalar) => match scalar.as_ref() { + am::ScalarValue::Boolean(flag) => { + value = AMvalue::Boolean(*flag); + } + am::ScalarValue::Bytes(bytes) => { + value = AMvalue::Bytes(bytes.as_slice().into()); + } + am::ScalarValue::Counter(counter) => { + value = AMvalue::Counter(counter.into()); + } + am::ScalarValue::F64(float) => { + value = AMvalue::F64(*float); + } + am::ScalarValue::Int(int) => { + value = AMvalue::Int(*int); + } + am::ScalarValue::Null => { + value = AMvalue::Null; + } + 
am::ScalarValue::Str(smol_str) => { + *hosted_str = CString::new(smol_str.to_string()).ok(); + if let Some(c_str) = hosted_str { + value = AMvalue::Str(c_str.as_ptr()); + } + } + am::ScalarValue::Timestamp(timestamp) => { + value = AMvalue::Timestamp(*timestamp); + } + am::ScalarValue::Uint(uint) => { + value = AMvalue::Uint(*uint); + } + }, + // \todo Confirm that an object value should be ignored + // when there's no object ID variant. + am::Value::Object(_) => {} + } + } + } + AMresult::SyncMessage(sync_message) => { + if index == 0 { + value = AMvalue::SyncMessage(sync_message); + } + } + AMresult::SyncState(sync_state) => { + if index == 0 { + value = AMvalue::SyncState(sync_state); + } + } + AMresult::Void => {} + } + }; + value +} From d9bf29e8fdb0989d7f644d1a636147561f9a0046 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:50:26 -0700 Subject: [PATCH 415/730] Grouped `AMsyncMessage` and `AMsyncState` into separate source files. --- automerge-c/src/sync.rs | 44 ++++++----------------------------------- 1 file changed, 6 insertions(+), 38 deletions(-) diff --git a/automerge-c/src/sync.rs b/automerge-c/src/sync.rs index ad15ce51..cfed1af5 100644 --- a/automerge-c/src/sync.rs +++ b/automerge-c/src/sync.rs @@ -1,39 +1,7 @@ -use automerge as am; +mod have; +mod haves; +mod message; +mod state; -/// \struct AMsyncMessage -/// \brief A synchronization message for a peer. -pub struct AMsyncMessage(am::sync::Message); - -impl AMsyncMessage { - pub fn new(message: am::sync::Message) -> Self { - Self(message) - } -} - -impl AsRef for AMsyncMessage { - fn as_ref(&self) -> &am::sync::Message { - &self.0 - } -} - -/// \struct AMsyncState -/// \brief The state of synchronization with a peer. 
-pub struct AMsyncState(am::sync::State); - -impl AMsyncState { - pub fn new(state: am::sync::State) -> Self { - Self(state) - } -} - -impl AsMut for AMsyncState { - fn as_mut(&mut self) -> &mut am::sync::State { - &mut self.0 - } -} - -impl From for *mut AMsyncState { - fn from(b: AMsyncState) -> Self { - Box::into_raw(Box::new(b)) - } -} +pub(crate) use message::{to_sync_message, AMsyncMessage}; +pub(crate) use state::AMsyncState; From a9912d4b9f5fc474a810d6bdb05d57f32a062360 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:51:41 -0700 Subject: [PATCH 416/730] Grouped the `AMlist*()` functions into their own source file. --- automerge-c/src/doc/list.rs | 431 ++++++++++++++++++++++++++++++++++++ 1 file changed, 431 insertions(+) create mode 100644 automerge-c/src/doc/list.rs diff --git a/automerge-c/src/doc/list.rs b/automerge-c/src/doc/list.rs new file mode 100644 index 00000000..8f1718c4 --- /dev/null +++ b/automerge-c/src/doc/list.rs @@ -0,0 +1,431 @@ +use automerge as am; +use automerge::transaction::Transactable; +use std::os::raw::c_char; + +use crate::doc::{to_doc, to_obj_id, to_str, AMdoc}; +use crate::obj::{AMobjId, AMobjType}; +use crate::result::{to_result, AMresult}; + +/// \memberof AMdoc +/// \brief Deletes an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistDelete( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.delete(to_obj_id!(obj_id), index)) +} + +/// \memberof AMdoc +/// \brief Gets the value at an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index within the list object identified by \p obj_id. +/// \return A pointer to an `AMresult` struct. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistGet( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.get(to_obj_id!(obj_id), index)) +} + +/// \memberof AMdoc +/// \brief Puts a boolean as the value at an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. +/// \param[in] value A boolean. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistPutBool( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, + insert: bool, + value: bool, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let value = am::ScalarValue::Boolean(value); + to_result(if insert { + doc.insert(obj_id, index, value) + } else { + doc.put(obj_id, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts an array of bytes as the value at an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. +/// \param[in] value A pointer to an array of bytes. +/// \param[in] count The number of bytes to copy from \p value. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. +/// \pre \p value must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p value. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// value must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMlistPutBytes( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, + insert: bool, + value: *const u8, + count: usize, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let mut vec = Vec::new(); + vec.extend_from_slice(std::slice::from_raw_parts(value, count)); + to_result(if insert { + doc.insert(obj_id, index, vec) + } else { + doc.put(obj_id, index, vec) + }) +} + +/// \memberof AMdoc +/// \brief Puts a CRDT counter as the value at an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistPutCounter( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, + insert: bool, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let value = am::ScalarValue::Counter(value.into()); + to_result(if insert { + doc.insert(obj_id, index, value) + } else { + doc.put(obj_id, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts a float as the value at an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. +/// \param[in] value A 64-bit float. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistPutF64( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, + insert: bool, + value: f64, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + to_result(if insert { + doc.insert(obj_id, index, value) + } else { + doc.put(obj_id, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts a signed integer as the value at an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. 
+/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistPutInt( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, + insert: bool, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + to_result(if insert { + doc.insert(obj_id, index, value) + } else { + doc.put(obj_id, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts null as the value at an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistPutNull( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, + insert: bool, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let value = (); + to_result(if insert { + doc.insert(obj_id, index, value) + } else { + doc.put(obj_id, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts an empty object as the value at an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. +/// \param[in] obj_type An `AMobjIdType` enum tag. +/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistPutObject( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, + insert: bool, + obj_type: AMobjType, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let value = obj_type.into(); + to_result(if insert { + doc.insert_object(obj_id, index, value) + } else { + doc.put_object(&obj_id, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts a UTF-8 string as the value at an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. 
+/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. +/// \param[in] value A UTF-8 string. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. +/// \pre \p value must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// value must be a null-terminated array of `c_char` +#[no_mangle] +pub unsafe extern "C" fn AMlistPutStr( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, + insert: bool, + value: *const c_char, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let value = to_str(value); + to_result(if insert { + doc.insert(obj_id, index, value) + } else { + doc.put(obj_id, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts a Lamport timestamp as the value at an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistPutTimestamp( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, + insert: bool, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let value = am::ScalarValue::Timestamp(value); + to_result(if insert { + doc.insert(obj_id, index, value) + } else { + doc.put(obj_id, index, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts an unsigned integer as the value at an index in a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] insert A flag to insert \p value before \p index instead of +/// writing \p value over \p index. +/// \param[in] value A 64-bit unsigned integer. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistPutUint( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, + insert: bool, + value: u64, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + to_result(if insert { + doc.insert(obj_id, index, value) + } else { + doc.put(obj_id, index, value) + }) +} From 18ee9b71e01193d41313768910ae5a92c1509e84 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:52:02 -0700 Subject: [PATCH 417/730] Grouped the `AMmap*()` functions into their own source file. 
--- automerge-c/src/doc/map.rs | 367 +++++++++++++++++++++++++++++++++++++ 1 file changed, 367 insertions(+) create mode 100644 automerge-c/src/doc/map.rs diff --git a/automerge-c/src/doc/map.rs b/automerge-c/src/doc/map.rs new file mode 100644 index 00000000..1469e11a --- /dev/null +++ b/automerge-c/src/doc/map.rs @@ -0,0 +1,367 @@ +use automerge as am; +use automerge::transaction::Transactable; +use std::os::raw::c_char; + +use crate::doc::utils::to_str; +use crate::doc::{to_doc, to_obj_id, AMdoc}; +use crate::obj::{AMobjId, AMobjType}; +use crate::result::{to_result, AMresult}; + +/// \memberof AMdoc +/// \brief Deletes a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapDelete( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.delete(to_obj_id!(obj_id), to_str(key))) +} + +/// \memberof AMdoc +/// \brief Gets the value for a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \return A pointer to an `AMresult` struct. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. 
+/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapGet( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.get(to_obj_id!(obj_id), to_str(key))) +} + +/// \memberof AMdoc +/// \brief Puts a boolean as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] value A boolean. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapPutBool( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, + value: bool, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) +} + +/// \memberof AMdoc +/// \brief Puts an array of bytes as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] value A pointer to an array of bytes. +/// \param[in] count The number of bytes to copy from \p value. 
+/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \pre \p value must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p value. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +/// value must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMmapPutBytes( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, + value: *const u8, + count: usize, +) -> *mut AMresult { + let doc = to_doc!(doc); + let mut vec = Vec::new(); + vec.extend_from_slice(std::slice::from_raw_parts(value, count)); + to_result(doc.put(to_obj_id!(obj_id), to_str(key), vec)) +} + +/// \memberof AMdoc +/// \brief Puts a CRDT counter as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapPutCounter( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put( + to_obj_id!(obj_id), + to_str(key), + am::ScalarValue::Counter(value.into()), + )) +} + +/// \memberof AMdoc +/// \brief Puts null as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapPutNull( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put(to_obj_id!(obj_id), to_str(key), ())) +} + +/// \memberof AMdoc +/// \brief Puts an empty object as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] obj_type An `AMobjIdType` enum tag. +/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. 
+/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapPutObject( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, + obj_type: AMobjType, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put_object(to_obj_id!(obj_id), to_str(key), obj_type.into())) +} + +/// \memberof AMdoc +/// \brief Puts a float as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] value A 64-bit float. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapPutF64( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, + value: f64, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) +} + +/// \memberof AMdoc +/// \brief Puts a signed integer as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] value A 64-bit signed integer. 
+/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapPutInt( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) +} + +/// \memberof AMdoc +/// \brief Puts a UTF-8 string as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] value A UTF-8 string. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \pre \p value must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +/// value must be a null-terminated array of `c_char` +#[no_mangle] +pub unsafe extern "C" fn AMmapPutStr( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, + value: *const c_char, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put(to_obj_id!(obj_id), to_str(key), to_str(value))) +} + +/// \memberof AMdoc +/// \brief Puts a Lamport timestamp as the value of a key in a map object. 
+/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapPutTimestamp( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put( + to_obj_id!(obj_id), + to_str(key), + am::ScalarValue::Timestamp(value), + )) +} + +/// \memberof AMdoc +/// \brief Puts an unsigned integer as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] value A 64-bit unsigned integer. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapPutUint( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, + value: u64, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) +} From 9213d438502e5ef132c83c3aca0014ac3077429b Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:53:09 -0700 Subject: [PATCH 418/730] Grouped some common macros and functions into their own source file. --- automerge-c/src/doc/utils.rs | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 automerge-c/src/doc/utils.rs diff --git a/automerge-c/src/doc/utils.rs b/automerge-c/src/doc/utils.rs new file mode 100644 index 00000000..eb35b69e --- /dev/null +++ b/automerge-c/src/doc/utils.rs @@ -0,0 +1,29 @@ +use std::ffi::CStr; +use std::os::raw::c_char; + +macro_rules! to_doc { + ($handle:expr) => {{ + let handle = $handle.as_mut(); + match handle { + Some(b) => b, + None => return AMresult::err("Invalid AMdoc pointer").into(), + } + }}; +} + +pub(crate) use to_doc; + +macro_rules! to_obj_id { + ($handle:expr) => {{ + match $handle.as_ref() { + Some(obj_id) => obj_id, + None => &automerge::ROOT, + } + }}; +} + +pub(crate) use to_obj_id; + +pub(crate) unsafe fn to_str(c: *const c_char) -> String { + CStr::from_ptr(c).to_string_lossy().to_string() +} From be3c7d6233bbdcab4a0196fd670a9f952d9d588f Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:54:02 -0700 Subject: [PATCH 419/730] Added the `AMsyncHave` struct. Added `AMsyncHaveLastSync()`. 
--- automerge-c/src/sync/have.rs | 40 ++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 automerge-c/src/sync/have.rs diff --git a/automerge-c/src/sync/have.rs b/automerge-c/src/sync/have.rs new file mode 100644 index 00000000..ae85ee93 --- /dev/null +++ b/automerge-c/src/sync/have.rs @@ -0,0 +1,40 @@ +use automerge as am; + +use crate::change_hashes::AMchangeHashes; + +/// \struct AMsyncHave +/// \brief A summary of the changes that the sender of a synchronization +/// message already has. +#[derive(Clone)] +pub struct AMsyncHave(*const am::sync::Have); + +impl AMsyncHave { + pub fn new(have: &am::sync::Have) -> Self { + Self(have) + } +} + +impl AsRef for AMsyncHave { + fn as_ref(&self) -> &am::sync::Have { + unsafe { &*self.0 } + } +} + +/// \memberof AMsyncHave +/// \brief Gets the heads of the sender. +/// +/// \param[in] sync_have A pointer to an `AMsyncHave` struct. +/// \return An `AMchangeHashes` struct. +/// \pre \p sync_have must be a valid address. +/// \internal +/// +/// # Safety +/// sync_have must be a pointer to a valid AMsyncHave +#[no_mangle] +pub unsafe extern "C" fn AMsyncHaveLastSync(sync_have: *const AMsyncHave) -> AMchangeHashes { + if let Some(sync_have) = sync_have.as_ref() { + AMchangeHashes::new(&sync_have.as_ref().last_sync) + } else { + AMchangeHashes::default() + } +} From c5d3d1b0a028fa0b3289d05cc319c93ce6ca4357 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:55:34 -0700 Subject: [PATCH 420/730] Added the `AMsyncHaves` struct. Added `AMsyncHavesAdvance()`. Added `AMsyncHavesNext()`. Added `AMsyncHavesPrev()`. Added `AMsyncHavesSize()`. 
--- automerge-c/src/sync/haves.rs | 196 ++++++++++++++++++++++++++++++++++ 1 file changed, 196 insertions(+) create mode 100644 automerge-c/src/sync/haves.rs diff --git a/automerge-c/src/sync/haves.rs b/automerge-c/src/sync/haves.rs new file mode 100644 index 00000000..c929ff8d --- /dev/null +++ b/automerge-c/src/sync/haves.rs @@ -0,0 +1,196 @@ +use automerge as am; +use std::collections::BTreeMap; +use std::ffi::c_void; + +use crate::sync::have::AMsyncHave; + +/// \struct AMsyncHaves +/// \brief A bidirectional iterator over a sequence of synchronization haves. +#[repr(C)] +pub struct AMsyncHaves { + /// The length of the sequence. + len: usize, + /// The offset from \p ptr, \p +offset -> forward direction, + /// \p -offset -> reverse direction. + offset: isize, + /// A pointer to the first synchronization have or `NULL`. + ptr: *const c_void, + /// Reserved. + storage: *mut c_void, +} + +impl AMsyncHaves { + pub fn new(sync_haves: &[am::sync::Have], storage: &mut BTreeMap) -> Self { + let storage: *mut BTreeMap = storage; + Self { + len: sync_haves.len(), + offset: 0, + ptr: sync_haves.as_ptr() as *const c_void, + storage: storage as *mut c_void, + } + } + + pub fn advance(&mut self, n: isize) { + let len = self.len as isize; + if n != 0 && self.offset >= -len && self.offset < len { + // It's being advanced and it's hasn't stopped. + self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); + }; + } + + pub fn next(&mut self, n: isize) -> Option<*const AMsyncHave> { + let len = self.len as isize; + if self.offset < -len || self.offset == len { + // It's stopped. 
+ None + } else { + let slice: &[am::sync::Have] = + unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, self.len) }; + let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; + let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + let value = match storage.get_mut(&index) { + Some(value) => value, + None => { + storage.insert(index, AMsyncHave::new(&slice[index])); + storage.get_mut(&index).unwrap() + } + }; + self.advance(n); + Some(value) + } + } + + pub fn prev(&mut self, n: isize) -> Option<*const AMsyncHave> { + self.advance(n); + let len = self.len as isize; + if self.offset < -len || self.offset == len { + // It's stopped. + None + } else { + let slice: &[am::sync::Have] = + unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, self.len) }; + let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; + let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + Some(match storage.get_mut(&index) { + Some(value) => value, + None => { + storage.insert(index, AMsyncHave::new(&slice[index])); + storage.get_mut(&index).unwrap() + } + }) + } + } +} + +impl AsRef<[am::sync::Have]> for AMsyncHaves { + fn as_ref(&self) -> &[am::sync::Have] { + unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, self.len) } + } +} + +impl Default for AMsyncHaves { + fn default() -> Self { + Self { + len: 0, + offset: 0, + ptr: std::ptr::null(), + storage: std::ptr::null_mut(), + } + } +} + +/// \memberof AMsyncHaves +/// \brief Advances/rewinds an `AMsyncHaves` struct by at most \p |n| +/// positions. +/// +/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. +/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum +/// number of positions to advance/rewind. +/// \pre \p sync_haves must be a valid address. 
+/// \internal +/// +/// #Safety +/// sync_haves must be a pointer to a valid AMsyncHaves +#[no_mangle] +pub unsafe extern "C" fn AMsyncHavesAdvance(sync_haves: *mut AMsyncHaves, n: isize) { + if let Some(sync_haves) = sync_haves.as_mut() { + sync_haves.advance(n); + }; +} + +/// \memberof AMsyncHaves +/// \brief Gets a pointer to the `AMsyncHave` struct at the current position of +/// an `AMsyncHaves`struct and then advances/rewinds it by at most \p |n| +/// positions. +/// +/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. +/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum +/// number of positions to advance/rewind. +/// \return A pointer to an `AMsyncHave` struct that's `NULL` when \p sync_haves +/// was previously advanced/rewound past its +/// forward/backward limit. +/// \pre \p sync_haves must be a valid address. +/// \internal +/// +/// #Safety +/// sync_haves must be a pointer to a valid AMsyncHaves +#[no_mangle] +pub unsafe extern "C" fn AMsyncHavesNext( + sync_haves: *mut AMsyncHaves, + n: isize, +) -> *const AMsyncHave { + if let Some(sync_haves) = sync_haves.as_mut() { + if let Some(sync_have) = sync_haves.next(n) { + return sync_have; + } + } + std::ptr::null() +} + +/// \memberof AMsyncHaves +/// \brief Advances/rewinds an `AMsyncHaves` struct by at most \p |n| +/// positions and then gets a pointer to the `AMsyncHave` struct at its +/// current position. +/// +/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. +/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum +/// number of positions to advance/rewind. +/// \return A pointer to an `AMsyncHave` struct that's `NULL` when \p sync_haves +/// is presently advanced/rewound past its +/// forward/backward limit. +/// \pre \p sync_haves must be a valid address. 
+/// \internal +/// +/// #Safety +/// sync_haves must be a pointer to a valid AMsyncHaves +#[no_mangle] +pub unsafe extern "C" fn AMsyncHavesPrev( + sync_haves: *mut AMsyncHaves, + n: isize, +) -> *const AMsyncHave { + if let Some(sync_haves) = sync_haves.as_mut() { + if let Some(sync_have) = sync_haves.prev(n) { + return sync_have; + } + } + std::ptr::null() +} + +/// \memberof AMsyncHaves +/// \brief Gets the size of an `AMsyncHaves` struct. +/// +/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. +/// \return The count of values in \p sync_haves. +/// \pre \p sync_haves must be a valid address. +/// \internal +/// +/// #Safety +/// sync_haves must be a pointer to a valid AMsyncHaves +#[no_mangle] +pub unsafe extern "C" fn AMsyncHavesSize(sync_haves: *const AMsyncHaves) -> usize { + if let Some(sync_haves) = sync_haves.as_ref() { + sync_haves.len + } else { + 0 + } +} From 3c11946c16500638354d585f252a663596792f88 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 22:58:45 -0700 Subject: [PATCH 421/730] Moved the `AMsyncMessage` struct into its own source file. Added `AMsyncMessageChanges()`. Added `AMsyncMessageDecode()`. Added `AMsyncMessageEncode()`. Added `AMsyncMessageHaves()`. Added `AMsyncMessageHeads()`. Added `AMsyncMessageNeeds()`. --- automerge-c/src/sync/message.rs | 170 ++++++++++++++++++++++++++++++++ 1 file changed, 170 insertions(+) create mode 100644 automerge-c/src/sync/message.rs diff --git a/automerge-c/src/sync/message.rs b/automerge-c/src/sync/message.rs new file mode 100644 index 00000000..70f9e1f1 --- /dev/null +++ b/automerge-c/src/sync/message.rs @@ -0,0 +1,170 @@ +use automerge as am; +use std::cell::RefCell; +use std::collections::BTreeMap; + +use crate::change::AMchange; +use crate::change_hashes::AMchangeHashes; +use crate::changes::AMchanges; +use crate::result::{to_result, AMresult}; +use crate::sync::have::AMsyncHave; +use crate::sync::haves::AMsyncHaves; + +macro_rules! 
to_sync_message { + ($handle:expr) => {{ + let handle = $handle.as_ref(); + match handle { + Some(b) => b, + None => return AMresult::err("Invalid AMsyncMessage pointer").into(), + } + }}; +} + +pub(crate) use to_sync_message; + +/// \struct AMsyncMessage +/// \brief A synchronization message for a peer. +pub struct AMsyncMessage { + body: am::sync::Message, + changes_storage: RefCell>, + haves_storage: RefCell>, +} + +impl AMsyncMessage { + pub fn new(message: am::sync::Message) -> Self { + Self { + body: message, + changes_storage: RefCell::new(BTreeMap::new()), + haves_storage: RefCell::new(BTreeMap::new()), + } + } +} + +impl AsRef for AMsyncMessage { + fn as_ref(&self) -> &am::sync::Message { + &self.body + } +} + +/// \memberof AMsyncMessage +/// \brief Gets the changes for the recipient to apply. +/// +/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. +/// \return An `AMchanges` struct. +/// \pre \p sync_message must be a valid address. +/// \internal +/// +/// # Safety +/// sync_message must be a pointer to a valid AMsyncMessage +#[no_mangle] +pub unsafe extern "C" fn AMsyncMessageChanges(sync_message: *const AMsyncMessage) -> AMchanges { + if let Some(sync_message) = sync_message.as_ref() { + AMchanges::new( + &sync_message.body.changes, + &mut sync_message.changes_storage.borrow_mut(), + ) + } else { + AMchanges::default() + } +} + +/// \memberof AMsyncMessage +/// \brief Decodes an array of bytes into a synchronization message. +/// +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The number of bytes in \p src to decode. +/// \return A pointer to an `AMresult` struct containing an `AMsyncMessage` +/// struct. +/// \pre \p src must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p src. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMsyncMessageDecode(src: *const u8, count: usize) -> *mut AMresult { + let mut data = Vec::new(); + data.extend_from_slice(std::slice::from_raw_parts(src, count)); + to_result(am::sync::Message::decode(&data)) +} + +/// \memberof AMsyncMessage +/// \brief Encodes a synchronization message as an array of bytes. +/// +/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. +/// \return A pointer to an `AMresult` struct containing an array of bytes as +/// an `AMbyteSpan` struct. +/// \pre \p sync_message must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// sync_message must be a pointer to a valid AMsyncMessage +#[no_mangle] +pub unsafe extern "C" fn AMsyncMessageEncode(sync_message: *const AMsyncMessage) -> *mut AMresult { + let sync_message = to_sync_message!(sync_message); + to_result(sync_message.as_ref().clone().encode()) +} + +/// \memberof AMsyncMessage +/// \brief Gets a summary of the changes that the sender already has. +/// +/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. +/// \return An `AMhaves` struct. +/// \pre \p sync_message must be a valid address. +/// \internal +/// +/// # Safety +/// sync_message must be a pointer to a valid AMsyncMessage +#[no_mangle] +pub unsafe extern "C" fn AMsyncMessageHaves(sync_message: *const AMsyncMessage) -> AMsyncHaves { + if let Some(sync_message) = sync_message.as_ref() { + AMsyncHaves::new( + &sync_message.as_ref().have, + &mut sync_message.haves_storage.borrow_mut(), + ) + } else { + AMsyncHaves::default() + } +} + +/// \memberof AMsyncMessage +/// \brief Gets the heads of the sender. +/// +/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. +/// \return An `AMchangeHashes` struct. 
+/// \pre \p sync_message must be a valid address. +/// \internal +/// +/// # Safety +/// sync_message must be a pointer to a valid AMsyncMessage +#[no_mangle] +pub unsafe extern "C" fn AMsyncMessageHeads(sync_message: *const AMsyncMessage) -> AMchangeHashes { + if let Some(sync_message) = sync_message.as_ref() { + AMchangeHashes::new(&sync_message.as_ref().heads) + } else { + AMchangeHashes::default() + } +} + +/// \memberof AMsyncMessage +/// \brief Gets the hashes of any changes that are being explicitly requested +/// by the recipient. +/// +/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. +/// \return An `AMchangeHashes` struct. +/// \pre \p sync_message must be a valid address. +/// \internal +/// +/// # Safety +/// sync_message must be a pointer to a valid AMsyncMessage +#[no_mangle] +pub unsafe extern "C" fn AMsyncMessageNeeds(sync_message: *const AMsyncMessage) -> AMchangeHashes { + if let Some(sync_message) = sync_message.as_ref() { + AMchangeHashes::new(&sync_message.as_ref().need) + } else { + AMchangeHashes::default() + } +} From 4cb7481a1bc8990abd6a22e44fc0a720080ccb49 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 23:07:55 -0700 Subject: [PATCH 422/730] Moved the `AMsyncState` struct into its own source file. Added `AMsyncStateDecode()`. Added `AMsyncStateEncode()`. Added `AMsyncStateEqual()`. Added `AMsyncStateSharedHeads()`. Added `AMsyncStateLastSentHeads()`. Added `AMsyncStateTheirHaves()`. Added `AMsyncStateTheirHeads()`. Added `AMsyncStateTheirNeeds()`. 
--- automerge-c/src/sync/state.rs | 278 ++++++++++++++++++++++++++++++++++ 1 file changed, 278 insertions(+) create mode 100644 automerge-c/src/sync/state.rs diff --git a/automerge-c/src/sync/state.rs b/automerge-c/src/sync/state.rs new file mode 100644 index 00000000..16a5fae2 --- /dev/null +++ b/automerge-c/src/sync/state.rs @@ -0,0 +1,278 @@ +use automerge as am; +use std::cell::RefCell; +use std::collections::BTreeMap; + +use crate::change_hashes::AMchangeHashes; +use crate::result::{to_result, AMresult}; +use crate::sync::have::AMsyncHave; +use crate::sync::haves::AMsyncHaves; + +macro_rules! to_sync_state { + ($handle:expr) => {{ + let handle = $handle.as_ref(); + match handle { + Some(b) => b, + None => return AMresult::err("Invalid AMsyncState pointer").into(), + } + }}; +} + +pub(crate) use to_sync_state; + +/// \struct AMsyncState +/// \brief The state of synchronization with a peer. +pub struct AMsyncState { + body: am::sync::State, + their_haves_storage: RefCell>, +} + +impl AMsyncState { + pub fn new(state: am::sync::State) -> Self { + Self { + body: state, + their_haves_storage: RefCell::new(BTreeMap::new()), + } + } +} + +impl AsMut for AMsyncState { + fn as_mut(&mut self) -> &mut am::sync::State { + &mut self.body + } +} + +impl AsRef for AMsyncState { + fn as_ref(&self) -> &am::sync::State { + &self.body + } +} + +impl From for *mut AMsyncState { + fn from(b: AMsyncState) -> Self { + Box::into_raw(Box::new(b)) + } +} + +/// \memberof AMsyncState +/// \brief Decodes an array of bytes into a synchronizaton state. +/// +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The number of bytes in \p src to decode. +/// \return A pointer to an `AMresult` struct containing an `AMsyncState` +/// struct. +/// \pre \p src must be a valid address. +/// \pre `0 <=` \p count `<=` length of \p src. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. 
+/// \internal +/// +/// # Safety +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMsyncStateDecode(src: *const u8, count: usize) -> *mut AMresult { + let mut data = Vec::new(); + data.extend_from_slice(std::slice::from_raw_parts(src, count)); + to_result(am::sync::State::decode(&data)) +} + +/// \memberof AMsyncState +/// \brief Encodes a synchronizaton state as an array of bytes. +/// +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \return A pointer to an `AMresult` struct containing an array of bytes as +/// an `AMbyteSpan` struct. +/// \pre \p sync_state must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMresultFree()`. +/// \internal +/// +/// # Safety +/// sync_state must be a pointer to a valid AMsyncState +#[no_mangle] +pub unsafe extern "C" fn AMsyncStateEncode(sync_state: *const AMsyncState) -> *mut AMresult { + let sync_state = to_sync_state!(sync_state); + to_result(sync_state.as_ref().encode()) +} + +/// \memberof AMsyncState +/// \brief Compares two synchronization states for equality. +/// +/// \param[in] sync_state1 A pointer to an `AMsyncState` struct. +/// \param[in] sync_state2 A pointer to an `AMsyncState` struct. +/// \return `true` if \p sync_state1 `==` \p sync_state2 and `false` otherwise. +/// \pre \p sync_state1 must be a valid address. +/// \pre \p sync_state2 must be a valid address. 
+/// \internal +/// +/// #Safety +/// sync_state1 must be a pointer to a valid AMsyncState +/// sync_state2 must be a pointer to a valid AMsyncState +#[no_mangle] +pub unsafe extern "C" fn AMsyncStateEqual( + sync_state1: *const AMsyncState, + sync_state2: *const AMsyncState, +) -> bool { + match (sync_state1.as_ref(), sync_state2.as_ref()) { + (Some(sync_state1), Some(sync_state2)) => sync_state1.as_ref() == sync_state2.as_ref(), + (None, Some(_)) | (Some(_), None) | (None, None) => false, + } +} + +/// \memberof AMsyncState +/// \brief Deallocates the storage for an `AMsyncState` struct previously +/// allocated by `AMsyncStateInit()`. +/// +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \pre \p sync_state must be a valid address. +/// \internal +/// +/// # Safety +/// sync_state must be a pointer to a valid AMsyncState +#[no_mangle] +pub unsafe extern "C" fn AMsyncStateFree(sync_state: *mut AMsyncState) { + if !sync_state.is_null() { + let sync_state: AMsyncState = *Box::from_raw(sync_state); + drop(sync_state) + } +} + +/// \memberof AMsyncState +/// \brief Allocates a new `AMsyncState` struct and initializes it with +/// defaults. +/// +/// \return A pointer to an `AMsyncState` struct. +/// \warning To avoid a memory leak, the returned `AMsyncState` struct must be +/// deallocated with `AMsyncStateFree()`. +#[no_mangle] +pub extern "C" fn AMsyncStateInit() -> *mut AMsyncState { + AMsyncState::new(am::sync::State::new()).into() +} + +/// \memberof AMsyncState +/// \brief Gets the heads that are shared by both peers. +/// +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \return An `AMchangeHashes` struct. +/// \pre \p sync_state must be a valid address. 
+/// \internal +/// +/// # Safety +/// sync_state must be a pointer to a valid AMsyncState +#[no_mangle] +pub unsafe extern "C" fn AMsyncStateSharedHeads(sync_state: *const AMsyncState) -> AMchangeHashes { + if let Some(sync_state) = sync_state.as_ref() { + AMchangeHashes::new(&sync_state.as_ref().shared_heads) + } else { + AMchangeHashes::default() + } +} + +/// \memberof AMsyncState +/// \brief Gets the heads that were last sent by this peer. +/// +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \return An `AMchangeHashes` struct. +/// \pre \p sync_state must be a valid address. +/// \internal +/// +/// # Safety +/// sync_state must be a pointer to a valid AMsyncState +#[no_mangle] +pub unsafe extern "C" fn AMsyncStateLastSentHeads( + sync_state: *const AMsyncState, +) -> AMchangeHashes { + if let Some(sync_state) = sync_state.as_ref() { + AMchangeHashes::new(&sync_state.as_ref().last_sent_heads) + } else { + AMchangeHashes::default() + } +} + +/// \memberof AMsyncState +/// \brief Gets a summary of the changes that the other peer already has. +/// +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \param[out] has_value A pointer to a boolean flag that is set to `true` if +/// the returned `AMhaves` struct is relevant, `false` otherwise. +/// \return An `AMhaves` struct. +/// \pre \p sync_state must be a valid address. +/// \pre \p has_value must be a valid address. +/// \internal +/// +/// # Safety +/// sync_state must be a pointer to a valid AMsyncState +/// has_value must be a pointer to a valid bool. 
+#[no_mangle] +pub unsafe extern "C" fn AMsyncStateTheirHaves( + sync_state: *const AMsyncState, + has_value: *mut bool, +) -> AMsyncHaves { + if let Some(sync_state) = sync_state.as_ref() { + if let Some(haves) = &sync_state.as_ref().their_have { + *has_value = true; + return AMsyncHaves::new(haves, &mut sync_state.their_haves_storage.borrow_mut()); + }; + }; + *has_value = false; + AMsyncHaves::default() +} + +/// \memberof AMsyncState +/// \brief Gets the heads that were sent by the other peer. +/// +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \param[out] has_value A pointer to a boolean flag that is set to `true` if +/// the returned `AMchangeHashes` struct is relevant, `false` +/// otherwise. +/// \return An `AMchangeHashes` struct. +/// \pre \p sync_state must be a valid address. +/// \pre \p has_value must be a valid address. +/// \internal +/// +/// # Safety +/// sync_state must be a pointer to a valid AMsyncState +/// has_value must be a pointer to a valid bool. +#[no_mangle] +pub unsafe extern "C" fn AMsyncStateTheirHeads( + sync_state: *const AMsyncState, + has_value: *mut bool, +) -> AMchangeHashes { + if let Some(sync_state) = sync_state.as_ref() { + if let Some(change_hashes) = &sync_state.as_ref().their_heads { + *has_value = true; + return AMchangeHashes::new(change_hashes); + } + }; + *has_value = false; + AMchangeHashes::default() +} + +/// \memberof AMsyncState +/// \brief Gets the needs that were sent by the other peer. +/// +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \param[out] has_value A pointer to a boolean flag that is set to `true` if +/// the returned `AMchangeHashes` struct is relevant, `false` +/// otherwise. +/// \return An `AMchangeHashes` struct. +/// \pre \p sync_state must be a valid address. +/// \pre \p has_value must be a valid address. +/// \internal +/// +/// # Safety +/// sync_state must be a pointer to a valid AMsyncState +/// has_value must be a pointer to a valid bool. 
+#[no_mangle] +pub unsafe extern "C" fn AMsyncStateTheirNeeds( + sync_state: *const AMsyncState, + has_value: *mut bool, +) -> AMchangeHashes { + if let Some(sync_state) = sync_state.as_ref() { + if let Some(change_hashes) = &sync_state.as_ref().their_need { + *has_value = true; + return AMchangeHashes::new(change_hashes); + } + }; + *has_value = false; + AMchangeHashes::default() +} From 846b96bc9a3708fe590c6323bf17d26fdb645913 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 23:11:56 -0700 Subject: [PATCH 423/730] Renamed `AMfreeResult()` to `AMresultFree()`. --- automerge-c/test/amdoc_property_tests.c | 8 ++++---- automerge-c/test/amlistput_tests.c | 26 ++++++++++++------------- automerge-c/test/ammapput_tests.c | 20 +++++++++---------- 3 files changed, 27 insertions(+), 27 deletions(-) diff --git a/automerge-c/test/amdoc_property_tests.c b/automerge-c/test/amdoc_property_tests.c index cce016b9..c2de18d3 100644 --- a/automerge-c/test/amdoc_property_tests.c +++ b/automerge-c/test/amdoc_property_tests.c @@ -61,7 +61,7 @@ static void test_AMputActor(void **state) { assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); assert_int_equal(value.tag, AM_VALUE_VOID); - AMfreeResult(res); + AMresultFree(res); res = AMgetActor(group_state->doc); if (AMresultStatus(res) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(res)); @@ -71,7 +71,7 @@ static void test_AMputActor(void **state) { assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); assert_int_equal(value.actor_id.count, test_state->actor_id_size); assert_memory_equal(value.actor_id.src, test_state->actor_id_bytes, value.actor_id.count); - AMfreeResult(res); + AMresultFree(res); } static void test_AMputActorHex(void **state) { @@ -87,7 +87,7 @@ static void test_AMputActorHex(void **state) { assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); assert_int_equal(value.tag, AM_VALUE_VOID); - AMfreeResult(res); + AMresultFree(res); res = 
AMgetActorHex(group_state->doc); if (AMresultStatus(res) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(res)); @@ -97,7 +97,7 @@ static void test_AMputActorHex(void **state) { assert_int_equal(value.tag, AM_VALUE_STR); assert_int_equal(strlen(value.str), test_state->actor_id_size * 2); assert_string_equal(value.str, test_state->actor_id_str); - AMfreeResult(res); + AMresultFree(res); } int run_AMdoc_property_tests(void) { diff --git a/automerge-c/test/amlistput_tests.c b/automerge-c/test/amlistput_tests.c index c1da771a..59943f63 100644 --- a/automerge-c/test/amlistput_tests.c +++ b/automerge-c/test/amlistput_tests.c @@ -26,8 +26,8 @@ static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ } \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfreeResult(res); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ + AMresultFree(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ @@ -36,7 +36,7 @@ static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ value = AMresultValue(res, 0); \ assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ assert_true(value.member == scalar_value); \ - AMfreeResult(res); \ + AMresultFree(res); \ } #define test_AMlistPutBytes(mode) test_AMlistPutBytes ## _ ## mode @@ -59,8 +59,8 @@ static void test_AMlistPutBytes_ ## mode(void **state) { \ } \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfreeResult(res); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ + AMresultFree(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ @@ -70,7 +70,7 @@ static void test_AMlistPutBytes_ ## mode(void **state) { \ assert_int_equal(value.tag, AM_VALUE_BYTES); \ 
assert_int_equal(value.bytes.count, BYTES_SIZE); \ assert_memory_equal(value.bytes.src, bytes_value, BYTES_SIZE); \ - AMfreeResult(res); \ + AMresultFree(res); \ } #define test_AMlistPutNull(mode) test_AMlistPutNull_ ## mode @@ -85,8 +85,8 @@ static void test_AMlistPutNull_ ## mode(void **state) { \ } \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfreeResult(res); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ + AMresultFree(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ @@ -94,7 +94,7 @@ static void test_AMlistPutNull_ ## mode(void **state) { \ assert_int_equal(AMresultSize(res), 1); \ value = AMresultValue(res, 0); \ assert_int_equal(value.tag, AM_VALUE_NULL); \ - AMfreeResult(res); \ + AMresultFree(res); \ } #define test_AMlistPutObject(label, mode) test_AMlistPutObject_ ## label ## _ ## mode @@ -117,7 +117,7 @@ static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ assert_non_null(value.obj_id); \ assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ - AMfreeResult(res); \ + AMresultFree(res); \ } #define test_AMlistPutStr(mode) test_AMlistPutStr ## _ ## mode @@ -139,8 +139,8 @@ static void test_AMlistPutStr_ ## mode(void **state) { \ } \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfreeResult(res); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ + AMresultFree(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ @@ -150,7 +150,7 @@ static void test_AMlistPutStr_ ## mode(void **state) { \ assert_int_equal(value.tag, AM_VALUE_STR); \ assert_int_equal(strlen(value.str), STR_LEN); \ assert_memory_equal(value.str, str_value, 
STR_LEN + 1); \ - AMfreeResult(res); \ + AMresultFree(res); \ } static_void_test_AMlistPut(Bool, insert, boolean, true) diff --git a/automerge-c/test/ammapput_tests.c b/automerge-c/test/ammapput_tests.c index 1e24438d..cf881cfd 100644 --- a/automerge-c/test/ammapput_tests.c +++ b/automerge-c/test/ammapput_tests.c @@ -29,8 +29,8 @@ static void test_AMmapPut ## suffix(void **state) { \ } \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfreeResult(res); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ + AMresultFree(res); \ res = AMmapGet(group_state->doc, AM_ROOT, #suffix); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ @@ -39,7 +39,7 @@ static void test_AMmapPut ## suffix(void **state) { \ value = AMresultValue(res, 0); \ assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ assert_true(value.member == scalar_value); \ - AMfreeResult(res); \ + AMresultFree(res); \ } #define test_AMmapPutObject(label) test_AMmapPutObject_ ## label @@ -61,7 +61,7 @@ static void test_AMmapPutObject_ ## label(void **state) { \ assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ assert_non_null(value.obj_id); \ assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ - AMfreeResult(res); \ + AMresultFree(res); \ } static_void_test_AMmapPut(Bool, boolean, true) @@ -85,7 +85,7 @@ static void test_AMmapPutBytes(void **state) { assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); assert_int_equal(value.tag, AM_VALUE_VOID); - AMfreeResult(res); + AMresultFree(res); res = AMmapGet(group_state->doc, AM_ROOT, KEY); if (AMresultStatus(res) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(res)); @@ -95,7 +95,7 @@ static void test_AMmapPutBytes(void **state) { assert_int_equal(value.tag, AM_VALUE_BYTES); assert_int_equal(value.bytes.count, BYTES_SIZE); assert_memory_equal(value.bytes.src, BYTES_VALUE, BYTES_SIZE); - AMfreeResult(res); 
+ AMresultFree(res); } static_void_test_AMmapPut(Counter, counter, INT64_MAX) @@ -115,7 +115,7 @@ static void test_AMmapPutNull(void **state) { assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); assert_int_equal(value.tag, AM_VALUE_VOID); - AMfreeResult(res); + AMresultFree(res); res = AMmapGet(group_state->doc, AM_ROOT, KEY); if (AMresultStatus(res) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(res)); @@ -123,7 +123,7 @@ static void test_AMmapPutNull(void **state) { assert_int_equal(AMresultSize(res), 1); value = AMresultValue(res, 0); assert_int_equal(value.tag, AM_VALUE_NULL); - AMfreeResult(res); + AMresultFree(res); } static_void_test_AMmapPutObject(List) @@ -150,7 +150,7 @@ static void test_AMmapPutStr(void **state) { assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); assert_int_equal(value.tag, AM_VALUE_VOID); - AMfreeResult(res); + AMresultFree(res); res = AMmapGet(group_state->doc, AM_ROOT, KEY); if (AMresultStatus(res) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(res)); @@ -160,7 +160,7 @@ static void test_AMmapPutStr(void **state) { assert_int_equal(value.tag, AM_VALUE_STR); assert_int_equal(strlen(value.str), STR_LEN); assert_memory_equal(value.str, STR_VALUE, STR_LEN + 1); - AMfreeResult(res); + AMresultFree(res); } static_void_test_AMmapPut(Timestamp, timestamp, INT64_MAX) From 472b5dc3485a75d9e648e912e38be9f57c2b9bf2 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 23:14:38 -0700 Subject: [PATCH 424/730] Added the synchronization unit test suite to the CTest suite. 
--- automerge-c/test/CMakeLists.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/automerge-c/test/CMakeLists.txt b/automerge-c/test/CMakeLists.txt index 3da6051e..ac43a61f 100644 --- a/automerge-c/test/CMakeLists.txt +++ b/automerge-c/test/CMakeLists.txt @@ -10,6 +10,7 @@ add_executable( ammapput_tests.c macro_utils.c main.c + sync_tests.c ) set_target_properties(test_${LIBRARY_NAME} PROPERTIES LINKER_LANGUAGE C) From d08eeeed61143570f6d93be5d31d741b73f4c42c Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 23:15:20 -0700 Subject: [PATCH 425/730] Renamed `AMfreeDoc()` to `AMFree()`. --- automerge-c/test/group_state.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-c/test/group_state.c b/automerge-c/test/group_state.c index 66676ef0..ab35321a 100644 --- a/automerge-c/test/group_state.c +++ b/automerge-c/test/group_state.c @@ -12,7 +12,7 @@ int group_setup(void** state) { int group_teardown(void** state) { GroupState* group_state = *state; - AMfreeDoc(group_state->doc); + AMfree(group_state->doc); free(group_state); return 0; } From cdcd5156dbe65a3ce43626e981c98c4446e8698a Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 23:16:14 -0700 Subject: [PATCH 426/730] Added the synchronization unit test suite to the CTest suite. 
--- automerge-c/test/main.c | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/automerge-c/test/main.c b/automerge-c/test/main.c index 11a2e888..15c61acd 100644 --- a/automerge-c/test/main.c +++ b/automerge-c/test/main.c @@ -12,10 +12,13 @@ extern int run_AMlistPut_tests(void); extern int run_AMmapPut_tests(void); +extern int run_sync_tests(void); + int main(void) { return ( run_AMdoc_property_tests() + run_AMlistPut_tests() + - run_AMmapPut_tests() + run_AMmapPut_tests() + + run_sync_tests() ); } From fbdb5da50864b7133ebe805b13d00474c6236052 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 30 May 2022 23:17:44 -0700 Subject: [PATCH 427/730] Ported 17 synchronization unit test cases from JS to C. --- automerge-c/test/sync_tests.c | 1009 +++++++++++++++++++++++++++++++++ 1 file changed, 1009 insertions(+) create mode 100644 automerge-c/test/sync_tests.c diff --git a/automerge-c/test/sync_tests.c b/automerge-c/test/sync_tests.c new file mode 100644 index 00000000..59d5043f --- /dev/null +++ b/automerge-c/test/sync_tests.c @@ -0,0 +1,1009 @@ +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include "automerge.h" + +typedef struct { + AMdoc* doc1; + AMdoc* doc2; + AMsyncState* sync_state1; + AMsyncState* sync_state2; +} TestState; + +static int setup(void** state) { + TestState* test_state = calloc(1, sizeof(TestState)); + test_state->doc1 = AMcreate(); + test_state->doc2 = AMcreate(); + test_state->sync_state1 = AMsyncStateInit(); + test_state->sync_state2 = AMsyncStateInit(); + *state = test_state; + return 0; +} + +static int teardown(void** state) { + TestState* test_state = *state; + AMfree(test_state->doc1); + AMfree(test_state->doc2); + AMsyncStateFree(test_state->sync_state1); + AMsyncStateFree(test_state->sync_state2); + free(test_state); + return 0; +} + +static void sync(AMdoc* a, + AMdoc* b, + AMsyncState* a_sync_state, + AMsyncState* b_sync_state) { + static size_t const MAX_ITER 
= 10; + + AMsyncMessage const* a2b_msg = NULL; + AMsyncMessage const* b2a_msg = NULL; + size_t iter = 0; + do { + AMresult* a2b_msg_result = AMgenerateSyncMessage(a, a_sync_state); + AMresult* b2a_msg_result = AMgenerateSyncMessage(b, b_sync_state); + AMvalue value = AMresultValue(a2b_msg_result, 0); + switch (value.tag) { + case AM_VALUE_SYNC_MESSAGE: { + a2b_msg = value.sync_message; + AMresultFree(AMreceiveSyncMessage(b, b_sync_state, a2b_msg)); + } + break; + case AM_VALUE_VOID: a2b_msg = NULL; break; + } + value = AMresultValue(b2a_msg_result, 0); + switch (value.tag) { + case AM_VALUE_SYNC_MESSAGE: { + b2a_msg = value.sync_message; + AMresultFree(AMreceiveSyncMessage(a, a_sync_state, b2a_msg)); + } + break; + case AM_VALUE_VOID: b2a_msg = NULL; break; + } + if (++iter > MAX_ITER) { + fail_msg("Did not synchronize within %d iterations. " + "Do you have a bug causing an infinite loop?", MAX_ITER); + } + } while(a2b_msg || b2a_msg); +} + +/** + * \brief Data sync protocol with docs already in sync, an empty local doc + * should send a sync message implying no local data. 
+ */ +static void test_converged_empty_local_doc_reply_no_local_data(void **state) { + TestState* test_state = *state; + AMresult* sync_message_result = AMgenerateSyncMessage( + test_state->doc1, test_state->sync_state1 + ); + if (AMresultStatus(sync_message_result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(sync_message_result)); + } + assert_int_equal(AMresultSize(sync_message_result), 1); + AMvalue value = AMresultValue(sync_message_result, 0); + assert_int_equal(value.tag, AM_VALUE_SYNC_MESSAGE); + AMsyncMessage const* sync_message = value.sync_message; + AMchangeHashes heads = AMsyncMessageHeads(sync_message); + assert_int_equal(AMchangeHashesSize(&heads), 0); + AMchangeHashes needs = AMsyncMessageNeeds(sync_message); + assert_int_equal(AMchangeHashesSize(&needs), 0); + AMsyncHaves haves = AMsyncMessageHaves(sync_message); + assert_int_equal(AMsyncHavesSize(&haves), 1); + AMsyncHave const* have0 = AMsyncHavesNext(&haves, 1); + AMchangeHashes last_sync = AMsyncHaveLastSync(have0); + assert_int_equal(AMchangeHashesSize(&last_sync), 0); + AMchanges changes = AMsyncMessageChanges(sync_message); + assert_int_equal(AMchangesSize(&changes), 0); + AMresultFree(sync_message_result); +} + +/** + * \brief Data sync protocol with docs already in sync, an empty local doc + * should not reply if we have no data as well. 
+ */ +static void test_converged_empty_local_doc_no_reply(void **state) { + TestState* test_state = *state; + AMresult* sync_message_result1 = AMgenerateSyncMessage( + test_state->doc1, test_state->sync_state1 + ); + if (AMresultStatus(sync_message_result1) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(sync_message_result1)); + } + assert_int_equal(AMresultSize(sync_message_result1), 1); + AMvalue value = AMresultValue(sync_message_result1, 0); + assert_int_equal(value.tag, AM_VALUE_SYNC_MESSAGE); + AMsyncMessage const* sync_message1 = value.sync_message; + AMresult* result = AMreceiveSyncMessage( + test_state->doc2, test_state->sync_state2, sync_message1 + ); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(result)); + } + assert_int_equal(AMresultSize(result), 0); + value = AMresultValue(result, 0); + assert_int_equal(value.tag, AM_VALUE_VOID); + AMresultFree(result); + AMresult* sync_message_result2 = AMgenerateSyncMessage( + test_state->doc2, test_state->sync_state2 + ); + if (AMresultStatus(sync_message_result2) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(sync_message_result2)); + } + assert_int_equal(AMresultSize(sync_message_result2), 0); + value = AMresultValue(sync_message_result2, 0); + assert_int_equal(value.tag, AM_VALUE_VOID); + AMresultFree(sync_message_result2); + AMresultFree(sync_message_result1); +} + +/** + * \brief Data sync protocol with docs already in sync, documents with data and + * repos with equal heads do not need a reply message. + */ +static void test_converged_equal_heads_no_reply(void **state) { + TestState* test_state = *state; + + /* Make two nodes with the same changes. 
*/ + time_t const time = 0; + for (size_t index = 0; index != 10; ++index) { + AMresultFree(AMlistPutUint(test_state->doc1, AM_ROOT, index, true, index)); + AMcommit(test_state->doc1, NULL, &time); + } + AMresult* changes_result = AMgetChanges(test_state->doc1, NULL); + AMvalue value = AMresultValue(changes_result, 0); + AMresultFree(AMapplyChanges(test_state->doc2, &value.changes)); + AMresultFree(changes_result); + assert_true(AMequal(test_state->doc1, test_state->doc2)); + + /* Generate a naive sync message. */ + AMresult* sync_message_result1 = AMgenerateSyncMessage( + test_state->doc1, + test_state->sync_state1 + ); + AMsyncMessage const* sync_message1 = AMresultValue(sync_message_result1, 0).sync_message; + AMchangeHashes last_sent_heads = AMsyncStateLastSentHeads(test_state->sync_state1); + AMresult* heads_result = AMgetHeads(test_state->doc1); + AMchangeHashes heads = AMresultValue(heads_result, 0).change_hashes; + assert_int_equal(AMchangeHashesCmp(&last_sent_heads, &heads), 0); + AMresultFree(heads_result); + + /* Heads are equal so this message should be void. */ + AMresultFree(AMreceiveSyncMessage( + test_state->doc2, test_state->sync_state2, sync_message1 + )); + AMresultFree(sync_message_result1); + AMresult* sync_message_result2 = AMgenerateSyncMessage( + test_state->doc2, test_state->sync_state2 + ); + assert_int_equal(AMresultValue(sync_message_result2, 0).tag, AM_VALUE_VOID); + AMresultFree(sync_message_result2); +} + +/** + * \brief Data sync protocol with docs already in sync, documents with data and + * the first node should offer all changes to the second node when + * starting from nothing. + */ +static void test_converged_offer_all_changes_from_nothing(void **state) { + TestState* test_state = *state; + + /* Make changes for the first node that the second node should request. 
*/ + time_t const time = 0; + for (size_t index = 0; index != 10; ++index) { + AMresultFree(AMlistPutUint(test_state->doc1, AM_ROOT, index, true, index)); + AMcommit(test_state->doc1, NULL, &time); + } + + assert_false(AMequal(test_state->doc1, test_state->doc2)); + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + assert_true(AMequal(test_state->doc1, test_state->doc2)); +} + +/** + * \brief Data sync protocol with docs already in sync, documents with data and + * it should sync peers where one has commits the other does not. + */ +static void test_converged_sync_peers_with_uneven_commits(void **state) { + TestState* test_state = *state; + + /* Make changes for the first node that the second node should request. */ + time_t const time = 0; + for (size_t index = 0; index != 10; ++index) { + AMresultFree(AMlistPutUint(test_state->doc1, AM_ROOT, index, true, index)); + AMcommit(test_state->doc1, NULL, &time); + } + + assert_false(AMequal(test_state->doc1, test_state->doc2)); + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + assert_true(AMequal(test_state->doc1, test_state->doc2)); +} + +/** + * \brief Data sync protocol with docs already in sync, documents with data and + * it should work with prior sync state. + */ +static void test_converged_works_with_prior_sync_state(void **state) { + /* Create & synchronize two nodes. */ + TestState* test_state = *state; + + time_t const time = 0; + for (size_t value = 0; value != 5; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + } + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + + /* Modify the first node further. 
*/ + for (size_t value = 5; value != 10; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + } + + assert_false(AMequal(test_state->doc1, test_state->doc2)); + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + assert_true(AMequal(test_state->doc1, test_state->doc2)); +} + +/** + * \brief Data sync protocol with docs already in sync, documents with data and + * it should not generate messages once synced. + */ +static void test_converged_no_message_once_synced(void **state) { + /* Create & synchronize two nodes. */ + TestState* test_state = *state; + AMresultFree(AMsetActorHex(test_state->doc1, "abc123")); + AMresultFree(AMsetActorHex(test_state->doc2, "def456")); + + time_t const time = 0; + for (size_t value = 0; value != 5; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + AMresultFree(AMmapPutUint(test_state->doc2, AM_ROOT, "y", value)); + AMcommit(test_state->doc2, NULL, &time); + } + + /* The first node reports what it has. */ + AMresult* message_result = AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1); + AMsyncMessage const* message = AMresultValue(message_result, 0).sync_message; + + /* The second node receives that message and sends changes along with what + * it has. */ + AMresultFree(AMreceiveSyncMessage(test_state->doc2, + test_state->sync_state2, + message)); + AMresultFree(message_result); + message_result = AMgenerateSyncMessage(test_state->doc2, + test_state->sync_state2); + message = AMresultValue(message_result, 0).sync_message; + AMchanges message_changes = AMsyncMessageChanges(message); + assert_int_equal(AMchangesSize(&message_changes), 5); + + /* The first node receives the changes and replies with the changes it now + * knows that the second node needs. 
*/ + AMresultFree(AMreceiveSyncMessage(test_state->doc1, + test_state->sync_state1, + message)); + AMresultFree(message_result); + message_result = AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1); + message = AMresultValue(message_result, 0).sync_message; + message_changes = AMsyncMessageChanges(message); + assert_int_equal(AMchangesSize(&message_changes), 5); + + /* The second node applies the changes and sends confirmation ending the + * exchange. */ + AMresultFree(AMreceiveSyncMessage(test_state->doc2, + test_state->sync_state2, + message)); + AMresultFree(message_result); + message_result = AMgenerateSyncMessage(test_state->doc2, + test_state->sync_state2); + message = AMresultValue(message_result, 0).sync_message; + + /* The first node receives the message and has nothing more to say. */ + AMresultFree(AMreceiveSyncMessage(test_state->doc1, + test_state->sync_state1, + message)); + AMresultFree(message_result); + message_result = AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1); + assert_int_equal(AMresultValue(message_result, 0).tag, AM_VALUE_VOID); + AMresultFree(message_result); + + /* The second node also has nothing left to say. */ + message_result = AMgenerateSyncMessage(test_state->doc2, + test_state->sync_state2); + assert_int_equal(AMresultValue(message_result, 0).tag, AM_VALUE_VOID); + AMresultFree(message_result); +} + +/** + * \brief Data sync protocol with docs already in sync, documents with data and + * it should allow simultaneous messages during synchronization. + */ +static void test_converged_allow_simultaneous_messages(void **state) { + /* Create & synchronize two nodes. 
*/ + TestState* test_state = *state; + AMresultFree(AMsetActorHex(test_state->doc1, "abc123")); + AMresultFree(AMsetActorHex(test_state->doc2, "def456")); + + time_t const time = 0; + for (size_t value = 0; value != 5; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + AMresultFree(AMmapPutUint(test_state->doc2, AM_ROOT, "y", value)); + AMcommit(test_state->doc2, NULL, &time); + } + AMresult* heads_result1 = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; + AMbyteSpan head1 = AMchangeHashesNext(&heads1, 1); + AMresult* heads_result2 = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + AMbyteSpan head2 = AMchangeHashesNext(&heads2, 1); + + /* Both sides report what they have but have no shared peer state. */ + AMresult* msg1to2_result = AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1); + AMsyncMessage const* msg1to2 = AMresultValue(msg1to2_result, 0).sync_message; + AMresult* msg2to1_result = AMgenerateSyncMessage(test_state->doc2, + test_state->sync_state2); + AMsyncMessage const* msg2to1 = AMresultValue(msg2to1_result, 0).sync_message; + AMchanges msg1to2_changes = AMsyncMessageChanges(msg1to2); + assert_int_equal(AMchangesSize(&msg1to2_changes), 0); + AMsyncHaves msg1to2_haves = AMsyncMessageHaves(msg1to2); + AMsyncHave const* msg1to2_have = AMsyncHavesNext(&msg1to2_haves, 1); + AMchangeHashes msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); + assert_int_equal(AMchangeHashesSize(&msg1to2_last_sync), 0); + AMchanges msg2to1_changes = AMsyncMessageChanges(msg2to1); + assert_int_equal(AMchangesSize(&msg2to1_changes), 0); + AMsyncHaves msg2to1_haves = AMsyncMessageHaves(msg2to1); + AMsyncHave const* msg2to1_have = AMsyncHavesNext(&msg2to1_haves, 1); + AMchangeHashes msg2to1_last_sync = AMsyncHaveLastSync(msg2to1_have); + 
assert_int_equal(AMchangeHashesSize(&msg2to1_last_sync), 0); + + /* Both nodes receive messages from each other and update their + * synchronization states. */ + AMresultFree(AMreceiveSyncMessage(test_state->doc1, + test_state->sync_state1, + msg2to1)); + AMresultFree(msg2to1_result); + AMresultFree(AMreceiveSyncMessage(test_state->doc2, + test_state->sync_state2, + msg1to2)); + AMresultFree(msg1to2_result); + + /* Now both reply with their local changes that the other lacks + * (standard warning that 1% of the time this will result in a "needs" + * message). */ + msg1to2_result = AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1); + msg1to2 = AMresultValue(msg1to2_result, 0).sync_message; + msg1to2_changes = AMsyncMessageChanges(msg1to2); + assert_int_equal(AMchangesSize(&msg1to2_changes), 5); + msg2to1_result = AMgenerateSyncMessage(test_state->doc2, + test_state->sync_state2); + msg2to1 = AMresultValue(msg2to1_result, 0).sync_message; + msg2to1_changes = AMsyncMessageChanges(msg2to1); + assert_int_equal(AMchangesSize(&msg2to1_changes), 5); + + /* Both should now apply the changes. 
*/ + AMresultFree(AMreceiveSyncMessage(test_state->doc1, + test_state->sync_state1, + msg2to1)); + AMresultFree(msg2to1_result); + AMresult* missing_deps_result = AMgetMissingDeps(test_state->doc1, NULL); + AMchangeHashes missing_deps = AMresultValue(missing_deps_result, 0).change_hashes; + assert_int_equal(AMchangeHashesSize(&missing_deps), 0); + AMresultFree(missing_deps_result); + AMresult* map_value_result = AMmapGet(test_state->doc1, AM_ROOT, "x"); + assert_int_equal(AMresultValue(map_value_result, 0).uint, 4); + AMresultFree(map_value_result); + map_value_result = AMmapGet(test_state->doc1, AM_ROOT, "y"); + assert_int_equal(AMresultValue(map_value_result, 0).uint, 4); + AMresultFree(map_value_result); + + AMresultFree(AMreceiveSyncMessage(test_state->doc2, + test_state->sync_state2, + msg1to2)); + AMresultFree(msg1to2_result); + missing_deps_result = AMgetMissingDeps(test_state->doc2, NULL); + missing_deps = AMresultValue(missing_deps_result, 0).change_hashes; + assert_int_equal(AMchangeHashesSize(&missing_deps), 0); + AMresultFree(missing_deps_result); + map_value_result = AMmapGet(test_state->doc2, AM_ROOT, "x"); + assert_int_equal(AMresultValue(map_value_result, 0).uint, 4); + AMresultFree(map_value_result); + map_value_result = AMmapGet(test_state->doc2, AM_ROOT, "y"); + assert_int_equal(AMresultValue(map_value_result, 0).uint, 4); + AMresultFree(map_value_result); + + /* The response acknowledges that the changes were received and sends no + * further changes. 
*/ + msg1to2_result = AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1); + msg1to2 = AMresultValue(msg1to2_result, 0).sync_message; + msg1to2_changes = AMsyncMessageChanges(msg1to2); + assert_int_equal(AMchangesSize(&msg1to2_changes), 0); + msg2to1_result = AMgenerateSyncMessage(test_state->doc2, + test_state->sync_state2); + msg2to1 = AMresultValue(msg2to1_result, 0).sync_message; + msg2to1_changes = AMsyncMessageChanges(msg2to1); + assert_int_equal(AMchangesSize(&msg2to1_changes), 0); + + /* After receiving acknowledgements their shared heads should be equal. */ + AMresultFree(AMreceiveSyncMessage(test_state->doc1, + test_state->sync_state1, + msg2to1)); + AMresultFree(msg2to1_result); + AMresultFree(AMreceiveSyncMessage(test_state->doc2, + test_state->sync_state2, + msg1to2)); + AMresultFree(msg1to2_result); + + /* They're synchronized so no more messages are required. */ + msg1to2_result = AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1); + assert_int_equal(AMresultValue(msg1to2_result, 0).tag, AM_VALUE_VOID); + AMresultFree(msg1to2_result); + msg2to1_result = AMgenerateSyncMessage(test_state->doc2, + test_state->sync_state2); + assert_int_equal(AMresultValue(msg2to1_result, 0).tag, AM_VALUE_VOID); + AMresultFree(msg2to1_result); + + /* If we make one more change and start synchronizing then its "last + * sync" property should be updated. 
*/ + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 5)); + AMcommit(test_state->doc1, NULL, &time); + msg1to2_result = AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1); + msg1to2 = AMresultValue(msg1to2_result, 0).sync_message; + msg1to2_haves = AMsyncMessageHaves(msg1to2); + msg1to2_have = AMsyncHavesNext(&msg1to2_haves, 1); + msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); + AMbyteSpan msg1to2_last_sync_next = AMchangeHashesNext(&msg1to2_last_sync, 1); + assert_int_equal(msg1to2_last_sync_next.count, head1.count); + assert_memory_equal(msg1to2_last_sync_next.src, head1.src, head1.count); + msg1to2_last_sync_next = AMchangeHashesNext(&msg1to2_last_sync, 1); + assert_int_equal(msg1to2_last_sync_next.count, head2.count); + assert_memory_equal(msg1to2_last_sync_next.src, head2.src, head2.count); + AMresultFree(heads_result1); + AMresultFree(heads_result2); + AMresultFree(msg1to2_result); +} + +/** + * \brief Data sync protocol with docs already in sync, documents with data and + * it should assume sent changes were received until we hear otherwise. 
+ */ +static void test_converged_assume_sent_changes_were_received(void **state) { + TestState* test_state = *state; + AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); + AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + + AMresult* items_result = AMmapPutObject(test_state->doc1, + AM_ROOT, + "items", + AM_OBJ_TYPE_LIST); + AMobjId const* items = AMresultValue(items_result, 0).obj_id; + time_t const time = 0; + AMcommit(test_state->doc1, NULL, &time); + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + + AMresultFree(AMlistPutStr(test_state->doc1, items, 0, true, "x")); + AMcommit(test_state->doc1, NULL, &time); + AMresult* message_result = AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1); + AMsyncMessage const* message = AMresultValue(message_result, 0).sync_message; + AMchanges message_changes = AMsyncMessageChanges(message); + assert_int_equal(AMchangesSize(&message_changes), 1); + AMresultFree(message_result); + + AMresultFree(AMlistPutStr(test_state->doc1, items, 1, true, "y")); + AMcommit(test_state->doc1, NULL, &time); + message_result = AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1); + message = AMresultValue(message_result, 0).sync_message; + message_changes = AMsyncMessageChanges(message); + assert_int_equal(AMchangesSize(&message_changes), 1); + AMresultFree(message_result); + + AMresultFree(AMlistPutStr(test_state->doc1, items, 2, true, "z")); + AMcommit(test_state->doc1, NULL, &time); + message_result = AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1); + message = AMresultValue(message_result, 0).sync_message; + message_changes = AMsyncMessageChanges(message); + assert_int_equal(AMchangesSize(&message_changes), 1); + AMresultFree(message_result); + + AMresultFree(items_result); +} + +/** + * \brief Data sync protocol with docs already in sync, documents with data and + * it should work regardless of who initiates the exchange. 
+ */ +static void test_converged_works_regardless_of_who_initiates(void **state) { + /* Create & synchronize two nodes. */ + TestState* test_state = *state; + + time_t const time = 0; + for (size_t value = 0; value != 5; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + } + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + + /* Modify the first node further. */ + for (size_t value = 5; value != 10; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + } + + assert_false(AMequal(test_state->doc1, test_state->doc2)); + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + assert_true(AMequal(test_state->doc1, test_state->doc2)); +} + +/** + * \brief Data sync protocol with diverged documents and it should work without + * prior sync state. + */ +static void test_diverged_works_without_prior_sync_state(void **state) { + /* Scenario: + * ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 + * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + * `-- c15 <-- c16 <-- c17 + * lastSync is undefined. */ + + /* Create two peers both with divergent commits. 
*/ + TestState* test_state = *state; + AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); + AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + time_t const time = 0; + for (size_t value = 0; value != 10; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + } + + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + + for (size_t value = 10; value != 15; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + } + for (size_t value = 15; value != 18; ++value) { + AMresultFree(AMmapPutUint(test_state->doc2, AM_ROOT, "x", value)); + AMcommit(test_state->doc2, NULL, &time); + } + + assert_false(AMequal(test_state->doc1, test_state->doc2)); + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + AMresult* heads_result1 = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; + AMresult* heads_result2 = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMresultFree(heads_result2); + AMresultFree(heads_result1); + assert_true(AMequal(test_state->doc1, test_state->doc2)); +} + +/** + * \brief Data sync protocol with diverged documents and it should work with + * prior sync state. + */ +static void test_diverged_works_with_prior_sync_state(void **state) { + /* Scenario: + * ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 + * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + * `-- c15 <-- c16 <-- c17 + * lastSync is c9. */ + + /* Create two peers both with divergent commits. 
*/ + TestState* test_state = *state; + AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); + AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + time_t const time = 0; + for (size_t value = 0; value != 10; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + } + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + + for (size_t value = 10; value != 15; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + } + for (size_t value = 15; value != 18; ++value) { + AMresultFree(AMmapPutUint(test_state->doc2, AM_ROOT, "x", value)); + AMcommit(test_state->doc2, NULL, &time); + } + AMresult* encoded_result = AMsyncStateEncode(test_state->sync_state1); + AMbyteSpan encoded = AMresultValue(encoded_result, 0).bytes; + AMresult* sync_state_result1 = AMsyncStateDecode(encoded.src, encoded.count); + AMresultFree(encoded_result); + AMsyncState* sync_state1 = AMresultValue(sync_state_result1, 0).sync_state; + encoded_result = AMsyncStateEncode(test_state->sync_state2); + encoded = AMresultValue(encoded_result, 0).bytes; + AMresult* sync_state_result2 = AMsyncStateDecode(encoded.src, encoded.count); + AMresultFree(encoded_result); + AMsyncState* sync_state2 = AMresultValue(sync_state_result2, 0).sync_state; + + assert_false(AMequal(test_state->doc1, test_state->doc2)); + sync(test_state->doc1, test_state->doc2, sync_state1, sync_state2); + AMresultFree(sync_state_result2); + AMresultFree(sync_state_result1); + AMresult* heads_result1 = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; + AMresult* heads_result2 = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMresultFree(heads_result2); + 
AMresultFree(heads_result1); + assert_true(AMequal(test_state->doc1, test_state->doc2)); +} + +/** + * \brief Data sync protocol with diverged documents and it should ensure + * non-empty state after synchronization. + */ +static void test_diverged_ensure_not_empty_after_sync(void **state) { + TestState* test_state = *state; + AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); + AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + + time_t const time = 0; + for (size_t value = 0; value != 3; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + } + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + + AMresult* heads_result1 = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; + AMchangeHashes shared_heads1 = AMsyncStateSharedHeads(test_state->sync_state1); + assert_int_equal(AMchangeHashesCmp(&shared_heads1, &heads1), 0); + AMchangeHashes shared_heads2 = AMsyncStateSharedHeads(test_state->sync_state2); + assert_int_equal(AMchangeHashesCmp(&shared_heads2, &heads1), 0); + AMresultFree(heads_result1); +} + +/** + * \brief Data sync protocol with diverged documents and it should + * re-synchronize after one node crashed with data loss. + */ +static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { + /* Scenario: + * (r) (n2) (n1) + * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 + * n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync + * is c2. + * We want to successfully sync (n1) with (r), even though (n1) believes + * it's talking to (n2). */ + TestState* test_state = *state; + AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); + AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + + /* n1 makes three changes which we synchronize to n2. 
*/ + time_t const time = 0; + for (size_t value = 0; value != 3; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + } + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + + /* Save a copy of n2 as "r" to simulate recovering from a crash. */ + AMdoc* r = AMdup(test_state->doc2); + AMresult* encoded_result = AMsyncStateEncode(test_state->sync_state2); + AMbyteSpan encoded = AMresultValue(encoded_result, 0).bytes; + AMresult* sync_state_resultr = AMsyncStateDecode(encoded.src, encoded.count); + AMresultFree(encoded_result); + AMsyncState* sync_stater = AMresultValue(sync_state_resultr, 0).sync_state; + /* Synchronize another few commits. */ + for (size_t value = 3; value != 6; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + } + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + /* Everyone should be on the same page here. */ + AMresult* heads_result1 = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; + AMresult* heads_result2 = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMresultFree(heads_result2); + AMresultFree(heads_result1); + assert_true(AMequal(test_state->doc1, test_state->doc2)); + + /* Now make a few more changes and then attempt to synchronize the + * fully-up-to-date n1 with with the confused r. 
*/ + for (size_t value = 6; value != 9; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + } + heads_result1 = AMgetHeads(test_state->doc1); + heads1 = AMresultValue(heads_result1, 0).change_hashes; + AMresult* heads_resultr = AMgetHeads(r); + AMchangeHashes headsr = AMresultValue(heads_resultr, 0).change_hashes; + assert_int_not_equal(AMchangeHashesCmp(&heads1, &headsr), 0); + AMresultFree(heads_resultr); + AMresultFree(heads_result1); + assert_false(AMequal(test_state->doc1, r)); + AMresult* map_value_result = AMmapGet(test_state->doc1, AM_ROOT, "x"); + assert_int_equal(AMresultValue(map_value_result, 0).uint, 8); + AMresultFree(map_value_result); + map_value_result = AMmapGet(r, AM_ROOT, "x"); + assert_int_equal(AMresultValue(map_value_result, 0).uint, 2); + AMresultFree(map_value_result); + sync(test_state->doc1, + r, + test_state->sync_state1, + sync_stater); + AMresultFree(sync_state_resultr); + heads_result1 = AMgetHeads(test_state->doc1); + heads1 = AMresultValue(heads_result1, 0).change_hashes; + heads_resultr = AMgetHeads(r); + headsr = AMresultValue(heads_resultr, 0).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &headsr), 0); + AMresultFree(heads_resultr); + AMresultFree(heads_result1); + assert_true(AMequal(test_state->doc1, r)); + AMfree(r); +} + +/** + * \brief Data sync protocol with diverged documents and it should resync after + * one node experiences data loss without disconnecting. + */ +static void test_diverged_resync_after_data_loss_without_disconnection(void **state) { + TestState* test_state = *state; + AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); + AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + + /* n1 makes three changes which we synchronize to n2. 
*/ + time_t const time = 0; + for (size_t value = 0; value != 3; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMcommit(test_state->doc1, NULL, &time); + } + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + + AMresult* heads_result1 = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; + AMresult* heads_result2 = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMresultFree(heads_result2); + AMresultFree(heads_result1); + assert_true(AMequal(test_state->doc1, test_state->doc2)); + + AMdoc* doc2_after_data_loss = AMcreate(); + AMresultFree(AMsetActorHex(doc2_after_data_loss, "89abcdef")); + + /* "n2" now has no data, but n1 still thinks it does. Note we don't do + * decodeSyncState(encodeSyncState(s1)) in order to simulate data loss + * without disconnecting. */ + AMsyncState* sync_state2_after_data_loss = AMsyncStateInit(); + sync(test_state->doc1, + doc2_after_data_loss, + test_state->sync_state1, + sync_state2_after_data_loss); + heads_result1 = AMgetHeads(test_state->doc1); + heads1 = AMresultValue(heads_result1, 0).change_hashes; + heads_result2 = AMgetHeads(doc2_after_data_loss); + heads2 = AMresultValue(heads_result2, 0).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMresultFree(heads_result2); + AMresultFree(heads_result1); + assert_true(AMequal(test_state->doc1, doc2_after_data_loss)); + AMsyncStateFree(sync_state2_after_data_loss); + AMfree(doc2_after_data_loss); +} + +/** + * \brief Data sync protocol with diverged documents and it should handle + * changes concurrent to the last sync heads. 
+ */ +static void test_diverged_handles_concurrent_changes(void **state) { + TestState* test_state = *state; + AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); + AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMdoc* doc3 = AMcreate(); + AMresultFree(AMsetActorHex(doc3, "fedcba98")); + AMsyncState* sync_state12 = test_state->sync_state1; + AMsyncState* sync_state21 = test_state->sync_state2; + AMsyncState* sync_state23 = AMsyncStateInit(); + AMsyncState* sync_state32 = AMsyncStateInit(); + + /* Change 1 is known to all three nodes. */ + time_t const time = 0; + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 1)); + AMcommit(test_state->doc1, NULL, &time); + sync(test_state->doc1, test_state->doc2, sync_state12, sync_state21); + sync(test_state->doc2, doc3, sync_state23, sync_state32); + + /* Change 2 is known to n1 and n2. */ + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 2)); + AMcommit(test_state->doc1, NULL, &time); + sync(test_state->doc1, test_state->doc2, sync_state12, sync_state21); + + /* Each of the three nodes makes one change (changes 3, 4, 5). */ + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 3)); + AMcommit(test_state->doc1, NULL, &time); + AMresultFree(AMmapPutUint(test_state->doc2, AM_ROOT, "x", 4)); + AMcommit(test_state->doc2, NULL, &time); + AMresultFree(AMmapPutUint(doc3, AM_ROOT, "x", 5)); + AMcommit(doc3, NULL, &time); + + /* Apply n3's latest change to n2. */ + AMresult* changes_result = AMgetLastLocalChange(doc3); + AMchanges changes = AMresultValue(changes_result, 0).changes; + AMresultFree(AMapplyChanges(test_state->doc2, &changes)); + AMresultFree(changes_result); + + /* Now sync n1 and n2. n3's change is concurrent to n1 and n2's last sync + * heads. 
*/ + sync(test_state->doc1, test_state->doc2, sync_state12, sync_state21); + AMresult* heads_result1 = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; + AMresult* heads_result2 = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMresultFree(heads_result2); + AMresultFree(heads_result1); + assert_true(AMequal(test_state->doc1, test_state->doc2)); + + AMsyncStateFree(sync_state32); + AMsyncStateFree(sync_state23); + AMfree(doc3); +} + +/** + * \brief Data sync protocol with diverged documents and it should handle + * histories with lots of branching and merging. + */ +static void test_diverged_handles_histories_of_branching_and_merging(void **state) { + TestState* test_state = *state; + AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); + AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMdoc* doc3 = AMcreate(); + AMresultFree(AMsetActorHex(doc3, "fedcba98")); + time_t const time = 0; + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 0)); + AMcommit(test_state->doc1, NULL, &time); + AMresult* changes_result = AMgetLastLocalChange(test_state->doc1); + AMchanges changes = AMresultValue(changes_result, 0).changes; + AMresultFree(AMapplyChanges(test_state->doc2, &changes)); + AMresultFree(AMapplyChanges(doc3, &changes)); + AMresultFree(changes_result); + AMresultFree(AMmapPutUint(doc3, AM_ROOT, "x", 1)); + AMcommit(doc3, NULL, &time); + + /* - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 + * / \/ \/ \/ + * / /\ /\ /\ + * c0 <---- n2c1 <------ n2c2 <------ n2c3 <-- etc. 
<-- n2c20 <------ n2c21 + * \ / + * ---------------------------------------------- n3c1 <----- + */ + for (size_t value = 1; value != 20; ++value) { + AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "n1", value)); + AMcommit(test_state->doc1, NULL, &time); + AMresultFree(AMmapPutUint(test_state->doc2, AM_ROOT, "n2", value)); + AMcommit(test_state->doc2, NULL, &time); + AMresult* changes_result1 = AMgetLastLocalChange(test_state->doc1); + AMchanges changes1 = AMresultValue(changes_result1, 0).changes; + AMresult* changes_result2 = AMgetLastLocalChange(test_state->doc2); + AMchanges changes2 = AMresultValue(changes_result2, 0).changes; + AMresultFree(AMapplyChanges(test_state->doc1, &changes2)); + AMresultFree(changes_result2); + AMresultFree(AMapplyChanges(test_state->doc2, &changes1)); + AMresultFree(changes_result1); + } + + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + + /* Having n3's last change concurrent to the last sync heads forces us into + * the slower code path. 
*/ + AMresult* changes_result3 = AMgetLastLocalChange(doc3); + AMchanges changes3 = AMresultValue(changes_result3, 0).changes; + AMresultFree(AMapplyChanges(test_state->doc2, &changes3)); + AMresultFree(changes_result3); + AMresultFree(AMmapPutStr(test_state->doc1, AM_ROOT, "n1", "final")); + AMcommit(test_state->doc1, NULL, &time); + AMresultFree(AMmapPutStr(test_state->doc2, AM_ROOT, "n2", "final")); + AMcommit(test_state->doc2, NULL, &time); + + sync(test_state->doc1, + test_state->doc2, + test_state->sync_state1, + test_state->sync_state2); + AMresult* heads_result1 = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; + AMresult* heads_result2 = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMresultFree(heads_result2); + AMresultFree(heads_result1); + assert_true(AMequal(test_state->doc1, test_state->doc2)); + + AMfree(doc3); +} + +int run_sync_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test_setup_teardown(test_converged_empty_local_doc_reply_no_local_data, setup, teardown), + cmocka_unit_test_setup_teardown(test_converged_empty_local_doc_no_reply, setup, teardown), + cmocka_unit_test_setup_teardown(test_converged_equal_heads_no_reply, setup, teardown), + cmocka_unit_test_setup_teardown(test_converged_offer_all_changes_from_nothing, setup, teardown), + cmocka_unit_test_setup_teardown(test_converged_sync_peers_with_uneven_commits, setup, teardown), + cmocka_unit_test_setup_teardown(test_converged_works_with_prior_sync_state, setup, teardown), + cmocka_unit_test_setup_teardown(test_converged_no_message_once_synced, setup, teardown), + cmocka_unit_test_setup_teardown(test_converged_allow_simultaneous_messages, setup, teardown), + cmocka_unit_test_setup_teardown(test_converged_assume_sent_changes_were_received, setup, teardown), + 
cmocka_unit_test_setup_teardown(test_converged_works_regardless_of_who_initiates, setup, teardown), + cmocka_unit_test_setup_teardown(test_diverged_works_without_prior_sync_state, setup, teardown), + cmocka_unit_test_setup_teardown(test_diverged_works_with_prior_sync_state, setup, teardown), + cmocka_unit_test_setup_teardown(test_diverged_ensure_not_empty_after_sync, setup, teardown), + cmocka_unit_test_setup_teardown(test_diverged_resync_after_node_crash_with_data_loss, setup, teardown), + cmocka_unit_test_setup_teardown(test_diverged_resync_after_data_loss_without_disconnection, setup, teardown), + cmocka_unit_test_setup_teardown(test_diverged_handles_concurrent_changes, setup, teardown), + cmocka_unit_test_setup_teardown(test_diverged_handles_histories_of_branching_and_merging, setup, teardown), + }; + + return cmocka_run_group_tests(tests, NULL, NULL); +} From 8ce10dab69d2a3f5979b0461454119af43ba46f0 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 31 May 2022 13:49:18 -0400 Subject: [PATCH 428/730] some api changes/tweaks - basic js package --- automerge-js/config/cjs.json | 6 + automerge-js/package.json | 36 ++++-- automerge-js/src/index.ts | 6 +- automerge-wasm/README.md | 27 ++-- automerge-wasm/index.d.ts | 1 + automerge-wasm/package.json | 4 +- automerge-wasm/src/lib.rs | 46 +++++-- automerge-wasm/test/readme.ts | 29 +++-- automerge-wasm/test/test.ts | 120 +++++++++--------- .../types/automerge-types-0.1.1.tgz | Bin 2566 -> 0 bytes automerge-wasm/types/index.d.ts | 5 +- automerge-wasm/types/package.json | 2 +- 12 files changed, 166 insertions(+), 116 deletions(-) create mode 100644 automerge-js/config/cjs.json delete mode 100644 automerge-wasm/types/automerge-types-0.1.1.tgz diff --git a/automerge-js/config/cjs.json b/automerge-js/config/cjs.json new file mode 100644 index 00000000..d7f8c63f --- /dev/null +++ b/automerge-js/config/cjs.json @@ -0,0 +1,6 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "../dist/cjs" + } +} diff 
--git a/automerge-js/package.json b/automerge-js/package.json index d2ba317f..80c9deca 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "0.1.0", + "version": "0.1.1", "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", @@ -13,22 +13,32 @@ "LICENSE", "package.json", "index.d.ts", - "dist/constants.js", - "dist/types.js", - "dist/numbers.js", - "dist/index.js", - "dist/uuid.js", - "dist/counter.js", - "dist/low_level.js", - "dist/text.js", - "dist/proxies.js" + "dist/cjs/constants.js", + "dist/cjs/types.js", + "dist/cjs/numbers.js", + "dist/cjs/index.js", + "dist/cjs/uuid.js", + "dist/cjs/counter.js", + "dist/cjs/low_level.js", + "dist/cjs/text.js", + "dist/cjs/proxies.js", + "dist/mjs/constants.js", + "dist/mjs/types.js", + "dist/mjs/numbers.js", + "dist/mjs/index.js", + "dist/mjs/uuid.js", + "dist/mjs/counter.js", + "dist/mjs/low_level.js", + "dist/mjs/text.js", + "dist/mjs/proxies.js" ], "types": "index.d.ts", - "main": "./dist/index.js", + "module": "./dist/mjs/index.js", + "main": "./dist/cjs/index.js", "license": "MIT", "scripts": { "lint": "eslint src", - "build": "tsc -p config/mjs.json", + "build": "tsc -p config/mjs.json && tsc -p config/cjs.json", "test": "ts-mocha test/*.ts" }, "devDependencies": { @@ -45,7 +55,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "automerge-types": "^0.1.1", + "automerge-types": "0.1.4", "uuid": "^8.3" } } diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 27d73377..52f479e2 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -231,11 +231,7 @@ export function getConflicts(doc: Doc, prop: Prop) : Conflicts | undefined export function getLastLocalChange(doc: Doc) : Change | undefined { const state = _state(doc) - try { - return 
state.getLastLocalChange() - } catch (e) { - return - } + return state.getLastLocalChange() || undefined } export function getObjectId(doc: Doc) : ObjID { diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md index 0e37fcf7..add3d1b1 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -131,7 +131,10 @@ You can access objects by passing the object id as the first parameter for a cal // get the id then use it - let id = doc.get("/", "config") + // get returns a single simple javascript value or undefined + // getWithType returns an Array of the datatype plus basic type or null + + let id = doc.getWithType("/", "config") if (id && id[0] === 'map') { doc.put(id[1], "align", "right") } @@ -199,8 +202,8 @@ Text is a specialized list type intended for modifying a text document. The pri let obj = doc.insertObject(notes, 6, { hi: "there" }) doc.text(notes) // returns "Hello \ufffceveryone" - doc.get(notes, 6) // returns ["map", obj] - doc.get(obj, "hi") // returns ["str", "there"] + doc.getWithType(notes, 6) // returns ["map", obj] + doc.get(obj, "hi") // returns "there" doc.free() ``` @@ -217,8 +220,8 @@ When querying maps use the `get()` method with the object in question and the pr doc1.put("_root", "key1", "val1") let key2 = doc1.putObject("_root", "key2", []) - doc1.get("_root", "key1") // returns ["str", "val1"] - doc1.get("_root", "key2") // returns ["list", "2@aabbcc"] + doc1.get("_root", "key1") // returns "val1" + doc1.getWithType("_root", "key2") // returns ["list", "2@aabbcc"] doc1.keys("_root") // returns ["key1", "key2"] let doc2 = doc1.fork("ffaaff") @@ -229,7 +232,7 @@ When querying maps use the `get()` method with the object in question and the pr doc1.merge(doc2) - doc1.get("_root","key3") // returns ["str", "doc2val"] + doc1.get("_root","key3") // returns "doc2val" doc1.getAll("_root","key3") // returns [[ "str", "doc1val"], ["str", "doc2val"]] doc1.free(); doc2.free() ``` @@ -266,7 +269,7 @@ Generally speaking you don't 
need to think about transactions when using Automer doc.put("_root", "key", "val1") - doc.get("_root", "key") // returns ["str","val1"] + doc.get("_root", "key") // returns "val1" doc.pendingOps() // returns 1 doc.rollback() @@ -280,7 +283,7 @@ Generally speaking you don't need to think about transactions when using Automer doc.commit("test commit 1") - doc.get("_root", "key") // returns ["str","val2"] + doc.get("_root", "key") // returns "val2" doc.pendingOps() // returns 0 doc.free() @@ -301,10 +304,10 @@ All query functions can take an optional argument of `heads` which allow you to doc.put("_root", "key", "val3") - doc.get("_root","key") // returns ["str","val3"] - doc.get("_root","key",heads2) // returns ["str","val2"] - doc.get("_root","key",heads1) // returns ["str","val1"] - doc.get("_root","key",[]) // returns null + doc.get("_root","key") // returns "val3" + doc.get("_root","key",heads2) // returns "val2" + doc.get("_root","key",heads1) // returns "val1" + doc.get("_root","key",[]) // returns undefined doc.free() ``` diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 28a4b5b4..28e41609 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -1 +1,2 @@ export * from "automerge-types" +export default from "automerge-types" diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 2d024c10..cfeea401 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.4", + "version": "0.1.5", "license": "MIT", "files": [ "README.md", @@ -51,6 +51,6 @@ "typescript": "^4.6.4" }, "dependencies": { - "automerge-types": "^0.1.1" + "automerge-types": "0.1.4" } } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index db948704..b7220d3b 100644 --- 
a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -356,7 +356,37 @@ impl Automerge { obj: JsValue, prop: JsValue, heads: Option, - ) -> Result, JsValue> { + ) -> Result { + let obj = self.import(obj)?; + let prop = to_prop(prop); + let heads = get_heads(heads); + if let Ok(prop) = prop { + let value = if let Some(h) = heads { + self.doc.get_at(&obj, prop, &h)? + } else { + self.doc.get(&obj, prop)? + }; + match value { + Some((Value::Object(_), obj_id)) => { + Ok(obj_id.to_string().into()) + } + Some((Value::Scalar(value), _)) => { + Ok(ScalarValue(value).into()) + } + None => Ok(JsValue::undefined()), + } + } else { + Ok(JsValue::undefined()) + } + } + + #[wasm_bindgen(js_name = getWithType)] + pub fn get_with_type( + &self, + obj: JsValue, + prop: JsValue, + heads: Option, + ) -> Result { let obj = self.import(obj)?; let result = Array::new(); let prop = to_prop(prop); @@ -371,17 +401,17 @@ impl Automerge { Some((Value::Object(obj_type), obj_id)) => { result.push(&obj_type.to_string().into()); result.push(&obj_id.to_string().into()); - Ok(Some(result)) + Ok(result.into()) } Some((Value::Scalar(value), _)) => { result.push(&datatype(&value).into()); result.push(&ScalarValue(value).into()); - Ok(Some(result)) + Ok(result.into()) } - None => Ok(None), + None => Ok(JsValue::null()), } } else { - Ok(None) + Ok(JsValue::null()) } } @@ -621,12 +651,12 @@ impl Automerge { } #[wasm_bindgen(js_name = getLastLocalChange)] - pub fn get_last_local_change(&mut self) -> Result { + pub fn get_last_local_change(&mut self) -> Result { self.ensure_transaction_closed(); if let Some(change) = self.doc.get_last_local_change() { - Ok(Uint8Array::from(change.raw_bytes())) + Ok(Uint8Array::from(change.raw_bytes()).into()) } else { - Err(to_js_err("no local changes")) + Ok(JsValue::null()) } } diff --git a/automerge-wasm/test/readme.ts b/automerge-wasm/test/readme.ts index ff5c94ac..5917cbe9 100644 --- a/automerge-wasm/test/readme.ts +++ b/automerge-wasm/test/readme.ts @@ 
-1,7 +1,8 @@ import { describe, it } from 'mocha'; import * as assert from 'assert' //@ts-ignore -import init, { create, load } from '..' +import init from '..' +import { create, load } from '..' describe('Automerge', () => { describe('Readme Examples', () => { @@ -83,7 +84,7 @@ describe('Automerge', () => { // Anywhere Object Ids are being used a path can also be used. // The following two statements are equivalent: - const id = doc.get("/", "config") + const id = doc.getWithType("/", "config") if (id && id[0] === 'map') { doc.put(id[1], "align", "right") } @@ -138,8 +139,8 @@ describe('Automerge', () => { const obj = doc.insertObject(notes, 6, { hi: "there" }) assert.deepEqual(doc.text(notes), "Hello \ufffceveryone") - assert.deepEqual(doc.get(notes, 6), ["map", obj]) - assert.deepEqual(doc.get(obj, "hi"), ["str", "there"]) + assert.deepEqual(doc.get(notes, 6), obj) + assert.deepEqual(doc.get(obj, "hi"), "there") doc.free() }) @@ -148,8 +149,8 @@ describe('Automerge', () => { doc1.put("_root", "key1", "val1") const key2 = doc1.putObject("_root", "key2", []) - assert.deepEqual(doc1.get("_root", "key1"), ["str", "val1"]) - assert.deepEqual(doc1.get("_root", "key2"), ["list", "2@aabbcc"]) + assert.deepEqual(doc1.get("_root", "key1"), "val1") + assert.deepEqual(doc1.getWithType("_root", "key2"), ["list", "2@aabbcc"]) assert.deepEqual(doc1.keys("_root"), ["key1", "key2"]) const doc2 = doc1.fork("ffaaff") @@ -160,7 +161,7 @@ describe('Automerge', () => { doc1.merge(doc2) - assert.deepEqual(doc1.get("_root","key3"), ["str", "doc2val"]) + assert.deepEqual(doc1.get("_root","key3"), "doc2val") assert.deepEqual(doc1.getAll("_root","key3"),[[ "str", "doc1val", "3@aabbcc"], ["str", "doc2val", "3@ffaaff"]]) doc1.free(); doc2.free() @@ -188,12 +189,12 @@ describe('Automerge', () => { doc.put("_root", "key", "val1") - assert.deepEqual(doc.get("_root", "key"),["str","val1"]) + assert.deepEqual(doc.get("_root", "key"),"val1") assert.deepEqual(doc.pendingOps(),1) doc.rollback() - 
assert.deepEqual(doc.get("_root", "key"),null) + assert.deepEqual(doc.get("_root", "key"),undefined) assert.deepEqual(doc.pendingOps(),0) doc.put("_root", "key", "val2") @@ -202,7 +203,7 @@ describe('Automerge', () => { doc.commit("test commit 1") - assert.deepEqual(doc.get("_root", "key"),["str","val2"]) + assert.deepEqual(doc.get("_root", "key"),"val2") assert.deepEqual(doc.pendingOps(),0) doc.free() @@ -218,10 +219,10 @@ describe('Automerge', () => { doc.put("_root", "key", "val3") - assert.deepEqual(doc.get("_root","key"), ["str","val3"]) - assert.deepEqual(doc.get("_root","key",heads2), ["str","val2"]) - assert.deepEqual(doc.get("_root","key",heads1), ["str","val1"]) - assert.deepEqual(doc.get("_root","key",[]), null) + assert.deepEqual(doc.get("_root","key"), "val3") + assert.deepEqual(doc.get("_root","key",heads2), "val2") + assert.deepEqual(doc.get("_root","key",heads1), "val1") + assert.deepEqual(doc.get("_root","key",[]), undefined) doc.free() }) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index ce04d930..4129480c 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -48,7 +48,7 @@ describe('Automerge', () => { it('getting a nonexistant prop does not throw an error', () => { const doc = create() const root = "_root" - const result = doc.get(root,"hello") + const result = doc.getWithType(root,"hello") assert.deepEqual(result,undefined) doc.free() }) @@ -70,42 +70,44 @@ describe('Automerge', () => { doc.putObject(root, "list", []); doc.put(root, "null", null) - result = doc.get(root,"hello") + result = doc.getWithType(root,"hello") assert.deepEqual(result,["str","world"]) + assert.deepEqual(doc.get("/","hello"),"world") - result = doc.get(root,"number1") + result = doc.getWithType(root,"number1") assert.deepEqual(result,["uint",5]) + assert.deepEqual(doc.get("/","number1"),5) - result = doc.get(root,"number2") + result = doc.getWithType(root,"number2") assert.deepEqual(result,["int",5]) - result = 
doc.get(root,"number3") + result = doc.getWithType(root,"number3") assert.deepEqual(result,["f64",5.5]) - result = doc.get(root,"number4") + result = doc.getWithType(root,"number4") assert.deepEqual(result,["f64",5.5]) - result = doc.get(root,"number5") + result = doc.getWithType(root,"number5") assert.deepEqual(result,["int",5]) - result = doc.get(root,"bool") + result = doc.getWithType(root,"bool") assert.deepEqual(result,["boolean",true]) doc.put(root, "bool", false, "boolean") - result = doc.get(root,"bool") + result = doc.getWithType(root,"bool") assert.deepEqual(result,["boolean",false]) - result = doc.get(root,"time1") + result = doc.getWithType(root,"time1") assert.deepEqual(result,["timestamp",new Date(1000)]) - result = doc.get(root,"time2") + result = doc.getWithType(root,"time2") assert.deepEqual(result,["timestamp",new Date(1001)]) - result = doc.get(root,"list") + result = doc.getWithType(root,"list") assert.deepEqual(result,["list","10@aabbcc"]); - result = doc.get(root,"null") + result = doc.getWithType(root,"null") assert.deepEqual(result,["null",null]); doc.free() @@ -115,9 +117,9 @@ describe('Automerge', () => { const doc = create() doc.put("_root","data1", new Uint8Array([10,11,12])); doc.put("_root","data2", new Uint8Array([13,14,15]), "bytes"); - const value1 = doc.get("_root", "data1") + const value1 = doc.getWithType("_root", "data1") assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]); - const value2 = doc.get("_root", "data2") + const value2 = doc.getWithType("_root", "data2") assert.deepEqual(value2, ["bytes", new Uint8Array([13,14,15])]); doc.free() }) @@ -131,10 +133,10 @@ describe('Automerge', () => { doc.put(submap, "number", 6, "uint") assert.strictEqual(doc.pendingOps(),2) - result = doc.get(root,"submap") + result = doc.getWithType(root,"submap") assert.deepEqual(result,["map",submap]) - result = doc.get(submap,"number") + result = doc.getWithType(submap,"number") assert.deepEqual(result,["uint",6]) doc.free() }) @@ 
-149,15 +151,15 @@ describe('Automerge', () => { doc.insert(submap, 2, "c"); doc.insert(submap, 0, "z"); - assert.deepEqual(doc.get(submap, 0),["str","z"]) - assert.deepEqual(doc.get(submap, 1),["str","a"]) - assert.deepEqual(doc.get(submap, 2),["str","b"]) - assert.deepEqual(doc.get(submap, 3),["str","c"]) + assert.deepEqual(doc.getWithType(submap, 0),["str","z"]) + assert.deepEqual(doc.getWithType(submap, 1),["str","a"]) + assert.deepEqual(doc.getWithType(submap, 2),["str","b"]) + assert.deepEqual(doc.getWithType(submap, 3),["str","c"]) assert.deepEqual(doc.length(submap),4) doc.put(submap, 2, "b v2"); - assert.deepEqual(doc.get(submap, 2),["str","b v2"]) + assert.deepEqual(doc.getWithType(submap, 2),["str","b v2"]) assert.deepEqual(doc.length(submap),4) doc.free() }) @@ -210,9 +212,9 @@ describe('Automerge', () => { const root = "_root" doc.put(root, "xxx", "xxx"); - assert.deepEqual(doc.get(root, "xxx"),["str","xxx"]) + assert.deepEqual(doc.getWithType(root, "xxx"),["str","xxx"]) doc.delete(root, "xxx"); - assert.deepEqual(doc.get(root, "xxx"),undefined) + assert.deepEqual(doc.getWithType(root, "xxx"),undefined) doc.free() }) @@ -221,11 +223,11 @@ describe('Automerge', () => { const root = "_root" doc.put(root, "counter", 10, "counter"); - assert.deepEqual(doc.get(root, "counter"),["counter",10]) + assert.deepEqual(doc.getWithType(root, "counter"),["counter",10]) doc.increment(root, "counter", 10); - assert.deepEqual(doc.get(root, "counter"),["counter",20]) + assert.deepEqual(doc.getWithType(root, "counter"),["counter",20]) doc.increment(root, "counter", -5); - assert.deepEqual(doc.get(root, "counter"),["counter",15]) + assert.deepEqual(doc.getWithType(root, "counter"),["counter",15]) doc.free() }) @@ -237,12 +239,12 @@ describe('Automerge', () => { doc.splice(text, 0, 0, "hello ") doc.splice(text, 6, 0, ["w","o","r","l","d"]) doc.splice(text, 11, 0, ["!","?"]) - assert.deepEqual(doc.get(text, 0),["str","h"]) - assert.deepEqual(doc.get(text, 1),["str","e"]) - 
assert.deepEqual(doc.get(text, 9),["str","l"]) - assert.deepEqual(doc.get(text, 10),["str","d"]) - assert.deepEqual(doc.get(text, 11),["str","!"]) - assert.deepEqual(doc.get(text, 12),["str","?"]) + assert.deepEqual(doc.getWithType(text, 0),["str","h"]) + assert.deepEqual(doc.getWithType(text, 1),["str","e"]) + assert.deepEqual(doc.getWithType(text, 9),["str","l"]) + assert.deepEqual(doc.getWithType(text, 10),["str","d"]) + assert.deepEqual(doc.getWithType(text, 11),["str","!"]) + assert.deepEqual(doc.getWithType(text, 12),["str","?"]) doc.free() }) @@ -251,8 +253,8 @@ describe('Automerge', () => { const text = doc.putObject("/", "text", "Hello world"); const obj = doc.insertObject(text, 6, { hello: "world" }); assert.deepEqual(doc.text(text), "Hello \ufffcworld"); - assert.deepEqual(doc.get(text, 6), ["map", obj]); - assert.deepEqual(doc.get(obj, "hello"), ["str", "world"]); + assert.deepEqual(doc.getWithType(text, 6), ["map", obj]); + assert.deepEqual(doc.getWithType(obj, "hello"), ["str", "world"]); }) it('should be able save all or incrementally', () => { @@ -446,13 +448,13 @@ describe('Automerge', () => { const d = doc1.put(c,"d","dd"); const saved = doc1.save(); const doc2 = load(saved); - assert.deepEqual(doc2.get("_root","a"),["map",a]) + assert.deepEqual(doc2.getWithType("_root","a"),["map",a]) assert.deepEqual(doc2.keys(a),[]) - assert.deepEqual(doc2.get("_root","b"),["map",b]) + assert.deepEqual(doc2.getWithType("_root","b"),["map",b]) assert.deepEqual(doc2.keys(b),[]) - assert.deepEqual(doc2.get("_root","c"),["map",c]) + assert.deepEqual(doc2.getWithType("_root","c"),["map",c]) assert.deepEqual(doc2.keys(c),["d"]) - assert.deepEqual(doc2.get(c,"d"),["str","dd"]) + assert.deepEqual(doc2.getWithType(c,"d"),["str","dd"]) doc1.free() doc2.free() }) @@ -479,7 +481,7 @@ describe('Automerge', () => { const B = A.fork() - assert.deepEqual(B.get("_root","text"), [ "text", At]) + assert.deepEqual(B.getWithType("_root","text"), [ "text", At]) B.splice(At, 4, 1) 
B.splice(At, 4, 0, '!') @@ -492,7 +494,7 @@ describe('Automerge', () => { const C = load(binary) - assert.deepEqual(C.get('_root', 'text'), ['text', '1@aabbcc']) + assert.deepEqual(C.getWithType('_root', 'text'), ['text', '1@aabbcc']) assert.deepEqual(C.text(At), 'hell! world') }) }) @@ -577,8 +579,8 @@ describe('Automerge', () => { doc1.insert('1@aaaa', 1, 'Greenfinch') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc1.get('1@aaaa', 0), ['str', 'Chaffinch']) - assert.deepEqual(doc1.get('1@aaaa', 1), ['str', 'Greenfinch']) + assert.deepEqual(doc1.getWithType('1@aaaa', 0), ['str', 'Chaffinch']) + assert.deepEqual(doc1.getWithType('1@aaaa', 1), ['str', 'Greenfinch']) assert.deepEqual(doc2.popPatches(), [ {action: 'delete', obj: '1@aaaa', key: 0}, {action: 'insert', obj: '1@aaaa', key: 1, value: 'Greenfinch', datatype: 'str'} @@ -603,8 +605,8 @@ describe('Automerge', () => { doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) doc4.loadIncremental(change3); doc4.loadIncremental(change2) - assert.deepEqual([0, 1, 2, 3].map(i => (doc3.get('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) - assert.deepEqual([0, 1, 2, 3].map(i => (doc4.get('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) + assert.deepEqual([0, 1, 2, 3].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) + assert.deepEqual([0, 1, 2, 3].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) assert.deepEqual(doc3.popPatches(), [ {action: 'insert', obj: '1@aaaa', key: 0, value: 'c', datatype: 'str'}, {action: 'insert', obj: '1@aaaa', key: 1, value: 'd', datatype: 'str'}, @@ -636,8 +638,8 @@ describe('Automerge', () => { doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) doc4.loadIncremental(change3); doc4.loadIncremental(change2) - assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc3.get('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) - assert.deepEqual([0, 
1, 2, 3, 4, 5].map(i => (doc4.get('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) + assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) + assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) assert.deepEqual(doc3.popPatches(), [ {action: 'insert', obj: '1@aaaa', key: 2, value: 'e', datatype: 'str'}, {action: 'insert', obj: '1@aaaa', key: 3, value: 'f', datatype: 'str'}, @@ -662,9 +664,9 @@ describe('Automerge', () => { doc4.enablePatches(true) doc3.loadIncremental(change1); doc3.loadIncremental(change2) doc4.loadIncremental(change2); doc4.loadIncremental(change1) - assert.deepEqual(doc3.get('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc3.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) - assert.deepEqual(doc4.get('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc4.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc4.getAll('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) assert.deepEqual(doc3.popPatches(), [ {action: 'put', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', conflict: false}, @@ -689,15 +691,15 @@ describe('Automerge', () => { doc1.loadIncremental(change2); doc1.loadIncremental(change3) doc2.loadIncremental(change3); doc2.loadIncremental(change1) doc3.loadIncremental(change1); doc3.loadIncremental(change2) - assert.deepEqual(doc1.get('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc1.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc1.getAll('_root', 'bird'), [ ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] ]) - assert.deepEqual(doc2.get('_root', 'bird'), ['str', 'Goldfinch']) + 
assert.deepEqual(doc2.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc2.getAll('_root', 'bird'), [ ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] ]) - assert.deepEqual(doc3.get('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc3.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc3.getAll('_root', 'bird'), [ ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] ]) @@ -746,9 +748,9 @@ describe('Automerge', () => { doc2.enablePatches(true) doc1.loadIncremental(change2) doc2.loadIncremental(change1) - assert.deepEqual(doc1.get('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc1.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc1.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) - assert.deepEqual(doc2.get('_root', 'bird'), ['str', 'Goldfinch']) + assert.deepEqual(doc2.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc2.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc1.popPatches(), [ {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} @@ -773,9 +775,9 @@ describe('Automerge', () => { doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) doc4.loadIncremental(change3); doc4.loadIncremental(change2) - assert.deepEqual(doc3.get('1@aaaa', 0), ['str', 'Redwing']) + assert.deepEqual(doc3.getWithType('1@aaaa', 0), ['str', 'Redwing']) assert.deepEqual(doc3.getAll('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) - assert.deepEqual(doc4.get('1@aaaa', 0), ['str', 'Redwing']) + assert.deepEqual(doc4.getWithType('1@aaaa', 0), ['str', 'Redwing']) assert.deepEqual(doc4.getAll('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) assert.deepEqual(doc3.popPatches(), [ {action: 'put', obj: '1@aaaa', key: 0, value: 
'Song Thrush', datatype: 'str', conflict: false}, @@ -839,7 +841,7 @@ describe('Automerge', () => { {action: 'put', obj: '_root', key: 'bird', value: 'Wren', datatype: 'str', conflict: true} ]) doc3.loadIncremental(change3) - assert.deepEqual(doc3.get('_root', 'bird'), ['str', 'Robin']) + assert.deepEqual(doc3.getWithType('_root', 'bird'), ['str', 'Robin']) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa']]) assert.deepEqual(doc3.popPatches(), [ {action: 'put', obj: '_root', key: 'bird', value: 'Robin', datatype: 'str', conflict: false} @@ -875,7 +877,7 @@ describe('Automerge', () => { doc1.put('_root', 'createdAt', now.getTime(), 'timestamp') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc2.get('_root', 'createdAt'), ['timestamp', now]) + assert.deepEqual(doc2.getWithType('_root', 'createdAt'), ['timestamp', now]) assert.deepEqual(doc2.popPatches(), [ {action: 'put', obj: '_root', key: 'createdAt', value: now, datatype: 'timestamp', conflict: false} ]) @@ -995,7 +997,7 @@ describe('Automerge', () => { doc2.loadIncremental(doc1.saveIncremental()) doc1.increment('_root', 'starlings', 1) doc2.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc2.get('_root', 'starlings'), ['counter', 3]) + assert.deepEqual(doc2.getWithType('_root', 'starlings'), ['counter', 3]) assert.deepEqual(doc2.popPatches(), [ {action: 'put', obj: '_root', key: 'starlings', value: 2, datatype: 'counter', conflict: false}, {action: 'increment', obj: '_root', key: 'starlings', value: 1} diff --git a/automerge-wasm/types/automerge-types-0.1.1.tgz b/automerge-wasm/types/automerge-types-0.1.1.tgz deleted file mode 100644 index cc3415bdd139e56e4db44456ca0d058bcc3db3dd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2566 zcmV+h3i5j`TKw5xv=Ii zbZs6;-e~khxQX1=I);j^ zbEgfrx$DTVDa zCvM3! 
zTLg|v7Rhd-F~=pg5I0=eqIWkCF{-LnpbZF%T}_wdnxXCso;nQWqI+4%oQTBZ(FtM~ zeIJTQ$*Y@ri1G&}%*N;Q_r}x&YX+0)_?^`^`_MFIIB&M$y)_?-x|GzH%K=cV+dOq$jEd)1`?REvdkGV zyD+D{0ZxrK*3g=NXv4WR9}&-UWNN^~n9i-<<wo8E=cHc$kDot#{!ssa#`8x5plM_2z#0*;SVXefg)0OicpOnh81Nw4z$-zMf4IRl zRt>2-fIQYkh1_5x|#tNMIK^+67@QcL)kq#T44!K7No{ePEm}WMDi%chUCb{uA>CLWzyF? zBOVIrqRkuPs=0M{bJ7GiSfx`7I)}%H#~PZJeS%^U(lFJm6rN1GYqf^c@?pzhZs73G zht6Rv3zupKb}Nf&flMNJv+=d@W6pme;V1Y1$4{RgZ}tB)dN}_-<7x2E)V!%<2!@S^ z0e%7MYJ=5jWo^9pWcBx~_0}xF!}<$cxP*?LU^173WWXb7wF^N17C%b6-F1h8ez-p z{?0R=f56o>-*h$WkKJvQugFeudR1R~jVo$UclDg!C3Q7-lG#p{tK^ZLB1zf&(wgpSwiSTrkEyS*m7cu6CxI`|_;I>Hc z@)&J1+Qd{mO8bzl&^{@2&&xnQBep6;GZ5a;;yNtmpSNLb;qe^U*zK+NSQ{*7htEb|Y7*L}PtV9VEajFQjmOC{gK4{|<7HM2k-VOs&zr4Ua% z5-D#4pkSIS`8ZX)2#_)*5Q#gKMOkCbEd zu-FAX&%CZ@+Q)iKDyKB0nI0;;P8(Z!bHHv?mLV4~855|ha~+wu@PzQY<(=CyliLC> z@YNJd+}6aS#46z2DNWZXx0`El^$|g~%JWF06F$Or%gXBkiD6V!y1KRyU&(*#rg2C! ztiJI$%phvl(2wQ0ueI)$lQ)JTt~flD6^-S(tI|Ks1g(w3Z6(KHp&52_8DB-S_|(TU z-&tFHx<%m$=Cnv)B_QD*9C$zhq~oWh!>wD5s)H>0wuv3SgE*(EaM>ny16Q4`SxiPB z=*U--eOi`IR4b3s?bDXBN@x4Dt}v6U3eCYTws&czZIJYD$&Q2WuFh+R9-6$qkKb^K zn%^n>pYjfR*WXEBqN&Qy7}%H-B^VP+ot|%pEc}Hah5Lmi+%FK}&d`JZEZE?$hZo#8 zpx~Y%1osXaxObR*tp`f}%L0YV(NS8`K=GkWoy4tD5x~qXLY-OLmBiXEC;BY0qUamp zI;Wb?Sp7QxEpBO-bCBZbaG(rdAyl{Xye8gD6U}XhGP`;1&?ijfx}z zYq5tc$J0R;@i>VB2@E7Ehh3^-3QxdQpG)*79+p?-6kmNwxpjj;HA*YKUq{u+ou!=+ z$}Vnsx>q_atD!#QC

a)UlaV-|PzUTA1N^6?rULeN&viCehtnwIs5_piY&wL#Jt@ z{vF6Fee+-yukS?8em9^iLVn*SinF~W^^1A7b8*B1soOBA`X=9Z<17IeJ)Y`W&q`53 zO{h|Ua!pInCYMUeK`Qt(bv2b7%c0D#hU(n#fId@>L)ofW6ba8;F#Ea;)vAWnMRKX~ zHr;?Gdfw8rywfY6O%Y~D2vtMc4$V%|wS&G8QoW>ajGnNWhX#v?5+yCSwh912t?WoK zZySM_l@WGSh^kc*b2*3`Y_kI%--_s(e86q)-c)K@y>hgRTSK+dWwx(49xFt=i%GSZ z=4VMOR{UC>mXNHKp&ejh=xua?m#wW^Q$>w9L!)=RN2EgKdUH|gLR5PJ^cfwmeuI)KrwXnv^5})_72GE`Due44D@VyM>-#Wj z_xEiyNg7}P@j`{VNmYY(?^wO7+h^PkadeiMBLZ);(^Z8ws5eWz+I0<7+KPnlO8yG3 c0IyFUzE3|q56{E%@O Date: Wed, 1 Jun 2022 08:08:01 -0400 Subject: [PATCH 429/730] fmt / tests --- automerge-wasm/src/lib.rs | 8 ++------ automerge-wasm/test/test.ts | 17 ++++++++++++++--- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index b7220d3b..9111a4de 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -367,12 +367,8 @@ impl Automerge { self.doc.get(&obj, prop)? }; match value { - Some((Value::Object(_), obj_id)) => { - Ok(obj_id.to_string().into()) - } - Some((Value::Scalar(value), _)) => { - Ok(ScalarValue(value).into()) - } + Some((Value::Object(_), obj_id)) => Ok(obj_id.to_string().into()), + Some((Value::Scalar(value), _)) => Ok(ScalarValue(value).into()), None => Ok(JsValue::undefined()), } } else { diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 4129480c..1a29b962 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -1498,6 +1498,7 @@ describe('Automerge', () => { // Apply n3's latest change to n2. 
If running in Node, turn the Uint8Array into a Buffer, to // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) let change = n3.getLastLocalChange() + if (change === null) throw new RangeError("no local change") //@ts-ignore if (typeof Buffer === 'function') change = Buffer.from(change) if (change === undefined) { throw new RangeError("last local change failed") } @@ -1512,8 +1513,12 @@ describe('Automerge', () => { it('should handle histories with lots of branching and merging', () => { const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') n1.put("_root","x",0); n1.commit("",0) - n2.applyChanges([n1.getLastLocalChange()]) - n3.applyChanges([n1.getLastLocalChange()]) + let change1 = n1.getLastLocalChange() + if (change1 === null) throw new RangeError("no local change") + n2.applyChanges([change1]) + let change2 = n1.getLastLocalChange() + if (change2 === null) throw new RangeError("no local change") + n3.applyChanges([change2]) n3.put("_root","x",1); n3.commit("",0) // - n1c1 <------ n1c2 <------ n1c3 <-- etc. 
<-- n1c20 <------ n1c21 @@ -1526,7 +1531,9 @@ describe('Automerge', () => { n1.put("_root","n1",i); n1.commit("",0) n2.put("_root","n2",i); n2.commit("",0) const change1 = n1.getLastLocalChange() + if (change1 === null) throw new RangeError("no local change") const change2 = n2.getLastLocalChange() + if (change2 === null) throw new RangeError("no local change") n1.applyChanges([change2]) n2.applyChanges([change1]) } @@ -1535,7 +1542,9 @@ describe('Automerge', () => { sync(n1, n2, s1, s2) // Having n3's last change concurrent to the last sync heads forces us into the slower code path - n2.applyChanges([n3.getLastLocalChange()]) + const change3 = n2.getLastLocalChange() + if (change3 === null) throw new RangeError("no local change") + n2.applyChanges([change3]) n1.put("_root","n1","final"); n1.commit("",0) n2.put("_root","n2","final"); n2.commit("",0) @@ -1970,8 +1979,10 @@ describe('Automerge', () => { // n2 and n3 apply {c5, c6, c7, c8} n3.put("_root","x",5); n3.commit("",0) const change5 = n3.getLastLocalChange() + if (change5 === null) throw new RangeError("no local change") n3.put("_root","x",6); n3.commit("",0) const change6 = n3.getLastLocalChange(), c6 = n3.getHeads()[0] + if (change6 === null) throw new RangeError("no local change") for (let i = 7; i <= 8; i++) { n3.put("_root","x",i); n3.commit("",0) } From 27dfa4ca2793d3d162b66cc3e9a247b9fc1fdcff Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 1 Jun 2022 16:31:18 -0400 Subject: [PATCH 430/730] missed some bugs related to the wasm api change --- automerge-js/package.json | 7 ++++--- automerge-js/src/proxies.ts | 4 ++-- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/automerge-js/package.json b/automerge-js/package.json index 80c9deca..deebded8 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "0.1.1", + "version": "0.1.2", "description": "Reimplementation of `automerge` on top of the automerge-wasm 
backend", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", @@ -47,11 +47,12 @@ "@types/uuid": "^8.3.4", "@typescript-eslint/eslint-plugin": "^5.25.0", "@typescript-eslint/parser": "^5.25.0", + "automerge-wasm": "^0.1.5", "eslint": "^8.15.0", - "mocha": "^10.0.0", - "ts-mocha": "^10.0.0", "fast-sha256": "^1.3.0", + "mocha": "^10.0.0", "pako": "^2.0.4", + "ts-mocha": "^10.0.0", "typescript": "^4.6.4" }, "dependencies": { diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index 50542716..e3dd015f 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -21,8 +21,8 @@ function parseListIndex(key) { function valueAt(target, prop: Prop) : AutomergeValue | undefined { const { context, objectId, path, readonly, heads} = target - const value = context.get(objectId, prop, heads) - if (value === undefined) { + const value = context.getWithType(objectId, prop, heads) + if (value === null) { return } const datatype = value[0] From ca383f03e43e02d351749c5f116581bfa413fd88 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Wed, 1 Jun 2022 23:10:23 -0700 Subject: [PATCH 431/730] Wrapped all newly-allocated values in an `AMresult` struct. Removed `AMfree()`. Renamed `AMresultFree()` to `AMfree()`. Removed type names from brief descriptions. 
--- automerge-c/cbindgen.toml | 2 +- automerge-c/examples/quickstart.c | 75 ++-- automerge-c/src/byte_span.rs | 2 +- automerge-c/src/change.rs | 12 +- automerge-c/src/change_hashes.rs | 27 +- automerge-c/src/changes.rs | 18 +- automerge-c/src/doc.rs | 125 +++---- automerge-c/src/doc/list.rs | 26 +- automerge-c/src/doc/map.rs | 26 +- automerge-c/src/result.rs | 50 ++- automerge-c/src/sync/haves.rs | 50 ++- automerge-c/src/sync/message.rs | 8 +- automerge-c/src/sync/state.rs | 41 +- automerge-c/test/amdoc_property_tests.c | 8 +- automerge-c/test/amlistput_tests.c | 18 +- automerge-c/test/ammapput_tests.c | 18 +- automerge-c/test/group_state.c | 5 +- automerge-c/test/group_state.h | 1 + automerge-c/test/macro_utils.h | 6 +- automerge-c/test/sync_tests.c | 477 ++++++++++++------------ 20 files changed, 516 insertions(+), 479 deletions(-) diff --git a/automerge-c/cbindgen.toml b/automerge-c/cbindgen.toml index e1a7b6aa..20b7a41b 100644 --- a/automerge-c/cbindgen.toml +++ b/automerge-c/cbindgen.toml @@ -7,7 +7,7 @@ after_includes = """\n /** * \\memberof AMdoc * \\def AM_ROOT - * \\brief The root object of an `AMdoc` struct. + * \\brief The root object of a document. 
*/ #define AM_ROOT NULL """ diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index 66edd378..b25b6172 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -9,7 +9,8 @@ AMvalue test(AMresult*, AMvalueVariant const); * Based on https://automerge.github.io/docs/quickstart */ int main(int argc, char** argv) { - AMdoc* const doc1 = AMcreate(); + AMresult* const doc1_result = AMcreate(); + AMdoc* const doc1 = AMresultValue(doc1_result, 0).doc; if (doc1 == NULL) { fprintf(stderr, "`AMcreate()` failure."); exit(EXIT_FAILURE); @@ -22,69 +23,71 @@ int main(int argc, char** argv) { AMobjId const* const card1 = value.obj_id; AMresult* result = AMmapPutStr(doc1, card1, "title", "Rewrite everything in Clojure"); test(result, AM_VALUE_VOID); - AMresultFree(result); + AMfree(result); result = AMmapPutBool(doc1, card1, "done", false); test(result, AM_VALUE_VOID); - AMresultFree(result); + AMfree(result); AMresult* const card2_result = AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP); value = test(card2_result, AM_VALUE_OBJ_ID); AMobjId const* const card2 = value.obj_id; result = AMmapPutStr(doc1, card2, "title", "Rewrite everything in Haskell"); test(result, AM_VALUE_VOID); - AMresultFree(result); + AMfree(result); result = AMmapPutBool(doc1, card2, "done", false); test(result, AM_VALUE_VOID); - AMresultFree(result); - AMresultFree(card2_result); + AMfree(result); + AMfree(card2_result); result = AMcommit(doc1, "Add card", NULL); test(result, AM_VALUE_CHANGE_HASHES); - AMresultFree(result); + AMfree(result); - AMdoc* doc2 = AMcreate(); + AMresult* doc2_result = AMcreate(); + AMdoc* doc2 = AMresultValue(doc2_result, 0).doc; if (doc2 == NULL) { fprintf(stderr, "`AMcreate()` failure."); - AMresultFree(card1_result); - AMresultFree(cards_result); - AMfree(doc1); + AMfree(card1_result); + AMfree(cards_result); + AMfree(doc1_result); exit(EXIT_FAILURE); } result = AMmerge(doc2, doc1); test(result, 
AM_VALUE_CHANGE_HASHES); - AMresultFree(result); - AMfree(doc2); + AMfree(result); + AMfree(doc2_result); AMresult* const save_result = AMsave(doc1); value = test(save_result, AM_VALUE_BYTES); AMbyteSpan binary = value.bytes; - doc2 = AMload(binary.src, binary.count); - AMresultFree(save_result); + doc2_result = AMload(binary.src, binary.count); + doc2 = AMresultValue(doc2_result, 0).doc; + AMfree(save_result); if (doc2 == NULL) { fprintf(stderr, "`AMload()` failure."); - AMresultFree(card1_result); - AMresultFree(cards_result); - AMfree(doc1); + AMfree(card1_result); + AMfree(cards_result); + AMfree(doc1_result); exit(EXIT_FAILURE); } result = AMmapPutBool(doc1, card1, "done", true); test(result, AM_VALUE_VOID); - AMresultFree(result); + AMfree(result); result = AMcommit(doc1, "Mark card as done", NULL); test(result, AM_VALUE_CHANGE_HASHES); - AMresultFree(result); - AMresultFree(card1_result); + AMfree(result); + AMfree(card1_result); result = AMlistDelete(doc2, cards, 0); test(result, AM_VALUE_VOID); - AMresultFree(result); + AMfree(result); result = AMcommit(doc2, "Delete card", NULL); test(result, AM_VALUE_CHANGE_HASHES); - AMresultFree(result); + AMfree(result); result = AMmerge(doc1, doc2); test(result, AM_VALUE_CHANGE_HASHES); - AMresultFree(result); - AMfree(doc2); + AMfree(result); + AMfree(doc2_result); result = AMgetChanges(doc1, NULL); value = test(result, AM_VALUE_CHANGES); @@ -93,22 +96,22 @@ int main(int argc, char** argv) { size_t const size = AMobjSizeAt(doc1, cards, change); printf("%s %ld\n", AMchangeMessage(change), size); } - AMresultFree(result); - AMresultFree(cards_result); - AMfree(doc1); + AMfree(result); + AMfree(cards_result); + AMfree(doc1_result); } /** - * \brief Extracts an `AMvalue` struct with discriminant \p value_tag - * from \p result or writes a message to `stderr`, frees \p result - * and terminates the program. 
+ * \brief Extracts a value with the given discriminant from the given result + * or writes a message to `stderr`, frees the given result and + * terminates the program. * .* \param[in] result A pointer to an `AMresult` struct. - * \param[in] value_tag An `AMvalue` struct discriminant. + * \param[in] discriminant An `AMvalueVariant` enum tag. * \return An `AMvalue` struct. * \pre \p result must be a valid address. */ -AMvalue test(AMresult* result, AMvalueVariant const value_tag) { +AMvalue test(AMresult* result, AMvalueVariant const discriminant) { static char prelude[64]; if (result == NULL) { @@ -123,11 +126,11 @@ AMvalue test(AMresult* result, AMvalueVariant const value_tag) { default: sprintf(prelude, "Unknown `AMstatus` tag %d", status); } fprintf(stderr, "%s; %s.", prelude, AMerrorMessage(result)); - AMresultFree(result); + AMfree(result); exit(EXIT_FAILURE); } AMvalue const value = AMresultValue(result, 0); - if (value.tag != value_tag) { + if (value.tag != discriminant) { char const* label = NULL; switch (value.tag) { case AM_VALUE_ACTOR_ID: label = "AM_VALUE_ACTOR_ID"; break; @@ -147,7 +150,7 @@ AMvalue test(AMresult* result, AMvalueVariant const value_tag) { default: label = ""; } fprintf(stderr, "Unexpected `AMvalueVariant` tag `%s` (%d).", label, value.tag); - AMresultFree(result); + AMfree(result); exit(EXIT_FAILURE); } return value; diff --git a/automerge-c/src/byte_span.rs b/automerge-c/src/byte_span.rs index d3543604..4ed7198a 100644 --- a/automerge-c/src/byte_span.rs +++ b/automerge-c/src/byte_span.rs @@ -6,7 +6,7 @@ use automerge as am; #[repr(C)] pub struct AMbyteSpan { /// A pointer to an array of bytes. - /// \warning \p src is only valid until the `AMresultFree()` function is + /// \warning \p src is only valid until the `AMfree()` function is /// called on the `AMresult` struct hosting the array of bytes to /// which it points. 
src: *const u8, diff --git a/automerge-c/src/change.rs b/automerge-c/src/change.rs index b7b0e03d..2ebd7469 100644 --- a/automerge-c/src/change.rs +++ b/automerge-c/src/change.rs @@ -61,7 +61,7 @@ pub unsafe extern "C" fn AMchangeActorId(change: *const AMchange) -> AMbyteSpan } /// \memberof AMchange -/// \brief Compresses the raw bytes within an `AMchange` struct. +/// \brief Compresses the raw bytes of a change. /// /// \param[in] change A pointer to an `AMchange` struct. /// \pre \p change must be a valid address. @@ -114,7 +114,7 @@ pub unsafe extern "C" fn AMchangeExtraBytes(change: *const AMchange) -> AMbyteSp } /// \memberof AMchange -/// \brief Loads a change as bytes into an `AMchange` struct. +/// \brief Loads a sequence of bytes into a change. /// /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes in \p src to load. @@ -122,7 +122,7 @@ pub unsafe extern "C" fn AMchangeExtraBytes(change: *const AMchange) -> AMbyteSp /// \pre \p src must be a valid address. /// \pre `0 <=` \p count `<=` length of \p src. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -156,7 +156,7 @@ pub unsafe extern "C" fn AMchangeHash(change: *const AMchange) -> AMbyteSpan { } /// \memberof AMchange -/// \brief Gets the emptiness of a change. +/// \brief Tests the emptiness of a change. /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A boolean. @@ -309,7 +309,7 @@ pub unsafe extern "C" fn AMchangeRawBytes(change: *const AMchange) -> AMbyteSpan } /// \memberof AMchange -/// \brief Loads a document into a sequence of `AMchange` structs. +/// \brief Loads a document into a sequence of changes. /// /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes in \p src to load. 
@@ -318,7 +318,7 @@ pub unsafe extern "C" fn AMchangeRawBytes(change: *const AMchange) -> AMbyteSpan /// \pre \p src must be a valid address. /// \pre `0 <=` \p count `<=` length of \p src. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety diff --git a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs index 926c37eb..d1ea71ff 100644 --- a/automerge-c/src/change_hashes.rs +++ b/automerge-c/src/change_hashes.rs @@ -81,8 +81,8 @@ impl Default for AMchangeHashes { } /// \memberof AMchangeHashes -/// \brief Advances/rewinds an `AMchangeHashes` struct by at most \p |n| -/// positions. +/// \brief Advances/rewinds an iterator over a sequence of change hashes by at +/// most \p |n| positions. /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. /// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum @@ -100,7 +100,8 @@ pub unsafe extern "C" fn AMchangeHashesAdvance(change_hashes: *mut AMchangeHashe } /// \memberof AMchangeHashes -/// \brief Compares two change hash sequences. +/// \brief Compares the sequences of change hashes underlying a pair of +/// iterators. /// /// \param[in] change_hashes1 A pointer to an `AMchangeHashes` struct. /// \param[in] change_hashes2 A pointer to an `AMchangeHashes` struct. @@ -134,15 +135,15 @@ pub unsafe extern "C" fn AMchangeHashesCmp( } /// \memberof AMchangeHashes -/// \brief Gets the `AMbyteSpan` struct at the current position of an -/// `AMchangeHashes`struct and then advances/rewinds it by at most \p |n| -/// positions. +/// \brief Gets the change hash at the current position of an iterator over +/// a sequence of change hashes and then advances/rewinds it by at most +/// \p |n| positions. /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. 
/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum /// number of positions to advance/rewind. -/// \return An `AMbyteSpan` struct that's `NULL` when \p change_hashes was -/// previously advanced/rewound past its forward/backward limit. +/// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes +/// was previously advanced/rewound past its forward/backward limit. /// \pre \p change_hashes must be a valid address. /// \internal /// @@ -162,13 +163,14 @@ pub unsafe extern "C" fn AMchangeHashesNext( } /// \memberof AMchangeHashes -/// \brief Advances/rewinds an `AMchangeHashes` struct by at most \p |n| -/// positions and then gets the `AMbyteSpan` struct at its current position. +/// \brief Advances/rewinds an iterator over a sequence of change hashes by at +/// most \p |n| positions and then gets the change hash at its current +/// position. /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. /// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum /// number of positions to advance/rewind. -/// \return An `AMbyteSpan` struct that's `NULL` when \p change_hashes is +/// \return An `AMbyteSpan` struct that's null when \p change_hashes is /// presently advanced/rewound past its forward/backward limit. /// \pre \p change_hashes must be a valid address. /// \internal @@ -189,7 +191,8 @@ pub unsafe extern "C" fn AMchangeHashesPrev( } /// \memberof AMchangeHashes -/// \brief Gets the size of an `AMchangeHashes` struct. +/// \brief Gets the size of the sequence of change hashes underlying an +/// iterator. /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. /// \return The count of values in \p change_hashes. 
diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs index 86bdec42..e7b5e9ff 100644 --- a/automerge-c/src/changes.rs +++ b/automerge-c/src/changes.rs @@ -100,8 +100,8 @@ impl Default for AMchanges { } /// \memberof AMchanges -/// \brief Advances/rewinds an `AMchanges` struct by at most \p |n| -/// positions. +/// \brief Advances/rewinds an iterator over a sequence of changes by at most +/// \p |n| positions. /// /// \param[in] changes A pointer to an `AMchanges` struct. /// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum @@ -119,7 +119,8 @@ pub unsafe extern "C" fn AMchangesAdvance(changes: *mut AMchanges, n: isize) { } /// \memberof AMchanges -/// \brief Compares two change sequences for equality. +/// \brief Tests the equality of two sequences of changes underlying a pair +/// of iterators. /// /// \param[in] changes1 A pointer to an `AMchanges` struct. /// \param[in] changes2 A pointer to an `AMchanges` struct. @@ -143,8 +144,8 @@ pub unsafe extern "C" fn AMchangesEqual( } /// \memberof AMchanges -/// \brief Gets a pointer to the `AMchange` struct at the current position of -/// an `AMchanges`struct and then advances/rewinds it by at most \p |n| +/// \brief Gets the change at the current position of an iterator over a +/// sequence of changes and then advances/rewinds it by at most \p |n| /// positions. /// /// \param[in] changes A pointer to an `AMchanges` struct. @@ -168,9 +169,8 @@ pub unsafe extern "C" fn AMchangesNext(changes: *mut AMchanges, n: isize) -> *co } /// \memberof AMchanges -/// \brief Advances/rewinds an `AMchanges` struct by at most \p |n| -/// positions and then gets a pointer to the `AMchange` struct at its -/// current position. +/// \brief Advances/rewinds an iterator over a sequence of changes by at most +/// \p |n| positions and then gets the change at its current position. /// /// \param[in] changes A pointer to an `AMchanges` struct. 
/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum @@ -193,7 +193,7 @@ pub unsafe extern "C" fn AMchangesPrev(changes: *mut AMchanges, n: isize) -> *co } /// \memberof AMchanges -/// \brief Gets the size of an `AMchanges` struct. +/// \brief Gets the size of the sequence of changes underlying an iterator. /// /// \param[in] changes A pointer to an `AMchanges` struct. /// \return The count of values in \p changes. diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 828a38b9..c57a6ead 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -50,6 +50,12 @@ impl AMdoc { } } +impl AsRef for AMdoc { + fn as_ref(&self) -> &am::AutoCommit { + &self.0 + } +} + impl Deref for AMdoc { type Target = am::AutoCommit; @@ -64,23 +70,16 @@ impl DerefMut for AMdoc { } } -impl From for *mut AMdoc { - fn from(b: AMdoc) -> Self { - Box::into_raw(Box::new(b)) - } -} - /// \memberof AMdoc /// \brief Applies a sequence of changes to a document. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] changes A pointer to an `AMchanges` struct. -/// \ /// \pre \p doc must be a valid address. /// \pre \p changes must be a valid address. /// \return A pointer to an `AMresult` struct containing a void. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -97,19 +96,20 @@ pub unsafe extern "C" fn AMapplyChanges( } /// \memberof AMdoc -/// \brief Allocates a new `AMdoc` struct and initializes it with defaults. +/// \brief Allocates a new document and initializes it with defaults. /// -/// \return A pointer to an `AMdoc` struct. -/// \warning To avoid a memory leak, the returned `AMdoc` struct must be +/// \return A pointer to an `AMresult` struct containing a pointer to an +/// `AMdoc` struct. 
+/// \warning To avoid a memory leak, the returned `AMresult` struct must be /// deallocated with `AMfree()`. #[no_mangle] -pub extern "C" fn AMcreate() -> *mut AMdoc { - AMdoc::new(am::AutoCommit::new()).into() +pub extern "C" fn AMcreate() -> *mut AMresult { + to_result(am::AutoCommit::new()) } /// \memberof AMdoc -/// \brief Commits the current operations on \p doc with an optional message -/// and/or time override as seconds since the epoch. +/// \brief Commits the current operations on a document with an optional +/// message and/or time override as seconds since the epoch. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] message A UTF-8 string or `NULL`. @@ -118,7 +118,7 @@ pub extern "C" fn AMcreate() -> *mut AMdoc { /// `AMbyteSpan` struct. /// \pre \p doc must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -141,28 +141,27 @@ pub unsafe extern "C" fn AMcommit( } /// \memberof AMdoc -/// \brief Allocates storage for an `AMdoc` struct and initializes it by -/// duplicating the given `AMdoc` struct. +/// \brief Allocates storage for a document and initializes it by duplicating +/// the given document. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing a pointer to an +/// `AMdoc` struct. /// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMdoc` struct must be +/// \warning To avoid a memory leak, the returned `AMresult` struct must be /// deallocated with `AMfree()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc #[no_mangle] -pub unsafe extern "C" fn AMdup(doc: *mut AMdoc) -> *mut AMdoc { - match doc.as_mut() { - Some(doc) => doc.clone().into(), - None => std::ptr::null_mut::(), - } +pub unsafe extern "C" fn AMdup(doc: *mut AMdoc) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.as_ref().clone()) } /// \memberof AMdoc -/// \brief Compares two documents for equality after closing their respective +/// \brief Tests the equality of two documents after closing their respective /// transactions. /// /// \param[in] doc1 An `AMdoc` struct. @@ -184,26 +183,8 @@ pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { } /// \memberof AMdoc -/// \brief Deallocates the storage for an `AMdoc` struct previously -/// allocated by `AMcreate()`, `AMdup()` or `AMload()`. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \pre \p doc must be a valid address. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMfree(doc: *mut AMdoc) { - if !doc.is_null() { - let doc: AMdoc = *Box::from_raw(doc); - drop(doc) - } -} - -/// \memberof AMdoc -/// \brief Generates a synchronization message for a peer based upon the -/// synchronization state \p sync_state. +/// \brief Generates a synchronization message for a peer based upon the given +/// synchronization state. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] sync_state A pointer to an `AMsyncState` struct. @@ -212,7 +193,7 @@ pub unsafe extern "C" fn AMfree(doc: *mut AMdoc) { /// \pre \p doc must b e a valid address. /// \pre \p sync_state must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -236,7 +217,7 @@ pub unsafe extern "C" fn AMgenerateSyncMessage( /// `AMbyteSpan` struct. 
/// \pre \p doc must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -254,7 +235,7 @@ pub unsafe extern "C" fn AMgetActor(doc: *mut AMdoc) -> *mut AMresult { /// \return A pointer to an `AMresult` struct containing a `char const*`. /// \pre \p doc must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -275,7 +256,7 @@ pub unsafe extern "C" fn AMgetActorHex(doc: *mut AMdoc) -> *mut AMresult { /// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. /// \pre \p doc must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -302,7 +283,7 @@ pub unsafe extern "C" fn AMgetChanges( /// struct. /// \pre \p doc must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -323,7 +304,7 @@ pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { /// struct. /// \pre \p doc must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -350,7 +331,7 @@ pub unsafe extern "C" fn AMgetMissingDeps( /// struct or a void. /// \pre \p doc must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. 
/// \internal /// /// # Safety @@ -362,34 +343,30 @@ pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult } /// \memberof AMdoc -/// \brief Allocates storage for an `AMdoc` struct and initializes it with the -/// compact form of an incremental save pointed to by \p src. +/// \brief Allocates storage for a document and initializes it with the compact +/// form of an incremental save. /// /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes in \p src to load. -/// \return A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing a pointer to an +/// `AMdoc` struct. /// \pre \p src must be a valid address. /// \pre `0 <=` \p count `<=` length of \p src. -/// \warning To avoid a memory leak, the returned `AMdoc` struct must be +/// \warning To avoid a memory leak, the returned `AMresult` struct must be /// deallocated with `AMfree()`. /// \internal /// /// # Safety /// src must be a byte array of length `>= count` #[no_mangle] -pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMdoc { +pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMresult { let mut data = Vec::new(); data.extend_from_slice(std::slice::from_raw_parts(src, count)); - if let Ok(auto_commit) = am::AutoCommit::load(&data) { - AMdoc::new(auto_commit).into() - } else { - std::ptr::null_mut::() - } + to_result(am::AutoCommit::load(&data)) } /// \memberof AMdoc -/// \brief Loads the compact form of an incremental save pointed to by \p src -/// into \p doc. +/// \brief Loads the compact form of an incremental save into a document. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] src A pointer to an array of bytes. @@ -400,7 +377,7 @@ pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMdoc { /// \pre \p src must be a valid address. /// \pre `0 <=` \p count `<=` length of \p src. 
/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -428,7 +405,7 @@ pub unsafe extern "C" fn AMloadIncremental( /// \pre \p dest must be a valid address. /// \pre \p src must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -522,14 +499,14 @@ pub unsafe extern "C" fn AMreceiveSyncMessage( } /// \memberof AMdoc -/// \brief Saves the entirety of \p doc into a compact form. +/// \brief Saves the entirety of a document into a compact form. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. /// \pre \p doc must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -541,17 +518,17 @@ pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { } /// \memberof AMdoc -/// \brief Puts an array of bytes as the actor ID value of an `AMdoc` struct. . +/// \brief Puts a sequence of bytes as the actor ID value of a document. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] value A pointer to an array of bytes. +/// \param[in] value A pointer to a contiguous sequence of bytes. /// \param[in] count The number of bytes to copy from \p value. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p value must be a valid address. /// \pre `0 <=` \p count `<=` length of \p value. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. 
/// \internal /// /// # Safety @@ -570,7 +547,7 @@ pub unsafe extern "C" fn AMsetActor( } /// \memberof AMdoc -/// \brief Puts a hexadecimal string as the actor ID value of an `AMdoc` struct. +/// \brief Puts a hexadecimal string as the actor ID value of a document. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] hex_str A string of hexadecimal characters. @@ -578,7 +555,7 @@ pub unsafe extern "C" fn AMsetActor( /// \pre \p doc must be a valid address. /// \pre \p hex_str must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety diff --git a/automerge-c/src/doc/list.rs b/automerge-c/src/doc/list.rs index 8f1718c4..41265eb4 100644 --- a/automerge-c/src/doc/list.rs +++ b/automerge-c/src/doc/list.rs @@ -16,7 +16,7 @@ use crate::result::{to_result, AMresult}; /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -42,7 +42,7 @@ pub unsafe extern "C" fn AMlistDelete( /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -70,7 +70,7 @@ pub unsafe extern "C" fn AMlistGet( /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. 
/// \internal /// /// # Safety @@ -95,7 +95,7 @@ pub unsafe extern "C" fn AMlistPutBool( } /// \memberof AMdoc -/// \brief Puts an array of bytes as the value at an index in a list object. +/// \brief Puts a sequence of bytes as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. @@ -110,7 +110,7 @@ pub unsafe extern "C" fn AMlistPutBool( /// \pre \p value must be a valid address. /// \pre `0 <=` \p count `<=` length of \p value. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -150,7 +150,7 @@ pub unsafe extern "C" fn AMlistPutBytes( /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -187,7 +187,7 @@ pub unsafe extern "C" fn AMlistPutCounter( /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -223,7 +223,7 @@ pub unsafe extern "C" fn AMlistPutF64( /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -258,7 +258,7 @@ pub unsafe extern "C" fn AMlistPutInt( /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. 
/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -294,7 +294,7 @@ pub unsafe extern "C" fn AMlistPutNull( /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -332,7 +332,7 @@ pub unsafe extern "C" fn AMlistPutObject( /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \pre \p value must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -370,7 +370,7 @@ pub unsafe extern "C" fn AMlistPutStr( /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -407,7 +407,7 @@ pub unsafe extern "C" fn AMlistPutTimestamp( /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety diff --git a/automerge-c/src/doc/map.rs b/automerge-c/src/doc/map.rs index 1469e11a..848f1ef8 100644 --- a/automerge-c/src/doc/map.rs +++ b/automerge-c/src/doc/map.rs @@ -17,7 +17,7 @@ use crate::result::{to_result, AMresult}; /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. 
/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -44,7 +44,7 @@ pub unsafe extern "C" fn AMmapDelete( /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -72,7 +72,7 @@ pub unsafe extern "C" fn AMmapGet( /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -91,7 +91,7 @@ pub unsafe extern "C" fn AMmapPutBool( } /// \memberof AMdoc -/// \brief Puts an array of bytes as the value of a key in a map object. +/// \brief Puts a sequence of bytes as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. @@ -104,7 +104,7 @@ pub unsafe extern "C" fn AMmapPutBool( /// \pre \p value must be a valid address. /// \pre `0 <=` \p count `<=` length of \p value. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -137,7 +137,7 @@ pub unsafe extern "C" fn AMmapPutBytes( /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -169,7 +169,7 @@ pub unsafe extern "C" fn AMmapPutCounter( /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. 
/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -197,7 +197,7 @@ pub unsafe extern "C" fn AMmapPutNull( /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -226,7 +226,7 @@ pub unsafe extern "C" fn AMmapPutObject( /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -255,7 +255,7 @@ pub unsafe extern "C" fn AMmapPutF64( /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -285,7 +285,7 @@ pub unsafe extern "C" fn AMmapPutInt( /// \pre \p key must be a valid address. /// \pre \p value must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -315,7 +315,7 @@ pub unsafe extern "C" fn AMmapPutStr( /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -348,7 +348,7 @@ pub unsafe extern "C" fn AMmapPutTimestamp( /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. 
/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 75136566..0624a692 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -7,11 +7,12 @@ use crate::byte_span::AMbyteSpan; use crate::change::AMchange; use crate::change_hashes::AMchangeHashes; use crate::changes::AMchanges; +use crate::doc::AMdoc; use crate::obj::AMobjId; use crate::sync::{AMsyncMessage, AMsyncState}; /// \struct AMvalue -/// \brief A discriminated union of value type variants for an `AMresult` struct. +/// \brief A discriminated union of value type variants for a result. /// /// \enum AMvalueVariant /// \brief A value type discriminant. @@ -68,6 +69,8 @@ pub enum AMvalue<'a> { Changes(AMchanges), /// A CRDT counter variant. Counter(i64), + /// A document variant. + Doc(*mut AMdoc), /// A 64-bit float variant. F64(f64), /// A 64-bit signed integer variant. 
@@ -104,6 +107,7 @@ pub enum AMresult { ActorId(am::ActorId), ChangeHashes(Vec), Changes(Vec, BTreeMap), + Doc(AMdoc), Error(CString), ObjId(AMobjId), Scalars(Vec>, Option), @@ -118,12 +122,24 @@ impl AMresult { } } +impl From for AMresult { + fn from(auto_commit: am::AutoCommit) -> Self { + AMresult::Doc(AMdoc::new(auto_commit)) + } +} + impl From for AMresult { fn from(change_hash: am::ChangeHash) -> Self { AMresult::ChangeHashes(vec![change_hash]) } } +impl From for AMresult { + fn from(state: am::sync::State) -> Self { + AMresult::SyncState(AMsyncState::new(state)) + } +} + impl From> for AMresult { fn from(maybe: Option<&am::Change>) -> Self { match maybe { @@ -159,6 +175,15 @@ impl From> for AMresult { } } +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(auto_commit) => AMresult::Doc(AMdoc::new(auto_commit)), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { @@ -302,7 +327,7 @@ pub enum AMstatus { } /// \memberof AMresult -/// \brief Gets an `AMresult` struct's error message string. +/// \brief Gets a result's error message string. /// /// \param[in] result A pointer to an `AMresult` struct. /// \return A UTF-8 string value or `NULL`. @@ -320,7 +345,7 @@ pub unsafe extern "C" fn AMerrorMessage(result: *mut AMresult) -> *const c_char } /// \memberof AMresult -/// \brief Deallocates the storage for an `AMresult` struct. +/// \brief Deallocates the storage for a result. /// /// \param[in] result A pointer to an `AMresult` struct. /// \pre \p result must be a valid address. 
@@ -329,7 +354,7 @@ pub unsafe extern "C" fn AMerrorMessage(result: *mut AMresult) -> *const c_char /// # Safety /// result must be a pointer to a valid AMresult #[no_mangle] -pub unsafe extern "C" fn AMresultFree(result: *mut AMresult) { +pub unsafe extern "C" fn AMfree(result: *mut AMresult) { if !result.is_null() { let result: AMresult = *Box::from_raw(result); drop(result) @@ -337,7 +362,7 @@ pub unsafe extern "C" fn AMresultFree(result: *mut AMresult) { } /// \memberof AMresult -/// \brief Gets the size of an `AMresult` struct. +/// \brief Gets the size of a result's value. /// /// \param[in] result A pointer to an `AMresult` struct. /// \return The count of values in \p result. @@ -350,13 +375,11 @@ pub unsafe extern "C" fn AMresultFree(result: *mut AMresult) { pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { if let Some(result) = result.as_mut() { match result { - AMresult::ActorId(_) | AMresult::ObjId(_) => 1, + AMresult::Error(_) | AMresult::Void => 0, + AMresult::ActorId(_) | AMresult::Doc(_) | AMresult::ObjId(_) | AMresult::SyncMessage(_) | AMresult::SyncState(_) => 1, AMresult::ChangeHashes(change_hashes) => change_hashes.len(), AMresult::Changes(changes, _) => changes.len(), - AMresult::Error(_) | AMresult::Void => 0, AMresult::Scalars(vec, _) => vec.len(), - AMresult::SyncMessage(_) => 1, - AMresult::SyncState(_) => 1, } } else { 0 @@ -364,7 +387,7 @@ pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { } /// \memberof AMresult -/// \brief Gets the status code of an `AMresult` struct. +/// \brief Gets the status code of a result. /// /// \param[in] result A pointer to an `AMresult` struct. /// \return An `AMstatus` enum tag. @@ -383,7 +406,7 @@ pub unsafe extern "C" fn AMresultStatus(result: *mut AMresult) -> AMstatus { } /// \memberof AMresult -/// \brief Gets a value from an `AMresult` struct. +/// \brief Gets a result's value. /// /// \param[in] result A pointer to an `AMresult` struct. 
/// \param[in] index The index of a value. @@ -410,6 +433,11 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult, index: usize) AMresult::Changes(changes, storage) => { value = AMvalue::Changes(AMchanges::new(changes, storage)); } + AMresult::Doc(doc) => { + if index == 0 { + value = AMvalue::Doc(doc) + } + } AMresult::Error(_) => {} AMresult::ObjId(obj_id) => { if index == 0 { diff --git a/automerge-c/src/sync/haves.rs b/automerge-c/src/sync/haves.rs index c929ff8d..5cfef5ec 100644 --- a/automerge-c/src/sync/haves.rs +++ b/automerge-c/src/sync/haves.rs @@ -33,7 +33,7 @@ impl AMsyncHaves { pub fn advance(&mut self, n: isize) { let len = self.len as isize; if n != 0 && self.offset >= -len && self.offset < len { - // It's being advanced and it's hasn't stopped. + // It's being advanced and its hasn't stopped. self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); }; } @@ -100,8 +100,8 @@ impl Default for AMsyncHaves { } /// \memberof AMsyncHaves -/// \brief Advances/rewinds an `AMsyncHaves` struct by at most \p |n| -/// positions. +/// \brief Advances/rewinds an iterator over a sequence of synchronization +/// haves by at most \p |n| positions. /// /// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. /// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum @@ -119,15 +119,40 @@ pub unsafe extern "C" fn AMsyncHavesAdvance(sync_haves: *mut AMsyncHaves, n: isi } /// \memberof AMsyncHaves -/// \brief Gets a pointer to the `AMsyncHave` struct at the current position of -/// an `AMsyncHaves`struct and then advances/rewinds it by at most \p |n| -/// positions. +/// \brief Tests the equality of two sequences of synchronization haves +/// underlying a pair of iterators. +/// +/// \param[in] sync_haves1 A pointer to an `AMsyncHaves` struct. +/// \param[in] sync_haves2 A pointer to an `AMsyncHaves` struct. +/// \return `true` if \p sync_haves1 `==` \p sync_haves2 and `false` otherwise. 
+/// \pre \p sync_haves1 must be a valid address. +/// \pre \p sync_haves2 must be a valid address. +/// \internal +/// +/// #Safety +/// sync_haves1 must be a pointer to a valid AMsyncHaves +/// sync_haves2 must be a pointer to a valid AMsyncHaves +#[no_mangle] +pub unsafe extern "C" fn AMsyncHavesEqual( + sync_haves1: *const AMsyncHaves, + sync_haves2: *const AMsyncHaves, +) -> bool { + match (sync_haves1.as_ref(), sync_haves2.as_ref()) { + (Some(sync_haves1), Some(sync_haves2)) => sync_haves1.as_ref() == sync_haves2.as_ref(), + (None, Some(_)) | (Some(_), None) | (None, None) => false, + } +} + +/// \memberof AMsyncHaves +/// \brief Gets the synchronization have at the current position of an iterator +/// over a sequence of synchronization haves and then advances/rewinds +/// it by at most \p |n| positions. /// /// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. /// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum /// number of positions to advance/rewind. -/// \return A pointer to an `AMsyncHave` struct that's `NULL` when \p sync_haves -/// was previously advanced/rewound past its +/// \return A pointer to an `AMsyncHave` struct that's `NULL` when +/// \p sync_haves was previously advanced/rewound past its /// forward/backward limit. /// \pre \p sync_haves must be a valid address. /// \internal @@ -148,9 +173,9 @@ pub unsafe extern "C" fn AMsyncHavesNext( } /// \memberof AMsyncHaves -/// \brief Advances/rewinds an `AMsyncHaves` struct by at most \p |n| -/// positions and then gets a pointer to the `AMsyncHave` struct at its -/// current position. +/// \brief Advances/rewinds an iterator over a sequence of synchronization +/// haves by at most \p |n| positions and then gets the synchronization +/// have at its current position. /// /// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. 
/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum @@ -177,7 +202,8 @@ pub unsafe extern "C" fn AMsyncHavesPrev( } /// \memberof AMsyncHaves -/// \brief Gets the size of an `AMsyncHaves` struct. +/// \brief Gets the size of the sequence of synchronization haves underlying an +/// iterator. /// /// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. /// \return The count of values in \p sync_haves. diff --git a/automerge-c/src/sync/message.rs b/automerge-c/src/sync/message.rs index 70f9e1f1..6481e671 100644 --- a/automerge-c/src/sync/message.rs +++ b/automerge-c/src/sync/message.rs @@ -68,7 +68,7 @@ pub unsafe extern "C" fn AMsyncMessageChanges(sync_message: *const AMsyncMessage } /// \memberof AMsyncMessage -/// \brief Decodes an array of bytes into a synchronization message. +/// \brief Decodes a sequence of bytes into a synchronization message. /// /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes in \p src to decode. @@ -77,7 +77,7 @@ pub unsafe extern "C" fn AMsyncMessageChanges(sync_message: *const AMsyncMessage /// \pre \p src must be a valid address. /// \pre `0 <=` \p count `<=` length of \p src. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -90,14 +90,14 @@ pub unsafe extern "C" fn AMsyncMessageDecode(src: *const u8, count: usize) -> *m } /// \memberof AMsyncMessage -/// \brief Encodes a synchronization message as an array of bytes. +/// \brief Encodes a synchronization message as a sequence of bytes. /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. /// \pre \p sync_message must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. 
+/// deallocated with `AMfree()`. /// \internal /// /// # Safety diff --git a/automerge-c/src/sync/state.rs b/automerge-c/src/sync/state.rs index 16a5fae2..c44256c8 100644 --- a/automerge-c/src/sync/state.rs +++ b/automerge-c/src/sync/state.rs @@ -54,7 +54,7 @@ impl From for *mut AMsyncState { } /// \memberof AMsyncState -/// \brief Decodes an array of bytes into a synchronizaton state. +/// \brief Decodes a sequence of bytes into a synchronizaton state. /// /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes in \p src to decode. @@ -63,7 +63,7 @@ impl From for *mut AMsyncState { /// \pre \p src must be a valid address. /// \pre `0 <=` \p count `<=` length of \p src. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -76,14 +76,14 @@ pub unsafe extern "C" fn AMsyncStateDecode(src: *const u8, count: usize) -> *mut } /// \memberof AMsyncState -/// \brief Encodes a synchronizaton state as an array of bytes. +/// \brief Encodes a synchronizaton state as a sequence of bytes. /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. /// \pre \p sync_state must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMresultFree()`. +/// deallocated with `AMfree()`. /// \internal /// /// # Safety @@ -95,7 +95,7 @@ pub unsafe extern "C" fn AMsyncStateEncode(sync_state: *const AMsyncState) -> *m } /// \memberof AMsyncState -/// \brief Compares two synchronization states for equality. +/// \brief Tests the equality of two synchronization states. /// /// \param[in] sync_state1 A pointer to an `AMsyncState` struct. /// \param[in] sync_state2 A pointer to an `AMsyncState` struct. 
@@ -119,33 +119,16 @@ pub unsafe extern "C" fn AMsyncStateEqual( } /// \memberof AMsyncState -/// \brief Deallocates the storage for an `AMsyncState` struct previously -/// allocated by `AMsyncStateInit()`. -/// -/// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \pre \p sync_state must be a valid address. -/// \internal -/// -/// # Safety -/// sync_state must be a pointer to a valid AMsyncState -#[no_mangle] -pub unsafe extern "C" fn AMsyncStateFree(sync_state: *mut AMsyncState) { - if !sync_state.is_null() { - let sync_state: AMsyncState = *Box::from_raw(sync_state); - drop(sync_state) - } -} - -/// \memberof AMsyncState -/// \brief Allocates a new `AMsyncState` struct and initializes it with +/// \brief Allocates a new synchronization state and initializes it with /// defaults. /// -/// \return A pointer to an `AMsyncState` struct. -/// \warning To avoid a memory leak, the returned `AMsyncState` struct must be -/// deallocated with `AMsyncStateFree()`. +/// \return A pointer to an `AMresult` struct containing a pointer to an +/// `AMsyncState` struct. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfree()`. 
#[no_mangle] -pub extern "C" fn AMsyncStateInit() -> *mut AMsyncState { - AMsyncState::new(am::sync::State::new()).into() +pub extern "C" fn AMsyncStateInit() -> *mut AMresult { + to_result(am::sync::State::new()) } /// \memberof AMsyncState diff --git a/automerge-c/test/amdoc_property_tests.c b/automerge-c/test/amdoc_property_tests.c index c2de18d3..092a4b00 100644 --- a/automerge-c/test/amdoc_property_tests.c +++ b/automerge-c/test/amdoc_property_tests.c @@ -61,7 +61,7 @@ static void test_AMputActor(void **state) { assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); assert_int_equal(value.tag, AM_VALUE_VOID); - AMresultFree(res); + AMfree(res); res = AMgetActor(group_state->doc); if (AMresultStatus(res) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(res)); @@ -71,7 +71,7 @@ static void test_AMputActor(void **state) { assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); assert_int_equal(value.actor_id.count, test_state->actor_id_size); assert_memory_equal(value.actor_id.src, test_state->actor_id_bytes, value.actor_id.count); - AMresultFree(res); + AMfree(res); } static void test_AMputActorHex(void **state) { @@ -87,7 +87,7 @@ static void test_AMputActorHex(void **state) { assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); assert_int_equal(value.tag, AM_VALUE_VOID); - AMresultFree(res); + AMfree(res); res = AMgetActorHex(group_state->doc); if (AMresultStatus(res) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(res)); @@ -97,7 +97,7 @@ static void test_AMputActorHex(void **state) { assert_int_equal(value.tag, AM_VALUE_STR); assert_int_equal(strlen(value.str), test_state->actor_id_size * 2); assert_string_equal(value.str, test_state->actor_id_str); - AMresultFree(res); + AMfree(res); } int run_AMdoc_property_tests(void) { diff --git a/automerge-c/test/amlistput_tests.c b/automerge-c/test/amlistput_tests.c index 59943f63..bca8b80c 100644 --- a/automerge-c/test/amlistput_tests.c +++ 
b/automerge-c/test/amlistput_tests.c @@ -27,7 +27,7 @@ static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMresultFree(res); \ + AMfree(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ @@ -36,7 +36,7 @@ static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ value = AMresultValue(res, 0); \ assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ assert_true(value.member == scalar_value); \ - AMresultFree(res); \ + AMfree(res); \ } #define test_AMlistPutBytes(mode) test_AMlistPutBytes ## _ ## mode @@ -60,7 +60,7 @@ static void test_AMlistPutBytes_ ## mode(void **state) { \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMresultFree(res); \ + AMfree(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ @@ -70,7 +70,7 @@ static void test_AMlistPutBytes_ ## mode(void **state) { \ assert_int_equal(value.tag, AM_VALUE_BYTES); \ assert_int_equal(value.bytes.count, BYTES_SIZE); \ assert_memory_equal(value.bytes.src, bytes_value, BYTES_SIZE); \ - AMresultFree(res); \ + AMfree(res); \ } #define test_AMlistPutNull(mode) test_AMlistPutNull_ ## mode @@ -86,7 +86,7 @@ static void test_AMlistPutNull_ ## mode(void **state) { \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMresultFree(res); \ + AMfree(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ @@ -94,7 +94,7 @@ static void test_AMlistPutNull_ ## mode(void **state) { \ assert_int_equal(AMresultSize(res), 1); \ value = 
AMresultValue(res, 0); \ assert_int_equal(value.tag, AM_VALUE_NULL); \ - AMresultFree(res); \ + AMfree(res); \ } #define test_AMlistPutObject(label, mode) test_AMlistPutObject_ ## label ## _ ## mode @@ -117,7 +117,7 @@ static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ assert_non_null(value.obj_id); \ assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ - AMresultFree(res); \ + AMfree(res); \ } #define test_AMlistPutStr(mode) test_AMlistPutStr ## _ ## mode @@ -140,7 +140,7 @@ static void test_AMlistPutStr_ ## mode(void **state) { \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMresultFree(res); \ + AMfree(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ @@ -150,7 +150,7 @@ static void test_AMlistPutStr_ ## mode(void **state) { \ assert_int_equal(value.tag, AM_VALUE_STR); \ assert_int_equal(strlen(value.str), STR_LEN); \ assert_memory_equal(value.str, str_value, STR_LEN + 1); \ - AMresultFree(res); \ + AMfree(res); \ } static_void_test_AMlistPut(Bool, insert, boolean, true) diff --git a/automerge-c/test/ammapput_tests.c b/automerge-c/test/ammapput_tests.c index cf881cfd..c86809ed 100644 --- a/automerge-c/test/ammapput_tests.c +++ b/automerge-c/test/ammapput_tests.c @@ -30,7 +30,7 @@ static void test_AMmapPut ## suffix(void **state) { \ assert_int_equal(AMresultSize(res), 0); \ AMvalue value = AMresultValue(res, 0); \ assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMresultFree(res); \ + AMfree(res); \ res = AMmapGet(group_state->doc, AM_ROOT, #suffix); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ @@ -39,7 +39,7 @@ static void test_AMmapPut ## suffix(void **state) { \ value = AMresultValue(res, 0); \ assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ 
assert_true(value.member == scalar_value); \ - AMresultFree(res); \ + AMfree(res); \ } #define test_AMmapPutObject(label) test_AMmapPutObject_ ## label @@ -61,7 +61,7 @@ static void test_AMmapPutObject_ ## label(void **state) { \ assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ assert_non_null(value.obj_id); \ assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ - AMresultFree(res); \ + AMfree(res); \ } static_void_test_AMmapPut(Bool, boolean, true) @@ -85,7 +85,7 @@ static void test_AMmapPutBytes(void **state) { assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); assert_int_equal(value.tag, AM_VALUE_VOID); - AMresultFree(res); + AMfree(res); res = AMmapGet(group_state->doc, AM_ROOT, KEY); if (AMresultStatus(res) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(res)); @@ -95,7 +95,7 @@ static void test_AMmapPutBytes(void **state) { assert_int_equal(value.tag, AM_VALUE_BYTES); assert_int_equal(value.bytes.count, BYTES_SIZE); assert_memory_equal(value.bytes.src, BYTES_VALUE, BYTES_SIZE); - AMresultFree(res); + AMfree(res); } static_void_test_AMmapPut(Counter, counter, INT64_MAX) @@ -115,7 +115,7 @@ static void test_AMmapPutNull(void **state) { assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); assert_int_equal(value.tag, AM_VALUE_VOID); - AMresultFree(res); + AMfree(res); res = AMmapGet(group_state->doc, AM_ROOT, KEY); if (AMresultStatus(res) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(res)); @@ -123,7 +123,7 @@ static void test_AMmapPutNull(void **state) { assert_int_equal(AMresultSize(res), 1); value = AMresultValue(res, 0); assert_int_equal(value.tag, AM_VALUE_NULL); - AMresultFree(res); + AMfree(res); } static_void_test_AMmapPutObject(List) @@ -150,7 +150,7 @@ static void test_AMmapPutStr(void **state) { assert_int_equal(AMresultSize(res), 0); AMvalue value = AMresultValue(res, 0); assert_int_equal(value.tag, AM_VALUE_VOID); - AMresultFree(res); + AMfree(res); res = AMmapGet(group_state->doc, 
AM_ROOT, KEY); if (AMresultStatus(res) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(res)); @@ -160,7 +160,7 @@ static void test_AMmapPutStr(void **state) { assert_int_equal(value.tag, AM_VALUE_STR); assert_int_equal(strlen(value.str), STR_LEN); assert_memory_equal(value.str, STR_VALUE, STR_LEN + 1); - AMresultFree(res); + AMfree(res); } static_void_test_AMmapPut(Timestamp, timestamp, INT64_MAX) diff --git a/automerge-c/test/group_state.c b/automerge-c/test/group_state.c index ab35321a..784f6013 100644 --- a/automerge-c/test/group_state.c +++ b/automerge-c/test/group_state.c @@ -5,14 +5,15 @@ int group_setup(void** state) { GroupState* group_state = calloc(1, sizeof(GroupState)); - group_state->doc = AMcreate(); + group_state->doc_result = AMcreate(); + group_state->doc = AMresultValue(group_state->doc_result, 0).doc; *state = group_state; return 0; } int group_teardown(void** state) { GroupState* group_state = *state; - AMfree(group_state->doc); + AMfree(group_state->doc_result); free(group_state); return 0; } diff --git a/automerge-c/test/group_state.h b/automerge-c/test/group_state.h index 749209c2..84dba588 100644 --- a/automerge-c/test/group_state.h +++ b/automerge-c/test/group_state.h @@ -5,6 +5,7 @@ #include "automerge.h" typedef struct { + AMresult* doc_result; AMdoc* doc; } GroupState; diff --git a/automerge-c/test/macro_utils.h b/automerge-c/test/macro_utils.h index 334557eb..f9ec400c 100644 --- a/automerge-c/test/macro_utils.h +++ b/automerge-c/test/macro_utils.h @@ -5,8 +5,8 @@ #include "automerge.h" /** - * \brief Gets the `AMvalue` struct discriminant corresponding to a function - * name suffix. + * \brief Gets the result value discriminant corresponding to a function name + * suffix. * * \param[in] suffix A string. * \return An `AMvalue` struct discriminant. @@ -14,7 +14,7 @@ AMvalueVariant AMvalue_discriminant(char const* suffix); /** - * \brief Gets the `AMobjType` enum tag corresponding to an object type label. 
+ * \brief Gets the object type tag corresponding to an object type label. * * \param[in] obj_type_label A string. * \return An `AMobjType` enum tag. diff --git a/automerge-c/test/sync_tests.c b/automerge-c/test/sync_tests.c index 59d5043f..3d466a7d 100644 --- a/automerge-c/test/sync_tests.c +++ b/automerge-c/test/sync_tests.c @@ -11,28 +11,36 @@ #include "automerge.h" typedef struct { + AMresult* doc1_result; AMdoc* doc1; + AMresult* doc2_result; AMdoc* doc2; + AMresult* sync_state1_result; AMsyncState* sync_state1; + AMresult* sync_state2_result; AMsyncState* sync_state2; } TestState; static int setup(void** state) { TestState* test_state = calloc(1, sizeof(TestState)); - test_state->doc1 = AMcreate(); - test_state->doc2 = AMcreate(); - test_state->sync_state1 = AMsyncStateInit(); - test_state->sync_state2 = AMsyncStateInit(); + test_state->doc1_result = AMcreate(); + test_state->doc1 = AMresultValue(test_state->doc1_result, 0).doc; + test_state->doc2_result = AMcreate(); + test_state->doc2 = AMresultValue(test_state->doc2_result, 0).doc; + test_state->sync_state1_result = AMsyncStateInit(); + test_state->sync_state1 = AMresultValue(test_state->sync_state1_result, 0).sync_state; + test_state->sync_state2_result = AMsyncStateInit(); + test_state->sync_state2 = AMresultValue(test_state->sync_state2_result, 0).sync_state; *state = test_state; return 0; } static int teardown(void** state) { TestState* test_state = *state; - AMfree(test_state->doc1); - AMfree(test_state->doc2); - AMsyncStateFree(test_state->sync_state1); - AMsyncStateFree(test_state->sync_state2); + AMfree(test_state->doc1_result); + AMfree(test_state->doc2_result); + AMfree(test_state->sync_state1_result); + AMfree(test_state->sync_state2_result); free(test_state); return 0; } @@ -53,7 +61,7 @@ static void sync(AMdoc* a, switch (value.tag) { case AM_VALUE_SYNC_MESSAGE: { a2b_msg = value.sync_message; - AMresultFree(AMreceiveSyncMessage(b, b_sync_state, a2b_msg)); + AMfree(AMreceiveSyncMessage(b, 
b_sync_state, a2b_msg)); } break; case AM_VALUE_VOID: a2b_msg = NULL; break; @@ -62,7 +70,7 @@ static void sync(AMdoc* a, switch (value.tag) { case AM_VALUE_SYNC_MESSAGE: { b2a_msg = value.sync_message; - AMresultFree(AMreceiveSyncMessage(a, a_sync_state, b2a_msg)); + AMfree(AMreceiveSyncMessage(a, a_sync_state, b2a_msg)); } break; case AM_VALUE_VOID: b2a_msg = NULL; break; @@ -101,7 +109,7 @@ static void test_converged_empty_local_doc_reply_no_local_data(void **state) { assert_int_equal(AMchangeHashesSize(&last_sync), 0); AMchanges changes = AMsyncMessageChanges(sync_message); assert_int_equal(AMchangesSize(&changes), 0); - AMresultFree(sync_message_result); + AMfree(sync_message_result); } /** @@ -110,14 +118,14 @@ static void test_converged_empty_local_doc_reply_no_local_data(void **state) { */ static void test_converged_empty_local_doc_no_reply(void **state) { TestState* test_state = *state; - AMresult* sync_message_result1 = AMgenerateSyncMessage( + AMresult* sync_message1_result = AMgenerateSyncMessage( test_state->doc1, test_state->sync_state1 ); - if (AMresultStatus(sync_message_result1) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(sync_message_result1)); + if (AMresultStatus(sync_message1_result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(sync_message1_result)); } - assert_int_equal(AMresultSize(sync_message_result1), 1); - AMvalue value = AMresultValue(sync_message_result1, 0); + assert_int_equal(AMresultSize(sync_message1_result), 1); + AMvalue value = AMresultValue(sync_message1_result, 0); assert_int_equal(value.tag, AM_VALUE_SYNC_MESSAGE); AMsyncMessage const* sync_message1 = value.sync_message; AMresult* result = AMreceiveSyncMessage( @@ -129,18 +137,18 @@ static void test_converged_empty_local_doc_no_reply(void **state) { assert_int_equal(AMresultSize(result), 0); value = AMresultValue(result, 0); assert_int_equal(value.tag, AM_VALUE_VOID); - AMresultFree(result); - AMresult* sync_message_result2 = AMgenerateSyncMessage( + 
AMfree(result); + AMresult* sync_message2_result = AMgenerateSyncMessage( test_state->doc2, test_state->sync_state2 ); - if (AMresultStatus(sync_message_result2) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(sync_message_result2)); + if (AMresultStatus(sync_message2_result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(sync_message2_result)); } - assert_int_equal(AMresultSize(sync_message_result2), 0); - value = AMresultValue(sync_message_result2, 0); + assert_int_equal(AMresultSize(sync_message2_result), 0); + value = AMresultValue(sync_message2_result, 0); assert_int_equal(value.tag, AM_VALUE_VOID); - AMresultFree(sync_message_result2); - AMresultFree(sync_message_result1); + AMfree(sync_message2_result); + AMfree(sync_message1_result); } /** @@ -153,37 +161,37 @@ static void test_converged_equal_heads_no_reply(void **state) { /* Make two nodes with the same changes. */ time_t const time = 0; for (size_t index = 0; index != 10; ++index) { - AMresultFree(AMlistPutUint(test_state->doc1, AM_ROOT, index, true, index)); + AMfree(AMlistPutUint(test_state->doc1, AM_ROOT, index, true, index)); AMcommit(test_state->doc1, NULL, &time); } AMresult* changes_result = AMgetChanges(test_state->doc1, NULL); AMvalue value = AMresultValue(changes_result, 0); - AMresultFree(AMapplyChanges(test_state->doc2, &value.changes)); - AMresultFree(changes_result); + AMfree(AMapplyChanges(test_state->doc2, &value.changes)); + AMfree(changes_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); /* Generate a naive sync message. 
*/ - AMresult* sync_message_result1 = AMgenerateSyncMessage( + AMresult* sync_message1_result = AMgenerateSyncMessage( test_state->doc1, test_state->sync_state1 ); - AMsyncMessage const* sync_message1 = AMresultValue(sync_message_result1, 0).sync_message; + AMsyncMessage const* sync_message1 = AMresultValue(sync_message1_result, 0).sync_message; AMchangeHashes last_sent_heads = AMsyncStateLastSentHeads(test_state->sync_state1); AMresult* heads_result = AMgetHeads(test_state->doc1); AMchangeHashes heads = AMresultValue(heads_result, 0).change_hashes; assert_int_equal(AMchangeHashesCmp(&last_sent_heads, &heads), 0); - AMresultFree(heads_result); + AMfree(heads_result); /* Heads are equal so this message should be void. */ - AMresultFree(AMreceiveSyncMessage( + AMfree(AMreceiveSyncMessage( test_state->doc2, test_state->sync_state2, sync_message1 )); - AMresultFree(sync_message_result1); - AMresult* sync_message_result2 = AMgenerateSyncMessage( + AMfree(sync_message1_result); + AMresult* sync_message2_result = AMgenerateSyncMessage( test_state->doc2, test_state->sync_state2 ); - assert_int_equal(AMresultValue(sync_message_result2, 0).tag, AM_VALUE_VOID); - AMresultFree(sync_message_result2); + assert_int_equal(AMresultValue(sync_message2_result, 0).tag, AM_VALUE_VOID); + AMfree(sync_message2_result); } /** @@ -197,7 +205,7 @@ static void test_converged_offer_all_changes_from_nothing(void **state) { /* Make changes for the first node that the second node should request. */ time_t const time = 0; for (size_t index = 0; index != 10; ++index) { - AMresultFree(AMlistPutUint(test_state->doc1, AM_ROOT, index, true, index)); + AMfree(AMlistPutUint(test_state->doc1, AM_ROOT, index, true, index)); AMcommit(test_state->doc1, NULL, &time); } @@ -219,7 +227,7 @@ static void test_converged_sync_peers_with_uneven_commits(void **state) { /* Make changes for the first node that the second node should request. 
*/ time_t const time = 0; for (size_t index = 0; index != 10; ++index) { - AMresultFree(AMlistPutUint(test_state->doc1, AM_ROOT, index, true, index)); + AMfree(AMlistPutUint(test_state->doc1, AM_ROOT, index, true, index)); AMcommit(test_state->doc1, NULL, &time); } @@ -241,7 +249,7 @@ static void test_converged_works_with_prior_sync_state(void **state) { time_t const time = 0; for (size_t value = 0; value != 5; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } sync(test_state->doc1, @@ -251,7 +259,7 @@ static void test_converged_works_with_prior_sync_state(void **state) { /* Modify the first node further. */ for (size_t value = 5; value != 10; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } @@ -270,14 +278,14 @@ static void test_converged_works_with_prior_sync_state(void **state) { static void test_converged_no_message_once_synced(void **state) { /* Create & synchronize two nodes. 
*/ TestState* test_state = *state; - AMresultFree(AMsetActorHex(test_state->doc1, "abc123")); - AMresultFree(AMsetActorHex(test_state->doc2, "def456")); + AMfree(AMsetActorHex(test_state->doc1, "abc123")); + AMfree(AMsetActorHex(test_state->doc2, "def456")); time_t const time = 0; for (size_t value = 0; value != 5; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); - AMresultFree(AMmapPutUint(test_state->doc2, AM_ROOT, "y", value)); + AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "y", value)); AMcommit(test_state->doc2, NULL, &time); } @@ -288,10 +296,10 @@ static void test_converged_no_message_once_synced(void **state) { /* The second node receives that message and sends changes along with what * it has. */ - AMresultFree(AMreceiveSyncMessage(test_state->doc2, + AMfree(AMreceiveSyncMessage(test_state->doc2, test_state->sync_state2, message)); - AMresultFree(message_result); + AMfree(message_result); message_result = AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2); message = AMresultValue(message_result, 0).sync_message; @@ -300,10 +308,10 @@ static void test_converged_no_message_once_synced(void **state) { /* The first node receives the changes and replies with the changes it now * knows that the second node needs. */ - AMresultFree(AMreceiveSyncMessage(test_state->doc1, + AMfree(AMreceiveSyncMessage(test_state->doc1, test_state->sync_state1, message)); - AMresultFree(message_result); + AMfree(message_result); message_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); message = AMresultValue(message_result, 0).sync_message; @@ -312,29 +320,29 @@ static void test_converged_no_message_once_synced(void **state) { /* The second node applies the changes and sends confirmation ending the * exchange. 
*/ - AMresultFree(AMreceiveSyncMessage(test_state->doc2, + AMfree(AMreceiveSyncMessage(test_state->doc2, test_state->sync_state2, message)); - AMresultFree(message_result); + AMfree(message_result); message_result = AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2); message = AMresultValue(message_result, 0).sync_message; /* The first node receives the message and has nothing more to say. */ - AMresultFree(AMreceiveSyncMessage(test_state->doc1, + AMfree(AMreceiveSyncMessage(test_state->doc1, test_state->sync_state1, message)); - AMresultFree(message_result); + AMfree(message_result); message_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); assert_int_equal(AMresultValue(message_result, 0).tag, AM_VALUE_VOID); - AMresultFree(message_result); + AMfree(message_result); /* The second node also has nothing left to say. */ message_result = AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2); assert_int_equal(AMresultValue(message_result, 0).tag, AM_VALUE_VOID); - AMresultFree(message_result); + AMfree(message_result); } /** @@ -344,21 +352,21 @@ static void test_converged_no_message_once_synced(void **state) { static void test_converged_allow_simultaneous_messages(void **state) { /* Create & synchronize two nodes. 
*/ TestState* test_state = *state; - AMresultFree(AMsetActorHex(test_state->doc1, "abc123")); - AMresultFree(AMsetActorHex(test_state->doc2, "def456")); + AMfree(AMsetActorHex(test_state->doc1, "abc123")); + AMfree(AMsetActorHex(test_state->doc2, "def456")); time_t const time = 0; for (size_t value = 0; value != 5; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); - AMresultFree(AMmapPutUint(test_state->doc2, AM_ROOT, "y", value)); + AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "y", value)); AMcommit(test_state->doc2, NULL, &time); } - AMresult* heads_result1 = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; + AMresult* heads1_result = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; AMbyteSpan head1 = AMchangeHashesNext(&heads1, 1); - AMresult* heads_result2 = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + AMresult* heads2_result = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; AMbyteSpan head2 = AMchangeHashesNext(&heads2, 1); /* Both sides report what they have but have no shared peer state. */ @@ -383,14 +391,14 @@ static void test_converged_allow_simultaneous_messages(void **state) { /* Both nodes receive messages from each other and update their * synchronization states. 
*/ - AMresultFree(AMreceiveSyncMessage(test_state->doc1, + AMfree(AMreceiveSyncMessage(test_state->doc1, test_state->sync_state1, msg2to1)); - AMresultFree(msg2to1_result); - AMresultFree(AMreceiveSyncMessage(test_state->doc2, + AMfree(msg2to1_result); + AMfree(AMreceiveSyncMessage(test_state->doc2, test_state->sync_state2, msg1to2)); - AMresultFree(msg1to2_result); + AMfree(msg1to2_result); /* Now both reply with their local changes that the other lacks * (standard warning that 1% of the time this will result in a "needs" @@ -407,35 +415,35 @@ static void test_converged_allow_simultaneous_messages(void **state) { assert_int_equal(AMchangesSize(&msg2to1_changes), 5); /* Both should now apply the changes. */ - AMresultFree(AMreceiveSyncMessage(test_state->doc1, + AMfree(AMreceiveSyncMessage(test_state->doc1, test_state->sync_state1, msg2to1)); - AMresultFree(msg2to1_result); + AMfree(msg2to1_result); AMresult* missing_deps_result = AMgetMissingDeps(test_state->doc1, NULL); AMchangeHashes missing_deps = AMresultValue(missing_deps_result, 0).change_hashes; assert_int_equal(AMchangeHashesSize(&missing_deps), 0); - AMresultFree(missing_deps_result); + AMfree(missing_deps_result); AMresult* map_value_result = AMmapGet(test_state->doc1, AM_ROOT, "x"); assert_int_equal(AMresultValue(map_value_result, 0).uint, 4); - AMresultFree(map_value_result); + AMfree(map_value_result); map_value_result = AMmapGet(test_state->doc1, AM_ROOT, "y"); assert_int_equal(AMresultValue(map_value_result, 0).uint, 4); - AMresultFree(map_value_result); + AMfree(map_value_result); - AMresultFree(AMreceiveSyncMessage(test_state->doc2, + AMfree(AMreceiveSyncMessage(test_state->doc2, test_state->sync_state2, msg1to2)); - AMresultFree(msg1to2_result); + AMfree(msg1to2_result); missing_deps_result = AMgetMissingDeps(test_state->doc2, NULL); missing_deps = AMresultValue(missing_deps_result, 0).change_hashes; assert_int_equal(AMchangeHashesSize(&missing_deps), 0); - AMresultFree(missing_deps_result); + 
AMfree(missing_deps_result); map_value_result = AMmapGet(test_state->doc2, AM_ROOT, "x"); assert_int_equal(AMresultValue(map_value_result, 0).uint, 4); - AMresultFree(map_value_result); + AMfree(map_value_result); map_value_result = AMmapGet(test_state->doc2, AM_ROOT, "y"); assert_int_equal(AMresultValue(map_value_result, 0).uint, 4); - AMresultFree(map_value_result); + AMfree(map_value_result); /* The response acknowledges that the changes were received and sends no * further changes. */ @@ -451,28 +459,28 @@ static void test_converged_allow_simultaneous_messages(void **state) { assert_int_equal(AMchangesSize(&msg2to1_changes), 0); /* After receiving acknowledgements their shared heads should be equal. */ - AMresultFree(AMreceiveSyncMessage(test_state->doc1, + AMfree(AMreceiveSyncMessage(test_state->doc1, test_state->sync_state1, msg2to1)); - AMresultFree(msg2to1_result); - AMresultFree(AMreceiveSyncMessage(test_state->doc2, + AMfree(msg2to1_result); + AMfree(AMreceiveSyncMessage(test_state->doc2, test_state->sync_state2, msg1to2)); - AMresultFree(msg1to2_result); + AMfree(msg1to2_result); /* They're synchronized so no more messages are required. */ msg1to2_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); assert_int_equal(AMresultValue(msg1to2_result, 0).tag, AM_VALUE_VOID); - AMresultFree(msg1to2_result); + AMfree(msg1to2_result); msg2to1_result = AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2); assert_int_equal(AMresultValue(msg2to1_result, 0).tag, AM_VALUE_VOID); - AMresultFree(msg2to1_result); + AMfree(msg2to1_result); /* If we make one more change and start synchronizing then its "last * sync" property should be updated. 
*/ - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 5)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 5)); AMcommit(test_state->doc1, NULL, &time); msg1to2_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); @@ -486,9 +494,9 @@ static void test_converged_allow_simultaneous_messages(void **state) { msg1to2_last_sync_next = AMchangeHashesNext(&msg1to2_last_sync, 1); assert_int_equal(msg1to2_last_sync_next.count, head2.count); assert_memory_equal(msg1to2_last_sync_next.src, head2.src, head2.count); - AMresultFree(heads_result1); - AMresultFree(heads_result2); - AMresultFree(msg1to2_result); + AMfree(heads1_result); + AMfree(heads2_result); + AMfree(msg1to2_result); } /** @@ -497,8 +505,8 @@ static void test_converged_allow_simultaneous_messages(void **state) { */ static void test_converged_assume_sent_changes_were_received(void **state) { TestState* test_state = *state; - AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); - AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMfree(AMsetActorHex(test_state->doc1, "01234567")); + AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); AMresult* items_result = AMmapPutObject(test_state->doc1, AM_ROOT, @@ -512,34 +520,34 @@ static void test_converged_assume_sent_changes_were_received(void **state) { test_state->sync_state1, test_state->sync_state2); - AMresultFree(AMlistPutStr(test_state->doc1, items, 0, true, "x")); + AMfree(AMlistPutStr(test_state->doc1, items, 0, true, "x")); AMcommit(test_state->doc1, NULL, &time); AMresult* message_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); AMsyncMessage const* message = AMresultValue(message_result, 0).sync_message; AMchanges message_changes = AMsyncMessageChanges(message); assert_int_equal(AMchangesSize(&message_changes), 1); - AMresultFree(message_result); + AMfree(message_result); - AMresultFree(AMlistPutStr(test_state->doc1, items, 1, true, "y")); + AMfree(AMlistPutStr(test_state->doc1, 
items, 1, true, "y")); AMcommit(test_state->doc1, NULL, &time); message_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); message = AMresultValue(message_result, 0).sync_message; message_changes = AMsyncMessageChanges(message); assert_int_equal(AMchangesSize(&message_changes), 1); - AMresultFree(message_result); + AMfree(message_result); - AMresultFree(AMlistPutStr(test_state->doc1, items, 2, true, "z")); + AMfree(AMlistPutStr(test_state->doc1, items, 2, true, "z")); AMcommit(test_state->doc1, NULL, &time); message_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); message = AMresultValue(message_result, 0).sync_message; message_changes = AMsyncMessageChanges(message); assert_int_equal(AMchangesSize(&message_changes), 1); - AMresultFree(message_result); + AMfree(message_result); - AMresultFree(items_result); + AMfree(items_result); } /** @@ -552,7 +560,7 @@ static void test_converged_works_regardless_of_who_initiates(void **state) { time_t const time = 0; for (size_t value = 0; value != 5; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } sync(test_state->doc1, @@ -562,7 +570,7 @@ static void test_converged_works_regardless_of_who_initiates(void **state) { /* Modify the first node further. */ for (size_t value = 5; value != 10; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } @@ -587,11 +595,11 @@ static void test_diverged_works_without_prior_sync_state(void **state) { /* Create two peers both with divergent commits. 
*/ TestState* test_state = *state; - AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); - AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMfree(AMsetActorHex(test_state->doc1, "01234567")); + AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); time_t const time = 0; for (size_t value = 0; value != 10; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } @@ -601,11 +609,11 @@ static void test_diverged_works_without_prior_sync_state(void **state) { test_state->sync_state2); for (size_t value = 10; value != 15; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } for (size_t value = 15; value != 18; ++value) { - AMresultFree(AMmapPutUint(test_state->doc2, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "x", value)); AMcommit(test_state->doc2, NULL, &time); } @@ -614,13 +622,13 @@ static void test_diverged_works_without_prior_sync_state(void **state) { test_state->doc2, test_state->sync_state1, test_state->sync_state2); - AMresult* heads_result1 = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; - AMresult* heads_result2 = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + AMresult* heads1_result = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMresult* heads2_result = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMresultFree(heads_result2); - AMresultFree(heads_result1); + AMfree(heads2_result); + AMfree(heads1_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); } @@ 
-637,11 +645,11 @@ static void test_diverged_works_with_prior_sync_state(void **state) { /* Create two peers both with divergent commits. */ TestState* test_state = *state; - AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); - AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMfree(AMsetActorHex(test_state->doc1, "01234567")); + AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); time_t const time = 0; for (size_t value = 0; value != 10; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } sync(test_state->doc1, @@ -650,35 +658,35 @@ static void test_diverged_works_with_prior_sync_state(void **state) { test_state->sync_state2); for (size_t value = 10; value != 15; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } for (size_t value = 15; value != 18; ++value) { - AMresultFree(AMmapPutUint(test_state->doc2, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "x", value)); AMcommit(test_state->doc2, NULL, &time); } AMresult* encoded_result = AMsyncStateEncode(test_state->sync_state1); AMbyteSpan encoded = AMresultValue(encoded_result, 0).bytes; - AMresult* sync_state_result1 = AMsyncStateDecode(encoded.src, encoded.count); - AMresultFree(encoded_result); - AMsyncState* sync_state1 = AMresultValue(sync_state_result1, 0).sync_state; + AMresult* sync_state1_result = AMsyncStateDecode(encoded.src, encoded.count); + AMfree(encoded_result); + AMsyncState* sync_state1 = AMresultValue(sync_state1_result, 0).sync_state; encoded_result = AMsyncStateEncode(test_state->sync_state2); encoded = AMresultValue(encoded_result, 0).bytes; - AMresult* sync_state_result2 = AMsyncStateDecode(encoded.src, encoded.count); - AMresultFree(encoded_result); - AMsyncState* sync_state2 = 
AMresultValue(sync_state_result2, 0).sync_state; + AMresult* sync_state2_result = AMsyncStateDecode(encoded.src, encoded.count); + AMfree(encoded_result); + AMsyncState* sync_state2 = AMresultValue(sync_state2_result, 0).sync_state; assert_false(AMequal(test_state->doc1, test_state->doc2)); sync(test_state->doc1, test_state->doc2, sync_state1, sync_state2); - AMresultFree(sync_state_result2); - AMresultFree(sync_state_result1); - AMresult* heads_result1 = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; - AMresult* heads_result2 = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + AMfree(sync_state2_result); + AMfree(sync_state1_result); + AMresult* heads1_result = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMresult* heads2_result = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMresultFree(heads_result2); - AMresultFree(heads_result1); + AMfree(heads2_result); + AMfree(heads1_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); } @@ -688,12 +696,12 @@ static void test_diverged_works_with_prior_sync_state(void **state) { */ static void test_diverged_ensure_not_empty_after_sync(void **state) { TestState* test_state = *state; - AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); - AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMfree(AMsetActorHex(test_state->doc1, "01234567")); + AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); time_t const time = 0; for (size_t value = 0; value != 3; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } sync(test_state->doc1, @@ -701,13 +709,13 @@ static void 
test_diverged_ensure_not_empty_after_sync(void **state) { test_state->sync_state1, test_state->sync_state2); - AMresult* heads_result1 = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; + AMresult* heads1_result = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; AMchangeHashes shared_heads1 = AMsyncStateSharedHeads(test_state->sync_state1); assert_int_equal(AMchangeHashesCmp(&shared_heads1, &heads1), 0); AMchangeHashes shared_heads2 = AMsyncStateSharedHeads(test_state->sync_state2); assert_int_equal(AMchangeHashesCmp(&shared_heads2, &heads1), 0); - AMresultFree(heads_result1); + AMfree(heads1_result); } /** @@ -723,13 +731,13 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { * We want to successfully sync (n1) with (r), even though (n1) believes * it's talking to (n2). */ TestState* test_state = *state; - AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); - AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMfree(AMsetActorHex(test_state->doc1, "01234567")); + AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); /* n1 makes three changes which we synchronize to n2. */ time_t const time = 0; for (size_t value = 0; value != 3; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } sync(test_state->doc1, @@ -738,15 +746,16 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { test_state->sync_state2); /* Save a copy of n2 as "r" to simulate recovering from a crash. 
*/ - AMdoc* r = AMdup(test_state->doc2); + AMresult* r_result = AMdup(test_state->doc2); + AMdoc* r = AMresultValue(r_result, 0).doc; AMresult* encoded_result = AMsyncStateEncode(test_state->sync_state2); AMbyteSpan encoded = AMresultValue(encoded_result, 0).bytes; AMresult* sync_state_resultr = AMsyncStateDecode(encoded.src, encoded.count); - AMresultFree(encoded_result); + AMfree(encoded_result); AMsyncState* sync_stater = AMresultValue(sync_state_resultr, 0).sync_state; /* Synchronize another few commits. */ for (size_t value = 3; value != 6; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } sync(test_state->doc1, @@ -754,49 +763,49 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { test_state->sync_state1, test_state->sync_state2); /* Everyone should be on the same page here. */ - AMresult* heads_result1 = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; - AMresult* heads_result2 = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + AMresult* heads1_result = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMresult* heads2_result = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMresultFree(heads_result2); - AMresultFree(heads_result1); + AMfree(heads2_result); + AMfree(heads1_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); /* Now make a few more changes and then attempt to synchronize the * fully-up-to-date n1 with with the confused r. 
*/ for (size_t value = 6; value != 9; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } - heads_result1 = AMgetHeads(test_state->doc1); - heads1 = AMresultValue(heads_result1, 0).change_hashes; + heads1_result = AMgetHeads(test_state->doc1); + heads1 = AMresultValue(heads1_result, 0).change_hashes; AMresult* heads_resultr = AMgetHeads(r); AMchangeHashes headsr = AMresultValue(heads_resultr, 0).change_hashes; assert_int_not_equal(AMchangeHashesCmp(&heads1, &headsr), 0); - AMresultFree(heads_resultr); - AMresultFree(heads_result1); + AMfree(heads_resultr); + AMfree(heads1_result); assert_false(AMequal(test_state->doc1, r)); AMresult* map_value_result = AMmapGet(test_state->doc1, AM_ROOT, "x"); assert_int_equal(AMresultValue(map_value_result, 0).uint, 8); - AMresultFree(map_value_result); + AMfree(map_value_result); map_value_result = AMmapGet(r, AM_ROOT, "x"); assert_int_equal(AMresultValue(map_value_result, 0).uint, 2); - AMresultFree(map_value_result); + AMfree(map_value_result); sync(test_state->doc1, r, test_state->sync_state1, sync_stater); - AMresultFree(sync_state_resultr); - heads_result1 = AMgetHeads(test_state->doc1); - heads1 = AMresultValue(heads_result1, 0).change_hashes; + AMfree(sync_state_resultr); + heads1_result = AMgetHeads(test_state->doc1); + heads1 = AMresultValue(heads1_result, 0).change_hashes; heads_resultr = AMgetHeads(r); headsr = AMresultValue(heads_resultr, 0).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &headsr), 0); - AMresultFree(heads_resultr); - AMresultFree(heads_result1); + AMfree(heads_resultr); + AMfree(heads1_result); assert_true(AMequal(test_state->doc1, r)); - AMfree(r); + AMfree(r_result); } /** @@ -805,13 +814,13 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { */ static void test_diverged_resync_after_data_loss_without_disconnection(void 
**state) { TestState* test_state = *state; - AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); - AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMfree(AMsetActorHex(test_state->doc1, "01234567")); + AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); /* n1 makes three changes which we synchronize to n2. */ time_t const time = 0; for (size_t value = 0; value != 3; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } sync(test_state->doc1, @@ -819,36 +828,38 @@ static void test_diverged_resync_after_data_loss_without_disconnection(void **st test_state->sync_state1, test_state->sync_state2); - AMresult* heads_result1 = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; - AMresult* heads_result2 = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + AMresult* heads1_result = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMresult* heads2_result = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMresultFree(heads_result2); - AMresultFree(heads_result1); + AMfree(heads2_result); + AMfree(heads1_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); - AMdoc* doc2_after_data_loss = AMcreate(); - AMresultFree(AMsetActorHex(doc2_after_data_loss, "89abcdef")); + AMresult* doc2_after_data_loss_result = AMcreate(); + AMdoc* doc2_after_data_loss = AMresultValue(doc2_after_data_loss_result, 0).doc; + AMfree(AMsetActorHex(doc2_after_data_loss, "89abcdef")); /* "n2" now has no data, but n1 still thinks it does. Note we don't do * decodeSyncState(encodeSyncState(s1)) in order to simulate data loss * without disconnecting. 
*/ - AMsyncState* sync_state2_after_data_loss = AMsyncStateInit(); + AMresult* sync_state2_after_data_loss_result = AMsyncStateInit(); + AMsyncState* sync_state2_after_data_loss = AMresultValue(sync_state2_after_data_loss_result, 0).sync_state; sync(test_state->doc1, doc2_after_data_loss, test_state->sync_state1, sync_state2_after_data_loss); - heads_result1 = AMgetHeads(test_state->doc1); - heads1 = AMresultValue(heads_result1, 0).change_hashes; - heads_result2 = AMgetHeads(doc2_after_data_loss); - heads2 = AMresultValue(heads_result2, 0).change_hashes; + heads1_result = AMgetHeads(test_state->doc1); + heads1 = AMresultValue(heads1_result, 0).change_hashes; + heads2_result = AMgetHeads(doc2_after_data_loss); + heads2 = AMresultValue(heads2_result, 0).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMresultFree(heads_result2); - AMresultFree(heads_result1); + AMfree(heads2_result); + AMfree(heads1_result); assert_true(AMequal(test_state->doc1, doc2_after_data_loss)); - AMsyncStateFree(sync_state2_after_data_loss); - AMfree(doc2_after_data_loss); + AMfree(sync_state2_after_data_loss_result); + AMfree(doc2_after_data_loss_result); } /** @@ -857,56 +868,59 @@ static void test_diverged_resync_after_data_loss_without_disconnection(void **st */ static void test_diverged_handles_concurrent_changes(void **state) { TestState* test_state = *state; - AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); - AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); - AMdoc* doc3 = AMcreate(); - AMresultFree(AMsetActorHex(doc3, "fedcba98")); + AMfree(AMsetActorHex(test_state->doc1, "01234567")); + AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMresult* doc3_result = AMcreate(); + AMdoc* doc3 = AMresultValue(doc3_result, 0).doc; + AMfree(AMsetActorHex(doc3, "fedcba98")); AMsyncState* sync_state12 = test_state->sync_state1; AMsyncState* sync_state21 = test_state->sync_state2; - AMsyncState* sync_state23 = AMsyncStateInit(); - AMsyncState* 
sync_state32 = AMsyncStateInit(); + AMresult* sync_state23_result = AMsyncStateInit(); + AMsyncState* sync_state23 = AMresultValue(sync_state23_result, 0).sync_state; + AMresult* sync_state32_result = AMsyncStateInit(); + AMsyncState* sync_state32 = AMresultValue(sync_state32_result, 0).sync_state; /* Change 1 is known to all three nodes. */ time_t const time = 0; - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 1)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 1)); AMcommit(test_state->doc1, NULL, &time); sync(test_state->doc1, test_state->doc2, sync_state12, sync_state21); sync(test_state->doc2, doc3, sync_state23, sync_state32); /* Change 2 is known to n1 and n2. */ - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 2)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 2)); AMcommit(test_state->doc1, NULL, &time); sync(test_state->doc1, test_state->doc2, sync_state12, sync_state21); /* Each of the three nodes makes one change (changes 3, 4, 5). */ - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 3)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 3)); AMcommit(test_state->doc1, NULL, &time); - AMresultFree(AMmapPutUint(test_state->doc2, AM_ROOT, "x", 4)); + AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "x", 4)); AMcommit(test_state->doc2, NULL, &time); - AMresultFree(AMmapPutUint(doc3, AM_ROOT, "x", 5)); + AMfree(AMmapPutUint(doc3, AM_ROOT, "x", 5)); AMcommit(doc3, NULL, &time); /* Apply n3's latest change to n2. */ AMresult* changes_result = AMgetLastLocalChange(doc3); AMchanges changes = AMresultValue(changes_result, 0).changes; - AMresultFree(AMapplyChanges(test_state->doc2, &changes)); - AMresultFree(changes_result); + AMfree(AMapplyChanges(test_state->doc2, &changes)); + AMfree(changes_result); /* Now sync n1 and n2. n3's change is concurrent to n1 and n2's last sync * heads. 
*/ sync(test_state->doc1, test_state->doc2, sync_state12, sync_state21); - AMresult* heads_result1 = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; - AMresult* heads_result2 = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + AMresult* heads1_result = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMresult* heads2_result = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMresultFree(heads_result2); - AMresultFree(heads_result1); + AMfree(heads2_result); + AMfree(heads1_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); - AMsyncStateFree(sync_state32); - AMsyncStateFree(sync_state23); - AMfree(doc3); + AMfree(sync_state32_result); + AMfree(sync_state23_result); + AMfree(doc3_result); } /** @@ -915,19 +929,20 @@ static void test_diverged_handles_concurrent_changes(void **state) { */ static void test_diverged_handles_histories_of_branching_and_merging(void **state) { TestState* test_state = *state; - AMresultFree(AMsetActorHex(test_state->doc1, "01234567")); - AMresultFree(AMsetActorHex(test_state->doc2, "89abcdef")); - AMdoc* doc3 = AMcreate(); - AMresultFree(AMsetActorHex(doc3, "fedcba98")); + AMfree(AMsetActorHex(test_state->doc1, "01234567")); + AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMresult* doc3_result = AMcreate(); + AMdoc* doc3 = AMresultValue(doc3_result, 0).doc; + AMfree(AMsetActorHex(doc3, "fedcba98")); time_t const time = 0; - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 0)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 0)); AMcommit(test_state->doc1, NULL, &time); AMresult* changes_result = AMgetLastLocalChange(test_state->doc1); AMchanges changes = AMresultValue(changes_result, 0).changes; - 
AMresultFree(AMapplyChanges(test_state->doc2, &changes)); - AMresultFree(AMapplyChanges(doc3, &changes)); - AMresultFree(changes_result); - AMresultFree(AMmapPutUint(doc3, AM_ROOT, "x", 1)); + AMfree(AMapplyChanges(test_state->doc2, &changes)); + AMfree(AMapplyChanges(doc3, &changes)); + AMfree(changes_result); + AMfree(AMmapPutUint(doc3, AM_ROOT, "x", 1)); AMcommit(doc3, NULL, &time); /* - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 @@ -938,18 +953,18 @@ static void test_diverged_handles_histories_of_branching_and_merging(void **stat * ---------------------------------------------- n3c1 <----- */ for (size_t value = 1; value != 20; ++value) { - AMresultFree(AMmapPutUint(test_state->doc1, AM_ROOT, "n1", value)); + AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "n1", value)); AMcommit(test_state->doc1, NULL, &time); - AMresultFree(AMmapPutUint(test_state->doc2, AM_ROOT, "n2", value)); + AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "n2", value)); AMcommit(test_state->doc2, NULL, &time); - AMresult* changes_result1 = AMgetLastLocalChange(test_state->doc1); - AMchanges changes1 = AMresultValue(changes_result1, 0).changes; - AMresult* changes_result2 = AMgetLastLocalChange(test_state->doc2); - AMchanges changes2 = AMresultValue(changes_result2, 0).changes; - AMresultFree(AMapplyChanges(test_state->doc1, &changes2)); - AMresultFree(changes_result2); - AMresultFree(AMapplyChanges(test_state->doc2, &changes1)); - AMresultFree(changes_result1); + AMresult* changes1_result = AMgetLastLocalChange(test_state->doc1); + AMchanges changes1 = AMresultValue(changes1_result, 0).changes; + AMresult* changes2_result = AMgetLastLocalChange(test_state->doc2); + AMchanges changes2 = AMresultValue(changes2_result, 0).changes; + AMfree(AMapplyChanges(test_state->doc1, &changes2)); + AMfree(changes2_result); + AMfree(AMapplyChanges(test_state->doc2, &changes1)); + AMfree(changes1_result); } sync(test_state->doc1, @@ -959,29 +974,29 @@ static void 
test_diverged_handles_histories_of_branching_and_merging(void **stat /* Having n3's last change concurrent to the last sync heads forces us into * the slower code path. */ - AMresult* changes_result3 = AMgetLastLocalChange(doc3); - AMchanges changes3 = AMresultValue(changes_result3, 0).changes; - AMresultFree(AMapplyChanges(test_state->doc2, &changes3)); - AMresultFree(changes_result3); - AMresultFree(AMmapPutStr(test_state->doc1, AM_ROOT, "n1", "final")); + AMresult* changes3_result = AMgetLastLocalChange(doc3); + AMchanges changes3 = AMresultValue(changes3_result, 0).changes; + AMfree(AMapplyChanges(test_state->doc2, &changes3)); + AMfree(changes3_result); + AMfree(AMmapPutStr(test_state->doc1, AM_ROOT, "n1", "final")); AMcommit(test_state->doc1, NULL, &time); - AMresultFree(AMmapPutStr(test_state->doc2, AM_ROOT, "n2", "final")); + AMfree(AMmapPutStr(test_state->doc2, AM_ROOT, "n2", "final")); AMcommit(test_state->doc2, NULL, &time); sync(test_state->doc1, test_state->doc2, test_state->sync_state1, test_state->sync_state2); - AMresult* heads_result1 = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads_result1, 0).change_hashes; - AMresult* heads_result2 = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads_result2, 0).change_hashes; + AMresult* heads1_result = AMgetHeads(test_state->doc1); + AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMresult* heads2_result = AMgetHeads(test_state->doc2); + AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMresultFree(heads_result2); - AMresultFree(heads_result1); + AMfree(heads2_result); + AMfree(heads1_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); - AMfree(doc3); + AMfree(doc3_result); } int run_sync_tests(void) { From afddf7d508712937110e6595f589ed59a8dd2442 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Wed, 1 Jun 2022 23:34:28 -0700 
Subject: [PATCH 432/730] Fix "fmt" script violations. Fix "lint" script violations. --- automerge-c/src/result.rs | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 0624a692..af061d83 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -107,7 +107,7 @@ pub enum AMresult { ActorId(am::ActorId), ChangeHashes(Vec), Changes(Vec, BTreeMap), - Doc(AMdoc), + Doc(Box), Error(CString), ObjId(AMobjId), Scalars(Vec>, Option), @@ -124,7 +124,7 @@ impl AMresult { impl From for AMresult { fn from(auto_commit: am::AutoCommit) -> Self { - AMresult::Doc(AMdoc::new(auto_commit)) + AMresult::Doc(Box::new(AMdoc::new(auto_commit))) } } @@ -178,7 +178,7 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(auto_commit) => AMresult::Doc(AMdoc::new(auto_commit)), + Ok(auto_commit) => AMresult::Doc(Box::new(AMdoc::new(auto_commit))), Err(e) => AMresult::err(&e.to_string()), } } @@ -376,7 +376,11 @@ pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { if let Some(result) = result.as_mut() { match result { AMresult::Error(_) | AMresult::Void => 0, - AMresult::ActorId(_) | AMresult::Doc(_) | AMresult::ObjId(_) | AMresult::SyncMessage(_) | AMresult::SyncState(_) => 1, + AMresult::ActorId(_) + | AMresult::Doc(_) + | AMresult::ObjId(_) + | AMresult::SyncMessage(_) + | AMresult::SyncState(_) => 1, AMresult::ChangeHashes(change_hashes) => change_hashes.len(), AMresult::Changes(changes, _) => changes.len(), AMresult::Scalars(vec, _) => vec.len(), @@ -435,7 +439,7 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult, index: usize) } AMresult::Doc(doc) => { if index == 0 { - value = AMvalue::Doc(doc) + value = AMvalue::Doc(&mut **doc) } } AMresult::Error(_) => {} From b38be0750be1ed45c5c3dc7b6e945bdff300c5e9 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 4 Jun 2022 18:51:57 -0700 Subject: 
[PATCH 433/730] Obfuscated most implementation details of the `AMChangeHashes` struct. Added `AMchangeHashesReverse()`. --- automerge-c/src/change_hashes.rs | 123 +++++++++++++++++++++++-------- 1 file changed, 94 insertions(+), 29 deletions(-) diff --git a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs index d1ea71ff..b5daf5d8 100644 --- a/automerge-c/src/change_hashes.rs +++ b/automerge-c/src/change_hashes.rs @@ -1,81 +1,125 @@ use automerge as am; use std::cmp::Ordering; use std::ffi::c_void; +use std::mem::size_of; use crate::byte_span::AMbyteSpan; +#[repr(C)] +struct Detail { + len: usize, + offset: isize, +} + +/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call +/// (https://github.com/eqrion/cbindgen/issues/252) but it will +/// propagate the name of a constant initialized from it so if the +/// constant's name is a symbolic representation of the value it can be +/// converted into a number by post-processing the header it generated. +pub const USIZE_USIZE_: usize = size_of::(); + +impl Detail { + fn new(len: usize, offset: isize) -> Self { + Self { len, offset } + } +} + +impl From for [u8; USIZE_USIZE_] { + fn from(detail: Detail) -> Self { + unsafe { + std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_) + .try_into() + .unwrap() + } + } +} + /// \struct AMchangeHashes /// \brief A bidirectional iterator over a sequence of change hashes. #[repr(C)] pub struct AMchangeHashes { - /// The length of the sequence. - len: usize, - /// The offset from \p ptr, \p +offset -> forward direction, - /// \p -offset -> reverse direction. - offset: isize, /// A pointer to the first change hash or `NULL`. ptr: *const c_void, + /// Reserved. 
+ detail: [u8; USIZE_USIZE_], } impl AMchangeHashes { pub fn new(change_hashes: &[am::ChangeHash]) -> Self { Self { - len: change_hashes.len(), - offset: 0, ptr: change_hashes.as_ptr() as *const c_void, + detail: Detail::new(change_hashes.len(), 0).into(), } } pub fn advance(&mut self, n: isize) { - let len = self.len as isize; - if n != 0 && self.offset >= -len && self.offset < len { - // It's being advanced and it's hasn't stopped. - self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + let len = detail.len as isize; + if n != 0 && detail.offset >= -len && detail.offset < len { + // It's being advanced and it hasn't stopped. + detail.offset = std::cmp::max(-(len + 1), std::cmp::min(detail.offset + n, len)); }; } + pub fn len(&self) -> usize { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + detail.len + } + pub fn next(&mut self, n: isize) -> Option<&am::ChangeHash> { - let len = self.len as isize; - if self.offset < -len || self.offset == len { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + let len = detail.len as isize; + if detail.offset < -len || detail.offset == len { // It's stopped. 
None } else { - let slice = - unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) }; - let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; - let element = Some(&slice[index]); + let slice: &[am::ChangeHash] = unsafe { + std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, detail.len) + }; + let index = (detail.offset + if detail.offset < 0 { len } else { 0 }) as usize; + let value = &slice[index]; self.advance(n); - element + Some(value) } } pub fn prev(&mut self, n: isize) -> Option<&am::ChangeHash> { self.advance(n); - let len = self.len as isize; - if self.offset < -len || self.offset == len { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + let len = detail.len as isize; + if detail.offset < -len || detail.offset == len { // It's stopped. None } else { - let slice = - unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) }; - let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; + let slice: &[am::ChangeHash] = unsafe { + std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, detail.len) + }; + let index = (detail.offset + if detail.offset < 0 { len } else { 0 }) as usize; Some(&slice[index]) } } + + pub fn reverse(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + ptr: self.ptr, + detail: Detail::new(detail.len, -(detail.offset + 1)).into(), + } + } } impl AsRef<[am::ChangeHash]> for AMchangeHashes { fn as_ref(&self) -> &[am::ChangeHash] { - unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) } + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, detail.len) } } } impl Default for AMchangeHashes { fn default() -> Self { Self { - len: 0, - offset: 0, ptr: std::ptr::null(), + detail: [0; USIZE_USIZE_], } } } @@ -170,8 +214,8 @@ pub unsafe extern "C" fn 
AMchangeHashesNext( /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. /// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum /// number of positions to advance/rewind. -/// \return An `AMbyteSpan` struct that's null when \p change_hashes is -/// presently advanced/rewound past its forward/backward limit. +/// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes +/// is presently advanced/rewound past its forward/backward limit. /// \pre \p change_hashes must be a valid address. /// \internal /// @@ -204,8 +248,29 @@ pub unsafe extern "C" fn AMchangeHashesPrev( #[no_mangle] pub unsafe extern "C" fn AMchangeHashesSize(change_hashes: *const AMchangeHashes) -> usize { if let Some(change_hashes) = change_hashes.as_ref() { - change_hashes.len + change_hashes.len() } else { 0 } } + +/// \memberof AMchangeHashes +/// \brief Creates a reversed copy of a change hashes iterator. +/// +/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. +/// \return An `AMchangeHashes` struct +/// \pre \p change_hashes must be a valid address. +/// \internal +/// +/// #Safety +/// change_hashes must be a pointer to a valid AMchangeHashes +#[no_mangle] +pub unsafe extern "C" fn AMchangeHashesReverse( + change_hashes: *const AMchangeHashes, +) -> AMchangeHashes { + if let Some(change_hashes) = change_hashes.as_ref() { + change_hashes.reverse() + } else { + AMchangeHashes::default() + } +} From 1990f29c607d599fc09f1dc22bf0181899d3f3d7 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 4 Jun 2022 19:13:22 -0700 Subject: [PATCH 434/730] Obfuscated most implementation details of the `AMChanges` struct. Added `AMchangesReverse()`. 
--- automerge-c/src/changes.rs | 119 ++++++++++++++++++++++++++++--------- 1 file changed, 91 insertions(+), 28 deletions(-) diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs index e7b5e9ff..bdc5e9bb 100644 --- a/automerge-c/src/changes.rs +++ b/automerge-c/src/changes.rs @@ -1,53 +1,88 @@ use automerge as am; use std::collections::BTreeMap; use std::ffi::c_void; +use std::mem::size_of; use crate::change::AMchange; +#[repr(C)] +struct Detail { + len: usize, + offset: isize, + storage: *mut c_void, +} + +/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call +/// (https://github.com/eqrion/cbindgen/issues/252) but it will +/// propagate the name of a constant initialized from it so if the +/// constant's name is a symbolic representation of the value it can be +/// converted into a number by post-processing the header it generated. +pub const USIZE_USIZE_USIZE_: usize = size_of::(); + +impl Detail { + fn new(len: usize, offset: isize, storage: &mut BTreeMap) -> Self { + let storage: *mut BTreeMap = storage; + Self { + len, + offset, + storage: storage as *mut c_void, + } + } +} + +impl From for [u8; USIZE_USIZE_USIZE_] { + fn from(detail: Detail) -> Self { + unsafe { + std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) + .try_into() + .unwrap() + } + } +} + /// \struct AMchanges /// \brief A bidirectional iterator over a sequence of changes. #[repr(C)] pub struct AMchanges { - /// The length of the sequence. - len: usize, - /// The offset from \p ptr, \p +offset -> forward direction, - /// \p -offset -> reverse direction. - offset: isize, /// A pointer to the first change or `NULL`. ptr: *const c_void, /// Reserved. 
- storage: *mut c_void, + detail: [u8; USIZE_USIZE_USIZE_], } impl AMchanges { pub fn new(changes: &[am::Change], storage: &mut BTreeMap) -> Self { - let storage: *mut BTreeMap = storage; Self { - len: changes.len(), - offset: 0, ptr: changes.as_ptr() as *const c_void, - storage: storage as *mut c_void, + detail: Detail::new(changes.len(), 0, storage).into(), } } pub fn advance(&mut self, n: isize) { - let len = self.len as isize; - if n != 0 && self.offset >= -len && self.offset < len { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + let len = detail.len as isize; + if n != 0 && detail.offset >= -len && detail.offset < len { // It's being advanced and it hasn't stopped. - self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); + detail.offset = std::cmp::max(-(len + 1), std::cmp::min(detail.offset + n, len)); }; } + pub fn len(&self) -> usize { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + detail.len + } + pub fn next(&mut self, n: isize) -> Option<*const AMchange> { - let len = self.len as isize; - if self.offset < -len || self.offset == len { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + let len = detail.len as isize; + if detail.offset < -len || detail.offset == len { // It's stopped. 
None } else { let slice: &mut [am::Change] = - unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, self.len) }; - let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; - let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, detail.len) }; + let index = (detail.offset + if detail.offset < 0 { len } else { 0 }) as usize; + let storage = unsafe { &mut *(detail.storage as *mut BTreeMap) }; let value = match storage.get_mut(&index) { Some(value) => value, None => { @@ -62,15 +97,16 @@ impl AMchanges { pub fn prev(&mut self, n: isize) -> Option<*const AMchange> { self.advance(n); - let len = self.len as isize; - if self.offset < -len || self.offset == len { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + let len = detail.len as isize; + if detail.offset < -len || detail.offset == len { // It's stopped. None } else { let slice: &mut [am::Change] = - unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, self.len) }; - let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; - let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, detail.len) }; + let index = (detail.offset + if detail.offset < 0 { len } else { 0 }) as usize; + let storage = unsafe { &mut *(detail.storage as *mut BTreeMap) }; Some(match storage.get_mut(&index) { Some(value) => value, None => { @@ -80,21 +116,29 @@ impl AMchanges { }) } } + + pub fn reverse(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + let storage = unsafe { &mut *(detail.storage as *mut BTreeMap) }; + Self { + ptr: self.ptr, + detail: Detail::new(detail.len, -(detail.offset + 1), storage).into(), + } + } } impl AsRef<[am::Change]> for AMchanges { fn as_ref(&self) -> &[am::Change] { - unsafe { std::slice::from_raw_parts(self.ptr as *const 
am::Change, self.len) } + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + unsafe { std::slice::from_raw_parts(self.ptr as *const am::Change, detail.len) } } } impl Default for AMchanges { fn default() -> Self { Self { - len: 0, - offset: 0, ptr: std::ptr::null(), - storage: std::ptr::null_mut(), + detail: [0; USIZE_USIZE_USIZE_], } } } @@ -205,8 +249,27 @@ pub unsafe extern "C" fn AMchangesPrev(changes: *mut AMchanges, n: isize) -> *co #[no_mangle] pub unsafe extern "C" fn AMchangesSize(changes: *const AMchanges) -> usize { if let Some(changes) = changes.as_ref() { - changes.len + changes.len() } else { 0 } } + +/// \memberof AMchanges +/// \brief Creates a reversed copy of a changes iterator. +/// +/// \param[in] changes A pointer to an `AMchanges` struct. +/// \return An `AMchanges` struct +/// \pre \p changes must be a valid address. +/// \internal +/// +/// #Safety +/// changes must be a pointer to a valid AMchanges +#[no_mangle] +pub unsafe extern "C" fn AMchangesReverse(changes: *const AMchanges) -> AMchanges { + if let Some(changes) = changes.as_ref() { + changes.reverse() + } else { + AMchanges::default() + } +} From 92b121610122a1759ca80883508398ca3d824fe1 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 4 Jun 2022 19:14:31 -0700 Subject: [PATCH 435/730] Obfuscated most implementation details of the `AMsyncHaves` struct. Added `AMsyncHavesReverse()`. 
--- automerge-c/src/sync/haves.rs | 131 +++++++++++++++++++++++++--------- 1 file changed, 98 insertions(+), 33 deletions(-) diff --git a/automerge-c/src/sync/haves.rs b/automerge-c/src/sync/haves.rs index 5cfef5ec..619f92a5 100644 --- a/automerge-c/src/sync/haves.rs +++ b/automerge-c/src/sync/haves.rs @@ -1,53 +1,89 @@ use automerge as am; use std::collections::BTreeMap; use std::ffi::c_void; +use std::mem::size_of; use crate::sync::have::AMsyncHave; +#[repr(C)] +struct Detail { + len: usize, + offset: isize, + storage: *mut c_void, +} + +/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call +/// (https://github.com/eqrion/cbindgen/issues/252) but it will +/// propagate the name of a constant initialized from it so if the +/// constant's name is a symbolic representation of the value it can be +/// converted into a number by post-processing the header it generated. +pub const USIZE_USIZE_USIZE_: usize = size_of::(); + +impl Detail { + fn new(len: usize, offset: isize, storage: &mut BTreeMap) -> Self { + let storage: *mut BTreeMap = storage; + Self { + len, + offset, + storage: storage as *mut c_void, + } + } +} + +impl From for [u8; USIZE_USIZE_USIZE_] { + fn from(detail: Detail) -> Self { + unsafe { + std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) + .try_into() + .unwrap() + } + } +} + /// \struct AMsyncHaves /// \brief A bidirectional iterator over a sequence of synchronization haves. #[repr(C)] pub struct AMsyncHaves { - /// The length of the sequence. - len: usize, - /// The offset from \p ptr, \p +offset -> forward direction, - /// \p -offset -> reverse direction. - offset: isize, /// A pointer to the first synchronization have or `NULL`. ptr: *const c_void, /// Reserved. 
- storage: *mut c_void, + detail: [u8; USIZE_USIZE_USIZE_], } impl AMsyncHaves { - pub fn new(sync_haves: &[am::sync::Have], storage: &mut BTreeMap) -> Self { - let storage: *mut BTreeMap = storage; + pub fn new(haves: &[am::sync::Have], storage: &mut BTreeMap) -> Self { Self { - len: sync_haves.len(), - offset: 0, - ptr: sync_haves.as_ptr() as *const c_void, - storage: storage as *mut c_void, + ptr: haves.as_ptr() as *const c_void, + detail: Detail::new(haves.len(), 0, storage).into(), } } pub fn advance(&mut self, n: isize) { - let len = self.len as isize; - if n != 0 && self.offset >= -len && self.offset < len { - // It's being advanced and its hasn't stopped. - self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + let len = detail.len as isize; + if n != 0 && detail.offset >= -len && detail.offset < len { + // It's being advanced and it hasn't stopped. + detail.offset = std::cmp::max(-(len + 1), std::cmp::min(detail.offset + n, len)); }; } + pub fn len(&self) -> usize { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + detail.len + } + pub fn next(&mut self, n: isize) -> Option<*const AMsyncHave> { - let len = self.len as isize; - if self.offset < -len || self.offset == len { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + let len = detail.len as isize; + if detail.offset < -len || detail.offset == len { // It's stopped. 
None } else { - let slice: &[am::sync::Have] = - unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, self.len) }; - let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; - let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + let slice: &[am::sync::Have] = unsafe { + std::slice::from_raw_parts(self.ptr as *const am::sync::Have, detail.len) + }; + let index = (detail.offset + if detail.offset < 0 { len } else { 0 }) as usize; + let storage = unsafe { &mut *(detail.storage as *mut BTreeMap) }; let value = match storage.get_mut(&index) { Some(value) => value, None => { @@ -62,15 +98,17 @@ impl AMsyncHaves { pub fn prev(&mut self, n: isize) -> Option<*const AMsyncHave> { self.advance(n); - let len = self.len as isize; - if self.offset < -len || self.offset == len { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + let len = detail.len as isize; + if detail.offset < -len || detail.offset == len { // It's stopped. 
None } else { - let slice: &[am::sync::Have] = - unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, self.len) }; - let index = (self.offset + if self.offset < 0 { len } else { 0 }) as usize; - let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + let slice: &[am::sync::Have] = unsafe { + std::slice::from_raw_parts(self.ptr as *const am::sync::Have, detail.len) + }; + let index = (detail.offset + if detail.offset < 0 { len } else { 0 }) as usize; + let storage = unsafe { &mut *(detail.storage as *mut BTreeMap) }; Some(match storage.get_mut(&index) { Some(value) => value, None => { @@ -80,21 +118,29 @@ impl AMsyncHaves { }) } } + + pub fn reverse(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + let storage = unsafe { &mut *(detail.storage as *mut BTreeMap) }; + Self { + ptr: self.ptr, + detail: Detail::new(detail.len, -(detail.offset + 1), storage).into(), + } + } } impl AsRef<[am::sync::Have]> for AMsyncHaves { fn as_ref(&self) -> &[am::sync::Have] { - unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, self.len) } + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, detail.len) } } } impl Default for AMsyncHaves { fn default() -> Self { Self { - len: 0, - offset: 0, ptr: std::ptr::null(), - storage: std::ptr::null_mut(), + detail: [0; USIZE_USIZE_USIZE_], } } } @@ -215,8 +261,27 @@ pub unsafe extern "C" fn AMsyncHavesPrev( #[no_mangle] pub unsafe extern "C" fn AMsyncHavesSize(sync_haves: *const AMsyncHaves) -> usize { if let Some(sync_haves) = sync_haves.as_ref() { - sync_haves.len + sync_haves.len() } else { 0 } } + +/// \memberof AMsyncHaves +/// \brief Creates a reversed copy of a synchronization haves iterator. +/// +/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. +/// \return An `AMsyncHaves` struct +/// \pre \p sync_haves must be a valid address. 
+/// \internal +/// +/// #Safety +/// sync_haves must be a pointer to a valid AMsyncHaves +#[no_mangle] +pub unsafe extern "C" fn AMsyncHavesReverse(sync_haves: *const AMsyncHaves) -> AMsyncHaves { + if let Some(sync_haves) = sync_haves.as_ref() { + sync_haves.reverse() + } else { + AMsyncHaves::default() + } +} From d4d1b64cf48da33b53a9e603c00fdc4d806e149a Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 4 Jun 2022 19:18:47 -0700 Subject: [PATCH 436/730] Compensate for cbindgen issue #252. --- automerge-c/src/CMakeLists.txt | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt index 4ecd9f1c..2e6a5658 100644 --- a/automerge-c/src/CMakeLists.txt +++ b/automerge-c/src/CMakeLists.txt @@ -39,7 +39,8 @@ if(WIN32) endif() add_custom_command( - OUTPUT ${CARGO_OUTPUT} + OUTPUT + ${CARGO_OUTPUT} COMMAND # \note cbindgen won't regenerate its output header file after it's # been removed but it will after its configuration file has been @@ -80,7 +81,8 @@ add_custom_target( DEPENDS ${CARGO_OUTPUT} ) -# \note cbindgen's naming behavior isn't fully configurable. +# \note cbindgen's naming behavior isn't fully configurable and it ignores +# `const fn` calls (https://github.com/eqrion/cbindgen/issues/252). add_custom_command( TARGET ${LIBRARY_NAME}_artifacts POST_BUILD @@ -93,10 +95,13 @@ add_custom_command( COMMAND # Compensate for cbindgen's translation of consecutive uppercase letters to "ScreamingSnakeCase". ${CMAKE_COMMAND} -DMATCH_REGEX=A_M\([^_]+\)_ -DREPLACE_EXPR=AM_\\1_ -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h + COMMAND + # Compensate for cbindgen ignoring `std:mem::size_of()` calls. 
+ ${CMAKE_COMMAND} -DMATCH_REGEX=USIZE_ -DREPLACE_EXPR=\+${CMAKE_SIZEOF_VOID_P} -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} COMMENT - "Compensating for hard-coded cbindgen naming behaviors..." + "Compensating for cbindgen deficits..." VERBATIM ) From 31fe8dbb3626def95858bf74dc335eaa8ef93655 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 4 Jun 2022 22:24:02 -0700 Subject: [PATCH 437/730] Renamed the `AMresult::Scalars` variant to `AMresult::Value`. Removed the `Vec` wrapping the 0th field of an `AMresult::Value`. Removed the `index` argument from `AMresultValue()`. --- automerge-c/src/result.rs | 124 +++++++++++++++++--------------------- 1 file changed, 54 insertions(+), 70 deletions(-) diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index af061d83..6ef2d015 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -110,7 +110,7 @@ pub enum AMresult { Doc(Box), Error(CString), ObjId(AMobjId), - Scalars(Vec>, Option), + Value(am::Value<'static>, Option), SyncMessage(AMsyncMessage), SyncState(AMsyncState), Void, @@ -223,7 +223,7 @@ impl From> for AMresult { impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(value) => AMresult::Scalars(vec![value], None), + Ok(value) => AMresult::Value(value, None), Err(e) => AMresult::err(&e.to_string()), } } @@ -233,7 +233,7 @@ impl From, am::ObjId)>, am::AutomergeError>> f fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { match maybe { // \todo Ensure that it's alright to ignore the `am::ObjId` value. 
- Ok(Some((value, _))) => AMresult::Scalars(vec![value], None), + Ok(Some((value, _))) => AMresult::Value(value, None), Ok(None) => AMresult::Void, Err(e) => AMresult::err(&e.to_string()), } @@ -243,7 +243,7 @@ impl From, am::ObjId)>, am::AutomergeError>> f impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(size) => AMresult::Scalars(vec![am::Value::uint(size as u64)], None), + Ok(size) => AMresult::Value(am::Value::uint(size as u64), None), Err(e) => AMresult::err(&e.to_string()), } } @@ -283,7 +283,7 @@ impl From, am::AutomergeError>> for AMresult { impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(bytes) => AMresult::Scalars(vec![am::Value::bytes(bytes)], None), + Ok(bytes) => AMresult::Value(am::Value::bytes(bytes), None), Err(e) => AMresult::err(&e.to_string()), } } @@ -291,7 +291,7 @@ impl From, am::AutomergeError>> for AMresult { impl From> for AMresult { fn from(bytes: Vec) -> Self { - AMresult::Scalars(vec![am::Value::bytes(bytes)], None) + AMresult::Value(am::Value::bytes(bytes), None) } } @@ -380,10 +380,10 @@ pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { | AMresult::Doc(_) | AMresult::ObjId(_) | AMresult::SyncMessage(_) - | AMresult::SyncState(_) => 1, + | AMresult::SyncState(_) + | AMresult::Value(_, _) => 1, AMresult::ChangeHashes(change_hashes) => change_hashes.len(), AMresult::Changes(changes, _) => changes.len(), - AMresult::Scalars(vec, _) => vec.len(), } } else { 0 @@ -413,94 +413,78 @@ pub unsafe extern "C" fn AMresultStatus(result: *mut AMresult) -> AMstatus { /// \brief Gets a result's value. /// /// \param[in] result A pointer to an `AMresult` struct. -/// \param[in] index The index of a value. /// \return An `AMvalue` struct. /// \pre \p result must be a valid address. -/// \pre `0 <=` \p index `<=` AMresultSize() for \p result. 
/// \internal /// /// # Safety /// result must be a pointer to a valid AMresult #[no_mangle] -pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult, index: usize) -> AMvalue<'a> { - let mut value = AMvalue::Void; +pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> { + let mut content = AMvalue::Void; if let Some(result) = result.as_mut() { match result { AMresult::ActorId(actor_id) => { - if index == 0 { - value = AMvalue::ActorId(actor_id.into()); - } + content = AMvalue::ActorId(actor_id.into()); } AMresult::ChangeHashes(change_hashes) => { - value = AMvalue::ChangeHashes(AMchangeHashes::new(change_hashes)); + content = AMvalue::ChangeHashes(AMchangeHashes::new(change_hashes)); } AMresult::Changes(changes, storage) => { - value = AMvalue::Changes(AMchanges::new(changes, storage)); - } - AMresult::Doc(doc) => { - if index == 0 { - value = AMvalue::Doc(&mut **doc) - } + content = AMvalue::Changes(AMchanges::new(changes, storage)); } + AMresult::Doc(doc) => content = AMvalue::Doc(&mut **doc), AMresult::Error(_) => {} AMresult::ObjId(obj_id) => { - if index == 0 { - value = AMvalue::ObjId(obj_id); - } + content = AMvalue::ObjId(obj_id); } - AMresult::Scalars(vec, hosted_str) => { - if let Some(element) = vec.get(index) { - match element { - am::Value::Scalar(scalar) => match scalar.as_ref() { - am::ScalarValue::Boolean(flag) => { - value = AMvalue::Boolean(*flag); + AMresult::Value(value, hosted_str) => { + match value { + am::Value::Scalar(scalar) => match scalar.as_ref() { + am::ScalarValue::Boolean(flag) => { + content = AMvalue::Boolean(*flag); + } + am::ScalarValue::Bytes(bytes) => { + content = AMvalue::Bytes(bytes.as_slice().into()); + } + am::ScalarValue::Counter(counter) => { + content = AMvalue::Counter(counter.into()); + } + am::ScalarValue::F64(float) => { + content = AMvalue::F64(*float); + } + am::ScalarValue::Int(int) => { + content = AMvalue::Int(*int); + } + am::ScalarValue::Null => { + content = 
AMvalue::Null; + } + am::ScalarValue::Str(smol_str) => { + *hosted_str = CString::new(smol_str.to_string()).ok(); + if let Some(c_str) = hosted_str { + content = AMvalue::Str(c_str.as_ptr()); } - am::ScalarValue::Bytes(bytes) => { - value = AMvalue::Bytes(bytes.as_slice().into()); - } - am::ScalarValue::Counter(counter) => { - value = AMvalue::Counter(counter.into()); - } - am::ScalarValue::F64(float) => { - value = AMvalue::F64(*float); - } - am::ScalarValue::Int(int) => { - value = AMvalue::Int(*int); - } - am::ScalarValue::Null => { - value = AMvalue::Null; - } - am::ScalarValue::Str(smol_str) => { - *hosted_str = CString::new(smol_str.to_string()).ok(); - if let Some(c_str) = hosted_str { - value = AMvalue::Str(c_str.as_ptr()); - } - } - am::ScalarValue::Timestamp(timestamp) => { - value = AMvalue::Timestamp(*timestamp); - } - am::ScalarValue::Uint(uint) => { - value = AMvalue::Uint(*uint); - } - }, - // \todo Confirm that an object value should be ignored - // when there's no object ID variant. - am::Value::Object(_) => {} - } + } + am::ScalarValue::Timestamp(timestamp) => { + content = AMvalue::Timestamp(*timestamp); + } + am::ScalarValue::Uint(uint) => { + content = AMvalue::Uint(*uint); + } + }, + // \todo Confirm that an object variant should be ignored + // when there's no object ID variant. + am::Value::Object(_) => {} } } AMresult::SyncMessage(sync_message) => { - if index == 0 { - value = AMvalue::SyncMessage(sync_message); - } + content = AMvalue::SyncMessage(sync_message); } AMresult::SyncState(sync_state) => { - if index == 0 { - value = AMvalue::SyncState(sync_state); - } + content = AMvalue::SyncState(sync_state); } AMresult::Void => {} } }; - value + content } From 92f3efd6e0d0523483c84751b0be6253dc580e28 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 4 Jun 2022 22:31:15 -0700 Subject: [PATCH 438/730] Removed the `0` argument from `AMresultValue()` calls. 
--- automerge-c/examples/quickstart.c | 8 +- automerge-c/test/amdoc_property_tests.c | 8 +- automerge-c/test/amlistput_tests.c | 36 +++--- automerge-c/test/ammapput_tests.c | 24 ++-- automerge-c/test/group_state.c | 2 +- automerge-c/test/sync_tests.c | 160 ++++++++++++------------ 6 files changed, 119 insertions(+), 119 deletions(-) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index b25b6172..cadf8137 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -10,7 +10,7 @@ AMvalue test(AMresult*, AMvalueVariant const); */ int main(int argc, char** argv) { AMresult* const doc1_result = AMcreate(); - AMdoc* const doc1 = AMresultValue(doc1_result, 0).doc; + AMdoc* const doc1 = AMresultValue(doc1_result).doc; if (doc1 == NULL) { fprintf(stderr, "`AMcreate()` failure."); exit(EXIT_FAILURE); @@ -42,7 +42,7 @@ int main(int argc, char** argv) { AMfree(result); AMresult* doc2_result = AMcreate(); - AMdoc* doc2 = AMresultValue(doc2_result, 0).doc; + AMdoc* doc2 = AMresultValue(doc2_result).doc; if (doc2 == NULL) { fprintf(stderr, "`AMcreate()` failure."); AMfree(card1_result); @@ -59,7 +59,7 @@ int main(int argc, char** argv) { value = test(save_result, AM_VALUE_BYTES); AMbyteSpan binary = value.bytes; doc2_result = AMload(binary.src, binary.count); - doc2 = AMresultValue(doc2_result, 0).doc; + doc2 = AMresultValue(doc2_result).doc; AMfree(save_result); if (doc2 == NULL) { fprintf(stderr, "`AMload()` failure."); @@ -129,7 +129,7 @@ AMvalue test(AMresult* result, AMvalueVariant const discriminant) { AMfree(result); exit(EXIT_FAILURE); } - AMvalue const value = AMresultValue(result, 0); + AMvalue const value = AMresultValue(result); if (value.tag != discriminant) { char const* label = NULL; switch (value.tag) { diff --git a/automerge-c/test/amdoc_property_tests.c b/automerge-c/test/amdoc_property_tests.c index 092a4b00..bcb2cdec 100644 --- a/automerge-c/test/amdoc_property_tests.c +++ 
b/automerge-c/test/amdoc_property_tests.c @@ -59,7 +59,7 @@ static void test_AMputActor(void **state) { fail_msg("%s", AMerrorMessage(res)); } assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res, 0); + AMvalue value = AMresultValue(res); assert_int_equal(value.tag, AM_VALUE_VOID); AMfree(res); res = AMgetActor(group_state->doc); @@ -67,7 +67,7 @@ static void test_AMputActor(void **state) { fail_msg("%s", AMerrorMessage(res)); } assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res, 0); + value = AMresultValue(res); assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); assert_int_equal(value.actor_id.count, test_state->actor_id_size); assert_memory_equal(value.actor_id.src, test_state->actor_id_bytes, value.actor_id.count); @@ -85,7 +85,7 @@ static void test_AMputActorHex(void **state) { fail_msg("%s", AMerrorMessage(res)); } assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res, 0); + AMvalue value = AMresultValue(res); assert_int_equal(value.tag, AM_VALUE_VOID); AMfree(res); res = AMgetActorHex(group_state->doc); @@ -93,7 +93,7 @@ static void test_AMputActorHex(void **state) { fail_msg("%s", AMerrorMessage(res)); } assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res, 0); + value = AMresultValue(res); assert_int_equal(value.tag, AM_VALUE_STR); assert_int_equal(strlen(value.str), test_state->actor_id_size * 2); assert_string_equal(value.str, test_state->actor_id_str); diff --git a/automerge-c/test/amlistput_tests.c b/automerge-c/test/amlistput_tests.c index bca8b80c..bddc832a 100644 --- a/automerge-c/test/amlistput_tests.c +++ b/automerge-c/test/amlistput_tests.c @@ -25,18 +25,18 @@ static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ fail_msg("%s", AMerrorMessage(res)); \ } \ assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res, 0); \ + AMvalue value = AMresultValue(res); \ assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ + AMfree(res); \ res 
= AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ } \ assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res, 0); \ + value = AMresultValue(res); \ assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ assert_true(value.member == scalar_value); \ - AMfree(res); \ + AMfree(res); \ } #define test_AMlistPutBytes(mode) test_AMlistPutBytes ## _ ## mode @@ -58,19 +58,19 @@ static void test_AMlistPutBytes_ ## mode(void **state) { \ fail_msg("%s", AMerrorMessage(res)); \ } \ assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res, 0); \ + AMvalue value = AMresultValue(res); \ assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ + AMfree(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ } \ assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res, 0); \ + value = AMresultValue(res); \ assert_int_equal(value.tag, AM_VALUE_BYTES); \ assert_int_equal(value.bytes.count, BYTES_SIZE); \ assert_memory_equal(value.bytes.src, bytes_value, BYTES_SIZE); \ - AMfree(res); \ + AMfree(res); \ } #define test_AMlistPutNull(mode) test_AMlistPutNull_ ## mode @@ -84,17 +84,17 @@ static void test_AMlistPutNull_ ## mode(void **state) { \ fail_msg("%s", AMerrorMessage(res)); \ } \ assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res, 0); \ + AMvalue value = AMresultValue(res); \ assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ + AMfree(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ } \ assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res, 0); \ + value = AMresultValue(res); \ assert_int_equal(value.tag, AM_VALUE_NULL); \ - AMfree(res); \ + AMfree(res); \ } #define test_AMlistPutObject(label, mode) 
test_AMlistPutObject_ ## label ## _ ## mode @@ -113,11 +113,11 @@ static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ fail_msg("%s", AMerrorMessage(res)); \ } \ assert_int_equal(AMresultSize(res), 1); \ - AMvalue value = AMresultValue(res, 0); \ + AMvalue value = AMresultValue(res); \ assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ assert_non_null(value.obj_id); \ assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ - AMfree(res); \ + AMfree(res); \ } #define test_AMlistPutStr(mode) test_AMlistPutStr ## _ ## mode @@ -138,19 +138,19 @@ static void test_AMlistPutStr_ ## mode(void **state) { \ fail_msg("%s", AMerrorMessage(res)); \ } \ assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res, 0); \ + AMvalue value = AMresultValue(res); \ assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ + AMfree(res); \ res = AMlistGet(group_state->doc, AM_ROOT, 0); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ fail_msg("%s", AMerrorMessage(res)); \ } \ assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res, 0); \ + value = AMresultValue(res); \ assert_int_equal(value.tag, AM_VALUE_STR); \ assert_int_equal(strlen(value.str), STR_LEN); \ assert_memory_equal(value.str, str_value, STR_LEN + 1); \ - AMfree(res); \ + AMfree(res); \ } static_void_test_AMlistPut(Bool, insert, boolean, true) diff --git a/automerge-c/test/ammapput_tests.c b/automerge-c/test/ammapput_tests.c index c86809ed..280c8e5b 100644 --- a/automerge-c/test/ammapput_tests.c +++ b/automerge-c/test/ammapput_tests.c @@ -28,18 +28,18 @@ static void test_AMmapPut ## suffix(void **state) { \ fail_msg("%s", AMerrorMessage(res)); \ } \ assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res, 0); \ + AMvalue value = AMresultValue(res); \ assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ + AMfree(res); \ res = AMmapGet(group_state->doc, AM_ROOT, #suffix); \ if (AMresultStatus(res) != AM_STATUS_OK) { \ 
fail_msg("%s", AMerrorMessage(res)); \ } \ assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res, 0); \ + value = AMresultValue(res); \ assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ assert_true(value.member == scalar_value); \ - AMfree(res); \ + AMfree(res); \ } #define test_AMmapPutObject(label) test_AMmapPutObject_ ## label @@ -57,11 +57,11 @@ static void test_AMmapPutObject_ ## label(void **state) { \ fail_msg("%s", AMerrorMessage(res)); \ } \ assert_int_equal(AMresultSize(res), 1); \ - AMvalue value = AMresultValue(res, 0); \ + AMvalue value = AMresultValue(res); \ assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ assert_non_null(value.obj_id); \ assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ - AMfree(res); \ + AMfree(res); \ } static_void_test_AMmapPut(Bool, boolean, true) @@ -83,7 +83,7 @@ static void test_AMmapPutBytes(void **state) { fail_msg("%s", AMerrorMessage(res)); } assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res, 0); + AMvalue value = AMresultValue(res); assert_int_equal(value.tag, AM_VALUE_VOID); AMfree(res); res = AMmapGet(group_state->doc, AM_ROOT, KEY); @@ -91,7 +91,7 @@ static void test_AMmapPutBytes(void **state) { fail_msg("%s", AMerrorMessage(res)); } assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res, 0); + value = AMresultValue(res); assert_int_equal(value.tag, AM_VALUE_BYTES); assert_int_equal(value.bytes.count, BYTES_SIZE); assert_memory_equal(value.bytes.src, BYTES_VALUE, BYTES_SIZE); @@ -113,7 +113,7 @@ static void test_AMmapPutNull(void **state) { fail_msg("%s", AMerrorMessage(res)); } assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res, 0); + AMvalue value = AMresultValue(res); assert_int_equal(value.tag, AM_VALUE_VOID); AMfree(res); res = AMmapGet(group_state->doc, AM_ROOT, KEY); @@ -121,7 +121,7 @@ static void test_AMmapPutNull(void **state) { fail_msg("%s", AMerrorMessage(res)); } assert_int_equal(AMresultSize(res), 
1); - value = AMresultValue(res, 0); + value = AMresultValue(res); assert_int_equal(value.tag, AM_VALUE_NULL); AMfree(res); } @@ -148,7 +148,7 @@ static void test_AMmapPutStr(void **state) { fail_msg("%s", AMerrorMessage(res)); } assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res, 0); + AMvalue value = AMresultValue(res); assert_int_equal(value.tag, AM_VALUE_VOID); AMfree(res); res = AMmapGet(group_state->doc, AM_ROOT, KEY); @@ -156,7 +156,7 @@ static void test_AMmapPutStr(void **state) { fail_msg("%s", AMerrorMessage(res)); } assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res, 0); + value = AMresultValue(res); assert_int_equal(value.tag, AM_VALUE_STR); assert_int_equal(strlen(value.str), STR_LEN); assert_memory_equal(value.str, STR_VALUE, STR_LEN + 1); diff --git a/automerge-c/test/group_state.c b/automerge-c/test/group_state.c index 784f6013..66be32b3 100644 --- a/automerge-c/test/group_state.c +++ b/automerge-c/test/group_state.c @@ -6,7 +6,7 @@ int group_setup(void** state) { GroupState* group_state = calloc(1, sizeof(GroupState)); group_state->doc_result = AMcreate(); - group_state->doc = AMresultValue(group_state->doc_result, 0).doc; + group_state->doc = AMresultValue(group_state->doc_result).doc; *state = group_state; return 0; } diff --git a/automerge-c/test/sync_tests.c b/automerge-c/test/sync_tests.c index 3d466a7d..1ecda1cc 100644 --- a/automerge-c/test/sync_tests.c +++ b/automerge-c/test/sync_tests.c @@ -24,13 +24,13 @@ typedef struct { static int setup(void** state) { TestState* test_state = calloc(1, sizeof(TestState)); test_state->doc1_result = AMcreate(); - test_state->doc1 = AMresultValue(test_state->doc1_result, 0).doc; + test_state->doc1 = AMresultValue(test_state->doc1_result).doc; test_state->doc2_result = AMcreate(); - test_state->doc2 = AMresultValue(test_state->doc2_result, 0).doc; + test_state->doc2 = AMresultValue(test_state->doc2_result).doc; test_state->sync_state1_result = AMsyncStateInit(); - 
test_state->sync_state1 = AMresultValue(test_state->sync_state1_result, 0).sync_state; + test_state->sync_state1 = AMresultValue(test_state->sync_state1_result).sync_state; test_state->sync_state2_result = AMsyncStateInit(); - test_state->sync_state2 = AMresultValue(test_state->sync_state2_result, 0).sync_state; + test_state->sync_state2 = AMresultValue(test_state->sync_state2_result).sync_state; *state = test_state; return 0; } @@ -57,7 +57,7 @@ static void sync(AMdoc* a, do { AMresult* a2b_msg_result = AMgenerateSyncMessage(a, a_sync_state); AMresult* b2a_msg_result = AMgenerateSyncMessage(b, b_sync_state); - AMvalue value = AMresultValue(a2b_msg_result, 0); + AMvalue value = AMresultValue(a2b_msg_result); switch (value.tag) { case AM_VALUE_SYNC_MESSAGE: { a2b_msg = value.sync_message; @@ -66,7 +66,7 @@ static void sync(AMdoc* a, break; case AM_VALUE_VOID: a2b_msg = NULL; break; } - value = AMresultValue(b2a_msg_result, 0); + value = AMresultValue(b2a_msg_result); switch (value.tag) { case AM_VALUE_SYNC_MESSAGE: { b2a_msg = value.sync_message; @@ -95,7 +95,7 @@ static void test_converged_empty_local_doc_reply_no_local_data(void **state) { fail_msg("%s", AMerrorMessage(sync_message_result)); } assert_int_equal(AMresultSize(sync_message_result), 1); - AMvalue value = AMresultValue(sync_message_result, 0); + AMvalue value = AMresultValue(sync_message_result); assert_int_equal(value.tag, AM_VALUE_SYNC_MESSAGE); AMsyncMessage const* sync_message = value.sync_message; AMchangeHashes heads = AMsyncMessageHeads(sync_message); @@ -125,7 +125,7 @@ static void test_converged_empty_local_doc_no_reply(void **state) { fail_msg("%s", AMerrorMessage(sync_message1_result)); } assert_int_equal(AMresultSize(sync_message1_result), 1); - AMvalue value = AMresultValue(sync_message1_result, 0); + AMvalue value = AMresultValue(sync_message1_result); assert_int_equal(value.tag, AM_VALUE_SYNC_MESSAGE); AMsyncMessage const* sync_message1 = value.sync_message; AMresult* result = 
AMreceiveSyncMessage( @@ -135,7 +135,7 @@ static void test_converged_empty_local_doc_no_reply(void **state) { fail_msg("%s", AMerrorMessage(result)); } assert_int_equal(AMresultSize(result), 0); - value = AMresultValue(result, 0); + value = AMresultValue(result); assert_int_equal(value.tag, AM_VALUE_VOID); AMfree(result); AMresult* sync_message2_result = AMgenerateSyncMessage( @@ -145,7 +145,7 @@ static void test_converged_empty_local_doc_no_reply(void **state) { fail_msg("%s", AMerrorMessage(sync_message2_result)); } assert_int_equal(AMresultSize(sync_message2_result), 0); - value = AMresultValue(sync_message2_result, 0); + value = AMresultValue(sync_message2_result); assert_int_equal(value.tag, AM_VALUE_VOID); AMfree(sync_message2_result); AMfree(sync_message1_result); @@ -165,7 +165,7 @@ static void test_converged_equal_heads_no_reply(void **state) { AMcommit(test_state->doc1, NULL, &time); } AMresult* changes_result = AMgetChanges(test_state->doc1, NULL); - AMvalue value = AMresultValue(changes_result, 0); + AMvalue value = AMresultValue(changes_result); AMfree(AMapplyChanges(test_state->doc2, &value.changes)); AMfree(changes_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); @@ -175,10 +175,10 @@ static void test_converged_equal_heads_no_reply(void **state) { test_state->doc1, test_state->sync_state1 ); - AMsyncMessage const* sync_message1 = AMresultValue(sync_message1_result, 0).sync_message; + AMsyncMessage const* sync_message1 = AMresultValue(sync_message1_result).sync_message; AMchangeHashes last_sent_heads = AMsyncStateLastSentHeads(test_state->sync_state1); AMresult* heads_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads = AMresultValue(heads_result, 0).change_hashes; + AMchangeHashes heads = AMresultValue(heads_result).change_hashes; assert_int_equal(AMchangeHashesCmp(&last_sent_heads, &heads), 0); AMfree(heads_result); @@ -190,7 +190,7 @@ static void test_converged_equal_heads_no_reply(void **state) { AMresult* 
sync_message2_result = AMgenerateSyncMessage( test_state->doc2, test_state->sync_state2 ); - assert_int_equal(AMresultValue(sync_message2_result, 0).tag, AM_VALUE_VOID); + assert_int_equal(AMresultValue(sync_message2_result).tag, AM_VALUE_VOID); AMfree(sync_message2_result); } @@ -292,7 +292,7 @@ static void test_converged_no_message_once_synced(void **state) { /* The first node reports what it has. */ AMresult* message_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); - AMsyncMessage const* message = AMresultValue(message_result, 0).sync_message; + AMsyncMessage const* message = AMresultValue(message_result).sync_message; /* The second node receives that message and sends changes along with what * it has. */ @@ -302,7 +302,7 @@ static void test_converged_no_message_once_synced(void **state) { AMfree(message_result); message_result = AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2); - message = AMresultValue(message_result, 0).sync_message; + message = AMresultValue(message_result).sync_message; AMchanges message_changes = AMsyncMessageChanges(message); assert_int_equal(AMchangesSize(&message_changes), 5); @@ -314,7 +314,7 @@ static void test_converged_no_message_once_synced(void **state) { AMfree(message_result); message_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); - message = AMresultValue(message_result, 0).sync_message; + message = AMresultValue(message_result).sync_message; message_changes = AMsyncMessageChanges(message); assert_int_equal(AMchangesSize(&message_changes), 5); @@ -326,7 +326,7 @@ static void test_converged_no_message_once_synced(void **state) { AMfree(message_result); message_result = AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2); - message = AMresultValue(message_result, 0).sync_message; + message = AMresultValue(message_result).sync_message; /* The first node receives the message and has nothing more to say. 
*/ AMfree(AMreceiveSyncMessage(test_state->doc1, @@ -335,13 +335,13 @@ static void test_converged_no_message_once_synced(void **state) { AMfree(message_result); message_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); - assert_int_equal(AMresultValue(message_result, 0).tag, AM_VALUE_VOID); + assert_int_equal(AMresultValue(message_result).tag, AM_VALUE_VOID); AMfree(message_result); /* The second node also has nothing left to say. */ message_result = AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2); - assert_int_equal(AMresultValue(message_result, 0).tag, AM_VALUE_VOID); + assert_int_equal(AMresultValue(message_result).tag, AM_VALUE_VOID); AMfree(message_result); } @@ -363,19 +363,19 @@ static void test_converged_allow_simultaneous_messages(void **state) { AMcommit(test_state->doc2, NULL, &time); } AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; AMbyteSpan head1 = AMchangeHashesNext(&heads1, 1); AMresult* heads2_result = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; + AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; AMbyteSpan head2 = AMchangeHashesNext(&heads2, 1); /* Both sides report what they have but have no shared peer state. 
*/ AMresult* msg1to2_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); - AMsyncMessage const* msg1to2 = AMresultValue(msg1to2_result, 0).sync_message; + AMsyncMessage const* msg1to2 = AMresultValue(msg1to2_result).sync_message; AMresult* msg2to1_result = AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2); - AMsyncMessage const* msg2to1 = AMresultValue(msg2to1_result, 0).sync_message; + AMsyncMessage const* msg2to1 = AMresultValue(msg2to1_result).sync_message; AMchanges msg1to2_changes = AMsyncMessageChanges(msg1to2); assert_int_equal(AMchangesSize(&msg1to2_changes), 0); AMsyncHaves msg1to2_haves = AMsyncMessageHaves(msg1to2); @@ -405,12 +405,12 @@ static void test_converged_allow_simultaneous_messages(void **state) { * message). */ msg1to2_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); - msg1to2 = AMresultValue(msg1to2_result, 0).sync_message; + msg1to2 = AMresultValue(msg1to2_result).sync_message; msg1to2_changes = AMsyncMessageChanges(msg1to2); assert_int_equal(AMchangesSize(&msg1to2_changes), 5); msg2to1_result = AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2); - msg2to1 = AMresultValue(msg2to1_result, 0).sync_message; + msg2to1 = AMresultValue(msg2to1_result).sync_message; msg2to1_changes = AMsyncMessageChanges(msg2to1); assert_int_equal(AMchangesSize(&msg2to1_changes), 5); @@ -420,14 +420,14 @@ static void test_converged_allow_simultaneous_messages(void **state) { msg2to1)); AMfree(msg2to1_result); AMresult* missing_deps_result = AMgetMissingDeps(test_state->doc1, NULL); - AMchangeHashes missing_deps = AMresultValue(missing_deps_result, 0).change_hashes; + AMchangeHashes missing_deps = AMresultValue(missing_deps_result).change_hashes; assert_int_equal(AMchangeHashesSize(&missing_deps), 0); AMfree(missing_deps_result); AMresult* map_value_result = AMmapGet(test_state->doc1, AM_ROOT, "x"); - assert_int_equal(AMresultValue(map_value_result, 0).uint, 4); + 
assert_int_equal(AMresultValue(map_value_result).uint, 4); AMfree(map_value_result); map_value_result = AMmapGet(test_state->doc1, AM_ROOT, "y"); - assert_int_equal(AMresultValue(map_value_result, 0).uint, 4); + assert_int_equal(AMresultValue(map_value_result).uint, 4); AMfree(map_value_result); AMfree(AMreceiveSyncMessage(test_state->doc2, @@ -435,26 +435,26 @@ static void test_converged_allow_simultaneous_messages(void **state) { msg1to2)); AMfree(msg1to2_result); missing_deps_result = AMgetMissingDeps(test_state->doc2, NULL); - missing_deps = AMresultValue(missing_deps_result, 0).change_hashes; + missing_deps = AMresultValue(missing_deps_result).change_hashes; assert_int_equal(AMchangeHashesSize(&missing_deps), 0); AMfree(missing_deps_result); map_value_result = AMmapGet(test_state->doc2, AM_ROOT, "x"); - assert_int_equal(AMresultValue(map_value_result, 0).uint, 4); + assert_int_equal(AMresultValue(map_value_result).uint, 4); AMfree(map_value_result); map_value_result = AMmapGet(test_state->doc2, AM_ROOT, "y"); - assert_int_equal(AMresultValue(map_value_result, 0).uint, 4); + assert_int_equal(AMresultValue(map_value_result).uint, 4); AMfree(map_value_result); /* The response acknowledges that the changes were received and sends no * further changes. 
*/ msg1to2_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); - msg1to2 = AMresultValue(msg1to2_result, 0).sync_message; + msg1to2 = AMresultValue(msg1to2_result).sync_message; msg1to2_changes = AMsyncMessageChanges(msg1to2); assert_int_equal(AMchangesSize(&msg1to2_changes), 0); msg2to1_result = AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2); - msg2to1 = AMresultValue(msg2to1_result, 0).sync_message; + msg2to1 = AMresultValue(msg2to1_result).sync_message; msg2to1_changes = AMsyncMessageChanges(msg2to1); assert_int_equal(AMchangesSize(&msg2to1_changes), 0); @@ -471,11 +471,11 @@ static void test_converged_allow_simultaneous_messages(void **state) { /* They're synchronized so no more messages are required. */ msg1to2_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); - assert_int_equal(AMresultValue(msg1to2_result, 0).tag, AM_VALUE_VOID); + assert_int_equal(AMresultValue(msg1to2_result).tag, AM_VALUE_VOID); AMfree(msg1to2_result); msg2to1_result = AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2); - assert_int_equal(AMresultValue(msg2to1_result, 0).tag, AM_VALUE_VOID); + assert_int_equal(AMresultValue(msg2to1_result).tag, AM_VALUE_VOID); AMfree(msg2to1_result); /* If we make one more change and start synchronizing then its "last @@ -484,7 +484,7 @@ static void test_converged_allow_simultaneous_messages(void **state) { AMcommit(test_state->doc1, NULL, &time); msg1to2_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); - msg1to2 = AMresultValue(msg1to2_result, 0).sync_message; + msg1to2 = AMresultValue(msg1to2_result).sync_message; msg1to2_haves = AMsyncMessageHaves(msg1to2); msg1to2_have = AMsyncHavesNext(&msg1to2_haves, 1); msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); @@ -512,7 +512,7 @@ static void test_converged_assume_sent_changes_were_received(void **state) { AM_ROOT, "items", AM_OBJ_TYPE_LIST); - AMobjId const* items = AMresultValue(items_result, 
0).obj_id; + AMobjId const* items = AMresultValue(items_result).obj_id; time_t const time = 0; AMcommit(test_state->doc1, NULL, &time); sync(test_state->doc1, @@ -524,7 +524,7 @@ static void test_converged_assume_sent_changes_were_received(void **state) { AMcommit(test_state->doc1, NULL, &time); AMresult* message_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); - AMsyncMessage const* message = AMresultValue(message_result, 0).sync_message; + AMsyncMessage const* message = AMresultValue(message_result).sync_message; AMchanges message_changes = AMsyncMessageChanges(message); assert_int_equal(AMchangesSize(&message_changes), 1); AMfree(message_result); @@ -533,7 +533,7 @@ static void test_converged_assume_sent_changes_were_received(void **state) { AMcommit(test_state->doc1, NULL, &time); message_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); - message = AMresultValue(message_result, 0).sync_message; + message = AMresultValue(message_result).sync_message; message_changes = AMsyncMessageChanges(message); assert_int_equal(AMchangesSize(&message_changes), 1); AMfree(message_result); @@ -542,7 +542,7 @@ static void test_converged_assume_sent_changes_were_received(void **state) { AMcommit(test_state->doc1, NULL, &time); message_result = AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1); - message = AMresultValue(message_result, 0).sync_message; + message = AMresultValue(message_result).sync_message; message_changes = AMsyncMessageChanges(message); assert_int_equal(AMchangesSize(&message_changes), 1); AMfree(message_result); @@ -623,9 +623,9 @@ static void test_diverged_works_without_prior_sync_state(void **state) { test_state->sync_state1, test_state->sync_state2); AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; AMresult* heads2_result = 
AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; + AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); AMfree(heads2_result); AMfree(heads1_result); @@ -666,24 +666,24 @@ static void test_diverged_works_with_prior_sync_state(void **state) { AMcommit(test_state->doc2, NULL, &time); } AMresult* encoded_result = AMsyncStateEncode(test_state->sync_state1); - AMbyteSpan encoded = AMresultValue(encoded_result, 0).bytes; + AMbyteSpan encoded = AMresultValue(encoded_result).bytes; AMresult* sync_state1_result = AMsyncStateDecode(encoded.src, encoded.count); AMfree(encoded_result); - AMsyncState* sync_state1 = AMresultValue(sync_state1_result, 0).sync_state; + AMsyncState* sync_state1 = AMresultValue(sync_state1_result).sync_state; encoded_result = AMsyncStateEncode(test_state->sync_state2); - encoded = AMresultValue(encoded_result, 0).bytes; + encoded = AMresultValue(encoded_result).bytes; AMresult* sync_state2_result = AMsyncStateDecode(encoded.src, encoded.count); AMfree(encoded_result); - AMsyncState* sync_state2 = AMresultValue(sync_state2_result, 0).sync_state; + AMsyncState* sync_state2 = AMresultValue(sync_state2_result).sync_state; assert_false(AMequal(test_state->doc1, test_state->doc2)); sync(test_state->doc1, test_state->doc2, sync_state1, sync_state2); AMfree(sync_state2_result); AMfree(sync_state1_result); AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; AMresult* heads2_result = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; + AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); AMfree(heads2_result); AMfree(heads1_result); @@ -710,7 +710,7 @@ static void 
test_diverged_ensure_not_empty_after_sync(void **state) { test_state->sync_state2); AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; AMchangeHashes shared_heads1 = AMsyncStateSharedHeads(test_state->sync_state1); assert_int_equal(AMchangeHashesCmp(&shared_heads1, &heads1), 0); AMchangeHashes shared_heads2 = AMsyncStateSharedHeads(test_state->sync_state2); @@ -747,12 +747,12 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { /* Save a copy of n2 as "r" to simulate recovering from a crash. */ AMresult* r_result = AMdup(test_state->doc2); - AMdoc* r = AMresultValue(r_result, 0).doc; + AMdoc* r = AMresultValue(r_result).doc; AMresult* encoded_result = AMsyncStateEncode(test_state->sync_state2); - AMbyteSpan encoded = AMresultValue(encoded_result, 0).bytes; + AMbyteSpan encoded = AMresultValue(encoded_result).bytes; AMresult* sync_state_resultr = AMsyncStateDecode(encoded.src, encoded.count); AMfree(encoded_result); - AMsyncState* sync_stater = AMresultValue(sync_state_resultr, 0).sync_state; + AMsyncState* sync_stater = AMresultValue(sync_state_resultr).sync_state; /* Synchronize another few commits. */ for (size_t value = 3; value != 6; ++value) { AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); @@ -764,9 +764,9 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { test_state->sync_state2); /* Everyone should be on the same page here. 
*/ AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; AMresult* heads2_result = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; + AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); AMfree(heads2_result); AMfree(heads1_result); @@ -779,18 +779,18 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { AMcommit(test_state->doc1, NULL, &time); } heads1_result = AMgetHeads(test_state->doc1); - heads1 = AMresultValue(heads1_result, 0).change_hashes; + heads1 = AMresultValue(heads1_result).change_hashes; AMresult* heads_resultr = AMgetHeads(r); - AMchangeHashes headsr = AMresultValue(heads_resultr, 0).change_hashes; + AMchangeHashes headsr = AMresultValue(heads_resultr).change_hashes; assert_int_not_equal(AMchangeHashesCmp(&heads1, &headsr), 0); AMfree(heads_resultr); AMfree(heads1_result); assert_false(AMequal(test_state->doc1, r)); AMresult* map_value_result = AMmapGet(test_state->doc1, AM_ROOT, "x"); - assert_int_equal(AMresultValue(map_value_result, 0).uint, 8); + assert_int_equal(AMresultValue(map_value_result).uint, 8); AMfree(map_value_result); map_value_result = AMmapGet(r, AM_ROOT, "x"); - assert_int_equal(AMresultValue(map_value_result, 0).uint, 2); + assert_int_equal(AMresultValue(map_value_result).uint, 2); AMfree(map_value_result); sync(test_state->doc1, r, @@ -798,9 +798,9 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { sync_stater); AMfree(sync_state_resultr); heads1_result = AMgetHeads(test_state->doc1); - heads1 = AMresultValue(heads1_result, 0).change_hashes; + heads1 = AMresultValue(heads1_result).change_hashes; heads_resultr = AMgetHeads(r); - headsr = AMresultValue(heads_resultr, 0).change_hashes; + headsr = 
AMresultValue(heads_resultr).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &headsr), 0); AMfree(heads_resultr); AMfree(heads1_result); @@ -829,31 +829,31 @@ static void test_diverged_resync_after_data_loss_without_disconnection(void **st test_state->sync_state2); AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; AMresult* heads2_result = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; + AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); AMfree(heads2_result); AMfree(heads1_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); AMresult* doc2_after_data_loss_result = AMcreate(); - AMdoc* doc2_after_data_loss = AMresultValue(doc2_after_data_loss_result, 0).doc; + AMdoc* doc2_after_data_loss = AMresultValue(doc2_after_data_loss_result).doc; AMfree(AMsetActorHex(doc2_after_data_loss, "89abcdef")); /* "n2" now has no data, but n1 still thinks it does. Note we don't do * decodeSyncState(encodeSyncState(s1)) in order to simulate data loss * without disconnecting. 
*/ AMresult* sync_state2_after_data_loss_result = AMsyncStateInit(); - AMsyncState* sync_state2_after_data_loss = AMresultValue(sync_state2_after_data_loss_result, 0).sync_state; + AMsyncState* sync_state2_after_data_loss = AMresultValue(sync_state2_after_data_loss_result).sync_state; sync(test_state->doc1, doc2_after_data_loss, test_state->sync_state1, sync_state2_after_data_loss); heads1_result = AMgetHeads(test_state->doc1); - heads1 = AMresultValue(heads1_result, 0).change_hashes; + heads1 = AMresultValue(heads1_result).change_hashes; heads2_result = AMgetHeads(doc2_after_data_loss); - heads2 = AMresultValue(heads2_result, 0).change_hashes; + heads2 = AMresultValue(heads2_result).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); AMfree(heads2_result); AMfree(heads1_result); @@ -871,14 +871,14 @@ static void test_diverged_handles_concurrent_changes(void **state) { AMfree(AMsetActorHex(test_state->doc1, "01234567")); AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); AMresult* doc3_result = AMcreate(); - AMdoc* doc3 = AMresultValue(doc3_result, 0).doc; + AMdoc* doc3 = AMresultValue(doc3_result).doc; AMfree(AMsetActorHex(doc3, "fedcba98")); AMsyncState* sync_state12 = test_state->sync_state1; AMsyncState* sync_state21 = test_state->sync_state2; AMresult* sync_state23_result = AMsyncStateInit(); - AMsyncState* sync_state23 = AMresultValue(sync_state23_result, 0).sync_state; + AMsyncState* sync_state23 = AMresultValue(sync_state23_result).sync_state; AMresult* sync_state32_result = AMsyncStateInit(); - AMsyncState* sync_state32 = AMresultValue(sync_state32_result, 0).sync_state; + AMsyncState* sync_state32 = AMresultValue(sync_state32_result).sync_state; /* Change 1 is known to all three nodes. */ time_t const time = 0; @@ -902,7 +902,7 @@ static void test_diverged_handles_concurrent_changes(void **state) { /* Apply n3's latest change to n2. 
*/ AMresult* changes_result = AMgetLastLocalChange(doc3); - AMchanges changes = AMresultValue(changes_result, 0).changes; + AMchanges changes = AMresultValue(changes_result).changes; AMfree(AMapplyChanges(test_state->doc2, &changes)); AMfree(changes_result); @@ -910,9 +910,9 @@ static void test_diverged_handles_concurrent_changes(void **state) { * heads. */ sync(test_state->doc1, test_state->doc2, sync_state12, sync_state21); AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; AMresult* heads2_result = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; + AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); AMfree(heads2_result); AMfree(heads1_result); @@ -932,13 +932,13 @@ static void test_diverged_handles_histories_of_branching_and_merging(void **stat AMfree(AMsetActorHex(test_state->doc1, "01234567")); AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); AMresult* doc3_result = AMcreate(); - AMdoc* doc3 = AMresultValue(doc3_result, 0).doc; + AMdoc* doc3 = AMresultValue(doc3_result).doc; AMfree(AMsetActorHex(doc3, "fedcba98")); time_t const time = 0; AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 0)); AMcommit(test_state->doc1, NULL, &time); AMresult* changes_result = AMgetLastLocalChange(test_state->doc1); - AMchanges changes = AMresultValue(changes_result, 0).changes; + AMchanges changes = AMresultValue(changes_result).changes; AMfree(AMapplyChanges(test_state->doc2, &changes)); AMfree(AMapplyChanges(doc3, &changes)); AMfree(changes_result); @@ -958,9 +958,9 @@ static void test_diverged_handles_histories_of_branching_and_merging(void **stat AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "n2", value)); AMcommit(test_state->doc2, NULL, &time); AMresult* changes1_result = 
AMgetLastLocalChange(test_state->doc1); - AMchanges changes1 = AMresultValue(changes1_result, 0).changes; + AMchanges changes1 = AMresultValue(changes1_result).changes; AMresult* changes2_result = AMgetLastLocalChange(test_state->doc2); - AMchanges changes2 = AMresultValue(changes2_result, 0).changes; + AMchanges changes2 = AMresultValue(changes2_result).changes; AMfree(AMapplyChanges(test_state->doc1, &changes2)); AMfree(changes2_result); AMfree(AMapplyChanges(test_state->doc2, &changes1)); @@ -975,7 +975,7 @@ static void test_diverged_handles_histories_of_branching_and_merging(void **stat /* Having n3's last change concurrent to the last sync heads forces us into * the slower code path. */ AMresult* changes3_result = AMgetLastLocalChange(doc3); - AMchanges changes3 = AMresultValue(changes3_result, 0).changes; + AMchanges changes3 = AMresultValue(changes3_result).changes; AMfree(AMapplyChanges(test_state->doc2, &changes3)); AMfree(changes3_result); AMfree(AMmapPutStr(test_state->doc1, AM_ROOT, "n1", "final")); @@ -988,9 +988,9 @@ static void test_diverged_handles_histories_of_branching_and_merging(void **stat test_state->sync_state1, test_state->sync_state2); AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result, 0).change_hashes; + AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; AMresult* heads2_result = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result, 0).change_hashes; + AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); AMfree(heads2_result); AMfree(heads1_result); From 92d6fff22feb8c74733459fa195f2c5418ac7615 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sun, 5 Jun 2022 14:28:33 -0700 Subject: [PATCH 439/730] Compensate for the removal of the `AMchanges.ptr` member. 
--- automerge-c/examples/quickstart.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index cadf8137..5b90fdcd 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -92,7 +92,7 @@ int main(int argc, char** argv) { result = AMgetChanges(doc1, NULL); value = test(result, AM_VALUE_CHANGES); AMchange const* change = NULL; - while (value.changes.ptr && (change = AMchangesNext(&value.changes, 1))) { + while ((change = AMchangesNext(&value.changes, 1)) != NULL) { size_t const size = AMobjSizeAt(doc1, cards, change); printf("%s %ld\n", AMchangeMessage(change), size); } From 7e1ae60bdcf061e5fb5940f2c537d92c13114883 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sun, 5 Jun 2022 14:32:55 -0700 Subject: [PATCH 440/730] Move the `AMchangeHashes.ptr` field into the `change_hashes::Detail` struct. Change `AMchangeHashesAdvance()`, `AMchangeHashesNext()` and `AMchangeHashesPrev()` to interpret their `n` argument relatively instead of absolutely. Renamed `AMchangeHashesReverse()` to `AMchangeHashesReversed()`. Updated the C API's documentation for the `AMchangeHashes` struct. --- automerge-c/src/change_hashes.rs | 160 ++++++++++++++++++------------- 1 file changed, 94 insertions(+), 66 deletions(-) diff --git a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs index b5daf5d8..893f1e7a 100644 --- a/automerge-c/src/change_hashes.rs +++ b/automerge-c/src/change_hashes.rs @@ -9,6 +9,7 @@ use crate::byte_span::AMbyteSpan; struct Detail { len: usize, offset: isize, + ptr: *const c_void, } /// \note cbindgen won't propagate the value of a `std::mem::size_of()` call @@ -16,18 +17,73 @@ struct Detail { /// propagate the name of a constant initialized from it so if the /// constant's name is a symbolic representation of the value it can be /// converted into a number by post-processing the header it generated. 
-pub const USIZE_USIZE_: usize = size_of::(); +pub const USIZE_USIZE_USIZE_: usize = size_of::(); impl Detail { - fn new(len: usize, offset: isize) -> Self { - Self { len, offset } + fn new(change_hashes: &[am::ChangeHash], offset: isize) -> Self { + Self { + len: change_hashes.len(), + offset, + ptr: change_hashes.as_ptr() as *const c_void, + } + } + + pub fn advance(&mut self, n: isize) { + if n != 0 && !self.is_stopped() { + let n = if self.offset < 0 { -n } else { n }; + let len = self.len as isize; + self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); + }; + } + + pub fn get_index(&self) -> usize { + (self.offset + + if self.offset < 0 { + self.len as isize + } else { + 0 + }) as usize + } + + pub fn next(&mut self, n: isize) -> Option<&am::ChangeHash> { + if self.is_stopped() { + return None; + } + let slice: &[am::ChangeHash] = + unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) }; + let value = &slice[self.get_index()]; + self.advance(n); + Some(value) + } + + pub fn is_stopped(&self) -> bool { + let len = self.len as isize; + self.offset < -len || self.offset == len + } + + pub fn prev(&mut self, n: isize) -> Option<&am::ChangeHash> { + self.advance(n); + if self.is_stopped() { + return None; + } + let slice: &[am::ChangeHash] = + unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) }; + Some(&slice[self.get_index()]) + } + + pub fn reversed(&self) -> Self { + Self { + len: self.len, + offset: -(self.offset + 1), + ptr: self.ptr, + } } } -impl From for [u8; USIZE_USIZE_] { +impl From for [u8; USIZE_USIZE_USIZE_] { fn from(detail: Detail) -> Self { unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_) + std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) .try_into() .unwrap() } @@ -35,30 +91,23 @@ impl From for [u8; USIZE_USIZE_] { } /// \struct AMchangeHashes -/// \brief A bidirectional iterator 
over a sequence of change hashes. +/// \brief A random-access iterator over a sequence of change hashes. #[repr(C)] pub struct AMchangeHashes { - /// A pointer to the first change hash or `NULL`. - ptr: *const c_void, /// Reserved. - detail: [u8; USIZE_USIZE_], + detail: [u8; USIZE_USIZE_USIZE_], } impl AMchangeHashes { pub fn new(change_hashes: &[am::ChangeHash]) -> Self { Self { - ptr: change_hashes.as_ptr() as *const c_void, - detail: Detail::new(change_hashes.len(), 0).into(), + detail: Detail::new(change_hashes, 0).into(), } } pub fn advance(&mut self, n: isize) { let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - let len = detail.len as isize; - if n != 0 && detail.offset >= -len && detail.offset < len { - // It's being advanced and it hasn't stopped. - detail.offset = std::cmp::max(-(len + 1), std::cmp::min(detail.offset + n, len)); - }; + detail.advance(n); } pub fn len(&self) -> usize { @@ -68,42 +117,18 @@ impl AMchangeHashes { pub fn next(&mut self, n: isize) -> Option<&am::ChangeHash> { let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - let len = detail.len as isize; - if detail.offset < -len || detail.offset == len { - // It's stopped. - None - } else { - let slice: &[am::ChangeHash] = unsafe { - std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, detail.len) - }; - let index = (detail.offset + if detail.offset < 0 { len } else { 0 }) as usize; - let value = &slice[index]; - self.advance(n); - Some(value) - } + detail.next(n) } pub fn prev(&mut self, n: isize) -> Option<&am::ChangeHash> { - self.advance(n); let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - let len = detail.len as isize; - if detail.offset < -len || detail.offset == len { - // It's stopped. 
- None - } else { - let slice: &[am::ChangeHash] = unsafe { - std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, detail.len) - }; - let index = (detail.offset + if detail.offset < 0 { len } else { 0 }) as usize; - Some(&slice[index]) - } + detail.prev(n) } - pub fn reverse(&self) -> Self { + pub fn reversed(&self) -> Self { let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; Self { - ptr: self.ptr, - detail: Detail::new(detail.len, -(detail.offset + 1)).into(), + detail: detail.reversed().into(), } } } @@ -111,26 +136,26 @@ impl AMchangeHashes { impl AsRef<[am::ChangeHash]> for AMchangeHashes { fn as_ref(&self) -> &[am::ChangeHash] { let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, detail.len) } + unsafe { std::slice::from_raw_parts(detail.ptr as *const am::ChangeHash, detail.len) } } } impl Default for AMchangeHashes { fn default() -> Self { Self { - ptr: std::ptr::null(), - detail: [0; USIZE_USIZE_], + detail: [0; USIZE_USIZE_USIZE_], } } } /// \memberof AMchangeHashes -/// \brief Advances/rewinds an iterator over a sequence of change hashes by at -/// most \p |n| positions. +/// \brief Advances an iterator over a sequence of change hashes by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction. /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. -/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. /// \pre \p change_hashes must be a valid address. 
/// \internal /// @@ -179,15 +204,16 @@ pub unsafe extern "C" fn AMchangeHashesCmp( } /// \memberof AMchangeHashes -/// \brief Gets the change hash at the current position of an iterator over -/// a sequence of change hashes and then advances/rewinds it by at most -/// \p |n| positions. +/// \brief Gets the change hash at the current position of an iterator over a +/// sequence of change hashes and then advances it by at most \p |n| +/// positions where the sign of \p n is relative to the iterator's +/// direction. /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. -/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. /// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes -/// was previously advanced/rewound past its forward/backward limit. +/// was previously advanced past its forward/reverse limit. /// \pre \p change_hashes must be a valid address. /// \internal /// @@ -207,15 +233,16 @@ pub unsafe extern "C" fn AMchangeHashesNext( } /// \memberof AMchangeHashes -/// \brief Advances/rewinds an iterator over a sequence of change hashes by at -/// most \p |n| positions and then gets the change hash at its current +/// \brief Advances an iterator over a sequence of change hashes by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction and then gets the change hash at its new /// position. /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. -/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. -/// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes -/// is presently advanced/rewound past its forward/backward limit. 
+/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes is +/// presently advanced past its forward/reverse limit. /// \pre \p change_hashes must be a valid address. /// \internal /// @@ -255,7 +282,8 @@ pub unsafe extern "C" fn AMchangeHashesSize(change_hashes: *const AMchangeHashes } /// \memberof AMchangeHashes -/// \brief Creates a reversed copy of a change hashes iterator. +/// \brief Creates an iterator over the same sequence of change hashes as the +/// given one but with the opposite position and direction. /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. /// \return An `AMchangeHashes` struct @@ -265,11 +293,11 @@ pub unsafe extern "C" fn AMchangeHashesSize(change_hashes: *const AMchangeHashes /// #Safety /// change_hashes must be a pointer to a valid AMchangeHashes #[no_mangle] -pub unsafe extern "C" fn AMchangeHashesReverse( +pub unsafe extern "C" fn AMchangeHashesReversed( change_hashes: *const AMchangeHashes, ) -> AMchangeHashes { if let Some(change_hashes) = change_hashes.as_ref() { - change_hashes.reverse() + change_hashes.reversed() } else { AMchangeHashes::default() } From 74632a05122926b45f1690dbf5d5ff9b8b455eb0 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sun, 5 Jun 2022 14:37:32 -0700 Subject: [PATCH 441/730] Move the `AMchanges.ptr` field into the `changes::Detail` struct. Change `AMchangesAdvance()`, `AMchangesNext()` and `AMchangesPrev()` to interpret their `n` argument relatively instead of absolutely. Renamed `AMchangesReverse()` to `AMchangesReversed()`. Updated the C API's documentation for the `AMchanges` struct. 
--- automerge-c/src/changes.rs | 191 +++++++++++++++++++++---------------- 1 file changed, 111 insertions(+), 80 deletions(-) diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs index bdc5e9bb..f3615557 100644 --- a/automerge-c/src/changes.rs +++ b/automerge-c/src/changes.rs @@ -9,6 +9,7 @@ use crate::change::AMchange; struct Detail { len: usize, offset: isize, + ptr: *const c_void, storage: *mut c_void, } @@ -17,54 +18,119 @@ struct Detail { /// propagate the name of a constant initialized from it so if the /// constant's name is a symbolic representation of the value it can be /// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_: usize = size_of::(); +pub const USIZE_USIZE_USIZE_USIZE_: usize = size_of::(); impl Detail { - fn new(len: usize, offset: isize, storage: &mut BTreeMap) -> Self { + fn new(changes: &[am::Change], offset: isize, storage: &mut BTreeMap) -> Self { let storage: *mut BTreeMap = storage; Self { - len, + len: changes.len(), offset, + ptr: changes.as_ptr() as *const c_void, storage: storage as *mut c_void, } } + + pub fn advance(&mut self, n: isize) { + if n != 0 && !self.is_stopped() { + let n = if self.offset < 0 { -n } else { n }; + let len = self.len as isize; + self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); + }; + } + + pub fn get_index(&self) -> usize { + (self.offset + + if self.offset < 0 { + self.len as isize + } else { + 0 + }) as usize + } + + pub fn next(&mut self, n: isize) -> Option<*const AMchange> { + if self.is_stopped() { + return None; + } + let slice: &mut [am::Change] = + unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, self.len) }; + let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + let index = self.get_index(); + let value = match storage.get_mut(&index) { + Some(value) => value, + None => { + storage.insert(index, AMchange::new(&mut slice[index])); + storage.get_mut(&index).unwrap() + } + }; 
+ self.advance(n); + Some(value) + } + + pub fn is_stopped(&self) -> bool { + let len = self.len as isize; + self.offset < -len || self.offset == len + } + + pub fn prev(&mut self, n: isize) -> Option<*const AMchange> { + self.advance(n); + if self.is_stopped() { + return None; + } + let slice: &mut [am::Change] = + unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, self.len) }; + let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + let index = self.get_index(); + Some(match storage.get_mut(&index) { + Some(value) => value, + None => { + storage.insert(index, AMchange::new(&mut slice[index])); + storage.get_mut(&index).unwrap() + } + }) + } + + pub fn reversed(&self) -> Self { + Self { + len: self.len, + offset: -(self.offset + 1), + ptr: self.ptr, + storage: self.storage, + } + } } -impl From for [u8; USIZE_USIZE_USIZE_] { +impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { fn from(detail: Detail) -> Self { unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) - .try_into() - .unwrap() + std::slice::from_raw_parts( + (&detail as *const Detail) as *const u8, + USIZE_USIZE_USIZE_USIZE_, + ) + .try_into() + .unwrap() } } } /// \struct AMchanges -/// \brief A bidirectional iterator over a sequence of changes. +/// \brief A random-access iterator over a sequence of changes. #[repr(C)] pub struct AMchanges { - /// A pointer to the first change or `NULL`. - ptr: *const c_void, /// Reserved. 
- detail: [u8; USIZE_USIZE_USIZE_], + detail: [u8; USIZE_USIZE_USIZE_USIZE_], } impl AMchanges { pub fn new(changes: &[am::Change], storage: &mut BTreeMap) -> Self { Self { - ptr: changes.as_ptr() as *const c_void, - detail: Detail::new(changes.len(), 0, storage).into(), + detail: Detail::new(changes, 0, storage).into(), } } pub fn advance(&mut self, n: isize) { let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - let len = detail.len as isize; - if n != 0 && detail.offset >= -len && detail.offset < len { - // It's being advanced and it hasn't stopped. - detail.offset = std::cmp::max(-(len + 1), std::cmp::min(detail.offset + n, len)); - }; + detail.advance(n); } pub fn len(&self) -> usize { @@ -74,55 +140,18 @@ impl AMchanges { pub fn next(&mut self, n: isize) -> Option<*const AMchange> { let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - let len = detail.len as isize; - if detail.offset < -len || detail.offset == len { - // It's stopped. - None - } else { - let slice: &mut [am::Change] = - unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, detail.len) }; - let index = (detail.offset + if detail.offset < 0 { len } else { 0 }) as usize; - let storage = unsafe { &mut *(detail.storage as *mut BTreeMap) }; - let value = match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, AMchange::new(&mut slice[index])); - storage.get_mut(&index).unwrap() - } - }; - self.advance(n); - Some(value) - } + detail.next(n) } pub fn prev(&mut self, n: isize) -> Option<*const AMchange> { - self.advance(n); let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - let len = detail.len as isize; - if detail.offset < -len || detail.offset == len { - // It's stopped. 
- None - } else { - let slice: &mut [am::Change] = - unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, detail.len) }; - let index = (detail.offset + if detail.offset < 0 { len } else { 0 }) as usize; - let storage = unsafe { &mut *(detail.storage as *mut BTreeMap) }; - Some(match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, AMchange::new(&mut slice[index])); - storage.get_mut(&index).unwrap() - } - }) - } + detail.prev(n) } - pub fn reverse(&self) -> Self { + pub fn reversed(&self) -> Self { let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - let storage = unsafe { &mut *(detail.storage as *mut BTreeMap) }; Self { - ptr: self.ptr, - detail: Detail::new(detail.len, -(detail.offset + 1), storage).into(), + detail: detail.reversed().into(), } } } @@ -130,26 +159,26 @@ impl AMchanges { impl AsRef<[am::Change]> for AMchanges { fn as_ref(&self) -> &[am::Change] { let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(self.ptr as *const am::Change, detail.len) } + unsafe { std::slice::from_raw_parts(detail.ptr as *const am::Change, detail.len) } } } impl Default for AMchanges { fn default() -> Self { Self { - ptr: std::ptr::null(), - detail: [0; USIZE_USIZE_USIZE_], + detail: [0; USIZE_USIZE_USIZE_USIZE_], } } } /// \memberof AMchanges -/// \brief Advances/rewinds an iterator over a sequence of changes by at most -/// \p |n| positions. +/// \brief Advances an iterator over a sequence of changes by at most \p |n| +/// positions where the sign of \p n is relative to the iterator's +/// direction. /// /// \param[in] changes A pointer to an `AMchanges` struct. -/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. /// \pre \p changes must be a valid address. 
/// \internal /// @@ -189,14 +218,14 @@ pub unsafe extern "C" fn AMchangesEqual( /// \memberof AMchanges /// \brief Gets the change at the current position of an iterator over a -/// sequence of changes and then advances/rewinds it by at most \p |n| -/// positions. +/// sequence of changes and then advances it by at most \p |n| positions +/// where the sign of \p n is relative to the iterator's direction. /// /// \param[in] changes A pointer to an `AMchanges` struct. -/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. /// \return A pointer to an `AMchange` struct that's `NULL` when \p changes was -/// previously advanced/rewound past its forward/backward limit. +/// previously advanced past its forward/reverse limit. /// \pre \p changes must be a valid address. /// \internal /// @@ -213,14 +242,15 @@ pub unsafe extern "C" fn AMchangesNext(changes: *mut AMchanges, n: isize) -> *co } /// \memberof AMchanges -/// \brief Advances/rewinds an iterator over a sequence of changes by at most -/// \p |n| positions and then gets the change at its current position. +/// \brief Advances an iterator over a sequence of changes by at most \p |n| +/// positions where the sign of \p n is relative to the iterator's +/// direction and then gets the change at its new position. /// /// \param[in] changes A pointer to an `AMchanges` struct. -/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. /// \return A pointer to an `AMchange` struct that's `NULL` when \p changes is -/// presently advanced/rewound past its forward/backward limit. +/// presently advanced past its forward/reverse limit. 
/// \pre \p changes must be a valid address. /// \internal /// @@ -256,19 +286,20 @@ pub unsafe extern "C" fn AMchangesSize(changes: *const AMchanges) -> usize { } /// \memberof AMchanges -/// \brief Creates a reversed copy of a changes iterator. +/// \brief Creates an iterator over the same sequence of changes as the given +/// one but with the opposite position and direction. /// /// \param[in] changes A pointer to an `AMchanges` struct. -/// \return An `AMchanges` struct +/// \return An `AMchanges` struct. /// \pre \p changes must be a valid address. /// \internal /// /// #Safety /// changes must be a pointer to a valid AMchanges #[no_mangle] -pub unsafe extern "C" fn AMchangesReverse(changes: *const AMchanges) -> AMchanges { +pub unsafe extern "C" fn AMchangesReversed(changes: *const AMchanges) -> AMchanges { if let Some(changes) = changes.as_ref() { - changes.reverse() + changes.reversed() } else { AMchanges::default() } From 8222ec17056497278c69fbbc2647e62e40d983dc Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sun, 5 Jun 2022 14:41:48 -0700 Subject: [PATCH 442/730] Move the `AMsyncHaves.ptr` field into the `sync::haves::Detail` struct. Change `AMsyncHavesAdvance()`, `AMsyncHavesNext()` and `AMsyncHavesPrev()` to interpret their `n` argument relatively instead of absolutely. Renamed `AMsyncHavesReverse()` to `AMsyncHavesReversed()`. Updated the C API's documentation for the `AMsyncHaves` struct. 
--- automerge-c/src/sync/haves.rs | 203 ++++++++++++++++++++-------------- 1 file changed, 118 insertions(+), 85 deletions(-) diff --git a/automerge-c/src/sync/haves.rs b/automerge-c/src/sync/haves.rs index 619f92a5..62df8b1d 100644 --- a/automerge-c/src/sync/haves.rs +++ b/automerge-c/src/sync/haves.rs @@ -9,6 +9,7 @@ use crate::sync::have::AMsyncHave; struct Detail { len: usize, offset: isize, + ptr: *const c_void, storage: *mut c_void, } @@ -17,54 +18,123 @@ struct Detail { /// propagate the name of a constant initialized from it so if the /// constant's name is a symbolic representation of the value it can be /// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_: usize = size_of::(); +pub const USIZE_USIZE_USIZE_USIZE_: usize = size_of::(); impl Detail { - fn new(len: usize, offset: isize, storage: &mut BTreeMap) -> Self { + fn new( + haves: &[am::sync::Have], + offset: isize, + storage: &mut BTreeMap, + ) -> Self { let storage: *mut BTreeMap = storage; Self { - len, + len: haves.len(), offset, + ptr: haves.as_ptr() as *const c_void, storage: storage as *mut c_void, } } + + pub fn advance(&mut self, n: isize) { + if n != 0 && !self.is_stopped() { + let n = if self.offset < 0 { -n } else { n }; + let len = self.len as isize; + self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); + }; + } + + pub fn get_index(&self) -> usize { + (self.offset + + if self.offset < 0 { + self.len as isize + } else { + 0 + }) as usize + } + + pub fn next(&mut self, n: isize) -> Option<*const AMsyncHave> { + if self.is_stopped() { + return None; + } + let slice: &[am::sync::Have] = + unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, self.len) }; + let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + let index = self.get_index(); + let value = match storage.get_mut(&index) { + Some(value) => value, + None => { + storage.insert(index, AMsyncHave::new(&slice[index])); + 
storage.get_mut(&index).unwrap() + } + }; + self.advance(n); + Some(value) + } + + pub fn is_stopped(&self) -> bool { + let len = self.len as isize; + self.offset < -len || self.offset == len + } + + pub fn prev(&mut self, n: isize) -> Option<*const AMsyncHave> { + self.advance(n); + if self.is_stopped() { + return None; + } + let slice: &[am::sync::Have] = + unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, self.len) }; + let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + let index = self.get_index(); + Some(match storage.get_mut(&index) { + Some(value) => value, + None => { + storage.insert(index, AMsyncHave::new(&slice[index])); + storage.get_mut(&index).unwrap() + } + }) + } + + pub fn reversed(&self) -> Self { + Self { + len: self.len, + offset: -(self.offset + 1), + ptr: self.ptr, + storage: self.storage, + } + } } -impl From for [u8; USIZE_USIZE_USIZE_] { +impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { fn from(detail: Detail) -> Self { unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) - .try_into() - .unwrap() + std::slice::from_raw_parts( + (&detail as *const Detail) as *const u8, + USIZE_USIZE_USIZE_USIZE_, + ) + .try_into() + .unwrap() } } } /// \struct AMsyncHaves -/// \brief A bidirectional iterator over a sequence of synchronization haves. +/// \brief A random-access iterator over a sequence of synchronization haves. #[repr(C)] pub struct AMsyncHaves { - /// A pointer to the first synchronization have or `NULL`. - ptr: *const c_void, /// Reserved. 
- detail: [u8; USIZE_USIZE_USIZE_], + detail: [u8; USIZE_USIZE_USIZE_USIZE_], } impl AMsyncHaves { pub fn new(haves: &[am::sync::Have], storage: &mut BTreeMap) -> Self { Self { - ptr: haves.as_ptr() as *const c_void, - detail: Detail::new(haves.len(), 0, storage).into(), + detail: Detail::new(haves, 0, storage).into(), } } pub fn advance(&mut self, n: isize) { let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - let len = detail.len as isize; - if n != 0 && detail.offset >= -len && detail.offset < len { - // It's being advanced and it hasn't stopped. - detail.offset = std::cmp::max(-(len + 1), std::cmp::min(detail.offset + n, len)); - }; + detail.advance(n); } pub fn len(&self) -> usize { @@ -74,57 +144,18 @@ impl AMsyncHaves { pub fn next(&mut self, n: isize) -> Option<*const AMsyncHave> { let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - let len = detail.len as isize; - if detail.offset < -len || detail.offset == len { - // It's stopped. - None - } else { - let slice: &[am::sync::Have] = unsafe { - std::slice::from_raw_parts(self.ptr as *const am::sync::Have, detail.len) - }; - let index = (detail.offset + if detail.offset < 0 { len } else { 0 }) as usize; - let storage = unsafe { &mut *(detail.storage as *mut BTreeMap) }; - let value = match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, AMsyncHave::new(&slice[index])); - storage.get_mut(&index).unwrap() - } - }; - self.advance(n); - Some(value) - } + detail.next(n) } pub fn prev(&mut self, n: isize) -> Option<*const AMsyncHave> { - self.advance(n); let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - let len = detail.len as isize; - if detail.offset < -len || detail.offset == len { - // It's stopped. 
- None - } else { - let slice: &[am::sync::Have] = unsafe { - std::slice::from_raw_parts(self.ptr as *const am::sync::Have, detail.len) - }; - let index = (detail.offset + if detail.offset < 0 { len } else { 0 }) as usize; - let storage = unsafe { &mut *(detail.storage as *mut BTreeMap) }; - Some(match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, AMsyncHave::new(&slice[index])); - storage.get_mut(&index).unwrap() - } - }) - } + detail.prev(n) } - pub fn reverse(&self) -> Self { + pub fn reversed(&self) -> Self { let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - let storage = unsafe { &mut *(detail.storage as *mut BTreeMap) }; Self { - ptr: self.ptr, - detail: Detail::new(detail.len, -(detail.offset + 1), storage).into(), + detail: detail.reversed().into(), } } } @@ -132,26 +163,26 @@ impl AMsyncHaves { impl AsRef<[am::sync::Have]> for AMsyncHaves { fn as_ref(&self) -> &[am::sync::Have] { let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, detail.len) } + unsafe { std::slice::from_raw_parts(detail.ptr as *const am::sync::Have, detail.len) } } } impl Default for AMsyncHaves { fn default() -> Self { Self { - ptr: std::ptr::null(), - detail: [0; USIZE_USIZE_USIZE_], + detail: [0; USIZE_USIZE_USIZE_USIZE_], } } } /// \memberof AMsyncHaves -/// \brief Advances/rewinds an iterator over a sequence of synchronization -/// haves by at most \p |n| positions. +/// \brief Advances an iterator over a sequence of synchronization haves by at +/// most \p |n| positions where the sign of \p n is relative to the +/// iterator's direction. /// /// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. -/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. 
+/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. /// \pre \p sync_haves must be a valid address. /// \internal /// @@ -191,15 +222,16 @@ pub unsafe extern "C" fn AMsyncHavesEqual( /// \memberof AMsyncHaves /// \brief Gets the synchronization have at the current position of an iterator -/// over a sequence of synchronization haves and then advances/rewinds -/// it by at most \p |n| positions. +/// over a sequence of synchronization haves and then advances it by at +/// most \p |n| positions where the sign of \p n is relative to the +/// iterator's direction. /// /// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. -/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. /// \return A pointer to an `AMsyncHave` struct that's `NULL` when -/// \p sync_haves was previously advanced/rewound past its -/// forward/backward limit. +/// \p sync_haves was previously advanced past its forward/reverse +/// limit. /// \pre \p sync_haves must be a valid address. /// \internal /// @@ -219,16 +251,16 @@ pub unsafe extern "C" fn AMsyncHavesNext( } /// \memberof AMsyncHaves -/// \brief Advances/rewinds an iterator over a sequence of synchronization -/// haves by at most \p |n| positions and then gets the synchronization -/// have at its current position. +/// \brief Advances an iterator over a sequence of synchronization haves by at +/// most \p |n| positions where the sign of \p n is relative to the +/// iterator's direction and then gets the synchronization have at its +/// new position. /// /// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. -/// \param[in] n The direction (\p -n -> backward, \p +n -> forward) and maximum -/// number of positions to advance/rewind. 
-/// \return A pointer to an `AMsyncHave` struct that's `NULL` when \p sync_haves -/// is presently advanced/rewound past its -/// forward/backward limit. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \return A pointer to an `AMsyncHave` struct that's `NULL` when +/// \p sync_haves is presently advanced past its forward/reverse limit. /// \pre \p sync_haves must be a valid address. /// \internal /// @@ -268,7 +300,8 @@ pub unsafe extern "C" fn AMsyncHavesSize(sync_haves: *const AMsyncHaves) -> usiz } /// \memberof AMsyncHaves -/// \brief Creates a reversed copy of a synchronization haves iterator. +/// \brief Creates an iterator over the same sequence of synchronization haves +/// as the given one but with the opposite position and direction. /// /// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. /// \return An `AMsyncHaves` struct @@ -278,9 +311,9 @@ pub unsafe extern "C" fn AMsyncHavesSize(sync_haves: *const AMsyncHaves) -> usiz /// #Safety /// sync_haves must be a pointer to a valid AMsyncHaves #[no_mangle] -pub unsafe extern "C" fn AMsyncHavesReverse(sync_haves: *const AMsyncHaves) -> AMsyncHaves { +pub unsafe extern "C" fn AMsyncHavesReversed(sync_haves: *const AMsyncHaves) -> AMsyncHaves { if let Some(sync_haves) = sync_haves.as_ref() { - sync_haves.reverse() + sync_haves.reversed() } else { AMsyncHaves::default() } From 5c1cbc8eeb4c2c57be7e24a1b95c26b2701861c5 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 6 Jun 2022 18:15:09 +0100 Subject: [PATCH 443/730] Build c docs in CI --- .github/workflows/docs.yaml | 20 ++++++++++++++++++-- flake.nix | 2 ++ scripts/ci/cmake-docs | 10 ++++++++++ scripts/ci/run | 1 + 4 files changed, 31 insertions(+), 2 deletions(-) create mode 100755 scripts/ci/cmake-docs diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 1e928e6e..e7aa4549 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml 
@@ -23,17 +23,33 @@ jobs: uses: Swatinem/rust-cache@v1 - name: Clean docs dir + run: rm -rf docs + shell: bash + + - name: Clean Rust docs dir uses: actions-rs/cargo@v1 with: command: clean args: --doc - - name: Build docs + - name: Build Rust docs uses: actions-rs/cargo@v1 with: command: doc args: --workspace --all-features --no-deps + - name: Move Rust docs + run: mkdir -p docs && mv target/doc/* docs/. + shell: bash + + - name: Build C docs + run: ./scripts/ci/cmake-docs + shell: bash + + - name: Move C docs + run: mkdir -p docs/automerge-c && mv automerge-c/build/src/html docs/automerge-c + shell: bash + - name: Configure root page run: echo '' > target/doc/index.html @@ -41,4 +57,4 @@ jobs: uses: peaceiris/actions-gh-pages@v3 with: github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./target/doc + publish_dir: ./docs diff --git a/flake.nix b/flake.nix index cc1b420f..938225b7 100644 --- a/flake.nix +++ b/flake.nix @@ -52,8 +52,10 @@ nodejs yarn + # c deps cmake cmocka + doxygen rnix-lsp nixpkgs-fmt diff --git a/scripts/ci/cmake-docs b/scripts/ci/cmake-docs new file mode 100755 index 00000000..26f49e9c --- /dev/null +++ b/scripts/ci/cmake-docs @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +set -eoux pipefail + +cmake -E make_directory automerge-c/build +cd automerge-c/build +cmake -B . -S .. +cmake --build . 
--target automerge_docs + +echo "Try opening automerge-c/build/src/html/index.html" diff --git a/scripts/ci/run b/scripts/ci/run index 3c3f2d6d..c4831fdc 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -9,3 +9,4 @@ set -eou pipefail ./scripts/ci/wasm_tests ./scripts/ci/js_tests ./scripts/ci/cmake-build Release static +./scripts/ci/cmake-docs From 00ab8538130a4906388c65c9bb326568cb4cd6ae Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 6 Jun 2022 18:40:25 +0100 Subject: [PATCH 444/730] Add cmake docs deps --- .github/workflows/docs.yaml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index e7aa4549..5d0f4e7e 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -42,6 +42,14 @@ jobs: run: mkdir -p docs && mv target/doc/* docs/. shell: bash + - name: Install doxygen + run: sudo apt-get install -y doxygen + shell: bash + + - name: Install cmocka + run: sudo apt-get install -y cmocka + shell: bash + - name: Build C docs run: ./scripts/ci/cmake-docs shell: bash From 7d2be219ac1413fde127e84ea7531a32a71ae742 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 6 Jun 2022 19:05:02 +0100 Subject: [PATCH 445/730] Update cmocka to be libcmocka0 for install --- .github/workflows/docs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 5d0f4e7e..0c9a325f 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -47,7 +47,7 @@ jobs: shell: bash - name: Install cmocka - run: sudo apt-get install -y cmocka + run: sudo apt-get install -y libcmocka0 shell: bash - name: Build C docs From 82fe420a10276b1678d2c682789bdf08d17ef42a Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 6 Jun 2022 19:11:07 +0100 Subject: [PATCH 446/730] Use cmocka dev instead of lib --- .github/workflows/docs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 0c9a325f..4cae4174 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -47,7 +47,7 @@ jobs: shell: bash - name: Install cmocka - run: sudo apt-get install -y libcmocka0 + run: sudo apt-get install -y libcmocka-dev shell: bash - name: Build C docs From 7a9786a146c988193b781092619f7f48e372fda0 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 6 Jun 2022 19:35:50 +0100 Subject: [PATCH 447/730] Fix index.html location --- .github/workflows/docs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 4cae4174..050f5fdd 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -59,7 +59,7 @@ jobs: shell: bash - name: Configure root page - run: echo '' > target/doc/index.html + run: echo '' > docs/index.html - name: Deploy docs uses: peaceiris/actions-gh-pages@v3 From 7439a49e370947da8ef5c034335a74aff37f28eb Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 6 Jun 2022 19:49:18 +0100 Subject: [PATCH 448/730] Fix automerge-c html nesting --- .github/workflows/docs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 050f5fdd..3474dd47 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -55,7 +55,7 @@ jobs: shell: bash - name: Move C docs - run: mkdir -p docs/automerge-c && mv automerge-c/build/src/html docs/automerge-c + run: mkdir -p docs/automerge-c && mv automerge-c/build/src/html/* docs/automerge-c/. shell: bash - name: Configure root page From 99ab5b4ed7b68fd220f8254d9795f6b12789b6be Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 7 Jun 2022 00:14:11 -0700 Subject: [PATCH 449/730] Added `AMgetChangesAdded()`. Added `AMpendingOps()`. Added `AMrollback()`. Added `AMsaveIncremental()`. Fixed the `AMmerge()` documentation. 
--- automerge-c/src/doc.rs | 93 +++++++++++++++++++++++++++++++++++++-- automerge-c/src/result.rs | 20 ++++++--- 2 files changed, 104 insertions(+), 9 deletions(-) diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index c57a6ead..f2a8e363 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -275,6 +275,32 @@ pub unsafe extern "C" fn AMgetChanges( to_result(doc.get_changes(have_deps)) } +/// \memberof AMdoc +/// \brief Gets the changes added to a second document that weren't added to +/// a first document. +/// +/// \param[in] doc1 An `AMdoc` struct. +/// \param[in] doc2 An `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. +/// \pre \p doc1 must be a valid address. +/// \pre \p doc2 must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfree()`. +/// \internal +/// +/// # Safety +/// doc1 must be a pointer to a valid AMdoc +/// doc2 must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetChangesAdded( + doc1: *mut AMdoc, + doc2: *mut AMdoc, +) -> *mut AMresult { + let doc1 = to_doc!(doc1); + let doc2 = to_doc!(doc2); + to_result(doc1.get_changes_added(doc2)) +} + /// \memberof AMdoc /// \brief Gets the current heads of a document. /// @@ -396,10 +422,11 @@ pub unsafe extern "C" fn AMloadIncremental( } /// \memberof AMdoc -/// \brief Applies a sequence of changes to a document. +/// \brief Applies all of the changes in \p src which are not in \p dest to +/// \p dest. /// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] changes A pointer to an `AMdoc` struct. +/// \param[in] dest A pointer to an `AMdoc` struct. +/// \param[in] src A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// struct. /// \pre \p dest must be a valid address. 
@@ -469,6 +496,26 @@ pub unsafe extern "C" fn AMobjSizeAt( 0 } +/// \memberof AMdoc +/// \brief Gets the number of pending operations added during a document's +/// current transaction. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return The count of pending operations for \p doc. +/// \pre \p doc must be a valid address. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMpendingOps(doc: *mut AMdoc) -> usize { + if let Some(doc) = doc.as_mut() { + doc.pending_ops() + } else { + 0 + } +} + /// \memberof AMdoc /// \brief Receives a synchronization message from a peer based upon a given /// synchronization state. @@ -498,6 +545,26 @@ pub unsafe extern "C" fn AMreceiveSyncMessage( to_result(doc.receive_sync_message(sync_state.as_mut(), sync_message.as_ref().clone())) } +/// \memberof AMdoc +/// \brief Cancels the pending operations added during a document's current +/// transaction and gets the number of cancellations. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return The count of pending operations for \p doc that were cancelled. +/// \pre \p doc must be a valid address. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMrollback(doc: *mut AMdoc) -> usize { + if let Some(doc) = doc.as_mut() { + doc.rollback() + } else { + 0 + } +} + /// \memberof AMdoc /// \brief Saves the entirety of a document into a compact form. /// @@ -517,6 +584,26 @@ pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { to_result(Ok(doc.save())) } +/// \memberof AMdoc +/// \brief Saves the changes to a document since its last save into a compact +/// form. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing an array of bytes as +/// an `AMbyteSpan` struct. +/// \pre \p doc must be a valid address. 
+/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMsaveIncremental(doc: *mut AMdoc) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(Ok(doc.save_incremental())) +} + /// \memberof AMdoc /// \brief Puts a sequence of bytes as the actor ID value of a document. /// diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 6ef2d015..a889e8d0 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -140,6 +140,12 @@ impl From for AMresult { } } +impl From for *mut AMresult { + fn from(b: AMresult) -> Self { + Box::into_raw(Box::new(b)) + } +} + impl From> for AMresult { fn from(maybe: Option<&am::Change>) -> Self { match maybe { @@ -289,9 +295,11 @@ impl From, am::AutomergeError>> for AMresult { } } -impl From> for AMresult { - fn from(bytes: Vec) -> Self { - AMresult::Value(am::Value::bytes(bytes), None) +impl From> for AMresult { + fn from(changes: Vec<&am::Change>) -> Self { + let changes: Vec = + changes.iter().map(|&change| change.clone()).collect(); + AMresult::Changes(changes, BTreeMap::new()) } } @@ -301,9 +309,9 @@ impl From> for AMresult { } } -impl From for *mut AMresult { - fn from(b: AMresult) -> Self { - Box::into_raw(Box::new(b)) +impl From> for AMresult { + fn from(bytes: Vec) -> Self { + AMresult::Value(am::Value::bytes(bytes), None) } } From 0ecb9e7dce77a15705b54576c3ce3fee19fcb75c Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 7 Jun 2022 00:14:42 -0700 Subject: [PATCH 450/730] Added `AMmapIncrement()`. 
--- automerge-c/src/doc/map.rs | 29 +++++ automerge-c/test/map_tests.c | 224 +++++++++++++++++++++++++++++++++++ 2 files changed, 253 insertions(+) create mode 100644 automerge-c/test/map_tests.c diff --git a/automerge-c/src/doc/map.rs b/automerge-c/src/doc/map.rs index 848f1ef8..d7b32ce4 100644 --- a/automerge-c/src/doc/map.rs +++ b/automerge-c/src/doc/map.rs @@ -61,6 +61,35 @@ pub unsafe extern "C" fn AMmapGet( to_result(doc.get(to_obj_id!(obj_id), to_str(key))) } +/// \memberof AMdoc +/// \brief Increments a counter for a key in a map object by the given value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre \p key must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// key must be a c string of the map key to be used +#[no_mangle] +pub unsafe extern "C" fn AMmapIncrement( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: *const c_char, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.increment(to_obj_id!(obj_id), to_str(key), value)) +} + /// \memberof AMdoc /// \brief Puts a boolean as the value of a key in a map object. 
/// diff --git a/automerge-c/test/map_tests.c b/automerge-c/test/map_tests.c new file mode 100644 index 00000000..234a5523 --- /dev/null +++ b/automerge-c/test/map_tests.c @@ -0,0 +1,224 @@ +#include +#include +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include "group_state.h" +#include "macro_utils.h" + +static void test_AMmapIncrement(void** state) { + GroupState* group_state = *state; + AMresult* res = AMmapPutCounter(group_state->doc, AM_ROOT, "Counter", 0); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 0); + assert_int_equal(AMresultValue(res).tag, AM_VALUE_VOID); + AMfree(res); + res = AMmapGet(group_state->doc, AM_ROOT, "Counter"); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 1); + AMvalue value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_COUNTER); + assert_int_equal(value.counter, 0); + AMfree(res); + res = AMmapIncrement(group_state->doc, AM_ROOT, "Counter", 3); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 0); + assert_int_equal(AMresultValue(res).tag, AM_VALUE_VOID); + AMfree(res); + res = AMmapGet(group_state->doc, AM_ROOT, "Counter"); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 1); + value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_COUNTER); + assert_int_equal(value.counter, 3); + AMfree(res); +} + +#define test_AMmapPut(suffix) test_AMmapPut ## suffix + +#define static_void_test_AMmapPut(suffix, member, scalar_value) \ +static void test_AMmapPut ## suffix(void **state) { \ + GroupState* group_state = *state; \ + AMresult* res = AMmapPut ## suffix( \ + group_state->doc, \ + AM_ROOT, \ + #suffix, \ + scalar_value \ + ); \ + if (AMresultStatus(res) 
!= AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 0); \ + AMvalue value = AMresultValue(res); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ + AMfree(res); \ + res = AMmapGet(group_state->doc, AM_ROOT, #suffix); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + value = AMresultValue(res); \ + assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ + assert_true(value.member == scalar_value); \ + AMfree(res); \ +} + +#define test_AMmapPutObject(label) test_AMmapPutObject_ ## label + +#define static_void_test_AMmapPutObject(label) \ +static void test_AMmapPutObject_ ## label(void **state) { \ + GroupState* group_state = *state; \ + AMresult* res = AMmapPutObject( \ + group_state->doc, \ + AM_ROOT, \ + #label, \ + AMobjType_tag(#label) \ + ); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + AMvalue value = AMresultValue(res); \ + assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ + assert_non_null(value.obj_id); \ + assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ + AMfree(res); \ +} + +static_void_test_AMmapPut(Bool, boolean, true) + +static void test_AMmapPutBytes(void **state) { + static char const* const KEY = "Bytes"; + static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; + static size_t const BYTES_SIZE = sizeof(BYTES_VALUE) / sizeof(uint8_t); + + GroupState* group_state = *state; + AMresult* res = AMmapPutBytes( + group_state->doc, + AM_ROOT, + KEY, + BYTES_VALUE, + BYTES_SIZE + ); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 0); + AMvalue value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_VOID); + AMfree(res); + res = AMmapGet(group_state->doc, AM_ROOT, KEY); + if (AMresultStatus(res) 
!= AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 1); + value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_BYTES); + assert_int_equal(value.bytes.count, BYTES_SIZE); + assert_memory_equal(value.bytes.src, BYTES_VALUE, BYTES_SIZE); + AMfree(res); +} + +static_void_test_AMmapPut(Counter, counter, INT64_MAX) + +static_void_test_AMmapPut(F64, f64, DBL_MAX) + +static_void_test_AMmapPut(Int, int_, INT64_MAX) + +static void test_AMmapPutNull(void **state) { + static char const* const KEY = "Null"; + + GroupState* group_state = *state; + AMresult* res = AMmapPutNull(group_state->doc, AM_ROOT, KEY); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 0); + AMvalue value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_VOID); + AMfree(res); + res = AMmapGet(group_state->doc, AM_ROOT, KEY); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 1); + value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_NULL); + AMfree(res); +} + +static_void_test_AMmapPutObject(List) + +static_void_test_AMmapPutObject(Map) + +static_void_test_AMmapPutObject(Text) + +static void test_AMmapPutStr(void **state) { + static char const* const KEY = "Str"; + static char const* const STR_VALUE = "Hello, world!"; + size_t const STR_LEN = strlen(STR_VALUE); + + GroupState* group_state = *state; + AMresult* res = AMmapPutStr( + group_state->doc, + AM_ROOT, + KEY, + STR_VALUE + ); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 0); + AMvalue value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_VOID); + AMfree(res); + res = AMmapGet(group_state->doc, AM_ROOT, KEY); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + 
assert_int_equal(AMresultSize(res), 1); + value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_STR); + assert_int_equal(strlen(value.str), STR_LEN); + assert_memory_equal(value.str, STR_VALUE, STR_LEN + 1); + AMfree(res); +} + +static_void_test_AMmapPut(Timestamp, timestamp, INT64_MAX) + +static_void_test_AMmapPut(Uint, uint, UINT64_MAX) + +int run_map_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMmapIncrement), + cmocka_unit_test(test_AMmapPut(Bool)), + cmocka_unit_test(test_AMmapPutBytes), + cmocka_unit_test(test_AMmapPut(Counter)), + cmocka_unit_test(test_AMmapPut(F64)), + cmocka_unit_test(test_AMmapPut(Int)), + cmocka_unit_test(test_AMmapPutNull), + cmocka_unit_test(test_AMmapPutObject(List)), + cmocka_unit_test(test_AMmapPutObject(Map)), + cmocka_unit_test(test_AMmapPutObject(Text)), + cmocka_unit_test(test_AMmapPutStr), + cmocka_unit_test(test_AMmapPut(Timestamp)), + cmocka_unit_test(test_AMmapPut(Uint)), + }; + + return cmocka_run_group_tests(tests, group_setup, group_teardown); +} From 2e84c6e9efbb783bbb55c5d93bb44f29d4af3569 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 7 Jun 2022 00:15:37 -0700 Subject: [PATCH 451/730] Added `AMlistIncrement()`. --- automerge-c/src/doc/list.rs | 29 ++++ automerge-c/test/list_tests.c | 272 ++++++++++++++++++++++++++++++++++ 2 files changed, 301 insertions(+) create mode 100644 automerge-c/test/list_tests.c diff --git a/automerge-c/src/doc/list.rs b/automerge-c/src/doc/list.rs index 41265eb4..bbd999e3 100644 --- a/automerge-c/src/doc/list.rs +++ b/automerge-c/src/doc/list.rs @@ -58,6 +58,35 @@ pub unsafe extern "C" fn AMlistGet( to_result(doc.get(to_obj_id!(obj_id), index)) } +/// \memberof AMdoc +/// \brief Increments a counter at an index in a list object by the given +/// value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. 
+/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistIncrement( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, + value: i64, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.increment(to_obj_id!(obj_id), index, value)) +} + /// \memberof AMdoc /// \brief Puts a boolean as the value at an index in a list object. /// diff --git a/automerge-c/test/list_tests.c b/automerge-c/test/list_tests.c new file mode 100644 index 00000000..2f4a0e80 --- /dev/null +++ b/automerge-c/test/list_tests.c @@ -0,0 +1,272 @@ +#include +#include +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include "group_state.h" +#include "macro_utils.h" + +static void test_AMlistIncrement(void** state) { + GroupState* group_state = *state; + AMresult* res = AMlistPutCounter(group_state->doc, AM_ROOT, 0, true, 0); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 0); + assert_int_equal(AMresultValue(res).tag, AM_VALUE_VOID); + AMfree(res); + res = AMlistGet(group_state->doc, AM_ROOT, 0); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 1); + AMvalue value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_COUNTER); + assert_int_equal(value.counter, 0); + AMfree(res); + res = AMlistIncrement(group_state->doc, AM_ROOT, 0, 3); + 
if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 0); + assert_int_equal(AMresultValue(res).tag, AM_VALUE_VOID); + AMfree(res); + res = AMlistGet(group_state->doc, AM_ROOT, 0); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 1); + value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_COUNTER); + assert_int_equal(value.counter, 3); + AMfree(res); +} + +#define test_AMlistPut(suffix, mode) test_AMlistPut ## suffix ## _ ## mode + +#define static_void_test_AMlistPut(suffix, mode, member, scalar_value) \ +static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ + GroupState* group_state = *state; \ + AMresult* res = AMlistPut ## suffix( \ + group_state->doc, AM_ROOT, 0, !strcmp(#mode, "insert"), scalar_value \ + ); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 0); \ + AMvalue value = AMresultValue(res); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ + AMfree(res); \ + res = AMlistGet(group_state->doc, AM_ROOT, 0); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + value = AMresultValue(res); \ + assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ + assert_true(value.member == scalar_value); \ + AMfree(res); \ +} + +#define test_AMlistPutBytes(mode) test_AMlistPutBytes ## _ ## mode + +#define static_void_test_AMlistPutBytes(mode, bytes_value) \ +static void test_AMlistPutBytes_ ## mode(void **state) { \ + static size_t const BYTES_SIZE = sizeof(bytes_value) / sizeof(uint8_t); \ + \ + GroupState* group_state = *state; \ + AMresult* res = AMlistPutBytes( \ + group_state->doc, \ + AM_ROOT, \ + 0, \ + !strcmp(#mode, "insert"), \ + bytes_value, \ + BYTES_SIZE \ + ); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ 
+ fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 0); \ + AMvalue value = AMresultValue(res); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ + AMfree(res); \ + res = AMlistGet(group_state->doc, AM_ROOT, 0); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + value = AMresultValue(res); \ + assert_int_equal(value.tag, AM_VALUE_BYTES); \ + assert_int_equal(value.bytes.count, BYTES_SIZE); \ + assert_memory_equal(value.bytes.src, bytes_value, BYTES_SIZE); \ + AMfree(res); \ +} + +#define test_AMlistPutNull(mode) test_AMlistPutNull_ ## mode + +#define static_void_test_AMlistPutNull(mode) \ +static void test_AMlistPutNull_ ## mode(void **state) { \ + GroupState* group_state = *state; \ + AMresult* res = AMlistPutNull( \ + group_state->doc, AM_ROOT, 0, !strcmp(#mode, "insert")); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 0); \ + AMvalue value = AMresultValue(res); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ + AMfree(res); \ + res = AMlistGet(group_state->doc, AM_ROOT, 0); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + value = AMresultValue(res); \ + assert_int_equal(value.tag, AM_VALUE_NULL); \ + AMfree(res); \ +} + +#define test_AMlistPutObject(label, mode) test_AMlistPutObject_ ## label ## _ ## mode + +#define static_void_test_AMlistPutObject(label, mode) \ +static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ + GroupState* group_state = *state; \ + AMresult* res = AMlistPutObject( \ + group_state->doc, \ + AM_ROOT, \ + 0, \ + !strcmp(#mode, "insert"), \ + AMobjType_tag(#label) \ + ); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + AMvalue 
value = AMresultValue(res); \ + assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ + assert_non_null(value.obj_id); \ + assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ + AMfree(res); \ +} + +#define test_AMlistPutStr(mode) test_AMlistPutStr ## _ ## mode + +#define static_void_test_AMlistPutStr(mode, str_value) \ +static void test_AMlistPutStr_ ## mode(void **state) { \ + static size_t const STR_LEN = strlen(str_value); \ + \ + GroupState* group_state = *state; \ + AMresult* res = AMlistPutStr( \ + group_state->doc, \ + AM_ROOT, \ + 0, \ + !strcmp(#mode, "insert"), \ + str_value \ + ); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 0); \ + AMvalue value = AMresultValue(res); \ + assert_int_equal(value.tag, AM_VALUE_VOID); \ + AMfree(res); \ + res = AMlistGet(group_state->doc, AM_ROOT, 0); \ + if (AMresultStatus(res) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(res)); \ + } \ + assert_int_equal(AMresultSize(res), 1); \ + value = AMresultValue(res); \ + assert_int_equal(value.tag, AM_VALUE_STR); \ + assert_int_equal(strlen(value.str), STR_LEN); \ + assert_memory_equal(value.str, str_value, STR_LEN + 1); \ + AMfree(res); \ +} + +static_void_test_AMlistPut(Bool, insert, boolean, true) + +static_void_test_AMlistPut(Bool, update, boolean, true) + +static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; + +static_void_test_AMlistPutBytes(insert, BYTES_VALUE) + +static_void_test_AMlistPutBytes(update, BYTES_VALUE) + +static_void_test_AMlistPut(Counter, insert, counter, INT64_MAX) + +static_void_test_AMlistPut(Counter, update, counter, INT64_MAX) + +static_void_test_AMlistPut(F64, insert, f64, DBL_MAX) + +static_void_test_AMlistPut(F64, update, f64, DBL_MAX) + +static_void_test_AMlistPut(Int, insert, int_, INT64_MAX) + +static_void_test_AMlistPut(Int, update, int_, INT64_MAX) + +static_void_test_AMlistPutNull(insert) + 
+static_void_test_AMlistPutNull(update) + +static_void_test_AMlistPutObject(List, insert) + +static_void_test_AMlistPutObject(List, update) + +static_void_test_AMlistPutObject(Map, insert) + +static_void_test_AMlistPutObject(Map, update) + +static_void_test_AMlistPutObject(Text, insert) + +static_void_test_AMlistPutObject(Text, update) + +static_void_test_AMlistPutStr(insert, "Hello, world!") + +static_void_test_AMlistPutStr(update, "Hello, world!") + +static_void_test_AMlistPut(Timestamp, insert, timestamp, INT64_MAX) + +static_void_test_AMlistPut(Timestamp, update, timestamp, INT64_MAX) + +static_void_test_AMlistPut(Uint, insert, uint, UINT64_MAX) + +static_void_test_AMlistPut(Uint, update, uint, UINT64_MAX) + +int run_list_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMlistIncrement), + cmocka_unit_test(test_AMlistPut(Bool, insert)), + cmocka_unit_test(test_AMlistPut(Bool, update)), + cmocka_unit_test(test_AMlistPutBytes(insert)), + cmocka_unit_test(test_AMlistPutBytes(update)), + cmocka_unit_test(test_AMlistPut(Counter, insert)), + cmocka_unit_test(test_AMlistPut(Counter, update)), + cmocka_unit_test(test_AMlistPut(F64, insert)), + cmocka_unit_test(test_AMlistPut(F64, update)), + cmocka_unit_test(test_AMlistPut(Int, insert)), + cmocka_unit_test(test_AMlistPut(Int, update)), + cmocka_unit_test(test_AMlistPutNull(insert)), + cmocka_unit_test(test_AMlistPutNull(update)), + cmocka_unit_test(test_AMlistPutObject(List, insert)), + cmocka_unit_test(test_AMlistPutObject(List, update)), + cmocka_unit_test(test_AMlistPutObject(Map, insert)), + cmocka_unit_test(test_AMlistPutObject(Map, update)), + cmocka_unit_test(test_AMlistPutObject(Text, insert)), + cmocka_unit_test(test_AMlistPutObject(Text, update)), + cmocka_unit_test(test_AMlistPutStr(insert)), + cmocka_unit_test(test_AMlistPutStr(update)), + cmocka_unit_test(test_AMlistPut(Timestamp, insert)), + cmocka_unit_test(test_AMlistPut(Timestamp, update)), + 
cmocka_unit_test(test_AMlistPut(Uint, insert)), + cmocka_unit_test(test_AMlistPut(Uint, update)), + }; + + return cmocka_run_group_tests(tests, group_setup, group_teardown); +} From ad7dd07cf7a6b3ed34421d28153c172ae42678d6 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 7 Jun 2022 00:21:22 -0700 Subject: [PATCH 452/730] Simplify the names of the unit test suites' source files. --- automerge-c/test/CMakeLists.txt | 6 +- automerge-c/test/doc_tests.c | 110 ++++++++++++++++++++++++++++++++ automerge-c/test/main.c | 12 ++-- 3 files changed, 119 insertions(+), 9 deletions(-) create mode 100644 automerge-c/test/doc_tests.c diff --git a/automerge-c/test/CMakeLists.txt b/automerge-c/test/CMakeLists.txt index ac43a61f..aab136da 100644 --- a/automerge-c/test/CMakeLists.txt +++ b/automerge-c/test/CMakeLists.txt @@ -5,9 +5,9 @@ find_package(cmocka REQUIRED) add_executable( test_${LIBRARY_NAME} group_state.c - amdoc_property_tests.c - amlistput_tests.c - ammapput_tests.c + doc_tests.c + list_tests.c + map_tests.c macro_utils.c main.c sync_tests.c diff --git a/automerge-c/test/doc_tests.c b/automerge-c/test/doc_tests.c new file mode 100644 index 00000000..7c9cee0c --- /dev/null +++ b/automerge-c/test/doc_tests.c @@ -0,0 +1,110 @@ +#include +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include "group_state.h" + +typedef struct { + GroupState* group_state; + char const* actor_id_str; + uint8_t* actor_id_bytes; + size_t actor_id_size; +} TestState; + +static void hex_to_bytes(char const* hex_str, uint8_t* bytes, size_t const count) { + unsigned int byte; + char const* next = hex_str; + for (size_t index = 0; *next && index != count; next += 2, ++index) { + if (sscanf(next, "%02x", &byte) == 1) { + bytes[index] = (uint8_t)byte; + } + } +} + +static int setup(void** state) { + TestState* test_state = calloc(1, sizeof(TestState)); + group_setup((void**)&test_state->group_state); + test_state->actor_id_str = 
"000102030405060708090a0b0c0d0e0f"; + test_state->actor_id_size = strlen(test_state->actor_id_str) / 2; + test_state->actor_id_bytes = malloc(test_state->actor_id_size); + hex_to_bytes(test_state->actor_id_str, test_state->actor_id_bytes, test_state->actor_id_size); + *state = test_state; + return 0; +} + +static int teardown(void** state) { + TestState* test_state = *state; + group_teardown((void**)&test_state->group_state); + free(test_state->actor_id_bytes); + free(test_state); + return 0; +} + +static void test_AMputActor(void **state) { + TestState* test_state = *state; + GroupState* group_state = test_state->group_state; + AMresult* res = AMsetActor( + group_state->doc, + test_state->actor_id_bytes, + test_state->actor_id_size + ); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 0); + AMvalue value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_VOID); + AMfree(res); + res = AMgetActor(group_state->doc); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 1); + value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); + assert_int_equal(value.actor_id.count, test_state->actor_id_size); + assert_memory_equal(value.actor_id.src, test_state->actor_id_bytes, value.actor_id.count); + AMfree(res); +} + +static void test_AMputActorHex(void **state) { + TestState* test_state = *state; + GroupState* group_state = test_state->group_state; + AMresult* res = AMsetActorHex( + group_state->doc, + test_state->actor_id_str + ); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + assert_int_equal(AMresultSize(res), 0); + AMvalue value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_VOID); + AMfree(res); + res = AMgetActorHex(group_state->doc); + if (AMresultStatus(res) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(res)); + } + 
assert_int_equal(AMresultSize(res), 1); + value = AMresultValue(res); + assert_int_equal(value.tag, AM_VALUE_STR); + assert_int_equal(strlen(value.str), test_state->actor_id_size * 2); + assert_string_equal(value.str, test_state->actor_id_str); + AMfree(res); +} + +int run_doc_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test_setup_teardown(test_AMputActor, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMputActorHex, setup, teardown), + }; + + return cmocka_run_group_tests(tests, NULL, NULL); +} diff --git a/automerge-c/test/main.c b/automerge-c/test/main.c index 15c61acd..8739fe2b 100644 --- a/automerge-c/test/main.c +++ b/automerge-c/test/main.c @@ -6,19 +6,19 @@ /* third-party */ #include -extern int run_AMdoc_property_tests(void); +extern int run_doc_tests(void); -extern int run_AMlistPut_tests(void); +extern int run_list_tests(void); -extern int run_AMmapPut_tests(void); +extern int run_map_tests(void); extern int run_sync_tests(void); int main(void) { return ( - run_AMdoc_property_tests() + - run_AMlistPut_tests() + - run_AMmapPut_tests() + + run_doc_tests() + + run_list_tests() + + run_map_tests() + run_sync_tests() ); } From 1c78aab5f0a596bda913cf029c303ac4605f7727 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 7 Jun 2022 00:23:41 -0700 Subject: [PATCH 453/730] Fixed the `AMsyncStateDecode()` documentation. --- automerge-c/src/sync/state.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-c/src/sync/state.rs b/automerge-c/src/sync/state.rs index c44256c8..6e0c4f9a 100644 --- a/automerge-c/src/sync/state.rs +++ b/automerge-c/src/sync/state.rs @@ -54,7 +54,7 @@ impl From for *mut AMsyncState { } /// \memberof AMsyncState -/// \brief Decodes a sequence of bytes into a synchronizaton state. +/// \brief Decodes a sequence of bytes into a synchronization state. /// /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes in \p src to decode. 
From bfa85050b85a88e5d12cde91c2104e2957008a16 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 7 Jun 2022 00:29:58 -0700 Subject: [PATCH 454/730] Fix Rust code formatting violations. --- automerge-c/src/doc.rs | 5 +---- automerge-c/src/result.rs | 3 +-- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index f2a8e363..477a75cb 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -292,10 +292,7 @@ pub unsafe extern "C" fn AMgetChanges( /// doc1 must be a pointer to a valid AMdoc /// doc2 must be a pointer to a valid AMdoc #[no_mangle] -pub unsafe extern "C" fn AMgetChangesAdded( - doc1: *mut AMdoc, - doc2: *mut AMdoc, -) -> *mut AMresult { +pub unsafe extern "C" fn AMgetChangesAdded(doc1: *mut AMdoc, doc2: *mut AMdoc) -> *mut AMresult { let doc1 = to_doc!(doc1); let doc2 = to_doc!(doc2); to_result(doc1.get_changes_added(doc2)) diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index a889e8d0..0ea12c50 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -297,8 +297,7 @@ impl From, am::AutomergeError>> for AMresult { impl From> for AMresult { fn from(changes: Vec<&am::Change>) -> Self { - let changes: Vec = - changes.iter().map(|&change| change.clone()).collect(); + let changes: Vec = changes.iter().map(|&change| change.clone()).collect(); AMresult::Changes(changes, BTreeMap::new()) } } From b20d04b0f29b97f1a010c90df45f89684e9f710a Mon Sep 17 00:00:00 2001 From: Jerome Gravel-Niquet Date: Wed, 8 Jun 2022 14:00:03 -0400 Subject: [PATCH 455/730] serialize Counter with it's current value instead of start value --- automerge/src/value.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/value.rs b/automerge/src/value.rs index eb6c268b..1df87ace 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -362,7 +362,7 @@ impl Serialize for Counter { where S: Serializer, { - serializer.serialize_i64(self.start) + 
serializer.serialize_i64(self.current) } } From d6bce697a5e0485cefbf86771d66cbfce612b8db Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 8 Jun 2022 11:17:44 +0200 Subject: [PATCH 456/730] normalize edit trace --- edit-trace/src/main.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index 72085fdb..fbe58184 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -29,12 +29,14 @@ fn main() -> Result<(), AutomergeError> { } tx.commit(); let save = Instant::now(); - let bytes = doc.save(); + let _bytes = doc.save(); println!("Saved in {} ms", save.elapsed().as_millis()); +/* let load = Instant::now(); let _ = Automerge::load(&bytes).unwrap(); println!("Loaded in {} ms", load.elapsed().as_millis()); +*/ println!("Done in {} ms", now.elapsed().as_millis()); Ok(()) From 0c9e77b644c502c6e73564b5a4a9e72d833cb73b Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 9 Jun 2022 12:45:20 +0200 Subject: [PATCH 457/730] added a test to ensure we dont break counter serialization --- automerge/src/automerge/tests.rs | 6 ------ automerge/src/legacy/serde_impls/op.rs | 1 + automerge/tests/test.rs | 25 ++++++++++++++++++++++--- edit-trace/src/main.rs | 10 +++++----- 4 files changed, 28 insertions(+), 14 deletions(-) diff --git a/automerge/src/automerge/tests.rs b/automerge/src/automerge/tests.rs index 94f06099..b3ad0ef8 100644 --- a/automerge/src/automerge/tests.rs +++ b/automerge/src/automerge/tests.rs @@ -1199,18 +1199,15 @@ fn load_broken_list() { for action in actions { match action { Action::InsertText(index, c) => { - println!("inserting {} at {}", c, index); tx.insert(&list, index, c).unwrap(); } Action::DelText(index) => { - println!("deleting at {} ", index); tx.delete(&list, index).unwrap(); } } } tx.commit(); let bytes = doc.save(); - println!("doc2 time"); let mut doc2 = Automerge::load(&bytes).unwrap(); let bytes2 = doc2.save(); assert_eq!(doc.text(&list).unwrap(), 
doc2.text(&list).unwrap()); @@ -1252,18 +1249,15 @@ fn load_broken_list_short() { for action in actions { match action { Action::InsertText(index, c) => { - println!("inserting {} at {}", c, index); tx.insert(&list, index, c).unwrap(); } Action::DelText(index) => { - println!("deleting at {} ", index); tx.delete(&list, index).unwrap(); } } } tx.commit(); let bytes = doc.save(); - println!("doc2 time"); let mut doc2 = Automerge::load(&bytes).unwrap(); let bytes2 = doc2.save(); assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); diff --git a/automerge/src/legacy/serde_impls/op.rs b/automerge/src/legacy/serde_impls/op.rs index 9e9472d8..0f7ef8c2 100644 --- a/automerge/src/legacy/serde_impls/op.rs +++ b/automerge/src/legacy/serde_impls/op.rs @@ -48,6 +48,7 @@ impl Serialize for Op { } match &self.action { OpType::Increment(n) => op.serialize_field("value", &n)?, + OpType::Put(ScalarValue::Counter(c)) => op.serialize_field("value", &c.start)?, OpType::Put(value) => op.serialize_field("value", &value)?, _ => {} } diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 8124ec12..d74297e0 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,7 +1,7 @@ use automerge::transaction::Transactable; use automerge::{ - ActorId, ApplyOptions, AutoCommit, Automerge, AutomergeError, ObjType, ScalarValue, Value, - VecOpObserver, ROOT, + ActorId, ApplyOptions, AutoCommit, Automerge, AutomergeError, Change, ExpandedChange, ObjType, + ScalarValue, Value, VecOpObserver, ROOT, }; mod helpers; @@ -853,7 +853,6 @@ fn handle_repeated_out_of_order_changes() -> Result<(), automerge::AutomergeErro fn list_counter_del() -> Result<(), automerge::AutomergeError> { let mut v = vec![ActorId::random(), ActorId::random(), ActorId::random()]; v.sort(); - println!("{:?}", v); let actor1 = v[0].clone(); let actor2 = v[1].clone(); let actor3 = v[2].clone(); @@ -1012,3 +1011,23 @@ fn increment_non_counter_list() { assert_eq!(doc1.increment(&list, 0, 2), Ok(())); 
} + +#[test] +fn save_and_load_incremented_counter() { + let mut doc = AutoCommit::new(); + doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); + doc.commit(); + doc.increment(ROOT, "counter", 1).unwrap(); + doc.commit(); + let changes1: Vec = doc.get_changes(&[]).unwrap().into_iter().cloned().collect(); + let json: Vec<_> = changes1 + .iter() + .map(|c| serde_json::to_string(&c.decode()).unwrap()) + .collect(); + let changes2: Vec = json + .iter() + .map(|j| serde_json::from_str::(j).unwrap().into()) + .collect(); + + assert_eq!(changes1, changes2); +} diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index fbe58184..f6924c7d 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -32,11 +32,11 @@ fn main() -> Result<(), AutomergeError> { let _bytes = doc.save(); println!("Saved in {} ms", save.elapsed().as_millis()); -/* - let load = Instant::now(); - let _ = Automerge::load(&bytes).unwrap(); - println!("Loaded in {} ms", load.elapsed().as_millis()); -*/ + /* + let load = Instant::now(); + let _ = Automerge::load(&bytes).unwrap(); + println!("Loaded in {} ms", load.elapsed().as_millis()); + */ println!("Done in {} ms", now.elapsed().as_millis()); Ok(()) From 6668f79a6e145bb18f61f05357ff0d9f797933b6 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 11 Jun 2022 20:53:17 -0700 Subject: [PATCH 458/730] Decouple the "test_automerge" build target from the "ALL" target. 
--- automerge-c/CMakeLists.txt | 2 +- automerge-c/src/CMakeLists.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/automerge-c/CMakeLists.txt b/automerge-c/CMakeLists.txt index 4ffca094..68a5176a 100644 --- a/automerge-c/CMakeLists.txt +++ b/automerge-c/CMakeLists.txt @@ -91,7 +91,7 @@ install( ) if(BUILD_TESTING) - add_subdirectory(test) + add_subdirectory(test EXCLUDE_FROM_ALL) enable_testing() endif() diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt index 2e6a5658..354cffc3 100644 --- a/automerge-c/src/CMakeLists.txt +++ b/automerge-c/src/CMakeLists.txt @@ -77,7 +77,7 @@ add_custom_command( ) add_custom_target( - ${LIBRARY_NAME}_artifacts + ${LIBRARY_NAME}_artifacts ALL DEPENDS ${CARGO_OUTPUT} ) From 30dd3da578681850dadf15dca904a73734acb248 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 11 Jun 2022 20:55:44 -0700 Subject: [PATCH 459/730] Updated the CMake build CI script to build the "test_automerge" target explicitly. --- scripts/ci/cmake-build | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ci/cmake-build b/scripts/ci/cmake-build index ac715859..41357caa 100755 --- a/scripts/ci/cmake-build +++ b/scripts/ci/cmake-build @@ -15,4 +15,4 @@ C_PROJECT=$THIS_SCRIPT/../../automerge-c; mkdir -p $C_PROJECT/build; cd $C_PROJECT/build; cmake --log-level=ERROR -B . -S .. -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DBUILD_SHARED_LIBS=$SHARED_TOGGLE; -cmake --build .; +cmake --build . --target test_automerge; From 4f7843e00739c58d22a7bb510968037bc256bd04 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 11 Jun 2022 20:57:28 -0700 Subject: [PATCH 460/730] Removed CMocka from the "docs" CI workflow's list of dependencies. 
--- .github/workflows/docs.yaml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 3474dd47..1f682628 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -46,10 +46,6 @@ jobs: run: sudo apt-get install -y doxygen shell: bash - - name: Install cmocka - run: sudo apt-get install -y libcmocka-dev - shell: bash - - name: Build C docs run: ./scripts/ci/cmake-docs shell: bash From 4efe9a4f68505d9eaeeab2642e6e1f53945ae040 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 11 Jun 2022 21:03:26 -0700 Subject: [PATCH 461/730] Replaced "cmake -E make_directory" invocation with "mkdir -p" invocation for consistency with the other CI scripts. --- scripts/ci/cmake-docs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ci/cmake-docs b/scripts/ci/cmake-docs index 26f49e9c..0ba3ea91 100755 --- a/scripts/ci/cmake-docs +++ b/scripts/ci/cmake-docs @@ -2,7 +2,7 @@ set -eoux pipefail -cmake -E make_directory automerge-c/build +mkdir -p automerge-c/build cd automerge-c/build cmake -B . -S .. cmake --build . --target automerge_docs From efa0a5624ab2853345801562619eea5479ff47cc Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 11 Jun 2022 21:04:36 -0700 Subject: [PATCH 462/730] Removed renamed unit test suite source files. 
--- automerge-c/test/amdoc_property_tests.c | 110 ----------- automerge-c/test/amlistput_tests.c | 235 ------------------------ automerge-c/test/ammapput_tests.c | 187 ------------------- 3 files changed, 532 deletions(-) delete mode 100644 automerge-c/test/amdoc_property_tests.c delete mode 100644 automerge-c/test/amlistput_tests.c delete mode 100644 automerge-c/test/ammapput_tests.c diff --git a/automerge-c/test/amdoc_property_tests.c b/automerge-c/test/amdoc_property_tests.c deleted file mode 100644 index bcb2cdec..00000000 --- a/automerge-c/test/amdoc_property_tests.c +++ /dev/null @@ -1,110 +0,0 @@ -#include -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include "group_state.h" - -typedef struct { - GroupState* group_state; - char const* actor_id_str; - uint8_t* actor_id_bytes; - size_t actor_id_size; -} TestState; - -static void hex_to_bytes(char const* hex_str, uint8_t* bytes, size_t const count) { - unsigned int byte; - char const* next = hex_str; - for (size_t index = 0; *next && index != count; next += 2, ++index) { - if (sscanf(next, "%02x", &byte) == 1) { - bytes[index] = (uint8_t)byte; - } - } -} - -static int setup(void** state) { - TestState* test_state = calloc(1, sizeof(TestState)); - group_setup((void**)&test_state->group_state); - test_state->actor_id_str = "000102030405060708090a0b0c0d0e0f"; - test_state->actor_id_size = strlen(test_state->actor_id_str) / 2; - test_state->actor_id_bytes = malloc(test_state->actor_id_size); - hex_to_bytes(test_state->actor_id_str, test_state->actor_id_bytes, test_state->actor_id_size); - *state = test_state; - return 0; -} - -static int teardown(void** state) { - TestState* test_state = *state; - group_teardown((void**)&test_state->group_state); - free(test_state->actor_id_bytes); - free(test_state); - return 0; -} - -static void test_AMputActor(void **state) { - TestState* test_state = *state; - GroupState* group_state = test_state->group_state; - AMresult* res 
= AMsetActor( - group_state->doc, - test_state->actor_id_bytes, - test_state->actor_id_size - ); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(res); - res = AMgetActor(group_state->doc); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); - assert_int_equal(value.actor_id.count, test_state->actor_id_size); - assert_memory_equal(value.actor_id.src, test_state->actor_id_bytes, value.actor_id.count); - AMfree(res); -} - -static void test_AMputActorHex(void **state) { - TestState* test_state = *state; - GroupState* group_state = test_state->group_state; - AMresult* res = AMsetActorHex( - group_state->doc, - test_state->actor_id_str - ); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(res); - res = AMgetActorHex(group_state->doc); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_STR); - assert_int_equal(strlen(value.str), test_state->actor_id_size * 2); - assert_string_equal(value.str, test_state->actor_id_str); - AMfree(res); -} - -int run_AMdoc_property_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test_setup_teardown(test_AMputActor, setup, teardown), - cmocka_unit_test_setup_teardown(test_AMputActorHex, setup, teardown), - }; - - return cmocka_run_group_tests(tests, NULL, NULL); -} diff --git a/automerge-c/test/amlistput_tests.c b/automerge-c/test/amlistput_tests.c deleted file mode 100644 index 
bddc832a..00000000 --- a/automerge-c/test/amlistput_tests.c +++ /dev/null @@ -1,235 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include "group_state.h" -#include "macro_utils.h" - -#define test_AMlistPut(suffix, mode) test_AMlistPut ## suffix ## _ ## mode - -#define static_void_test_AMlistPut(suffix, mode, member, scalar_value) \ -static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMresult* res = AMlistPut ## suffix( \ - group_state->doc, AM_ROOT, 0, !strcmp(#mode, "insert"), scalar_value \ - ); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ - res = AMlistGet(group_state->doc, AM_ROOT, 0); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res); \ - assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ - assert_true(value.member == scalar_value); \ - AMfree(res); \ -} - -#define test_AMlistPutBytes(mode) test_AMlistPutBytes ## _ ## mode - -#define static_void_test_AMlistPutBytes(mode, bytes_value) \ -static void test_AMlistPutBytes_ ## mode(void **state) { \ - static size_t const BYTES_SIZE = sizeof(bytes_value) / sizeof(uint8_t); \ - \ - GroupState* group_state = *state; \ - AMresult* res = AMlistPutBytes( \ - group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - bytes_value, \ - BYTES_SIZE \ - ); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ - res = AMlistGet(group_state->doc, AM_ROOT, 0); \ - if 
(AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_BYTES); \ - assert_int_equal(value.bytes.count, BYTES_SIZE); \ - assert_memory_equal(value.bytes.src, bytes_value, BYTES_SIZE); \ - AMfree(res); \ -} - -#define test_AMlistPutNull(mode) test_AMlistPutNull_ ## mode - -#define static_void_test_AMlistPutNull(mode) \ -static void test_AMlistPutNull_ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMresult* res = AMlistPutNull( \ - group_state->doc, AM_ROOT, 0, !strcmp(#mode, "insert")); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ - res = AMlistGet(group_state->doc, AM_ROOT, 0); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_NULL); \ - AMfree(res); \ -} - -#define test_AMlistPutObject(label, mode) test_AMlistPutObject_ ## label ## _ ## mode - -#define static_void_test_AMlistPutObject(label, mode) \ -static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMresult* res = AMlistPutObject( \ - group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - AMobjType_tag(#label) \ - ); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); \ - AMvalue value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ - assert_non_null(value.obj_id); \ - assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ - AMfree(res); \ -} - -#define test_AMlistPutStr(mode) test_AMlistPutStr 
## _ ## mode - -#define static_void_test_AMlistPutStr(mode, str_value) \ -static void test_AMlistPutStr_ ## mode(void **state) { \ - static size_t const STR_LEN = strlen(str_value); \ - \ - GroupState* group_state = *state; \ - AMresult* res = AMlistPutStr( \ - group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - str_value \ - ); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ - res = AMlistGet(group_state->doc, AM_ROOT, 0); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_STR); \ - assert_int_equal(strlen(value.str), STR_LEN); \ - assert_memory_equal(value.str, str_value, STR_LEN + 1); \ - AMfree(res); \ -} - -static_void_test_AMlistPut(Bool, insert, boolean, true) - -static_void_test_AMlistPut(Bool, update, boolean, true) - -static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; - -static_void_test_AMlistPutBytes(insert, BYTES_VALUE) - -static_void_test_AMlistPutBytes(update, BYTES_VALUE) - -static_void_test_AMlistPut(Counter, insert, counter, INT64_MAX) - -static_void_test_AMlistPut(Counter, update, counter, INT64_MAX) - -static_void_test_AMlistPut(F64, insert, f64, DBL_MAX) - -static_void_test_AMlistPut(F64, update, f64, DBL_MAX) - -static_void_test_AMlistPut(Int, insert, int_, INT64_MAX) - -static_void_test_AMlistPut(Int, update, int_, INT64_MAX) - -static_void_test_AMlistPutNull(insert) - -static_void_test_AMlistPutNull(update) - -static_void_test_AMlistPutObject(List, insert) - -static_void_test_AMlistPutObject(List, update) - -static_void_test_AMlistPutObject(Map, insert) - -static_void_test_AMlistPutObject(Map, update) - -static_void_test_AMlistPutObject(Text, insert) 
- -static_void_test_AMlistPutObject(Text, update) - -static_void_test_AMlistPutStr(insert, "Hello, world!") - -static_void_test_AMlistPutStr(update, "Hello, world!") - -static_void_test_AMlistPut(Timestamp, insert, timestamp, INT64_MAX) - -static_void_test_AMlistPut(Timestamp, update, timestamp, INT64_MAX) - -static_void_test_AMlistPut(Uint, insert, uint, UINT64_MAX) - -static_void_test_AMlistPut(Uint, update, uint, UINT64_MAX) - -int run_AMlistPut_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMlistPut(Bool, insert)), - cmocka_unit_test(test_AMlistPut(Bool, update)), - cmocka_unit_test(test_AMlistPutBytes(insert)), - cmocka_unit_test(test_AMlistPutBytes(update)), - cmocka_unit_test(test_AMlistPut(Counter, insert)), - cmocka_unit_test(test_AMlistPut(Counter, update)), - cmocka_unit_test(test_AMlistPut(F64, insert)), - cmocka_unit_test(test_AMlistPut(F64, update)), - cmocka_unit_test(test_AMlistPut(Int, insert)), - cmocka_unit_test(test_AMlistPut(Int, update)), - cmocka_unit_test(test_AMlistPutNull(insert)), - cmocka_unit_test(test_AMlistPutNull(update)), - cmocka_unit_test(test_AMlistPutObject(List, insert)), - cmocka_unit_test(test_AMlistPutObject(List, update)), - cmocka_unit_test(test_AMlistPutObject(Map, insert)), - cmocka_unit_test(test_AMlistPutObject(Map, update)), - cmocka_unit_test(test_AMlistPutObject(Text, insert)), - cmocka_unit_test(test_AMlistPutObject(Text, update)), - cmocka_unit_test(test_AMlistPutStr(insert)), - cmocka_unit_test(test_AMlistPutStr(update)), - cmocka_unit_test(test_AMlistPut(Timestamp, insert)), - cmocka_unit_test(test_AMlistPut(Timestamp, update)), - cmocka_unit_test(test_AMlistPut(Uint, insert)), - cmocka_unit_test(test_AMlistPut(Uint, update)), - }; - - return cmocka_run_group_tests(tests, group_setup, group_teardown); -} diff --git a/automerge-c/test/ammapput_tests.c b/automerge-c/test/ammapput_tests.c deleted file mode 100644 index 280c8e5b..00000000 --- a/automerge-c/test/ammapput_tests.c +++ 
/dev/null @@ -1,187 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include "group_state.h" -#include "macro_utils.h" - -#define test_AMmapPut(suffix) test_AMmapPut ## suffix - -#define static_void_test_AMmapPut(suffix, member, scalar_value) \ -static void test_AMmapPut ## suffix(void **state) { \ - GroupState* group_state = *state; \ - AMresult* res = AMmapPut ## suffix( \ - group_state->doc, \ - AM_ROOT, \ - #suffix, \ - scalar_value \ - ); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ - res = AMmapGet(group_state->doc, AM_ROOT, #suffix); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res); \ - assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ - assert_true(value.member == scalar_value); \ - AMfree(res); \ -} - -#define test_AMmapPutObject(label) test_AMmapPutObject_ ## label - -#define static_void_test_AMmapPutObject(label) \ -static void test_AMmapPutObject_ ## label(void **state) { \ - GroupState* group_state = *state; \ - AMresult* res = AMmapPutObject( \ - group_state->doc, \ - AM_ROOT, \ - #label, \ - AMobjType_tag(#label) \ - ); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); \ - AMvalue value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ - assert_non_null(value.obj_id); \ - assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ - AMfree(res); \ -} - -static_void_test_AMmapPut(Bool, boolean, true) - -static void test_AMmapPutBytes(void **state) { - static char const* const KEY = "Bytes"; - static uint8_t const BYTES_VALUE[] = 
{INT8_MIN, INT8_MAX / 2, INT8_MAX}; - static size_t const BYTES_SIZE = sizeof(BYTES_VALUE) / sizeof(uint8_t); - - GroupState* group_state = *state; - AMresult* res = AMmapPutBytes( - group_state->doc, - AM_ROOT, - KEY, - BYTES_VALUE, - BYTES_SIZE - ); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(res); - res = AMmapGet(group_state->doc, AM_ROOT, KEY); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_BYTES); - assert_int_equal(value.bytes.count, BYTES_SIZE); - assert_memory_equal(value.bytes.src, BYTES_VALUE, BYTES_SIZE); - AMfree(res); -} - -static_void_test_AMmapPut(Counter, counter, INT64_MAX) - -static_void_test_AMmapPut(F64, f64, DBL_MAX) - -static_void_test_AMmapPut(Int, int_, INT64_MAX) - -static void test_AMmapPutNull(void **state) { - static char const* const KEY = "Null"; - - GroupState* group_state = *state; - AMresult* res = AMmapPutNull(group_state->doc, AM_ROOT, KEY); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(res); - res = AMmapGet(group_state->doc, AM_ROOT, KEY); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_NULL); - AMfree(res); -} - -static_void_test_AMmapPutObject(List) - -static_void_test_AMmapPutObject(Map) - -static_void_test_AMmapPutObject(Text) - -static void test_AMmapPutStr(void **state) { - static char const* const KEY = "Str"; - static char const* const STR_VALUE = "Hello, world!"; - size_t 
const STR_LEN = strlen(STR_VALUE); - - GroupState* group_state = *state; - AMresult* res = AMmapPutStr( - group_state->doc, - AM_ROOT, - KEY, - STR_VALUE - ); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(res); - res = AMmapGet(group_state->doc, AM_ROOT, KEY); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_STR); - assert_int_equal(strlen(value.str), STR_LEN); - assert_memory_equal(value.str, STR_VALUE, STR_LEN + 1); - AMfree(res); -} - -static_void_test_AMmapPut(Timestamp, timestamp, INT64_MAX) - -static_void_test_AMmapPut(Uint, uint, UINT64_MAX) - -int run_AMmapPut_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMmapPut(Bool)), - cmocka_unit_test(test_AMmapPutBytes), - cmocka_unit_test(test_AMmapPut(Counter)), - cmocka_unit_test(test_AMmapPut(F64)), - cmocka_unit_test(test_AMmapPut(Int)), - cmocka_unit_test(test_AMmapPutNull), - cmocka_unit_test(test_AMmapPutObject(List)), - cmocka_unit_test(test_AMmapPutObject(Map)), - cmocka_unit_test(test_AMmapPutObject(Text)), - cmocka_unit_test(test_AMmapPutStr), - cmocka_unit_test(test_AMmapPut(Timestamp)), - cmocka_unit_test(test_AMmapPut(Uint)), - }; - - return cmocka_run_group_tests(tests, group_setup, group_teardown); -} From bdedafa0218478fd7a957019dedf52919e61f414 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 13 Jun 2022 12:01:54 -0700 Subject: [PATCH 463/730] Decouple the "test_automerge" build target from the "ALL" target. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 4c58e8d1..09cca71d 100644 --- a/README.md +++ b/README.md @@ -96,7 +96,7 @@ $ mkdir -p build $ cd build $ cmake -S .. 
-DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF ## building and testing -$ cmake --build . +$ cmake --build . --target test_automerge ``` To add debugging symbols, replace `Release` with `Debug`. To build a shared library instead of a static one, replace `OFF` with `ON`. From 71d8a7e717ddb13b1d389db94eaf6ce539359ed9 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 14 Jun 2022 00:37:42 -0700 Subject: [PATCH 464/730] Removed the superfluous `AutomergeError::HexDecode` variant. --- automerge/src/error.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/automerge/src/error.rs b/automerge/src/error.rs index cc76d7ef..db1c4884 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -29,8 +29,6 @@ pub enum AutomergeError { MissingCounter, #[error("general failure")] Fail, - #[error(transparent)] - HexDecode(#[from] hex::FromHexError), } #[cfg(feature = "wasm")] From ac3709e670c6fb7ffa31383c7830c3ce146a30f4 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 14 Jun 2022 00:38:55 -0700 Subject: [PATCH 465/730] Hoisted `InvalidActorId` into the `automerge` namespace. --- automerge/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index c011d2de..e15f7d36 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -92,6 +92,7 @@ pub use decoding::Error as DecodingError; pub use decoding::InvalidChangeError; pub use encoding::Error as EncodingError; pub use error::AutomergeError; +pub use error::InvalidActorId; pub use exid::ExId as ObjId; pub use keys::Keys; pub use keys_at::KeysAt; From 84fa83a3f0b9bf14014ae5d50b75869ec2df9753 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 14 Jun 2022 00:49:20 -0700 Subject: [PATCH 466/730] Added `AMactorId`. Updated `AMchangeActorId()`. Updated `AMsetActor()`. Removed `AMgetActorHex()`. Removed `AMsetActorHex()`. 
--- automerge-c/src/CMakeLists.txt | 1 + automerge-c/src/actor_id.rs | 132 ++++++++++++++++++++++++++++++ automerge-c/src/change.rs | 25 ++++-- automerge-c/src/doc.rs | 75 +++-------------- automerge-c/src/doc/utils.rs | 12 +++ automerge-c/src/lib.rs | 1 + automerge-c/src/result.rs | 20 +++-- automerge-c/test/CMakeLists.txt | 6 +- automerge-c/test/actor_id_tests.c | 102 +++++++++++++++++++++++ automerge-c/test/doc_tests.c | 94 ++++++++++----------- automerge-c/test/main.c | 3 + automerge-c/test/sync_tests.c | 92 +++++++++++++++------ 12 files changed, 412 insertions(+), 151 deletions(-) create mode 100644 automerge-c/src/actor_id.rs create mode 100644 automerge-c/test/actor_id_tests.c diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt index 2e6a5658..f5d862d8 100644 --- a/automerge-c/src/CMakeLists.txt +++ b/automerge-c/src/CMakeLists.txt @@ -51,6 +51,7 @@ add_custom_command( MAIN_DEPENDENCY lib.rs DEPENDS + actor_id.rs byte_span.rs change_hashes.rs change.rs diff --git a/automerge-c/src/actor_id.rs b/automerge-c/src/actor_id.rs new file mode 100644 index 00000000..00664c5c --- /dev/null +++ b/automerge-c/src/actor_id.rs @@ -0,0 +1,132 @@ +use automerge as am; +use std::cell::RefCell; +use std::ffi::{CStr, CString}; +use std::os::raw::c_char; +use std::str::FromStr; + +use crate::byte_span::AMbyteSpan; +use crate::result::{to_result, AMresult}; + +/// \struct AMactorId +/// \brief An actor's unique identifier. 
+pub struct AMactorId { + body: am::ActorId, + c_str: RefCell>, +} + +impl AMactorId { + pub fn new(body: am::ActorId) -> Self { + Self { + body, + c_str: RefCell::>::default(), + } + } + + pub fn as_c_str(&self) -> *const c_char { + let mut c_str = self.c_str.borrow_mut(); + match c_str.as_mut() { + None => { + let hex_str = self.body.to_hex_string(); + c_str.insert(CString::new(hex_str).unwrap()).as_ptr() + } + Some(value) => value.as_ptr(), + } + } +} + +impl AsRef for AMactorId { + fn as_ref(&self) -> &am::ActorId { + &self.body + } +} + +/// \memberof AMactorId +/// \brief Gets the value of an actor ID as a sequence of bytes. +/// +/// \param[in] actor_id A pointer to an `AMactorId` struct. +/// \pre \p actor_id must be a valid address. +/// \return An `AMbyteSpan` struct. +/// \internal +/// +/// # Safety +/// actor_id must be a pointer to a valid AMactorId +#[no_mangle] +pub unsafe extern "C" fn AMactorIdBytes(actor_id: *const AMactorId) -> AMbyteSpan { + match actor_id.as_ref() { + Some(actor_id) => actor_id.as_ref().into(), + None => AMbyteSpan::default(), + } +} + +/// \memberof AMactorId +/// \brief Allocates a new actor ID and initializes it with a random UUID. +/// +/// \return A pointer to an `AMresult` struct containing a pointer to an +/// `AMactorId` struct. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfree()`. +#[no_mangle] +pub unsafe extern "C" fn AMactorIdInit() -> *mut AMresult { + to_result(Ok::(am::ActorId::random())) +} + +/// \memberof AMactorId +/// \brief Allocates a new actor ID and initializes it from a sequence of +/// bytes. +/// +/// \param[in] src A pointer to a contiguous sequence of bytes. +/// \param[in] count The number of bytes to copy from \p src. +/// \pre `0 <=` \p count `<=` length of \p src. +/// \return A pointer to an `AMresult` struct containing a pointer to an +/// `AMactorId` struct. 
+/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfree()`. +/// \internal +/// +/// # Safety +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMactorIdInitBytes(src: *const u8, count: usize) -> *mut AMresult { + let slice = std::slice::from_raw_parts(src, count); + to_result(Ok::(am::ActorId::from( + slice, + ))) +} + +/// \memberof AMactorId +/// \brief Allocates a new actor ID and initializes it from a hexadecimal +/// string. +/// +/// \param[in] hex_str A UTF-8 string. +/// \return A pointer to an `AMresult` struct containing a pointer to an +/// `AMactorId` struct. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfree()`. +/// \internal +/// +/// # Safety +/// hex_str must be a null-terminated array of `c_char` +#[no_mangle] +pub unsafe extern "C" fn AMactorIdInitStr(hex_str: *const c_char) -> *mut AMresult { + to_result(am::ActorId::from_str( + CStr::from_ptr(hex_str).to_str().unwrap(), + )) +} + +/// \memberof AMactorId +/// \brief Gets the value of an actor ID as a hexadecimal string. +/// +/// \param[in] actor_id A pointer to an `AMactorId` struct. +/// \pre \p actor_id must be a valid address. +/// \return A UTF-8 string. +/// \internal +/// +/// # Safety +/// actor_id must be a pointer to a valid AMactorId +#[no_mangle] +pub unsafe extern "C" fn AMactorIdStr(actor_id: *const AMactorId) -> *const c_char { + match actor_id.as_ref() { + Some(actor_id) => actor_id.as_c_str(), + None => std::ptr::null::(), + } +} diff --git a/automerge-c/src/change.rs b/automerge-c/src/change.rs index 2ebd7469..4e051d01 100644 --- a/automerge-c/src/change.rs +++ b/automerge-c/src/change.rs @@ -6,6 +6,16 @@ use crate::byte_span::AMbyteSpan; use crate::change_hashes::AMchangeHashes; use crate::result::{to_result, AMresult}; +macro_rules! 
to_change { + ($handle:expr) => {{ + let handle = $handle.as_ref(); + match handle { + Some(b) => b, + None => return AMresult::err("Invalid AMchange pointer").into(), + } + }}; +} + /// \struct AMchange /// \brief A group of operations performed by an actor. pub struct AMchange { @@ -46,18 +56,21 @@ impl AsRef for AMchange { /// \brief Gets the first referenced actor ID in a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \return An actor ID as an `AMbyteSpan` struct. /// \pre \p change must be a valid address. +/// \return A pointer to an `AMresult` struct containing a pointer to an +/// `AMactorId` struct. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfree()`. /// \internal /// /// # Safety /// change must be a pointer to a valid AMchange #[no_mangle] -pub unsafe extern "C" fn AMchangeActorId(change: *const AMchange) -> AMbyteSpan { - match change.as_ref() { - Some(change) => change.as_ref().actor_id().into(), - None => AMbyteSpan::default(), - } +pub unsafe extern "C" fn AMchangeActorId(change: *const AMchange) -> *mut AMresult { + let change = to_change!(change); + to_result(Ok::( + change.as_ref().actor_id().clone(), + )) } /// \memberof AMchange diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 477a75cb..617a142d 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -1,10 +1,9 @@ use automerge as am; use automerge::transaction::{CommitOptions, Transactable}; -use smol_str::SmolStr; -use std::borrow::Cow; use std::ops::{Deref, DerefMut}; use std::os::raw::c_char; +use crate::actor_id::AMactorId; use crate::change::AMchange; use crate::change_hashes::AMchangeHashes; use crate::obj::AMobjId; @@ -17,7 +16,7 @@ mod utils; use crate::changes::AMchanges; use crate::doc::utils::to_str; -use crate::doc::utils::{to_doc, to_obj_id}; +use crate::doc::utils::{to_actor_id, to_doc, to_obj_id}; macro_rules! 
to_changes { ($handle:expr) => {{ @@ -225,27 +224,9 @@ pub unsafe extern "C" fn AMgenerateSyncMessage( #[no_mangle] pub unsafe extern "C" fn AMgetActor(doc: *mut AMdoc) -> *mut AMresult { let doc = to_doc!(doc); - to_result(Ok(doc.get_actor().clone())) -} - -/// \memberof AMdoc -/// \brief Gets an `AMdoc` struct's actor ID value as a hexadecimal string. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing a `char const*`. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetActorHex(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); - let hex_str = doc.get_actor().to_hex_string(); - let value = am::Value::Scalar(Cow::Owned(am::ScalarValue::Str(SmolStr::new(hex_str)))); - to_result(Ok(value)) + to_result(Ok::( + doc.get_actor().clone(), + )) } /// \memberof AMdoc @@ -602,15 +583,13 @@ pub unsafe extern "C" fn AMsaveIncremental(doc: *mut AMdoc) -> *mut AMresult { } /// \memberof AMdoc -/// \brief Puts a sequence of bytes as the actor ID value of a document. +/// \brief Puts the actor ID value of a document. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] value A pointer to a contiguous sequence of bytes. -/// \param[in] count The number of bytes to copy from \p value. +/// \param[in] actor_id A pointer to an `AMactorId` struct. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. -/// \pre \p value must be a valid address. -/// \pre `0 <=` \p count `<=` length of \p value. +/// \pre \p actor_id must be a valid address. /// \warning To avoid a memory leak, the returned `AMresult` struct must be /// deallocated with `AMfree()`. 
/// \internal @@ -619,41 +598,9 @@ pub unsafe extern "C" fn AMsaveIncremental(doc: *mut AMdoc) -> *mut AMresult { /// doc must be a pointer to a valid AMdoc /// value must be a byte array of length `>= count` #[no_mangle] -pub unsafe extern "C" fn AMsetActor( - doc: *mut AMdoc, - value: *const u8, - count: usize, -) -> *mut AMresult { +pub unsafe extern "C" fn AMsetActor(doc: *mut AMdoc, actor_id: *const AMactorId) -> *mut AMresult { let doc = to_doc!(doc); - let slice = std::slice::from_raw_parts(value, count); - doc.set_actor(am::ActorId::from(slice)); + let actor_id = to_actor_id!(actor_id); + doc.set_actor(actor_id.as_ref().clone()); to_result(Ok(())) } - -/// \memberof AMdoc -/// \brief Puts a hexadecimal string as the actor ID value of a document. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] hex_str A string of hexadecimal characters. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p hex_str must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. -/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// hex_str must be a null-terminated array of `c_char` -#[no_mangle] -pub unsafe extern "C" fn AMsetActorHex(doc: *mut AMdoc, hex_str: *const c_char) -> *mut AMresult { - let doc = to_doc!(doc); - let slice = std::slice::from_raw_parts(hex_str as *const u8, libc::strlen(hex_str)); - to_result(match hex::decode(slice) { - Ok(vec) => { - doc.set_actor(vec.into()); - Ok(()) - } - Err(error) => Err(am::AutomergeError::HexDecode(error)), - }) -} diff --git a/automerge-c/src/doc/utils.rs b/automerge-c/src/doc/utils.rs index eb35b69e..bf3aaf98 100644 --- a/automerge-c/src/doc/utils.rs +++ b/automerge-c/src/doc/utils.rs @@ -1,6 +1,18 @@ use std::ffi::CStr; use std::os::raw::c_char; +macro_rules! 
to_actor_id { + ($handle:expr) => {{ + let handle = $handle.as_ref(); + match handle { + Some(b) => b, + None => return AMresult::err("Invalid AMactorId pointer").into(), + } + }}; +} + +pub(crate) use to_actor_id; + macro_rules! to_doc { ($handle:expr) => {{ let handle = $handle.as_mut(); diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs index 0c01c0d3..f3dcfa09 100644 --- a/automerge-c/src/lib.rs +++ b/automerge-c/src/lib.rs @@ -1,3 +1,4 @@ +mod actor_id; mod byte_span; mod change; mod change_hashes; diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 0ea12c50..0e26105c 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -3,6 +3,7 @@ use std::collections::BTreeMap; use std::ffi::CString; use std::os::raw::c_char; +use crate::actor_id::AMactorId; use crate::byte_span::AMbyteSpan; use crate::change::AMchange; use crate::change_hashes::AMchangeHashes; @@ -21,7 +22,7 @@ use crate::sync::{AMsyncMessage, AMsyncState}; /// The variant discriminator of an `AMvalue` struct. /// /// \var AMvalue::actor_id -/// An actor ID as an `AMbyteSpan` struct. +/// An actor ID as an `AMactorId` struct. /// /// \var AMvalue::boolean /// A boolean. @@ -58,7 +59,7 @@ use crate::sync::{AMsyncMessage, AMsyncState}; #[repr(C)] pub enum AMvalue<'a> { /// An actor ID variant. - ActorId(AMbyteSpan), + ActorId(&'a AMactorId), /// A boolean variant. Boolean(bool), /// A byte array variant. @@ -104,7 +105,7 @@ pub enum AMvalue<'a> { /// \struct AMresult /// \brief A discriminated union of result variants. 
pub enum AMresult { - ActorId(am::ActorId), + ActorId(AMactorId), ChangeHashes(Vec), Changes(Vec, BTreeMap), Doc(Box), @@ -175,7 +176,16 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(actor_id) => AMresult::ActorId(actor_id), + Ok(actor_id) => AMresult::ActorId(AMactorId::new(actor_id)), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(actor_id) => AMresult::ActorId(AMactorId::new(actor_id)), Err(e) => AMresult::err(&e.to_string()), } } @@ -432,7 +442,7 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> if let Some(result) = result.as_mut() { match result { AMresult::ActorId(actor_id) => { - content = AMvalue::ActorId(actor_id.into()); + content = AMvalue::ActorId(actor_id); } AMresult::ChangeHashes(change_hashes) => { content = AMvalue::ChangeHashes(AMchangeHashes::new(change_hashes)); diff --git a/automerge-c/test/CMakeLists.txt b/automerge-c/test/CMakeLists.txt index aab136da..a72b78a1 100644 --- a/automerge-c/test/CMakeLists.txt +++ b/automerge-c/test/CMakeLists.txt @@ -4,12 +4,14 @@ find_package(cmocka REQUIRED) add_executable( test_${LIBRARY_NAME} - group_state.c + actor_id_tests.c doc_tests.c + group_state.c list_tests.c - map_tests.c macro_utils.c main.c + map_tests.c + str_utils.c sync_tests.c ) diff --git a/automerge-c/test/actor_id_tests.c b/automerge-c/test/actor_id_tests.c new file mode 100644 index 00000000..25ed2886 --- /dev/null +++ b/automerge-c/test/actor_id_tests.c @@ -0,0 +1,102 @@ +#include +#include +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include "automerge.h" +#include "str_utils.h" + +typedef struct { + uint8_t* src; + char const* str; + size_t count; +} TestState; + +static int setup(void** state) { + TestState* test_state = calloc(1, sizeof(TestState)); + test_state->str = 
"000102030405060708090a0b0c0d0e0f"; + test_state->count = strlen(test_state->str) / 2; + test_state->src = malloc(test_state->count); + hex_to_bytes(test_state->str, test_state->src, test_state->count); + *state = test_state; + return 0; +} + +static int teardown(void** state) { + TestState* test_state = *state; + free(test_state->src); + free(test_state); + return 0; +} + +static void test_AMactorIdInit(void **state) { + TestState* test_state = *state; + AMresult* prior_result = NULL; + AMbyteSpan prior_bytes; + AMresult* result = NULL; + for (size_t i = 0; i != 11; ++i) { + result = AMactorIdInit(); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(result)); + } + assert_int_equal(AMresultSize(result), 1); + AMvalue const value = AMresultValue(result); + assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); + AMbyteSpan const bytes = AMactorIdBytes(value.actor_id); + if (prior_result) { + size_t const min_count = fmax(bytes.count, prior_bytes.count); + assert_memory_not_equal(bytes.src, prior_bytes.src, min_count); + AMfree(prior_result); + } + prior_result = result; + prior_bytes = bytes; + } + AMfree(result); +} + +static void test_AMactorIdInitBytes(void **state) { + TestState* test_state = *state; + AMresult* const result = AMactorIdInitBytes(test_state->src, test_state->count); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(result)); + } + assert_int_equal(AMresultSize(result), 1); + AMvalue const value = AMresultValue(result); + assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); + AMbyteSpan const bytes = AMactorIdBytes(value.actor_id); + assert_int_equal(bytes.count, test_state->count); + assert_memory_equal(bytes.src, test_state->src, bytes.count); + AMfree(result); +} + +static void test_AMactorIdInitStr(void **state) { + TestState* test_state = *state; + AMresult* const result = AMactorIdInitStr(test_state->str); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg("%s", 
AMerrorMessage(result)); + } + assert_int_equal(AMresultSize(result), 1); + AMvalue const value = AMresultValue(result); + assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); + char const* const str = AMactorIdStr(value.actor_id); + assert_int_equal(strlen(str), test_state->count * 2); + assert_string_equal(str, test_state->str); + AMfree(result); +} + +int run_actor_id_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test_setup_teardown(test_AMactorIdInit, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMactorIdInitBytes, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMactorIdInitStr, setup, teardown), + }; + + return cmocka_run_group_tests(tests, NULL, NULL); +} diff --git a/automerge-c/test/doc_tests.c b/automerge-c/test/doc_tests.c index 7c9cee0c..f4a6b519 100644 --- a/automerge-c/test/doc_tests.c +++ b/automerge-c/test/doc_tests.c @@ -1,7 +1,6 @@ #include #include #include -#include #include #include @@ -10,6 +9,7 @@ /* local */ #include "group_state.h" +#include "str_utils.h" typedef struct { GroupState* group_state; @@ -18,16 +18,6 @@ typedef struct { size_t actor_id_size; } TestState; -static void hex_to_bytes(char const* hex_str, uint8_t* bytes, size_t const count) { - unsigned int byte; - char const* next = hex_str; - for (size_t index = 0; *next && index != count; next += 2, ++index) { - if (sscanf(next, "%02x", &byte) == 1) { - bytes[index] = (uint8_t)byte; - } - } -} - static int setup(void** state) { TestState* test_state = calloc(1, sizeof(TestState)); group_setup((void**)&test_state->group_state); @@ -47,63 +37,65 @@ static int teardown(void** state) { return 0; } -static void test_AMputActor(void **state) { +static void test_AMputActor_bytes(void **state) { TestState* test_state = *state; GroupState* group_state = test_state->group_state; - AMresult* res = AMsetActor( - group_state->doc, - test_state->actor_id_bytes, - test_state->actor_id_size - ); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", 
AMerrorMessage(res)); + AMresult* actor_id_result = AMactorIdInitBytes(test_state->actor_id_bytes, + test_state->actor_id_size); + AMvalue value = AMresultValue(actor_id_result); + AMresult* result = AMsetActor(group_state->doc, value.actor_id); + AMfree(actor_id_result); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(result)); } - assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res); + assert_int_equal(AMresultSize(result), 0); + value = AMresultValue(result); assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(res); - res = AMgetActor(group_state->doc); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); + AMfree(result); + result = AMgetActor(group_state->doc); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(result)); } - assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res); + assert_int_equal(AMresultSize(result), 1); + value = AMresultValue(result); assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); - assert_int_equal(value.actor_id.count, test_state->actor_id_size); - assert_memory_equal(value.actor_id.src, test_state->actor_id_bytes, value.actor_id.count); - AMfree(res); + AMbyteSpan const bytes = AMactorIdBytes(value.actor_id); + assert_int_equal(bytes.count, test_state->actor_id_size); + assert_memory_equal(bytes.src, test_state->actor_id_bytes, bytes.count); + AMfree(result); } -static void test_AMputActorHex(void **state) { +static void test_AMputActor_hex(void **state) { TestState* test_state = *state; GroupState* group_state = test_state->group_state; - AMresult* res = AMsetActorHex( - group_state->doc, - test_state->actor_id_str - ); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); + AMresult* actor_id_result = AMactorIdInitStr(test_state->actor_id_str); + AMvalue value = AMresultValue(actor_id_result); + AMresult* result = AMsetActor(group_state->doc, value.actor_id); + 
AMfree(actor_id_result); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(result)); } - assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res); + assert_int_equal(AMresultSize(result), 0); + value = AMresultValue(result); assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(res); - res = AMgetActorHex(group_state->doc); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); + AMfree(result); + result = AMgetActor(group_state->doc); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(result)); } - assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_STR); - assert_int_equal(strlen(value.str), test_state->actor_id_size * 2); - assert_string_equal(value.str, test_state->actor_id_str); - AMfree(res); + assert_int_equal(AMresultSize(result), 1); + value = AMresultValue(result); + assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); + char const* const str = AMactorIdStr(value.actor_id); + assert_int_equal(strlen(str), test_state->actor_id_size * 2); + assert_string_equal(str, test_state->actor_id_str); + AMfree(result); } int run_doc_tests(void) { const struct CMUnitTest tests[] = { - cmocka_unit_test_setup_teardown(test_AMputActor, setup, teardown), - cmocka_unit_test_setup_teardown(test_AMputActorHex, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMputActor_bytes, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMputActor_hex, setup, teardown), }; return cmocka_run_group_tests(tests, NULL, NULL); diff --git a/automerge-c/test/main.c b/automerge-c/test/main.c index 8739fe2b..3eeb8a3b 100644 --- a/automerge-c/test/main.c +++ b/automerge-c/test/main.c @@ -6,6 +6,8 @@ /* third-party */ #include +extern int run_actor_id_tests(void); + extern int run_doc_tests(void); extern int run_list_tests(void); @@ -16,6 +18,7 @@ extern int run_sync_tests(void); int main(void) { return ( + 
run_actor_id_tests() + run_doc_tests() + run_list_tests() + run_map_tests() + diff --git a/automerge-c/test/sync_tests.c b/automerge-c/test/sync_tests.c index 1ecda1cc..92076bac 100644 --- a/automerge-c/test/sync_tests.c +++ b/automerge-c/test/sync_tests.c @@ -278,8 +278,12 @@ static void test_converged_works_with_prior_sync_state(void **state) { static void test_converged_no_message_once_synced(void **state) { /* Create & synchronize two nodes. */ TestState* test_state = *state; - AMfree(AMsetActorHex(test_state->doc1, "abc123")); - AMfree(AMsetActorHex(test_state->doc2, "def456")); + AMresult* actor_id_result = AMactorIdInitStr("abc123"); + AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); + actor_id_result = AMactorIdInitStr("def456"); + AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); time_t const time = 0; for (size_t value = 0; value != 5; ++value) { @@ -352,8 +356,12 @@ static void test_converged_no_message_once_synced(void **state) { static void test_converged_allow_simultaneous_messages(void **state) { /* Create & synchronize two nodes. 
*/ TestState* test_state = *state; - AMfree(AMsetActorHex(test_state->doc1, "abc123")); - AMfree(AMsetActorHex(test_state->doc2, "def456")); + AMresult* actor_id_result = AMactorIdInitStr("abc123"); + AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); + actor_id_result = AMactorIdInitStr("def456"); + AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); time_t const time = 0; for (size_t value = 0; value != 5; ++value) { @@ -505,8 +513,12 @@ static void test_converged_allow_simultaneous_messages(void **state) { */ static void test_converged_assume_sent_changes_were_received(void **state) { TestState* test_state = *state; - AMfree(AMsetActorHex(test_state->doc1, "01234567")); - AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMresult* actor_id_result = AMactorIdInitStr("01234567"); + AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); + actor_id_result = AMactorIdInitStr("89abcdef"); + AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); AMresult* items_result = AMmapPutObject(test_state->doc1, AM_ROOT, @@ -595,8 +607,12 @@ static void test_diverged_works_without_prior_sync_state(void **state) { /* Create two peers both with divergent commits. 
*/ TestState* test_state = *state; - AMfree(AMsetActorHex(test_state->doc1, "01234567")); - AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMresult* actor_id_result = AMactorIdInitStr("01234567"); + AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); + actor_id_result = AMactorIdInitStr("89abcdef"); + AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); time_t const time = 0; for (size_t value = 0; value != 10; ++value) { AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); @@ -645,8 +661,12 @@ static void test_diverged_works_with_prior_sync_state(void **state) { /* Create two peers both with divergent commits. */ TestState* test_state = *state; - AMfree(AMsetActorHex(test_state->doc1, "01234567")); - AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMresult* actor_id_result = AMactorIdInitStr("01234567"); + AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); + actor_id_result = AMactorIdInitStr("89abcdef"); + AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); time_t const time = 0; for (size_t value = 0; value != 10; ++value) { AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); @@ -696,8 +716,12 @@ static void test_diverged_works_with_prior_sync_state(void **state) { */ static void test_diverged_ensure_not_empty_after_sync(void **state) { TestState* test_state = *state; - AMfree(AMsetActorHex(test_state->doc1, "01234567")); - AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMresult* actor_id_result = AMactorIdInitStr("01234567"); + AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); + actor_id_result = AMactorIdInitStr("89abcdef"); + AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); time_t const time = 0; 
for (size_t value = 0; value != 3; ++value) { @@ -731,8 +755,12 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { * We want to successfully sync (n1) with (r), even though (n1) believes * it's talking to (n2). */ TestState* test_state = *state; - AMfree(AMsetActorHex(test_state->doc1, "01234567")); - AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMresult* actor_id_result = AMactorIdInitStr("01234567"); + AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); + actor_id_result = AMactorIdInitStr("89abcdef"); + AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); /* n1 makes three changes which we synchronize to n2. */ time_t const time = 0; @@ -814,8 +842,12 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { */ static void test_diverged_resync_after_data_loss_without_disconnection(void **state) { TestState* test_state = *state; - AMfree(AMsetActorHex(test_state->doc1, "01234567")); - AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMresult* actor_id_result = AMactorIdInitStr("01234567"); + AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); + actor_id_result = AMactorIdInitStr("89abcdef"); + AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); /* n1 makes three changes which we synchronize to n2. 
*/ time_t const time = 0; @@ -839,7 +871,9 @@ static void test_diverged_resync_after_data_loss_without_disconnection(void **st AMresult* doc2_after_data_loss_result = AMcreate(); AMdoc* doc2_after_data_loss = AMresultValue(doc2_after_data_loss_result).doc; - AMfree(AMsetActorHex(doc2_after_data_loss, "89abcdef")); + actor_id_result = AMactorIdInitStr("89abcdef"); + AMfree(AMsetActor(doc2_after_data_loss, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); /* "n2" now has no data, but n1 still thinks it does. Note we don't do * decodeSyncState(encodeSyncState(s1)) in order to simulate data loss @@ -868,11 +902,17 @@ static void test_diverged_resync_after_data_loss_without_disconnection(void **st */ static void test_diverged_handles_concurrent_changes(void **state) { TestState* test_state = *state; - AMfree(AMsetActorHex(test_state->doc1, "01234567")); - AMfree(AMsetActorHex(test_state->doc2, "89abcdef")); + AMresult* actor_id_result = AMactorIdInitStr("01234567"); + AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); + actor_id_result = AMactorIdInitStr("89abcdef"); + AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); AMresult* doc3_result = AMcreate(); AMdoc* doc3 = AMresultValue(doc3_result).doc; - AMfree(AMsetActorHex(doc3, "fedcba98")); + actor_id_result = AMactorIdInitStr("fedcba98"); + AMfree(AMsetActor(doc3, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); AMsyncState* sync_state12 = test_state->sync_state1; AMsyncState* sync_state21 = test_state->sync_state2; AMresult* sync_state23_result = AMsyncStateInit(); @@ -929,11 +969,17 @@ static void test_diverged_handles_concurrent_changes(void **state) { */ static void test_diverged_handles_histories_of_branching_and_merging(void **state) { TestState* test_state = *state; - AMfree(AMsetActorHex(test_state->doc1, "01234567")); - AMfree(AMsetActorHex(test_state->doc2, 
"89abcdef")); + AMresult* actor_id_result = AMactorIdInitStr("01234567"); + AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); + actor_id_result = AMactorIdInitStr("89abcdef"); + AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); AMresult* doc3_result = AMcreate(); AMdoc* doc3 = AMresultValue(doc3_result).doc; - AMfree(AMsetActorHex(doc3, "fedcba98")); + actor_id_result = AMactorIdInitStr("fedcba98"); + AMfree(AMsetActor(doc3, AMresultValue(actor_id_result).actor_id)); + AMfree(actor_id_result); time_t const time = 0; AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 0)); AMcommit(test_state->doc1, NULL, &time); From 6de9ff620d53d5463c9cf3a574ef4e26dfe49a14 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 14 Jun 2022 00:52:06 -0700 Subject: [PATCH 467/730] Moved `hex_to_bytes()` so that it could be shared by the unit test suites for `AMactorId` and `AMdoc` functions. 
--- automerge-c/test/str_utils.c | 15 +++++++++++++++ automerge-c/test/str_utils.h | 14 ++++++++++++++ 2 files changed, 29 insertions(+) create mode 100644 automerge-c/test/str_utils.c create mode 100644 automerge-c/test/str_utils.h diff --git a/automerge-c/test/str_utils.c b/automerge-c/test/str_utils.c new file mode 100644 index 00000000..cc923cb4 --- /dev/null +++ b/automerge-c/test/str_utils.c @@ -0,0 +1,15 @@ +#include +#include + +/* local */ +#include "str_utils.h" + +void hex_to_bytes(char const* hex_str, uint8_t* src, size_t const count) { + unsigned int byte; + char const* next = hex_str; + for (size_t index = 0; *next && index != count; next += 2, ++index) { + if (sscanf(next, "%02x", &byte) == 1) { + src[index] = (uint8_t)byte; + } + } +} diff --git a/automerge-c/test/str_utils.h b/automerge-c/test/str_utils.h new file mode 100644 index 00000000..0fc3db62 --- /dev/null +++ b/automerge-c/test/str_utils.h @@ -0,0 +1,14 @@ +#ifndef STR_UTILS_INCLUDED +#define STR_UTILS_INCLUDED + +/** + * \brief Converts a hexadecimal string into a sequence of bytes. + * + * \param[in] hex_str A string. + * \param[in] src A pointer to a contiguous sequence of bytes. + * \param[in] count The number of bytes to copy to \p src. + * \pre \p count `<=` length of \p src. + */ +void hex_to_bytes(char const* hex_str, uint8_t* src, size_t const count); + +#endif From ceecef3b8736ca3443b7b85c1b8132c708098c81 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 14 Jun 2022 21:28:10 -0400 Subject: [PATCH 468/730] update list of read methods in c readme --- automerge-c/README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/automerge-c/README.md b/automerge-c/README.md index d500f330..1b0e618d 100644 --- a/automerge-c/README.md +++ b/automerge-c/README.md @@ -24,10 +24,12 @@ 1. `AMinc{Map|List}(doc, obj, prop, value)` 1. `AMspliceText(doc, obj, start, num_del, text)` -### Read +### Read (the heads argument is optional and can be on an `at` variant) 1. 
`AMkeys(doc, obj, heads)` 1. `AMlength(doc, obj, heads)` + 1. `AMlistRange(doc, obj, heads)` + 1. `AMmapRange(doc, obj, heads)` 1. `AMvalues(doc, obj, heads)` 1. `AMtext(doc, obj, heads)` From 2f37d194baf6473e95a4c0ca6cf78f00cf8785f5 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 14 Jun 2022 23:04:18 -0700 Subject: [PATCH 469/730] Asserted that the string forms of two random `AMactorId` structs are unequal. --- automerge-c/test/actor_id_tests.c | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/automerge-c/test/actor_id_tests.c b/automerge-c/test/actor_id_tests.c index 25ed2886..ee359740 100644 --- a/automerge-c/test/actor_id_tests.c +++ b/automerge-c/test/actor_id_tests.c @@ -40,6 +40,7 @@ static void test_AMactorIdInit(void **state) { TestState* test_state = *state; AMresult* prior_result = NULL; AMbyteSpan prior_bytes; + char const* prior_str = NULL; AMresult* result = NULL; for (size_t i = 0; i != 11; ++i) { result = AMactorIdInit(); @@ -50,13 +51,16 @@ static void test_AMactorIdInit(void **state) { AMvalue const value = AMresultValue(result); assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); AMbyteSpan const bytes = AMactorIdBytes(value.actor_id); + char const* const str = AMactorIdStr(value.actor_id); if (prior_result) { size_t const min_count = fmax(bytes.count, prior_bytes.count); assert_memory_not_equal(bytes.src, prior_bytes.src, min_count); + assert_string_not_equal(str, prior_str); AMfree(prior_result); } prior_result = result; prior_bytes = bytes; + prior_str = str; } AMfree(result); } From 400b8acdff102d7de683b846700cb7f7ac1a9cb2 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 14 Jun 2022 23:16:45 -0700 Subject: [PATCH 470/730] Switched the `AMactorId` unit test suite to group setup/teardown. Removed superfluous group state from the `AMactorIdInit()` test. 
--- automerge-c/test/actor_id_tests.c | 49 +++++++++++++++---------------- 1 file changed, 24 insertions(+), 25 deletions(-) diff --git a/automerge-c/test/actor_id_tests.c b/automerge-c/test/actor_id_tests.c index ee359740..1fa553c7 100644 --- a/automerge-c/test/actor_id_tests.c +++ b/automerge-c/test/actor_id_tests.c @@ -17,27 +17,26 @@ typedef struct { uint8_t* src; char const* str; size_t count; -} TestState; +} GroupState; -static int setup(void** state) { - TestState* test_state = calloc(1, sizeof(TestState)); - test_state->str = "000102030405060708090a0b0c0d0e0f"; - test_state->count = strlen(test_state->str) / 2; - test_state->src = malloc(test_state->count); - hex_to_bytes(test_state->str, test_state->src, test_state->count); - *state = test_state; +static int group_setup(void** state) { + GroupState* group_state = calloc(1, sizeof(GroupState)); + group_state->str = "000102030405060708090a0b0c0d0e0f"; + group_state->count = strlen(group_state->str) / 2; + group_state->src = malloc(group_state->count); + hex_to_bytes(group_state->str, group_state->src, group_state->count); + *state = group_state; return 0; } -static int teardown(void** state) { - TestState* test_state = *state; - free(test_state->src); - free(test_state); +static int group_teardown(void** state) { + GroupState* group_state = *state; + free(group_state->src); + free(group_state); return 0; } static void test_AMactorIdInit(void **state) { - TestState* test_state = *state; AMresult* prior_result = NULL; AMbyteSpan prior_bytes; char const* prior_str = NULL; @@ -66,8 +65,8 @@ static void test_AMactorIdInit(void **state) { } static void test_AMactorIdInitBytes(void **state) { - TestState* test_state = *state; - AMresult* const result = AMactorIdInitBytes(test_state->src, test_state->count); + GroupState* group_state = *state; + AMresult* const result = AMactorIdInitBytes(group_state->src, group_state->count); if (AMresultStatus(result) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(result)); } 
@@ -75,14 +74,14 @@ static void test_AMactorIdInitBytes(void **state) { AMvalue const value = AMresultValue(result); assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); AMbyteSpan const bytes = AMactorIdBytes(value.actor_id); - assert_int_equal(bytes.count, test_state->count); - assert_memory_equal(bytes.src, test_state->src, bytes.count); + assert_int_equal(bytes.count, group_state->count); + assert_memory_equal(bytes.src, group_state->src, bytes.count); AMfree(result); } static void test_AMactorIdInitStr(void **state) { - TestState* test_state = *state; - AMresult* const result = AMactorIdInitStr(test_state->str); + GroupState* group_state = *state; + AMresult* const result = AMactorIdInitStr(group_state->str); if (AMresultStatus(result) != AM_STATUS_OK) { fail_msg("%s", AMerrorMessage(result)); } @@ -90,17 +89,17 @@ static void test_AMactorIdInitStr(void **state) { AMvalue const value = AMresultValue(result); assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); char const* const str = AMactorIdStr(value.actor_id); - assert_int_equal(strlen(str), test_state->count * 2); - assert_string_equal(str, test_state->str); + assert_int_equal(strlen(str), group_state->count * 2); + assert_string_equal(str, group_state->str); AMfree(result); } int run_actor_id_tests(void) { const struct CMUnitTest tests[] = { - cmocka_unit_test_setup_teardown(test_AMactorIdInit, setup, teardown), - cmocka_unit_test_setup_teardown(test_AMactorIdInitBytes, setup, teardown), - cmocka_unit_test_setup_teardown(test_AMactorIdInitStr, setup, teardown), + cmocka_unit_test(test_AMactorIdInit), + cmocka_unit_test(test_AMactorIdInitBytes), + cmocka_unit_test(test_AMactorIdInitStr), }; - return cmocka_run_group_tests(tests, NULL, NULL); + return cmocka_run_group_tests(tests, group_setup, group_teardown); } From 44b6709a60680e21b55772e72b3686948cb45e70 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 16 Jun 2022 17:49:16 -0400 Subject: [PATCH 471/730] add getBackend to automerge-js --- 
automerge-js/package.json | 2 +- automerge-js/src/index.ts | 4 ++++ automerge-js/test/basic_test.ts | 5 +++++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/automerge-js/package.json b/automerge-js/package.json index deebded8..8e835a0b 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "0.1.2", + "version": "0.1.3", "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 52f479e2..ef231727 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -28,6 +28,10 @@ export function use(api: API) { UseApi(api) } +export function getBackend(doc: Doc) : Automerge { + return _state(doc) +} + function _state(doc: Doc) : Automerge { const state = Reflect.get(doc,STATE) if (state == undefined) { diff --git a/automerge-js/test/basic_test.ts b/automerge-js/test/basic_test.ts index 058a9072..1b40c858 100644 --- a/automerge-js/test/basic_test.ts +++ b/automerge-js/test/basic_test.ts @@ -163,5 +163,10 @@ describe('Automerge', () => { }) assert.deepEqual(doc6, { list: [2,1,9,100,101,10,3,11,12] }); }) + + it('allows access to the backend', () => { + let doc = Automerge.init() + assert.deepEqual(Object.keys(Automerge.getBackend(doc)), ["ptr"]) + }) }) }) From f5e9e3537d34af23ec441fd6d9ee6106964390b0 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 16 Jun 2022 17:50:46 -0400 Subject: [PATCH 472/730] v0.1.4 --- automerge-js/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-js/package.json b/automerge-js/package.json index 8e835a0b..a87816e2 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "0.1.3", + "version": 
"0.1.4", "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", From 88073c0cf449b0a5c0e8566295d0388bcd4b34da Mon Sep 17 00:00:00 2001 From: Ryan Fitzgerald Date: Fri, 17 Jun 2022 20:08:48 -0700 Subject: [PATCH 473/730] Fix TypeScript syntax error in `automerge-wasm` definitions I'm not sure if there are some configurations under which this works, but I get index.d.ts:2:21 - error TS1005: ';' expected. 2 export default from "automerge-types" ~~~~~~~~~~~~~~~~~ both in my project that depends on `automerge-wasm` and when I run `tsc` in this repo. It seems like `export default from` is still a Stage 1 proposal, so I wouldn't expect it to be supported by TS, although I couldn't really find hard evidence one way or the other. It does seem like this syntax should be exactly equivalent based on the proposal doc though. --- automerge-wasm/index.d.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 28e41609..d515b3c7 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -1,2 +1,2 @@ export * from "automerge-types" -export default from "automerge-types" +export { default } from "automerge-types" From 32baae1a31a6dcdc4475e26d31f75c901ae5b0dc Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 01:09:50 -0700 Subject: [PATCH 474/730] Hoisted `InvalidChangeHashSlice` into the `Automerge` namespace. 
--- automerge/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index e15f7d36..19c9947b 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -93,6 +93,7 @@ pub use decoding::InvalidChangeError; pub use encoding::Error as EncodingError; pub use error::AutomergeError; pub use error::InvalidActorId; +pub use error::InvalidChangeHashSlice; pub use exid::ExId as ObjId; pub use keys::Keys; pub use keys_at::KeysAt; From 39db64e5d97213dcaf63b3ce16a386aab305b736 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 01:11:30 -0700 Subject: [PATCH 475/730] Publicized the `AMbyteSpan` fields. --- automerge-c/src/byte_span.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automerge-c/src/byte_span.rs b/automerge-c/src/byte_span.rs index 4ed7198a..c40b6de2 100644 --- a/automerge-c/src/byte_span.rs +++ b/automerge-c/src/byte_span.rs @@ -9,9 +9,9 @@ pub struct AMbyteSpan { /// \warning \p src is only valid until the `AMfree()` function is /// called on the `AMresult` struct hosting the array of bytes to /// which it points. - src: *const u8, + pub src: *const u8, /// The number of bytes in the array. - count: usize, + pub count: usize, } impl Default for AMbyteSpan { From 7b30c84a4c9453903295963e02a50d4705bffbb9 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 01:17:20 -0700 Subject: [PATCH 476/730] Added `AMchangeHashesInit()`. 
--- automerge-c/src/change_hashes.rs | 46 +++++++++++++++++++++++++++++--- 1 file changed, 43 insertions(+), 3 deletions(-) diff --git a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs index 893f1e7a..b4a71745 100644 --- a/automerge-c/src/change_hashes.rs +++ b/automerge-c/src/change_hashes.rs @@ -4,6 +4,7 @@ use std::ffi::c_void; use std::mem::size_of; use crate::byte_span::AMbyteSpan; +use crate::result::{to_result, AMresult}; #[repr(C)] struct Detail { @@ -46,7 +47,7 @@ impl Detail { } pub fn next(&mut self, n: isize) -> Option<&am::ChangeHash> { - if self.is_stopped() { + if n == 0 || self.is_stopped() { return None; } let slice: &[am::ChangeHash] = @@ -63,7 +64,7 @@ impl Detail { pub fn prev(&mut self, n: isize) -> Option<&am::ChangeHash> { self.advance(n); - if self.is_stopped() { + if n == 0 || self.is_stopped() { return None; } let slice: &[am::ChangeHash] = @@ -94,7 +95,10 @@ impl From for [u8; USIZE_USIZE_USIZE_] { /// \brief A random-access iterator over a sequence of change hashes. #[repr(C)] pub struct AMchangeHashes { - /// Reserved. + /// An implementation detail that is intentionally opaque. + /// \warning Modifying \p detail will cause undefined behavior. + /// \note The actual size of \p detail will vary by platform, this is just + /// the one for the platform this documentation was built on. detail: [u8; USIZE_USIZE_USIZE_], } @@ -203,6 +207,42 @@ pub unsafe extern "C" fn AMchangeHashesCmp( } } +/// \memberof AMchangeHashesInit +/// \brief Allocates an iterator over a sequence of change hashes and +/// initializes it from a sequence of byte spans. +/// +/// \param[in] src A pointer to an array of `AMbyteSpan` structs. +/// \param[in] count The number of `AMbyteSpan` structs to copy from \p src. +/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` +/// struct. +/// \pre \p src must be a valid address. +/// \pre `0 <=` \p count `<=` size of \p src. 
+/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfree()`. +/// \internal +/// +/// # Safety +/// src must be an AMbyteSpan array of size `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMchangeHashesInit(src: *const AMbyteSpan, count: usize) -> *mut AMresult { + let mut change_hashes = Vec::::new(); + for n in 0..count { + let byte_span = &*src.add(n); + let slice = std::slice::from_raw_parts(byte_span.src, byte_span.count); + match am::ChangeHash::try_from(slice) { + Ok(change_hash) => { + change_hashes.push(change_hash); + } + Err(e) => { + return to_result(Err(e)); + } + } + } + to_result(Ok::, am::InvalidChangeHashSlice>( + change_hashes, + )) +} + /// \memberof AMchangeHashes /// \brief Gets the change hash at the current position of an iterator over a /// sequence of change hashes and then advances it by at most \p |n| From 103d729bd12ab7ea4862f53e446f2e10b3a014ea Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 01:31:08 -0700 Subject: [PATCH 477/730] Replaced the term "length" with "size" in the documentation. --- automerge-c/src/actor_id.rs | 4 ++-- automerge-c/src/change.rs | 8 ++++---- automerge-c/src/sync/message.rs | 4 ++-- automerge-c/src/sync/state.rs | 4 ++-- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/automerge-c/src/actor_id.rs b/automerge-c/src/actor_id.rs index 00664c5c..02478e98 100644 --- a/automerge-c/src/actor_id.rs +++ b/automerge-c/src/actor_id.rs @@ -76,7 +76,7 @@ pub unsafe extern "C" fn AMactorIdInit() -> *mut AMresult { /// /// \param[in] src A pointer to a contiguous sequence of bytes. /// \param[in] count The number of bytes to copy from \p src. -/// \pre `0 <=` \p count `<=` length of \p src. +/// \pre `0 <=` \p count `<=` size of \p src. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMactorId` struct. 
/// \warning To avoid a memory leak, the returned `AMresult` struct must be @@ -84,7 +84,7 @@ pub unsafe extern "C" fn AMactorIdInit() -> *mut AMresult { /// \internal /// /// # Safety -/// src must be a byte array of length `>= count` +/// src must be a byte array of size `>= count` #[no_mangle] pub unsafe extern "C" fn AMactorIdInitBytes(src: *const u8, count: usize) -> *mut AMresult { let slice = std::slice::from_raw_parts(src, count); diff --git a/automerge-c/src/change.rs b/automerge-c/src/change.rs index 4e051d01..389fa33c 100644 --- a/automerge-c/src/change.rs +++ b/automerge-c/src/change.rs @@ -133,13 +133,13 @@ pub unsafe extern "C" fn AMchangeExtraBytes(change: *const AMchange) -> AMbyteSp /// \param[in] count The number of bytes in \p src to load. /// \return A pointer to an `AMresult` struct containing an `AMchange` struct. /// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` length of \p src. +/// \pre `0 <=` \p count `<=` size of \p src. /// \warning To avoid a memory leak, the returned `AMresult` struct must be /// deallocated with `AMfree()`. /// \internal /// /// # Safety -/// src must be a byte array of length `>= count` +/// src must be a byte array of size `>= count` #[no_mangle] pub unsafe extern "C" fn AMchangeFromBytes(src: *const u8, count: usize) -> *mut AMresult { let mut data = Vec::new(); @@ -329,13 +329,13 @@ pub unsafe extern "C" fn AMchangeRawBytes(change: *const AMchange) -> AMbyteSpan /// \return A pointer to an `AMresult` struct containing a sequence of /// `AMchange` structs. /// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` length of \p src. +/// \pre `0 <=` \p count `<=` size of \p src. /// \warning To avoid a memory leak, the returned `AMresult` struct must be /// deallocated with `AMfree()`. 
/// \internal /// /// # Safety -/// src must be a byte array of length `>= count` +/// src must be a byte array of size `>= count` #[no_mangle] pub unsafe extern "C" fn AMchangeLoadDocument(src: *const u8, count: usize) -> *mut AMresult { let mut data = Vec::new(); diff --git a/automerge-c/src/sync/message.rs b/automerge-c/src/sync/message.rs index 6481e671..14244059 100644 --- a/automerge-c/src/sync/message.rs +++ b/automerge-c/src/sync/message.rs @@ -75,13 +75,13 @@ pub unsafe extern "C" fn AMsyncMessageChanges(sync_message: *const AMsyncMessage /// \return A pointer to an `AMresult` struct containing an `AMsyncMessage` /// struct. /// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` length of \p src. +/// \pre `0 <=` \p count `<=` size of \p src. /// \warning To avoid a memory leak, the returned `AMresult` struct must be /// deallocated with `AMfree()`. /// \internal /// /// # Safety -/// src must be a byte array of length `>= count` +/// src must be a byte array of size `>= count` #[no_mangle] pub unsafe extern "C" fn AMsyncMessageDecode(src: *const u8, count: usize) -> *mut AMresult { let mut data = Vec::new(); diff --git a/automerge-c/src/sync/state.rs b/automerge-c/src/sync/state.rs index 6e0c4f9a..4e293c76 100644 --- a/automerge-c/src/sync/state.rs +++ b/automerge-c/src/sync/state.rs @@ -61,13 +61,13 @@ impl From for *mut AMsyncState { /// \return A pointer to an `AMresult` struct containing an `AMsyncState` /// struct. /// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` length of \p src. +/// \pre `0 <=` \p count `<=` size of \p src. /// \warning To avoid a memory leak, the returned `AMresult` struct must be /// deallocated with `AMfree()`. 
/// \internal /// /// # Safety -/// src must be a byte array of length `>= count` +/// src must be a byte array of size `>= count` #[no_mangle] pub unsafe extern "C" fn AMsyncStateDecode(src: *const u8, count: usize) -> *mut AMresult { let mut data = Vec::new(); From be130560f062c2f3f2780f7f7d469ea3a33fc67d Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 01:34:36 -0700 Subject: [PATCH 478/730] Added a check for a `0` increment in the iterator types. Improved the documentation for the `detail` field in the iterator types. --- automerge-c/src/changes.rs | 9 ++++++--- automerge-c/src/sync/haves.rs | 9 ++++++--- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs index f3615557..ba82ed99 100644 --- a/automerge-c/src/changes.rs +++ b/automerge-c/src/changes.rs @@ -49,7 +49,7 @@ impl Detail { } pub fn next(&mut self, n: isize) -> Option<*const AMchange> { - if self.is_stopped() { + if n == 0 || self.is_stopped() { return None; } let slice: &mut [am::Change] = @@ -74,7 +74,7 @@ impl Detail { pub fn prev(&mut self, n: isize) -> Option<*const AMchange> { self.advance(n); - if self.is_stopped() { + if n == 0 || self.is_stopped() { return None; } let slice: &mut [am::Change] = @@ -117,7 +117,10 @@ impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { /// \brief A random-access iterator over a sequence of changes. #[repr(C)] pub struct AMchanges { - /// Reserved. + /// An implementation detail that is intentionally opaque. + /// \warning Modifying \p detail will cause undefined behavior. + /// \note The actual size of \p detail will vary by platform, this is just + /// the one for the platform this documentation was built on. 
detail: [u8; USIZE_USIZE_USIZE_USIZE_], } diff --git a/automerge-c/src/sync/haves.rs b/automerge-c/src/sync/haves.rs index 62df8b1d..c8296ca3 100644 --- a/automerge-c/src/sync/haves.rs +++ b/automerge-c/src/sync/haves.rs @@ -53,7 +53,7 @@ impl Detail { } pub fn next(&mut self, n: isize) -> Option<*const AMsyncHave> { - if self.is_stopped() { + if n == 0 || self.is_stopped() { return None; } let slice: &[am::sync::Have] = @@ -78,7 +78,7 @@ impl Detail { pub fn prev(&mut self, n: isize) -> Option<*const AMsyncHave> { self.advance(n); - if self.is_stopped() { + if n == 0 || self.is_stopped() { return None; } let slice: &[am::sync::Have] = @@ -121,7 +121,10 @@ impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { /// \brief A random-access iterator over a sequence of synchronization haves. #[repr(C)] pub struct AMsyncHaves { - /// Reserved. + /// An implementation detail that is intentionally opaque. + /// \warning Modifying \p detail will cause undefined behavior. + /// \note The actual size of \p detail will vary by platform, this is just + /// the one for the platform this documentation was built on. detail: [u8; USIZE_USIZE_USIZE_USIZE_], } From ea8bd32cc1865cf92b8f54a240b030c3c2cb5ead Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 01:38:32 -0700 Subject: [PATCH 479/730] Added the `AMstrings` type. 
--- automerge-c/src/CMakeLists.txt | 3 +- automerge-c/src/lib.rs | 1 + automerge-c/src/strings.rs | 320 +++++++++++++++++++++++++++++++++ 3 files changed, 323 insertions(+), 1 deletion(-) create mode 100644 automerge-c/src/strings.rs diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt index 3638497d..f35ccc54 100644 --- a/automerge-c/src/CMakeLists.txt +++ b/automerge-c/src/CMakeLists.txt @@ -62,6 +62,7 @@ add_custom_command( doc/utils.rs obj.rs result.rs + strings.rs sync.rs sync/have.rs sync/haves.rs @@ -97,7 +98,7 @@ add_custom_command( # Compensate for cbindgen's translation of consecutive uppercase letters to "ScreamingSnakeCase". ${CMAKE_COMMAND} -DMATCH_REGEX=A_M\([^_]+\)_ -DREPLACE_EXPR=AM_\\1_ -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h COMMAND - # Compensate for cbindgen ignoring `std:mem::size_of()` calls. + # Compensate for cbindgen ignoring `std:mem::size_of()` calls. ${CMAKE_COMMAND} -DMATCH_REGEX=USIZE_ -DREPLACE_EXPR=\+${CMAKE_SIZEOF_VOID_P} -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs index f3dcfa09..dcfa4853 100644 --- a/automerge-c/src/lib.rs +++ b/automerge-c/src/lib.rs @@ -6,4 +6,5 @@ mod changes; mod doc; mod obj; mod result; +mod strings; mod sync; diff --git a/automerge-c/src/strings.rs b/automerge-c/src/strings.rs new file mode 100644 index 00000000..efb7b1bc --- /dev/null +++ b/automerge-c/src/strings.rs @@ -0,0 +1,320 @@ +use std::cmp::Ordering; +use std::collections::BTreeMap; +use std::ffi::{c_void, CString}; +use std::mem::size_of; +use std::os::raw::c_char; + +#[repr(C)] +struct Detail { + len: usize, + offset: isize, + ptr: *const c_void, + storage: *mut c_void, +} + +/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call +/// (https://github.com/eqrion/cbindgen/issues/252) but it will +/// 
propagate the name of a constant initialized from it so if the +/// constant's name is a symbolic representation of the value it can be +/// converted into a number by post-processing the header it generated. +pub const USIZE_USIZE_USIZE_USIZE_: usize = size_of::(); + +impl Detail { + fn new(strings: &[String], offset: isize, storage: &mut BTreeMap) -> Self { + let storage: *mut BTreeMap = storage; + Self { + len: strings.len(), + offset, + ptr: strings.as_ptr() as *const c_void, + storage: storage as *mut c_void, + } + } + + pub fn advance(&mut self, n: isize) { + if n != 0 && !self.is_stopped() { + let n = if self.offset < 0 { -n } else { n }; + let len = self.len as isize; + self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); + }; + } + + pub fn get_index(&self) -> usize { + (self.offset + + if self.offset < 0 { + self.len as isize + } else { + 0 + }) as usize + } + + pub fn next(&mut self, n: isize) -> Option<*const c_char> { + if n == 0 || self.is_stopped() { + return None; + } + let slice: &[String] = + unsafe { std::slice::from_raw_parts(self.ptr as *const String, self.len) }; + let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + let index = self.get_index(); + let value = match storage.get_mut(&index) { + Some(value) => value, + None => { + storage.insert(index, CString::new(slice[index].as_str()).unwrap()); + storage.get_mut(&index).unwrap() + } + }; + self.advance(n); + Some(value.as_ptr()) + } + + pub fn is_stopped(&self) -> bool { + let len = self.len as isize; + self.offset < -len || self.offset == len + } + + pub fn prev(&mut self, n: isize) -> Option<*const c_char> { + self.advance(n); + if n == 0 || self.is_stopped() { + return None; + } + let slice: &[String] = + unsafe { std::slice::from_raw_parts(self.ptr as *const String, self.len) }; + let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; + let index = self.get_index(); + Some( + match storage.get_mut(&index) { + Some(value) => value, + None => 
{ + storage.insert(index, CString::new(slice[index].as_str()).unwrap()); + storage.get_mut(&index).unwrap() + } + } + .as_ptr(), + ) + } + + pub fn reversed(&self) -> Self { + Self { + len: self.len, + offset: -(self.offset + 1), + ptr: self.ptr, + storage: self.storage, + } + } +} + +impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { + fn from(detail: Detail) -> Self { + unsafe { + std::slice::from_raw_parts( + (&detail as *const Detail) as *const u8, + USIZE_USIZE_USIZE_USIZE_, + ) + .try_into() + .unwrap() + } + } +} + +/// \struct AMstrings +/// \brief A random-access iterator over a sequence of UTF-8 strings. +#[repr(C)] +pub struct AMstrings { + /// An implementation detail that is intentionally opaque. + /// \warning Modifying \p detail will cause undefined behavior. + /// \note The actual size of \p detail will vary by platform, this is just + /// the one for the platform this documentation was built on. + detail: [u8; USIZE_USIZE_USIZE_USIZE_], +} + +impl AMstrings { + pub fn new(strings: &[String], storage: &mut BTreeMap) -> Self { + Self { + detail: Detail::new(strings, 0, storage).into(), + } + } + + pub fn advance(&mut self, n: isize) { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.advance(n); + } + + pub fn len(&self) -> usize { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + detail.len + } + + pub fn next(&mut self, n: isize) -> Option<*const c_char> { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.next(n) + } + + pub fn prev(&mut self, n: isize) -> Option<*const c_char> { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.prev(n) + } + + pub fn reversed(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.reversed().into(), + } + } +} + +impl AsRef<[String]> for AMstrings { + fn as_ref(&self) -> &[String] { + let detail = unsafe { &*(self.detail.as_ptr() as *const 
Detail) }; + unsafe { std::slice::from_raw_parts(detail.ptr as *const String, detail.len) } + } +} + +impl Default for AMstrings { + fn default() -> Self { + Self { + detail: [0; USIZE_USIZE_USIZE_USIZE_], + } + } +} + +/// \memberof AMstrings +/// \brief Advances an iterator over a sequence of UTF-8 strings by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction. +/// +/// \param[in] strings A pointer to an `AMstrings` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \pre \p strings must be a valid address. +/// \internal +/// +/// #Safety +/// strings must be a pointer to a valid AMstrings +#[no_mangle] +pub unsafe extern "C" fn AMstringsAdvance(strings: *mut AMstrings, n: isize) { + if let Some(strings) = strings.as_mut() { + strings.advance(n); + }; +} + +/// \memberof AMstrings +/// \brief Compares the sequences of UTF-8 strings underlying a pair of +/// iterators. +/// +/// \param[in] strings1 A pointer to an `AMstrings` struct. +/// \param[in] strings2 A pointer to an `AMstrings` struct. +/// \return `-1` if \p strings1 `<` \p strings2, `0` if +/// \p strings1 `==` \p strings2 and `1` if +/// \p strings1 `>` \p strings2. +/// \pre \p strings1 must be a valid address. +/// \pre \p strings2 must be a valid address. 
+/// \internal +/// +/// #Safety +/// strings1 must be a pointer to a valid AMstrings +/// strings2 must be a pointer to a valid AMstrings +#[no_mangle] +pub unsafe extern "C" fn AMstringsCmp( + strings1: *const AMstrings, + strings2: *const AMstrings, +) -> isize { + match (strings1.as_ref(), strings2.as_ref()) { + (Some(strings1), Some(strings2)) => match strings1.as_ref().cmp(strings2.as_ref()) { + Ordering::Less => -1, + Ordering::Equal => 0, + Ordering::Greater => 1, + }, + (None, Some(_)) => -1, + (Some(_), None) => 1, + (None, None) => 0, + } +} + +/// \memberof AMstrings +/// \brief Gets the key at the current position of an iterator over a +/// sequence of UTF-8 strings and then advances it by at most \p |n| +/// positions where the sign of \p n is relative to the iterator's direction. +/// +/// \param[in] strings A pointer to an `AMstrings` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \return A UTF-8 string that's `NULL` when \p strings was previously +/// advanced past its forward/reverse limit. +/// \pre \p strings must be a valid address. +/// \internal +/// +/// #Safety +/// strings must be a pointer to a valid AMstrings +#[no_mangle] +pub unsafe extern "C" fn AMstringsNext(strings: *mut AMstrings, n: isize) -> *const c_char { + if let Some(strings) = strings.as_mut() { + if let Some(key) = strings.next(n) { + return key; + } + } + std::ptr::null() +} + +/// \memberof AMstrings +/// \brief Advances an iterator over a sequence of UTF-8 strings by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction and then gets the key at its new position. +/// +/// \param[in] strings A pointer to an `AMstrings` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. 
+/// \return A UTF-8 string that's `NULL` when \p strings is presently advanced +/// past its forward/reverse limit. +/// \pre \p strings must be a valid address. +/// \internal +/// +/// #Safety +/// strings must be a pointer to a valid AMstrings +#[no_mangle] +pub unsafe extern "C" fn AMstringsPrev(strings: *mut AMstrings, n: isize) -> *const c_char { + if let Some(strings) = strings.as_mut() { + if let Some(key) = strings.prev(n) { + return key; + } + } + std::ptr::null() +} + +/// \memberof AMstrings +/// \brief Gets the size of the sequence of UTF-8 strings underlying an +/// iterator. +/// +/// \param[in] strings A pointer to an `AMstrings` struct. +/// \return The count of values in \p strings. +/// \pre \p strings must be a valid address. +/// \internal +/// +/// #Safety +/// strings must be a pointer to a valid AMstrings +#[no_mangle] +pub unsafe extern "C" fn AMstringsSize(strings: *const AMstrings) -> usize { + if let Some(strings) = strings.as_ref() { + strings.len() + } else { + 0 + } +} + +/// \memberof AMstrings +/// \brief Creates an iterator over the same sequence of UTF-8 strings as the +/// given one but with the opposite position and direction. +/// +/// \param[in] strings A pointer to an `AMstrings` struct. +/// \return An `AMstrings` struct. +/// \pre \p strings must be a valid address. +/// \internal +/// +/// #Safety +/// strings must be a pointer to a valid AMstrings +#[no_mangle] +pub unsafe extern "C" fn AMstringsReversed(strings: *const AMstrings) -> AMstrings { + if let Some(strings) = strings.as_ref() { + strings.reversed() + } else { + AMstrings::default() + } +} From 47c527740614c308885b78a858a33d300486f19d Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 01:53:31 -0700 Subject: [PATCH 480/730] Added `AMkeys()`. Removed `AMobjSizeAt()`. Added an optional `AMchangeHashes` argument to `AMobjSize()`. Replaced the term "length" with "size" in the documentation. 
--- automerge-c/src/doc.rs | 99 +++++++++++++++++-------------- automerge-c/src/result.rs | 52 ++++++++++++---- automerge-c/test/actor_id_tests.c | 2 +- automerge-c/test/doc_tests.c | 86 +++++++++++++++++++++++++++ automerge-c/test/list_tests.c | 3 +- automerge-c/test/map_tests.c | 3 +- 6 files changed, 188 insertions(+), 57 deletions(-) diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 617a142d..4cf386bb 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -4,7 +4,6 @@ use std::ops::{Deref, DerefMut}; use std::os::raw::c_char; use crate::actor_id::AMactorId; -use crate::change::AMchange; use crate::change_hashes::AMchangeHashes; use crate::obj::AMobjId; use crate::result::{to_result, AMresult}; @@ -295,7 +294,9 @@ pub unsafe extern "C" fn AMgetChangesAdded(doc1: *mut AMdoc, doc2: *mut AMdoc) - #[no_mangle] pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { let doc = to_doc!(doc); - to_result(Ok(doc.get_heads())) + to_result(Ok::, am::AutomergeError>( + doc.get_heads(), + )) } /// \memberof AMdoc @@ -313,6 +314,7 @@ pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { /// /// # Safety /// doc must be a pointer to a valid AMdoc +/// heads must be a pointer to a valid AMchangeHashes or NULL #[no_mangle] pub unsafe extern "C" fn AMgetMissingDeps( doc: *mut AMdoc, @@ -346,6 +348,37 @@ pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult to_result(doc.get_last_local_change()) } +/// \memberof AMdoc +/// \brief Gets the current or historical keys of an object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical +/// keys or `NULL` for current keys. +/// \return A pointer to an `AMresult` struct containing an `AMstrings` struct. +/// \pre \p doc must be a valid address. 
+/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// heads must be a pointer to a valid AMchangeHashes or NULL +#[no_mangle] +pub unsafe extern "C" fn AMkeys( + doc: *mut AMdoc, + obj_id: *const AMobjId, + heads: *const AMchangeHashes, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + match heads.as_ref() { + None => to_result(doc.keys(obj_id)), + Some(heads) => to_result(doc.keys_at(obj_id, heads.as_ref())), + } +} + /// \memberof AMdoc /// \brief Allocates storage for a document and initializes it with the compact /// form of an incremental save. @@ -355,13 +388,13 @@ pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMdoc` struct. /// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` length of \p src. +/// \pre `0 <=` \p count `<=` size of \p src. /// \warning To avoid a memory leak, the returned `AMresult` struct must be /// deallocated with `AMfree()`. /// \internal /// /// # Safety -/// src must be a byte array of length `>= count` +/// src must be a byte array of size `>= count` #[no_mangle] pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMresult { let mut data = Vec::new(); @@ -379,14 +412,14 @@ pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMresult { /// operations loaded from \p src. /// \pre \p doc must be a valid address. /// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` length of \p src. +/// \pre `0 <=` \p count `<=` size of \p src. /// \warning To avoid a memory leak, the returned `AMresult` struct must be /// deallocated with `AMfree()`. 
/// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// src must be a byte array of length `>= count` +/// src must be a byte array of size `>= count` #[no_mangle] pub unsafe extern "C" fn AMloadIncremental( doc: *mut AMdoc, @@ -423,57 +456,37 @@ pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMre } /// \memberof AMdoc -/// \brief Gets the size of an object. +/// \brief Gets the current or historical size of an object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \return The count of values in the object identified by \p obj_id. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical +/// size or `NULL` for current size. +/// \return A 64-bit unsigned integer. /// \pre \p doc must be a valid address. /// \internal /// /// # Safety /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL +/// heads must be a pointer to a valid AMchangeHashes or NULL #[no_mangle] -pub unsafe extern "C" fn AMobjSize(doc: *const AMdoc, obj_id: *const AMobjId) -> usize { +pub unsafe extern "C" fn AMobjSize( + doc: *const AMdoc, + obj_id: *const AMobjId, + heads: *const AMchangeHashes, +) -> usize { if let Some(doc) = doc.as_ref() { - doc.length(to_obj_id!(obj_id)) + let obj_id = to_obj_id!(obj_id); + match heads.as_ref() { + None => doc.length(obj_id), + Some(heads) => doc.length_at(obj_id, heads.as_ref()), + } } else { 0 } } -/// \memberof AMdoc -/// \brief Gets the historical size of an object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. -/// \param[in] change A pointer to an `AMchange` struct or `NULL`. -/// \return The count of values in the object identified by \p obj_id at -/// \p change. -/// \pre \p doc must be a valid address. 
-/// \internal -/// -/// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// change must be a pointer to a valid AMchange or NULL -#[no_mangle] -pub unsafe extern "C" fn AMobjSizeAt( - doc: *const AMdoc, - obj_id: *const AMobjId, - change: *const AMchange, -) -> usize { - if let Some(doc) = doc.as_ref() { - if let Some(change) = change.as_ref() { - let change: &am::Change = change.as_ref(); - let change_hashes = vec![change.hash]; - return doc.length_at(to_obj_id!(obj_id), &change_hashes); - } - }; - 0 -} - /// \memberof AMdoc /// \brief Gets the number of pending operations added during a document's /// current transaction. @@ -596,7 +609,7 @@ pub unsafe extern "C" fn AMsaveIncremental(doc: *mut AMdoc) -> *mut AMresult { /// /// # Safety /// doc must be a pointer to a valid AMdoc -/// value must be a byte array of length `>= count` +/// actor_id must be a pointer to a valid AMactorId #[no_mangle] pub unsafe extern "C" fn AMsetActor(doc: *mut AMdoc, actor_id: *const AMactorId) -> *mut AMresult { let doc = to_doc!(doc); diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 0e26105c..97873917 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -10,6 +10,7 @@ use crate::change_hashes::AMchangeHashes; use crate::changes::AMchanges; use crate::doc::AMdoc; use crate::obj::AMobjId; +use crate::strings::AMstrings; use crate::sync::{AMsyncMessage, AMsyncState}; /// \struct AMvalue @@ -51,6 +52,9 @@ use crate::sync::{AMsyncMessage, AMsyncState}; /// \var AMvalue::str /// A UTF-8 string. /// +/// \var AMvalue::strings +/// A sequence of UTF-8 strings as an `AMstrings` struct. +/// /// \var AMvalue::timestamp /// A Lamport timestamp. /// @@ -76,16 +80,14 @@ pub enum AMvalue<'a> { F64(f64), /// A 64-bit signed integer variant. Int(i64), - /* - /// A keys variant. - Keys(_), - */ /// A null variant. Null, /// An object identifier variant. 
ObjId(&'a AMobjId), /// A UTF-8 string variant. Str(*const libc::c_char), + /// A strings variant. + Strings(AMstrings), /// A Lamport timestamp variant. Timestamp(i64), /* @@ -108,12 +110,13 @@ pub enum AMresult { ActorId(AMactorId), ChangeHashes(Vec), Changes(Vec, BTreeMap), + Strings(Vec, BTreeMap), Doc(Box), Error(CString), ObjId(AMobjId), - Value(am::Value<'static>, Option), SyncMessage(AMsyncMessage), SyncState(AMsyncState), + Value(am::Value<'static>, Option), Void, } @@ -135,6 +138,20 @@ impl From for AMresult { } } +impl From> for AMresult { + fn from(keys: am::Keys<'_, '_>) -> Self { + let strings: Vec = keys.collect(); + AMresult::Strings(strings, BTreeMap::new()) + } +} + +impl From> for AMresult { + fn from(keys: am::KeysAt<'_, '_>) -> Self { + let strings: Vec = keys.collect(); + AMresult::Strings(strings, BTreeMap::new()) + } +} + impl From for AMresult { fn from(state: am::sync::State) -> Self { AMresult::SyncState(AMsyncState::new(state)) @@ -296,6 +313,15 @@ impl From, am::AutomergeError>> for AMresult { } } +impl From, am::InvalidChangeHashSlice>> for AMresult { + fn from(maybe: Result, am::InvalidChangeHashSlice>) -> Self { + match maybe { + Ok(change_hashes) => AMresult::ChangeHashes(change_hashes), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { @@ -401,6 +427,7 @@ pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { | AMresult::Value(_, _) => 1, AMresult::ChangeHashes(change_hashes) => change_hashes.len(), AMresult::Changes(changes, _) => changes.len(), + AMresult::Strings(strings, _) => strings.len(), } } else { 0 @@ -455,6 +482,15 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> AMresult::ObjId(obj_id) => { content = AMvalue::ObjId(obj_id); } + AMresult::Strings(strings, storage) => { + content = AMvalue::Strings(AMstrings::new(strings, storage)); + } + 
AMresult::SyncMessage(sync_message) => { + content = AMvalue::SyncMessage(sync_message); + } + AMresult::SyncState(sync_state) => { + content = AMvalue::SyncState(sync_state); + } AMresult::Value(value, hosted_str) => { match value { am::Value::Scalar(scalar) => match scalar.as_ref() { @@ -494,12 +530,6 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> am::Value::Object(_) => {} } } - AMresult::SyncMessage(sync_message) => { - content = AMvalue::SyncMessage(sync_message); - } - AMresult::SyncState(sync_state) => { - content = AMvalue::SyncState(sync_state); - } AMresult::Void => {} } }; diff --git a/automerge-c/test/actor_id_tests.c b/automerge-c/test/actor_id_tests.c index 1fa553c7..4a523aeb 100644 --- a/automerge-c/test/actor_id_tests.c +++ b/automerge-c/test/actor_id_tests.c @@ -36,7 +36,7 @@ static int group_teardown(void** state) { return 0; } -static void test_AMactorIdInit(void **state) { +static void test_AMactorIdInit() { AMresult* prior_result = NULL; AMbyteSpan prior_bytes; char const* prior_str = NULL; diff --git a/automerge-c/test/doc_tests.c b/automerge-c/test/doc_tests.c index f4a6b519..5b6f3ee7 100644 --- a/automerge-c/test/doc_tests.c +++ b/automerge-c/test/doc_tests.c @@ -8,6 +8,7 @@ #include /* local */ +#include "automerge.h" #include "group_state.h" #include "str_utils.h" @@ -37,6 +38,88 @@ static int teardown(void** state) { return 0; } +static void test_AMkeys_empty() { + AMresult* const doc_result = AMcreate(); + AMresult* const strings_result = AMkeys(AMresultValue(doc_result).doc, AM_ROOT, NULL); + if (AMresultStatus(strings_result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(strings_result)); + } + assert_int_equal(AMresultSize(strings_result), 0); + AMvalue value = AMresultValue(strings_result); + assert_int_equal(value.tag, AM_VALUE_STRINGS); + assert_int_equal(AMstringsSize(&value.strings), 0); + AMstrings forward = value.strings; + assert_null(AMstringsNext(&forward, 1)); + AMstrings reverse = 
AMstringsReversed(&value.strings); + assert_null(AMstringsNext(&reverse, 1)); + AMfree(strings_result); + AMfree(doc_result); +} + +static void test_AMkeys_list() { + AMresult* const doc_result = AMcreate(); + AMdoc* const doc = AMresultValue(doc_result).doc; + AMfree(AMlistPutInt(doc, AM_ROOT, 0, true, 1)); + AMfree(AMlistPutInt(doc, AM_ROOT, 1, true, 2)); + AMfree(AMlistPutInt(doc, AM_ROOT, 2, true, 3)); + AMresult* const strings_result = AMkeys(doc, AM_ROOT, NULL); + if (AMresultStatus(strings_result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(strings_result)); + } + assert_int_equal(AMresultSize(strings_result), 3); + AMvalue value = AMresultValue(strings_result); + assert_int_equal(value.tag, AM_VALUE_STRINGS); + AMstrings forward = value.strings; + assert_int_equal(AMstringsSize(&forward), 3); + char const* str = AMstringsNext(&forward, 1); + assert_ptr_equal(strstr(str, "1@"), str); + str = AMstringsNext(&forward, 1); + assert_ptr_equal(strstr(str, "2@"), str); + str = AMstringsNext(&forward, 1); + assert_ptr_equal(strstr(str, "3@"), str); + assert_null(AMstringsNext(&forward, 1)); + AMstrings reverse = AMstringsReversed(&value.strings); + assert_int_equal(AMstringsSize(&reverse), 3); + str = AMstringsNext(&reverse, 1); + assert_ptr_equal(strstr(str, "3@"), str); + str = AMstringsNext(&reverse, 1); + assert_ptr_equal(strstr(str, "2@"), str); + str = AMstringsNext(&reverse, 1); + assert_ptr_equal(strstr(str, "1@"), str); + assert_null(AMstringsNext(&reverse, 1)); + AMfree(strings_result); + AMfree(doc_result); +} + +static void test_AMkeys_map() { + AMresult* const doc_result = AMcreate(); + AMdoc* const doc = AMresultValue(doc_result).doc; + AMfree(AMmapPutInt(doc, AM_ROOT, "one", 1)); + AMfree(AMmapPutInt(doc, AM_ROOT, "two", 2)); + AMfree(AMmapPutInt(doc, AM_ROOT, "three", 3)); + AMresult* const strings_result = AMkeys(doc, AM_ROOT, NULL); + if (AMresultStatus(strings_result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(strings_result)); + } 
+ assert_int_equal(AMresultSize(strings_result), 3); + AMvalue value = AMresultValue(strings_result); + assert_int_equal(value.tag, AM_VALUE_STRINGS); + AMstrings forward = value.strings; + assert_int_equal(AMstringsSize(&forward), 3); + assert_string_equal(AMstringsNext(&forward, 1), "one"); + assert_string_equal(AMstringsNext(&forward, 1), "three"); + assert_string_equal(AMstringsNext(&forward, 1), "two"); + assert_null(AMstringsNext(&forward, 1)); + AMstrings reverse = AMstringsReversed(&value.strings); + assert_int_equal(AMstringsSize(&reverse), 3); + assert_string_equal(AMstringsNext(&reverse, 1), "two"); + assert_string_equal(AMstringsNext(&reverse, 1), "three"); + assert_string_equal(AMstringsNext(&reverse, 1), "one"); + assert_null(AMstringsNext(&reverse, 1)); + AMfree(strings_result); + AMfree(doc_result); +} + static void test_AMputActor_bytes(void **state) { TestState* test_state = *state; GroupState* group_state = test_state->group_state; @@ -94,6 +177,9 @@ static void test_AMputActor_hex(void **state) { int run_doc_tests(void) { const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMkeys_empty), + cmocka_unit_test(test_AMkeys_list), + cmocka_unit_test(test_AMkeys_map), cmocka_unit_test_setup_teardown(test_AMputActor_bytes, setup, teardown), cmocka_unit_test_setup_teardown(test_AMputActor_hex, setup, teardown), }; diff --git a/automerge-c/test/list_tests.c b/automerge-c/test/list_tests.c index 2f4a0e80..f6f5c3d7 100644 --- a/automerge-c/test/list_tests.c +++ b/automerge-c/test/list_tests.c @@ -10,6 +10,7 @@ #include /* local */ +#include "automerge.h" #include "group_state.h" #include "macro_utils.h" @@ -152,7 +153,7 @@ static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ AMvalue value = AMresultValue(res); \ assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ assert_non_null(value.obj_id); \ - assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ + assert_int_equal(AMobjSize(group_state->doc, value.obj_id, NULL), 0); 
\ AMfree(res); \ } diff --git a/automerge-c/test/map_tests.c b/automerge-c/test/map_tests.c index 234a5523..c90b5d2b 100644 --- a/automerge-c/test/map_tests.c +++ b/automerge-c/test/map_tests.c @@ -10,6 +10,7 @@ #include /* local */ +#include "automerge.h" #include "group_state.h" #include "macro_utils.h" @@ -96,7 +97,7 @@ static void test_AMmapPutObject_ ## label(void **state) { \ AMvalue value = AMresultValue(res); \ assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ assert_non_null(value.obj_id); \ - assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \ + assert_int_equal(AMobjSize(group_state->doc, value.obj_id, NULL), 0); \ AMfree(res); \ } From 7bdf726ce17278e0a10ddd3c534834a3931a3d0c Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 02:07:33 -0700 Subject: [PATCH 481/730] Sublimated memory management in the quickstart example. --- automerge-c/examples/quickstart.c | 182 +++++++++++++++--------------- 1 file changed, 90 insertions(+), 92 deletions(-) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index 5b90fdcd..271e4727 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -3,121 +3,96 @@ #include -AMvalue test(AMresult*, AMvalueVariant const); +typedef struct StackNode ResultStack; + +AMvalue push(ResultStack**, AMresult*, AMvalueVariant const); + +size_t free_results(ResultStack*); /* * Based on https://automerge.github.io/docs/quickstart */ int main(int argc, char** argv) { - AMresult* const doc1_result = AMcreate(); - AMdoc* const doc1 = AMresultValue(doc1_result).doc; - if (doc1 == NULL) { - fprintf(stderr, "`AMcreate()` failure."); - exit(EXIT_FAILURE); - } - AMresult* const cards_result = AMmapPutObject(doc1, AM_ROOT, "cards", AM_OBJ_TYPE_LIST); - AMvalue value = test(cards_result, AM_VALUE_OBJ_ID); - AMobjId const* const cards = value.obj_id; - AMresult* const card1_result = AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP); - value = 
test(card1_result, AM_VALUE_OBJ_ID); - AMobjId const* const card1 = value.obj_id; - AMresult* result = AMmapPutStr(doc1, card1, "title", "Rewrite everything in Clojure"); - test(result, AM_VALUE_VOID); - AMfree(result); - result = AMmapPutBool(doc1, card1, "done", false); - test(result, AM_VALUE_VOID); - AMfree(result); - AMresult* const card2_result = AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP); - value = test(card2_result, AM_VALUE_OBJ_ID); - AMobjId const* const card2 = value.obj_id; - result = AMmapPutStr(doc1, card2, "title", "Rewrite everything in Haskell"); - test(result, AM_VALUE_VOID); - AMfree(result); - result = AMmapPutBool(doc1, card2, "done", false); - test(result, AM_VALUE_VOID); - AMfree(result); - AMfree(card2_result); - result = AMcommit(doc1, "Add card", NULL); - test(result, AM_VALUE_CHANGE_HASHES); - AMfree(result); + ResultStack* results = NULL; + AMdoc* const doc1 = push(&results, AMcreate(), AM_VALUE_DOC).doc; + AMobjId const* const + cards = push(&results, AMmapPutObject(doc1, AM_ROOT, "cards", AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID).obj_id; + AMobjId const* const + card1 = push(&results, AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID).obj_id; + push(&results, AMmapPutStr(doc1, card1, "title", "Rewrite everything in Clojure"), AM_VALUE_VOID); + push(&results, AMmapPutBool(doc1, card1, "done", false), AM_VALUE_VOID); + AMobjId const* const + card2 = push(&results, AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID).obj_id; + push(&results, AMmapPutStr(doc1, card2, "title", "Rewrite everything in Haskell"), AM_VALUE_VOID); + push(&results, AMmapPutBool(doc1, card2, "done", false), AM_VALUE_VOID); + push(&results, AMcommit(doc1, "Add card", NULL), AM_VALUE_CHANGE_HASHES); - AMresult* doc2_result = AMcreate(); - AMdoc* doc2 = AMresultValue(doc2_result).doc; - if (doc2 == NULL) { - fprintf(stderr, "`AMcreate()` failure."); - AMfree(card1_result); - AMfree(cards_result); - AMfree(doc1_result); 
- exit(EXIT_FAILURE); - } - result = AMmerge(doc2, doc1); - test(result, AM_VALUE_CHANGE_HASHES); - AMfree(result); - AMfree(doc2_result); + AMdoc* doc2 = push(&results, AMcreate(), AM_VALUE_DOC).doc; + push(&results, AMmerge(doc2, doc1), AM_VALUE_CHANGE_HASHES); - AMresult* const save_result = AMsave(doc1); - value = test(save_result, AM_VALUE_BYTES); - AMbyteSpan binary = value.bytes; - doc2_result = AMload(binary.src, binary.count); - doc2 = AMresultValue(doc2_result).doc; - AMfree(save_result); - if (doc2 == NULL) { - fprintf(stderr, "`AMload()` failure."); - AMfree(card1_result); - AMfree(cards_result); - AMfree(doc1_result); - exit(EXIT_FAILURE); - } + AMbyteSpan const binary = push(&results, AMsave(doc1), AM_VALUE_BYTES).bytes; + doc2 = push(&results, AMload(binary.src, binary.count), AM_VALUE_DOC).doc; - result = AMmapPutBool(doc1, card1, "done", true); - test(result, AM_VALUE_VOID); - AMfree(result); - result = AMcommit(doc1, "Mark card as done", NULL); - test(result, AM_VALUE_CHANGE_HASHES); - AMfree(result); - AMfree(card1_result); + push(&results, AMmapPutBool(doc1, card1, "done", true), AM_VALUE_VOID); + push(&results, AMcommit(doc1, "Mark card as done", NULL), AM_VALUE_CHANGE_HASHES); - result = AMlistDelete(doc2, cards, 0); - test(result, AM_VALUE_VOID); - AMfree(result); - result = AMcommit(doc2, "Delete card", NULL); - test(result, AM_VALUE_CHANGE_HASHES); - AMfree(result); + push(&results, AMlistDelete(doc2, cards, 0), AM_VALUE_VOID); + push(&results, AMcommit(doc2, "Delete card", NULL), AM_VALUE_CHANGE_HASHES); - result = AMmerge(doc1, doc2); - test(result, AM_VALUE_CHANGE_HASHES); - AMfree(result); - AMfree(doc2_result); + push(&results, AMmerge(doc1, doc2), AM_VALUE_CHANGE_HASHES); - result = AMgetChanges(doc1, NULL); - value = test(result, AM_VALUE_CHANGES); + AMchanges changes = push(&results, AMgetChanges(doc1, NULL), AM_VALUE_CHANGES).changes; AMchange const* change = NULL; - while ((change = AMchangesNext(&value.changes, 1)) != NULL) { - 
size_t const size = AMobjSizeAt(doc1, cards, change); - printf("%s %ld\n", AMchangeMessage(change), size); + while ((change = AMchangesNext(&changes, 1)) != NULL) { + AMbyteSpan const change_hash = AMchangeHash(change); + AMchangeHashes const + heads = push(&results, AMchangeHashesInit(&change_hash, 1), AM_VALUE_CHANGE_HASHES).change_hashes; + printf("%s %ld\n", AMchangeMessage(change), AMobjSize(doc1, cards, &heads)); } - AMfree(result); - AMfree(cards_result); - AMfree(doc1_result); + free_results(results); } /** - * \brief Extracts a value with the given discriminant from the given result - * or writes a message to `stderr`, frees the given result and - * terminates the program. + * \brief A node in a singly-linked list of `AMresult` struct pointers. + */ +struct StackNode { + AMresult* result; + struct StackNode* next; +}; + +/** + * \brief Pushes the given result onto the given stack and then either gets a + * value with the given discriminant from the result or writes a message + * to `stderr`, frees all results in the stack and terminates the + * program. * + * \param[in] stack A pointer to a pointer to a `ResultStack` struct. .* \param[in] result A pointer to an `AMresult` struct. * \param[in] discriminant An `AMvalueVariant` enum tag. * \return An `AMvalue` struct. + * \pre \p stack must be a valid address. * \pre \p result must be a valid address. */ -AMvalue test(AMresult* result, AMvalueVariant const discriminant) { +AMvalue push(ResultStack** stack, AMresult* result, AMvalueVariant const discriminant) { static char prelude[64]; - if (result == NULL) { - fprintf(stderr, "NULL `AMresult` struct pointer."); + if (stack == NULL) { + fprintf(stderr, "Null `ResultStack` struct pointer pointer; previous " + "`AMresult` structs may have leaked!"); + AMfree(result); exit(EXIT_FAILURE); } + if (result == NULL) { + fprintf(stderr, "Null `AMresult` struct pointer."); + free_results(*stack); + exit(EXIT_FAILURE); + } + /* Push the result onto the stack. 
*/ + struct StackNode* top = malloc(sizeof(struct StackNode)); + top->result = result; + top->next = *stack; + *stack = top; + AMstatus const status = AMresultStatus(result); if (status != AM_STATUS_OK) { switch (status) { @@ -126,7 +101,7 @@ AMvalue test(AMresult* result, AMvalueVariant const discriminant) { default: sprintf(prelude, "Unknown `AMstatus` tag %d", status); } fprintf(stderr, "%s; %s.", prelude, AMerrorMessage(result)); - AMfree(result); + free_results(*stack); exit(EXIT_FAILURE); } AMvalue const value = AMresultValue(result); @@ -139,19 +114,42 @@ AMvalue test(AMresult* result, AMvalueVariant const discriminant) { case AM_VALUE_CHANGE_HASHES: label = "AM_VALUE_CHANGE_HASHES"; break; case AM_VALUE_CHANGES: label = "AM_VALUE_CHANGES"; break; case AM_VALUE_COUNTER: label = "AM_VALUE_COUNTER"; break; + case AM_VALUE_DOC: label = "AM_VALUE_DOC"; break; case AM_VALUE_F64: label = "AM_VALUE_F64"; break; case AM_VALUE_INT: label = "AM_VALUE_INT"; break; - case AM_VALUE_VOID: label = "AM_VALUE_VOID"; break; case AM_VALUE_NULL: label = "AM_VALUE_NULL"; break; case AM_VALUE_OBJ_ID: label = "AM_VALUE_OBJ_ID"; break; case AM_VALUE_STR: label = "AM_VALUE_STR"; break; + case AM_VALUE_STRINGS: label = "AM_VALUE_STRINGS"; break; case AM_VALUE_TIMESTAMP: label = "AM_VALUE_TIMESTAMP"; break; case AM_VALUE_UINT: label = "AM_VALUE_UINT"; break; + case AM_VALUE_SYNC_MESSAGE: label = "AM_VALUE_SYNC_MESSAGE"; break; + case AM_VALUE_SYNC_STATE: label = "AM_VALUE_SYNC_STATE"; break; + case AM_VALUE_VOID: label = "AM_VALUE_VOID"; break; default: label = ""; } fprintf(stderr, "Unexpected `AMvalueVariant` tag `%s` (%d).", label, value.tag); - AMfree(result); + free_results(*stack); exit(EXIT_FAILURE); } return value; } + +/** + * \brief Frees a stack of `AMresult` structs. + * + * \param[in] stack A pointer to a `ResultStack` struct. + * \return The number of stack nodes freed. + * \pre \p stack must be a valid address. 
+ */ +size_t free_results(ResultStack* stack) { + struct StackNode* prev = NULL; + size_t count = 0; + for (struct StackNode* node = stack; node; node = node->next, ++count) { + free(prev); + AMfree(node->result); + prev = node; + } + free(prev); + return count; +} From db0333fc5aa7b59c9a01a118ebb6bcbe3f7c926a Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 02:16:33 -0700 Subject: [PATCH 482/730] Added `AM_ROOT` usage to the documentation. Renamed the `value` argument of `AM{list,map}PutBytes()` to `src` for consistency with standard `memcpy()`. --- automerge-c/src/doc/list.rs | 44 ++++++++++++++++++------------------- automerge-c/src/doc/map.rs | 40 ++++++++++++++++----------------- 2 files changed, 42 insertions(+), 42 deletions(-) diff --git a/automerge-c/src/doc/list.rs b/automerge-c/src/doc/list.rs index bbd999e3..ad3fe978 100644 --- a/automerge-c/src/doc/list.rs +++ b/automerge-c/src/doc/list.rs @@ -10,7 +10,7 @@ use crate::result::{to_result, AMresult}; /// \brief Deletes an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. @@ -36,7 +36,7 @@ pub unsafe extern "C" fn AMlistDelete( /// \brief Gets the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index within the list object identified by \p obj_id. /// \return A pointer to an `AMresult` struct. /// \pre \p doc must be a valid address. @@ -63,7 +63,7 @@ pub unsafe extern "C" fn AMlistGet( /// value. 
/// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. @@ -91,7 +91,7 @@ pub unsafe extern "C" fn AMlistIncrement( /// \brief Puts a boolean as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. @@ -127,17 +127,17 @@ pub unsafe extern "C" fn AMlistPutBool( /// \brief Puts a sequence of bytes as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A pointer to an array of bytes. -/// \param[in] count The number of bytes to copy from \p value. +/// \param[in] insert A flag to insert \p src before \p index instead of +/// writing \p src over \p index. +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The number of bytes to copy from \p src. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \pre \p value must be a valid address. 
-/// \pre `0 <=` \p count `<=` length of \p value. +/// \pre \p src must be a valid address. +/// \pre `0 <=` \p count `<=` size of \p src. /// \warning To avoid a memory leak, the returned `AMresult` struct must be /// deallocated with `AMfree()`. /// \internal @@ -145,20 +145,20 @@ pub unsafe extern "C" fn AMlistPutBool( /// # Safety /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL -/// value must be a byte array of length `>= count` +/// src must be a byte array of size `>= count` #[no_mangle] pub unsafe extern "C" fn AMlistPutBytes( doc: *mut AMdoc, obj_id: *const AMobjId, index: usize, insert: bool, - value: *const u8, + src: *const u8, count: usize, ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); let mut vec = Vec::new(); - vec.extend_from_slice(std::slice::from_raw_parts(value, count)); + vec.extend_from_slice(std::slice::from_raw_parts(src, count)); to_result(if insert { doc.insert(obj_id, index, vec) } else { @@ -170,7 +170,7 @@ pub unsafe extern "C" fn AMlistPutBytes( /// \brief Puts a CRDT counter as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. @@ -207,7 +207,7 @@ pub unsafe extern "C" fn AMlistPutCounter( /// \brief Puts a float as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. 
/// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. @@ -243,7 +243,7 @@ pub unsafe extern "C" fn AMlistPutF64( /// \brief Puts a signed integer as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. @@ -279,7 +279,7 @@ pub unsafe extern "C" fn AMlistPutInt( /// \brief Puts null as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. @@ -314,7 +314,7 @@ pub unsafe extern "C" fn AMlistPutNull( /// \brief Puts an empty object as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. @@ -351,7 +351,7 @@ pub unsafe extern "C" fn AMlistPutObject( /// \brief Puts a UTF-8 string as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. 
/// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. @@ -390,7 +390,7 @@ pub unsafe extern "C" fn AMlistPutStr( /// \brief Puts a Lamport timestamp as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. @@ -427,7 +427,7 @@ pub unsafe extern "C" fn AMlistPutTimestamp( /// \brief Puts an unsigned integer as the value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. diff --git a/automerge-c/src/doc/map.rs b/automerge-c/src/doc/map.rs index d7b32ce4..a040bc1c 100644 --- a/automerge-c/src/doc/map.rs +++ b/automerge-c/src/doc/map.rs @@ -11,7 +11,7 @@ use crate::result::{to_result, AMresult}; /// \brief Deletes a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. @@ -38,7 +38,7 @@ pub unsafe extern "C" fn AMmapDelete( /// \brief Gets the value for a key in a map object. 
/// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \return A pointer to an `AMresult` struct. /// \pre \p doc must be a valid address. @@ -65,7 +65,7 @@ pub unsafe extern "C" fn AMmapGet( /// \brief Increments a counter for a key in a map object by the given value. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. @@ -94,7 +94,7 @@ pub unsafe extern "C" fn AMmapIncrement( /// \brief Puts a boolean as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A boolean. /// \return A pointer to an `AMresult` struct containing a void. @@ -123,15 +123,15 @@ pub unsafe extern "C" fn AMmapPutBool( /// \brief Puts a sequence of bytes as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A pointer to an array of bytes. -/// \param[in] count The number of bytes to copy from \p value. +/// \param[in] src A pointer to an array of bytes. 
+/// \param[in] count The number of bytes to copy from \p src. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. /// \pre \p key must be a valid address. -/// \pre \p value must be a valid address. -/// \pre `0 <=` \p count `<=` length of \p value. +/// \pre \p src must be a valid address. +/// \pre `0 <=` \p count `<=` size of \p src. /// \warning To avoid a memory leak, the returned `AMresult` struct must be /// deallocated with `AMfree()`. /// \internal @@ -140,18 +140,18 @@ pub unsafe extern "C" fn AMmapPutBool( /// doc must be a pointer to a valid AMdoc /// obj_id must be a pointer to a valid AMobjId or NULL /// key must be a c string of the map key to be used -/// value must be a byte array of length `>= count` +/// src must be a byte array of size `>= count` #[no_mangle] pub unsafe extern "C" fn AMmapPutBytes( doc: *mut AMdoc, obj_id: *const AMobjId, key: *const c_char, - value: *const u8, + src: *const u8, count: usize, ) -> *mut AMresult { let doc = to_doc!(doc); let mut vec = Vec::new(); - vec.extend_from_slice(std::slice::from_raw_parts(value, count)); + vec.extend_from_slice(std::slice::from_raw_parts(src, count)); to_result(doc.put(to_obj_id!(obj_id), to_str(key), vec)) } @@ -159,7 +159,7 @@ pub unsafe extern "C" fn AMmapPutBytes( /// \brief Puts a CRDT counter as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. @@ -192,7 +192,7 @@ pub unsafe extern "C" fn AMmapPutCounter( /// \brief Puts null as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. 
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. @@ -219,7 +219,7 @@ pub unsafe extern "C" fn AMmapPutNull( /// \brief Puts an empty object as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] obj_type An `AMobjIdType` enum tag. /// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct. @@ -248,7 +248,7 @@ pub unsafe extern "C" fn AMmapPutObject( /// \brief Puts a float as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit float. /// \return A pointer to an `AMresult` struct containing a void. @@ -277,7 +277,7 @@ pub unsafe extern "C" fn AMmapPutF64( /// \brief Puts a signed integer as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. 
@@ -306,7 +306,7 @@ pub unsafe extern "C" fn AMmapPutInt( /// \brief Puts a UTF-8 string as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A UTF-8 string. /// \return A pointer to an `AMresult` struct containing a void. @@ -337,7 +337,7 @@ pub unsafe extern "C" fn AMmapPutStr( /// \brief Puts a Lamport timestamp as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. @@ -370,7 +370,7 @@ pub unsafe extern "C" fn AMmapPutTimestamp( /// \brief Puts an unsigned integer as the value of a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit unsigned integer. /// \return A pointer to an `AMresult` struct containing a void. From 770c064978dfd72d085914a7c81adc9bbcc3ed24 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 13:45:32 -0700 Subject: [PATCH 483/730] Made cosmetic changes to the quickstart example. 
--- automerge-c/examples/quickstart.c | 49 +++++++++++++++---------------- 1 file changed, 24 insertions(+), 25 deletions(-) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index 271e4727..8d5dd5be 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -61,10 +61,10 @@ struct StackNode { }; /** - * \brief Pushes the given result onto the given stack and then either gets a - * value with the given discriminant from the result or writes a message - * to `stderr`, frees all results in the stack and terminates the - * program. + * \brief Pushes the given result onto the given stack and then either gets the + * value matching the given discriminant from that result or, failing + * that, prints an error message to `stderr`, frees all results in that + * stack and aborts. * * \param[in] stack A pointer to a pointer to a `ResultStack` struct. .* \param[in] result A pointer to an `AMresult` struct. @@ -92,7 +92,6 @@ AMvalue push(ResultStack** stack, AMresult* result, AMvalueVariant const discrim top->result = result; top->next = *stack; *stack = top; - AMstatus const status = AMresultStatus(result); if (status != AM_STATUS_OK) { switch (status) { @@ -108,27 +107,27 @@ AMvalue push(ResultStack** stack, AMresult* result, AMvalueVariant const discrim if (value.tag != discriminant) { char const* label = NULL; switch (value.tag) { - case AM_VALUE_ACTOR_ID: label = "AM_VALUE_ACTOR_ID"; break; - case AM_VALUE_BOOLEAN: label = "AM_VALUE_BOOLEAN"; break; - case AM_VALUE_BYTES: label = "AM_VALUE_BYTES"; break; - case AM_VALUE_CHANGE_HASHES: label = "AM_VALUE_CHANGE_HASHES"; break; - case AM_VALUE_CHANGES: label = "AM_VALUE_CHANGES"; break; - case AM_VALUE_COUNTER: label = "AM_VALUE_COUNTER"; break; - case AM_VALUE_DOC: label = "AM_VALUE_DOC"; break; - case AM_VALUE_F64: label = "AM_VALUE_F64"; break; - case AM_VALUE_INT: label = "AM_VALUE_INT"; break; - case AM_VALUE_NULL: label = "AM_VALUE_NULL"; break; - case 
AM_VALUE_OBJ_ID: label = "AM_VALUE_OBJ_ID"; break; - case AM_VALUE_STR: label = "AM_VALUE_STR"; break; - case AM_VALUE_STRINGS: label = "AM_VALUE_STRINGS"; break; - case AM_VALUE_TIMESTAMP: label = "AM_VALUE_TIMESTAMP"; break; - case AM_VALUE_UINT: label = "AM_VALUE_UINT"; break; - case AM_VALUE_SYNC_MESSAGE: label = "AM_VALUE_SYNC_MESSAGE"; break; - case AM_VALUE_SYNC_STATE: label = "AM_VALUE_SYNC_STATE"; break; - case AM_VALUE_VOID: label = "AM_VALUE_VOID"; break; - default: label = ""; + case AM_VALUE_ACTOR_ID: label = "ACTOR_ID"; break; + case AM_VALUE_BOOLEAN: label = "BOOLEAN"; break; + case AM_VALUE_BYTES: label = "BYTES"; break; + case AM_VALUE_CHANGE_HASHES: label = "CHANGE_HASHES"; break; + case AM_VALUE_CHANGES: label = "CHANGES"; break; + case AM_VALUE_COUNTER: label = "COUNTER"; break; + case AM_VALUE_DOC: label = "DOC"; break; + case AM_VALUE_F64: label = "F64"; break; + case AM_VALUE_INT: label = "INT"; break; + case AM_VALUE_NULL: label = "NULL"; break; + case AM_VALUE_OBJ_ID: label = "OBJ_ID"; break; + case AM_VALUE_STR: label = "STR"; break; + case AM_VALUE_STRINGS: label = "STRINGS"; break; + case AM_VALUE_TIMESTAMP: label = "TIMESTAMP"; break; + case AM_VALUE_UINT: label = "UINT"; break; + case AM_VALUE_SYNC_MESSAGE: label = "SYNC_MESSAGE"; break; + case AM_VALUE_SYNC_STATE: label = "SYNC_STATE"; break; + case AM_VALUE_VOID: label = "VOID"; break; + default: label = "..."; } - fprintf(stderr, "Unexpected `AMvalueVariant` tag `%s` (%d).", label, value.tag); + fprintf(stderr, "Unexpected `AMvalueVariant` tag `AM_VALUE_%s` (%d).", label, value.tag); free_results(*stack); exit(EXIT_FAILURE); } From bf4988dccacf188abfa6d9d6b69888aea1f60522 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 13:50:05 -0700 Subject: [PATCH 484/730] Fixed `AM{change_hashes,changes,haves,strings}Prev()`. 
--- automerge-c/src/change_hashes.rs | 23 +++++++++++++++-------- automerge-c/src/changes.rs | 23 +++++++++++++++-------- automerge-c/src/strings.rs | 23 +++++++++++++++-------- automerge-c/src/sync/haves.rs | 23 +++++++++++++++-------- automerge-c/test/doc_tests.c | 32 ++++++++++++++++++++++++++++++++ 5 files changed, 92 insertions(+), 32 deletions(-) diff --git a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs index b4a71745..e8d5b1bf 100644 --- a/automerge-c/src/change_hashes.rs +++ b/automerge-c/src/change_hashes.rs @@ -30,11 +30,16 @@ impl Detail { } pub fn advance(&mut self, n: isize) { - if n != 0 && !self.is_stopped() { - let n = if self.offset < 0 { -n } else { n }; - let len = self.len as isize; - self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); - }; + if n == 0 { + return; + } + let len = self.len as isize; + self.offset = if self.offset < 0 { + /* It's reversed. */ + std::cmp::max(-(len + 1), std::cmp::min(self.offset - n, -1)) + } else { + std::cmp::max(0, std::cmp::min(self.offset + n, len)) + } } pub fn get_index(&self) -> usize { @@ -47,7 +52,7 @@ impl Detail { } pub fn next(&mut self, n: isize) -> Option<&am::ChangeHash> { - if n == 0 || self.is_stopped() { + if self.is_stopped() { return None; } let slice: &[am::ChangeHash] = @@ -63,8 +68,10 @@ impl Detail { } pub fn prev(&mut self, n: isize) -> Option<&am::ChangeHash> { - self.advance(n); - if n == 0 || self.is_stopped() { + /* Check for rewinding. 
*/ + let prior_offset = self.offset; + self.advance(-n); + if (self.offset == prior_offset) || self.is_stopped() { return None; } let slice: &[am::ChangeHash] = diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs index ba82ed99..54244304 100644 --- a/automerge-c/src/changes.rs +++ b/automerge-c/src/changes.rs @@ -32,11 +32,16 @@ impl Detail { } pub fn advance(&mut self, n: isize) { - if n != 0 && !self.is_stopped() { - let n = if self.offset < 0 { -n } else { n }; - let len = self.len as isize; - self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); - }; + if n == 0 { + return; + } + let len = self.len as isize; + self.offset = if self.offset < 0 { + /* It's reversed. */ + std::cmp::max(-(len + 1), std::cmp::min(self.offset - n, -1)) + } else { + std::cmp::max(0, std::cmp::min(self.offset + n, len)) + } } pub fn get_index(&self) -> usize { @@ -49,7 +54,7 @@ impl Detail { } pub fn next(&mut self, n: isize) -> Option<*const AMchange> { - if n == 0 || self.is_stopped() { + if self.is_stopped() { return None; } let slice: &mut [am::Change] = @@ -73,8 +78,10 @@ impl Detail { } pub fn prev(&mut self, n: isize) -> Option<*const AMchange> { - self.advance(n); - if n == 0 || self.is_stopped() { + /* Check for rewinding. */ + let prior_offset = self.offset; + self.advance(-n); + if (self.offset == prior_offset) || self.is_stopped() { return None; } let slice: &mut [am::Change] = diff --git a/automerge-c/src/strings.rs b/automerge-c/src/strings.rs index efb7b1bc..201c1310 100644 --- a/automerge-c/src/strings.rs +++ b/automerge-c/src/strings.rs @@ -31,11 +31,16 @@ impl Detail { } pub fn advance(&mut self, n: isize) { - if n != 0 && !self.is_stopped() { - let n = if self.offset < 0 { -n } else { n }; - let len = self.len as isize; - self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); - }; + if n == 0 { + return; + } + let len = self.len as isize; + self.offset = if self.offset < 0 { + /* It's reversed. 
*/ + std::cmp::max(-(len + 1), std::cmp::min(self.offset - n, -1)) + } else { + std::cmp::max(0, std::cmp::min(self.offset + n, len)) + } } pub fn get_index(&self) -> usize { @@ -48,7 +53,7 @@ impl Detail { } pub fn next(&mut self, n: isize) -> Option<*const c_char> { - if n == 0 || self.is_stopped() { + if self.is_stopped() { return None; } let slice: &[String] = @@ -72,8 +77,10 @@ impl Detail { } pub fn prev(&mut self, n: isize) -> Option<*const c_char> { - self.advance(n); - if n == 0 || self.is_stopped() { + /* Check for rewinding. */ + let prior_offset = self.offset; + self.advance(-n); + if (self.offset == prior_offset) || self.is_stopped() { return None; } let slice: &[String] = diff --git a/automerge-c/src/sync/haves.rs b/automerge-c/src/sync/haves.rs index c8296ca3..9d9b0f8b 100644 --- a/automerge-c/src/sync/haves.rs +++ b/automerge-c/src/sync/haves.rs @@ -36,11 +36,16 @@ impl Detail { } pub fn advance(&mut self, n: isize) { - if n != 0 && !self.is_stopped() { - let n = if self.offset < 0 { -n } else { n }; - let len = self.len as isize; - self.offset = std::cmp::max(-(len + 1), std::cmp::min(self.offset + n, len)); - }; + if n == 0 { + return; + } + let len = self.len as isize; + self.offset = if self.offset < 0 { + /* It's reversed. */ + std::cmp::max(-(len + 1), std::cmp::min(self.offset - n, -1)) + } else { + std::cmp::max(0, std::cmp::min(self.offset + n, len)) + } } pub fn get_index(&self) -> usize { @@ -53,7 +58,7 @@ impl Detail { } pub fn next(&mut self, n: isize) -> Option<*const AMsyncHave> { - if n == 0 || self.is_stopped() { + if self.is_stopped() { return None; } let slice: &[am::sync::Have] = @@ -77,8 +82,10 @@ impl Detail { } pub fn prev(&mut self, n: isize) -> Option<*const AMsyncHave> { - self.advance(n); - if n == 0 || self.is_stopped() { + /* Check for rewinding. 
*/ + let prior_offset = self.offset; + self.advance(-n); + if (self.offset == prior_offset) || self.is_stopped() { return None; } let slice: &[am::sync::Have] = diff --git a/automerge-c/test/doc_tests.c b/automerge-c/test/doc_tests.c index 5b6f3ee7..3f341845 100644 --- a/automerge-c/test/doc_tests.c +++ b/automerge-c/test/doc_tests.c @@ -50,8 +50,10 @@ static void test_AMkeys_empty() { assert_int_equal(AMstringsSize(&value.strings), 0); AMstrings forward = value.strings; assert_null(AMstringsNext(&forward, 1)); + assert_null(AMstringsPrev(&forward, 1)); AMstrings reverse = AMstringsReversed(&value.strings); assert_null(AMstringsNext(&reverse, 1)); + assert_null(AMstringsPrev(&reverse, 1)); AMfree(strings_result); AMfree(doc_result); } @@ -71,6 +73,7 @@ static void test_AMkeys_list() { assert_int_equal(value.tag, AM_VALUE_STRINGS); AMstrings forward = value.strings; assert_int_equal(AMstringsSize(&forward), 3); + /* Forward iterator forward. */ char const* str = AMstringsNext(&forward, 1); assert_ptr_equal(strstr(str, "1@"), str); str = AMstringsNext(&forward, 1); @@ -78,15 +81,32 @@ static void test_AMkeys_list() { str = AMstringsNext(&forward, 1); assert_ptr_equal(strstr(str, "3@"), str); assert_null(AMstringsNext(&forward, 1)); + /* Forward iterator reverse. */ + str = AMstringsPrev(&forward, 1); + assert_ptr_equal(strstr(str, "3@"), str); + str = AMstringsPrev(&forward, 1); + assert_ptr_equal(strstr(str, "2@"), str); + str = AMstringsPrev(&forward, 1); + assert_ptr_equal(strstr(str, "1@"), str); + assert_null(AMstringsPrev(&forward, 1)); AMstrings reverse = AMstringsReversed(&value.strings); assert_int_equal(AMstringsSize(&reverse), 3); + /* Reverse iterator forward. */ str = AMstringsNext(&reverse, 1); assert_ptr_equal(strstr(str, "3@"), str); str = AMstringsNext(&reverse, 1); assert_ptr_equal(strstr(str, "2@"), str); str = AMstringsNext(&reverse, 1); assert_ptr_equal(strstr(str, "1@"), str); + /* Reverse iterator reverse. 
*/ assert_null(AMstringsNext(&reverse, 1)); + str = AMstringsPrev(&reverse, 1); + assert_ptr_equal(strstr(str, "1@"), str); + str = AMstringsPrev(&reverse, 1); + assert_ptr_equal(strstr(str, "2@"), str); + str = AMstringsPrev(&reverse, 1); + assert_ptr_equal(strstr(str, "3@"), str); + assert_null(AMstringsPrev(&reverse, 1)); AMfree(strings_result); AMfree(doc_result); } @@ -106,16 +126,28 @@ static void test_AMkeys_map() { assert_int_equal(value.tag, AM_VALUE_STRINGS); AMstrings forward = value.strings; assert_int_equal(AMstringsSize(&forward), 3); + /* Forward iterator forward. */ assert_string_equal(AMstringsNext(&forward, 1), "one"); assert_string_equal(AMstringsNext(&forward, 1), "three"); assert_string_equal(AMstringsNext(&forward, 1), "two"); assert_null(AMstringsNext(&forward, 1)); + /* Forward iterator reverse. */ + assert_string_equal(AMstringsPrev(&forward, 1), "two"); + assert_string_equal(AMstringsPrev(&forward, 1), "three"); + assert_string_equal(AMstringsPrev(&forward, 1), "one"); + assert_null(AMstringsPrev(&forward, 1)); AMstrings reverse = AMstringsReversed(&value.strings); assert_int_equal(AMstringsSize(&reverse), 3); + /* Reverse iterator forward. */ assert_string_equal(AMstringsNext(&reverse, 1), "two"); assert_string_equal(AMstringsNext(&reverse, 1), "three"); assert_string_equal(AMstringsNext(&reverse, 1), "one"); assert_null(AMstringsNext(&reverse, 1)); + /* Reverse iterator reverse. */ + assert_string_equal(AMstringsPrev(&reverse, 1), "one"); + assert_string_equal(AMstringsPrev(&reverse, 1), "three"); + assert_string_equal(AMstringsPrev(&reverse, 1), "two"); + assert_null(AMstringsPrev(&reverse, 1)); AMfree(strings_result); AMfree(doc_result); } From 0cbacaebb6b01e6229f3cb488b33a404079bd6c6 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 14:35:30 -0700 Subject: [PATCH 485/730] Simplified the `AMstrings` struct to directly reference `std::ffi::CString` values. 
Switched the `AMresult` struct to store a `Vec` instead of a `Vec`. --- automerge-c/src/result.rs | 16 +++++----- automerge-c/src/strings.rs | 65 +++++++++++--------------------------- 2 files changed, 27 insertions(+), 54 deletions(-) diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 97873917..56bc85a7 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -110,7 +110,7 @@ pub enum AMresult { ActorId(AMactorId), ChangeHashes(Vec), Changes(Vec, BTreeMap), - Strings(Vec, BTreeMap), + Strings(Vec), Doc(Box), Error(CString), ObjId(AMobjId), @@ -140,15 +140,15 @@ impl From for AMresult { impl From> for AMresult { fn from(keys: am::Keys<'_, '_>) -> Self { - let strings: Vec = keys.collect(); - AMresult::Strings(strings, BTreeMap::new()) + let cstrings: Vec = keys.map(|s| CString::new(s).unwrap()).collect(); + AMresult::Strings(cstrings) } } impl From> for AMresult { fn from(keys: am::KeysAt<'_, '_>) -> Self { - let strings: Vec = keys.collect(); - AMresult::Strings(strings, BTreeMap::new()) + let cstrings: Vec = keys.map(|s| CString::new(s).unwrap()).collect(); + AMresult::Strings(cstrings) } } @@ -427,7 +427,7 @@ pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { | AMresult::Value(_, _) => 1, AMresult::ChangeHashes(change_hashes) => change_hashes.len(), AMresult::Changes(changes, _) => changes.len(), - AMresult::Strings(strings, _) => strings.len(), + AMresult::Strings(cstrings) => cstrings.len(), } } else { 0 @@ -482,8 +482,8 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> AMresult::ObjId(obj_id) => { content = AMvalue::ObjId(obj_id); } - AMresult::Strings(strings, storage) => { - content = AMvalue::Strings(AMstrings::new(strings, storage)); + AMresult::Strings(cstrings) => { + content = AMvalue::Strings(AMstrings::new(cstrings)); } AMresult::SyncMessage(sync_message) => { content = AMvalue::SyncMessage(sync_message); diff --git a/automerge-c/src/strings.rs 
b/automerge-c/src/strings.rs index 201c1310..afb8d9b5 100644 --- a/automerge-c/src/strings.rs +++ b/automerge-c/src/strings.rs @@ -1,5 +1,4 @@ use std::cmp::Ordering; -use std::collections::BTreeMap; use std::ffi::{c_void, CString}; use std::mem::size_of; use std::os::raw::c_char; @@ -9,7 +8,6 @@ struct Detail { len: usize, offset: isize, ptr: *const c_void, - storage: *mut c_void, } /// \note cbindgen won't propagate the value of a `std::mem::size_of()` call @@ -17,16 +15,14 @@ struct Detail { /// propagate the name of a constant initialized from it so if the /// constant's name is a symbolic representation of the value it can be /// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_USIZE_: usize = size_of::(); +pub const USIZE_USIZE_USIZE_: usize = size_of::(); impl Detail { - fn new(strings: &[String], offset: isize, storage: &mut BTreeMap) -> Self { - let storage: *mut BTreeMap = storage; + fn new(cstrings: &[CString], offset: isize) -> Self { Self { - len: strings.len(), + len: cstrings.len(), offset, - ptr: strings.as_ptr() as *const c_void, - storage: storage as *mut c_void, + ptr: cstrings.as_ptr() as *const c_void, } } @@ -56,19 +52,11 @@ impl Detail { if self.is_stopped() { return None; } - let slice: &[String] = - unsafe { std::slice::from_raw_parts(self.ptr as *const String, self.len) }; - let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; - let index = self.get_index(); - let value = match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, CString::new(slice[index].as_str()).unwrap()); - storage.get_mut(&index).unwrap() - } - }; + let slice: &[CString] = + unsafe { std::slice::from_raw_parts(self.ptr as *const CString, self.len) }; + let value = slice[self.get_index()].as_ptr(); self.advance(n); - Some(value.as_ptr()) + Some(value) } pub fn is_stopped(&self) -> bool { @@ -83,20 +71,9 @@ impl Detail { if (self.offset == prior_offset) || self.is_stopped() { 
return None; } - let slice: &[String] = - unsafe { std::slice::from_raw_parts(self.ptr as *const String, self.len) }; - let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; - let index = self.get_index(); - Some( - match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, CString::new(slice[index].as_str()).unwrap()); - storage.get_mut(&index).unwrap() - } - } - .as_ptr(), - ) + let slice: &[CString] = + unsafe { std::slice::from_raw_parts(self.ptr as *const CString, self.len) }; + Some(slice[self.get_index()].as_ptr()) } pub fn reversed(&self) -> Self { @@ -104,20 +81,16 @@ impl Detail { len: self.len, offset: -(self.offset + 1), ptr: self.ptr, - storage: self.storage, } } } -impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { +impl From for [u8; USIZE_USIZE_USIZE_] { fn from(detail: Detail) -> Self { unsafe { - std::slice::from_raw_parts( - (&detail as *const Detail) as *const u8, - USIZE_USIZE_USIZE_USIZE_, - ) - .try_into() - .unwrap() + std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) + .try_into() + .unwrap() } } } @@ -130,13 +103,13 @@ pub struct AMstrings { /// \warning Modifying \p detail will cause undefined behavior. /// \note The actual size of \p detail will vary by platform, this is just /// the one for the platform this documentation was built on. 
- detail: [u8; USIZE_USIZE_USIZE_USIZE_], + detail: [u8; USIZE_USIZE_USIZE_], } impl AMstrings { - pub fn new(strings: &[String], storage: &mut BTreeMap) -> Self { + pub fn new(cstrings: &[CString]) -> Self { Self { - detail: Detail::new(strings, 0, storage).into(), + detail: Detail::new(cstrings, 0).into(), } } @@ -178,7 +151,7 @@ impl AsRef<[String]> for AMstrings { impl Default for AMstrings { fn default() -> Self { Self { - detail: [0; USIZE_USIZE_USIZE_USIZE_], + detail: [0; USIZE_USIZE_USIZE_], } } } From eb462cb2289cfdc65f7a91996bbcfe8e07ebe8f3 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 15:55:31 -0700 Subject: [PATCH 486/730] Made `free_results()` reset the stack pointer. --- automerge-c/examples/quickstart.c | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index 8d5dd5be..24400079 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -7,7 +7,7 @@ typedef struct StackNode ResultStack; AMvalue push(ResultStack**, AMresult*, AMvalueVariant const); -size_t free_results(ResultStack*); +size_t free_results(ResultStack**); /* * Based on https://automerge.github.io/docs/quickstart @@ -49,7 +49,7 @@ int main(int argc, char** argv) { heads = push(&results, AMchangeHashesInit(&change_hash, 1), AM_VALUE_CHANGE_HASHES).change_hashes; printf("%s %ld\n", AMchangeMessage(change), AMobjSize(doc1, cards, &heads)); } - free_results(results); + free_results(&results); } /** @@ -66,12 +66,13 @@ struct StackNode { * that, prints an error message to `stderr`, frees all results in that * stack and aborts. * - * \param[in] stack A pointer to a pointer to a `ResultStack` struct. + * \param[in,out] stack A pointer to a pointer to a `ResultStack` struct. .* \param[in] result A pointer to an `AMresult` struct. * \param[in] discriminant An `AMvalueVariant` enum tag. * \return An `AMvalue` struct. 
* \pre \p stack must be a valid address. * \pre \p result must be a valid address. + * \post \p stack `== NULL`. */ AMvalue push(ResultStack** stack, AMresult* result, AMvalueVariant const discriminant) { static char prelude[64]; @@ -84,7 +85,7 @@ AMvalue push(ResultStack** stack, AMresult* result, AMvalueVariant const discrim } if (result == NULL) { fprintf(stderr, "Null `AMresult` struct pointer."); - free_results(*stack); + free_results(stack); exit(EXIT_FAILURE); } /* Push the result onto the stack. */ @@ -100,7 +101,7 @@ AMvalue push(ResultStack** stack, AMresult* result, AMvalueVariant const discrim default: sprintf(prelude, "Unknown `AMstatus` tag %d", status); } fprintf(stderr, "%s; %s.", prelude, AMerrorMessage(result)); - free_results(*stack); + free_results(stack); exit(EXIT_FAILURE); } AMvalue const value = AMresultValue(result); @@ -128,7 +129,7 @@ AMvalue push(ResultStack** stack, AMresult* result, AMvalueVariant const discrim default: label = "..."; } fprintf(stderr, "Unexpected `AMvalueVariant` tag `AM_VALUE_%s` (%d).", label, value.tag); - free_results(*stack); + free_results(stack); exit(EXIT_FAILURE); } return value; @@ -137,18 +138,20 @@ AMvalue push(ResultStack** stack, AMresult* result, AMvalueVariant const discrim /** * \brief Frees a stack of `AMresult` structs. * - * \param[in] stack A pointer to a `ResultStack` struct. + * \param[in,out] stack A pointer to a pointer to a `ResultStack` struct. * \return The number of stack nodes freed. * \pre \p stack must be a valid address. + * \post \p stack `== NULL`. 
*/ -size_t free_results(ResultStack* stack) { +size_t free_results(ResultStack** stack) { struct StackNode* prev = NULL; size_t count = 0; - for (struct StackNode* node = stack; node; node = node->next, ++count) { + for (struct StackNode* node = *stack; node; node = node->next, ++count) { free(prev); AMfree(node->result); prev = node; } free(prev); + *stack = NULL; return count; } From aeb8db556ce5cebec6304f11dc0abb7544a47450 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 23:11:03 -0700 Subject: [PATCH 487/730] Added "out" directions to the documentation for out function parameters. --- automerge-c/src/change.rs | 2 +- automerge-c/src/change_hashes.rs | 6 +++--- automerge-c/src/changes.rs | 6 +++--- automerge-c/src/doc/list.rs | 30 +++++++++++++++--------------- automerge-c/src/doc/map.rs | 30 +++++++++++++++--------------- automerge-c/src/strings.rs | 6 +++--- automerge-c/src/sync/haves.rs | 6 +++--- 7 files changed, 43 insertions(+), 43 deletions(-) diff --git a/automerge-c/src/change.rs b/automerge-c/src/change.rs index 389fa33c..a0bf59e3 100644 --- a/automerge-c/src/change.rs +++ b/automerge-c/src/change.rs @@ -76,7 +76,7 @@ pub unsafe extern "C" fn AMchangeActorId(change: *const AMchange) -> *mut AMresu /// \memberof AMchange /// \brief Compresses the raw bytes of a change. /// -/// \param[in] change A pointer to an `AMchange` struct. +/// \param[in,out] change A pointer to an `AMchange` struct. /// \pre \p change must be a valid address. /// \internal /// diff --git a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs index e8d5b1bf..f7e01b26 100644 --- a/automerge-c/src/change_hashes.rs +++ b/automerge-c/src/change_hashes.rs @@ -164,7 +164,7 @@ impl Default for AMchangeHashes { /// \p |n| positions where the sign of \p n is relative to the /// iterator's direction. /// -/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. +/// \param[in,out] change_hashes A pointer to an `AMchangeHashes` struct. 
/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. /// \pre \p change_hashes must be a valid address. @@ -256,7 +256,7 @@ pub unsafe extern "C" fn AMchangeHashesInit(src: *const AMbyteSpan, count: usize /// positions where the sign of \p n is relative to the iterator's /// direction. /// -/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. +/// \param[in,out] change_hashes A pointer to an `AMchangeHashes` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. /// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes @@ -285,7 +285,7 @@ pub unsafe extern "C" fn AMchangeHashesNext( /// iterator's direction and then gets the change hash at its new /// position. /// -/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. +/// \param[in,out] change_hashes A pointer to an `AMchangeHashes` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. /// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes is diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs index 54244304..f8ada1fd 100644 --- a/automerge-c/src/changes.rs +++ b/automerge-c/src/changes.rs @@ -186,7 +186,7 @@ impl Default for AMchanges { /// positions where the sign of \p n is relative to the iterator's /// direction. /// -/// \param[in] changes A pointer to an `AMchanges` struct. +/// \param[in,out] changes A pointer to an `AMchanges` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. /// \pre \p changes must be a valid address. @@ -231,7 +231,7 @@ pub unsafe extern "C" fn AMchangesEqual( /// sequence of changes and then advances it by at most \p |n| positions /// where the sign of \p n is relative to the iterator's direction. 
/// -/// \param[in] changes A pointer to an `AMchanges` struct. +/// \param[in,out] changes A pointer to an `AMchanges` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. /// \return A pointer to an `AMchange` struct that's `NULL` when \p changes was @@ -256,7 +256,7 @@ pub unsafe extern "C" fn AMchangesNext(changes: *mut AMchanges, n: isize) -> *co /// positions where the sign of \p n is relative to the iterator's /// direction and then gets the change at its new position. /// -/// \param[in] changes A pointer to an `AMchanges` struct. +/// \param[in,out] changes A pointer to an `AMchanges` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. /// \return A pointer to an `AMchange` struct that's `NULL` when \p changes is diff --git a/automerge-c/src/doc/list.rs b/automerge-c/src/doc/list.rs index ad3fe978..029a8b2e 100644 --- a/automerge-c/src/doc/list.rs +++ b/automerge-c/src/doc/list.rs @@ -2,14 +2,14 @@ use automerge as am; use automerge::transaction::Transactable; use std::os::raw::c_char; -use crate::doc::{to_doc, to_obj_id, to_str, AMdoc}; +use crate::doc::{to_doc, to_doc_const, to_obj_id, to_str, AMdoc}; use crate::obj::{AMobjId, AMobjType}; use crate::result::{to_result, AMresult}; /// \memberof AMdoc /// \brief Deletes an index in a list object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \return A pointer to an `AMresult` struct containing a void. 
@@ -50,11 +50,11 @@ pub unsafe extern "C" fn AMlistDelete( /// obj_id must be a pointer to a valid AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistGet( - doc: *mut AMdoc, + doc: *const AMdoc, obj_id: *const AMobjId, index: usize, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_const!(doc); to_result(doc.get(to_obj_id!(obj_id), index)) } @@ -62,7 +62,7 @@ pub unsafe extern "C" fn AMlistGet( /// \brief Increments a counter at an index in a list object by the given /// value. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. @@ -90,7 +90,7 @@ pub unsafe extern "C" fn AMlistIncrement( /// \memberof AMdoc /// \brief Puts a boolean as the value at an index in a list object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of @@ -126,7 +126,7 @@ pub unsafe extern "C" fn AMlistPutBool( /// \memberof AMdoc /// \brief Puts a sequence of bytes as the value at an index in a list object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p src before \p index instead of @@ -169,7 +169,7 @@ pub unsafe extern "C" fn AMlistPutBytes( /// \memberof AMdoc /// \brief Puts a CRDT counter as the value at an index in a list object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. 
+/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of @@ -206,7 +206,7 @@ pub unsafe extern "C" fn AMlistPutCounter( /// \memberof AMdoc /// \brief Puts a float as the value at an index in a list object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of @@ -242,7 +242,7 @@ pub unsafe extern "C" fn AMlistPutF64( /// \memberof AMdoc /// \brief Puts a signed integer as the value at an index in a list object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of @@ -278,7 +278,7 @@ pub unsafe extern "C" fn AMlistPutInt( /// \memberof AMdoc /// \brief Puts null as the value at an index in a list object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of @@ -313,7 +313,7 @@ pub unsafe extern "C" fn AMlistPutNull( /// \memberof AMdoc /// \brief Puts an empty object as the value at an index in a list object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. 
/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of @@ -350,7 +350,7 @@ pub unsafe extern "C" fn AMlistPutObject( /// \memberof AMdoc /// \brief Puts a UTF-8 string as the value at an index in a list object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of @@ -389,7 +389,7 @@ pub unsafe extern "C" fn AMlistPutStr( /// \memberof AMdoc /// \brief Puts a Lamport timestamp as the value at an index in a list object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of @@ -426,7 +426,7 @@ pub unsafe extern "C" fn AMlistPutTimestamp( /// \memberof AMdoc /// \brief Puts an unsigned integer as the value at an index in a list object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. 
/// \param[in] insert A flag to insert \p value before \p index instead of diff --git a/automerge-c/src/doc/map.rs b/automerge-c/src/doc/map.rs index a040bc1c..51941391 100644 --- a/automerge-c/src/doc/map.rs +++ b/automerge-c/src/doc/map.rs @@ -3,14 +3,14 @@ use automerge::transaction::Transactable; use std::os::raw::c_char; use crate::doc::utils::to_str; -use crate::doc::{to_doc, to_obj_id, AMdoc}; +use crate::doc::{to_doc, to_doc_const, to_obj_id, AMdoc}; use crate::obj::{AMobjId, AMobjType}; use crate::result::{to_result, AMresult}; /// \memberof AMdoc /// \brief Deletes a key in a map object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \return A pointer to an `AMresult` struct containing a void. @@ -53,18 +53,18 @@ pub unsafe extern "C" fn AMmapDelete( /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapGet( - doc: *mut AMdoc, + doc: *const AMdoc, obj_id: *const AMobjId, key: *const c_char, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_const!(doc); to_result(doc.get(to_obj_id!(obj_id), to_str(key))) } /// \memberof AMdoc /// \brief Increments a counter for a key in a map object by the given value. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. @@ -93,7 +93,7 @@ pub unsafe extern "C" fn AMmapIncrement( /// \memberof AMdoc /// \brief Puts a boolean as the value of a key in a map object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. 
/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A boolean. @@ -122,7 +122,7 @@ pub unsafe extern "C" fn AMmapPutBool( /// \memberof AMdoc /// \brief Puts a sequence of bytes as the value of a key in a map object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] src A pointer to an array of bytes. @@ -158,7 +158,7 @@ pub unsafe extern "C" fn AMmapPutBytes( /// \memberof AMdoc /// \brief Puts a CRDT counter as the value of a key in a map object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. @@ -191,7 +191,7 @@ pub unsafe extern "C" fn AMmapPutCounter( /// \memberof AMdoc /// \brief Puts null as the value of a key in a map object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \return A pointer to an `AMresult` struct containing a void. @@ -218,7 +218,7 @@ pub unsafe extern "C" fn AMmapPutNull( /// \memberof AMdoc /// \brief Puts an empty object as the value of a key in a map object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. 
/// \param[in] obj_type An `AMobjIdType` enum tag. @@ -247,7 +247,7 @@ pub unsafe extern "C" fn AMmapPutObject( /// \memberof AMdoc /// \brief Puts a float as the value of a key in a map object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit float. @@ -276,7 +276,7 @@ pub unsafe extern "C" fn AMmapPutF64( /// \memberof AMdoc /// \brief Puts a signed integer as the value of a key in a map object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. @@ -305,7 +305,7 @@ pub unsafe extern "C" fn AMmapPutInt( /// \memberof AMdoc /// \brief Puts a UTF-8 string as the value of a key in a map object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A UTF-8 string. @@ -336,7 +336,7 @@ pub unsafe extern "C" fn AMmapPutStr( /// \memberof AMdoc /// \brief Puts a Lamport timestamp as the value of a key in a map object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. 
@@ -369,7 +369,7 @@ pub unsafe extern "C" fn AMmapPutTimestamp( /// \memberof AMdoc /// \brief Puts an unsigned integer as the value of a key in a map object. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit unsigned integer. diff --git a/automerge-c/src/strings.rs b/automerge-c/src/strings.rs index afb8d9b5..83202a24 100644 --- a/automerge-c/src/strings.rs +++ b/automerge-c/src/strings.rs @@ -161,7 +161,7 @@ impl Default for AMstrings { /// \p |n| positions where the sign of \p n is relative to the /// iterator's direction. /// -/// \param[in] strings A pointer to an `AMstrings` struct. +/// \param[in,out] strings A pointer to an `AMstrings` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. /// \pre \p strings must be a valid address. @@ -214,7 +214,7 @@ pub unsafe extern "C" fn AMstringsCmp( /// sequence of UTF-8 strings and then advances it by at most \p |n| /// positions where the sign of \p n is relative to the iterator's direction. /// -/// \param[in] strings A pointer to an `AMstrings` struct. +/// \param[in,out] strings A pointer to an `AMstrings` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. /// \return A UTF-8 string that's `NULL` when \p strings was previously @@ -239,7 +239,7 @@ pub unsafe extern "C" fn AMstringsNext(strings: *mut AMstrings, n: isize) -> *co /// \p |n| positions where the sign of \p n is relative to the /// iterator's direction and then gets the key at its new position. /// -/// \param[in] strings A pointer to an `AMstrings` struct. +/// \param[in,out] strings A pointer to an `AMstrings` struct. 
/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. /// \return A UTF-8 string that's `NULL` when \p strings is presently advanced diff --git a/automerge-c/src/sync/haves.rs b/automerge-c/src/sync/haves.rs index 9d9b0f8b..4a1eb1d6 100644 --- a/automerge-c/src/sync/haves.rs +++ b/automerge-c/src/sync/haves.rs @@ -190,7 +190,7 @@ impl Default for AMsyncHaves { /// most \p |n| positions where the sign of \p n is relative to the /// iterator's direction. /// -/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. +/// \param[in,out] sync_haves A pointer to an `AMsyncHaves` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. /// \pre \p sync_haves must be a valid address. @@ -236,7 +236,7 @@ pub unsafe extern "C" fn AMsyncHavesEqual( /// most \p |n| positions where the sign of \p n is relative to the /// iterator's direction. /// -/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. +/// \param[in,out] sync_haves A pointer to an `AMsyncHaves` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. /// \return A pointer to an `AMsyncHave` struct that's `NULL` when @@ -266,7 +266,7 @@ pub unsafe extern "C" fn AMsyncHavesNext( /// iterator's direction and then gets the synchronization have at its /// new position. /// -/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. +/// \param[in,out] sync_haves A pointer to an `AMsyncHaves` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. /// \return A pointer to an `AMsyncHave` struct that's `NULL` when From e5a8b67b1143e2bd93ccf17876cb896263e02eb1 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 20 Jun 2022 23:15:25 -0700 Subject: [PATCH 488/730] Added `AMspliceText()`. Added `AMtext()`. 
Replaced `*mut` function arguments with `*const` function arguments where possible. Added "out" directions to the documentation for out function parameters. --- automerge-c/src/doc.rs | 126 ++++++++++++++++++++++++++--------- automerge-c/src/doc/utils.rs | 12 ++++ automerge-c/src/result.rs | 36 ++++++---- automerge-c/test/doc_tests.c | 19 ++++++ 4 files changed, 150 insertions(+), 43 deletions(-) diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 4cf386bb..1da314c9 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -15,7 +15,7 @@ mod utils; use crate::changes::AMchanges; use crate::doc::utils::to_str; -use crate::doc::utils::{to_actor_id, to_doc, to_obj_id}; +use crate::doc::utils::{to_actor_id, to_doc, to_doc_const, to_obj_id}; macro_rules! to_changes { ($handle:expr) => {{ @@ -71,7 +71,7 @@ impl DerefMut for AMdoc { /// \memberof AMdoc /// \brief Applies a sequence of changes to a document. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] changes A pointer to an `AMchanges` struct. /// \pre \p doc must be a valid address. /// \pre \p changes must be a valid address. @@ -109,7 +109,7 @@ pub extern "C" fn AMcreate() -> *mut AMresult { /// \brief Commits the current operations on a document with an optional /// message and/or time override as seconds since the epoch. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] message A UTF-8 string or `NULL`. /// \param[in] time A pointer to a `time_t` value or `NULL`. /// \return A pointer to an `AMresult` struct containing a change hash as an @@ -142,7 +142,7 @@ pub unsafe extern "C" fn AMcommit( /// \brief Allocates storage for a document and initializes it by duplicating /// the given document. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. 
/// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMdoc` struct. /// \pre \p doc must be a valid address. @@ -153,8 +153,8 @@ pub unsafe extern "C" fn AMcommit( /// # Safety /// doc must be a pointer to a valid AMdoc #[no_mangle] -pub unsafe extern "C" fn AMdup(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); +pub unsafe extern "C" fn AMdup(doc: *const AMdoc) -> *mut AMresult { + let doc = to_doc_const!(doc); to_result(doc.as_ref().clone()) } @@ -162,8 +162,8 @@ pub unsafe extern "C" fn AMdup(doc: *mut AMdoc) -> *mut AMresult { /// \brief Tests the equality of two documents after closing their respective /// transactions. /// -/// \param[in] doc1 An `AMdoc` struct. -/// \param[in] doc2 An `AMdoc` struct. +/// \param[in,out] doc1 An `AMdoc` struct. +/// \param[in,out] doc2 An `AMdoc` struct. /// \return `true` if \p doc1 `==` \p doc2 and `false` otherwise. /// \pre \p doc1 must be a valid address. /// \pre \p doc2 must be a valid address. @@ -184,8 +184,8 @@ pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { /// \brief Generates a synchronization message for a peer based upon the given /// synchronization state. /// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in,out] sync_state A pointer to an `AMsyncState` struct. /// \return A pointer to an `AMresult` struct containing either a pointer to an /// `AMsyncMessage` struct or a void. /// \pre \p doc must b e a valid address. 
@@ -221,8 +221,8 @@ pub unsafe extern "C" fn AMgenerateSyncMessage( /// # Safety /// doc must be a pointer to a valid AMdoc #[no_mangle] -pub unsafe extern "C" fn AMgetActor(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); +pub unsafe extern "C" fn AMgetActor(doc: *const AMdoc) -> *mut AMresult { + let doc = to_doc_const!(doc); to_result(Ok::( doc.get_actor().clone(), )) @@ -231,7 +231,7 @@ pub unsafe extern "C" fn AMgetActor(doc: *mut AMdoc) -> *mut AMresult { /// \memberof AMdoc /// \brief Gets the changes added to a document by their respective hashes. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] have_deps A pointer to an `AMchangeHashes` struct or `NULL`. /// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. /// \pre \p doc must be a valid address. @@ -259,8 +259,8 @@ pub unsafe extern "C" fn AMgetChanges( /// \brief Gets the changes added to a second document that weren't added to /// a first document. /// -/// \param[in] doc1 An `AMdoc` struct. -/// \param[in] doc2 An `AMdoc` struct. +/// \param[in,out] doc1 An `AMdoc` struct. +/// \param[in,out] doc2 An `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. /// \pre \p doc1 must be a valid address. /// \pre \p doc2 must be a valid address. @@ -281,7 +281,7 @@ pub unsafe extern "C" fn AMgetChangesAdded(doc1: *mut AMdoc, doc2: *mut AMdoc) - /// \memberof AMdoc /// \brief Gets the current heads of a document. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// struct. /// \pre \p doc must be a valid address. 
@@ -303,7 +303,7 @@ pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { /// \brief Gets the hashes of the changes in a document that aren't transitive /// dependencies of the given hashes of changes. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] heads A pointer to an `AMchangeHashes` struct or `NULL`. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// struct. @@ -332,7 +332,7 @@ pub unsafe extern "C" fn AMgetMissingDeps( /// \memberof AMdoc /// \brief Gets the last change made to a document. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing either an `AMchange` /// struct or a void. /// \pre \p doc must be a valid address. @@ -367,11 +367,11 @@ pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult /// heads must be a pointer to a valid AMchangeHashes or NULL #[no_mangle] pub unsafe extern "C" fn AMkeys( - doc: *mut AMdoc, + doc: *const AMdoc, obj_id: *const AMobjId, heads: *const AMchangeHashes, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_const!(doc); let obj_id = to_obj_id!(obj_id); match heads.as_ref() { None => to_result(doc.keys(obj_id)), @@ -405,7 +405,7 @@ pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMresult { /// \memberof AMdoc /// \brief Loads the compact form of an incremental save into a document. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes in \p src to load. /// \return A pointer to an `AMresult` struct containing the number of @@ -436,8 +436,8 @@ pub unsafe extern "C" fn AMloadIncremental( /// \brief Applies all of the changes in \p src which are not in \p dest to /// \p dest. 
/// -/// \param[in] dest A pointer to an `AMdoc` struct. -/// \param[in] src A pointer to an `AMdoc` struct. +/// \param[in,out] dest A pointer to an `AMdoc` struct. +/// \param[in,out] src A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// struct. /// \pre \p dest must be a valid address. @@ -499,8 +499,8 @@ pub unsafe extern "C" fn AMobjSize( /// # Safety /// doc must be a pointer to a valid AMdoc #[no_mangle] -pub unsafe extern "C" fn AMpendingOps(doc: *mut AMdoc) -> usize { - if let Some(doc) = doc.as_mut() { +pub unsafe extern "C" fn AMpendingOps(doc: *const AMdoc) -> usize { + if let Some(doc) = doc.as_ref() { doc.pending_ops() } else { 0 @@ -511,8 +511,8 @@ pub unsafe extern "C" fn AMpendingOps(doc: *mut AMdoc) -> usize { /// \brief Receives a synchronization message from a peer based upon a given /// synchronization state. /// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in,out] sync_state A pointer to an `AMsyncState` struct. /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. @@ -540,7 +540,7 @@ pub unsafe extern "C" fn AMreceiveSyncMessage( /// \brief Cancels the pending operations added during a document's current /// transaction and gets the number of cancellations. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return The count of pending operations for \p doc that were cancelled. /// \pre \p doc must be a valid address. /// \internal @@ -559,7 +559,7 @@ pub unsafe extern "C" fn AMrollback(doc: *mut AMdoc) -> usize { /// \memberof AMdoc /// \brief Saves the entirety of a document into a compact form. /// -/// \param[in] doc A pointer to an `AMdoc` struct. 
+/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. /// \pre \p doc must be a valid address. @@ -579,7 +579,7 @@ pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { /// \brief Saves the changes to a document since its last save into a compact /// form. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. /// \pre \p doc must be a valid address. @@ -598,7 +598,7 @@ pub unsafe extern "C" fn AMsaveIncremental(doc: *mut AMdoc) -> *mut AMresult { /// \memberof AMdoc /// \brief Puts the actor ID value of a document. /// -/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] actor_id A pointer to an `AMactorId` struct. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc must be a valid address. @@ -617,3 +617,67 @@ pub unsafe extern "C" fn AMsetActor(doc: *mut AMdoc, actor_id: *const AMactorId) doc.set_actor(actor_id.as_ref().clone()); to_result(Ok(())) } + +/// \memberof AMdoc +/// \brief Splices new characters into the identified text object at a given +/// index. +/// +/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] index An index in the text object identified by \p obj_id. +/// \param[in] del The number of characters to delete. +/// \param[in] text A UTF-8 string. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc must be a valid address. +/// \pre `0 <=` \p index `<=` length of the text object identified by \p obj_id. +/// \pre \p text must be a valid address. 
+/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// text must be a null-terminated array of `c_char` +#[no_mangle] +pub unsafe extern "C" fn AMspliceText( + doc: *mut AMdoc, + obj_id: *const AMobjId, + index: usize, + del: usize, + text: *const c_char, +) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.splice_text(to_obj_id!(obj_id), index, del, &to_str(text))) +} + +/// \memberof AMdoc +/// \brief Gets the current or historical string represented by a text object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical +/// keys or `NULL` for current keys. +/// \return A pointer to an `AMresult` struct containing a UTF-8 string. +/// \pre \p doc must be a valid address. +/// \warning To avoid a memory leak, the returned `AMresult` struct must be +/// deallocated with `AMfree()`. +/// \internal +/// +/// # Safety +/// doc must be a pointer to a valid AMdoc +/// obj_id must be a pointer to a valid AMobjId or NULL +/// heads must be a pointer to a valid AMchangeHashes or NULL +#[no_mangle] +pub unsafe extern "C" fn AMtext( + doc: *const AMdoc, + obj_id: *const AMobjId, + heads: *const AMchangeHashes, +) -> *mut AMresult { + let doc = to_doc_const!(doc); + let obj_id = to_obj_id!(obj_id); + match heads.as_ref() { + None => to_result(doc.text(obj_id)), + Some(heads) => to_result(doc.text_at(obj_id, heads.as_ref())), + } +} diff --git a/automerge-c/src/doc/utils.rs b/automerge-c/src/doc/utils.rs index bf3aaf98..84203a20 100644 --- a/automerge-c/src/doc/utils.rs +++ b/automerge-c/src/doc/utils.rs @@ -25,6 +25,18 @@ macro_rules! to_doc { pub(crate) use to_doc; +macro_rules! 
to_doc_const { + ($handle:expr) => {{ + let handle = $handle.as_ref(); + match handle { + Some(b) => b, + None => return AMresult::err("Invalid AMdoc pointer").into(), + } + }}; +} + +pub(crate) use to_doc_const; + macro_rules! to_obj_id { ($handle:expr) => {{ match $handle.as_ref() { diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 56bc85a7..17820caa 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -110,6 +110,7 @@ pub enum AMresult { ActorId(AMactorId), ChangeHashes(Vec), Changes(Vec, BTreeMap), + String(CString), Strings(Vec), Doc(Box), Error(CString), @@ -273,6 +274,15 @@ impl From, am::ObjId)>, am::AutomergeError>> f } } +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(string) => AMresult::String(CString::new(string).unwrap()), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { @@ -380,8 +390,8 @@ pub enum AMstatus { /// # Safety /// result must be a pointer to a valid AMresult #[no_mangle] -pub unsafe extern "C" fn AMerrorMessage(result: *mut AMresult) -> *const c_char { - match result.as_mut() { +pub unsafe extern "C" fn AMerrorMessage(result: *const AMresult) -> *const c_char { + match result.as_ref() { Some(AMresult::Error(s)) => s.as_ptr(), _ => std::ptr::null::(), } @@ -390,7 +400,7 @@ pub unsafe extern "C" fn AMerrorMessage(result: *mut AMresult) -> *const c_char /// \memberof AMresult /// \brief Deallocates the storage for a result. /// -/// \param[in] result A pointer to an `AMresult` struct. +/// \param[in,out] result A pointer to an `AMresult` struct. /// \pre \p result must be a valid address. 
/// \internal /// @@ -415,13 +425,14 @@ pub unsafe extern "C" fn AMfree(result: *mut AMresult) { /// # Safety /// result must be a pointer to a valid AMresult #[no_mangle] -pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { - if let Some(result) = result.as_mut() { +pub unsafe extern "C" fn AMresultSize(result: *const AMresult) -> usize { + if let Some(result) = result.as_ref() { match result { AMresult::Error(_) | AMresult::Void => 0, AMresult::ActorId(_) | AMresult::Doc(_) | AMresult::ObjId(_) + | AMresult::String(_) | AMresult::SyncMessage(_) | AMresult::SyncState(_) | AMresult::Value(_, _) => 1, @@ -445,8 +456,8 @@ pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize { /// # Safety /// result must be a pointer to a valid AMresult #[no_mangle] -pub unsafe extern "C" fn AMresultStatus(result: *mut AMresult) -> AMstatus { - match result.as_mut() { +pub unsafe extern "C" fn AMresultStatus(result: *const AMresult) -> AMstatus { + match result.as_ref() { Some(AMresult::Error(_)) => AMstatus::Error, None => AMstatus::InvalidResult, _ => AMstatus::Ok, @@ -456,7 +467,7 @@ pub unsafe extern "C" fn AMresultStatus(result: *mut AMresult) -> AMstatus { /// \memberof AMresult /// \brief Gets a result's value. /// -/// \param[in] result A pointer to an `AMresult` struct. +/// \param[in,out] result A pointer to an `AMresult` struct. /// \return An `AMvalue` struct. /// \pre \p result must be a valid address. 
/// \internal @@ -482,6 +493,7 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> AMresult::ObjId(obj_id) => { content = AMvalue::ObjId(obj_id); } + AMresult::String(cstring) => content = AMvalue::Str(cstring.as_ptr()), AMresult::Strings(cstrings) => { content = AMvalue::Strings(AMstrings::new(cstrings)); } @@ -491,7 +503,7 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> AMresult::SyncState(sync_state) => { content = AMvalue::SyncState(sync_state); } - AMresult::Value(value, hosted_str) => { + AMresult::Value(value, value_str) => { match value { am::Value::Scalar(scalar) => match scalar.as_ref() { am::ScalarValue::Boolean(flag) => { @@ -513,9 +525,9 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> content = AMvalue::Null; } am::ScalarValue::Str(smol_str) => { - *hosted_str = CString::new(smol_str.to_string()).ok(); - if let Some(c_str) = hosted_str { - content = AMvalue::Str(c_str.as_ptr()); + *value_str = CString::new(smol_str.to_string()).ok(); + if let Some(cstring) = value_str { + content = AMvalue::Str(cstring.as_ptr()); } } am::ScalarValue::Timestamp(timestamp) => { diff --git a/automerge-c/test/doc_tests.c b/automerge-c/test/doc_tests.c index 3f341845..996c98a8 100644 --- a/automerge-c/test/doc_tests.c +++ b/automerge-c/test/doc_tests.c @@ -207,6 +207,24 @@ static void test_AMputActor_hex(void **state) { AMfree(result); } +static void test_AMspliceText() { + AMresult* const doc_result = AMcreate(); + AMdoc* const doc = AMresultValue(doc_result).doc; + AMfree(AMspliceText(doc, AM_ROOT, 0, 0, "one + ")); + AMfree(AMspliceText(doc, AM_ROOT, 4, 2, "two = ")); + AMfree(AMspliceText(doc, AM_ROOT, 8, 2, "three")); + AMresult* const text_result = AMtext(doc, AM_ROOT, NULL); + if (AMresultStatus(text_result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(text_result)); + } + assert_int_equal(AMresultSize(text_result), 1); + AMvalue value = 
AMresultValue(text_result); + assert_int_equal(value.tag, AM_VALUE_STR); + assert_string_equal(value.str, "one two three"); + AMfree(text_result); + AMfree(doc_result); +} + int run_doc_tests(void) { const struct CMUnitTest tests[] = { cmocka_unit_test(test_AMkeys_empty), @@ -214,6 +232,7 @@ int run_doc_tests(void) { cmocka_unit_test(test_AMkeys_map), cmocka_unit_test_setup_teardown(test_AMputActor_bytes, setup, teardown), cmocka_unit_test_setup_teardown(test_AMputActor_hex, setup, teardown), + cmocka_unit_test(test_AMspliceText), }; return cmocka_run_group_tests(tests, NULL, NULL); From d5ca0947c0e6c528419a057db92c8909d4a9eb16 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 21 Jun 2022 13:40:15 -0400 Subject: [PATCH 489/730] minor update on js wrapper --- automerge-js/package.json | 2 +- automerge-js/src/constants.ts | 17 ++++++++++++----- automerge-js/src/counter.ts | 2 ++ automerge-js/src/numbers.ts | 5 +++++ automerge-js/src/proxies.ts | 18 ++++++++++-------- automerge-js/src/text.ts | 2 ++ 6 files changed, 32 insertions(+), 14 deletions(-) diff --git a/automerge-js/package.json b/automerge-js/package.json index a87816e2..2f485322 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "0.1.4", + "version": "0.1.5", "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", diff --git a/automerge-js/src/constants.ts b/automerge-js/src/constants.ts index 597bfa1c..aa414c8b 100644 --- a/automerge-js/src/constants.ts +++ b/automerge-js/src/constants.ts @@ -1,11 +1,17 @@ // Properties of the document root object //const OPTIONS = Symbol('_options') // object containing options passed to init() //const CACHE = Symbol('_cache') // map from objectId to immutable object -export const STATE = Symbol('_state') // object containing 
metadata about current state (e.g. sequence numbers) -export const HEADS = Symbol('_heads') // object containing metadata about current state (e.g. sequence numbers) -export const OBJECT_ID = Symbol('_objectId') // object containing metadata about current state (e.g. sequence numbers) -export const READ_ONLY = Symbol('_readOnly') // object containing metadata about current state (e.g. sequence numbers) -export const FROZEN = Symbol('_frozen') // object containing metadata about current state (e.g. sequence numbers) +export const STATE = Symbol.for('_am_state') // object containing metadata about current state (e.g. sequence numbers) +export const HEADS = Symbol.for('_am_heads') // object containing metadata about current state (e.g. sequence numbers) +export const OBJECT_ID = Symbol.for('_am_objectId') // object containing metadata about current state (e.g. sequence numbers) +export const READ_ONLY = Symbol.for('_am_readOnly') // object containing metadata about current state (e.g. sequence numbers) +export const FROZEN = Symbol.for('_am_frozen') // object containing metadata about current state (e.g. 
sequence numbers) + +export const UINT = Symbol.for('_am_uint') +export const INT = Symbol.for('_am_int') +export const F64 = Symbol.for('_am_f64') +export const COUNTER = Symbol.for('_am_counter') +export const TEXT = Symbol.for('_am_text') // Properties of all Automerge objects //const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string) @@ -13,3 +19,4 @@ export const FROZEN = Symbol('_frozen') // object containing metadata ab //const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback //const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element + diff --git a/automerge-js/src/counter.ts b/automerge-js/src/counter.ts index 50c885d6..1a810e23 100644 --- a/automerge-js/src/counter.ts +++ b/automerge-js/src/counter.ts @@ -1,4 +1,5 @@ import { Automerge, ObjID, Prop } from "automerge-types" +import { COUNTER } from "./constants" /** * The most basic CRDT: an integer value that can be changed only by * incrementing and decrementing. 
Since addition of integers is commutative, @@ -9,6 +10,7 @@ export class Counter { constructor(value?: number) { this.value = value || 0 + Reflect.defineProperty(this, COUNTER, { value: true }) } /** diff --git a/automerge-js/src/numbers.ts b/automerge-js/src/numbers.ts index dbc26669..9d63bcc5 100644 --- a/automerge-js/src/numbers.ts +++ b/automerge-js/src/numbers.ts @@ -1,5 +1,7 @@ // Convience classes to allow users to stricly specify the number type they want +import { INT, UINT, F64 } from "./constants" + export class Int { value: number; @@ -8,6 +10,7 @@ export class Int { throw new RangeError(`Value ${value} cannot be a uint`) } this.value = value + Reflect.defineProperty(this, INT, { value: true }) Object.freeze(this) } } @@ -20,6 +23,7 @@ export class Uint { throw new RangeError(`Value ${value} cannot be a uint`) } this.value = value + Reflect.defineProperty(this, UINT, { value: true }) Object.freeze(this) } } @@ -32,6 +36,7 @@ export class Float64 { throw new RangeError(`Value ${value} cannot be a float64`) } this.value = value || 0.0 + Reflect.defineProperty(this, F64, { value: true }) Object.freeze(this) } } diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index e3dd015f..a890ab38 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -5,7 +5,7 @@ import { AutomergeValue, ScalarValue, MapValue, ListValue, TextValue } from "./t import { Int, Uint, Float64 } from "./numbers" import { Counter, getWriteableCounter } from "./counter" import { Text } from "./text" -import { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } from "./constants" +import { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY, COUNTER, INT, UINT, F64, TEXT } from "./constants" function parseListIndex(key) { if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) @@ -59,26 +59,28 @@ function import_value(value) { case 'object': if (value == null) { return [ null, "null"] - } else if (value instanceof Uint) { + } else if 
(value[UINT]) { return [ value.value, "uint" ] - } else if (value instanceof Int) { + } else if (value[INT]) { return [ value.value, "int" ] - } else if (value instanceof Float64) { + } else if (value[F64]) { return [ value.value, "f64" ] - } else if (value instanceof Counter) { + } else if (value[COUNTER]) { return [ value.value, "counter" ] + } else if (value[TEXT]) { + return [ value, "text" ] } else if (value instanceof Date) { return [ value.getTime(), "timestamp" ] } else if (value instanceof Uint8Array) { return [ value, "bytes" ] } else if (value instanceof Array) { return [ value, "list" ] - } else if (value instanceof Text) { - return [ value, "text" ] + } else if (Object.getPrototypeOf(value) === Object.getPrototypeOf({})) { + return [ value, "map" ] } else if (value[OBJECT_ID]) { throw new RangeError('Cannot create a reference to an existing document object') } else { - return [ value, "map" ] + throw new RangeError(`Cannot assign unknown object: ${value}`) } break; case 'boolean': diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index 7aa2cac4..5edf9714 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -1,4 +1,5 @@ import { Value } from "automerge-types" +import { TEXT } from "./constants" export class Text { elems: Value[] @@ -14,6 +15,7 @@ export class Text { } else { throw new TypeError(`Unsupported initial value for Text: ${text}`) } + Reflect.defineProperty(this, TEXT, { value: true }) } get length () : number { From fe4071316ddedfd84c7587406280113405f2b184 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 7 Jul 2022 09:24:57 +0100 Subject: [PATCH 490/730] Add docs workflow status badge to README --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 09cca71d..c6ff7557 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,7 @@ [![homepage](https://img.shields.io/badge/homepage-published-informational)](https://automerge.org/) [![main 
docs](https://img.shields.io/badge/docs-main-informational)](https://automerge.org/automerge-rs/automerge/) [![ci](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml) +[![docs](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml) This is a rust implementation of the [Automerge](https://github.com/automerge/automerge) file format and network protocol. From c49ba5ea98053c70138b0763759632d8f8f0c2f6 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 7 Jul 2022 09:18:15 +0100 Subject: [PATCH 491/730] Fixup js edit-trace script and documentation bits --- Makefile | 7 +++++ README.md | 3 ++- automerge-js/README.md | 12 ++++----- edit-trace/Makefile | 10 ++++++-- edit-trace/README.md | 50 +++++++++++++++++------------------- edit-trace/automerge-js.js | 3 +++ edit-trace/automerge-wasm.js | 6 ----- 7 files changed, 48 insertions(+), 43 deletions(-) diff --git a/Makefile b/Makefile index 9f8db2d1..a1f3fd62 100644 --- a/Makefile +++ b/Makefile @@ -1,13 +1,20 @@ +.PHONY: rust rust: cd automerge && cargo test +.PHONY: wasm wasm: cd automerge-wasm && yarn cd automerge-wasm && yarn build cd automerge-wasm && yarn test cd automerge-wasm && yarn link +.PHONY: js js: wasm cd automerge-js && yarn cd automerge-js && yarn link "automerge-wasm" cd automerge-js && yarn test + +.PHONY: clean +clean: + git clean -x -d -f diff --git a/README.md b/README.md index c6ff7557..2e6a2bee 100644 --- a/README.md +++ b/README.md @@ -99,6 +99,7 @@ $ cmake -S .. -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF ## building and testing $ cmake --build . --target test_automerge ``` + To add debugging symbols, replace `Release` with `Debug`. To build a shared library instead of a static one, replace `OFF` with `ON`. @@ -108,4 +109,4 @@ to list here. 
## Benchmarking -The `edit-trace` folder has the main code for running the edit trace benchmarking. +The [`edit-trace`](edit-trace) folder has the main code for running the edit trace benchmarking. diff --git a/automerge-js/README.md b/automerge-js/README.md index 7b8da950..707c51bb 100644 --- a/automerge-js/README.md +++ b/automerge-js/README.md @@ -1,4 +1,3 @@ - ## Automerge JS This is a reimplementation of Automerge as a JavaScript wrapper around the "automerge-wasm". @@ -7,14 +6,14 @@ This package is in alpha and feedback in welcome. The primary differences between using this package and "automerge" are as follows: -1. The low level api needs to plugged in via the use function. The only current implementation of "automerge-wasm" but another could used in theory. +1. The low level api needs to plugged in via the use function. The only current implementation of "automerge-wasm" but another could used in theory. -```js -import * as Automerge from "automerge-js" -import * as wasm_api from "automerge-wasm" +```javascript +import * as Automerge from "automerge-js"; +import * as wasm_api from "automerge-wasm"; // browsers require an async wasm load - see automerge-wasm docs -Automerge.use(wasm_api) +Automerge.use(wasm_api); ``` 2. There is no front-end back-end split, and no patch format or patch observer. These concepts don't make sense with the wasm implementation. @@ -24,4 +23,3 @@ Automerge.use(wasm_api) 4. The 'Text' class is currently very slow and needs to be re-worked. Beyond this please refer to the Automerge [README](http://github.com/automerge/automerge/) for further information. 
- diff --git a/edit-trace/Makefile b/edit-trace/Makefile index 05001dbf..e0e77643 100644 --- a/edit-trace/Makefile +++ b/edit-trace/Makefile @@ -1,19 +1,25 @@ +.PHONY: rust rust: cargo run --release -build-wasm: ../automerge-wasm ../automerge +.PHONY: build-wasm +build-wasm: cd ../automerge-wasm && yarn cd ../automerge-wasm && yarn release +.PHONY: wasm wasm: build-wasm node automerge-wasm.js +.PHONY: build-js build-js: build-wasm cd ../automerge-js && yarn - cd ../automerge-js && yarn link "automerge-wasm" + cd ../automerge-js && yarn build +.PHONY: js js: build-js node automerge-js.js +.PHONY: baseline baseline: node baseline.js diff --git a/edit-trace/README.md b/edit-trace/README.md index 58c65fe8..aabe83dc 100644 --- a/edit-trace/README.md +++ b/edit-trace/README.md @@ -1,52 +1,48 @@ +# Edit trace benchmarks + Try the different editing traces on different automerge implementations -### Automerge Experiement - pure rust +## Automerge Experiement - pure rust -```code - # cargo --release run +```sh +make rust ``` -#### Benchmarks +### Benchmarks There are some criterion benchmarks in the `benches` folder which can be run with `cargo bench` or `cargo criterion`. For flamegraphing, `cargo flamegraph --bench main -- --bench "save" # or "load" or "replay" or nothing` can be useful. 
-### Automerge Experiement - wasm api +## Automerge Experiement - wasm api -```code - # node automerge-wasm.js +```sh +make wasm ``` -### Automerge Experiment - JS wrapper +## Automerge Experiment - JS wrapper -```code - # node automerge-js.js +```sh +make js ``` -### Automerge 1.0 pure javascript - new fast backend +## Automerge 1.0 pure javascript - new fast backend -This assume automerge has been checked out in a directory along side this repo +This assumes automerge has been checked out in a directory along side this repo -```code - # node automerge-1.0.js +```sh +node automerge-1.0.js ``` -### Automerge 1.0 with rust backend +## Automerge 1.0 with rust backend -This assume automerge has been checked out in a directory along side this repo +This assumes automerge has been checked out in a directory along side this repo -```code - # node automerge-rs.js +```sh +node automerge-rs.js ``` -### Automerge Experiment - JS wrapper +## Baseline Test. Javascript Array with no CRDT info -```code - # node automerge-js.js -``` - -### Baseline Test. 
Javascript Array with no CRDT info - -```code - # node baseline.js +```sh +make baseline ``` diff --git a/edit-trace/automerge-js.js b/edit-trace/automerge-js.js index 994c87c8..eae08634 100644 --- a/edit-trace/automerge-js.js +++ b/edit-trace/automerge-js.js @@ -1,6 +1,9 @@ // Apply the paper editing trace to an Automerge.Text object, one char at a time const { edits, finalText } = require('./editing-trace') const Automerge = require('../automerge-js') +const wasm_api = require('../automerge-wasm') + +Automerge.use(wasm_api) const start = new Date() let state = Automerge.from({text: new Automerge.Text()}) diff --git a/edit-trace/automerge-wasm.js b/edit-trace/automerge-wasm.js index cd153c2d..e0f1454d 100644 --- a/edit-trace/automerge-wasm.js +++ b/edit-trace/automerge-wasm.js @@ -1,9 +1,3 @@ - -// make sure to - -// # cd ../automerge-wasm -// # yarn release - const { edits, finalText } = require('./editing-trace') const Automerge = require('../automerge-wasm') From 7e8cbf510a92b68a53526cd20cc512a5527136c3 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 7 Jul 2022 09:40:18 +0100 Subject: [PATCH 492/730] Add links to projects --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index c6ff7557..15d80343 100644 --- a/README.md +++ b/README.md @@ -15,10 +15,10 @@ If you are looking for the origional `automerge-rs` project that can be used as This project has 4 components: -1. _automerge_ - a rust implementation of the library. This project is the most mature and being used in a handful of small applications. -2. _automerge-wasm_ - a js/wasm interface to the underlying rust library. This api is generally mature and in use in a handful of projects as well. -3. _automerge-js_ - this is a javascript library using the wasm interface to export the same public api of the primary automerge project. 
Currently this project passes all of automerge's tests but has not been used in any real project or packaged as an NPM. Alpha testers welcome. -4. _automerge-c_ - this is a c library intended to be an ffi integration point for all other languages. It is currently a work in progress and not yet ready for any testing. +1. [_automerge_](automerge) - a rust implementation of the library. This project is the most mature and being used in a handful of small applications. +2. [_automerge-wasm_](automerge-wasm) - a js/wasm interface to the underlying rust library. This api is generally mature and in use in a handful of projects as well. +3. [_automerge-js_](automerge-js) - this is a javascript library using the wasm interface to export the same public api of the primary automerge project. Currently this project passes all of automerge's tests but has not been used in any real project or packaged as an NPM. Alpha testers welcome. +4. [_automerge-c_](automerge-c) - this is a c library intended to be an ffi integration point for all other languages. It is currently a work in progress and not yet ready for any testing. ## How? From 0a86a4d92cdd001aa589dbc643abaf533ed4ff03 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 12 Jul 2022 09:59:03 +0100 Subject: [PATCH 493/730] Don't build tests for docs The test `CMakeLists.txt` brings in cmocka but we don't actually need to build the tests to get the docs. This just makes the cmake docs script tell cmake not to build docs. --- scripts/ci/cmake-docs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ci/cmake-docs b/scripts/ci/cmake-docs index 0ba3ea91..7f29a311 100755 --- a/scripts/ci/cmake-docs +++ b/scripts/ci/cmake-docs @@ -4,7 +4,7 @@ set -eoux pipefail mkdir -p automerge-c/build cd automerge-c/build -cmake -B . -S .. +cmake -B . -S .. -DBUILD_TESTING=OFF cmake --build . 
--target automerge_docs echo "Try opening automerge-c/build/src/html/index.html" From 246ed4afabbf5e5fb84ad2e95ec13136f3cc63cd Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 12 Jul 2022 10:09:07 +0100 Subject: [PATCH 494/730] Test building docs on PRs --- .github/workflows/docs.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 1f682628..bdae857c 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -2,6 +2,9 @@ on: push: branches: - main + pull_request: + branches: + - main name: Documentation @@ -58,6 +61,7 @@ jobs: run: echo '' > docs/index.html - name: Deploy docs + if: github.event_name == 'push' && github.head_ref == 'refs/heads/main' uses: peaceiris/actions-gh-pages@v3 with: github_token: ${{ secrets.GITHUB_TOKEN }} From 6ea5982c16703f5049d04759328cee6dcf19926f Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 12 Jul 2022 10:36:20 +0100 Subject: [PATCH 495/730] Change parents to return result if objid is not an object There is easy confusion when calling parents with the id of a scalar, wanting it to get the parent object first but that is not implemented. To get the parent object of a scalar id would mean searching every object for the OpId which may get too expensive when lots of objects are around, this may be reconsidered later but the result would still be useful to indicate when the id doesn't exist in the document vs has no parents. 
--- automerge/src/autocommit.rs | 6 +- automerge/src/automerge.rs | 58 ++++++++++++------- automerge/src/automerge/tests.rs | 23 +++++--- automerge/src/error.rs | 2 + automerge/src/parents.rs | 10 ++-- .../src/transaction/manual_transaction.rs | 6 +- automerge/src/transaction/transactable.rs | 23 +++++--- 7 files changed, 77 insertions(+), 51 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 86601aa5..1233c1e0 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -499,11 +499,7 @@ impl Transactable for AutoCommit { self.doc.get_all_at(obj, prop, heads) } - fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { - self.doc.parent_object(obj) - } - - fn parents(&self, obj: ExId) -> Parents<'_> { + fn parents>(&self, obj: O) -> Result, AutomergeError> { self.doc.parents(obj) } } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 35552658..e1e8d787 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -224,34 +224,45 @@ impl Automerge { /// Get the object id of the object that contains this object and the prop that this object is /// at in that object. - pub fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - if obj == ObjId::root() { - // root has no parent - None - } else { - self.ops - .parent_object(&obj) - .map(|(id, key)| (self.id_to_exid(id.0), self.export_key(id, key))) - } - } else { + pub(crate) fn parent_object(&self, obj: ObjId) -> Option<(ObjId, Key)> { + if obj == ObjId::root() { + // root has no parent None + } else { + self.ops.parent_object(&obj) } } - /// Get an iterator over the parents of an object. - pub fn parents(&self, obj: ExId) -> Parents<'_> { - Parents { obj, doc: self } + /// Get the parents of an object in the document tree. + /// + /// ### Errors + /// + /// Returns an error when the id given is not the id of an object in this document. 
+ /// This function does not get the parents of scalar values contained within objects. + /// + /// ### Experimental + /// + /// This function may in future be changed to allow getting the parents from the id of a scalar + /// value. + pub fn parents>(&self, obj: O) -> Result, AutomergeError> { + let obj_id = self.exid_to_obj(obj.as_ref())?; + Ok(Parents { + obj: obj_id, + doc: self, + }) } - pub fn path_to_object>(&self, obj: O) -> Vec<(ExId, Prop)> { - let mut path = self.parents(obj.as_ref().clone()).collect::>(); + pub fn path_to_object>( + &self, + obj: O, + ) -> Result, AutomergeError> { + let mut path = self.parents(obj.as_ref().clone())?.collect::>(); path.reverse(); - path + Ok(path) } /// Export a key to a prop. - fn export_key(&self, obj: ObjId, key: Key) -> Prop { + pub(crate) fn export_key(&self, obj: ObjId, key: Key) -> Prop { match key { Key::Map(m) => Prop::Map(self.ops.m.props.get(m).into()), Key::Seq(opid) => { @@ -420,8 +431,8 @@ impl Automerge { ExId::Id(ctr, actor, idx) => { // do a direct get here b/c this could be foriegn and not be within the array // bounds - if self.ops.m.actors.cache.get(*idx) == Some(actor) { - Ok(ObjId(OpId(*ctr, *idx))) + let obj = if self.ops.m.actors.cache.get(*idx) == Some(actor) { + ObjId(OpId(*ctr, *idx)) } else { // FIXME - make a real error let idx = self @@ -430,7 +441,12 @@ impl Automerge { .actors .lookup(actor) .ok_or(AutomergeError::Fail)?; - Ok(ObjId(OpId(*ctr, idx))) + ObjId(OpId(*ctr, idx)) + }; + if self.ops.object_type(&obj).is_some() { + Ok(obj) + } else { + Err(AutomergeError::NotAnObject) } } } diff --git a/automerge/src/automerge/tests.rs b/automerge/src/automerge/tests.rs index b3ad0ef8..c66f6959 100644 --- a/automerge/src/automerge/tests.rs +++ b/automerge/src/automerge/tests.rs @@ -1322,9 +1322,18 @@ fn get_parent_objects() { doc.insert(&list, 0, 2).unwrap(); let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); - assert_eq!(doc.parent_object(&map), Some((ROOT, Prop::Map("a".into())))); - 
assert_eq!(doc.parent_object(&list), Some((map, Prop::Seq(0)))); - assert_eq!(doc.parent_object(&text), Some((list, Prop::Seq(0)))); + assert_eq!( + doc.parents(&map).unwrap().next(), + Some((ROOT, Prop::Map("a".into()))) + ); + assert_eq!( + doc.parents(&list).unwrap().next(), + Some((map, Prop::Seq(0))) + ); + assert_eq!( + doc.parents(&text).unwrap().next(), + Some((list, Prop::Seq(0))) + ); } #[test] @@ -1336,15 +1345,15 @@ fn get_path_to_object() { let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); assert_eq!( - doc.path_to_object(&map), + doc.path_to_object(&map).unwrap(), vec![(ROOT, Prop::Map("a".into()))] ); assert_eq!( - doc.path_to_object(&list), + doc.path_to_object(&list).unwrap(), vec![(ROOT, Prop::Map("a".into())), (map.clone(), Prop::Seq(0)),] ); assert_eq!( - doc.path_to_object(&text), + doc.path_to_object(&text).unwrap(), vec![ (ROOT, Prop::Map("a".into())), (map, Prop::Seq(0)), @@ -1361,7 +1370,7 @@ fn parents_iterator() { doc.insert(&list, 0, 2).unwrap(); let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); - let mut parents = doc.parents(text); + let mut parents = doc.parents(text).unwrap(); assert_eq!(parents.next(), Some((list, Prop::Seq(0)))); assert_eq!(parents.next(), Some((map, Prop::Seq(0)))); assert_eq!(parents.next(), Some((ROOT, Prop::Map("a".into())))); diff --git a/automerge/src/error.rs b/automerge/src/error.rs index db1c4884..9228b501 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -5,6 +5,8 @@ use thiserror::Error; #[derive(Error, Debug, PartialEq)] pub enum AutomergeError { + #[error("id was not an object id")] + NotAnObject, #[error("invalid obj id format `{0}`")] InvalidObjIdFormat(String), #[error("invalid obj id `{0}`")] diff --git a/automerge/src/parents.rs b/automerge/src/parents.rs index a6c891bd..76478b42 100644 --- a/automerge/src/parents.rs +++ b/automerge/src/parents.rs @@ -1,8 +1,8 @@ -use crate::{exid::ExId, Automerge, Prop}; +use crate::{exid::ExId, types::ObjId, Automerge, 
Prop}; #[derive(Debug)] pub struct Parents<'a> { - pub(crate) obj: ExId, + pub(crate) obj: ObjId, pub(crate) doc: &'a Automerge, } @@ -10,9 +10,9 @@ impl<'a> Iterator for Parents<'a> { type Item = (ExId, Prop); fn next(&mut self) -> Option { - if let Some((obj, prop)) = self.doc.parent_object(&self.obj) { - self.obj = obj.clone(); - Some((obj, prop)) + if let Some((obj, key)) = self.doc.parent_object(self.obj) { + self.obj = obj; + Some((self.doc.id_to_exid(obj.0), self.doc.export_key(obj, key))) } else { None } diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 7be7932e..58c5ca88 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -287,11 +287,7 @@ impl<'a> Transactable for Transaction<'a> { self.doc.get_all_at(obj, prop, heads) } - fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { - self.doc.parent_object(obj) - } - - fn parents(&self, obj: ExId) -> crate::Parents<'_> { + fn parents>(&self, obj: O) -> Result, AutomergeError> { self.doc.parents(obj) } } diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs index 209da3c9..0c7f6c45 100644 --- a/automerge/src/transaction/transactable.rs +++ b/automerge/src/transaction/transactable.rs @@ -179,15 +179,22 @@ pub trait Transactable { heads: &[ChangeHash], ) -> Result, ExId)>, AutomergeError>; - /// Get the object id of the object that contains this object and the prop that this object is - /// at in that object. - fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)>; + /// Get the parents of an object in the document tree. + /// + /// ### Errors + /// + /// Returns an error when the id given is not the id of an object in this document. + /// This function does not get the parents of scalar values contained within objects. 
+ /// + /// ### Experimental + /// + /// This function may in future be changed to allow getting the parents from the id of a scalar + /// value. + fn parents>(&self, obj: O) -> Result, AutomergeError>; - fn parents(&self, obj: ExId) -> Parents<'_>; - - fn path_to_object>(&self, obj: O) -> Vec<(ExId, Prop)> { - let mut path = self.parents(obj.as_ref().clone()).collect::>(); + fn path_to_object>(&self, obj: O) -> Result, AutomergeError> { + let mut path = self.parents(obj.as_ref().clone())?.collect::>(); path.reverse(); - path + Ok(path) } } From be439892a48c6e003ddd9b55c2176d760795944f Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Tue, 12 Jul 2022 19:09:47 +0100 Subject: [PATCH 496/730] Clean up automerge dependencies --- automerge/Cargo.toml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index 64283ca2..dd58e3b5 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -8,28 +8,28 @@ documentation = "https://automerge.org/automerge-rs/automerge/" rust-version = "1.57.0" [features] -optree-visualisation = ["dot"] -wasm = ["js-sys", "wasm-bindgen", "web-sys"] +optree-visualisation = ["dot", "rand"] +wasm = ["js-sys", "wasm-bindgen", "web-sys", "uuid/wasm-bindgen"] [dependencies] hex = "^0.4.3" leb128 = "^0.2.5" sha2 = "^0.10.0" -rand = { version = "^0.8.4" } thiserror = "^1.0.16" itertools = "^0.10.3" flate2 = "^1.0.22" -nonzero_ext = "^0.2.0" -uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] } +uuid = { version = "^0.8.2", features=["v4", "serde"] } smol_str = { version = "^0.1.21", features=["serde"] } -tracing = { version = "^0.1.29", features = ["log"] } +tracing = { version = "^0.1.29" } fxhash = "^0.2.1" tinyvec = { version = "^1.5.1", features = ["alloc"] } serde = { version = "^1.0", features=["derive"] } + # optional deps dot = { version = "0.1.4", optional = true } js-sys = { version = "^0.3", optional = true } wasm-bindgen = { version = "^0.2", 
optional = true } +rand = { version = "^0.8.4", optional = true } [dependencies.web-sys] version = "^0.3.55" From f14a61e581020a79029fef513e0d567176776822 Mon Sep 17 00:00:00 2001 From: Adel Salakh Date: Mon, 11 Jul 2022 19:44:33 +0200 Subject: [PATCH 497/730] Sort successors in SuccEncoder Makes SuccEncoder sort successors in Lamport clock order. Such an ordering is expected by automerge js when loading documents, otherwise some documents fail to load with a "operation IDs are not in ascending order" error. --- automerge/src/columnar.rs | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index 8744ee77..080d64e1 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -754,6 +754,16 @@ struct SuccEncoder { ctr: DeltaEncoder, } +fn succ_ord(left: &OpId, right: &OpId, actors: &[usize]) -> Ordering { + match (left, right) { + (OpId(0, _), OpId(0, _)) => Ordering::Equal, + (OpId(0, _), OpId(_, _)) => Ordering::Less, + (OpId(_, _), OpId(0, _)) => Ordering::Greater, + (OpId(a, x), OpId(b, y)) if a == b => actors[*x].cmp(&actors[*y]), + (OpId(a, _), OpId(b, _)) => a.cmp(b), + } +} + impl SuccEncoder { fn new() -> SuccEncoder { SuccEncoder { @@ -765,7 +775,9 @@ impl SuccEncoder { fn append(&mut self, succ: &[OpId], actors: &[usize]) { self.num.append_value(succ.len()); - for s in succ.iter() { + let mut sorted_succ = succ.to_vec(); + sorted_succ.sort_by(|left, right| succ_ord(left, right, actors)); + for s in sorted_succ.iter() { self.ctr.append_value(s.0); self.actor.append_value(actors[s.1]); } From 8c93d498b309defd7e08e935581be780fc1d2d04 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 13 Jul 2022 18:16:54 +0100 Subject: [PATCH 498/730] ci: Rename docs script to rust-docs and build cmake docs in CI --- .github/workflows/ci.yaml | 9 ++++++++- .github/workflows/docs.yaml | 4 ---- scripts/ci/run | 2 +- scripts/ci/{docs => rust-docs} | 0 4 files changed, 9 
insertions(+), 6 deletions(-) rename scripts/ci/{docs => rust-docs} (100%) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 3039687d..358baee4 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -45,7 +45,14 @@ jobs: toolchain: 1.60.0 default: true - uses: Swatinem/rust-cache@v1 - - run: ./scripts/ci/docs + - name: Build rust docs + run: ./scripts/ci/rust-docs + shell: bash + - name: Install doxygen + run: sudo apt-get install -y doxygen + shell: bash + - name: Build C docs + run: ./scripts/ci/cmake-docs shell: bash cargo-deny: diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index bdae857c..1f682628 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -2,9 +2,6 @@ on: push: branches: - main - pull_request: - branches: - - main name: Documentation @@ -61,7 +58,6 @@ jobs: run: echo '' > docs/index.html - name: Deploy docs - if: github.event_name == 'push' && github.head_ref == 'refs/heads/main' uses: peaceiris/actions-gh-pages@v3 with: github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/scripts/ci/run b/scripts/ci/run index c4831fdc..423b995c 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -4,7 +4,7 @@ set -eou pipefail ./scripts/ci/fmt ./scripts/ci/lint ./scripts/ci/build-test -./scripts/ci/docs +./scripts/ci/rust-docs ./scripts/ci/advisory ./scripts/ci/wasm_tests ./scripts/ci/js_tests diff --git a/scripts/ci/docs b/scripts/ci/rust-docs similarity index 100% rename from scripts/ci/docs rename to scripts/ci/rust-docs From 359376b3db626fcfaa203fef57bddf45bc636457 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Thu, 14 Jul 2022 18:33:00 +0100 Subject: [PATCH 499/730] publish: Add description to automerge crate Came up as a warning in a dry-run publish. 
--- automerge/Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index dd58e3b5..1dbd0833 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -6,6 +6,7 @@ license = "MIT" repository = "https://github.com/automerge/automerge-rs" documentation = "https://automerge.org/automerge-rs/automerge/" rust-version = "1.57.0" +description = "A JSON-like data structure (a CRDT) that can be modified concurrently by different users, and merged again automatically" [features] optree-visualisation = ["dot", "rand"] From d71a734e496ee3835f4cb8e8da59d0b61a5ad73c Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 14 Jul 2022 11:42:20 +0100 Subject: [PATCH 500/730] Add OpIds to enforce ordering of Op::succ and Op::pred The ordering of opids in the successor and predecessors of an op is relevant when encoding because inconsistent ordering changes the hashgraph. This means we must maintain the invariant that opids are encoded in ascending lamport order. We have been maintaining this invariant in the encoding implementation - however, this is not ideal because it requires allocating for every op in the change when we commit a transaction. Add `types::OpIds` and use it in place of `Vec` for `Op::succ` and `Op::pred`. `OpIds` maintains the invariant that the IDs it contains must be ordered with respect to some comparator function - which is always `OpSetMetadata::lamport_cmp`. Remove the sorting of opids in SuccEncoder::append. 
--- automerge/src/automerge.rs | 8 +- automerge/src/columnar.rs | 17 ++-- automerge/src/op_set.rs | 48 +++++++++-- automerge/src/op_tree.rs | 4 +- automerge/src/op_tree/iter.rs | 4 +- automerge/src/query.rs | 6 +- automerge/src/transaction/inner.rs | 10 +-- automerge/src/types.rs | 11 ++- automerge/src/types/opids.rs | 126 +++++++++++++++++++++++++++++ 9 files changed, 197 insertions(+), 37 deletions(-) create mode 100644 automerge/src/types/opids.rs diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index e1e8d787..c167178b 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -710,11 +710,7 @@ impl Automerge { legacy::ObjectId::Root => ObjId::root(), legacy::ObjectId::Id(id) => ObjId(OpId(id.0, self.ops.m.actors.cache(id.1))), }; - let pred = c - .pred - .iter() - .map(|i| OpId(i.0, self.ops.m.actors.cache(i.1.clone()))) - .collect(); + let pred = self.ops.m.import_opids(c.pred); let key = match &c.key { legacy::Key::Map(n) => Key::Map(self.ops.m.props.cache(n.to_string())), legacy::Key::Seq(legacy::ElementId::Head) => Key::Seq(types::HEAD), @@ -1048,7 +1044,7 @@ impl Automerge { OpType::Delete => format!("del{}", 0), }; let pred: Vec<_> = op.pred.iter().map(|id| self.to_string(*id)).collect(); - let succ: Vec<_> = op.succ.iter().map(|id| self.to_string(*id)).collect(); + let succ: Vec<_> = op.succ.into_iter().map(|id| self.to_string(*id)).collect(); log!( " {:12} {:12} {:12} {:12} {:12?} {:12?}", id, diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index 080d64e1..25748a25 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -773,11 +773,18 @@ impl SuccEncoder { } } - fn append(&mut self, succ: &[OpId], actors: &[usize]) { - self.num.append_value(succ.len()); - let mut sorted_succ = succ.to_vec(); - sorted_succ.sort_by(|left, right| succ_ord(left, right, actors)); - for s in sorted_succ.iter() { + fn append< + 'a, + I: IntoIterator, + II: ExactSizeIterator + Iterator, + >( + &mut 
self, + succ: I, + actors: &[usize], + ) { + let iter = succ.into_iter(); + self.num.append_value(iter.len()); + for s in iter { self.ctr.append_value(s.0); self.actor.append_value(actors[s.1]); } diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index e1fe7501..e29f0630 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -3,7 +3,7 @@ use crate::exid::ExId; use crate::indexed_cache::IndexedCache; use crate::op_tree::{self, OpTree}; use crate::query::{self, OpIdSearch, TreeQuery}; -use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpType}; +use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpIds, OpType}; use crate::{ObjType, OpObserver}; use fxhash::FxBuildHasher; use std::cmp::Ordering; @@ -138,13 +138,29 @@ impl OpSetInternal { pub(crate) fn replace(&mut self, obj: &ObjId, index: usize, f: F) where - F: FnMut(&mut Op), + F: Fn(&mut Op), { if let Some(tree) = self.trees.get_mut(obj) { tree.internal.update(index, f) } } + /// Add `op` as a successor to each op at `op_indices` in `obj` + pub(crate) fn add_succ>( + &mut self, + obj: &ObjId, + op_indices: I, + op: &Op, + ) { + if let Some(tree) = self.trees.get_mut(obj) { + for i in op_indices { + tree.internal.update(i, |old_op| { + old_op.add_succ(op, |left, right| self.m.lamport_cmp(*left, *right)) + }); + } + } + } + pub(crate) fn remove(&mut self, obj: &ObjId, index: usize) -> Op { // this happens on rollback - be sure to go back to the old state let tree = self.trees.get_mut(obj).unwrap(); @@ -185,9 +201,7 @@ impl OpSetInternal { let succ = q.succ; let pos = q.pos; - for i in succ { - self.replace(obj, i, |old_op| old_op.add_succ(&op)); - } + self.add_succ(obj, succ.iter().copied(), &op); if !op.is_delete() { self.insert(pos, obj, op.clone()); @@ -255,9 +269,7 @@ impl OpSetInternal { } } - for i in succ { - self.replace(obj, i, |old_op| old_op.add_succ(&op)); - } + self.add_succ(obj, succ.iter().copied(), &op); if !op.is_delete() { self.insert(pos, obj, op.clone()); @@ 
-346,4 +358,24 @@ impl OpSetMetadata { (OpId(a, _), OpId(b, _)) => a.cmp(&b), } } + + pub(crate) fn sorted_opids>(&self, opids: I) -> OpIds { + OpIds::new(opids, |left, right| self.lamport_cmp(*left, *right)) + } + + pub(crate) fn import_opids>( + &mut self, + external_opids: I, + ) -> OpIds { + let iter = external_opids.into_iter(); + let mut result = Vec::with_capacity(iter.size_hint().1.unwrap_or(0)); + for opid in iter { + let crate::legacy::OpId(counter, actor) = opid; + let actor_idx = self.actors.cache(actor); + result.push(OpId(counter, actor_idx)); + } + OpIds::new(result.into_iter(), |left, right| { + self.lamport_cmp(*left, *right) + }) + } } diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index c338c145..1363dae3 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -762,8 +762,8 @@ mod tests { id: zero, action: amp::OpType::Put(0.into()), key: zero.into(), - succ: vec![], - pred: vec![], + succ: Default::default(), + pred: Default::default(), insert: false, } } diff --git a/automerge/src/op_tree/iter.rs b/automerge/src/op_tree/iter.rs index 8a24a0a6..8d070f11 100644 --- a/automerge/src/op_tree/iter.rs +++ b/automerge/src/op_tree/iter.rs @@ -260,8 +260,8 @@ mod tests { action: OpType::Put(ScalarValue::Uint(counter)), id: OpId(counter, 0), key: Key::Map(0), - succ: Vec::new(), - pred: Vec::new(), + succ: Default::default(), + pred: Default::default(), insert: false, } } diff --git a/automerge/src/query.rs b/automerge/src/query.rs index e3d2f372..f09ed0c1 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -209,11 +209,11 @@ impl VisWindow { CounterData { pos, val: start, - succ: op.succ.iter().cloned().collect(), + succ: op.succ.into_iter().cloned().collect(), op: op.clone(), }, ); - if !op.succ.iter().any(|i| clock.covers(i)) { + if !op.succ.into_iter().any(|i| clock.covers(i)) { visible = true; } } @@ -231,7 +231,7 @@ impl VisWindow { } } _ => { - if !op.succ.iter().any(|i| clock.covers(i)) { + if 
!op.succ.into_iter().any(|i| clock.covers(i)) { visible = true; } } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 6969e317..86936492 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -162,11 +162,7 @@ impl TransactionInner { obj: ObjId, succ_pos: &[usize], ) { - for succ in succ_pos { - doc.ops.replace(&obj, *succ, |old_op| { - old_op.add_succ(&op); - }); - } + doc.ops.add_succ(&obj, succ_pos.iter().copied(), &op); if !op.is_delete() { doc.ops.insert(pos, &obj, op.clone()); @@ -272,7 +268,7 @@ impl TransactionInner { return Err(AutomergeError::MissingCounter); } - let pred = query.ops.iter().map(|op| op.id).collect(); + let pred = doc.ops.m.sorted_opids(query.ops.iter().map(|o| o.id)); let op = Op { id, @@ -300,7 +296,7 @@ impl TransactionInner { let query = doc.ops.search(&obj, query::Nth::new(index)); let id = self.next_id(); - let pred = query.ops.iter().map(|op| op.id).collect(); + let pred = doc.ops.m.sorted_opids(query.ops.iter().map(|o| o.id)); let key = query.key()?; if query.ops.len() == 1 && query.ops[0].is_noop(&action) { diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 1c67afe2..141205d0 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -8,6 +8,9 @@ use std::fmt::Display; use std::str::FromStr; use tinyvec::{ArrayVec, TinyVec}; +mod opids; +pub(crate) use opids::OpIds; + pub(crate) use crate::clock::Clock; pub(crate) use crate::value::{Counter, ScalarValue, Value}; @@ -379,14 +382,14 @@ pub(crate) struct Op { pub(crate) id: OpId, pub(crate) action: OpType, pub(crate) key: Key, - pub(crate) succ: Vec, - pub(crate) pred: Vec, + pub(crate) succ: OpIds, + pub(crate) pred: OpIds, pub(crate) insert: bool, } impl Op { - pub(crate) fn add_succ(&mut self, op: &Op) { - self.succ.push(op.id); + pub(crate) fn add_succ std::cmp::Ordering>(&mut self, op: &Op, cmp: F) { + self.succ.add(op.id, cmp); if let OpType::Put(ScalarValue::Counter(Counter { 
current, increments, diff --git a/automerge/src/types/opids.rs b/automerge/src/types/opids.rs new file mode 100644 index 00000000..ced0f50c --- /dev/null +++ b/automerge/src/types/opids.rs @@ -0,0 +1,126 @@ +use itertools::Itertools; + +use super::OpId; + +/// A wrapper around `Vec` which preserves the invariant that the ops are +/// in ascending order with respect to their counters and actor IDs. In order to +/// maintain this invariant you must provide a comparator function when adding +/// ops as the actor indices in an OpId are not sufficient to order the OpIds +#[derive(Debug, Clone, PartialEq, Default)] +pub(crate) struct OpIds(Vec); + +impl<'a> IntoIterator for &'a OpIds { + type Item = &'a OpId; + type IntoIter = std::slice::Iter<'a, OpId>; + + fn into_iter(self) -> Self::IntoIter { + self.0.iter() + } +} + +impl OpIds { + pub(crate) fn new, F: Fn(&OpId, &OpId) -> std::cmp::Ordering>( + opids: I, + cmp: F, + ) -> Self { + let mut inner = opids.collect::>(); + inner.sort_by(cmp); + Self(inner) + } + + /// Add an op to this set of OpIds. The `comparator` must provide a + /// consistent ordering between successive calls to `add`. 
+ pub(crate) fn add std::cmp::Ordering>( + &mut self, + opid: OpId, + comparator: F, + ) { + use std::cmp::Ordering::*; + if self.is_empty() { + self.0.push(opid); + return; + } + let idx_and_elem = self + .0 + .iter() + .find_position(|an_opid| matches!(comparator(an_opid, &opid), Greater | Equal)); + if let Some((idx, an_opid)) = idx_and_elem { + if comparator(an_opid, &opid) == Equal { + // nothing to do + } else { + self.0.insert(idx, opid); + } + } else { + self.0.push(opid); + } + } + + pub(crate) fn retain bool>(&mut self, f: F) { + self.0.retain(f) + } + + pub(crate) fn is_empty(&self) -> bool { + self.0.is_empty() + } + + pub(crate) fn len(&self) -> usize { + self.0.len() + } + + pub(crate) fn iter(&self) -> std::slice::Iter<'_, OpId> { + self.0.iter() + } + + pub(crate) fn contains(&self, op: &OpId) -> bool { + self.0.contains(op) + } +} + +#[cfg(test)] +mod tests { + use super::{OpId, OpIds}; + use crate::ActorId; + use proptest::prelude::*; + + fn gen_opid(actors: Vec) -> impl Strategy { + (0..actors.len()).prop_flat_map(|actor_idx| { + (Just(actor_idx), 0..u64::MAX).prop_map(|(actor_idx, counter)| OpId(counter, actor_idx)) + }) + } + + fn scenario() -> impl Strategy, Vec)> { + let actors = vec![ + "aaaa".try_into().unwrap(), + "cccc".try_into().unwrap(), + "bbbb".try_into().unwrap(), + ]; + proptest::collection::vec(gen_opid(actors.clone()), 0..100) + .prop_map(move |opids| (actors.clone(), opids)) + } + + proptest! 
{ + #[test] + fn test_sorted_opids((actors, opids) in scenario()) { + let mut sorted_opids = OpIds::default(); + for opid in &opids { + sorted_opids.add(*opid, |left, right| cmp(&actors, left, right)); + } + let result = sorted_opids.into_iter().cloned().collect::>(); + let mut expected = opids; + expected.sort_by(|left, right| cmp(&actors, left, right)); + expected.dedup(); + assert_eq!(result, expected); + } + } + + fn cmp(actors: &[ActorId], left: &OpId, right: &OpId) -> std::cmp::Ordering { + use std::cmp::Ordering; + match (left, right) { + (OpId(0, _), OpId(0, _)) => Ordering::Equal, + (OpId(0, _), OpId(_, _)) => Ordering::Less, + (OpId(_, _), OpId(0, _)) => Ordering::Greater, + (OpId(a, x), OpId(b, y)) if a == b => actors[*x].cmp(&actors[*y]), + (OpId(a, _), OpId(b, _)) => a.cmp(b), + } + } +} From 668b7b86cae0b125090ca580e3abe083400f2675 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sun, 17 Jul 2022 12:24:46 +0100 Subject: [PATCH 501/730] Add license for unicode-idents `unicode-idents` distributes some data tables from unicode.org which require an additional license. This doesn't affect our licensing because we don't distribute the data files - just the generated code. Explicitly allow the Unicode-DFS-2016 license for unicode-idents. --- deny.toml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/deny.toml b/deny.toml index 4246fa07..f6985357 100644 --- a/deny.toml +++ b/deny.toml @@ -104,6 +104,13 @@ exceptions = [ # since this is an application not a library people would link to it should be fine { allow = ["EPL-2.0"], name = "colored_json" }, + # The Unicode-DFS--2016 license is necessary for unicode-ident because they + # use data from the unicode tables to generate the tables which are + # included in the application. We do not distribute those data files so + # this is not a problem for us. See https://github.com/dtolnay/unicode-ident/pull/9/files + # for more details. 
+ { allow = ["MIT", "Apache-2.0", "Unicode-DFS-2016"], name = "unicode-ident" }, + # these are needed by cbindgen and its dependancies # should be revied more fully before release { allow = ["MPL-2.0"], name = "cbindgen" }, From 52a558ee4dbc5ea1eaf71c0126c657e633e95813 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 23 Jul 2022 08:44:41 -0700 Subject: [PATCH 502/730] Cease writing a pristine copy of the generated header file into the root of the C API's source directory to prevent confusion. --- automerge-c/build.rs | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/automerge-c/build.rs b/automerge-c/build.rs index e953527f..e736d7d3 100644 --- a/automerge-c/build.rs +++ b/automerge-c/build.rs @@ -10,14 +10,10 @@ fn main() { let config = cbindgen::Config::from_file("cbindgen.toml") .expect("Unable to find cbindgen.toml configuration file"); - // let mut config: cbindgen::Config = Default::default(); - // config.language = cbindgen::Language::C; - if let Ok(writer) = cbindgen::generate_with_config(&crate_dir, config) { - writer.write_to_file(crate_dir.join("automerge.h")); - - // Also write the generated header into the target directory when - // specified (necessary for an out-of-source build a la CMake). + // \note CMake sets this environment variable before invoking Cargo so + // that it can direct the generated header file into its + // out-of-source build directory for post-processing. if let Ok(target_dir) = env::var("CARGO_TARGET_DIR") { writer.write_to_file(PathBuf::from(target_dir).join("automerge.h")); } From 15c9adf9657929783d959ae9a78099b15b498c7d Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 23 Jul 2022 08:47:21 -0700 Subject: [PATCH 503/730] Remove the obsolete test suite for the original C API to prevent confusion. 
--- automerge-c/automerge.c | 36 ------------------------------------ 1 file changed, 36 deletions(-) delete mode 100644 automerge-c/automerge.c diff --git a/automerge-c/automerge.c b/automerge-c/automerge.c deleted file mode 100644 index 48730f99..00000000 --- a/automerge-c/automerge.c +++ /dev/null @@ -1,36 +0,0 @@ -#include -#include -#include -#include -#include "automerge.h" - -#define MAX_BUFF_SIZE 4096 - -int main() { - int n = 0; - int data_type = 0; - char buff[MAX_BUFF_SIZE]; - char obj[MAX_BUFF_SIZE]; - AMresult* res = NULL; - - printf("begin\n"); - - AMdoc* doc = AMcreate(); - - printf("AMconfig()..."); - AMconfig(doc, "actor", "aabbcc"); - printf("pass!\n"); - - printf("AMmapSetStr()...\n"); - res = AMmapSetStr(doc, NULL, "string", "hello world"); - if (AMresultStatus(res) != AM_STATUS_COMMAND_OK) - { - printf("AMmapSet() failed: %s\n", AMerrorMessage(res)); - return 1; - } - AMclear(res); - printf("pass!\n"); - - AMdestroy(doc); - printf("end\n"); -} From cc19a37f0108935f82f8f60ee0634a59749bab00 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 23 Jul 2022 08:48:19 -0700 Subject: [PATCH 504/730] Remove the makefile for the original C API to prevent confusion. --- automerge-c/Makefile | 30 ------------------------------ 1 file changed, 30 deletions(-) delete mode 100644 automerge-c/Makefile diff --git a/automerge-c/Makefile b/automerge-c/Makefile deleted file mode 100644 index a5ab353b..00000000 --- a/automerge-c/Makefile +++ /dev/null @@ -1,30 +0,0 @@ - -CC=gcc -CFLAGS=-I. 
-DEPS=automerge.h -LIBS=-lpthread -ldl -lm -LDIR=../target/release -LIB=../target/release/libautomerge.a -DEBUG_LIB=../target/debug/libautomerge.a - -all: $(DEBUG_LIB) automerge - -debug: LDIR=../target/debug -debug: automerge $(DEBUG_LIB) - -automerge: automerge.o $(LDIR)/libautomerge.a - $(CC) -o $@ automerge.o $(LDIR)/libautomerge.a $(LIBS) -L$(LDIR) - -$(DEBUG_LIB): src/*.rs - cargo build - -$(LIB): src/*.rs - cargo build --release - -%.o: %.c $(DEPS) - $(CC) -c -o $@ $< $(CFLAGS) - -.PHONY: clean - -clean: - rm -f *.o automerge $(LIB) $(DEBUG_LIB) From ee68645f31665692e06f055fd02fb686f097708d Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sun, 24 Jul 2022 22:23:54 -0700 Subject: [PATCH 505/730] Add `AMfork()` to expose `automerge::AutoCommit:: fork()`. Add `AMobjValues()` to expose `automerge::AutoCommit::values()` and `automerge::AutoCommit::values_at()`. Add `AMobjIdActorId()`, `AMobjIdCounter()`, and `AMobjIdIndex()` to expose `automerge::ObjId::Id` fields. Change `AMactorId` to reference an `automerge::ActorId` instead of owning one for `AMobjIdActorId()`. Add `AMactorIdCmp()` for `AMobjIdActorId()` comparison. Add `AMobjItems` for `AMobjValues()`. Add `AMobjItem` for `AMobjItems`. Add `AMobjIdEqual()` for property comparison. Rename `to_doc!()` to `to_doc_mut!()` and `to_doc_const!()` to `to_doc!()` for consistency with the Rust standard library. 
--- automerge-c/src/actor_id.rs | 83 +++++--- automerge-c/src/doc.rs | 359 +++++++++++++++++++---------------- automerge-c/src/doc/utils.rs | 8 +- automerge-c/src/obj.rs | 121 +++++++++++- automerge-c/src/obj/item.rs | 75 ++++++++ automerge-c/src/obj/items.rs | 340 +++++++++++++++++++++++++++++++++ 6 files changed, 788 insertions(+), 198 deletions(-) create mode 100644 automerge-c/src/obj/item.rs create mode 100644 automerge-c/src/obj/items.rs diff --git a/automerge-c/src/actor_id.rs b/automerge-c/src/actor_id.rs index 02478e98..4b3dbf00 100644 --- a/automerge-c/src/actor_id.rs +++ b/automerge-c/src/actor_id.rs @@ -1,5 +1,6 @@ use automerge as am; use std::cell::RefCell; +use std::cmp::Ordering; use std::ffi::{CStr, CString}; use std::os::raw::c_char; use std::str::FromStr; @@ -9,13 +10,14 @@ use crate::result::{to_result, AMresult}; /// \struct AMactorId /// \brief An actor's unique identifier. +#[derive(PartialEq)] pub struct AMactorId { - body: am::ActorId, + body: *const am::ActorId, c_str: RefCell>, } impl AMactorId { - pub fn new(body: am::ActorId) -> Self { + pub fn new(body: &am::ActorId) -> Self { Self { body, c_str: RefCell::>::default(), @@ -26,30 +28,30 @@ impl AMactorId { let mut c_str = self.c_str.borrow_mut(); match c_str.as_mut() { None => { - let hex_str = self.body.to_hex_string(); + let hex_str = unsafe { (*self.body).to_hex_string() }; c_str.insert(CString::new(hex_str).unwrap()).as_ptr() } - Some(value) => value.as_ptr(), + Some(hex_str) => hex_str.as_ptr(), } } } impl AsRef for AMactorId { fn as_ref(&self) -> &am::ActorId { - &self.body + unsafe { &*self.body } } } /// \memberof AMactorId -/// \brief Gets the value of an actor ID as a sequence of bytes. +/// \brief Gets the value of an actor identifier as a sequence of bytes. /// /// \param[in] actor_id A pointer to an `AMactorId` struct. -/// \pre \p actor_id must be a valid address. +/// \pre \p actor_id` != NULL`. /// \return An `AMbyteSpan` struct. 
/// \internal /// /// # Safety -/// actor_id must be a pointer to a valid AMactorId +/// actor_id must be a valid pointer to an AMactorId #[no_mangle] pub unsafe extern "C" fn AMactorIdBytes(actor_id: *const AMactorId) -> AMbyteSpan { match actor_id.as_ref() { @@ -59,30 +61,62 @@ pub unsafe extern "C" fn AMactorIdBytes(actor_id: *const AMactorId) -> AMbyteSpa } /// \memberof AMactorId -/// \brief Allocates a new actor ID and initializes it with a random UUID. +/// \brief Compares two actor identifiers. +/// +/// \param[in] actor_id1 A pointer to an `AMactorId` struct. +/// \param[in] actor_id2 A pointer to an `AMactorId` struct. +/// \return `-1` if \p actor_id1 `<` \p actor_id2, `0` if +/// \p actor_id1` == `\p actor_id2 and `1` if +/// \p actor_id1 `>` \p actor_id2. +/// \pre \p actor_id1` != NULL`. +/// \pre \p actor_id2` != NULL`. +/// \internal +/// +/// #Safety +/// actor_id1 must be a valid pointer to an AMactorId +/// actor_id2 must be a valid pointer to an AMactorId +#[no_mangle] +pub unsafe extern "C" fn AMactorIdCmp( + actor_id1: *const AMactorId, + actor_id2: *const AMactorId, +) -> isize { + match (actor_id1.as_ref(), actor_id2.as_ref()) { + (Some(actor_id1), Some(actor_id2)) => match actor_id1.as_ref().cmp(actor_id2.as_ref()) { + Ordering::Less => -1, + Ordering::Equal => 0, + Ordering::Greater => 1, + }, + (None, Some(_)) => -1, + (Some(_), None) => 1, + (None, None) => 0, + } +} + +/// \memberof AMactorId +/// \brief Allocates a new actor identifier and initializes it with a random +/// UUID. /// /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMactorId` struct. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
#[no_mangle] pub unsafe extern "C" fn AMactorIdInit() -> *mut AMresult { to_result(Ok::(am::ActorId::random())) } /// \memberof AMactorId -/// \brief Allocates a new actor ID and initializes it from a sequence of -/// bytes. +/// \brief Allocates a new actor identifier and initializes it from a sequence +/// of bytes. /// /// \param[in] src A pointer to a contiguous sequence of bytes. /// \param[in] count The number of bytes to copy from \p src. -/// \pre `0 <=` \p count `<=` size of \p src. +/// \pre `0 <=` \p count` <= `size of \p src. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMactorId` struct. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety /// src must be a byte array of size `>= count` #[no_mangle] @@ -94,16 +128,15 @@ pub unsafe extern "C" fn AMactorIdInitBytes(src: *const u8, count: usize) -> *mu } /// \memberof AMactorId -/// \brief Allocates a new actor ID and initializes it from a hexadecimal -/// string. +/// \brief Allocates a new actor identifier and initializes it from a +/// hexadecimal string. /// /// \param[in] hex_str A UTF-8 string. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMactorId` struct. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety /// hex_str must be a null-terminated array of `c_char` #[no_mangle] @@ -114,15 +147,15 @@ pub unsafe extern "C" fn AMactorIdInitStr(hex_str: *const c_char) -> *mut AMresu } /// \memberof AMactorId -/// \brief Gets the value of an actor ID as a hexadecimal string. 
+/// \brief Gets the value of an actor identifier as a hexadecimal string. /// /// \param[in] actor_id A pointer to an `AMactorId` struct. -/// \pre \p actor_id must be a valid address. +/// \pre \p actor_id` != NULL`. /// \return A UTF-8 string. /// \internal /// /// # Safety -/// actor_id must be a pointer to a valid AMactorId +/// actor_id must be a valid pointer to an AMactorId #[no_mangle] pub unsafe extern "C" fn AMactorIdStr(actor_id: *const AMactorId) -> *const c_char { match actor_id.as_ref() { diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 1da314c9..92f04598 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -9,13 +9,13 @@ use crate::obj::AMobjId; use crate::result::{to_result, AMresult}; use crate::sync::{to_sync_message, AMsyncMessage, AMsyncState}; -mod list; -mod map; +pub mod list; +pub mod map; mod utils; use crate::changes::AMchanges; use crate::doc::utils::to_str; -use crate::doc::utils::{to_actor_id, to_doc, to_doc_const, to_obj_id}; +use crate::doc::utils::{to_actor_id, to_doc, to_doc_mut, to_obj_id}; macro_rules! to_changes { ($handle:expr) => {{ @@ -73,22 +73,21 @@ impl DerefMut for AMdoc { /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] changes A pointer to an `AMchanges` struct. -/// \pre \p doc must be a valid address. -/// \pre \p changes must be a valid address. +/// \pre \p doc` != NULL`. +/// \pre \p changes` != NULL`. /// \return A pointer to an `AMresult` struct containing a void. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// changes must be a pointer to a valid AMchanges. +/// doc must be a valid pointer to an AMdoc +/// changes must be a valid pointer to an AMchanges. 
#[no_mangle] pub unsafe extern "C" fn AMapplyChanges( doc: *mut AMdoc, changes: *const AMchanges, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let changes = to_changes!(changes); to_result(doc.apply_changes(changes.as_ref().to_vec())) } @@ -98,8 +97,8 @@ pub unsafe extern "C" fn AMapplyChanges( /// /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMdoc` struct. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. #[no_mangle] pub extern "C" fn AMcreate() -> *mut AMresult { to_result(am::AutoCommit::new()) @@ -114,20 +113,19 @@ pub extern "C" fn AMcreate() -> *mut AMresult { /// \param[in] time A pointer to a `time_t` value or `NULL`. /// \return A pointer to an `AMresult` struct containing a change hash as an /// `AMbyteSpan` struct. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc +/// doc must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMcommit( doc: *mut AMdoc, message: *const c_char, time: *const libc::time_t, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let mut options = CommitOptions::default(); if !message.is_null() { options.set_message(to_str(message)); @@ -145,16 +143,15 @@ pub unsafe extern "C" fn AMcommit( /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMdoc` struct. -/// \pre \p doc must be a valid address. 
-/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc +/// doc must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMdup(doc: *const AMdoc) -> *mut AMresult { - let doc = to_doc_const!(doc); + let doc = to_doc!(doc); to_result(doc.as_ref().clone()) } @@ -164,14 +161,14 @@ pub unsafe extern "C" fn AMdup(doc: *const AMdoc) -> *mut AMresult { /// /// \param[in,out] doc1 An `AMdoc` struct. /// \param[in,out] doc2 An `AMdoc` struct. -/// \return `true` if \p doc1 `==` \p doc2 and `false` otherwise. -/// \pre \p doc1 must be a valid address. -/// \pre \p doc2 must be a valid address. +/// \return `true` if \p doc1` == `\p doc2 and `false` otherwise. +/// \pre \p doc1` != NULL`. +/// \pre \p doc2` != NULL`. /// \internal /// /// #Safety -/// doc1 must be a pointer to a valid AMdoc -/// doc2 must be a pointer to a valid AMdoc +/// doc1 must be a valid pointer to an AMdoc +/// doc2 must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { match (doc1.as_mut(), doc2.as_mut()) { @@ -180,6 +177,24 @@ pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { } } +/// \memberof AMdoc +/// \brief Forks this document at the current point for use by a different +/// actor. +/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing a pointer to an +/// `AMdoc` struct. +/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
+/// \internal +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMfork(doc: *mut AMdoc) -> *mut AMresult { + let doc = to_doc_mut!(doc); + to_result(doc.fork()) +} + /// \memberof AMdoc /// \brief Generates a synchronization message for a peer based upon the given /// synchronization state. @@ -189,40 +204,39 @@ pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { /// \return A pointer to an `AMresult` struct containing either a pointer to an /// `AMsyncMessage` struct or a void. /// \pre \p doc must b e a valid address. -/// \pre \p sync_state must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p sync_state` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// sync_state must be a pointer to a valid AMsyncState +/// doc must be a valid pointer to an AMdoc +/// sync_state must be a valid pointer to an AMsyncState #[no_mangle] pub unsafe extern "C" fn AMgenerateSyncMessage( doc: *mut AMdoc, sync_state: *mut AMsyncState, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let sync_state = to_sync_state_mut!(sync_state); to_result(doc.generate_sync_message(sync_state.as_mut())) } /// \memberof AMdoc -/// \brief Gets an `AMdoc` struct's actor ID value as an array of bytes. +/// \brief Gets a document's actor identifier. /// /// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an actor ID as an -/// `AMbyteSpan` struct. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \return A pointer to an `AMresult` struct containing a pointer to an +/// `AMactorId` struct. 
+/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal /// /// # Safety -/// doc must be a pointer to a valid AMdoc +/// doc must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMgetActor(doc: *const AMdoc) -> *mut AMresult { - let doc = to_doc_const!(doc); + let doc = to_doc!(doc); to_result(Ok::( doc.get_actor().clone(), )) @@ -234,19 +248,18 @@ pub unsafe extern "C" fn AMgetActor(doc: *const AMdoc) -> *mut AMresult { /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] have_deps A pointer to an `AMchangeHashes` struct or `NULL`. /// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc +/// doc must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMgetChanges( doc: *mut AMdoc, have_deps: *const AMchangeHashes, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let empty_deps = Vec::::new(); let have_deps = match have_deps.as_ref() { Some(have_deps) => have_deps.as_ref(), @@ -262,19 +275,18 @@ pub unsafe extern "C" fn AMgetChanges( /// \param[in,out] doc1 An `AMdoc` struct. /// \param[in,out] doc2 An `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. -/// \pre \p doc1 must be a valid address. -/// \pre \p doc2 must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc1` != NULL`. +/// \pre \p doc2` != NULL`. 
+/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc1 must be a pointer to a valid AMdoc -/// doc2 must be a pointer to a valid AMdoc +/// doc1 must be a valid pointer to an AMdoc +/// doc2 must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMgetChangesAdded(doc1: *mut AMdoc, doc2: *mut AMdoc) -> *mut AMresult { - let doc1 = to_doc!(doc1); - let doc2 = to_doc!(doc2); + let doc1 = to_doc_mut!(doc1); + let doc2 = to_doc_mut!(doc2); to_result(doc1.get_changes_added(doc2)) } @@ -284,16 +296,15 @@ pub unsafe extern "C" fn AMgetChangesAdded(doc1: *mut AMdoc, doc2: *mut AMdoc) - /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// struct. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc +/// doc must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(Ok::, am::AutomergeError>( doc.get_heads(), )) @@ -307,20 +318,19 @@ pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { /// \param[in] heads A pointer to an `AMchangeHashes` struct or `NULL`. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// struct. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. 
+/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// heads must be a pointer to a valid AMchangeHashes or NULL +/// doc must be a valid pointer to an AMdoc +/// heads must be a valid pointer to an AMchangeHashes or NULL #[no_mangle] pub unsafe extern "C" fn AMgetMissingDeps( doc: *mut AMdoc, heads: *const AMchangeHashes, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let empty_heads = Vec::::new(); let heads = match heads.as_ref() { Some(heads) => heads.as_ref(), @@ -335,16 +345,15 @@ pub unsafe extern "C" fn AMgetMissingDeps( /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing either an `AMchange` /// struct or a void. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc +/// doc must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.get_last_local_change()) } @@ -354,24 +363,23 @@ pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] heads A pointer to an `AMchangeHashes` struct for historical -/// keys or `NULL` for current keys. -/// \return A pointer to an `AMresult` struct containing an `AMstrings` struct. -/// \pre \p doc must be a valid address. 
-/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// keys or `NULL` for current keys. +/// \return A pointer to an `AMresult` struct containing an `AMstrs` struct. +/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// heads must be a pointer to a valid AMchangeHashes or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL +/// heads must be a valid pointer to an AMchangeHashes or NULL #[no_mangle] pub unsafe extern "C" fn AMkeys( doc: *const AMdoc, obj_id: *const AMobjId, heads: *const AMchangeHashes, ) -> *mut AMresult { - let doc = to_doc_const!(doc); + let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); match heads.as_ref() { None => to_result(doc.keys(obj_id)), @@ -387,12 +395,11 @@ pub unsafe extern "C" fn AMkeys( /// \param[in] count The number of bytes in \p src to load. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMdoc` struct. -/// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` size of \p src. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p src` != NULL`. +/// \pre `0 <=` \p count` <= `size of \p src. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety /// src must be a byte array of size `>= count` #[no_mangle] @@ -410,15 +417,14 @@ pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMresult { /// \param[in] count The number of bytes in \p src to load. /// \return A pointer to an `AMresult` struct containing the number of /// operations loaded from \p src. 
-/// \pre \p doc must be a valid address. -/// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` size of \p src. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p src` != NULL`. +/// \pre `0 <=` \p count` <= `size of \p src. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc +/// doc must be a valid pointer to an AMdoc /// src must be a byte array of size `>= count` #[no_mangle] pub unsafe extern "C" fn AMloadIncremental( @@ -426,7 +432,7 @@ pub unsafe extern "C" fn AMloadIncremental( src: *const u8, count: usize, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let mut data = Vec::new(); data.extend_from_slice(std::slice::from_raw_parts(src, count)); to_result(doc.load_incremental(&data)) @@ -440,19 +446,18 @@ pub unsafe extern "C" fn AMloadIncremental( /// \param[in,out] src A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// struct. -/// \pre \p dest must be a valid address. -/// \pre \p src must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p dest` != NULL`. +/// \pre \p src` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// dest must be a pointer to a valid AMdoc -/// src must be a pointer to a valid AMdoc +/// dest must be a valid pointer to an AMdoc +/// src must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMresult { - let dest = to_doc!(dest); - to_result(dest.merge(to_doc!(src))) + let dest = to_doc_mut!(dest); + to_result(dest.merge(to_doc_mut!(src))) } /// \memberof AMdoc @@ -463,13 +468,13 @@ pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMre /// \param[in] heads A pointer to an `AMchangeHashes` struct for historical /// size or `NULL` for current size. /// \return A 64-bit unsigned integer. -/// \pre \p doc must be a valid address. +/// \pre \p doc` != NULL`. /// \internal /// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// heads must be a pointer to a valid AMchangeHashes or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL +/// heads must be a valid pointer to an AMchangeHashes or NULL #[no_mangle] pub unsafe extern "C" fn AMobjSize( doc: *const AMdoc, @@ -487,17 +492,48 @@ pub unsafe extern "C" fn AMobjSize( } } +/// \memberof AMdoc +/// \brief Gets the current or historical values of an object within the given +/// range. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical +/// items or `NULL` for current items. +/// \return A pointer to an `AMresult` struct containing an `AMobjItems` struct. +/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
+/// \internal +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL +/// heads must be a valid pointer to an AMchangeHashes or NULL +#[no_mangle] +pub unsafe extern "C" fn AMobjValues( + doc: *const AMdoc, + obj_id: *const AMobjId, + heads: *const AMchangeHashes, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + match heads.as_ref() { + None => to_result(doc.values(obj_id)), + Some(heads) => to_result(doc.values_at(obj_id, heads.as_ref())), + } +} + /// \memberof AMdoc /// \brief Gets the number of pending operations added during a document's /// current transaction. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \return The count of pending operations for \p doc. -/// \pre \p doc must be a valid address. +/// \pre \p doc` != NULL`. /// \internal /// /// # Safety -/// doc must be a pointer to a valid AMdoc +/// doc must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMpendingOps(doc: *const AMdoc) -> usize { if let Some(doc) = doc.as_ref() { @@ -515,22 +551,22 @@ pub unsafe extern "C" fn AMpendingOps(doc: *const AMdoc) -> usize { /// \param[in,out] sync_state A pointer to an `AMsyncState` struct. /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p sync_state must be a valid address. -/// \pre \p sync_message must be a valid address. +/// \pre \p doc` != NULL`. +/// \pre \p sync_state` != NULL`. +/// \pre \p sync_message` != NULL`. 
/// \internal /// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// sync_state must be a pointer to a valid AMsyncState -/// sync_message must be a pointer to a valid AMsyncMessage +/// doc must be a valid pointer to an AMdoc +/// sync_state must be a valid pointer to an AMsyncState +/// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] pub unsafe extern "C" fn AMreceiveSyncMessage( doc: *mut AMdoc, sync_state: *mut AMsyncState, sync_message: *const AMsyncMessage, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let sync_state = to_sync_state_mut!(sync_state); let sync_message = to_sync_message!(sync_message); to_result(doc.receive_sync_message(sync_state.as_mut(), sync_message.as_ref().clone())) @@ -542,11 +578,11 @@ pub unsafe extern "C" fn AMreceiveSyncMessage( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return The count of pending operations for \p doc that were cancelled. -/// \pre \p doc must be a valid address. +/// \pre \p doc` != NULL`. /// \internal /// /// # Safety -/// doc must be a pointer to a valid AMdoc +/// doc must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMrollback(doc: *mut AMdoc) -> usize { if let Some(doc) = doc.as_mut() { @@ -562,16 +598,15 @@ pub unsafe extern "C" fn AMrollback(doc: *mut AMdoc) -> usize { /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc +/// doc must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(Ok(doc.save())) } @@ -582,37 +617,35 @@ pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc +/// doc must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMsaveIncremental(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(Ok(doc.save_incremental())) } /// \memberof AMdoc -/// \brief Puts the actor ID value of a document. +/// \brief Puts the actor identifier of a document. /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] actor_id A pointer to an `AMactorId` struct. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p actor_id must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p actor_id` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// actor_id must be a pointer to a valid AMactorId +/// doc must be a valid pointer to an AMdoc +/// actor_id must be a valid pointer to an AMactorId #[no_mangle] pub unsafe extern "C" fn AMsetActor(doc: *mut AMdoc, actor_id: *const AMactorId) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let actor_id = to_actor_id!(actor_id); doc.set_actor(actor_id.as_ref().clone()); to_result(Ok(())) @@ -628,16 +661,15 @@ pub unsafe extern "C" fn AMsetActor(doc: *mut AMdoc, actor_id: *const AMactorId) /// \param[in] del The number of characters to delete. /// \param[in] text A UTF-8 string. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the text object identified by \p obj_id. -/// \pre \p text must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre `0 <=` \p index` <= `length of the text object identified by \p obj_id. +/// \pre \p text` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// text must be a null-terminated array of `c_char` #[no_mangle] pub unsafe extern "C" fn AMspliceText( @@ -647,7 +679,7 @@ pub unsafe extern "C" fn AMspliceText( del: usize, text: *const c_char, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.splice_text(to_obj_id!(obj_id), index, del, &to_str(text))) } @@ -659,22 +691,21 @@ pub unsafe extern "C" fn AMspliceText( /// \param[in] heads A pointer to an `AMchangeHashes` struct for historical /// keys or `NULL` for current keys. /// \return A pointer to an `AMresult` struct containing a UTF-8 string. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL -/// heads must be a pointer to a valid AMchangeHashes or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL +/// heads must be a valid pointer to an AMchangeHashes or NULL #[no_mangle] pub unsafe extern "C" fn AMtext( doc: *const AMdoc, obj_id: *const AMobjId, heads: *const AMchangeHashes, ) -> *mut AMresult { - let doc = to_doc_const!(doc); + let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); match heads.as_ref() { None => to_result(doc.text(obj_id)), diff --git a/automerge-c/src/doc/utils.rs b/automerge-c/src/doc/utils.rs index 84203a20..42a69b56 100644 --- a/automerge-c/src/doc/utils.rs +++ b/automerge-c/src/doc/utils.rs @@ -15,7 +15,7 @@ pub(crate) use to_actor_id; macro_rules! to_doc { ($handle:expr) => {{ - let handle = $handle.as_mut(); + let handle = $handle.as_ref(); match handle { Some(b) => b, None => return AMresult::err("Invalid AMdoc pointer").into(), @@ -25,9 +25,9 @@ macro_rules! to_doc { pub(crate) use to_doc; -macro_rules! to_doc_const { +macro_rules! to_doc_mut { ($handle:expr) => {{ - let handle = $handle.as_ref(); + let handle = $handle.as_mut(); match handle { Some(b) => b, None => return AMresult::err("Invalid AMdoc pointer").into(), @@ -35,7 +35,7 @@ macro_rules! to_doc_const { }}; } -pub(crate) use to_doc_const; +pub(crate) use to_doc_mut; macro_rules! to_obj_id { ($handle:expr) => {{ diff --git a/automerge-c/src/obj.rs b/automerge-c/src/obj.rs index f038a8e7..77a4c6eb 100644 --- a/automerge-c/src/obj.rs +++ b/automerge-c/src/obj.rs @@ -1,19 +1,47 @@ use automerge as am; +use std::cell::RefCell; use std::ops::Deref; +use crate::actor_id::AMactorId; + +pub mod item; +pub mod items; + /// \struct AMobjId /// \brief An object's unique identifier. 
-pub struct AMobjId(am::ObjId); +#[derive(PartialEq)] +pub struct AMobjId { + body: am::ObjId, + c_actor_id: RefCell>, +} impl AMobjId { - pub fn new(obj_id: am::ObjId) -> Self { - Self(obj_id) + pub fn new(body: am::ObjId) -> Self { + Self { + body, + c_actor_id: RefCell::>::default(), + } + } + + pub fn actor_id(&self) -> *const AMactorId { + let mut c_actor_id = self.c_actor_id.borrow_mut(); + match c_actor_id.as_mut() { + None => { + if let am::ObjId::Id(_, actor_id, _) = &self.body { + return c_actor_id.insert(AMactorId::new(actor_id)); + } + } + Some(value) => { + return value; + } + } + std::ptr::null() } } impl AsRef for AMobjId { fn as_ref(&self) -> &am::ObjId { - &self.0 + &self.body } } @@ -21,7 +49,90 @@ impl Deref for AMobjId { type Target = am::ObjId; fn deref(&self) -> &Self::Target { - &self.0 + &self.body + } +} + +/// \memberof AMobjId +/// \brief Gets the actor identifier of an object identifier. +/// +/// \param[in] obj_id A pointer to an `AMobjId` struct. +/// \return A pointer to an `AMactorId` struct or `NULL`. +/// \pre \p obj_id` != NULL`. +/// \internal +/// +/// # Safety +/// obj_id must be a valid pointer to an AMobjId +#[no_mangle] +pub unsafe extern "C" fn AMobjIdActorId(obj_id: *const AMobjId) -> *const AMactorId { + if let Some(obj_id) = obj_id.as_ref() { + return obj_id.actor_id(); + }; + std::ptr::null() +} + +/// \memberof AMobjId +/// \brief Gets the counter of an object identifier. +/// +/// \param[in] obj_id A pointer to an `AMobjId` struct. +/// \return A 64-bit unsigned integer. +/// \pre \p obj_id` != NULL`. +/// \internal +/// +/// # Safety +/// obj_id must be a valid pointer to an AMobjId +#[no_mangle] +pub unsafe extern "C" fn AMobjIdCounter(obj_id: *const AMobjId) -> u64 { + if let Some(obj_id) = obj_id.as_ref() { + match obj_id.as_ref() { + am::ObjId::Id(counter, _, _) => *counter, + am::ObjId::Root => 0, + } + } else { + u64::MAX + } +} + +/// \memberof AMobjId +/// \brief Tests the equality of two object identifiers. 
+/// +/// \param[in] obj_id1 A pointer to an `AMobjId` struct. +/// \param[in] obj_id2 A pointer to an `AMobjId` struct. +/// \return `true` if \p obj_id1` == `\p obj_id2 and `false` otherwise. +/// \pre \p obj_id1` != NULL`. +/// \pre \p obj_id2` != NULL`. +/// \internal +/// +/// #Safety +/// obj_id1 must be a valid AMobjId pointer +/// obj_id2 must be a valid AMobjId pointer +#[no_mangle] +pub unsafe extern "C" fn AMobjIdEqual(obj_id1: *const AMobjId, obj_id2: *const AMobjId) -> bool { + match (obj_id1.as_ref(), obj_id2.as_ref()) { + (Some(obj_id1), Some(obj_id2)) => obj_id1 == obj_id2, + (None, Some(_)) | (Some(_), None) | (None, None) => false, + } +} + +/// \memberof AMobjId +/// \brief Gets the index of an object identifier. +/// +/// \param[in] obj_id A pointer to an `AMobjId` struct. +/// \return A 64-bit unsigned integer. +/// \pre \p obj_id` != NULL`. +/// \internal +/// +/// # Safety +/// obj_id must be a valid pointer to an AMobjId +#[no_mangle] +pub unsafe extern "C" fn AMobjIdIndex(obj_id: *const AMobjId) -> usize { + if let Some(obj_id) = obj_id.as_ref() { + match obj_id.as_ref() { + am::ObjId::Id(_, _, index) => *index, + am::ObjId::Root => 0, + } + } else { + usize::MAX } } diff --git a/automerge-c/src/obj/item.rs b/automerge-c/src/obj/item.rs new file mode 100644 index 00000000..38bac2d8 --- /dev/null +++ b/automerge-c/src/obj/item.rs @@ -0,0 +1,75 @@ +use automerge as am; +use std::cell::RefCell; +use std::ffi::CString; + +use crate::obj::AMobjId; +use crate::result::AMvalue; + +/// \enum AMobjItem +/// \brief An item in an object. +#[repr(C)] +pub struct AMobjItem { + /// The object identifier of an item in an object. + obj_id: AMobjId, + /// The value of an item in an object. 
+ value: (am::Value<'static>, RefCell>), +} + +impl AMobjItem { + pub fn new(value: am::Value<'static>, obj_id: am::ObjId) -> Self { + Self { + obj_id: AMobjId::new(obj_id), + value: (value, RefCell::>::default()), + } + } +} + +impl PartialEq for AMobjItem { + fn eq(&self, other: &Self) -> bool { + self.obj_id == other.obj_id && self.value.0 == other.value.0 + } +} + +impl From<&AMobjItem> for (am::Value<'static>, am::ObjId) { + fn from(obj_item: &AMobjItem) -> Self { + (obj_item.value.0.clone(), obj_item.obj_id.as_ref().clone()) + } +} + +/// \memberof AMobjItem +/// \brief Gets the object identifier of an item in an object. +/// +/// \param[in] obj_item A pointer to an `AMobjItem` struct. +/// \return A pointer to an `AMobjId` struct. +/// \pre \p obj_item` != NULL`. +/// \internal +/// +/// # Safety +/// obj_item must be a valid pointer to an AMobjItem +#[no_mangle] +pub unsafe extern "C" fn AMobjItemObjId(obj_item: *const AMobjItem) -> *const AMobjId { + if let Some(obj_item) = obj_item.as_ref() { + &obj_item.obj_id + } else { + std::ptr::null() + } +} + +/// \memberof AMobjItem +/// \brief Gets the value of an item in an object. +/// +/// \param[in] obj_item A pointer to an `AMobjItem` struct. +/// \return An `AMvalue` struct. +/// \pre \p obj_item` != NULL`. 
+/// \internal +/// +/// # Safety +/// obj_item must be a valid pointer to an AMobjItem +#[no_mangle] +pub unsafe extern "C" fn AMobjItemValue<'a>(obj_item: *const AMobjItem) -> AMvalue<'a> { + if let Some(obj_item) = obj_item.as_ref() { + (&obj_item.value.0, &obj_item.value.1).into() + } else { + AMvalue::Void + } +} diff --git a/automerge-c/src/obj/items.rs b/automerge-c/src/obj/items.rs new file mode 100644 index 00000000..ae6edb3e --- /dev/null +++ b/automerge-c/src/obj/items.rs @@ -0,0 +1,340 @@ +use std::ffi::c_void; +use std::mem::size_of; + +use crate::obj::item::AMobjItem; + +#[repr(C)] +struct Detail { + len: usize, + offset: isize, + ptr: *const c_void, +} + +/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call +/// (https://github.com/eqrion/cbindgen/issues/252) but it will +/// propagate the name of a constant initialized from it so if the +/// constant's name is a symbolic representation of the value it can be +/// converted into a number by post-processing the header it generated. +pub const USIZE_USIZE_USIZE_: usize = size_of::(); + +impl Detail { + fn new(obj_items: &[AMobjItem], offset: isize) -> Self { + Self { + len: obj_items.len(), + offset, + ptr: obj_items.as_ptr() as *const c_void, + } + } + + pub fn advance(&mut self, n: isize) { + if n == 0 { + return; + } + let len = self.len as isize; + self.offset = if self.offset < 0 { + // It's reversed. + let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); + if unclipped >= 0 { + // Clip it to the forward stop. + len + } else { + std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) + } + } else { + let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); + if unclipped < 0 { + // Clip it to the reverse stop. 
+ -(len + 1) + } else { + std::cmp::max(0, std::cmp::min(unclipped, len)) + } + } + } + + pub fn get_index(&self) -> usize { + (self.offset + + if self.offset < 0 { + self.len as isize + } else { + 0 + }) as usize + } + + pub fn next(&mut self, n: isize) -> Option<&AMobjItem> { + if self.is_stopped() { + return None; + } + let slice: &[AMobjItem] = + unsafe { std::slice::from_raw_parts(self.ptr as *const AMobjItem, self.len) }; + let value = &slice[self.get_index()]; + self.advance(n); + Some(value) + } + + pub fn is_stopped(&self) -> bool { + let len = self.len as isize; + self.offset < -len || self.offset == len + } + + pub fn prev(&mut self, n: isize) -> Option<&AMobjItem> { + self.advance(-n); + if self.is_stopped() { + return None; + } + let slice: &[AMobjItem] = + unsafe { std::slice::from_raw_parts(self.ptr as *const AMobjItem, self.len) }; + Some(&slice[self.get_index()]) + } + + pub fn reversed(&self) -> Self { + Self { + len: self.len, + offset: -(self.offset + 1), + ptr: self.ptr, + } + } + + pub fn rewound(&self) -> Self { + Self { + len: self.len, + offset: if self.offset < 0 { -1 } else { 0 }, + ptr: self.ptr, + } + } +} + +impl From for [u8; USIZE_USIZE_USIZE_] { + fn from(detail: Detail) -> Self { + unsafe { + std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) + .try_into() + .unwrap() + } + } +} + +/// \struct AMobjItems +/// \brief A random-access iterator over a sequence of object items. +#[repr(C)] +#[derive(PartialEq)] +pub struct AMobjItems { + /// An implementation detail that is intentionally opaque. + /// \warning Modifying \p detail will cause undefined behavior. + /// \note The actual size of \p detail will vary by platform, this is just + /// the one for the platform this documentation was built on. 
+ detail: [u8; USIZE_USIZE_USIZE_], +} + +impl AMobjItems { + pub fn new(obj_items: &[AMobjItem]) -> Self { + Self { + detail: Detail::new(obj_items, 0).into(), + } + } + + pub fn advance(&mut self, n: isize) { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.advance(n); + } + + pub fn len(&self) -> usize { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + detail.len + } + + pub fn next(&mut self, n: isize) -> Option<&AMobjItem> { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.next(n) + } + + pub fn prev(&mut self, n: isize) -> Option<&AMobjItem> { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.prev(n) + } + + pub fn reversed(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.reversed().into(), + } + } + + pub fn rewound(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.rewound().into(), + } + } +} + +impl AsRef<[AMobjItem]> for AMobjItems { + fn as_ref(&self) -> &[AMobjItem] { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + unsafe { std::slice::from_raw_parts(detail.ptr as *const AMobjItem, detail.len) } + } +} + +impl Default for AMobjItems { + fn default() -> Self { + Self { + detail: [0; USIZE_USIZE_USIZE_], + } + } +} + +/// \memberof AMobjItems +/// \brief Advances an iterator over a sequence of object items by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction. +/// +/// \param[in,out] obj_items A pointer to an `AMobjItems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \pre \p obj_items` != NULL`. 
+/// \internal +/// +/// #Safety +/// obj_items must be a valid pointer to an AMobjItems +#[no_mangle] +pub unsafe extern "C" fn AMobjItemsAdvance(obj_items: *mut AMobjItems, n: isize) { + if let Some(obj_items) = obj_items.as_mut() { + obj_items.advance(n); + }; +} + +/// \memberof AMobjItems +/// \brief Tests the equality of two sequences of object items underlying a +/// pair of iterators. +/// +/// \param[in] obj_items1 A pointer to an `AMobjItems` struct. +/// \param[in] obj_items2 A pointer to an `AMobjItems` struct. +/// \return `true` if \p obj_items1` == `\p obj_items2 and `false` otherwise. +/// \pre \p obj_items1` != NULL`. +/// \pre \p obj_items2` != NULL`. +/// \internal +/// +/// #Safety +/// obj_items1 must be a valid pointer to an AMobjItems +/// obj_items2 must be a valid pointer to an AMobjItems +#[no_mangle] +pub unsafe extern "C" fn AMobjItemsEqual( + obj_items1: *const AMobjItems, + obj_items2: *const AMobjItems, +) -> bool { + match (obj_items1.as_ref(), obj_items2.as_ref()) { + (Some(obj_items1), Some(obj_items2)) => obj_items1.as_ref() == obj_items2.as_ref(), + (None, Some(_)) | (Some(_), None) | (None, None) => false, + } +} + +/// \memberof AMobjItems +/// \brief Gets the object item at the current position of an iterator over a +/// sequence of object items and then advances it by at most \p |n| +/// positions where the sign of \p n is relative to the iterator's +/// direction. +/// +/// \param[in,out] obj_items A pointer to an `AMobjItems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \return A pointer to an `AMobjItem` struct that's `NULL` when \p obj_items +/// was previously advanced past its forward/reverse limit. +/// \pre \p obj_items` != NULL`. 
+/// \internal +/// +/// #Safety +/// obj_items must be a valid pointer to an AMobjItems +#[no_mangle] +pub unsafe extern "C" fn AMobjItemsNext(obj_items: *mut AMobjItems, n: isize) -> *const AMobjItem { + if let Some(obj_items) = obj_items.as_mut() { + if let Some(obj_item) = obj_items.next(n) { + return obj_item; + } + } + std::ptr::null() +} + +/// \memberof AMobjItems +/// \brief Advances an iterator over a sequence of object items by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction and then gets the object item at its new +/// position. +/// +/// \param[in,out] obj_items A pointer to an `AMobjItems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \return A pointer to an `AMobjItem` struct that's `NULL` when \p obj_items +/// is presently advanced past its forward/reverse limit. +/// \pre \p obj_items` != NULL`. +/// \internal +/// +/// #Safety +/// obj_items must be a valid pointer to an AMobjItems +#[no_mangle] +pub unsafe extern "C" fn AMobjItemsPrev(obj_items: *mut AMobjItems, n: isize) -> *const AMobjItem { + if let Some(obj_items) = obj_items.as_mut() { + if let Some(obj_item) = obj_items.prev(n) { + return obj_item; + } + } + std::ptr::null() +} + +/// \memberof AMobjItems +/// \brief Gets the size of the sequence of object items underlying an +/// iterator. +/// +/// \param[in] obj_items A pointer to an `AMobjItems` struct. +/// \return The count of values in \p obj_items. +/// \pre \p obj_items` != NULL`. 
+/// \internal +/// +/// #Safety +/// obj_items must be a valid pointer to an AMobjItems +#[no_mangle] +pub unsafe extern "C" fn AMobjItemsSize(obj_items: *const AMobjItems) -> usize { + if let Some(obj_items) = obj_items.as_ref() { + obj_items.len() + } else { + 0 + } +} + +/// \memberof AMobjItems +/// \brief Creates an iterator over the same sequence of object items as the +/// given one but with the opposite position and direction. +/// +/// \param[in] obj_items A pointer to an `AMobjItems` struct. +/// \return An `AMobjItems` struct +/// \pre \p obj_items` != NULL`. +/// \internal +/// +/// #Safety +/// obj_items must be a valid pointer to an AMobjItems +#[no_mangle] +pub unsafe extern "C" fn AMobjItemsReversed(obj_items: *const AMobjItems) -> AMobjItems { + if let Some(obj_items) = obj_items.as_ref() { + obj_items.reversed() + } else { + AMobjItems::default() + } +} + +/// \memberof AMobjItems +/// \brief Creates an iterator at the starting position over the same sequence +/// of object items as the given one. +/// +/// \param[in] obj_items A pointer to an `AMobjItems` struct. +/// \return An `AMobjItems` struct +/// \pre \p obj_items` != NULL`. +/// \internal +/// +/// #Safety +/// obj_items must be a valid pointer to an AMobjItems +#[no_mangle] +pub unsafe extern "C" fn AMobjItemsRewound(obj_items: *const AMobjItems) -> AMobjItems { + if let Some(obj_items) = obj_items.as_ref() { + obj_items.rewound() + } else { + AMobjItems::default() + } +} From eba18d1ad62e4ff7f00b655d755233ce15b97c2a Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sun, 24 Jul 2022 22:41:32 -0700 Subject: [PATCH 506/730] Add `heads` argument to `AMlistGet()` to expose `automerge::AutoCommit::get_at()`. Add `AMlistRange()` to expose `automerge::AutoCommit::list_range()` and `automerge::AutoCommit::list_range_at()`. Add `AMlistItems` for `AMlistRange()`. Add `AMlistItem` for `AMlistItems`. 
--- automerge-c/src/doc/list.rs | 271 +++++++++++++---------- automerge-c/src/doc/list/item.rs | 99 +++++++++ automerge-c/src/doc/list/items.rs | 347 ++++++++++++++++++++++++++++++ 3 files changed, 606 insertions(+), 111 deletions(-) create mode 100644 automerge-c/src/doc/list/item.rs create mode 100644 automerge-c/src/doc/list/items.rs diff --git a/automerge-c/src/doc/list.rs b/automerge-c/src/doc/list.rs index 029a8b2e..15287ae0 100644 --- a/automerge-c/src/doc/list.rs +++ b/automerge-c/src/doc/list.rs @@ -2,10 +2,23 @@ use automerge as am; use automerge::transaction::Transactable; use std::os::raw::c_char; -use crate::doc::{to_doc, to_doc_const, to_obj_id, to_str, AMdoc}; +use crate::change_hashes::AMchangeHashes; +use crate::doc::{to_doc, to_doc_mut, to_obj_id, to_str, AMdoc}; use crate::obj::{AMobjId, AMobjType}; use crate::result::{to_result, AMresult}; +pub mod item; +pub mod items; + +macro_rules! to_range { + ($begin:expr, $end:expr) => {{ + if $begin > $end { + return AMresult::err(&format!("Invalid range [{}-{})", $begin, $end)).into(); + }; + ($begin..$end) + }}; +} + /// \memberof AMdoc /// \brief Deletes an index in a list object. /// @@ -13,49 +26,55 @@ use crate::result::{to_result, AMresult}; /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistDelete( doc: *mut AMdoc, obj_id: *const AMobjId, index: usize, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.delete(to_obj_id!(obj_id), index)) } /// \memberof AMdoc -/// \brief Gets the value at an index in a list object. +/// \brief Gets the current or historical value at an index in a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index within the list object identified by \p obj_id. +/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical +/// value or `NULL` for the current value. /// \return A pointer to an `AMresult` struct. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL +/// heads must be a valid pointer to an AMchangeHashes or NULL #[no_mangle] pub unsafe extern "C" fn AMlistGet( doc: *const AMdoc, obj_id: *const AMobjId, index: usize, + heads: *const AMchangeHashes, ) -> *mut AMresult { - let doc = to_doc_const!(doc); - to_result(doc.get(to_obj_id!(obj_id), index)) + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + match heads.as_ref() { + None => to_result(doc.get(obj_id, index)), + Some(heads) => to_result(doc.get_at(obj_id, index, heads.as_ref())), + } } /// \memberof AMdoc @@ -67,15 +86,14 @@ pub unsafe extern "C" fn AMlistGet( /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistIncrement( doc: *mut AMdoc, @@ -83,7 +101,7 @@ pub unsafe extern "C" fn AMlistIncrement( index: usize, value: i64, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.increment(to_obj_id!(obj_id), index, value)) } @@ -97,14 +115,13 @@ pub unsafe extern "C" fn AMlistIncrement( /// writing \p value over \p index. /// \param[in] value A boolean. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutBool( doc: *mut AMdoc, @@ -113,7 +130,7 @@ pub unsafe extern "C" fn AMlistPutBool( insert: bool, value: bool, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let value = am::ScalarValue::Boolean(value); to_result(if insert { @@ -134,17 +151,16 @@ pub unsafe extern "C" fn AMlistPutBool( /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes to copy from \p src. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. 
-/// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` size of \p src. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \pre \p src` != NULL`. +/// \pre `0 <=` \p count` <= `size of \p src. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// src must be a byte array of size `>= count` #[no_mangle] pub unsafe extern "C" fn AMlistPutBytes( @@ -155,7 +171,7 @@ pub unsafe extern "C" fn AMlistPutBytes( src: *const u8, count: usize, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let mut vec = Vec::new(); vec.extend_from_slice(std::slice::from_raw_parts(src, count)); @@ -176,15 +192,14 @@ pub unsafe extern "C" fn AMlistPutBytes( /// writing \p value over \p index. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutCounter( doc: *mut AMdoc, @@ -193,7 +208,7 @@ pub unsafe extern "C" fn AMlistPutCounter( insert: bool, value: i64, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let value = am::ScalarValue::Counter(value.into()); to_result(if insert { @@ -213,15 +228,14 @@ pub unsafe extern "C" fn AMlistPutCounter( /// writing \p value over \p index. /// \param[in] value A 64-bit float. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutF64( doc: *mut AMdoc, @@ -230,7 +244,7 @@ pub unsafe extern "C" fn AMlistPutF64( insert: bool, value: f64, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); to_result(if insert { doc.insert(obj_id, index, value) @@ -249,15 +263,14 @@ pub unsafe extern "C" fn AMlistPutF64( /// writing \p value over \p index. /// \param[in] value A 64-bit signed integer. 
/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutInt( doc: *mut AMdoc, @@ -266,7 +279,7 @@ pub unsafe extern "C" fn AMlistPutInt( insert: bool, value: i64, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); to_result(if insert { doc.insert(obj_id, index, value) @@ -284,15 +297,14 @@ pub unsafe extern "C" fn AMlistPutInt( /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutNull( doc: *mut AMdoc, @@ -300,7 +312,7 @@ pub unsafe extern "C" fn AMlistPutNull( index: usize, insert: bool, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let value = (); to_result(if insert { @@ -317,18 +329,18 @@ pub unsafe extern "C" fn AMlistPutNull( /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] index An index in the list object identified by \p obj_id. /// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. +/// writing \p value over \p index. /// \param[in] obj_type An `AMobjIdType` enum tag. -/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \return A pointer to an `AMresult` struct containing a pointer to an +/// `AMobjId` struct. +/// \pre \p doc` != NULL`. +/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutObject( doc: *mut AMdoc, @@ -337,7 +349,7 @@ pub unsafe extern "C" fn AMlistPutObject( insert: bool, obj_type: AMobjType, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let value = obj_type.into(); to_result(if insert { @@ -357,16 +369,15 @@ pub unsafe extern "C" fn AMlistPutObject( /// writing \p value over \p index. /// \param[in] value A UTF-8 string. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \pre \p value must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \pre \p value` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// value must be a null-terminated array of `c_char` #[no_mangle] pub unsafe extern "C" fn AMlistPutStr( @@ -376,7 +387,7 @@ pub unsafe extern "C" fn AMlistPutStr( insert: bool, value: *const c_char, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let value = to_str(value); to_result(if insert { @@ -396,15 +407,14 @@ pub unsafe extern "C" fn AMlistPutStr( /// writing \p value over \p index. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutTimestamp( doc: *mut AMdoc, @@ -413,7 +423,7 @@ pub unsafe extern "C" fn AMlistPutTimestamp( insert: bool, value: i64, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let value = am::ScalarValue::Timestamp(value); to_result(if insert { @@ -433,15 +443,14 @@ pub unsafe extern "C" fn AMlistPutTimestamp( /// writing \p value over \p index. 
/// \param[in] value A 64-bit unsigned integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj_id. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL #[no_mangle] pub unsafe extern "C" fn AMlistPutUint( doc: *mut AMdoc, @@ -450,7 +459,7 @@ pub unsafe extern "C" fn AMlistPutUint( insert: bool, value: u64, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); to_result(if insert { doc.insert(obj_id, index, value) @@ -458,3 +467,43 @@ pub unsafe extern "C" fn AMlistPutUint( doc.put(obj_id, index, value) }) } + +/// \memberof AMdoc +/// \brief Gets the current or historical indices and values of the list object +/// within the given range. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] begin The first index in a range of indices. +/// \param[in] end At least one past the last index in a range of indices. +/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical +/// indices and values or `NULL` for current indices and +/// values. +/// \return A pointer to an `AMresult` struct containing an `AMlistItems` +/// struct. +/// \pre \p doc` != NULL`. +/// \pre \p begin` <= `\p end. +/// \pre \p end` <= SIZE_MAX`. 
+/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. +/// \internal +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL +/// heads must be a valid pointer to an AMchangeHashes or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistRange( + doc: *const AMdoc, + obj_id: *const AMobjId, + begin: usize, + end: usize, + heads: *const AMchangeHashes, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let range = to_range!(begin, end); + match heads.as_ref() { + None => to_result(doc.list_range(obj_id, range)), + Some(heads) => to_result(doc.list_range_at(obj_id, range, heads.as_ref())), + } +} diff --git a/automerge-c/src/doc/list/item.rs b/automerge-c/src/doc/list/item.rs new file mode 100644 index 00000000..ac352620 --- /dev/null +++ b/automerge-c/src/doc/list/item.rs @@ -0,0 +1,99 @@ +use automerge as am; +use std::cell::RefCell; +use std::ffi::CString; + +use crate::obj::AMobjId; +use crate::result::AMvalue; + +/// \enum AMlistItem +/// \brief An item in a list object. +#[repr(C)] +pub struct AMlistItem { + /// The index of an item in a list object. + index: usize, + /// The object identifier of an item in a list object. + obj_id: AMobjId, + /// The value of an item in a list object. 
+ value: (am::Value<'static>, RefCell>), +} + +impl AMlistItem { + pub fn new(index: usize, value: am::Value<'static>, obj_id: am::ObjId) -> Self { + Self { + index, + obj_id: AMobjId::new(obj_id), + value: (value, RefCell::>::default()), + } + } +} + +impl PartialEq for AMlistItem { + fn eq(&self, other: &Self) -> bool { + self.index == other.index && self.obj_id == other.obj_id && self.value.0 == other.value.0 + } +} + +/* +impl From<&AMlistItem> for (usize, am::Value<'static>, am::ObjId) { + fn from(list_item: &AMlistItem) -> Self { + (list_item.index, list_item.value.0.clone(), list_item.obj_id.as_ref().clone()) + } +} +*/ + +/// \memberof AMlistItem +/// \brief Gets the index of an item in a list object. +/// +/// \param[in] list_item A pointer to an `AMlistItem` struct. +/// \return A 64-bit unsigned integer. +/// \pre \p list_item` != NULL`. +/// \internal +/// +/// # Safety +/// list_item must be a valid pointer to an AMlistItem +#[no_mangle] +pub unsafe extern "C" fn AMlistItemIndex(list_item: *const AMlistItem) -> usize { + if let Some(list_item) = list_item.as_ref() { + list_item.index + } else { + usize::MAX + } +} + +/// \memberof AMlistItem +/// \brief Gets the object identifier of an item in a list object. +/// +/// \param[in] list_item A pointer to an `AMlistItem` struct. +/// \return A pointer to an `AMobjId` struct. +/// \pre \p list_item` != NULL`. +/// \internal +/// +/// # Safety +/// list_item must be a valid pointer to an AMlistItem +#[no_mangle] +pub unsafe extern "C" fn AMlistItemObjId(list_item: *const AMlistItem) -> *const AMobjId { + if let Some(list_item) = list_item.as_ref() { + &list_item.obj_id + } else { + std::ptr::null() + } +} + +/// \memberof AMlistItem +/// \brief Gets the value of an item in a list object. +/// +/// \param[in] list_item A pointer to an `AMlistItem` struct. +/// \return An `AMvalue` struct. +/// \pre \p list_item` != NULL`. 
+/// \internal +/// +/// # Safety +/// list_item must be a valid pointer to an AMlistItem +#[no_mangle] +pub unsafe extern "C" fn AMlistItemValue<'a>(list_item: *const AMlistItem) -> AMvalue<'a> { + if let Some(list_item) = list_item.as_ref() { + (&list_item.value.0, &list_item.value.1).into() + } else { + AMvalue::Void + } +} diff --git a/automerge-c/src/doc/list/items.rs b/automerge-c/src/doc/list/items.rs new file mode 100644 index 00000000..ef6aa45e --- /dev/null +++ b/automerge-c/src/doc/list/items.rs @@ -0,0 +1,347 @@ +use std::ffi::c_void; +use std::mem::size_of; + +use crate::doc::list::item::AMlistItem; + +#[repr(C)] +struct Detail { + len: usize, + offset: isize, + ptr: *const c_void, +} + +/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call +/// (https://github.com/eqrion/cbindgen/issues/252) but it will +/// propagate the name of a constant initialized from it so if the +/// constant's name is a symbolic representation of the value it can be +/// converted into a number by post-processing the header it generated. +pub const USIZE_USIZE_USIZE_: usize = size_of::(); + +impl Detail { + fn new(list_items: &[AMlistItem], offset: isize) -> Self { + Self { + len: list_items.len(), + offset, + ptr: list_items.as_ptr() as *const c_void, + } + } + + pub fn advance(&mut self, n: isize) { + if n == 0 { + return; + } + let len = self.len as isize; + self.offset = if self.offset < 0 { + // It's reversed. + let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); + if unclipped >= 0 { + // Clip it to the forward stop. + len + } else { + std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) + } + } else { + let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); + if unclipped < 0 { + // Clip it to the reverse stop. 
+ -(len + 1) + } else { + std::cmp::max(0, std::cmp::min(unclipped, len)) + } + } + } + + pub fn get_index(&self) -> usize { + (self.offset + + if self.offset < 0 { + self.len as isize + } else { + 0 + }) as usize + } + + pub fn next(&mut self, n: isize) -> Option<&AMlistItem> { + if self.is_stopped() { + return None; + } + let slice: &[AMlistItem] = + unsafe { std::slice::from_raw_parts(self.ptr as *const AMlistItem, self.len) }; + let value = &slice[self.get_index()]; + self.advance(n); + Some(value) + } + + pub fn is_stopped(&self) -> bool { + let len = self.len as isize; + self.offset < -len || self.offset == len + } + + pub fn prev(&mut self, n: isize) -> Option<&AMlistItem> { + self.advance(-n); + if self.is_stopped() { + return None; + } + let slice: &[AMlistItem] = + unsafe { std::slice::from_raw_parts(self.ptr as *const AMlistItem, self.len) }; + Some(&slice[self.get_index()]) + } + + pub fn reversed(&self) -> Self { + Self { + len: self.len, + offset: -(self.offset + 1), + ptr: self.ptr, + } + } + + pub fn rewound(&self) -> Self { + Self { + len: self.len, + offset: if self.offset < 0 { -1 } else { 0 }, + ptr: self.ptr, + } + } +} + +impl From for [u8; USIZE_USIZE_USIZE_] { + fn from(detail: Detail) -> Self { + unsafe { + std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) + .try_into() + .unwrap() + } + } +} + +/// \struct AMlistItems +/// \brief A random-access iterator over a sequence of list object items. +#[repr(C)] +#[derive(PartialEq)] +pub struct AMlistItems { + /// An implementation detail that is intentionally opaque. + /// \warning Modifying \p detail will cause undefined behavior. + /// \note The actual size of \p detail will vary by platform, this is just + /// the one for the platform this documentation was built on. 
+ detail: [u8; USIZE_USIZE_USIZE_], +} + +impl AMlistItems { + pub fn new(list_items: &[AMlistItem]) -> Self { + Self { + detail: Detail::new(list_items, 0).into(), + } + } + + pub fn advance(&mut self, n: isize) { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.advance(n); + } + + pub fn len(&self) -> usize { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + detail.len + } + + pub fn next(&mut self, n: isize) -> Option<&AMlistItem> { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.next(n) + } + + pub fn prev(&mut self, n: isize) -> Option<&AMlistItem> { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.prev(n) + } + + pub fn reversed(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.reversed().into(), + } + } + + pub fn rewound(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.rewound().into(), + } + } +} + +impl AsRef<[AMlistItem]> for AMlistItems { + fn as_ref(&self) -> &[AMlistItem] { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + unsafe { std::slice::from_raw_parts(detail.ptr as *const AMlistItem, detail.len) } + } +} + +impl Default for AMlistItems { + fn default() -> Self { + Self { + detail: [0; USIZE_USIZE_USIZE_], + } + } +} + +/// \memberof AMlistItems +/// \brief Advances an iterator over a sequence of list object items by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction. +/// +/// \param[in,out] list_items A pointer to an `AMlistItems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \pre \p list_items` != NULL`. 
+/// \internal +/// +/// #Safety +/// list_items must be a valid pointer to an AMlistItems +#[no_mangle] +pub unsafe extern "C" fn AMlistItemsAdvance(list_items: *mut AMlistItems, n: isize) { + if let Some(list_items) = list_items.as_mut() { + list_items.advance(n); + }; +} + +/// \memberof AMlistItems +/// \brief Tests the equality of two sequences of list object items underlying +/// a pair of iterators. +/// +/// \param[in] list_items1 A pointer to an `AMlistItems` struct. +/// \param[in] list_items2 A pointer to an `AMlistItems` struct. +/// \return `true` if \p list_items1` == `\p list_items2 and `false` otherwise. +/// \pre \p list_items1` != NULL`. +/// \pre \p list_items2` != NULL`. +/// \internal +/// +/// #Safety +/// list_items1 must be a valid pointer to an AMlistItems +/// list_items2 must be a valid pointer to an AMlistItems +#[no_mangle] +pub unsafe extern "C" fn AMlistItemsEqual( + list_items1: *const AMlistItems, + list_items2: *const AMlistItems, +) -> bool { + match (list_items1.as_ref(), list_items2.as_ref()) { + (Some(list_items1), Some(list_items2)) => list_items1.as_ref() == list_items2.as_ref(), + (None, Some(_)) | (Some(_), None) | (None, None) => false, + } +} + +/// \memberof AMlistItems +/// \brief Gets the list object item at the current position of an iterator +/// over a sequence of list object items and then advances it by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction. +/// +/// \param[in,out] list_items A pointer to an `AMlistItems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \return A pointer to an `AMlistItem` struct that's `NULL` when +/// \p list_items was previously advanced past its forward/reverse +/// limit. +/// \pre \p list_items` != NULL`. 
+/// \internal +/// +/// #Safety +/// list_items must be a valid pointer to an AMlistItems +#[no_mangle] +pub unsafe extern "C" fn AMlistItemsNext( + list_items: *mut AMlistItems, + n: isize, +) -> *const AMlistItem { + if let Some(list_items) = list_items.as_mut() { + if let Some(list_item) = list_items.next(n) { + return list_item; + } + } + std::ptr::null() +} + +/// \memberof AMlistItems +/// \brief Advances an iterator over a sequence of list object items by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction and then gets the list object item at its new +/// position. +/// +/// \param[in,out] list_items A pointer to an `AMlistItems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \return A pointer to an `AMlistItem` struct that's `NULL` when +/// \p list_items is presently advanced past its forward/reverse limit. +/// \pre \p list_items` != NULL`. +/// \internal +/// +/// #Safety +/// list_items must be a valid pointer to an AMlistItems +#[no_mangle] +pub unsafe extern "C" fn AMlistItemsPrev( + list_items: *mut AMlistItems, + n: isize, +) -> *const AMlistItem { + if let Some(list_items) = list_items.as_mut() { + if let Some(list_item) = list_items.prev(n) { + return list_item; + } + } + std::ptr::null() +} + +/// \memberof AMlistItems +/// \brief Gets the size of the sequence of list object items underlying an +/// iterator. +/// +/// \param[in] list_items A pointer to an `AMlistItems` struct. +/// \return The count of values in \p list_items. +/// \pre \p list_items` != NULL`. 
+/// \internal +/// +/// #Safety +/// list_items must be a valid pointer to an AMlistItems +#[no_mangle] +pub unsafe extern "C" fn AMlistItemsSize(list_items: *const AMlistItems) -> usize { + if let Some(list_items) = list_items.as_ref() { + list_items.len() + } else { + 0 + } +} + +/// \memberof AMlistItems +/// \brief Creates an iterator over the same sequence of list object items as +/// the given one but with the opposite position and direction. +/// +/// \param[in] list_items A pointer to an `AMlistItems` struct. +/// \return An `AMlistItems` struct +/// \pre \p list_items` != NULL`. +/// \internal +/// +/// #Safety +/// list_items must be a valid pointer to an AMlistItems +#[no_mangle] +pub unsafe extern "C" fn AMlistItemsReversed(list_items: *const AMlistItems) -> AMlistItems { + if let Some(list_items) = list_items.as_ref() { + list_items.reversed() + } else { + AMlistItems::default() + } +} + +/// \memberof AMlistItems +/// \brief Creates an iterator at the starting position over the same sequence +/// of list object items as the given one. +/// +/// \param[in] list_items A pointer to an `AMlistItems` struct. +/// \return An `AMlistItems` struct +/// \pre \p list_items` != NULL`. +/// \internal +/// +/// #Safety +/// list_items must be a valid pointer to an AMlistItems +#[no_mangle] +pub unsafe extern "C" fn AMlistItemsRewound(list_items: *const AMlistItems) -> AMlistItems { + if let Some(list_items) = list_items.as_ref() { + list_items.rewound() + } else { + AMlistItems::default() + } +} From 42ab1639dbef7da102e734bb90b5d2e75af64b61 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 25 Jul 2022 00:11:00 -0700 Subject: [PATCH 507/730] Add `heads` argument to `AMmapGet()` to expose `automerge::AutoCommit::get_at()`. Add `AMmapRange()` to expose `automerge::AutoCommit::map_range()` and `automerge::AutoCommit::map_range_at()`. Add `AMmapItems` for `AMlistRange()`. Add `AMmapItem` for `AMmapItems`. 
--- automerge-c/src/doc/map.rs | 295 +++++++++++++++++---------- automerge-c/src/doc/map/item.rs | 100 +++++++++ automerge-c/src/doc/map/items.rs | 339 +++++++++++++++++++++++++++++++ 3 files changed, 622 insertions(+), 112 deletions(-) create mode 100644 automerge-c/src/doc/map/item.rs create mode 100644 automerge-c/src/doc/map/items.rs diff --git a/automerge-c/src/doc/map.rs b/automerge-c/src/doc/map.rs index 51941391..89ba688e 100644 --- a/automerge-c/src/doc/map.rs +++ b/automerge-c/src/doc/map.rs @@ -2,11 +2,15 @@ use automerge as am; use automerge::transaction::Transactable; use std::os::raw::c_char; +use crate::change_hashes::AMchangeHashes; use crate::doc::utils::to_str; -use crate::doc::{to_doc, to_doc_const, to_obj_id, AMdoc}; +use crate::doc::{to_doc, to_doc_mut, to_obj_id, AMdoc}; use crate::obj::{AMobjId, AMobjType}; use crate::result::{to_result, AMresult}; +pub mod item; +pub mod items; + /// \memberof AMdoc /// \brief Deletes a key in a map object. /// @@ -14,15 +18,14 @@ use crate::result::{to_result, AMresult}; /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p key` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapDelete( @@ -30,35 +33,42 @@ pub unsafe extern "C" fn AMmapDelete( obj_id: *const AMobjId, key: *const c_char, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.delete(to_obj_id!(obj_id), to_str(key))) } /// \memberof AMdoc -/// \brief Gets the value for a key in a map object. +/// \brief Gets the current or historical value for a key in a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] key A UTF-8 string key for the map object identified by +/// \p obj_id. +/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical +/// value or `NULL` for the current value. /// \return A pointer to an `AMresult` struct. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p key` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapGet( doc: *const AMdoc, obj_id: *const AMobjId, key: *const c_char, + heads: *const AMchangeHashes, ) -> *mut AMresult { - let doc = to_doc_const!(doc); - to_result(doc.get(to_obj_id!(obj_id), to_str(key))) + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + match heads.as_ref() { + None => to_result(doc.get(obj_id, to_str(key))), + Some(heads) => to_result(doc.get_at(obj_id, to_str(key), heads.as_ref())), + } } /// \memberof AMdoc @@ -69,15 +79,14 @@ pub unsafe extern "C" fn AMmapGet( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p key` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapIncrement( @@ -86,7 +95,7 @@ pub unsafe extern "C" fn AMmapIncrement( key: *const c_char, value: i64, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.increment(to_obj_id!(obj_id), to_str(key), value)) } @@ -98,15 +107,14 @@ pub unsafe extern "C" fn AMmapIncrement( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A boolean. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p key` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutBool( @@ -115,7 +123,7 @@ pub unsafe extern "C" fn AMmapPutBool( key: *const c_char, value: bool, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) } @@ -128,17 +136,16 @@ pub unsafe extern "C" fn AMmapPutBool( /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes to copy from \p src. 
/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` size of \p src. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p key` != NULL`. +/// \pre \p src` != NULL`. +/// \pre `0 <=` \p count` <= `size of \p src. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used /// src must be a byte array of size `>= count` #[no_mangle] @@ -149,7 +156,7 @@ pub unsafe extern "C" fn AMmapPutBytes( src: *const u8, count: usize, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); let mut vec = Vec::new(); vec.extend_from_slice(std::slice::from_raw_parts(src, count)); to_result(doc.put(to_obj_id!(obj_id), to_str(key), vec)) @@ -163,15 +170,14 @@ pub unsafe extern "C" fn AMmapPutBytes( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p key` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutCounter( @@ -180,7 +186,7 @@ pub unsafe extern "C" fn AMmapPutCounter( key: *const c_char, value: i64, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.put( to_obj_id!(obj_id), to_str(key), @@ -195,15 +201,14 @@ pub unsafe extern "C" fn AMmapPutCounter( /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p key` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutNull( @@ -211,7 +216,7 @@ pub unsafe extern "C" fn AMmapPutNull( obj_id: *const AMobjId, key: *const c_char, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.put(to_obj_id!(obj_id), to_str(key), ())) } @@ -222,16 +227,16 @@ pub unsafe extern "C" fn AMmapPutNull( /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. 
/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] obj_type An `AMobjIdType` enum tag. -/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \return A pointer to an `AMresult` struct containing a pointer to an +/// `AMobjId` struct. +/// \pre \p doc` != NULL`. +/// \pre \p key` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutObject( @@ -240,7 +245,7 @@ pub unsafe extern "C" fn AMmapPutObject( key: *const c_char, obj_type: AMobjType, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.put_object(to_obj_id!(obj_id), to_str(key), obj_type.into())) } @@ -252,15 +257,14 @@ pub unsafe extern "C" fn AMmapPutObject( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit float. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p key` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutF64( @@ -269,7 +273,7 @@ pub unsafe extern "C" fn AMmapPutF64( key: *const c_char, value: f64, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) } @@ -281,15 +285,14 @@ pub unsafe extern "C" fn AMmapPutF64( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p key` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutInt( @@ -298,7 +301,7 @@ pub unsafe extern "C" fn AMmapPutInt( key: *const c_char, value: i64, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) } @@ -310,16 +313,15 @@ pub unsafe extern "C" fn AMmapPutInt( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A UTF-8 string. 
/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \pre \p value must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p key` != NULL`. +/// \pre \p value` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used /// value must be a null-terminated array of `c_char` #[no_mangle] @@ -329,7 +331,7 @@ pub unsafe extern "C" fn AMmapPutStr( key: *const c_char, value: *const c_char, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.put(to_obj_id!(obj_id), to_str(key), to_str(value))) } @@ -341,15 +343,14 @@ pub unsafe extern "C" fn AMmapPutStr( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p key` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutTimestamp( @@ -358,7 +359,7 @@ pub unsafe extern "C" fn AMmapPutTimestamp( key: *const c_char, value: i64, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.put( to_obj_id!(obj_id), to_str(key), @@ -374,15 +375,14 @@ pub unsafe extern "C" fn AMmapPutTimestamp( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit unsigned integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc must be a valid address. -/// \pre \p key must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p doc` != NULL`. +/// \pre \p key` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// doc must be a pointer to a valid AMdoc -/// obj_id must be a pointer to a valid AMobjId or NULL +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutUint( @@ -391,6 +391,77 @@ pub unsafe extern "C" fn AMmapPutUint( key: *const c_char, value: u64, ) -> *mut AMresult { - let doc = to_doc!(doc); + let doc = to_doc_mut!(doc); to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) } + +/// \memberof AMdoc +/// \brief Gets the current or historical keys and values of the map object +/// within the given range. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. 
+/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] begin The first key in a range of keys or `NULL`. +/// \param[in] end One past the last key in a range of keys or `NULL`. +/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical +/// keys and values or `NULL` for current keys and values. +/// \return A pointer to an `AMresult` struct containing an `AMmapItems` +/// struct. +/// \pre \p doc` != NULL`. +/// \pre \p begin` <= `\p end if \p end` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. +/// \internal +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL +/// heads must be a valid pointer to an AMchangeHashes or NULL +#[no_mangle] +pub unsafe extern "C" fn AMmapRange( + doc: *const AMdoc, + obj_id: *const AMobjId, + begin: *const c_char, + end: *const c_char, + heads: *const AMchangeHashes, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + match (begin.as_ref(), end.as_ref()) { + (Some(_), Some(_)) => { + let (begin, end) = (to_str(begin), to_str(end)); + if begin > end { + return AMresult::err(&format!("Invalid range [{}-{})", begin, end)).into(); + }; + let bounds = begin..end; + if let Some(heads) = heads.as_ref() { + to_result(doc.map_range_at(obj_id, bounds, heads.as_ref())) + } else { + to_result(doc.map_range(obj_id, bounds)) + } + } + (Some(_), None) => { + let bounds = to_str(begin)..; + if let Some(heads) = heads.as_ref() { + to_result(doc.map_range_at(obj_id, bounds, heads.as_ref())) + } else { + to_result(doc.map_range(obj_id, bounds)) + } + } + (None, Some(_)) => { + let bounds = ..to_str(end); + if let Some(heads) = heads.as_ref() { + to_result(doc.map_range_at(obj_id, bounds, heads.as_ref())) + } else { + to_result(doc.map_range(obj_id, bounds)) + } + } + (None, None) => { + let bounds = ..; + if let Some(heads) = 
heads.as_ref() { + to_result(doc.map_range_at(obj_id, bounds, heads.as_ref())) + } else { + to_result(doc.map_range(obj_id, bounds)) + } + } + } +} diff --git a/automerge-c/src/doc/map/item.rs b/automerge-c/src/doc/map/item.rs new file mode 100644 index 00000000..2b7d877d --- /dev/null +++ b/automerge-c/src/doc/map/item.rs @@ -0,0 +1,100 @@ +use automerge as am; +use std::cell::RefCell; +use std::ffi::CString; +use std::os::raw::c_char; + +use crate::obj::AMobjId; +use crate::result::AMvalue; + +/// \enum AMmapItem +/// \brief An item in a map object. +#[repr(C)] +pub struct AMmapItem { + /// The key of an item in a map object. + key: CString, + /// The object identifier of an item in a map object. + obj_id: AMobjId, + /// The value of an item in a map object. + value: (am::Value<'static>, RefCell>), +} + +impl AMmapItem { + pub fn new(key: &'static str, value: am::Value<'static>, obj_id: am::ObjId) -> Self { + Self { + key: CString::new(key).unwrap(), + obj_id: AMobjId::new(obj_id), + value: (value, RefCell::>::default()), + } + } +} + +impl PartialEq for AMmapItem { + fn eq(&self, other: &Self) -> bool { + self.key == other.key && self.obj_id == other.obj_id && self.value.0 == other.value.0 + } +} + +/* +impl From<&AMmapItem> for (String, am::Value<'static>, am::ObjId) { + fn from(map_item: &AMmapItem) -> Self { + (map_item.key.into_string().unwrap(), map_item.value.0.clone(), map_item.obj_id.as_ref().clone()) + } +} +*/ + +/// \memberof AMmapItem +/// \brief Gets the key of an item in a map object. +/// +/// \param[in] map_item A pointer to an `AMmapItem` struct. +/// \return A 64-bit unsigned integer. +/// \pre \p map_item` != NULL`. 
+/// \internal +/// +/// # Safety +/// map_item must be a valid pointer to an AMmapItem +#[no_mangle] +pub unsafe extern "C" fn AMmapItemKey(map_item: *const AMmapItem) -> *const c_char { + if let Some(map_item) = map_item.as_ref() { + map_item.key.as_ptr() + } else { + std::ptr::null() + } +} + +/// \memberof AMmapItem +/// \brief Gets the object identifier of an item in a map object. +/// +/// \param[in] map_item A pointer to an `AMmapItem` struct. +/// \return A pointer to an `AMobjId` struct. +/// \pre \p map_item` != NULL`. +/// \internal +/// +/// # Safety +/// map_item must be a valid pointer to an AMmapItem +#[no_mangle] +pub unsafe extern "C" fn AMmapItemObjId(map_item: *const AMmapItem) -> *const AMobjId { + if let Some(map_item) = map_item.as_ref() { + &map_item.obj_id + } else { + std::ptr::null() + } +} + +/// \memberof AMmapItem +/// \brief Gets the value of an item in a map object. +/// +/// \param[in] map_item A pointer to an `AMmapItem` struct. +/// \return An `AMvalue` struct. +/// \pre \p map_item` != NULL`. 
+/// \internal +/// +/// # Safety +/// map_item must be a valid pointer to an AMmapItem +#[no_mangle] +pub unsafe extern "C" fn AMmapItemValue<'a>(map_item: *const AMmapItem) -> AMvalue<'a> { + if let Some(map_item) = map_item.as_ref() { + (&map_item.value.0, &map_item.value.1).into() + } else { + AMvalue::Void + } +} diff --git a/automerge-c/src/doc/map/items.rs b/automerge-c/src/doc/map/items.rs new file mode 100644 index 00000000..c1ed9999 --- /dev/null +++ b/automerge-c/src/doc/map/items.rs @@ -0,0 +1,339 @@ +use std::ffi::c_void; +use std::mem::size_of; + +use crate::doc::map::item::AMmapItem; + +#[repr(C)] +struct Detail { + len: usize, + offset: isize, + ptr: *const c_void, +} + +/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call +/// (https://github.com/eqrion/cbindgen/issues/252) but it will +/// propagate the name of a constant initialized from it so if the +/// constant's name is a symbolic representation of the value it can be +/// converted into a number by post-processing the header it generated. +pub const USIZE_USIZE_USIZE_: usize = size_of::(); + +impl Detail { + fn new(map_items: &[AMmapItem], offset: isize) -> Self { + Self { + len: map_items.len(), + offset, + ptr: map_items.as_ptr() as *const c_void, + } + } + + pub fn advance(&mut self, n: isize) { + if n == 0 { + return; + } + let len = self.len as isize; + self.offset = if self.offset < 0 { + // It's reversed. + let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); + if unclipped >= 0 { + // Clip it to the forward stop. + len + } else { + std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) + } + } else { + let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); + if unclipped < 0 { + // Clip it to the reverse stop. 
+ -(len + 1) + } else { + std::cmp::max(0, std::cmp::min(unclipped, len)) + } + } + } + + pub fn get_index(&self) -> usize { + (self.offset + + if self.offset < 0 { + self.len as isize + } else { + 0 + }) as usize + } + + pub fn next(&mut self, n: isize) -> Option<&AMmapItem> { + if self.is_stopped() { + return None; + } + let slice: &[AMmapItem] = + unsafe { std::slice::from_raw_parts(self.ptr as *const AMmapItem, self.len) }; + let value = &slice[self.get_index()]; + self.advance(n); + Some(value) + } + + pub fn is_stopped(&self) -> bool { + let len = self.len as isize; + self.offset < -len || self.offset == len + } + + pub fn prev(&mut self, n: isize) -> Option<&AMmapItem> { + self.advance(-n); + if self.is_stopped() { + return None; + } + let slice: &[AMmapItem] = + unsafe { std::slice::from_raw_parts(self.ptr as *const AMmapItem, self.len) }; + Some(&slice[self.get_index()]) + } + + pub fn reversed(&self) -> Self { + Self { + len: self.len, + offset: -(self.offset + 1), + ptr: self.ptr, + } + } + + pub fn rewound(&self) -> Self { + Self { + len: self.len, + offset: if self.offset < 0 { -1 } else { 0 }, + ptr: self.ptr, + } + } +} + +impl From for [u8; USIZE_USIZE_USIZE_] { + fn from(detail: Detail) -> Self { + unsafe { + std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) + .try_into() + .unwrap() + } + } +} + +/// \struct AMmapItems +/// \brief A random-access iterator over a sequence of map object items. +#[repr(C)] +#[derive(PartialEq)] +pub struct AMmapItems { + /// An implementation detail that is intentionally opaque. + /// \warning Modifying \p detail will cause undefined behavior. + /// \note The actual size of \p detail will vary by platform, this is just + /// the one for the platform this documentation was built on. 
+ detail: [u8; USIZE_USIZE_USIZE_], +} + +impl AMmapItems { + pub fn new(map_items: &[AMmapItem]) -> Self { + Self { + detail: Detail::new(map_items, 0).into(), + } + } + + pub fn advance(&mut self, n: isize) { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.advance(n); + } + + pub fn len(&self) -> usize { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + detail.len + } + + pub fn next(&mut self, n: isize) -> Option<&AMmapItem> { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.next(n) + } + + pub fn prev(&mut self, n: isize) -> Option<&AMmapItem> { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.prev(n) + } + + pub fn reversed(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.reversed().into(), + } + } + + pub fn rewound(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.rewound().into(), + } + } +} + +impl AsRef<[AMmapItem]> for AMmapItems { + fn as_ref(&self) -> &[AMmapItem] { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + unsafe { std::slice::from_raw_parts(detail.ptr as *const AMmapItem, detail.len) } + } +} + +impl Default for AMmapItems { + fn default() -> Self { + Self { + detail: [0; USIZE_USIZE_USIZE_], + } + } +} + +/// \memberof AMmapItems +/// \brief Advances an iterator over a sequence of map object items by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction. +/// +/// \param[in,out] map_items A pointer to an `AMmapItems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \pre \p map_items` != NULL`. 
+/// \internal +/// +/// #Safety +/// map_items must be a valid pointer to an AMmapItems +#[no_mangle] +pub unsafe extern "C" fn AMmapItemsAdvance(map_items: *mut AMmapItems, n: isize) { + if let Some(map_items) = map_items.as_mut() { + map_items.advance(n); + }; +} + +/// \memberof AMmapItems +/// \brief Tests the equality of two sequences of map object items underlying +/// a pair of iterators. +/// +/// \param[in] map_items1 A pointer to an `AMmapItems` struct. +/// \param[in] map_items2 A pointer to an `AMmapItems` struct. +/// \return `true` if \p map_items1` == `\p map_items2 and `false` otherwise. +/// \pre \p map_items1` != NULL`. +/// \pre \p map_items2` != NULL`. +/// \internal +/// +/// #Safety +/// map_items1 must be a valid pointer to an AMmapItems +/// map_items2 must be a valid pointer to an AMmapItems +#[no_mangle] +pub unsafe extern "C" fn AMmapItemsEqual( + map_items1: *const AMmapItems, + map_items2: *const AMmapItems, +) -> bool { + match (map_items1.as_ref(), map_items2.as_ref()) { + (Some(map_items1), Some(map_items2)) => map_items1.as_ref() == map_items2.as_ref(), + (None, Some(_)) | (Some(_), None) | (None, None) => false, + } +} + +/// \memberof AMmapItems +/// \brief Gets the map object item at the current position of an iterator +/// over a sequence of map object items and then advances it by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction. +/// +/// \param[in,out] map_items A pointer to an `AMmapItems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \return A pointer to an `AMmapItem` struct that's `NULL` when \p map_items +/// was previously advanced past its forward/reverse limit. +/// \pre \p map_items` != NULL`. 
+/// \internal +/// +/// #Safety +/// map_items must be a valid pointer to an AMmapItems +#[no_mangle] +pub unsafe extern "C" fn AMmapItemsNext(map_items: *mut AMmapItems, n: isize) -> *const AMmapItem { + if let Some(map_items) = map_items.as_mut() { + if let Some(map_item) = map_items.next(n) { + return map_item; + } + } + std::ptr::null() +} + +/// \memberof AMmapItems +/// \brief Advances an iterator over a sequence of map object items by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction and then gets the map object item at its new +/// position. +/// +/// \param[in,out] map_items A pointer to an `AMmapItems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \return A pointer to an `AMmapItem` struct that's `NULL` when \p map_items +/// is presently advanced past its forward/reverse limit. +/// \pre \p map_items` != NULL`. +/// \internal +/// +/// #Safety +/// map_items must be a valid pointer to an AMmapItems +#[no_mangle] +pub unsafe extern "C" fn AMmapItemsPrev(map_items: *mut AMmapItems, n: isize) -> *const AMmapItem { + if let Some(map_items) = map_items.as_mut() { + if let Some(map_item) = map_items.prev(n) { + return map_item; + } + } + std::ptr::null() +} + +/// \memberof AMmapItems +/// \brief Gets the size of the sequence of map object items underlying an +/// iterator. +/// +/// \param[in] map_items A pointer to an `AMmapItems` struct. +/// \return The count of values in \p map_items. +/// \pre \p map_items` != NULL`. 
+/// \internal +/// +/// #Safety +/// map_items must be a valid pointer to an AMmapItems +#[no_mangle] +pub unsafe extern "C" fn AMmapItemsSize(map_items: *const AMmapItems) -> usize { + if let Some(map_items) = map_items.as_ref() { + map_items.len() + } else { + 0 + } +} + +/// \memberof AMmapItems +/// \brief Creates an iterator over the same sequence of map object items as +/// the given one but with the opposite position and direction. +/// +/// \param[in] map_items A pointer to an `AMmapItems` struct. +/// \return An `AMmapItems` struct +/// \pre \p map_items` != NULL`. +/// \internal +/// +/// #Safety +/// map_items must be a valid pointer to an AMmapItems +#[no_mangle] +pub unsafe extern "C" fn AMmapItemsReversed(map_items: *const AMmapItems) -> AMmapItems { + if let Some(map_items) = map_items.as_ref() { + map_items.reversed() + } else { + AMmapItems::default() + } +} + +/// \memberof AMmapItems +/// \brief Creates an iterator at the starting position over the same sequence of map object items as the given one. +/// +/// \param[in] map_items A pointer to an `AMmapItems` struct. +/// \return An `AMmapItems` struct +/// \pre \p map_items` != NULL`. +/// \internal +/// +/// #Safety +/// map_items must be a valid pointer to an AMmapItems +#[no_mangle] +pub unsafe extern "C" fn AMmapItemsRewound(map_items: *const AMmapItems) -> AMmapItems { + if let Some(map_items) = map_items.as_ref() { + map_items.rewound() + } else { + AMmapItems::default() + } +} From a22bcb916ba75415be64721af90411140c7f95f2 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 25 Jul 2022 00:50:40 -0700 Subject: [PATCH 508/730] Promoted `ResultStack`/`StackNode` from the quickstart example up to the library as `AMresultStack` so that it can appear in the README.md and be used to simplify the unit tests. Promoted `free_results()` to `AMfreeStack()` and `push()` to `AMpush()`. Added `AMpop()` because no stack should be without one. 
--- automerge-c/cbindgen.toml | 4 +- automerge-c/examples/quickstart.c | 205 ++++++++++++++---------------- automerge-c/src/result_stack.rs | 138 ++++++++++++++++++++ 3 files changed, 233 insertions(+), 114 deletions(-) create mode 100644 automerge-c/src/result_stack.rs diff --git a/automerge-c/cbindgen.toml b/automerge-c/cbindgen.toml index 20b7a41b..0b1b168d 100644 --- a/automerge-c/cbindgen.toml +++ b/automerge-c/cbindgen.toml @@ -19,7 +19,7 @@ header = """ * All constants, functions and types in the Automerge library's C API. */ """ -include_guard = "automerge_h" +include_guard = "AUTOMERGE_H" includes = [] language = "C" line_length = 140 @@ -36,4 +36,4 @@ prefix_with_name = true rename_variants = "ScreamingSnakeCase" [export] -item_types = ["enums", "structs", "opaque", "constants", "functions"] +item_types = ["constants", "enums", "functions", "opaque", "structs", "typedefs"] diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index 24400079..c4505024 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -1,157 +1,138 @@ #include #include +#include #include -typedef struct StackNode ResultStack; - -AMvalue push(ResultStack**, AMresult*, AMvalueVariant const); - -size_t free_results(ResultStack**); +static void abort_cb(AMresultStack**, uint8_t); /* * Based on https://automerge.github.io/docs/quickstart */ int main(int argc, char** argv) { - ResultStack* results = NULL; - AMdoc* const doc1 = push(&results, AMcreate(), AM_VALUE_DOC).doc; + AMresultStack* results = NULL; + AMdoc* const doc1 = AMpush(&results, AMcreate(), AM_VALUE_DOC, abort_cb).doc; AMobjId const* const - cards = push(&results, AMmapPutObject(doc1, AM_ROOT, "cards", AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID).obj_id; + cards = AMpush(&results, AMmapPutObject(doc1, AM_ROOT, "cards", AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, abort_cb).obj_id; AMobjId const* const - card1 = push(&results, AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP), 
AM_VALUE_OBJ_ID).obj_id; - push(&results, AMmapPutStr(doc1, card1, "title", "Rewrite everything in Clojure"), AM_VALUE_VOID); - push(&results, AMmapPutBool(doc1, card1, "done", false), AM_VALUE_VOID); + card1 = AMpush(&results, AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID, abort_cb).obj_id; + AMpush(&results, AMmapPutStr(doc1, card1, "title", "Rewrite everything in Clojure"), AM_VALUE_VOID, abort_cb); + AMpush(&results, AMmapPutBool(doc1, card1, "done", false), AM_VALUE_VOID, abort_cb); AMobjId const* const - card2 = push(&results, AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID).obj_id; - push(&results, AMmapPutStr(doc1, card2, "title", "Rewrite everything in Haskell"), AM_VALUE_VOID); - push(&results, AMmapPutBool(doc1, card2, "done", false), AM_VALUE_VOID); - push(&results, AMcommit(doc1, "Add card", NULL), AM_VALUE_CHANGE_HASHES); + card2 = AMpush(&results, AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID, abort_cb).obj_id; + AMpush(&results, AMmapPutStr(doc1, card2, "title", "Rewrite everything in Haskell"), AM_VALUE_VOID, abort_cb); + AMpush(&results, AMmapPutBool(doc1, card2, "done", false), AM_VALUE_VOID, abort_cb); + AMpush(&results, AMcommit(doc1, "Add card", NULL), AM_VALUE_CHANGE_HASHES, abort_cb); - AMdoc* doc2 = push(&results, AMcreate(), AM_VALUE_DOC).doc; - push(&results, AMmerge(doc2, doc1), AM_VALUE_CHANGE_HASHES); + AMdoc* doc2 = AMpush(&results, AMcreate(), AM_VALUE_DOC, abort_cb).doc; + AMpush(&results, AMmerge(doc2, doc1), AM_VALUE_CHANGE_HASHES, abort_cb); - AMbyteSpan const binary = push(&results, AMsave(doc1), AM_VALUE_BYTES).bytes; - doc2 = push(&results, AMload(binary.src, binary.count), AM_VALUE_DOC).doc; + AMbyteSpan const binary = AMpush(&results, AMsave(doc1), AM_VALUE_BYTES, abort_cb).bytes; + doc2 = AMpush(&results, AMload(binary.src, binary.count), AM_VALUE_DOC, abort_cb).doc; - push(&results, AMmapPutBool(doc1, card1, "done", true), AM_VALUE_VOID); - 
push(&results, AMcommit(doc1, "Mark card as done", NULL), AM_VALUE_CHANGE_HASHES); + AMpush(&results, AMmapPutBool(doc1, card1, "done", true), AM_VALUE_VOID, abort_cb); + AMpush(&results, AMcommit(doc1, "Mark card as done", NULL), AM_VALUE_CHANGE_HASHES, abort_cb); - push(&results, AMlistDelete(doc2, cards, 0), AM_VALUE_VOID); - push(&results, AMcommit(doc2, "Delete card", NULL), AM_VALUE_CHANGE_HASHES); + AMpush(&results, AMlistDelete(doc2, cards, 0), AM_VALUE_VOID, abort_cb); + AMpush(&results, AMcommit(doc2, "Delete card", NULL), AM_VALUE_CHANGE_HASHES, abort_cb); - push(&results, AMmerge(doc1, doc2), AM_VALUE_CHANGE_HASHES); + AMpush(&results, AMmerge(doc1, doc2), AM_VALUE_CHANGE_HASHES, abort_cb); - AMchanges changes = push(&results, AMgetChanges(doc1, NULL), AM_VALUE_CHANGES).changes; + AMchanges changes = AMpush(&results, AMgetChanges(doc1, NULL), AM_VALUE_CHANGES, abort_cb).changes; AMchange const* change = NULL; while ((change = AMchangesNext(&changes, 1)) != NULL) { AMbyteSpan const change_hash = AMchangeHash(change); AMchangeHashes const - heads = push(&results, AMchangeHashesInit(&change_hash, 1), AM_VALUE_CHANGE_HASHES).change_hashes; + heads = AMpush(&results, AMchangeHashesInit(&change_hash, 1), AM_VALUE_CHANGE_HASHES, abort_cb).change_hashes; printf("%s %ld\n", AMchangeMessage(change), AMobjSize(doc1, cards, &heads)); } - free_results(&results); + AMfreeStack(&results); } -/** - * \brief A node in a singly-linked list of `AMresult` struct pointers. - */ -struct StackNode { - AMresult* result; - struct StackNode* next; -}; +static char const* discriminant_suffix(AMvalueVariant const); /** - * \brief Pushes the given result onto the given stack and then either gets the - * value matching the given discriminant from that result or, failing - * that, prints an error message to `stderr`, frees all results in that - * stack and aborts. + * \brief Prints an error message to `stderr`, deallocates all results in the + * given stack and exits. 
* - * \param[in,out] stack A pointer to a pointer to a `ResultStack` struct. -.* \param[in] result A pointer to an `AMresult` struct. + * \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. * \param[in] discriminant An `AMvalueVariant` enum tag. - * \return An `AMvalue` struct. - * \pre \p stack must be a valid address. - * \pre \p result must be a valid address. - * \post \p stack `== NULL`. + * \pre \p stack` != NULL`. + * \post `*stack == NULL`. */ -AMvalue push(ResultStack** stack, AMresult* result, AMvalueVariant const discriminant) { - static char prelude[64]; +static void abort_cb(AMresultStack** stack, uint8_t discriminant) { + static char buffer[512] = {0}; - if (stack == NULL) { - fprintf(stderr, "Null `ResultStack` struct pointer pointer; previous " - "`AMresult` structs may have leaked!"); - AMfree(result); - exit(EXIT_FAILURE); + char const* suffix = NULL; + if (!stack) { + suffix = "Stack*"; } - if (result == NULL) { - fprintf(stderr, "Null `AMresult` struct pointer."); - free_results(stack); - exit(EXIT_FAILURE); + else if (!*stack) { + suffix = "Stack"; } - /* Push the result onto the stack. 
*/ - struct StackNode* top = malloc(sizeof(struct StackNode)); - top->result = result; - top->next = *stack; - *stack = top; - AMstatus const status = AMresultStatus(result); - if (status != AM_STATUS_OK) { - switch (status) { - case AM_STATUS_ERROR: sprintf(prelude, "Error"); break; - case AM_STATUS_INVALID_RESULT: sprintf(prelude, "Invalid result"); break; - default: sprintf(prelude, "Unknown `AMstatus` tag %d", status); - } - fprintf(stderr, "%s; %s.", prelude, AMerrorMessage(result)); - free_results(stack); - exit(EXIT_FAILURE); + else if (!(*stack)->result) { + suffix = ""; } - AMvalue const value = AMresultValue(result); - if (value.tag != discriminant) { - char const* label = NULL; - switch (value.tag) { - case AM_VALUE_ACTOR_ID: label = "ACTOR_ID"; break; - case AM_VALUE_BOOLEAN: label = "BOOLEAN"; break; - case AM_VALUE_BYTES: label = "BYTES"; break; - case AM_VALUE_CHANGE_HASHES: label = "CHANGE_HASHES"; break; - case AM_VALUE_CHANGES: label = "CHANGES"; break; - case AM_VALUE_COUNTER: label = "COUNTER"; break; - case AM_VALUE_DOC: label = "DOC"; break; - case AM_VALUE_F64: label = "F64"; break; - case AM_VALUE_INT: label = "INT"; break; - case AM_VALUE_NULL: label = "NULL"; break; - case AM_VALUE_OBJ_ID: label = "OBJ_ID"; break; - case AM_VALUE_STR: label = "STR"; break; - case AM_VALUE_STRINGS: label = "STRINGS"; break; - case AM_VALUE_TIMESTAMP: label = "TIMESTAMP"; break; - case AM_VALUE_UINT: label = "UINT"; break; - case AM_VALUE_SYNC_MESSAGE: label = "SYNC_MESSAGE"; break; - case AM_VALUE_SYNC_STATE: label = "SYNC_STATE"; break; - case AM_VALUE_VOID: label = "VOID"; break; - default: label = "..."; - } - fprintf(stderr, "Unexpected `AMvalueVariant` tag `AM_VALUE_%s` (%d).", label, value.tag); - free_results(stack); + if (suffix) { + fprintf(stderr, "Null `AMresult%s*`.", suffix); + AMfreeStack(stack); exit(EXIT_FAILURE); + return; } - return value; + AMstatus const status = AMresultStatus((*stack)->result); + switch (status) { + case 
AM_STATUS_ERROR: strcpy(buffer, "Error"); break; + case AM_STATUS_INVALID_RESULT: strcpy(buffer, "Invalid result"); break; + case AM_STATUS_OK: break; + default: sprintf(buffer, "Unknown `AMstatus` tag %d", status); + } + if (buffer[0]) { + fprintf(stderr, "%s; %s.", buffer, AMerrorMessage((*stack)->result)); + AMfreeStack(stack); + exit(EXIT_FAILURE); + return; + } + AMvalue const value = AMresultValue((*stack)->result); + fprintf(stderr, "Unexpected tag `AM_VALUE_%s` (%d); expected `AM_VALUE_%s`.", + discriminant_suffix(value.tag), + value.tag, + discriminant_suffix(discriminant)); + AMfreeStack(stack); + exit(EXIT_FAILURE); } /** - * \brief Frees a stack of `AMresult` structs. + * \brief Gets the suffix for a discriminant's corresponding string + * representation. * - * \param[in,out] stack A pointer to a pointer to a `ResultStack` struct. - * \return The number of stack nodes freed. - * \pre \p stack must be a valid address. - * \post \p stack `== NULL`. + * \param[in] discriminant An `AMvalueVariant` enum tag. + * \return A UTF-8 string. 
*/ -size_t free_results(ResultStack** stack) { - struct StackNode* prev = NULL; - size_t count = 0; - for (struct StackNode* node = *stack; node; node = node->next, ++count) { - free(prev); - AMfree(node->result); - prev = node; +static char const* discriminant_suffix(AMvalueVariant const discriminant) { + char const* suffix = NULL; + switch (discriminant) { + case AM_VALUE_ACTOR_ID: suffix = "ACTOR_ID"; break; + case AM_VALUE_BOOLEAN: suffix = "BOOLEAN"; break; + case AM_VALUE_BYTES: suffix = "BYTES"; break; + case AM_VALUE_CHANGE_HASHES: suffix = "CHANGE_HASHES"; break; + case AM_VALUE_CHANGES: suffix = "CHANGES"; break; + case AM_VALUE_COUNTER: suffix = "COUNTER"; break; + case AM_VALUE_DOC: suffix = "DOC"; break; + case AM_VALUE_F64: suffix = "F64"; break; + case AM_VALUE_INT: suffix = "INT"; break; + case AM_VALUE_LIST_ITEMS: suffix = "LIST_ITEMS"; break; + case AM_VALUE_MAP_ITEMS: suffix = "MAP_ITEMS"; break; + case AM_VALUE_NULL: suffix = "NULL"; break; + case AM_VALUE_OBJ_ID: suffix = "OBJ_ID"; break; + case AM_VALUE_OBJ_ITEMS: suffix = "OBJ_ITEMS"; break; + case AM_VALUE_STR: suffix = "STR"; break; + case AM_VALUE_STRS: suffix = "STRINGS"; break; + case AM_VALUE_SYNC_MESSAGE: suffix = "SYNC_MESSAGE"; break; + case AM_VALUE_SYNC_STATE: suffix = "SYNC_STATE"; break; + case AM_VALUE_TIMESTAMP: suffix = "TIMESTAMP"; break; + case AM_VALUE_UINT: suffix = "UINT"; break; + case AM_VALUE_VOID: suffix = "VOID"; break; + default: suffix = "..."; } - free(prev); - *stack = NULL; - return count; + return suffix; } diff --git a/automerge-c/src/result_stack.rs b/automerge-c/src/result_stack.rs new file mode 100644 index 00000000..32e23b4a --- /dev/null +++ b/automerge-c/src/result_stack.rs @@ -0,0 +1,138 @@ +use crate::result::{AMfree, AMresult, AMresultStatus, AMresultValue, AMstatus, AMvalue}; + +/// \struct AMresultStack +/// \brief A node in a singly-linked list of result pointers. +#[repr(C)] +pub struct AMresultStack { + /// A result to be deallocated. 
+ pub result: *mut AMresult, + /// The next node in the singly-linked list or `NULL`. + pub next: *mut AMresultStack, +} + +impl AMresultStack { + pub fn new(result: *mut AMresult, next: *mut AMresultStack) -> Self { + Self { result, next } + } +} + +/// \memberof AMresultStack +/// \brief Deallocates the storage for a stack of results. +/// +/// \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. +/// \return The number of `AMresult` structs freed. +/// \pre \p stack` != NULL`. +/// \post `*stack == NULL`. +/// \internal +/// +/// # Safety +/// stack must be a valid AMresultStack pointer pointer +#[no_mangle] +pub unsafe extern "C" fn AMfreeStack(stack: *mut *mut AMresultStack) -> usize { + if stack.is_null() { + return 0; + } + let mut count: usize = 0; + while !(*stack).is_null() { + AMfree(AMpop(stack)); + count += 1; + } + count +} + +/// \memberof AMresultStack +/// \brief Gets the topmost result from the stack after removing it. +/// +/// \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. +/// \return A pointer to an `AMresult` struct or `NULL`. +/// \pre \p stack` != NULL`. +/// \post `*stack == NULL`. +/// \internal +/// +/// # Safety +/// stack must be a valid AMresultStack pointer pointer +#[no_mangle] +pub unsafe extern "C" fn AMpop(stack: *mut *mut AMresultStack) -> *mut AMresult { + if stack.is_null() || (*stack).is_null() { + return std::ptr::null_mut(); + } + let top = Box::from_raw(*stack); + *stack = top.next; + let result = top.result; + drop(top); + result +} + +/// \memberof AMresultStack +/// \brief The prototype of a function to be called when a value matching the +/// given discriminant cannot be extracted from the result at the top of +/// the given stack. 
+pub type AMpushCallback = + Option ()>; + +/// \memberof AMresultStack +/// \brief Pushes the given result onto the given stack and then either extracts +/// a value matching the given discriminant from that result or, +/// failing that, calls the given function and gets a void value instead. +/// +/// \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. +/// \param[in] result A pointer to an `AMresult` struct. +/// \param[in] discriminant An `AMvalue` variant's corresponding enum tag. +/// \param[in] callback A pointer to a function with the same signature as +/// `AMpushCallback()` or `NULL`. +/// \return An `AMvalue` struct. +/// \pre \p stack` != NULL`. +/// \pre \p result` != NULL`. +/// \warning If \p stack` == NULL` then \p result is deallocated in order to +/// prevent a memory leak. +/// \internal +/// +/// # Safety +/// stack must be a valid AMresultStack pointer pointer +/// result must be a valid AMresult pointer +#[no_mangle] +pub unsafe extern "C" fn AMpush<'a>( + stack: *mut *mut AMresultStack, + result: *mut AMresult, + discriminant: u8, + callback: AMpushCallback, +) -> AMvalue<'a> { + if stack.is_null() { + // There's no stack to push the result onto so it has to be freed in + // order to prevent a memory leak. + AMfree(result); + if let Some(callback) = callback { + callback(stack, discriminant); + } + return AMvalue::Void; + } else if result.is_null() { + if let Some(callback) = callback { + callback(stack, discriminant); + } + return AMvalue::Void; + } + // Always push the result onto the stack, even if it's wrong, so that the + // given callback can retrieve it. + let node = Box::new(AMresultStack::new(result, *stack)); + let top = Box::into_raw(node); + *stack = top; + // Test that the result contains a value. 
+ match AMresultStatus(result) { + AMstatus::Ok => {} + _ => { + if let Some(callback) = callback { + callback(stack, discriminant); + } + return AMvalue::Void; + } + } + // Test that the result's value matches the given discriminant. + let value = AMresultValue(result); + if discriminant != u8::from(&value) { + if let Some(callback) = callback { + callback(stack, discriminant); + } + return AMvalue::Void; + } + value +} From 877dbbfce86d21a87b537decea982e9bb28463e0 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 25 Jul 2022 01:00:50 -0700 Subject: [PATCH 509/730] Simplify the unit tests with `AMresultStack` et. al. --- automerge-c/test/CMakeLists.txt | 1 + automerge-c/test/actor_id_tests.c | 8 +- automerge-c/test/doc_tests.c | 239 +++--- automerge-c/test/group_state.c | 18 +- automerge-c/test/group_state.h | 8 +- automerge-c/test/list_tests.c | 374 +++++---- automerge-c/test/map_tests.c | 1235 +++++++++++++++++++++++++---- automerge-c/test/stack_utils.c | 30 + automerge-c/test/stack_utils.h | 38 + automerge-c/test/sync_tests.c | 855 +++++++++++--------- 10 files changed, 1974 insertions(+), 832 deletions(-) create mode 100644 automerge-c/test/stack_utils.c create mode 100644 automerge-c/test/stack_utils.h diff --git a/automerge-c/test/CMakeLists.txt b/automerge-c/test/CMakeLists.txt index a72b78a1..6789b655 100644 --- a/automerge-c/test/CMakeLists.txt +++ b/automerge-c/test/CMakeLists.txt @@ -11,6 +11,7 @@ add_executable( macro_utils.c main.c map_tests.c + stack_utils.c str_utils.c sync_tests.c ) diff --git a/automerge-c/test/actor_id_tests.c b/automerge-c/test/actor_id_tests.c index 4a523aeb..ea627985 100644 --- a/automerge-c/test/actor_id_tests.c +++ b/automerge-c/test/actor_id_tests.c @@ -20,10 +20,10 @@ typedef struct { } GroupState; static int group_setup(void** state) { - GroupState* group_state = calloc(1, sizeof(GroupState)); + GroupState* group_state = test_calloc(1, sizeof(GroupState)); group_state->str = "000102030405060708090a0b0c0d0e0f"; 
group_state->count = strlen(group_state->str) / 2; - group_state->src = malloc(group_state->count); + group_state->src = test_malloc(group_state->count); hex_to_bytes(group_state->str, group_state->src, group_state->count); *state = group_state; return 0; @@ -31,8 +31,8 @@ static int group_setup(void** state) { static int group_teardown(void** state) { GroupState* group_state = *state; - free(group_state->src); - free(group_state); + test_free(group_state->src); + test_free(group_state); return 0; } diff --git a/automerge-c/test/doc_tests.c b/automerge-c/test/doc_tests.c index 996c98a8..f683d6d8 100644 --- a/automerge-c/test/doc_tests.c +++ b/automerge-c/test/doc_tests.c @@ -10,6 +10,7 @@ /* local */ #include "automerge.h" #include "group_state.h" +#include "stack_utils.h" #include "str_utils.h" typedef struct { @@ -20,11 +21,11 @@ typedef struct { } TestState; static int setup(void** state) { - TestState* test_state = calloc(1, sizeof(TestState)); + TestState* test_state = test_calloc(1, sizeof(TestState)); group_setup((void**)&test_state->group_state); test_state->actor_id_str = "000102030405060708090a0b0c0d0e0f"; test_state->actor_id_size = strlen(test_state->actor_id_str) / 2; - test_state->actor_id_bytes = malloc(test_state->actor_id_size); + test_state->actor_id_bytes = test_malloc(test_state->actor_id_size); hex_to_bytes(test_state->actor_id_str, test_state->actor_id_bytes, test_state->actor_id_size); *state = test_state; return 0; @@ -33,196 +34,158 @@ static int setup(void** state) { static int teardown(void** state) { TestState* test_state = *state; group_teardown((void**)&test_state->group_state); - free(test_state->actor_id_bytes); - free(test_state); + test_free(test_state->actor_id_bytes); + test_free(test_state); return 0; } static void test_AMkeys_empty() { - AMresult* const doc_result = AMcreate(); - AMresult* const strings_result = AMkeys(AMresultValue(doc_result).doc, AM_ROOT, NULL); - if (AMresultStatus(strings_result) != AM_STATUS_OK) { - 
fail_msg("%s", AMerrorMessage(strings_result)); - } - assert_int_equal(AMresultSize(strings_result), 0); - AMvalue value = AMresultValue(strings_result); - assert_int_equal(value.tag, AM_VALUE_STRINGS); - assert_int_equal(AMstringsSize(&value.strings), 0); - AMstrings forward = value.strings; - assert_null(AMstringsNext(&forward, 1)); - assert_null(AMstringsPrev(&forward, 1)); - AMstrings reverse = AMstringsReversed(&value.strings); - assert_null(AMstringsNext(&reverse, 1)); - assert_null(AMstringsPrev(&reverse, 1)); - AMfree(strings_result); - AMfree(doc_result); + AMresultStack* stack = NULL; + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMstrs forward = AMpush(&stack, + AMkeys(doc, AM_ROOT, NULL), + AM_VALUE_STRS, + cmocka_cb).strs; + assert_int_equal(AMstrsSize(&forward), 0); + AMstrs reverse = AMstrsReversed(&forward); + assert_int_equal(AMstrsSize(&reverse), 0); + assert_null(AMstrsNext(&forward, 1)); + assert_null(AMstrsPrev(&forward, 1)); + assert_null(AMstrsNext(&reverse, 1)); + assert_null(AMstrsPrev(&reverse, 1)); + AMfreeStack(&stack); } static void test_AMkeys_list() { - AMresult* const doc_result = AMcreate(); - AMdoc* const doc = AMresultValue(doc_result).doc; + AMresultStack* stack = NULL; + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMlistPutInt(doc, AM_ROOT, 0, true, 1)); AMfree(AMlistPutInt(doc, AM_ROOT, 1, true, 2)); AMfree(AMlistPutInt(doc, AM_ROOT, 2, true, 3)); - AMresult* const strings_result = AMkeys(doc, AM_ROOT, NULL); - if (AMresultStatus(strings_result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(strings_result)); - } - assert_int_equal(AMresultSize(strings_result), 3); - AMvalue value = AMresultValue(strings_result); - assert_int_equal(value.tag, AM_VALUE_STRINGS); - AMstrings forward = value.strings; - assert_int_equal(AMstringsSize(&forward), 3); + AMstrs forward = AMpush(&stack, + AMkeys(doc, AM_ROOT, NULL), + AM_VALUE_STRS, + cmocka_cb).strs; + 
assert_int_equal(AMstrsSize(&forward), 3); + AMstrs reverse = AMstrsReversed(&forward); + assert_int_equal(AMstrsSize(&reverse), 3); /* Forward iterator forward. */ - char const* str = AMstringsNext(&forward, 1); + char const* str = AMstrsNext(&forward, 1); assert_ptr_equal(strstr(str, "1@"), str); - str = AMstringsNext(&forward, 1); + str = AMstrsNext(&forward, 1); assert_ptr_equal(strstr(str, "2@"), str); - str = AMstringsNext(&forward, 1); + str = AMstrsNext(&forward, 1); assert_ptr_equal(strstr(str, "3@"), str); - assert_null(AMstringsNext(&forward, 1)); + assert_null(AMstrsNext(&forward, 1)); /* Forward iterator reverse. */ - str = AMstringsPrev(&forward, 1); + str = AMstrsPrev(&forward, 1); assert_ptr_equal(strstr(str, "3@"), str); - str = AMstringsPrev(&forward, 1); + str = AMstrsPrev(&forward, 1); assert_ptr_equal(strstr(str, "2@"), str); - str = AMstringsPrev(&forward, 1); + str = AMstrsPrev(&forward, 1); assert_ptr_equal(strstr(str, "1@"), str); - assert_null(AMstringsPrev(&forward, 1)); - AMstrings reverse = AMstringsReversed(&value.strings); - assert_int_equal(AMstringsSize(&reverse), 3); + assert_null(AMstrsPrev(&forward, 1)); /* Reverse iterator forward. */ - str = AMstringsNext(&reverse, 1); + str = AMstrsNext(&reverse, 1); assert_ptr_equal(strstr(str, "3@"), str); - str = AMstringsNext(&reverse, 1); + str = AMstrsNext(&reverse, 1); assert_ptr_equal(strstr(str, "2@"), str); - str = AMstringsNext(&reverse, 1); + str = AMstrsNext(&reverse, 1); assert_ptr_equal(strstr(str, "1@"), str); /* Reverse iterator reverse. 
*/ - assert_null(AMstringsNext(&reverse, 1)); - str = AMstringsPrev(&reverse, 1); + assert_null(AMstrsNext(&reverse, 1)); + str = AMstrsPrev(&reverse, 1); assert_ptr_equal(strstr(str, "1@"), str); - str = AMstringsPrev(&reverse, 1); + str = AMstrsPrev(&reverse, 1); assert_ptr_equal(strstr(str, "2@"), str); - str = AMstringsPrev(&reverse, 1); + str = AMstrsPrev(&reverse, 1); assert_ptr_equal(strstr(str, "3@"), str); - assert_null(AMstringsPrev(&reverse, 1)); - AMfree(strings_result); - AMfree(doc_result); + assert_null(AMstrsPrev(&reverse, 1)); + AMfreeStack(&stack); } static void test_AMkeys_map() { - AMresult* const doc_result = AMcreate(); - AMdoc* const doc = AMresultValue(doc_result).doc; + AMresultStack* stack = NULL; + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMmapPutInt(doc, AM_ROOT, "one", 1)); AMfree(AMmapPutInt(doc, AM_ROOT, "two", 2)); AMfree(AMmapPutInt(doc, AM_ROOT, "three", 3)); - AMresult* const strings_result = AMkeys(doc, AM_ROOT, NULL); - if (AMresultStatus(strings_result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(strings_result)); - } - assert_int_equal(AMresultSize(strings_result), 3); - AMvalue value = AMresultValue(strings_result); - assert_int_equal(value.tag, AM_VALUE_STRINGS); - AMstrings forward = value.strings; - assert_int_equal(AMstringsSize(&forward), 3); + AMstrs forward = AMpush(&stack, + AMkeys(doc, AM_ROOT, NULL), + AM_VALUE_STRS, + cmocka_cb).strs; + assert_int_equal(AMstrsSize(&forward), 3); + AMstrs reverse = AMstrsReversed(&forward); + assert_int_equal(AMstrsSize(&reverse), 3); /* Forward iterator forward. 
*/ - assert_string_equal(AMstringsNext(&forward, 1), "one"); - assert_string_equal(AMstringsNext(&forward, 1), "three"); - assert_string_equal(AMstringsNext(&forward, 1), "two"); - assert_null(AMstringsNext(&forward, 1)); + assert_string_equal(AMstrsNext(&forward, 1), "one"); + assert_string_equal(AMstrsNext(&forward, 1), "three"); + assert_string_equal(AMstrsNext(&forward, 1), "two"); + assert_null(AMstrsNext(&forward, 1)); /* Forward iterator reverse. */ - assert_string_equal(AMstringsPrev(&forward, 1), "two"); - assert_string_equal(AMstringsPrev(&forward, 1), "three"); - assert_string_equal(AMstringsPrev(&forward, 1), "one"); - assert_null(AMstringsPrev(&forward, 1)); - AMstrings reverse = AMstringsReversed(&value.strings); - assert_int_equal(AMstringsSize(&reverse), 3); + assert_string_equal(AMstrsPrev(&forward, 1), "two"); + assert_string_equal(AMstrsPrev(&forward, 1), "three"); + assert_string_equal(AMstrsPrev(&forward, 1), "one"); + assert_null(AMstrsPrev(&forward, 1)); /* Reverse iterator forward. */ - assert_string_equal(AMstringsNext(&reverse, 1), "two"); - assert_string_equal(AMstringsNext(&reverse, 1), "three"); - assert_string_equal(AMstringsNext(&reverse, 1), "one"); - assert_null(AMstringsNext(&reverse, 1)); + assert_string_equal(AMstrsNext(&reverse, 1), "two"); + assert_string_equal(AMstrsNext(&reverse, 1), "three"); + assert_string_equal(AMstrsNext(&reverse, 1), "one"); + assert_null(AMstrsNext(&reverse, 1)); /* Reverse iterator reverse. 
*/ - assert_string_equal(AMstringsPrev(&reverse, 1), "one"); - assert_string_equal(AMstringsPrev(&reverse, 1), "three"); - assert_string_equal(AMstringsPrev(&reverse, 1), "two"); - assert_null(AMstringsPrev(&reverse, 1)); - AMfree(strings_result); - AMfree(doc_result); + assert_string_equal(AMstrsPrev(&reverse, 1), "one"); + assert_string_equal(AMstrsPrev(&reverse, 1), "three"); + assert_string_equal(AMstrsPrev(&reverse, 1), "two"); + assert_null(AMstrsPrev(&reverse, 1)); + AMfreeStack(&stack); } static void test_AMputActor_bytes(void **state) { TestState* test_state = *state; - GroupState* group_state = test_state->group_state; - AMresult* actor_id_result = AMactorIdInitBytes(test_state->actor_id_bytes, - test_state->actor_id_size); - AMvalue value = AMresultValue(actor_id_result); - AMresult* result = AMsetActor(group_state->doc, value.actor_id); - AMfree(actor_id_result); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(result)); - } - assert_int_equal(AMresultSize(result), 0); - value = AMresultValue(result); - assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(result); - result = AMgetActor(group_state->doc); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(result)); - } - assert_int_equal(AMresultSize(result), 1); - value = AMresultValue(result); - assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); - AMbyteSpan const bytes = AMactorIdBytes(value.actor_id); + AMactorId const* actor_id = AMpush(&test_state->group_state->stack, + AMactorIdInitBytes( + test_state->actor_id_bytes, + test_state->actor_id_size), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id; + AMfree(AMsetActor(test_state->group_state->doc, actor_id)); + actor_id = AMpush(&test_state->group_state->stack, + AMgetActor(test_state->group_state->doc), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id; + AMbyteSpan const bytes = AMactorIdBytes(actor_id); assert_int_equal(bytes.count, test_state->actor_id_size); assert_memory_equal(bytes.src, 
test_state->actor_id_bytes, bytes.count); - AMfree(result); } static void test_AMputActor_hex(void **state) { TestState* test_state = *state; - GroupState* group_state = test_state->group_state; - AMresult* actor_id_result = AMactorIdInitStr(test_state->actor_id_str); - AMvalue value = AMresultValue(actor_id_result); - AMresult* result = AMsetActor(group_state->doc, value.actor_id); - AMfree(actor_id_result); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(result)); - } - assert_int_equal(AMresultSize(result), 0); - value = AMresultValue(result); - assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(result); - result = AMgetActor(group_state->doc); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(result)); - } - assert_int_equal(AMresultSize(result), 1); - value = AMresultValue(result); - assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); - char const* const str = AMactorIdStr(value.actor_id); + AMactorId const* actor_id = AMpush(&test_state->group_state->stack, + AMactorIdInitStr(test_state->actor_id_str), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id; + AMfree(AMsetActor(test_state->group_state->doc, actor_id)); + actor_id = AMpush(&test_state->group_state->stack, + AMgetActor(test_state->group_state->doc), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id; + char const* const str = AMactorIdStr(actor_id); assert_int_equal(strlen(str), test_state->actor_id_size * 2); assert_string_equal(str, test_state->actor_id_str); - AMfree(result); } static void test_AMspliceText() { - AMresult* const doc_result = AMcreate(); - AMdoc* const doc = AMresultValue(doc_result).doc; + AMresultStack* stack = NULL; + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMspliceText(doc, AM_ROOT, 0, 0, "one + ")); AMfree(AMspliceText(doc, AM_ROOT, 4, 2, "two = ")); AMfree(AMspliceText(doc, AM_ROOT, 8, 2, "three")); - AMresult* const text_result = AMtext(doc, AM_ROOT, NULL); - if 
(AMresultStatus(text_result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(text_result)); - } - assert_int_equal(AMresultSize(text_result), 1); - AMvalue value = AMresultValue(text_result); - assert_int_equal(value.tag, AM_VALUE_STR); - assert_string_equal(value.str, "one two three"); - AMfree(text_result); - AMfree(doc_result); + char const* const text = AMpush(&stack, + AMtext(doc, AM_ROOT, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_string_equal(text, "one two three"); + AMfreeStack(&stack); } int run_doc_tests(void) { diff --git a/automerge-c/test/group_state.c b/automerge-c/test/group_state.c index 66be32b3..11074b84 100644 --- a/automerge-c/test/group_state.c +++ b/automerge-c/test/group_state.c @@ -1,19 +1,27 @@ +#include +#include #include +/* third-party */ +#include + /* local */ #include "group_state.h" +#include "stack_utils.h" int group_setup(void** state) { - GroupState* group_state = calloc(1, sizeof(GroupState)); - group_state->doc_result = AMcreate(); - group_state->doc = AMresultValue(group_state->doc_result).doc; + GroupState* group_state = test_calloc(1, sizeof(GroupState)); + group_state->doc = AMpush(&group_state->stack, + AMcreate(), + AM_VALUE_DOC, + cmocka_cb).doc; *state = group_state; return 0; } int group_teardown(void** state) { GroupState* group_state = *state; - AMfree(group_state->doc_result); - free(group_state); + AMfreeStack(&group_state->stack); + test_free(group_state); return 0; } diff --git a/automerge-c/test/group_state.h b/automerge-c/test/group_state.h index 84dba588..27cbf4bd 100644 --- a/automerge-c/test/group_state.h +++ b/automerge-c/test/group_state.h @@ -1,11 +1,11 @@ -#ifndef GROUP_STATE_INCLUDED -#define GROUP_STATE_INCLUDED +#ifndef GROUP_STATE_H +#define GROUP_STATE_H /* local */ #include "automerge.h" typedef struct { - AMresult* doc_result; + AMresultStack* stack; AMdoc* doc; } GroupState; @@ -13,4 +13,4 @@ int group_setup(void** state); int group_teardown(void** state); -#endif +#endif /* 
GROUP_STATE_H */ diff --git a/automerge-c/test/list_tests.c b/automerge-c/test/list_tests.c index f6f5c3d7..5e299f37 100644 --- a/automerge-c/test/list_tests.c +++ b/automerge-c/test/list_tests.c @@ -13,41 +13,22 @@ #include "automerge.h" #include "group_state.h" #include "macro_utils.h" +#include "stack_utils.h" static void test_AMlistIncrement(void** state) { GroupState* group_state = *state; - AMresult* res = AMlistPutCounter(group_state->doc, AM_ROOT, 0, true, 0); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 0); - assert_int_equal(AMresultValue(res).tag, AM_VALUE_VOID); - AMfree(res); - res = AMlistGet(group_state->doc, AM_ROOT, 0); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 1); - AMvalue value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_COUNTER); - assert_int_equal(value.counter, 0); - AMfree(res); - res = AMlistIncrement(group_state->doc, AM_ROOT, 0, 3); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 0); - assert_int_equal(AMresultValue(res).tag, AM_VALUE_VOID); - AMfree(res); - res = AMlistGet(group_state->doc, AM_ROOT, 0); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_COUNTER); - assert_int_equal(value.counter, 3); - AMfree(res); + AMfree(AMlistPutCounter(group_state->doc, AM_ROOT, 0, true, 0)); + assert_int_equal(AMpush(&group_state->stack, + AMlistGet(group_state->doc, AM_ROOT, 0, NULL), + AM_VALUE_COUNTER, + cmocka_cb).counter, 0); + AMfree(AMpop(&group_state->stack)); + AMfree(AMlistIncrement(group_state->doc, AM_ROOT, 0, 3)); + assert_int_equal(AMpush(&group_state->stack, + AMlistGet(group_state->doc, AM_ROOT, 0, NULL), + AM_VALUE_COUNTER, + 
cmocka_cb).counter, 3); + AMfree(AMpop(&group_state->stack)); } #define test_AMlistPut(suffix, mode) test_AMlistPut ## suffix ## _ ## mode @@ -55,25 +36,17 @@ static void test_AMlistIncrement(void** state) { #define static_void_test_AMlistPut(suffix, mode, member, scalar_value) \ static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ GroupState* group_state = *state; \ - AMresult* res = AMlistPut ## suffix( \ - group_state->doc, AM_ROOT, 0, !strcmp(#mode, "insert"), scalar_value \ - ); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ - res = AMlistGet(group_state->doc, AM_ROOT, 0); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res); \ - assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ - assert_true(value.member == scalar_value); \ - AMfree(res); \ + AMfree(AMlistPut ## suffix(group_state->doc, \ + AM_ROOT, \ + 0, \ + !strcmp(#mode, "insert"), \ + scalar_value)); \ + assert_true(AMpush( \ + &group_state->stack, \ + AMlistGet(group_state->doc, AM_ROOT, 0, NULL), \ + AMvalue_discriminant(#suffix), \ + cmocka_cb).member == scalar_value); \ + AMfree(AMpop(&group_state->stack)); \ } #define test_AMlistPutBytes(mode) test_AMlistPutBytes ## _ ## mode @@ -83,31 +56,20 @@ static void test_AMlistPutBytes_ ## mode(void **state) { \ static size_t const BYTES_SIZE = sizeof(bytes_value) / sizeof(uint8_t); \ \ GroupState* group_state = *state; \ - AMresult* res = AMlistPutBytes( \ - group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - bytes_value, \ - BYTES_SIZE \ - ); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = 
AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ - res = AMlistGet(group_state->doc, AM_ROOT, 0); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_BYTES); \ - assert_int_equal(value.bytes.count, BYTES_SIZE); \ - assert_memory_equal(value.bytes.src, bytes_value, BYTES_SIZE); \ - AMfree(res); \ + AMfree(AMlistPutBytes(group_state->doc, \ + AM_ROOT, \ + 0, \ + !strcmp(#mode, "insert"), \ + bytes_value, \ + BYTES_SIZE)); \ + AMbyteSpan const bytes = AMpush( \ + &group_state->stack, \ + AMlistGet(group_state->doc, AM_ROOT, 0, NULL), \ + AM_VALUE_BYTES, \ + cmocka_cb).bytes; \ + assert_int_equal(bytes.count, BYTES_SIZE); \ + assert_memory_equal(bytes.src, bytes_value, BYTES_SIZE); \ + AMfree(AMpop(&group_state->stack)); \ } #define test_AMlistPutNull(mode) test_AMlistPutNull_ ## mode @@ -115,23 +77,17 @@ static void test_AMlistPutBytes_ ## mode(void **state) { \ #define static_void_test_AMlistPutNull(mode) \ static void test_AMlistPutNull_ ## mode(void **state) { \ GroupState* group_state = *state; \ - AMresult* res = AMlistPutNull( \ - group_state->doc, AM_ROOT, 0, !strcmp(#mode, "insert")); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ + AMfree(AMlistPutNull(group_state->doc, \ + AM_ROOT, \ + 0, \ + !strcmp(#mode, "insert"))); \ + AMresult* const result = AMlistGet(group_state->doc, AM_ROOT, 0, NULL); \ + if (AMresultStatus(result) != AM_STATUS_OK) { \ + fail_msg("%s", AMerrorMessage(result)); \ } \ - assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ - res = AMlistGet(group_state->doc, AM_ROOT, 0); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); 
\ - value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_NULL); \ - AMfree(res); \ + assert_int_equal(AMresultSize(result), 1); \ + assert_int_equal(AMresultValue(result).tag, AM_VALUE_NULL); \ + AMfree(result); \ } #define test_AMlistPutObject(label, mode) test_AMlistPutObject_ ## label ## _ ## mode @@ -139,55 +95,36 @@ static void test_AMlistPutNull_ ## mode(void **state) { \ #define static_void_test_AMlistPutObject(label, mode) \ static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ GroupState* group_state = *state; \ - AMresult* res = AMlistPutObject( \ - group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - AMobjType_tag(#label) \ - ); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); \ - AMvalue value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ - assert_non_null(value.obj_id); \ - assert_int_equal(AMobjSize(group_state->doc, value.obj_id, NULL), 0); \ - AMfree(res); \ + AMobjId const* const obj_id = AMpush( \ + &group_state->stack, \ + AMlistPutObject(group_state->doc, \ + AM_ROOT, \ + 0, \ + !strcmp(#mode, "insert"), \ + AMobjType_tag(#label)), \ + AM_VALUE_OBJ_ID, \ + cmocka_cb).obj_id; \ + assert_non_null(obj_id); \ + assert_int_equal(AMobjSize(group_state->doc, obj_id, NULL), 0); \ + AMfree(AMpop(&group_state->stack)); \ } #define test_AMlistPutStr(mode) test_AMlistPutStr ## _ ## mode #define static_void_test_AMlistPutStr(mode, str_value) \ static void test_AMlistPutStr_ ## mode(void **state) { \ - static size_t const STR_LEN = strlen(str_value); \ - \ GroupState* group_state = *state; \ - AMresult* res = AMlistPutStr( \ - group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - str_value \ - ); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res); \ 
- assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ - res = AMlistGet(group_state->doc, AM_ROOT, 0); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_STR); \ - assert_int_equal(strlen(value.str), STR_LEN); \ - assert_memory_equal(value.str, str_value, STR_LEN + 1); \ - AMfree(res); \ + AMfree(AMlistPutStr(group_state->doc, \ + AM_ROOT, \ + 0, \ + !strcmp(#mode, "insert"), \ + str_value)); \ + assert_string_equal(AMpush( \ + &group_state->stack, \ + AMlistGet(group_state->doc, AM_ROOT, 0, NULL), \ + AM_VALUE_STR, \ + cmocka_cb).str, str_value); \ + AMfree(AMpop(&group_state->stack)); \ } static_void_test_AMlistPut(Bool, insert, boolean, true) @@ -240,6 +177,173 @@ static_void_test_AMlistPut(Uint, insert, uint, UINT64_MAX) static_void_test_AMlistPut(Uint, update, uint, UINT64_MAX) +static void test_insert_at_index(void** state) { + AMresultStack* stack = *state; + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + + AMobjId const* const list = AMpush( + &stack, + AMlistPutObject(doc, AM_ROOT, 0, true, AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* Insert both at the same index. 
*/ + AMfree(AMlistPutUint(doc, list, 0, true, 0)); + AMfree(AMlistPutUint(doc, list, 0, true, 1)); + + assert_int_equal(AMobjSize(doc, list, NULL), 2); + AMstrs const keys = AMpush(&stack, + AMkeys(doc, list, NULL), + AM_VALUE_STRS, + cmocka_cb).strs; + assert_int_equal(AMstrsSize(&keys), 2); + AMlistItems const range = AMpush(&stack, + AMlistRange(doc, list, 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_int_equal(AMlistItemsSize(&range), 2); +} + +static void test_get_list_values(void** state) { + AMresultStack* stack = *state; + AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMobjId const* const list = AMpush( + &stack, + AMmapPutObject(doc1, AM_ROOT, "list", AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + + /* Insert elements. */ + AMfree(AMlistPutStr(doc1, list, 0, true, "First")); + AMfree(AMlistPutStr(doc1, list, 0, true, "Second")); + AMfree(AMlistPutStr(doc1, list, 0, true, "Third")); + AMfree(AMlistPutStr(doc1, list, 0, true, "Fourth")); + AMfree(AMlistPutStr(doc1, list, 0, true, "Fifth")); + AMfree(AMlistPutStr(doc1, list, 0, true, "Sixth")); + AMfree(AMlistPutStr(doc1, list, 0, true, "Seventh")); + AMfree(AMlistPutStr(doc1, list, 0, true, "Eighth")); + AMfree(AMcommit(doc1, NULL, NULL)); + + AMchangeHashes const v1 = AMpush(&stack, + AMgetHeads(doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMdoc* const doc2 = AMpush(&stack, + AMfork(doc1), + AM_VALUE_DOC, + cmocka_cb).doc; + + AMfree(AMlistPutStr(doc1, list, 2, false, "Third V2")); + AMfree(AMcommit(doc1, NULL, NULL)); + + AMfree(AMlistPutStr(doc2, list, 2, false, "Third V3")); + AMfree(AMcommit(doc2, NULL, NULL)); + + AMfree(AMmerge(doc1, doc2)); + + AMlistItems range = AMpush(&stack, + AMlistRange(doc1, list, 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_int_equal(AMlistItemsSize(&range), 8); + + AMlistItem const* list_item = NULL; + while ((list_item = AMlistItemsNext(&range, 1)) 
!= NULL) { + AMvalue const val1 = AMlistItemValue(list_item); + AMresult* result = AMlistGet(doc1, list, AMlistItemIndex(list_item), NULL); + AMvalue const val2 = AMresultValue(result); + assert_true(AMvalueEqual(&val1, &val2)); + assert_non_null(AMlistItemObjId(list_item)); + AMfree(result); + } + + range = AMpush(&stack, + AMlistRange(doc1, list, 3, 6, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + AMlistItems range_back = AMlistItemsReversed(&range); + assert_int_equal(AMlistItemsSize(&range), 3); + assert_int_equal(AMlistItemIndex(AMlistItemsNext(&range, 1)), 3); + assert_int_equal(AMlistItemIndex(AMlistItemsNext(&range_back, 1)), 5); + + range = AMlistItemsRewound(&range); + while ((list_item = AMlistItemsNext(&range, 1)) != NULL) { + AMvalue const val1 = AMlistItemValue(list_item); + AMresult* result = AMlistGet(doc1, list, AMlistItemIndex(list_item), NULL); + AMvalue const val2 = AMresultValue(result); + assert_true(AMvalueEqual(&val1, &val2)); + assert_non_null(AMlistItemObjId(list_item)); + AMfree(result); + } + + range = AMpush(&stack, + AMlistRange(doc1, list, 0, SIZE_MAX, &v1), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_int_equal(AMlistItemsSize(&range), 8); + while ((list_item = AMlistItemsNext(&range, 1)) != NULL) { + AMvalue const val1 = AMlistItemValue(list_item); + AMresult* result = AMlistGet(doc1, list, AMlistItemIndex(list_item), &v1); + AMvalue const val2 = AMresultValue(result); + assert_true(AMvalueEqual(&val1, &val2)); + assert_non_null(AMlistItemObjId(list_item)); + AMfree(result); + } + + range = AMpush(&stack, + AMlistRange(doc1, list, 3, 6, &v1), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + range_back = AMlistItemsReversed(&range); + assert_int_equal(AMlistItemsSize(&range), 3); + assert_int_equal(AMlistItemIndex(AMlistItemsNext(&range, 1)), 3); + assert_int_equal(AMlistItemIndex(AMlistItemsNext(&range_back, 1)), 5); + + range = AMlistItemsRewound(&range); + while ((list_item = AMlistItemsNext(&range, 1)) != 
NULL) { + AMvalue const val1 = AMlistItemValue(list_item); + AMresult* result = AMlistGet(doc1, list, AMlistItemIndex(list_item), &v1); + AMvalue const val2 = AMresultValue(result); + assert_true(AMvalueEqual(&val1, &val2)); + assert_non_null(AMlistItemObjId(list_item)); + AMfree(result); + } + + range = AMpush(&stack, + AMlistRange(doc1, list, 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + AMobjItems values = AMpush(&stack, + AMobjValues(doc1, list, NULL), + AM_VALUE_OBJ_ITEMS, + cmocka_cb).obj_items; + assert_int_equal(AMlistItemsSize(&range), AMobjItemsSize(&values)); + AMobjItem const* value = NULL; + while ((list_item = AMlistItemsNext(&range, 1)) != NULL && + (value = AMobjItemsNext(&values, 1)) != NULL) { + AMvalue const val1 = AMlistItemValue(list_item); + AMvalue const val2 = AMobjItemValue(value); + assert_true(AMvalueEqual(&val1, &val2)); + assert_true(AMobjIdEqual(AMlistItemObjId(list_item), AMobjItemObjId(value))); + } + + range = AMpush(&stack, + AMlistRange(doc1, list, 0, SIZE_MAX, &v1), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + values = AMpush(&stack, + AMobjValues(doc1, list, &v1), + AM_VALUE_OBJ_ITEMS, + cmocka_cb).obj_items; + assert_int_equal(AMlistItemsSize(&range), AMobjItemsSize(&values)); + while ((list_item = AMlistItemsNext(&range, 1)) != NULL && + (value = AMobjItemsNext(&values, 1)) != NULL) { + AMvalue const val1 = AMlistItemValue(list_item); + AMvalue const val2 = AMobjItemValue(value); + assert_true(AMvalueEqual(&val1, &val2)); + assert_true(AMobjIdEqual(AMlistItemObjId(list_item), AMobjItemObjId(value))); + } +} + int run_list_tests(void) { const struct CMUnitTest tests[] = { cmocka_unit_test(test_AMlistIncrement), @@ -267,6 +371,8 @@ int run_list_tests(void) { cmocka_unit_test(test_AMlistPut(Timestamp, update)), cmocka_unit_test(test_AMlistPut(Uint, insert)), cmocka_unit_test(test_AMlistPut(Uint, update)), + cmocka_unit_test_setup_teardown(test_insert_at_index, setup_stack, teardown_stack), + 
cmocka_unit_test_setup_teardown(test_get_list_values, setup_stack, teardown_stack), }; return cmocka_run_group_tests(tests, group_setup, group_teardown); diff --git a/automerge-c/test/map_tests.c b/automerge-c/test/map_tests.c index c90b5d2b..47a1dbe1 100644 --- a/automerge-c/test/map_tests.c +++ b/automerge-c/test/map_tests.c @@ -4,7 +4,6 @@ #include #include #include -#include /* third-party */ #include @@ -13,41 +12,22 @@ #include "automerge.h" #include "group_state.h" #include "macro_utils.h" +#include "stack_utils.h" static void test_AMmapIncrement(void** state) { GroupState* group_state = *state; - AMresult* res = AMmapPutCounter(group_state->doc, AM_ROOT, "Counter", 0); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 0); - assert_int_equal(AMresultValue(res).tag, AM_VALUE_VOID); - AMfree(res); - res = AMmapGet(group_state->doc, AM_ROOT, "Counter"); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 1); - AMvalue value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_COUNTER); - assert_int_equal(value.counter, 0); - AMfree(res); - res = AMmapIncrement(group_state->doc, AM_ROOT, "Counter", 3); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 0); - assert_int_equal(AMresultValue(res).tag, AM_VALUE_VOID); - AMfree(res); - res = AMmapGet(group_state->doc, AM_ROOT, "Counter"); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_COUNTER); - assert_int_equal(value.counter, 3); - AMfree(res); + AMfree(AMmapPutCounter(group_state->doc, AM_ROOT, "Counter", 0)); + assert_int_equal(AMpush(&group_state->stack, + AMmapGet(group_state->doc, AM_ROOT, "Counter", NULL), + AM_VALUE_COUNTER, + 
cmocka_cb).counter, 0); + AMfree(AMpop(&group_state->stack)); + AMfree(AMmapIncrement(group_state->doc, AM_ROOT, "Counter", 3)); + assert_int_equal(AMpush(&group_state->stack, + AMmapGet(group_state->doc, AM_ROOT, "Counter", NULL), + AM_VALUE_COUNTER, + cmocka_cb).counter, 3); + AMfree(AMpop(&group_state->stack)); } #define test_AMmapPut(suffix) test_AMmapPut ## suffix @@ -55,155 +35,1092 @@ static void test_AMmapIncrement(void** state) { #define static_void_test_AMmapPut(suffix, member, scalar_value) \ static void test_AMmapPut ## suffix(void **state) { \ GroupState* group_state = *state; \ - AMresult* res = AMmapPut ## suffix( \ - group_state->doc, \ - AM_ROOT, \ - #suffix, \ - scalar_value \ - ); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 0); \ - AMvalue value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_VOID); \ - AMfree(res); \ - res = AMmapGet(group_state->doc, AM_ROOT, #suffix); \ - if (AMresultStatus(res) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); \ - value = AMresultValue(res); \ - assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \ - assert_true(value.member == scalar_value); \ - AMfree(res); \ + AMfree(AMmapPut ## suffix(group_state->doc, \ + AM_ROOT, \ + #suffix, \ + scalar_value)); \ + assert_true(AMpush( \ + &group_state->stack, \ + AMmapGet(group_state->doc, AM_ROOT, #suffix, NULL), \ + AMvalue_discriminant(#suffix), \ + cmocka_cb).member == scalar_value); \ + AMfree(AMpop(&group_state->stack)); \ } -#define test_AMmapPutObject(label) test_AMmapPutObject_ ## label - -#define static_void_test_AMmapPutObject(label) \ -static void test_AMmapPutObject_ ## label(void **state) { \ - GroupState* group_state = *state; \ - AMresult* res = AMmapPutObject( \ - group_state->doc, \ - AM_ROOT, \ - #label, \ - AMobjType_tag(#label) \ - ); \ - if (AMresultStatus(res) != AM_STATUS_OK) 
{ \ - fail_msg("%s", AMerrorMessage(res)); \ - } \ - assert_int_equal(AMresultSize(res), 1); \ - AMvalue value = AMresultValue(res); \ - assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \ - assert_non_null(value.obj_id); \ - assert_int_equal(AMobjSize(group_state->doc, value.obj_id, NULL), 0); \ - AMfree(res); \ -} - -static_void_test_AMmapPut(Bool, boolean, true) - static void test_AMmapPutBytes(void **state) { static char const* const KEY = "Bytes"; static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; static size_t const BYTES_SIZE = sizeof(BYTES_VALUE) / sizeof(uint8_t); GroupState* group_state = *state; - AMresult* res = AMmapPutBytes( - group_state->doc, - AM_ROOT, - KEY, - BYTES_VALUE, - BYTES_SIZE - ); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(res); - res = AMmapGet(group_state->doc, AM_ROOT, KEY); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_BYTES); - assert_int_equal(value.bytes.count, BYTES_SIZE); - assert_memory_equal(value.bytes.src, BYTES_VALUE, BYTES_SIZE); - AMfree(res); + AMfree(AMmapPutBytes(group_state->doc, + AM_ROOT, + KEY, + BYTES_VALUE, + BYTES_SIZE)); + AMbyteSpan const bytes = AMpush(&group_state->stack, + AMmapGet(group_state->doc, AM_ROOT, KEY, NULL), + AM_VALUE_BYTES, + cmocka_cb).bytes; + assert_int_equal(bytes.count, BYTES_SIZE); + assert_memory_equal(bytes.src, BYTES_VALUE, BYTES_SIZE); + AMfree(AMpop(&group_state->stack)); } +static void test_AMmapPutNull(void **state) { + static char const* const KEY = "Null"; + + GroupState* group_state = *state; + AMfree(AMmapPutNull(group_state->doc, AM_ROOT, KEY)); + AMresult* const result = AMmapGet(group_state->doc, AM_ROOT, KEY, NULL); + if 
(AMresultStatus(result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage(result)); + } + assert_int_equal(AMresultSize(result), 1); + assert_int_equal(AMresultValue(result).tag, AM_VALUE_NULL); + AMfree(result); +} + +#define test_AMmapPutObject(label) test_AMmapPutObject_ ## label + +#define static_void_test_AMmapPutObject(label) \ +static void test_AMmapPutObject_ ## label(void **state) { \ + GroupState* group_state = *state; \ + AMobjId const* const obj_id = AMpush( \ + &group_state->stack, \ + AMmapPutObject(group_state->doc, \ + AM_ROOT, \ + #label, \ + AMobjType_tag(#label)), \ + AM_VALUE_OBJ_ID, \ + cmocka_cb).obj_id; \ + assert_non_null(obj_id); \ + assert_int_equal(AMobjSize(group_state->doc, obj_id, NULL), 0); \ + AMfree(AMpop(&group_state->stack)); \ +} + +static void test_AMmapPutStr(void **state) { + static char const* const KEY = "Str"; + static char const* const STR_VALUE = "Hello, world!"; + + GroupState* group_state = *state; + AMfree(AMmapPutStr(group_state->doc, AM_ROOT, KEY, STR_VALUE)); + assert_string_equal(AMpush(&group_state->stack, + AMmapGet(group_state->doc, AM_ROOT, KEY, NULL), + AM_VALUE_STR, + cmocka_cb).str, STR_VALUE); + AMfree(AMpop(&group_state->stack)); +} + +static_void_test_AMmapPut(Bool, boolean, true) + static_void_test_AMmapPut(Counter, counter, INT64_MAX) static_void_test_AMmapPut(F64, f64, DBL_MAX) static_void_test_AMmapPut(Int, int_, INT64_MAX) -static void test_AMmapPutNull(void **state) { - static char const* const KEY = "Null"; - - GroupState* group_state = *state; - AMresult* res = AMmapPutNull(group_state->doc, AM_ROOT, KEY); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(res); - res = AMmapGet(group_state->doc, AM_ROOT, KEY); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - 
assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_NULL); - AMfree(res); -} - static_void_test_AMmapPutObject(List) static_void_test_AMmapPutObject(Map) static_void_test_AMmapPutObject(Text) -static void test_AMmapPutStr(void **state) { - static char const* const KEY = "Str"; - static char const* const STR_VALUE = "Hello, world!"; - size_t const STR_LEN = strlen(STR_VALUE); - - GroupState* group_state = *state; - AMresult* res = AMmapPutStr( - group_state->doc, - AM_ROOT, - KEY, - STR_VALUE - ); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 0); - AMvalue value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(res); - res = AMmapGet(group_state->doc, AM_ROOT, KEY); - if (AMresultStatus(res) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(res)); - } - assert_int_equal(AMresultSize(res), 1); - value = AMresultValue(res); - assert_int_equal(value.tag, AM_VALUE_STR); - assert_int_equal(strlen(value.str), STR_LEN); - assert_memory_equal(value.str, STR_VALUE, STR_LEN + 1); - AMfree(res); -} - static_void_test_AMmapPut(Timestamp, timestamp, INT64_MAX) static_void_test_AMmapPut(Uint, uint, UINT64_MAX) +static void test_range_iter_map(void** state) { + AMresultStack* stack = *state; + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMfree(AMmapPutUint(doc, AM_ROOT, "a", 3)); + AMfree(AMmapPutUint(doc, AM_ROOT, "b", 4)); + AMfree(AMmapPutUint(doc, AM_ROOT, "c", 5)); + AMfree(AMmapPutUint(doc, AM_ROOT, "d", 6)); + AMfree(AMcommit(doc, NULL, NULL)); + AMfree(AMmapPutUint(doc, AM_ROOT, "a", 7)); + AMfree(AMcommit(doc, NULL, NULL)); + AMfree(AMmapPutUint(doc, AM_ROOT, "a", 8)); + AMfree(AMmapPutUint(doc, AM_ROOT, "d", 9)); + AMfree(AMcommit(doc, NULL, NULL)); + AMactorId const* const actor_id = AMpush(&stack, + AMgetActor(doc), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id; + AMmapItems 
map_items = AMpush(&stack, + AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + assert_int_equal(AMmapItemsSize(&map_items), 4); + + /* ["b"-"d") */ + AMmapItems range = AMpush(&stack, + AMmapRange(doc, AM_ROOT, "b", "d", NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + /* First */ + AMmapItem const* next = AMmapItemsNext(&range, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "b"); + AMvalue next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_UINT); + assert_int_equal(next_value.uint, 4); + AMobjId const* next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Second */ + next = AMmapItemsNext(&range, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "c"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_UINT); + assert_int_equal(next_value.uint, 5); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + assert_null(AMmapItemsNext(&range, 1)); + + /* ["b"-) */ + range = AMpush(&stack, + AMmapRange(doc, AM_ROOT, "b", NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + /* First */ + next = AMmapItemsNext(&range, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "b"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_UINT); + assert_int_equal(next_value.uint, 4); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Second */ + next = AMmapItemsNext(&range, 1); + 
assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "c"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_UINT); + assert_int_equal(next_value.uint, 5); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + next = AMmapItemsNext(&range, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "d"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_UINT); + assert_int_equal(next_value.uint, 9); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 7); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Fourth */ + assert_null(AMmapItemsNext(&range, 1)); + + /* [-"d") */ + range = AMpush(&stack, + AMmapRange(doc, AM_ROOT, NULL, "d", NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + /* First */ + next = AMmapItemsNext(&range, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "a"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_UINT); + assert_int_equal(next_value.uint, 8); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 6); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Second */ + next = AMmapItemsNext(&range, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "b"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_UINT); + assert_int_equal(next_value.uint, 4); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + 
assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + next = AMmapItemsNext(&range, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "c"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_UINT); + assert_int_equal(next_value.uint, 5); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Fourth */ + assert_null(AMmapItemsNext(&range, 1)); + + /* ["a"-) */ + range = AMpush(&stack, + AMmapRange(doc, AM_ROOT, "a", NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + /* First */ + next = AMmapItemsNext(&range, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "a"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_UINT); + assert_int_equal(next_value.uint, 8); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 6); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Second */ + next = AMmapItemsNext(&range, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "b"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_UINT); + assert_int_equal(next_value.uint, 4); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + next = AMmapItemsNext(&range, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "c"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_UINT); + assert_int_equal(next_value.uint, 5); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 3); + 
assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Fourth */ + next = AMmapItemsNext(&range, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "d"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_UINT); + assert_int_equal(next_value.uint, 9); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 7); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Fifth */ + assert_null(AMmapItemsNext(&range, 1)); +} + +static void test_map_range_back_and_forth_single(void** state) { + AMresultStack* stack = *state; + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMactorId const* const actor_id = AMpush(&stack, + AMgetActor(doc), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id; + + AMfree(AMmapPutStr(doc, AM_ROOT, "1", "a")); + AMfree(AMmapPutStr(doc, AM_ROOT, "2", "b")); + AMfree(AMmapPutStr(doc, AM_ROOT, "3", "c")); + + /* Forward, back, back. 
*/ + AMmapItems range_all = AMpush(&stack, + AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + /* First */ + AMmapItem const* next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "1"); + AMvalue next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "a"); + AMobjId const* next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + AMmapItems range_back_all = AMmapItemsReversed(&range_all); + range_back_all = AMmapItemsRewound(&range_back_all); + AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "3"); + AMvalue next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "c"); + AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Second */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "2"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "b"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + + /* Forward, back, forward. 
*/ + range_all = AMmapItemsRewound(&range_all); + range_back_all = AMmapItemsRewound(&range_back_all); + /* First */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "1"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "a"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "3"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "c"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Second */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "2"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "b"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + + /* Forward, forward, forward. 
*/ + range_all = AMmapItemsRewound(&range_all); + /* First */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "1"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "a"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Second */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "2"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "b"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "3"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "c"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + + /* Back, back, back. 
*/ + range_back_all = AMmapItemsRewound(&range_back_all); + /* Third */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "3"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "c"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Second */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "2"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "b"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* First */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "1"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "a"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); +} + +static void test_map_range_back_and_forth_double(void** state) { + AMresultStack* stack = *state; + AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMactorId const* const actor_id1= AMpush(&stack, + AMactorIdInitBytes("\0", 1), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id; + 
AMfree(AMsetActor(doc1, actor_id1)); + + AMfree(AMmapPutStr(doc1, AM_ROOT, "1", "a")); + AMfree(AMmapPutStr(doc1, AM_ROOT, "2", "b")); + AMfree(AMmapPutStr(doc1, AM_ROOT, "3", "c")); + + /* The second actor should win all conflicts here. */ + AMdoc* const doc2 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMactorId const* const actor_id2 = AMpush(&stack, + AMactorIdInitBytes("\1", 1), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id; + AMfree(AMsetActor(doc2, actor_id2)); + AMfree(AMmapPutStr(doc2, AM_ROOT, "1", "aa")); + AMfree(AMmapPutStr(doc2, AM_ROOT, "2", "bb")); + AMfree(AMmapPutStr(doc2, AM_ROOT, "3", "cc")); + + AMfree(AMmerge(doc1, doc2)); + + /* Forward, back, back. */ + AMmapItems range_all = AMpush(&stack, + AMmapRange(doc1, AM_ROOT, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + /* First */ + AMmapItem const* next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "1"); + AMvalue next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "aa"); + AMobjId const* next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Third */ + AMmapItems range_back_all = AMmapItemsReversed(&range_all); + range_back_all = AMmapItemsRewound(&range_back_all); + AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "3"); + AMvalue next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "cc"); + AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + 
assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Second */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "2"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "bb"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + + /* Forward, back, forward. */ + range_all = AMmapItemsRewound(&range_all); + range_back_all = AMmapItemsRewound(&range_back_all); + /* First */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "1"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "aa"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Third */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "3"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "cc"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Second */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "2"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, 
"bb"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + + /* Forward, forward, forward. */ + range_all = AMmapItemsRewound(&range_all); + /* First */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "1"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "aa"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Second */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "2"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "bb"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Third */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "3"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "cc"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + + /* Back, back, back. 
*/ + range_back_all = AMmapItemsRewound(&range_back_all); + /* Third */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "3"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "cc"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Second */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "2"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "bb"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* First */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "1"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "aa"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); +} + +static void test_map_range_at_back_and_forth_single(void** state) { + AMresultStack* stack = *state; + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMactorId const* const actor_id = AMpush(&stack, + AMgetActor(doc), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id; + + 
AMfree(AMmapPutStr(doc, AM_ROOT, "1", "a")); + AMfree(AMmapPutStr(doc, AM_ROOT, "2", "b")); + AMfree(AMmapPutStr(doc, AM_ROOT, "3", "c")); + + AMchangeHashes const heads = AMpush(&stack, + AMgetHeads(doc), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + + /* Forward, back, back. */ + AMmapItems range_all = AMpush(&stack, + AMmapRange(doc, AM_ROOT, NULL, NULL, &heads), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + /* First */ + AMmapItem const* next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "1"); + AMvalue next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "a"); + AMobjId const* next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + AMmapItems range_back_all = AMmapItemsReversed(&range_all); + range_back_all = AMmapItemsRewound(&range_back_all); + AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "3"); + AMvalue next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "c"); + AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Second */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "2"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "b"); + next_back_obj_id = AMmapItemObjId(next_back); + 
assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + + /* Forward, back, forward. */ + range_all = AMmapItemsRewound(&range_all); + range_back_all = AMmapItemsRewound(&range_back_all); + /* First */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "1"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "a"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "3"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "c"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Second */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "2"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "b"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + + /* Forward, forward, forward. 
*/ + range_all = AMmapItemsRewound(&range_all); + /* First */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "1"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "a"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Second */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "2"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "b"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "3"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "c"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + + /* Back, back, back. 
*/ + range_back_all = AMmapItemsRewound(&range_back_all); + /* Third */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "3"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "c"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Second */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "2"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "b"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* First */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "1"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "a"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); +} + +static void test_map_range_at_back_and_forth_double(void** state) { + AMresultStack* stack = *state; + AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMactorId const* const actor_id1= AMpush(&stack, + AMactorIdInitBytes("\0", 1), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id; + 
AMfree(AMsetActor(doc1, actor_id1)); + + AMfree(AMmapPutStr(doc1, AM_ROOT, "1", "a")); + AMfree(AMmapPutStr(doc1, AM_ROOT, "2", "b")); + AMfree(AMmapPutStr(doc1, AM_ROOT, "3", "c")); + + /* The second actor should win all conflicts here. */ + AMdoc* const doc2 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMactorId const* const actor_id2= AMpush(&stack, + AMactorIdInitBytes("\1", 1), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id; + AMfree(AMsetActor(doc2, actor_id2)); + AMfree(AMmapPutStr(doc2, AM_ROOT, "1", "aa")); + AMfree(AMmapPutStr(doc2, AM_ROOT, "2", "bb")); + AMfree(AMmapPutStr(doc2, AM_ROOT, "3", "cc")); + + AMfree(AMmerge(doc1, doc2)); + AMchangeHashes const heads = AMpush(&stack, + AMgetHeads(doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + + /* Forward, back, back. */ + AMmapItems range_all = AMpush(&stack, + AMmapRange(doc1, AM_ROOT, NULL, NULL, &heads), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + /* First */ + AMmapItem const* next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "1"); + AMvalue next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "aa"); + AMobjId const* next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Third */ + AMmapItems range_back_all = AMmapItemsReversed(&range_all); + range_back_all = AMmapItemsRewound(&range_back_all); + AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "3"); + AMvalue next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "cc"); + AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); + 
assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Second */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "2"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "bb"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + + /* Forward, back, forward. */ + range_all = AMmapItemsRewound(&range_all); + range_back_all = AMmapItemsRewound(&range_back_all); + /* First */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "1"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "aa"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Third */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "3"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "cc"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Second */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + 
assert_string_equal(AMmapItemKey(next), "2"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "bb"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + + /* Forward, forward, forward. */ + range_all = AMmapItemsRewound(&range_all); + /* First */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "1"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "aa"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Second */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "2"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "bb"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Third */ + next = AMmapItemsNext(&range_all, 1); + assert_non_null(next); + assert_string_equal(AMmapItemKey(next), "3"); + next_value = AMmapItemValue(next); + assert_int_equal(next_value.tag, AM_VALUE_STR); + assert_string_equal(next_value.str, "cc"); + next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + + /* Back, back, back. 
*/ + range_back_all = AMmapItemsRewound(&range_back_all); + /* Third */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "3"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "cc"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Second */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "2"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "bb"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* First */ + next_back = AMmapItemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_string_equal(AMmapItemKey(next_back), "1"); + next_back_value = AMmapItemValue(next_back); + assert_int_equal(next_back_value.tag, AM_VALUE_STR); + assert_string_equal(next_back_value.str, "aa"); + next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); +} + +static void test_get_range_values(void** state) { + AMresultStack* stack = *state; + AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMfree(AMmapPutStr(doc1, AM_ROOT, "aa", "aaa")); + AMfree(AMmapPutStr(doc1, AM_ROOT, "bb", "bbb")); + AMfree(AMmapPutStr(doc1, AM_ROOT, "cc", 
"ccc")); + AMfree(AMmapPutStr(doc1, AM_ROOT, "dd", "ddd")); + AMfree(AMcommit(doc1, NULL, NULL)); + + AMchangeHashes const v1 = AMpush(&stack, + AMgetHeads(doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMdoc* const doc2 = AMpush(&stack, AMfork(doc1), AM_VALUE_DOC, cmocka_cb).doc; + + AMfree(AMmapPutStr(doc1, AM_ROOT, "cc", "ccc V2")); + AMfree(AMcommit(doc1, NULL, NULL)); + + AMfree(AMmapPutStr(doc2, AM_ROOT, "cc", "ccc V3")); + AMfree(AMcommit(doc2, NULL, NULL)); + + AMfree(AMmerge(doc1, doc2)); + + AMmapItems range = AMpush(&stack, + AMmapRange(doc1, AM_ROOT, "b", "d", NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + AMmapItems range_back = AMmapItemsReversed(&range); + assert_int_equal(AMmapItemsSize(&range), 2); + + AMmapItem const* map_item = NULL; + while ((map_item = AMmapItemsNext(&range, 1)) != NULL) { + AMvalue const val1 = AMmapItemValue(map_item); + AMresult* result = AMmapGet(doc1, AM_ROOT, AMmapItemKey(map_item), NULL); + AMvalue const val2 = AMresultValue(result); + assert_true(AMvalueEqual(&val1, &val2)); + assert_non_null(AMmapItemObjId(map_item)); + AMfree(result); + } + + assert_int_equal(AMmapItemsSize(&range_back), 2); + + while ((map_item = AMmapItemsNext(&range_back, 1)) != NULL) { + AMvalue const val1 = AMmapItemValue(map_item); + AMresult* result = AMmapGet(doc1, AM_ROOT, AMmapItemKey(map_item), NULL); + AMvalue const val2 = AMresultValue(result); + assert_true(AMvalueEqual(&val1, &val2)); + assert_non_null(AMmapItemObjId(map_item)); + AMfree(result); + } + + range = AMpush(&stack, + AMmapRange(doc1, AM_ROOT, "b", "d", &v1), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + range_back = AMmapItemsReversed(&range); + assert_int_equal(AMmapItemsSize(&range), 2); + + while ((map_item = AMmapItemsNext(&range, 1)) != NULL) { + AMvalue const val1 = AMmapItemValue(map_item); + AMresult* result = AMmapGet(doc1, AM_ROOT, AMmapItemKey(map_item), &v1); + AMvalue const val2 = AMresultValue(result); + assert_true(AMvalueEqual(&val1, 
&val2)); + assert_non_null(AMmapItemObjId(map_item)); + AMfree(result); + } + + assert_int_equal(AMmapItemsSize(&range_back), 2); + + while ((map_item = AMmapItemsNext(&range_back, 1)) != NULL) { + AMvalue const val1 = AMmapItemValue(map_item); + AMresult* result = AMmapGet(doc1, AM_ROOT, AMmapItemKey(map_item), &v1); + AMvalue const val2 = AMresultValue(result); + assert_true(AMvalueEqual(&val1, &val2)); + assert_non_null(AMmapItemObjId(map_item)); + AMfree(result); + } + + range = AMpush(&stack, + AMmapRange(doc1, AM_ROOT, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + AMobjItems values = AMpush(&stack, + AMobjValues(doc1, AM_ROOT, NULL), + AM_VALUE_OBJ_ITEMS, + cmocka_cb).obj_items; + assert_int_equal(AMmapItemsSize(&range), AMobjItemsSize(&values)); + AMobjItem const* value = NULL; + while ((map_item = AMmapItemsNext(&range, 1)) != NULL && + (value = AMobjItemsNext(&values, 1)) != NULL) { + AMvalue const val1 = AMmapItemValue(map_item); + AMvalue const val2 = AMobjItemValue(value); + assert_true(AMvalueEqual(&val1, &val2)); + assert_true(AMobjIdEqual(AMmapItemObjId(map_item), AMobjItemObjId(value))); + } + + range = AMpush(&stack, + AMmapRange(doc1, AM_ROOT, NULL, NULL, &v1), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + values = AMpush(&stack, + AMobjValues(doc1, AM_ROOT, &v1), + AM_VALUE_OBJ_ITEMS, + cmocka_cb).obj_items; + assert_int_equal(AMmapItemsSize(&range), AMobjItemsSize(&values)); + while ((map_item = AMmapItemsNext(&range, 1)) != NULL && + (value = AMobjItemsNext(&values, 1)) != NULL) { + AMvalue const val1 = AMmapItemValue(map_item); + AMvalue const val2 = AMobjItemValue(value); + assert_true(AMvalueEqual(&val1, &val2)); + assert_true(AMobjIdEqual(AMmapItemObjId(map_item), AMobjItemObjId(value))); + } +} + int run_map_tests(void) { const struct CMUnitTest tests[] = { cmocka_unit_test(test_AMmapIncrement), @@ -219,6 +1136,12 @@ int run_map_tests(void) { cmocka_unit_test(test_AMmapPutStr), 
cmocka_unit_test(test_AMmapPut(Timestamp)), cmocka_unit_test(test_AMmapPut(Uint)), + cmocka_unit_test_setup_teardown(test_range_iter_map, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_single, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_double, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_map_range_at_back_and_forth_single, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_map_range_at_back_and_forth_double, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_get_range_values, setup_stack, teardown_stack), }; return cmocka_run_group_tests(tests, group_setup, group_teardown); diff --git a/automerge-c/test/stack_utils.c b/automerge-c/test/stack_utils.c new file mode 100644 index 00000000..8eb8b72d --- /dev/null +++ b/automerge-c/test/stack_utils.c @@ -0,0 +1,30 @@ +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include "stack_utils.h" + +void cmocka_cb(AMresultStack** stack, uint8_t discriminant) { + assert_non_null(stack); + assert_non_null(*stack); + assert_non_null((*stack)->result); + if (AMresultStatus((*stack)->result) != AM_STATUS_OK) { + fail_msg("%s", AMerrorMessage((*stack)->result)); + } + assert_int_equal(AMresultValue((*stack)->result).tag, discriminant); +} + +int setup_stack(void** state) { + *state = NULL; + return 0; +} + +int teardown_stack(void** state) { + AMresultStack* stack = *state; + AMfreeStack(&stack); + return 0; +} diff --git a/automerge-c/test/stack_utils.h b/automerge-c/test/stack_utils.h new file mode 100644 index 00000000..dd1ff3f3 --- /dev/null +++ b/automerge-c/test/stack_utils.h @@ -0,0 +1,38 @@ +#ifndef STACK_UTILS_H +#define STACK_UTILS_H + +#include + +/* local */ +#include "automerge.h" + +/** + * \brief Reports an error through a cmocka assertion. + * + * \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. 
+ * \param[in] discriminant An `AMvalueVariant` enum tag. + * \pre \p stack` != NULL`. + */ +void cmocka_cb(AMresultStack** stack, uint8_t discriminant); + +/** + * \brief Allocates a result stack for storing the results allocated during one + * or more test cases. + * + * \param[in,out] state A pointer to a pointer to an `AMresultStack` struct. + * \pre \p state` != NULL`. + * \warning The `AMresultStack` struct returned through \p state must be + * deallocated with `teardown_stack()` in order to prevent memory leaks. + */ +int setup_stack(void** state); + +/** + * \brief Deallocates a result stack after deallocating any results that were + * stored in it by one or more test cases. + * + * \param[in] state A pointer to a pointer to an `AMresultStack` struct. + * \pre \p state` != NULL`. + */ +int teardown_stack(void** state); + +#endif /* STACK_UTILS_H */ diff --git a/automerge-c/test/sync_tests.c b/automerge-c/test/sync_tests.c index 92076bac..58e8ff6b 100644 --- a/automerge-c/test/sync_tests.c +++ b/automerge-c/test/sync_tests.c @@ -9,39 +9,42 @@ /* local */ #include "automerge.h" +#include "stack_utils.h" typedef struct { - AMresult* doc1_result; + AMresultStack* stack; AMdoc* doc1; - AMresult* doc2_result; AMdoc* doc2; - AMresult* sync_state1_result; AMsyncState* sync_state1; - AMresult* sync_state2_result; AMsyncState* sync_state2; } TestState; static int setup(void** state) { - TestState* test_state = calloc(1, sizeof(TestState)); - test_state->doc1_result = AMcreate(); - test_state->doc1 = AMresultValue(test_state->doc1_result).doc; - test_state->doc2_result = AMcreate(); - test_state->doc2 = AMresultValue(test_state->doc2_result).doc; - test_state->sync_state1_result = AMsyncStateInit(); - test_state->sync_state1 = AMresultValue(test_state->sync_state1_result).sync_state; - test_state->sync_state2_result = AMsyncStateInit(); - test_state->sync_state2 = AMresultValue(test_state->sync_state2_result).sync_state; + TestState* test_state = test_calloc(1, 
sizeof(TestState)); + test_state->doc1 = AMpush(&test_state->stack, + AMcreate(), + AM_VALUE_DOC, + cmocka_cb).doc; + test_state->doc2 = AMpush(&test_state->stack, + AMcreate(), + AM_VALUE_DOC, + cmocka_cb).doc; + test_state->sync_state1 = AMpush(&test_state->stack, + AMsyncStateInit(), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; + test_state->sync_state2 = AMpush(&test_state->stack, + AMsyncStateInit(), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; *state = test_state; return 0; } static int teardown(void** state) { TestState* test_state = *state; - AMfree(test_state->doc1_result); - AMfree(test_state->doc2_result); - AMfree(test_state->sync_state1_result); - AMfree(test_state->sync_state2_result); - free(test_state); + AMfreeStack(&test_state->stack); + test_free(test_state); return 0; } @@ -88,16 +91,12 @@ static void sync(AMdoc* a, */ static void test_converged_empty_local_doc_reply_no_local_data(void **state) { TestState* test_state = *state; - AMresult* sync_message_result = AMgenerateSyncMessage( - test_state->doc1, test_state->sync_state1 - ); - if (AMresultStatus(sync_message_result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(sync_message_result)); - } - assert_int_equal(AMresultSize(sync_message_result), 1); - AMvalue value = AMresultValue(sync_message_result); - assert_int_equal(value.tag, AM_VALUE_SYNC_MESSAGE); - AMsyncMessage const* sync_message = value.sync_message; + AMsyncMessage const* const sync_message = AMpush(&test_state->stack, + AMgenerateSyncMessage( + test_state->doc1, + test_state->sync_state1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; AMchangeHashes heads = AMsyncMessageHeads(sync_message); assert_int_equal(AMchangeHashesSize(&heads), 0); AMchangeHashes needs = AMsyncMessageNeeds(sync_message); @@ -109,7 +108,6 @@ static void test_converged_empty_local_doc_reply_no_local_data(void **state) { assert_int_equal(AMchangeHashesSize(&last_sync), 0); AMchanges changes = AMsyncMessageChanges(sync_message); 
assert_int_equal(AMchangesSize(&changes), 0); - AMfree(sync_message_result); } /** @@ -118,37 +116,19 @@ static void test_converged_empty_local_doc_reply_no_local_data(void **state) { */ static void test_converged_empty_local_doc_no_reply(void **state) { TestState* test_state = *state; - AMresult* sync_message1_result = AMgenerateSyncMessage( - test_state->doc1, test_state->sync_state1 - ); - if (AMresultStatus(sync_message1_result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(sync_message1_result)); - } - assert_int_equal(AMresultSize(sync_message1_result), 1); - AMvalue value = AMresultValue(sync_message1_result); - assert_int_equal(value.tag, AM_VALUE_SYNC_MESSAGE); - AMsyncMessage const* sync_message1 = value.sync_message; - AMresult* result = AMreceiveSyncMessage( - test_state->doc2, test_state->sync_state2, sync_message1 - ); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(result)); - } - assert_int_equal(AMresultSize(result), 0); - value = AMresultValue(result); - assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(result); - AMresult* sync_message2_result = AMgenerateSyncMessage( - test_state->doc2, test_state->sync_state2 - ); - if (AMresultStatus(sync_message2_result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(sync_message2_result)); - } - assert_int_equal(AMresultSize(sync_message2_result), 0); - value = AMresultValue(sync_message2_result); - assert_int_equal(value.tag, AM_VALUE_VOID); - AMfree(sync_message2_result); - AMfree(sync_message1_result); + AMsyncMessage const* const sync_message1 = AMpush(&test_state->stack, + AMgenerateSyncMessage( + test_state->doc1, + test_state->sync_state1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + AMfree(AMreceiveSyncMessage(test_state->doc2, + test_state->sync_state2, + sync_message1)); + AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2), + AM_VALUE_VOID, + cmocka_cb); } /** @@ -164,34 +144,37 @@ static void 
test_converged_equal_heads_no_reply(void **state) { AMfree(AMlistPutUint(test_state->doc1, AM_ROOT, index, true, index)); AMcommit(test_state->doc1, NULL, &time); } - AMresult* changes_result = AMgetChanges(test_state->doc1, NULL); - AMvalue value = AMresultValue(changes_result); - AMfree(AMapplyChanges(test_state->doc2, &value.changes)); - AMfree(changes_result); + AMchanges const changes = AMpush(&test_state->stack, + AMgetChanges(test_state->doc1, NULL), + AM_VALUE_CHANGES, + cmocka_cb).changes; + AMfree(AMapplyChanges(test_state->doc2, &changes)); assert_true(AMequal(test_state->doc1, test_state->doc2)); /* Generate a naive sync message. */ - AMresult* sync_message1_result = AMgenerateSyncMessage( - test_state->doc1, + AMsyncMessage const* sync_message1 = AMpush(&test_state->stack, + AMgenerateSyncMessage( + test_state->doc1, + test_state->sync_state1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + AMchangeHashes const last_sent_heads = AMsyncStateLastSentHeads( test_state->sync_state1 ); - AMsyncMessage const* sync_message1 = AMresultValue(sync_message1_result).sync_message; - AMchangeHashes last_sent_heads = AMsyncStateLastSentHeads(test_state->sync_state1); - AMresult* heads_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads = AMresultValue(heads_result).change_hashes; + AMchangeHashes const heads = AMpush(&test_state->stack, + AMgetHeads(test_state->doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; assert_int_equal(AMchangeHashesCmp(&last_sent_heads, &heads), 0); - AMfree(heads_result); /* Heads are equal so this message should be void. 
*/ - AMfree(AMreceiveSyncMessage( - test_state->doc2, test_state->sync_state2, sync_message1 - )); - AMfree(sync_message1_result); - AMresult* sync_message2_result = AMgenerateSyncMessage( - test_state->doc2, test_state->sync_state2 - ); - assert_int_equal(AMresultValue(sync_message2_result).tag, AM_VALUE_VOID); - AMfree(sync_message2_result); + AMfree(AMreceiveSyncMessage(test_state->doc2, + test_state->sync_state2, + sync_message1)); + AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2), + AM_VALUE_VOID, + cmocka_cb); } /** @@ -278,12 +261,14 @@ static void test_converged_works_with_prior_sync_state(void **state) { static void test_converged_no_message_once_synced(void **state) { /* Create & synchronize two nodes. */ TestState* test_state = *state; - AMresult* actor_id_result = AMactorIdInitStr("abc123"); - AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); - actor_id_result = AMactorIdInitStr("def456"); - AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); + AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMactorIdInitStr("abc123"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMactorIdInitStr("def456"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); time_t const time = 0; for (size_t value = 0; value != 5; ++value) { @@ -294,59 +279,64 @@ static void test_converged_no_message_once_synced(void **state) { } /* The first node reports what it has. 
*/ - AMresult* message_result = AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1); - AMsyncMessage const* message = AMresultValue(message_result).sync_message; + AMsyncMessage const* message = AMpush(&test_state->stack, + AMgenerateSyncMessage( + test_state->doc1, + test_state->sync_state1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; /* The second node receives that message and sends changes along with what * it has. */ AMfree(AMreceiveSyncMessage(test_state->doc2, - test_state->sync_state2, - message)); - AMfree(message_result); - message_result = AMgenerateSyncMessage(test_state->doc2, - test_state->sync_state2); - message = AMresultValue(message_result).sync_message; + test_state->sync_state2, + message)); + message = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc2, + test_state->sync_state2), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; AMchanges message_changes = AMsyncMessageChanges(message); assert_int_equal(AMchangesSize(&message_changes), 5); /* The first node receives the changes and replies with the changes it now * knows that the second node needs. */ AMfree(AMreceiveSyncMessage(test_state->doc1, - test_state->sync_state1, - message)); - AMfree(message_result); - message_result = AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1); - message = AMresultValue(message_result).sync_message; + test_state->sync_state1, + message)); + message = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; message_changes = AMsyncMessageChanges(message); assert_int_equal(AMchangesSize(&message_changes), 5); /* The second node applies the changes and sends confirmation ending the * exchange. 
*/ AMfree(AMreceiveSyncMessage(test_state->doc2, - test_state->sync_state2, - message)); - AMfree(message_result); - message_result = AMgenerateSyncMessage(test_state->doc2, - test_state->sync_state2); - message = AMresultValue(message_result).sync_message; + test_state->sync_state2, + message)); + message = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc2, + test_state->sync_state2), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; /* The first node receives the message and has nothing more to say. */ AMfree(AMreceiveSyncMessage(test_state->doc1, - test_state->sync_state1, - message)); - AMfree(message_result); - message_result = AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1); - assert_int_equal(AMresultValue(message_result).tag, AM_VALUE_VOID); - AMfree(message_result); + test_state->sync_state1, + message)); + AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1), + AM_VALUE_VOID, + cmocka_cb); /* The second node also has nothing left to say. */ - message_result = AMgenerateSyncMessage(test_state->doc2, - test_state->sync_state2); - assert_int_equal(AMresultValue(message_result).tag, AM_VALUE_VOID); - AMfree(message_result); + AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2), + AM_VALUE_VOID, + cmocka_cb); } /** @@ -356,12 +346,14 @@ static void test_converged_no_message_once_synced(void **state) { static void test_converged_allow_simultaneous_messages(void **state) { /* Create & synchronize two nodes. 
*/ TestState* test_state = *state; - AMresult* actor_id_result = AMactorIdInitStr("abc123"); - AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); - actor_id_result = AMactorIdInitStr("def456"); - AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); + AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMactorIdInitStr("abc123"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMactorIdInitStr("def456"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); time_t const time = 0; for (size_t value = 0; value != 5; ++value) { @@ -370,20 +362,30 @@ static void test_converged_allow_simultaneous_messages(void **state) { AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "y", value)); AMcommit(test_state->doc2, NULL, &time); } - AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; AMbyteSpan head1 = AMchangeHashesNext(&heads1, 1); - AMresult* heads2_result = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; AMbyteSpan head2 = AMchangeHashesNext(&heads2, 1); /* Both sides report what they have but have no shared peer state. 
*/ - AMresult* msg1to2_result = AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1); - AMsyncMessage const* msg1to2 = AMresultValue(msg1to2_result).sync_message; - AMresult* msg2to1_result = AMgenerateSyncMessage(test_state->doc2, - test_state->sync_state2); - AMsyncMessage const* msg2to1 = AMresultValue(msg2to1_result).sync_message; + AMsyncMessage const* msg1to2 = AMpush(&test_state->stack, + AMgenerateSyncMessage( + test_state->doc1, + test_state->sync_state1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + AMsyncMessage const* msg2to1 = AMpush(&test_state->stack, + AMgenerateSyncMessage( + test_state->doc2, + test_state->sync_state2), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; AMchanges msg1to2_changes = AMsyncMessageChanges(msg1to2); assert_int_equal(AMchangesSize(&msg1to2_changes), 0); AMsyncHaves msg1to2_haves = AMsyncMessageHaves(msg1to2); @@ -400,99 +402,110 @@ static void test_converged_allow_simultaneous_messages(void **state) { /* Both nodes receive messages from each other and update their * synchronization states. */ AMfree(AMreceiveSyncMessage(test_state->doc1, - test_state->sync_state1, - msg2to1)); - AMfree(msg2to1_result); + test_state->sync_state1, + msg2to1)); AMfree(AMreceiveSyncMessage(test_state->doc2, - test_state->sync_state2, - msg1to2)); - AMfree(msg1to2_result); + test_state->sync_state2, + msg1to2)); /* Now both reply with their local changes that the other lacks * (standard warning that 1% of the time this will result in a "needs" * message). 
*/ - msg1to2_result = AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1); - msg1to2 = AMresultValue(msg1to2_result).sync_message; + msg1to2 = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; msg1to2_changes = AMsyncMessageChanges(msg1to2); assert_int_equal(AMchangesSize(&msg1to2_changes), 5); - msg2to1_result = AMgenerateSyncMessage(test_state->doc2, - test_state->sync_state2); - msg2to1 = AMresultValue(msg2to1_result).sync_message; + msg2to1 = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc2, + test_state->sync_state2), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; msg2to1_changes = AMsyncMessageChanges(msg2to1); assert_int_equal(AMchangesSize(&msg2to1_changes), 5); /* Both should now apply the changes. */ AMfree(AMreceiveSyncMessage(test_state->doc1, - test_state->sync_state1, - msg2to1)); - AMfree(msg2to1_result); - AMresult* missing_deps_result = AMgetMissingDeps(test_state->doc1, NULL); - AMchangeHashes missing_deps = AMresultValue(missing_deps_result).change_hashes; + test_state->sync_state1, + msg2to1)); + AMchangeHashes missing_deps = AMpush(&test_state->stack, + AMgetMissingDeps(test_state->doc1, + NULL), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; assert_int_equal(AMchangeHashesSize(&missing_deps), 0); - AMfree(missing_deps_result); - AMresult* map_value_result = AMmapGet(test_state->doc1, AM_ROOT, "x"); - assert_int_equal(AMresultValue(map_value_result).uint, 4); - AMfree(map_value_result); - map_value_result = AMmapGet(test_state->doc1, AM_ROOT, "y"); - assert_int_equal(AMresultValue(map_value_result).uint, 4); - AMfree(map_value_result); + assert_int_equal(AMpush(&test_state->stack, + AMmapGet(test_state->doc1, AM_ROOT, "x", NULL), + AM_VALUE_UINT, + cmocka_cb).uint, 4); + assert_int_equal(AMpush(&test_state->stack, + AMmapGet(test_state->doc1, AM_ROOT, "y", NULL), + AM_VALUE_UINT, + cmocka_cb).uint, 4); 
AMfree(AMreceiveSyncMessage(test_state->doc2, - test_state->sync_state2, - msg1to2)); - AMfree(msg1to2_result); - missing_deps_result = AMgetMissingDeps(test_state->doc2, NULL); - missing_deps = AMresultValue(missing_deps_result).change_hashes; + test_state->sync_state2, + msg1to2)); + missing_deps = AMpush(&test_state->stack, + AMgetMissingDeps(test_state->doc2, NULL), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; assert_int_equal(AMchangeHashesSize(&missing_deps), 0); - AMfree(missing_deps_result); - map_value_result = AMmapGet(test_state->doc2, AM_ROOT, "x"); - assert_int_equal(AMresultValue(map_value_result).uint, 4); - AMfree(map_value_result); - map_value_result = AMmapGet(test_state->doc2, AM_ROOT, "y"); - assert_int_equal(AMresultValue(map_value_result).uint, 4); - AMfree(map_value_result); + assert_int_equal(AMpush(&test_state->stack, + AMmapGet(test_state->doc2, AM_ROOT, "x", NULL), + AM_VALUE_UINT, + cmocka_cb).uint, 4); + assert_int_equal(AMpush(&test_state->stack, + AMmapGet(test_state->doc2, AM_ROOT, "y", NULL), + AM_VALUE_UINT, + cmocka_cb).uint, 4); /* The response acknowledges that the changes were received and sends no * further changes. 
*/ - msg1to2_result = AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1); - msg1to2 = AMresultValue(msg1to2_result).sync_message; + msg1to2 = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; msg1to2_changes = AMsyncMessageChanges(msg1to2); assert_int_equal(AMchangesSize(&msg1to2_changes), 0); - msg2to1_result = AMgenerateSyncMessage(test_state->doc2, - test_state->sync_state2); - msg2to1 = AMresultValue(msg2to1_result).sync_message; + msg2to1 = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc2, + test_state->sync_state2), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; msg2to1_changes = AMsyncMessageChanges(msg2to1); assert_int_equal(AMchangesSize(&msg2to1_changes), 0); /* After receiving acknowledgements their shared heads should be equal. */ AMfree(AMreceiveSyncMessage(test_state->doc1, - test_state->sync_state1, - msg2to1)); - AMfree(msg2to1_result); + test_state->sync_state1, + msg2to1)); AMfree(AMreceiveSyncMessage(test_state->doc2, - test_state->sync_state2, - msg1to2)); - AMfree(msg1to2_result); + test_state->sync_state2, + msg1to2)); /* They're synchronized so no more messages are required. */ - msg1to2_result = AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1); - assert_int_equal(AMresultValue(msg1to2_result).tag, AM_VALUE_VOID); - AMfree(msg1to2_result); - msg2to1_result = AMgenerateSyncMessage(test_state->doc2, - test_state->sync_state2); - assert_int_equal(AMresultValue(msg2to1_result).tag, AM_VALUE_VOID); - AMfree(msg2to1_result); + AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1), + AM_VALUE_VOID, + cmocka_cb); + AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2), + AM_VALUE_VOID, + cmocka_cb); /* If we make one more change and start synchronizing then its "last * sync" property should be updated. 
*/ AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 5)); AMcommit(test_state->doc1, NULL, &time); - msg1to2_result = AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1); - msg1to2 = AMresultValue(msg1to2_result).sync_message; + msg1to2 = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; msg1to2_haves = AMsyncMessageHaves(msg1to2); msg1to2_have = AMsyncHavesNext(&msg1to2_haves, 1); msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); @@ -502,9 +515,6 @@ static void test_converged_allow_simultaneous_messages(void **state) { msg1to2_last_sync_next = AMchangeHashesNext(&msg1to2_last_sync, 1); assert_int_equal(msg1to2_last_sync_next.count, head2.count); assert_memory_equal(msg1to2_last_sync_next.src, head2.src, head2.count); - AMfree(heads1_result); - AMfree(heads2_result); - AMfree(msg1to2_result); } /** @@ -513,18 +523,22 @@ static void test_converged_allow_simultaneous_messages(void **state) { */ static void test_converged_assume_sent_changes_were_received(void **state) { TestState* test_state = *state; - AMresult* actor_id_result = AMactorIdInitStr("01234567"); - AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); - actor_id_result = AMactorIdInitStr("89abcdef"); - AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); + AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); - AMresult* items_result = AMmapPutObject(test_state->doc1, - AM_ROOT, - "items", - AM_OBJ_TYPE_LIST); - AMobjId const* items = AMresultValue(items_result).obj_id; + AMobjId const* items = AMpush(&test_state->stack, + AMmapPutObject(test_state->doc1, + 
AM_ROOT, + "items", + AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; time_t const time = 0; AMcommit(test_state->doc1, NULL, &time); sync(test_state->doc1, @@ -534,32 +548,34 @@ static void test_converged_assume_sent_changes_were_received(void **state) { AMfree(AMlistPutStr(test_state->doc1, items, 0, true, "x")); AMcommit(test_state->doc1, NULL, &time); - AMresult* message_result = AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1); - AMsyncMessage const* message = AMresultValue(message_result).sync_message; + AMsyncMessage const* message = AMpush(&test_state->stack, + AMgenerateSyncMessage( + test_state->doc1, + test_state->sync_state1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; AMchanges message_changes = AMsyncMessageChanges(message); assert_int_equal(AMchangesSize(&message_changes), 1); - AMfree(message_result); AMfree(AMlistPutStr(test_state->doc1, items, 1, true, "y")); AMcommit(test_state->doc1, NULL, &time); - message_result = AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1); - message = AMresultValue(message_result).sync_message; + message = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; message_changes = AMsyncMessageChanges(message); assert_int_equal(AMchangesSize(&message_changes), 1); - AMfree(message_result); AMfree(AMlistPutStr(test_state->doc1, items, 2, true, "z")); AMcommit(test_state->doc1, NULL, &time); - message_result = AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1); - message = AMresultValue(message_result).sync_message; + message = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->doc1, + test_state->sync_state1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; message_changes = AMsyncMessageChanges(message); assert_int_equal(AMchangesSize(&message_changes), 1); - AMfree(message_result); - - AMfree(items_result); } /** @@ -607,12 +623,14 @@ static 
void test_diverged_works_without_prior_sync_state(void **state) { /* Create two peers both with divergent commits. */ TestState* test_state = *state; - AMresult* actor_id_result = AMactorIdInitStr("01234567"); - AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); - actor_id_result = AMactorIdInitStr("89abcdef"); - AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); + AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); time_t const time = 0; for (size_t value = 0; value != 10; ++value) { AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); @@ -638,13 +656,15 @@ static void test_diverged_works_without_prior_sync_state(void **state) { test_state->doc2, test_state->sync_state1, test_state->sync_state2); - AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; - AMresult* heads2_result = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMfree(heads2_result); - AMfree(heads1_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); } @@ -661,12 +681,14 @@ static void test_diverged_works_with_prior_sync_state(void **state) { /* Create two peers both with divergent commits. 
*/ TestState* test_state = *state; - AMresult* actor_id_result = AMactorIdInitStr("01234567"); - AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); - actor_id_result = AMactorIdInitStr("89abcdef"); - AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); + AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); time_t const time = 0; for (size_t value = 0; value != 10; ++value) { AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); @@ -685,28 +707,36 @@ static void test_diverged_works_with_prior_sync_state(void **state) { AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "x", value)); AMcommit(test_state->doc2, NULL, &time); } - AMresult* encoded_result = AMsyncStateEncode(test_state->sync_state1); - AMbyteSpan encoded = AMresultValue(encoded_result).bytes; - AMresult* sync_state1_result = AMsyncStateDecode(encoded.src, encoded.count); - AMfree(encoded_result); - AMsyncState* sync_state1 = AMresultValue(sync_state1_result).sync_state; - encoded_result = AMsyncStateEncode(test_state->sync_state2); - encoded = AMresultValue(encoded_result).bytes; - AMresult* sync_state2_result = AMsyncStateDecode(encoded.src, encoded.count); - AMfree(encoded_result); - AMsyncState* sync_state2 = AMresultValue(sync_state2_result).sync_state; + AMbyteSpan encoded = AMpush(&test_state->stack, + AMsyncStateEncode(test_state->sync_state1), + AM_VALUE_BYTES, + cmocka_cb).bytes; + AMsyncState* sync_state1 = AMpush(&test_state->stack, + AMsyncStateDecode(encoded.src, + encoded.count), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; + encoded = AMpush(&test_state->stack, + AMsyncStateEncode(test_state->sync_state2), + AM_VALUE_BYTES, + cmocka_cb).bytes; + 
AMsyncState* sync_state2 = AMpush(&test_state->stack, + AMsyncStateDecode(encoded.src, + encoded.count), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; assert_false(AMequal(test_state->doc1, test_state->doc2)); sync(test_state->doc1, test_state->doc2, sync_state1, sync_state2); - AMfree(sync_state2_result); - AMfree(sync_state1_result); - AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; - AMresult* heads2_result = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMfree(heads2_result); - AMfree(heads1_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); } @@ -716,12 +746,14 @@ static void test_diverged_works_with_prior_sync_state(void **state) { */ static void test_diverged_ensure_not_empty_after_sync(void **state) { TestState* test_state = *state; - AMresult* actor_id_result = AMactorIdInitStr("01234567"); - AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); - actor_id_result = AMactorIdInitStr("89abcdef"); - AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); + AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); time_t const time = 0; for (size_t value = 0; value != 3; ++value) { @@ -733,13 +765,14 @@ static void 
test_diverged_ensure_not_empty_after_sync(void **state) { test_state->sync_state1, test_state->sync_state2); - AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; AMchangeHashes shared_heads1 = AMsyncStateSharedHeads(test_state->sync_state1); assert_int_equal(AMchangeHashesCmp(&shared_heads1, &heads1), 0); AMchangeHashes shared_heads2 = AMsyncStateSharedHeads(test_state->sync_state2); assert_int_equal(AMchangeHashesCmp(&shared_heads2, &heads1), 0); - AMfree(heads1_result); } /** @@ -755,12 +788,14 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { * We want to successfully sync (n1) with (r), even though (n1) believes * it's talking to (n2). */ TestState* test_state = *state; - AMresult* actor_id_result = AMactorIdInitStr("01234567"); - AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); - actor_id_result = AMactorIdInitStr("89abcdef"); - AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); + AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); /* n1 makes three changes which we synchronize to n2. */ time_t const time = 0; @@ -774,13 +809,19 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { test_state->sync_state2); /* Save a copy of n2 as "r" to simulate recovering from a crash. 
*/ - AMresult* r_result = AMdup(test_state->doc2); - AMdoc* r = AMresultValue(r_result).doc; - AMresult* encoded_result = AMsyncStateEncode(test_state->sync_state2); - AMbyteSpan encoded = AMresultValue(encoded_result).bytes; - AMresult* sync_state_resultr = AMsyncStateDecode(encoded.src, encoded.count); - AMfree(encoded_result); - AMsyncState* sync_stater = AMresultValue(sync_state_resultr).sync_state; + AMdoc* r = AMpush(&test_state->stack, + AMdup(test_state->doc2), + AM_VALUE_DOC, + cmocka_cb).doc; + AMbyteSpan encoded = AMpush(&test_state->stack, + AMsyncStateEncode(test_state->sync_state2), + AM_VALUE_BYTES, + cmocka_cb).bytes; + AMsyncState* sync_stater = AMpush(&test_state->stack, + AMsyncStateDecode(encoded.src, + encoded.count), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; /* Synchronize another few commits. */ for (size_t value = 3; value != 6; ++value) { AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); @@ -791,13 +832,15 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { test_state->sync_state1, test_state->sync_state2); /* Everyone should be on the same page here. 
*/ - AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; - AMresult* heads2_result = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMfree(heads2_result); - AMfree(heads1_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); /* Now make a few more changes and then attempt to synchronize the @@ -806,34 +849,38 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); AMcommit(test_state->doc1, NULL, &time); } - heads1_result = AMgetHeads(test_state->doc1); - heads1 = AMresultValue(heads1_result).change_hashes; - AMresult* heads_resultr = AMgetHeads(r); - AMchangeHashes headsr = AMresultValue(heads_resultr).change_hashes; + heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes headsr = AMpush(&test_state->stack, + AMgetHeads(r), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; assert_int_not_equal(AMchangeHashesCmp(&heads1, &headsr), 0); - AMfree(heads_resultr); - AMfree(heads1_result); assert_false(AMequal(test_state->doc1, r)); - AMresult* map_value_result = AMmapGet(test_state->doc1, AM_ROOT, "x"); - assert_int_equal(AMresultValue(map_value_result).uint, 8); - AMfree(map_value_result); - map_value_result = AMmapGet(r, AM_ROOT, "x"); - assert_int_equal(AMresultValue(map_value_result).uint, 2); - AMfree(map_value_result); + assert_int_equal(AMpush(&test_state->stack, + AMmapGet(test_state->doc1, AM_ROOT, "x", NULL), + 
AM_VALUE_UINT, + cmocka_cb).uint, 8); + assert_int_equal(AMpush(&test_state->stack, + AMmapGet(r, AM_ROOT, "x", NULL), + AM_VALUE_UINT, + cmocka_cb).uint, 2); sync(test_state->doc1, r, test_state->sync_state1, sync_stater); - AMfree(sync_state_resultr); - heads1_result = AMgetHeads(test_state->doc1); - heads1 = AMresultValue(heads1_result).change_hashes; - heads_resultr = AMgetHeads(r); - headsr = AMresultValue(heads_resultr).change_hashes; + heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + headsr = AMpush(&test_state->stack, + AMgetHeads(r), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &headsr), 0); - AMfree(heads_resultr); - AMfree(heads1_result); assert_true(AMequal(test_state->doc1, r)); - AMfree(r_result); } /** @@ -842,12 +889,14 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { */ static void test_diverged_resync_after_data_loss_without_disconnection(void **state) { TestState* test_state = *state; - AMresult* actor_id_result = AMactorIdInitStr("01234567"); - AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); - actor_id_result = AMactorIdInitStr("89abcdef"); - AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); + AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); /* n1 makes three changes which we synchronize to n2. 
*/ time_t const time = 0; @@ -860,40 +909,47 @@ static void test_diverged_resync_after_data_loss_without_disconnection(void **st test_state->sync_state1, test_state->sync_state2); - AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; - AMresult* heads2_result = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMfree(heads2_result); - AMfree(heads1_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); - AMresult* doc2_after_data_loss_result = AMcreate(); - AMdoc* doc2_after_data_loss = AMresultValue(doc2_after_data_loss_result).doc; - actor_id_result = AMactorIdInitStr("89abcdef"); - AMfree(AMsetActor(doc2_after_data_loss, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); + AMdoc* doc2_after_data_loss = AMpush(&test_state->stack, + AMcreate(), + AM_VALUE_DOC, + cmocka_cb).doc; + AMfree(AMsetActor(doc2_after_data_loss, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); /* "n2" now has no data, but n1 still thinks it does. Note we don't do * decodeSyncState(encodeSyncState(s1)) in order to simulate data loss * without disconnecting. 
*/ - AMresult* sync_state2_after_data_loss_result = AMsyncStateInit(); - AMsyncState* sync_state2_after_data_loss = AMresultValue(sync_state2_after_data_loss_result).sync_state; + AMsyncState* sync_state2_after_data_loss = AMpush(&test_state->stack, + AMsyncStateInit(), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; sync(test_state->doc1, doc2_after_data_loss, test_state->sync_state1, sync_state2_after_data_loss); - heads1_result = AMgetHeads(test_state->doc1); - heads1 = AMresultValue(heads1_result).change_hashes; - heads2_result = AMgetHeads(doc2_after_data_loss); - heads2 = AMresultValue(heads2_result).change_hashes; + heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + heads2 = AMpush(&test_state->stack, + AMgetHeads(doc2_after_data_loss), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMfree(heads2_result); - AMfree(heads1_result); assert_true(AMequal(test_state->doc1, doc2_after_data_loss)); - AMfree(sync_state2_after_data_loss_result); - AMfree(doc2_after_data_loss_result); } /** @@ -902,23 +958,32 @@ static void test_diverged_resync_after_data_loss_without_disconnection(void **st */ static void test_diverged_handles_concurrent_changes(void **state) { TestState* test_state = *state; - AMresult* actor_id_result = AMactorIdInitStr("01234567"); - AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); - actor_id_result = AMactorIdInitStr("89abcdef"); - AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); - AMresult* doc3_result = AMcreate(); - AMdoc* doc3 = AMresultValue(doc3_result).doc; - actor_id_result = AMactorIdInitStr("fedcba98"); - AMfree(AMsetActor(doc3, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); + AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + 
AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMdoc* doc3 = AMpush(&test_state->stack, + AMcreate(), + AM_VALUE_DOC, + cmocka_cb).doc; + AMfree(AMsetActor(doc3, AMpush(&test_state->stack, + AMactorIdInitStr("fedcba98"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); AMsyncState* sync_state12 = test_state->sync_state1; AMsyncState* sync_state21 = test_state->sync_state2; - AMresult* sync_state23_result = AMsyncStateInit(); - AMsyncState* sync_state23 = AMresultValue(sync_state23_result).sync_state; - AMresult* sync_state32_result = AMsyncStateInit(); - AMsyncState* sync_state32 = AMresultValue(sync_state32_result).sync_state; + AMsyncState* sync_state23 = AMpush(&test_state->stack, + AMsyncStateInit(), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; + AMsyncState* sync_state32 = AMpush(&test_state->stack, + AMsyncStateInit(), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; /* Change 1 is known to all three nodes. */ time_t const time = 0; @@ -941,26 +1006,25 @@ static void test_diverged_handles_concurrent_changes(void **state) { AMcommit(doc3, NULL, &time); /* Apply n3's latest change to n2. */ - AMresult* changes_result = AMgetLastLocalChange(doc3); - AMchanges changes = AMresultValue(changes_result).changes; + AMchanges changes = AMpush(&test_state->stack, + AMgetLastLocalChange(doc3), + AM_VALUE_CHANGES, + cmocka_cb).changes; AMfree(AMapplyChanges(test_state->doc2, &changes)); - AMfree(changes_result); /* Now sync n1 and n2. n3's change is concurrent to n1 and n2's last sync * heads. 
*/ sync(test_state->doc1, test_state->doc2, sync_state12, sync_state21); - AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; - AMresult* heads2_result = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMfree(heads2_result); - AMfree(heads1_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); - - AMfree(sync_state32_result); - AMfree(sync_state23_result); - AMfree(doc3_result); } /** @@ -969,25 +1033,31 @@ static void test_diverged_handles_concurrent_changes(void **state) { */ static void test_diverged_handles_histories_of_branching_and_merging(void **state) { TestState* test_state = *state; - AMresult* actor_id_result = AMactorIdInitStr("01234567"); - AMfree(AMsetActor(test_state->doc1, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); - actor_id_result = AMactorIdInitStr("89abcdef"); - AMfree(AMsetActor(test_state->doc2, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); - AMresult* doc3_result = AMcreate(); - AMdoc* doc3 = AMresultValue(doc3_result).doc; - actor_id_result = AMactorIdInitStr("fedcba98"); - AMfree(AMsetActor(doc3, AMresultValue(actor_id_result).actor_id)); - AMfree(actor_id_result); + AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMdoc* doc3 = AMpush(&test_state->stack, + AMcreate(), + AM_VALUE_DOC, + 
cmocka_cb).doc; + AMfree(AMsetActor(doc3, AMpush(&test_state->stack, + AMactorIdInitStr("fedcba98"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); time_t const time = 0; AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 0)); AMcommit(test_state->doc1, NULL, &time); - AMresult* changes_result = AMgetLastLocalChange(test_state->doc1); - AMchanges changes = AMresultValue(changes_result).changes; + AMchanges changes = AMpush(&test_state->stack, + AMgetLastLocalChange(test_state->doc1), + AM_VALUE_CHANGES, + cmocka_cb).changes; AMfree(AMapplyChanges(test_state->doc2, &changes)); AMfree(AMapplyChanges(doc3, &changes)); - AMfree(changes_result); AMfree(AMmapPutUint(doc3, AM_ROOT, "x", 1)); AMcommit(doc3, NULL, &time); @@ -1003,14 +1073,16 @@ static void test_diverged_handles_histories_of_branching_and_merging(void **stat AMcommit(test_state->doc1, NULL, &time); AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "n2", value)); AMcommit(test_state->doc2, NULL, &time); - AMresult* changes1_result = AMgetLastLocalChange(test_state->doc1); - AMchanges changes1 = AMresultValue(changes1_result).changes; - AMresult* changes2_result = AMgetLastLocalChange(test_state->doc2); - AMchanges changes2 = AMresultValue(changes2_result).changes; + AMchanges changes1 = AMpush(&test_state->stack, + AMgetLastLocalChange(test_state->doc1), + AM_VALUE_CHANGES, + cmocka_cb).changes; + AMchanges changes2 = AMpush(&test_state->stack, + AMgetLastLocalChange(test_state->doc2), + AM_VALUE_CHANGES, + cmocka_cb).changes; AMfree(AMapplyChanges(test_state->doc1, &changes2)); - AMfree(changes2_result); AMfree(AMapplyChanges(test_state->doc2, &changes1)); - AMfree(changes1_result); } sync(test_state->doc1, @@ -1020,10 +1092,11 @@ static void test_diverged_handles_histories_of_branching_and_merging(void **stat /* Having n3's last change concurrent to the last sync heads forces us into * the slower code path. 
*/ - AMresult* changes3_result = AMgetLastLocalChange(doc3); - AMchanges changes3 = AMresultValue(changes3_result).changes; + AMchanges changes3 = AMpush(&test_state->stack, + AMgetLastLocalChange(doc3), + AM_VALUE_CHANGES, + cmocka_cb).changes; AMfree(AMapplyChanges(test_state->doc2, &changes3)); - AMfree(changes3_result); AMfree(AMmapPutStr(test_state->doc1, AM_ROOT, "n1", "final")); AMcommit(test_state->doc1, NULL, &time); AMfree(AMmapPutStr(test_state->doc2, AM_ROOT, "n2", "final")); @@ -1033,16 +1106,16 @@ static void test_diverged_handles_histories_of_branching_and_merging(void **stat test_state->doc2, test_state->sync_state1, test_state->sync_state2); - AMresult* heads1_result = AMgetHeads(test_state->doc1); - AMchangeHashes heads1 = AMresultValue(heads1_result).change_hashes; - AMresult* heads2_result = AMgetHeads(test_state->doc2); - AMchangeHashes heads2 = AMresultValue(heads2_result).change_hashes; + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->doc2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - AMfree(heads2_result); - AMfree(heads1_result); assert_true(AMequal(test_state->doc1, test_state->doc2)); - - AMfree(doc3_result); } int run_sync_tests(void) { From 23fbb4917a6c79554f1b86b0365f182ba3521697 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 25 Jul 2022 01:04:35 -0700 Subject: [PATCH 510/730] Replace `_INCLUDED` with `_H` as the suffix for include guards in C headers like the one generated by cbindgen. 
--- automerge-c/cmake/config.h.in | 6 +++--- automerge-c/test/macro_utils.h | 6 +++--- automerge-c/test/str_utils.h | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/automerge-c/cmake/config.h.in b/automerge-c/cmake/config.h.in index 08643fc5..44ba5213 100644 --- a/automerge-c/cmake/config.h.in +++ b/automerge-c/cmake/config.h.in @@ -1,5 +1,5 @@ -#ifndef @SYMBOL_PREFIX@_CONFIG_INCLUDED -#define @SYMBOL_PREFIX@_CONFIG_INCLUDED +#ifndef @SYMBOL_PREFIX@_CONFIG_H +#define @SYMBOL_PREFIX@_CONFIG_H /* This header is auto-generated by CMake. */ @@ -11,4 +11,4 @@ #define @SYMBOL_PREFIX@_PATCH_VERSION (@SYMBOL_PREFIX@_VERSION % 100) -#endif /* @SYMBOL_PREFIX@_CONFIG_INCLUDED */ +#endif /* @SYMBOL_PREFIX@_CONFIG_H */ diff --git a/automerge-c/test/macro_utils.h b/automerge-c/test/macro_utils.h index f9ec400c..2f7bf780 100644 --- a/automerge-c/test/macro_utils.h +++ b/automerge-c/test/macro_utils.h @@ -1,5 +1,5 @@ -#ifndef MACRO_UTILS_INCLUDED -#define MACRO_UTILS_INCLUDED +#ifndef MACRO_UTILS_H +#define MACRO_UTILS_H /* local */ #include "automerge.h" @@ -21,4 +21,4 @@ AMvalueVariant AMvalue_discriminant(char const* suffix); */ AMobjType AMobjType_tag(char const* obj_type_label); -#endif +#endif /* MACRO_UTILS_H */ diff --git a/automerge-c/test/str_utils.h b/automerge-c/test/str_utils.h index 0fc3db62..b9985683 100644 --- a/automerge-c/test/str_utils.h +++ b/automerge-c/test/str_utils.h @@ -1,5 +1,5 @@ -#ifndef STR_UTILS_INCLUDED -#define STR_UTILS_INCLUDED +#ifndef STR_UTILS_H +#define STR_UTILS_H /** * \brief Converts a hexadecimal string into a sequence of bytes. @@ -11,4 +11,4 @@ */ void hex_to_bytes(char const* hex_str, uint8_t* src, size_t const count); -#endif +#endif /* STR_UTILS_H */ From 14b55c4a73b20aa1efacedb20f64e83edd0b4f1b Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 25 Jul 2022 01:23:26 -0700 Subject: [PATCH 511/730] Fix a bug with the iterators when they pass their initial positions in reverse. 
Rename `AMstrings` to `AMstrs` for consistency with the `AMvalue.str` field. --- automerge-c/src/change_hashes.rs | 99 +++++++++---- automerge-c/src/changes.rs | 95 +++++++++--- automerge-c/src/{strings.rs => strs.rs} | 185 +++++++++++++++--------- automerge-c/src/sync/haves.rs | 89 +++++++++--- 4 files changed, 326 insertions(+), 142 deletions(-) rename automerge-c/src/{strings.rs => strs.rs} (58%) diff --git a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs index f7e01b26..5f5be108 100644 --- a/automerge-c/src/change_hashes.rs +++ b/automerge-c/src/change_hashes.rs @@ -35,10 +35,22 @@ impl Detail { } let len = self.len as isize; self.offset = if self.offset < 0 { - /* It's reversed. */ - std::cmp::max(-(len + 1), std::cmp::min(self.offset - n, -1)) + // It's reversed. + let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); + if unclipped >= 0 { + // Clip it to the forward stop. + len + } else { + std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) + } } else { - std::cmp::max(0, std::cmp::min(self.offset + n, len)) + let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); + if unclipped < 0 { + // Clip it to the reverse stop. + -(len + 1) + } else { + std::cmp::max(0, std::cmp::min(unclipped, len)) + } } } @@ -68,10 +80,8 @@ impl Detail { } pub fn prev(&mut self, n: isize) -> Option<&am::ChangeHash> { - /* Check for rewinding. */ - let prior_offset = self.offset; self.advance(-n); - if (self.offset == prior_offset) || self.is_stopped() { + if self.is_stopped() { return None; } let slice: &[am::ChangeHash] = @@ -86,6 +96,14 @@ impl Detail { ptr: self.ptr, } } + + pub fn rewound(&self) -> Self { + Self { + len: self.len, + offset: if self.offset < 0 { -1 } else { 0 }, + ptr: self.ptr, + } + } } impl From for [u8; USIZE_USIZE_USIZE_] { @@ -101,6 +119,7 @@ impl From for [u8; USIZE_USIZE_USIZE_] { /// \struct AMchangeHashes /// \brief A random-access iterator over a sequence of change hashes. 
#[repr(C)] +#[derive(PartialEq)] pub struct AMchangeHashes { /// An implementation detail that is intentionally opaque. /// \warning Modifying \p detail will cause undefined behavior. @@ -142,6 +161,13 @@ impl AMchangeHashes { detail: detail.reversed().into(), } } + + pub fn rewound(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.rewound().into(), + } + } } impl AsRef<[am::ChangeHash]> for AMchangeHashes { @@ -167,11 +193,11 @@ impl Default for AMchangeHashes { /// \param[in,out] change_hashes A pointer to an `AMchangeHashes` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \pre \p change_hashes must be a valid address. +/// \pre \p change_hashes` != NULL`. /// \internal /// /// #Safety -/// change_hashes must be a pointer to a valid AMchangeHashes +/// change_hashes must be a valid pointer to an AMchangeHashes #[no_mangle] pub unsafe extern "C" fn AMchangeHashesAdvance(change_hashes: *mut AMchangeHashes, n: isize) { if let Some(change_hashes) = change_hashes.as_mut() { @@ -186,15 +212,15 @@ pub unsafe extern "C" fn AMchangeHashesAdvance(change_hashes: *mut AMchangeHashe /// \param[in] change_hashes1 A pointer to an `AMchangeHashes` struct. /// \param[in] change_hashes2 A pointer to an `AMchangeHashes` struct. /// \return `-1` if \p change_hashes1 `<` \p change_hashes2, `0` if -/// \p change_hashes1 `==` \p change_hashes2 and `1` if +/// \p change_hashes1` == `\p change_hashes2 and `1` if /// \p change_hashes1 `>` \p change_hashes2. -/// \pre \p change_hashes1 must be a valid address. -/// \pre \p change_hashes2 must be a valid address. +/// \pre \p change_hashes1` != NULL`. +/// \pre \p change_hashes2` != NULL`. 
/// \internal /// /// #Safety -/// change_hashes1 must be a pointer to a valid AMchangeHashes -/// change_hashes2 must be a pointer to a valid AMchangeHashes +/// change_hashes1 must be a valid pointer to an AMchangeHashes +/// change_hashes2 must be a valid pointer to an AMchangeHashes #[no_mangle] pub unsafe extern "C" fn AMchangeHashesCmp( change_hashes1: *const AMchangeHashes, @@ -222,12 +248,11 @@ pub unsafe extern "C" fn AMchangeHashesCmp( /// \param[in] count The number of `AMbyteSpan` structs to copy from \p src. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// struct. -/// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` size of \p src. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p src` != NULL`. +/// \pre `0 <=` \p count` <= `size of \p src. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety /// src must be an AMbyteSpan array of size `>= count` #[no_mangle] @@ -261,11 +286,11 @@ pub unsafe extern "C" fn AMchangeHashesInit(src: *const AMbyteSpan, count: usize /// number of positions to advance. /// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes /// was previously advanced past its forward/reverse limit. -/// \pre \p change_hashes must be a valid address. +/// \pre \p change_hashes` != NULL`. /// \internal /// /// #Safety -/// change_hashes must be a pointer to a valid AMchangeHashes +/// change_hashes must be a valid pointer to an AMchangeHashes #[no_mangle] pub unsafe extern "C" fn AMchangeHashesNext( change_hashes: *mut AMchangeHashes, @@ -290,11 +315,11 @@ pub unsafe extern "C" fn AMchangeHashesNext( /// number of positions to advance. /// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes is /// presently advanced past its forward/reverse limit. 
-/// \pre \p change_hashes must be a valid address. +/// \pre \p change_hashes` != NULL`. /// \internal /// /// #Safety -/// change_hashes must be a pointer to a valid AMchangeHashes +/// change_hashes must be a valid pointer to an AMchangeHashes #[no_mangle] pub unsafe extern "C" fn AMchangeHashesPrev( change_hashes: *mut AMchangeHashes, @@ -314,11 +339,11 @@ pub unsafe extern "C" fn AMchangeHashesPrev( /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. /// \return The count of values in \p change_hashes. -/// \pre \p change_hashes must be a valid address. +/// \pre \p change_hashes` != NULL`. /// \internal /// /// #Safety -/// change_hashes must be a pointer to a valid AMchangeHashes +/// change_hashes must be a valid pointer to an AMchangeHashes #[no_mangle] pub unsafe extern "C" fn AMchangeHashesSize(change_hashes: *const AMchangeHashes) -> usize { if let Some(change_hashes) = change_hashes.as_ref() { @@ -334,11 +359,11 @@ pub unsafe extern "C" fn AMchangeHashesSize(change_hashes: *const AMchangeHashes /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. /// \return An `AMchangeHashes` struct -/// \pre \p change_hashes must be a valid address. +/// \pre \p change_hashes` != NULL`. /// \internal /// /// #Safety -/// change_hashes must be a pointer to a valid AMchangeHashes +/// change_hashes must be a valid pointer to an AMchangeHashes #[no_mangle] pub unsafe extern "C" fn AMchangeHashesReversed( change_hashes: *const AMchangeHashes, @@ -349,3 +374,25 @@ pub unsafe extern "C" fn AMchangeHashesReversed( AMchangeHashes::default() } } + +/// \memberof AMchangeHashes +/// \brief Creates an iterator at the starting position over the same sequence +/// of change hashes as the given one. +/// +/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. +/// \return An `AMchangeHashes` struct +/// \pre \p change_hashes` != NULL`. 
+/// \internal +/// +/// #Safety +/// change_hashes must be a valid pointer to an AMchangeHashes +#[no_mangle] +pub unsafe extern "C" fn AMchangeHashesRewound( + change_hashes: *const AMchangeHashes, +) -> AMchangeHashes { + if let Some(change_hashes) = change_hashes.as_ref() { + change_hashes.rewound() + } else { + AMchangeHashes::default() + } +} diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs index f8ada1fd..45b654eb 100644 --- a/automerge-c/src/changes.rs +++ b/automerge-c/src/changes.rs @@ -37,10 +37,22 @@ impl Detail { } let len = self.len as isize; self.offset = if self.offset < 0 { - /* It's reversed. */ - std::cmp::max(-(len + 1), std::cmp::min(self.offset - n, -1)) + // It's reversed. + let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); + if unclipped >= 0 { + // Clip it to the forward stop. + len + } else { + std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) + } } else { - std::cmp::max(0, std::cmp::min(self.offset + n, len)) + let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); + if unclipped < 0 { + // Clip it to the reverse stop. + -(len + 1) + } else { + std::cmp::max(0, std::cmp::min(unclipped, len)) + } } } @@ -78,10 +90,8 @@ impl Detail { } pub fn prev(&mut self, n: isize) -> Option<*const AMchange> { - /* Check for rewinding. */ - let prior_offset = self.offset; self.advance(-n); - if (self.offset == prior_offset) || self.is_stopped() { + if self.is_stopped() { return None; } let slice: &mut [am::Change] = @@ -105,6 +115,15 @@ impl Detail { storage: self.storage, } } + + pub fn rewound(&self) -> Self { + Self { + len: self.len, + offset: if self.offset < 0 { -1 } else { 0 }, + ptr: self.ptr, + storage: self.storage, + } + } } impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { @@ -123,6 +142,7 @@ impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { /// \struct AMchanges /// \brief A random-access iterator over a sequence of changes. 
#[repr(C)] +#[derive(PartialEq)] pub struct AMchanges { /// An implementation detail that is intentionally opaque. /// \warning Modifying \p detail will cause undefined behavior. @@ -134,7 +154,7 @@ pub struct AMchanges { impl AMchanges { pub fn new(changes: &[am::Change], storage: &mut BTreeMap) -> Self { Self { - detail: Detail::new(changes, 0, storage).into(), + detail: Detail::new(changes, 0, &mut *storage).into(), } } @@ -164,6 +184,13 @@ impl AMchanges { detail: detail.reversed().into(), } } + + pub fn rewound(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.rewound().into(), + } + } } impl AsRef<[am::Change]> for AMchanges { @@ -189,11 +216,11 @@ impl Default for AMchanges { /// \param[in,out] changes A pointer to an `AMchanges` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \pre \p changes must be a valid address. +/// \pre \p changes` != NULL`. /// \internal /// /// #Safety -/// changes must be a pointer to a valid AMchanges +/// changes must be a valid pointer to an AMchanges #[no_mangle] pub unsafe extern "C" fn AMchangesAdvance(changes: *mut AMchanges, n: isize) { if let Some(changes) = changes.as_mut() { @@ -202,19 +229,19 @@ pub unsafe extern "C" fn AMchangesAdvance(changes: *mut AMchanges, n: isize) { } /// \memberof AMchanges -/// \brief Tests the equality of two sequences of changes underlying a pair -/// of iterators. +/// \brief Tests the equality of two sequences of changes underlying a pair of +/// iterators. /// /// \param[in] changes1 A pointer to an `AMchanges` struct. /// \param[in] changes2 A pointer to an `AMchanges` struct. -/// \return `true` if \p changes1 `==` \p changes2 and `false` otherwise. -/// \pre \p changes1 must be a valid address. -/// \pre \p changes2 must be a valid address. +/// \return `true` if \p changes1` == `\p changes2 and `false` otherwise. +/// \pre \p changes1` != NULL`. 
+/// \pre \p changes2` != NULL`. /// \internal /// /// #Safety -/// changes1 must be a pointer to a valid AMchanges -/// changes2 must be a pointer to a valid AMchanges +/// changes1 must be a valid pointer to an AMchanges +/// changes2 must be a valid pointer to an AMchanges #[no_mangle] pub unsafe extern "C" fn AMchangesEqual( changes1: *const AMchanges, @@ -236,11 +263,11 @@ pub unsafe extern "C" fn AMchangesEqual( /// number of positions to advance. /// \return A pointer to an `AMchange` struct that's `NULL` when \p changes was /// previously advanced past its forward/reverse limit. -/// \pre \p changes must be a valid address. +/// \pre \p changes` != NULL`. /// \internal /// /// #Safety -/// changes must be a pointer to a valid AMchanges +/// changes must be a valid pointer to an AMchanges #[no_mangle] pub unsafe extern "C" fn AMchangesNext(changes: *mut AMchanges, n: isize) -> *const AMchange { if let Some(changes) = changes.as_mut() { @@ -261,11 +288,11 @@ pub unsafe extern "C" fn AMchangesNext(changes: *mut AMchanges, n: isize) -> *co /// number of positions to advance. /// \return A pointer to an `AMchange` struct that's `NULL` when \p changes is /// presently advanced past its forward/reverse limit. -/// \pre \p changes must be a valid address. +/// \pre \p changes` != NULL`. /// \internal /// /// #Safety -/// changes must be a pointer to a valid AMchanges +/// changes must be a valid pointer to an AMchanges #[no_mangle] pub unsafe extern "C" fn AMchangesPrev(changes: *mut AMchanges, n: isize) -> *const AMchange { if let Some(changes) = changes.as_mut() { @@ -281,11 +308,11 @@ pub unsafe extern "C" fn AMchangesPrev(changes: *mut AMchanges, n: isize) -> *co /// /// \param[in] changes A pointer to an `AMchanges` struct. /// \return The count of values in \p changes. -/// \pre \p changes must be a valid address. +/// \pre \p changes` != NULL`. 
/// \internal /// /// #Safety -/// changes must be a pointer to a valid AMchanges +/// changes must be a valid pointer to an AMchanges #[no_mangle] pub unsafe extern "C" fn AMchangesSize(changes: *const AMchanges) -> usize { if let Some(changes) = changes.as_ref() { @@ -301,11 +328,11 @@ pub unsafe extern "C" fn AMchangesSize(changes: *const AMchanges) -> usize { /// /// \param[in] changes A pointer to an `AMchanges` struct. /// \return An `AMchanges` struct. -/// \pre \p changes must be a valid address. +/// \pre \p changes` != NULL`. /// \internal /// /// #Safety -/// changes must be a pointer to a valid AMchanges +/// changes must be a valid pointer to an AMchanges #[no_mangle] pub unsafe extern "C" fn AMchangesReversed(changes: *const AMchanges) -> AMchanges { if let Some(changes) = changes.as_ref() { @@ -314,3 +341,23 @@ pub unsafe extern "C" fn AMchangesReversed(changes: *const AMchanges) -> AMchang AMchanges::default() } } + +/// \memberof AMchanges +/// \brief Creates an iterator at the starting position over the same sequence +/// of changes as the given one. +/// +/// \param[in] changes A pointer to an `AMchanges` struct. +/// \return An `AMchanges` struct +/// \pre \p changes` != NULL`. +/// \internal +/// +/// #Safety +/// changes must be a valid pointer to an AMchanges +#[no_mangle] +pub unsafe extern "C" fn AMchangesRewound(changes: *const AMchanges) -> AMchanges { + if let Some(changes) = changes.as_ref() { + changes.rewound() + } else { + AMchanges::default() + } +} diff --git a/automerge-c/src/strings.rs b/automerge-c/src/strs.rs similarity index 58% rename from automerge-c/src/strings.rs rename to automerge-c/src/strs.rs index 83202a24..5bc9876c 100644 --- a/automerge-c/src/strings.rs +++ b/automerge-c/src/strs.rs @@ -32,10 +32,22 @@ impl Detail { } let len = self.len as isize; self.offset = if self.offset < 0 { - /* It's reversed. */ - std::cmp::max(-(len + 1), std::cmp::min(self.offset - n, -1)) + // It's reversed. 
+ let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); + if unclipped >= 0 { + // Clip it to the forward stop. + len + } else { + std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) + } } else { - std::cmp::max(0, std::cmp::min(self.offset + n, len)) + let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); + if unclipped < 0 { + // Clip it to the reverse stop. + -(len + 1) + } else { + std::cmp::max(0, std::cmp::min(unclipped, len)) + } } } @@ -65,10 +77,8 @@ impl Detail { } pub fn prev(&mut self, n: isize) -> Option<*const c_char> { - /* Check for rewinding. */ - let prior_offset = self.offset; self.advance(-n); - if (self.offset == prior_offset) || self.is_stopped() { + if self.is_stopped() { return None; } let slice: &[CString] = @@ -83,6 +93,14 @@ impl Detail { ptr: self.ptr, } } + + pub fn rewound(&self) -> Self { + Self { + len: self.len, + offset: if self.offset < 0 { -1 } else { 0 }, + ptr: self.ptr, + } + } } impl From for [u8; USIZE_USIZE_USIZE_] { @@ -95,10 +113,11 @@ impl From for [u8; USIZE_USIZE_USIZE_] { } } -/// \struct AMstrings +/// \struct AMstrs /// \brief A random-access iterator over a sequence of UTF-8 strings. #[repr(C)] -pub struct AMstrings { +#[derive(PartialEq)] +pub struct AMstrs { /// An implementation detail that is intentionally opaque. /// \warning Modifying \p detail will cause undefined behavior. 
/// \note The actual size of \p detail will vary by platform, this is just @@ -106,7 +125,7 @@ pub struct AMstrings { detail: [u8; USIZE_USIZE_USIZE_], } -impl AMstrings { +impl AMstrs { pub fn new(cstrings: &[CString]) -> Self { Self { detail: Detail::new(cstrings, 0).into(), @@ -139,16 +158,23 @@ impl AMstrings { detail: detail.reversed().into(), } } + + pub fn rewound(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.rewound().into(), + } + } } -impl AsRef<[String]> for AMstrings { +impl AsRef<[String]> for AMstrs { fn as_ref(&self) -> &[String] { let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; unsafe { std::slice::from_raw_parts(detail.ptr as *const String, detail.len) } } } -impl Default for AMstrings { +impl Default for AMstrs { fn default() -> Self { Self { detail: [0; USIZE_USIZE_USIZE_], @@ -156,49 +182,46 @@ impl Default for AMstrings { } } -/// \memberof AMstrings +/// \memberof AMstrs /// \brief Advances an iterator over a sequence of UTF-8 strings by at most /// \p |n| positions where the sign of \p n is relative to the /// iterator's direction. /// -/// \param[in,out] strings A pointer to an `AMstrings` struct. +/// \param[in,out] strs A pointer to an `AMstrs` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \pre \p strings must be a valid address. +/// \pre \p strs` != NULL`. 
/// \internal /// /// #Safety -/// strings must be a pointer to a valid AMstrings +/// strs must be a valid pointer to an AMstrs #[no_mangle] -pub unsafe extern "C" fn AMstringsAdvance(strings: *mut AMstrings, n: isize) { - if let Some(strings) = strings.as_mut() { - strings.advance(n); +pub unsafe extern "C" fn AMstrsAdvance(strs: *mut AMstrs, n: isize) { + if let Some(strs) = strs.as_mut() { + strs.advance(n); }; } -/// \memberof AMstrings +/// \memberof AMstrs /// \brief Compares the sequences of UTF-8 strings underlying a pair of /// iterators. /// -/// \param[in] strings1 A pointer to an `AMstrings` struct. -/// \param[in] strings2 A pointer to an `AMstrings` struct. -/// \return `-1` if \p strings1 `<` \p strings2, `0` if -/// \p strings1 `==` \p strings2 and `1` if -/// \p strings1 `>` \p strings2. -/// \pre \p strings1 must be a valid address. -/// \pre \p strings2 must be a valid address. +/// \param[in] strs1 A pointer to an `AMstrs` struct. +/// \param[in] strs2 A pointer to an `AMstrs` struct. +/// \return `-1` if \p strs1 `<` \p strs2, `0` if +/// \p strs1` == `\p strs2 and `1` if +/// \p strs1 `>` \p strs2. +/// \pre \p strs1` != NULL`. +/// \pre \p strs2` != NULL`. 
/// \internal /// /// #Safety -/// strings1 must be a pointer to a valid AMstrings -/// strings2 must be a pointer to a valid AMstrings +/// strs1 must be a valid pointer to an AMstrs +/// strs2 must be a valid pointer to an AMstrs #[no_mangle] -pub unsafe extern "C" fn AMstringsCmp( - strings1: *const AMstrings, - strings2: *const AMstrings, -) -> isize { - match (strings1.as_ref(), strings2.as_ref()) { - (Some(strings1), Some(strings2)) => match strings1.as_ref().cmp(strings2.as_ref()) { +pub unsafe extern "C" fn AMstrsCmp(strs1: *const AMstrs, strs2: *const AMstrs) -> isize { + match (strs1.as_ref(), strs2.as_ref()) { + (Some(strs1), Some(strs2)) => match strs1.as_ref().cmp(strs2.as_ref()) { Ordering::Less => -1, Ordering::Equal => 0, Ordering::Greater => 1, @@ -209,92 +232,112 @@ pub unsafe extern "C" fn AMstringsCmp( } } -/// \memberof AMstrings -/// \brief Gets the key at the current position of an iterator over a -/// sequence of UTF-8 strings and then advances it by at most \p |n| -/// positions where the sign of \p n is relative to the iterator's direction. +/// \memberof AMstrs +/// \brief Gets the key at the current position of an iterator over a sequence +/// of UTF-8 strings and then advances it by at most \p |n| positions +/// where the sign of \p n is relative to the iterator's direction. /// -/// \param[in,out] strings A pointer to an `AMstrings` struct. +/// \param[in,out] strs A pointer to an `AMstrs` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \return A UTF-8 string that's `NULL` when \p strings was previously -/// advanced past its forward/reverse limit. -/// \pre \p strings must be a valid address. +/// \return A UTF-8 string that's `NULL` when \p strs was previously advanced +/// past its forward/reverse limit. +/// \pre \p strs` != NULL`. 
/// \internal /// /// #Safety -/// strings must be a pointer to a valid AMstrings +/// strs must be a valid pointer to an AMstrs #[no_mangle] -pub unsafe extern "C" fn AMstringsNext(strings: *mut AMstrings, n: isize) -> *const c_char { - if let Some(strings) = strings.as_mut() { - if let Some(key) = strings.next(n) { +pub unsafe extern "C" fn AMstrsNext(strs: *mut AMstrs, n: isize) -> *const c_char { + if let Some(strs) = strs.as_mut() { + if let Some(key) = strs.next(n) { return key; } } std::ptr::null() } -/// \memberof AMstrings +/// \memberof AMstrs /// \brief Advances an iterator over a sequence of UTF-8 strings by at most /// \p |n| positions where the sign of \p n is relative to the /// iterator's direction and then gets the key at its new position. /// -/// \param[in,out] strings A pointer to an `AMstrings` struct. +/// \param[in,out] strs A pointer to an `AMstrs` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \return A UTF-8 string that's `NULL` when \p strings is presently advanced +/// \return A UTF-8 string that's `NULL` when \p strs is presently advanced /// past its forward/reverse limit. -/// \pre \p strings must be a valid address. +/// \pre \p strs` != NULL`. /// \internal /// /// #Safety -/// strings must be a pointer to a valid AMstrings +/// strs must be a valid pointer to an AMstrs #[no_mangle] -pub unsafe extern "C" fn AMstringsPrev(strings: *mut AMstrings, n: isize) -> *const c_char { - if let Some(strings) = strings.as_mut() { - if let Some(key) = strings.prev(n) { +pub unsafe extern "C" fn AMstrsPrev(strs: *mut AMstrs, n: isize) -> *const c_char { + if let Some(strs) = strs.as_mut() { + if let Some(key) = strs.prev(n) { return key; } } std::ptr::null() } -/// \memberof AMstrings +/// \memberof AMstrs /// \brief Gets the size of the sequence of UTF-8 strings underlying an /// iterator. /// -/// \param[in] strings A pointer to an `AMstrings` struct. 
-/// \return The count of values in \p strings. -/// \pre \p strings must be a valid address. +/// \param[in] strs A pointer to an `AMstrs` struct. +/// \return The count of values in \p strs. +/// \pre \p strs` != NULL`. /// \internal /// /// #Safety -/// strings must be a pointer to a valid AMstrings +/// strs must be a valid pointer to an AMstrs #[no_mangle] -pub unsafe extern "C" fn AMstringsSize(strings: *const AMstrings) -> usize { - if let Some(strings) = strings.as_ref() { - strings.len() +pub unsafe extern "C" fn AMstrsSize(strs: *const AMstrs) -> usize { + if let Some(strs) = strs.as_ref() { + strs.len() } else { 0 } } -/// \memberof AMstrings +/// \memberof AMstrs /// \brief Creates an iterator over the same sequence of UTF-8 strings as the /// given one but with the opposite position and direction. /// -/// \param[in] strings A pointer to an `AMstrings` struct. -/// \return An `AMstrings` struct. -/// \pre \p strings must be a valid address. +/// \param[in] strs A pointer to an `AMstrs` struct. +/// \return An `AMstrs` struct. +/// \pre \p strs` != NULL`. /// \internal /// /// #Safety -/// strings must be a pointer to a valid AMstrings +/// strs must be a valid pointer to an AMstrs #[no_mangle] -pub unsafe extern "C" fn AMstringsReversed(strings: *const AMstrings) -> AMstrings { - if let Some(strings) = strings.as_ref() { - strings.reversed() +pub unsafe extern "C" fn AMstrsReversed(strs: *const AMstrs) -> AMstrs { + if let Some(strs) = strs.as_ref() { + strs.reversed() } else { - AMstrings::default() + AMstrs::default() + } +} + +/// \memberof AMstrs +/// \brief Creates an iterator at the starting position over the same sequence +/// of UTF-8 strings as the given one. +/// +/// \param[in] strs A pointer to an `AMstrs` struct. +/// \return An `AMstrs` struct +/// \pre \p strs` != NULL`. 
+/// \internal +/// +/// #Safety +/// strs must be a valid pointer to an AMstrs +#[no_mangle] +pub unsafe extern "C" fn AMstrsRewound(strs: *const AMstrs) -> AMstrs { + if let Some(strs) = strs.as_ref() { + strs.rewound() + } else { + AMstrs::default() } } diff --git a/automerge-c/src/sync/haves.rs b/automerge-c/src/sync/haves.rs index 4a1eb1d6..98d83b38 100644 --- a/automerge-c/src/sync/haves.rs +++ b/automerge-c/src/sync/haves.rs @@ -41,10 +41,22 @@ impl Detail { } let len = self.len as isize; self.offset = if self.offset < 0 { - /* It's reversed. */ - std::cmp::max(-(len + 1), std::cmp::min(self.offset - n, -1)) + // It's reversed. + let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); + if unclipped >= 0 { + // Clip it to the forward stop. + len + } else { + std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) + } } else { - std::cmp::max(0, std::cmp::min(self.offset + n, len)) + let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); + if unclipped < 0 { + // Clip it to the reverse stop. + -(len + 1) + } else { + std::cmp::max(0, std::cmp::min(unclipped, len)) + } } } @@ -82,10 +94,8 @@ impl Detail { } pub fn prev(&mut self, n: isize) -> Option<*const AMsyncHave> { - /* Check for rewinding. */ - let prior_offset = self.offset; self.advance(-n); - if (self.offset == prior_offset) || self.is_stopped() { + if self.is_stopped() { return None; } let slice: &[am::sync::Have] = @@ -109,6 +119,15 @@ impl Detail { storage: self.storage, } } + + pub fn rewound(&self) -> Self { + Self { + len: self.len, + offset: if self.offset < 0 { -1 } else { 0 }, + ptr: self.ptr, + storage: self.storage, + } + } } impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { @@ -127,6 +146,7 @@ impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { /// \struct AMsyncHaves /// \brief A random-access iterator over a sequence of synchronization haves. #[repr(C)] +#[derive(PartialEq)] pub struct AMsyncHaves { /// An implementation detail that is intentionally opaque. 
/// \warning Modifying \p detail will cause undefined behavior. @@ -168,6 +188,13 @@ impl AMsyncHaves { detail: detail.reversed().into(), } } + + pub fn rewound(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.rewound().into(), + } + } } impl AsRef<[am::sync::Have]> for AMsyncHaves { @@ -193,11 +220,11 @@ impl Default for AMsyncHaves { /// \param[in,out] sync_haves A pointer to an `AMsyncHaves` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \pre \p sync_haves must be a valid address. +/// \pre \p sync_haves` != NULL`. /// \internal /// /// #Safety -/// sync_haves must be a pointer to a valid AMsyncHaves +/// sync_haves must be a valid pointer to an AMsyncHaves #[no_mangle] pub unsafe extern "C" fn AMsyncHavesAdvance(sync_haves: *mut AMsyncHaves, n: isize) { if let Some(sync_haves) = sync_haves.as_mut() { @@ -211,14 +238,14 @@ pub unsafe extern "C" fn AMsyncHavesAdvance(sync_haves: *mut AMsyncHaves, n: isi /// /// \param[in] sync_haves1 A pointer to an `AMsyncHaves` struct. /// \param[in] sync_haves2 A pointer to an `AMsyncHaves` struct. -/// \return `true` if \p sync_haves1 `==` \p sync_haves2 and `false` otherwise. -/// \pre \p sync_haves1 must be a valid address. -/// \pre \p sync_haves2 must be a valid address. +/// \return `true` if \p sync_haves1` == `\p sync_haves2 and `false` otherwise. +/// \pre \p sync_haves1` != NULL`. +/// \pre \p sync_haves2` != NULL`. 
/// \internal /// /// #Safety -/// sync_haves1 must be a pointer to a valid AMsyncHaves -/// sync_haves2 must be a pointer to a valid AMsyncHaves +/// sync_haves1 must be a valid pointer to an AMsyncHaves +/// sync_haves2 must be a valid pointer to an AMsyncHaves #[no_mangle] pub unsafe extern "C" fn AMsyncHavesEqual( sync_haves1: *const AMsyncHaves, @@ -242,11 +269,11 @@ pub unsafe extern "C" fn AMsyncHavesEqual( /// \return A pointer to an `AMsyncHave` struct that's `NULL` when /// \p sync_haves was previously advanced past its forward/reverse /// limit. -/// \pre \p sync_haves must be a valid address. +/// \pre \p sync_haves` != NULL`. /// \internal /// /// #Safety -/// sync_haves must be a pointer to a valid AMsyncHaves +/// sync_haves must be a valid pointer to an AMsyncHaves #[no_mangle] pub unsafe extern "C" fn AMsyncHavesNext( sync_haves: *mut AMsyncHaves, @@ -271,11 +298,11 @@ pub unsafe extern "C" fn AMsyncHavesNext( /// number of positions to advance. /// \return A pointer to an `AMsyncHave` struct that's `NULL` when /// \p sync_haves is presently advanced past its forward/reverse limit. -/// \pre \p sync_haves must be a valid address. +/// \pre \p sync_haves` != NULL`. /// \internal /// /// #Safety -/// sync_haves must be a pointer to a valid AMsyncHaves +/// sync_haves must be a valid pointer to an AMsyncHaves #[no_mangle] pub unsafe extern "C" fn AMsyncHavesPrev( sync_haves: *mut AMsyncHaves, @@ -295,11 +322,11 @@ pub unsafe extern "C" fn AMsyncHavesPrev( /// /// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. /// \return The count of values in \p sync_haves. -/// \pre \p sync_haves must be a valid address. +/// \pre \p sync_haves` != NULL`. 
/// \internal /// /// #Safety -/// sync_haves must be a pointer to a valid AMsyncHaves +/// sync_haves must be a valid pointer to an AMsyncHaves #[no_mangle] pub unsafe extern "C" fn AMsyncHavesSize(sync_haves: *const AMsyncHaves) -> usize { if let Some(sync_haves) = sync_haves.as_ref() { @@ -315,11 +342,11 @@ pub unsafe extern "C" fn AMsyncHavesSize(sync_haves: *const AMsyncHaves) -> usiz /// /// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. /// \return An `AMsyncHaves` struct -/// \pre \p sync_haves must be a valid address. +/// \pre \p sync_haves` != NULL`. /// \internal /// /// #Safety -/// sync_haves must be a pointer to a valid AMsyncHaves +/// sync_haves must be a valid pointer to an AMsyncHaves #[no_mangle] pub unsafe extern "C" fn AMsyncHavesReversed(sync_haves: *const AMsyncHaves) -> AMsyncHaves { if let Some(sync_haves) = sync_haves.as_ref() { @@ -328,3 +355,23 @@ pub unsafe extern "C" fn AMsyncHavesReversed(sync_haves: *const AMsyncHaves) -> AMsyncHaves::default() } } + +/// \memberof AMsyncHaves +/// \brief Creates an iterator at the starting position over the same sequence +/// of synchronization haves as the given one. +/// +/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. +/// \return An `AMsyncHaves` struct +/// \pre \p sync_haves` != NULL`. +/// \internal +/// +/// #Safety +/// sync_haves must be a valid pointer to an AMsyncHaves +#[no_mangle] +pub unsafe extern "C" fn AMsyncHavesRewound(sync_haves: *const AMsyncHaves) -> AMsyncHaves { + if let Some(sync_haves) = sync_haves.as_ref() { + sync_haves.rewound() + } else { + AMsyncHaves::default() + } +} From 877744d40b7ee54c1b681260640676504f93f973 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 25 Jul 2022 01:33:50 -0700 Subject: [PATCH 512/730] Add equality comparison to the `AM*` types from which it was missing. Add equality comparison to `automerge::sync::message`. Defer `std::ffi::CString` creation until necessary. 
--- automerge-c/src/byte_span.rs | 2 +- automerge-c/src/change.rs | 114 +++++++++++++++++--------------- automerge-c/src/sync/have.rs | 6 +- automerge-c/src/sync/message.rs | 35 +++++----- automerge-c/src/sync/state.rs | 65 +++++++++--------- automerge/src/sync.rs | 2 +- 6 files changed, 113 insertions(+), 111 deletions(-) diff --git a/automerge-c/src/byte_span.rs b/automerge-c/src/byte_span.rs index c40b6de2..939a52c5 100644 --- a/automerge-c/src/byte_span.rs +++ b/automerge-c/src/byte_span.rs @@ -2,8 +2,8 @@ use automerge as am; /// \struct AMbyteSpan /// \brief A contiguous sequence of bytes. -/// #[repr(C)] +#[derive(PartialEq)] pub struct AMbyteSpan { /// A pointer to an array of bytes. /// \warning \p src is only valid until the `AMfree()` function is diff --git a/automerge-c/src/change.rs b/automerge-c/src/change.rs index a0bf59e3..8c726a3b 100644 --- a/automerge-c/src/change.rs +++ b/automerge-c/src/change.rs @@ -1,4 +1,5 @@ use automerge as am; +use std::cell::RefCell; use std::ffi::CString; use std::os::raw::c_char; @@ -18,25 +19,33 @@ macro_rules! to_change { /// \struct AMchange /// \brief A group of operations performed by an actor. 
+#[derive(PartialEq)] pub struct AMchange { body: *mut am::Change, - c_message: Option, + c_msg: RefCell>, } impl AMchange { - pub fn new(change: &mut am::Change) -> Self { - let c_message = match change.message() { - Some(c_message) => CString::new(c_message).ok(), - None => None, - }; + pub fn new(body: &mut am::Change) -> Self { Self { - body: change, - c_message, + body, + c_msg: RefCell::>::default(), } } - pub fn c_message(&self) -> Option<&CString> { - self.c_message.as_ref() + pub fn message(&self) -> *const c_char { + let mut c_msg = self.c_msg.borrow_mut(); + match c_msg.as_mut() { + None => { + if let Some(message) = unsafe { (*self.body).message() } { + return c_msg.insert(CString::new(message).unwrap()).as_ptr(); + } + } + Some(message) => { + return message.as_ptr(); + } + } + std::ptr::null() } } @@ -53,18 +62,17 @@ impl AsRef for AMchange { } /// \memberof AMchange -/// \brief Gets the first referenced actor ID in a change. +/// \brief Gets the first referenced actor identifier in a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \pre \p change must be a valid address. +/// \pre \p change` != NULL`. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMactorId` struct. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// change must be a pointer to a valid AMchange +/// change must be a valid pointer to an AMchange #[no_mangle] pub unsafe extern "C" fn AMchangeActorId(change: *const AMchange) -> *mut AMresult { let change = to_change!(change); @@ -77,11 +85,11 @@ pub unsafe extern "C" fn AMchangeActorId(change: *const AMchange) -> *mut AMresu /// \brief Compresses the raw bytes of a change. /// /// \param[in,out] change A pointer to an `AMchange` struct. 
-/// \pre \p change must be a valid address. +/// \pre \p change` != NULL`. /// \internal /// /// # Safety -/// change must be a pointer to a valid AMchange +/// change must be a valid pointer to an AMchange #[no_mangle] pub unsafe extern "C" fn AMchangeCompress(change: *mut AMchange) { if let Some(change) = change.as_mut() { @@ -94,11 +102,11 @@ pub unsafe extern "C" fn AMchangeCompress(change: *mut AMchange) { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A pointer to an `AMchangeHashes` struct or `NULL`. -/// \pre \p change must be a valid address. +/// \pre \p change` != NULL`. /// \internal /// /// # Safety -/// change must be a pointer to a valid AMchange +/// change must be a valid pointer to an AMchange #[no_mangle] pub unsafe extern "C" fn AMchangeDeps(change: *const AMchange) -> AMchangeHashes { match change.as_ref() { @@ -112,11 +120,11 @@ pub unsafe extern "C" fn AMchangeDeps(change: *const AMchange) -> AMchangeHashes /// /// \param[in] change A pointer to an `AMchange` struct. /// \return An `AMbyteSpan` struct. -/// \pre \p change must be a valid address. +/// \pre \p change` != NULL`. /// \internal /// /// # Safety -/// change must be a pointer to a valid AMchange +/// change must be a valid pointer to an AMchange #[no_mangle] pub unsafe extern "C" fn AMchangeExtraBytes(change: *const AMchange) -> AMbyteSpan { if let Some(change) = change.as_ref() { @@ -132,12 +140,11 @@ pub unsafe extern "C" fn AMchangeExtraBytes(change: *const AMchange) -> AMbyteSp /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes in \p src to load. /// \return A pointer to an `AMresult` struct containing an `AMchange` struct. -/// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` size of \p src. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p src` != NULL`. +/// \pre `0 <=` \p count` <= `size of \p src. 
+/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety /// src must be a byte array of size `>= count` #[no_mangle] @@ -152,11 +159,11 @@ pub unsafe extern "C" fn AMchangeFromBytes(src: *const u8, count: usize) -> *mut /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A change hash as an `AMbyteSpan` struct. -/// \pre \p change must be a valid address. +/// \pre \p change` != NULL`. /// \internal /// /// # Safety -/// change must be a pointer to a valid AMchange +/// change must be a valid pointer to an AMchange #[no_mangle] pub unsafe extern "C" fn AMchangeHash(change: *const AMchange) -> AMbyteSpan { match change.as_ref() { @@ -173,11 +180,11 @@ pub unsafe extern "C" fn AMchangeHash(change: *const AMchange) -> AMbyteSpan { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A boolean. -/// \pre \p change must be a valid address. +/// \pre \p change` != NULL`. /// \internal /// /// # Safety -/// change must be a pointer to a valid AMchange +/// change must be a valid pointer to an AMchange #[no_mangle] pub unsafe extern "C" fn AMchangeIsEmpty(change: *const AMchange) -> bool { if let Some(change) = change.as_ref() { @@ -192,11 +199,11 @@ pub unsafe extern "C" fn AMchangeIsEmpty(change: *const AMchange) -> bool { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change must be a valid address. +/// \pre \p change` != NULL`. /// \internal /// /// # Safety -/// change must be a pointer to a valid AMchange +/// change must be a valid pointer to an AMchange #[no_mangle] pub unsafe extern "C" fn AMchangeMaxOp(change: *const AMchange) -> u64 { if let Some(change) = change.as_ref() { @@ -211,19 +218,17 @@ pub unsafe extern "C" fn AMchangeMaxOp(change: *const AMchange) -> u64 { /// /// \param[in] change A pointer to an `AMchange` struct. 
/// \return A UTF-8 string or `NULL`. -/// \pre \p change must be a valid address. +/// \pre \p change` != NULL`. /// \internal /// /// # Safety -/// change must be a pointer to a valid AMchange +/// change must be a valid pointer to an AMchange #[no_mangle] pub unsafe extern "C" fn AMchangeMessage(change: *const AMchange) -> *const c_char { if let Some(change) = change.as_ref() { - if let Some(c_message) = change.c_message() { - return c_message.as_ptr(); - } - } - std::ptr::null::() + return change.message(); + }; + std::ptr::null() } /// \memberof AMchange @@ -231,11 +236,11 @@ pub unsafe extern "C" fn AMchangeMessage(change: *const AMchange) -> *const c_ch /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change must be a valid address. +/// \pre \p change` != NULL`. /// \internal /// /// # Safety -/// change must be a pointer to a valid AMchange +/// change must be a valid pointer to an AMchange #[no_mangle] pub unsafe extern "C" fn AMchangeSeq(change: *const AMchange) -> u64 { if let Some(change) = change.as_ref() { @@ -250,11 +255,11 @@ pub unsafe extern "C" fn AMchangeSeq(change: *const AMchange) -> u64 { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change must be a valid address. +/// \pre \p change` != NULL`. /// \internal /// /// # Safety -/// change must be a pointer to a valid AMchange +/// change must be a valid pointer to an AMchange #[no_mangle] pub unsafe extern "C" fn AMchangeSize(change: *const AMchange) -> usize { if let Some(change) = change.as_ref() { @@ -269,11 +274,11 @@ pub unsafe extern "C" fn AMchangeSize(change: *const AMchange) -> usize { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change must be a valid address. +/// \pre \p change` != NULL`. 
/// \internal /// /// # Safety -/// change must be a pointer to a valid AMchange +/// change must be a valid pointer to an AMchange #[no_mangle] pub unsafe extern "C" fn AMchangeStartOp(change: *const AMchange) -> u64 { if let Some(change) = change.as_ref() { @@ -288,11 +293,11 @@ pub unsafe extern "C" fn AMchangeStartOp(change: *const AMchange) -> u64 { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit signed integer. -/// \pre \p change must be a valid address. +/// \pre \p change` != NULL`. /// \internal /// /// # Safety -/// change must be a pointer to a valid AMchange +/// change must be a valid pointer to an AMchange #[no_mangle] pub unsafe extern "C" fn AMchangeTime(change: *const AMchange) -> i64 { if let Some(change) = change.as_ref() { @@ -307,11 +312,11 @@ pub unsafe extern "C" fn AMchangeTime(change: *const AMchange) -> i64 { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return An `AMbyteSpan` struct. -/// \pre \p change must be a valid address. +/// \pre \p change` != NULL`. /// \internal /// /// # Safety -/// change must be a pointer to a valid AMchange +/// change must be a valid pointer to an AMchange #[no_mangle] pub unsafe extern "C" fn AMchangeRawBytes(change: *const AMchange) -> AMbyteSpan { if let Some(change) = change.as_ref() { @@ -328,12 +333,11 @@ pub unsafe extern "C" fn AMchangeRawBytes(change: *const AMchange) -> AMbyteSpan /// \param[in] count The number of bytes in \p src to load. /// \return A pointer to an `AMresult` struct containing a sequence of /// `AMchange` structs. -/// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` size of \p src. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p src` != NULL`. +/// \pre `0 <=` \p count` <= `size of \p src. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety /// src must be a byte array of size `>= count` #[no_mangle] diff --git a/automerge-c/src/sync/have.rs b/automerge-c/src/sync/have.rs index ae85ee93..2396e8fe 100644 --- a/automerge-c/src/sync/have.rs +++ b/automerge-c/src/sync/have.rs @@ -5,7 +5,7 @@ use crate::change_hashes::AMchangeHashes; /// \struct AMsyncHave /// \brief A summary of the changes that the sender of a synchronization /// message already has. -#[derive(Clone)] +#[derive(Clone, PartialEq)] pub struct AMsyncHave(*const am::sync::Have); impl AMsyncHave { @@ -25,11 +25,11 @@ impl AsRef for AMsyncHave { /// /// \param[in] sync_have A pointer to an `AMsyncHave` struct. /// \return An `AMchangeHashes` struct. -/// \pre \p sync_have must be a valid address. +/// \pre \p sync_have` != NULL`. /// \internal /// /// # Safety -/// sync_have must be a pointer to a valid AMsyncHave +/// sync_have must be a valid pointer to an AMsyncHave #[no_mangle] pub unsafe extern "C" fn AMsyncHaveLastSync(sync_have: *const AMsyncHave) -> AMchangeHashes { if let Some(sync_have) = sync_have.as_ref() { diff --git a/automerge-c/src/sync/message.rs b/automerge-c/src/sync/message.rs index 14244059..a07af89b 100644 --- a/automerge-c/src/sync/message.rs +++ b/automerge-c/src/sync/message.rs @@ -23,6 +23,7 @@ pub(crate) use to_sync_message; /// \struct AMsyncMessage /// \brief A synchronization message for a peer. +#[derive(PartialEq)] pub struct AMsyncMessage { body: am::sync::Message, changes_storage: RefCell>, @@ -50,11 +51,11 @@ impl AsRef for AMsyncMessage { /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return An `AMchanges` struct. -/// \pre \p sync_message must be a valid address. +/// \pre \p sync_message` != NULL`. 
/// \internal /// /// # Safety -/// sync_message must be a pointer to a valid AMsyncMessage +/// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] pub unsafe extern "C" fn AMsyncMessageChanges(sync_message: *const AMsyncMessage) -> AMchanges { if let Some(sync_message) = sync_message.as_ref() { @@ -74,12 +75,11 @@ pub unsafe extern "C" fn AMsyncMessageChanges(sync_message: *const AMsyncMessage /// \param[in] count The number of bytes in \p src to decode. /// \return A pointer to an `AMresult` struct containing an `AMsyncMessage` /// struct. -/// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` size of \p src. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p src` != NULL`. +/// \pre `0 <=` \p count` <= `size of \p src. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety /// src must be a byte array of size `>= count` #[no_mangle] @@ -95,13 +95,12 @@ pub unsafe extern "C" fn AMsyncMessageDecode(src: *const u8, count: usize) -> *m /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. -/// \pre \p sync_message must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p sync_message` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
/// \internal -/// /// # Safety -/// sync_message must be a pointer to a valid AMsyncMessage +/// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] pub unsafe extern "C" fn AMsyncMessageEncode(sync_message: *const AMsyncMessage) -> *mut AMresult { let sync_message = to_sync_message!(sync_message); @@ -113,11 +112,11 @@ pub unsafe extern "C" fn AMsyncMessageEncode(sync_message: *const AMsyncMessage) /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return An `AMhaves` struct. -/// \pre \p sync_message must be a valid address. +/// \pre \p sync_message` != NULL`. /// \internal /// /// # Safety -/// sync_message must be a pointer to a valid AMsyncMessage +/// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] pub unsafe extern "C" fn AMsyncMessageHaves(sync_message: *const AMsyncMessage) -> AMsyncHaves { if let Some(sync_message) = sync_message.as_ref() { @@ -135,11 +134,11 @@ pub unsafe extern "C" fn AMsyncMessageHaves(sync_message: *const AMsyncMessage) /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return An `AMchangeHashes` struct. -/// \pre \p sync_message must be a valid address. +/// \pre \p sync_message` != NULL`. /// \internal /// /// # Safety -/// sync_message must be a pointer to a valid AMsyncMessage +/// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] pub unsafe extern "C" fn AMsyncMessageHeads(sync_message: *const AMsyncMessage) -> AMchangeHashes { if let Some(sync_message) = sync_message.as_ref() { @@ -155,11 +154,11 @@ pub unsafe extern "C" fn AMsyncMessageHeads(sync_message: *const AMsyncMessage) /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return An `AMchangeHashes` struct. -/// \pre \p sync_message must be a valid address. +/// \pre \p sync_message` != NULL`. 
/// \internal /// /// # Safety -/// sync_message must be a pointer to a valid AMsyncMessage +/// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] pub unsafe extern "C" fn AMsyncMessageNeeds(sync_message: *const AMsyncMessage) -> AMchangeHashes { if let Some(sync_message) = sync_message.as_ref() { diff --git a/automerge-c/src/sync/state.rs b/automerge-c/src/sync/state.rs index 4e293c76..a329d485 100644 --- a/automerge-c/src/sync/state.rs +++ b/automerge-c/src/sync/state.rs @@ -21,6 +21,7 @@ pub(crate) use to_sync_state; /// \struct AMsyncState /// \brief The state of synchronization with a peer. +#[derive(PartialEq)] pub struct AMsyncState { body: am::sync::State, their_haves_storage: RefCell>, @@ -60,12 +61,11 @@ impl From for *mut AMsyncState { /// \param[in] count The number of bytes in \p src to decode. /// \return A pointer to an `AMresult` struct containing an `AMsyncState` /// struct. -/// \pre \p src must be a valid address. -/// \pre `0 <=` \p count `<=` size of \p src. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p src` != NULL`. +/// \pre `0 <=` \p count` <= `size of \p src. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety /// src must be a byte array of size `>= count` #[no_mangle] @@ -81,13 +81,12 @@ pub unsafe extern "C" fn AMsyncStateDecode(src: *const u8, count: usize) -> *mut /// \param[in] sync_state A pointer to an `AMsyncState` struct. /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. -/// \pre \p sync_state must be a valid address. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \pre \p sync_state` != NULL`. 
+/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. /// \internal -/// /// # Safety -/// sync_state must be a pointer to a valid AMsyncState +/// sync_state must be a valid pointer to an AMsyncState #[no_mangle] pub unsafe extern "C" fn AMsyncStateEncode(sync_state: *const AMsyncState) -> *mut AMresult { let sync_state = to_sync_state!(sync_state); @@ -99,14 +98,14 @@ pub unsafe extern "C" fn AMsyncStateEncode(sync_state: *const AMsyncState) -> *m /// /// \param[in] sync_state1 A pointer to an `AMsyncState` struct. /// \param[in] sync_state2 A pointer to an `AMsyncState` struct. -/// \return `true` if \p sync_state1 `==` \p sync_state2 and `false` otherwise. -/// \pre \p sync_state1 must be a valid address. -/// \pre \p sync_state2 must be a valid address. +/// \return `true` if \p sync_state1` == `\p sync_state2 and `false` otherwise. +/// \pre \p sync_state1` != NULL`. +/// \pre \p sync_state2` != NULL`. /// \internal /// /// #Safety -/// sync_state1 must be a pointer to a valid AMsyncState -/// sync_state2 must be a pointer to a valid AMsyncState +/// sync_state1 must be a valid pointer to an AMsyncState +/// sync_state2 must be a valid pointer to an AMsyncState #[no_mangle] pub unsafe extern "C" fn AMsyncStateEqual( sync_state1: *const AMsyncState, @@ -124,8 +123,8 @@ pub unsafe extern "C" fn AMsyncStateEqual( /// /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMsyncState` struct. -/// \warning To avoid a memory leak, the returned `AMresult` struct must be -/// deallocated with `AMfree()`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. #[no_mangle] pub extern "C" fn AMsyncStateInit() -> *mut AMresult { to_result(am::sync::State::new()) @@ -136,11 +135,11 @@ pub extern "C" fn AMsyncStateInit() -> *mut AMresult { /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. 
/// \return An `AMchangeHashes` struct. -/// \pre \p sync_state must be a valid address. +/// \pre \p sync_state` != NULL`. /// \internal /// /// # Safety -/// sync_state must be a pointer to a valid AMsyncState +/// sync_state must be a valid pointer to an AMsyncState #[no_mangle] pub unsafe extern "C" fn AMsyncStateSharedHeads(sync_state: *const AMsyncState) -> AMchangeHashes { if let Some(sync_state) = sync_state.as_ref() { @@ -155,11 +154,11 @@ pub unsafe extern "C" fn AMsyncStateSharedHeads(sync_state: *const AMsyncState) /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. /// \return An `AMchangeHashes` struct. -/// \pre \p sync_state must be a valid address. +/// \pre \p sync_state` != NULL`. /// \internal /// /// # Safety -/// sync_state must be a pointer to a valid AMsyncState +/// sync_state must be a valid pointer to an AMsyncState #[no_mangle] pub unsafe extern "C" fn AMsyncStateLastSentHeads( sync_state: *const AMsyncState, @@ -178,13 +177,13 @@ pub unsafe extern "C" fn AMsyncStateLastSentHeads( /// \param[out] has_value A pointer to a boolean flag that is set to `true` if /// the returned `AMhaves` struct is relevant, `false` otherwise. /// \return An `AMhaves` struct. -/// \pre \p sync_state must be a valid address. -/// \pre \p has_value must be a valid address. +/// \pre \p sync_state` != NULL`. +/// \pre \p has_value` != NULL`. /// \internal /// /// # Safety -/// sync_state must be a pointer to a valid AMsyncState -/// has_value must be a pointer to a valid bool. +/// sync_state must be a valid pointer to an AMsyncState +/// has_value must be a valid pointer to a bool. #[no_mangle] pub unsafe extern "C" fn AMsyncStateTheirHaves( sync_state: *const AMsyncState, @@ -208,13 +207,13 @@ pub unsafe extern "C" fn AMsyncStateTheirHaves( /// the returned `AMchangeHashes` struct is relevant, `false` /// otherwise. /// \return An `AMchangeHashes` struct. -/// \pre \p sync_state must be a valid address. 
-/// \pre \p has_value must be a valid address. +/// \pre \p sync_state` != NULL`. +/// \pre \p has_value` != NULL`. /// \internal /// /// # Safety -/// sync_state must be a pointer to a valid AMsyncState -/// has_value must be a pointer to a valid bool. +/// sync_state must be a valid pointer to an AMsyncState +/// has_value must be a valid pointer to a bool. #[no_mangle] pub unsafe extern "C" fn AMsyncStateTheirHeads( sync_state: *const AMsyncState, @@ -238,13 +237,13 @@ pub unsafe extern "C" fn AMsyncStateTheirHeads( /// the returned `AMchangeHashes` struct is relevant, `false` /// otherwise. /// \return An `AMchangeHashes` struct. -/// \pre \p sync_state must be a valid address. -/// \pre \p has_value must be a valid address. +/// \pre \p sync_state` != NULL`. +/// \pre \p has_value` != NULL`. /// \internal /// /// # Safety -/// sync_state must be a pointer to a valid AMsyncState -/// has_value must be a pointer to a valid bool. +/// sync_state must be a valid pointer to an AMsyncState +/// has_value must be a valid pointer to a bool. #[no_mangle] pub unsafe extern "C" fn AMsyncStateTheirNeeds( sync_state: *const AMsyncState, diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 1a3a4ed2..2b4b454b 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -258,7 +258,7 @@ impl Automerge { } /// The sync message to be sent. -#[derive(Debug, Clone)] +#[derive(Clone, Debug, PartialEq)] pub struct Message { /// The heads of the sender. pub heads: Vec, From 69de8187a5aff26852a8ef8ac88c7bf3d304c885 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 25 Jul 2022 01:41:52 -0700 Subject: [PATCH 513/730] Update the build system with the added and renamed source files. Defer `BTreeMap` creation until necessary for `AMresult::Changes`. Add `AMvalueEqual()` to enable direct comparison of two `AMvalue` structs regardless of their respective variants. 
--- automerge-c/src/CMakeLists.txt | 9 +- automerge-c/src/lib.rs | 3 +- automerge-c/src/result.rs | 455 +++++++++++++++++++++++++-------- 3 files changed, 358 insertions(+), 109 deletions(-) diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt index f35ccc54..1b308b1c 100644 --- a/automerge-c/src/CMakeLists.txt +++ b/automerge-c/src/CMakeLists.txt @@ -58,11 +58,18 @@ add_custom_command( changes.rs doc.rs doc/list.rs + doc/list/item.rs + doc/list/items.rs doc/map.rs + doc/map/item.rs + doc/map/items.rs doc/utils.rs obj.rs + obj/item.rs + obj/items.rs result.rs - strings.rs + result_stack.rs + strs.rs sync.rs sync/have.rs sync/haves.rs diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs index dcfa4853..6418bd33 100644 --- a/automerge-c/src/lib.rs +++ b/automerge-c/src/lib.rs @@ -6,5 +6,6 @@ mod changes; mod doc; mod obj; mod result; -mod strings; +mod result_stack; +mod strs; mod sync; diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 17820caa..2a5d5fcc 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -1,6 +1,9 @@ use automerge as am; +use libc::strcmp; +use std::cell::RefCell; use std::collections::BTreeMap; use std::ffi::CString; +use std::ops::{Range, RangeFrom, RangeFull, RangeTo}; use std::os::raw::c_char; use crate::actor_id::AMactorId; @@ -8,9 +11,13 @@ use crate::byte_span::AMbyteSpan; use crate::change::AMchange; use crate::change_hashes::AMchangeHashes; use crate::changes::AMchanges; +use crate::doc::list::{item::AMlistItem, items::AMlistItems}; +use crate::doc::map::{item::AMmapItem, items::AMmapItems}; use crate::doc::AMdoc; +use crate::obj::item::AMobjItem; +use crate::obj::items::AMobjItems; use crate::obj::AMobjId; -use crate::strings::AMstrings; +use crate::strs::AMstrs; use crate::sync::{AMsyncMessage, AMsyncState}; /// \struct AMvalue @@ -19,11 +26,8 @@ use crate::sync::{AMsyncMessage, AMsyncState}; /// \enum AMvalueVariant /// \brief A value type discriminant. 
/// -/// \var AMvalue::tag -/// The variant discriminator of an `AMvalue` struct. -/// /// \var AMvalue::actor_id -/// An actor ID as an `AMactorId` struct. +/// An actor identifier as a pointer to an `AMactorId` struct. /// /// \var AMvalue::boolean /// A boolean. @@ -40,29 +44,59 @@ use crate::sync::{AMsyncMessage, AMsyncState}; /// \var AMvalue::counter /// A CRDT counter. /// +/// \var AMvalue::doc +/// A document as a pointer to an `AMdoc` struct. +/// /// \var AMvalue::f64 /// A 64-bit float. /// /// \var AMvalue::int_ /// A 64-bit signed integer. /// +/// \var AMvalue::list_items +/// A sequence of list object items as an `AMlistItems` struct. +/// +/// \var AMvalue::map_items +/// A sequence of map object items as an `AMmapItems` struct. +/// +/// \var AMvalue::null +/// A null. +/// /// \var AMvalue::obj_id -/// An object identifier. +/// An object identifier as a pointer to an `AMobjId` struct. +/// +/// \var AMvalue::obj_items +/// A sequence of object items as an `AMobjItems` struct. /// /// \var AMvalue::str /// A UTF-8 string. /// -/// \var AMvalue::strings -/// A sequence of UTF-8 strings as an `AMstrings` struct. +/// \var AMvalue::strs +/// A sequence of UTF-8 strings as an `AMstrs` struct. +/// +/// \var AMvalue::sync_message +/// A synchronization message as a pointer to an `AMsyncMessage` struct. +/// +/// \var AMvalue::sync_state +/// A synchronization state as a pointer to an `AMsyncState` struct. +/// +/// \var AMvalue::tag +/// The variant discriminator of an `AMvalue` struct. /// /// \var AMvalue::timestamp /// A Lamport timestamp. /// /// \var AMvalue::uint /// A 64-bit unsigned integer. -#[repr(C)] +/// +/// \var AMvalue::void +/// A void. +#[repr(u8)] pub enum AMvalue<'a> { - /// An actor ID variant. + /// A void variant. + /// \note This tag is unalphabetized so that a zeroed struct will have it. + Void, + /// An actor identifier variant. ActorId(&'a AMactorId), /// A boolean variant. 
Boolean(bool), @@ -80,44 +114,158 @@ pub enum AMvalue<'a> { F64(f64), /// A 64-bit signed integer variant. Int(i64), + /// A list items variant. + ListItems(AMlistItems), + /// A map items variant. + MapItems(AMmapItems), /// A null variant. Null, /// An object identifier variant. ObjId(&'a AMobjId), + /// An object items variant. + ObjItems(AMobjItems), /// A UTF-8 string variant. Str(*const libc::c_char), - /// A strings variant. - Strings(AMstrings), - /// A Lamport timestamp variant. - Timestamp(i64), - /* - /// A transaction variant. - Transaction(_), - */ - /// A 64-bit unsigned integer variant. - Uint(u64), + /// A UTF-8 strings variant. + Strs(AMstrs), /// A synchronization message variant. SyncMessage(&'a AMsyncMessage), /// A synchronization state variant. SyncState(&'a mut AMsyncState), - /// A void variant. - Void, + /// A Lamport timestamp variant. + Timestamp(i64), + /// A 64-bit unsigned integer variant. + Uint(u64), +} + +impl<'a> PartialEq for AMvalue<'a> { + fn eq(&self, other: &Self) -> bool { + use AMvalue::*; + + match (self, other) { + (ActorId(lhs), ActorId(rhs)) => *lhs == *rhs, + (Boolean(lhs), Boolean(rhs)) => lhs == rhs, + (Bytes(lhs), Bytes(rhs)) => lhs == rhs, + (ChangeHashes(lhs), ChangeHashes(rhs)) => lhs == rhs, + (Changes(lhs), Changes(rhs)) => lhs == rhs, + (Counter(lhs), Counter(rhs)) => lhs == rhs, + (Doc(lhs), Doc(rhs)) => *lhs == *rhs, + (F64(lhs), F64(rhs)) => lhs == rhs, + (Int(lhs), Int(rhs)) => lhs == rhs, + (ListItems(lhs), ListItems(rhs)) => lhs == rhs, + (MapItems(lhs), MapItems(rhs)) => lhs == rhs, + (ObjId(lhs), ObjId(rhs)) => *lhs == *rhs, + (ObjItems(lhs), ObjItems(rhs)) => lhs == rhs, + (Str(lhs), Str(rhs)) => unsafe { strcmp(*lhs, *rhs) == 0 }, + (Strs(lhs), Strs(rhs)) => lhs == rhs, + (SyncMessage(lhs), SyncMessage(rhs)) => *lhs == *rhs, + (SyncState(lhs), SyncState(rhs)) => *lhs == *rhs, + (Timestamp(lhs), Timestamp(rhs)) => lhs == rhs, + (Uint(lhs), Uint(rhs)) => lhs == rhs, + (Null, Null) | (Void, Void) => 
true, + _ => false, + } + } +} + +impl From<(&am::Value<'_>, &RefCell>)> for AMvalue<'_> { + fn from((value, c_str): (&am::Value<'_>, &RefCell>)) -> Self { + match value { + am::Value::Scalar(scalar) => match scalar.as_ref() { + am::ScalarValue::Boolean(flag) => AMvalue::Boolean(*flag), + am::ScalarValue::Bytes(bytes) => AMvalue::Bytes(bytes.as_slice().into()), + am::ScalarValue::Counter(counter) => AMvalue::Counter(counter.into()), + am::ScalarValue::F64(float) => AMvalue::F64(*float), + am::ScalarValue::Int(int) => AMvalue::Int(*int), + am::ScalarValue::Null => AMvalue::Null, + am::ScalarValue::Str(smol_str) => { + let mut c_str = c_str.borrow_mut(); + AMvalue::Str(match c_str.as_mut() { + None => { + let value_str = CString::new(smol_str.to_string()).unwrap(); + c_str.insert(value_str).as_ptr() + } + Some(value_str) => value_str.as_ptr(), + }) + } + am::ScalarValue::Timestamp(timestamp) => AMvalue::Timestamp(*timestamp), + am::ScalarValue::Uint(uint) => AMvalue::Uint(*uint), + }, + // \todo Confirm that an object variant should be ignored + // when there's no object ID variant. + am::Value::Object(_) => AMvalue::Void, + } + } +} + +impl From<&AMvalue<'_>> for u8 { + fn from(value: &AMvalue) -> Self { + use AMvalue::*; + + match value { + ActorId(_) => 1, + Boolean(_) => 2, + Bytes(_) => 3, + ChangeHashes(_) => 4, + Changes(_) => 5, + Counter(_) => 6, + Doc(_) => 7, + F64(_) => 8, + Int(_) => 9, + ListItems(_) => 10, + MapItems(_) => 11, + Null => 12, + ObjId(_) => 13, + ObjItems(_) => 14, + Str(_) => 15, + Strs(_) => 16, + SyncMessage(_) => 17, + SyncState(_) => 18, + Timestamp(_) => 19, + Uint(_) => 20, + Void => 0, + } + } +} + +/// \memberof AMvalue +/// \brief Tests the equality of two values. +/// +/// \param[in] value1 A pointer to an `AMvalue` struct. +/// \param[in] value2 A pointer to an `AMvalue` struct. +/// \return `true` if \p value1` == `\p value2 and `false` otherwise. +/// \pre \p value1` != NULL`. +/// \pre \p value2` != NULL`. 
+/// \internal +/// +/// #Safety +/// value1 must be a valid AMvalue pointer +/// value2 must be a valid AMvalue pointer +#[no_mangle] +pub unsafe extern "C" fn AMvalueEqual(value1: *const AMvalue, value2: *const AMvalue) -> bool { + match (value1.as_ref(), value2.as_ref()) { + (Some(value1), Some(value2)) => *value1 == *value2, + (None, Some(_)) | (Some(_), None) | (None, None) => false, + } } /// \struct AMresult /// \brief A discriminated union of result variants. pub enum AMresult { - ActorId(AMactorId), + ActorId(am::ActorId, Option), ChangeHashes(Vec), - Changes(Vec, BTreeMap), - String(CString), - Strings(Vec), + Changes(Vec, Option>), Doc(Box), Error(CString), + ListItems(Vec), + MapItems(Vec), ObjId(AMobjId), + ObjItems(Vec), + String(CString), + Strings(Vec), SyncMessage(AMsyncMessage), - SyncState(AMsyncState), - Value(am::Value<'static>, Option), + SyncState(Box), + Value(am::Value<'static>, RefCell>), Void, } @@ -153,9 +301,107 @@ impl From> for AMresult { } } +impl From>> for AMresult { + fn from(list_range: am::ListRange<'static, Range>) -> Self { + AMresult::ListItems( + list_range + .map(|(i, v, o)| AMlistItem::new(i, v.clone(), o)) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(list_range: am::ListRangeAt<'static, Range>) -> Self { + AMresult::ListItems( + list_range + .map(|(i, v, o)| AMlistItem::new(i, v.clone(), o)) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRange<'static, Range>) -> Self { + let map_items: Vec = map_range + .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) + .collect(); + AMresult::MapItems(map_items) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRangeAt<'static, Range>) -> Self { + let map_items: Vec = map_range + .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) + .collect(); + AMresult::MapItems(map_items) + } +} + +impl From>> for AMresult { + fn from(map_range: 
am::MapRange<'static, RangeFrom>) -> Self { + let map_items: Vec = map_range + .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) + .collect(); + AMresult::MapItems(map_items) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRangeAt<'static, RangeFrom>) -> Self { + let map_items: Vec = map_range + .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) + .collect(); + AMresult::MapItems(map_items) + } +} + +impl From> for AMresult { + fn from(map_range: am::MapRange<'static, RangeFull>) -> Self { + let map_items: Vec = map_range + .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) + .collect(); + AMresult::MapItems(map_items) + } +} + +impl From> for AMresult { + fn from(map_range: am::MapRangeAt<'static, RangeFull>) -> Self { + let map_items: Vec = map_range + .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) + .collect(); + AMresult::MapItems(map_items) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRange<'static, RangeTo>) -> Self { + let map_items: Vec = map_range + .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) + .collect(); + AMresult::MapItems(map_items) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRangeAt<'static, RangeTo>) -> Self { + let map_items: Vec = map_range + .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) + .collect(); + AMresult::MapItems(map_items) + } +} + impl From for AMresult { fn from(state: am::sync::State) -> Self { - AMresult::SyncState(AMsyncState::new(state)) + AMresult::SyncState(Box::new(AMsyncState::new(state))) + } +} + +impl From> for AMresult { + fn from(values: am::Values<'static>) -> Self { + AMresult::ObjItems(values.map(|(v, o)| AMobjItem::new(v.clone(), o)).collect()) } } @@ -168,7 +414,7 @@ impl From for *mut AMresult { impl From> for AMresult { fn 
from(maybe: Option<&am::Change>) -> Self { match maybe { - Some(change) => AMresult::Changes(vec![change.clone()], BTreeMap::new()), + Some(change) => AMresult::Changes(vec![change.clone()], None), None => AMresult::Void, } } @@ -194,7 +440,7 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(actor_id) => AMresult::ActorId(AMactorId::new(actor_id)), + Ok(actor_id) => AMresult::ActorId(actor_id, None), Err(e) => AMresult::err(&e.to_string()), } } @@ -203,7 +449,7 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(actor_id) => AMresult::ActorId(AMactorId::new(actor_id)), + Ok(actor_id) => AMresult::ActorId(actor_id, None), Err(e) => AMresult::err(&e.to_string()), } } @@ -221,7 +467,7 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(change) => AMresult::Changes(vec![change], BTreeMap::new()), + Ok(change) => AMresult::Changes(vec![change], None), Err(e) => AMresult::err(&e.to_string()), } } @@ -248,7 +494,7 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(state) => AMresult::SyncState(AMsyncState::new(state)), + Ok(state) => AMresult::SyncState(Box::new(AMsyncState::new(state))), Err(e) => AMresult::err(&e.to_string()), } } @@ -257,7 +503,7 @@ impl From> for AMresult { impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(value) => AMresult::Value(value, None), + Ok(value) => AMresult::Value(value, RefCell::>::default()), Err(e) => AMresult::err(&e.to_string()), } } @@ -267,7 +513,7 @@ impl From, am::ObjId)>, am::AutomergeError>> f fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { match maybe { // \todo Ensure that it's alright to ignore the `am::ObjId` value. 
- Ok(Some((value, _))) => AMresult::Value(value, None), + Ok(Some((value, _))) => AMresult::Value(value, RefCell::>::default()), Ok(None) => AMresult::Void, Err(e) => AMresult::err(&e.to_string()), } @@ -286,7 +532,10 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(size) => AMresult::Value(am::Value::uint(size as u64), None), + Ok(size) => AMresult::Value( + am::Value::uint(size as u64), + RefCell::>::default(), + ), Err(e) => AMresult::err(&e.to_string()), } } @@ -295,7 +544,7 @@ impl From> for AMresult { impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(changes) => AMresult::Changes(changes, BTreeMap::new()), + Ok(changes) => AMresult::Changes(changes, None), Err(e) => AMresult::err(&e.to_string()), } } @@ -307,7 +556,7 @@ impl From, am::AutomergeError>> for AMresult { Ok(changes) => { let changes: Vec = changes.iter().map(|&change| change.clone()).collect(); - AMresult::Changes(changes, BTreeMap::new()) + AMresult::Changes(changes, None) } Err(e) => AMresult::err(&e.to_string()), } @@ -335,7 +584,10 @@ impl From, am::InvalidChangeHashSlice>> for AMresult impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(bytes) => AMresult::Value(am::Value::bytes(bytes), None), + Ok(bytes) => AMresult::Value( + am::Value::bytes(bytes), + RefCell::>::default(), + ), Err(e) => AMresult::err(&e.to_string()), } } @@ -344,7 +596,7 @@ impl From, am::AutomergeError>> for AMresult { impl From> for AMresult { fn from(changes: Vec<&am::Change>) -> Self { let changes: Vec = changes.iter().map(|&change| change.clone()).collect(); - AMresult::Changes(changes, BTreeMap::new()) + AMresult::Changes(changes, None) } } @@ -356,7 +608,10 @@ impl From> for AMresult { impl From> for AMresult { fn from(bytes: Vec) -> Self { - AMresult::Value(am::Value::bytes(bytes), None) + AMresult::Value( + 
am::Value::bytes(bytes), + RefCell::>::default(), + ) } } @@ -384,11 +639,11 @@ pub enum AMstatus { /// /// \param[in] result A pointer to an `AMresult` struct. /// \return A UTF-8 string value or `NULL`. -/// \pre \p result must be a valid address. +/// \pre \p result` != NULL`. /// \internal /// /// # Safety -/// result must be a pointer to a valid AMresult +/// result must be a valid pointer to an AMresult #[no_mangle] pub unsafe extern "C" fn AMerrorMessage(result: *const AMresult) -> *const c_char { match result.as_ref() { @@ -401,11 +656,11 @@ pub unsafe extern "C" fn AMerrorMessage(result: *const AMresult) -> *const c_cha /// \brief Deallocates the storage for a result. /// /// \param[in,out] result A pointer to an `AMresult` struct. -/// \pre \p result must be a valid address. +/// \pre \p result` != NULL`. /// \internal /// /// # Safety -/// result must be a pointer to a valid AMresult +/// result must be a valid pointer to an AMresult #[no_mangle] pub unsafe extern "C" fn AMfree(result: *mut AMresult) { if !result.is_null() { @@ -419,26 +674,31 @@ pub unsafe extern "C" fn AMfree(result: *mut AMresult) { /// /// \param[in] result A pointer to an `AMresult` struct. /// \return The count of values in \p result. -/// \pre \p result must be a valid address. +/// \pre \p result` != NULL`. 
/// \internal /// /// # Safety -/// result must be a pointer to a valid AMresult +/// result must be a valid pointer to an AMresult #[no_mangle] pub unsafe extern "C" fn AMresultSize(result: *const AMresult) -> usize { if let Some(result) = result.as_ref() { + use AMresult::*; + match result { - AMresult::Error(_) | AMresult::Void => 0, - AMresult::ActorId(_) - | AMresult::Doc(_) - | AMresult::ObjId(_) - | AMresult::String(_) - | AMresult::SyncMessage(_) - | AMresult::SyncState(_) - | AMresult::Value(_, _) => 1, - AMresult::ChangeHashes(change_hashes) => change_hashes.len(), - AMresult::Changes(changes, _) => changes.len(), - AMresult::Strings(cstrings) => cstrings.len(), + Error(_) | Void => 0, + ActorId(_, _) + | Doc(_) + | ObjId(_) + | String(_) + | SyncMessage(_) + | SyncState(_) + | Value(_, _) => 1, + ChangeHashes(change_hashes) => change_hashes.len(), + Changes(changes, _) => changes.len(), + ListItems(list_items) => list_items.len(), + MapItems(map_items) => map_items.len(), + ObjItems(obj_items) => obj_items.len(), + Strings(cstrings) => cstrings.len(), } } else { 0 @@ -450,11 +710,11 @@ pub unsafe extern "C" fn AMresultSize(result: *const AMresult) -> usize { /// /// \param[in] result A pointer to an `AMresult` struct. /// \return An `AMstatus` enum tag. -/// \pre \p result must be a valid address. +/// \pre \p result` != NULL`. /// \internal /// /// # Safety -/// result must be a pointer to a valid AMresult +/// result must be a valid pointer to an AMresult #[no_mangle] pub unsafe extern "C" fn AMresultStatus(result: *const AMresult) -> AMstatus { match result.as_ref() { @@ -467,80 +727,61 @@ pub unsafe extern "C" fn AMresultStatus(result: *const AMresult) -> AMstatus { /// \memberof AMresult /// \brief Gets a result's value. /// -/// \param[in,out] result A pointer to an `AMresult` struct. +/// \param[in] result A pointer to an `AMresult` struct. /// \return An `AMvalue` struct. -/// \pre \p result must be a valid address. +/// \pre \p result` != NULL`. 
/// \internal /// /// # Safety -/// result must be a pointer to a valid AMresult +/// result must be a valid pointer to an AMresult #[no_mangle] pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> { let mut content = AMvalue::Void; if let Some(result) = result.as_mut() { match result { - AMresult::ActorId(actor_id) => { - content = AMvalue::ActorId(actor_id); - } + AMresult::ActorId(actor_id, c_actor_id) => match c_actor_id { + None => { + content = AMvalue::ActorId(&*c_actor_id.insert(AMactorId::new(&*actor_id))); + } + Some(c_actor_id) => { + content = AMvalue::ActorId(&*c_actor_id); + } + }, AMresult::ChangeHashes(change_hashes) => { content = AMvalue::ChangeHashes(AMchangeHashes::new(change_hashes)); } AMresult::Changes(changes, storage) => { - content = AMvalue::Changes(AMchanges::new(changes, storage)); + content = AMvalue::Changes(AMchanges::new( + changes, + storage.get_or_insert(BTreeMap::new()), + )); } AMresult::Doc(doc) => content = AMvalue::Doc(&mut **doc), AMresult::Error(_) => {} + AMresult::ListItems(list_items) => { + content = AMvalue::ListItems(AMlistItems::new(list_items)); + } + AMresult::MapItems(map_items) => { + content = AMvalue::MapItems(AMmapItems::new(map_items)); + } AMresult::ObjId(obj_id) => { content = AMvalue::ObjId(obj_id); } + AMresult::ObjItems(obj_items) => { + content = AMvalue::ObjItems(AMobjItems::new(obj_items)); + } AMresult::String(cstring) => content = AMvalue::Str(cstring.as_ptr()), AMresult::Strings(cstrings) => { - content = AMvalue::Strings(AMstrings::new(cstrings)); + content = AMvalue::Strs(AMstrs::new(cstrings)); } AMresult::SyncMessage(sync_message) => { content = AMvalue::SyncMessage(sync_message); } AMresult::SyncState(sync_state) => { - content = AMvalue::SyncState(sync_state); + content = AMvalue::SyncState(&mut *sync_state); } AMresult::Value(value, value_str) => { - match value { - am::Value::Scalar(scalar) => match scalar.as_ref() { - am::ScalarValue::Boolean(flag) => { - 
content = AMvalue::Boolean(*flag); - } - am::ScalarValue::Bytes(bytes) => { - content = AMvalue::Bytes(bytes.as_slice().into()); - } - am::ScalarValue::Counter(counter) => { - content = AMvalue::Counter(counter.into()); - } - am::ScalarValue::F64(float) => { - content = AMvalue::F64(*float); - } - am::ScalarValue::Int(int) => { - content = AMvalue::Int(*int); - } - am::ScalarValue::Null => { - content = AMvalue::Null; - } - am::ScalarValue::Str(smol_str) => { - *value_str = CString::new(smol_str.to_string()).ok(); - if let Some(cstring) = value_str { - content = AMvalue::Str(cstring.as_ptr()); - } - } - am::ScalarValue::Timestamp(timestamp) => { - content = AMvalue::Timestamp(*timestamp); - } - am::ScalarValue::Uint(uint) => { - content = AMvalue::Uint(*uint); - } - }, - // \todo Confirm that an object variant should be ignored - // when there's no object ID variant. - am::Value::Object(_) => {} - } + content = (&*value, &*value_str).into(); } AMresult::Void => {} } From 3a556c5991049c46501b8cd523af36848fde916c Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 1 Aug 2022 07:02:30 -0700 Subject: [PATCH 514/730] Expose `Autocommit::fork_at()`. Rename `AMdup()` to `AMclone()` to match the WASM API. Rename `AMgetActor()` to `AMgetActorId()` to match the WASM API. Rename `AMsetActor()` to `AMsetActorId()` to match the WASM API. 
--- automerge-c/src/doc.rs | 63 ++++++++++++++++++++--------------- automerge-c/test/doc_tests.c | 8 ++--- automerge-c/test/list_tests.c | 2 +- automerge-c/test/map_tests.c | 16 ++++----- automerge-c/test/sync_tests.c | 48 +++++++++++++------------- 5 files changed, 73 insertions(+), 64 deletions(-) diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 92f04598..1090e54b 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -92,6 +92,25 @@ pub unsafe extern "C" fn AMapplyChanges( to_result(doc.apply_changes(changes.as_ref().to_vec())) } +/// \memberof AMdoc +/// \brief Allocates storage for a document and initializes it by duplicating +/// the given document. +/// +/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing a pointer to an +/// `AMdoc` struct. +/// \pre \p doc` != NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. +/// \internal +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMclone(doc: *const AMdoc) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.as_ref().clone()) +} + /// \memberof AMdoc /// \brief Allocates a new document and initializes it with defaults. /// @@ -111,8 +130,8 @@ pub extern "C" fn AMcreate() -> *mut AMresult { /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] message A UTF-8 string or `NULL`. /// \param[in] time A pointer to a `time_t` value or `NULL`. -/// \return A pointer to an `AMresult` struct containing a change hash as an -/// `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` +/// with one element. /// \pre \p doc` != NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
@@ -136,25 +155,6 @@ pub unsafe extern "C" fn AMcommit( to_result(doc.commit_with::<()>(options)) } -/// \memberof AMdoc -/// \brief Allocates storage for a document and initializes it by duplicating -/// the given document. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMdoc` struct. -/// \pre \p doc` != NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMdup(doc: *const AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.as_ref().clone()) -} - /// \memberof AMdoc /// \brief Tests the equality of two documents after closing their respective /// transactions. @@ -178,9 +178,11 @@ pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { } /// \memberof AMdoc -/// \brief Forks this document at the current point for use by a different -/// actor. +/// \brief Forks this document at the current or a historical point for use by +/// a different actor. /// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical +/// point or `NULL` for the current point. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMdoc` struct. /// \pre \p doc` != NULL`. 
@@ -189,10 +191,14 @@ pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc +/// heads must be a valid pointer to an AMchangeHashes or NULL #[no_mangle] -pub unsafe extern "C" fn AMfork(doc: *mut AMdoc) -> *mut AMresult { +pub unsafe extern "C" fn AMfork(doc: *mut AMdoc, heads: *const AMchangeHashes) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.fork()) + match heads.as_ref() { + None => to_result(doc.fork()), + Some(heads) => to_result(doc.fork_at(heads.as_ref())), + } } /// \memberof AMdoc @@ -235,7 +241,7 @@ pub unsafe extern "C" fn AMgenerateSyncMessage( /// # Safety /// doc must be a valid pointer to an AMdoc #[no_mangle] -pub unsafe extern "C" fn AMgetActor(doc: *const AMdoc) -> *mut AMresult { +pub unsafe extern "C" fn AMgetActorId(doc: *const AMdoc) -> *mut AMresult { let doc = to_doc!(doc); to_result(Ok::( doc.get_actor().clone(), @@ -644,7 +650,10 @@ pub unsafe extern "C" fn AMsaveIncremental(doc: *mut AMdoc) -> *mut AMresult { /// doc must be a valid pointer to an AMdoc /// actor_id must be a valid pointer to an AMactorId #[no_mangle] -pub unsafe extern "C" fn AMsetActor(doc: *mut AMdoc, actor_id: *const AMactorId) -> *mut AMresult { +pub unsafe extern "C" fn AMsetActorId( + doc: *mut AMdoc, + actor_id: *const AMactorId, +) -> *mut AMresult { let doc = to_doc_mut!(doc); let actor_id = to_actor_id!(actor_id); doc.set_actor(actor_id.as_ref().clone()); diff --git a/automerge-c/test/doc_tests.c b/automerge-c/test/doc_tests.c index f683d6d8..fe9179ec 100644 --- a/automerge-c/test/doc_tests.c +++ b/automerge-c/test/doc_tests.c @@ -148,9 +148,9 @@ static void test_AMputActor_bytes(void **state) { test_state->actor_id_size), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; - AMfree(AMsetActor(test_state->group_state->doc, actor_id)); + AMfree(AMsetActorId(test_state->group_state->doc, actor_id)); actor_id = AMpush(&test_state->group_state->stack, - 
AMgetActor(test_state->group_state->doc), + AMgetActorId(test_state->group_state->doc), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; AMbyteSpan const bytes = AMactorIdBytes(actor_id); @@ -164,9 +164,9 @@ static void test_AMputActor_hex(void **state) { AMactorIdInitStr(test_state->actor_id_str), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; - AMfree(AMsetActor(test_state->group_state->doc, actor_id)); + AMfree(AMsetActorId(test_state->group_state->doc, actor_id)); actor_id = AMpush(&test_state->group_state->stack, - AMgetActor(test_state->group_state->doc), + AMgetActorId(test_state->group_state->doc), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; char const* const str = AMactorIdStr(actor_id); diff --git a/automerge-c/test/list_tests.c b/automerge-c/test/list_tests.c index 5e299f37..c34b9659 100644 --- a/automerge-c/test/list_tests.c +++ b/automerge-c/test/list_tests.c @@ -228,7 +228,7 @@ static void test_get_list_values(void** state) { AM_VALUE_CHANGE_HASHES, cmocka_cb).change_hashes; AMdoc* const doc2 = AMpush(&stack, - AMfork(doc1), + AMfork(doc1, NULL), AM_VALUE_DOC, cmocka_cb).doc; diff --git a/automerge-c/test/map_tests.c b/automerge-c/test/map_tests.c index 47a1dbe1..821fe81f 100644 --- a/automerge-c/test/map_tests.c +++ b/automerge-c/test/map_tests.c @@ -144,7 +144,7 @@ static void test_range_iter_map(void** state) { AMfree(AMmapPutUint(doc, AM_ROOT, "d", 9)); AMfree(AMcommit(doc, NULL, NULL)); AMactorId const* const actor_id = AMpush(&stack, - AMgetActor(doc), + AMgetActorId(doc), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; AMmapItems map_items = AMpush(&stack, @@ -322,7 +322,7 @@ static void test_map_range_back_and_forth_single(void** state) { AMresultStack* stack = *state; AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; AMactorId const* const actor_id = AMpush(&stack, - AMgetActor(doc), + AMgetActorId(doc), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; @@ -488,7 +488,7 @@ static void test_map_range_back_and_forth_double(void** state) { 
AMactorIdInitBytes("\0", 1), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; - AMfree(AMsetActor(doc1, actor_id1)); + AMfree(AMsetActorId(doc1, actor_id1)); AMfree(AMmapPutStr(doc1, AM_ROOT, "1", "a")); AMfree(AMmapPutStr(doc1, AM_ROOT, "2", "b")); @@ -500,7 +500,7 @@ static void test_map_range_back_and_forth_double(void** state) { AMactorIdInitBytes("\1", 1), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; - AMfree(AMsetActor(doc2, actor_id2)); + AMfree(AMsetActorId(doc2, actor_id2)); AMfree(AMmapPutStr(doc2, AM_ROOT, "1", "aa")); AMfree(AMmapPutStr(doc2, AM_ROOT, "2", "bb")); AMfree(AMmapPutStr(doc2, AM_ROOT, "3", "cc")); @@ -662,7 +662,7 @@ static void test_map_range_at_back_and_forth_single(void** state) { AMresultStack* stack = *state; AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; AMactorId const* const actor_id = AMpush(&stack, - AMgetActor(doc), + AMgetActorId(doc), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; @@ -833,7 +833,7 @@ static void test_map_range_at_back_and_forth_double(void** state) { AMactorIdInitBytes("\0", 1), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; - AMfree(AMsetActor(doc1, actor_id1)); + AMfree(AMsetActorId(doc1, actor_id1)); AMfree(AMmapPutStr(doc1, AM_ROOT, "1", "a")); AMfree(AMmapPutStr(doc1, AM_ROOT, "2", "b")); @@ -845,7 +845,7 @@ static void test_map_range_at_back_and_forth_double(void** state) { AMactorIdInitBytes("\1", 1), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; - AMfree(AMsetActor(doc2, actor_id2)); + AMfree(AMsetActorId(doc2, actor_id2)); AMfree(AMmapPutStr(doc2, AM_ROOT, "1", "aa")); AMfree(AMmapPutStr(doc2, AM_ROOT, "2", "bb")); AMfree(AMmapPutStr(doc2, AM_ROOT, "3", "cc")); @@ -1020,7 +1020,7 @@ static void test_get_range_values(void** state) { AMgetHeads(doc1), AM_VALUE_CHANGE_HASHES, cmocka_cb).change_hashes; - AMdoc* const doc2 = AMpush(&stack, AMfork(doc1), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc2 = AMpush(&stack, AMfork(doc1, NULL), AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMmapPutStr(doc1, AM_ROOT, "cc", "ccc 
V2")); AMfree(AMcommit(doc1, NULL, NULL)); diff --git a/automerge-c/test/sync_tests.c b/automerge-c/test/sync_tests.c index 58e8ff6b..b0ea1e1f 100644 --- a/automerge-c/test/sync_tests.c +++ b/automerge-c/test/sync_tests.c @@ -261,11 +261,11 @@ static void test_converged_works_with_prior_sync_state(void **state) { static void test_converged_no_message_once_synced(void **state) { /* Create & synchronize two nodes. */ TestState* test_state = *state; - AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, AMactorIdInitStr("abc123"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); - AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, AMactorIdInitStr("def456"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); @@ -346,11 +346,11 @@ static void test_converged_no_message_once_synced(void **state) { static void test_converged_allow_simultaneous_messages(void **state) { /* Create & synchronize two nodes. 
*/ TestState* test_state = *state; - AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, AMactorIdInitStr("abc123"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); - AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, AMactorIdInitStr("def456"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); @@ -523,11 +523,11 @@ static void test_converged_allow_simultaneous_messages(void **state) { */ static void test_converged_assume_sent_changes_were_received(void **state) { TestState* test_state = *state; - AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, AMactorIdInitStr("01234567"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); - AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, AMactorIdInitStr("89abcdef"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); @@ -623,11 +623,11 @@ static void test_diverged_works_without_prior_sync_state(void **state) { /* Create two peers both with divergent commits. */ TestState* test_state = *state; - AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, AMactorIdInitStr("01234567"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); - AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, AMactorIdInitStr("89abcdef"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); @@ -681,11 +681,11 @@ static void test_diverged_works_with_prior_sync_state(void **state) { /* Create two peers both with divergent commits. 
*/ TestState* test_state = *state; - AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, AMactorIdInitStr("01234567"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); - AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, AMactorIdInitStr("89abcdef"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); @@ -746,11 +746,11 @@ static void test_diverged_works_with_prior_sync_state(void **state) { */ static void test_diverged_ensure_not_empty_after_sync(void **state) { TestState* test_state = *state; - AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, AMactorIdInitStr("01234567"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); - AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, AMactorIdInitStr("89abcdef"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); @@ -788,11 +788,11 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { * We want to successfully sync (n1) with (r), even though (n1) believes * it's talking to (n2). */ TestState* test_state = *state; - AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, AMactorIdInitStr("01234567"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); - AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, AMactorIdInitStr("89abcdef"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); @@ -810,7 +810,7 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { /* Save a copy of n2 as "r" to simulate recovering from a crash. 
*/ AMdoc* r = AMpush(&test_state->stack, - AMdup(test_state->doc2), + AMclone(test_state->doc2), AM_VALUE_DOC, cmocka_cb).doc; AMbyteSpan encoded = AMpush(&test_state->stack, @@ -889,11 +889,11 @@ static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { */ static void test_diverged_resync_after_data_loss_without_disconnection(void **state) { TestState* test_state = *state; - AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, AMactorIdInitStr("01234567"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); - AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, AMactorIdInitStr("89abcdef"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); @@ -924,7 +924,7 @@ static void test_diverged_resync_after_data_loss_without_disconnection(void **st AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActor(doc2_after_data_loss, AMpush(&test_state->stack, + AMfree(AMsetActorId(doc2_after_data_loss, AMpush(&test_state->stack, AMactorIdInitStr("89abcdef"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); @@ -958,11 +958,11 @@ static void test_diverged_resync_after_data_loss_without_disconnection(void **st */ static void test_diverged_handles_concurrent_changes(void **state) { TestState* test_state = *state; - AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, AMactorIdInitStr("01234567"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); - AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, AMactorIdInitStr("89abcdef"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); @@ -970,7 +970,7 @@ static void test_diverged_handles_concurrent_changes(void **state) { AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActor(doc3, AMpush(&test_state->stack, + AMfree(AMsetActorId(doc3, 
AMpush(&test_state->stack, AMactorIdInitStr("fedcba98"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); @@ -1033,11 +1033,11 @@ static void test_diverged_handles_concurrent_changes(void **state) { */ static void test_diverged_handles_histories_of_branching_and_merging(void **state) { TestState* test_state = *state; - AMfree(AMsetActor(test_state->doc1, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, AMactorIdInitStr("01234567"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); - AMfree(AMsetActor(test_state->doc2, AMpush(&test_state->stack, + AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, AMactorIdInitStr("89abcdef"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); @@ -1045,7 +1045,7 @@ static void test_diverged_handles_histories_of_branching_and_merging(void **stat AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActor(doc3, AMpush(&test_state->stack, + AMfree(AMsetActorId(doc3, AMpush(&test_state->stack, AMactorIdInitStr("fedcba98"), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); From a22afdd70dcbf01e396c50cdc2d9454a8196e171 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 6 Aug 2022 15:04:46 -0700 Subject: [PATCH 515/730] Expose `automerge::AutoCommit::get_change_by_hash()` as `AMgetChangeByHash()`. Add the `AM_CHANGE_HASH_SIZE` macro define constant for `AMgetChangeByHash()`. Replace the literal `32` with the `automerge::types::HASH_SIZE` constant. Expose `automerge::AutoCommit::splice()` as `AMsplice()`. Add the `automerge::error::AutomergeError::InvalidValueType` variant for `AMsplice()`. Add push functionality to `AMspliceText()`. Fix some documentation content bugs. Fix some documentation formatting bugs. 
--- automerge-c/cbindgen.toml | 7 ++ automerge-c/src/doc.rs | 214 +++++++++++++++++++++++++++++--------- automerge-c/src/result.rs | 123 ++++++++++++++++++---- automerge/src/error.rs | 5 + automerge/src/types.rs | 11 +- 5 files changed, 286 insertions(+), 74 deletions(-) diff --git a/automerge-c/cbindgen.toml b/automerge-c/cbindgen.toml index 0b1b168d..ada7f48d 100644 --- a/automerge-c/cbindgen.toml +++ b/automerge-c/cbindgen.toml @@ -10,6 +10,13 @@ after_includes = """\n * \\brief The root object of a document. */ #define AM_ROOT NULL + +/** + * \\memberof AMchangeHash + * \\def AM_CHANGE_HASH_SIZE + * \\brief The count of bytes in a change hash. + */ +#define AM_CHANGE_HASH_SIZE 32 """ autogen_warning = "/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */" documentation = true diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 1090e54b..b3d9682e 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -6,12 +6,12 @@ use std::os::raw::c_char; use crate::actor_id::AMactorId; use crate::change_hashes::AMchangeHashes; use crate::obj::AMobjId; -use crate::result::{to_result, AMresult}; +use crate::result::{to_result, AMresult, AMvalue}; use crate::sync::{to_sync_message, AMsyncMessage, AMsyncState}; pub mod list; pub mod map; -mod utils; +pub mod utils; use crate::changes::AMchanges; use crate::doc::utils::to_str; @@ -27,6 +27,24 @@ macro_rules! to_changes { }}; } +macro_rules! to_del { + ($del:expr, $len:expr) => {{ + if $del > $len && $del != usize::MAX { + return AMresult::err(&format!("Invalid del {}", $del)).into(); + } + std::cmp::min($del, $len) + }}; +} + +macro_rules! to_pos { + ($pos:expr, $len:expr) => {{ + if $pos > $len && $pos != usize::MAX { + return AMresult::err(&format!("Invalid pos {}", $pos)).into(); + } + std::cmp::min($pos, $len) + }}; +} + macro_rules! 
to_sync_state_mut { ($handle:expr) => {{ let handle = $handle.as_mut(); @@ -73,8 +91,8 @@ impl DerefMut for AMdoc { /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] changes A pointer to an `AMchanges` struct. -/// \pre \p doc` != NULL`. -/// \pre \p changes` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p changes `!= NULL`. /// \return A pointer to an `AMresult` struct containing a void. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. @@ -99,7 +117,7 @@ pub unsafe extern "C" fn AMapplyChanges( /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMdoc` struct. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -132,7 +150,7 @@ pub extern "C" fn AMcreate() -> *mut AMresult { /// \param[in] time A pointer to a `time_t` value or `NULL`. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// with one element. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -161,9 +179,9 @@ pub unsafe extern "C" fn AMcommit( /// /// \param[in,out] doc1 An `AMdoc` struct. /// \param[in,out] doc2 An `AMdoc` struct. -/// \return `true` if \p doc1` == `\p doc2 and `false` otherwise. -/// \pre \p doc1` != NULL`. -/// \pre \p doc2` != NULL`. +/// \return `true` if \p doc1 `==` \p doc2 and `false` otherwise. +/// \pre \p doc1 `!= NULL`. +/// \pre \p doc2 `!= NULL`. /// \internal /// /// #Safety @@ -185,7 +203,7 @@ pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { /// point or `NULL` for the current point. 
/// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMdoc` struct. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -209,8 +227,8 @@ pub unsafe extern "C" fn AMfork(doc: *mut AMdoc, heads: *const AMchangeHashes) - /// \param[in,out] sync_state A pointer to an `AMsyncState` struct. /// \return A pointer to an `AMresult` struct containing either a pointer to an /// `AMsyncMessage` struct or a void. -/// \pre \p doc must b e a valid address. -/// \pre \p sync_state` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p sync_state `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -233,7 +251,7 @@ pub unsafe extern "C" fn AMgenerateSyncMessage( /// \param[in] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMactorId` struct. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -248,13 +266,43 @@ pub unsafe extern "C" fn AMgetActorId(doc: *const AMdoc) -> *mut AMresult { )) } +/// \memberof AMdoc +/// \brief Gets the change added to a document by its respective hash. +/// +/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The number of bytes in \p src. +/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. +/// \pre \p doc `!= NULL`. +/// \pre \p src `!= NULL`. +/// \pre \p count `>= AM_CHANGE_HASH_SIZE`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
+/// \internal +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// src must be a byte array of size `>= automerge::types::HASH_SIZE` +#[no_mangle] +pub unsafe extern "C" fn AMgetChangeByHash( + doc: *mut AMdoc, + src: *const u8, + count: usize, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let slice = std::slice::from_raw_parts(src, count); + match am::ChangeHash::try_from(slice) { + Ok(change_hash) => to_result(doc.get_change_by_hash(&change_hash)), + Err(e) => AMresult::err(&e.to_string()).into(), + } +} + /// \memberof AMdoc /// \brief Gets the changes added to a document by their respective hashes. /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] have_deps A pointer to an `AMchangeHashes` struct or `NULL`. /// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -281,8 +329,8 @@ pub unsafe extern "C" fn AMgetChanges( /// \param[in,out] doc1 An `AMdoc` struct. /// \param[in,out] doc2 An `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. -/// \pre \p doc1` != NULL`. -/// \pre \p doc2` != NULL`. +/// \pre \p doc1 `!= NULL`. +/// \pre \p doc2 `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -302,7 +350,7 @@ pub unsafe extern "C" fn AMgetChangesAdded(doc1: *mut AMdoc, doc2: *mut AMdoc) - /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// struct. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal @@ -324,7 +372,7 @@ pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { /// \param[in] heads A pointer to an `AMchangeHashes` struct or `NULL`. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// struct. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -351,7 +399,7 @@ pub unsafe extern "C" fn AMgetMissingDeps( /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing either an `AMchange` /// struct or a void. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -364,14 +412,14 @@ pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult } /// \memberof AMdoc -/// \brief Gets the current or historical keys of an object. +/// \brief Gets the current or historical keys of a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] heads A pointer to an `AMchangeHashes` struct for historical /// keys or `NULL` for current keys. /// \return A pointer to an `AMresult` struct containing an `AMstrs` struct. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -401,8 +449,8 @@ pub unsafe extern "C" fn AMkeys( /// \param[in] count The number of bytes in \p src to load. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMdoc` struct. -/// \pre \p src` != NULL`. -/// \pre `0 <=` \p count` <= `size of \p src. +/// \pre \p src `!= NULL`. +/// \pre `0 <` \p count `<= sizeof(`\p src`)`. 
/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -423,9 +471,9 @@ pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMresult { /// \param[in] count The number of bytes in \p src to load. /// \return A pointer to an `AMresult` struct containing the number of /// operations loaded from \p src. -/// \pre \p doc` != NULL`. -/// \pre \p src` != NULL`. -/// \pre `0 <=` \p count` <= `size of \p src. +/// \pre \p doc `!= NULL`. +/// \pre \p src `!= NULL`. +/// \pre `0 <` \p count `<= sizeof(`\p src`)`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -452,8 +500,8 @@ pub unsafe extern "C" fn AMloadIncremental( /// \param[in,out] src A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// struct. -/// \pre \p dest` != NULL`. -/// \pre \p src` != NULL`. +/// \pre \p dest `!= NULL`. +/// \pre \p src `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -474,7 +522,7 @@ pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMre /// \param[in] heads A pointer to an `AMchangeHashes` struct for historical /// size or `NULL` for current size. /// \return A 64-bit unsigned integer. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \internal /// /// # Safety @@ -499,7 +547,7 @@ pub unsafe extern "C" fn AMobjSize( } /// \memberof AMdoc -/// \brief Gets the current or historical values of an object within the given +/// \brief Gets the current or historical values of an object within its entire /// range. /// /// \param[in] doc A pointer to an `AMdoc` struct. 
@@ -507,7 +555,7 @@ pub unsafe extern "C" fn AMobjSize( /// \param[in] heads A pointer to an `AMchangeHashes` struct for historical /// items or `NULL` for current items. /// \return A pointer to an `AMresult` struct containing an `AMobjItems` struct. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -535,7 +583,7 @@ pub unsafe extern "C" fn AMobjValues( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \return The count of pending operations for \p doc. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \internal /// /// # Safety @@ -557,9 +605,9 @@ pub unsafe extern "C" fn AMpendingOps(doc: *const AMdoc) -> usize { /// \param[in,out] sync_state A pointer to an `AMsyncState` struct. /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre \p sync_state` != NULL`. -/// \pre \p sync_message` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p sync_state `!= NULL`. +/// \pre \p sync_message `!= NULL`. /// \internal /// /// # Safety @@ -584,7 +632,7 @@ pub unsafe extern "C" fn AMreceiveSyncMessage( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return The count of pending operations for \p doc that were cancelled. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \internal /// /// # Safety @@ -604,7 +652,7 @@ pub unsafe extern "C" fn AMrollback(doc: *mut AMdoc) -> usize { /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal @@ -623,7 +671,7 @@ pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -641,8 +689,8 @@ pub unsafe extern "C" fn AMsaveIncremental(doc: *mut AMdoc) -> *mut AMresult { /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] actor_id A pointer to an `AMactorId` struct. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre \p actor_id` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p actor_id `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -661,35 +709,97 @@ pub unsafe extern "C" fn AMsetActorId( } /// \memberof AMdoc -/// \brief Splices new characters into the identified text object at a given -/// index. +/// \brief Splices values into and/or removes values from the identified object +/// at a given position within it. /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the text object identified by \p obj_id. -/// \param[in] del The number of characters to delete. -/// \param[in] text A UTF-8 string. +/// \param[in] pos A position in the object identified by \p obj_id or +/// `SIZE_MAX` to indicate one past its end. +/// \param[in] del The number of characters to delete or `SIZE_MAX` to indicate +/// all of them. +/// \param[in] src A pointer to an array of `AMvalue` structs. +/// \param[in] count The number of `AMvalue` structs in \p src to load. /// \return A pointer to an `AMresult` struct containing a void. 
-/// \pre \p doc` != NULL`. -/// \pre `0 <=` \p index` <= `length of the text object identified by \p obj_id. -/// \pre \p text` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id`)` or \p pos `== SIZE_MAX`. +/// \pre `0 <=` \p del `<= AMobjSize(`\p obj_id`)` or \p del `== SIZE_MAX`. +/// \pre `(`\p src `!= NULL and 1 <=` \p count `<= sizeof(`\p src`)/ +/// sizeof(AMvalue)) or `\p src `== NULL or `\p count `== 0`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or NULL -/// text must be a null-terminated array of `c_char` +/// src must be an AMvalue array of size `>= count` or NULL +#[no_mangle] +pub unsafe extern "C" fn AMsplice( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + del: usize, + src: *const AMvalue, + count: usize, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let len = doc.length(obj_id); + let pos = to_pos!(pos, len); + let del = to_del!(del, len); + let mut vals: Vec = vec![]; + if !(src.is_null() || count == 0) { + let c_vals = std::slice::from_raw_parts(src, count); + for c_val in c_vals { + match c_val.into() { + Ok(s) => { + vals.push(s); + } + Err(e) => { + return AMresult::err(&e.to_string()).into(); + } + } + } + } + to_result(doc.splice(obj_id, pos, del, vals)) +} + +/// \memberof AMdoc +/// \brief Splices characters into and/or removes characters from the +/// identified object at a given position within it. +/// +/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos A position in the text object identified by \p obj_id or +/// `SIZE_MAX` to indicate one past its end. +/// \param[in] del The number of characters to delete or `SIZE_MAX` to indicate +/// all of them. 
+/// \param[in] text A UTF-8 string. +/// \return A pointer to an `AMresult` struct containing a void. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id`)` or \p pos `== SIZE_MAX`. +/// \pre `0 <=` \p del `<= AMobjSize(`\p obj_id`)` or \p del `== SIZE_MAX`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. +/// \internal +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL +/// text must be a null-terminated array of `c_char` or NULL. #[no_mangle] pub unsafe extern "C" fn AMspliceText( doc: *mut AMdoc, obj_id: *const AMobjId, - index: usize, + pos: usize, del: usize, text: *const c_char, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.splice_text(to_obj_id!(obj_id), index, del, &to_str(text))) + let obj_id = to_obj_id!(obj_id); + let len = doc.length(obj_id); + let pos = to_pos!(pos, len); + let del = to_del!(del, len); + to_result(doc.splice_text(obj_id, pos, del, &to_str(text))) } /// \memberof AMdoc @@ -700,7 +810,7 @@ pub unsafe extern "C" fn AMspliceText( /// \param[in] heads A pointer to an `AMchangeHashes` struct for historical /// keys or `NULL` for current keys. /// \return A pointer to an `AMresult` struct containing a UTF-8 string. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 2a5d5fcc..f164f62a 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -1,5 +1,7 @@ use automerge as am; use libc::strcmp; +use smol_str::SmolStr; +use std::any::type_name; use std::cell::RefCell; use std::collections::BTreeMap; use std::ffi::CString; @@ -13,6 +15,7 @@ use crate::change_hashes::AMchangeHashes; use crate::changes::AMchanges; use crate::doc::list::{item::AMlistItem, items::AMlistItems}; use crate::doc::map::{item::AMmapItem, items::AMmapItems}; +use crate::doc::utils::to_str; use crate::doc::AMdoc; use crate::obj::item::AMobjItem; use crate::obj::items::AMobjItems; @@ -59,9 +62,6 @@ use crate::sync::{AMsyncMessage, AMsyncState}; /// \var AMvalue::map_items /// A sequence of map object items as an `AMmapItems` struct. /// -/// \var AMvalue::null -/// A null. -/// /// \var AMvalue::obj_id /// An object identifier as a pointer to an `AMobjId` struct. /// @@ -88,9 +88,6 @@ use crate::sync::{AMsyncMessage, AMsyncState}; /// /// \var AMvalue::uint /// A 64-bit unsigned integer. -/// -/// \var AMvalue::void -/// A void. #[repr(u8)] pub enum AMvalue<'a> { /// A void variant. 
@@ -168,6 +165,80 @@ impl<'a> PartialEq for AMvalue<'a> { } } +impl From<&AMvalue<'_>> for Result { + fn from(c_value: &AMvalue) -> Self { + use am::AutomergeError::InvalidValueType; + use AMvalue::*; + + let expected = type_name::().to_string(); + match c_value { + Boolean(b) => Ok(am::ScalarValue::Boolean(*b)), + Bytes(span) => { + let slice = unsafe { std::slice::from_raw_parts(span.src, span.count) }; + Ok(am::ScalarValue::Bytes(slice.to_vec())) + } + Counter(c) => Ok(am::ScalarValue::Counter(c.into())), + F64(f) => Ok(am::ScalarValue::F64(*f)), + Int(i) => Ok(am::ScalarValue::Int(*i)), + Str(c_str) => { + let smol_str = unsafe { SmolStr::new(to_str(*c_str)) }; + Ok(am::ScalarValue::Str(smol_str)) + } + Timestamp(t) => Ok(am::ScalarValue::Timestamp(*t)), + Uint(u) => Ok(am::ScalarValue::Uint(*u)), + Null => Ok(am::ScalarValue::Null), + ActorId(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + ChangeHashes(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + Changes(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + Doc(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + ListItems(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + MapItems(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + ObjId(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + ObjItems(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + Strs(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + SyncMessage(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + SyncState(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + Void => Err(InvalidValueType { + expected, + unexpected: 
type_name::<()>().to_string(), + }), + } + } +} + impl From<(&am::Value<'_>, &RefCell>)> for AMvalue<'_> { fn from((value, c_str): (&am::Value<'_>, &RefCell>)) -> Self { match value { @@ -233,9 +304,9 @@ impl From<&AMvalue<'_>> for u8 { /// /// \param[in] value1 A pointer to an `AMvalue` struct. /// \param[in] value2 A pointer to an `AMvalue` struct. -/// \return `true` if \p value1` == `\p value2 and `false` otherwise. -/// \pre \p value1` != NULL`. -/// \pre \p value2` != NULL`. +/// \return `true` if \p value1 `==` \p value2 and `false` otherwise. +/// \pre \p value1 `!= NULL`. +/// \pre \p value2 `!= NULL`. /// \internal /// /// #Safety @@ -400,8 +471,22 @@ impl From for AMresult { } impl From> for AMresult { - fn from(values: am::Values<'static>) -> Self { - AMresult::ObjItems(values.map(|(v, o)| AMobjItem::new(v.clone(), o)).collect()) + fn from(pairs: am::Values<'static>) -> Self { + AMresult::ObjItems(pairs.map(|(v, o)| AMobjItem::new(v.clone(), o)).collect()) + } +} + +impl From, am::ObjId)>, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { + match maybe { + Ok(pairs) => AMresult::ObjItems( + pairs + .into_iter() + .map(|(v, o)| AMobjItem::new(v, o)) + .collect(), + ), + Err(e) => AMresult::err(&e.to_string()), + } } } @@ -512,8 +597,10 @@ impl From, am::AutomergeError>> for AMresult { impl From, am::ObjId)>, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { match maybe { - // \todo Ensure that it's alright to ignore the `am::ObjId` value. - Ok(Some((value, _))) => AMresult::Value(value, RefCell::>::default()), + Ok(Some((value, obj_id))) => match value { + am::Value::Object(_) => AMresult::ObjId(AMobjId::new(obj_id)), + _ => AMresult::Value(value, RefCell::>::default()), + }, Ok(None) => AMresult::Void, Err(e) => AMresult::err(&e.to_string()), } @@ -639,7 +726,7 @@ pub enum AMstatus { /// /// \param[in] result A pointer to an `AMresult` struct. 
/// \return A UTF-8 string value or `NULL`. -/// \pre \p result` != NULL`. +/// \pre \p result `!= NULL`. /// \internal /// /// # Safety @@ -656,7 +743,7 @@ pub unsafe extern "C" fn AMerrorMessage(result: *const AMresult) -> *const c_cha /// \brief Deallocates the storage for a result. /// /// \param[in,out] result A pointer to an `AMresult` struct. -/// \pre \p result` != NULL`. +/// \pre \p result `!= NULL`. /// \internal /// /// # Safety @@ -674,7 +761,7 @@ pub unsafe extern "C" fn AMfree(result: *mut AMresult) { /// /// \param[in] result A pointer to an `AMresult` struct. /// \return The count of values in \p result. -/// \pre \p result` != NULL`. +/// \pre \p result `!= NULL`. /// \internal /// /// # Safety @@ -710,7 +797,7 @@ pub unsafe extern "C" fn AMresultSize(result: *const AMresult) -> usize { /// /// \param[in] result A pointer to an `AMresult` struct. /// \return An `AMstatus` enum tag. -/// \pre \p result` != NULL`. +/// \pre \p result `!= NULL`. /// \internal /// /// # Safety @@ -729,7 +816,7 @@ pub unsafe extern "C" fn AMresultStatus(result: *const AMresult) -> AMstatus { /// /// \param[in] result A pointer to an `AMresult` struct. /// \return An `AMvalue` struct. -/// \pre \p result` != NULL`. +/// \pre \p result `!= NULL`. 
/// \internal /// /// # Safety diff --git a/automerge/src/error.rs b/automerge/src/error.rs index 9228b501..9f4ccf75 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -29,6 +29,11 @@ pub enum AutomergeError { MissingHash(ChangeHash), #[error("increment operations must be against a counter value")] MissingCounter, + #[error("invalid type of value, expected `{expected}` but received `{unexpected}`")] + InvalidValueType { + expected: String, + unexpected: String, + }, #[error("general failure")] Fail, } diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 141205d0..288c2846 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -536,7 +536,10 @@ impl fmt::Display for ChangeHash { pub enum ParseChangeHashError { #[error(transparent)] HexDecode(#[from] hex::FromHexError), - #[error("incorrect length, change hash should be 32 bytes, got {actual}")] + #[error( + "incorrect length, change hash should be {} bytes, got {actual}", + HASH_SIZE + )] IncorrectLength { actual: usize }, } @@ -545,7 +548,7 @@ impl FromStr for ChangeHash { fn from_str(s: &str) -> Result { let bytes = hex::decode(s)?; - if bytes.len() == 32 { + if bytes.len() == HASH_SIZE { Ok(ChangeHash(bytes.try_into().unwrap())) } else { Err(ParseChangeHashError::IncorrectLength { @@ -559,10 +562,10 @@ impl TryFrom<&[u8]> for ChangeHash { type Error = error::InvalidChangeHashSlice; fn try_from(bytes: &[u8]) -> Result { - if bytes.len() != 32 { + if bytes.len() != HASH_SIZE { Err(error::InvalidChangeHashSlice(Vec::from(bytes))) } else { - let mut array = [0; 32]; + let mut array = [0; HASH_SIZE]; array.copy_from_slice(bytes); Ok(ChangeHash(array)) } From eeb75f74f43933b6b1186407255d462d68dccaf9 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 6 Aug 2022 15:07:48 -0700 Subject: [PATCH 516/730] Fix `AMstrsCmp()`. Fix some documentation content bugs. Fix some documentation formatting bugs. 
--- automerge-c/src/strs.rs | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/automerge-c/src/strs.rs b/automerge-c/src/strs.rs index 5bc9876c..8bb0e5a1 100644 --- a/automerge-c/src/strs.rs +++ b/automerge-c/src/strs.rs @@ -18,11 +18,11 @@ struct Detail { pub const USIZE_USIZE_USIZE_: usize = size_of::(); impl Detail { - fn new(cstrings: &[CString], offset: isize) -> Self { + fn new(c_strings: &[CString], offset: isize) -> Self { Self { - len: cstrings.len(), + len: c_strings.len(), offset, - ptr: cstrings.as_ptr() as *const c_void, + ptr: c_strings.as_ptr() as *const c_void, } } @@ -126,9 +126,9 @@ pub struct AMstrs { } impl AMstrs { - pub fn new(cstrings: &[CString]) -> Self { + pub fn new(c_strings: &[CString]) -> Self { Self { - detail: Detail::new(cstrings, 0).into(), + detail: Detail::new(c_strings, 0).into(), } } @@ -167,10 +167,10 @@ impl AMstrs { } } -impl AsRef<[String]> for AMstrs { - fn as_ref(&self) -> &[String] { +impl AsRef<[CString]> for AMstrs { + fn as_ref(&self) -> &[CString] { let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const String, detail.len) } + unsafe { std::slice::from_raw_parts(detail.ptr as *const CString, detail.len) } } } @@ -190,7 +190,7 @@ impl Default for AMstrs { /// \param[in,out] strs A pointer to an `AMstrs` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \pre \p strs` != NULL`. +/// \pre \p strs `!= NULL`. /// \internal /// /// #Safety @@ -209,10 +209,10 @@ pub unsafe extern "C" fn AMstrsAdvance(strs: *mut AMstrs, n: isize) { /// \param[in] strs1 A pointer to an `AMstrs` struct. /// \param[in] strs2 A pointer to an `AMstrs` struct. /// \return `-1` if \p strs1 `<` \p strs2, `0` if -/// \p strs1` == `\p strs2 and `1` if +/// \p strs1 `==` \p strs2 and `1` if /// \p strs1 `>` \p strs2. -/// \pre \p strs1` != NULL`. 
-/// \pre \p strs2` != NULL`. +/// \pre \p strs1 `!= NULL`. +/// \pre \p strs2 `!= NULL`. /// \internal /// /// #Safety @@ -242,7 +242,7 @@ pub unsafe extern "C" fn AMstrsCmp(strs1: *const AMstrs, strs2: *const AMstrs) - /// number of positions to advance. /// \return A UTF-8 string that's `NULL` when \p strs was previously advanced /// past its forward/reverse limit. -/// \pre \p strs` != NULL`. +/// \pre \p strs `!= NULL`. /// \internal /// /// #Safety @@ -267,7 +267,7 @@ pub unsafe extern "C" fn AMstrsNext(strs: *mut AMstrs, n: isize) -> *const c_cha /// number of positions to advance. /// \return A UTF-8 string that's `NULL` when \p strs is presently advanced /// past its forward/reverse limit. -/// \pre \p strs` != NULL`. +/// \pre \p strs `!= NULL`. /// \internal /// /// #Safety @@ -288,7 +288,7 @@ pub unsafe extern "C" fn AMstrsPrev(strs: *mut AMstrs, n: isize) -> *const c_cha /// /// \param[in] strs A pointer to an `AMstrs` struct. /// \return The count of values in \p strs. -/// \pre \p strs` != NULL`. +/// \pre \p strs `!= NULL`. /// \internal /// /// #Safety @@ -308,7 +308,7 @@ pub unsafe extern "C" fn AMstrsSize(strs: *const AMstrs) -> usize { /// /// \param[in] strs A pointer to an `AMstrs` struct. /// \return An `AMstrs` struct. -/// \pre \p strs` != NULL`. +/// \pre \p strs `!= NULL`. /// \internal /// /// #Safety @@ -328,7 +328,7 @@ pub unsafe extern "C" fn AMstrsReversed(strs: *const AMstrs) -> AMstrs { /// /// \param[in] strs A pointer to an `AMstrs` struct. /// \return An `AMstrs` struct -/// \pre \p strs` != NULL`. +/// \pre \p strs `!= NULL`. /// \internal /// /// #Safety From 4217019cbc2f4349f05ade39aa6986e3e8ab5440 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 6 Aug 2022 15:47:53 -0700 Subject: [PATCH 517/730] Expose `automerge::AutoCommit::get_all()` as `AMlistGetAll()` and `AMmapGetAll()`. Add symbolic last index specification to `AMlist{Delete,Get,Increment}()`. 
Add symbolic last index specification to `AMlistPut{Bool,Bytes,Counter, F64,Int,Null,Object,Str,Timestamp,Uint}()`. Prevent `doc::utils::to_str(NULL)` from segfaulting. Fix some documentation content bugs. Fix some documentation formatting bugs. --- automerge-c/src/doc/list.rs | 209 +++++++++++++++++++++++++---------- automerge-c/src/doc/map.rs | 107 ++++++++++++------ automerge-c/src/doc/utils.rs | 6 +- 3 files changed, 230 insertions(+), 92 deletions(-) diff --git a/automerge-c/src/doc/list.rs b/automerge-c/src/doc/list.rs index 15287ae0..a425d815 100644 --- a/automerge-c/src/doc/list.rs +++ b/automerge-c/src/doc/list.rs @@ -10,6 +10,18 @@ use crate::result::{to_result, AMresult}; pub mod item; pub mod items; +macro_rules! adjust { + ($index:expr, $insert:expr, $len:expr) => {{ + // An empty object can only be inserted into. + let insert = $insert || $len == 0; + let end = if insert { $len } else { $len - 1 }; + if $index > end && $index != usize::MAX { + return AMresult::err(&format!("Invalid index {}", $index)).into(); + } + (std::cmp::min($index, end), insert) + }}; +} + macro_rules! to_range { ($begin:expr, $end:expr) => {{ if $begin > $end { @@ -24,10 +36,11 @@ macro_rules! to_range { /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal @@ -41,7 +54,9 @@ pub unsafe extern "C" fn AMlistDelete( index: usize, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.delete(to_obj_id!(obj_id), index)) + let obj_id = to_obj_id!(obj_id); + let (index, _) = adjust!(index, false, doc.length(obj_id)); + to_result(doc.delete(obj_id, index)) } /// \memberof AMdoc @@ -49,12 +64,13 @@ pub unsafe extern "C" fn AMlistDelete( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index within the list object identified by \p obj_id. +/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index. /// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical /// value or `NULL` for the current value. -/// \return A pointer to an `AMresult` struct. -/// \pre \p doc` != NULL`. -/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \return A pointer to an `AMresult` struct that doesn't contain a void. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -71,23 +87,61 @@ pub unsafe extern "C" fn AMlistGet( ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); + let (index, _) = adjust!(index, false, doc.length(obj_id)); match heads.as_ref() { None => to_result(doc.get(obj_id, index)), Some(heads) => to_result(doc.get_at(obj_id, index, heads.as_ref())), } } +/// \memberof AMdoc +/// \brief Gets all of the historical values at an index in a list object until +/// its current one or a specific one. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. 
+/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index. +/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical +/// last value or `NULL` for the current last value. +/// \return A pointer to an `AMresult` struct containing an `AMobjItems` struct. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. +/// \internal +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL +/// heads must be a valid pointer to an AMchangeHashes or NULL +#[no_mangle] +pub unsafe extern "C" fn AMlistGetAll( + doc: *const AMdoc, + obj_id: *const AMobjId, + index: usize, + heads: *const AMchangeHashes, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let (index, _) = adjust!(index, false, doc.length(obj_id)); + match heads.as_ref() { + None => to_result(doc.get_all(obj_id, index)), + Some(heads) => to_result(doc.get_all_at(obj_id, index, heads.as_ref())), + } +} + /// \memberof AMdoc /// \brief Increments a counter at an index in a list object by the given /// value. /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. 
/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -102,7 +156,9 @@ pub unsafe extern "C" fn AMlistIncrement( value: i64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.increment(to_obj_id!(obj_id), index, value)) + let obj_id = to_obj_id!(obj_id); + let (index, _) = adjust!(index, false, doc.length(obj_id)); + to_result(doc.increment(obj_id, index, value)) } /// \memberof AMdoc @@ -110,12 +166,16 @@ pub unsafe extern "C" fn AMlistIncrement( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index if \p insert +/// `== false` or one past its last index if \p insert +/// `== true`. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. /// \param[in] value A boolean. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -132,6 +192,7 @@ pub unsafe extern "C" fn AMlistPutBool( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); + let (index, insert) = adjust!(index, insert, doc.length(obj_id)); let value = am::ScalarValue::Boolean(value); to_result(if insert { doc.insert(obj_id, index, value) @@ -145,16 +206,19 @@ pub unsafe extern "C" fn AMlistPutBool( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. 
-/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index if \p insert +/// `== false` or one past its last index if \p insert +/// `== true`. /// \param[in] insert A flag to insert \p src before \p index instead of /// writing \p src over \p index. /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes to copy from \p src. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. -/// \pre \p src` != NULL`. -/// \pre `0 <=` \p count` <= `size of \p src. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. +/// \pre \p src `!= NULL`. +/// \pre `0 <` \p count `<= sizeof(`\p src`)`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -173,12 +237,13 @@ pub unsafe extern "C" fn AMlistPutBytes( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let mut vec = Vec::new(); - vec.extend_from_slice(std::slice::from_raw_parts(src, count)); + let (index, insert) = adjust!(index, insert, doc.length(obj_id)); + let mut value = Vec::new(); + value.extend_from_slice(std::slice::from_raw_parts(src, count)); to_result(if insert { - doc.insert(obj_id, index, vec) + doc.insert(obj_id, index, value) } else { - doc.put(obj_id, index, vec) + doc.put(obj_id, index, value) }) } @@ -187,13 +252,16 @@ pub unsafe extern "C" fn AMlistPutBytes( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id. 
+/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index if \p insert +/// `== false` or one past its last index if \p insert +/// `== true`. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -210,6 +278,7 @@ pub unsafe extern "C" fn AMlistPutCounter( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); + let (index, insert) = adjust!(index, insert, doc.length(obj_id)); let value = am::ScalarValue::Counter(value.into()); to_result(if insert { doc.insert(obj_id, index, value) @@ -223,13 +292,16 @@ pub unsafe extern "C" fn AMlistPutCounter( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index if \p insert +/// `== false` or one past its last index if \p insert +/// `== true`. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. /// \param[in] value A 64-bit float. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \pre \p doc `!= NULL`. 
+/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -246,6 +318,7 @@ pub unsafe extern "C" fn AMlistPutF64( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); + let (index, insert) = adjust!(index, insert, doc.length(obj_id)); to_result(if insert { doc.insert(obj_id, index, value) } else { @@ -258,13 +331,16 @@ pub unsafe extern "C" fn AMlistPutF64( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index if \p insert +/// `== false` or one past its last index if \p insert +/// `== true`. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -281,6 +357,7 @@ pub unsafe extern "C" fn AMlistPutInt( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); + let (index, insert) = adjust!(index, insert, doc.length(obj_id)); to_result(if insert { doc.insert(obj_id, index, value) } else { @@ -293,12 +370,15 @@ pub unsafe extern "C" fn AMlistPutInt( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. 
-/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index if \p insert +/// `== false` or one past its last index if \p insert +/// `== true`. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -314,11 +394,11 @@ pub unsafe extern "C" fn AMlistPutNull( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let value = (); + let (index, insert) = adjust!(index, insert, doc.length(obj_id)); to_result(if insert { - doc.insert(obj_id, index, value) + doc.insert(obj_id, index, ()) } else { - doc.put(obj_id, index, value) + doc.put(obj_id, index, ()) }) } @@ -327,14 +407,17 @@ pub unsafe extern "C" fn AMlistPutNull( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index if \p insert +/// `== false` or one past its last index if \p insert +/// `== true`. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. /// \param[in] obj_type An `AMobjIdType` enum tag. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMobjId` struct. -/// \pre \p doc` != NULL`. 
-/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -351,11 +434,12 @@ pub unsafe extern "C" fn AMlistPutObject( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let value = obj_type.into(); + let (index, insert) = adjust!(index, insert, doc.length(obj_id)); + let object = obj_type.into(); to_result(if insert { - doc.insert_object(obj_id, index, value) + doc.insert_object(obj_id, index, object) } else { - doc.put_object(&obj_id, index, value) + doc.put_object(obj_id, index, object) }) } @@ -364,14 +448,17 @@ pub unsafe extern "C" fn AMlistPutObject( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index if \p insert +/// `== false` or one past its last index if \p insert +/// `== true`. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. /// \param[in] value A UTF-8 string. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. -/// \pre \p value` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. +/// \pre \p value `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal @@ -389,6 +476,7 @@ pub unsafe extern "C" fn AMlistPutStr( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); + let (index, insert) = adjust!(index, insert, doc.length(obj_id)); let value = to_str(value); to_result(if insert { doc.insert(obj_id, index, value) @@ -402,13 +490,16 @@ pub unsafe extern "C" fn AMlistPutStr( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id. +/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index if \p insert +/// `== false` or one past its last index if \p insert +/// `== true`. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -425,6 +516,7 @@ pub unsafe extern "C" fn AMlistPutTimestamp( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); + let (index, insert) = adjust!(index, insert, doc.length(obj_id)); let value = am::ScalarValue::Timestamp(value); to_result(if insert { doc.insert(obj_id, index, value) @@ -438,13 +530,16 @@ pub unsafe extern "C" fn AMlistPutTimestamp( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id. 
+/// \param[in] index An index in the list object identified by \p obj_id or +/// `SIZE_MAX` to indicate its last index if \p insert +/// `== false` or one past its last index if \p insert +/// `== true`. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. /// \param[in] value A 64-bit unsigned integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre `0 <=` \p index` <= `length of the list object identified by \p obj_id. +/// \pre \p doc `!= NULL`. +/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -461,6 +556,7 @@ pub unsafe extern "C" fn AMlistPutUint( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); + let (index, insert) = adjust!(index, insert, doc.length(obj_id)); to_result(if insert { doc.insert(obj_id, index, value) } else { @@ -481,9 +577,8 @@ pub unsafe extern "C" fn AMlistPutUint( /// values. /// \return A pointer to an `AMresult` struct containing an `AMlistItems` /// struct. -/// \pre \p doc` != NULL`. -/// \pre \p begin` <= `\p end. -/// \pre \p end` <= SIZE_MAX`. +/// \pre \p doc `!= NULL`. +/// \pre \p begin `<=` \p end `<= SIZE_MAX`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal diff --git a/automerge-c/src/doc/map.rs b/automerge-c/src/doc/map.rs index 89ba688e..1ab93138 100644 --- a/automerge-c/src/doc/map.rs +++ b/automerge-c/src/doc/map.rs @@ -18,8 +18,8 @@ pub mod items; /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre \p key` != NULL`. 
+/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -46,9 +46,9 @@ pub unsafe extern "C" fn AMmapDelete( /// \p obj_id. /// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical /// value or `NULL` for the current value. -/// \return A pointer to an `AMresult` struct. -/// \pre \p doc` != NULL`. -/// \pre \p key` != NULL`. +/// \return A pointer to an `AMresult` struct that doesn't contain a void. +/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -56,6 +56,7 @@ pub unsafe extern "C" fn AMmapDelete( /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or NULL /// key must be a c string of the map key to be used +/// heads must be a valid pointer to an AMchangeHashes or NULL #[no_mangle] pub unsafe extern "C" fn AMmapGet( doc: *const AMdoc, @@ -71,6 +72,42 @@ pub unsafe extern "C" fn AMmapGet( } } +/// \memberof AMdoc +/// \brief Gets all of the historical values for a key in a map object until +/// its current one or a specific one. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key A UTF-8 string key for the map object identified by +/// \p obj_id. +/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical +/// last value or `NULL` for the current last value. +/// \return A pointer to an `AMresult` struct containing an `AMobjItems` struct. +/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
+/// \internal +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or NULL +/// key must be a c string of the map key to be used +/// heads must be a valid pointer to an AMchangeHashes or NULL +#[no_mangle] +pub unsafe extern "C" fn AMmapGetAll( + doc: *const AMdoc, + obj_id: *const AMobjId, + key: *const c_char, + heads: *const AMchangeHashes, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + match heads.as_ref() { + None => to_result(doc.get_all(obj_id, to_str(key))), + Some(heads) => to_result(doc.get_all_at(obj_id, to_str(key), heads.as_ref())), + } +} + /// \memberof AMdoc /// \brief Increments a counter for a key in a map object by the given value. /// @@ -79,8 +116,8 @@ pub unsafe extern "C" fn AMmapGet( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre \p key` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -107,8 +144,8 @@ pub unsafe extern "C" fn AMmapIncrement( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A boolean. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre \p key` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -136,10 +173,10 @@ pub unsafe extern "C" fn AMmapPutBool( /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes to copy from \p src. /// \return A pointer to an `AMresult` struct containing a void. 
-/// \pre \p doc` != NULL`. -/// \pre \p key` != NULL`. -/// \pre \p src` != NULL`. -/// \pre `0 <=` \p count` <= `size of \p src. +/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. +/// \pre \p src `!= NULL`. +/// \pre `0 <` \p count `<= sizeof(`\p src`)`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -170,8 +207,8 @@ pub unsafe extern "C" fn AMmapPutBytes( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre \p key` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -201,8 +238,8 @@ pub unsafe extern "C" fn AMmapPutCounter( /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre \p key` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -229,8 +266,8 @@ pub unsafe extern "C" fn AMmapPutNull( /// \param[in] obj_type An `AMobjIdType` enum tag. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMobjId` struct. -/// \pre \p doc` != NULL`. -/// \pre \p key` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal @@ -257,8 +294,8 @@ pub unsafe extern "C" fn AMmapPutObject( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit float. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre \p key` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -285,8 +322,8 @@ pub unsafe extern "C" fn AMmapPutF64( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre \p key` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -313,9 +350,9 @@ pub unsafe extern "C" fn AMmapPutInt( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A UTF-8 string. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre \p key` != NULL`. -/// \pre \p value` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. +/// \pre \p value `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -343,8 +380,8 @@ pub unsafe extern "C" fn AMmapPutStr( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre \p key` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. 
/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -375,8 +412,8 @@ pub unsafe extern "C" fn AMmapPutTimestamp( /// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. /// \param[in] value A 64-bit unsigned integer. /// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc` != NULL`. -/// \pre \p key` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre \p key `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -401,14 +438,16 @@ pub unsafe extern "C" fn AMmapPutUint( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] begin The first key in a range of keys or `NULL`. -/// \param[in] end One past the last key in a range of keys or `NULL`. +/// \param[in] begin The first key in a subrange or `NULL` to indicate the +/// absolute first key. +/// \param[in] end The key one past the last key in a subrange or `NULL` to +/// indicate one past the absolute last key. /// \param[in] heads A pointer to an `AMchangeHashes` struct for historical /// keys and values or `NULL` for current keys and values. /// \return A pointer to an `AMresult` struct containing an `AMmapItems` /// struct. -/// \pre \p doc` != NULL`. -/// \pre \p begin` <= `\p end if \p end` != NULL`. +/// \pre \p doc `!= NULL`. +/// \pre `strcmp(`\p begin, \p end`) != 1` if \p begin `!= NULL` and \p end `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal diff --git a/automerge-c/src/doc/utils.rs b/automerge-c/src/doc/utils.rs index 42a69b56..b3a975e5 100644 --- a/automerge-c/src/doc/utils.rs +++ b/automerge-c/src/doc/utils.rs @@ -49,5 +49,9 @@ macro_rules! 
to_obj_id { pub(crate) use to_obj_id; pub(crate) unsafe fn to_str(c: *const c_char) -> String { - CStr::from_ptr(c).to_string_lossy().to_string() + if !c.is_null() { + CStr::from_ptr(c).to_string_lossy().to_string() + } else { + String::default() + } } From d48e36627289568a239dc7f086de5af1def2b34f Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 6 Aug 2022 15:56:21 -0700 Subject: [PATCH 518/730] Fix some documentation content bugs. Fix some documentation formatting bugs. --- automerge-c/src/actor_id.rs | 12 +++++------ automerge-c/src/change.rs | 34 +++++++++++++++---------------- automerge-c/src/change_hashes.rs | 24 +++++++++++----------- automerge-c/src/changes.rs | 18 ++++++++-------- automerge-c/src/doc/list/item.rs | 8 ++++---- automerge-c/src/doc/list/items.rs | 18 ++++++++-------- automerge-c/src/doc/map/item.rs | 8 ++++---- automerge-c/src/doc/map/items.rs | 18 ++++++++-------- automerge-c/src/obj.rs | 12 +++++------ automerge-c/src/obj/item.rs | 6 +++--- automerge-c/src/obj/items.rs | 18 ++++++++-------- automerge-c/src/result_stack.rs | 10 ++++----- automerge-c/src/sync/have.rs | 2 +- automerge-c/src/sync/haves.rs | 18 ++++++++-------- automerge-c/src/sync/message.rs | 14 ++++++------- automerge-c/src/sync/state.rs | 28 ++++++++++++------------- 16 files changed, 124 insertions(+), 124 deletions(-) diff --git a/automerge-c/src/actor_id.rs b/automerge-c/src/actor_id.rs index 4b3dbf00..45d66fbe 100644 --- a/automerge-c/src/actor_id.rs +++ b/automerge-c/src/actor_id.rs @@ -46,7 +46,7 @@ impl AsRef for AMactorId { /// \brief Gets the value of an actor identifier as a sequence of bytes. /// /// \param[in] actor_id A pointer to an `AMactorId` struct. -/// \pre \p actor_id` != NULL`. +/// \pre \p actor_id `!= NULL`. /// \return An `AMbyteSpan` struct. /// \internal /// @@ -66,10 +66,10 @@ pub unsafe extern "C" fn AMactorIdBytes(actor_id: *const AMactorId) -> AMbyteSpa /// \param[in] actor_id1 A pointer to an `AMactorId` struct. 
/// \param[in] actor_id2 A pointer to an `AMactorId` struct. /// \return `-1` if \p actor_id1 `<` \p actor_id2, `0` if -/// \p actor_id1` == `\p actor_id2 and `1` if +/// \p actor_id1 `==` \p actor_id2 and `1` if /// \p actor_id1 `>` \p actor_id2. -/// \pre \p actor_id1` != NULL`. -/// \pre \p actor_id2` != NULL`. +/// \pre \p actor_id1 `!= NULL`. +/// \pre \p actor_id2 `!= NULL`. /// \internal /// /// #Safety @@ -111,7 +111,7 @@ pub unsafe extern "C" fn AMactorIdInit() -> *mut AMresult { /// /// \param[in] src A pointer to a contiguous sequence of bytes. /// \param[in] count The number of bytes to copy from \p src. -/// \pre `0 <=` \p count` <= `size of \p src. +/// \pre `0 <` \p count `<= sizeof(`\p src`)`. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMactorId` struct. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` @@ -150,7 +150,7 @@ pub unsafe extern "C" fn AMactorIdInitStr(hex_str: *const c_char) -> *mut AMresu /// \brief Gets the value of an actor identifier as a hexadecimal string. /// /// \param[in] actor_id A pointer to an `AMactorId` struct. -/// \pre \p actor_id` != NULL`. +/// \pre \p actor_id `!= NULL`. /// \return A UTF-8 string. /// \internal /// diff --git a/automerge-c/src/change.rs b/automerge-c/src/change.rs index 8c726a3b..a7e9f5c5 100644 --- a/automerge-c/src/change.rs +++ b/automerge-c/src/change.rs @@ -65,7 +65,7 @@ impl AsRef for AMchange { /// \brief Gets the first referenced actor identifier in a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \pre \p change` != NULL`. +/// \pre \p change `!= NULL`. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMactorId` struct. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` @@ -85,7 +85,7 @@ pub unsafe extern "C" fn AMchangeActorId(change: *const AMchange) -> *mut AMresu /// \brief Compresses the raw bytes of a change. 
/// /// \param[in,out] change A pointer to an `AMchange` struct. -/// \pre \p change` != NULL`. +/// \pre \p change `!= NULL`. /// \internal /// /// # Safety @@ -102,7 +102,7 @@ pub unsafe extern "C" fn AMchangeCompress(change: *mut AMchange) { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A pointer to an `AMchangeHashes` struct or `NULL`. -/// \pre \p change` != NULL`. +/// \pre \p change `!= NULL`. /// \internal /// /// # Safety @@ -120,7 +120,7 @@ pub unsafe extern "C" fn AMchangeDeps(change: *const AMchange) -> AMchangeHashes /// /// \param[in] change A pointer to an `AMchange` struct. /// \return An `AMbyteSpan` struct. -/// \pre \p change` != NULL`. +/// \pre \p change `!= NULL`. /// \internal /// /// # Safety @@ -140,8 +140,8 @@ pub unsafe extern "C" fn AMchangeExtraBytes(change: *const AMchange) -> AMbyteSp /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes in \p src to load. /// \return A pointer to an `AMresult` struct containing an `AMchange` struct. -/// \pre \p src` != NULL`. -/// \pre `0 <=` \p count` <= `size of \p src. +/// \pre \p src `!= NULL`. +/// \pre `0 <` \p count `<= sizeof(`\p src`)`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -159,7 +159,7 @@ pub unsafe extern "C" fn AMchangeFromBytes(src: *const u8, count: usize) -> *mut /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A change hash as an `AMbyteSpan` struct. -/// \pre \p change` != NULL`. +/// \pre \p change `!= NULL`. /// \internal /// /// # Safety @@ -180,7 +180,7 @@ pub unsafe extern "C" fn AMchangeHash(change: *const AMchange) -> AMbyteSpan { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A boolean. -/// \pre \p change` != NULL`. +/// \pre \p change `!= NULL`. 
/// \internal /// /// # Safety @@ -199,7 +199,7 @@ pub unsafe extern "C" fn AMchangeIsEmpty(change: *const AMchange) -> bool { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change` != NULL`. +/// \pre \p change `!= NULL`. /// \internal /// /// # Safety @@ -218,7 +218,7 @@ pub unsafe extern "C" fn AMchangeMaxOp(change: *const AMchange) -> u64 { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A UTF-8 string or `NULL`. -/// \pre \p change` != NULL`. +/// \pre \p change `!= NULL`. /// \internal /// /// # Safety @@ -236,7 +236,7 @@ pub unsafe extern "C" fn AMchangeMessage(change: *const AMchange) -> *const c_ch /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change` != NULL`. +/// \pre \p change `!= NULL`. /// \internal /// /// # Safety @@ -255,7 +255,7 @@ pub unsafe extern "C" fn AMchangeSeq(change: *const AMchange) -> u64 { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change` != NULL`. +/// \pre \p change `!= NULL`. /// \internal /// /// # Safety @@ -274,7 +274,7 @@ pub unsafe extern "C" fn AMchangeSize(change: *const AMchange) -> usize { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change` != NULL`. +/// \pre \p change `!= NULL`. /// \internal /// /// # Safety @@ -293,7 +293,7 @@ pub unsafe extern "C" fn AMchangeStartOp(change: *const AMchange) -> u64 { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit signed integer. -/// \pre \p change` != NULL`. +/// \pre \p change `!= NULL`. /// \internal /// /// # Safety @@ -312,7 +312,7 @@ pub unsafe extern "C" fn AMchangeTime(change: *const AMchange) -> i64 { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return An `AMbyteSpan` struct. -/// \pre \p change` != NULL`. 
+/// \pre \p change `!= NULL`. /// \internal /// /// # Safety @@ -333,8 +333,8 @@ pub unsafe extern "C" fn AMchangeRawBytes(change: *const AMchange) -> AMbyteSpan /// \param[in] count The number of bytes in \p src to load. /// \return A pointer to an `AMresult` struct containing a sequence of /// `AMchange` structs. -/// \pre \p src` != NULL`. -/// \pre `0 <=` \p count` <= `size of \p src. +/// \pre \p src `!= NULL`. +/// \pre `0 <` \p count `<= sizeof(`\p src`)`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal diff --git a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs index 5f5be108..007e6c4c 100644 --- a/automerge-c/src/change_hashes.rs +++ b/automerge-c/src/change_hashes.rs @@ -193,7 +193,7 @@ impl Default for AMchangeHashes { /// \param[in,out] change_hashes A pointer to an `AMchangeHashes` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \pre \p change_hashes` != NULL`. +/// \pre \p change_hashes `!= NULL`. /// \internal /// /// #Safety @@ -212,10 +212,10 @@ pub unsafe extern "C" fn AMchangeHashesAdvance(change_hashes: *mut AMchangeHashe /// \param[in] change_hashes1 A pointer to an `AMchangeHashes` struct. /// \param[in] change_hashes2 A pointer to an `AMchangeHashes` struct. /// \return `-1` if \p change_hashes1 `<` \p change_hashes2, `0` if -/// \p change_hashes1` == `\p change_hashes2 and `1` if +/// \p change_hashes1 `==` \p change_hashes2 and `1` if /// \p change_hashes1 `>` \p change_hashes2. -/// \pre \p change_hashes1` != NULL`. -/// \pre \p change_hashes2` != NULL`. +/// \pre \p change_hashes1 `!= NULL`. +/// \pre \p change_hashes2 `!= NULL`. 
/// \internal /// /// #Safety @@ -240,7 +240,7 @@ pub unsafe extern "C" fn AMchangeHashesCmp( } } -/// \memberof AMchangeHashesInit +/// \memberof AMchangeHashes /// \brief Allocates an iterator over a sequence of change hashes and /// initializes it from a sequence of byte spans. /// @@ -248,8 +248,8 @@ pub unsafe extern "C" fn AMchangeHashesCmp( /// \param[in] count The number of `AMbyteSpan` structs to copy from \p src. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// struct. -/// \pre \p src` != NULL`. -/// \pre `0 <=` \p count` <= `size of \p src. +/// \pre \p src `!= NULL`. +/// \pre `0 <` \p count `<= sizeof(`\p src`) / sizeof(AMbyteSpan)`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -286,7 +286,7 @@ pub unsafe extern "C" fn AMchangeHashesInit(src: *const AMbyteSpan, count: usize /// number of positions to advance. /// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes /// was previously advanced past its forward/reverse limit. -/// \pre \p change_hashes` != NULL`. +/// \pre \p change_hashes `!= NULL`. /// \internal /// /// #Safety @@ -315,7 +315,7 @@ pub unsafe extern "C" fn AMchangeHashesNext( /// number of positions to advance. /// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes is /// presently advanced past its forward/reverse limit. -/// \pre \p change_hashes` != NULL`. +/// \pre \p change_hashes `!= NULL`. /// \internal /// /// #Safety @@ -339,7 +339,7 @@ pub unsafe extern "C" fn AMchangeHashesPrev( /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. /// \return The count of values in \p change_hashes. -/// \pre \p change_hashes` != NULL`. +/// \pre \p change_hashes `!= NULL`. 
/// \internal /// /// #Safety @@ -359,7 +359,7 @@ pub unsafe extern "C" fn AMchangeHashesSize(change_hashes: *const AMchangeHashes /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. /// \return An `AMchangeHashes` struct -/// \pre \p change_hashes` != NULL`. +/// \pre \p change_hashes `!= NULL`. /// \internal /// /// #Safety @@ -381,7 +381,7 @@ pub unsafe extern "C" fn AMchangeHashesReversed( /// /// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. /// \return An `AMchangeHashes` struct -/// \pre \p change_hashes` != NULL`. +/// \pre \p change_hashes `!= NULL`. /// \internal /// /// #Safety diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs index 45b654eb..4d9df36b 100644 --- a/automerge-c/src/changes.rs +++ b/automerge-c/src/changes.rs @@ -216,7 +216,7 @@ impl Default for AMchanges { /// \param[in,out] changes A pointer to an `AMchanges` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \pre \p changes` != NULL`. +/// \pre \p changes `!= NULL`. /// \internal /// /// #Safety @@ -234,9 +234,9 @@ pub unsafe extern "C" fn AMchangesAdvance(changes: *mut AMchanges, n: isize) { /// /// \param[in] changes1 A pointer to an `AMchanges` struct. /// \param[in] changes2 A pointer to an `AMchanges` struct. -/// \return `true` if \p changes1` == `\p changes2 and `false` otherwise. -/// \pre \p changes1` != NULL`. -/// \pre \p changes2` != NULL`. +/// \return `true` if \p changes1 `==` \p changes2 and `false` otherwise. +/// \pre \p changes1 `!= NULL`. +/// \pre \p changes2 `!= NULL`. /// \internal /// /// #Safety @@ -263,7 +263,7 @@ pub unsafe extern "C" fn AMchangesEqual( /// number of positions to advance. /// \return A pointer to an `AMchange` struct that's `NULL` when \p changes was /// previously advanced past its forward/reverse limit. -/// \pre \p changes` != NULL`. +/// \pre \p changes `!= NULL`. 
/// \internal /// /// #Safety @@ -288,7 +288,7 @@ pub unsafe extern "C" fn AMchangesNext(changes: *mut AMchanges, n: isize) -> *co /// number of positions to advance. /// \return A pointer to an `AMchange` struct that's `NULL` when \p changes is /// presently advanced past its forward/reverse limit. -/// \pre \p changes` != NULL`. +/// \pre \p changes `!= NULL`. /// \internal /// /// #Safety @@ -308,7 +308,7 @@ pub unsafe extern "C" fn AMchangesPrev(changes: *mut AMchanges, n: isize) -> *co /// /// \param[in] changes A pointer to an `AMchanges` struct. /// \return The count of values in \p changes. -/// \pre \p changes` != NULL`. +/// \pre \p changes `!= NULL`. /// \internal /// /// #Safety @@ -328,7 +328,7 @@ pub unsafe extern "C" fn AMchangesSize(changes: *const AMchanges) -> usize { /// /// \param[in] changes A pointer to an `AMchanges` struct. /// \return An `AMchanges` struct. -/// \pre \p changes` != NULL`. +/// \pre \p changes `!= NULL`. /// \internal /// /// #Safety @@ -348,7 +348,7 @@ pub unsafe extern "C" fn AMchangesReversed(changes: *const AMchanges) -> AMchang /// /// \param[in] changes A pointer to an `AMchanges` struct. /// \return An `AMchanges` struct -/// \pre \p changes` != NULL`. +/// \pre \p changes `!= NULL`. /// \internal /// /// #Safety diff --git a/automerge-c/src/doc/list/item.rs b/automerge-c/src/doc/list/item.rs index ac352620..31b97e1d 100644 --- a/automerge-c/src/doc/list/item.rs +++ b/automerge-c/src/doc/list/item.rs @@ -5,7 +5,7 @@ use std::ffi::CString; use crate::obj::AMobjId; use crate::result::AMvalue; -/// \enum AMlistItem +/// \struct AMlistItem /// \brief An item in a list object. #[repr(C)] pub struct AMlistItem { @@ -46,7 +46,7 @@ impl From<&AMlistItem> for (usize, am::Value<'static>, am::ObjId) { /// /// \param[in] list_item A pointer to an `AMlistItem` struct. /// \return A 64-bit unsigned integer. -/// \pre \p list_item` != NULL`. +/// \pre \p list_item `!= NULL`. 
/// \internal /// /// # Safety @@ -65,7 +65,7 @@ pub unsafe extern "C" fn AMlistItemIndex(list_item: *const AMlistItem) -> usize /// /// \param[in] list_item A pointer to an `AMlistItem` struct. /// \return A pointer to an `AMobjId` struct. -/// \pre \p list_item` != NULL`. +/// \pre \p list_item `!= NULL`. /// \internal /// /// # Safety @@ -84,7 +84,7 @@ pub unsafe extern "C" fn AMlistItemObjId(list_item: *const AMlistItem) -> *const /// /// \param[in] list_item A pointer to an `AMlistItem` struct. /// \return An `AMvalue` struct. -/// \pre \p list_item` != NULL`. +/// \pre \p list_item `!= NULL`. /// \internal /// /// # Safety diff --git a/automerge-c/src/doc/list/items.rs b/automerge-c/src/doc/list/items.rs index ef6aa45e..7c596f93 100644 --- a/automerge-c/src/doc/list/items.rs +++ b/automerge-c/src/doc/list/items.rs @@ -190,7 +190,7 @@ impl Default for AMlistItems { /// \param[in,out] list_items A pointer to an `AMlistItems` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \pre \p list_items` != NULL`. +/// \pre \p list_items `!= NULL`. /// \internal /// /// #Safety @@ -208,9 +208,9 @@ pub unsafe extern "C" fn AMlistItemsAdvance(list_items: *mut AMlistItems, n: isi /// /// \param[in] list_items1 A pointer to an `AMlistItems` struct. /// \param[in] list_items2 A pointer to an `AMlistItems` struct. -/// \return `true` if \p list_items1` == `\p list_items2 and `false` otherwise. -/// \pre \p list_items1` != NULL`. -/// \pre \p list_items2` != NULL`. +/// \return `true` if \p list_items1 `==` \p list_items2 and `false` otherwise. +/// \pre \p list_items1 `!= NULL`. +/// \pre \p list_items2 `!= NULL`. /// \internal /// /// #Safety @@ -239,7 +239,7 @@ pub unsafe extern "C" fn AMlistItemsEqual( /// \return A pointer to an `AMlistItem` struct that's `NULL` when /// \p list_items was previously advanced past its forward/reverse /// limit. -/// \pre \p list_items` != NULL`. 
+/// \pre \p list_items `!= NULL`. /// \internal /// /// #Safety @@ -268,7 +268,7 @@ pub unsafe extern "C" fn AMlistItemsNext( /// number of positions to advance. /// \return A pointer to an `AMlistItem` struct that's `NULL` when /// \p list_items is presently advanced past its forward/reverse limit. -/// \pre \p list_items` != NULL`. +/// \pre \p list_items `!= NULL`. /// \internal /// /// #Safety @@ -292,7 +292,7 @@ pub unsafe extern "C" fn AMlistItemsPrev( /// /// \param[in] list_items A pointer to an `AMlistItems` struct. /// \return The count of values in \p list_items. -/// \pre \p list_items` != NULL`. +/// \pre \p list_items `!= NULL`. /// \internal /// /// #Safety @@ -312,7 +312,7 @@ pub unsafe extern "C" fn AMlistItemsSize(list_items: *const AMlistItems) -> usiz /// /// \param[in] list_items A pointer to an `AMlistItems` struct. /// \return An `AMlistItems` struct -/// \pre \p list_items` != NULL`. +/// \pre \p list_items `!= NULL`. /// \internal /// /// #Safety @@ -332,7 +332,7 @@ pub unsafe extern "C" fn AMlistItemsReversed(list_items: *const AMlistItems) -> /// /// \param[in] list_items A pointer to an `AMlistItems` struct. /// \return An `AMlistItems` struct -/// \pre \p list_items` != NULL`. +/// \pre \p list_items `!= NULL`. /// \internal /// /// #Safety diff --git a/automerge-c/src/doc/map/item.rs b/automerge-c/src/doc/map/item.rs index 2b7d877d..b75567f8 100644 --- a/automerge-c/src/doc/map/item.rs +++ b/automerge-c/src/doc/map/item.rs @@ -6,7 +6,7 @@ use std::os::raw::c_char; use crate::obj::AMobjId; use crate::result::AMvalue; -/// \enum AMmapItem +/// \struct AMmapItem /// \brief An item in a map object. #[repr(C)] pub struct AMmapItem { @@ -47,7 +47,7 @@ impl From<&AMmapItem> for (String, am::Value<'static>, am::ObjId) { /// /// \param[in] map_item A pointer to an `AMmapItem` struct. /// \return A 64-bit unsigned integer. -/// \pre \p map_item` != NULL`. +/// \pre \p map_item `!= NULL`. 
/// \internal /// /// # Safety @@ -66,7 +66,7 @@ pub unsafe extern "C" fn AMmapItemKey(map_item: *const AMmapItem) -> *const c_ch /// /// \param[in] map_item A pointer to an `AMmapItem` struct. /// \return A pointer to an `AMobjId` struct. -/// \pre \p map_item` != NULL`. +/// \pre \p map_item `!= NULL`. /// \internal /// /// # Safety @@ -85,7 +85,7 @@ pub unsafe extern "C" fn AMmapItemObjId(map_item: *const AMmapItem) -> *const AM /// /// \param[in] map_item A pointer to an `AMmapItem` struct. /// \return An `AMvalue` struct. -/// \pre \p map_item` != NULL`. +/// \pre \p map_item `!= NULL`. /// \internal /// /// # Safety diff --git a/automerge-c/src/doc/map/items.rs b/automerge-c/src/doc/map/items.rs index c1ed9999..911bd7c4 100644 --- a/automerge-c/src/doc/map/items.rs +++ b/automerge-c/src/doc/map/items.rs @@ -190,7 +190,7 @@ impl Default for AMmapItems { /// \param[in,out] map_items A pointer to an `AMmapItems` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \pre \p map_items` != NULL`. +/// \pre \p map_items `!= NULL`. /// \internal /// /// #Safety @@ -208,9 +208,9 @@ pub unsafe extern "C" fn AMmapItemsAdvance(map_items: *mut AMmapItems, n: isize) /// /// \param[in] map_items1 A pointer to an `AMmapItems` struct. /// \param[in] map_items2 A pointer to an `AMmapItems` struct. -/// \return `true` if \p map_items1` == `\p map_items2 and `false` otherwise. -/// \pre \p map_items1` != NULL`. -/// \pre \p map_items2` != NULL`. +/// \return `true` if \p map_items1 `==` \p map_items2 and `false` otherwise. +/// \pre \p map_items1 `!= NULL`. +/// \pre \p map_items2 `!= NULL`. /// \internal /// /// #Safety @@ -238,7 +238,7 @@ pub unsafe extern "C" fn AMmapItemsEqual( /// number of positions to advance. /// \return A pointer to an `AMmapItem` struct that's `NULL` when \p map_items /// was previously advanced past its forward/reverse limit. -/// \pre \p map_items` != NULL`. 
+/// \pre \p map_items `!= NULL`. /// \internal /// /// #Safety @@ -264,7 +264,7 @@ pub unsafe extern "C" fn AMmapItemsNext(map_items: *mut AMmapItems, n: isize) -> /// number of positions to advance. /// \return A pointer to an `AMmapItem` struct that's `NULL` when \p map_items /// is presently advanced past its forward/reverse limit. -/// \pre \p map_items` != NULL`. +/// \pre \p map_items `!= NULL`. /// \internal /// /// #Safety @@ -285,7 +285,7 @@ pub unsafe extern "C" fn AMmapItemsPrev(map_items: *mut AMmapItems, n: isize) -> /// /// \param[in] map_items A pointer to an `AMmapItems` struct. /// \return The count of values in \p map_items. -/// \pre \p map_items` != NULL`. +/// \pre \p map_items `!= NULL`. /// \internal /// /// #Safety @@ -305,7 +305,7 @@ pub unsafe extern "C" fn AMmapItemsSize(map_items: *const AMmapItems) -> usize { /// /// \param[in] map_items A pointer to an `AMmapItems` struct. /// \return An `AMmapItems` struct -/// \pre \p map_items` != NULL`. +/// \pre \p map_items `!= NULL`. /// \internal /// /// #Safety @@ -324,7 +324,7 @@ pub unsafe extern "C" fn AMmapItemsReversed(map_items: *const AMmapItems) -> AMm /// /// \param[in] map_items A pointer to an `AMmapItems` struct. /// \return An `AMmapItems` struct -/// \pre \p map_items` != NULL`. +/// \pre \p map_items `!= NULL`. /// \internal /// /// #Safety diff --git a/automerge-c/src/obj.rs b/automerge-c/src/obj.rs index 77a4c6eb..5913e596 100644 --- a/automerge-c/src/obj.rs +++ b/automerge-c/src/obj.rs @@ -58,7 +58,7 @@ impl Deref for AMobjId { /// /// \param[in] obj_id A pointer to an `AMobjId` struct. /// \return A pointer to an `AMactorId` struct or `NULL`. -/// \pre \p obj_id` != NULL`. +/// \pre \p obj_id `!= NULL`. /// \internal /// /// # Safety @@ -76,7 +76,7 @@ pub unsafe extern "C" fn AMobjIdActorId(obj_id: *const AMobjId) -> *const AMacto /// /// \param[in] obj_id A pointer to an `AMobjId` struct. /// \return A 64-bit unsigned integer. -/// \pre \p obj_id` != NULL`. 
+/// \pre \p obj_id `!= NULL`. /// \internal /// /// # Safety @@ -98,9 +98,9 @@ pub unsafe extern "C" fn AMobjIdCounter(obj_id: *const AMobjId) -> u64 { /// /// \param[in] obj_id1 A pointer to an `AMobjId` struct. /// \param[in] obj_id2 A pointer to an `AMobjId` struct. -/// \return `true` if \p obj_id1` == `\p obj_id2 and `false` otherwise. -/// \pre \p obj_id1` != NULL`. -/// \pre \p obj_id2` != NULL`. +/// \return `true` if \p obj_id1 `==` \p obj_id2 and `false` otherwise. +/// \pre \p obj_id1 `!= NULL`. +/// \pre \p obj_id2 `!= NULL`. /// \internal /// /// #Safety @@ -119,7 +119,7 @@ pub unsafe extern "C" fn AMobjIdEqual(obj_id1: *const AMobjId, obj_id2: *const A /// /// \param[in] obj_id A pointer to an `AMobjId` struct. /// \return A 64-bit unsigned integer. -/// \pre \p obj_id` != NULL`. +/// \pre \p obj_id `!= NULL`. /// \internal /// /// # Safety diff --git a/automerge-c/src/obj/item.rs b/automerge-c/src/obj/item.rs index 38bac2d8..18a6d7de 100644 --- a/automerge-c/src/obj/item.rs +++ b/automerge-c/src/obj/item.rs @@ -5,7 +5,7 @@ use std::ffi::CString; use crate::obj::AMobjId; use crate::result::AMvalue; -/// \enum AMobjItem +/// \struct AMobjItem /// \brief An item in an object. #[repr(C)] pub struct AMobjItem { @@ -41,7 +41,7 @@ impl From<&AMobjItem> for (am::Value<'static>, am::ObjId) { /// /// \param[in] obj_item A pointer to an `AMobjItem` struct. /// \return A pointer to an `AMobjId` struct. -/// \pre \p obj_item` != NULL`. +/// \pre \p obj_item `!= NULL`. /// \internal /// /// # Safety @@ -60,7 +60,7 @@ pub unsafe extern "C" fn AMobjItemObjId(obj_item: *const AMobjItem) -> *const AM /// /// \param[in] obj_item A pointer to an `AMobjItem` struct. /// \return An `AMvalue` struct. -/// \pre \p obj_item` != NULL`. +/// \pre \p obj_item `!= NULL`. 
/// \internal /// /// # Safety diff --git a/automerge-c/src/obj/items.rs b/automerge-c/src/obj/items.rs index ae6edb3e..dd8bb74b 100644 --- a/automerge-c/src/obj/items.rs +++ b/automerge-c/src/obj/items.rs @@ -190,7 +190,7 @@ impl Default for AMobjItems { /// \param[in,out] obj_items A pointer to an `AMobjItems` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \pre \p obj_items` != NULL`. +/// \pre \p obj_items `!= NULL`. /// \internal /// /// #Safety @@ -208,9 +208,9 @@ pub unsafe extern "C" fn AMobjItemsAdvance(obj_items: *mut AMobjItems, n: isize) /// /// \param[in] obj_items1 A pointer to an `AMobjItems` struct. /// \param[in] obj_items2 A pointer to an `AMobjItems` struct. -/// \return `true` if \p obj_items1` == `\p obj_items2 and `false` otherwise. -/// \pre \p obj_items1` != NULL`. -/// \pre \p obj_items2` != NULL`. +/// \return `true` if \p obj_items1 `==` \p obj_items2 and `false` otherwise. +/// \pre \p obj_items1 `!= NULL`. +/// \pre \p obj_items2 `!= NULL`. /// \internal /// /// #Safety @@ -238,7 +238,7 @@ pub unsafe extern "C" fn AMobjItemsEqual( /// number of positions to advance. /// \return A pointer to an `AMobjItem` struct that's `NULL` when \p obj_items /// was previously advanced past its forward/reverse limit. -/// \pre \p obj_items` != NULL`. +/// \pre \p obj_items `!= NULL`. /// \internal /// /// #Safety @@ -264,7 +264,7 @@ pub unsafe extern "C" fn AMobjItemsNext(obj_items: *mut AMobjItems, n: isize) -> /// number of positions to advance. /// \return A pointer to an `AMobjItem` struct that's `NULL` when \p obj_items /// is presently advanced past its forward/reverse limit. -/// \pre \p obj_items` != NULL`. +/// \pre \p obj_items `!= NULL`. /// \internal /// /// #Safety @@ -285,7 +285,7 @@ pub unsafe extern "C" fn AMobjItemsPrev(obj_items: *mut AMobjItems, n: isize) -> /// /// \param[in] obj_items A pointer to an `AMobjItems` struct. 
/// \return The count of values in \p obj_items. -/// \pre \p obj_items` != NULL`. +/// \pre \p obj_items `!= NULL`. /// \internal /// /// #Safety @@ -305,7 +305,7 @@ pub unsafe extern "C" fn AMobjItemsSize(obj_items: *const AMobjItems) -> usize { /// /// \param[in] obj_items A pointer to an `AMobjItems` struct. /// \return An `AMobjItems` struct -/// \pre \p obj_items` != NULL`. +/// \pre \p obj_items `!= NULL`. /// \internal /// /// #Safety @@ -325,7 +325,7 @@ pub unsafe extern "C" fn AMobjItemsReversed(obj_items: *const AMobjItems) -> AMo /// /// \param[in] obj_items A pointer to an `AMobjItems` struct. /// \return An `AMobjItems` struct -/// \pre \p obj_items` != NULL`. +/// \pre \p obj_items `!= NULL`. /// \internal /// /// #Safety diff --git a/automerge-c/src/result_stack.rs b/automerge-c/src/result_stack.rs index 32e23b4a..58f67950 100644 --- a/automerge-c/src/result_stack.rs +++ b/automerge-c/src/result_stack.rs @@ -21,7 +21,7 @@ impl AMresultStack { /// /// \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. /// \return The number of `AMresult` structs freed. -/// \pre \p stack` != NULL`. +/// \pre \p stack `!= NULL`. /// \post `*stack == NULL`. /// \internal /// @@ -45,7 +45,7 @@ pub unsafe extern "C" fn AMfreeStack(stack: *mut *mut AMresultStack) -> usize { /// /// \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. /// \return A pointer to an `AMresult` struct or `NULL`. -/// \pre \p stack` != NULL`. +/// \pre \p stack `!= NULL`. /// \post `*stack == NULL`. /// \internal /// @@ -81,9 +81,9 @@ pub type AMpushCallback = /// \param[in] callback A pointer to a function with the same signature as /// `AMpushCallback()` or `NULL`. /// \return An `AMvalue` struct. -/// \pre \p stack` != NULL`. -/// \pre \p result` != NULL`. -/// \warning If \p stack` == NULL` then \p result is deallocated in order to +/// \pre \p stack `!= NULL`. +/// \pre \p result `!= NULL`. 
+/// \warning If \p stack `== NULL` then \p result is deallocated in order to /// prevent a memory leak. /// \internal /// diff --git a/automerge-c/src/sync/have.rs b/automerge-c/src/sync/have.rs index 2396e8fe..ea13ef16 100644 --- a/automerge-c/src/sync/have.rs +++ b/automerge-c/src/sync/have.rs @@ -25,7 +25,7 @@ impl AsRef for AMsyncHave { /// /// \param[in] sync_have A pointer to an `AMsyncHave` struct. /// \return An `AMchangeHashes` struct. -/// \pre \p sync_have` != NULL`. +/// \pre \p sync_have `!= NULL`. /// \internal /// /// # Safety diff --git a/automerge-c/src/sync/haves.rs b/automerge-c/src/sync/haves.rs index 98d83b38..f435cb4a 100644 --- a/automerge-c/src/sync/haves.rs +++ b/automerge-c/src/sync/haves.rs @@ -220,7 +220,7 @@ impl Default for AMsyncHaves { /// \param[in,out] sync_haves A pointer to an `AMsyncHaves` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \pre \p sync_haves` != NULL`. +/// \pre \p sync_haves `!= NULL`. /// \internal /// /// #Safety @@ -238,9 +238,9 @@ pub unsafe extern "C" fn AMsyncHavesAdvance(sync_haves: *mut AMsyncHaves, n: isi /// /// \param[in] sync_haves1 A pointer to an `AMsyncHaves` struct. /// \param[in] sync_haves2 A pointer to an `AMsyncHaves` struct. -/// \return `true` if \p sync_haves1` == `\p sync_haves2 and `false` otherwise. -/// \pre \p sync_haves1` != NULL`. -/// \pre \p sync_haves2` != NULL`. +/// \return `true` if \p sync_haves1 `==` \p sync_haves2 and `false` otherwise. +/// \pre \p sync_haves1 `!= NULL`. +/// \pre \p sync_haves2 `!= NULL`. /// \internal /// /// #Safety @@ -269,7 +269,7 @@ pub unsafe extern "C" fn AMsyncHavesEqual( /// \return A pointer to an `AMsyncHave` struct that's `NULL` when /// \p sync_haves was previously advanced past its forward/reverse /// limit. -/// \pre \p sync_haves` != NULL`. +/// \pre \p sync_haves `!= NULL`. 
/// \internal /// /// #Safety @@ -298,7 +298,7 @@ pub unsafe extern "C" fn AMsyncHavesNext( /// number of positions to advance. /// \return A pointer to an `AMsyncHave` struct that's `NULL` when /// \p sync_haves is presently advanced past its forward/reverse limit. -/// \pre \p sync_haves` != NULL`. +/// \pre \p sync_haves `!= NULL`. /// \internal /// /// #Safety @@ -322,7 +322,7 @@ pub unsafe extern "C" fn AMsyncHavesPrev( /// /// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. /// \return The count of values in \p sync_haves. -/// \pre \p sync_haves` != NULL`. +/// \pre \p sync_haves `!= NULL`. /// \internal /// /// #Safety @@ -342,7 +342,7 @@ pub unsafe extern "C" fn AMsyncHavesSize(sync_haves: *const AMsyncHaves) -> usiz /// /// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. /// \return An `AMsyncHaves` struct -/// \pre \p sync_haves` != NULL`. +/// \pre \p sync_haves `!= NULL`. /// \internal /// /// #Safety @@ -362,7 +362,7 @@ pub unsafe extern "C" fn AMsyncHavesReversed(sync_haves: *const AMsyncHaves) -> /// /// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. /// \return An `AMsyncHaves` struct -/// \pre \p sync_haves` != NULL`. +/// \pre \p sync_haves `!= NULL`. /// \internal /// /// #Safety diff --git a/automerge-c/src/sync/message.rs b/automerge-c/src/sync/message.rs index a07af89b..d0f683f6 100644 --- a/automerge-c/src/sync/message.rs +++ b/automerge-c/src/sync/message.rs @@ -51,7 +51,7 @@ impl AsRef for AMsyncMessage { /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return An `AMchanges` struct. -/// \pre \p sync_message` != NULL`. +/// \pre \p sync_message `!= NULL`. /// \internal /// /// # Safety @@ -75,8 +75,8 @@ pub unsafe extern "C" fn AMsyncMessageChanges(sync_message: *const AMsyncMessage /// \param[in] count The number of bytes in \p src to decode. /// \return A pointer to an `AMresult` struct containing an `AMsyncMessage` /// struct. -/// \pre \p src` != NULL`. 
-/// \pre `0 <=` \p count` <= `size of \p src. +/// \pre \p src `!= NULL`. +/// \pre `0 <` \p count `<= sizeof(`\p src`)`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -95,7 +95,7 @@ pub unsafe extern "C" fn AMsyncMessageDecode(src: *const u8, count: usize) -> *m /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. -/// \pre \p sync_message` != NULL`. +/// \pre \p sync_message `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -112,7 +112,7 @@ pub unsafe extern "C" fn AMsyncMessageEncode(sync_message: *const AMsyncMessage) /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return An `AMhaves` struct. -/// \pre \p sync_message` != NULL`. +/// \pre \p sync_message `!= NULL`. /// \internal /// /// # Safety @@ -134,7 +134,7 @@ pub unsafe extern "C" fn AMsyncMessageHaves(sync_message: *const AMsyncMessage) /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return An `AMchangeHashes` struct. -/// \pre \p sync_message` != NULL`. +/// \pre \p sync_message `!= NULL`. /// \internal /// /// # Safety @@ -154,7 +154,7 @@ pub unsafe extern "C" fn AMsyncMessageHeads(sync_message: *const AMsyncMessage) /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. /// \return An `AMchangeHashes` struct. -/// \pre \p sync_message` != NULL`. +/// \pre \p sync_message `!= NULL`. /// \internal /// /// # Safety diff --git a/automerge-c/src/sync/state.rs b/automerge-c/src/sync/state.rs index a329d485..19411753 100644 --- a/automerge-c/src/sync/state.rs +++ b/automerge-c/src/sync/state.rs @@ -61,8 +61,8 @@ impl From for *mut AMsyncState { /// \param[in] count The number of bytes in \p src to decode. 
/// \return A pointer to an `AMresult` struct containing an `AMsyncState` /// struct. -/// \pre \p src` != NULL`. -/// \pre `0 <=` \p count` <= `size of \p src. +/// \pre \p src `!= NULL`. +/// \pre `0 <` \p count `<= sizeof(`\p src`)`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -81,7 +81,7 @@ pub unsafe extern "C" fn AMsyncStateDecode(src: *const u8, count: usize) -> *mut /// \param[in] sync_state A pointer to an `AMsyncState` struct. /// \return A pointer to an `AMresult` struct containing an array of bytes as /// an `AMbyteSpan` struct. -/// \pre \p sync_state` != NULL`. +/// \pre \p sync_state `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -98,9 +98,9 @@ pub unsafe extern "C" fn AMsyncStateEncode(sync_state: *const AMsyncState) -> *m /// /// \param[in] sync_state1 A pointer to an `AMsyncState` struct. /// \param[in] sync_state2 A pointer to an `AMsyncState` struct. -/// \return `true` if \p sync_state1` == `\p sync_state2 and `false` otherwise. -/// \pre \p sync_state1` != NULL`. -/// \pre \p sync_state2` != NULL`. +/// \return `true` if \p sync_state1 `==` \p sync_state2 and `false` otherwise. +/// \pre \p sync_state1 `!= NULL`. +/// \pre \p sync_state2 `!= NULL`. /// \internal /// /// #Safety @@ -135,7 +135,7 @@ pub extern "C" fn AMsyncStateInit() -> *mut AMresult { /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. /// \return An `AMchangeHashes` struct. -/// \pre \p sync_state` != NULL`. +/// \pre \p sync_state `!= NULL`. /// \internal /// /// # Safety @@ -154,7 +154,7 @@ pub unsafe extern "C" fn AMsyncStateSharedHeads(sync_state: *const AMsyncState) /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. /// \return An `AMchangeHashes` struct. -/// \pre \p sync_state` != NULL`. +/// \pre \p sync_state `!= NULL`. 
/// \internal /// /// # Safety @@ -177,8 +177,8 @@ pub unsafe extern "C" fn AMsyncStateLastSentHeads( /// \param[out] has_value A pointer to a boolean flag that is set to `true` if /// the returned `AMhaves` struct is relevant, `false` otherwise. /// \return An `AMhaves` struct. -/// \pre \p sync_state` != NULL`. -/// \pre \p has_value` != NULL`. +/// \pre \p sync_state `!= NULL`. +/// \pre \p has_value `!= NULL`. /// \internal /// /// # Safety @@ -207,8 +207,8 @@ pub unsafe extern "C" fn AMsyncStateTheirHaves( /// the returned `AMchangeHashes` struct is relevant, `false` /// otherwise. /// \return An `AMchangeHashes` struct. -/// \pre \p sync_state` != NULL`. -/// \pre \p has_value` != NULL`. +/// \pre \p sync_state `!= NULL`. +/// \pre \p has_value `!= NULL`. /// \internal /// /// # Safety @@ -237,8 +237,8 @@ pub unsafe extern "C" fn AMsyncStateTheirHeads( /// the returned `AMchangeHashes` struct is relevant, `false` /// otherwise. /// \return An `AMchangeHashes` struct. -/// \pre \p sync_state` != NULL`. -/// \pre \p has_value` != NULL`. +/// \pre \p sync_state `!= NULL`. +/// \pre \p has_value `!= NULL`. /// \internal /// /// # Safety From 14bd8fbe9722c7a333243f1ce87fe1b1c9168cd1 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 6 Aug 2022 16:18:59 -0700 Subject: [PATCH 519/730] Port the WASM API's basic unit tests to C. Weave the original TypeScript code into the C ports of the WASM API's sync tests. Fix misnomers in the WASM API's basic and sync unit tests. Fix misspellings in the WASM API's basic and sync unit tests. 
--- automerge-c/test/CMakeLists.txt | 4 +- automerge-c/test/main.c | 4 +- automerge-c/test/ported_wasm/basic_tests.c | 1735 ++++++++++++++++++++ automerge-c/test/ported_wasm/suite.c | 18 + automerge-c/test/ported_wasm/sync_tests.c | 1415 ++++++++++++++++ automerge-c/test/sync_tests.c | 1143 ------------- automerge-wasm/test/test.ts | 858 +++++----- 7 files changed, 3602 insertions(+), 1575 deletions(-) create mode 100644 automerge-c/test/ported_wasm/basic_tests.c create mode 100644 automerge-c/test/ported_wasm/suite.c create mode 100644 automerge-c/test/ported_wasm/sync_tests.c delete mode 100644 automerge-c/test/sync_tests.c diff --git a/automerge-c/test/CMakeLists.txt b/automerge-c/test/CMakeLists.txt index 6789b655..770d5d2d 100644 --- a/automerge-c/test/CMakeLists.txt +++ b/automerge-c/test/CMakeLists.txt @@ -13,7 +13,9 @@ add_executable( map_tests.c stack_utils.c str_utils.c - sync_tests.c + ported_wasm/basic_tests.c + ported_wasm/suite.c + ported_wasm/sync_tests.c ) set_target_properties(test_${LIBRARY_NAME} PROPERTIES LINKER_LANGUAGE C) diff --git a/automerge-c/test/main.c b/automerge-c/test/main.c index 3eeb8a3b..09b71bd5 100644 --- a/automerge-c/test/main.c +++ b/automerge-c/test/main.c @@ -14,7 +14,7 @@ extern int run_list_tests(void); extern int run_map_tests(void); -extern int run_sync_tests(void); +extern int run_ported_wasm_suite(void); int main(void) { return ( @@ -22,6 +22,6 @@ int main(void) { run_doc_tests() + run_list_tests() + run_map_tests() + - run_sync_tests() + run_ported_wasm_suite() ); } diff --git a/automerge-c/test/ported_wasm/basic_tests.c b/automerge-c/test/ported_wasm/basic_tests.c new file mode 100644 index 00000000..8f584d1e --- /dev/null +++ b/automerge-c/test/ported_wasm/basic_tests.c @@ -0,0 +1,1735 @@ +#include +#include +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include "automerge.h" +#include "../stack_utils.h" + +/** + * \brief default import init() should return a 
promise + */ +static void test_default_import_init_should_return_a_promise(void** state); + +/** + * \brief should create, clone and free + */ +static void test_create_clone_and_free(void** state) { + AMresultStack* stack = *state; + /* const doc1 = create() */ + AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* const doc2 = doc1.clone() */ + AMdoc* const doc2 = AMpush(&stack, AMclone(doc1), AM_VALUE_DOC, cmocka_cb).doc; +} + +/** + * \brief should be able to start and commit + */ +static void test_start_and_commit(void** state) { + AMresultStack* stack = *state; + /* const doc = create() */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* doc.commit() */ + AMpush(&stack, AMcommit(doc, NULL, NULL), AM_VALUE_CHANGE_HASHES, cmocka_cb); +} + +/** + * \brief getting a nonexistent prop does not throw an error + */ +static void test_getting_a_nonexistent_prop_does_not_throw_an_error(void** state) { + AMresultStack* stack = *state; + /* const doc = create() */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* const root = "_root" */ + /* const result = doc.getWithType(root, "hello") */ + /* assert.deepEqual(result, undefined) */ + AMpush(&stack, + AMmapGet(doc, AM_ROOT, "hello", NULL), + AM_VALUE_VOID, + cmocka_cb); +} + +/** + * \brief should be able to set and get a simple value + */ +static void test_should_be_able_to_set_and_get_a_simple_value(void** state) { + AMresultStack* stack = *state; + /* const doc: Automerge = create("aabbcc") */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMfree(AMsetActorId(doc, AMpush(&stack, + AMactorIdInitStr("aabbcc"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* const root = "_root" */ + /* let result */ + /* */ + /* doc.put(root, "hello", "world") */ + AMfree(AMmapPutStr(doc, AM_ROOT, "hello", "world")); + /* doc.put(root, "number1", 5, "uint") */ + AMfree(AMmapPutUint(doc, AM_ROOT, "number1", 
5)); + /* doc.put(root, "number2", 5) */ + AMfree(AMmapPutInt(doc, AM_ROOT, "number2", 5)); + /* doc.put(root, "number3", 5.5) */ + AMfree(AMmapPutF64(doc, AM_ROOT, "number3", 5.5)); + /* doc.put(root, "number4", 5.5, "f64") */ + AMfree(AMmapPutF64(doc, AM_ROOT, "number4", 5.5)); + /* doc.put(root, "number5", 5.5, "int") */ + AMfree(AMmapPutInt(doc, AM_ROOT, "number5", 5.5)); + /* doc.put(root, "bool", true) */ + AMfree(AMmapPutBool(doc, AM_ROOT, "bool", true)); + /* doc.put(root, "time1", 1000, "timestamp") */ + AMfree(AMmapPutTimestamp(doc, AM_ROOT, "time1", 1000)); + /* doc.put(root, "time2", new Date(1001)) */ + AMfree(AMmapPutTimestamp(doc, AM_ROOT, "time2", 1001)); + /* doc.putObject(root, "list", []); */ + AMfree(AMmapPutObject(doc, AM_ROOT, "list", AM_OBJ_TYPE_LIST)); + /* doc.put(root, "null", null) */ + AMfree(AMmapPutNull(doc, AM_ROOT, "null")); + /* */ + /* result = doc.getWithType(root, "hello") */ + /* assert.deepEqual(result, ["str", "world"]) */ + /* assert.deepEqual(doc.get("/", "hello"), "world") */ + assert_string_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "hello", NULL), + AM_VALUE_STR, + cmocka_cb).str, "world"); + /* assert.deepEqual(doc.get("/", "hello"), "world") */ + /* */ + /* result = doc.getWithType(root, "number1") */ + /* assert.deepEqual(result, ["uint", 5]) */ + assert_int_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "number1", NULL), + AM_VALUE_UINT, + cmocka_cb).uint, 5); + /* assert.deepEqual(doc.get("/", "number1"), 5) */ + /* */ + /* result = doc.getWithType(root, "number2") */ + /* assert.deepEqual(result, ["int", 5]) */ + assert_int_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "number2", NULL), + AM_VALUE_INT, + cmocka_cb).int_, 5); + /* */ + /* result = doc.getWithType(root, "number3") */ + /* assert.deepEqual(result, ["f64", 5.5]) */ + assert_float_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "number3", NULL), + AM_VALUE_F64, + cmocka_cb).f64, 5.5, DBL_EPSILON); + /* */ + /* result = doc.getWithType(root, "number4") */ 
+ /* assert.deepEqual(result, ["f64", 5.5]) */ + assert_float_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "number4", NULL), + AM_VALUE_F64, + cmocka_cb).f64, 5.5, DBL_EPSILON); + /* */ + /* result = doc.getWithType(root, "number5") */ + /* assert.deepEqual(result, ["int", 5]) */ + assert_int_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "number5", NULL), + AM_VALUE_INT, + cmocka_cb).int_, 5); + /* */ + /* result = doc.getWithType(root, "bool") */ + /* assert.deepEqual(result, ["boolean", true]) */ + assert_int_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "bool", NULL), + AM_VALUE_BOOLEAN, + cmocka_cb).boolean, true); + /* */ + /* doc.put(root, "bool", false, "boolean") */ + AMfree(AMmapPutBool(doc, AM_ROOT, "bool", false)); + /* */ + /* result = doc.getWithType(root, "bool") */ + /* assert.deepEqual(result, ["boolean", false]) */ + assert_int_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "bool", NULL), + AM_VALUE_BOOLEAN, + cmocka_cb).boolean, false); + /* */ + /* result = doc.getWithType(root, "time1") */ + /* assert.deepEqual(result, ["timestamp", new Date(1000)]) */ + assert_int_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "time1", NULL), + AM_VALUE_TIMESTAMP, + cmocka_cb).timestamp, 1000); + /* */ + /* result = doc.getWithType(root, "time2") */ + /* assert.deepEqual(result, ["timestamp", new Date(1001)]) */ + assert_int_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "time2", NULL), + AM_VALUE_TIMESTAMP, + cmocka_cb).timestamp, 1001); + /* */ + /* result = doc.getWithType(root, "list") */ + /* assert.deepEqual(result, ["list", "10@aabbcc"]); */ + AMobjId const* const list = AMpush(&stack, + AMmapGet(doc, AM_ROOT, "list", NULL), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + assert_int_equal(AMobjIdCounter(list), 10); + assert_string_equal(AMactorIdStr(AMobjIdActorId(list)), "aabbcc"); + /* */ + /* result = doc.getWithType(root, "null") */ + /* assert.deepEqual(result, ["null", null]); */ + AMpush(&stack, + AMmapGet(doc, AM_ROOT, "null", NULL), + AM_VALUE_NULL, + 
cmocka_cb); +} + +/** + * \brief should be able to use bytes + */ +static void test_should_be_able_to_use_bytes(void** state) { + AMresultStack* stack = *state; + /* const doc = create() */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* doc.put("_root", "data1", new Uint8Array([10, 11, 12])); */ + static uint8_t const DATA1[] = {10, 11, 12}; + AMfree(AMmapPutBytes(doc, AM_ROOT, "data1", DATA1, sizeof(DATA1))); + /* doc.put("_root", "data2", new Uint8Array([13, 14, 15]), "bytes"); */ + static uint8_t const DATA2[] = {13, 14, 15}; + AMfree(AMmapPutBytes(doc, AM_ROOT, "data2", DATA2, sizeof(DATA2))); + /* const value1 = doc.getWithType("_root", "data1") */ + AMbyteSpan const value1 = AMpush(&stack, + AMmapGet(doc, AM_ROOT, "data1", NULL), + AM_VALUE_BYTES, + cmocka_cb).bytes; + /* assert.deepEqual(value1, ["bytes", new Uint8Array([10, 11, 12])]); */ + assert_int_equal(value1.count, sizeof(DATA1)); + assert_memory_equal(value1.src, DATA1, sizeof(DATA1)); + /* const value2 = doc.getWithType("_root", "data2") */ + AMbyteSpan const value2 = AMpush(&stack, + AMmapGet(doc, AM_ROOT, "data2", NULL), + AM_VALUE_BYTES, + cmocka_cb).bytes; + /* assert.deepEqual(value2, ["bytes", new Uint8Array([13, 14, 15])]); */ + assert_int_equal(value2.count, sizeof(DATA2)); + assert_memory_equal(value2.src, DATA2, sizeof(DATA2)); +} + +/** + * \brief should be able to make subobjects + */ +static void test_should_be_able_to_make_subobjects(void** state) { + AMresultStack* stack = *state; + /* const doc = create() */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* const root = "_root" */ + /* let result */ + /* */ + /* const submap = doc.putObject(root, "submap", {}) */ + AMobjId const* const submap = AMpush( + &stack, + AMmapPutObject(doc, AM_ROOT, "submap", AM_OBJ_TYPE_MAP), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* doc.put(submap, "number", 6, "uint") */ + AMfree(AMmapPutUint(doc, submap, "number", 6)); + /* 
assert.strictEqual(doc.pendingOps(), 2) */ + assert_int_equal(AMpendingOps(doc), 2); + /* */ + /* result = doc.getWithType(root, "submap") */ + /* assert.deepEqual(result, ["map", submap]) */ + assert_true(AMobjIdEqual(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "submap", NULL), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id, + submap)); + /* */ + /* result = doc.getWithType(submap, "number") */ + /* assert.deepEqual(result, ["uint", 6]) */ + assert_int_equal(AMpush(&stack, + AMmapGet(doc, submap, "number", NULL), + AM_VALUE_UINT, + cmocka_cb).uint, + 6); +} + +/** + * \brief should be able to make lists + */ +static void test_should_be_able_to_make_lists(void** state) { + AMresultStack* stack = *state; + /* const doc = create() */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* const root = "_root" */ + /* */ + /* const sublist = doc.putObject(root, "numbers", []) */ + AMobjId const* const sublist = AMpush( + &stack, + AMmapPutObject(doc, AM_ROOT, "numbers", AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* doc.insert(sublist, 0, "a"); */ + AMfree(AMlistPutStr(doc, sublist, 0, true, "a")); + /* doc.insert(sublist, 1, "b"); */ + AMfree(AMlistPutStr(doc, sublist, 1, true, "b")); + /* doc.insert(sublist, 2, "c"); */ + AMfree(AMlistPutStr(doc, sublist, 2, true, "c")); + /* doc.insert(sublist, 0, "z"); */ + AMfree(AMlistPutStr(doc, sublist, 0, true, "z")); + /* */ + /* assert.deepEqual(doc.getWithType(sublist, 0), ["str", "z"]) */ + assert_string_equal(AMpush(&stack, + AMlistGet(doc, sublist, 0, NULL), + AM_VALUE_STR, + cmocka_cb).str, "z"); + /* assert.deepEqual(doc.getWithType(sublist, 1), ["str", "a"]) */ + assert_string_equal(AMpush(&stack, + AMlistGet(doc, sublist, 1, NULL), + AM_VALUE_STR, + cmocka_cb).str, "a"); + /* assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b"]) */ + assert_string_equal(AMpush(&stack, + AMlistGet(doc, sublist, 2, NULL), + AM_VALUE_STR, + cmocka_cb).str, "b"); + /* 
assert.deepEqual(doc.getWithType(sublist, 3), ["str", "c"]) */ + assert_string_equal(AMpush(&stack, + AMlistGet(doc, sublist, 3, NULL), + AM_VALUE_STR, + cmocka_cb).str, "c"); + /* assert.deepEqual(doc.length(sublist), 4) */ + assert_int_equal(AMobjSize(doc, sublist, NULL), 4); + /* */ + /* doc.put(sublist, 2, "b v2"); */ + AMfree(AMlistPutStr(doc, sublist, 2, false, "b v2")); + /* */ + /* assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b v2"]) */ + assert_string_equal(AMpush(&stack, + AMlistGet(doc, sublist, 2, NULL), + AM_VALUE_STR, + cmocka_cb).str, "b v2"); + /* assert.deepEqual(doc.length(sublist), 4) */ + assert_int_equal(AMobjSize(doc, sublist, NULL), 4); +} + +/** + * \brief lists have insert, set, splice, and push ops + */ +static void test_lists_have_insert_set_splice_and_push_ops(void** state) { + AMresultStack* stack = *state; + /* const doc = create() */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* const root = "_root" */ + /* */ + /* const sublist = doc.putObject(root, "letters", []) */ + AMobjId const* const sublist = AMpush( + &stack, + AMmapPutObject(doc, AM_ROOT, "letters", AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* doc.insert(sublist, 0, "a"); */ + AMfree(AMlistPutStr(doc, sublist, 0, true, "a")); + /* doc.insert(sublist, 0, "b"); */ + AMfree(AMlistPutStr(doc, sublist, 0, true, "b")); + /* assert.deepEqual(doc.materialize(), { letters: ["b", "a"] }) */ + AMmapItems doc_items = AMpush(&stack, + AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + AMmapItem const* doc_item = AMmapItemsNext(&doc_items, 1); + assert_string_equal(AMmapItemKey(doc_item), "letters"); + { + AMlistItems list_items = AMpush( + &stack, + AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "b"); + 
assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "a"); + assert_null(AMlistItemsNext(&list_items, 1)); + } + /* doc.push(sublist, "c"); */ + AMfree(AMlistPutStr(doc, sublist, SIZE_MAX, true, "c")); + /* const heads = doc.getHeads() */ + AMchangeHashes const heads = AMpush(&stack, + AMgetHeads(doc), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c"] }) */ + doc_items = AMpush(&stack, + AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + doc_item = AMmapItemsNext(&doc_items, 1); + assert_string_equal(AMmapItemKey(doc_item), "letters"); + { + AMlistItems list_items = AMpush( + &stack, + AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "b"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "a"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "c"); + assert_null(AMlistItemsNext(&list_items, 1)); + } + /* doc.push(sublist, 3, "timestamp"); */ + AMfree(AMlistPutTimestamp(doc, sublist, SIZE_MAX, true, 3)); + /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3)] })*/ + doc_items = AMpush(&stack, + AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + doc_item = AMmapItemsNext(&doc_items, 1); + assert_string_equal(AMmapItemKey(doc_item), "letters"); + { + AMlistItems list_items = AMpush( + &stack, + AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "b"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "a"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "c"); + 
assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).timestamp, + 3); + assert_null(AMlistItemsNext(&list_items, 1)); + } + /* doc.splice(sublist, 1, 1, ["d", "e", "f"]); */ + static AMvalue const DATA[] = {{.str_tag = AM_VALUE_STR, .str = "d"}, + {.str_tag = AM_VALUE_STR, .str = "e"}, + {.str_tag = AM_VALUE_STR, .str = "f"}}; + AMfree(AMsplice(doc, sublist, 1, 1, DATA, sizeof(DATA)/sizeof(AMvalue))); + /* assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", new Date(3)] })*/ + doc_items = AMpush(&stack, + AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + doc_item = AMmapItemsNext(&doc_items, 1); + assert_string_equal(AMmapItemKey(doc_item), "letters"); + { + AMlistItems list_items = AMpush( + &stack, + AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "b"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "d"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "e"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "f"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "c"); + assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).timestamp, + 3); + assert_null(AMlistItemsNext(&list_items, 1)); + } + /* doc.put(sublist, 0, "z"); */ + AMfree(AMlistPutStr(doc, sublist, 0, false, "z")); + /* assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", new Date(3)] })*/ + doc_items = AMpush(&stack, + AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + doc_item = AMmapItemsNext(&doc_items, 1); + assert_string_equal(AMmapItemKey(doc_item), "letters"); + { + AMlistItems list_items = AMpush( + &stack, + AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, 
+ cmocka_cb).list_items; + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "z"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "d"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "e"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "f"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "c"); + assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).timestamp, + 3); + assert_null(AMlistItemsNext(&list_items, 1)); + } + /* assert.deepEqual(doc.materialize(sublist), ["z", "d", "e", "f", "c", new Date(3)])*/ + AMlistItems sublist_items = AMpush( + &stack, + AMlistRange(doc, sublist, 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, + "z"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, + "d"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, + "e"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, + "f"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, + "c"); + assert_int_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).timestamp, + 3); + assert_null(AMlistItemsNext(&sublist_items, 1)); + /* assert.deepEqual(doc.length(sublist), 6) */ + assert_int_equal(AMobjSize(doc, sublist, NULL), 6); + /* assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c"] })*/ + doc_items = AMpush(&stack, + AMmapRange(doc, AM_ROOT, NULL, NULL, &heads), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + doc_item = AMmapItemsNext(&doc_items, 1); + assert_string_equal(AMmapItemKey(doc_item), "letters"); + { + AMlistItems list_items = AMpush( + &stack, + AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, &heads), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + 
assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "b"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "a"); + assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, + "c"); + assert_null(AMlistItemsNext(&list_items, 1)); + } +} + +/** + * \brief should be able to delete non-existent props + */ +static void test_should_be_able_to_delete_non_existent_props(void** state) { + AMresultStack* stack = *state; + /* const doc = create() */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* */ + /* doc.put("_root", "foo", "bar") */ + AMfree(AMmapPutStr(doc, AM_ROOT, "foo", "bar")); + /* doc.put("_root", "bip", "bap") */ + AMfree(AMmapPutStr(doc, AM_ROOT, "bip", "bap")); + /* const hash1 = doc.commit() */ + AMchangeHashes const hash1 = AMpush(&stack, + AMcommit(doc, NULL, NULL), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + /* */ + /* assert.deepEqual(doc.keys("_root"), ["bip", "foo"]) */ + AMstrs keys = AMpush(&stack, + AMkeys(doc, AM_ROOT, NULL), + AM_VALUE_STRS, + cmocka_cb).strs; + assert_string_equal(AMstrsNext(&keys, 1), "bip"); + assert_string_equal(AMstrsNext(&keys, 1), "foo"); + /* */ + /* doc.delete("_root", "foo") */ + AMfree(AMmapDelete(doc, AM_ROOT, "foo")); + /* doc.delete("_root", "baz") */ + AMfree(AMmapDelete(doc, AM_ROOT, "baz")); + /* const hash2 = doc.commit() */ + AMchangeHashes const hash2 = AMpush(&stack, + AMcommit(doc, NULL, NULL), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + /* */ + /* assert.deepEqual(doc.keys("_root"), ["bip"]) */ + keys = AMpush(&stack, + AMkeys(doc, AM_ROOT, NULL), + AM_VALUE_STRS, + cmocka_cb).strs; + assert_string_equal(AMstrsNext(&keys, 1), "bip"); + /* assert.deepEqual(doc.keys("_root", [hash1]), ["bip", "foo"]) */ + keys = AMpush(&stack, + AMkeys(doc, AM_ROOT, &hash1), + AM_VALUE_STRS, + cmocka_cb).strs; + assert_string_equal(AMstrsNext(&keys, 1), "bip"); + assert_string_equal(AMstrsNext(&keys, 
1), "foo"); + /* assert.deepEqual(doc.keys("_root", [hash2]), ["bip"]) */ + keys = AMpush(&stack, + AMkeys(doc, AM_ROOT, &hash2), + AM_VALUE_STRS, + cmocka_cb).strs; + assert_string_equal(AMstrsNext(&keys, 1), "bip"); +} + +/** + * \brief should be able to del + */ +static void test_should_be_able_to_del(void **state) { + AMresultStack* stack = *state; + /* const doc = create() */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* const root = "_root" */ + /* */ + /* doc.put(root, "xxx", "xxx"); */ + AMfree(AMmapPutStr(doc, AM_ROOT, "xxx", "xxx")); + /* assert.deepEqual(doc.getWithType(root, "xxx"), ["str", "xxx"]) */ + assert_string_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "xxx", NULL), + AM_VALUE_STR, + cmocka_cb).str, "xxx"); + /* doc.delete(root, "xxx"); */ + AMfree(AMmapDelete(doc, AM_ROOT, "xxx")); + /* assert.deepEqual(doc.getWithType(root, "xxx"), undefined) */ + AMpush(&stack, + AMmapGet(doc, AM_ROOT, "xxx", NULL), + AM_VALUE_VOID, + cmocka_cb); +} + +/** + * \brief should be able to use counters + */ +static void test_should_be_able_to_use_counters(void** state) { + AMresultStack* stack = *state; + /* const doc = create() */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* const root = "_root" */ + /* */ + /* doc.put(root, "counter", 10, "counter"); */ + AMfree(AMmapPutCounter(doc, AM_ROOT, "counter", 10)); + /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 10]) */ + assert_int_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "counter", NULL), + AM_VALUE_COUNTER, + cmocka_cb).counter, 10); + /* doc.increment(root, "counter", 10); */ + AMfree(AMmapIncrement(doc, AM_ROOT, "counter", 10)); + /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 20]) */ + assert_int_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "counter", NULL), + AM_VALUE_COUNTER, + cmocka_cb).counter, 20); + /* doc.increment(root, "counter", -5); */ + AMfree(AMmapIncrement(doc, AM_ROOT, 
"counter", -5)); + /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 15]) */ + assert_int_equal(AMpush(&stack, + AMmapGet(doc, AM_ROOT, "counter", NULL), + AM_VALUE_COUNTER, + cmocka_cb).counter, 15); +} + +/** + * \brief should be able to splice text + */ +static void test_should_be_able_to_splice_text(void** state) { + AMresultStack* stack = *state; + /* const doc = create() */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* const root = "_root"; */ + /* */ + /* const text = doc.putObject(root, "text", ""); */ + AMobjId const* const text = AMpush( + &stack, + AMmapPutObject(doc, AM_ROOT, "text", AM_OBJ_TYPE_TEXT), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* doc.splice(text, 0, 0, "hello ") */ + AMfree(AMspliceText(doc, text, 0, 0, "hello ")); + /* doc.splice(text, 6, 0, ["w", "o", "r", "l", "d"]) */ + static AMvalue const WORLD[] = {{.str_tag = AM_VALUE_STR, .str = "w"}, + {.str_tag = AM_VALUE_STR, .str = "o"}, + {.str_tag = AM_VALUE_STR, .str = "r"}, + {.str_tag = AM_VALUE_STR, .str = "l"}, + {.str_tag = AM_VALUE_STR, .str = "d"}}; + AMfree(AMsplice(doc, text, 6, 0, WORLD, sizeof(WORLD)/sizeof(AMvalue))); + /* doc.splice(text, 11, 0, ["!", "?"]) */ + static AMvalue const INTERROBANG[] = {{.str_tag = AM_VALUE_STR, .str = "!"}, + {.str_tag = AM_VALUE_STR, .str = "?"}}; + AMfree(AMsplice(doc, text, 11, 0, INTERROBANG, sizeof(INTERROBANG)/sizeof(AMvalue))); + /* assert.deepEqual(doc.getWithType(text, 0), ["str", "h"]) */ + assert_string_equal(AMpush(&stack, + AMlistGet(doc, text, 0, NULL), + AM_VALUE_STR, + cmocka_cb).str, "h"); + /* assert.deepEqual(doc.getWithType(text, 1), ["str", "e"]) */ + assert_string_equal(AMpush(&stack, + AMlistGet(doc, text, 1, NULL), + AM_VALUE_STR, + cmocka_cb).str, "e"); + /* assert.deepEqual(doc.getWithType(text, 9), ["str", "l"]) */ + assert_string_equal(AMpush(&stack, + AMlistGet(doc, text, 9, NULL), + AM_VALUE_STR, + cmocka_cb).str, "l"); + /* assert.deepEqual(doc.getWithType(text, 
10), ["str", "d"]) */ + assert_string_equal(AMpush(&stack, + AMlistGet(doc, text, 10, NULL), + AM_VALUE_STR, + cmocka_cb).str, "d"); + /* assert.deepEqual(doc.getWithType(text, 11), ["str", "!"]) */ + assert_string_equal(AMpush(&stack, + AMlistGet(doc, text, 11, NULL), + AM_VALUE_STR, + cmocka_cb).str, "!"); + /* assert.deepEqual(doc.getWithType(text, 12), ["str", "?"]) */ + assert_string_equal(AMpush(&stack, + AMlistGet(doc, text, 12, NULL), + AM_VALUE_STR, + cmocka_cb).str, "?"); +} + +/** + * \brief should be able to insert objects into text + */ +static void test_should_be_able_to_insert_objects_into_text(void** state) { + AMresultStack* stack = *state; + /* const doc = create() */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* const text = doc.putObject("/", "text", "Hello world"); */ + AMobjId const* const text = AMpush( + &stack, + AMmapPutObject(doc, AM_ROOT, "text", AM_OBJ_TYPE_TEXT), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + AMfree(AMspliceText(doc, text, 0, 0, "Hello world")); + /* const obj = doc.insertObject(text, 6, { hello: "world" }); */ + AMobjId const* const obj = AMpush( + &stack, + AMlistPutObject(doc, text, 6, true, AM_OBJ_TYPE_MAP), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + AMfree(AMmapPutStr(doc, obj, "hello", "world")); + /* assert.deepEqual(doc.text(text), "Hello \ufffcworld"); */ + assert_string_equal(AMpush(&stack, + AMtext(doc, text, NULL), + AM_VALUE_STR, + cmocka_cb).str, "Hello \ufffcworld"); + /* assert.deepEqual(doc.getWithType(text, 6), ["map", obj]); */ + assert_true(AMobjIdEqual(AMpush(&stack, + AMlistGet(doc, text, 6, NULL), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id, obj)); + /* assert.deepEqual(doc.getWithType(obj, "hello"), ["str", "world"]); */ + assert_string_equal(AMpush(&stack, + AMmapGet(doc, obj, "hello", NULL), + AM_VALUE_STR, + cmocka_cb).str, "world"); +} + +/** + * \brief should be able save all or incrementally + */ +static void test_should_be_able_to_save_all_or_incrementally(void** 
state) { + AMresultStack* stack = *state; + /* const doc = create() */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* */ + /* doc.put("_root", "foo", 1) */ + AMfree(AMmapPutInt(doc, AM_ROOT, "foo", 1)); + /* */ + /* const save1 = doc.save() */ + AMbyteSpan const save1 = AMpush(&stack, + AMsave(doc), + AM_VALUE_BYTES, + cmocka_cb).bytes; + /* */ + /* doc.put("_root", "bar", 2) */ + AMfree(AMmapPutInt(doc, AM_ROOT, "bar", 2)); + /* */ + /* const saveMidway = doc.clone().save(); */ + AMbyteSpan const saveMidway = AMpush(&stack, + AMsave( + AMpush(&stack, + AMclone(doc), + AM_VALUE_DOC, + cmocka_cb).doc), + AM_VALUE_BYTES, + cmocka_cb).bytes; + /* */ + /* const save2 = doc.saveIncremental(); */ + AMbyteSpan const save2 = AMpush(&stack, + AMsaveIncremental(doc), + AM_VALUE_BYTES, + cmocka_cb).bytes; + /* */ + /* doc.put("_root", "baz", 3); */ + AMfree(AMmapPutInt(doc, AM_ROOT, "baz", 3)); + /* */ + /* const save3 = doc.saveIncremental(); */ + AMbyteSpan const save3 = AMpush(&stack, + AMsaveIncremental(doc), + AM_VALUE_BYTES, + cmocka_cb).bytes; + /* */ + /* const saveA = doc.save(); */ + AMbyteSpan const saveA = AMpush(&stack, + AMsave(doc), + AM_VALUE_BYTES, + cmocka_cb).bytes; + /* const saveB = new Uint8Array([...save1, ...save2, ...save3]); */ + size_t const saveB_count = save1.count + save2.count + save3.count; + uint8_t* const saveB_src = test_malloc(saveB_count); + memcpy(saveB_src, save1.src, save1.count); + memcpy(saveB_src + save1.count, save2.src, save2.count); + memcpy(saveB_src + save1.count + save2.count, save3.src, save3.count); + /* */ + /* assert.notDeepEqual(saveA, saveB); */ + assert_memory_not_equal(saveA.src, saveB_src, saveA.count); + /* */ + /* const docA = load(saveA); */ + AMdoc* const docA = AMpush(&stack, + AMload(saveA.src, saveA.count), + AM_VALUE_DOC, + cmocka_cb).doc; + /* const docB = load(saveB); */ + AMdoc* const docB = AMpush(&stack, + AMload(saveB_src, saveB_count), + AM_VALUE_DOC, + cmocka_cb).doc; 
+ test_free(saveB_src); + /* const docC = load(saveMidway) */ + AMdoc* const docC = AMpush(&stack, + AMload(saveMidway.src, saveMidway.count), + AM_VALUE_DOC, + cmocka_cb).doc; + /* docC.loadIncremental(save3) */ + AMfree(AMloadIncremental(docC, save3.src, save3.count)); + /* */ + /* assert.deepEqual(docA.keys("_root"), docB.keys("_root")); */ + AMstrs const keysA = AMpush(&stack, + AMkeys(docA, AM_ROOT, NULL), + AM_VALUE_STRS, + cmocka_cb).strs; + AMstrs const keysB = AMpush(&stack, + AMkeys(docB, AM_ROOT, NULL), + AM_VALUE_STRS, + cmocka_cb).strs; + assert_int_equal(AMstrsCmp(&keysA, &keysB), 0); + /* assert.deepEqual(docA.save(), docB.save()); */ + AMbyteSpan const save = AMpush(&stack, + AMsave(docA), + AM_VALUE_BYTES, + cmocka_cb).bytes; + assert_memory_equal(save.src, + AMpush(&stack, + AMsave(docB), + AM_VALUE_BYTES, + cmocka_cb).bytes.src, + save.count); + /* assert.deepEqual(docA.save(), docC.save()); */ + assert_memory_equal(save.src, + AMpush(&stack, + AMsave(docC), + AM_VALUE_BYTES, + cmocka_cb).bytes.src, + save.count); +} + +/** + * \brief should be able to splice text #2 + */ +static void test_should_be_able_to_splice_text_2(void** state) { + AMresultStack* stack = *state; + /* const doc = create() */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + /* const text = doc.putObject("_root", "text", ""); */ + AMobjId const* const text = AMpush( + &stack, + AMmapPutObject(doc, AM_ROOT, "text", AM_OBJ_TYPE_TEXT), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* doc.splice(text, 0, 0, "hello world"); */ + AMfree(AMspliceText(doc, text, 0, 0, "hello world")); + /* const hash1 = doc.commit(); */ + AMchangeHashes const hash1 = AMpush(&stack, + AMcommit(doc, NULL, NULL), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + /* doc.splice(text, 6, 0, "big bad "); */ + AMfree(AMspliceText(doc, text, 6, 0, "big bad ")); + /* const hash2 = doc.commit(); */ + AMchangeHashes const hash2 = AMpush(&stack, + AMcommit(doc, NULL, NULL), + 
AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + /* assert.strictEqual(doc.text(text), "hello big bad world") */ + assert_string_equal(AMpush(&stack, + AMtext(doc, text, NULL), + AM_VALUE_STR, + cmocka_cb).str, "hello big bad world"); + /* assert.strictEqual(doc.length(text), 19) */ + assert_int_equal(AMobjSize(doc, text, NULL), 19); + /* assert.strictEqual(doc.text(text, [hash1]), "hello world") */ + assert_string_equal(AMpush(&stack, + AMtext(doc, text, &hash1), + AM_VALUE_STR, + cmocka_cb).str, "hello world"); + /* assert.strictEqual(doc.length(text, [hash1]), 11) */ + assert_int_equal(AMobjSize(doc, text, &hash1), 11); + /* assert.strictEqual(doc.text(text, [hash2]), "hello big bad world") */ + assert_string_equal(AMpush(&stack, + AMtext(doc, text, &hash2), + AM_VALUE_STR, + cmocka_cb).str, "hello big bad world"); + /* assert.strictEqual(doc.length(text, [hash2]), 19) */ + assert_int_equal(AMobjSize(doc, text, &hash2), 19); +} + +/** + * \brief local inc increments all visible counters in a map + */ +static void test_local_inc_increments_all_visible_counters_in_a_map(void** state) { + AMresultStack* stack = *state; + /* const doc1 = create("aaaa") */ + AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMfree(AMsetActorId(doc1, AMpush(&stack, + AMactorIdInitStr("aaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* doc1.put("_root", "hello", "world") */ + AMfree(AMmapPutStr(doc1, AM_ROOT, "hello", "world")); + /* const doc2 = load(doc1.save(), "bbbb"); */ + AMbyteSpan const save = AMpush(&stack, + AMsave(doc1), + AM_VALUE_BYTES, + cmocka_cb).bytes; + AMdoc* const doc2 = AMpush(&stack, + AMload(save.src, save.count), + AM_VALUE_DOC, + cmocka_cb).doc; + AMfree(AMsetActorId(doc2, AMpush(&stack, + AMactorIdInitStr("bbbb"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* const doc3 = load(doc1.save(), "cccc"); */ + AMdoc* const doc3 = AMpush(&stack, + AMload(save.src, save.count), + AM_VALUE_DOC, + cmocka_cb).doc; + 
AMfree(AMsetActorId(doc3, AMpush(&stack, + AMactorIdInitStr("cccc"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* let heads = doc1.getHeads() */ + AMchangeHashes const heads1 = AMpush(&stack, + AMgetHeads(doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + /* doc1.put("_root", "cnt", 20) */ + AMfree(AMmapPutInt(doc1, AM_ROOT, "cnt", 20)); + /* doc2.put("_root", "cnt", 0, "counter") */ + AMfree(AMmapPutCounter(doc2, AM_ROOT, "cnt", 0)); + /* doc3.put("_root", "cnt", 10, "counter") */ + AMfree(AMmapPutCounter(doc3, AM_ROOT, "cnt", 10)); + /* doc1.applyChanges(doc2.getChanges(heads)) */ + AMchanges const changes2 = AMpush(&stack, + AMgetChanges(doc2, &heads1), + AM_VALUE_CHANGES, + cmocka_cb).changes; + AMfree(AMapplyChanges(doc1, &changes2)); + /* doc1.applyChanges(doc3.getChanges(heads)) */ + AMchanges const changes3 = AMpush(&stack, + AMgetChanges(doc3, &heads1), + AM_VALUE_CHANGES, + cmocka_cb).changes; + AMfree(AMapplyChanges(doc1, &changes3)); + /* let result = doc1.getAll("_root", "cnt") */ + AMobjItems result = AMpush(&stack, + AMmapGetAll(doc1, AM_ROOT, "cnt", NULL), + AM_VALUE_OBJ_ITEMS, + cmocka_cb).obj_items; + /* assert.deepEqual(result, [ + ['int', 20, '2@aaaa'], + ['counter', 0, '2@bbbb'], + ['counter', 10, '2@cccc'], + ]) */ + AMobjItem const* result_item = AMobjItemsNext(&result, 1); + assert_int_equal(AMobjItemValue(result_item).int_, 20); + assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); + assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), + "aaaa"); + result_item = AMobjItemsNext(&result, 1); + assert_int_equal(AMobjItemValue(result_item).counter, 0); + assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); + assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), + "bbbb"); + result_item = AMobjItemsNext(&result, 1); + assert_int_equal(AMobjItemValue(result_item).counter, 10); + assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); + 
assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), + "cccc"); + /* doc1.increment("_root", "cnt", 5) */ + AMfree(AMmapIncrement(doc1, AM_ROOT, "cnt", 5)); + /* result = doc1.getAll("_root", "cnt") */ + result = AMpush(&stack, + AMmapGetAll(doc1, AM_ROOT, "cnt", NULL), + AM_VALUE_OBJ_ITEMS, + cmocka_cb).obj_items; + /* assert.deepEqual(result, [ + ['counter', 5, '2@bbbb'], + ['counter', 15, '2@cccc'], + ]) */ + result_item = AMobjItemsNext(&result, 1); + assert_int_equal(AMobjItemValue(result_item).counter, 5); + assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); + assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), + "bbbb"); + result_item = AMobjItemsNext(&result, 1); + assert_int_equal(AMobjItemValue(result_item).counter, 15); + assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); + assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), + "cccc"); + /* */ + /* const save1 = doc1.save() */ + AMbyteSpan const save1 = AMpush(&stack, + AMsave(doc1), + AM_VALUE_BYTES, + cmocka_cb).bytes; + /* const doc4 = load(save1) */ + AMdoc* const doc4 = AMpush(&stack, + AMload(save1.src, save1.count), + AM_VALUE_DOC, + cmocka_cb).doc; + /* assert.deepEqual(doc4.save(), save1); */ + assert_memory_equal(AMpush(&stack, + AMsave(doc4), + AM_VALUE_BYTES, + cmocka_cb).bytes.src, + save1.src, + save1.count); +} + +/** + * \brief local inc increments all visible counters in a sequence + */ +static void test_local_inc_increments_all_visible_counters_in_a_sequence(void** state) { + AMresultStack* stack = *state; + /* const doc1 = create("aaaa") */ + AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMfree(AMsetActorId(doc1, AMpush(&stack, + AMactorIdInitStr("aaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* const seq = doc1.putObject("_root", "seq", []) */ + AMobjId const* const seq = AMpush( + &stack, + AMmapPutObject(doc1, AM_ROOT, "seq", 
AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* doc1.insert(seq, 0, "hello") */ + AMfree(AMlistPutStr(doc1, seq, 0, true, "hello")); + /* const doc2 = load(doc1.save(), "bbbb"); */ + AMbyteSpan const save1 = AMpush(&stack, + AMsave(doc1), + AM_VALUE_BYTES, + cmocka_cb).bytes; + AMdoc* const doc2 = AMpush(&stack, + AMload(save1.src, save1.count), + AM_VALUE_DOC, + cmocka_cb).doc; + AMfree(AMsetActorId(doc2, AMpush(&stack, + AMactorIdInitStr("bbbb"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* const doc3 = load(doc1.save(), "cccc"); */ + AMdoc* const doc3 = AMpush(&stack, + AMload(save1.src, save1.count), + AM_VALUE_DOC, + cmocka_cb).doc; + AMfree(AMsetActorId(doc3, AMpush(&stack, + AMactorIdInitStr("cccc"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* let heads = doc1.getHeads() */ + AMchangeHashes const heads1 = AMpush(&stack, + AMgetHeads(doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + /* doc1.put(seq, 0, 20) */ + AMfree(AMlistPutInt(doc1, seq, 0, false, 20)); + /* doc2.put(seq, 0, 0, "counter") */ + AMfree(AMlistPutCounter(doc2, seq, 0, false, 0)); + /* doc3.put(seq, 0, 10, "counter") */ + AMfree(AMlistPutCounter(doc3, seq, 0, false, 10)); + /* doc1.applyChanges(doc2.getChanges(heads)) */ + AMchanges const changes2 = AMpush(&stack, + AMgetChanges(doc2, &heads1), + AM_VALUE_CHANGES, + cmocka_cb).changes; + AMfree(AMapplyChanges(doc1, &changes2)); + /* doc1.applyChanges(doc3.getChanges(heads)) */ + AMchanges const changes3 = AMpush(&stack, + AMgetChanges(doc3, &heads1), + AM_VALUE_CHANGES, + cmocka_cb).changes; + AMfree(AMapplyChanges(doc1, &changes3)); + /* let result = doc1.getAll(seq, 0) */ + AMobjItems result = AMpush(&stack, + AMlistGetAll(doc1, seq, 0, NULL), + AM_VALUE_OBJ_ITEMS, + cmocka_cb).obj_items; + /* assert.deepEqual(result, [ + ['int', 20, '3@aaaa'], + ['counter', 0, '3@bbbb'], + ['counter', 10, '3@cccc'], + ]) */ + AMobjItem const* result_item = AMobjItemsNext(&result, 1); + 
assert_int_equal(AMobjItemValue(result_item).int_, 20); + assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); + assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), + "aaaa"); + result_item = AMobjItemsNext(&result, 1); + assert_int_equal(AMobjItemValue(result_item).counter, 0); + assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); + assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), + "bbbb"); + result_item = AMobjItemsNext(&result, 1); + assert_int_equal(AMobjItemValue(result_item).counter, 10); + assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); + assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), + "cccc"); + /* doc1.increment(seq, 0, 5) */ + AMfree(AMlistIncrement(doc1, seq, 0, 5)); + /* result = doc1.getAll(seq, 0) */ + result = AMpush(&stack, + AMlistGetAll(doc1, seq, 0, NULL), + AM_VALUE_OBJ_ITEMS, + cmocka_cb).obj_items; + /* assert.deepEqual(result, [ + ['counter', 5, '3@bbbb'], + ['counter', 15, '3@cccc'], + ]) */ + result_item = AMobjItemsNext(&result, 1); + assert_int_equal(AMobjItemValue(result_item).counter, 5); + assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); + assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), + "bbbb"); + result_item = AMobjItemsNext(&result, 1); + assert_int_equal(AMobjItemValue(result_item).counter, 15); + assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); + assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), + "cccc"); + /* */ + /* const save = doc1.save() */ + AMbyteSpan const save = AMpush(&stack, + AMsave(doc1), + AM_VALUE_BYTES, + cmocka_cb).bytes; + /* const doc4 = load(save) */ + AMdoc* const doc4 = AMpush(&stack, + AMload(save.src, save.count), + AM_VALUE_DOC, + cmocka_cb).doc; + /* assert.deepEqual(doc4.save(), save); */ + assert_memory_equal(AMpush(&stack, + AMsave(doc4), + AM_VALUE_BYTES, + 
cmocka_cb).bytes.src, + save.src, + save.count); +} + +/** + * \brief paths can be used instead of objids + */ +static void test_paths_can_be_used_instead_of_objids(void** state); + +/** + * \brief should be able to fetch changes by hash + */ +static void test_should_be_able_to_fetch_changes_by_hash(void** state) { + AMresultStack* stack = *state; + /* const doc1 = create("aaaa") */ + AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMfree(AMsetActorId(doc1, AMpush(&stack, + AMactorIdInitStr("aaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* const doc2 = create("bbbb") */ + AMdoc* const doc2 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMfree(AMsetActorId(doc2, AMpush(&stack, + AMactorIdInitStr("bbbb"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* doc1.put("/", "a", "b") */ + AMfree(AMmapPutStr(doc1, AM_ROOT, "a", "b")); + /* doc2.put("/", "b", "c") */ + AMfree(AMmapPutStr(doc2, AM_ROOT, "b", "c")); + /* const head1 = doc1.getHeads() */ + AMchangeHashes head1 = AMpush(&stack, + AMgetHeads(doc1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + /* const head2 = doc2.getHeads() */ + AMchangeHashes head2 = AMpush(&stack, + AMgetHeads(doc2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + /* const change1 = doc1.getChangeByHash(head1[0]) + if (change1 === null) { throw new RangeError("change1 should not be null") }*/ + AMbyteSpan const change_hash1 = AMchangeHashesNext(&head1, 1); + AMchanges change1 = AMpush( + &stack, + AMgetChangeByHash(doc1, change_hash1.src, change_hash1.count), + AM_VALUE_CHANGES, + cmocka_cb).changes; + /* const change2 = doc1.getChangeByHash(head2[0]) + assert.deepEqual(change2, null) */ + AMbyteSpan const change_hash2 = AMchangeHashesNext(&head2, 1); + AMpush(&stack, + AMgetChangeByHash(doc1, change_hash2.src, change_hash2.count), + AM_VALUE_VOID, + cmocka_cb); + /* assert.deepEqual(decodeChange(change1).hash, head1[0]) */ + 
assert_memory_equal(AMchangeHash(AMchangesNext(&change1, 1)).src, + change_hash1.src, + change_hash1.count); +} + +/** + * \brief recursive sets are possible + */ +static void test_recursive_sets_are_possible(void** state) { + AMresultStack* stack = *state; + /* const doc = create("aaaa") */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMfree(AMsetActorId(doc, AMpush(&stack, + AMactorIdInitStr("aaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]])*/ + AMobjId const* const l1 = AMpush( + &stack, + AMmapPutObject(doc, AM_ROOT, "list", AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + { + AMobjId const* const map = AMpush( + &stack, + AMlistPutObject(doc, l1, 0, true, AM_OBJ_TYPE_MAP), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + AMfree(AMmapPutStr(doc, map, "foo", "bar")); + AMobjId const* const list = AMpush( + &stack, + AMlistPutObject(doc, l1, SIZE_MAX, true, AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + for (int value = 1; value != 4; ++value) { + AMfree(AMlistPutInt(doc, list, SIZE_MAX, true, value)); + } + } + /* const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) */ + AMobjId const* const l2 = AMpush( + &stack, + AMlistPutObject(doc, l1, 0, true, AM_OBJ_TYPE_MAP), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + { + AMobjId const* const list = AMpush( + &stack, + AMmapPutObject(doc, l2, "zip", AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + AMfree(AMlistPutStr(doc, list, SIZE_MAX, true, "a")); + AMfree(AMlistPutStr(doc, list, SIZE_MAX, true, "b")); + } + /* const l3 = doc.putObject("_root", "info1", "hello world") // 'text' object*/ + AMobjId const* const l3 = AMpush( + &stack, + AMmapPutObject(doc, AM_ROOT, "info1", AM_OBJ_TYPE_TEXT), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + AMfree(AMspliceText(doc, l3, 0, 0, "hello world")); + /* doc.put("_root", "info2", "hello world") // 'str' */ + AMfree(AMmapPutStr(doc, AM_ROOT, 
"info2", "hello world")); + /* const l4 = doc.putObject("_root", "info3", "hello world") */ + AMobjId const* const l4 = AMpush( + &stack, + AMmapPutObject(doc, AM_ROOT, "info3", AM_OBJ_TYPE_TEXT), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + AMfree(AMspliceText(doc, l4, 0, 0, "hello world")); + /* assert.deepEqual(doc.materialize(), { + "list": [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]], + "info1": "hello world", + "info2": "hello world", + "info3": "hello world", + }) */ + AMmapItems doc_items = AMpush(&stack, + AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + AMmapItem const* doc_item = AMmapItemsNext(&doc_items, 1); + assert_string_equal(AMmapItemKey(doc_item), "info1"); + assert_string_equal(AMpush(&stack, + AMtext(doc, AMmapItemObjId(doc_item), NULL), + AM_VALUE_STR, + cmocka_cb).str, "hello world"); + doc_item = AMmapItemsNext(&doc_items, 1); + assert_string_equal(AMmapItemKey(doc_item), "info2"); + assert_string_equal(AMmapItemValue(doc_item).str, "hello world"); + doc_item = AMmapItemsNext(&doc_items, 1); + assert_string_equal(AMmapItemKey(doc_item), "info3"); + assert_string_equal(AMpush(&stack, + AMtext(doc, AMmapItemObjId(doc_item), NULL), + AM_VALUE_STR, + cmocka_cb).str, "hello world"); + doc_item = AMmapItemsNext(&doc_items, 1); + assert_string_equal(AMmapItemKey(doc_item), "list"); + { + AMlistItems list_items = AMpush( + &stack, + AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + AMlistItem const* list_item = AMlistItemsNext(&list_items, 1); + { + AMmapItems map_items = AMpush( + &stack, + AMmapRange(doc, AMlistItemObjId(list_item), NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); + assert_string_equal(AMmapItemKey(map_item), "zip"); + { + AMlistItems list_items = AMpush( + &stack, + AMlistRange(doc, AMmapItemObjId(map_item), 0, SIZE_MAX, NULL), + 
AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_string_equal(AMlistItemValue( + AMlistItemsNext(&list_items, 1)).str, + "a"); + assert_string_equal(AMlistItemValue( + AMlistItemsNext(&list_items, 1)).str, + "b"); + } + } + list_item = AMlistItemsNext(&list_items, 1); + { + AMmapItems map_items = AMpush( + &stack, + AMmapRange(doc, AMlistItemObjId(list_item), NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); + assert_string_equal(AMmapItemKey(map_item), "foo"); + assert_string_equal(AMmapItemValue(map_item).str, "bar"); + } + list_item = AMlistItemsNext(&list_items, 1); + { + AMlistItems list_items = AMpush( + &stack, + AMlistRange(doc, AMlistItemObjId(list_item), 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_int_equal(AMlistItemValue( + AMlistItemsNext(&list_items, 1)).int_, + 1); + assert_int_equal(AMlistItemValue( + AMlistItemsNext(&list_items, 1)).int_, + 2); + assert_int_equal(AMlistItemValue( + AMlistItemsNext(&list_items, 1)).int_, + 3); + } + } + /* assert.deepEqual(doc.materialize(l2), { zip: ["a", "b"] }) */ + AMmapItems map_items = AMpush( + &stack, + AMmapRange(doc, l2, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); + assert_string_equal(AMmapItemKey(map_item), "zip"); + { + AMlistItems list_items = AMpush( + &stack, + AMlistRange(doc, AMmapItemObjId(map_item), 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_string_equal(AMlistItemValue( + AMlistItemsNext(&list_items, 1)).str, + "a"); + assert_string_equal(AMlistItemValue( + AMlistItemsNext(&list_items, 1)).str, + "b"); + } + /* assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]])*/ + AMlistItems list_items = AMpush( + &stack, + AMlistRange(doc, l1, 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + AMlistItem const* list_item = 
AMlistItemsNext(&list_items, 1); + { + AMmapItems map_items = AMpush( + &stack, + AMmapRange(doc, AMlistItemObjId(list_item), NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); + assert_string_equal(AMmapItemKey(map_item), "zip"); + { + AMlistItems list_items = AMpush( + &stack, + AMlistRange(doc, AMmapItemObjId(map_item), 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_string_equal( + AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, "a"); + assert_string_equal(AMlistItemValue( + AMlistItemsNext(&list_items, 1)).str, "b"); + } + } + list_item = AMlistItemsNext(&list_items, 1); + { + AMmapItems map_items = AMpush( + &stack, + AMmapRange(doc, AMlistItemObjId(list_item), NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); + assert_string_equal(AMmapItemKey(map_item), "foo"); + assert_string_equal(AMmapItemValue(map_item).str, "bar"); + } + list_item = AMlistItemsNext(&list_items, 1); + { + AMlistItems list_items = AMpush( + &stack, + AMlistRange(doc, AMlistItemObjId(list_item), 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).int_, + 1); + assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).int_, + 2); + assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).int_, + 3); + } + /* assert.deepEqual(doc.materialize(l4), "hello world") */ + assert_string_equal(AMpush(&stack, + AMtext(doc, l4, NULL), + AM_VALUE_STR, + cmocka_cb).str, "hello world"); +} + +/** + * \brief only returns an object id when objects are created + */ +static void test_only_returns_an_object_id_when_objects_are_created(void** state) { + AMresultStack* stack = *state; + /* const doc = create("aaaa") */ + AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMfree(AMsetActorId(doc, 
AMpush(&stack, + AMactorIdInitStr("aaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* const r1 = doc.put("_root", "foo", "bar") + assert.deepEqual(r1, null); */ + AMpush(&stack, + AMmapPutStr(doc, AM_ROOT, "foo", "bar"), + AM_VALUE_VOID, + cmocka_cb); + /* const r2 = doc.putObject("_root", "list", []) */ + AMobjId const* const r2 = AMpush( + &stack, + AMmapPutObject(doc, AM_ROOT, "list", AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* const r3 = doc.put("_root", "counter", 10, "counter") + assert.deepEqual(r3, null); */ + AMpush(&stack, + AMmapPutCounter(doc, AM_ROOT, "counter", 10), + AM_VALUE_VOID, + cmocka_cb); + /* const r4 = doc.increment("_root", "counter", 1) + assert.deepEqual(r4, null); */ + AMpush(&stack, + AMmapIncrement(doc, AM_ROOT, "counter", 1), + AM_VALUE_VOID, + cmocka_cb); + /* const r5 = doc.delete("_root", "counter") + assert.deepEqual(r5, null); */ + AMpush(&stack, + AMmapDelete(doc, AM_ROOT, "counter"), + AM_VALUE_VOID, + cmocka_cb); + /* const r6 = doc.insert(r2, 0, 10); + assert.deepEqual(r6, null); */ + AMpush(&stack, + AMlistPutInt(doc, r2, 0, true, 10), + AM_VALUE_VOID, + cmocka_cb); + /* const r7 = doc.insertObject(r2, 0, {}); */ + AMobjId const* const r7 = AMpush( + &stack, + AMlistPutObject(doc, r2, 0, true, AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* const r8 = doc.splice(r2, 1, 0, ["a", "b", "c"]); */ + AMvalue const STRS[] = {{.str_tag = AM_VALUE_STR, .str = "a", + .str_tag = AM_VALUE_STR, .str = "b", + .str_tag = AM_VALUE_STR, .str = "c"}}; + AMpush(&stack, + AMsplice(doc, r2, 1, 0, STRS, sizeof(STRS)/sizeof(AMvalue)), + AM_VALUE_VOID, + cmocka_cb); + /* assert.deepEqual(r2, "2@aaaa"); */ + assert_int_equal(AMobjIdCounter(r2), 2); + assert_string_equal(AMactorIdStr(AMobjIdActorId(r2)), "aaaa"); + /* assert.deepEqual(r7, "7@aaaa"); */ + assert_int_equal(AMobjIdCounter(r7), 7); + assert_string_equal(AMactorIdStr(AMobjIdActorId(r7)), "aaaa"); +} + +/** + * \brief objects without properties 
are preserved + */ +static void test_objects_without_properties_are_preserved(void** state) { + AMresultStack* stack = *state; + /* const doc1 = create("aaaa") */ + AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMfree(AMsetActorId(doc1, AMpush(&stack, + AMactorIdInitStr("aaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* const a = doc1.putObject("_root", "a", {}); */ + AMobjId const* const a = AMpush( + &stack, + AMmapPutObject(doc1, AM_ROOT, "a", AM_OBJ_TYPE_MAP), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* const b = doc1.putObject("_root", "b", {}); */ + AMobjId const* const b = AMpush( + &stack, + AMmapPutObject(doc1, AM_ROOT, "b", AM_OBJ_TYPE_MAP), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* const c = doc1.putObject("_root", "c", {}); */ + AMobjId const* const c = AMpush( + &stack, + AMmapPutObject(doc1, AM_ROOT, "c", AM_OBJ_TYPE_MAP), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* const d = doc1.put(c, "d", "dd"); */ + AMfree(AMmapPutStr(doc1, c, "d", "dd")); + /* const saved = doc1.save(); */ + AMbyteSpan const saved = AMpush(&stack, + AMsave(doc1), + AM_VALUE_BYTES, + cmocka_cb).bytes; + /* const doc2 = load(saved); */ + AMdoc* const doc2 = AMpush(&stack, + AMload(saved.src, saved.count), + AM_VALUE_DOC, + cmocka_cb).doc; + /* assert.deepEqual(doc2.getWithType("_root", "a"), ["map", a]) */ + AMmapItems doc_items = AMpush(&stack, + AMmapRange(doc2, AM_ROOT, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + assert_true(AMobjIdEqual(AMmapItemObjId(AMmapItemsNext(&doc_items, 1)), a)); + /* assert.deepEqual(doc2.keys(a), []) */ + AMstrs keys = AMpush(&stack, + AMkeys(doc1, a, NULL), + AM_VALUE_STRS, + cmocka_cb).strs; + assert_int_equal(AMstrsSize(&keys), 0); + /* assert.deepEqual(doc2.getWithType("_root", "b"), ["map", b]) */ + assert_true(AMobjIdEqual(AMmapItemObjId(AMmapItemsNext(&doc_items, 1)), b)); + /* assert.deepEqual(doc2.keys(b), []) */ + keys = AMpush(&stack, AMkeys(doc1, b, NULL), 
AM_VALUE_STRS, cmocka_cb).strs; + assert_int_equal(AMstrsSize(&keys), 0); + /* assert.deepEqual(doc2.getWithType("_root", "c"), ["map", c]) */ + assert_true(AMobjIdEqual(AMmapItemObjId(AMmapItemsNext(&doc_items, 1)), c)); + /* assert.deepEqual(doc2.keys(c), ["d"]) */ + keys = AMpush(&stack, AMkeys(doc1, c, NULL), AM_VALUE_STRS, cmocka_cb).strs; + assert_string_equal(AMstrsNext(&keys, 1), "d"); + /* assert.deepEqual(doc2.getWithType(c, "d"), ["str", "dd"]) */ + AMobjItems obj_items = AMpush(&stack, + AMobjValues(doc1, c, NULL), + AM_VALUE_OBJ_ITEMS, + cmocka_cb).obj_items; + assert_string_equal(AMobjItemValue(AMobjItemsNext(&obj_items, 1)).str, "dd"); +} + +/** + * \brief should allow you to forkAt a heads + */ +static void test_should_allow_you_to_forkAt_a_heads(void** state) { + AMresultStack* stack = *state; + /* const A = create("aaaaaa") */ + AMdoc* const A = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMfree(AMsetActorId(A, AMpush(&stack, + AMactorIdInitStr("aaaaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* A.put("/", "key1", "val1"); */ + AMfree(AMmapPutStr(A, AM_ROOT, "key1", "val1")); + /* A.put("/", "key2", "val2"); */ + AMfree(AMmapPutStr(A, AM_ROOT, "key2", "val2")); + /* const heads1 = A.getHeads(); */ + AMchangeHashes const heads1 = AMpush(&stack, + AMgetHeads(A), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + /* const B = A.fork("bbbbbb") */ + AMdoc* const B = AMpush(&stack, AMfork(A, NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMfree(AMsetActorId(B, AMpush(&stack, + AMactorIdInitStr("bbbbbb"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* A.put("/", "key3", "val3"); */ + AMfree(AMmapPutStr(A, AM_ROOT, "key3", "val3")); + /* B.put("/", "key4", "val4"); */ + AMfree(AMmapPutStr(B, AM_ROOT, "key4", "val4")); + /* A.merge(B) */ + AMfree(AMmerge(A, B)); + /* const heads2 = A.getHeads(); */ + AMchangeHashes const heads2 = AMpush(&stack, + AMgetHeads(A), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + /* A.put("/", 
"key5", "val5"); */ + AMfree(AMmapPutStr(A, AM_ROOT, "key5", "val5")); + /* assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/", heads1))*/ + AMmapItems AforkAt1_items = AMpush( + &stack, + AMmapRange( + AMpush(&stack, AMfork(A, &heads1), AM_VALUE_DOC, cmocka_cb).doc, + AM_ROOT, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + AMmapItems A1_items = AMpush(&stack, + AMmapRange(A, AM_ROOT, NULL, NULL, &heads1), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + assert_true(AMmapItemsEqual(&AforkAt1_items, &A1_items)); + /* assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/", heads2))*/ + AMmapItems AforkAt2_items = AMpush( + &stack, + AMmapRange( + AMpush(&stack, AMfork(A, &heads2), AM_VALUE_DOC, cmocka_cb).doc, + AM_ROOT, NULL, NULL, NULL), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + AMmapItems A2_items = AMpush(&stack, + AMmapRange(A, AM_ROOT, NULL, NULL, &heads2), + AM_VALUE_MAP_ITEMS, + cmocka_cb).map_items; + assert_true(AMmapItemsEqual(&AforkAt2_items, &A2_items)); +} + +/** + * \brief should handle merging text conflicts then saving & loading + */ +static void test_should_handle_merging_text_conflicts_then_saving_and_loading(void** state) { + AMresultStack* stack = *state; + /* const A = create("aabbcc") */ + AMdoc* const A = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMfree(AMsetActorId(A, AMpush(&stack, + AMactorIdInitStr("aabbcc"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* const At = A.putObject('_root', 'text', "") */ + AMobjId const* const At = AMpush( + &stack, + AMmapPutObject(A, AM_ROOT, "text", AM_OBJ_TYPE_TEXT), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* A.splice(At, 0, 0, 'hello') */ + AMfree(AMspliceText(A, At, 0, 0, "hello")); + /* */ + /* const B = A.fork() */ + AMdoc* const B = AMpush(&stack, AMfork(A, NULL), AM_VALUE_DOC, cmocka_cb).doc; + /* */ + /* assert.deepEqual(B.getWithType("_root", "text"), ["text", At]) */ + assert_string_equal(AMpush(&stack, + AMtext(B, 
+ AMpush(&stack, + AMmapGet(B, AM_ROOT, "text", NULL), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id, + NULL), + AM_VALUE_STR, + cmocka_cb).str, + AMpush(&stack, + AMtext(A, At, NULL), + AM_VALUE_STR, + cmocka_cb).str); + /* */ + /* B.splice(At, 4, 1) */ + AMfree(AMspliceText(B, At, 4, 1, NULL)); + /* B.splice(At, 4, 0, '!') */ + AMfree(AMspliceText(B, At, 4, 0, "!")); + /* B.splice(At, 5, 0, ' ') */ + AMfree(AMspliceText(B, At, 5, 0, " ")); + /* B.splice(At, 6, 0, 'world') */ + AMfree(AMspliceText(B, At, 6, 0, "world")); + /* */ + /* A.merge(B) */ + AMfree(AMmerge(A, B)); + /* */ + /* const binary = A.save() */ + AMbyteSpan const binary = AMpush(&stack, + AMsave(A), + AM_VALUE_BYTES, + cmocka_cb).bytes; + /* */ + /* const C = load(binary) */ + AMdoc* const C = AMpush(&stack, + AMload(binary.src, binary.count), + AM_VALUE_DOC, + cmocka_cb).doc; + /* */ + /* assert.deepEqual(C.getWithType('_root', 'text'), ['text', '1@aabbcc'])*/ + AMobjId const* const C_text = AMpush(&stack, + AMmapGet(C, AM_ROOT, "text", NULL), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + assert_int_equal(AMobjIdCounter(C_text), 1); + assert_string_equal(AMactorIdStr(AMobjIdActorId(C_text)), "aabbcc"); + /* assert.deepEqual(C.text(At), 'hell! world') */ + assert_string_equal(AMpush(&stack, + AMtext(C, At, NULL), + AM_VALUE_STR, + cmocka_cb).str, "hell! 
world"); +} + +int run_ported_wasm_basic_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test_setup_teardown(test_create_clone_and_free, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_start_and_commit, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_getting_a_nonexistent_prop_does_not_throw_an_error, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_be_able_to_set_and_get_a_simple_value, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_be_able_to_use_bytes, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_be_able_to_make_subobjects, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_be_able_to_make_lists, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_lists_have_insert_set_splice_and_push_ops, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_be_able_to_delete_non_existent_props, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_be_able_to_del, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_be_able_to_use_counters, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_be_able_to_insert_objects_into_text, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_be_able_to_save_all_or_incrementally, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text_2, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_map, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_sequence, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_be_able_to_fetch_changes_by_hash, setup_stack, 
teardown_stack), + cmocka_unit_test_setup_teardown(test_recursive_sets_are_possible, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_only_returns_an_object_id_when_objects_are_created, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_objects_without_properties_are_preserved, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_allow_you_to_forkAt_a_heads, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_should_handle_merging_text_conflicts_then_saving_and_loading, setup_stack, teardown_stack) + }; + + return cmocka_run_group_tests(tests, NULL, NULL); +} diff --git a/automerge-c/test/ported_wasm/suite.c b/automerge-c/test/ported_wasm/suite.c new file mode 100644 index 00000000..fc10fadc --- /dev/null +++ b/automerge-c/test/ported_wasm/suite.c @@ -0,0 +1,18 @@ +#include +#include +#include +#include + +/* third-party */ +#include + +extern int run_ported_wasm_basic_tests(void); + +extern int run_ported_wasm_sync_tests(void); + +int run_ported_wasm_suite(void) { + return ( + run_ported_wasm_basic_tests() + + run_ported_wasm_sync_tests() + ); +} diff --git a/automerge-c/test/ported_wasm/sync_tests.c b/automerge-c/test/ported_wasm/sync_tests.c new file mode 100644 index 00000000..ea773515 --- /dev/null +++ b/automerge-c/test/ported_wasm/sync_tests.c @@ -0,0 +1,1415 @@ +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include "automerge.h" +#include "../stack_utils.h" + +typedef struct { + AMresultStack* stack; + AMdoc* n1; + AMdoc* n2; + AMsyncState* s1; + AMsyncState* s2; +} TestState; + +static int setup(void** state) { + TestState* test_state = test_calloc(1, sizeof(TestState)); + test_state->n1 = AMpush(&test_state->stack, + AMcreate(), + AM_VALUE_DOC, + cmocka_cb).doc; + test_state->n2 = AMpush(&test_state->stack, + AMcreate(), + AM_VALUE_DOC, + cmocka_cb).doc; + test_state->s1 = AMpush(&test_state->stack, + AMsyncStateInit(), + 
AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; + test_state->s2 = AMpush(&test_state->stack, + AMsyncStateInit(), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; + *state = test_state; + return 0; +} + +static int teardown(void** state) { + TestState* test_state = *state; + AMfreeStack(&test_state->stack); + test_free(test_state); + return 0; +} + +static void sync(AMdoc* a, + AMdoc* b, + AMsyncState* a_sync_state, + AMsyncState* b_sync_state) { + static size_t const MAX_ITER = 10; + + AMsyncMessage const* a2b_msg = NULL; + AMsyncMessage const* b2a_msg = NULL; + size_t iter = 0; + do { + AMresult* a2b_msg_result = AMgenerateSyncMessage(a, a_sync_state); + AMresult* b2a_msg_result = AMgenerateSyncMessage(b, b_sync_state); + AMvalue value = AMresultValue(a2b_msg_result); + switch (value.tag) { + case AM_VALUE_SYNC_MESSAGE: { + a2b_msg = value.sync_message; + AMfree(AMreceiveSyncMessage(b, b_sync_state, a2b_msg)); + } + break; + case AM_VALUE_VOID: a2b_msg = NULL; break; + } + value = AMresultValue(b2a_msg_result); + switch (value.tag) { + case AM_VALUE_SYNC_MESSAGE: { + b2a_msg = value.sync_message; + AMfree(AMreceiveSyncMessage(a, a_sync_state, b2a_msg)); + } + break; + case AM_VALUE_VOID: b2a_msg = NULL; break; + } + if (++iter > MAX_ITER) { + fail_msg("Did not synchronize within %d iterations. 
" + "Do you have a bug causing an infinite loop?", MAX_ITER); + } + } while(a2b_msg || b2a_msg); +} + +static time_t const TIME_0 = 0; + +/** + * \brief should send a sync message implying no local data + */ +static void test_should_send_a_sync_message_implying_no_local_data(void **state) { + /* const doc = create() + const s1 = initSyncState() */ + TestState* test_state = *state; + /* const m1 = doc.generateSyncMessage(s1) + if (m1 === null) { throw new RangeError("message should not be null") } + const message: DecodedSyncMessage = decodeSyncMessage(m1) */ + AMsyncMessage const* const m1 = AMpush(&test_state->stack, + AMgenerateSyncMessage( + test_state->n1, + test_state->s1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* assert.deepStrictEqual(message.heads, []) */ + AMchangeHashes heads = AMsyncMessageHeads(m1); + assert_int_equal(AMchangeHashesSize(&heads), 0); + /* assert.deepStrictEqual(message.need, []) */ + AMchangeHashes needs = AMsyncMessageNeeds(m1); + assert_int_equal(AMchangeHashesSize(&needs), 0); + /* assert.deepStrictEqual(message.have.length, 1) */ + AMsyncHaves haves = AMsyncMessageHaves(m1); + assert_int_equal(AMsyncHavesSize(&haves), 1); + /* assert.deepStrictEqual(message.have[0].lastSync, []) */ + AMsyncHave const* have0 = AMsyncHavesNext(&haves, 1); + AMchangeHashes last_sync = AMsyncHaveLastSync(have0); + assert_int_equal(AMchangeHashesSize(&last_sync), 0); + /* assert.deepStrictEqual(message.have[0].bloom.byteLength, 0) + assert.deepStrictEqual(message.changes, []) */ + AMchanges changes = AMsyncMessageChanges(m1); + assert_int_equal(AMchangesSize(&changes), 0); +} + +/** + * \brief should not reply if we have no data as well + */ +static void test_should_not_reply_if_we_have_no_data_as_well(void **state) { + /* const n1 = create(), n2 = create() + const s1 = initSyncState(), s2 = initSyncState() */ + TestState* test_state = *state; + /* const m1 = n1.generateSyncMessage(s1) + if (m1 === null) { throw new RangeError("message 
should not be null") }*/ + AMsyncMessage const* const m1 = AMpush(&test_state->stack, + AMgenerateSyncMessage( + test_state->n1, + test_state->s1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* n2.receiveSyncMessage(s2, m1) */ + AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, m1)); + /* const m2 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(m2, null) */ + AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n2, test_state->s2), + AM_VALUE_VOID, + cmocka_cb); +} + +/** + * \brief repos with equal heads do not need a reply message + */ +static void test_repos_with_equal_heads_do_not_need_a_reply_message(void **state) { + /* const n1 = create(), n2 = create() + const s1 = initSyncState(), s2 = initSyncState() */ + TestState* test_state = *state; + /* */ + /* make two nodes with the same changes */ + /* const list = n1.putObject("_root", "n", []) */ + AMobjId const* const list = AMpush(&test_state->stack, + AMmapPutObject(test_state->n1, + AM_ROOT, + "n", + AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* for (let i = 0; i < 10; i++) { */ + for (size_t i = 0; i != 10; ++i) { + /* n1.insert(list, i, i) */ + AMfree(AMlistPutUint(test_state->n1, AM_ROOT, i, true, i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* n2.applyChanges(n1.getChanges([])) */ + AMchanges const changes = AMpush(&test_state->stack, + AMgetChanges(test_state->n1, NULL), + AM_VALUE_CHANGES, + cmocka_cb).changes; + AMfree(AMapplyChanges(test_state->n2, &changes)); + /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_true(AMequal(test_state->n1, test_state->n2)); + /* */ + /* generate a naive sync message */ + /* const m1 = n1.generateSyncMessage(s1) + if (m1 === null) { throw new RangeError("message should not be null") }*/ + AMsyncMessage const* m1 = AMpush(&test_state->stack, + 
AMgenerateSyncMessage(test_state->n1, + test_state->s1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* assert.deepStrictEqual(s1.lastSentHeads, n1.getHeads()) */ + AMchangeHashes const last_sent_heads = AMsyncStateLastSentHeads( + test_state->s1 + ); + AMchangeHashes const heads = AMpush(&test_state->stack, + AMgetHeads(test_state->n1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + assert_int_equal(AMchangeHashesCmp(&last_sent_heads, &heads), 0); + /* */ + /* heads are equal so this message should be null */ + /* n2.receiveSyncMessage(s2, m1) */ + AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, m1)); + /* const m2 = n2.generateSyncMessage(s2) + assert.strictEqual(m2, null) */ + AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n2, test_state->s2), + AM_VALUE_VOID, + cmocka_cb); +} + +/** + * \brief n1 should offer all changes to n2 when starting from nothing + */ +static void test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing(void **state) { + /* const n1 = create(), n2 = create() */ + TestState* test_state = *state; + + /* make changes for n1 that n2 should request */ + /* const list = n1.putObject("_root", "n", []) */ + AMobjId const* const list = AMpush( + &test_state->stack, + AMmapPutObject(test_state->n1, AM_ROOT, "n", AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* for (let i = 0; i < 10; i++) { */ + for (size_t i = 0; i != 10; ++i) { + /* n1.insert(list, i, i) */ + AMfree(AMlistPutUint(test_state->n1, AM_ROOT, i, true, i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_false(AMequal(test_state->n1, test_state->n2)); + /* sync(n1, n2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ + 
assert_true(AMequal(test_state->n1, test_state->n2)); +} + +/** + * \brief should sync peers where one has commits the other does not + */ +static void test_should_sync_peers_where_one_has_commits_the_other_does_not(void **state) { + /* const n1 = create(), n2 = create() */ + TestState* test_state = *state; + + /* make changes for n1 that n2 should request */ + /* const list = n1.putObject("_root", "n", []) */ + AMobjId const* const list = AMpush( + &test_state->stack, + AMmapPutObject(test_state->n1, AM_ROOT, "n", AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* for (let i = 0; i < 10; i++) { */ + for (size_t i = 0; i != 10; ++i) { + /* n1.insert(list, i, i) */ + AMfree(AMlistPutUint(test_state->n1, AM_ROOT, i, true, i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_false(AMequal(test_state->n1, test_state->n2)); + /* sync(n1, n2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_true(AMequal(test_state->n1, test_state->n2)); +} + +/** + * \brief should work with prior sync state + */ +static void test_should_work_with_prior_sync_state(void **state) { + /* create & synchronize two nodes */ + /* const n1 = create(), n2 = create() + const s1 = initSyncState(), s2 = initSyncState() */ + TestState* test_state = *state; + /* */ + /* for (let i = 0; i < 5; i++) { */ + for (size_t i = 0; i != 5; ++i) { + /* n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* */ + /* modify the first node further */ + /* for (let i = 5; i < 10; i++) { */ + for (size_t i = 
5; i != 10; ++i) { + /* n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_false(AMequal(test_state->n1, test_state->n2)); + /* sync(n1, n2, s1, s2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_true(AMequal(test_state->n1, test_state->n2)); +} + +/** + * \brief should not generate messages once synced + */ +static void test_should_not_generate_messages_once_synced(void **state) { + /* create & synchronize two nodes */ + /* const n1 = create('abc123'), n2 = create('def456') + const s1 = initSyncState(), s2 = initSyncState() */ + TestState* test_state = *state; + AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, + AMactorIdInitStr("abc123"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, + AMactorIdInitStr("def456"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* */ + /* let message, patch + for (let i = 0; i < 5; i++) { */ + for (size_t i = 0; i != 5; ++i) { + // n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + // n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* for (let i = 0; i < 5; i++) { */ + for (size_t i = 0; i != 5; ++i) { + /* n2.put("_root", "y", i) */ + AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "y", i)); + /* n2.commit("", 0) */ + AMfree(AMcommit(test_state->n2, "", &TIME_0)); + /* { */ + } + /* */ + /* n1 reports what it has */ + /* message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") }*/ + AMsyncMessage const* message = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n1, + test_state->s1), + AM_VALUE_SYNC_MESSAGE, 
+ cmocka_cb).sync_message; + /* */ + /* n2 receives that message and sends changes along with what it has */ + /* n2.receiveSyncMessage(s2, message) */ + AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, message)); + /* message = n2.generateSyncMessage(s2) + if (message === null) { throw new RangeError("message should not be null") }*/ + message = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n2, test_state->s2), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + AMchanges message_changes = AMsyncMessageChanges(message); + assert_int_equal(AMchangesSize(&message_changes), 5); + /* */ + /* n1 receives the changes and replies with the changes it now knows that + * n2 needs */ + /* n1.receiveSyncMessage(s1, message) */ + AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, message)); + /* message = n2.generateSyncMessage(s2) + if (message === null) { throw new RangeError("message should not be null") }*/ + message = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n1, test_state->s1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + message_changes = AMsyncMessageChanges(message); + assert_int_equal(AMchangesSize(&message_changes), 5); + /* */ + /* n2 applies the changes and sends confirmation ending the exchange */ + /* n2.receiveSyncMessage(s2, message) */ + AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, message)); + /* message = n2.generateSyncMessage(s2) + if (message === null) { throw new RangeError("message should not be null") }*/ + message = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n2, test_state->s2), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* */ + /* n1 receives the message and has nothing more to say */ + /* n1.receiveSyncMessage(s1, message) */ + AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, message)); + /* message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(message, null) */ + AMpush(&test_state->stack, + 
AMgenerateSyncMessage(test_state->n1, test_state->s1), + AM_VALUE_VOID, + cmocka_cb); + /* //assert.deepStrictEqual(patch, null) // no changes arrived */ + /* */ + /* n2 also has nothing left to say */ + /* message = n2.generateSyncMessage(s2) + assert.deepStrictEqual(message, null) */ + AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n2, test_state->s2), + AM_VALUE_VOID, + cmocka_cb); +} + +/** + * \brief should allow simultaneous messages during synchronization + */ +static void test_should_allow_simultaneous_messages_during_synchronization(void **state) { + /* create & synchronize two nodes */ + /* const n1 = create('abc123'), n2 = create('def456') + const s1 = initSyncState(), s2 = initSyncState() */ + TestState* test_state = *state; + AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, + AMactorIdInitStr("abc123"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, + AMactorIdInitStr("def456"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* */ + /* for (let i = 0; i < 5; i++) { */ + for (size_t i = 0; i != 5; ++i) { + /* n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* for (let i = 0; i < 5; i++) { */ + for (size_t i = 0; i != 5; ++i) { + /* n2.put("_root", "y", i) */ + AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "y", i)); + /* n2.commit("", 0) */ + AMfree(AMcommit(test_state->n2, "", &TIME_0)); + /* { */ + } + /* const head1 = n1.getHeads()[0], head2 = n2.getHeads()[0] */ + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->n1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMbyteSpan const head1 = AMchangeHashesNext(&heads1, 1); + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->n2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMbyteSpan const head2 = 
AMchangeHashesNext(&heads2, 1); + /* */ + /* both sides report what they have but have no shared peer state */ + /* let msg1to2, msg2to1 + msg1to2 = n1.generateSyncMessage(s1) + if (msg1to2 === null) { throw new RangeError("message should not be null") }*/ + AMsyncMessage const* msg1to2 = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n1, + test_state->s1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* msg2to1 = n2.generateSyncMessage(s2) + if (msg2to1 === null) { throw new RangeError("message should not be null") }*/ + AMsyncMessage const* msg2to1 = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n2, + test_state->s2), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) */ + AMchanges msg1to2_changes = AMsyncMessageChanges(msg1to2); + assert_int_equal(AMchangesSize(&msg1to2_changes), 0); + /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0)*/ + AMsyncHaves msg1to2_haves = AMsyncMessageHaves(msg1to2); + AMsyncHave const* msg1to2_have = AMsyncHavesNext(&msg1to2_haves, 1); + AMchangeHashes msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); + assert_int_equal(AMchangeHashesSize(&msg1to2_last_sync), 0); + /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) */ + AMchanges msg2to1_changes = AMsyncMessageChanges(msg2to1); + assert_int_equal(AMchangesSize(&msg2to1_changes), 0); + /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0)*/ + AMsyncHaves msg2to1_haves = AMsyncMessageHaves(msg2to1); + AMsyncHave const* msg2to1_have = AMsyncHavesNext(&msg2to1_haves, 1); + AMchangeHashes msg2to1_last_sync = AMsyncHaveLastSync(msg2to1_have); + assert_int_equal(AMchangeHashesSize(&msg2to1_last_sync), 0); + /* */ + /* n1 and n2 receive that message and update sync state but make no patch*/ + /* n1.receiveSyncMessage(s1, msg2to1) */ + AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, 
msg2to1)); + /* n2.receiveSyncMessage(s2, msg1to2) */ + AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2)); + /* */ + /* now both reply with their local changes that the other lacks + * (standard warning that 1% of the time this will result in a "needs" + * message) */ + /* msg1to2 = n1.generateSyncMessage(s1) + if (msg1to2 === null) { throw new RangeError("message should not be null") }*/ + msg1to2 = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n1, test_state->s1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 5) */ + msg1to2_changes = AMsyncMessageChanges(msg1to2); + assert_int_equal(AMchangesSize(&msg1to2_changes), 5); + /* msg2to1 = n2.generateSyncMessage(s2) + if (msg2to1 === null) { throw new RangeError("message should not be null") }*/ + msg2to1 = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n2, test_state->s2), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) */ + msg2to1_changes = AMsyncMessageChanges(msg2to1); + assert_int_equal(AMchangesSize(&msg2to1_changes), 5); + /* */ + /* both should now apply the changes and update the frontend */ + /* n1.receiveSyncMessage(s1, msg2to1) */ + AMfree(AMreceiveSyncMessage(test_state->n1, + test_state->s1, + msg2to1)); + /* assert.deepStrictEqual(n1.getMissingDeps(), []) */ + AMchangeHashes missing_deps = AMpush(&test_state->stack, + AMgetMissingDeps(test_state->n1, NULL), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + assert_int_equal(AMchangeHashesSize(&missing_deps), 0); + /* //assert.notDeepStrictEqual(patch1, null) + assert.deepStrictEqual(n1.materialize(), { x: 4, y: 4 }) */ + assert_int_equal(AMpush(&test_state->stack, + AMmapGet(test_state->n1, AM_ROOT, "x", NULL), + AM_VALUE_UINT, + cmocka_cb).uint, 4); + assert_int_equal(AMpush(&test_state->stack, + AMmapGet(test_state->n1, AM_ROOT, "y", NULL), 
+ AM_VALUE_UINT, + cmocka_cb).uint, 4); + /* */ + /* n2.receiveSyncMessage(s2, msg1to2) */ + AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2)); + /* assert.deepStrictEqual(n2.getMissingDeps(), []) */ + missing_deps = AMpush(&test_state->stack, + AMgetMissingDeps(test_state->n2, NULL), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + assert_int_equal(AMchangeHashesSize(&missing_deps), 0); + /* //assert.notDeepStrictEqual(patch2, null) + assert.deepStrictEqual(n2.materialize(), { x: 4, y: 4 }) */ + assert_int_equal(AMpush(&test_state->stack, + AMmapGet(test_state->n2, AM_ROOT, "x", NULL), + AM_VALUE_UINT, + cmocka_cb).uint, 4); + assert_int_equal(AMpush(&test_state->stack, + AMmapGet(test_state->n2, AM_ROOT, "y", NULL), + AM_VALUE_UINT, + cmocka_cb).uint, 4); + /* */ + /* The response acknowledges the changes received and sends no further + * changes */ + /* msg1to2 = n1.generateSyncMessage(s1) + if (msg1to2 === null) { throw new RangeError("message should not be null") }*/ + msg1to2 = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n1, test_state->s1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) */ + msg1to2_changes = AMsyncMessageChanges(msg1to2); + assert_int_equal(AMchangesSize(&msg1to2_changes), 0); + /* msg2to1 = n2.generateSyncMessage(s2) + if (msg2to1 === null) { throw new RangeError("message should not be null") }*/ + msg2to1 = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n2, test_state->s2), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) */ + msg2to1_changes = AMsyncMessageChanges(msg2to1); + assert_int_equal(AMchangesSize(&msg2to1_changes), 0); + /* */ + /* After receiving acknowledgements, their shared heads should be equal */ + /* n1.receiveSyncMessage(s1, msg2to1) */ + AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, 
msg2to1)); + /* n2.receiveSyncMessage(s2, msg1to2) */ + AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2)); + /* assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort()) */ + AMchangeHashes s1_shared_heads = AMsyncStateSharedHeads(test_state->s1); + assert_memory_equal(AMchangeHashesNext(&s1_shared_heads, 1).src, + head1.src, + head1.count); + assert_memory_equal(AMchangeHashesNext(&s1_shared_heads, 1).src, + head2.src, + head2.count); + /* assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort()) */ + AMchangeHashes s2_shared_heads = AMsyncStateSharedHeads(test_state->s2); + assert_memory_equal(AMchangeHashesNext(&s2_shared_heads, 1).src, + head1.src, + head1.count); + assert_memory_equal(AMchangeHashesNext(&s2_shared_heads, 1).src, + head2.src, + head2.count); + /* //assert.deepStrictEqual(patch1, null) + //assert.deepStrictEqual(patch2, null) */ + /* */ + /* We're in sync, no more messages required */ + /* msg1to2 = n1.generateSyncMessage(s1) + assert.deepStrictEqual(msg1to2, null) */ + AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n1, test_state->s1), + AM_VALUE_VOID, + cmocka_cb); + /* msg2to1 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(msg2to1, null) */ + AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n2, test_state->s2), + AM_VALUE_VOID, + cmocka_cb); + /* */ + /* If we make one more change and start another sync then its lastSync + * should be updated */ + /* n1.put("_root", "x", 5) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 5)); + /* msg1to2 = n1.generateSyncMessage(s1) + if (msg1to2 === null) { throw new RangeError("message should not be null") }*/ + msg1to2 = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n1, test_state->s1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()*/ + msg1to2_haves = AMsyncMessageHaves(msg1to2); + msg1to2_have = 
AMsyncHavesNext(&msg1to2_haves, 1); + msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); + AMbyteSpan msg1to2_last_sync_next = AMchangeHashesNext(&msg1to2_last_sync, 1); + assert_int_equal(msg1to2_last_sync_next.count, head1.count); + assert_memory_equal(msg1to2_last_sync_next.src, head1.src, head1.count); + msg1to2_last_sync_next = AMchangeHashesNext(&msg1to2_last_sync, 1); + assert_int_equal(msg1to2_last_sync_next.count, head2.count); + assert_memory_equal(msg1to2_last_sync_next.src, head2.src, head2.count); +} + +/** + * \brief should assume sent changes were received until we hear otherwise + */ +static void test_should_assume_sent_changes_were_received_until_we_hear_otherwise(void **state) { + /* const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() */ + TestState* test_state = *state; + AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* let message = null */ + /* */ + /* const items = n1.putObject("_root", "items", []) */ + AMobjId const* items = AMpush(&test_state->stack, + AMmapPutObject(test_state->n1, + AM_ROOT, + "items", + AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* */ + /* sync(n1, n2, s1, s2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* */ + /* n1.push(items, "x") */ + AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, "x")); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") }*/ + AMsyncMessage const* message = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n1, + 
test_state->s1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) */ + AMchanges message_changes = AMsyncMessageChanges(message); + assert_int_equal(AMchangesSize(&message_changes), 1); + /* */ + /* n1.push(items, "y") */ + AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, "y")); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") }*/ + message = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n1, test_state->s1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) */ + message_changes = AMsyncMessageChanges(message); + assert_int_equal(AMchangesSize(&message_changes), 1); + /* */ + /* n1.push(items, "z") */ + AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, "z")); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* */ + /* message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") }*/ + message = AMpush(&test_state->stack, + AMgenerateSyncMessage(test_state->n1, test_state->s1), + AM_VALUE_SYNC_MESSAGE, + cmocka_cb).sync_message; + /* assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) */ + message_changes = AMsyncMessageChanges(message); + assert_int_equal(AMchangesSize(&message_changes), 1); +} + +/** + * \brief should work regardless of who initiates the exchange + */ +static void test_should_work_regardless_of_who_initiates_the_exchange(void **state) { + /* create & synchronize two nodes */ + /* const n1 = create(), n2 = create() + const s1 = initSyncState(), s2 = initSyncState() */ + TestState* test_state = *state; + /* */ + /* for (let i = 0; i < 5; i++) { */ + for (size_t i = 0; i != 5; ++i) { + /* n1.put("_root", "x", i) */ + 
AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + /* sync(n1, n2, s1, s2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* */ + /* modify the first node further */ + /* for (let i = 5; i < 10; i++) { */ + for (size_t i = 5; i != 10; ++i) { + /* n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_false(AMequal(test_state->n1, test_state->n2)); + /* sync(n1, n2, s1, s2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_true(AMequal(test_state->n1, test_state->n2)); +} + +/** + * \brief should work without prior sync state + */ +static void test_should_work_without_prior_sync_state(void **state) { + /* Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 + * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + * `-- c15 <-- c16 <-- c17 + * lastSync is undefined. 
*/ + /* */ + /* create two peers both with divergent commits */ + /* const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() */ + TestState* test_state = *state; + AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* */ + /* for (let i = 0; i < 10; i++) { */ + for (size_t i = 0; i != 10; ++i) { + /* n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + /* sync(n1, n2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* */ + /* for (let i = 10; i < 15; i++) { */ + for (size_t i = 10; i != 15; ++i) { + /* n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + /* for (let i = 15; i < 18; i++) { */ + for (size_t i = 15; i != 18; ++i) { + /* n2.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "x", i)); + /* n2.commit("", 0) */ + AMfree(AMcommit(test_state->n2, "", &TIME_0)); + /* { */ + } + /* */ + /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_false(AMequal(test_state->n1, test_state->n2)); + /* sync(n1, n2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->n1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->n2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, 
&heads2), 0); + /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_true(AMequal(test_state->n1, test_state->n2)); +} + +/** + * \brief should work with prior sync state + */ +static void test_should_work_with_prior_sync_state_2(void **state) { + /* Scenario: + * ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 + * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + * `-- c15 <-- c16 <-- c17 + * lastSync is c9. */ + /* */ + /* create two peers both with divergent commits */ + /* const n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() */ + TestState* test_state = *state; + AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* */ + /* for (let i = 0; i < 10; i++) { */ + for (size_t i = 0; i != 10; ++i) { + /* n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + /* sync(n1, n2, s1, s2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* */ + /* for (let i = 10; i < 15; i++) { */ + for (size_t i = 10; i != 15; ++i) { + /* n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* for (let i = 15; i < 18; i++) { */ + for (size_t i = 15; i != 18; ++i) { + /* n2.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "x", i)); + /* n2.commit("", 0) */ + AMfree(AMcommit(test_state->n2, "", &TIME_0)); + /* { */ + } + /* */ + /* s1 = decodeSyncState(encodeSyncState(s1)) */ + AMbyteSpan encoded = AMpush(&test_state->stack, + AMsyncStateEncode(test_state->s1), + AM_VALUE_BYTES, + 
cmocka_cb).bytes; + AMsyncState* s1 = AMpush(&test_state->stack, + AMsyncStateDecode(encoded.src, encoded.count), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; + /* s2 = decodeSyncState(encodeSyncState(s2)) */ + encoded = AMpush(&test_state->stack, + AMsyncStateEncode(test_state->s2), + AM_VALUE_BYTES, + cmocka_cb).bytes; + AMsyncState* s2 = AMpush(&test_state->stack, + AMsyncStateDecode(encoded.src, + encoded.count), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; + /* */ + /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_false(AMequal(test_state->n1, test_state->n2)); + /* sync(n1, n2, s1, s2) */ + sync(test_state->n1, test_state->n2, s1, s2); + /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->n1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->n2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_true(AMequal(test_state->n1, test_state->n2)); +} + +/** + * \brief should ensure non-empty state after sync + */ +static void test_should_ensure_non_empty_state_after_sync(void **state) { + /* const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() */ + TestState* test_state = *state; + AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* */ + /* for (let i = 0; i < 3; i++) { */ + for (size_t i = 0; i != 3; ++i) { + /* n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + 
AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + /* sync(n1, n2, s1, s2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* */ + /* assert.deepStrictEqual(s1.sharedHeads, n1.getHeads()) */ + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->n1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes shared_heads1 = AMsyncStateSharedHeads(test_state->s1); + assert_int_equal(AMchangeHashesCmp(&shared_heads1, &heads1), 0); + /* assert.deepStrictEqual(s2.sharedHeads, n1.getHeads()) */ + AMchangeHashes shared_heads2 = AMsyncStateSharedHeads(test_state->s2); + assert_int_equal(AMchangeHashesCmp(&shared_heads2, &heads1), 0); +} + +/** + * \brief should re-sync after one node crashed with data loss + */ +static void test_should_resync_after_one_node_crashed_with_data_loss(void **state) { + /* Scenario: (r) (n2) (n1) + * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 + * n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync + * is c2 + * we want to successfully sync (n1) with (r), even though (n1) believes + * it's talking to (n2) */ + /* const n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState() + const s2 = initSyncState() */ + TestState* test_state = *state; + AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* */ + /* n1 makes three changes, which we sync to n2 */ + /* for (let i = 0; i < 3; i++) { */ + for (size_t i = 0; i != 3; ++i) { + /* n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* */ + 
/* save a copy of n2 as "r" to simulate recovering from a crash */ + /* let r + let rSyncState + ;[r, rSyncState] = [n2.clone(), s2.clone()] */ + AMdoc* r = AMpush(&test_state->stack, + AMclone(test_state->n2), + AM_VALUE_DOC, + cmocka_cb).doc; + AMbyteSpan const encoded_s2 = AMpush(&test_state->stack, + AMsyncStateEncode(test_state->s2), + AM_VALUE_BYTES, + cmocka_cb).bytes; + AMsyncState* sync_state_r = AMpush(&test_state->stack, + AMsyncStateDecode(encoded_s2.src, + encoded_s2.count), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; + /* */ + /* sync another few commits */ + /* for (let i = 3; i < 6; i++) { */ + for (size_t i = 3; i != 6; ++i) { + /* n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + /* sync(n1, n2, s1, s2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* */ + /* everyone should be on the same page here */ + /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->n1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->n2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_true(AMequal(test_state->n1, test_state->n2)); + /* */ + /* now make a few more changes and then attempt to sync the fully + * up-to-date n1 with with the confused r */ + /* for (let i = 6; i < 9; i++) { */ + for (size_t i = 6; i != 9; ++i) { + /* n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + /* s1 = decodeSyncState(encodeSyncState(s1)) */ + AMbyteSpan const encoded_s1 = 
AMpush(&test_state->stack, + AMsyncStateEncode(test_state->s1), + AM_VALUE_BYTES, + cmocka_cb).bytes; + AMsyncState* const s1 = AMpush(&test_state->stack, + AMsyncStateDecode(encoded_s1.src, + encoded_s1.count), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; + /* rSyncState = decodeSyncState(encodeSyncState(rSyncState)) */ + AMbyteSpan const encoded_r = AMpush(&test_state->stack, + AMsyncStateEncode(sync_state_r), + AM_VALUE_BYTES, + cmocka_cb).bytes; + sync_state_r = AMpush(&test_state->stack, + AMsyncStateDecode(encoded_r.src, encoded_r.count), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; + /* */ + /* assert.notDeepStrictEqual(n1.getHeads(), r.getHeads()) */ + heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->n1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes heads_r = AMpush(&test_state->stack, + AMgetHeads(r), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + assert_int_not_equal(AMchangeHashesCmp(&heads1, &heads_r), 0); + /* assert.notDeepStrictEqual(n1.materialize(), r.materialize()) */ + assert_false(AMequal(test_state->n1, r)); + /* assert.deepStrictEqual(n1.materialize(), { x: 8 }) */ + assert_int_equal(AMpush(&test_state->stack, + AMmapGet(test_state->n1, AM_ROOT, "x", NULL), + AM_VALUE_UINT, + cmocka_cb).uint, 8); + /* assert.deepStrictEqual(r.materialize(), { x: 2 }) */ + assert_int_equal(AMpush(&test_state->stack, + AMmapGet(r, AM_ROOT, "x", NULL), + AM_VALUE_UINT, + cmocka_cb).uint, 2); + /* sync(n1, r, s1, rSyncState) */ + sync(test_state->n1, r, test_state->s1, sync_state_r); + /* assert.deepStrictEqual(n1.getHeads(), r.getHeads()) */ + heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->n1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + heads_r = AMpush(&test_state->stack, + AMgetHeads(r), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads_r), 0); + /* assert.deepStrictEqual(n1.materialize(), r.materialize()) */ + 
assert_true(AMequal(test_state->n1, r)); +} + +/** + * \brief should re-sync after one node experiences data loss without disconnecting + */ +static void test_should_resync_after_one_node_experiences_data_loss_without_disconnecting(void **state) { + /* const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() */ + TestState* test_state = *state; + AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* */ + /* n1 makes three changes which we sync to n2 */ + /* for (let i = 0; i < 3; i++) { */ + for (size_t i = 0; i != 3; ++i) { + /* n1.put("_root", "x", i) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + /* n1.commit("", 0) */ + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* { */ + } + /* */ + /* sync(n1, n2, s1, s2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* */ + /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->n1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->n2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_true(AMequal(test_state->n1, test_state->n2)); + /* */ + /* const n2AfterDataLoss = create('89abcdef') */ + AMdoc* n2_after_data_loss = AMpush(&test_state->stack, + AMcreate(), + AM_VALUE_DOC, + cmocka_cb).doc; + AMfree(AMsetActorId(n2_after_data_loss, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* */ + /* "n2" now has no data, but n1 still thinks it 
does. Note we don't do + * decodeSyncState(encodeSyncState(s1)) in order to simulate data loss + * without disconnecting */ + /* sync(n1, n2AfterDataLoss, s1, initSyncState()) */ + AMsyncState* s2_after_data_loss = AMpush(&test_state->stack, + AMsyncStateInit(), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; + sync(test_state->n1, n2_after_data_loss, test_state->s1, s2_after_data_loss); + /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ + heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->n1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->n2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_true(AMequal(test_state->n1, test_state->n2)); +} + +/** + * \brief should handle changes concurrent to the last sync heads + */ +static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void **state) { + /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98')*/ + TestState* test_state = *state; + AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMdoc* n3 = AMpush(&test_state->stack, + AMcreate(), + AM_VALUE_DOC, + cmocka_cb).doc; + AMfree(AMsetActorId(n3, AMpush(&test_state->stack, + AMactorIdInitStr("fedcba98"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* const s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState()*/ + AMsyncState* s12 = test_state->s1; + AMsyncState* s21 = test_state->s2; + AMsyncState* s23 = AMpush(&test_state->stack, + AMsyncStateInit(), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; + AMsyncState* s32 = 
AMpush(&test_state->stack, + AMsyncStateInit(), + AM_VALUE_SYNC_STATE, + cmocka_cb).sync_state; + /* */ + /* Change 1 is known to all three nodes */ + /* //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) */ + /* n1.put("_root", "x", 1); n1.commit("", 0) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 1)); + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* */ + /* sync(n1, n2, s12, s21) */ + sync(test_state->n1, test_state->n2, s12, s21); + /* sync(n2, n3, s23, s32) */ + sync(test_state->n2, n3, s23, s32); + /* */ + /* Change 2 is known to n1 and n2 */ + /* n1.put("_root", "x", 2); n1.commit("", 0) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 2)); + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* */ + /* sync(n1, n2, s12, s21) */ + sync(test_state->n1, test_state->n2, s12, s21); + /* */ + /* Each of the three nodes makes one change (changes 3, 4, 5) */ + /* n1.put("_root", "x", 3); n1.commit("", 0) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 3)); + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* n2.put("_root", "x", 4); n2.commit("", 0) */ + AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "x", 4)); + AMfree(AMcommit(test_state->n2, "", &TIME_0)); + /* n3.put("_root", "x", 5); n3.commit("", 0) */ + AMfree(AMmapPutUint(n3, AM_ROOT, "x", 5)); + AMfree(AMcommit(n3, "", &TIME_0)); + /* */ + /* Apply n3's latest change to n2. */ + /* let change = n3.getLastLocalChange() + if (change === null) throw new RangeError("no local change") */ + AMchanges changes = AMpush(&test_state->stack, + AMgetLastLocalChange(n3), + AM_VALUE_CHANGES, + cmocka_cb).changes; + /* n2.applyChanges([change]) */ + AMfree(AMapplyChanges(test_state->n2, &changes)); + /* */ + /* Now sync n1 and n2. 
n3's change is concurrent to n1 and n2's last sync + * heads */ + /* sync(n1, n2, s12, s21) */ + sync(test_state->n1, test_state->n2, s12, s21); + /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->n1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->n2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_true(AMequal(test_state->n1, test_state->n2)); +} + +/** + * \brief should handle histories with lots of branching and merging + */ +static void test_should_handle_histories_with_lots_of_branching_and_merging(void **state) { + /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') + const s1 = initSyncState(), s2 = initSyncState() */ + TestState* test_state = *state; + AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + AMdoc* n3 = AMpush(&test_state->stack, + AMcreate(), + AM_VALUE_DOC, + cmocka_cb).doc; + AMfree(AMsetActorId(n3, AMpush(&test_state->stack, + AMactorIdInitStr("fedcba98"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id)); + /* n1.put("_root", "x", 0); n1.commit("", 0) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 0)); + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* let change1 = n1.getLastLocalChange() + if (change1 === null) throw new RangeError("no local change") */ + AMchanges change1 = AMpush(&test_state->stack, + AMgetLastLocalChange(test_state->n1), + AM_VALUE_CHANGES, + cmocka_cb).changes; + /* n2.applyChanges([change1]) */ + AMfree(AMapplyChanges(test_state->n2, 
&change1)); + /* let change2 = n1.getLastLocalChange() + if (change2 === null) throw new RangeError("no local change") */ + AMchanges change2 = AMpush(&test_state->stack, + AMgetLastLocalChange(test_state->n1), + AM_VALUE_CHANGES, + cmocka_cb).changes; + /* n3.applyChanges([change2]) */ + AMfree(AMapplyChanges(n3, &change2)); + /* n3.put("_root", "x", 1); n3.commit("", 0) */ + AMfree(AMmapPutUint(n3, AM_ROOT, "x", 1)); + AMfree(AMcommit(n3, "", &TIME_0)); + /* */ + /* - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 + * / \/ \/ \/ + * / /\ /\ /\ + * c0 <---- n2c1 <------ n2c2 <------ n2c3 <-- etc. <-- n2c20 <------ n2c21 + * \ / + * ---------------------------------------------- n3c1 <----- + */ + /* for (let i = 1; i < 20; i++) { */ + for (size_t i = 1; i != 20; ++i) { + /* n1.put("_root", "n1", i); n1.commit("", 0) */ + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "n1", i)); + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* n2.put("_root", "n2", i); n2.commit("", 0) */ + AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "n2", i)); + AMfree(AMcommit(test_state->n2, "", &TIME_0)); + /* const change1 = n1.getLastLocalChange() + if (change1 === null) throw new RangeError("no local change") */ + AMchanges change1 = AMpush(&test_state->stack, + AMgetLastLocalChange(test_state->n1), + AM_VALUE_CHANGES, + cmocka_cb).changes; + /* const change2 = n2.getLastLocalChange() + if (change2 === null) throw new RangeError("no local change") */ + AMchanges change2 = AMpush(&test_state->stack, + AMgetLastLocalChange(test_state->n2), + AM_VALUE_CHANGES, + cmocka_cb).changes; + /* n1.applyChanges([change2]) */ + AMfree(AMapplyChanges(test_state->n1, &change2)); + /* n2.applyChanges([change1]) */ + AMfree(AMapplyChanges(test_state->n2, &change1)); + /* { */ + } + /* */ + /* sync(n1, n2, s1, s2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* */ + /* Having n3's last change concurrent to the last sync heads forces us into + * the slower 
code path */ + /* const change3 = n2.getLastLocalChange() + if (change3 === null) throw new RangeError("no local change") */ + AMchanges change3 = AMpush(&test_state->stack, + AMgetLastLocalChange(n3), + AM_VALUE_CHANGES, + cmocka_cb).changes; + /* n2.applyChanges([change3]) */ + AMfree(AMapplyChanges(test_state->n2, &change3)); + /* n1.put("_root", "n1", "final"); n1.commit("", 0) */ + AMfree(AMmapPutStr(test_state->n1, AM_ROOT, "n1", "final")); + AMfree(AMcommit(test_state->n1, "", &TIME_0)); + /* n2.put("_root", "n2", "final"); n2.commit("", 0) */ + AMfree(AMmapPutStr(test_state->n2, AM_ROOT, "n2", "final")); + AMfree(AMcommit(test_state->n2, "", &TIME_0)); + /* */ + /* sync(n1, n2, s1, s2) */ + sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); + /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ + AMchangeHashes heads1 = AMpush(&test_state->stack, + AMgetHeads(test_state->n1), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + AMchangeHashes heads2 = AMpush(&test_state->stack, + AMgetHeads(test_state->n2), + AM_VALUE_CHANGE_HASHES, + cmocka_cb).change_hashes; + assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ + assert_true(AMequal(test_state->n1, test_state->n2)); +} + +int run_ported_wasm_sync_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test_setup_teardown(test_should_send_a_sync_message_implying_no_local_data, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_not_reply_if_we_have_no_data_as_well, setup, teardown), + cmocka_unit_test_setup_teardown(test_repos_with_equal_heads_do_not_need_a_reply_message, setup, teardown), + cmocka_unit_test_setup_teardown(test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_sync_peers_where_one_has_commits_the_other_does_not, setup, teardown), + 
cmocka_unit_test_setup_teardown(test_should_work_with_prior_sync_state, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_not_generate_messages_once_synced, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_allow_simultaneous_messages_during_synchronization, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_assume_sent_changes_were_received_until_we_hear_otherwise, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_work_regardless_of_who_initiates_the_exchange, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_work_without_prior_sync_state, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_work_with_prior_sync_state_2, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_ensure_non_empty_state_after_sync, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_resync_after_one_node_crashed_with_data_loss, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_resync_after_one_node_experiences_data_loss_without_disconnecting, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_handle_changes_concurrrent_to_the_last_sync_heads, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_handle_histories_with_lots_of_branching_and_merging, setup, teardown), + }; + + return cmocka_run_group_tests(tests, NULL, NULL); +} diff --git a/automerge-c/test/sync_tests.c b/automerge-c/test/sync_tests.c deleted file mode 100644 index b0ea1e1f..00000000 --- a/automerge-c/test/sync_tests.c +++ /dev/null @@ -1,1143 +0,0 @@ -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include "automerge.h" -#include "stack_utils.h" - -typedef struct { - AMresultStack* stack; - AMdoc* doc1; - AMdoc* doc2; - AMsyncState* sync_state1; - AMsyncState* sync_state2; -} TestState; - -static int setup(void** state) { - TestState* test_state = test_calloc(1, sizeof(TestState)); - test_state->doc1 = AMpush(&test_state->stack, - 
AMcreate(), - AM_VALUE_DOC, - cmocka_cb).doc; - test_state->doc2 = AMpush(&test_state->stack, - AMcreate(), - AM_VALUE_DOC, - cmocka_cb).doc; - test_state->sync_state1 = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; - test_state->sync_state2 = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; - *state = test_state; - return 0; -} - -static int teardown(void** state) { - TestState* test_state = *state; - AMfreeStack(&test_state->stack); - test_free(test_state); - return 0; -} - -static void sync(AMdoc* a, - AMdoc* b, - AMsyncState* a_sync_state, - AMsyncState* b_sync_state) { - static size_t const MAX_ITER = 10; - - AMsyncMessage const* a2b_msg = NULL; - AMsyncMessage const* b2a_msg = NULL; - size_t iter = 0; - do { - AMresult* a2b_msg_result = AMgenerateSyncMessage(a, a_sync_state); - AMresult* b2a_msg_result = AMgenerateSyncMessage(b, b_sync_state); - AMvalue value = AMresultValue(a2b_msg_result); - switch (value.tag) { - case AM_VALUE_SYNC_MESSAGE: { - a2b_msg = value.sync_message; - AMfree(AMreceiveSyncMessage(b, b_sync_state, a2b_msg)); - } - break; - case AM_VALUE_VOID: a2b_msg = NULL; break; - } - value = AMresultValue(b2a_msg_result); - switch (value.tag) { - case AM_VALUE_SYNC_MESSAGE: { - b2a_msg = value.sync_message; - AMfree(AMreceiveSyncMessage(a, a_sync_state, b2a_msg)); - } - break; - case AM_VALUE_VOID: b2a_msg = NULL; break; - } - if (++iter > MAX_ITER) { - fail_msg("Did not synchronize within %d iterations. " - "Do you have a bug causing an infinite loop?", MAX_ITER); - } - } while(a2b_msg || b2a_msg); -} - -/** - * \brief Data sync protocol with docs already in sync, an empty local doc - * should send a sync message implying no local data. 
- */ -static void test_converged_empty_local_doc_reply_no_local_data(void **state) { - TestState* test_state = *state; - AMsyncMessage const* const sync_message = AMpush(&test_state->stack, - AMgenerateSyncMessage( - test_state->doc1, - test_state->sync_state1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - AMchangeHashes heads = AMsyncMessageHeads(sync_message); - assert_int_equal(AMchangeHashesSize(&heads), 0); - AMchangeHashes needs = AMsyncMessageNeeds(sync_message); - assert_int_equal(AMchangeHashesSize(&needs), 0); - AMsyncHaves haves = AMsyncMessageHaves(sync_message); - assert_int_equal(AMsyncHavesSize(&haves), 1); - AMsyncHave const* have0 = AMsyncHavesNext(&haves, 1); - AMchangeHashes last_sync = AMsyncHaveLastSync(have0); - assert_int_equal(AMchangeHashesSize(&last_sync), 0); - AMchanges changes = AMsyncMessageChanges(sync_message); - assert_int_equal(AMchangesSize(&changes), 0); -} - -/** - * \brief Data sync protocol with docs already in sync, an empty local doc - * should not reply if we have no data as well. - */ -static void test_converged_empty_local_doc_no_reply(void **state) { - TestState* test_state = *state; - AMsyncMessage const* const sync_message1 = AMpush(&test_state->stack, - AMgenerateSyncMessage( - test_state->doc1, - test_state->sync_state1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - AMfree(AMreceiveSyncMessage(test_state->doc2, - test_state->sync_state2, - sync_message1)); - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2), - AM_VALUE_VOID, - cmocka_cb); -} - -/** - * \brief Data sync protocol with docs already in sync, documents with data and - * repos with equal heads do not need a reply message. - */ -static void test_converged_equal_heads_no_reply(void **state) { - TestState* test_state = *state; - - /* Make two nodes with the same changes. 
*/ - time_t const time = 0; - for (size_t index = 0; index != 10; ++index) { - AMfree(AMlistPutUint(test_state->doc1, AM_ROOT, index, true, index)); - AMcommit(test_state->doc1, NULL, &time); - } - AMchanges const changes = AMpush(&test_state->stack, - AMgetChanges(test_state->doc1, NULL), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(test_state->doc2, &changes)); - assert_true(AMequal(test_state->doc1, test_state->doc2)); - - /* Generate a naive sync message. */ - AMsyncMessage const* sync_message1 = AMpush(&test_state->stack, - AMgenerateSyncMessage( - test_state->doc1, - test_state->sync_state1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - AMchangeHashes const last_sent_heads = AMsyncStateLastSentHeads( - test_state->sync_state1 - ); - AMchangeHashes const heads = AMpush(&test_state->stack, - AMgetHeads(test_state->doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&last_sent_heads, &heads), 0); - - /* Heads are equal so this message should be void. */ - AMfree(AMreceiveSyncMessage(test_state->doc2, - test_state->sync_state2, - sync_message1)); - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2), - AM_VALUE_VOID, - cmocka_cb); -} - -/** - * \brief Data sync protocol with docs already in sync, documents with data and - * the first node should offer all changes to the second node when - * starting from nothing. - */ -static void test_converged_offer_all_changes_from_nothing(void **state) { - TestState* test_state = *state; - - /* Make changes for the first node that the second node should request. 
*/ - time_t const time = 0; - for (size_t index = 0; index != 10; ++index) { - AMfree(AMlistPutUint(test_state->doc1, AM_ROOT, index, true, index)); - AMcommit(test_state->doc1, NULL, &time); - } - - assert_false(AMequal(test_state->doc1, test_state->doc2)); - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - assert_true(AMequal(test_state->doc1, test_state->doc2)); -} - -/** - * \brief Data sync protocol with docs already in sync, documents with data and - * it should sync peers where one has commits the other does not. - */ -static void test_converged_sync_peers_with_uneven_commits(void **state) { - TestState* test_state = *state; - - /* Make changes for the first node that the second node should request. */ - time_t const time = 0; - for (size_t index = 0; index != 10; ++index) { - AMfree(AMlistPutUint(test_state->doc1, AM_ROOT, index, true, index)); - AMcommit(test_state->doc1, NULL, &time); - } - - assert_false(AMequal(test_state->doc1, test_state->doc2)); - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - assert_true(AMequal(test_state->doc1, test_state->doc2)); -} - -/** - * \brief Data sync protocol with docs already in sync, documents with data and - * it should work with prior sync state. - */ -static void test_converged_works_with_prior_sync_state(void **state) { - /* Create & synchronize two nodes. */ - TestState* test_state = *state; - - time_t const time = 0; - for (size_t value = 0; value != 5; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - } - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - - /* Modify the first node further. 
*/ - for (size_t value = 5; value != 10; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - } - - assert_false(AMequal(test_state->doc1, test_state->doc2)); - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - assert_true(AMequal(test_state->doc1, test_state->doc2)); -} - -/** - * \brief Data sync protocol with docs already in sync, documents with data and - * it should not generate messages once synced. - */ -static void test_converged_no_message_once_synced(void **state) { - /* Create & synchronize two nodes. */ - TestState* test_state = *state; - AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, - AMactorIdInitStr("abc123"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, - AMactorIdInitStr("def456"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - - time_t const time = 0; - for (size_t value = 0; value != 5; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "y", value)); - AMcommit(test_state->doc2, NULL, &time); - } - - /* The first node reports what it has. */ - AMsyncMessage const* message = AMpush(&test_state->stack, - AMgenerateSyncMessage( - test_state->doc1, - test_state->sync_state1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - - /* The second node receives that message and sends changes along with what - * it has. 
*/ - AMfree(AMreceiveSyncMessage(test_state->doc2, - test_state->sync_state2, - message)); - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc2, - test_state->sync_state2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - AMchanges message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 5); - - /* The first node receives the changes and replies with the changes it now - * knows that the second node needs. */ - AMfree(AMreceiveSyncMessage(test_state->doc1, - test_state->sync_state1, - message)); - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 5); - - /* The second node applies the changes and sends confirmation ending the - * exchange. */ - AMfree(AMreceiveSyncMessage(test_state->doc2, - test_state->sync_state2, - message)); - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc2, - test_state->sync_state2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - - /* The first node receives the message and has nothing more to say. */ - AMfree(AMreceiveSyncMessage(test_state->doc1, - test_state->sync_state1, - message)); - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1), - AM_VALUE_VOID, - cmocka_cb); - - /* The second node also has nothing left to say. */ - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2), - AM_VALUE_VOID, - cmocka_cb); -} - -/** - * \brief Data sync protocol with docs already in sync, documents with data and - * it should allow simultaneous messages during synchronization. - */ -static void test_converged_allow_simultaneous_messages(void **state) { - /* Create & synchronize two nodes. 
*/ - TestState* test_state = *state; - AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, - AMactorIdInitStr("abc123"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, - AMactorIdInitStr("def456"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - - time_t const time = 0; - for (size_t value = 0; value != 5; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "y", value)); - AMcommit(test_state->doc2, NULL, &time); - } - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMbyteSpan head1 = AMchangeHashesNext(&heads1, 1); - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMbyteSpan head2 = AMchangeHashesNext(&heads2, 1); - - /* Both sides report what they have but have no shared peer state. 
*/ - AMsyncMessage const* msg1to2 = AMpush(&test_state->stack, - AMgenerateSyncMessage( - test_state->doc1, - test_state->sync_state1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - AMsyncMessage const* msg2to1 = AMpush(&test_state->stack, - AMgenerateSyncMessage( - test_state->doc2, - test_state->sync_state2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - AMchanges msg1to2_changes = AMsyncMessageChanges(msg1to2); - assert_int_equal(AMchangesSize(&msg1to2_changes), 0); - AMsyncHaves msg1to2_haves = AMsyncMessageHaves(msg1to2); - AMsyncHave const* msg1to2_have = AMsyncHavesNext(&msg1to2_haves, 1); - AMchangeHashes msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); - assert_int_equal(AMchangeHashesSize(&msg1to2_last_sync), 0); - AMchanges msg2to1_changes = AMsyncMessageChanges(msg2to1); - assert_int_equal(AMchangesSize(&msg2to1_changes), 0); - AMsyncHaves msg2to1_haves = AMsyncMessageHaves(msg2to1); - AMsyncHave const* msg2to1_have = AMsyncHavesNext(&msg2to1_haves, 1); - AMchangeHashes msg2to1_last_sync = AMsyncHaveLastSync(msg2to1_have); - assert_int_equal(AMchangeHashesSize(&msg2to1_last_sync), 0); - - /* Both nodes receive messages from each other and update their - * synchronization states. */ - AMfree(AMreceiveSyncMessage(test_state->doc1, - test_state->sync_state1, - msg2to1)); - AMfree(AMreceiveSyncMessage(test_state->doc2, - test_state->sync_state2, - msg1to2)); - - /* Now both reply with their local changes that the other lacks - * (standard warning that 1% of the time this will result in a "needs" - * message). 
*/ - msg1to2 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - msg1to2_changes = AMsyncMessageChanges(msg1to2); - assert_int_equal(AMchangesSize(&msg1to2_changes), 5); - msg2to1 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc2, - test_state->sync_state2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - msg2to1_changes = AMsyncMessageChanges(msg2to1); - assert_int_equal(AMchangesSize(&msg2to1_changes), 5); - - /* Both should now apply the changes. */ - AMfree(AMreceiveSyncMessage(test_state->doc1, - test_state->sync_state1, - msg2to1)); - AMchangeHashes missing_deps = AMpush(&test_state->stack, - AMgetMissingDeps(test_state->doc1, - NULL), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesSize(&missing_deps), 0); - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->doc1, AM_ROOT, "x", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 4); - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->doc1, AM_ROOT, "y", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 4); - - AMfree(AMreceiveSyncMessage(test_state->doc2, - test_state->sync_state2, - msg1to2)); - missing_deps = AMpush(&test_state->stack, - AMgetMissingDeps(test_state->doc2, NULL), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesSize(&missing_deps), 0); - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->doc2, AM_ROOT, "x", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 4); - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->doc2, AM_ROOT, "y", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 4); - - /* The response acknowledges that the changes were received and sends no - * further changes. 
*/ - msg1to2 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - msg1to2_changes = AMsyncMessageChanges(msg1to2); - assert_int_equal(AMchangesSize(&msg1to2_changes), 0); - msg2to1 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc2, - test_state->sync_state2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - msg2to1_changes = AMsyncMessageChanges(msg2to1); - assert_int_equal(AMchangesSize(&msg2to1_changes), 0); - - /* After receiving acknowledgements their shared heads should be equal. */ - AMfree(AMreceiveSyncMessage(test_state->doc1, - test_state->sync_state1, - msg2to1)); - AMfree(AMreceiveSyncMessage(test_state->doc2, - test_state->sync_state2, - msg1to2)); - - /* They're synchronized so no more messages are required. */ - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc1, test_state->sync_state1), - AM_VALUE_VOID, - cmocka_cb); - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc2, test_state->sync_state2), - AM_VALUE_VOID, - cmocka_cb); - - /* If we make one more change and start synchronizing then its "last - * sync" property should be updated. 
*/ - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 5)); - AMcommit(test_state->doc1, NULL, &time); - msg1to2 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - msg1to2_haves = AMsyncMessageHaves(msg1to2); - msg1to2_have = AMsyncHavesNext(&msg1to2_haves, 1); - msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); - AMbyteSpan msg1to2_last_sync_next = AMchangeHashesNext(&msg1to2_last_sync, 1); - assert_int_equal(msg1to2_last_sync_next.count, head1.count); - assert_memory_equal(msg1to2_last_sync_next.src, head1.src, head1.count); - msg1to2_last_sync_next = AMchangeHashesNext(&msg1to2_last_sync, 1); - assert_int_equal(msg1to2_last_sync_next.count, head2.count); - assert_memory_equal(msg1to2_last_sync_next.src, head2.src, head2.count); -} - -/** - * \brief Data sync protocol with docs already in sync, documents with data and - * it should assume sent changes were received until we hear otherwise. 
- */ -static void test_converged_assume_sent_changes_were_received(void **state) { - TestState* test_state = *state; - AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - - AMobjId const* items = AMpush(&test_state->stack, - AMmapPutObject(test_state->doc1, - AM_ROOT, - "items", - AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - time_t const time = 0; - AMcommit(test_state->doc1, NULL, &time); - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - - AMfree(AMlistPutStr(test_state->doc1, items, 0, true, "x")); - AMcommit(test_state->doc1, NULL, &time); - AMsyncMessage const* message = AMpush(&test_state->stack, - AMgenerateSyncMessage( - test_state->doc1, - test_state->sync_state1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - AMchanges message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 1); - - AMfree(AMlistPutStr(test_state->doc1, items, 1, true, "y")); - AMcommit(test_state->doc1, NULL, &time); - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 1); - - AMfree(AMlistPutStr(test_state->doc1, items, 2, true, "z")); - AMcommit(test_state->doc1, NULL, &time); - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->doc1, - test_state->sync_state1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 1); -} - -/** - * \brief Data sync protocol with docs already in sync, documents with data and 
- * it should work regardless of who initiates the exchange. - */ -static void test_converged_works_regardless_of_who_initiates(void **state) { - /* Create & synchronize two nodes. */ - TestState* test_state = *state; - - time_t const time = 0; - for (size_t value = 0; value != 5; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - } - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - - /* Modify the first node further. */ - for (size_t value = 5; value != 10; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - } - - assert_false(AMequal(test_state->doc1, test_state->doc2)); - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - assert_true(AMequal(test_state->doc1, test_state->doc2)); -} - -/** - * \brief Data sync protocol with diverged documents and it should work without - * prior sync state. - */ -static void test_diverged_works_without_prior_sync_state(void **state) { - /* Scenario: - * ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 - * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - * `-- c15 <-- c16 <-- c17 - * lastSync is undefined. */ - - /* Create two peers both with divergent commits. 
*/ - TestState* test_state = *state; - AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - time_t const time = 0; - for (size_t value = 0; value != 10; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - } - - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - - for (size_t value = 10; value != 15; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - } - for (size_t value = 15; value != 18; ++value) { - AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "x", value)); - AMcommit(test_state->doc2, NULL, &time); - } - - assert_false(AMequal(test_state->doc1, test_state->doc2)); - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - assert_true(AMequal(test_state->doc1, test_state->doc2)); -} - -/** - * \brief Data sync protocol with diverged documents and it should work with - * prior sync state. - */ -static void test_diverged_works_with_prior_sync_state(void **state) { - /* Scenario: - * ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 - * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - * `-- c15 <-- c16 <-- c17 - * lastSync is c9. */ - - /* Create two peers both with divergent commits. 
*/ - TestState* test_state = *state; - AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - time_t const time = 0; - for (size_t value = 0; value != 10; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - } - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - - for (size_t value = 10; value != 15; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - } - for (size_t value = 15; value != 18; ++value) { - AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "x", value)); - AMcommit(test_state->doc2, NULL, &time); - } - AMbyteSpan encoded = AMpush(&test_state->stack, - AMsyncStateEncode(test_state->sync_state1), - AM_VALUE_BYTES, - cmocka_cb).bytes; - AMsyncState* sync_state1 = AMpush(&test_state->stack, - AMsyncStateDecode(encoded.src, - encoded.count), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; - encoded = AMpush(&test_state->stack, - AMsyncStateEncode(test_state->sync_state2), - AM_VALUE_BYTES, - cmocka_cb).bytes; - AMsyncState* sync_state2 = AMpush(&test_state->stack, - AMsyncStateDecode(encoded.src, - encoded.count), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; - - assert_false(AMequal(test_state->doc1, test_state->doc2)); - sync(test_state->doc1, test_state->doc2, sync_state1, sync_state2); - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - 
assert_true(AMequal(test_state->doc1, test_state->doc2)); -} - -/** - * \brief Data sync protocol with diverged documents and it should ensure - * non-empty state after synchronization. - */ -static void test_diverged_ensure_not_empty_after_sync(void **state) { - TestState* test_state = *state; - AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - - time_t const time = 0; - for (size_t value = 0; value != 3; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - } - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes shared_heads1 = AMsyncStateSharedHeads(test_state->sync_state1); - assert_int_equal(AMchangeHashesCmp(&shared_heads1, &heads1), 0); - AMchangeHashes shared_heads2 = AMsyncStateSharedHeads(test_state->sync_state2); - assert_int_equal(AMchangeHashesCmp(&shared_heads2, &heads1), 0); -} - -/** - * \brief Data sync protocol with diverged documents and it should - * re-synchronize after one node crashed with data loss. - */ -static void test_diverged_resync_after_node_crash_with_data_loss(void **state) { - /* Scenario: - * (r) (n2) (n1) - * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - * n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync - * is c2. - * We want to successfully sync (n1) with (r), even though (n1) believes - * it's talking to (n2). 
*/ - TestState* test_state = *state; - AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - - /* n1 makes three changes which we synchronize to n2. */ - time_t const time = 0; - for (size_t value = 0; value != 3; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - } - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - - /* Save a copy of n2 as "r" to simulate recovering from a crash. */ - AMdoc* r = AMpush(&test_state->stack, - AMclone(test_state->doc2), - AM_VALUE_DOC, - cmocka_cb).doc; - AMbyteSpan encoded = AMpush(&test_state->stack, - AMsyncStateEncode(test_state->sync_state2), - AM_VALUE_BYTES, - cmocka_cb).bytes; - AMsyncState* sync_stater = AMpush(&test_state->stack, - AMsyncStateDecode(encoded.src, - encoded.count), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; - /* Synchronize another few commits. */ - for (size_t value = 3; value != 6; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - } - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - /* Everyone should be on the same page here. 
*/ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - assert_true(AMequal(test_state->doc1, test_state->doc2)); - - /* Now make a few more changes and then attempt to synchronize the - * fully-up-to-date n1 with with the confused r. */ - for (size_t value = 6; value != 9; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - } - heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes headsr = AMpush(&test_state->stack, - AMgetHeads(r), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_not_equal(AMchangeHashesCmp(&heads1, &headsr), 0); - assert_false(AMequal(test_state->doc1, r)); - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->doc1, AM_ROOT, "x", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 8); - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(r, AM_ROOT, "x", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 2); - sync(test_state->doc1, - r, - test_state->sync_state1, - sync_stater); - heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - headsr = AMpush(&test_state->stack, - AMgetHeads(r), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &headsr), 0); - assert_true(AMequal(test_state->doc1, r)); -} - -/** - * \brief Data sync protocol with diverged documents and it should resync after - * one node experiences data loss without disconnecting. 
- */ -static void test_diverged_resync_after_data_loss_without_disconnection(void **state) { - TestState* test_state = *state; - AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - - /* n1 makes three changes which we synchronize to n2. */ - time_t const time = 0; - for (size_t value = 0; value != 3; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", value)); - AMcommit(test_state->doc1, NULL, &time); - } - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - assert_true(AMequal(test_state->doc1, test_state->doc2)); - - AMdoc* doc2_after_data_loss = AMpush(&test_state->stack, - AMcreate(), - AM_VALUE_DOC, - cmocka_cb).doc; - AMfree(AMsetActorId(doc2_after_data_loss, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - - /* "n2" now has no data, but n1 still thinks it does. Note we don't do - * decodeSyncState(encodeSyncState(s1)) in order to simulate data loss - * without disconnecting. 
*/ - AMsyncState* sync_state2_after_data_loss = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; - sync(test_state->doc1, - doc2_after_data_loss, - test_state->sync_state1, - sync_state2_after_data_loss); - heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - heads2 = AMpush(&test_state->stack, - AMgetHeads(doc2_after_data_loss), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - assert_true(AMequal(test_state->doc1, doc2_after_data_loss)); -} - -/** - * \brief Data sync protocol with diverged documents and it should handle - * changes concurrent to the last sync heads. - */ -static void test_diverged_handles_concurrent_changes(void **state) { - TestState* test_state = *state; - AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMdoc* doc3 = AMpush(&test_state->stack, - AMcreate(), - AM_VALUE_DOC, - cmocka_cb).doc; - AMfree(AMsetActorId(doc3, AMpush(&test_state->stack, - AMactorIdInitStr("fedcba98"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMsyncState* sync_state12 = test_state->sync_state1; - AMsyncState* sync_state21 = test_state->sync_state2; - AMsyncState* sync_state23 = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; - AMsyncState* sync_state32 = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; - - /* Change 1 is known to all three nodes. 
*/ - time_t const time = 0; - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 1)); - AMcommit(test_state->doc1, NULL, &time); - sync(test_state->doc1, test_state->doc2, sync_state12, sync_state21); - sync(test_state->doc2, doc3, sync_state23, sync_state32); - - /* Change 2 is known to n1 and n2. */ - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 2)); - AMcommit(test_state->doc1, NULL, &time); - sync(test_state->doc1, test_state->doc2, sync_state12, sync_state21); - - /* Each of the three nodes makes one change (changes 3, 4, 5). */ - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 3)); - AMcommit(test_state->doc1, NULL, &time); - AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "x", 4)); - AMcommit(test_state->doc2, NULL, &time); - AMfree(AMmapPutUint(doc3, AM_ROOT, "x", 5)); - AMcommit(doc3, NULL, &time); - - /* Apply n3's latest change to n2. */ - AMchanges changes = AMpush(&test_state->stack, - AMgetLastLocalChange(doc3), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(test_state->doc2, &changes)); - - /* Now sync n1 and n2. n3's change is concurrent to n1 and n2's last sync - * heads. */ - sync(test_state->doc1, test_state->doc2, sync_state12, sync_state21); - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - assert_true(AMequal(test_state->doc1, test_state->doc2)); -} - -/** - * \brief Data sync protocol with diverged documents and it should handle - * histories with lots of branching and merging. 
- */ -static void test_diverged_handles_histories_of_branching_and_merging(void **state) { - TestState* test_state = *state; - AMfree(AMsetActorId(test_state->doc1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->doc2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMdoc* doc3 = AMpush(&test_state->stack, - AMcreate(), - AM_VALUE_DOC, - cmocka_cb).doc; - AMfree(AMsetActorId(doc3, AMpush(&test_state->stack, - AMactorIdInitStr("fedcba98"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - time_t const time = 0; - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "x", 0)); - AMcommit(test_state->doc1, NULL, &time); - AMchanges changes = AMpush(&test_state->stack, - AMgetLastLocalChange(test_state->doc1), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(test_state->doc2, &changes)); - AMfree(AMapplyChanges(doc3, &changes)); - AMfree(AMmapPutUint(doc3, AM_ROOT, "x", 1)); - AMcommit(doc3, NULL, &time); - - /* - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 - * / \/ \/ \/ - * / /\ /\ /\ - * c0 <---- n2c1 <------ n2c2 <------ n2c3 <-- etc. 
<-- n2c20 <------ n2c21 - * \ / - * ---------------------------------------------- n3c1 <----- - */ - for (size_t value = 1; value != 20; ++value) { - AMfree(AMmapPutUint(test_state->doc1, AM_ROOT, "n1", value)); - AMcommit(test_state->doc1, NULL, &time); - AMfree(AMmapPutUint(test_state->doc2, AM_ROOT, "n2", value)); - AMcommit(test_state->doc2, NULL, &time); - AMchanges changes1 = AMpush(&test_state->stack, - AMgetLastLocalChange(test_state->doc1), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMchanges changes2 = AMpush(&test_state->stack, - AMgetLastLocalChange(test_state->doc2), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(test_state->doc1, &changes2)); - AMfree(AMapplyChanges(test_state->doc2, &changes1)); - } - - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - - /* Having n3's last change concurrent to the last sync heads forces us into - * the slower code path. */ - AMchanges changes3 = AMpush(&test_state->stack, - AMgetLastLocalChange(doc3), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(test_state->doc2, &changes3)); - AMfree(AMmapPutStr(test_state->doc1, AM_ROOT, "n1", "final")); - AMcommit(test_state->doc1, NULL, &time); - AMfree(AMmapPutStr(test_state->doc2, AM_ROOT, "n2", "final")); - AMcommit(test_state->doc2, NULL, &time); - - sync(test_state->doc1, - test_state->doc2, - test_state->sync_state1, - test_state->sync_state2); - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->doc2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); - assert_true(AMequal(test_state->doc1, test_state->doc2)); -} - -int run_sync_tests(void) { - const struct CMUnitTest tests[] = { - 
cmocka_unit_test_setup_teardown(test_converged_empty_local_doc_reply_no_local_data, setup, teardown), - cmocka_unit_test_setup_teardown(test_converged_empty_local_doc_no_reply, setup, teardown), - cmocka_unit_test_setup_teardown(test_converged_equal_heads_no_reply, setup, teardown), - cmocka_unit_test_setup_teardown(test_converged_offer_all_changes_from_nothing, setup, teardown), - cmocka_unit_test_setup_teardown(test_converged_sync_peers_with_uneven_commits, setup, teardown), - cmocka_unit_test_setup_teardown(test_converged_works_with_prior_sync_state, setup, teardown), - cmocka_unit_test_setup_teardown(test_converged_no_message_once_synced, setup, teardown), - cmocka_unit_test_setup_teardown(test_converged_allow_simultaneous_messages, setup, teardown), - cmocka_unit_test_setup_teardown(test_converged_assume_sent_changes_were_received, setup, teardown), - cmocka_unit_test_setup_teardown(test_converged_works_regardless_of_who_initiates, setup, teardown), - cmocka_unit_test_setup_teardown(test_diverged_works_without_prior_sync_state, setup, teardown), - cmocka_unit_test_setup_teardown(test_diverged_works_with_prior_sync_state, setup, teardown), - cmocka_unit_test_setup_teardown(test_diverged_ensure_not_empty_after_sync, setup, teardown), - cmocka_unit_test_setup_teardown(test_diverged_resync_after_node_crash_with_data_loss, setup, teardown), - cmocka_unit_test_setup_teardown(test_diverged_resync_after_data_loss_without_disconnection, setup, teardown), - cmocka_unit_test_setup_teardown(test_diverged_handles_concurrent_changes, setup, teardown), - cmocka_unit_test_setup_teardown(test_diverged_handles_histories_of_branching_and_merging, setup, teardown), - }; - - return cmocka_run_group_tests(tests, NULL, NULL); -} diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 1a29b962..07a4e2ec 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -45,16 +45,16 @@ describe('Automerge', () => { doc.free() }) - it('getting a 
nonexistant prop does not throw an error', () => { + it('getting a nonexistent prop does not throw an error', () => { const doc = create() const root = "_root" - const result = doc.getWithType(root,"hello") - assert.deepEqual(result,undefined) + const result = doc.getWithType(root, "hello") + assert.deepEqual(result, undefined) doc.free() }) it('should be able to set and get a simple value', () => { - const doc : Automerge = create("aabbcc") + const doc: Automerge = create("aabbcc") const root = "_root" let result @@ -70,74 +70,74 @@ describe('Automerge', () => { doc.putObject(root, "list", []); doc.put(root, "null", null) - result = doc.getWithType(root,"hello") - assert.deepEqual(result,["str","world"]) - assert.deepEqual(doc.get("/","hello"),"world") + result = doc.getWithType(root, "hello") + assert.deepEqual(result, ["str", "world"]) + assert.deepEqual(doc.get("/", "hello"), "world") - result = doc.getWithType(root,"number1") - assert.deepEqual(result,["uint",5]) - assert.deepEqual(doc.get("/","number1"),5) + result = doc.getWithType(root, "number1") + assert.deepEqual(result, ["uint", 5]) + assert.deepEqual(doc.get("/", "number1"), 5) - result = doc.getWithType(root,"number2") - assert.deepEqual(result,["int",5]) + result = doc.getWithType(root, "number2") + assert.deepEqual(result, ["int", 5]) - result = doc.getWithType(root,"number3") - assert.deepEqual(result,["f64",5.5]) + result = doc.getWithType(root, "number3") + assert.deepEqual(result, ["f64", 5.5]) - result = doc.getWithType(root,"number4") - assert.deepEqual(result,["f64",5.5]) + result = doc.getWithType(root, "number4") + assert.deepEqual(result, ["f64", 5.5]) - result = doc.getWithType(root,"number5") - assert.deepEqual(result,["int",5]) + result = doc.getWithType(root, "number5") + assert.deepEqual(result, ["int", 5]) - result = doc.getWithType(root,"bool") - assert.deepEqual(result,["boolean",true]) + result = doc.getWithType(root, "bool") + assert.deepEqual(result, ["boolean", true]) 
doc.put(root, "bool", false, "boolean") - result = doc.getWithType(root,"bool") - assert.deepEqual(result,["boolean",false]) + result = doc.getWithType(root, "bool") + assert.deepEqual(result, ["boolean", false]) - result = doc.getWithType(root,"time1") - assert.deepEqual(result,["timestamp",new Date(1000)]) + result = doc.getWithType(root, "time1") + assert.deepEqual(result, ["timestamp", new Date(1000)]) - result = doc.getWithType(root,"time2") - assert.deepEqual(result,["timestamp",new Date(1001)]) + result = doc.getWithType(root, "time2") + assert.deepEqual(result, ["timestamp", new Date(1001)]) - result = doc.getWithType(root,"list") - assert.deepEqual(result,["list","10@aabbcc"]); + result = doc.getWithType(root, "list") + assert.deepEqual(result, ["list", "10@aabbcc"]); - result = doc.getWithType(root,"null") - assert.deepEqual(result,["null",null]); + result = doc.getWithType(root, "null") + assert.deepEqual(result, ["null", null]); doc.free() }) it('should be able to use bytes', () => { const doc = create() - doc.put("_root","data1", new Uint8Array([10,11,12])); - doc.put("_root","data2", new Uint8Array([13,14,15]), "bytes"); + doc.put("_root", "data1", new Uint8Array([10, 11, 12])); + doc.put("_root", "data2", new Uint8Array([13, 14, 15]), "bytes"); const value1 = doc.getWithType("_root", "data1") - assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]); + assert.deepEqual(value1, ["bytes", new Uint8Array([10, 11, 12])]); const value2 = doc.getWithType("_root", "data2") - assert.deepEqual(value2, ["bytes", new Uint8Array([13,14,15])]); + assert.deepEqual(value2, ["bytes", new Uint8Array([13, 14, 15])]); doc.free() }) - it('should be able to make sub objects', () => { + it('should be able to make subobjects', () => { const doc = create() const root = "_root" let result const submap = doc.putObject(root, "submap", {}) doc.put(submap, "number", 6, "uint") - assert.strictEqual(doc.pendingOps(),2) + assert.strictEqual(doc.pendingOps(), 2) - result = 
doc.getWithType(root,"submap") - assert.deepEqual(result,["map",submap]) + result = doc.getWithType(root, "submap") + assert.deepEqual(result, ["map", submap]) - result = doc.getWithType(submap,"number") - assert.deepEqual(result,["uint",6]) + result = doc.getWithType(submap, "number") + assert.deepEqual(result, ["uint", 6]) doc.free() }) @@ -145,22 +145,22 @@ describe('Automerge', () => { const doc = create() const root = "_root" - const submap = doc.putObject(root, "numbers", []) - doc.insert(submap, 0, "a"); - doc.insert(submap, 1, "b"); - doc.insert(submap, 2, "c"); - doc.insert(submap, 0, "z"); + const sublist = doc.putObject(root, "numbers", []) + doc.insert(sublist, 0, "a"); + doc.insert(sublist, 1, "b"); + doc.insert(sublist, 2, "c"); + doc.insert(sublist, 0, "z"); - assert.deepEqual(doc.getWithType(submap, 0),["str","z"]) - assert.deepEqual(doc.getWithType(submap, 1),["str","a"]) - assert.deepEqual(doc.getWithType(submap, 2),["str","b"]) - assert.deepEqual(doc.getWithType(submap, 3),["str","c"]) - assert.deepEqual(doc.length(submap),4) + assert.deepEqual(doc.getWithType(sublist, 0), ["str", "z"]) + assert.deepEqual(doc.getWithType(sublist, 1), ["str", "a"]) + assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b"]) + assert.deepEqual(doc.getWithType(sublist, 3), ["str", "c"]) + assert.deepEqual(doc.length(sublist), 4) - doc.put(submap, 2, "b v2"); + doc.put(sublist, 2, "b v2"); - assert.deepEqual(doc.getWithType(submap, 2),["str","b v2"]) - assert.deepEqual(doc.length(submap),4) + assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b v2"]) + assert.deepEqual(doc.length(sublist), 4) doc.free() }) @@ -168,42 +168,42 @@ describe('Automerge', () => { const doc = create() const root = "_root" - const submap = doc.putObject(root, "letters", []) - doc.insert(submap, 0, "a"); - doc.insert(submap, 0, "b"); - assert.deepEqual(doc.materialize(), { letters: ["b", "a" ] }) - doc.push(submap, "c"); + const sublist = doc.putObject(root, "letters", []) + 
doc.insert(sublist, 0, "a"); + doc.insert(sublist, 0, "b"); + assert.deepEqual(doc.materialize(), { letters: ["b", "a"] }) + doc.push(sublist, "c"); const heads = doc.getHeads() - assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c" ] }) - doc.push(submap, 3, "timestamp"); - assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3) ] }) - doc.splice(submap, 1, 1, ["d","e","f"]); - assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", new Date(3) ] }) - doc.put(submap, 0, "z"); - assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", new Date(3) ] }) - assert.deepEqual(doc.materialize(submap), ["z", "d", "e", "f", "c", new Date(3) ]) - assert.deepEqual(doc.length(submap),6) - assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c" ] }) + assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c"] }) + doc.push(sublist, 3, "timestamp"); + assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3)] }) + doc.splice(sublist, 1, 1, ["d", "e", "f"]); + assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", new Date(3)] }) + doc.put(sublist, 0, "z"); + assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", new Date(3)] }) + assert.deepEqual(doc.materialize(sublist), ["z", "d", "e", "f", "c", new Date(3)]) + assert.deepEqual(doc.length(sublist), 6) + assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c"] }) doc.free() }) - it('should be able delete non-existant props', () => { + it('should be able delete non-existent props', () => { const doc = create() - doc.put("_root", "foo","bar") - doc.put("_root", "bip","bap") + doc.put("_root", "foo", "bar") + doc.put("_root", "bip", "bap") const hash1 = doc.commit() - assert.deepEqual(doc.keys("_root"),["bip","foo"]) + assert.deepEqual(doc.keys("_root"), ["bip", "foo"]) doc.delete("_root", "foo") doc.delete("_root", "baz") const hash2 = doc.commit() - 
assert.deepEqual(doc.keys("_root"),["bip"]) - assert.deepEqual(doc.keys("_root", [hash1]),["bip", "foo"]) - assert.deepEqual(doc.keys("_root", [hash2]),["bip"]) + assert.deepEqual(doc.keys("_root"), ["bip"]) + assert.deepEqual(doc.keys("_root", [hash1]), ["bip", "foo"]) + assert.deepEqual(doc.keys("_root", [hash2]), ["bip"]) doc.free() }) @@ -212,9 +212,9 @@ describe('Automerge', () => { const root = "_root" doc.put(root, "xxx", "xxx"); - assert.deepEqual(doc.getWithType(root, "xxx"),["str","xxx"]) + assert.deepEqual(doc.getWithType(root, "xxx"), ["str", "xxx"]) doc.delete(root, "xxx"); - assert.deepEqual(doc.getWithType(root, "xxx"),undefined) + assert.deepEqual(doc.getWithType(root, "xxx"), undefined) doc.free() }) @@ -223,11 +223,11 @@ describe('Automerge', () => { const root = "_root" doc.put(root, "counter", 10, "counter"); - assert.deepEqual(doc.getWithType(root, "counter"),["counter",10]) + assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 10]) doc.increment(root, "counter", 10); - assert.deepEqual(doc.getWithType(root, "counter"),["counter",20]) + assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 20]) doc.increment(root, "counter", -5); - assert.deepEqual(doc.getWithType(root, "counter"),["counter",15]) + assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 15]) doc.free() }) @@ -237,14 +237,14 @@ describe('Automerge', () => { const text = doc.putObject(root, "text", ""); doc.splice(text, 0, 0, "hello ") - doc.splice(text, 6, 0, ["w","o","r","l","d"]) - doc.splice(text, 11, 0, ["!","?"]) - assert.deepEqual(doc.getWithType(text, 0),["str","h"]) - assert.deepEqual(doc.getWithType(text, 1),["str","e"]) - assert.deepEqual(doc.getWithType(text, 9),["str","l"]) - assert.deepEqual(doc.getWithType(text, 10),["str","d"]) - assert.deepEqual(doc.getWithType(text, 11),["str","!"]) - assert.deepEqual(doc.getWithType(text, 12),["str","?"]) + doc.splice(text, 6, 0, ["w", "o", "r", "l", "d"]) + doc.splice(text, 11, 0, ["!", "?"]) + 
assert.deepEqual(doc.getWithType(text, 0), ["str", "h"]) + assert.deepEqual(doc.getWithType(text, 1), ["str", "e"]) + assert.deepEqual(doc.getWithType(text, 9), ["str", "l"]) + assert.deepEqual(doc.getWithType(text, 10), ["str", "d"]) + assert.deepEqual(doc.getWithType(text, 11), ["str", "!"]) + assert.deepEqual(doc.getWithType(text, 12), ["str", "?"]) doc.free() }) @@ -275,7 +275,7 @@ describe('Automerge', () => { const save3 = doc.saveIncremental(); const saveA = doc.save(); - const saveB = new Uint8Array([... save1, ...save2, ...save3]); + const saveB = new Uint8Array([...save1, ...save2, ...save3]); assert.notDeepEqual(saveA, saveB); @@ -302,10 +302,10 @@ describe('Automerge', () => { const hash2 = doc.commit(); assert.strictEqual(doc.text(text), "hello big bad world") assert.strictEqual(doc.length(text), 19) - assert.strictEqual(doc.text(text, [ hash1 ]), "hello world") - assert.strictEqual(doc.length(text, [ hash1 ]), 11) - assert.strictEqual(doc.text(text, [ hash2 ]), "hello big bad world") - assert.strictEqual(doc.length(text, [ hash2 ]), 19) + assert.strictEqual(doc.text(text, [hash1]), "hello world") + assert.strictEqual(doc.length(text, [hash1]), 11) + assert.strictEqual(doc.text(text, [hash2]), "hello big bad world") + assert.strictEqual(doc.length(text, [hash2]), 19) doc.free() }) @@ -321,16 +321,16 @@ describe('Automerge', () => { doc1.applyChanges(doc2.getChanges(heads)) doc1.applyChanges(doc3.getChanges(heads)) let result = doc1.getAll("_root", "cnt") - assert.deepEqual(result,[ - ['int',20,'2@aaaa'], - ['counter',0,'2@bbbb'], - ['counter',10,'2@cccc'], + assert.deepEqual(result, [ + ['int', 20, '2@aaaa'], + ['counter', 0, '2@bbbb'], + ['counter', 10, '2@cccc'], ]) doc1.increment("_root", "cnt", 5) result = doc1.getAll("_root", "cnt") assert.deepEqual(result, [ - [ 'counter', 5, '2@bbbb' ], - [ 'counter', 15, '2@cccc' ], + ['counter', 5, '2@bbbb'], + ['counter', 15, '2@cccc'], ]) const save1 = doc1.save() @@ -355,16 +355,16 @@ describe('Automerge', 
() => { doc1.applyChanges(doc2.getChanges(heads)) doc1.applyChanges(doc3.getChanges(heads)) let result = doc1.getAll(seq, 0) - assert.deepEqual(result,[ - ['int',20,'3@aaaa'], - ['counter',0,'3@bbbb'], - ['counter',10,'3@cccc'], + assert.deepEqual(result, [ + ['int', 20, '3@aaaa'], + ['counter', 0, '3@bbbb'], + ['counter', 10, '3@cccc'], ]) doc1.increment(seq, 0, 5) result = doc1.getAll(seq, 0) assert.deepEqual(result, [ - [ 'counter', 5, '3@bbbb' ], - [ 'counter', 15, '3@cccc' ], + ['counter', 5, '3@bbbb'], + ['counter', 15, '3@cccc'], ]) const save = doc1.save() @@ -378,17 +378,17 @@ describe('Automerge', () => { it('paths can be used instead of objids', () => { const doc = create("aaaa") - doc.putObject("_root","list",[{ foo: "bar"}, [1,2,3]]) - assert.deepEqual(doc.materialize("/"), { list: [{ foo: "bar"}, [1,2,3]] }) - assert.deepEqual(doc.materialize("/list"), [{ foo: "bar"}, [1,2,3]]) - assert.deepEqual(doc.materialize("/list/0"), { foo: "bar"}) + doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]]) + assert.deepEqual(doc.materialize("/"), { list: [{ foo: "bar" }, [1, 2, 3]] }) + assert.deepEqual(doc.materialize("/list"), [{ foo: "bar" }, [1, 2, 3]]) + assert.deepEqual(doc.materialize("/list/0"), { foo: "bar" }) }) it('should be able to fetch changes by hash', () => { const doc1 = create("aaaa") const doc2 = create("bbbb") - doc1.put("/","a","b") - doc2.put("/","b","c") + doc1.put("/", "a", "b") + doc2.put("/", "b", "c") const head1 = doc1.getHeads() const head2 = doc2.getHeads() const change1 = doc1.getChangeByHash(head1[0]) @@ -400,78 +400,78 @@ describe('Automerge', () => { it('recursive sets are possible', () => { const doc = create("aaaa") - const l1 = doc.putObject("_root","list",[{ foo: "bar"}, [1,2,3]]) + const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]]) const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) - const l3 = doc.putObject("_root","info1","hello world") // 'text' object - doc.put("_root","info2","hello world") 
// 'str' - const l4 = doc.putObject("_root","info3","hello world") + const l3 = doc.putObject("_root", "info1", "hello world") // 'text' object + doc.put("_root", "info2", "hello world") // 'str' + const l4 = doc.putObject("_root", "info3", "hello world") assert.deepEqual(doc.materialize(), { - "list": [ { zip: ["a", "b"] }, { foo: "bar"}, [ 1,2,3]], + "list": [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]], "info1": "hello world", "info2": "hello world", "info3": "hello world", }) - assert.deepEqual(doc.materialize(l2), { zip: ["a","b"] }) - assert.deepEqual(doc.materialize(l1), [ { zip: ["a","b"] }, { foo: "bar" }, [ 1,2,3] ]) + assert.deepEqual(doc.materialize(l2), { zip: ["a", "b"] }) + assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]]) assert.deepEqual(doc.materialize(l4), "hello world") doc.free() }) it('only returns an object id when objects are created', () => { const doc = create("aaaa") - const r1 = doc.put("_root","foo","bar") - const r2 = doc.putObject("_root","list",[]) - const r3 = doc.put("_root","counter",10, "counter") - const r4 = doc.increment("_root","counter",1) - const r5 = doc.delete("_root","counter") - const r6 = doc.insert(r2,0,10); - const r7 = doc.insertObject(r2,0,{}); - const r8 = doc.splice(r2,1,0,["a","b","c"]); + const r1 = doc.put("_root", "foo", "bar") + const r2 = doc.putObject("_root", "list", []) + const r3 = doc.put("_root", "counter", 10, "counter") + const r4 = doc.increment("_root", "counter", 1) + const r5 = doc.delete("_root", "counter") + const r6 = doc.insert(r2, 0, 10); + const r7 = doc.insertObject(r2, 0, {}); + const r8 = doc.splice(r2, 1, 0, ["a", "b", "c"]); //let r9 = doc.splice(r2,1,0,["a",[],{},"d"]); - assert.deepEqual(r1,null); - assert.deepEqual(r2,"2@aaaa"); - assert.deepEqual(r3,null); - assert.deepEqual(r4,null); - assert.deepEqual(r5,null); - assert.deepEqual(r6,null); - assert.deepEqual(r7,"7@aaaa"); - assert.deepEqual(r8,null); + assert.deepEqual(r1, null); + 
assert.deepEqual(r2, "2@aaaa"); + assert.deepEqual(r3, null); + assert.deepEqual(r4, null); + assert.deepEqual(r5, null); + assert.deepEqual(r6, null); + assert.deepEqual(r7, "7@aaaa"); + assert.deepEqual(r8, null); //assert.deepEqual(r9,["12@aaaa","13@aaaa"]); doc.free() }) it('objects without properties are preserved', () => { const doc1 = create("aaaa") - const a = doc1.putObject("_root","a",{}); - const b = doc1.putObject("_root","b",{}); - const c = doc1.putObject("_root","c",{}); - const d = doc1.put(c,"d","dd"); + const a = doc1.putObject("_root", "a", {}); + const b = doc1.putObject("_root", "b", {}); + const c = doc1.putObject("_root", "c", {}); + const d = doc1.put(c, "d", "dd"); const saved = doc1.save(); const doc2 = load(saved); - assert.deepEqual(doc2.getWithType("_root","a"),["map",a]) - assert.deepEqual(doc2.keys(a),[]) - assert.deepEqual(doc2.getWithType("_root","b"),["map",b]) - assert.deepEqual(doc2.keys(b),[]) - assert.deepEqual(doc2.getWithType("_root","c"),["map",c]) - assert.deepEqual(doc2.keys(c),["d"]) - assert.deepEqual(doc2.getWithType(c,"d"),["str","dd"]) + assert.deepEqual(doc2.getWithType("_root", "a"), ["map", a]) + assert.deepEqual(doc2.keys(a), []) + assert.deepEqual(doc2.getWithType("_root", "b"), ["map", b]) + assert.deepEqual(doc2.keys(b), []) + assert.deepEqual(doc2.getWithType("_root", "c"), ["map", c]) + assert.deepEqual(doc2.keys(c), ["d"]) + assert.deepEqual(doc2.getWithType(c, "d"), ["str", "dd"]) doc1.free() doc2.free() }) it('should allow you to forkAt a heads', () => { const A = create("aaaaaa") - A.put("/", "key1","val1"); - A.put("/", "key2","val2"); + A.put("/", "key1", "val1"); + A.put("/", "key2", "val2"); const heads1 = A.getHeads(); const B = A.fork("bbbbbb") - A.put("/", "key3","val3"); - B.put("/", "key4","val4"); + A.put("/", "key3", "val3"); + B.put("/", "key4", "val4"); A.merge(B) const heads2 = A.getHeads(); - A.put("/", "key5","val5"); - assert.deepEqual(A.forkAt(heads1).materialize("/"), 
A.materialize("/",heads1)) - assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/",heads2)) + A.put("/", "key5", "val5"); + assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/", heads1)) + assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/", heads2)) }) it('should handle merging text conflicts then saving & loading', () => { @@ -481,7 +481,7 @@ describe('Automerge', () => { const B = A.fork() - assert.deepEqual(B.getWithType("_root","text"), [ "text", At]) + assert.deepEqual(B.getWithType("_root", "text"), ["text", At]) B.splice(At, 4, 1) B.splice(At, 4, 0, '!') @@ -506,7 +506,7 @@ describe('Automerge', () => { doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - {action: 'put', obj: '_root', key: 'hello', value: 'world', datatype: 'str', conflict: false} + { action: 'put', obj: '_root', key: 'hello', value: 'world', datatype: 'str', conflict: false } ]) doc1.free() doc2.free() @@ -514,13 +514,13 @@ describe('Automerge', () => { it('should include nested object creation', () => { const doc1 = create('aaaa'), doc2 = create('bbbb') - doc1.putObject('_root', 'birds', {friday: {robins: 3}}) + doc1.putObject('_root', 'birds', { friday: { robins: 3 } }) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - {action: 'put', obj: '_root', key: 'birds', value: '1@aaaa', datatype: 'map', conflict: false}, - {action: 'put', obj: '1@aaaa', key: 'friday', value: '2@aaaa', datatype: 'map', conflict: false}, - {action: 'put', obj: '2@aaaa', key: 'robins', value: 3, datatype: 'int', conflict: false} + { action: 'put', obj: '_root', key: 'birds', value: '1@aaaa', datatype: 'map', conflict: false }, + { action: 'put', obj: '1@aaaa', key: 'friday', value: '2@aaaa', datatype: 'map', conflict: false }, + { action: 'put', obj: '2@aaaa', key: 'robins', value: 3, datatype: 'int', conflict: false } ]) doc1.free() doc2.free() @@ 
-534,8 +534,8 @@ describe('Automerge', () => { doc1.delete('_root', 'favouriteBird') doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - {action: 'put', obj: '_root', key: 'favouriteBird', value: 'Robin', datatype: 'str', conflict: false}, - {action: 'delete', obj: '_root', key: 'favouriteBird'} + { action: 'put', obj: '_root', key: 'favouriteBird', value: 'Robin', datatype: 'str', conflict: false }, + { action: 'delete', obj: '_root', key: 'favouriteBird' } ]) doc1.free() doc2.free() @@ -547,9 +547,9 @@ describe('Automerge', () => { doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - {action: 'put', obj: '_root', key: 'birds', value: '1@aaaa', datatype: 'list', conflict: false}, - {action: 'insert', obj: '1@aaaa', key: 0, value: 'Goldfinch', datatype: 'str'}, - {action: 'insert', obj: '1@aaaa', key: 1, value: 'Chaffinch', datatype: 'str'} + { action: 'put', obj: '_root', key: 'birds', value: '1@aaaa', datatype: 'list', conflict: false }, + { action: 'insert', obj: '1@aaaa', key: 0, value: 'Goldfinch', datatype: 'str' }, + { action: 'insert', obj: '1@aaaa', key: 1, value: 'Chaffinch', datatype: 'str' } ]) doc1.free() doc2.free() @@ -559,13 +559,13 @@ describe('Automerge', () => { const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.putObject('_root', 'birds', []) doc2.loadIncremental(doc1.saveIncremental()) - doc1.insertObject('1@aaaa', 0, {species: 'Goldfinch', count: 3}) + doc1.insertObject('1@aaaa', 0, { species: 'Goldfinch', count: 3 }) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - {action: 'insert', obj: '1@aaaa', key: 0, value: '2@aaaa', datatype: 'map'}, - {action: 'put', obj: '2@aaaa', key: 'species', value: 'Goldfinch', datatype: 'str', conflict: false}, - {action: 'put', obj: '2@aaaa', key: 'count', value: 3, datatype: 'int', conflict: false} + { action: 'insert', obj: '1@aaaa', key: 0, value: 
'2@aaaa', datatype: 'map' }, + { action: 'put', obj: '2@aaaa', key: 'species', value: 'Goldfinch', datatype: 'str', conflict: false }, + { action: 'put', obj: '2@aaaa', key: 'count', value: 3, datatype: 'int', conflict: false } ]) doc1.free() doc2.free() @@ -582,8 +582,8 @@ describe('Automerge', () => { assert.deepEqual(doc1.getWithType('1@aaaa', 0), ['str', 'Chaffinch']) assert.deepEqual(doc1.getWithType('1@aaaa', 1), ['str', 'Greenfinch']) assert.deepEqual(doc2.popPatches(), [ - {action: 'delete', obj: '1@aaaa', key: 0}, - {action: 'insert', obj: '1@aaaa', key: 1, value: 'Greenfinch', datatype: 'str'} + { action: 'delete', obj: '1@aaaa', key: 0 }, + { action: 'insert', obj: '1@aaaa', key: 1, value: 'Greenfinch', datatype: 'str' } ]) doc1.free() doc2.free() @@ -608,16 +608,16 @@ describe('Automerge', () => { assert.deepEqual([0, 1, 2, 3].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) assert.deepEqual([0, 1, 2, 3].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) assert.deepEqual(doc3.popPatches(), [ - {action: 'insert', obj: '1@aaaa', key: 0, value: 'c', datatype: 'str'}, - {action: 'insert', obj: '1@aaaa', key: 1, value: 'd', datatype: 'str'}, - {action: 'insert', obj: '1@aaaa', key: 0, value: 'a', datatype: 'str'}, - {action: 'insert', obj: '1@aaaa', key: 1, value: 'b', datatype: 'str'} + { action: 'insert', obj: '1@aaaa', key: 0, value: 'c', datatype: 'str' }, + { action: 'insert', obj: '1@aaaa', key: 1, value: 'd', datatype: 'str' }, + { action: 'insert', obj: '1@aaaa', key: 0, value: 'a', datatype: 'str' }, + { action: 'insert', obj: '1@aaaa', key: 1, value: 'b', datatype: 'str' } ]) assert.deepEqual(doc4.popPatches(), [ - {action: 'insert', obj: '1@aaaa', key: 0, value: 'a', datatype: 'str'}, - {action: 'insert', obj: '1@aaaa', key: 1, value: 'b', datatype: 'str'}, - {action: 'insert', obj: '1@aaaa', key: 2, value: 'c', datatype: 'str'}, - {action: 'insert', obj: '1@aaaa', key: 3, value: 'd', datatype: 
'str'} + { action: 'insert', obj: '1@aaaa', key: 0, value: 'a', datatype: 'str' }, + { action: 'insert', obj: '1@aaaa', key: 1, value: 'b', datatype: 'str' }, + { action: 'insert', obj: '1@aaaa', key: 2, value: 'c', datatype: 'str' }, + { action: 'insert', obj: '1@aaaa', key: 3, value: 'd', datatype: 'str' } ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) @@ -641,16 +641,16 @@ describe('Automerge', () => { assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) assert.deepEqual(doc3.popPatches(), [ - {action: 'insert', obj: '1@aaaa', key: 2, value: 'e', datatype: 'str'}, - {action: 'insert', obj: '1@aaaa', key: 3, value: 'f', datatype: 'str'}, - {action: 'insert', obj: '1@aaaa', key: 2, value: 'c', datatype: 'str'}, - {action: 'insert', obj: '1@aaaa', key: 3, value: 'd', datatype: 'str'} + { action: 'insert', obj: '1@aaaa', key: 2, value: 'e', datatype: 'str' }, + { action: 'insert', obj: '1@aaaa', key: 3, value: 'f', datatype: 'str' }, + { action: 'insert', obj: '1@aaaa', key: 2, value: 'c', datatype: 'str' }, + { action: 'insert', obj: '1@aaaa', key: 3, value: 'd', datatype: 'str' } ]) assert.deepEqual(doc4.popPatches(), [ - {action: 'insert', obj: '1@aaaa', key: 2, value: 'c', datatype: 'str'}, - {action: 'insert', obj: '1@aaaa', key: 3, value: 'd', datatype: 'str'}, - {action: 'insert', obj: '1@aaaa', key: 4, value: 'e', datatype: 'str'}, - {action: 'insert', obj: '1@aaaa', key: 5, value: 'f', datatype: 'str'} + { action: 'insert', obj: '1@aaaa', key: 2, value: 'c', datatype: 'str' }, + { action: 'insert', obj: '1@aaaa', key: 3, value: 'd', datatype: 'str' }, + { action: 'insert', obj: '1@aaaa', key: 4, value: 'e', datatype: 'str' }, + { action: 'insert', obj: '1@aaaa', key: 5, value: 'f', datatype: 'str' } ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) @@ 
-669,12 +669,12 @@ describe('Automerge', () => { assert.deepEqual(doc4.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc4.getAll('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - {action: 'put', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', conflict: false}, - {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} + { action: 'put', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', conflict: false }, + { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true } ]) assert.deepEqual(doc4.popPatches(), [ - {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false}, - {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} + { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false }, + { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true } ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) @@ -704,16 +704,16 @@ describe('Automerge', () => { ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] ]) assert.deepEqual(doc1.popPatches(), [ - {action: 'put', obj: '_root', key: 'bird', value: 'Chaffinch', datatype: 'str', conflict: true}, - {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} + { action: 'put', obj: '_root', key: 'bird', value: 'Chaffinch', datatype: 'str', conflict: true }, + { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true } ]) assert.deepEqual(doc2.popPatches(), [ - {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true}, - {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} 
+ { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true }, + { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true } ]) assert.deepEqual(doc3.popPatches(), [ - {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true}, - {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true} + { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true }, + { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true } ]) doc1.free(); doc2.free(); doc3.free() }) @@ -730,9 +730,9 @@ describe('Automerge', () => { doc3.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc3.popPatches(), [ - {action: 'put', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', conflict: false}, - {action: 'put', obj: '_root', key: 'bird', value: 'Chaffinch', datatype: 'str', conflict: true}, - {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} + { action: 'put', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', conflict: false }, + { action: 'put', obj: '_root', key: 'bird', value: 'Chaffinch', datatype: 'str', conflict: true }, + { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false } ]) doc1.free(); doc2.free(); doc3.free() }) @@ -753,10 +753,10 @@ describe('Automerge', () => { assert.deepEqual(doc2.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc2.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc1.popPatches(), [ - {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} + { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false } ]) 
assert.deepEqual(doc2.popPatches(), [ - {action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false} + { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false } ]) doc1.free(); doc2.free() }) @@ -780,12 +780,12 @@ describe('Automerge', () => { assert.deepEqual(doc4.getWithType('1@aaaa', 0), ['str', 'Redwing']) assert.deepEqual(doc4.getAll('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - {action: 'put', obj: '1@aaaa', key: 0, value: 'Song Thrush', datatype: 'str', conflict: false}, - {action: 'put', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: true} + { action: 'put', obj: '1@aaaa', key: 0, value: 'Song Thrush', datatype: 'str', conflict: false }, + { action: 'put', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: true } ]) assert.deepEqual(doc4.popPatches(), [ - {action: 'put', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: false}, - {action: 'put', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: true} + { action: 'put', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: false }, + { action: 'put', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: true } ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) @@ -811,16 +811,16 @@ describe('Automerge', () => { assert.deepEqual(doc4.getAll('1@aaaa', 0), [['str', 'Ring-necked parakeet', '5@bbbb']]) assert.deepEqual(doc4.getAll('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - {action: 'delete', obj: '1@aaaa', key: 0}, - {action: 'put', obj: '1@aaaa', key: 1, value: 'Song Thrush', datatype: 'str', conflict: false}, - {action: 'insert', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str'}, - {action: 'put', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', 
conflict: true} + { action: 'delete', obj: '1@aaaa', key: 0 }, + { action: 'put', obj: '1@aaaa', key: 1, value: 'Song Thrush', datatype: 'str', conflict: false }, + { action: 'insert', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str' }, + { action: 'put', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: true } ]) assert.deepEqual(doc4.popPatches(), [ - {action: 'put', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str', conflict: false}, - {action: 'put', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: false}, - {action: 'put', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str', conflict: false}, - {action: 'put', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: true} + { action: 'put', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str', conflict: false }, + { action: 'put', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: false }, + { action: 'put', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str', conflict: false }, + { action: 'put', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: true } ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) @@ -837,14 +837,14 @@ describe('Automerge', () => { doc3.loadIncremental(change2) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa'], ['str', 'Wren', '1@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - {action: 'put', obj: '_root', key: 'bird', value: 'Robin', datatype: 'str', conflict: false}, - {action: 'put', obj: '_root', key: 'bird', value: 'Wren', datatype: 'str', conflict: true} + { action: 'put', obj: '_root', key: 'bird', value: 'Robin', datatype: 'str', conflict: false }, + { action: 'put', obj: '_root', key: 'bird', value: 'Wren', datatype: 'str', conflict: true } ]) doc3.loadIncremental(change3) assert.deepEqual(doc3.getWithType('_root', 'bird'), ['str', 'Robin']) 
assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa']]) assert.deepEqual(doc3.popPatches(), [ - {action: 'put', obj: '_root', key: 'bird', value: 'Robin', datatype: 'str', conflict: false} + { action: 'put', obj: '_root', key: 'bird', value: 'Robin', datatype: 'str', conflict: false } ]) doc1.free(); doc2.free(); doc3.free() }) @@ -852,7 +852,7 @@ describe('Automerge', () => { it('should handle conflicting nested objects', () => { const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.putObject('_root', 'birds', ['Parakeet']) - doc2.putObject('_root', 'birds', {'Sparrowhawk': 1}) + doc2.putObject('_root', 'birds', { 'Sparrowhawk': 1 }) const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc1.enablePatches(true) doc2.enablePatches(true) @@ -860,13 +860,13 @@ describe('Automerge', () => { doc2.loadIncremental(change1) assert.deepEqual(doc1.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) assert.deepEqual(doc1.popPatches(), [ - {action: 'put', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true}, - {action: 'put', obj: '1@bbbb', key: 'Sparrowhawk', value: 1, datatype: 'int', conflict: false} + { action: 'put', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true }, + { action: 'put', obj: '1@bbbb', key: 'Sparrowhawk', value: 1, datatype: 'int', conflict: false } ]) assert.deepEqual(doc2.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) assert.deepEqual(doc2.popPatches(), [ - {action: 'put', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true}, - {action: 'insert', obj: '1@aaaa', key: 0, value: 'Parakeet', datatype: 'str'} + { action: 'put', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true }, + { action: 'insert', obj: '1@aaaa', key: 0, value: 'Parakeet', datatype: 'str' } ]) doc1.free(); doc2.free() }) @@ -879,115 +879,115 @@ describe('Automerge', () => { 
doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.getWithType('_root', 'createdAt'), ['timestamp', now]) assert.deepEqual(doc2.popPatches(), [ - {action: 'put', obj: '_root', key: 'createdAt', value: now, datatype: 'timestamp', conflict: false} + { action: 'put', obj: '_root', key: 'createdAt', value: now, datatype: 'timestamp', conflict: false } ]) doc1.free(); doc2.free() }) it('should capture local put ops', () => { - const doc1 = create('aaaa') - doc1.enablePatches(true) - doc1.put('_root', 'key1', 1) - doc1.put('_root', 'key1', 2) - doc1.put('_root', 'key2', 3) - const map = doc1.putObject('_root', 'map', {}) - const list = doc1.putObject('_root', 'list', []) + const doc1 = create('aaaa') + doc1.enablePatches(true) + doc1.put('_root', 'key1', 1) + doc1.put('_root', 'key1', 2) + doc1.put('_root', 'key2', 3) + const map = doc1.putObject('_root', 'map', {}) + const list = doc1.putObject('_root', 'list', []) - assert.deepEqual(doc1.popPatches(), [ - {action: 'put', obj: '_root', key: 'key1', value: 1, datatype: 'int', conflict: false}, - {action: 'put', obj: '_root', key: 'key1', value: 2, datatype: 'int', conflict: false}, - {action: 'put', obj: '_root', key: 'key2', value: 3, datatype: 'int', conflict: false}, - {action: 'put', obj: '_root', key: 'map', value: map, datatype: 'map', conflict: false}, - {action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false}, - ]) - doc1.free() + assert.deepEqual(doc1.popPatches(), [ + { action: 'put', obj: '_root', key: 'key1', value: 1, datatype: 'int', conflict: false }, + { action: 'put', obj: '_root', key: 'key1', value: 2, datatype: 'int', conflict: false }, + { action: 'put', obj: '_root', key: 'key2', value: 3, datatype: 'int', conflict: false }, + { action: 'put', obj: '_root', key: 'map', value: map, datatype: 'map', conflict: false }, + { action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false }, + ]) + doc1.free() }) it('should capture 
local insert ops', () => { - const doc1 = create('aaaa') - doc1.enablePatches(true) - const list = doc1.putObject('_root', 'list', []) - doc1.insert(list, 0, 1) - doc1.insert(list, 0, 2) - doc1.insert(list, 2, 3) - const map = doc1.insertObject(list, 2, {}) - const list2 = doc1.insertObject(list, 2, []) + const doc1 = create('aaaa') + doc1.enablePatches(true) + const list = doc1.putObject('_root', 'list', []) + doc1.insert(list, 0, 1) + doc1.insert(list, 0, 2) + doc1.insert(list, 2, 3) + const map = doc1.insertObject(list, 2, {}) + const list2 = doc1.insertObject(list, 2, []) - assert.deepEqual(doc1.popPatches(), [ - {action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false}, - {action: 'insert', obj: list, key: 0, value: 1, datatype: 'int'}, - {action: 'insert', obj: list, key: 0, value: 2, datatype: 'int'}, - {action: 'insert', obj: list, key: 2, value: 3, datatype: 'int'}, - {action: 'insert', obj: list, key: 2, value: map, datatype: 'map'}, - {action: 'insert', obj: list, key: 2, value: list2, datatype: 'list'}, - ]) - doc1.free() + assert.deepEqual(doc1.popPatches(), [ + { action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false }, + { action: 'insert', obj: list, key: 0, value: 1, datatype: 'int' }, + { action: 'insert', obj: list, key: 0, value: 2, datatype: 'int' }, + { action: 'insert', obj: list, key: 2, value: 3, datatype: 'int' }, + { action: 'insert', obj: list, key: 2, value: map, datatype: 'map' }, + { action: 'insert', obj: list, key: 2, value: list2, datatype: 'list' }, + ]) + doc1.free() }) it('should capture local push ops', () => { - const doc1 = create('aaaa') - doc1.enablePatches(true) - const list = doc1.putObject('_root', 'list', []) - doc1.push(list, 1) - const map = doc1.pushObject(list, {}) - const list2 = doc1.pushObject(list, []) + const doc1 = create('aaaa') + doc1.enablePatches(true) + const list = doc1.putObject('_root', 'list', []) + doc1.push(list, 1) + const map = 
doc1.pushObject(list, {}) + const list2 = doc1.pushObject(list, []) - assert.deepEqual(doc1.popPatches(), [ - {action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false}, - {action: 'insert', obj: list, key: 0, value: 1, datatype: 'int'}, - {action: 'insert', obj: list, key: 1, value: map, datatype: 'map'}, - {action: 'insert', obj: list, key: 2, value: list2, datatype: 'list'}, - ]) - doc1.free() + assert.deepEqual(doc1.popPatches(), [ + { action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false }, + { action: 'insert', obj: list, key: 0, value: 1, datatype: 'int' }, + { action: 'insert', obj: list, key: 1, value: map, datatype: 'map' }, + { action: 'insert', obj: list, key: 2, value: list2, datatype: 'list' }, + ]) + doc1.free() }) it('should capture local splice ops', () => { - const doc1 = create('aaaa') - doc1.enablePatches(true) - const list = doc1.putObject('_root', 'list', []) - doc1.splice(list, 0, 0, [1,2,3,4]) - doc1.splice(list, 1, 2) + const doc1 = create('aaaa') + doc1.enablePatches(true) + const list = doc1.putObject('_root', 'list', []) + doc1.splice(list, 0, 0, [1, 2, 3, 4]) + doc1.splice(list, 1, 2) - assert.deepEqual(doc1.popPatches(), [ - {action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false}, - {action: 'insert', obj: list, key: 0, value: 1, datatype: 'int'}, - {action: 'insert', obj: list, key: 1, value: 2, datatype: 'int'}, - {action: 'insert', obj: list, key: 2, value: 3, datatype: 'int'}, - {action: 'insert', obj: list, key: 3, value: 4, datatype: 'int'}, - {action: 'delete', obj: list, key: 1}, - {action: 'delete', obj: list, key: 1}, - ]) - doc1.free() + assert.deepEqual(doc1.popPatches(), [ + { action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false }, + { action: 'insert', obj: list, key: 0, value: 1, datatype: 'int' }, + { action: 'insert', obj: list, key: 1, value: 2, datatype: 'int' }, + { action: 
'insert', obj: list, key: 2, value: 3, datatype: 'int' }, + { action: 'insert', obj: list, key: 3, value: 4, datatype: 'int' }, + { action: 'delete', obj: list, key: 1 }, + { action: 'delete', obj: list, key: 1 }, + ]) + doc1.free() }) it('should capture local increment ops', () => { - const doc1 = create('aaaa') - doc1.enablePatches(true) - doc1.put('_root', 'counter', 2, 'counter') - doc1.increment('_root', 'counter', 4) + const doc1 = create('aaaa') + doc1.enablePatches(true) + doc1.put('_root', 'counter', 2, 'counter') + doc1.increment('_root', 'counter', 4) - assert.deepEqual(doc1.popPatches(), [ - {action: 'put', obj: '_root', key: 'counter', value: 2, datatype: 'counter', conflict: false}, - {action: 'increment', obj: '_root', key: 'counter', value: 4}, - ]) - doc1.free() + assert.deepEqual(doc1.popPatches(), [ + { action: 'put', obj: '_root', key: 'counter', value: 2, datatype: 'counter', conflict: false }, + { action: 'increment', obj: '_root', key: 'counter', value: 4 }, + ]) + doc1.free() }) it('should capture local delete ops', () => { - const doc1 = create('aaaa') - doc1.enablePatches(true) - doc1.put('_root', 'key1', 1) - doc1.put('_root', 'key2', 2) - doc1.delete('_root', 'key1') - doc1.delete('_root', 'key2') - assert.deepEqual(doc1.popPatches(), [ - {action: 'put', obj: '_root', key: 'key1', value: 1, datatype: 'int', conflict: false}, - {action: 'put', obj: '_root', key: 'key2', value: 2, datatype: 'int', conflict: false}, - {action: 'delete', obj: '_root', key: 'key1'}, - {action: 'delete', obj: '_root', key: 'key2'}, - ]) - doc1.free() + const doc1 = create('aaaa') + doc1.enablePatches(true) + doc1.put('_root', 'key1', 1) + doc1.put('_root', 'key2', 2) + doc1.delete('_root', 'key1') + doc1.delete('_root', 'key2') + assert.deepEqual(doc1.popPatches(), [ + { action: 'put', obj: '_root', key: 'key1', value: 1, datatype: 'int', conflict: false }, + { action: 'put', obj: '_root', key: 'key2', value: 2, datatype: 'int', conflict: false }, + { action: 
'delete', obj: '_root', key: 'key1' }, + { action: 'delete', obj: '_root', key: 'key2' }, + ]) + doc1.free() }) it('should support counters in a map', () => { @@ -999,8 +999,8 @@ describe('Automerge', () => { doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.getWithType('_root', 'starlings'), ['counter', 3]) assert.deepEqual(doc2.popPatches(), [ - {action: 'put', obj: '_root', key: 'starlings', value: 2, datatype: 'counter', conflict: false}, - {action: 'increment', obj: '_root', key: 'starlings', value: 1} + { action: 'put', obj: '_root', key: 'starlings', value: 2, datatype: 'counter', conflict: false }, + { action: 'increment', obj: '_root', key: 'starlings', value: 1 } ]) doc1.free(); doc2.free() }) @@ -1018,10 +1018,10 @@ describe('Automerge', () => { doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - {action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false}, - {action: 'insert', obj: list, key: 0, value: 1, datatype: 'counter'}, - {action: 'increment', obj: list, key: 0, value: 2}, - {action: 'increment', obj: list, key: 0, value: -5}, + { action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false }, + { action: 'insert', obj: list, key: 0, value: 1, datatype: 'counter' }, + { action: 'increment', obj: list, key: 0, value: 2 }, + { action: 'increment', obj: list, key: 0, value: -5 }, ]) doc1.free(); doc2.free() }) @@ -1045,13 +1045,13 @@ describe('Automerge', () => { }) it('should not reply if we have no data as well', () => { - const n1 = create(), n2 = create() - const s1 = initSyncState(), s2 = initSyncState() - const m1 = n1.generateSyncMessage(s1) - if (m1 === null) { throw new RangeError("message should not be null") } - n2.receiveSyncMessage(s2, m1) - const m2 = n2.generateSyncMessage(s2) - assert.deepStrictEqual(m2, null) + const n1 = create(), n2 = create() + const s1 = initSyncState(), s2 = initSyncState() + const m1 = 
n1.generateSyncMessage(s1) + if (m1 === null) { throw new RangeError("message should not be null") } + n2.receiveSyncMessage(s2, m1) + const m2 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(m2, null) }) it('repos with equal heads do not need a reply message', () => { @@ -1059,11 +1059,11 @@ describe('Automerge', () => { const s1 = initSyncState(), s2 = initSyncState() // make two nodes with the same changes - const list = n1.putObject("_root","n", []) - n1.commit("",0) + const list = n1.putObject("_root", "n", []) + n1.commit("", 0) for (let i = 0; i < 10; i++) { - n1.insert(list,i,i) - n1.commit("",0) + n1.insert(list, i, i) + n1.commit("", 0) } n2.applyChanges(n1.getChanges([])) assert.deepStrictEqual(n1.materialize(), n2.materialize()) @@ -1083,11 +1083,11 @@ describe('Automerge', () => { const n1 = create(), n2 = create() // make changes for n1 that n2 should request - const list = n1.putObject("_root","n",[]) - n1.commit("",0) + const list = n1.putObject("_root", "n", []) + n1.commit("", 0) for (let i = 0; i < 10; i++) { n1.insert(list, i, i) - n1.commit("",0) + n1.commit("", 0) } assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) @@ -1099,11 +1099,11 @@ describe('Automerge', () => { const n1 = create(), n2 = create() // make changes for n1 that n2 should request - const list = n1.putObject("_root","n",[]) - n1.commit("",0) + const list = n1.putObject("_root", "n", []) + n1.commit("", 0) for (let i = 0; i < 10; i++) { - n1.insert(list,i,i) - n1.commit("",0) + n1.insert(list, i, i) + n1.commit("", 0) } assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) @@ -1117,8 +1117,8 @@ describe('Automerge', () => { const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { - n1.put("_root","x",i) - n1.commit("",0) + n1.put("_root", "x", i) + n1.commit("", 0) } sync(n1, n2, s1, s2) @@ -1126,7 +1126,7 @@ describe('Automerge', () => { // modify the first node further for (let i = 5; i < 10; i++) { n1.put("_root", "x", i) - 
n1.commit("",0) + n1.commit("", 0) } assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) @@ -1141,12 +1141,12 @@ describe('Automerge', () => { let message, patch for (let i = 0; i < 5; i++) { - n1.put("_root","x",i) - n1.commit("",0) + n1.put("_root", "x", i) + n1.commit("", 0) } for (let i = 0; i < 5; i++) { - n2.put("_root","y",i) - n2.commit("",0) + n2.put("_root", "y", i) + n2.commit("", 0) } // n1 reports what it has @@ -1160,7 +1160,7 @@ describe('Automerge', () => { assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) //assert.deepStrictEqual(patch, null) // no changes arrived - // n1 receives the changes and replies with the changes it now knows n2 needs + // n1 receives the changes and replies with the changes it now knows that n2 needs n1.receiveSyncMessage(s1, message) message = n1.generateSyncMessage(s1) if (message === null) { throw new RangeError("message should not be null") } @@ -1188,12 +1188,12 @@ describe('Automerge', () => { const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { - n1.put("_root", "x", i) - n1.commit("",0) + n1.put("_root", "x", i) + n1.commit("", 0) } for (let i = 0; i < 5; i++) { - n2.put("_root","y", i) - n2.commit("",0) + n2.put("_root", "y", i) + n2.commit("", 0) } const head1 = n1.getHeads()[0], head2 = n2.getHeads()[0] @@ -1209,7 +1209,7 @@ describe('Automerge', () => { assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0) - // n1 and n2 receives that message and update sync state but make no patch + // n1 and n2 receive that message and update sync state but make no patch n1.receiveSyncMessage(s1, msg2to1) n2.receiveSyncMessage(s2, msg1to2) @@ -1226,14 +1226,14 @@ describe('Automerge', () => { n1.receiveSyncMessage(s1, msg2to1) assert.deepStrictEqual(n1.getMissingDeps(), []) //assert.notDeepStrictEqual(patch1, null) - assert.deepStrictEqual(n1.materialize(), {x: 4, y: 4}) + 
assert.deepStrictEqual(n1.materialize(), { x: 4, y: 4 }) n2.receiveSyncMessage(s2, msg1to2) assert.deepStrictEqual(n2.getMissingDeps(), []) //assert.notDeepStrictEqual(patch2, null) - assert.deepStrictEqual(n2.materialize(), {x: 4, y: 4}) + assert.deepStrictEqual(n2.materialize(), { x: 4, y: 4 }) - // The response acknowledges the changes received, and sends no further changes + // The response acknowledges the changes received and sends no further changes msg1to2 = n1.generateSyncMessage(s1) if (msg1to2 === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) @@ -1255,37 +1255,37 @@ describe('Automerge', () => { assert.deepStrictEqual(msg1to2, null) assert.deepStrictEqual(msg2to1, null) - // If we make one more change, and start another sync, its lastSync should be updated - n1.put("_root","x",5) + // If we make one more change and start another sync then its lastSync should be updated + n1.put("_root", "x", 5) msg1to2 = n1.generateSyncMessage(s1) if (msg1to2 === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()) }) - it('should assume sent changes were recieved until we hear otherwise', () => { + it('should assume sent changes were received until we hear otherwise', () => { const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() let message = null const items = n1.putObject("_root", "items", []) - n1.commit("",0) + n1.commit("", 0) sync(n1, n2, s1, s2) n1.push(items, "x") - n1.commit("",0) + n1.commit("", 0) message = n1.generateSyncMessage(s1) if (message === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) n1.push(items, "y") - n1.commit("",0) + n1.commit("", 0) message = n1.generateSyncMessage(s1) if (message === null) { throw new RangeError("message should 
not be null") } assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) n1.push(items, "z") - n1.commit("",0) + n1.commit("", 0) message = n1.generateSyncMessage(s1) if (message === null) { throw new RangeError("message should not be null") } @@ -1299,7 +1299,7 @@ describe('Automerge', () => { for (let i = 0; i < 5; i++) { n1.put("_root", "x", i) - n1.commit("",0) + n1.commit("", 0) } sync(n1, n2, s1, s2) @@ -1307,7 +1307,7 @@ describe('Automerge', () => { // modify the first node further for (let i = 5; i < 10; i++) { n1.put("_root", "x", i) - n1.commit("",0) + n1.commit("", 0) } assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) @@ -1326,20 +1326,20 @@ describe('Automerge', () => { const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { - n1.put("_root","x",i) - n1.commit("",0) + n1.put("_root", "x", i) + n1.commit("", 0) } sync(n1, n2) for (let i = 10; i < 15; i++) { - n1.put("_root","x",i) - n1.commit("",0) + n1.put("_root", "x", i) + n1.commit("", 0) } for (let i = 15; i < 18; i++) { - n2.put("_root","x",i) - n2.commit("",0) + n2.put("_root", "x", i) + n2.commit("", 0) } assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) @@ -1359,19 +1359,19 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { - n1.put("_root","x",i) - n1.commit("",0) + n1.put("_root", "x", i) + n1.commit("", 0) } sync(n1, n2, s1, s2) for (let i = 10; i < 15; i++) { - n1.put("_root","x",i) - n1.commit("",0) + n1.put("_root", "x", i) + n1.commit("", 0) } for (let i = 15; i < 18; i++) { - n2.put("_root","x",i) - n2.commit("",0) + n2.put("_root", "x", i) + n2.commit("", 0) } s1 = decodeSyncState(encodeSyncState(s1)) @@ -1388,8 +1388,8 @@ describe('Automerge', () => { const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 3; i++) { - n1.put("_root","x",i) - n1.commit("",0) + n1.put("_root", "x", i) + n1.commit("", 0) } sync(n1, n2, s1, s2) @@ -1409,21 +1409,21 @@ 
describe('Automerge', () => { // n1 makes three changes, which we sync to n2 for (let i = 0; i < 3; i++) { - n1.put("_root","x",i) - n1.commit("",0) + n1.put("_root", "x", i) + n1.commit("", 0) } sync(n1, n2, s1, s2) - // save a copy of n2 as "r" to simulate recovering from crash - let r + // save a copy of n2 as "r" to simulate recovering from a crash + let r let rSyncState ;[r, rSyncState] = [n2.clone(), s2.clone()] // sync another few commits for (let i = 3; i < 6; i++) { - n1.put("_root","x",i) - n1.commit("",0) + n1.put("_root", "x", i) + n1.commit("", 0) } sync(n1, n2, s1, s2) @@ -1432,10 +1432,10 @@ describe('Automerge', () => { assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) assert.deepStrictEqual(n1.materialize(), n2.materialize()) - // now make a few more changes, then attempt to sync the fully-up-to-date n1 with the confused r + // now make a few more changes and then attempt to sync the fully-up-to-date n1 with the confused r for (let i = 6; i < 9; i++) { - n1.put("_root","x",i) - n1.commit("",0) + n1.put("_root", "x", i) + n1.commit("", 0) } s1 = decodeSyncState(encodeSyncState(s1)) @@ -1443,21 +1443,21 @@ describe('Automerge', () => { assert.notDeepStrictEqual(n1.getHeads(), r.getHeads()) assert.notDeepStrictEqual(n1.materialize(), r.materialize()) - assert.deepStrictEqual(n1.materialize(), {x: 8}) - assert.deepStrictEqual(r.materialize(), {x: 2}) + assert.deepStrictEqual(n1.materialize(), { x: 8 }) + assert.deepStrictEqual(r.materialize(), { x: 2 }) sync(n1, r, s1, rSyncState) assert.deepStrictEqual(n1.getHeads(), r.getHeads()) assert.deepStrictEqual(n1.materialize(), r.materialize()) }) - it('should resync after one node experiences data loss without disconnecting', () => { + it('should re-sync after one node experiences data loss without disconnecting', () => { const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() // n1 makes three changes, which we sync to n2 for (let i = 0; i < 3; i++) { - 
n1.put("_root","x",i) - n1.commit("",0) + n1.put("_root", "x", i) + n1.commit("", 0) } sync(n1, n2, s1, s2) @@ -1480,20 +1480,20 @@ describe('Automerge', () => { // Change 1 is known to all three nodes //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) - n1.put("_root","x",1); n1.commit("",0) + n1.put("_root", "x", 1); n1.commit("", 0) sync(n1, n2, s12, s21) sync(n2, n3, s23, s32) // Change 2 is known to n1 and n2 - n1.put("_root","x",2); n1.commit("",0) + n1.put("_root", "x", 2); n1.commit("", 0) sync(n1, n2, s12, s21) // Each of the three nodes makes one change (changes 3, 4, 5) - n1.put("_root","x",3); n1.commit("",0) - n2.put("_root","x",4); n2.commit("",0) - n3.put("_root","x",5); n3.commit("",0) + n1.put("_root", "x", 3); n1.commit("", 0) + n2.put("_root", "x", 4); n2.commit("", 0) + n3.put("_root", "x", 5); n3.commit("", 0) // Apply n3's latest change to n2. If running in Node, turn the Uint8Array into a Buffer, to // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) @@ -1512,14 +1512,14 @@ describe('Automerge', () => { it('should handle histories with lots of branching and merging', () => { const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') - n1.put("_root","x",0); n1.commit("",0) + n1.put("_root", "x", 0); n1.commit("", 0) let change1 = n1.getLastLocalChange() if (change1 === null) throw new RangeError("no local change") n2.applyChanges([change1]) let change2 = n1.getLastLocalChange() if (change2 === null) throw new RangeError("no local change") n3.applyChanges([change2]) - n3.put("_root","x",1); n3.commit("",0) + n3.put("_root", "x", 1); n3.commit("", 0) // - n1c1 <------ n1c2 <------ n1c3 <-- etc. 
<-- n1c20 <------ n1c21 // / \/ \/ \/ @@ -1528,8 +1528,8 @@ describe('Automerge', () => { // \ / // ---------------------------------------------- n3c1 <----- for (let i = 1; i < 20; i++) { - n1.put("_root","n1",i); n1.commit("",0) - n2.put("_root","n2",i); n2.commit("",0) + n1.put("_root", "n1", i); n1.commit("", 0) + n2.put("_root", "n2", i); n2.commit("", 0) const change1 = n1.getLastLocalChange() if (change1 === null) throw new RangeError("no local change") const change2 = n2.getLastLocalChange() @@ -1545,8 +1545,8 @@ describe('Automerge', () => { const change3 = n2.getLastLocalChange() if (change3 === null) throw new RangeError("no local change") n2.applyChanges([change3]) - n1.put("_root","n1","final"); n1.commit("",0) - n2.put("_root","n2","final"); n2.commit("",0) + n1.put("_root", "n1", "final"); n1.commit("", 0) + n2.put("_root", "n2", "final"); n2.commit("", 0) sync(n1, n2, s1, s2) assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) @@ -1563,15 +1563,15 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { - n1.put("_root","x",i); n1.commit("",0) + n1.put("_root", "x", i); n1.commit("", 0) } sync(n1, n2, s1, s2) for (let i = 1; ; i++) { // search for false positive; see comment above const n1up = n1.clone('01234567'); - n1up.put("_root","x",`${i} @ n1`); n1up.commit("",0) + n1up.put("_root", "x", `${i} @ n1`); n1up.commit("", 0) const n2up = n2.clone('89abcdef'); - n2up.put("_root","x",`${i} @ n2`); n2up.commit("",0) + n2up.put("_root", "x", `${i} @ n2`); n2up.commit("", 0) if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { n1.free(); n2.free() n1 = n1up; n2 = n2up; break @@ -1600,25 +1600,25 @@ describe('Automerge', () => { s1 = initSyncState() s2 = initSyncState() for (let i = 0; i < 10; i++) { - n1.put("_root","x",i); n1.commit("",0) + n1.put("_root", "x", i); n1.commit("", 0) } sync(n1, n2, s1, s2) let n1hash1, n2hash1 for (let i = 29; ; i++) { // search for false 
positive; see comment above const n1us1 = n1.clone('01234567') - n1us1.put("_root","x",`${i} @ n1`); n1us1.commit("",0) + n1us1.put("_root", "x", `${i} @ n1`); n1us1.commit("", 0) const n2us1 = n2.clone('89abcdef') - n2us1.put("_root","x",`${i} @ n1`); n2us1.commit("",0) + n2us1.put("_root", "x", `${i} @ n1`); n2us1.commit("", 0) n1hash1 = n1us1.getHeads()[0]; n2hash1 = n2us1.getHeads()[0] const n1us2 = n1us1.clone(); - n1us2.put("_root","x",`final @ n1`); n1us2.commit("",0) + n1us2.put("_root", "x", `final @ n1`); n1us2.commit("", 0) const n2us2 = n2us1.clone(); - n2us2.put("_root","x",`final @ n2`); n2us2.commit("",0) + n2us2.put("_root", "x", `final @ n2`); n2us2.commit("", 0) n1hash2 = n1us2.getHeads()[0]; n2hash2 = n2us2.getHeads()[0] if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) { @@ -1684,33 +1684,33 @@ describe('Automerge', () => { let n1hash3, n2hash3 for (let i = 0; i < 5; i++) { - n1.put("_root","x",i); n1.commit("",0) + n1.put("_root", "x", i); n1.commit("", 0) } sync(n1, n2, s1, s2) for (let i = 86; ; i++) { // search for false positive; see comment above const n1us1 = n1.clone('01234567') - n1us1.put("_root","x",`${i} @ n1`); n1us1.commit("",0) + n1us1.put("_root", "x", `${i} @ n1`); n1us1.commit("", 0) const n2us1 = n2.clone('89abcdef') - n2us1.put("_root","x",`${i} @ n2`); n2us1.commit("",0) + n2us1.put("_root", "x", `${i} @ n2`); n2us1.commit("", 0) //const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) //const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) const n1hash1 = n1us1.getHeads()[0] const n1us2 = n1us1.clone() - n1us2.put("_root","x",`${i + 1} @ n1`); n1us2.commit("",0) + n1us2.put("_root", "x", `${i + 1} @ n1`); n1us2.commit("", 0) const n2us2 = n2us1.clone() - n2us2.put("_root","x",`${i + 1} @ n2`); n2us2.commit("",0) + n2us2.put("_root", "x", `${i + 1} @ n2`); n2us2.commit("", 0) const n1hash2 = 
n1us2.getHeads()[0], n2hash2 = n2us2.getHeads()[0] const n1us3 = n1us2.clone() - n1us3.put("_root","x",`final @ n1`); n1us3.commit("",0) + n1us3.put("_root", "x", `final @ n1`); n1us3.commit("", 0) const n2us3 = n2us2.clone() - n2us3.put("_root","x",`final @ n2`); n2us3.commit("",0) + n2us3.put("_root", "x", `final @ n2`); n2us3.commit("", 0) n1hash3 = n1us3.getHeads()[0]; n2hash3 = n2us3.getHeads()[0] @@ -1737,28 +1737,28 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { - n1.put("_root","x",i); n1.commit("",0) + n1.put("_root", "x", i); n1.commit("", 0) } sync(n1, n2, s1, s2) - n1.put("_root","x",5); n1.commit("",0) + n1.put("_root", "x", 5); n1.commit("", 0) for (let i = 2; ; i++) { // search for false positive; see comment above const n2us1 = n2.clone('89abcdef') - n2us1.put("_root","x",`${i} @ n2`); n2us1.commit("",0) + n2us1.put("_root", "x", `${i} @ n2`); n2us1.commit("", 0) if (new BloomFilter(n1.getHeads()).containsHash(n2us1.getHeads()[0])) { n2 = n2us1; break } } for (let i = 141; ; i++) { // search for false positive; see comment above const n2us2 = n2.clone('89abcdef') - n2us2.put("_root","x",`${i} again`); n2us2.commit("",0) + n2us2.put("_root", "x", `${i} again`); n2us2.commit("", 0) if (new BloomFilter(n1.getHeads()).containsHash(n2us2.getHeads()[0])) { n2 = n2us2; break } } - n2.put("_root","x",`final @ n2`); n2.commit("",0) + n2.put("_root", "x", `final @ n2`); n2.commit("", 0) const allHeads = [...n1.getHeads(), ...n2.getHeads()].sort() s1 = decodeSyncState(encodeSyncState(s1)) @@ -1778,7 +1778,7 @@ describe('Automerge', () => { let message for (let i = 0; i < 10; i++) { - n1.put("_root","x",i); n1.commit("",0) + n1.put("_root", "x", i); n1.commit("", 0) } sync(n1, n2, s1, s2) @@ -1787,8 +1787,8 @@ describe('Automerge', () => { s2 = decodeSyncState(encodeSyncState(s2)) for (let i = 1; ; i++) { // brute-force search for false positive; see comment above - const n1up = 
n1.clone('01234567'); n1up.put("_root","x",`${i} @ n1`); n1up.commit("",0) - const n2up = n1.clone('89abcdef'); n2up.put("_root","x",`${i} @ n2`); n2up.commit("",0) + const n1up = n1.clone('01234567'); n1up.put("_root", "x", `${i} @ n1`); n1up.commit("", 0) + const n2up = n1.clone('89abcdef'); n2up.put("_root", "x", `${i} @ n2`); n2up.commit("", 0) // check if the bloom filter on n2 will believe n1 already has a particular hash // this will mean n2 won't offer that data to n2 by receiving a sync message from n1 @@ -1839,7 +1839,7 @@ describe('Automerge', () => { let message1, message2, message3 for (let i = 0; i < 3; i++) { - n1.put("_root","x",i); n1.commit("",0) + n1.put("_root", "x", i); n1.commit("", 0) } // sync all 3 nodes @@ -1847,18 +1847,18 @@ describe('Automerge', () => { sync(n1, n3, s13, s31) sync(n3, n2, s32, s23) for (let i = 0; i < 2; i++) { - n1.put("_root","x",`${i} @ n1`); n1.commit("",0) + n1.put("_root", "x", `${i} @ n1`); n1.commit("", 0) } for (let i = 0; i < 2; i++) { - n2.put("_root","x",`${i} @ n2`); n2.commit("",0) + n2.put("_root", "x", `${i} @ n2`); n2.commit("", 0) } n1.applyChanges(n2.getChanges([])) n2.applyChanges(n1.getChanges([])) - n1.put("_root","x",`3 @ n1`); n1.commit("",0) - n2.put("_root","x",`3 @ n2`); n2.commit("",0) + n1.put("_root", "x", `3 @ n1`); n1.commit("", 0) + n2.put("_root", "x", `3 @ n2`); n2.commit("", 0) for (let i = 0; i < 3; i++) { - n3.put("_root","x",`${i} @ n3`); n3.commit("",0) + n3.put("_root", "x", `${i} @ n3`); n3.commit("", 0) } const n1c3 = n1.getHeads()[0], n2c3 = n2.getHeads()[0], n3c3 = n3.getHeads()[0] s13 = decodeSyncState(encodeSyncState(s13)) @@ -1908,13 +1908,13 @@ describe('Automerge', () => { let message = null for (let i = 0; i < 3; i++) { - n1.put("_root","x",i); n1.commit("",0) + n1.put("_root", "x", i); n1.commit("", 0) } const lastSync = n1.getHeads() for (let i = 3; i < 6; i++) { - n1.put("_root","x",i); n1.commit("",0) + n1.put("_root", "x", i); n1.commit("", 0) } sync(n1, n2, s1, 
s2) @@ -1936,7 +1936,7 @@ describe('Automerge', () => { let message = null for (let i = 0; i < 3; i++) { - n1.put("_root","x",i); n1.commit("",0) + n1.put("_root", "x", i); n1.commit("", 0) } n2.applyChanges(n1.getChanges([])) @@ -1958,13 +1958,13 @@ describe('Automerge', () => { let s1 = initSyncState(), s2 = initSyncState() let msg, decodedMsg - n1.put("_root","x",0); n1.commit("",0) + n1.put("_root", "x", 0); n1.commit("", 0) n3.applyChanges(n3.getChangesAdded(n1)) // merge() for (let i = 1; i <= 2; i++) { - n1.put("_root","x",i); n1.commit("",0) + n1.put("_root", "x", i); n1.commit("", 0) } for (let i = 3; i <= 4; i++) { - n3.put("_root","x",i); n3.commit("",0) + n3.put("_root", "x", i); n3.commit("", 0) } const c2 = n1.getHeads()[0], c4 = n3.getHeads()[0] n2.applyChanges(n2.getChangesAdded(n3)) // merge() @@ -1977,14 +1977,14 @@ describe('Automerge', () => { assert.deepStrictEqual(s2.sharedHeads, [c2, c4].sort()) // n2 and n3 apply {c5, c6, c7, c8} - n3.put("_root","x",5); n3.commit("",0) + n3.put("_root", "x", 5); n3.commit("", 0) const change5 = n3.getLastLocalChange() if (change5 === null) throw new RangeError("no local change") - n3.put("_root","x",6); n3.commit("",0) + n3.put("_root", "x", 6); n3.commit("", 0) const change6 = n3.getLastLocalChange(), c6 = n3.getHeads()[0] if (change6 === null) throw new RangeError("no local change") for (let i = 7; i <= 8; i++) { - n3.put("_root","x",i); n3.commit("",0) + n3.put("_root", "x", i); n3.commit("", 0) } const c8 = n3.getHeads()[0] n2.applyChanges(n2.getChangesAdded(n3)) // merge() From 04d0175113bf3405c54ef371d400c3b86605ff0a Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 6 Aug 2022 16:20:35 -0700 Subject: [PATCH 520/730] Add missing past-the-end checks to the unit tests for `AMmapRange()`. 
--- automerge-c/test/map_tests.c | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/automerge-c/test/map_tests.c b/automerge-c/test/map_tests.c index 821fe81f..636080ec 100644 --- a/automerge-c/test/map_tests.c +++ b/automerge-c/test/map_tests.c @@ -443,6 +443,8 @@ static void test_map_range_back_and_forth_single(void** state) { assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Forward stop */ + assert_null(AMmapItemsNext(&range_all, 1)); /* Back, back, back. */ range_back_all = AMmapItemsRewound(&range_back_all); @@ -479,6 +481,8 @@ static void test_map_range_back_and_forth_single(void** state) { assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Back stop */ + assert_null(AMmapItemsNext(&range_back_all, 1)); } static void test_map_range_back_and_forth_double(void** state) { @@ -620,6 +624,8 @@ static void test_map_range_back_and_forth_double(void** state) { assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Forward stop */ + assert_null(AMmapItemsNext(&range_all, 1)); /* Back, back, back. 
*/ range_back_all = AMmapItemsRewound(&range_back_all); @@ -656,6 +662,8 @@ static void test_map_range_back_and_forth_double(void** state) { assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Back stop */ + assert_null(AMmapItemsNext(&range_back_all, 1)); } static void test_map_range_at_back_and_forth_single(void** state) { @@ -788,6 +796,8 @@ static void test_map_range_at_back_and_forth_single(void** state) { assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Forward stop */ + assert_null(AMmapItemsNext(&range_all, 1)); /* Back, back, back. */ range_back_all = AMmapItemsRewound(&range_back_all); @@ -824,6 +834,8 @@ static void test_map_range_at_back_and_forth_single(void** state) { assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Back stop */ + assert_null(AMmapItemsNext(&range_back_all, 1)); } static void test_map_range_at_back_and_forth_double(void** state) { @@ -969,6 +981,8 @@ static void test_map_range_at_back_and_forth_double(void** state) { assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Forward stop */ + assert_null(AMmapItemsNext(&range_all, 1)); /* Back, back, back. 
*/ range_back_all = AMmapItemsRewound(&range_back_all); @@ -1005,6 +1019,8 @@ static void test_map_range_at_back_and_forth_double(void** state) { assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Back stop */ + assert_null(AMmapItemsNext(&range_back_all, 1)); } static void test_get_range_values(void** state) { From 825342cbb1a5bfa961717a5511f2131b276f69cc Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sun, 7 Aug 2022 08:07:00 -0700 Subject: [PATCH 521/730] Remove reflexive struct reference from a Doxygen variable declaration. --- automerge-c/src/result.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index f164f62a..d0b707dd 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -81,7 +81,7 @@ use crate::sync::{AMsyncMessage, AMsyncState}; /// A synchronization state as a pointer to an `AMsyncState` struct. /// /// \var AMvalue::tag -/// The variant discriminator of an `AMvalue` struct. +/// The variant discriminator. /// /// \var AMvalue::timestamp /// A Lamport timestamp. From 7ec17b26a9a9c7124496258b15a2b8363a95d515 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sun, 7 Aug 2022 19:24:47 -0700 Subject: [PATCH 522/730] Replace `From<&AMvalue<'_>> for Result< am::ScalarValue, am::AutomergeError>` with `TryFrom<&AMvalue<'_>> for am::ScalarValue` for @alexjg in #414. 
--- automerge-c/src/doc.rs | 2 +- automerge-c/src/result.rs | 126 +++++++++++++++++++------------------- 2 files changed, 65 insertions(+), 63 deletions(-) diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index b3d9682e..3b455e8c 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -751,7 +751,7 @@ pub unsafe extern "C" fn AMsplice( if !(src.is_null() || count == 0) { let c_vals = std::slice::from_raw_parts(src, count); for c_val in c_vals { - match c_val.into() { + match c_val.try_into() { Ok(s) => { vals.push(s); } diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index d0b707dd..f03e8db4 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -165,8 +165,70 @@ impl<'a> PartialEq for AMvalue<'a> { } } -impl From<&AMvalue<'_>> for Result { - fn from(c_value: &AMvalue) -> Self { +impl From<(&am::Value<'_>, &RefCell>)> for AMvalue<'_> { + fn from((value, c_str): (&am::Value<'_>, &RefCell>)) -> Self { + match value { + am::Value::Scalar(scalar) => match scalar.as_ref() { + am::ScalarValue::Boolean(flag) => AMvalue::Boolean(*flag), + am::ScalarValue::Bytes(bytes) => AMvalue::Bytes(bytes.as_slice().into()), + am::ScalarValue::Counter(counter) => AMvalue::Counter(counter.into()), + am::ScalarValue::F64(float) => AMvalue::F64(*float), + am::ScalarValue::Int(int) => AMvalue::Int(*int), + am::ScalarValue::Null => AMvalue::Null, + am::ScalarValue::Str(smol_str) => { + let mut c_str = c_str.borrow_mut(); + AMvalue::Str(match c_str.as_mut() { + None => { + let value_str = CString::new(smol_str.to_string()).unwrap(); + c_str.insert(value_str).as_ptr() + } + Some(value_str) => value_str.as_ptr(), + }) + } + am::ScalarValue::Timestamp(timestamp) => AMvalue::Timestamp(*timestamp), + am::ScalarValue::Uint(uint) => AMvalue::Uint(*uint), + }, + // \todo Confirm that an object variant should be ignored + // when there's no object ID variant. 
+ am::Value::Object(_) => AMvalue::Void, + } + } +} + +impl From<&AMvalue<'_>> for u8 { + fn from(value: &AMvalue) -> Self { + use AMvalue::*; + + match value { + ActorId(_) => 1, + Boolean(_) => 2, + Bytes(_) => 3, + ChangeHashes(_) => 4, + Changes(_) => 5, + Counter(_) => 6, + Doc(_) => 7, + F64(_) => 8, + Int(_) => 9, + ListItems(_) => 10, + MapItems(_) => 11, + Null => 12, + ObjId(_) => 13, + ObjItems(_) => 14, + Str(_) => 15, + Strs(_) => 16, + SyncMessage(_) => 17, + SyncState(_) => 18, + Timestamp(_) => 19, + Uint(_) => 20, + Void => 0, + } + } +} + +impl TryFrom<&AMvalue<'_>> for am::ScalarValue { + type Error = am::AutomergeError; + + fn try_from(c_value: &AMvalue) -> Result { use am::AutomergeError::InvalidValueType; use AMvalue::*; @@ -239,66 +301,6 @@ impl From<&AMvalue<'_>> for Result { } } -impl From<(&am::Value<'_>, &RefCell>)> for AMvalue<'_> { - fn from((value, c_str): (&am::Value<'_>, &RefCell>)) -> Self { - match value { - am::Value::Scalar(scalar) => match scalar.as_ref() { - am::ScalarValue::Boolean(flag) => AMvalue::Boolean(*flag), - am::ScalarValue::Bytes(bytes) => AMvalue::Bytes(bytes.as_slice().into()), - am::ScalarValue::Counter(counter) => AMvalue::Counter(counter.into()), - am::ScalarValue::F64(float) => AMvalue::F64(*float), - am::ScalarValue::Int(int) => AMvalue::Int(*int), - am::ScalarValue::Null => AMvalue::Null, - am::ScalarValue::Str(smol_str) => { - let mut c_str = c_str.borrow_mut(); - AMvalue::Str(match c_str.as_mut() { - None => { - let value_str = CString::new(smol_str.to_string()).unwrap(); - c_str.insert(value_str).as_ptr() - } - Some(value_str) => value_str.as_ptr(), - }) - } - am::ScalarValue::Timestamp(timestamp) => AMvalue::Timestamp(*timestamp), - am::ScalarValue::Uint(uint) => AMvalue::Uint(*uint), - }, - // \todo Confirm that an object variant should be ignored - // when there's no object ID variant. 
- am::Value::Object(_) => AMvalue::Void, - } - } -} - -impl From<&AMvalue<'_>> for u8 { - fn from(value: &AMvalue) -> Self { - use AMvalue::*; - - match value { - ActorId(_) => 1, - Boolean(_) => 2, - Bytes(_) => 3, - ChangeHashes(_) => 4, - Changes(_) => 5, - Counter(_) => 6, - Doc(_) => 7, - F64(_) => 8, - Int(_) => 9, - ListItems(_) => 10, - MapItems(_) => 11, - Null => 12, - ObjId(_) => 13, - ObjItems(_) => 14, - Str(_) => 15, - Strs(_) => 16, - SyncMessage(_) => 17, - SyncState(_) => 18, - Timestamp(_) => 19, - Uint(_) => 20, - Void => 0, - } - } -} - /// \memberof AMvalue /// \brief Tests the equality of two values. /// From 50981acc5ad287e7ae222367264e3293eed7c947 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sun, 7 Aug 2022 19:37:48 -0700 Subject: [PATCH 523/730] Replace `to_del!()` and `to_pos!()` with `to_index!()` for @alexjg in #414. --- automerge-c/src/doc.rs | 27 +++++++++------------------ 1 file changed, 9 insertions(+), 18 deletions(-) diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 3b455e8c..298092c4 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -27,21 +27,12 @@ macro_rules! to_changes { }}; } -macro_rules! to_del { - ($del:expr, $len:expr) => {{ - if $del > $len && $del != usize::MAX { - return AMresult::err(&format!("Invalid del {}", $del)).into(); +macro_rules! to_index { + ($index:expr, $len:expr, $param_name:expr) => {{ + if $index > $len && $index != usize::MAX { + return AMresult::err(&format!("Invalid {} {}", $param_name, $index)).into(); } - std::cmp::min($del, $len) - }}; -} - -macro_rules! 
to_pos { - ($pos:expr, $len:expr) => {{ - if $pos > $len && $pos != usize::MAX { - return AMresult::err(&format!("Invalid pos {}", $pos)).into(); - } - std::cmp::min($pos, $len) + std::cmp::min($index, $len) }}; } @@ -745,8 +736,8 @@ pub unsafe extern "C" fn AMsplice( let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let len = doc.length(obj_id); - let pos = to_pos!(pos, len); - let del = to_del!(del, len); + let pos = to_index!(pos, len, "pos"); + let del = to_index!(del, len, "del"); let mut vals: Vec = vec![]; if !(src.is_null() || count == 0) { let c_vals = std::slice::from_raw_parts(src, count); @@ -797,8 +788,8 @@ pub unsafe extern "C" fn AMspliceText( let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let len = doc.length(obj_id); - let pos = to_pos!(pos, len); - let del = to_del!(del, len); + let pos = to_index!(pos, len, "pos"); + let del = to_index!(del, len, "del"); to_result(doc.splice_text(obj_id, pos, del, &to_str(text))) } From bc28faee71eb28f3d8d50fa72d223c4f6bfc7a63 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sun, 7 Aug 2022 20:04:49 -0700 Subject: [PATCH 524/730] Replace `NULL` with `std::ptr::null()` within the safety notes for @alexjg in #414. 
--- automerge-c/src/doc.rs | 26 +++++++++++++------------- automerge-c/src/doc/list.rs | 36 ++++++++++++++++++------------------ automerge-c/src/doc/map.rs | 36 ++++++++++++++++++------------------ 3 files changed, 49 insertions(+), 49 deletions(-) diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 298092c4..6edd7772 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -200,7 +200,7 @@ pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// heads must be a valid pointer to an AMchangeHashes or NULL +/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMfork(doc: *mut AMdoc, heads: *const AMchangeHashes) -> *mut AMresult { let doc = to_doc_mut!(doc); @@ -369,7 +369,7 @@ pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// heads must be a valid pointer to an AMchangeHashes or NULL +/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMgetMissingDeps( doc: *mut AMdoc, @@ -416,8 +416,8 @@ pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL -/// heads must be a valid pointer to an AMchangeHashes or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMkeys( doc: *const AMdoc, @@ -518,8 +518,8 @@ pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMre /// /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL -/// heads must be a valid pointer to an 
AMchangeHashes or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMobjSize( doc: *const AMdoc, @@ -552,8 +552,8 @@ pub unsafe extern "C" fn AMobjSize( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL -/// heads must be a valid pointer to an AMchangeHashes or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMobjValues( doc: *const AMdoc, @@ -722,8 +722,8 @@ pub unsafe extern "C" fn AMsetActorId( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL -/// src must be an AMvalue array of size `>= count` or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// src must be an AMvalue array of size `>= count` or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMsplice( doc: *mut AMdoc, @@ -775,7 +775,7 @@ pub unsafe extern "C" fn AMsplice( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// text must be a null-terminated array of `c_char` or NULL. 
#[no_mangle] pub unsafe extern "C" fn AMspliceText( @@ -807,8 +807,8 @@ pub unsafe extern "C" fn AMspliceText( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL -/// heads must be a valid pointer to an AMchangeHashes or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMtext( doc: *const AMdoc, diff --git a/automerge-c/src/doc/list.rs b/automerge-c/src/doc/list.rs index a425d815..c8b160cb 100644 --- a/automerge-c/src/doc/list.rs +++ b/automerge-c/src/doc/list.rs @@ -46,7 +46,7 @@ macro_rules! to_range { /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistDelete( doc: *mut AMdoc, @@ -76,8 +76,8 @@ pub unsafe extern "C" fn AMlistDelete( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL -/// heads must be a valid pointer to an AMchangeHashes or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistGet( doc: *const AMdoc, @@ -112,8 +112,8 @@ pub unsafe extern "C" fn AMlistGet( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL -/// heads must be a valid pointer to an AMchangeHashes or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistGetAll( doc: *const AMdoc, @@ -147,7 +147,7 @@ pub unsafe extern "C" fn AMlistGetAll( /// \internal 
/// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistIncrement( doc: *mut AMdoc, @@ -181,7 +181,7 @@ pub unsafe extern "C" fn AMlistIncrement( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistPutBool( doc: *mut AMdoc, @@ -224,7 +224,7 @@ pub unsafe extern "C" fn AMlistPutBool( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// src must be a byte array of size `>= count` #[no_mangle] pub unsafe extern "C" fn AMlistPutBytes( @@ -267,7 +267,7 @@ pub unsafe extern "C" fn AMlistPutBytes( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistPutCounter( doc: *mut AMdoc, @@ -307,7 +307,7 @@ pub unsafe extern "C" fn AMlistPutCounter( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistPutF64( doc: *mut AMdoc, @@ -346,7 +346,7 @@ pub unsafe extern "C" fn AMlistPutF64( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistPutInt( doc: *mut AMdoc, @@ -384,7 +384,7 @@ pub unsafe extern "C" fn AMlistPutInt( 
/// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistPutNull( doc: *mut AMdoc, @@ -423,7 +423,7 @@ pub unsafe extern "C" fn AMlistPutNull( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistPutObject( doc: *mut AMdoc, @@ -464,7 +464,7 @@ pub unsafe extern "C" fn AMlistPutObject( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// value must be a null-terminated array of `c_char` #[no_mangle] pub unsafe extern "C" fn AMlistPutStr( @@ -505,7 +505,7 @@ pub unsafe extern "C" fn AMlistPutStr( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistPutTimestamp( doc: *mut AMdoc, @@ -545,7 +545,7 @@ pub unsafe extern "C" fn AMlistPutTimestamp( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistPutUint( doc: *mut AMdoc, @@ -584,8 +584,8 @@ pub unsafe extern "C" fn AMlistPutUint( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL -/// heads must be a valid pointer to an AMchangeHashes or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an 
AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistRange( doc: *const AMdoc, diff --git a/automerge-c/src/doc/map.rs b/automerge-c/src/doc/map.rs index 1ab93138..4b2b6cc2 100644 --- a/automerge-c/src/doc/map.rs +++ b/automerge-c/src/doc/map.rs @@ -25,7 +25,7 @@ pub mod items; /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapDelete( @@ -54,9 +54,9 @@ pub unsafe extern "C" fn AMmapDelete( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used -/// heads must be a valid pointer to an AMchangeHashes or NULL +/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMmapGet( doc: *const AMdoc, @@ -90,9 +90,9 @@ pub unsafe extern "C" fn AMmapGet( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used -/// heads must be a valid pointer to an AMchangeHashes or NULL +/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMmapGetAll( doc: *const AMdoc, @@ -123,7 +123,7 @@ pub unsafe extern "C" fn AMmapGetAll( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn 
AMmapIncrement( @@ -151,7 +151,7 @@ pub unsafe extern "C" fn AMmapIncrement( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutBool( @@ -182,7 +182,7 @@ pub unsafe extern "C" fn AMmapPutBool( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used /// src must be a byte array of size `>= count` #[no_mangle] @@ -214,7 +214,7 @@ pub unsafe extern "C" fn AMmapPutBytes( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutCounter( @@ -245,7 +245,7 @@ pub unsafe extern "C" fn AMmapPutCounter( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutNull( @@ -273,7 +273,7 @@ pub unsafe extern "C" fn AMmapPutNull( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutObject( @@ -301,7 +301,7 @@ pub unsafe extern "C" fn AMmapPutObject( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must 
be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutF64( @@ -329,7 +329,7 @@ pub unsafe extern "C" fn AMmapPutF64( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutInt( @@ -358,7 +358,7 @@ pub unsafe extern "C" fn AMmapPutInt( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used /// value must be a null-terminated array of `c_char` #[no_mangle] @@ -387,7 +387,7 @@ pub unsafe extern "C" fn AMmapPutStr( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutTimestamp( @@ -419,7 +419,7 @@ pub unsafe extern "C" fn AMmapPutTimestamp( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() /// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutUint( @@ -453,8 +453,8 @@ pub unsafe extern "C" fn AMmapPutUint( /// \internal /// # Safety /// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or NULL -/// heads must be a valid pointer to an AMchangeHashes or NULL +/// obj_id must be a valid pointer to an AMobjId or 
std::ptr::null() +/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMmapRange( doc: *const AMdoc, From f89e9ad9ccdb9d9c820f074e5c4c54c7b671fec2 Mon Sep 17 00:00:00 2001 From: Thomas Buckley-Houston Date: Wed, 10 Aug 2022 08:43:26 -0400 Subject: [PATCH 525/730] Readme updates --- README.md | 29 +++++++++++++---------------- 1 file changed, 13 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index c239e100..64b0f9b7 100644 --- a/README.md +++ b/README.md @@ -7,33 +7,27 @@ [![ci](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml) [![docs](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml) -This is a rust implementation of the [Automerge](https://github.com/automerge/automerge) file format and network protocol. +This is a Rust library implementation of the [Automerge](https://github.com/automerge/automerge) file format and network protocol. Its focus is to support the creation of Automerge implementations in other languages, currently; WASM, JS and C. A `libautomerge` if you will. -If you are looking for the origional `automerge-rs` project that can be used as a wasm backend to the javascript implementation, it can be found [here](https://github.com/automerge/automerge-rs/tree/automerge-1.0). +The original [Automerge](https://github.com/automerge/automerge) project (written in JS from the ground up) is still very much maintained and recommended. Indeed it is because of the success of that project that the next stage of Automerge is being explored here. Hopefully Rust can offer a more performant and scalable Automerge, opening up even more use cases. ## Status -This project has 4 components: +The project has 5 components: -1. [_automerge_](automerge) - a rust implementation of the library. 
This project is the most mature and being used in a handful of small applications. -2. [_automerge-wasm_](automerge-wasm) - a js/wasm interface to the underlying rust library. This api is generally mature and in use in a handful of projects as well. -3. [_automerge-js_](automerge-js) - this is a javascript library using the wasm interface to export the same public api of the primary automerge project. Currently this project passes all of automerge's tests but has not been used in any real project or packaged as an NPM. Alpha testers welcome. -4. [_automerge-c_](automerge-c) - this is a c library intended to be an ffi integration point for all other languages. It is currently a work in progress and not yet ready for any testing. +1. [_automerge_](automerge) - The main Rust implementation of the library. +2. [_automerge-wasm_](automerge-wasm) - A JS/WASM interface to the underlying Rust library. This API is generally mature and in use in a handful of projects. +3. [_automerge-js_](automerge-js) - This is a Javascript library using the WASM interface to export the same public API of the primary Automerge project. Currently this project passes all of Automerge's tests but has not been used in any real project or packaged as an NPM. Alpha testers welcome. +4. [_automerge-c_](automerge-c) - This is a C library intended to be an FFI integration point for all other languages. It is currently a work in progress and not yet ready for any testing. +5. [_automerge-cli_](automerge-cli) - An experimental CLI wrapper around the Rust library. Currently not functional. ## How? -The current iteration of automerge-rs is complicated to work with because it -adopts the frontend/backend split architecture of the JS implementation. This -architecture was necessary due to basic operations on the automerge opset being -too slow to perform on the UI thread. Recently @orionz has been able to improve -the performance to the point where the split is no longer necessary. 
This means -we can adopt a much simpler mutable API. - -The architecture is now built around the `OpTree`. This is a data structure +The magic of the architecture is built around the `OpTree`. This is a data structure which supports efficiently inserting new operations and realising values of existing operations. Most interactions with the `OpTree` are in the form of implementations of `TreeQuery` - a trait which can be used to traverse the -optree and producing state of some kind. User facing operations are exposed on +`OpTree` and producing state of some kind. User facing operations are exposed on an `Automerge` object, under the covers these operations typically instantiate some `TreeQuery` and run it over the `OpTree`. @@ -110,3 +104,6 @@ to list here. ## Benchmarking The [`edit-trace`](edit-trace) folder has the main code for running the edit trace benchmarking. + +## The old Rust project +If you are looking for the origional `automerge-rs` project that can be used as a wasm backend to the javascript implementation, it can be found [here](https://github.com/automerge/automerge-rs/tree/automerge-1.0). 
From 1a955e1f0d175d4972fd114bdd2ae2a7db636456 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 11 Aug 2022 18:24:21 -0500 Subject: [PATCH 526/730] fix some typescript errors - depricate default export of the wasm package --- automerge-js/.gitignore | 1 - automerge-js/index.d.ts | 111 ++++++++++++++++++++++++++++++ automerge-js/package.json | 6 +- automerge-wasm/README.md | 4 +- automerge-wasm/nodejs-index.js | 5 +- automerge-wasm/package.json | 4 +- automerge-wasm/test/readme.ts | 3 +- automerge-wasm/test/test.ts | 2 +- automerge-wasm/types/index.d.ts | 1 + automerge-wasm/types/package.json | 2 +- automerge-wasm/web-index.js | 14 ++-- 11 files changed, 135 insertions(+), 18 deletions(-) create mode 100644 automerge-js/index.d.ts diff --git a/automerge-js/.gitignore b/automerge-js/.gitignore index cfe564d7..05065cf0 100644 --- a/automerge-js/.gitignore +++ b/automerge-js/.gitignore @@ -1,4 +1,3 @@ /node_modules /yarn.lock dist -index.d.ts diff --git a/automerge-js/index.d.ts b/automerge-js/index.d.ts new file mode 100644 index 00000000..8972474f --- /dev/null +++ b/automerge-js/index.d.ts @@ -0,0 +1,111 @@ +import { API as LowLevelApi } from "automerge-types"; +import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, MaterializeValue } from "automerge-types"; +import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-types"; + +export { API as LowLevelApi } from "automerge-types"; +export { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "automerge-types"; +export { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-types"; + +export type ChangeOptions = { + message?: string; + time?: number; +}; + +export class Int { + value: number; + constructor(value: number); +} + +export class Uint { + value: number; + constructor(value: number); +} + +export class Float64 { + value: number; + constructor(value: number); +} + +export class Counter { + value: 
number; + constructor(value?: number); + valueOf(): number; + toString(): string; + toJSON(): number; +} + +export class Text { + elems: AutomergeValue[]; + constructor(text?: string | string[]); + get length(): number; + get(index: number): AutomergeValue; + [Symbol.iterator](): { + next(): { + done: boolean; + value: AutomergeValue; + } | { + done: boolean; + value?: undefined; + }; + }; + toString(): string; + toSpans(): AutomergeValue[]; + toJSON(): string; + set(index: number, value: AutomergeValue): void; + insertAt(index: number, ...values: AutomergeValue[]): void; + deleteAt(index: number, numDelete?: number): void; + map(callback: (e: AutomergeValue) => T): void; +} + +export type Doc = { + readonly [P in keyof T]: Doc; +}; + +export type ChangeFn = (doc: T) => void; + +export interface State { + change: DecodedChange; + snapshot: T; +} + +export type ScalarValue = string | number | null | boolean | Date | Counter | Uint8Array; + +export type AutomergeValue = ScalarValue | {[key: string]: AutomergeValue;} | Array; + +type Conflicts = { + [key: string]: AutomergeValue; +}; + +export function use(api: LowLevelApi): void; +export function init(actor?: ActorId): Doc; +export function clone(doc: Doc): Doc; +export function free(doc: Doc): void; +export function from(initialState: T | Doc, actor?: ActorId): Doc; +export function change(doc: Doc, options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc; +export function emptyChange(doc: Doc, options: ChangeOptions): unknown; +export function load(data: Uint8Array, actor: ActorId): Doc; +export function save(doc: Doc): Uint8Array; +export function merge(local: Doc, remote: Doc): Doc; +export function getActorId(doc: Doc): ActorId; +export function getConflicts(doc: Doc, prop: Prop): Conflicts | undefined; +export function getLastLocalChange(doc: Doc): Change | undefined; +export function getObjectId(doc: Doc): ObjID; +export function getChanges(oldState: Doc, newState: Doc): Change[]; +export function 
getAllChanges(doc: Doc): Change[]; +export function applyChanges(doc: Doc, changes: Change[]): [Doc]; +export function getHistory(doc: Doc): State[]; +export function equals(val1: Doc, val2: Doc): boolean; +export function encodeSyncState(state: SyncState): Uint8Array; +export function decodeSyncState(state: Uint8Array): SyncState; +export function generateSyncMessage(doc: Doc, inState: SyncState): [SyncState, SyncMessage | null]; +export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage): [Doc, SyncState, null]; +export function initSyncState(): SyncState; +export function encodeChange(change: DecodedChange): Change; +export function decodeChange(data: Change): DecodedChange; +export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; +export function decodeSyncMessage(message: SyncMessage): DecodedSyncMessage; +export function getMissingDeps(doc: Doc, heads: Heads): Heads; +export function getHeads(doc: Doc): Heads; +export function dump(doc: Doc): void; +export function toJS(doc: Doc): MaterializeValue; +export function uuid(): string; diff --git a/automerge-js/package.json b/automerge-js/package.json index 2f485322..ee94ee2b 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "0.1.5", + "version": "0.1.6", "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", @@ -47,7 +47,7 @@ "@types/uuid": "^8.3.4", "@typescript-eslint/eslint-plugin": "^5.25.0", "@typescript-eslint/parser": "^5.25.0", - "automerge-wasm": "^0.1.5", + "automerge-wasm": "^0.1.6", "eslint": "^8.15.0", "fast-sha256": "^1.3.0", "mocha": "^10.0.0", @@ -56,7 +56,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "automerge-types": "0.1.4", + "automerge-types": "0.1.5", "uuid": "^8.3" } } diff --git 
a/automerge-wasm/README.md b/automerge-wasm/README.md index add3d1b1..2fb6a2f0 100644 --- a/automerge-wasm/README.md +++ b/automerge-wasm/README.md @@ -20,7 +20,7 @@ Heads refers to a set of hashes that uniquely identifies a point in time in a do ### Using the Library and Creating a Document -This is a rust/wasm package and will work in a node or web environment. Node is able to load wasm synchronously but a web environment is not. The default import of the package is a function that returns a promise that resolves once the wasm is loaded. +This is a rust/wasm package and will work in a node or web environment. Node is able to load wasm synchronously but a web environment is not. The 'init' export of the package is a function that returns a promise that resolves once the wasm is loaded. This creates a document in node. The memory allocated is handled by wasm and isn't managed by the javascript garbage collector and thus needs to be manually freed. @@ -35,7 +35,7 @@ This creates a document in node. 
The memory allocated is handled by wasm and is While this will work in both node and in a web context ```javascript - import init, { create } from "automerge-wasm" + import { init, create } from "automerge-wasm" init().then(_ => { let doc = create() diff --git a/automerge-wasm/nodejs-index.js b/automerge-wasm/nodejs-index.js index 58eddd76..07087e59 100644 --- a/automerge-wasm/nodejs-index.js +++ b/automerge-wasm/nodejs-index.js @@ -2,5 +2,6 @@ let wasm = require("./bindgen") module.exports = wasm module.exports.load = module.exports.loadDoc delete module.exports.loadDoc -Object.defineProperty(module.exports, "__esModule", { value: true }); -module.exports.default = () => (new Promise((resolve,reject) => { resolve(module.exports) })) +Object.defineProperty(module.exports, "__esModule", { value: true }) +module.exports.init = () => (new Promise((resolve,reject) => { resolve(module.exports) })) +module.exports.default = module.exports.init diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index cfeea401..50744364 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.5", + "version": "0.1.6", "license": "MIT", "files": [ "README.md", @@ -51,6 +51,6 @@ "typescript": "^4.6.4" }, "dependencies": { - "automerge-types": "0.1.4" + "automerge-types": "0.1.5" } } diff --git a/automerge-wasm/test/readme.ts b/automerge-wasm/test/readme.ts index 5917cbe9..5dcff10e 100644 --- a/automerge-wasm/test/readme.ts +++ b/automerge-wasm/test/readme.ts @@ -1,8 +1,7 @@ import { describe, it } from 'mocha'; import * as assert from 'assert' //@ts-ignore -import init from '..' -import { create, load } from '..' +import { init, create, load } from '..' 
describe('Automerge', () => { describe('Readme Examples', () => { diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 1a29b962..5a3ff68e 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -3,7 +3,7 @@ import { describe, it } from 'mocha'; import assert from 'assert' //@ts-ignore import { BloomFilter } from './helpers/sync' -import init, { create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' +import { init, create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' import { DecodedSyncMessage, Hash } from '..'; function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { diff --git a/automerge-wasm/types/index.d.ts b/automerge-wasm/types/index.d.ts index 2e6527f4..68277203 100644 --- a/automerge-wasm/types/index.d.ts +++ b/automerge-wasm/types/index.d.ts @@ -206,3 +206,4 @@ export class SyncState { } export default function init (): Promise; +export function init (): Promise; diff --git a/automerge-wasm/types/package.json b/automerge-wasm/types/package.json index 111224cb..7b6852ae 100644 --- a/automerge-wasm/types/package.json +++ b/automerge-wasm/types/package.json @@ -6,7 +6,7 @@ "description": "typescript types for low level automerge api", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.4", + "version": "0.1.5", "license": "MIT", "files": [ "LICENSE", diff --git a/automerge-wasm/web-index.js b/automerge-wasm/web-index.js index 1ce280b3..6510fe05 100644 --- a/automerge-wasm/web-index.js +++ b/automerge-wasm/web-index.js @@ -39,9 +39,15 @@ let api = { importSyncState } -import init from "./bindgen.js" -export default function() { - return new 
Promise((resolve,reject) => init().then(() => { - resolve({ ... api, load, create, foo: "bar" }) +import wasm_init from "./bindgen.js" + +export function init() { + return new Promise((resolve,reject) => wasm_init().then(() => { + resolve({ ... api, load, create }) })) } + +// deprecating default export +export default function() { + return init() +} From d1a926bcbe8c423cbb77202eac5a9f932fc312fe Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 11 Aug 2022 18:49:42 -0500 Subject: [PATCH 527/730] fix ownKeys bug in automerge-js --- automerge-js/package.json | 2 +- automerge-js/src/proxies.ts | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/automerge-js/package.json b/automerge-js/package.json index ee94ee2b..22f090b7 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "0.1.6", + "version": "0.1.8", "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index a890ab38..a19a1b9f 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -194,7 +194,9 @@ const MapHandler = { ownKeys (target) { const { context, objectId, heads} = target - return context.keys(objectId, heads) + // FIXME - this is a tmp workaround until we fix the dupe key bug in keys() + let keys = context.keys(objectId, heads) + return [...new Set(keys)] }, } From 56563a4a60063e564d6663713ba4e86a2b4bc773 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sun, 24 Jul 2022 22:31:03 +0100 Subject: [PATCH 528/730] Add a storage-v2 feature flag The new storage implementation is sufficiently large a change that it warrants a period of testing.
To facilitate testing the new and old implementations side by side we slightly abuse cargo's feature flags and add a storage-v2 feature which enables the new storage and disables the old storage. Note that this commit doesn't use `--all-features` when building the workspace in scripts/ci/build-test. This will be rectified in a later commit once the storage-v2 feature is integrated into the other crates in the workspace. Signed-off-by: Alex Good --- .github/workflows/ci.yaml | 42 ++++++++++++++++++++++++++++++++ Cargo.toml | 1 + automerge/Cargo.toml | 1 + scripts/ci/build-test | 4 +-- scripts/ci/build-test-storage-v2 | 6 +++++ scripts/ci/js_tests | 2 ++ scripts/ci/lint | 3 ++- scripts/ci/run | 1 + 8 files changed, 57 insertions(+), 3 deletions(-) create mode 100755 scripts/ci/build-test-storage-v2 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 358baee4..8ec3507f 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -152,3 +152,45 @@ jobs: - run: ./scripts/ci/build-test shell: bash + linux-storage-v2: + name: 'storage-v2: Linux' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: 1.60.0 + default: true + - uses: Swatinem/rust-cache@v1 + - run: ./scripts/ci/build-test-storage-v2 + shell: bash + + macos-storage-2: + name: 'storage-v2: MacOS' + runs-on: macos-latest + steps: + - uses: actions/checkout@v2 + - uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: 1.60.0 + default: true + - uses: Swatinem/rust-cache@v1 + - run: ./scripts/ci/build-test-storage-v2 + shell: bash + + windows-storage-v2: + name: 'storage-v2: Windows' + runs-on: windows-latest + steps: + - uses: actions/checkout@v2 + - uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: 1.60.0 + default: true + - uses: Swatinem/rust-cache@v1 + - run: ./scripts/ci/build-test-storage-v2 + shell: bash + diff --git a/Cargo.toml b/Cargo.toml index 
7eb899e8..9add8e60 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,7 @@ members = [ "automerge-wasm", "edit-trace", ] +resolver = "2" [profile.release] debug = true diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index 1dbd0833..4b9d2bd6 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -11,6 +11,7 @@ description = "A JSON-like data structure (a CRDT) that can be modified concurre [features] optree-visualisation = ["dot", "rand"] wasm = ["js-sys", "wasm-bindgen", "web-sys", "uuid/wasm-bindgen"] +storage-v2 = [] [dependencies] hex = "^0.4.3" diff --git a/scripts/ci/build-test b/scripts/ci/build-test index dbd89f5d..f4b83d0f 100755 --- a/scripts/ci/build-test +++ b/scripts/ci/build-test @@ -1,6 +1,6 @@ #!/usr/bin/env bash set -eoux pipefail -cargo build --workspace --all-features +cargo build --workspace --features optree-visualisation,wasm -RUST_LOG=error cargo test --workspace --all-features +RUST_LOG=error cargo test --workspace diff --git a/scripts/ci/build-test-storage-v2 b/scripts/ci/build-test-storage-v2 new file mode 100755 index 00000000..8d05552a --- /dev/null +++ b/scripts/ci/build-test-storage-v2 @@ -0,0 +1,6 @@ +#!/usr/bin/env bash +set -eoux pipefail + +cargo build -p automerge --features storage-v2 --all-targets + +RUST_LOG=error cargo test -p automerge --features storage-v2 diff --git a/scripts/ci/js_tests b/scripts/ci/js_tests index 9b1d0e77..b203dea4 100755 --- a/scripts/ci/js_tests +++ b/scripts/ci/js_tests @@ -1,3 +1,5 @@ +set -e + THIS_SCRIPT=$(dirname "$0"); WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; JS_PROJECT=$THIS_SCRIPT/../../automerge-js; diff --git a/scripts/ci/lint b/scripts/ci/lint index 1b29d909..505d2c68 100755 --- a/scripts/ci/lint +++ b/scripts/ci/lint @@ -4,4 +4,5 @@ set -eoux pipefail # Force clippy to consider all local sources # https://github.com/rust-lang/rust-clippy/issues/4612 find . 
-name "*.rs" -not -path "./target/*" -exec touch "{}" + -cargo clippy --all-features --all-targets -- -D warnings +cargo clippy --all-targets -- -D warnings +cargo clippy -p automerge --features storage-v2 diff --git a/scripts/ci/run b/scripts/ci/run index 423b995c..89b86277 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -4,6 +4,7 @@ set -eou pipefail ./scripts/ci/fmt ./scripts/ci/lint ./scripts/ci/build-test +./scripts/ci/build-test-storage-v2 ./scripts/ci/rust-docs ./scripts/ci/advisory ./scripts/ci/wasm_tests From 88f8976d0a95a022bd52d2dca659cf2652fe3d4d Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 22 Aug 2022 14:58:13 -0500 Subject: [PATCH 529/730] automerge-js 0.1.9 --- automerge-js/index.d.ts | 1 + automerge-js/package.json | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/automerge-js/index.d.ts b/automerge-js/index.d.ts index 8972474f..0f853e5b 100644 --- a/automerge-js/index.d.ts +++ b/automerge-js/index.d.ts @@ -77,6 +77,7 @@ type Conflicts = { }; export function use(api: LowLevelApi): void; +export function getBackend(doc: Doc) : LowLevelApi; export function init(actor?: ActorId): Doc; export function clone(doc: Doc): Doc; export function free(doc: Doc): void; diff --git a/automerge-js/package.json b/automerge-js/package.json index 22f090b7..165c6ae5 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "0.1.8", + "version": "0.1.9", "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", From d785c319b878be2281ee51bca8be0a152a35382d Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 25 Jul 2022 15:44:15 +0100 Subject: [PATCH 530/730] Add ScalarValue::Unknown The columnar storage format allows for values which we do not know the type of.
In order that we can handle these types in a forward compatible way we add ScalarValue::Unknown. Signed-off-by: Alex Good --- automerge-c/src/result.rs | 27 ++++++++++++++++++++++++++ automerge-cli/src/export.rs | 12 +++++++----- automerge-wasm/src/value.rs | 2 ++ automerge/src/columnar.rs | 6 ++++++ automerge/src/legacy/serde_impls/op.rs | 3 +++ automerge/src/value.rs | 2 ++ automerge/tests/helpers/mod.rs | 8 ++++++++ 7 files changed, 55 insertions(+), 5 deletions(-) diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index f03e8db4..9b8c811d 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -133,6 +133,8 @@ pub enum AMvalue<'a> { Timestamp(i64), /// A 64-bit unsigned integer variant. Uint(u64), + /// An unknown type of scalar value variant. + Unknown(AMUnknownValue), } impl<'a> PartialEq for AMvalue<'a> { @@ -159,6 +161,7 @@ impl<'a> PartialEq for AMvalue<'a> { (SyncState(lhs), SyncState(rhs)) => *lhs == *rhs, (Timestamp(lhs), Timestamp(rhs)) => lhs == rhs, (Uint(lhs), Uint(rhs)) => lhs == rhs, + (Unknown(lhs), Unknown(rhs)) => lhs == rhs, (Null, Null) | (Void, Void) => true, _ => false, } @@ -187,6 +190,10 @@ impl From<(&am::Value<'_>, &RefCell>)> for AMvalue<'_> { } am::ScalarValue::Timestamp(timestamp) => AMvalue::Timestamp(*timestamp), am::ScalarValue::Uint(uint) => AMvalue::Uint(*uint), + am::ScalarValue::Unknown { bytes, type_code } => AMvalue::Unknown(AMUnknownValue { + bytes: bytes.as_slice().into(), + type_code: *type_code, + }), }, // \todo Confirm that an object variant should be ignored // when there's no object ID variant. @@ -199,6 +206,8 @@ impl From<&AMvalue<'_>> for u8 { fn from(value: &AMvalue) -> Self { use AMvalue::*; + // Note that these numbers are the order of appearance of the respective variants in the + // source of AMValue. match value { ActorId(_) => 1, Boolean(_) => 2, @@ -220,6 +229,7 @@ impl From<&AMvalue<'_>> for u8 { SyncState(_) => 18, Timestamp(_) => 19, Uint(_) => 20, + Unknown(..) 
=> 21, Void => 0, } } @@ -249,6 +259,13 @@ impl TryFrom<&AMvalue<'_>> for am::ScalarValue { Timestamp(t) => Ok(am::ScalarValue::Timestamp(*t)), Uint(u) => Ok(am::ScalarValue::Uint(*u)), Null => Ok(am::ScalarValue::Null), + Unknown(AMUnknownValue { bytes, type_code }) => { + let slice = unsafe { std::slice::from_raw_parts(bytes.src, bytes.count) }; + Ok(am::ScalarValue::Unknown { + bytes: slice.to_vec(), + type_code: *type_code, + }) + } ActorId(_) => Err(InvalidValueType { expected, unexpected: type_name::().to_string(), @@ -877,3 +894,13 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> }; content } + +/// \struct AMUnknownValue +/// \brief A value (typically for a 'set' operation) which we don't know the type of +/// +#[derive(PartialEq)] +#[repr(C)] +pub struct AMUnknownValue { + bytes: AMbyteSpan, + type_code: u8, +} diff --git a/automerge-cli/src/export.rs b/automerge-cli/src/export.rs index 937ba794..49cded8f 100644 --- a/automerge-cli/src/export.rs +++ b/automerge-cli/src/export.rs @@ -50,11 +50,13 @@ fn list_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value { fn scalar_to_json(val: &am::ScalarValue) -> serde_json::Value { match val { am::ScalarValue::Str(s) => serde_json::Value::String(s.to_string()), - am::ScalarValue::Bytes(b) => serde_json::Value::Array( - b.iter() - .map(|byte| serde_json::Value::Number((*byte).into())) - .collect(), - ), + am::ScalarValue::Bytes(b) | am::ScalarValue::Unknown { bytes: b, ..
} => { + serde_json::Value::Array( + b.iter() + .map(|byte| serde_json::Value::Number((*byte).into())) + .collect(), + ) + } am::ScalarValue::Int(n) => serde_json::Value::Number((*n).into()), am::ScalarValue::Uint(n) => serde_json::Value::Number((*n).into()), am::ScalarValue::F64(n) => serde_json::Number::from_f64(*n) diff --git a/automerge-wasm/src/value.rs b/automerge-wasm/src/value.rs index 5b20cc20..98ea5f1b 100644 --- a/automerge-wasm/src/value.rs +++ b/automerge-wasm/src/value.rs @@ -19,6 +19,7 @@ impl<'a> From> for JsValue { am::ScalarValue::Timestamp(v) => js_sys::Date::new(&(*v as f64).into()).into(), am::ScalarValue::Boolean(v) => (*v).into(), am::ScalarValue::Null => JsValue::null(), + am::ScalarValue::Unknown { bytes, .. } => Uint8Array::from(bytes.as_slice()).into(), } } } @@ -34,5 +35,6 @@ pub(crate) fn datatype(s: &am::ScalarValue) -> String { am::ScalarValue::Timestamp(_) => "timestamp".into(), am::ScalarValue::Boolean(_) => "boolean".into(), am::ScalarValue::Null => "null".into(), + am::ScalarValue::Unknown { type_code, .. 
} => format!("unknown{}", type_code), } } diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index 25748a25..ff260e4d 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -592,6 +592,9 @@ impl ValEncoder { let len = (*n).encode(&mut self.raw).unwrap(); self.len.append_value(len << 4 | VALUE_TYPE_IEEE754); } + ScalarValue::Unknown { type_code, bytes } => { + panic!("unknown value") + } } } @@ -636,6 +639,9 @@ impl ValEncoder { let len = (*n).encode(&mut self.raw).unwrap(); self.len.append_value(len << 4 | VALUE_TYPE_IEEE754); } + ScalarValue::Unknown { type_code, bytes } => { + panic!("unknown value") + } } } diff --git a/automerge/src/legacy/serde_impls/op.rs b/automerge/src/legacy/serde_impls/op.rs index 0f7ef8c2..a3719fd6 100644 --- a/automerge/src/legacy/serde_impls/op.rs +++ b/automerge/src/legacy/serde_impls/op.rs @@ -216,6 +216,9 @@ impl<'de> Deserialize<'de> for Op { Some(ScalarValue::Bytes(s)) => { Err(Error::invalid_value(Unexpected::Bytes(&s), &"a number")) } + Some(ScalarValue::Unknown { bytes, .. }) => { + Err(Error::invalid_value(Unexpected::Bytes(&bytes), &"a number")) + } Some(ScalarValue::Str(s)) => { Err(Error::invalid_value(Unexpected::Str(&s), &"a number")) } diff --git a/automerge/src/value.rs b/automerge/src/value.rs index 1df87ace..633bbeaf 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -427,6 +427,7 @@ pub enum ScalarValue { Counter(Counter), Timestamp(i64), Boolean(bool), + Unknown { type_code: u8, bytes: Vec }, Null, } @@ -718,6 +719,7 @@ impl fmt::Display for ScalarValue { ScalarValue::Timestamp(i) => write!(f, "Timestamp: {}", i), ScalarValue::Boolean(b) => write!(f, "{}", b), ScalarValue::Null => write!(f, "null"), + ScalarValue::Unknown { type_code, .. 
} => write!(f, "unknown type {}", type_code), } } } diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index 864fd1cf..fd3ba4e9 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -236,6 +236,7 @@ pub enum OrdScalarValue { Timestamp(i64), Boolean(bool), Null, + Unknown { type_code: u8, bytes: Vec }, } impl From for OrdScalarValue { @@ -250,6 +251,9 @@ impl From for OrdScalarValue { automerge::ScalarValue::Timestamp(v) => OrdScalarValue::Timestamp(v), automerge::ScalarValue::Boolean(v) => OrdScalarValue::Boolean(v), automerge::ScalarValue::Null => OrdScalarValue::Null, + automerge::ScalarValue::Unknown { type_code, bytes } => { + OrdScalarValue::Unknown { type_code, bytes } + } } } } @@ -266,6 +270,10 @@ impl From<&OrdScalarValue> for automerge::ScalarValue { OrdScalarValue::Timestamp(v) => automerge::ScalarValue::Timestamp(*v), OrdScalarValue::Boolean(v) => automerge::ScalarValue::Boolean(*v), OrdScalarValue::Null => automerge::ScalarValue::Null, + OrdScalarValue::Unknown { type_code, bytes } => automerge::ScalarValue::Unknown { + type_code: *type_code, + bytes: bytes.to_vec(), + }, } } } From e1295b9daaf056ff6c4c652993ba4b28f7baad5a Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sun, 24 Jul 2022 21:42:39 +0100 Subject: [PATCH 531/730] Add a simple parser combinator library We have parsing needs which are slightly more complex than just reading stuff from a buffer, but not complex enough to justify a dependency on a parsing library. Implement a simple parser combinator library for use in parsing the binary storage format. 
Signed-off-by: Alex Good --- automerge/src/lib.rs | 2 + automerge/src/storage.rs | 2 + automerge/src/storage/parse.rs | 594 ++++++++++++++++++++++++++ automerge/src/storage/parse/leb128.rs | 118 +++++ 4 files changed, 716 insertions(+) create mode 100644 automerge/src/storage.rs create mode 100644 automerge/src/storage/parse.rs create mode 100644 automerge/src/storage/parse/leb128.rs diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 19c9947b..3bdf5354 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -77,6 +77,8 @@ mod op_tree; mod options; mod parents; mod query; +#[cfg(feature = "storage-v2")] +mod storage; pub mod sync; pub mod transaction; mod types; diff --git a/automerge/src/storage.rs b/automerge/src/storage.rs new file mode 100644 index 00000000..cad6f96e --- /dev/null +++ b/automerge/src/storage.rs @@ -0,0 +1,2 @@ +#[allow(dead_code)] +pub(crate) mod parse; diff --git a/automerge/src/storage/parse.rs b/automerge/src/storage/parse.rs new file mode 100644 index 00000000..828579f8 --- /dev/null +++ b/automerge/src/storage/parse.rs @@ -0,0 +1,594 @@ +//! A small parser combinator library inspired by [`nom`](https://docs.rs/crate/nom/5.0.0). +//! +//! The primary reason for using this rather than `nom` is that this is only a few hundred lines of +//! code because we don't need a fully fledged combinator library - automerge is a low level +//! library so it's good to avoid dependencies where we can. +//! +//! # Basic Usage +//! +//! The basic components of this library are [`Parser`]s, which parse [`Input`]s and produce +//! [`ParseResult`]s. `Input` is a combination of an `&[u8]` which is the incoming data along with +//! the position it has read up to in the data. `Parser` is a trait but has a blanket `impl` for +//! `FnMut(Input<'a>) -> ParseResult<'a, O, E>` so in practice you can think of parsers as a +//! function which takes some input and returns a result plus any remaining input. This final part +//! 
is encapsulated by the `ParseResult` which is a type alias for a `Result`. This means that +//! typical usage will look something like this: +//! +//! ```rust,ignore +//! use automerge::storage::parse::{ParseResult, take_1}; +//! fn do_something<'a>(input: Input<'a>) -> ParseResult<'a, [u8; 3], ()> { +//! let (i, a) = take_1::<()>(input)?; +//! let (i, b) = take_1::<()>(i)?; +//! let (i, c) = take_1::<()>(i)?; +//! let result = [a, b, c]; +//! Ok((i, result)) +//! } +//! +//! let input = Input::new(&[b"12345"]); +//! let result = do_something(input); +//! if let Ok((_, result)) = result { +//! assert_eq!(&result, &['1', '2', '3']); +//! } else { +//! panic!(); +//! } +//! ``` +//! +//! Three things to note here: +//! +//! 1. The rebinding of the input (in `i`) after each call to `take_1`, this is how parser state is passed from +//! one call to the next +//! 2. We return a tuple containing the remaining input plus the result +//! 3. `take_1` has a type parameter we must pass to it representing the error type. Generally you +//! don't need to do that as type inference is often good enough. +//! +//! # Errors +//! +//! The error branch of `ParseError` is an enum containing either `ParseError::Incomplete` +//! indicating that with more input we might be able to succeed, or a `ParseError::Error`. The +//! latter branch is where parser specific errors (e.g. "this u8 is not a valid chunk type") are +//! passed. This has implications for returning and handling errors. +//! +//! ## Returning Errors +//! +//! If you want to return an error from a parser you will need to wrap the error in +//! `ParseError::Error`. +//! +//! ```rust,ignore +//! struct MyError; +//! fn my_bad_parser() -> ParseResult<(), MyError> { +//! Err(ParseError::Error(MyError)) +//! } +//! ``` +//! +//! ## Handling Errors +//! +//! Handling errors is generally important when you want to compose parsers with different error +//! types. 
In this case you will often have an error type you want to map each of the underlying +//! errors into. For this purpose you can use `ParseError::lift` +//! +//! ```rust,ignore +//! # use automerge::parse::{ParseResult, Input}; +//! #[derive(thiserror::Error, Debug)] +//! #[error("this is a bad string")] +//! struct BadString; +//! +//! #[derive(thiserror::Error, Debug)] +//! #[error("this is a bad number")] +//! struct BadNumber; +//! +//! fn parse_string<'a>(input: Input<'a>) -> ParseResult<'a, String, BadString> { +//! Err(ParseError::Error(BadString)) +//! } +//! +//! fn parse_number<'a>(input: Input<'a>) -> ParseResult<'a, u32, BadNumber> { +//! Err(ParseError::Error(BadNumber)) +//! } +//! +//! #[derive(thiserror::Error, Debug)] +//! struct CombinedError{ +//! #[error(transparent)] +//! String(#[from] BadString), +//! #[error(transparent)] +//! Number(#[from] BadNumber), +//! } +//! +//! fn parse_string_then_number<'a>(input: Input<'a>) -> ParseResult<'a, (String, u32), CombinedError> { +//! // Note the `e.lift()` here, this works because of the `From` impl generated by +//! // `thiserror::Error` +//! let (i, thestring) = parse_string(input).map_err(|e| e.lift())?; +//! let (i, thenumber) = parse_number(i).map_err(|e| e.lift())?; +//! Ok((i, (thestring, thenumber))) +//! } +//! ``` + +use core::num::NonZeroUsize; +use std::convert::TryInto; + +pub(crate) mod leb128; +use crate::{ActorId, ChangeHash}; + +const HASH_SIZE: usize = 32; // 256 bits = 32 bytes + +#[allow(unused_imports)] +pub(crate) use self::leb128::{leb128_i32, leb128_i64, leb128_u32, leb128_u64, nonzero_leb128_u64}; + +pub(crate) type ParseResult<'a, O, E> = Result<(Input<'a>, O), ParseError>; + +/// The input to be parsed. This is a combination of an underlying slice, plus an offset into that +/// slice. Consequently it is very cheap to copy. 
+#[derive(PartialEq, Clone, Copy)] +pub(crate) struct Input<'a> { + bytes: &'a [u8], + position: usize, + original: &'a [u8], +} + +impl<'a> std::fmt::Debug for Input<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "Input(len: {}, position: {}, original_len: {})", + self.bytes.len(), + self.position, + self.original.len() + ) + } +} + +impl<'a> Input<'a> { + pub(crate) fn new(bytes: &'a [u8]) -> Self { + Self { + bytes, + position: 0, + original: bytes, + } + } + + #[cfg(test)] + pub(in crate::storage::parse) fn with_position(bytes: &'a [u8], position: usize) -> Input<'a> { + let remaining = &bytes[position..]; + Self { + bytes: remaining, + position, + original: bytes, + } + } + + pub(crate) fn empty() -> Self { + Self { + bytes: &[], + position: 0, + original: &[], + } + } + + fn take_1(&self) -> ParseResult<'a, u8, E> { + if let Some(need) = NonZeroUsize::new(1_usize.saturating_sub(self.bytes.len())) { + Err(ParseError::Incomplete(Needed::Size(need))) + } else { + let (result, remaining) = self.bytes.split_at(1); + let new_input = Input { + bytes: remaining, + original: self.original, + position: self.position + 1, + }; + Ok((new_input, result[0])) + } + } + + fn take_n(&self, n: usize) -> ParseResult<'a, &'a [u8], E> { + if let Some(need) = NonZeroUsize::new(n.saturating_sub(self.bytes.len())) { + Err(ParseError::Incomplete(Needed::Size(need))) + } else { + let (result, remaining) = self.bytes.split_at(n); + let new_input = Input { + bytes: remaining, + original: self.original, + position: self.position + n, + }; + Ok((new_input, result)) + } + } + + fn take_4(&self) -> ParseResult<'a, [u8; 4], E> { + if let Some(need) = NonZeroUsize::new(4_usize.saturating_sub(self.bytes.len())) { + Err(ParseError::Incomplete(Needed::Size(need))) + } else { + let (result, remaining) = self.bytes.split_at(4); + let new_input = Input { + bytes: remaining, + original: self.original, + position: self.position + 4, + }; + Ok((new_input, 
result.try_into().expect("we checked the length"))) + } + } + + fn range_of(&self, mut parser: P) -> ParseResult<'a, RangeOf, E> + where + P: Parser<'a, R, E>, + { + let (new_input, value) = parser.parse(*self)?; + let range = self.position..new_input.position; + Ok((new_input, RangeOf { range, value })) + } + + fn rest(&self) -> ParseResult<'a, &'a [u8], E> { + let position = self.position + self.bytes.len(); + let new_input = Self { + position, + original: self.original, + bytes: &[], + }; + Ok((new_input, self.bytes)) + } + + fn truncate(&self, length: usize) -> Input<'a> { + let length = if length > self.bytes.len() { + self.bytes.len() + } else { + length + }; + Input { + bytes: &self.bytes[..length], + position: self.position, + original: &self.original[..(self.position + length)], + } + } + + fn skip(&self, length: usize) -> Input<'a> { + if length > self.bytes.len() { + Input { + bytes: &[], + position: self.bytes.len(), + original: self.original, + } + } else { + Input { + bytes: &self.bytes[length..], + position: self.position + length, + original: &self.original[(self.position + length)..], + } + } + } + + /// Split this input into two separate inputs, the first is the same as the current input but + /// with the remaining unconsumed_bytes set to at most length. The remaining `Input` is the bytes + /// after `length`. + /// + /// This is useful if you are parsing input which contains length delimited chunks. In this + /// case you may have a single input where you parse a header, then you want to parse the + /// current input up until the length and then parse the next chunk from the remaining input.
+ /// For example: + /// + /// ```rust,ignore + /// # use automerge::storage::parse::{Input, ParseResult}; + /// + /// fn parse_chunk(input: Input<'_>) -> ParseResult<(), ()> { + /// Ok(()) + /// } + /// + /// # fn main() -> ParseResult<(), ()> { + /// let incoming_bytes: &[u8] = todo!(); + /// let mut input = Input::new(incoming_bytes); + /// let mut chunks = Vec::new(); + /// while !input.is_empty() { + /// let (i, chunk_len) = leb128_u64(input)?; + /// let Split{first: i, remaining} = i.split(chunk_len); + /// // Note that here, the `i` we pass into `parse_chunk` has already parsed the header, + /// // so the logic of the `parse_chunk` function doesn't need to reimplement the header + /// // parsing + /// let (i, chunk) = parse_chunk(i)?; + /// let input = remaining; + /// } + /// parse_chunk(i); + /// # } + /// ``` + pub(crate) fn split(&self, length: usize) -> Split<'a> { + Split { + first: self.truncate(length), + remaining: self.skip(length), + } + } + + /// Return a new `Input` which forgets about the consumed input. The new `Input` will have its + /// position set to 0. This is equivalent to `Input::new(self.bytes())` + pub(crate) fn reset(&self) -> Input<'a> { + Input::new(self.bytes) + } + + /// Check if there are any more bytes left to consume + pub(crate) fn is_empty(&self) -> bool { + self.bytes.is_empty() + } + + /// The bytes which have not yet been consumed + pub(crate) fn unconsumed_bytes(&self) -> &'a [u8] { + self.bytes + } + + /// The bytes behind this input - including bytes which have been consumed + pub(crate) fn bytes(&self) -> &'a [u8] { + self.original + } +} + +/// Returned by [`Input::split`] +pub(crate) struct Split<'a> { + /// The input up to the length passed to `split`. This is identical to the original input + /// except that [`Input::bytes`] and [`Input::unconsumed_bytes`] will only return the original + /// input up to `length` bytes from the point at which `split` was called.
+ pub(crate) first: Input<'a>, + /// The remaining input after the length passed to `split`. This is equivalent to + /// + /// ```rust,ignore + /// # use automerge::storage::parse::Input; + /// # let split_length = 1; + /// let original_input = todo!(); + /// Input::new(original_input.bytes()[split_length..]) + /// ``` + pub(crate) remaining: Input<'a>, +} + +pub(crate) trait Parser<'a, O, E> { + fn parse(&mut self, input: Input<'a>) -> ParseResult<'a, O, E>; +} + +impl<'a, O, F, E> Parser<'a, O, E> for F +where + F: FnMut(Input<'a>) -> ParseResult<'a, O, E>, +{ + fn parse(&mut self, input: Input<'a>) -> ParseResult<'a, O, E> { + (self)(input) + } +} + +#[derive(Clone, Debug, PartialEq)] +pub(crate) enum ParseError { + /// Some application specific error occurred + Error(E), + /// A combinator requested more data than we have available + Incomplete(Needed), +} + +impl ParseError { + /// Convert any underlying `E` into `F`. This is useful when you are composing parsers + pub(crate) fn lift(self) -> ParseError + where + F: From, + { + match self { + Self::Error(e) => ParseError::Error(F::from(e)), + Self::Incomplete(n) => ParseError::Incomplete(n), + } + } +} + +impl std::fmt::Display for ParseError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Error(e) => write!(f, "{}", e), + Self::Incomplete(_) => write!(f, "not enough data"), + } + } +} + +impl std::error::Error for ParseError {} + +/// How much more input we need +#[derive(Clone, Debug, PartialEq)] +pub(crate) enum Needed { + /// We don't know how much more + #[allow(dead_code)] + Unknown, + /// We need _at least_ this much more + Size(NonZeroUsize), +} + +/// Map the function `f` over the result of `parser` returning a new parser +pub(crate) fn map<'a, O1, O2, F, G, Er>( + mut parser: F, + mut f: G, +) -> impl FnMut(Input<'a>) -> ParseResult<'a, O2, Er> +where + F: Parser<'a, O1, Er>, + G: FnMut(O1) -> O2, +{ + move |input: Input<'a>| { + let (input, o1) = 
parser.parse(input)?; + Ok((input, f(o1))) + } +} + +/// Pull one byte from the input +pub(crate) fn take1(input: Input<'_>) -> ParseResult<'_, u8, E> { + input.take_1() +} + +/// Parse an array of four bytes from the input +pub(crate) fn take4(input: Input<'_>) -> ParseResult<'_, [u8; 4], E> { + input.take_4() +} + +/// Parse a slice of length `n` from `input` +pub(crate) fn take_n<'a, E>(n: usize, input: Input<'a>) -> ParseResult<'_, &'a [u8], E> { + input.take_n(n) +} + +/// Parse a length prefixed collection of `g` +/// +/// This first parses a LEB128 encoded `u64` from the input, then applies the parser `g` this many +/// times, returning the result in a `Vec`. +pub(crate) fn length_prefixed<'a, G, O, Er>( + mut g: G, +) -> impl FnMut(Input<'a>) -> ParseResult<'a, Vec, Er> +where + G: Parser<'a, O, Er>, + Er: From, +{ + move |input: Input<'a>| { + let (i, count) = leb128_u64(input).map_err(|e| e.lift())?; + let mut res = Vec::new(); + let mut input = i; + for _ in 0..count { + match g.parse(input) { + Ok((i, e)) => { + input = i; + res.push(e); + } + Err(e) => { + return Err(e); + } + } + } + Ok((input, res)) + } +} + +/// Parse a length prefixed array of bytes from the input +/// +/// This first parses a LEB128 encoded `u64` from the input, then parses this many bytes from the +/// underlying input. +pub(crate) fn length_prefixed_bytes<'a, E>(input: Input<'a>) -> ParseResult<'_, &'a [u8], E> +where + E: From, +{ + let (i, len) = leb128_u64(input).map_err(|e| e.lift())?; + take_n(len as usize, i) +} + +/// Apply two parsers, returning the result in a 2 tuple +/// +/// This first applies `f`, then `g` and returns the result as `(f, g)`. 
+pub(super) fn tuple2<'a, F, E, G, H, Er>( + mut f: F, + mut g: G, +) -> impl FnMut(Input<'a>) -> ParseResult<'_, (E, H), Er> +where + F: Parser<'a, E, Er>, + G: Parser<'a, H, Er>, +{ + move |input: Input<'a>| { + let (i, one) = f.parse(input)?; + let (i, two) = g.parse(i)?; + Ok((i, (one, two))) + } +} + +/// Apply the parser `f` `n` times and reutrn the result in a `Vec` +pub(super) fn apply_n<'a, F, E, Er>( + n: usize, + mut f: F, +) -> impl FnMut(Input<'a>) -> ParseResult<'_, Vec, Er> +where + F: Parser<'a, E, Er>, +{ + move |input: Input<'a>| { + let mut i = input; + let mut result = Vec::new(); + for _ in 0..n { + let (new_i, e) = f.parse(i)?; + result.push(e); + i = new_i; + } + Ok((i, result)) + } +} + +/// Parse a length prefixed actor ID +/// +/// This first parses a LEB128 encoded u64 from the input, then the corresponding number of bytes +/// which are returned wrapped in an `ActorId` +pub(crate) fn actor_id(input: Input<'_>) -> ParseResult<'_, ActorId, E> +where + E: From, +{ + let (i, length) = leb128_u64(input).map_err(|e| e.lift())?; + let (i, bytes) = take_n(length as usize, i)?; + Ok((i, bytes.into())) +} + +/// Parse a change hash. +/// +/// This is just a nice wrapper around `take_4` +pub(crate) fn change_hash(input: Input<'_>) -> ParseResult<'_, ChangeHash, E> { + let (i, bytes) = take_n(HASH_SIZE, input)?; + let byte_arr: ChangeHash = bytes.try_into().expect("we checked the length above"); + Ok((i, byte_arr)) +} + +#[derive(thiserror::Error, Debug)] +#[error("invalid UTF-8")] +pub(crate) struct InvalidUtf8; + +/// Parse a length prefixed UTF-8 string +/// +/// This first parses a LEB128 encode `u64` from the input, then parses this many bytes from the +/// input before attempting to convert these bytes into a `String`, returning +/// `ParseError::Error(InvalidUtf8)` if that fails. 
+pub(crate) fn utf_8(len: usize, input: Input<'_>) -> ParseResult<'_, String, E> +where + E: From, +{ + let (i, bytes) = take_n(len, input)?; + let result = String::from_utf8(bytes.to_vec()) + .map_err(|_| ParseError::Error(InvalidUtf8)) + .map_err(|e| e.lift())?; + Ok((i, result)) +} + +/// Returned from `range_of` +pub(crate) struct RangeOf { + /// The range in the input where we parsed from + pub(crate) range: std::ops::Range, + /// The value we parsed + pub(crate) value: T, +} + +/// Evaluate `parser` and then return the value parsed, as well as the range in the input which we +/// just parsed. +/// +/// This is useful when you want to parse some data from an input in order to check that is valid, +/// but you will also be holding on to the input data and want to know where in the input data the +/// valid data was parsed from. +/// +/// # Example +/// +/// Imagine that we are parsing records of some kind from a file, as well as parsing the record we +/// want to record the offset in the file where the record is so we can update it in place. 
+/// +/// ```rust,ignore +/// # use automerge::storage::parse::{ParseResult, Input}; +/// struct Message; +/// struct Record { +/// message: Message, +/// location: std::ops::Range +/// } +/// +/// fn parse_message<'a>(input: Input<'a>) -> ParseResult<'a, Message, ()> { +/// unimplemented!() +/// } +/// +/// fn parse_record<'a>(input: Input<'a>) -> ParseResult<'a, Record, ()> { +/// let (i, RangeOf{range: location, value: message}) = range_of(|i| parse_message(i), i)?; +/// Ok((i, Record { +/// location, // <- this is the location in the input where the message was parsed from +/// message, +/// })) +/// } +/// +/// let file_contents: Vec = unimplemented!(); +/// let input = Input::new(&file_contents); +/// let record = parse_record(input).unwrap().1; +/// ``` +pub(crate) fn range_of<'a, P, R, E>(parser: P, input: Input<'a>) -> ParseResult<'a, RangeOf, E> +where + P: Parser<'a, R, E>, +{ + input.range_of(parser) +} + +/// Parse all the remaining input from the parser. This can never fail +pub(crate) fn take_rest(input: Input<'_>) -> ParseResult<'_, &'_ [u8], E> { + input.rest() +} diff --git a/automerge/src/storage/parse/leb128.rs b/automerge/src/storage/parse/leb128.rs new file mode 100644 index 00000000..800253c9 --- /dev/null +++ b/automerge/src/storage/parse/leb128.rs @@ -0,0 +1,118 @@ +use core::mem::size_of; +use std::num::NonZeroU64; + +use super::{take1, Input, ParseError, ParseResult}; + +#[derive(PartialEq, thiserror::Error, Debug, Clone)] +pub(crate) enum Error { + #[error("leb128 was too large for the destination type")] + Leb128TooLarge, + #[error("leb128 was zero when it was expected to be nonzero")] + UnexpectedZero, +} + +macro_rules! 
impl_leb { + ($parser_name: ident, $ty: ty) => { + #[allow(dead_code)] + pub(crate) fn $parser_name<'a, E>(input: Input<'a>) -> ParseResult<'a, $ty, E> + where + E: From, + { + let mut res = 0; + let mut shift = 0; + + let mut input = input; + let mut pos = 0; + loop { + let (i, byte) = take1(input)?; + input = i; + if (byte & 0x80) == 0 { + res |= (byte as $ty) << shift; + return Ok((input, res)); + } else if pos == leb128_size::<$ty>() - 1 { + return Err(ParseError::Error(Error::Leb128TooLarge.into())); + } else { + res |= ((byte & 0x7F) as $ty) << shift; + } + pos += 1; + shift += 7; + } + } + }; +} + +impl_leb!(leb128_u64, u64); +impl_leb!(leb128_u32, u32); +impl_leb!(leb128_i64, i64); +impl_leb!(leb128_i32, i32); + +/// Parse a LEB128 encoded u64 from the input, throwing an error if it is `0` +pub(crate) fn nonzero_leb128_u64(input: Input<'_>) -> ParseResult<'_, NonZeroU64, E> +where + E: From, +{ + let (input, num) = leb128_u64(input)?; + let result = + NonZeroU64::new(num).ok_or_else(|| ParseError::Error(Error::UnexpectedZero.into()))?; + Ok((input, result)) +} + +/// Maximum LEB128-encoded size of an integer type +const fn leb128_size() -> usize { + let bits = size_of::() * 8; + (bits + 6) / 7 // equivalent to ceil(bits/7) w/o floats +} + +#[cfg(test)] +mod tests { + use super::super::Needed; + use super::*; + use std::{convert::TryFrom, num::NonZeroUsize}; + + const NEED_ONE: Needed = Needed::Size(unsafe { NonZeroUsize::new_unchecked(1) }); + + #[test] + fn leb_128_unsigned() { + let one = &[0b00000001_u8]; + let one_two_nine = &[0b10000001, 0b00000001]; + let one_and_more = &[0b00000001, 0b00000011]; + + let scenarios: Vec<(&'static [u8], ParseResult<'_, u64, Error>)> = vec![ + (one, Ok((Input::with_position(one, 1), 1))), + (&[0b10000001_u8], Err(ParseError::Incomplete(NEED_ONE))), + ( + one_two_nine, + Ok((Input::with_position(one_two_nine, 2), 129)), + ), + (one_and_more, Ok((Input::with_position(one_and_more, 1), 1))), + ( + &[129, 129, 129, 129, 129, 
129, 129, 129, 129, 129, 129, 129], + Err(ParseError::Error(Error::Leb128TooLarge)), + ), + ]; + for (index, (input, expected)) in scenarios.clone().into_iter().enumerate() { + let result = leb128_u64(Input::new(input)); + if result != expected { + panic!( + "Scenario {} failed for u64: expected {:?} got {:?}", + index + 1, + expected, + result + ); + } + } + + for (index, (input, expected)) in scenarios.into_iter().enumerate() { + let u32_expected = expected.map(|(i, e)| (i, u32::try_from(e).unwrap())); + let result = leb128_u32(Input::new(input)); + if result != u32_expected { + panic!( + "Scenario {} failed for u32: expected {:?} got {:?}", + index + 1, + u32_expected, + result + ); + } + } + } +} From 782f351322115b4be334a97122a720c9da202b80 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 25 Jul 2022 17:55:17 +0100 Subject: [PATCH 532/730] Add types to convert between different Op types Op IDs in the OpSet are represented using an index into a set of actor IDs. This is efficient but requires conversion when reading and writing from storage (where the set of actors might be different from ths in the OpSet). Add a trait for converting between different representations of an OpID. Signed-off-by: Alex Good --- automerge/src/convert.rs | 102 +++++++++++++++++++++++++++++++++++++++ automerge/src/lib.rs | 3 ++ automerge/src/types.rs | 7 +++ 3 files changed, 112 insertions(+) create mode 100644 automerge/src/convert.rs diff --git a/automerge/src/convert.rs b/automerge/src/convert.rs new file mode 100644 index 00000000..a99f96a1 --- /dev/null +++ b/automerge/src/convert.rs @@ -0,0 +1,102 @@ +//! Types for converting between different OpId representations +//! +//! In various places throughout the codebase we refer to operation IDs. The canonical type for +//! representing an operation ID is [`crate::types::OpId`]. This type holds the counter of the operation +//! ID but it does not store the actor ID, instead storing an index into an array of actor IDs +//! 
stored elsewhere. This makes using OpIds very memory efficient. We also store operation IDs on +//! disc. Here again we use a representation where the actor ID is stored as an offset into an +//! array which is held elsewhere. We occasionally do need to refer to an operation ID which +//! contains the full actor ID - typically when exporting to other processes or to the user. +//! +//! This is problematic when we want to write code which is generic over all these representations, +//! or which needs to convert between them. This module hopes to solve that problem. The basic +//! approach is to define the trait `OpId`, which is generic over the type of its `actor`. Using a +//! trait means that there is no need to allocate intermediate collections of operation IDs when +//! converting (for example when encoding a bunch of OpSet operation IDs into a change, where we +//! have to translate the indices). +//! +//! Having defined the `OpId` trait we then define a bunch of enums representing each of the +//! entities in the automerge data model which contain an `OpId`, namely `ObjId`, `Key`, and +//! `ElemId`. Each of these enums implements a `map` method, which allows you to convert the actor +//! ID of any contained operation using a mappping function. + +use std::borrow::Cow; + +pub(crate) trait OpId { + fn actor(&self) -> ActorId; + fn counter(&self) -> u64; +} + +#[derive(Clone, Debug)] +pub(crate) enum ObjId { + Root, + Op(O), +} + +impl ObjId { + pub(crate) fn map(self, f: F) -> ObjId

+ where + F: Fn(O) -> P, + { + match self { + ObjId::Root => ObjId::Root, + ObjId::Op(o) => ObjId::Op(f(o)), + } + } +} + +#[derive(Clone)] +pub(crate) enum ElemId { + Head, + Op(O), +} + +impl ElemId { + pub(crate) fn map(self, f: F) -> ElemId

+ where + F: Fn(O) -> P, + { + match self { + ElemId::Head => ElemId::Head, + ElemId::Op(o) => ElemId::Op(f(o)), + } + } +} + +#[derive(Clone)] +pub(crate) enum Key<'a, O> { + Prop(Cow<'a, smol_str::SmolStr>), + Elem(ElemId), +} + +impl<'a, O> Key<'a, O> { + pub(crate) fn map(self, f: F) -> Key<'a, P> + where + F: Fn(O) -> P, + { + match self { + Key::Prop(p) => Key::Prop(p), + Key::Elem(e) => Key::Elem(e.map(f)), + } + } +} + +impl OpId for crate::types::OpId { + fn counter(&self) -> u64 { + self.counter() + } + + fn actor(&self) -> usize { + self.actor() + } +} + +impl<'a> OpId for &'a crate::types::OpId { + fn counter(&self) -> u64 { + crate::types::OpId::counter(self) + } + + fn actor(&self) -> usize { + crate::types::OpId::actor(self) + } +} diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 3bdf5354..e18eff3a 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -59,6 +59,9 @@ mod automerge; mod change; mod clock; mod columnar; +#[cfg(feature = "storage-v2")] +#[allow(dead_code)] +mod convert; mod decoding; mod encoding; mod error; diff --git a/automerge/src/types.rs b/automerge/src/types.rs index 288c2846..ea7bb87c 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -365,6 +365,13 @@ impl Key { #[derive(Debug, Clone, PartialOrd, Ord, Eq, PartialEq, Copy, Hash, Default)] pub(crate) struct OpId(pub(crate) u64, pub(crate) usize); +impl OpId { + #[cfg(feature = "storage-v2")] + pub(crate) fn new(actor: usize, counter: u64) -> Self { + Self(counter, actor) + } +} + #[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] pub(crate) struct ObjId(pub(crate) OpId); From de997e2c50d034cbd7d81bd11fcfb14065542042 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 25 Jul 2022 16:38:06 +0100 Subject: [PATCH 533/730] Reimplement columnar decoding types The existing implementation of the columnar format elides a lot of error handling (by converting `Err` to `None`) and doesn't allow writing to a single chunk of 
memory when encoding. Implement a new set of encoding and decoding primitives which handle errors more robustly and allow us to use a single chunk of memory when reading and writing. Signed-off-by: Alex Good --- automerge/src/columnar_2.rs | 14 + automerge/src/columnar_2/column_range.rs | 21 + .../src/columnar_2/column_range/boolean.rs | 40 ++ .../src/columnar_2/column_range/delta.rs | 152 +++++ automerge/src/columnar_2/column_range/deps.rs | 119 ++++ .../src/columnar_2/column_range/generic.rs | 91 +++ .../columnar_2/column_range/generic/group.rs | 138 +++++ .../columnar_2/column_range/generic/simple.rs | 76 +++ automerge/src/columnar_2/column_range/key.rs | 258 +++++++++ .../src/columnar_2/column_range/obj_id.rs | 202 +++++++ automerge/src/columnar_2/column_range/opid.rs | 210 +++++++ .../src/columnar_2/column_range/opid_list.rs | 324 +++++++++++ automerge/src/columnar_2/column_range/raw.rs | 38 ++ automerge/src/columnar_2/column_range/rle.rs | 216 +++++++ .../src/columnar_2/column_range/value.rs | 545 ++++++++++++++++++ automerge/src/columnar_2/encoding.rs | 63 ++ automerge/src/columnar_2/encoding/boolean.rs | 131 +++++ .../src/columnar_2/encoding/col_error.rs | 88 +++ .../src/columnar_2/encoding/column_decoder.rs | 157 +++++ .../columnar_2/encoding/decodable_impls.rs | 175 ++++++ automerge/src/columnar_2/encoding/delta.rs | 95 +++ .../columnar_2/encoding/encodable_impls.rs | 200 +++++++ automerge/src/columnar_2/encoding/leb128.rs | 73 +++ .../src/columnar_2/encoding/properties.rs | 178 ++++++ automerge/src/columnar_2/encoding/raw.rs | 97 ++++ automerge/src/columnar_2/encoding/rle.rs | 239 ++++++++ automerge/src/columnar_2/splice_error.rs | 47 ++ automerge/src/lib.rs | 4 + 28 files changed, 3991 insertions(+) create mode 100644 automerge/src/columnar_2.rs create mode 100644 automerge/src/columnar_2/column_range.rs create mode 100644 automerge/src/columnar_2/column_range/boolean.rs create mode 100644 automerge/src/columnar_2/column_range/delta.rs create mode 
100644 automerge/src/columnar_2/column_range/deps.rs create mode 100644 automerge/src/columnar_2/column_range/generic.rs create mode 100644 automerge/src/columnar_2/column_range/generic/group.rs create mode 100644 automerge/src/columnar_2/column_range/generic/simple.rs create mode 100644 automerge/src/columnar_2/column_range/key.rs create mode 100644 automerge/src/columnar_2/column_range/obj_id.rs create mode 100644 automerge/src/columnar_2/column_range/opid.rs create mode 100644 automerge/src/columnar_2/column_range/opid_list.rs create mode 100644 automerge/src/columnar_2/column_range/raw.rs create mode 100644 automerge/src/columnar_2/column_range/rle.rs create mode 100644 automerge/src/columnar_2/column_range/value.rs create mode 100644 automerge/src/columnar_2/encoding.rs create mode 100644 automerge/src/columnar_2/encoding/boolean.rs create mode 100644 automerge/src/columnar_2/encoding/col_error.rs create mode 100644 automerge/src/columnar_2/encoding/column_decoder.rs create mode 100644 automerge/src/columnar_2/encoding/decodable_impls.rs create mode 100644 automerge/src/columnar_2/encoding/delta.rs create mode 100644 automerge/src/columnar_2/encoding/encodable_impls.rs create mode 100644 automerge/src/columnar_2/encoding/leb128.rs create mode 100644 automerge/src/columnar_2/encoding/properties.rs create mode 100644 automerge/src/columnar_2/encoding/raw.rs create mode 100644 automerge/src/columnar_2/encoding/rle.rs create mode 100644 automerge/src/columnar_2/splice_error.rs diff --git a/automerge/src/columnar_2.rs b/automerge/src/columnar_2.rs new file mode 100644 index 00000000..bb727626 --- /dev/null +++ b/automerge/src/columnar_2.rs @@ -0,0 +1,14 @@ +//! Types for reading data which is stored in a columnar storage format +//! +//! The details of how values are encoded in `encoding`, which exposes a set of "decoder" and +//! "encoder" types. +//! +//! The `column_range` module exposes a set of types - most of which are newtypes over +//! 
`Range` - which have useful instance methods such as `encode()` to create a new range and +//! `decoder()` to return an iterator of the correct type. +pub(crate) mod column_range; +pub(crate) use column_range::Key; +pub(crate) mod encoding; + +mod splice_error; +pub(crate) use splice_error::SpliceError; diff --git a/automerge/src/columnar_2/column_range.rs b/automerge/src/columnar_2/column_range.rs new file mode 100644 index 00000000..5762ed14 --- /dev/null +++ b/automerge/src/columnar_2/column_range.rs @@ -0,0 +1,21 @@ +mod rle; +pub(crate) use rle::RleRange; +mod delta; +pub(crate) use delta::DeltaRange; +mod boolean; +pub(crate) use boolean::BooleanRange; +mod raw; +pub(crate) use raw::RawRange; +mod opid; +pub(crate) use opid::{OpIdEncoder, OpIdIter, OpIdRange}; +mod opid_list; +pub(crate) use opid_list::{OpIdListEncoder, OpIdListIter, OpIdListRange}; +mod deps; +pub(crate) use deps::{DepsIter, DepsRange}; +mod value; +pub(crate) use value::{ValueEncoder, ValueIter, ValueRange}; +pub(crate) mod generic; +mod key; +pub(crate) use key::{Key, KeyEncoder, KeyIter, KeyRange}; +mod obj_id; +pub(crate) use obj_id::{ObjIdEncoder, ObjIdIter, ObjIdRange}; diff --git a/automerge/src/columnar_2/column_range/boolean.rs b/automerge/src/columnar_2/column_range/boolean.rs new file mode 100644 index 00000000..25e3783e --- /dev/null +++ b/automerge/src/columnar_2/column_range/boolean.rs @@ -0,0 +1,40 @@ +use std::{borrow::Cow, ops::Range}; + +use crate::columnar_2::encoding::{BooleanDecoder, BooleanEncoder}; + +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct BooleanRange(Range); + +impl BooleanRange { + pub(crate) fn decoder<'a>(&self, data: &'a [u8]) -> BooleanDecoder<'a> { + BooleanDecoder::from(Cow::Borrowed(&data[self.0.clone()])) + } + + pub(crate) fn encode>(items: I, out: &mut Vec) -> Self { + let start = out.len(); + let mut encoder = BooleanEncoder::from(out); + for i in items { + encoder.append(i); + } + let (_, len) = encoder.finish(); + (start..(start + 
len)).into() + } +} + +impl AsRef> for BooleanRange { + fn as_ref(&self) -> &Range { + &self.0 + } +} + +impl From> for BooleanRange { + fn from(r: Range) -> BooleanRange { + BooleanRange(r) + } +} + +impl From for Range { + fn from(r: BooleanRange) -> Range { + r.0 + } +} diff --git a/automerge/src/columnar_2/column_range/delta.rs b/automerge/src/columnar_2/column_range/delta.rs new file mode 100644 index 00000000..eb64ae30 --- /dev/null +++ b/automerge/src/columnar_2/column_range/delta.rs @@ -0,0 +1,152 @@ +use std::{borrow::Cow, convert::Infallible, ops::Range}; + +use crate::columnar_2::{ + encoding::{raw, DeltaDecoder, DeltaEncoder, Sink}, + SpliceError, +}; + +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct DeltaRange(Range); + +impl DeltaRange { + pub(crate) fn decoder<'a>(&self, data: &'a [u8]) -> DeltaDecoder<'a> { + DeltaDecoder::from(Cow::Borrowed(&data[self.0.clone()])) + } + + pub(crate) fn encoder(&self, output: S) -> DeltaEncoder { + DeltaEncoder::from(output) + } + + pub(crate) fn len(&self) -> usize { + self.0.len() + } + + pub(crate) fn encode>>(items: I, out: &mut Vec) -> Self { + // SAFETY: The incoming iterator is infallible and there are no existing items + Self::from(0..0) + .splice::(&[], 0..0, items.map(Ok), out) + .unwrap() + } + + pub(crate) fn splice, E>>>( + &self, + data: &[u8], + replace: Range, + mut replace_with: I, + out: &mut Vec, + ) -> Result> { + let start = out.len(); + let mut decoder = self.decoder(data); + let mut encoder = self.encoder(out); + let mut idx = 0; + while idx < replace.start { + match decoder + .next() + .transpose() + .map_err(SpliceError::ReadExisting)? + { + Some(elem) => encoder.append(elem), + None => panic!("out of bounds"), + } + idx += 1; + } + for _ in 0..replace.len() { + decoder + .next() + .transpose() + .map_err(SpliceError::ReadExisting)?; + if let Some(next) = replace_with + .next() + .transpose() + .map_err(SpliceError::ReadReplace)? 
+ { + encoder.append(next); + } + } + for next in replace_with { + let next = next.map_err(SpliceError::ReadReplace)?; + encoder.append(next); + } + for next in decoder { + let next = next.map_err(SpliceError::ReadExisting)?; + encoder.append(next); + } + let (_, len) = encoder.finish(); + Ok((start..(start + len)).into()) + } +} + +impl AsRef> for DeltaRange { + fn as_ref(&self) -> &Range { + &self.0 + } +} + +impl From> for DeltaRange { + fn from(r: Range) -> DeltaRange { + DeltaRange(r) + } +} + +impl From for Range { + fn from(r: DeltaRange) -> Range { + r.0 + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::columnar_2::encoding::properties::option_splice_scenario; + use proptest::prelude::*; + + fn encode>>(vals: I) -> (DeltaRange, Vec) { + let mut buf = Vec::::new(); + let range = DeltaRange::encode(vals, &mut buf); + (range, buf) + } + + fn decode(range: DeltaRange, buf: &[u8]) -> Vec> { + range.decoder(buf).collect::, _>>().unwrap() + } + + fn encodable_int() -> impl Strategy + Clone { + 0..(i64::MAX / 2) + } + + proptest! 
{ + #[test] + fn encode_decode_delta(vals in proptest::collection::vec(proptest::option::of(encodable_int()), 0..100)) { + let (r, encoded) = encode(vals.iter().copied()); + if vals.iter().all(|v| v.is_none()) { + assert_eq!(encoded.len(), 0); + let decoded = decode(r, &encoded); + assert_eq!(Vec::>::new(), decoded) + } else { + let decoded = decode(r, &encoded); + assert_eq!(vals, decoded) + } + } + + #[test] + fn splice_delta(scenario in option_splice_scenario(proptest::option::of(encodable_int()))) { + let (range, encoded) = encode(scenario.initial_values.iter().copied()); + let mut out = Vec::new(); + let replacements: Vec, Infallible>> = scenario.replacements.iter().cloned().map(Ok).collect(); + let new_range = range.splice(&encoded, scenario.replace_range.clone(), replacements.into_iter(), &mut out).unwrap(); + let decoded = decode(new_range, &out); + scenario.check_optional(decoded); + } + } + + #[test] + fn bugbug() { + let vals: Vec = vec![6, 5, 8, 9, 10, 11, 12, 13]; + let (r, encoded) = encode(vals.iter().copied().map(Some)); + let decoded = decode(r, &encoded) + .into_iter() + .map(Option::unwrap) + .collect::>(); + assert_eq!(decoded, vals); + } +} diff --git a/automerge/src/columnar_2/column_range/deps.rs b/automerge/src/columnar_2/column_range/deps.rs new file mode 100644 index 00000000..386b5a4f --- /dev/null +++ b/automerge/src/columnar_2/column_range/deps.rs @@ -0,0 +1,119 @@ +use super::{DeltaRange, RleRange}; +use crate::columnar_2::encoding::{DecodeColumnError, DeltaDecoder, RleDecoder}; + +/// A grouped column containing lists of u64s +#[derive(Clone, Debug)] +pub(crate) struct DepsRange { + num: RleRange, + deps: DeltaRange, +} + +impl DepsRange { + pub(crate) fn new(num: RleRange, deps: DeltaRange) -> Self { + Self { num, deps } + } + + pub(crate) fn num_range(&self) -> &RleRange { + &self.num + } + + pub(crate) fn deps_range(&self) -> &DeltaRange { + &self.deps + } + + pub(crate) fn encode(deps: I, out: &mut Vec) -> DepsRange + where + I: 
Iterator + Clone, + II: IntoIterator + ExactSizeIterator, + { + let num = RleRange::encode(deps.clone().map(|d| Some(d.len() as u64)), out); + let deps = DeltaRange::encode( + deps.flat_map(|d| d.into_iter().map(|d| Some(d as i64))), + out, + ); + DepsRange { num, deps } + } + + pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> DepsIter<'a> { + DepsIter { + num: self.num.decoder(data), + deps: self.deps.decoder(data), + } + } +} + +#[derive(Clone)] +pub(crate) struct DepsIter<'a> { + num: RleDecoder<'a, u64>, + deps: DeltaDecoder<'a>, +} + +impl<'a> DepsIter<'a> { + fn try_next(&mut self) -> Result>, DecodeColumnError> { + let num = match self + .num + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("num", e))? + { + Some(Some(n)) => n as usize, + Some(None) => { + return Err(DecodeColumnError::unexpected_null("group")); + } + None => return Ok(None), + }; + let mut result = Vec::with_capacity(num); + while result.len() < num { + match self + .deps + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("deps", e))? + { + Some(Some(elem)) => { + let elem = match u64::try_from(elem) { + Ok(e) => e, + Err(e) => { + tracing::error!(err=?e, dep=elem, "error converting dep index to u64"); + return Err(DecodeColumnError::invalid_value( + "deps", + "error converting dep index to u64", + )); + } + }; + result.push(elem); + } + _ => return Err(DecodeColumnError::unexpected_null("deps")), + } + } + Ok(Some(result)) + } +} + +impl<'a> Iterator for DepsIter<'a> { + type Item = Result, DecodeColumnError>; + fn next(&mut self) -> Option { + self.try_next().transpose() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use proptest::collection::vec as propvec; + use proptest::prelude::*; + + fn encodable_u64() -> impl Strategy + Clone { + 0_u64..((i64::MAX / 2) as u64) + } + + proptest! 
{ + #[test] + fn encode_decode_deps(deps in propvec(propvec(encodable_u64(), 0..100), 0..100)) { + let mut out = Vec::new(); + let range = DepsRange::encode(deps.iter().cloned().map(|d| d.into_iter()), &mut out); + let decoded = range.iter(&out).collect::, _>>().unwrap(); + assert_eq!(deps, decoded); + } + } +} diff --git a/automerge/src/columnar_2/column_range/generic.rs b/automerge/src/columnar_2/column_range/generic.rs new file mode 100644 index 00000000..8fa59b32 --- /dev/null +++ b/automerge/src/columnar_2/column_range/generic.rs @@ -0,0 +1,91 @@ +use std::ops::Range; + +use crate::{columnar_2::encoding::DecodeColumnError, ScalarValue}; + +use super::{ValueIter, ValueRange}; +mod simple; +use simple::SimpleColIter; +pub(crate) use simple::SimpleColRange; +mod group; +use group::GroupIter; +pub(crate) use group::{GroupRange, GroupedColumnRange}; + +/// A range which can represent any column which is valid with respect to the data model of the +/// column oriented storage format. This is primarily intended to be used in two cases: +/// +/// 1. As an intermediate step when parsing binary storage. We parse the column metadata into +/// GenericColumnRange, then from there into more specific range types. +/// 2. when we encounter a column which we don't expect but which we still need to retain and +/// re-encode when writing new changes. +/// +/// The generic data model is represented by `CellValue`, an iterator over a generic column will +/// produce a `CellValue` for each row in the column. 
+#[derive(Debug, Clone)] +pub(crate) enum GenericColumnRange { + /// A "simple" column is one which directly corresponds to a single column in the raw format + Simple(SimpleColRange), + /// A value range consists of two columns and produces `ScalarValue`s + Value(ValueRange), + /// A "group" range consists of zero or more grouped columns and produces `CellValue::Group`s + Group(GroupRange), +} + +impl GenericColumnRange { + pub(crate) fn range(&self) -> Range { + match self { + Self::Simple(sc) => sc.range(), + Self::Value(v) => v.range(), + Self::Group(g) => g.range(), + } + } +} + +/// The type of values which can be stored in a generic column +pub(crate) enum CellValue { + /// The contents of a simple column + Simple(SimpleValue), + /// The values in a set of grouped columns + Group(Vec>), +} + +pub(crate) enum SimpleValue { + Uint(Option), + Int(Option), + String(Option), + Bool(bool), + /// The contents of a value metadata and value raw column + Value(ScalarValue), +} + +#[derive(Debug, Clone)] +#[allow(dead_code)] +pub(crate) enum GenericColIter<'a> { + Simple(SimpleColIter<'a>), + Value(ValueIter<'a>), + Group(GroupIter<'a>), +} + +impl<'a> GenericColIter<'a> { + fn try_next(&mut self) -> Result, DecodeColumnError> { + match self { + Self::Simple(s) => s + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("a simple column", e)) + .map(|v| v.map(CellValue::Simple)), + Self::Value(v) => v + .next() + .transpose() + .map(|v| v.map(|v| CellValue::Simple(SimpleValue::Value(v)))), + Self::Group(g) => g.next().transpose(), + } + } +} + +impl<'a> Iterator for GenericColIter<'a> { + type Item = Result; + + fn next(&mut self) -> Option { + self.try_next().transpose() + } +} diff --git a/automerge/src/columnar_2/column_range/generic/group.rs b/automerge/src/columnar_2/column_range/generic/group.rs new file mode 100644 index 00000000..9fb379da --- /dev/null +++ b/automerge/src/columnar_2/column_range/generic/group.rs @@ -0,0 +1,138 @@ +use 
std::ops::Range; + +use super::{CellValue, SimpleColIter, SimpleColRange, SimpleValue}; +use crate::columnar_2::{ + column_range::{RleRange, ValueIter, ValueRange}, + encoding::{col_error::DecodeColumnError, RleDecoder}, +}; + +/// A group column range is one with a "num" column and zero or more "grouped" columns. The "num" +/// column contains RLE encoded u64s, each `u64` represents the number of values to read from each +/// of the grouped columns in order to produce a `CellValue::Group` for the current row. +#[derive(Debug, Clone)] +pub(crate) struct GroupRange { + pub(crate) num: RleRange, + pub(crate) values: Vec, +} + +impl GroupRange { + pub(crate) fn new(num: RleRange, values: Vec) -> Self { + Self { num, values } + } + + #[allow(dead_code)] + pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> GroupIter<'a> { + GroupIter { + num: self.num.decoder(data), + values: self.values.iter().map(|v| v.iter(data)).collect(), + } + } + + pub(crate) fn range(&self) -> Range { + let start = self.num.start(); + let end = self + .values + .last() + .map(|v| v.range().end) + .unwrap_or_else(|| self.num.end()); + start..end + } +} + +/// The type of ranges which can be the "grouped" columns in a `GroupRange` +#[derive(Debug, Clone)] +pub(crate) enum GroupedColumnRange { + Value(ValueRange), + Simple(SimpleColRange), +} + +impl GroupedColumnRange { + fn iter<'a>(&self, data: &'a [u8]) -> GroupedColIter<'a> { + match self { + Self::Value(vr) => GroupedColIter::Value(vr.iter(data)), + Self::Simple(sc) => GroupedColIter::Simple(sc.iter(data)), + } + } + + pub(crate) fn range(&self) -> Range { + match self { + Self::Value(vr) => vr.range(), + Self::Simple(s) => s.range(), + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct GroupIter<'a> { + num: RleDecoder<'a, u64>, + values: Vec>, +} + +impl<'a> GroupIter<'a> { + fn try_next(&mut self) -> Result, DecodeColumnError> { + let num = self + .num + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("num", e))?; + 
match num { + None => Ok(None), + Some(None) => Err(DecodeColumnError::unexpected_null("num")), + Some(Some(num)) => { + let mut row = Vec::new(); + for _ in 0..num { + let mut inner_row = Vec::new(); + for (index, value_col) in self.values.iter_mut().enumerate() { + match value_col.next().transpose()? { + None => { + return Err(DecodeColumnError::unexpected_null(format!( + "col {}", + index + ))) + } + Some(v) => { + inner_row.push(v); + } + } + } + row.push(inner_row); + } + Ok(Some(CellValue::Group(row))) + } + } + } +} + +impl<'a> Iterator for GroupIter<'a> { + type Item = Result; + + fn next(&mut self) -> Option { + self.try_next().transpose() + } +} + +#[derive(Debug, Clone)] +enum GroupedColIter<'a> { + Value(ValueIter<'a>), + Simple(SimpleColIter<'a>), +} + +impl<'a> GroupedColIter<'a> { + fn try_next(&mut self) -> Result, DecodeColumnError> { + match self { + Self::Value(viter) => Ok(viter.next().transpose()?.map(SimpleValue::Value)), + Self::Simple(siter) => siter + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("a simple column", e)), + } + } +} + +impl<'a> Iterator for GroupedColIter<'a> { + type Item = Result; + + fn next(&mut self) -> Option { + self.try_next().transpose() + } +} diff --git a/automerge/src/columnar_2/column_range/generic/simple.rs b/automerge/src/columnar_2/column_range/generic/simple.rs new file mode 100644 index 00000000..5115ff96 --- /dev/null +++ b/automerge/src/columnar_2/column_range/generic/simple.rs @@ -0,0 +1,76 @@ +use std::ops::Range; + +use crate::columnar_2::{ + column_range::{BooleanRange, DeltaRange, RleRange}, + encoding::{raw, BooleanDecoder, DeltaDecoder, RleDecoder}, +}; + +use super::SimpleValue; + +/// The four types of "simple" column defined in the raw format +#[derive(Debug, Clone)] +pub(crate) enum SimpleColRange { + /// A column containing RLE encoded u64's + RleInt(RleRange), + /// A column containing RLE encoded strings + RleString(RleRange), + /// A column containing delta -> RLE 
encoded i64s + Delta(DeltaRange), + /// A column containing boolean values + Boolean(BooleanRange), +} + +impl SimpleColRange { + pub(super) fn iter<'a>(&self, data: &'a [u8]) -> SimpleColIter<'a> { + match self { + Self::RleInt(r) => SimpleColIter::RleInt(r.decoder(data)), + Self::RleString(r) => SimpleColIter::RleString(r.decoder(data)), + Self::Delta(r) => SimpleColIter::Delta(r.decoder(data)), + Self::Boolean(r) => SimpleColIter::Boolean(r.decoder(data)), + } + } + + pub(crate) fn range(&self) -> Range { + match self { + Self::RleInt(r) => r.clone().into(), + Self::RleString(r) => r.clone().into(), + Self::Delta(r) => r.clone().into(), + Self::Boolean(r) => r.clone().into(), + } + } +} + +#[derive(Debug, Clone)] +pub(crate) enum SimpleColIter<'a> { + RleInt(RleDecoder<'a, u64>), + RleString(RleDecoder<'a, smol_str::SmolStr>), + Delta(DeltaDecoder<'a>), + Boolean(BooleanDecoder<'a>), +} + +impl<'a> SimpleColIter<'a> { + fn try_next(&mut self) -> Result, raw::Error> { + match self { + Self::RleInt(d) => read_col(d, SimpleValue::Uint), + Self::RleString(d) => read_col(d, SimpleValue::String), + Self::Delta(d) => read_col(d, SimpleValue::Int), + Self::Boolean(d) => Ok(d.next().transpose()?.map(SimpleValue::Bool)), + } + } +} + +fn read_col(mut col: C, f: F) -> Result, raw::Error> +where + C: Iterator, raw::Error>>, + F: Fn(Option) -> U, +{ + col.next().transpose().map(|v| v.map(f)) +} + +impl<'a> Iterator for SimpleColIter<'a> { + type Item = Result; + + fn next(&mut self) -> Option { + self.try_next().transpose() + } +} diff --git a/automerge/src/columnar_2/column_range/key.rs b/automerge/src/columnar_2/column_range/key.rs new file mode 100644 index 00000000..da2e694b --- /dev/null +++ b/automerge/src/columnar_2/column_range/key.rs @@ -0,0 +1,258 @@ +use std::{convert::Infallible, ops::Range}; + +use super::{DeltaRange, RleRange}; +use crate::{ + columnar_2::{ + encoding::{ + raw, DecodeColumnError, DeltaDecoder, DeltaEncoder, RleDecoder, RleEncoder, Sink, + }, + 
SpliceError, + }, + convert, + types::{ElemId, OpId}, +}; + +#[derive(Clone, Debug, PartialEq)] +pub(crate) enum Key { + Prop(smol_str::SmolStr), + Elem(ElemId), +} + +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct KeyRange { + actor: RleRange, + counter: DeltaRange, + string: RleRange, +} + +impl KeyRange { + pub(crate) fn new( + actor: RleRange, + counter: DeltaRange, + string: RleRange, + ) -> Self { + Self { + actor, + counter, + string, + } + } + + pub(crate) fn actor_range(&self) -> &RleRange { + &self.actor + } + + pub(crate) fn counter_range(&self) -> &DeltaRange { + &self.counter + } + + pub(crate) fn string_range(&self) -> &RleRange { + &self.string + } + + pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> KeyIter<'a> { + KeyIter { + actor: self.actor.decoder(data), + counter: self.counter.decoder(data), + string: self.string.decoder(data), + } + } + + pub(crate) fn encode<'b, O, I: Iterator> + Clone>( + items: I, + out: &mut Vec, + ) -> Self + where + O: convert::OpId, + { + // SAFETY: The incoming iterator is infallible and there are no existing items + Self { + actor: (0..0).into(), + counter: (0..0).into(), + string: (0..0).into(), + } + .splice::<_, Infallible, _>(&[], 0..0, items.map(Ok), out) + .unwrap() + } + + /// Splice new keys into this set of keys, encoding the resulting actor, counter, and str + /// columns in `out`. 
+ pub(crate) fn splice<'b, O, E, I>( + &mut self, + data: &[u8], + replace: Range, + replace_with: I, + out: &mut Vec, + ) -> Result> + where + O: convert::OpId, + E: std::error::Error, + I: Iterator, E>> + Clone, + { + let actor = self.actor.splice( + data, + replace.clone(), + replace_with.clone().map(|k| { + k.map(|k| match k { + convert::Key::Prop(_) => None, + convert::Key::Elem(convert::ElemId::Head) => None, + convert::Key::Elem(convert::ElemId::Op(o)) => Some(o.actor() as u64), + }) + }), + out, + )?; + + let counter = self.counter.splice( + data, + replace.clone(), + replace_with.clone().map(|k| { + k.map(|k| match k { + convert::Key::Prop(_) => None, + convert::Key::Elem(convert::ElemId::Head) => Some(0), + convert::Key::Elem(convert::ElemId::Op(o)) => Some(o.counter() as i64), + }) + }), + out, + )?; + + let string = self.string.splice( + data, + replace, + replace_with.map(|k| { + k.map(|k| match k { + convert::Key::Prop(s) => Some(s), + convert::Key::Elem(_) => None, + }) + }), + out, + )?; + + Ok(Self { + actor, + counter, + string, + }) + } +} + +#[derive(Clone, Debug)] +pub(crate) struct KeyIter<'a> { + actor: RleDecoder<'a, u64>, + counter: DeltaDecoder<'a>, + string: RleDecoder<'a, smol_str::SmolStr>, +} + +impl<'a> KeyIter<'a> { + fn try_next(&mut self) -> Result, DecodeColumnError> { + let actor = self + .actor + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("actor", e))?; + let counter = self + .counter + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; + let string = self + .string + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("string", e))?; + match (actor, counter, string) { + (Some(Some(_)), Some(Some(_)), Some(Some(_))) => { + Err(DecodeColumnError::invalid_value("key", "too many values")) + } + (Some(None) | None, Some(None) | None, Some(Some(string))) => { + Ok(Some(Key::Prop(string))) + } + (Some(None) | None, Some(Some(0)), Some(None) | None) => { + 
Ok(Some(Key::Elem(ElemId(OpId(0, 0))))) + } + (Some(Some(actor)), Some(Some(ctr)), Some(None) | None) => match ctr.try_into() { + //Ok(ctr) => Some(Ok(Key::Elem(ElemId(OpId(ctr, actor as usize))))), + Ok(ctr) => Ok(Some(Key::Elem(ElemId(OpId::new(actor as usize, ctr))))), + Err(_) => Err(DecodeColumnError::invalid_value( + "counter", + "negative value for counter", + )), + }, + (None | Some(None), None | Some(None), None | Some(None)) => Ok(None), + (None | Some(None), k, _) => { + tracing::error!(key=?k, "unexpected null actor"); + Err(DecodeColumnError::unexpected_null("actor")) + } + (_, None | Some(None), _) => Err(DecodeColumnError::unexpected_null("counter")), + } + } +} + +impl<'a> Iterator for KeyIter<'a> { + type Item = Result; + + fn next(&mut self) -> Option { + self.try_next().transpose() + } +} + +pub(crate) struct KeyEncoder { + actor: RleEncoder, + counter: DeltaEncoder, + string: RleEncoder, +} + +impl KeyEncoder> { + pub(crate) fn new() -> KeyEncoder> { + KeyEncoder { + actor: RleEncoder::new(Vec::new()), + counter: DeltaEncoder::new(Vec::new()), + string: RleEncoder::new(Vec::new()), + } + } + + pub(crate) fn finish(self, out: &mut Vec) -> KeyRange { + let actor_start = out.len(); + let (actor, _) = self.actor.finish(); + out.extend(actor); + let actor_end = out.len(); + + let (counter, _) = self.counter.finish(); + out.extend(counter); + let counter_end = out.len(); + + let (string, _) = self.string.finish(); + out.extend(string); + let string_end = out.len(); + + KeyRange { + actor: (actor_start..actor_end).into(), + counter: (actor_end..counter_end).into(), + string: (counter_end..string_end).into(), + } + } +} + +impl KeyEncoder { + pub(crate) fn append(&mut self, key: convert::Key<'_, O>) + where + O: convert::OpId, + { + match key { + convert::Key::Prop(p) => { + self.string.append_value(p.clone()); + self.actor.append_null(); + self.counter.append_null(); + } + convert::Key::Elem(convert::ElemId::Head) => { + self.string.append_null(); + 
self.actor.append_null(); + self.counter.append_value(0); + } + convert::Key::Elem(convert::ElemId::Op(o)) => { + self.string.append_null(); + self.actor.append_value(o.actor() as u64); + self.counter.append_value(o.counter() as i64); + } + } + } +} diff --git a/automerge/src/columnar_2/column_range/obj_id.rs b/automerge/src/columnar_2/column_range/obj_id.rs new file mode 100644 index 00000000..e12b2530 --- /dev/null +++ b/automerge/src/columnar_2/column_range/obj_id.rs @@ -0,0 +1,202 @@ +use std::{convert::Infallible, ops::Range}; + +use crate::{ + columnar_2::{ + encoding::{raw, DecodeColumnError, RleDecoder, RleEncoder, Sink}, + SpliceError, + }, + convert, + types::{ObjId, OpId}, +}; + +use super::RleRange; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct ObjIdRange { + actor: RleRange, + counter: RleRange, +} + +impl ObjIdRange { + pub(crate) fn new(actor: RleRange, counter: RleRange) -> Option { + if actor.is_empty() || counter.is_empty() { + None + } else { + Some(Self { actor, counter }) + } + } + + pub(crate) fn actor_range(&self) -> &RleRange { + &self.actor + } + + pub(crate) fn counter_range(&self) -> &RleRange { + &self.counter + } + + pub(crate) fn encode> + Clone>( + ids: I, + out: &mut Vec, + ) -> Option + where + O: convert::OpId, + { + // SAFETY: the incoming iterator is infallible and there are no existing elements + Self { + actor: (0..0).into(), + counter: (0..0).into(), + } + .splice::<_, Infallible, _>(&[], 0..0, ids.map(Ok), out) + .unwrap() + } + + /// Given some existing columns of object IDs splice a new set of object IDs in with the + /// existing ones + /// + /// Note that this returns `None` if the resulting range is empty (which will only occur if the + /// replace range is larger than the input iterator and `ids` is an empty iterator). 
+ pub(crate) fn splice< + O, + E: std::error::Error, + I: Iterator, E>> + Clone, + >( + &self, + data: &[u8], + replace: Range, + ids: I, + out: &mut Vec, + ) -> Result, SpliceError> + where + O: convert::OpId, + { + let actor = self.actor.splice( + data, + replace.clone(), + ids.clone().map(|id| id.map(encoded_actor)), + out, + )?; + + if actor.is_empty() { + return Ok(None); + } + + let counter = self.counter.splice( + data, + replace, + ids.map(|i| { + i.map(|i| match i { + convert::ObjId::Root => None, + convert::ObjId::Op(o) => Some(o.counter()), + }) + }), + out, + )?; + + Ok(Some(Self { actor, counter })) + } + + pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> ObjIdIter<'a> { + ObjIdIter { + actor: self.actor.decoder(data), + counter: self.counter.decoder(data), + } + } +} + +fn encoded_actor(id: convert::ObjId) -> Option +where + O: convert::OpId, +{ + match id { + convert::ObjId::Root => None, + convert::ObjId::Op(o) => Some(o.actor() as u64), + } +} + +#[derive(Clone)] +pub(crate) struct ObjIdIter<'a> { + actor: RleDecoder<'a, u64>, + counter: RleDecoder<'a, u64>, +} + +impl<'a> ObjIdIter<'a> { + fn try_next(&mut self) -> Result, DecodeColumnError> { + let actor = self + .actor + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("actor", e))?; + let counter = self + .counter + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; + match (actor, counter) { + (None | Some(None), None | Some(None)) => Ok(Some(ObjId::root())), + (Some(Some(a)), Some(Some(c))) => Ok(Some(ObjId(OpId(c, a as usize)))), + (_, Some(Some(0))) => Ok(Some(ObjId::root())), + (Some(None) | None, _) => Err(DecodeColumnError::unexpected_null("actor")), + (_, Some(None) | None) => Err(DecodeColumnError::unexpected_null("counter")), + } + } +} + +impl<'a> Iterator for ObjIdIter<'a> { + type Item = Result; + + fn next(&mut self) -> Option { + self.try_next().transpose() + } +} + +pub(crate) struct ObjIdEncoder { + actor: RleEncoder, + 
counter: RleEncoder, +} + +impl ObjIdEncoder { + pub(crate) fn append(&mut self, id: convert::ObjId) + where + O: convert::OpId, + { + match id { + convert::ObjId::Root => { + self.actor.append_null(); + self.counter.append_null(); + } + convert::ObjId::Op(o) => { + self.actor.append_value(o.actor() as u64); + self.counter.append_value(o.counter() as u64); + } + } + } +} + +impl ObjIdEncoder> { + pub(crate) fn new() -> Self { + Self { + actor: RleEncoder::from(Vec::new()), + counter: RleEncoder::from(Vec::new()), + } + } + + pub(crate) fn finish(self, out: &mut Vec) -> Option { + let start = out.len(); + let (actor, _) = self.actor.finish(); + out.extend(actor); + let actor_end = out.len(); + + let (counter, _) = self.counter.finish(); + out.extend(counter); + let counter_end = out.len(); + + if start == counter_end { + None + } else { + Some(ObjIdRange { + actor: (start..actor_end).into(), + counter: (actor_end..counter_end).into(), + }) + } + } +} diff --git a/automerge/src/columnar_2/column_range/opid.rs b/automerge/src/columnar_2/column_range/opid.rs new file mode 100644 index 00000000..1b1817cb --- /dev/null +++ b/automerge/src/columnar_2/column_range/opid.rs @@ -0,0 +1,210 @@ +use std::ops::Range; + +use super::{DeltaRange, RleRange}; +use crate::{ + columnar_2::{ + encoding::{ + raw, DecodeColumnError, DeltaDecoder, DeltaEncoder, RleDecoder, RleEncoder, Sink, + }, + SpliceError, + }, + convert, + types::OpId, +}; + +#[derive(Debug, Clone)] +pub(crate) struct OpIdRange { + actor: RleRange, + counter: DeltaRange, +} + +impl OpIdRange { + pub(crate) fn new(actor: RleRange, counter: DeltaRange) -> Self { + Self { actor, counter } + } + + pub(crate) fn actor_range(&self) -> &RleRange { + &self.actor + } + + pub(crate) fn counter_range(&self) -> &DeltaRange { + &self.counter + } + + pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> OpIdIter<'a> { + OpIdIter { + actor: self.actor.decoder(data), + counter: self.counter.decoder(data), + } + } + + pub(crate) fn 
encode(opids: I, out: &mut Vec) -> Self + where + O: convert::OpId, + I: Iterator + Clone, + { + let actor = RleRange::encode(opids.clone().map(|o| Some(o.actor() as u64)), out); + let counter = DeltaRange::encode(opids.map(|o| Some(o.counter() as i64)), out); + Self { actor, counter } + } + + #[allow(dead_code)] + pub(crate) fn splice( + &self, + data: &[u8], + replace: Range, + replace_with: I, + out: &mut Vec, + ) -> Result> + where + O: convert::OpId, + E: std::error::Error, + I: Iterator> + Clone, + { + let actor = self.actor.splice( + data, + replace.clone(), + replace_with + .clone() + .map(|i| i.map(|i| Some(i.actor() as u64))), + out, + )?; + let counter = self.counter.splice( + data, + replace, + replace_with.map(|i| i.map(|i| Some(i.counter() as i64))), + out, + )?; + Ok(Self { actor, counter }) + } +} + +#[derive(Clone)] +pub(crate) struct OpIdIter<'a> { + actor: RleDecoder<'a, u64>, + counter: DeltaDecoder<'a>, +} + +impl<'a> OpIdIter<'a> { + pub(crate) fn done(&self) -> bool { + self.counter.done() + } +} + +impl<'a> OpIdIter<'a> { + fn try_next(&mut self) -> Result, DecodeColumnError> { + let actor = self + .actor + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("actor", e))?; + let counter = self + .counter + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; + match (actor, counter) { + (Some(Some(a)), Some(Some(c))) => match c.try_into() { + Ok(c) => Ok(Some(OpId(c, a as usize))), + Err(_) => Err(DecodeColumnError::invalid_value( + "counter", + "negative value encountered", + )), + }, + (Some(None), _) => Err(DecodeColumnError::unexpected_null("actor")), + (_, Some(None)) => Err(DecodeColumnError::unexpected_null("actor")), + (Some(_), None) => Err(DecodeColumnError::unexpected_null("ctr")), + (None, Some(_)) => Err(DecodeColumnError::unexpected_null("actor")), + (None, None) => Ok(None), + } + } +} + +impl<'a> Iterator for OpIdIter<'a> { + type Item = Result; + + fn next(&mut self) -> Option 
{ + self.try_next().transpose() + } +} + +pub(crate) struct OpIdEncoder { + actor: RleEncoder, + counter: DeltaEncoder, +} + +impl OpIdEncoder { + pub(crate) fn append>(&mut self, opid: O) { + self.actor.append_value(opid.actor() as u64); + self.counter.append_value(opid.counter() as i64); + } +} + +impl OpIdEncoder> { + pub(crate) fn new() -> Self { + Self { + actor: RleEncoder::from(Vec::new()), + counter: DeltaEncoder::from(Vec::new()), + } + } + + pub(crate) fn finish(self, out: &mut Vec) -> OpIdRange { + let start = out.len(); + let (actor, _) = self.actor.finish(); + out.extend(actor); + let actor_end = out.len(); + + let (counter, _) = self.counter.finish(); + out.extend(counter); + let counter_end = out.len(); + + OpIdRange { + actor: (start..actor_end).into(), + counter: (actor_end..counter_end).into(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + columnar_2::encoding::properties::{opid, splice_scenario}, + types::OpId, + }; + use proptest::prelude::*; + use std::convert::Infallible; + + fn encode(vals: &[OpId]) -> (Vec, OpIdRange) { + let mut out = Vec::new(); + let r = OpIdRange::encode(vals.iter().copied(), &mut out); + (out, r) + } + + fn decode(buf: &[u8], range: OpIdRange) -> Vec { + range.iter(buf).map(|c| c.unwrap()).collect() + } + + proptest! 
{ + #[test] + fn encode_decode_opid(opids in proptest::collection::vec(opid(), 0..100)) { + let (encoded, range) = encode(&opids); + assert_eq!(opids, decode(&encoded[..], range)); + } + + #[test] + fn splice_opids(scenario in splice_scenario(opid())) { + let (encoded, range) = encode(&scenario.initial_values); + let mut out = Vec::new(); + let replacements: Vec> = scenario.replacements.iter().cloned().map(Ok).collect(); + let new_range = range.splice( + &encoded, + scenario.replace_range.clone(), + replacements.into_iter(), + &mut out + ).unwrap(); + let result = decode(&out[..], new_range); + scenario.check(result); + } + } +} diff --git a/automerge/src/columnar_2/column_range/opid_list.rs b/automerge/src/columnar_2/column_range/opid_list.rs new file mode 100644 index 00000000..417a2c1a --- /dev/null +++ b/automerge/src/columnar_2/column_range/opid_list.rs @@ -0,0 +1,324 @@ +use std::{convert::Infallible, ops::Range}; + +use super::{DeltaRange, RleRange}; +use crate::{ + columnar_2::{ + encoding::{ + raw, DecodeColumnError, DeltaDecoder, DeltaEncoder, RleDecoder, RleEncoder, Sink, + }, + SpliceError, + }, + convert, + types::OpId, +}; + +/// A collection of ranges which decode to lists of OpIds +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct OpIdListRange { + num: RleRange, + actor: RleRange, + counter: DeltaRange, +} + +impl OpIdListRange { + pub(crate) fn new(num: RleRange, actor: RleRange, counter: DeltaRange) -> Self { + Self { + num, + actor, + counter, + } + } + + pub(crate) fn group_range(&self) -> &RleRange { + &self.num + } + + pub(crate) fn actor_range(&self) -> &RleRange { + &self.actor + } + + pub(crate) fn counter_range(&self) -> &DeltaRange { + &self.counter + } + + pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> OpIdListIter<'a> { + OpIdListIter { + num: self.num.decoder(data), + actor: self.actor.decoder(data), + counter: self.counter.decoder(data), + } + } + + pub(crate) fn encode(opids: I, out: &mut Vec) -> Self + where + O: 
convert::OpId, + II: IntoIterator, + IE: Iterator + ExactSizeIterator, + I: Iterator + Clone, + { + let num = RleRange::encode( + opids.clone().map(|os| Some(os.into_iter().len() as u64)), + out, + ); + let actor = RleRange::encode( + opids + .clone() + .flat_map(|os| os.into_iter().map(|o| Some(o.actor() as u64))), + out, + ); + let counter = DeltaRange::encode( + opids.flat_map(|os| os.into_iter().map(|o| Some(o.counter() as i64))), + out, + ); + Self { + num, + actor, + counter, + } + } + + #[allow(dead_code)] + pub(crate) fn splice( + &self, + data: &[u8], + replace: Range, + replace_with: I, + out: &mut Vec, + ) -> Result> + where + R: std::error::Error + Clone, + II: IntoIterator, + IE: Iterator + ExactSizeIterator, + I: Iterator> + Clone, + { + let group_replace = group_replace_range(replace.clone(), self.num.decoder(data)) + .map_err(|e| e.existing())?; + let num = self.num.splice( + data, + replace, + replace_with + .clone() + .map(|elems| elems.map(|elems| Some(elems.into_iter().len() as u64))), + out, + )?; + let actor = self.actor.splice( + data, + group_replace.clone(), + replace_with.clone().flat_map(|elem| match elem { + Err(e) => SplicingIter::Failed(e), + Ok(i) => SplicingIter::Iter(i.into_iter(), |oid: OpId| oid.actor() as u64), + }), + out, + )?; + let counter = self.counter.splice( + data, + group_replace, + replace_with.flat_map(|elem| match elem { + Err(e) => SplicingIter::Failed(e), + Ok(i) => SplicingIter::Iter(i.into_iter(), |oid: OpId| oid.counter() as i64), + }), + out, + )?; + Ok(Self { + num, + actor, + counter, + }) + } +} + +enum SplicingIter { + Failed(E), + Iter(I, F), +} + +impl Iterator for SplicingIter +where + E: std::error::Error + Clone, + I: Iterator, + F: Fn(OpId) -> U, +{ + type Item = Result, E>; + + fn next(&mut self) -> Option { + match self { + Self::Failed(e) => Some(Err(e.clone())), + Self::Iter(i, f) => i.next().map(|oid| Ok(Some(f(oid)))), + } + } +} + +/// Find the replace range for the grouped columns. 
+fn group_replace_range( + replace: Range, + mut num: RleDecoder<'_, u64>, +) -> Result, SpliceError> { + let mut idx = 0; + let mut grouped_replace_start: usize = 0; + let mut grouped_replace_len: usize = 0; + while idx < replace.start { + if let Some(Some(count)) = num.next().transpose().map_err(SpliceError::ReadExisting)? { + grouped_replace_start += count as usize; + } + idx += 1; + } + for _ in 0..replace.len() { + if let Some(Some(count)) = num.next().transpose().map_err(SpliceError::ReadExisting)? { + grouped_replace_len += count as usize; + } + } + Ok(grouped_replace_start..(grouped_replace_start + grouped_replace_len)) +} + +#[derive(Clone)] +pub(crate) struct OpIdListIter<'a> { + num: RleDecoder<'a, u64>, + actor: RleDecoder<'a, u64>, + counter: DeltaDecoder<'a>, +} + +impl<'a> OpIdListIter<'a> { + fn try_next(&mut self) -> Result>, DecodeColumnError> { + let num = match self + .num + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("num", e))? + { + Some(Some(n)) => n, + Some(None) => return Err(DecodeColumnError::unexpected_null("num")), + None => return Ok(None), + }; + let mut p = Vec::with_capacity(num as usize); + for _ in 0..num { + let actor = self + .actor + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("actor", e))?; + let counter = self + .counter + .next() + .transpose() + .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; + match (actor, counter) { + (Some(Some(a)), Some(Some(ctr))) => match ctr.try_into() { + Ok(ctr) => p.push(OpId(ctr, a as usize)), + Err(_e) => { + return Err(DecodeColumnError::invalid_value( + "counter", + "negative value for counter", + )) + } + }, + (Some(None) | None, _) => return Err(DecodeColumnError::unexpected_null("actor")), + (_, Some(None) | None) => { + return Err(DecodeColumnError::unexpected_null("counter")) + } + } + } + Ok(Some(p)) + } +} + +impl<'a> Iterator for OpIdListIter<'a> { + type Item = Result, DecodeColumnError>; + + fn next(&mut self) -> Option { 
+ self.try_next().transpose() + } +} + +pub(crate) struct OpIdListEncoder { + num: RleEncoder, + actor: RleEncoder, + counter: DeltaEncoder, +} + +impl OpIdListEncoder { + pub(crate) fn append(&mut self, ids: I) + where + I: Iterator + ExactSizeIterator, + O: convert::OpId, + { + self.num.append_value(ids.len() as u64); + for id in ids { + self.actor.append_value(id.actor() as u64); + self.counter.append_value(id.counter() as i64); + } + } +} + +impl OpIdListEncoder> { + pub(crate) fn new() -> Self { + Self { + num: RleEncoder::from(Vec::new()), + actor: RleEncoder::from(Vec::new()), + counter: DeltaEncoder::from(Vec::new()), + } + } + + pub(crate) fn finish(self, out: &mut Vec) -> OpIdListRange { + let start = out.len(); + let (num, _) = self.num.finish(); + out.extend(num); + let num_end = out.len(); + + let (actor, _) = self.actor.finish(); + out.extend(actor); + let actor_end = out.len(); + + let (counter, _) = self.counter.finish(); + out.extend(counter); + let counter_end = out.len(); + + OpIdListRange { + num: (start..num_end).into(), + actor: (num_end..actor_end).into(), + counter: (actor_end..counter_end).into(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use proptest::collection::vec as propvec; + use proptest::prelude::*; + + use crate::columnar_2::encoding::properties::{opid, splice_scenario}; + + fn encode(opids: Vec>) -> (OpIdListRange, Vec) { + let mut out = Vec::new(); + let range = OpIdListRange::encode(opids.iter(), &mut out); + (range, out) + } + + fn decode(range: OpIdListRange, buf: &[u8]) -> Vec> { + range.iter(buf).map(|c| c.unwrap()).collect() + } + + proptest! 
{ + #[test] + fn encode_decode_opid_list(opids in propvec(propvec(opid(), 0..100), 0..100)){ + let (range, encoded) = encode(opids.clone()); + let result = decode(range, &encoded); + assert_eq!(opids, result) + } + + #[test] + fn splice_opid_list(scenario in splice_scenario(propvec(opid(), 0..100))) { + let (range, encoded) = encode(scenario.initial_values.clone()); + let mut out = Vec::new(); + let replacements: Vec, Infallible>> = scenario.replacements.iter().cloned().map(Ok).collect(); + let new_range = range.splice( + &encoded, + scenario.replace_range.clone(), + replacements.into_iter(), + &mut out + ).unwrap(); + let result = decode(new_range, &out[..]); + scenario.check(result); + } + } +} diff --git a/automerge/src/columnar_2/column_range/raw.rs b/automerge/src/columnar_2/column_range/raw.rs new file mode 100644 index 00000000..de512026 --- /dev/null +++ b/automerge/src/columnar_2/column_range/raw.rs @@ -0,0 +1,38 @@ +use std::{borrow::Cow, ops::Range}; + +use crate::columnar_2::encoding::RawDecoder; + +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct RawRange(Range); + +impl RawRange { + pub(crate) fn decoder<'a>(&self, data: &'a [u8]) -> RawDecoder<'a> { + RawDecoder::from(Cow::Borrowed(&data[self.0.clone()])) + } + + pub(crate) fn is_empty(&self) -> bool { + self.0.is_empty() + } + + pub(crate) fn end(&self) -> usize { + self.0.end + } +} + +impl AsRef> for RawRange { + fn as_ref(&self) -> &Range { + &self.0 + } +} + +impl From> for RawRange { + fn from(r: Range) -> RawRange { + RawRange(r) + } +} + +impl From for Range { + fn from(r: RawRange) -> Range { + r.0 + } +} diff --git a/automerge/src/columnar_2/column_range/rle.rs b/automerge/src/columnar_2/column_range/rle.rs new file mode 100644 index 00000000..0729a300 --- /dev/null +++ b/automerge/src/columnar_2/column_range/rle.rs @@ -0,0 +1,216 @@ +use std::{ + borrow::{Borrow, Cow}, + fmt::Debug, + marker::PhantomData, + ops::Range, +}; + +use crate::columnar_2::{ + encoding::{raw, Decodable, 
Encodable, RleDecoder, RleEncoder, Sink}, + SpliceError, +}; + +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct RleRange { + range: Range, + _phantom: PhantomData, +} + +impl RleRange { + pub(crate) fn decoder<'a>(&self, data: &'a [u8]) -> RleDecoder<'a, T> { + RleDecoder::from(Cow::Borrowed(&data[self.range.clone()])) + } + + pub(crate) fn is_empty(&self) -> bool { + self.range.is_empty() + } + + pub(crate) fn start(&self) -> usize { + self.range.start + } + + pub(crate) fn end(&self) -> usize { + self.range.end + } +} + +impl RleRange { + /// The semantics of this are similar to `Vec::splice` + /// + /// # Arguments + /// + /// * `data` - The buffer containing the original rows + /// * `replace` - The range of elements in the original collection to replace + /// * `replace_with` - An iterator to insert in place of the original elements. + /// * `out` - The buffer to encode the resulting collection into + pub(crate) fn splice< + 'a, + I: Iterator, E>>, + TB: Borrow + 'a, + E: std::error::Error, + >( + &self, + data: &[u8], + replace: Range, + mut replace_with: I, + out: &mut Vec, + ) -> Result> { + let start = out.len(); + let mut encoder = self.encoder(out); + let mut decoder = self.decoder(data); + let mut idx = 0; + while idx < replace.start { + match decoder + .next() + .transpose() + .map_err(SpliceError::ReadExisting)? + { + Some(elem) => encoder.append(elem.as_ref()), + None => panic!("out of bounds"), + } + idx += 1; + } + for _ in 0..replace.len() { + decoder.next(); + if let Some(next) = replace_with + .next() + .transpose() + .map_err(SpliceError::ReadReplace)? 
+ { + encoder.append(next.as_ref().map(|n| n.borrow())); + } + } + for next in replace_with { + let next = next.map_err(SpliceError::ReadReplace)?; + encoder.append(next.as_ref().map(|n| n.borrow())); + } + for next in decoder { + let next = next.map_err(SpliceError::ReadExisting)?; + encoder.append(next.as_ref()); + } + let (_, len) = encoder.finish(); + let range = start..(start + len); + Ok(range.into()) + } +} + +impl<'a, T: Encodable + Clone + PartialEq + 'a> RleRange { + pub(crate) fn encoder(&self, output: S) -> RleEncoder { + RleEncoder::from(output) + } + + pub(crate) fn encode, I: Iterator>>( + items: I, + out: &mut Vec, + ) -> Self { + let start = out.len(); + let mut encoder = RleEncoder::new(out); + for item in items { + encoder.append(item); + } + let (_, len) = encoder.finish(); + (start..(start + len)).into() + } +} + +impl AsRef> for RleRange { + fn as_ref(&self) -> &Range { + &self.range + } +} + +impl From> for RleRange { + fn from(r: Range) -> RleRange { + RleRange { + range: r, + _phantom: PhantomData, + } + } +} + +impl From> for Range { + fn from(r: RleRange) -> Range { + r.range + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::columnar_2::encoding::properties::option_splice_scenario; + use proptest::prelude::*; + use std::{borrow::Cow, convert::Infallible}; + + #[test] + fn rle_int_round_trip() { + let vals = [1, 1, 2, 2, 3, 2, 3, 1, 3]; + let mut buf = Vec::with_capacity(vals.len() * 3); + let mut encoder: RleEncoder<_, u64> = RleEncoder::new(&mut buf); + for val in vals { + encoder.append_value(&val) + } + let (_, total_slice_len) = encoder.finish(); + let mut decoder: RleDecoder<'_, u64> = + RleDecoder::from(Cow::Borrowed(&buf[0..total_slice_len])); + let mut result = Vec::new(); + while let Some(Some(val)) = decoder.next().transpose().unwrap() { + result.push(val); + } + assert_eq!(result, vals); + } + + #[test] + fn rle_int_insert() { + let vals = [1, 1, 2, 2, 3, 2, 3, 1, 3]; + let mut buf = Vec::with_capacity(vals.len() 
* 3); + let mut encoder: RleEncoder<_, u64> = RleEncoder::new(&mut buf); + for val in vals.iter().take(4) { + encoder.append_value(val) + } + encoder.append_value(&5); + for val in vals.iter().skip(4) { + encoder.append_value(val); + } + let (_, total_slice_len) = encoder.finish(); + let mut decoder: RleDecoder<'_, u64> = + RleDecoder::from(Cow::Borrowed(&buf[0..total_slice_len])); + let mut result = Vec::new(); + while let Some(Some(val)) = decoder.next().transpose().unwrap() { + result.push(val); + } + let expected = [1, 1, 2, 2, 5, 3, 2, 3, 1, 3]; + assert_eq!(result, expected); + } + + fn encode(vals: &[Option]) -> (RleRange, Vec) { + let mut buf = Vec::with_capacity(vals.len() * 3); + let range = RleRange::::encode(vals.iter().map(|v| v.as_ref()), &mut buf); + (range, buf) + } + + fn decode(range: RleRange, buf: &[u8]) -> Vec> { + range.decoder(buf).collect::, _>>().unwrap() + } + + proptest! { + #[test] + fn splice_ints(scenario in option_splice_scenario(any::>())) { + let (range, buf) = encode(&scenario.initial_values); + let mut out = Vec::new(); + let replacements: Vec, Infallible>> = scenario.replacements.iter().cloned().map(Ok).collect(); + let new_range = range.splice(&buf, scenario.replace_range.clone(), replacements.into_iter(), &mut out).unwrap(); + let result = decode::(new_range, &out); + scenario.check_optional(result) + } + + #[test] + fn splice_strings(scenario in option_splice_scenario(any::>())) { + let (range, buf) = encode(&scenario.initial_values); + let mut out = Vec::new(); + let replacements: Vec, Infallible>> = scenario.replacements.iter().cloned().map(Ok).collect(); + let new_range = range.splice(&buf, scenario.replace_range.clone(), replacements.into_iter(), &mut out).unwrap(); + let result = decode::(new_range, &out); + scenario.check_optional(result) + } + } +} diff --git a/automerge/src/columnar_2/column_range/value.rs b/automerge/src/columnar_2/column_range/value.rs new file mode 100644 index 00000000..f2c9e419 --- /dev/null +++ 
b/automerge/src/columnar_2/column_range/value.rs @@ -0,0 +1,545 @@ +use std::{borrow::Cow, ops::Range}; + +use crate::{ + columnar_2::{ + encoding::{ + leb128::{lebsize, ulebsize}, + raw, DecodeColumnError, RawBytes, RawDecoder, RawEncoder, RleDecoder, RleEncoder, Sink, + }, + SpliceError, + }, + ScalarValue, +}; + +use super::{RawRange, RleRange}; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct ValueRange { + meta: RleRange, + raw: RawRange, +} + +impl ValueRange { + pub(crate) fn new(meta: RleRange, raw: RawRange) -> Self { + Self { meta, raw } + } + + pub(crate) fn range(&self) -> Range { + // This is a hack, instead `raw` should be `Option` + if self.raw.is_empty() { + self.meta.clone().into() + } else { + self.meta.start()..self.raw.end() + } + } + + pub(crate) fn meta_range(&self) -> &RleRange { + &self.meta + } + + pub(crate) fn raw_range(&self) -> &RawRange { + &self.raw + } + + pub(crate) fn encode<'a, 'b, I>(items: I, out: &'b mut Vec) -> Self + where + I: Iterator> + Clone + 'a, + { + Self { + meta: (0..0).into(), + raw: (0..0).into(), + } + .splice(&[], 0..0, items, out) + } + + pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> ValueIter<'a> { + ValueIter { + meta: self.meta.decoder(data), + raw: self.raw.decoder(data), + } + } + + pub(crate) fn splice<'b, I>( + &self, + data: &[u8], + replace: Range, + replace_with: I, + out: &mut Vec, + ) -> Self + where + I: Iterator> + Clone, + { + // SAFETY: try_splice fails if either the iterator of replacements fails, or the iterator + // of existing elements fails. But the replacement iterator is infallible and there + // are no existing elements + self.try_splice::<_, ()>(data, replace, replace_with.map(Ok), out) + .unwrap() + } + + pub(crate) fn try_splice<'b, I, E>( + &self, + data: &[u8], + replace: Range, + mut replace_with: I, + out: &mut Vec, + ) -> Result> + where + I: Iterator, E>> + Clone, + { + // Our semantics here are similar to those of Vec::splice. 
We can describe this + // imperatively like this: + // + // * First copy everything up to the start of `replace` into the output + // * For every index in `replace` skip that index from ourselves and if `replace_with` + // returns `Some` then copy that value to the output + // * Once we have iterated past `replace.end` we continue to call `replace_with` until it + // returns None, copying the results to the output + // * Finally we copy the remainder of our data into the output + // + // However, things are complicated by the fact that our data is stored in two columns. This + // means that we do this in two passes. First we execute the above logic for the metadata + // column. Then we do it all over again for the value column. + + // First pass - metadata + // + // Copy the metadata decoder so we can iterate over it again when we read the values in the + // second pass + let start = out.len(); + let mut meta_copy = self.meta.decoder(data); + let mut meta_out = RleEncoder::<_, u64>::from(&mut *out); + let mut idx = 0; + // Copy everything up to replace.start to the output + while idx < replace.start { + let val = meta_copy + .next() + .transpose() + .map_err(SpliceError::ReadExisting)? + .unwrap_or(None); + meta_out.append(val.as_ref()); + idx += 1; + } + // Now step through replace, skipping our data and inserting the replacement data (if there + // is any) + let mut meta_replace_with = replace_with.clone(); + for _ in 0..replace.len() { + meta_copy.next(); + if let Some(val) = meta_replace_with.next() { + let val = val.map_err(SpliceError::ReadReplace)?; + // Note that we are just constructing metadata values here. 
+ let meta_val = &u64::from(ValueMeta::from(val.as_ref())); + meta_out.append(Some(meta_val)); + } + idx += 1; + } + // Copy any remaining input from the replacments to the output + for val in meta_replace_with { + let val = val.map_err(SpliceError::ReadReplace)?; + let meta_val = &u64::from(ValueMeta::from(val.as_ref())); + meta_out.append(Some(meta_val)); + idx += 1; + } + // Now copy any remaining data we have to the output + while !meta_copy.done() { + let val = meta_copy + .next() + .transpose() + .map_err(SpliceError::ReadExisting)? + .unwrap_or(None); + meta_out.append(val.as_ref()); + } + let (_, meta_len) = meta_out.finish(); + let meta_range = start..(start + meta_len); + + // Second pass, copying the values. For this pass we iterate over ourselves. + // + // + let mut value_range_len = 0; + let mut raw_encoder = RawEncoder::from(out); + let mut iter = self.iter(data); + idx = 0; + // Copy everything up to replace.start to the output + while idx < replace.start { + let val = iter.next().unwrap().unwrap_or(ScalarValue::Null); + value_range_len += encode_val(&mut raw_encoder, &val); + idx += 1; + } + + // Now step through replace, skipping our data and inserting the replacement data (if there + // is any) + for _ in 0..replace.len() { + iter.next(); + if let Some(val) = replace_with.next() { + let val = val.map_err(SpliceError::ReadReplace)?; + value_range_len += encode_val(&mut raw_encoder, val.as_ref()); + } + idx += 1; + } + // Copy any remaining input from the replacments to the output + for val in replace_with { + let val = val.map_err(SpliceError::ReadReplace)?; + value_range_len += encode_val(&mut raw_encoder, val.as_ref()); + idx += 1; + } + // Now copy any remaining data we have to the output + while !iter.done() { + let val = iter.next().unwrap().unwrap_or(ScalarValue::Null); + value_range_len += encode_val(&mut raw_encoder, &val); + } + + let value_range = meta_range.end..(meta_range.end + value_range_len); + + Ok(Self { + meta: 
meta_range.into(), + raw: value_range.into(), + }) + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ValueIter<'a> { + meta: RleDecoder<'a, u64>, + raw: RawDecoder<'a>, +} + +impl<'a> Iterator for ValueIter<'a> { + type Item = Result; + + fn next(&mut self) -> Option { + let next = match self.meta.next().transpose() { + Ok(n) => n, + Err(e) => return Some(Err(DecodeColumnError::decode_raw("meta", e))), + }; + match next { + Some(Some(next)) => { + let val_meta = ValueMeta::from(next); + #[allow(clippy::redundant_slicing)] + match val_meta.type_code() { + ValueType::Null => Some(Ok(ScalarValue::Null)), + ValueType::True => Some(Ok(ScalarValue::Boolean(true))), + ValueType::False => Some(Ok(ScalarValue::Boolean(false))), + ValueType::Uleb => self.parse_raw(val_meta, |mut bytes| { + let val = leb128::read::unsigned(&mut bytes).map_err(|e| { + DecodeColumnError::invalid_value("value", e.to_string()) + })?; + Ok(ScalarValue::Uint(val)) + }), + ValueType::Leb => self.parse_raw(val_meta, |mut bytes| { + let val = leb128::read::signed(&mut bytes).map_err(|e| { + DecodeColumnError::invalid_value("value", e.to_string()) + })?; + Ok(ScalarValue::Int(val)) + }), + ValueType::String => self.parse_raw(val_meta, |bytes| { + let val = std::str::from_utf8(bytes) + .map_err(|e| DecodeColumnError::invalid_value("value", e.to_string()))? 
+ .into(); + Ok(ScalarValue::Str(val)) + }), + ValueType::Float => self.parse_raw(val_meta, |bytes| { + if val_meta.length() != 8 { + return Err(DecodeColumnError::invalid_value( + "value", + format!("float should have length 8, had {0}", val_meta.length()), + )); + } + let raw: [u8; 8] = bytes + .try_into() + // SAFETY: parse_raw() calls read_bytes(val_meta.length()) and we have + // checked that val_meta.length() == 8 + .unwrap(); + let val = f64::from_le_bytes(raw); + Ok(ScalarValue::F64(val)) + }), + ValueType::Counter => self.parse_raw(val_meta, |mut bytes| { + let val = leb128::read::signed(&mut bytes).map_err(|e| { + DecodeColumnError::invalid_value("value", e.to_string()) + })?; + Ok(ScalarValue::Counter(val.into())) + }), + ValueType::Timestamp => self.parse_raw(val_meta, |mut bytes| { + let val = leb128::read::signed(&mut bytes).map_err(|e| { + DecodeColumnError::invalid_value("value", e.to_string()) + })?; + Ok(ScalarValue::Timestamp(val)) + }), + ValueType::Unknown(code) => self.parse_raw(val_meta, |bytes| { + Ok(ScalarValue::Unknown { + type_code: code, + bytes: bytes.to_vec(), + }) + }), + ValueType::Bytes => match self.raw.read_bytes(val_meta.length()) { + Err(e) => Some(Err(DecodeColumnError::invalid_value( + "value", + e.to_string(), + ))), + Ok(bytes) => Some(Ok(ScalarValue::Bytes(bytes.to_vec()))), + }, + } + } + Some(None) => Some(Err(DecodeColumnError::unexpected_null("meta"))), + None => None, + } + } +} + +impl<'a> ValueIter<'a> { + fn parse_raw Result>( + &mut self, + meta: ValueMeta, + f: F, + ) -> Option> { + let raw = match self.raw.read_bytes(meta.length()) { + Err(e) => { + return Some(Err(DecodeColumnError::invalid_value( + "value", + e.to_string(), + ))) + } + Ok(bytes) => bytes, + }; + let val = match f(&*raw) { + Ok(v) => v, + Err(e) => return Some(Err(e)), + }; + Some(Ok(val)) + } + + pub(crate) fn done(&self) -> bool { + self.meta.done() + } +} + +/// Appends values row-wise. 
That is to say, this struct manages two separate chunks of memory, one +/// for the value metadata and one for the raw values. To use it, create a new encoder using +/// `ValueEncoder::new`, sequentially append values using `ValueEncoder::append`, and finallly +/// concatenate the two columns and append them to a buffer returning the range within the output +/// buffer which contains the concatenated columns using `ValueEncoder::finish`. +pub(crate) struct ValueEncoder { + meta: RleEncoder, + raw: RawEncoder, +} + +impl ValueEncoder { + pub(crate) fn append(&mut self, value: &ScalarValue) { + let meta_val = &u64::from(ValueMeta::from(value)); + self.meta.append_value(meta_val); + encode_val(&mut self.raw, value); + } +} + +impl ValueEncoder> { + pub(crate) fn new() -> Self { + Self { + meta: RleEncoder::new(Vec::new()), + raw: RawEncoder::from(Vec::new()), + } + } + pub(crate) fn finish(self, out: &mut Vec) -> ValueRange { + let meta_start = out.len(); + let (meta, _) = self.meta.finish(); + out.extend(meta); + let meta_end = out.len(); + + let (val, _) = self.raw.finish(); + out.extend(val); + let val_end = out.len(); + ValueRange { + meta: (meta_start..meta_end).into(), + raw: (meta_end..val_end).into(), + } + } +} + +fn encode_val(out: &mut RawEncoder, val: &ScalarValue) -> usize { + match val { + ScalarValue::Uint(i) => out.append(*i), + ScalarValue::Int(i) => out.append(*i), + ScalarValue::Null => 0, + ScalarValue::Boolean(_) => 0, + ScalarValue::Timestamp(i) => out.append(*i), + ScalarValue::F64(f) => out.append(*f), + ScalarValue::Counter(i) => out.append(i.start), + ScalarValue::Str(s) => out.append(RawBytes::from(s.as_bytes())), + ScalarValue::Bytes(b) => out.append(RawBytes::from(&b[..])), + ScalarValue::Unknown { bytes, .. 
} => out.append(RawBytes::from(&bytes[..])), + } +} + +#[derive(Debug)] +enum ValueType { + Null, + False, + True, + Uleb, + Leb, + Float, + String, + Bytes, + Counter, + Timestamp, + Unknown(u8), +} + +#[derive(Copy, Clone)] +struct ValueMeta(u64); + +impl ValueMeta { + fn type_code(&self) -> ValueType { + let low_byte = (self.0 as u8) & 0b00001111; + match low_byte { + 0 => ValueType::Null, + 1 => ValueType::False, + 2 => ValueType::True, + 3 => ValueType::Uleb, + 4 => ValueType::Leb, + 5 => ValueType::Float, + 6 => ValueType::String, + 7 => ValueType::Bytes, + 8 => ValueType::Counter, + 9 => ValueType::Timestamp, + other => ValueType::Unknown(other), + } + } + + fn length(&self) -> usize { + (self.0 >> 4) as usize + } +} + +impl<'a> From<&ScalarValue> for ValueMeta { + fn from(p: &ScalarValue) -> Self { + match p { + ScalarValue::Uint(i) => Self((ulebsize(*i) << 4) | 3), + ScalarValue::Int(i) => Self((lebsize(*i) << 4) | 4), + ScalarValue::Null => Self(0), + ScalarValue::Boolean(b) => Self(match b { + false => 1, + true => 2, + }), + ScalarValue::Timestamp(i) => Self((lebsize(*i) << 4) | 9), + ScalarValue::F64(_) => Self((8 << 4) | 5), + ScalarValue::Counter(i) => Self((lebsize(i.start) << 4) | 8), + ScalarValue::Str(s) => Self(((s.as_bytes().len() as u64) << 4) | 6), + ScalarValue::Bytes(b) => Self(((b.len() as u64) << 4) | 7), + ScalarValue::Unknown { type_code, bytes } => { + Self(((bytes.len() as u64) << 4) | (*type_code as u64)) + } + } + } +} + +impl From for ValueMeta { + fn from(raw: u64) -> Self { + ValueMeta(raw) + } +} + +impl From for u64 { + fn from(v: ValueMeta) -> Self { + v.0 + } +} + +impl<'a> From<&ScalarValue> for ValueType { + fn from(p: &ScalarValue) -> Self { + match p { + ScalarValue::Uint(_) => ValueType::Uleb, + ScalarValue::Int(_) => ValueType::Leb, + ScalarValue::Null => ValueType::Null, + ScalarValue::Boolean(b) => match b { + true => ValueType::True, + false => ValueType::False, + }, + ScalarValue::Timestamp(_) => 
ValueType::Timestamp, + ScalarValue::F64(_) => ValueType::Float, + ScalarValue::Counter(_) => ValueType::Counter, + ScalarValue::Str(_) => ValueType::String, + ScalarValue::Bytes(_) => ValueType::Bytes, + ScalarValue::Unknown { type_code, .. } => ValueType::Unknown(*type_code), + } + } +} + +impl From for u64 { + fn from(v: ValueType) -> Self { + match v { + ValueType::Null => 0, + ValueType::False => 1, + ValueType::True => 2, + ValueType::Uleb => 3, + ValueType::Leb => 4, + ValueType::Float => 5, + ValueType::String => 6, + ValueType::Bytes => 7, + ValueType::Counter => 8, + ValueType::Timestamp => 9, + ValueType::Unknown(other) => other as u64, + } + } +} +#[cfg(test)] +mod tests { + use super::*; + use crate::columnar_2::encoding::properties::{scalar_value, splice_scenario}; + use proptest::prelude::*; + use std::borrow::Cow; + + fn encode_values(vals: &[ScalarValue]) -> (Vec, ValueRange) { + let mut out = Vec::new(); + let range = ValueRange::encode(vals.iter().cloned().map(Cow::Owned), &mut out); + (out, range) + } + + fn encode_rowwise(vals: &[ScalarValue]) -> (Vec, ValueRange) { + let mut out = Vec::new(); + let mut encoder = ValueEncoder::new(); + for val in vals { + encoder.append(val); + } + let range = encoder.finish(&mut out); + (out, range) + } + + proptest! 
{ + #[test] + fn test_initialize_splice(values in proptest::collection::vec(scalar_value(), 0..100)) { + let (out, range) = encode_values(&values[..]); + let testvals = range.iter(&out).collect::, _>>().unwrap(); + assert_eq!(values, testvals); + } + + #[test] + fn test_splice_values(scenario in splice_scenario(scalar_value())){ + let (out, range) = encode_values(&scenario.initial_values); + let mut spliced = Vec::new(); + let new_range = range + .splice( + &out, + scenario.replace_range.clone(), + scenario.replacements.clone().into_iter().map(Cow::Owned), + &mut spliced, + ); + let result_values = new_range.iter(&spliced).collect::, _>>().unwrap(); + let mut expected: Vec<_> = scenario.initial_values.clone(); + expected.splice(scenario.replace_range, scenario.replacements); + assert_eq!(result_values, expected); + } + + #[test] + fn encode_row_wise_and_columnwise_equal(values in proptest::collection::vec(scalar_value(), 0..50)) { + let (colwise, col_range) = encode_values(&values[..]); + let (rowwise, row_range) = encode_rowwise(&values[..]); + assert_eq!(colwise, rowwise); + assert_eq!(col_range, row_range); + } + } + + #[test] + fn test_value_uleb() { + let vals = [ScalarValue::Uint(127), ScalarValue::Uint(183)]; + let (out, range) = encode_values(&vals); + let result = range.iter(&out).collect::, _>>().unwrap(); + assert_eq!(result, vals); + } +} diff --git a/automerge/src/columnar_2/encoding.rs b/automerge/src/columnar_2/encoding.rs new file mode 100644 index 00000000..bbdb34a8 --- /dev/null +++ b/automerge/src/columnar_2/encoding.rs @@ -0,0 +1,63 @@ +pub(crate) mod raw; + +pub(crate) use raw::{RawDecoder, RawEncoder}; +mod rle; +pub(crate) use rle::{RleDecoder, RleEncoder}; +mod boolean; +pub(crate) use boolean::{BooleanDecoder, BooleanEncoder}; +mod delta; +pub(crate) use delta::{DeltaDecoder, DeltaEncoder}; +pub(crate) mod leb128; + +pub(crate) mod column_decoder; +pub(crate) use column_decoder::ColumnDecoder; + +#[cfg(test)] +pub(crate) mod properties; + 
+pub(crate) trait Sink { + fn append(&mut self, bytes: &[u8]); +} + +impl<'a> Sink for &'a mut Vec { + fn append(&mut self, bytes: &[u8]) { + self.extend(bytes) + } +} + +impl Sink for Vec { + fn append(&mut self, bytes: &[u8]) { + self.extend(bytes) + } +} + +pub(crate) trait Encodable { + fn encode(&self, out: &mut S) -> usize; +} + +mod encodable_impls; +pub(crate) use encodable_impls::RawBytes; + +#[derive(thiserror::Error, Debug)] +pub(crate) enum DecodeError { + #[error(transparent)] + Io(#[from] std::io::Error), + #[error("invalid integer")] + FromInt(#[from] std::num::TryFromIntError), + #[error("bad leb128")] + BadLeb(#[from] ::leb128::read::Error), + #[error("attempted to allocate {attempted} which is larger than the maximum of {maximum}")] + OverlargeAllocation { attempted: usize, maximum: usize }, + #[error("invalid string encoding")] + BadString, +} + +pub(crate) trait Decodable: Sized { + fn decode(bytes: &mut R) -> Result + where + R: std::io::Read; +} +mod decodable_impls; + +pub(crate) mod col_error; +pub(crate) use col_error::DecodeColumnError; diff --git a/automerge/src/columnar_2/encoding/boolean.rs b/automerge/src/columnar_2/encoding/boolean.rs new file mode 100644 index 00000000..26cb1838 --- /dev/null +++ b/automerge/src/columnar_2/encoding/boolean.rs @@ -0,0 +1,131 @@ +use std::borrow::Cow; + +use super::{raw, Encodable, RawDecoder, Sink}; + +/// Encodes booleans by storing the count of the same value. +/// +/// The sequence of numbers describes the count of false values on even indices (0-indexed) and the +/// count of true values on odd indices (0-indexed). +/// +/// Counts are encoded as usize. 
+pub(crate) struct BooleanEncoder { + written: usize, + //buf: &'a mut Vec, + buf: S, + last: bool, + count: usize, +} + +impl BooleanEncoder> { + pub(crate) fn new() -> BooleanEncoder> { + BooleanEncoder::from_sink(Vec::new()) + } +} + +impl BooleanEncoder { + pub(crate) fn from_sink(sink: S) -> Self { + BooleanEncoder { + written: 0, + buf: sink, + last: false, + count: 0, + } + } + + pub(crate) fn append(&mut self, value: bool) { + if value == self.last { + self.count += 1; + } else { + self.written += self.count.encode(&mut self.buf); + self.last = value; + self.count = 1; + } + } + + pub(crate) fn finish(mut self) -> (S, usize) { + if self.count > 0 { + self.written += self.count.encode(&mut self.buf); + } + (self.buf, self.written) + } +} + +impl From for BooleanEncoder { + fn from(output: S) -> Self { + BooleanEncoder::from_sink(output) + } +} + +/// See the discussion of [`BooleanEncoder`] for details on this encoding +#[derive(Clone, Debug)] +pub(crate) struct BooleanDecoder<'a> { + decoder: RawDecoder<'a>, + last_value: bool, + count: usize, +} + +impl<'a> From> for BooleanDecoder<'a> { + fn from(bytes: Cow<'a, [u8]>) -> Self { + BooleanDecoder { + decoder: RawDecoder::from(bytes), + last_value: true, + count: 0, + } + } +} + +impl<'a> From<&'a [u8]> for BooleanDecoder<'a> { + fn from(d: &'a [u8]) -> Self { + Cow::Borrowed(d).into() + } +} + +// this is an endless iterator that returns false after input is exhausted +impl<'a> Iterator for BooleanDecoder<'a> { + type Item = Result; + + fn next(&mut self) -> Option { + while self.count == 0 { + if self.decoder.done() && self.count == 0 { + return None; + } + self.count = match self.decoder.read() { + Ok(c) => c, + Err(e) => return Some(Err(e)), + }; + self.last_value = !self.last_value; + } + self.count -= 1; + Some(Ok(self.last_value)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use proptest::prelude::*; + + fn encode(vals: &[bool]) -> Vec { + let mut buf = Vec::new(); + let mut encoder = 
BooleanEncoder::from_sink(&mut buf); + for val in vals { + encoder.append(*val); + } + encoder.finish(); + buf + } + + fn decode(buf: &[u8]) -> Vec { + BooleanDecoder::from(buf) + .collect::, _>>() + .unwrap() + } + + proptest! { + #[test] + fn encode_decode_bools(vals in proptest::collection::vec(any::(), 0..100)) { + assert_eq!(vals, decode(&encode(&vals))) + } + } +} diff --git a/automerge/src/columnar_2/encoding/col_error.rs b/automerge/src/columnar_2/encoding/col_error.rs new file mode 100644 index 00000000..c8d5c5c0 --- /dev/null +++ b/automerge/src/columnar_2/encoding/col_error.rs @@ -0,0 +1,88 @@ +#[derive(Clone, Debug)] +pub(crate) struct DecodeColumnError { + path: Path, + error: DecodeColErrorKind, +} + +impl std::error::Error for DecodeColumnError {} + +impl std::fmt::Display for DecodeColumnError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match &self.error { + DecodeColErrorKind::UnexpectedNull => { + write!(f, "unexpected null in column {}", self.path) + } + DecodeColErrorKind::InvalidValue { reason } => { + write!(f, "invalid value in column {}: {}", self.path, reason) + } + } + } +} + +#[derive(Clone, Debug)] +struct Path(Vec); + +impl std::fmt::Display for Path { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + for (index, elem) in self.0.iter().rev().enumerate() { + if index != 0 { + write!(f, ":")?; + } + write!(f, "{}", elem)?; + } + Ok(()) + } +} + +impl Path { + fn push>(&mut self, col: S) { + self.0.push(col.as_ref().to_string()) + } +} + +impl> From for Path { + fn from(p: S) -> Self { + Self(vec![p.as_ref().to_string()]) + } +} + +#[derive(Clone, Debug)] +enum DecodeColErrorKind { + UnexpectedNull, + InvalidValue { reason: String }, +} + +impl DecodeColumnError { + pub(crate) fn decode_raw>(col: S, raw_err: super::raw::Error) -> Self { + Self { + path: col.into(), + error: DecodeColErrorKind::InvalidValue { + reason: raw_err.to_string(), + }, + } + } + + pub(crate) fn 
unexpected_null>(col: S) -> DecodeColumnError { + Self { + path: col.into(), + error: DecodeColErrorKind::UnexpectedNull, + } + } + + pub(crate) fn invalid_value, R: AsRef>( + col: S, + reason: R, + ) -> DecodeColumnError { + Self { + path: col.into(), + error: DecodeColErrorKind::InvalidValue { + reason: reason.as_ref().to_string(), + }, + } + } + + pub(crate) fn in_column>(mut self, col: S) -> DecodeColumnError { + self.path.push(col.as_ref()); + self + } +} diff --git a/automerge/src/columnar_2/encoding/column_decoder.rs b/automerge/src/columnar_2/encoding/column_decoder.rs new file mode 100644 index 00000000..8bc34f69 --- /dev/null +++ b/automerge/src/columnar_2/encoding/column_decoder.rs @@ -0,0 +1,157 @@ +use crate::{ + columnar_2::{ + column_range::{DepsIter, KeyIter, ObjIdIter, OpIdIter, OpIdListIter, ValueIter}, + encoding, Key, + }, + types::{ObjId, OpId}, + ScalarValue, +}; + +pub(crate) trait IntoColError: std::error::Error { + fn into_col_error>(self, col_name: S) -> encoding::DecodeColumnError; +} + +impl IntoColError for encoding::raw::Error { + fn into_col_error>(self, col_name: S) -> encoding::DecodeColumnError { + encoding::DecodeColumnError::decode_raw(col_name, self) + } +} + +impl IntoColError for encoding::DecodeColumnError { + fn into_col_error>(self, col_name: S) -> encoding::DecodeColumnError { + self.in_column(col_name) + } +} + +/// A helper trait which allows users to annotate decoders with errors containing a column name +/// +/// Frequently we have an iterator which decodes values from some underlying column storage, e.g. +/// we might have a `BooleanDecoder` which decodes items from an `insert` column. 
In the context +/// where we are reading from this column we would like to produce errors which describe which +/// column the error occurred in - to this end we require that the error produced by the underlying +/// decoder implement `IntoColError` and we provide the `next_in_col` method to call +/// `into_col_error` on any errors produced by the decoder. +pub(crate) trait ColumnDecoder: Iterator> { + type Error: IntoColError; + type Value; + + fn maybe_next_in_col>( + &mut self, + col_name: S, + ) -> Result, encoding::DecodeColumnError>; + + /// Decode the next value from this decoder, annotating any error with the `col_name` + fn next_in_col>( + &mut self, + col_name: S, + ) -> Result { + self.maybe_next_in_col(&col_name)? + .ok_or_else(|| encoding::DecodeColumnError::unexpected_null(col_name)) + } +} + +impl<'a> ColumnDecoder for encoding::BooleanDecoder<'a> { + type Error = encoding::raw::Error; + type Value = bool; + + fn maybe_next_in_col>( + &mut self, + col_name: S, + ) -> Result, encoding::DecodeColumnError> { + self.next() + .transpose() + .map_err(|e| e.into_col_error(col_name)) + } +} + +impl ColumnDecoder> for I +where + I: Iterator, E>>, + E: IntoColError, +{ + type Error = E; + type Value = T; + + fn maybe_next_in_col>( + &mut self, + col_name: S, + ) -> Result, encoding::DecodeColumnError> { + Ok(self + .next() + .transpose() + .map_err(|e| e.into_col_error(col_name))? 
+ .flatten()) + } +} + +impl<'a> ColumnDecoder> for OpIdListIter<'a> { + type Error = encoding::DecodeColumnError; + type Value = Vec; + + fn maybe_next_in_col>( + &mut self, + col_name: S, + ) -> Result>, encoding::DecodeColumnError> { + self.next().transpose().map_err(|e| e.in_column(col_name)) + } +} + +impl<'a> ColumnDecoder for ValueIter<'a> { + type Error = encoding::DecodeColumnError; + type Value = ScalarValue; + + fn maybe_next_in_col>( + &mut self, + col_name: S, + ) -> Result, encoding::DecodeColumnError> { + self.next().transpose().map_err(|e| e.in_column(col_name)) + } +} + +impl<'a> ColumnDecoder for KeyIter<'a> { + type Error = encoding::DecodeColumnError; + type Value = Key; + + fn maybe_next_in_col>( + &mut self, + col_name: S, + ) -> Result, encoding::DecodeColumnError> { + self.next().transpose().map_err(|e| e.in_column(col_name)) + } +} + +impl<'a> ColumnDecoder for ObjIdIter<'a> { + type Value = ObjId; + type Error = encoding::DecodeColumnError; + + fn maybe_next_in_col>( + &mut self, + col_name: S, + ) -> Result, encoding::DecodeColumnError> { + self.next().transpose().map_err(|e| e.in_column(col_name)) + } +} + +impl<'a> ColumnDecoder for OpIdIter<'a> { + type Value = OpId; + type Error = encoding::DecodeColumnError; + + fn maybe_next_in_col>( + &mut self, + col_name: S, + ) -> Result, encoding::DecodeColumnError> { + self.next().transpose().map_err(|e| e.in_column(col_name)) + } +} + +impl<'a> ColumnDecoder> for DepsIter<'a> { + type Value = Vec; + type Error = encoding::DecodeColumnError; + + fn maybe_next_in_col>( + &mut self, + col_name: S, + ) -> Result, encoding::DecodeColumnError> { + self.next().transpose().map_err(|e| e.in_column(col_name)) + } +} diff --git a/automerge/src/columnar_2/encoding/decodable_impls.rs b/automerge/src/columnar_2/encoding/decodable_impls.rs new file mode 100644 index 00000000..26425f15 --- /dev/null +++ b/automerge/src/columnar_2/encoding/decodable_impls.rs @@ -0,0 +1,175 @@ +use smol_str::SmolStr; +use 
std::{borrow::Cow, convert::TryFrom, io::Read, str}; + +use super::{Decodable, DecodeError}; +use crate::ActorId; + +// We don't allow decoding items which are larger than this. Almost nothing should be this large +// so this is really guarding against bad encodings which accidentally grab loads of memory +const MAX_ALLOCATION: usize = 1000000000; + +impl Decodable for u8 { + fn decode(bytes: &mut R) -> Result + where + R: Read, + { + let mut buffer = [0; 1]; + bytes.read_exact(&mut buffer)?; + Ok(buffer[0]) + } +} + +impl Decodable for u32 { + fn decode(bytes: &mut R) -> Result + where + R: Read, + { + u64::decode::(bytes).and_then(|val| Self::try_from(val).map_err(DecodeError::from)) + } +} + +impl Decodable for usize { + fn decode(bytes: &mut R) -> Result + where + R: Read, + { + u64::decode::(bytes).and_then(|val| Self::try_from(val).map_err(DecodeError::from)) + } +} + +impl Decodable for isize { + fn decode(bytes: &mut R) -> Result + where + R: Read, + { + i64::decode::(bytes).and_then(|val| Self::try_from(val).map_err(DecodeError::from)) + } +} + +impl Decodable for i32 { + fn decode(bytes: &mut R) -> Result + where + R: Read, + { + i64::decode::(bytes).and_then(|val| Self::try_from(val).map_err(DecodeError::from)) + } +} + +impl Decodable for i64 { + fn decode(bytes: &mut R) -> Result + where + R: Read, + { + leb128::read::signed(bytes).map_err(DecodeError::from) + } +} + +impl Decodable for f64 { + fn decode(bytes: &mut R) -> Result + where + R: Read, + { + let mut buffer = [0; 8]; + bytes.read_exact(&mut buffer)?; + Ok(Self::from_le_bytes(buffer)) + } +} + +impl Decodable for f32 { + fn decode(bytes: &mut R) -> Result + where + R: Read, + { + let mut buffer = [0; 4]; + bytes.read_exact(&mut buffer)?; + Ok(Self::from_le_bytes(buffer)) + } +} + +impl Decodable for u64 { + fn decode(bytes: &mut R) -> Result + where + R: Read, + { + leb128::read::unsigned(bytes).map_err(DecodeError::from) + } +} + +impl Decodable for Vec { + fn decode(bytes: &mut R) -> Result 
+ where + R: Read, + { + let len = usize::decode::(bytes)?; + if len == 0 { + return Ok(vec![]); + } + if len > MAX_ALLOCATION { + return Err(DecodeError::OverlargeAllocation { + attempted: len, + maximum: MAX_ALLOCATION, + }); + } + let mut buffer = vec![0; len]; + bytes.read_exact(buffer.as_mut_slice())?; + Ok(buffer) + } +} + +impl Decodable for SmolStr { + fn decode(bytes: &mut R) -> Result + where + R: Read, + { + let buffer = Vec::decode(bytes)?; + str::from_utf8(&buffer) + .map(|t| t.into()) + .map_err(|_| DecodeError::BadString) + } +} + +impl Decodable for Cow<'static, SmolStr> { + fn decode(bytes: &mut R) -> Result + where + R: std::io::Read, + { + SmolStr::decode(bytes).map(Cow::Owned) + } +} + +impl Decodable for String { + fn decode(bytes: &mut R) -> Result + where + R: Read, + { + let buffer = Vec::decode(bytes)?; + str::from_utf8(&buffer) + .map(|t| t.into()) + .map_err(|_| DecodeError::BadString) + } +} + +impl Decodable for Option { + fn decode(bytes: &mut R) -> Result + where + R: Read, + { + let buffer = Vec::decode(bytes)?; + if buffer.is_empty() { + return Ok(None); + } + str::from_utf8(&buffer) + .map(|t| Some(t.into())) + .map_err(|_| DecodeError::BadString) + } +} + +impl Decodable for ActorId { + fn decode(bytes: &mut R) -> Result + where + R: Read, + { + let buffer = Vec::decode(bytes)?; + Ok(buffer.into()) + } +} diff --git a/automerge/src/columnar_2/encoding/delta.rs b/automerge/src/columnar_2/encoding/delta.rs new file mode 100644 index 00000000..049bb6fb --- /dev/null +++ b/automerge/src/columnar_2/encoding/delta.rs @@ -0,0 +1,95 @@ +use std::borrow::Cow; + +use super::{raw, RleDecoder, RleEncoder, Sink}; + +/// Encodes integers as the change since the previous value. +/// +/// The initial value is 0 encoded as u64. Deltas are encoded as i64. +/// +/// Run length encoding is then applied to the resulting sequence. 
+pub(crate) struct DeltaEncoder { + rle: RleEncoder, + absolute_value: i64, +} + +impl DeltaEncoder { + pub(crate) fn new(output: S) -> DeltaEncoder { + DeltaEncoder { + rle: RleEncoder::new(output), + absolute_value: 0, + } + } + + pub(crate) fn append_value(&mut self, value: i64) { + self.rle + .append_value(&(value.saturating_sub(self.absolute_value))); + self.absolute_value = value; + } + + pub(crate) fn append_null(&mut self) { + self.rle.append_null(); + } + + pub(crate) fn append(&mut self, val: Option) { + match val { + Some(v) => self.append_value(v), + None => self.append_null(), + } + } + + pub(crate) fn finish(self) -> (S, usize) { + self.rle.finish() + } +} + +impl From for DeltaEncoder { + fn from(output: S) -> Self { + DeltaEncoder::new(output) + } +} + +/// See discussion on [`DeltaEncoder`] for the format data is stored in. +#[derive(Debug, Clone)] +pub(crate) struct DeltaDecoder<'a> { + rle: RleDecoder<'a, i64>, + absolute_val: i64, +} + +impl<'a> DeltaDecoder<'a> { + pub(crate) fn done(&self) -> bool { + self.rle.done() + } +} + +impl<'a> From> for DeltaDecoder<'a> { + fn from(bytes: Cow<'a, [u8]>) -> Self { + DeltaDecoder { + rle: RleDecoder::from(bytes), + absolute_val: 0, + } + } +} + +impl<'a> From<&'a [u8]> for DeltaDecoder<'a> { + fn from(d: &'a [u8]) -> Self { + Cow::Borrowed(d).into() + } +} + +impl<'a> Iterator for DeltaDecoder<'a> { + type Item = Result, raw::Error>; + + fn next(&mut self) -> Option { + match self.rle.next() { + Some(Ok(next)) => match next { + Some(delta) => { + self.absolute_val = self.absolute_val.saturating_add(delta); + Some(Ok(Some(self.absolute_val))) + } + None => Some(Ok(None)), + }, + Some(Err(e)) => Some(Err(e)), + None => None, + } + } +} diff --git a/automerge/src/columnar_2/encoding/encodable_impls.rs b/automerge/src/columnar_2/encoding/encodable_impls.rs new file mode 100644 index 00000000..a1b5d8ce --- /dev/null +++ b/automerge/src/columnar_2/encoding/encodable_impls.rs @@ -0,0 +1,200 @@ +use 
super::{Encodable, Sink}; + +use std::borrow::Cow; + +use smol_str::SmolStr; + +/// Encodes bytes without a length prefix +pub(crate) struct RawBytes<'a>(Cow<'a, [u8]>); + +impl<'a> From<&'a [u8]> for RawBytes<'a> { + fn from(r: &'a [u8]) -> Self { + RawBytes(r.into()) + } +} + +impl<'a> From> for RawBytes<'a> { + fn from(c: Cow<'a, [u8]>) -> Self { + RawBytes(c) + } +} + +impl<'a> Encodable for RawBytes<'a> { + fn encode(&self, out: &mut S) -> usize { + out.append(&self.0); + self.0.len() + } +} + +impl Encodable for SmolStr { + fn encode(&self, buf: &mut S) -> usize { + let bytes = self.as_bytes(); + let len_encoded = bytes.len().encode(buf); + let data_len = bytes.encode(buf); + len_encoded + data_len + } +} + +impl<'a> Encodable for Cow<'a, SmolStr> { + fn encode(&self, buf: &mut S) -> usize { + self.as_ref().encode(buf) + } +} + +impl Encodable for String { + fn encode(&self, buf: &mut S) -> usize { + let bytes = self.as_bytes(); + let len_encoded = bytes.len().encode(buf); + let data_len = bytes.encode(buf); + len_encoded + data_len + } +} + +impl Encodable for Option { + fn encode(&self, buf: &mut S) -> usize { + if let Some(s) = self { + s.encode(buf) + } else { + 0.encode(buf) + } + } +} + +impl<'a> Encodable for Option> { + fn encode(&self, out: &mut S) -> usize { + if let Some(s) = self { + SmolStr::encode(s, out) + } else { + 0.encode(out) + } + } +} + +impl Encodable for f64 { + fn encode(&self, buf: &mut S) -> usize { + let bytes = self.to_le_bytes(); + buf.append(&bytes); + bytes.len() + } +} + +impl Encodable for f32 { + fn encode(&self, buf: &mut S) -> usize { + let bytes = self.to_le_bytes(); + buf.append(&bytes); + bytes.len() + } +} + +impl Encodable for usize { + fn encode(&self, buf: &mut S) -> usize { + (*self as u64).encode(buf) + } +} + +impl Encodable for u32 { + fn encode(&self, buf: &mut S) -> usize { + u64::from(*self).encode(buf) + } +} + +impl Encodable for i32 { + fn encode(&self, buf: &mut S) -> usize { + 
i64::from(*self).encode(buf) + } +} + +impl Encodable for [u8] { + fn encode(&self, out: &mut S) -> usize { + out.append(self); + self.len() + } +} + +impl Encodable for &[u8] { + fn encode(&self, out: &mut S) -> usize { + out.append(self); + self.len() + } +} + +impl<'a> Encodable for Cow<'a, [u8]> { + fn encode(&self, out: &mut S) -> usize { + out.append(self); + self.len() + } +} + +impl Encodable for Vec { + fn encode(&self, out: &mut S) -> usize { + Encodable::encode(&self[..], out) + } +} + +mod leb128_things { + use super::{Encodable, Sink}; + + impl Encodable for u64 { + fn encode(&self, buf: &mut S) -> usize { + let mut val = *self; + let mut bytes_written = 0; + loop { + let mut byte = low_bits_of_u64(val); + val >>= 7; + if val != 0 { + // More bytes to come, so set the continuation bit. + byte |= CONTINUATION_BIT; + } + + buf.append(&[byte]); + bytes_written += 1; + + if val == 0 { + return bytes_written; + } + } + } + } + + impl Encodable for i64 { + fn encode(&self, buf: &mut S) -> usize { + let mut val = *self; + let mut bytes_written = 0; + loop { + let mut byte = val as u8; + // Keep the sign bit for testing + val >>= 6; + let done = val == 0 || val == -1; + if done { + byte &= !CONTINUATION_BIT; + } else { + // Remove the sign bit + val >>= 1; + // More bytes to come, so set the continuation bit. 
+ byte |= CONTINUATION_BIT; + } + + buf.append(&[byte]); + bytes_written += 1; + + if done { + return bytes_written; + } + } + } + } + + #[doc(hidden)] + const CONTINUATION_BIT: u8 = 1 << 7; + + #[inline] + fn low_bits_of_byte(byte: u8) -> u8 { + byte & !CONTINUATION_BIT + } + + #[inline] + fn low_bits_of_u64(val: u64) -> u8 { + let byte = val & (std::u8::MAX as u64); + low_bits_of_byte(byte as u8) + } +} diff --git a/automerge/src/columnar_2/encoding/leb128.rs b/automerge/src/columnar_2/encoding/leb128.rs new file mode 100644 index 00000000..036cfba8 --- /dev/null +++ b/automerge/src/columnar_2/encoding/leb128.rs @@ -0,0 +1,73 @@ +/// The number of bytes required to encode `val` as a LEB128 integer +pub(crate) fn lebsize(val: i64) -> u64 { + let numbits = numbits_i64(val); + (numbits as f64 / 7.0).floor() as u64 + 1 +} + +/// The number of bytes required to encode `val` as a uLEB128 integer +pub(crate) fn ulebsize(val: u64) -> u64 { + if val <= 1 { + return 1; + } + let numbits = numbits_u64(val); + let mut numblocks = (numbits as f64 / 7.0).floor() as u64; + if numbits % 7 != 0 { + numblocks += 1; + } + numblocks +} + +fn numbits_i64(val: i64) -> u64 { + // Is this right? This feels like it's not right + (std::mem::size_of::() as u32 * 8 - val.abs().leading_zeros()) as u64 +} + +fn numbits_u64(val: u64) -> u64 { + (std::mem::size_of::() as u32 * 8 - val.leading_zeros()) as u64 +} + +#[cfg(test)] +mod tests { + use super::*; + use proptest::prelude::*; + + proptest! 
{ + #[test] + fn test_ulebsize(val in 0..u64::MAX) { + let mut out = Vec::new(); + leb128::write::unsigned(&mut out, val).unwrap(); + let expected = out.len() as u64; + assert_eq!(expected, ulebsize(val)) + } + + #[test] + fn test_lebsize(val in i64::MIN..i64::MAX) { + let mut out = Vec::new(); + leb128::write::signed(&mut out, val).unwrap(); + let expected = out.len() as u64; + assert_eq!(expected, lebsize(val)) + } + } + + #[test] + fn ulebsize_examples() { + let scenarios = vec![0, 1, 127, 128, 129, 169]; + for val in scenarios { + let mut out = Vec::new(); + leb128::write::unsigned(&mut out, val).unwrap(); + let expected = out.len() as u64; + assert_eq!(ulebsize(val), expected, "value: {}", val) + } + } + + #[test] + fn lebsize_examples() { + let scenarios = vec![0, 1, -1, 127, 128, -127, -128, -2097152, 169]; + for val in scenarios { + let mut out = Vec::new(); + leb128::write::signed(&mut out, val).unwrap(); + let expected = out.len() as u64; + assert_eq!(lebsize(val), expected, "value: {}", val) + } + } +} diff --git a/automerge/src/columnar_2/encoding/properties.rs b/automerge/src/columnar_2/encoding/properties.rs new file mode 100644 index 00000000..b5c0bfa8 --- /dev/null +++ b/automerge/src/columnar_2/encoding/properties.rs @@ -0,0 +1,178 @@ +//! Helpers for property tests. 
+ +use std::{fmt::Debug, ops::Range}; + +use proptest::prelude::*; +use smol_str::SmolStr; + +use crate::{ + columnar_2::Key, + types::{ElemId, OpId, ScalarValue}, +}; + +#[derive(Clone, Debug)] +pub(crate) struct SpliceScenario { + pub(crate) initial_values: Vec, + pub(crate) replace_range: Range, + pub(crate) replacements: Vec, +} + +impl SpliceScenario { + pub(crate) fn check(&self, results: Vec) { + let mut expected = self.initial_values.clone(); + expected.splice(self.replace_range.clone(), self.replacements.clone()); + assert_eq!(expected, results) + } +} + +impl SpliceScenario> { + /// Checks that `results` are the same as `SpliceScenario::initial_values.splice(replace_range, + /// replacements)`, with two slight changes: + /// + /// * If all of `initial_values` are `None` then this returns true if the output is just + /// `replacements` + /// * If the result of `Vec::splice` would return a vector of all `None` then this checks the + /// result is actually an empty vector + /// + /// This is to accomodate the fact that the RLE encoder can encode a sequence of all `None` as + /// an empty sequence, in which case we decode it as an empty sequence. 
+ pub(crate) fn check_optional(&self, results: Vec>) { + if self.initial_values.iter().all(|v| v.is_none()) { + if self.replacements.iter().all(|v| v.is_none()) { + assert!(results.is_empty()); + } else { + assert_eq!(results, self.replacements); + } + } else { + let mut expected = self.initial_values.clone(); + expected.splice(self.replace_range.clone(), self.replacements.clone()); + if expected.iter().all(|e| e.is_none()) { + assert!(results.is_empty()) + } else { + assert_eq!(expected, results) + } + } + } +} + +pub(crate) fn splice_scenario + Clone, T: Debug + Clone + 'static>( + item_strat: S, +) -> impl Strategy> { + ( + proptest::collection::vec(item_strat.clone(), 0..100), + proptest::collection::vec(item_strat, 0..10), + ) + .prop_flat_map(move |(values, to_splice)| { + if values.is_empty() { + Just(SpliceScenario { + initial_values: values, + replace_range: 0..0, + replacements: to_splice, + }) + .boxed() + } else { + // This is somewhat awkward to write because we have to carry the `values` and + // `to_splice` through as `Just(..)` to please the borrow checker. + (0..values.len(), Just(values), Just(to_splice)) + .prop_flat_map(move |(replace_range_start, values, to_splice)| { + ( + 0..(values.len() - replace_range_start), + Just(values), + Just(to_splice), + ) + .prop_map( + move |(replace_range_len, values, to_splice)| SpliceScenario { + initial_values: values, + replace_range: replace_range_start + ..(replace_range_start + replace_range_len), + replacements: to_splice, + }, + ) + }) + .boxed() + } + }) +} + +/// Like splice scenario except that if the initial values we generate are all `None` then the +/// replace range is 0..0. 
+pub(crate) fn option_splice_scenario< + S: Strategy> + Clone, + T: Debug + Clone + 'static, +>( + item_strat: S, +) -> impl Strategy>> { + ( + proptest::collection::vec(item_strat.clone(), 0..100), + proptest::collection::vec(item_strat, 0..10), + ) + .prop_flat_map(move |(values, to_splice)| { + if values.is_empty() || values.iter().all(|v| v.is_none()) { + Just(SpliceScenario { + initial_values: values, + replace_range: 0..0, + replacements: to_splice, + }) + .boxed() + } else { + // This is somewhat awkward to write because we have to carry the `values` and + // `to_splice` through as `Just(..)` to please the borrow checker. + (0..values.len(), Just(values), Just(to_splice)) + .prop_flat_map(move |(replace_range_start, values, to_splice)| { + ( + 0..(values.len() - replace_range_start), + Just(values), + Just(to_splice), + ) + .prop_map( + move |(replace_range_len, values, to_splice)| SpliceScenario { + initial_values: values, + replace_range: replace_range_start + ..(replace_range_start + replace_range_len), + replacements: to_splice, + }, + ) + }) + .boxed() + } + }) +} + +pub(crate) fn opid() -> impl Strategy + Clone { + (0..(i64::MAX as usize), 0..(i64::MAX as u64)).prop_map(|(actor, ctr)| OpId(ctr, actor)) +} + +pub(crate) fn elemid() -> impl Strategy + Clone { + opid().prop_map(ElemId) +} + +pub(crate) fn key() -> impl Strategy + Clone { + prop_oneof! { + elemid().prop_map(Key::Elem), + any::().prop_map(|s| Key::Prop(s.into())), + } +} + +pub(crate) fn encodable_int() -> impl Strategy + Clone { + let bounds = i64::MAX / 2; + -bounds..bounds +} + +pub(crate) fn scalar_value() -> impl Strategy + Clone { + prop_oneof! 
{ + Just(ScalarValue::Null), + any::().prop_map(ScalarValue::Boolean), + any::().prop_map(ScalarValue::Uint), + encodable_int().prop_map(ScalarValue::Int), + any::().prop_map(ScalarValue::F64), + smol_str().prop_map(ScalarValue::Str), + any::>().prop_map(ScalarValue::Bytes), + encodable_int().prop_map(|i| ScalarValue::Counter(i.into())), + encodable_int().prop_map(ScalarValue::Timestamp), + (10..15_u8, any::>()).prop_map(|(c, b)| ScalarValue::Unknown { type_code: c, bytes: b }), + } +} + +fn smol_str() -> impl Strategy + Clone { + any::().prop_map(SmolStr::from) +} diff --git a/automerge/src/columnar_2/encoding/raw.rs b/automerge/src/columnar_2/encoding/raw.rs new file mode 100644 index 00000000..b86443e5 --- /dev/null +++ b/automerge/src/columnar_2/encoding/raw.rs @@ -0,0 +1,97 @@ +use std::{ + borrow::{Borrow, Cow}, + fmt::Debug, +}; + +use super::{Decodable, DecodeError, Encodable, Sink}; + +#[derive(Clone, Debug)] +pub(crate) struct RawDecoder<'a> { + offset: usize, + last_read: usize, + data: Cow<'a, [u8]>, +} + +#[derive(thiserror::Error, Debug)] +pub(crate) enum Error { + #[error("buffer size did not change")] + BufferSizeDidNotChange, + #[error("trying to read past end")] + TryingToReadPastEnd, + #[error(transparent)] + Decode(#[from] DecodeError), +} + +impl<'a> RawDecoder<'a> { + pub(crate) fn new(data: Cow<'a, [u8]>) -> Self { + RawDecoder { + offset: 0, + last_read: 0, + data, + } + } + + pub(crate) fn read(&mut self) -> Result { + let mut buf = &self.data[self.offset..]; + let init_len = buf.len(); + let val = T::decode::<&[u8]>(&mut buf)?; + let delta = init_len - buf.len(); + if delta == 0 { + Err(Error::BufferSizeDidNotChange) + } else { + self.last_read = delta; + self.offset += delta; + Ok(val) + } + } + + pub(crate) fn read_bytes(&mut self, index: usize) -> Result<&[u8], Error> { + if self.offset + index > self.data.len() { + Err(Error::TryingToReadPastEnd) + } else { + let head = &self.data[self.offset..self.offset + index]; + self.last_read = 
index; + self.offset += index; + Ok(head) + } + } + + pub(crate) fn done(&self) -> bool { + self.offset >= self.data.len() + } +} + +impl<'a> From<&'a [u8]> for RawDecoder<'a> { + fn from(d: &'a [u8]) -> Self { + Cow::Borrowed(d).into() + } +} + +impl<'a> From> for RawDecoder<'a> { + fn from(d: Cow<'a, [u8]>) -> Self { + RawDecoder::new(d) + } +} + +pub(crate) struct RawEncoder { + written: usize, + output: S, +} + +impl RawEncoder { + pub(crate) fn append, I: Encodable>(&mut self, value: B) -> usize { + let written = value.borrow().encode(&mut self.output); + self.written += written; + written + } + + pub(crate) fn finish(self) -> (S, usize) { + (self.output, self.written) + } +} + +impl From for RawEncoder { + fn from(output: S) -> Self { + RawEncoder { written: 0, output } + } +} diff --git a/automerge/src/columnar_2/encoding/rle.rs b/automerge/src/columnar_2/encoding/rle.rs new file mode 100644 index 00000000..26a16899 --- /dev/null +++ b/automerge/src/columnar_2/encoding/rle.rs @@ -0,0 +1,239 @@ +use std::{ + borrow::{Borrow, Cow}, + fmt::Debug, +}; + +use super::{raw, Decodable, Encodable, RawDecoder, Sink}; + +pub(crate) struct RleEncoder +where + T: Encodable + PartialEq + Clone, +{ + buf: S, + written: usize, + state: RleState, +} + +impl RleEncoder +where + S: Sink, + T: Encodable + PartialEq + Clone, +{ + pub(crate) fn new(output_buf: S) -> RleEncoder { + RleEncoder { + buf: output_buf, + written: 0, + state: RleState::Empty, + } + } + + /// Flush the encoded values and return the output buffer and the number of bytes written + pub(crate) fn finish(mut self) -> (S, usize) { + match self.take_state() { + RleState::InitialNullRun(_size) => {} + RleState::NullRun(size) => { + self.flush_null_run(size); + } + RleState::LoneVal(value) => self.flush_lit_run(vec![value]), + RleState::Run(value, len) => self.flush_run(&value, len), + RleState::LiteralRun(last, mut run) => { + run.push(last); + self.flush_lit_run(run); + } + RleState::Empty => {} + } + (self.buf, 
self.written) + } + + fn flush_run(&mut self, val: &T, len: usize) { + self.encode(&(len as i64)); + self.encode(val); + } + + fn flush_null_run(&mut self, len: usize) { + self.encode::(&0); + self.encode(&len); + } + + fn flush_lit_run(&mut self, run: Vec) { + self.encode(&-(run.len() as i64)); + for val in run { + self.encode(&val); + } + } + + fn take_state(&mut self) -> RleState { + let mut state = RleState::Empty; + std::mem::swap(&mut self.state, &mut state); + state + } + + pub(crate) fn append_null(&mut self) { + self.state = match self.take_state() { + RleState::Empty => RleState::InitialNullRun(1), + RleState::InitialNullRun(size) => RleState::InitialNullRun(size + 1), + RleState::NullRun(size) => RleState::NullRun(size + 1), + RleState::LoneVal(other) => { + self.flush_lit_run(vec![other]); + RleState::NullRun(1) + } + RleState::Run(other, len) => { + self.flush_run(&other, len); + RleState::NullRun(1) + } + RleState::LiteralRun(last, mut run) => { + run.push(last); + self.flush_lit_run(run); + RleState::NullRun(1) + } + } + } + + pub(crate) fn append_value>(&mut self, value: BT) { + self.state = match self.take_state() { + RleState::Empty => RleState::LoneVal(value.borrow().clone()), + RleState::LoneVal(other) => { + if &other == value.borrow() { + RleState::Run(value.borrow().clone(), 2) + } else { + let mut v = Vec::with_capacity(2); + v.push(other); + RleState::LiteralRun(value.borrow().clone(), v) + } + } + RleState::Run(other, len) => { + if &other == value.borrow() { + RleState::Run(other, len + 1) + } else { + self.flush_run(&other, len); + RleState::LoneVal(value.borrow().clone()) + } + } + RleState::LiteralRun(last, mut run) => { + if &last == value.borrow() { + self.flush_lit_run(run); + RleState::Run(value.borrow().clone(), 2) + } else { + run.push(last); + RleState::LiteralRun(value.borrow().clone(), run) + } + } + RleState::NullRun(size) | RleState::InitialNullRun(size) => { + self.flush_null_run(size); + 
RleState::LoneVal(value.borrow().clone()) + } + } + } + + pub(crate) fn append>(&mut self, value: Option) { + match value { + Some(t) => self.append_value(t), + None => self.append_null(), + } + } + + fn encode(&mut self, val: &V) + where + V: Encodable, + { + self.written += val.encode(&mut self.buf); + } +} + +enum RleState { + Empty, + // Note that this is different to a `NullRun` because if every element of a column is null + // (i.e. the state when we call `finish` is `InitialNullRun`) then we don't output anything at + // all for the column + InitialNullRun(usize), + NullRun(usize), + LiteralRun(T, Vec), + LoneVal(T), + Run(T, usize), +} + +impl From for RleEncoder { + fn from(output: S) -> Self { + Self::new(output) + } +} + +/// See discussion on [`RleEncoder`] for the format data is stored in. +#[derive(Clone, Debug)] +pub(crate) struct RleDecoder<'a, T> { + decoder: RawDecoder<'a>, + last_value: Option, + count: isize, + literal: bool, +} + +impl<'a, T> RleDecoder<'a, T> { + pub(crate) fn done(&self) -> bool { + self.decoder.done() && self.count == 0 + } + + fn try_next(&mut self) -> Result>, raw::Error> + where + T: Decodable + Clone + Debug, + { + while self.count == 0 { + if self.decoder.done() { + return Ok(None); + } + match self.decoder.read::()? { + count if count > 0 => { + // normal run + self.count = count as isize; + self.last_value = Some(self.decoder.read()?); + self.literal = false; + } + count if count < 0 => { + // literal run + self.count = count.abs() as isize; + self.literal = true; + } + _ => { + // null run + // FIXME(jeffa5): handle usize > i64 here somehow + self.count = self.decoder.read::()? 
as isize; + self.last_value = None; + self.literal = false; + } + } + } + self.count -= 1; + if self.literal { + Ok(Some(Some(self.decoder.read()?))) + } else { + Ok(Some(self.last_value.clone())) + } + } +} + +impl<'a, T> From> for RleDecoder<'a, T> { + fn from(bytes: Cow<'a, [u8]>) -> Self { + RleDecoder { + decoder: RawDecoder::from(bytes), + last_value: None, + count: 0, + literal: false, + } + } +} + +impl<'a, T> From<&'a [u8]> for RleDecoder<'a, T> { + fn from(d: &'a [u8]) -> Self { + Cow::Borrowed(d).into() + } +} + +impl<'a, T> Iterator for RleDecoder<'a, T> +where + T: Clone + Debug + Decodable, +{ + type Item = Result, raw::Error>; + + fn next(&mut self) -> Option { + self.try_next().transpose() + } +} diff --git a/automerge/src/columnar_2/splice_error.rs b/automerge/src/columnar_2/splice_error.rs new file mode 100644 index 00000000..54d5f478 --- /dev/null +++ b/automerge/src/columnar_2/splice_error.rs @@ -0,0 +1,47 @@ +use std::convert::Infallible; + +/// Represents an error which occurred when splicing. +/// +/// When splicing values into existing column storage there are two kinds of errors which can +/// occur, those caused by iterating over the existing items, and those caused by iterating over +/// the replacement items. +#[derive(Debug)] +pub(crate) enum SpliceError { + /// There was an error reading from the existing column storage + ReadExisting(E), + /// There was an error reading from the iterator of new rows + ReadReplace(R), +} + +impl SpliceError { + /// Map a spliceerror which is infallible in it's `Replace` error type into a different error. 
+ /// + /// This is used when you have performed a splice with a `replace` iterator which is + /// infallible and need to return a more general `SpliceError` + pub(crate) fn existing(self) -> SpliceError { + match self { + SpliceError::ReadExisting(e) => SpliceError::ReadExisting(e), + SpliceError::ReadReplace(_) => unreachable!("absurd"), + } + } +} + +impl std::error::Error for SpliceError +where + E: std::error::Error, + R: std::error::Error, +{ +} + +impl std::fmt::Display for SpliceError +where + E: std::fmt::Display, + R: std::fmt::Display, +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::ReadExisting(e) => write!(f, "error reading from existing rows: {}", e), + Self::ReadReplace(e) => write!(f, "error reading from replacement rows: {}", e), + } + } +} diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index e18eff3a..9216d9b3 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -61,6 +61,10 @@ mod clock; mod columnar; #[cfg(feature = "storage-v2")] #[allow(dead_code)] +#[allow(unused_imports)] +mod columnar_2; +#[cfg(feature = "storage-v2")] +#[allow(dead_code)] mod convert; mod decoding; mod encoding; From d28767e689977862dd0f214f75e4383d27540561 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 22 Aug 2022 15:13:08 -0500 Subject: [PATCH 534/730] automerge-js v0.1.10 --- automerge-js/index.d.ts | 2 +- automerge-js/package.json | 2 +- automerge-js/src/index.ts | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/automerge-js/index.d.ts b/automerge-js/index.d.ts index 0f853e5b..47f1f344 100644 --- a/automerge-js/index.d.ts +++ b/automerge-js/index.d.ts @@ -84,7 +84,7 @@ export function free(doc: Doc): void; export function from(initialState: T | Doc, actor?: ActorId): Doc; export function change(doc: Doc, options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc; export function emptyChange(doc: Doc, options: ChangeOptions): unknown; -export function load(data: 
Uint8Array, actor: ActorId): Doc; +export function load(data: Uint8Array, actor?: ActorId): Doc; export function save(doc: Doc): Uint8Array; export function merge(local: Doc, remote: Doc): Doc; export function getActorId(doc: Doc): ActorId; diff --git a/automerge-js/package.json b/automerge-js/package.json index 165c6ae5..b699c5ed 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "0.1.9", + "version": "0.1.10", "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index ef231727..a553f853 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -156,7 +156,7 @@ export function emptyChange(doc: Doc, options: ChangeOptions) { return rootProxy(state, true); } -export function load(data: Uint8Array, actor: ActorId) : Doc { +export function load(data: Uint8Array, actor?: ActorId) : Doc { const state = ApiHandler.load(data, actor) return rootProxy(state, true); } From 3a3df45b85a9105040ce27c20f0395262c1b5ca5 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 25 Jul 2022 13:52:52 +0100 Subject: [PATCH 535/730] Access change fields through field accessors The representation of changes in storage-v2 is different to the existing representation so add accessor methods to the fields of `Change` and make all accesses go through them. This allows the change representation in storage-v2 to be a drop-in. 
Signed-off-by: Alex Good --- automerge-c/src/change.rs | 36 +++++++++++++++++++++-------- automerge/examples/quickstart.rs | 2 +- automerge/src/automerge.rs | 37 ++++++++++++++++-------------- automerge/src/automerge/tests.rs | 6 ++--- automerge/src/change.rs | 35 ++++++++++++++++++++++++++++ automerge/src/sync.rs | 20 ++++++++-------- automerge/src/sync/bloom.rs | 6 ++--- automerge/src/transaction/inner.rs | 2 +- 8 files changed, 100 insertions(+), 44 deletions(-) diff --git a/automerge-c/src/change.rs b/automerge-c/src/change.rs index a7e9f5c5..29aacf8e 100644 --- a/automerge-c/src/change.rs +++ b/automerge-c/src/change.rs @@ -23,13 +23,15 @@ macro_rules! to_change { pub struct AMchange { body: *mut am::Change, c_msg: RefCell>, + c_changehash: RefCell>, } impl AMchange { - pub fn new(body: &mut am::Change) -> Self { + pub fn new(change: &mut am::Change) -> Self { Self { - body, - c_msg: RefCell::>::default(), + body: change, + c_msg: Default::default(), + c_changehash: Default::default(), } } @@ -47,6 +49,23 @@ impl AMchange { } std::ptr::null() } + + pub fn hash(&self) -> AMbyteSpan { + let mut c_changehash = self.c_changehash.borrow_mut(); + if let Some(c_changehash) = c_changehash.as_ref() { + c_changehash.into() + } else { + let hash = unsafe { (*self.body).hash() }; + let ptr = c_changehash.insert(hash); + AMbyteSpan { + src: ptr.0.as_ptr(), + #[cfg(feature = "storage-v2")] + count: hash.as_ref().len(), + #[cfg(not(feature = "storage-v2"))] + count: hash.0.len(), + } + } + } } impl AsMut for AMchange { @@ -110,7 +129,7 @@ pub unsafe extern "C" fn AMchangeCompress(change: *mut AMchange) { #[no_mangle] pub unsafe extern "C" fn AMchangeDeps(change: *const AMchange) -> AMchangeHashes { match change.as_ref() { - Some(change) => AMchangeHashes::new(&change.as_ref().deps), + Some(change) => AMchangeHashes::new(change.as_ref().deps()), None => AMchangeHashes::default(), } } @@ -168,8 +187,7 @@ pub unsafe extern "C" fn AMchangeFromBytes(src: *const u8, count: usize) 
-> *mut pub unsafe extern "C" fn AMchangeHash(change: *const AMchange) -> AMbyteSpan { match change.as_ref() { Some(change) => { - let hash: &am::ChangeHash = &change.as_ref().hash; - hash.into() + change.hash() } None => AMbyteSpan::default(), } @@ -244,7 +262,7 @@ pub unsafe extern "C" fn AMchangeMessage(change: *const AMchange) -> *const c_ch #[no_mangle] pub unsafe extern "C" fn AMchangeSeq(change: *const AMchange) -> u64 { if let Some(change) = change.as_ref() { - change.as_ref().seq + change.as_ref().seq() } else { u64::MAX } @@ -282,7 +300,7 @@ pub unsafe extern "C" fn AMchangeSize(change: *const AMchange) -> usize { #[no_mangle] pub unsafe extern "C" fn AMchangeStartOp(change: *const AMchange) -> u64 { if let Some(change) = change.as_ref() { - u64::from(change.as_ref().start_op) + u64::from(change.as_ref().start_op()) } else { u64::MAX } @@ -301,7 +319,7 @@ pub unsafe extern "C" fn AMchangeStartOp(change: *const AMchange) -> u64 { #[no_mangle] pub unsafe extern "C" fn AMchangeTime(change: *const AMchange) -> i64 { if let Some(change) = change.as_ref() { - change.as_ref().time + change.as_ref().timestamp() } else { i64::MAX } diff --git a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs index a041730c..56d24858 100644 --- a/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -51,7 +51,7 @@ fn main() { doc1.merge(&mut doc2).unwrap(); for change in doc1.get_changes(&[]).unwrap() { - let length = doc1.length_at(&cards, &[change.hash]); + let length = doc1.length_at(&cards, &[change.hash()]); println!("{} {}", change.message().unwrap(), length); } } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index c167178b..eb595153 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -200,7 +200,7 @@ impl Automerge { while let Some(hash) = heads.pop() { if let Some(idx) = self.history_index.get(&hash) { let change = &self.history[*idx]; - for dep in &change.deps { + for dep in 
change.deps() { if !seen.contains(dep) { heads.push(*dep); } @@ -624,7 +624,7 @@ impl Automerge { let mut dup = false; if let Some(actor_index) = self.ops.m.actors.lookup(change.actor_id()) { if let Some(s) = self.states.get(&actor_index) { - dup = s.len() >= change.seq as usize; + dup = s.len() >= change.seq() as usize; } } dup @@ -645,10 +645,10 @@ impl Automerge { mut options: ApplyOptions<'_, Obs>, ) -> Result<(), AutomergeError> { for c in changes { - if !self.history_index.contains_key(&c.hash) { + if !self.history_index.contains_key(&c.hash()) { if self.duplicate_seq(&c) { return Err(AutomergeError::DuplicateSeqNumber( - c.seq, + c.seq(), c.actor_id().clone(), )); } @@ -660,7 +660,7 @@ impl Automerge { } } while let Some(c) = self.pop_next_causally_ready_change() { - if !self.history_index.contains_key(&c.hash) { + if !self.history_index.contains_key(&c.hash()) { self.apply_change(c, &mut options.op_observer); } } @@ -683,7 +683,7 @@ impl Automerge { fn is_causally_ready(&self, change: &Change) -> bool { change - .deps + .deps() .iter() .all(|d| self.history_index.contains_key(d)) } @@ -750,6 +750,7 @@ impl Automerge { .into_iter() .cloned() .collect::>(); + tracing::trace!(changes=?changes.iter().map(|c| c.hash()).collect::>(), "merging new changes"); self.apply_changes_with(changes, options)?; Ok(self.get_heads()) } @@ -809,10 +810,10 @@ impl Automerge { /// Get the hashes of the changes in this document that aren't transitive dependencies of the /// given `heads`. 
pub fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { - let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash).collect(); + let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash()).collect(); let mut missing = HashSet::new(); - for head in self.queue.iter().flat_map(|change| &change.deps) { + for head in self.queue.iter().flat_map(|change| change.deps()) { if !self.history_index.contains_key(head) { missing.insert(head); } @@ -904,10 +905,12 @@ impl Automerge { } /// Get the changes that the other document added compared to this document. + #[tracing::instrument(skip(self, other))] pub fn get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { // Depth-first traversal from the heads through the dependency graph, // until we reach a change that is already present in other let mut stack: Vec<_> = other.get_heads(); + tracing::trace!(their_heads=?stack, "finding changes to merge"); let mut seen_hashes = HashSet::new(); let mut added_change_hashes = Vec::new(); while let Some(hash) = stack.pop() { @@ -915,7 +918,7 @@ impl Automerge { seen_hashes.insert(hash); added_change_hashes.push(hash); if let Some(change) = other.get_change_by_hash(&hash) { - stack.extend(&change.deps); + stack.extend(change.deps()); } } } @@ -940,12 +943,12 @@ impl Automerge { .get(&actor) .and_then(|v| v.get(seq as usize - 1)) .and_then(|&i| self.history.get(i)) - .map(|c| c.hash) + .map(|c| c.hash()) .ok_or(AutomergeError::InvalidSeq(seq)) } pub(crate) fn update_history(&mut self, change: Change, num_ops: usize) -> usize { - self.max_op = std::cmp::max(self.max_op, change.start_op.get() + num_ops as u64 - 1); + self.max_op = std::cmp::max(self.max_op, change.start_op().get() + num_ops as u64 - 1); self.update_deps(&change); @@ -958,7 +961,7 @@ impl Automerge { .push(history_index); let mut clock = Clock::new(); - for hash in &change.deps { + for hash in change.deps() { let c = self .clocks .get(hash) @@ -969,22 +972,22 @@ impl Automerge { 
actor_index, ClockData { max_op: change.max_op(), - seq: change.seq, + seq: change.seq(), }, ); - self.clocks.insert(change.hash, clock); + self.clocks.insert(change.hash(), clock); - self.history_index.insert(change.hash, history_index); + self.history_index.insert(change.hash(), history_index); self.history.push(change); history_index } fn update_deps(&mut self, change: &Change) { - for d in &change.deps { + for d in change.deps() { self.deps.remove(d); } - self.deps.insert(change.hash); + self.deps.insert(change.hash()); } pub fn import(&self, s: &str) -> Result { diff --git a/automerge/src/automerge/tests.rs b/automerge/src/automerge/tests.rs index c66f6959..e07f73ff 100644 --- a/automerge/src/automerge/tests.rs +++ b/automerge/src/automerge/tests.rs @@ -1114,12 +1114,12 @@ fn delete_nothing_in_list_returns_error() { fn loaded_doc_changes_have_hash() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - tx.put(ROOT, "a", 1).unwrap(); + tx.put(ROOT, "a", 1_u64).unwrap(); tx.commit(); - let hash = doc.get_last_local_change().unwrap().hash; + let hash = doc.get_last_local_change().unwrap().hash(); let bytes = doc.save(); let doc = Automerge::load(&bytes).unwrap(); - assert_eq!(doc.get_change_by_hash(&hash).unwrap().hash, hash); + assert_eq!(doc.get_change_by_hash(&hash).unwrap().hash(), hash); } #[test] diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 1cf55de0..f14b2025 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -363,6 +363,26 @@ impl Change { self.start_op.get() + (self.len() as u64) - 1 } + pub fn deps(&self) -> &[amp::ChangeHash] { + &self.deps + } + + pub fn seq(&self) -> u64 { + self.seq + } + + pub fn hash(&self) -> amp::ChangeHash { + self.hash + } + + pub fn start_op(&self) -> NonZeroU64 { + self.start_op + } + + pub fn timestamp(&self) -> i64 { + self.time + } + pub fn message(&self) -> Option { let m = &self.bytes.uncompressed()[self.message.clone()]; if m.is_empty() { @@ -407,6 +427,13 @@ impl 
Change { self.bytes.compress(self.body_start); } + pub fn compressed_bytes(&self) -> &[u8] { + match &self.bytes { + ChangeBytes::Compressed { compressed, .. } => compressed, + ChangeBytes::Uncompressed(uncompressed) => uncompressed, + } + } + pub fn raw_bytes(&self) -> &[u8] { self.bytes.raw() } @@ -515,6 +542,14 @@ pub(crate) fn export_change( .into() } +impl<'a> TryFrom<&'a [u8]> for Change { + type Error = decoding::Error; + + fn try_from(value: &'a [u8]) -> Result { + Self::try_from(value.to_vec()) + } +} + impl TryFrom> for Change { type Error = decoding::Error; diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 2b4b454b..57414c59 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -80,7 +80,7 @@ impl Automerge { let changes_to_send = changes_to_send .into_iter() .filter_map(|change| { - if !sync_state.sent_hashes.contains(&change.hash) { + if !sync_state.sent_hashes.contains(&change.hash()) { Some(change.clone()) } else { None @@ -91,7 +91,7 @@ impl Automerge { sync_state.last_sent_heads = our_heads.clone(); sync_state .sent_hashes - .extend(changes_to_send.iter().map(|c| c.hash)); + .extend(changes_to_send.iter().map(|c| c.hash())); let sync_message = Message { heads: our_heads, @@ -176,7 +176,7 @@ impl Automerge { let new_changes = self .get_changes(&last_sync) .expect("Should have only used hashes that are in the document"); - let hashes = new_changes.into_iter().map(|change| &change.hash); + let hashes = new_changes.iter().map(|change| change.hash()); Have { last_sync, bloom: BloomFilter::from_hashes(hashes), @@ -211,17 +211,17 @@ impl Automerge { let mut hashes_to_send = HashSet::new(); for change in &changes { - change_hashes.insert(change.hash); + change_hashes.insert(change.hash()); - for dep in &change.deps { - dependents.entry(*dep).or_default().push(change.hash); + for dep in change.deps() { + dependents.entry(*dep).or_default().push(change.hash()); } if bloom_filters .iter() - .all(|bloom| 
!bloom.contains_hash(&change.hash)) + .all(|bloom| !bloom.contains_hash(&change.hash())) { - hashes_to_send.insert(change.hash); + hashes_to_send.insert(change.hash()); } } @@ -248,7 +248,7 @@ impl Automerge { } for change in changes { - if hashes_to_send.contains(&change.hash) { + if hashes_to_send.contains(&change.hash()) { changes_to_send.push(change); } } @@ -285,7 +285,7 @@ impl Message { (self.changes.len() as u32).encode_vec(&mut buf); for mut change in self.changes { change.compress(); - change.raw_bytes().encode_vec(&mut buf); + change.compressed_bytes().encode_vec(&mut buf); } buf diff --git a/automerge/src/sync/bloom.rs b/automerge/src/sync/bloom.rs index 0ed1332f..69311a20 100644 --- a/automerge/src/sync/bloom.rs +++ b/automerge/src/sync/bloom.rs @@ -1,4 +1,4 @@ -use std::borrow::Cow; +use std::borrow::{Borrow, Cow}; use crate::{decoding, decoding::Decoder, encoding::Encodable, ChangeHash}; @@ -84,7 +84,7 @@ impl BloomFilter { } } - pub fn from_hashes<'a>(hashes: impl ExactSizeIterator) -> Self { + pub fn from_hashes>(hashes: impl ExactSizeIterator) -> Self { let num_entries = hashes.len() as u32; let num_bits_per_entry = BITS_PER_ENTRY; let num_probes = NUM_PROBES; @@ -96,7 +96,7 @@ impl BloomFilter { bits, }; for hash in hashes { - filter.add_hash(hash); + filter.add_hash(hash.borrow()); } filter } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 86936492..28b1dd25 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -64,7 +64,7 @@ impl TransactionInner { let num_ops = self.pending_ops(); let change = export_change(self, &doc.ops.m.actors, &doc.ops.m.props); - let hash = change.hash; + let hash = change.hash(); doc.update_history(change, num_ops); debug_assert_eq!(doc.get_heads(), vec![hash]); hash From 771733deac25f3e04bb72da1fd75cd5cb04687db Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sun, 24 Jul 2022 22:02:48 +0100 Subject: [PATCH 536/730] Implement storage-v2 
Implement parsing the binary format using the new parser library and the new encoding types. This is superior to the previous parsing implementation in that invalid data should never cause panics and it exposes an interface to construct an OpSet from a saved document much more efficiently. Signed-off-by: Alex Good --- automerge/src/change_v2.rs | 315 +++++++++++ automerge/src/error.rs | 9 + automerge/src/indexed_cache.rs | 37 ++ automerge/src/legacy/mod.rs | 2 +- automerge/src/lib.rs | 7 + automerge/src/op_set.rs | 31 ++ automerge/src/storage.rs | 23 +- automerge/src/storage/change.rs | 502 ++++++++++++++++++ automerge/src/storage/change/change_actors.rs | 304 +++++++++++ .../src/storage/change/change_op_columns.rs | 481 +++++++++++++++++ automerge/src/storage/change/compressed.rs | 51 ++ .../storage/change/op_with_change_actors.rs | 1 + automerge/src/storage/chunk.rs | 292 ++++++++++ automerge/src/storage/columns.rs | 355 +++++++++++++ automerge/src/storage/columns/column.rs | 42 ++ .../src/storage/columns/column_builder.rs | 199 +++++++ .../storage/columns/column_specification.rs | 285 ++++++++++ automerge/src/storage/columns/raw_column.rs | 263 +++++++++ automerge/src/storage/convert.rs | 5 + .../src/storage/convert/op_as_changeop.rs | 128 +++++ automerge/src/storage/convert/op_as_docop.rs | 145 +++++ automerge/src/storage/document.rs | 335 ++++++++++++ automerge/src/storage/document/compression.rs | 338 ++++++++++++ .../storage/document/doc_change_columns.rs | 339 ++++++++++++ .../src/storage/document/doc_op_columns.rs | 450 ++++++++++++++++ automerge/src/storage/load.rs | 119 +++++ .../src/storage/load/change_collector.rs | 207 ++++++++ .../src/storage/load/reconstruct_document.rs | 362 +++++++++++++ automerge/src/storage/save.rs | 2 + automerge/src/storage/save/document.rs | 146 +++++ automerge/src/types.rs | 91 ++++ automerge/src/types/opids.rs | 83 ++- automerge/src/value.rs | 10 + 33 files changed, 5954 insertions(+), 5 deletions(-) create mode 100644 
automerge/src/change_v2.rs create mode 100644 automerge/src/storage/change.rs create mode 100644 automerge/src/storage/change/change_actors.rs create mode 100644 automerge/src/storage/change/change_op_columns.rs create mode 100644 automerge/src/storage/change/compressed.rs create mode 100644 automerge/src/storage/change/op_with_change_actors.rs create mode 100644 automerge/src/storage/chunk.rs create mode 100644 automerge/src/storage/columns.rs create mode 100644 automerge/src/storage/columns/column.rs create mode 100644 automerge/src/storage/columns/column_builder.rs create mode 100644 automerge/src/storage/columns/column_specification.rs create mode 100644 automerge/src/storage/columns/raw_column.rs create mode 100644 automerge/src/storage/convert.rs create mode 100644 automerge/src/storage/convert/op_as_changeop.rs create mode 100644 automerge/src/storage/convert/op_as_docop.rs create mode 100644 automerge/src/storage/document.rs create mode 100644 automerge/src/storage/document/compression.rs create mode 100644 automerge/src/storage/document/doc_change_columns.rs create mode 100644 automerge/src/storage/document/doc_op_columns.rs create mode 100644 automerge/src/storage/load.rs create mode 100644 automerge/src/storage/load/change_collector.rs create mode 100644 automerge/src/storage/load/reconstruct_document.rs create mode 100644 automerge/src/storage/save.rs create mode 100644 automerge/src/storage/save/document.rs diff --git a/automerge/src/change_v2.rs b/automerge/src/change_v2.rs new file mode 100644 index 00000000..834c7d99 --- /dev/null +++ b/automerge/src/change_v2.rs @@ -0,0 +1,315 @@ +use std::{borrow::Cow, num::NonZeroU64}; + +use crate::{ + columnar_2::Key as StoredKey, + storage::{ + change::{Unverified, Verified}, + parse, Change as StoredChange, ChangeOp, Chunk, Compressed, ReadChangeOpError, + }, + types::{ActorId, ChangeHash, ElemId}, +}; + +#[derive(Clone, Debug, PartialEq)] +pub struct Change { + stored: StoredChange<'static, Verified>, + 
compression: CompressionState, + len: usize, +} + +impl Change { + pub(crate) fn new(stored: StoredChange<'static, Verified>) -> Self { + let len = stored.iter_ops().count(); + Self { + stored, + len, + compression: CompressionState::NotCompressed, + } + } + + pub(crate) fn new_from_unverified( + stored: StoredChange<'static, Unverified>, + compressed: Option>, + ) -> Result { + let mut len = 0; + let stored = stored.verify_ops(|_| len += 1)?; + let compression = if let Some(c) = compressed { + CompressionState::Compressed(c) + } else { + CompressionState::NotCompressed + }; + Ok(Self { + stored, + len, + compression, + }) + } + + pub fn actor_id(&self) -> &ActorId { + self.stored.actor() + } + + pub fn other_actor_ids(&self) -> &[ActorId] { + self.stored.other_actors() + } + + pub fn len(&self) -> usize { + self.len + } + + pub fn is_empty(&self) -> bool { + self.len == 0 + } + + pub fn max_op(&self) -> u64 { + self.stored.start_op().get() + (self.len as u64) - 1 + } + + pub fn start_op(&self) -> NonZeroU64 { + self.stored.start_op() + } + + pub fn message(&self) -> Option<&String> { + self.stored.message().as_ref() + } + + pub fn deps(&self) -> &[ChangeHash] { + self.stored.dependencies() + } + + pub fn hash(&self) -> ChangeHash { + self.stored.hash() + } + + pub fn seq(&self) -> u64 { + self.stored.seq() + } + + pub fn timestamp(&self) -> i64 { + self.stored.timestamp() + } + + pub fn compressed_bytes(&mut self) -> Cow<'_, [u8]> { + if let CompressionState::NotCompressed = self.compression { + if let Some(compressed) = self.stored.compress() { + self.compression = CompressionState::Compressed(compressed); + } else { + self.compression = CompressionState::TooSmallToCompress; + } + }; + match &self.compression { + // SAFETY: We just checked this case above + CompressionState::NotCompressed => unreachable!(), + CompressionState::TooSmallToCompress => Cow::Borrowed(self.stored.bytes()), + CompressionState::Compressed(c) => c.bytes(), + } + } + + pub fn 
raw_bytes(&self) -> &[u8] { + self.stored.bytes() + } + + pub(crate) fn iter_ops(&self) -> impl Iterator + '_ { + self.stored.iter_ops() + } + + pub fn extra_bytes(&self) -> &[u8] { + self.stored.extra_bytes() + } + + // TODO replace all uses of this with TryFrom<&[u8]> + pub fn from_bytes(bytes: Vec) -> Result { + Self::try_from(&bytes[..]) + } + + pub fn decode(&self) -> crate::ExpandedChange { + crate::ExpandedChange::from(self) + } +} + +#[derive(Clone, Debug, PartialEq)] +enum CompressionState { + /// We haven't tried to compress this change + NotCompressed, + /// We have compressed this change + Compressed(Compressed<'static>), + /// We tried to compress this change but it wasn't big enough to be worth it + TooSmallToCompress, +} + +impl AsRef> for Change { + fn as_ref(&self) -> &StoredChange<'static, Verified> { + &self.stored + } +} + +#[derive(thiserror::Error, Debug)] +pub enum LoadError { + #[error("unable to parse change: {0}")] + Parse(Box), + #[error("leftover data after parsing")] + LeftoverData, + #[error("wrong chunk type")] + WrongChunkType, +} + +impl<'a> TryFrom<&'a [u8]> for Change { + type Error = LoadError; + + fn try_from(value: &'a [u8]) -> Result { + let input = parse::Input::new(value); + let (remaining, chunk) = Chunk::parse(input).map_err(|e| LoadError::Parse(Box::new(e)))?; + if !remaining.is_empty() { + return Err(LoadError::LeftoverData); + } + match chunk { + Chunk::Change(c) => Self::new_from_unverified(c.into_owned(), None) + .map_err(|e| LoadError::Parse(Box::new(e))), + Chunk::CompressedChange(c, compressed) => { + Self::new_from_unverified(c.into_owned(), Some(compressed.into_owned())) + .map_err(|e| LoadError::Parse(Box::new(e))) + } + _ => Err(LoadError::WrongChunkType), + } + } +} + +impl<'a> TryFrom> for Change { + type Error = ReadChangeOpError; + + fn try_from(c: StoredChange<'a, Unverified>) -> Result { + Self::new_from_unverified(c.into_owned(), None) + } +} + +impl From for Change { + fn from(e: crate::ExpandedChange) 
-> Self { + let stored = StoredChange::builder() + .with_actor(e.actor_id) + .with_extra_bytes(e.extra_bytes) + .with_seq(e.seq) + .with_dependencies(e.deps) + .with_timestamp(e.time) + .with_start_op(e.start_op) + .with_message(e.message) + .build(e.operations.iter()); + match stored { + Ok(c) => Change::new(c), + Err(crate::storage::change::PredOutOfOrder) => { + // Should never happen because we use `SortedVec` in legacy::Op::pred + panic!("preds out of order"); + } + } + } +} + +mod convert_expanded { + use std::borrow::Cow; + + use crate::{convert, legacy, storage::AsChangeOp, types::ActorId, ScalarValue}; + + impl<'a> AsChangeOp<'a> for &'a legacy::Op { + type ActorId = &'a ActorId; + type OpId = &'a legacy::OpId; + type PredIter = std::slice::Iter<'a, legacy::OpId>; + + fn action(&self) -> u64 { + self.action.action_index() + } + + fn insert(&self) -> bool { + self.insert + } + + fn pred(&self) -> Self::PredIter { + self.pred.iter() + } + + fn key(&self) -> convert::Key<'a, Self::OpId> { + match &self.key { + legacy::Key::Map(s) => convert::Key::Prop(Cow::Borrowed(s)), + legacy::Key::Seq(legacy::ElementId::Head) => { + convert::Key::Elem(convert::ElemId::Head) + } + legacy::Key::Seq(legacy::ElementId::Id(o)) => { + convert::Key::Elem(convert::ElemId::Op(o)) + } + } + } + + fn obj(&self) -> convert::ObjId { + match &self.obj { + legacy::ObjectId::Root => convert::ObjId::Root, + legacy::ObjectId::Id(o) => convert::ObjId::Op(o), + } + } + + fn val(&self) -> Cow<'a, crate::ScalarValue> { + match self.primitive_value() { + Some(v) => Cow::Owned(v), + None => Cow::Owned(ScalarValue::Null), + } + } + } + + impl<'a> convert::OpId<&'a ActorId> for &'a legacy::OpId { + fn counter(&self) -> u64 { + legacy::OpId::counter(self) + } + + fn actor(&self) -> &'a ActorId { + &self.1 + } + } +} + +impl From<&Change> for crate::ExpandedChange { + fn from(c: &Change) -> Self { + let actors = std::iter::once(c.actor_id()) + .chain(c.other_actor_ids().iter()) + .cloned() + 
.enumerate() + .collect::>(); + let operations = c + .iter_ops() + .map(|o| crate::legacy::Op { + action: crate::types::OpType::from_index_and_value(o.action, o.val).unwrap(), + insert: o.insert, + key: match o.key { + StoredKey::Elem(e) if e.is_head() => { + crate::legacy::Key::Seq(crate::legacy::ElementId::Head) + } + StoredKey::Elem(ElemId(o)) => { + crate::legacy::Key::Seq(crate::legacy::ElementId::Id( + crate::legacy::OpId::new(o.counter(), actors.get(&o.actor()).unwrap()), + )) + } + StoredKey::Prop(p) => crate::legacy::Key::Map(p), + }, + obj: if o.obj.is_root() { + crate::legacy::ObjectId::Root + } else { + crate::legacy::ObjectId::Id(crate::legacy::OpId::new( + o.obj.opid().counter(), + actors.get(&o.obj.opid().actor()).unwrap(), + )) + }, + pred: o + .pred + .into_iter() + .map(|p| crate::legacy::OpId::new(p.counter(), actors.get(&p.actor()).unwrap())) + .collect(), + }) + .collect::>(); + crate::ExpandedChange { + operations, + actor_id: actors.get(&0).unwrap().clone(), + hash: Some(c.hash()), + time: c.timestamp(), + deps: c.deps().to_vec(), + seq: c.seq(), + start_op: c.start_op(), + extra_bytes: c.extra_bytes().to_vec(), + message: c.message().cloned(), + } + } +} diff --git a/automerge/src/error.rs b/automerge/src/error.rs index 9f4ccf75..e47b54e5 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -73,3 +73,12 @@ pub struct InvalidElementId(pub String); #[derive(Error, Debug)] #[error("Invalid OpID: {0}")] pub struct InvalidOpId(pub String); + +#[cfg(feature = "storage-v2")] +#[derive(Error, Debug)] +pub(crate) enum InvalidOpType { + #[error("unrecognized action index {0}")] + UnknownAction(u64), + #[error("non numeric argument for inc op")] + NonNumericInc, +} diff --git a/automerge/src/indexed_cache.rs b/automerge/src/indexed_cache.rs index 1bf92a02..df445f28 100644 --- a/automerge/src/indexed_cache.rs +++ b/automerge/src/indexed_cache.rs @@ -44,6 +44,7 @@ where self.lookup.get(item).cloned() } + #[allow(dead_code)] pub(crate) fn 
len(&self) -> usize { self.cache.len() } @@ -52,6 +53,11 @@ where &self.cache[index] } + #[cfg(feature = "storage-v2")] + pub(crate) fn safe_get(&self, index: usize) -> Option<&T> { + self.cache.get(index) + } + /// Remove the last inserted entry into this cache. /// This is safe to do as it does not require reshuffling other entries. /// @@ -75,6 +81,25 @@ where sorted } + /// Create a vector from positions in this index to positions in an equivalent sorted index + /// + /// This is useful primarily when encoding an `IndexedCache` in the document format. + /// In this case we encode the actors in sorted order in the document and all ops reference the + /// offset into this sorted actor array. But the `IndexedCache` we have in the + /// application does not contain actors in sorted order because we add them as we encounter + /// them, so we must map from the actor IDs in the application to the actor IDs in the document + /// format + /// + /// # Examples + /// + /// ```rust,ignore + /// let idx: IndexedCache = IndexedCache::new(); + /// let first_idx = idx.cache("b"); // first_idx is `0` + /// let second_idx = idx.cache("a"); // second_idx i `1` + /// let encoded = idx.encode_index(); + /// // first_idx (0) maps to `1` whilst second_idx (1) maps to `0` because "a" < "b" + /// assert_eq!(encoded, vec![1,0]) + /// ``` pub(crate) fn encode_index(&self) -> Vec { let sorted: Vec<_> = self.cache.iter().sorted().cloned().collect(); self.cache @@ -99,3 +124,15 @@ impl Index for IndexedCache { &self.cache[i] } } + +impl FromIterator for IndexedCache { + fn from_iter>(iter: T) -> Self { + let mut cache = Vec::new(); + let mut lookup = HashMap::new(); + for (index, elem) in iter.into_iter().enumerate() { + cache.push(elem.clone()); + lookup.insert(elem, index); + } + Self { cache, lookup } + } +} diff --git a/automerge/src/legacy/mod.rs b/automerge/src/legacy/mod.rs index 91d612bf..3b7bcbc0 100644 --- a/automerge/src/legacy/mod.rs +++ b/automerge/src/legacy/mod.rs @@ -157,7 
+157,7 @@ impl SortedVec { self.0.get_mut(index) } - pub fn iter(&self) -> impl Iterator { + pub fn iter(&self) -> std::slice::Iter<'_, T> { self.0.iter() } } diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 9216d9b3..eadecdd9 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -57,6 +57,8 @@ macro_rules! __log { mod autocommit; mod automerge; mod change; +#[cfg(feature = "storage-v2")] +mod change_v2; mod clock; mod columnar; #[cfg(feature = "storage-v2")] @@ -85,6 +87,8 @@ mod options; mod parents; mod query; #[cfg(feature = "storage-v2")] +#[allow(dead_code)] +#[allow(unused_imports)] mod storage; pub mod sync; pub mod transaction; @@ -96,7 +100,10 @@ mod visualisation; pub use crate::automerge::Automerge; pub use autocommit::AutoCommit; +//#[cfg(not(feature = "storage-v2"))] pub use change::Change; +//#[cfg(feature = "storage-v2")] +//pub use change_v2::{Change, LoadError as LoadChangeError}; pub use decoding::Error as DecodingError; pub use decoding::InvalidChangeError; pub use encoding::Error as EncodingError; diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index e29f0630..0411e086 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -6,6 +6,8 @@ use crate::query::{self, OpIdSearch, TreeQuery}; use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpIds, OpType}; use crate::{ObjType, OpObserver}; use fxhash::FxBuildHasher; +#[cfg(feature = "storage-v2")] +use std::borrow::Borrow; use std::cmp::Ordering; use std::collections::HashMap; use std::ops::RangeBounds; @@ -341,7 +343,24 @@ pub(crate) struct OpSetMetadata { pub(crate) props: IndexedCache, } +impl Default for OpSetMetadata { + fn default() -> Self { + Self { + actors: IndexedCache::new(), + props: IndexedCache::new(), + } + } +} + impl OpSetMetadata { + #[cfg(feature = "storage-v2")] + pub(crate) fn from_actors(actors: Vec) -> Self { + Self { + props: IndexedCache::new(), + actors: actors.into_iter().collect(), + } + } + pub(crate) fn 
key_cmp(&self, left: &Key, right: &Key) -> Ordering { match (left, right) { (Key::Map(a), Key::Map(b)) => self.props[*a].cmp(&self.props[*b]), @@ -363,6 +382,13 @@ impl OpSetMetadata { OpIds::new(opids, |left, right| self.lamport_cmp(*left, *right)) } + /// If `opids` are in ascending lamport timestamp order with respect to the actor IDs in + /// this `OpSetMetadata` then this returns `Some(OpIds)`, otherwise returns `None`. + #[cfg(feature = "storage-v2")] + pub(crate) fn try_sorted_opids(&self, opids: Vec) -> Option { + OpIds::new_if_sorted(opids, |a, b| self.lamport_cmp(*a, *b)) + } + pub(crate) fn import_opids>( &mut self, external_opids: I, @@ -378,4 +404,9 @@ impl OpSetMetadata { self.lamport_cmp(*left, *right) }) } + + #[cfg(feature = "storage-v2")] + pub(crate) fn import_prop>(&mut self, key: S) -> usize { + self.props.cache(key.borrow().to_string()) + } } diff --git a/automerge/src/storage.rs b/automerge/src/storage.rs index cad6f96e..c8a2183d 100644 --- a/automerge/src/storage.rs +++ b/automerge/src/storage.rs @@ -1,2 +1,23 @@ -#[allow(dead_code)] +use std::ops::Range; + +pub(crate) mod change; +mod chunk; +mod columns; +pub(crate) mod convert; +mod document; +pub(crate) mod load; pub(crate) mod parse; +pub(crate) mod save; + +pub(crate) use { + change::{AsChangeOp, Change, ChangeOp, Compressed, ReadChangeOpError}, + chunk::{CheckSum, Chunk, ChunkType, Header}, + columns::{Columns, MismatchingColumn, RawColumn, RawColumns}, + document::{AsChangeMeta, AsDocOp, ChangeMetadata, CompressConfig, DocOp, Document}, +}; + +fn shift_range(range: Range, by: usize) -> Range { + range.start + by..range.end + by +} + +pub(crate) const MAGIC_BYTES: [u8; 4] = [0x85, 0x6f, 0x4a, 0x83]; diff --git a/automerge/src/storage/change.rs b/automerge/src/storage/change.rs new file mode 100644 index 00000000..cbe014ac --- /dev/null +++ b/automerge/src/storage/change.rs @@ -0,0 +1,502 @@ +use std::{borrow::Cow, io::Write, marker::PhantomData, num::NonZeroU64, ops::Range}; + +use 
crate::{convert, ActorId, ChangeHash, ScalarValue}; + +use super::{parse, shift_range, CheckSum, ChunkType, Columns, Header, RawColumns}; + +mod change_op_columns; +use change_op_columns::ChangeOpsColumns; +pub(crate) use change_op_columns::{ChangeOp, ReadChangeOpError}; + +mod change_actors; +pub(crate) use change_actors::PredOutOfOrder; +mod compressed; +mod op_with_change_actors; +pub(crate) use compressed::Compressed; + +pub(crate) const DEFLATE_MIN_SIZE: usize = 256; + +/// Changes present an iterator over the operations encoded in them. Before we have read these +/// changes we don't know if they are valid, so we expose an iterator with items which are +/// `Result`s. However, frequently we know that the changes are valid, this trait is used as a +/// witness that we have verified the operations in a change so we can expose an iterator which +/// does not return `Results` +pub(crate) trait OpReadState {} +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct Verified; +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct Unverified; +impl OpReadState for Verified {} +impl OpReadState for Unverified {} + +/// A `Change` is the result of parsing a change chunk as specified in [1] +/// +/// The type parameter to this type represents whether or not operation have been "verified". +/// Operations in a change chunk are stored in a compressed column oriented storage format. In +/// general there is no guarantee that this storage is valid. Therefore we use the `OpReadState` +/// type parameter to distinguish between contexts where we know that the ops are valid and those +/// where we don't. The `Change::verify_ops` method can be used to obtain a verified `Change` which +/// can provide an iterator over `ChangeOp`s directly, rather than over `Result`. 
+/// +/// [1]: https://alexjg.github.io/automerge-storage-docs/#change-chunks +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct Change<'a, O: OpReadState> { + /// The raw bytes of the entire chunk containing this change, including the header. + bytes: Cow<'a, [u8]>, + header: Header, + dependencies: Vec, + actor: ActorId, + other_actors: Vec, + seq: u64, + start_op: NonZeroU64, + timestamp: i64, + message: Option, + ops_meta: ChangeOpsColumns, + /// The range in `Self::bytes` where the ops column data is + ops_data: Range, + extra_bytes: Range, + _phantom: PhantomData, +} + +#[derive(thiserror::Error, Debug)] +pub(crate) enum ParseError { + #[error(transparent)] + Leb128(#[from] parse::leb128::Error), + #[error(transparent)] + InvalidUtf8(#[from] parse::InvalidUtf8), + #[error("failed to parse change columns: {0}")] + RawColumns(#[from] crate::storage::columns::raw_column::ParseError), + #[error("failed to parse header: {0}")] + Header(#[from] super::chunk::error::Header), + #[error("change contained compressed columns")] + CompressedChangeCols, + #[error("invalid change cols: {0}")] + InvalidColumns(Box), +} + +impl<'a> Change<'a, Unverified> { + pub(crate) fn parse( + input: parse::Input<'a>, + ) -> parse::ParseResult<'a, Change<'a, Unverified>, ParseError> { + // TODO(alex): check chunk type + let (i, header) = Header::parse(input)?; + let parse::Split { + first: chunk_input, + remaining, + } = i.split(header.data_bytes().len()); + let (_, change) = Self::parse_following_header(chunk_input, header)?; + Ok((remaining, change)) + } + + /// Parse a change chunk. `input` should be the entire chunk, including the header bytes. 
+ pub(crate) fn parse_following_header( + input: parse::Input<'a>, + header: Header, + ) -> parse::ParseResult<'_, Change<'a, Unverified>, ParseError> { + let (i, deps) = parse::length_prefixed(parse::change_hash)(input)?; + let (i, actor) = parse::actor_id(i)?; + let (i, seq) = parse::leb128_u64(i)?; + let (i, start_op) = parse::nonzero_leb128_u64(i)?; + let (i, timestamp) = parse::leb128_i64(i)?; + let (i, message_len) = parse::leb128_u64(i)?; + let (i, message) = parse::utf_8(message_len as usize, i)?; + let (i, other_actors) = parse::length_prefixed(parse::actor_id)(i)?; + let (i, ops_meta) = RawColumns::parse(i)?; + let ( + i, + parse::RangeOf { + range: ops_data, .. + }, + ) = parse::range_of(|i| parse::take_n(ops_meta.total_column_len(), i), i)?; + + let ( + _i, + parse::RangeOf { + range: extra_bytes, .. + }, + ) = parse::range_of(parse::take_rest, i)?; + + let ops_meta = ops_meta + .uncompressed() + .ok_or(parse::ParseError::Error(ParseError::CompressedChangeCols))?; + let col_layout = Columns::parse(ops_data.len(), ops_meta.iter()) + .map_err(|e| parse::ParseError::Error(ParseError::InvalidColumns(Box::new(e))))?; + let ops_meta = ChangeOpsColumns::try_from(col_layout) + .map_err(|e| parse::ParseError::Error(ParseError::InvalidColumns(Box::new(e))))?; + + Ok(( + parse::Input::empty(), + Change { + bytes: input.bytes().into(), + header, + dependencies: deps, + actor, + other_actors, + seq, + start_op, + timestamp, + message: if message.is_empty() { + None + } else { + Some(message) + }, + ops_meta, + ops_data, + extra_bytes, + _phantom: PhantomData, + }, + )) + } + + /// Iterate over the ops in this chunk. The iterator will return an error if any of the ops are + /// malformed. 
+ pub(crate) fn iter_ops( + &'a self, + ) -> impl Iterator> + Clone + 'a { + self.ops_meta.iter(self.ops_data()) + } + + /// Verify all the ops in this change executing `f` for each one + /// + /// `f` will be called for each op in this change, allowing callers to collect additional + /// information about the ops (e.g. all the actor IDs in the change, or the number of ops) + /// + /// # Errors + /// * If there is an error reading an operation + pub(crate) fn verify_ops( + self, + mut f: F, + ) -> Result, ReadChangeOpError> { + for op in self.iter_ops() { + f(op?); + } + Ok(Change { + bytes: self.bytes, + header: self.header, + dependencies: self.dependencies, + actor: self.actor, + other_actors: self.other_actors, + seq: self.seq, + start_op: self.start_op, + timestamp: self.timestamp, + message: self.message, + ops_meta: self.ops_meta, + ops_data: self.ops_data, + extra_bytes: self.extra_bytes, + _phantom: PhantomData, + }) + } +} + +impl<'a> Change<'a, Verified> { + pub(crate) fn builder() -> ChangeBuilder { + ChangeBuilder::new() + } + + pub(crate) fn iter_ops(&'a self) -> impl Iterator + Clone + 'a { + // SAFETY: This unwrap is okay because a `Change<'_, Verified>` can only be constructed + // using either `verify_ops` or `Builder::build`, so we know the ops columns are valid. 
+ self.ops_meta.iter(self.ops_data()).map(|o| o.unwrap()) + } +} + +impl<'a, O: OpReadState> Change<'a, O> { + pub(crate) fn checksum(&self) -> CheckSum { + self.header.checksum() + } + + pub(crate) fn actor(&self) -> &ActorId { + &self.actor + } + pub(crate) fn other_actors(&self) -> &[ActorId] { + &self.other_actors + } + + pub(crate) fn start_op(&self) -> NonZeroU64 { + self.start_op + } + + pub(crate) fn message(&self) -> &Option { + &self.message + } + + pub(crate) fn dependencies(&self) -> &[ChangeHash] { + &self.dependencies + } + + pub(crate) fn seq(&self) -> u64 { + self.seq + } + + pub(crate) fn timestamp(&self) -> i64 { + self.timestamp + } + + pub(crate) fn extra_bytes(&self) -> &[u8] { + &self.bytes[self.extra_bytes.clone()] + } + + pub(crate) fn checksum_valid(&self) -> bool { + self.header.checksum_valid() + } + + pub(crate) fn body_bytes(&self) -> &[u8] { + &self.bytes[self.header.len()..] + } + + pub(crate) fn bytes(&self) -> &[u8] { + &self.bytes + } + + pub(crate) fn hash(&self) -> ChangeHash { + self.header.hash() + } + + pub(crate) fn ops_data(&self) -> &[u8] { + &self.bytes[self.ops_data.clone()] + } + + pub(crate) fn into_owned(self) -> Change<'static, O> { + Change { + dependencies: self.dependencies, + bytes: Cow::Owned(self.bytes.into_owned()), + header: self.header, + actor: self.actor, + other_actors: self.other_actors, + seq: self.seq, + start_op: self.start_op, + timestamp: self.timestamp, + message: self.message, + ops_meta: self.ops_meta, + ops_data: self.ops_data, + extra_bytes: self.extra_bytes, + _phantom: PhantomData, + } + } + + pub(crate) fn compress(&self) -> Option> { + if self.bytes.len() > DEFLATE_MIN_SIZE { + Some(Compressed::compress(self)) + } else { + None + } + } +} + +fn length_prefixed_bytes>(b: B, out: &mut Vec) -> usize { + let prefix_len = leb128::write::unsigned(out, b.as_ref().len() as u64).unwrap(); + out.write_all(b.as_ref()).unwrap(); + prefix_len + b.as_ref().len() +} + +// Bunch of type safe builder 
boilerplate +pub(crate) struct Unset; +pub(crate) struct Set { + value: T, +} + +#[allow(non_camel_case_types)] +pub(crate) struct ChangeBuilder { + dependencies: Vec, + actor: ACTOR, + seq: SEQ, + start_op: START_OP, + timestamp: TIME, + message: Option, + extra_bytes: Option>, +} + +impl ChangeBuilder { + pub(crate) fn new() -> Self { + Self { + dependencies: vec![], + actor: Unset, + seq: Unset, + start_op: Unset, + timestamp: Unset, + message: None, + extra_bytes: None, + } + } +} + +#[allow(non_camel_case_types)] +impl ChangeBuilder { + pub(crate) fn with_dependencies(self, mut dependencies: Vec) -> Self { + dependencies.sort_unstable(); + Self { + dependencies, + ..self + } + } + + pub(crate) fn with_message(self, message: Option) -> Self { + Self { message, ..self } + } + + pub(crate) fn with_extra_bytes(self, extra_bytes: Vec) -> Self { + Self { + extra_bytes: Some(extra_bytes), + ..self + } + } +} + +#[allow(non_camel_case_types)] +impl ChangeBuilder { + pub(crate) fn with_seq(self, seq: u64) -> ChangeBuilder, TIME> { + ChangeBuilder { + dependencies: self.dependencies, + actor: self.actor, + seq: Set { value: seq }, + start_op: self.start_op, + timestamp: self.timestamp, + message: self.message, + extra_bytes: self.extra_bytes, + } + } +} + +#[allow(non_camel_case_types)] +impl ChangeBuilder { + pub(crate) fn with_actor( + self, + actor: ActorId, + ) -> ChangeBuilder, SEQ, TIME> { + ChangeBuilder { + dependencies: self.dependencies, + actor: Set { value: actor }, + seq: self.seq, + start_op: self.start_op, + timestamp: self.timestamp, + message: self.message, + extra_bytes: self.extra_bytes, + } + } +} + +impl ChangeBuilder { + pub(crate) fn with_start_op( + self, + start_op: NonZeroU64, + ) -> ChangeBuilder, ACTOR, SEQ, TIME> { + ChangeBuilder { + dependencies: self.dependencies, + actor: self.actor, + seq: self.seq, + start_op: Set { value: start_op }, + timestamp: self.timestamp, + message: self.message, + extra_bytes: self.extra_bytes, + } + } +} + 
+#[allow(non_camel_case_types)] +impl ChangeBuilder { + pub(crate) fn with_timestamp(self, time: i64) -> ChangeBuilder> { + ChangeBuilder { + dependencies: self.dependencies, + actor: self.actor, + seq: self.seq, + start_op: self.start_op, + timestamp: Set { value: time }, + message: self.message, + extra_bytes: self.extra_bytes, + } + } +} + +/// A row to be encoded as a change op +/// +/// The lifetime `'a` is the lifetime of the value and key data types. For types which cannot +/// provide a reference (e.g. because they are decoding from some columnar storage on each +/// iteration) this should be `'static`. +pub(crate) trait AsChangeOp<'a> { + /// The type of the Actor ID component of the op IDs for this impl. This is typically either + /// `&'a ActorID` or `usize` + type ActorId; + /// The type of the op IDs this impl produces. + type OpId: convert::OpId; + /// The type of the predecessor iterator returned by `Self::pred`. This can often be omitted + type PredIter: Iterator + ExactSizeIterator; + + fn obj(&self) -> convert::ObjId; + fn key(&self) -> convert::Key<'a, Self::OpId>; + fn insert(&self) -> bool; + fn action(&self) -> u64; + fn val(&self) -> Cow<'a, ScalarValue>; + fn pred(&self) -> Self::PredIter; +} + +impl ChangeBuilder, Set, Set, Set> { + pub(crate) fn build<'a, A, I, O>( + self, + ops: I, + ) -> Result, PredOutOfOrder> + where + A: AsChangeOp<'a, OpId = O> + 'a, + O: convert::OpId<&'a ActorId> + 'a, + I: Iterator + Clone + 'a, + { + let mut col_data = Vec::new(); + let actors = change_actors::ChangeActors::new(self.actor.value, ops)?; + let cols = ChangeOpsColumns::encode(actors.iter(), &mut col_data); + + let (actor, other_actors) = actors.done(); + + let mut data = Vec::with_capacity(col_data.len()); + leb128::write::unsigned(&mut data, self.dependencies.len() as u64).unwrap(); + for dep in &self.dependencies { + data.write_all(dep.as_bytes()).unwrap(); + } + length_prefixed_bytes(&actor, &mut data); + leb128::write::unsigned(&mut data, 
self.seq.value).unwrap(); + leb128::write::unsigned(&mut data, self.start_op.value.into()).unwrap(); + leb128::write::signed(&mut data, self.timestamp.value).unwrap(); + length_prefixed_bytes( + self.message.as_ref().map(|m| m.as_bytes()).unwrap_or(&[]), + &mut data, + ); + leb128::write::unsigned(&mut data, other_actors.len() as u64).unwrap(); + for actor in other_actors.iter() { + length_prefixed_bytes(&actor, &mut data); + } + cols.raw_columns().write(&mut data); + let ops_data_start = data.len(); + let ops_data = ops_data_start..(ops_data_start + col_data.len()); + + data.extend(col_data); + let extra_bytes = + data.len()..(data.len() + self.extra_bytes.as_ref().map(|e| e.len()).unwrap_or(0)); + if let Some(extra) = self.extra_bytes { + data.extend(extra); + } + + let header = Header::new(ChunkType::Change, &data); + + let mut bytes = Vec::with_capacity(header.len() + data.len()); + header.write(&mut bytes); + bytes.extend(data); + + let ops_data = shift_range(ops_data, header.len()); + let extra_bytes = shift_range(extra_bytes, header.len()); + + Ok(Change { + bytes: Cow::Owned(bytes), + header, + dependencies: self.dependencies, + actor, + other_actors, + seq: self.seq.value, + start_op: self.start_op.value, + timestamp: self.timestamp.value, + message: self.message, + ops_meta: cols, + ops_data, + extra_bytes, + _phantom: PhantomData, + }) + } +} diff --git a/automerge/src/storage/change/change_actors.rs b/automerge/src/storage/change/change_actors.rs new file mode 100644 index 00000000..61f1221d --- /dev/null +++ b/automerge/src/storage/change/change_actors.rs @@ -0,0 +1,304 @@ +use std::collections::{BTreeMap, BTreeSet}; + +use crate::convert; + +use super::AsChangeOp; + +/// This struct represents the ordering of actor indices in a change chunk. Operations in a change +/// chunk are encoded with the actor ID represented as an offset into an array of actors which are +/// encoded at the start of the chunk. 
This array is in a specific order: the author of the change +/// is always the first actor, then all other actors referenced in a change are encoded in +/// lexicographic order. +/// +/// The intended usage is to construct a `ChangeActors` from an iterator over `AsChangeOp` where +/// the `ActorId` of the `AsChangeOp` implementation is the original actor ID. The resulting +/// `ChangeActors` implements `Iterator` where the `item` implements +/// `AsChangeOp>`, which can be passed to `ChangeOpColumns::encode`. +/// +/// Once encoding is complete you can use `ChangeActors::done` to retrieve the original actor and the +/// other actors in the change. +/// +/// # Note on type parameters +/// +/// The type paramters are annoying, they basically exist because we can't have generic associated +/// types, so we have to feed the concrete types of the associated types of the `AsChangeOp` +/// implementation through here. Here's what they all refer to: +/// +/// * A - The type of the actor ID used in the operation IDs of the incoming changes +/// * I - The type of the iterator over the `AsChangeOp` implementation of the incoming changes +/// * O - The concrete type of the operation ID which implementas `convert::OpId` +/// * C - The concrete type (which implements `AsChangeOp`) of the incoming changes +/// * 'a - The lifetime bound for the AsChangeOp trait and it's associated types +/// +/// Maybe when GATs land we can make this simpler. 
+pub(crate) struct ChangeActors<'a, ActorId, I, O, C> { + actor: ActorId, + other_actors: Vec, + index: BTreeMap, + wrapped: I, + num_ops: usize, + _phantom: std::marker::PhantomData<(&'a O, C)>, +} + +#[derive(thiserror::Error, Debug)] +#[error("actor index {0} referenced by an operation was not found in the changes")] +pub(crate) struct MissingActor(usize); + +#[derive(Debug, thiserror::Error)] +#[error("pred OpIds out of order")] +pub(crate) struct PredOutOfOrder; + +impl<'a, A, I, O, C> ChangeActors<'a, A, I, O, C> +where + A: PartialEq + Ord + Clone + std::hash::Hash + 'static, + O: convert::OpId<&'a A> + 'a, + C: AsChangeOp<'a, OpId = O> + 'a, + I: Iterator + Clone + 'a, +{ + /// Create a new change actor mapping + /// + /// # Arguments + /// * actor - the actor ID of the actor who authored this change + /// * ops - an iterator containing the operations which will be encoded into the change + /// + /// # Errors + /// * If one of the ops herein contains a `pred` with ops which are not in lamport timestamp + /// order + pub(crate) fn new(actor: A, ops: I) -> Result, PredOutOfOrder> { + // Change actors indices are encoded with the 0th element being the actor who authored the + // change and all other actors referenced in the chain following the author in + // lexicographic order. 
Here we collect all the actors referenced by operations in `ops` + let (num_ops, mut other_actors) = + ops.clone() + .try_fold((0, BTreeSet::new()), |(count, mut acc), op| { + if let convert::Key::Elem(convert::ElemId::Op(o)) = op.key() { + if o.actor() != &actor { + acc.insert(o.actor()); + } + } + + if !are_sorted(op.pred()) { + return Err(PredOutOfOrder); + } + for pred in op.pred() { + if pred.actor() != &actor { + acc.insert(pred.actor()); + } + } + if let convert::ObjId::Op(o) = op.obj() { + if o.actor() != &actor { + acc.insert(o.actor()); + } + } + Ok((count + 1, acc)) + })?; + // This shouldn't be necessary but just in case + other_actors.remove(&actor); + let mut other_actors = other_actors.into_iter().cloned().collect::>(); + other_actors.sort(); + let index = std::iter::once(actor.clone()) + .chain(other_actors.clone().into_iter()) + .enumerate() + .map(|(idx, actor)| (actor, idx)) + .collect(); + Ok(ChangeActors { + actor, + other_actors, + index, + wrapped: ops, + num_ops, + _phantom: std::marker::PhantomData, + }) + } + + /// Translate an OpID from the OpSet index to the change index + fn translate_opid(&self, opid: &O) -> ChangeOpId { + ChangeOpId { + actor: *self.index.get(opid.actor()).unwrap(), + counter: opid.counter(), + } + } + + /// Returns a clonable iterator over the converted operations. The item of the iterator is an + /// implementation of `AsChangeOp` which uses the index of the actor of each operation into the + /// actors as encoded in a change. 
This is suitable for passing to `ChangeOpColumns::encode` + pub(crate) fn iter<'b>(&'b self) -> WithChangeActorsOpIter<'b, 'a, A, I, O, C> { + WithChangeActorsOpIter { + change_actors: self, + inner: self.wrapped.clone(), + } + } + + pub(crate) fn done(self) -> (A, Vec) { + (self.actor, self.other_actors) + } +} + +/// The actual implementation of the converted iterator +pub(crate) struct WithChangeActorsOpIter<'actors, 'aschangeop, A, I, O, C> { + change_actors: &'actors ChangeActors<'aschangeop, A, I, O, C>, + inner: I, +} + +impl<'actors, 'aschangeop, A: 'aschangeop, I, O, C> Clone + for WithChangeActorsOpIter<'actors, 'aschangeop, A, I, O, C> +where + I: Clone, +{ + fn clone(&self) -> Self { + Self { + change_actors: self.change_actors, + inner: self.inner.clone(), + } + } +} + +impl<'actors, 'aschangeop, A: 'aschangeop, I, O, C> Iterator + for WithChangeActorsOpIter<'actors, 'aschangeop, A, I, O, C> +where + C: AsChangeOp<'aschangeop, OpId = O>, + O: convert::OpId<&'aschangeop A>, + I: Iterator + Clone, +{ + type Item = WithChangeActors<'actors, 'aschangeop, A, I, O, C>; + + fn next(&mut self) -> Option { + self.inner.next().map(|o| WithChangeActors { + op: o, + actors: self.change_actors, + }) + } +} + +impl<'actors, 'aschangeop, A: 'aschangeop, I, O, C> ExactSizeIterator + for WithChangeActorsOpIter<'actors, 'aschangeop, A, I, O, C> +where + C: AsChangeOp<'aschangeop, OpId = O>, + O: convert::OpId<&'aschangeop A>, + I: Iterator + Clone, +{ + fn len(&self) -> usize { + self.change_actors.num_ops + } +} + +pub(crate) struct ChangeOpId { + actor: usize, + counter: u64, +} + +impl convert::OpId for ChangeOpId { + fn actor(&self) -> usize { + self.actor + } + + fn counter(&self) -> u64 { + self.counter + } +} + +/// A struct which implements `AsChangeOp` by translating the actor IDs in the incoming operations +/// into the index into the actors in the `ChangeActors`. 
+pub(crate) struct WithChangeActors<'actors, 'aschangeop, A, I, O, C> { + op: C, + actors: &'actors ChangeActors<'aschangeop, A, I, O, C>, +} + +impl<'actors, 'aschangeop, A, I, O, P, C> AsChangeOp<'aschangeop> + for WithChangeActors<'actors, 'aschangeop, A, I, O, C> +where + A: PartialEq + Ord + Clone + std::hash::Hash + 'static, + O: convert::OpId<&'aschangeop A>, + P: Iterator + ExactSizeIterator + 'aschangeop, + C: AsChangeOp<'aschangeop, PredIter = P, OpId = O> + 'aschangeop, + I: Iterator + Clone + 'aschangeop, +{ + type ActorId = usize; + type OpId = ChangeOpId; + type PredIter = WithChangeActorsPredIter<'actors, 'aschangeop, A, I, O, C, P>; + + fn action(&self) -> u64 { + self.op.action() + } + + fn insert(&self) -> bool { + self.op.insert() + } + + fn pred(&self) -> Self::PredIter { + WithChangeActorsPredIter { + wrapped: self.op.pred(), + actors: self.actors, + _phantom: std::marker::PhantomData, + } + } + + fn key(&self) -> convert::Key<'aschangeop, Self::OpId> { + self.op.key().map(|o| self.actors.translate_opid(&o)) + } + + fn obj(&self) -> convert::ObjId { + self.op.obj().map(|o| self.actors.translate_opid(&o)) + } + + fn val(&self) -> std::borrow::Cow<'aschangeop, crate::ScalarValue> { + self.op.val() + } +} + +pub(crate) struct WithChangeActorsPredIter<'actors, 'aschangeop, A, I, O, C, P> { + wrapped: P, + actors: &'actors ChangeActors<'aschangeop, A, I, O, C>, + _phantom: std::marker::PhantomData, +} + +impl<'actors, 'aschangeop, A, I, O, C, P> ExactSizeIterator + for WithChangeActorsPredIter<'actors, 'aschangeop, A, I, O, C, P> +where + A: PartialEq + Ord + Clone + std::hash::Hash + 'static, + O: convert::OpId<&'aschangeop A>, + P: Iterator + ExactSizeIterator + 'aschangeop, + C: AsChangeOp<'aschangeop, OpId = O> + 'aschangeop, + I: Iterator + Clone + 'aschangeop, +{ + fn len(&self) -> usize { + self.wrapped.len() + } +} + +impl<'actors, 'aschangeop, A, I, O, C, P> Iterator + for WithChangeActorsPredIter<'actors, 'aschangeop, A, I, O, C, P> +where 
+ A: PartialEq + Ord + Clone + std::hash::Hash + 'static, + O: convert::OpId<&'aschangeop A>, + P: Iterator + 'aschangeop, + C: AsChangeOp<'aschangeop, OpId = O> + 'aschangeop, + I: Iterator + Clone + 'aschangeop, +{ + type Item = ChangeOpId; + + fn next(&mut self) -> Option { + self.wrapped.next().map(|o| self.actors.translate_opid(&o)) + } +} + +fn are_sorted(mut opids: I) -> bool +where + A: PartialEq + Ord + Clone, + O: convert::OpId, + I: Iterator, +{ + if let Some(first) = opids.next() { + let mut prev = first; + for opid in opids { + if opid.counter() < prev.counter() { + return false; + } + if opid.counter() == prev.counter() && opid.actor() < prev.actor() { + return false; + } + prev = opid; + } + } + true +} diff --git a/automerge/src/storage/change/change_op_columns.rs b/automerge/src/storage/change/change_op_columns.rs new file mode 100644 index 00000000..432df958 --- /dev/null +++ b/automerge/src/storage/change/change_op_columns.rs @@ -0,0 +1,481 @@ +use std::{convert::TryFrom, ops::Range}; + +use crate::{ + columnar_2::{ + column_range::{ + generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange}, + BooleanRange, DeltaRange, Key, KeyEncoder, KeyIter, KeyRange, ObjIdEncoder, ObjIdIter, + ObjIdRange, OpIdListEncoder, OpIdListIter, OpIdListRange, RleRange, ValueEncoder, + ValueIter, ValueRange, + }, + encoding::{ + BooleanDecoder, BooleanEncoder, ColumnDecoder, DecodeColumnError, RleDecoder, + RleEncoder, + }, + }, + convert, + storage::{ + change::AsChangeOp, + columns::{ + compression, ColumnId, ColumnSpec, ColumnType, Columns, MismatchingColumn, RawColumn, + }, + RawColumns, + }, + types::{ElemId, ObjId, OpId, ScalarValue}, +}; + +const OBJ_COL_ID: ColumnId = ColumnId::new(0); +const KEY_COL_ID: ColumnId = ColumnId::new(1); +const INSERT_COL_ID: ColumnId = ColumnId::new(3); +const ACTION_COL_ID: ColumnId = ColumnId::new(4); +const VAL_COL_ID: ColumnId = ColumnId::new(5); +const PRED_COL_ID: ColumnId = ColumnId::new(7); + 
+#[derive(Clone, Debug, PartialEq)] +pub(crate) struct ChangeOp { + pub(crate) key: Key, + pub(crate) insert: bool, + pub(crate) val: ScalarValue, + pub(crate) pred: Vec, + pub(crate) action: u64, + pub(crate) obj: ObjId, +} + +impl<'a, A: AsChangeOp<'a, ActorId = usize, OpId = OpId>> From for ChangeOp { + fn from(a: A) -> Self { + ChangeOp { + key: match a.key() { + convert::Key::Prop(s) => Key::Prop(s.into_owned()), + convert::Key::Elem(convert::ElemId::Head) => Key::Elem(ElemId::head()), + convert::Key::Elem(convert::ElemId::Op(o)) => Key::Elem(ElemId(o)), + }, + obj: match a.obj() { + convert::ObjId::Root => ObjId::root(), + convert::ObjId::Op(o) => ObjId(o), + }, + val: a.val().into_owned(), + pred: a.pred().collect(), + insert: a.insert(), + action: a.action(), + } + } +} + +impl<'a> AsChangeOp<'a> for &'a ChangeOp { + type OpId = &'a crate::types::OpId; + type ActorId = usize; + type PredIter = std::slice::Iter<'a, crate::types::OpId>; + + fn obj(&self) -> convert::ObjId { + if self.obj.is_root() { + convert::ObjId::Root + } else { + convert::ObjId::Op(self.obj.opid()) + } + } + + fn key(&self) -> convert::Key<'a, Self::OpId> { + match &self.key { + Key::Prop(s) => convert::Key::Prop(std::borrow::Cow::Borrowed(s)), + Key::Elem(e) if e.is_head() => convert::Key::Elem(convert::ElemId::Head), + Key::Elem(e) => convert::Key::Elem(convert::ElemId::Op(&e.0)), + } + } + + fn val(&self) -> std::borrow::Cow<'a, ScalarValue> { + std::borrow::Cow::Borrowed(&self.val) + } + + fn pred(&self) -> Self::PredIter { + self.pred.iter() + } + + fn insert(&self) -> bool { + self.insert + } + + fn action(&self) -> u64 { + self.action + } +} + +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct ChangeOpsColumns { + obj: Option, + key: KeyRange, + insert: BooleanRange, + action: RleRange, + val: ValueRange, + pred: OpIdListRange, +} + +impl ChangeOpsColumns { + pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> ChangeOpsIter<'a> { + ChangeOpsIter { + failed: false, + obj: 
self.obj.as_ref().map(|o| o.iter(data)), + key: self.key.iter(data), + insert: self.insert.decoder(data), + action: self.action.decoder(data), + val: self.val.iter(data), + pred: self.pred.iter(data), + } + } + + #[tracing::instrument(skip(ops, out))] + pub(crate) fn encode<'a, 'b, 'c, I, C, Op>(ops: I, out: &'b mut Vec) -> ChangeOpsColumns + where + I: Iterator + Clone + ExactSizeIterator + 'a, + Op: convert::OpId + 'a, + C: AsChangeOp<'c, OpId = Op> + 'a, + { + if ops.len() > 10000 { + Self::encode_rowwise(ops, out) + } else { + Self::encode_columnwise(ops, out) + } + } + + pub(crate) fn encode_columnwise<'a, 'b, 'c, I, C, Op>( + ops: I, + out: &'b mut Vec, + ) -> ChangeOpsColumns + where + I: Iterator + Clone + 'a, + Op: convert::OpId + 'a, + C: AsChangeOp<'c, OpId = Op> + 'a, + { + let obj = ObjIdRange::encode(ops.clone().map(|o| o.obj()), out); + let key = KeyRange::encode(ops.clone().map(|o| o.key()), out); + let insert = BooleanRange::encode(ops.clone().map(|o| o.insert()), out); + let action = RleRange::encode(ops.clone().map(|o| Some(o.action())), out); + let val = ValueRange::encode(ops.clone().map(|o| o.val()), out); + let pred = OpIdListRange::encode(ops.map(|o| o.pred()), out); + Self { + obj, + key, + insert, + action, + val, + pred, + } + } + + fn encode_rowwise<'a, 'b, 'c, I, C, Op>(ops: I, out: &'b mut Vec) -> ChangeOpsColumns + where + I: Iterator + Clone + 'a, + Op: convert::OpId + 'a, + C: AsChangeOp<'c, OpId = Op> + 'a, + { + let mut obj = ObjIdEncoder::new(); + let mut key = KeyEncoder::new(); + let mut insert = BooleanEncoder::new(); + let mut action = RleEncoder::<_, u64>::from(Vec::new()); + let mut val = ValueEncoder::new(); + let mut pred = OpIdListEncoder::new(); + for op in ops { + obj.append(op.obj()); + key.append(op.key()); + insert.append(op.insert()); + action.append_value(op.action() as u64); + val.append(&op.val()); + pred.append(op.pred()); + } + let obj = obj.finish(out); + let key = key.finish(out); + + let insert_start = 
out.len(); + let (insert, _) = insert.finish(); + out.extend(insert); + let insert = BooleanRange::from(insert_start..out.len()); + + let action_start = out.len(); + let (action, _) = action.finish(); + out.extend(action); + let action = RleRange::from(action_start..out.len()); + + let val = val.finish(out); + let pred = pred.finish(out); + + Self { + obj, + key, + insert, + action, + val, + pred, + } + } + + pub(crate) fn raw_columns(&self) -> RawColumns { + let mut cols = vec![ + RawColumn::new( + ColumnSpec::new(OBJ_COL_ID, ColumnType::Actor, false), + self.obj + .as_ref() + .map(|o| o.actor_range().clone().into()) + .unwrap_or(0..0), + ), + RawColumn::new( + ColumnSpec::new(OBJ_COL_ID, ColumnType::Integer, false), + self.obj + .as_ref() + .map(|o| o.counter_range().clone().into()) + .unwrap_or(0..0), + ), + RawColumn::new( + ColumnSpec::new(KEY_COL_ID, ColumnType::Actor, false), + self.key.actor_range().clone().into(), + ), + RawColumn::new( + ColumnSpec::new(KEY_COL_ID, ColumnType::DeltaInteger, false), + self.key.counter_range().clone().into(), + ), + RawColumn::new( + ColumnSpec::new(KEY_COL_ID, ColumnType::String, false), + self.key.string_range().clone().into(), + ), + RawColumn::new( + ColumnSpec::new(INSERT_COL_ID, ColumnType::Boolean, false), + self.insert.clone().into(), + ), + RawColumn::new( + ColumnSpec::new(ACTION_COL_ID, ColumnType::Integer, false), + self.action.clone().into(), + ), + RawColumn::new( + ColumnSpec::new(VAL_COL_ID, ColumnType::ValueMetadata, false), + self.val.meta_range().clone().into(), + ), + ]; + if !self.val.raw_range().is_empty() { + cols.push(RawColumn::new( + ColumnSpec::new(VAL_COL_ID, ColumnType::Value, false), + self.val.raw_range().clone().into(), + )); + } + cols.push(RawColumn::new( + ColumnSpec::new(PRED_COL_ID, ColumnType::Group, false), + self.pred.group_range().clone().into(), + )); + if !self.pred.actor_range().is_empty() { + cols.extend([ + RawColumn::new( + ColumnSpec::new(PRED_COL_ID, ColumnType::Actor, 
false), + self.pred.actor_range().clone().into(), + ), + RawColumn::new( + ColumnSpec::new(PRED_COL_ID, ColumnType::DeltaInteger, false), + self.pred.counter_range().clone().into(), + ), + ]); + } + cols.into_iter().collect() + } +} + +#[derive(thiserror::Error, Debug)] +#[error(transparent)] +pub struct ReadChangeOpError(#[from] DecodeColumnError); + +#[derive(Clone)] +pub(crate) struct ChangeOpsIter<'a> { + failed: bool, + obj: Option>, + key: KeyIter<'a>, + insert: BooleanDecoder<'a>, + action: RleDecoder<'a, u64>, + val: ValueIter<'a>, + pred: OpIdListIter<'a>, +} + +impl<'a> ChangeOpsIter<'a> { + fn done(&self) -> bool { + self.action.done() + } + + fn try_next(&mut self) -> Result, ReadChangeOpError> { + if self.failed || self.done() { + Ok(None) + } else { + let obj = if let Some(ref mut objs) = self.obj { + objs.next_in_col("obj")? + } else { + ObjId::root() + }; + let key = self.key.next_in_col("key")?; + let insert = self.insert.next_in_col("insert")?; + let action = self.action.next_in_col("action")?; + let val = self.val.next_in_col("value")?; + let pred = self.pred.next_in_col("pred")?; + Ok(Some(ChangeOp { + obj, + key, + insert, + action, + val, + pred, + })) + } + } +} + +impl<'a> Iterator for ChangeOpsIter<'a> { + type Item = Result; + + fn next(&mut self) -> Option { + match self.try_next() { + Ok(v) => v.map(Ok), + Err(e) => { + self.failed = true; + Some(Err(e)) + } + } + } +} + +#[derive(thiserror::Error, Debug)] +pub(crate) enum ParseChangeColumnsError { + #[error("mismatching column at {index}.")] + MismatchingColumn { index: usize }, +} + +impl From for ParseChangeColumnsError { + fn from(m: MismatchingColumn) -> Self { + Self::MismatchingColumn { index: m.index } + } +} + +impl TryFrom for ChangeOpsColumns { + type Error = ParseChangeColumnsError; + + fn try_from(columns: Columns) -> Result { + let mut obj_actor: Option> = None; + let mut obj_ctr: Option> = None; + let mut key_actor: Option> = None; + let mut key_ctr: Option = None; + let 
mut key_str: Option> = None; + let mut insert: Option> = None; + let mut action: Option> = None; + let mut val: Option = None; + let mut pred_group: Option> = None; + let mut pred_actor: Option> = None; + let mut pred_ctr: Option = None; + let mut other = Columns::empty(); + + for (index, col) in columns.into_iter().enumerate() { + match (col.id(), col.col_type()) { + (OBJ_COL_ID, ColumnType::Actor) => obj_actor = Some(col.range().into()), + (OBJ_COL_ID, ColumnType::Integer) => obj_ctr = Some(col.range().into()), + (KEY_COL_ID, ColumnType::Actor) => key_actor = Some(col.range().into()), + (KEY_COL_ID, ColumnType::DeltaInteger) => key_ctr = Some(col.range().into()), + (KEY_COL_ID, ColumnType::String) => key_str = Some(col.range().into()), + (INSERT_COL_ID, ColumnType::Boolean) => insert = Some(col.range()), + (ACTION_COL_ID, ColumnType::Integer) => action = Some(col.range()), + (VAL_COL_ID, ColumnType::ValueMetadata) => match col.into_ranges() { + GenericColumnRange::Value(v) => { + val = Some(v); + } + _ => return Err(ParseChangeColumnsError::MismatchingColumn { index }), + }, + (PRED_COL_ID, ColumnType::Group) => match col.into_ranges() { + GenericColumnRange::Group(GroupRange { num, values }) => { + let mut cols = values.into_iter(); + pred_group = Some(num); + // If there was no data in the group at all then the columns won't be + // present + if cols.len() == 0 { + pred_actor = Some((0..0).into()); + pred_ctr = Some((0..0).into()); + } else { + let first = cols.next(); + let second = cols.next(); + match (first, second) { + ( + Some(GroupedColumnRange::Simple(SimpleColRange::RleInt( + actor_range, + ))), + Some(GroupedColumnRange::Simple(SimpleColRange::Delta( + ctr_range, + ))), + ) => { + pred_actor = Some(actor_range); + pred_ctr = Some(ctr_range); + } + _ => { + return Err(ParseChangeColumnsError::MismatchingColumn { + index, + }) + } + } + } + if cols.next().is_some() { + return Err(ParseChangeColumnsError::MismatchingColumn { index }); + } + } + _ => 
return Err(ParseChangeColumnsError::MismatchingColumn { index }), + }, + (other_type, other_col) => { + tracing::warn!(typ=?other_type, id=?other_col, "unknown column"); + other.append(col); + } + } + } + let pred = OpIdListRange::new( + pred_group.unwrap_or_else(|| (0..0).into()), + pred_actor.unwrap_or_else(|| (0..0).into()), + pred_ctr.unwrap_or_else(|| (0..0).into()), + ); + Ok(ChangeOpsColumns { + obj: ObjIdRange::new( + obj_actor.unwrap_or_else(|| (0..0).into()), + obj_ctr.unwrap_or_else(|| (0..0).into()), + ), + key: KeyRange::new( + key_actor.unwrap_or_else(|| (0..0).into()), + key_ctr.unwrap_or_else(|| (0..0).into()), + key_str.unwrap_or_else(|| (0..0).into()), + ), + insert: insert.unwrap_or(0..0).into(), + action: action.unwrap_or(0..0).into(), + val: val.unwrap_or_else(|| ValueRange::new((0..0).into(), (0..0).into())), + pred, + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::columnar_2::encoding::properties::{key, opid, scalar_value}; + use proptest::prelude::*; + + prop_compose! { + fn change_op() + (key in key(), + value in scalar_value(), + pred in proptest::collection::vec(opid(), 0..20), + action in 0_u64..6, + obj in opid(), + insert in any::()) -> ChangeOp { + ChangeOp { + obj: obj.into(), + key, + val: value, + pred, + action, + insert, + } + } + } + + proptest! 
{ + #[test] + fn test_encode_decode_change_ops(ops in proptest::collection::vec(change_op(), 0..100)) { + let mut out = Vec::new(); + let cols2 = ChangeOpsColumns::encode(ops.iter(), &mut out); + let decoded = cols2.iter(&out[..]).collect::, _>>().unwrap(); + assert_eq!(ops, decoded); + } + } +} diff --git a/automerge/src/storage/change/compressed.rs b/automerge/src/storage/change/compressed.rs new file mode 100644 index 00000000..55d56ffb --- /dev/null +++ b/automerge/src/storage/change/compressed.rs @@ -0,0 +1,51 @@ +use std::{borrow::Cow, io::Read}; + +use crate::storage::{Change, CheckSum, ChunkType, MAGIC_BYTES}; + +use super::OpReadState; + +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct Compressed<'a> { + checksum: CheckSum, + bytes: Cow<'a, [u8]>, +} + +impl<'a> Compressed<'a> { + pub(crate) fn new(checksum: CheckSum, bytes: Cow<'a, [u8]>) -> Self { + Self { checksum, bytes } + } + + pub(crate) fn compress<'b, O: OpReadState>(change: &'b Change<'b, O>) -> Compressed<'static> { + let mut result = Vec::with_capacity(change.bytes().len()); + result.extend(MAGIC_BYTES); + result.extend(change.checksum().bytes()); + result.push(u8::from(ChunkType::Compressed)); + let mut deflater = flate2::bufread::DeflateEncoder::new( + change.body_bytes(), + flate2::Compression::default(), + ); + let mut deflated = Vec::new(); + let deflated_len = deflater.read_to_end(&mut deflated).unwrap(); + leb128::write::unsigned(&mut result, deflated_len as u64).unwrap(); + result.extend(&deflated[..]); + Compressed { + checksum: change.checksum(), + bytes: Cow::Owned(result), + } + } + + pub(crate) fn bytes(&self) -> Cow<'a, [u8]> { + self.bytes.clone() + } + + pub(crate) fn checksum(&self) -> CheckSum { + self.checksum + } + + pub(crate) fn into_owned(self) -> Compressed<'static> { + Compressed { + checksum: self.checksum, + bytes: Cow::Owned(self.bytes.into_owned()), + } + } +} diff --git a/automerge/src/storage/change/op_with_change_actors.rs 
b/automerge/src/storage/change/op_with_change_actors.rs new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/automerge/src/storage/change/op_with_change_actors.rs @@ -0,0 +1 @@ + diff --git a/automerge/src/storage/chunk.rs b/automerge/src/storage/chunk.rs new file mode 100644 index 00000000..93c05c9d --- /dev/null +++ b/automerge/src/storage/chunk.rs @@ -0,0 +1,292 @@ +use std::{ + borrow::Cow, + convert::{TryFrom, TryInto}, + io::Read, + ops::Range, +}; + +use sha2::{Digest, Sha256}; + +use super::{change::Unverified, parse, Change, Compressed, Document, MAGIC_BYTES}; +use crate::{columnar_2::encoding::leb128::ulebsize, ChangeHash}; + +pub(crate) enum Chunk<'a> { + Document(Document<'a>), + Change(Change<'a, Unverified>), + CompressedChange(Change<'static, Unverified>, Compressed<'a>), +} + +pub(crate) mod error { + use super::parse; + use crate::storage::{change, document}; + + #[derive(thiserror::Error, Debug)] + pub(crate) enum Chunk { + #[error("there was data in a chunk leftover after parsing")] + LeftoverData, + #[error(transparent)] + Leb128(#[from] parse::leb128::Error), + #[error("failed to parse header: {0}")] + Header(#[from] Header), + #[error("bad change chunk: {0}")] + Change(#[from] change::ParseError), + #[error("bad document chunk: {0}")] + Document(#[from] document::ParseError), + #[error("unable to decompresse compressed chunk")] + Deflate, + } + + #[derive(thiserror::Error, Debug)] + pub(crate) enum Header { + #[error(transparent)] + Leb128(#[from] parse::leb128::Error), + #[error("unknown chunk type: {0}")] + UnknownChunkType(u8), + #[error("Invalid magic bytes")] + InvalidMagicBytes, + } +} + +impl<'a> Chunk<'a> { + pub(crate) fn parse( + input: parse::Input<'a>, + ) -> parse::ParseResult<'a, Chunk<'a>, error::Chunk> { + let (i, header) = Header::parse::(input)?; + let parse::Split { + first: chunk_input, + remaining, + } = i.split(header.data_bytes().len()); + let chunk = match header.chunk_type { + ChunkType::Change => { + let 
(remaining, change) = + Change::parse_following_header(chunk_input, header).map_err(|e| e.lift())?; + if !remaining.is_empty() { + return Err(parse::ParseError::Error(error::Chunk::LeftoverData)); + } + Chunk::Change(change) + } + ChunkType::Document => { + let (remaining, doc) = + Document::parse(chunk_input, header).map_err(|e| e.lift())?; + if !remaining.is_empty() { + return Err(parse::ParseError::Error(error::Chunk::LeftoverData)); + } + Chunk::Document(doc) + } + ChunkType::Compressed => { + let compressed = &input.unconsumed_bytes()[header.data_bytes()]; + let mut decoder = flate2::bufread::DeflateDecoder::new(compressed); + let mut decompressed = Vec::new(); + decoder + .read_to_end(&mut decompressed) + .map_err(|_| parse::ParseError::Error(error::Chunk::Deflate))?; + let inner_header = header.with_data(ChunkType::Change, &decompressed); + let mut inner_chunk = Vec::with_capacity(inner_header.len() + decompressed.len()); + inner_header.write(&mut inner_chunk); + inner_chunk.extend(&decompressed); + let (remaining, change) = + Change::parse(parse::Input::new(&inner_chunk)).map_err(|e| e.lift())?; + if !remaining.is_empty() { + return Err(parse::ParseError::Error(error::Chunk::LeftoverData)); + } + Chunk::CompressedChange( + change.into_owned(), + Compressed::new(header.checksum, Cow::Borrowed(chunk_input.bytes())), + ) + } + }; + Ok((remaining, chunk)) + } + + pub(crate) fn checksum_valid(&self) -> bool { + match self { + Self::Document(d) => d.checksum_valid(), + Self::Change(c) => c.checksum_valid(), + Self::CompressedChange(change, compressed) => { + compressed.checksum() == change.checksum() && change.checksum_valid() + } + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq)] +pub(crate) enum ChunkType { + Document, + Change, + Compressed, +} + +impl TryFrom for ChunkType { + type Error = u8; + + fn try_from(value: u8) -> Result { + match value { + 0 => Ok(Self::Document), + 1 => Ok(Self::Change), + 2 => Ok(Self::Compressed), + other => Err(other), + } + 
} +} + +impl From for u8 { + fn from(ct: ChunkType) -> Self { + match ct { + ChunkType::Document => 0, + ChunkType::Change => 1, + ChunkType::Compressed => 2, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq)] +pub(crate) struct CheckSum([u8; 4]); + +impl CheckSum { + pub(crate) fn bytes(&self) -> [u8; 4] { + self.0 + } +} + +impl From<[u8; 4]> for CheckSum { + fn from(raw: [u8; 4]) -> Self { + CheckSum(raw) + } +} + +impl AsRef<[u8]> for CheckSum { + fn as_ref(&self) -> &[u8] { + &self.0 + } +} + +impl From for CheckSum { + fn from(h: ChangeHash) -> Self { + let bytes = h.as_bytes(); + [bytes[0], bytes[1], bytes[2], bytes[3]].into() + } +} + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct Header { + checksum: CheckSum, + chunk_type: ChunkType, + data_len: usize, + header_size: usize, + hash: ChangeHash, +} + +impl Header { + pub(crate) fn new(chunk_type: ChunkType, data: &[u8]) -> Self { + let hash = hash(chunk_type, data); + Self { + hash, + checksum: hash.checksum().into(), + data_len: data.len(), + header_size: MAGIC_BYTES.len() + + 4 // checksum + + 1 // chunk type + + (ulebsize(data.len() as u64) as usize), + chunk_type, + } + } + + /// Returns a header with the same checksum but with a different chunk type and data length. + /// This is primarily useful when processing compressed chunks, where the checksum is actually + /// derived from the uncompressed data. 
+ pub(crate) fn with_data(&self, chunk_type: ChunkType, data: &[u8]) -> Header { + let hash = hash(chunk_type, data); + Self { + hash, + checksum: self.checksum, + data_len: data.len(), + header_size: MAGIC_BYTES.len() + + 4 // checksum + + 1 // chunk type + + (ulebsize(data.len() as u64) as usize), + chunk_type, + } + } + + pub(crate) fn len(&self) -> usize { + self.header_size + } + + pub(crate) fn write(&self, out: &mut Vec) { + out.extend(MAGIC_BYTES); + out.extend(self.checksum.bytes()); + out.push(u8::from(self.chunk_type)); + leb128::write::unsigned(out, self.data_len as u64).unwrap(); + } + + pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Header, E> + where + E: From, + { + let ( + i, + parse::RangeOf { + range: header, + value: (checksum_bytes, chunk_type, chunk_len), + }, + ) = parse::range_of( + |i| { + let (i, magic) = parse::take4(i)?; + if magic != MAGIC_BYTES { + return Err(parse::ParseError::Error(E::from( + error::Header::InvalidMagicBytes, + ))); + } + let (i, checksum_bytes) = parse::take4(i)?; + let (i, raw_chunk_type) = parse::take1(i)?; + let chunk_type: ChunkType = raw_chunk_type.try_into().map_err(|_| { + parse::ParseError::Error(E::from(error::Header::UnknownChunkType( + raw_chunk_type, + ))) + })?; + let (i, chunk_len) = parse::leb128_u64(i).map_err(|e| e.lift())?; + Ok((i, (checksum_bytes, chunk_type, chunk_len))) + }, + input, + )?; + + let (_, data) = parse::take_n(chunk_len as usize, i)?; + let hash = hash(chunk_type, data); + Ok(( + i, + Header { + checksum: checksum_bytes.into(), + chunk_type, + data_len: data.len() as usize, + header_size: header.len(), + hash, + }, + )) + } + + /// The range of the input which corresponds to the data specified by this header + pub(crate) fn data_bytes(&self) -> Range { + self.header_size..(self.header_size + self.data_len) + } + + pub(crate) fn hash(&self) -> ChangeHash { + self.hash + } + + pub(crate) fn checksum_valid(&self) -> bool { + CheckSum(self.hash.checksum()) == 
self.checksum + } + + pub(crate) fn checksum(&self) -> CheckSum { + self.checksum + } +} + +fn hash(typ: ChunkType, data: &[u8]) -> ChangeHash { + let mut out = vec![u8::from(typ)]; + leb128::write::unsigned(&mut out, data.len() as u64).unwrap(); + out.extend(data.as_ref()); + let hash_result = Sha256::digest(out); + let array: [u8; 32] = hash_result.into(); + ChangeHash(array) +} diff --git a/automerge/src/storage/columns.rs b/automerge/src/storage/columns.rs new file mode 100644 index 00000000..2ff6fa1f --- /dev/null +++ b/automerge/src/storage/columns.rs @@ -0,0 +1,355 @@ +/// This module contains types which represent the column metadata which is encoded in the columnar +/// storage format specified in [1]. In this format metadata about each column is packed into a 32 +/// bit integer, which is represented by the types in `column_specification`. The column data in +/// the format is a sequence of (`ColumnSpecification`, `usize`) pairs where each pair represents +/// the type of the column and the length of the column in the data which follows, these pairs are +/// represented by `RawColumn` and `RawColumns`. Some columns are actually composites of several +/// underlying columns and so not every `RawColumns` is valid. The types in `column` and +/// `column_builder` take a `RawColumns` and produce a `Columns` - which is a valid set of possibly +/// composite column metadata. 
+/// +/// There are two typical workflows: +/// +/// ## Reading +/// * First parse a `RawColumns` from the underlying data using `RawColumns::parse` +/// * Ensure that the columns are decompressed using `RawColumns::decompress` (checking first if +/// you can avoid this using `RawColumns::uncompressed`) +/// * Parse the `RawColumns` into a `Columns` using `Columns::parse` +/// +/// ## Writing +/// * Construct a `RawColumns` +/// * Compress using `RawColumns::compress` +/// * Write to output using `RawColumns::write` +/// +/// [1]: https://alexjg.github.io/automerge-storage-docs/#_columnar_storage_format +use std::ops::Range; + +mod column_specification; +pub(crate) use column_specification::{ColumnId, ColumnSpec, ColumnType}; +mod column; +pub(crate) use column::Column; +mod column_builder; +pub(crate) use column_builder::{ + AwaitingRawColumnValueBuilder, ColumnBuilder, GroupAwaitingValue, GroupBuilder, +}; + +pub(crate) mod raw_column; +pub(crate) use raw_column::{RawColumn, RawColumns}; + +#[derive(Debug, thiserror::Error)] +#[error("mismatching column at {index}.")] +pub(crate) struct MismatchingColumn { + pub(crate) index: usize, +} + +pub(crate) mod compression { + #[derive(Clone, Debug)] + pub(crate) struct Unknown; + #[derive(Clone, Debug)] + pub(crate) struct Uncompressed; + + /// A witness for what we know about whether or not a column is compressed + pub(crate) trait ColumnCompression {} + impl ColumnCompression for Unknown {} + impl ColumnCompression for Uncompressed {} +} + +/// `Columns` represents a sequence of "logical" columns. "Logical" in this sense means that +/// each column produces one value, but may be composed of multiple [`RawColumn`]s. For example, in a +/// logical column containing values there are two `RawColumn`s, one for the metadata about the +/// values, and one for the values themselves. 
+#[derive(Clone, Debug)] +pub(crate) struct Columns { + columns: Vec, +} + +impl Columns { + pub(crate) fn empty() -> Self { + Self { + columns: Vec::new(), + } + } + + pub(crate) fn append(&mut self, col: Column) { + self.columns.push(col) + } + + pub(crate) fn parse<'a, I: Iterator>>( + data_size: usize, + cols: I, + ) -> Result { + let mut parser = ColumnLayoutParser::new(data_size, None); + for raw_col in cols { + parser.add_column(raw_col.spec(), raw_col.data())?; + } + parser.build() + } +} + +impl FromIterator for Result { + fn from_iter>(iter: T) -> Self { + let iter = iter.into_iter(); + let mut result = Vec::with_capacity(iter.size_hint().1.unwrap_or(0)); + let mut last_column: Option = None; + for col in iter { + if let Some(last_col) = last_column { + if col.spec().normalize() < last_col.normalize() { + return Err(BadColumnLayout::OutOfOrder); + } + } + last_column = Some(col.spec()); + result.push(col); + } + Ok(Columns { columns: result }) + } +} + +impl IntoIterator for Columns { + type Item = Column; + type IntoIter = std::vec::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.columns.into_iter() + } +} + +#[derive(Debug, thiserror::Error)] +pub(crate) enum BadColumnLayout { + #[error("duplicate column specifications: {0}")] + DuplicateColumnSpecs(u32), + #[error("out of order columns")] + OutOfOrder, + #[error("nested group")] + NestedGroup, + #[error("raw value column without metadata column")] + LoneRawValueColumn, + #[error("value metadata followed by value column with different column ID")] + MismatchingValueMetadataId, + #[error("non contiguous columns")] + NonContiguousColumns, + #[error("data out of range")] + DataOutOfRange, +} + +struct ColumnLayoutParser { + columns: Vec, + last_spec: Option, + state: LayoutParserState, + total_data_size: usize, +} + +enum LayoutParserState { + Ready, + InValue(AwaitingRawColumnValueBuilder), + InGroup(ColumnId, GroupParseState), +} + +#[derive(Debug)] +enum GroupParseState { + 
Ready(GroupBuilder), + InValue(GroupAwaitingValue), +} + +impl ColumnLayoutParser { + fn new(data_size: usize, size_hint: Option) -> Self { + ColumnLayoutParser { + columns: Vec::with_capacity(size_hint.unwrap_or(0)), + last_spec: None, + state: LayoutParserState::Ready, + total_data_size: data_size, + } + } + + fn build(mut self) -> Result { + let columns = match self.state { + LayoutParserState::Ready => self.columns, + LayoutParserState::InValue(mut builder) => { + self.columns.push(builder.build((0..0).into())); + self.columns + } + LayoutParserState::InGroup(_, groupstate) => { + match groupstate { + GroupParseState::InValue(mut builder) => { + self.columns.push(builder.finish_empty().finish()); + } + GroupParseState::Ready(mut builder) => { + self.columns.push(builder.finish()); + } + }; + self.columns + } + }; + Ok(Columns { columns }) + } + + #[tracing::instrument(skip(self), err)] + fn add_column( + &mut self, + column: ColumnSpec, + range: Range, + ) -> Result<(), BadColumnLayout> { + self.check_contiguous(&range)?; + self.check_bounds(&range)?; + if let Some(last_spec) = self.last_spec { + if last_spec.normalize() > column.normalize() { + return Err(BadColumnLayout::OutOfOrder); + } else if last_spec == column { + return Err(BadColumnLayout::DuplicateColumnSpecs(column.into())); + } + } + match &mut self.state { + LayoutParserState::Ready => match column.col_type() { + ColumnType::Group => { + self.state = LayoutParserState::InGroup( + column.id(), + GroupParseState::Ready(ColumnBuilder::start_group(column, range.into())), + ); + Ok(()) + } + ColumnType::ValueMetadata => { + self.state = LayoutParserState::InValue(ColumnBuilder::start_value( + column, + range.into(), + )); + Ok(()) + } + ColumnType::Value => Err(BadColumnLayout::LoneRawValueColumn), + ColumnType::Actor => { + self.columns + .push(ColumnBuilder::build_actor(column, range.into())); + Ok(()) + } + ColumnType::String => { + self.columns + .push(ColumnBuilder::build_string(column, 
range.into())); + Ok(()) + } + ColumnType::Integer => { + self.columns + .push(ColumnBuilder::build_integer(column, range.into())); + Ok(()) + } + ColumnType::DeltaInteger => { + self.columns + .push(ColumnBuilder::build_delta_integer(column, range.into())); + Ok(()) + } + ColumnType::Boolean => { + self.columns + .push(ColumnBuilder::build_boolean(column, range.into())); + Ok(()) + } + }, + LayoutParserState::InValue(builder) => match column.col_type() { + ColumnType::Value => { + if builder.id() != column.id() { + return Err(BadColumnLayout::MismatchingValueMetadataId); + } + self.columns.push(builder.build(range.into())); + self.state = LayoutParserState::Ready; + Ok(()) + } + _ => { + self.columns.push(builder.build((0..0).into())); + self.state = LayoutParserState::Ready; + self.add_column(column, range) + } + }, + LayoutParserState::InGroup(id, group_state) => { + if *id != column.id() { + match group_state { + GroupParseState::Ready(b) => self.columns.push(b.finish()), + GroupParseState::InValue(b) => self.columns.push(b.finish_empty().finish()), + }; + std::mem::swap(&mut self.state, &mut LayoutParserState::Ready); + self.add_column(column, range) + } else { + match group_state { + GroupParseState::Ready(builder) => match column.col_type() { + ColumnType::Group => Err(BadColumnLayout::NestedGroup), + ColumnType::Value => Err(BadColumnLayout::LoneRawValueColumn), + ColumnType::ValueMetadata => { + *group_state = + GroupParseState::InValue(builder.start_value(column, range)); + Ok(()) + } + ColumnType::Actor => { + builder.add_actor(column, range); + Ok(()) + } + ColumnType::Boolean => { + builder.add_boolean(column, range); + Ok(()) + } + ColumnType::DeltaInteger => { + builder.add_delta_integer(column, range); + Ok(()) + } + ColumnType::Integer => { + builder.add_integer(column, range); + Ok(()) + } + ColumnType::String => { + builder.add_string(column, range); + Ok(()) + } + }, + GroupParseState::InValue(builder) => match column.col_type() { + 
ColumnType::Value => { + *group_state = GroupParseState::Ready(builder.finish_value(range)); + Ok(()) + } + _ => { + *group_state = GroupParseState::Ready(builder.finish_empty()); + self.add_column(column, range) + } + }, + } + } + } + } + } + + fn check_contiguous(&self, next_range: &Range) -> Result<(), BadColumnLayout> { + match &self.state { + LayoutParserState::Ready => { + if let Some(prev) = self.columns.last() { + if prev.range().end != next_range.start { + tracing::error!(prev=?prev.range(), next=?next_range, "it's here"); + Err(BadColumnLayout::NonContiguousColumns) + } else { + Ok(()) + } + } else { + Ok(()) + } + } + LayoutParserState::InValue(builder) => { + if builder.meta_range().end() != next_range.start { + Err(BadColumnLayout::NonContiguousColumns) + } else { + Ok(()) + } + } + LayoutParserState::InGroup(_, group_state) => { + let end = match group_state { + GroupParseState::InValue(b) => b.range().end, + GroupParseState::Ready(b) => b.range().end, + }; + if end != next_range.start { + Err(BadColumnLayout::NonContiguousColumns) + } else { + Ok(()) + } + } + } + } + + fn check_bounds(&self, next_range: &Range) -> Result<(), BadColumnLayout> { + if next_range.end > self.total_data_size { + Err(BadColumnLayout::DataOutOfRange) + } else { + Ok(()) + } + } +} diff --git a/automerge/src/storage/columns/column.rs b/automerge/src/storage/columns/column.rs new file mode 100644 index 00000000..a7636b56 --- /dev/null +++ b/automerge/src/storage/columns/column.rs @@ -0,0 +1,42 @@ +use std::ops::Range; + +use crate::columnar_2::column_range::generic::GenericColumnRange; + +use super::{ColumnId, ColumnSpec, ColumnType}; + +/// A combination of a column specification and the range of data associated with it. Note that +/// multiple (adjacent) ranges can be associated with one column as some columns are composite. +/// This is encapsulated in the `GenericColumnRange` type. 
+#[derive(Clone, Debug)] +pub(crate) struct Column { + spec: ColumnSpec, + range: GenericColumnRange, +} + +impl Column { + pub(crate) fn new(spec: ColumnSpec, range: GenericColumnRange) -> Column { + Self { spec, range } + } +} + +impl Column { + pub(crate) fn range(&self) -> Range { + self.range.range() + } + + pub(crate) fn into_ranges(self) -> GenericColumnRange { + self.range + } + + pub(crate) fn col_type(&self) -> ColumnType { + self.spec.col_type() + } + + pub(crate) fn id(&self) -> ColumnId { + self.spec.id() + } + + pub(crate) fn spec(&self) -> ColumnSpec { + self.spec + } +} diff --git a/automerge/src/storage/columns/column_builder.rs b/automerge/src/storage/columns/column_builder.rs new file mode 100644 index 00000000..d33785e5 --- /dev/null +++ b/automerge/src/storage/columns/column_builder.rs @@ -0,0 +1,199 @@ +use std::ops::Range; + +use crate::columnar_2::column_range::{ + generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange}, + BooleanRange, DeltaRange, RawRange, RleRange, ValueRange, +}; + +use super::{Column, ColumnId, ColumnSpec}; + +pub(crate) struct ColumnBuilder; + +impl ColumnBuilder { + pub(crate) fn build_actor(spec: ColumnSpec, range: RleRange) -> Column { + Column::new( + spec, + GenericColumnRange::Simple(SimpleColRange::RleInt(range)), + ) + } + + pub(crate) fn build_string(spec: ColumnSpec, range: RleRange) -> Column { + Column::new( + spec, + GenericColumnRange::Simple(SimpleColRange::RleString(range)), + ) + } + + pub(crate) fn build_integer(spec: ColumnSpec, range: RleRange) -> Column { + Column::new( + spec, + GenericColumnRange::Simple(SimpleColRange::RleInt(range)), + ) + } + + pub(crate) fn build_delta_integer(spec: ColumnSpec, range: DeltaRange) -> Column { + Column::new( + spec, + GenericColumnRange::Simple(SimpleColRange::Delta(range)), + ) + } + + pub(crate) fn build_boolean(spec: ColumnSpec, range: BooleanRange) -> Column { + Column::new( + spec, + 
GenericColumnRange::Simple(SimpleColRange::Boolean(range)), + ) + } + + pub(crate) fn start_value( + spec: ColumnSpec, + meta: RleRange, + ) -> AwaitingRawColumnValueBuilder { + AwaitingRawColumnValueBuilder { spec, meta } + } + + pub(crate) fn start_group(spec: ColumnSpec, num: RleRange) -> GroupBuilder { + GroupBuilder { + spec, + num_range: num, + columns: Vec::new(), + } + } +} + +pub(crate) struct AwaitingRawColumnValueBuilder { + spec: ColumnSpec, + meta: RleRange, +} + +impl AwaitingRawColumnValueBuilder { + pub(crate) fn id(&self) -> ColumnId { + self.spec.id() + } + + pub(crate) fn meta_range(&self) -> &RleRange { + &self.meta + } + + pub(crate) fn build(&mut self, raw: RawRange) -> Column { + Column::new( + self.spec, + GenericColumnRange::Value(ValueRange::new(self.meta.clone(), raw)), + ) + } +} + +#[derive(Debug)] +pub(crate) struct GroupBuilder { + spec: ColumnSpec, + num_range: RleRange, + columns: Vec, +} + +impl GroupBuilder { + pub(crate) fn range(&self) -> Range { + let start = self.num_range.start(); + let end = self + .columns + .last() + .map(|c| c.range().end) + .unwrap_or_else(|| self.num_range.end()); + start..end + } + + pub(crate) fn add_actor(&mut self, _spec: ColumnSpec, range: Range) { + self.columns + .push(GroupedColumnRange::Simple(SimpleColRange::RleInt( + range.into(), + ))); + } + + pub(crate) fn add_string(&mut self, _spec: ColumnSpec, range: Range) { + self.columns + .push(GroupedColumnRange::Simple(SimpleColRange::RleString( + range.into(), + ))); + } + + pub(crate) fn add_integer(&mut self, _spec: ColumnSpec, range: Range) { + self.columns + .push(GroupedColumnRange::Simple(SimpleColRange::RleInt( + range.into(), + ))); + } + + pub(crate) fn add_delta_integer(&mut self, _spec: ColumnSpec, range: Range) { + self.columns + .push(GroupedColumnRange::Simple(SimpleColRange::Delta( + range.into(), + ))); + } + + pub(crate) fn add_boolean(&mut self, _spec: ColumnSpec, range: Range) { + self.columns + 
.push(GroupedColumnRange::Simple(SimpleColRange::Boolean( + range.into(), + ))); + } + + pub(crate) fn start_value( + &mut self, + _spec: ColumnSpec, + meta: Range, + ) -> GroupAwaitingValue { + GroupAwaitingValue { + spec: self.spec, + num_range: self.num_range.clone(), + columns: std::mem::take(&mut self.columns), + val_meta: meta.into(), + } + } + + pub(crate) fn finish(&mut self) -> Column { + Column::new( + self.spec, + GenericColumnRange::Group(GroupRange::new( + self.num_range.clone(), + std::mem::take(&mut self.columns), + )), + ) + } +} + +#[derive(Debug)] +pub(crate) struct GroupAwaitingValue { + spec: ColumnSpec, + num_range: RleRange, + columns: Vec, + val_meta: RleRange, +} + +impl GroupAwaitingValue { + pub(crate) fn finish_empty(&mut self) -> GroupBuilder { + self.columns.push(GroupedColumnRange::Value(ValueRange::new( + self.val_meta.clone(), + (0..0).into(), + ))); + GroupBuilder { + spec: self.spec, + num_range: self.num_range.clone(), + columns: std::mem::take(&mut self.columns), + } + } + + pub(crate) fn finish_value(&mut self, raw: Range) -> GroupBuilder { + self.columns.push(GroupedColumnRange::Value(ValueRange::new( + self.val_meta.clone(), + raw.into(), + ))); + GroupBuilder { + spec: self.spec, + num_range: self.num_range.clone(), + columns: std::mem::take(&mut self.columns), + } + } + + pub(crate) fn range(&self) -> Range { + self.num_range.start()..self.val_meta.end() + } +} diff --git a/automerge/src/storage/columns/column_specification.rs b/automerge/src/storage/columns/column_specification.rs new file mode 100644 index 00000000..5bde0e7a --- /dev/null +++ b/automerge/src/storage/columns/column_specification.rs @@ -0,0 +1,285 @@ +/// An implementation of column specifications as specified in [1] +/// +/// [1]: https://alexjg.github.io/automerge-storage-docs/#column-specifications +#[derive(Eq, PartialEq, Clone, Copy)] +pub(crate) struct ColumnSpec(u32); + +impl ColumnSpec { + pub(crate) fn new(id: ColumnId, col_type: ColumnType, 
deflate: bool) -> Self { + let mut raw = id.0 << 4; + raw |= u8::from(col_type) as u32; + if deflate { + raw |= 0b00001000; + } else { + raw &= 0b11110111; + } + ColumnSpec(raw) + } + + pub(crate) fn col_type(&self) -> ColumnType { + self.0.to_be_bytes()[3].into() + } + + pub(crate) fn id(&self) -> ColumnId { + ColumnId(self.0 >> 4) + } + + pub(crate) fn deflate(&self) -> bool { + self.0 & 0b00001000 > 0 + } + + pub(crate) fn deflated(&self) -> Self { + Self::new(self.id(), self.col_type(), true) + } + + pub(crate) fn inflated(&self) -> Self { + Self::new(self.id(), self.col_type(), false) + } + + pub(crate) fn normalize(&self) -> Normalized { + Normalized(self.0 & 0b11110111) + } +} + +#[derive(PartialEq, PartialOrd)] +pub(crate) struct Normalized(u32); + +impl std::fmt::Debug for ColumnSpec { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "ColumnSpec(id: {:?}, type: {}, deflate: {})", + self.id(), + self.col_type(), + self.deflate() + ) + } +} + +#[derive(Eq, PartialEq, Clone, Copy)] +pub(crate) struct ColumnId(u32); + +impl ColumnId { + pub(crate) const fn new(raw: u32) -> Self { + ColumnId(raw) + } +} + +impl From for ColumnId { + fn from(raw: u32) -> Self { + Self(raw) + } +} + +impl std::fmt::Debug for ColumnId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +/// The differente possible column types, as specified in [1] +/// +/// [1]: https://alexjg.github.io/automerge-storage-docs/#column-specifications +#[derive(Eq, PartialEq, Clone, Copy, Debug)] +pub(crate) enum ColumnType { + Group, + Actor, + Integer, + DeltaInteger, + Boolean, + String, + ValueMetadata, + Value, +} + +impl std::fmt::Display for ColumnType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Group => write!(f, "Group"), + Self::Actor => write!(f, "Actor"), + Self::Integer => write!(f, "Integer"), + Self::DeltaInteger => write!(f, "DeltaInteger"), + 
Self::Boolean => write!(f, "Boolean"), + Self::String => write!(f, "String"), + Self::ValueMetadata => write!(f, "ValueMetadata"), + Self::Value => write!(f, "Value"), + } + } +} + +impl From for ColumnType { + fn from(v: u8) -> Self { + let type_bits = v & 0b00000111; + match type_bits { + 0 => Self::Group, + 1 => Self::Actor, + 2 => Self::Integer, + 3 => Self::DeltaInteger, + 4 => Self::Boolean, + 5 => Self::String, + 6 => Self::ValueMetadata, + 7 => Self::Value, + _ => unreachable!(), + } + } +} + +impl From for u8 { + fn from(ct: ColumnType) -> Self { + match ct { + ColumnType::Group => 0, + ColumnType::Actor => 1, + ColumnType::Integer => 2, + ColumnType::DeltaInteger => 3, + ColumnType::Boolean => 4, + ColumnType::String => 5, + ColumnType::ValueMetadata => 6, + ColumnType::Value => 7, + } + } +} + +impl From for ColumnSpec { + fn from(raw: u32) -> Self { + ColumnSpec(raw) + } +} + +impl From for u32 { + fn from(spec: ColumnSpec) -> Self { + spec.0 + } +} + +impl From<[u8; 4]> for ColumnSpec { + fn from(raw: [u8; 4]) -> Self { + u32::from_be_bytes(raw).into() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn column_spec_encoding() { + struct Scenario { + id: ColumnId, + col_type: ColumnType, + int_val: u32, + } + + let scenarios = vec![ + Scenario { + id: ColumnId(7), + col_type: ColumnType::Group, + int_val: 112, + }, + Scenario { + id: ColumnId(0), + col_type: ColumnType::Actor, + int_val: 1, + }, + Scenario { + id: ColumnId(0), + col_type: ColumnType::Integer, + int_val: 2, + }, + Scenario { + id: ColumnId(1), + col_type: ColumnType::DeltaInteger, + int_val: 19, + }, + Scenario { + id: ColumnId(3), + col_type: ColumnType::Boolean, + int_val: 52, + }, + Scenario { + id: ColumnId(1), + col_type: ColumnType::String, + int_val: 21, + }, + Scenario { + id: ColumnId(5), + col_type: ColumnType::ValueMetadata, + int_val: 86, + }, + Scenario { + id: ColumnId(5), + col_type: ColumnType::Value, + int_val: 87, + }, + ]; + + for (index, scenario) in 
scenarios.into_iter().enumerate() { + let spec = ColumnSpec::new(scenario.id, scenario.col_type, false); + + let encoded_val = u32::from(spec); + if encoded_val != scenario.int_val { + panic!( + "Scenario {} failed encoding: expected {} but got {}", + index + 1, + scenario.int_val, + encoded_val + ); + } + + if spec.col_type() != scenario.col_type { + panic!( + "Scenario {} failed col type: expected {:?} but got {:?}", + index + 1, + scenario.col_type, + spec.col_type() + ); + } + + if spec.deflate() { + panic!( + "Scenario {} failed: spec returned true for deflate, should have been false", + index + 1 + ); + } + + if spec.id() != scenario.id { + panic!( + "Scenario {} failed id: expected {:?} but got {:?}", + index + 1, + scenario.id, + spec.id() + ); + } + + let deflated = ColumnSpec::new(scenario.id, scenario.col_type, true); + + if deflated.id() != spec.id() { + panic!("Scenario {} failed deflate id test", index + 1); + } + + if deflated.col_type() != spec.col_type() { + panic!("Scenario {} failed col type test", index + 1); + } + + if !deflated.deflate() { + panic!( + "Scenario {} failed: when deflate bit set deflate returned false", + index + 1 + ); + } + + let expected = scenario.int_val | 0b00001000; + if expected != u32::from(deflated) { + panic!( + "Scenario {} failed deflate bit test, expected {} got {}", + index + 1, + expected, + u32::from(deflated) + ); + } + + if deflated.normalize() != spec.normalize() { + panic!("Scenario {} failed normalize test", index + 1); + } + } + } +} diff --git a/automerge/src/storage/columns/raw_column.rs b/automerge/src/storage/columns/raw_column.rs new file mode 100644 index 00000000..b37f73e3 --- /dev/null +++ b/automerge/src/storage/columns/raw_column.rs @@ -0,0 +1,263 @@ +use std::{io::Read, marker::PhantomData, ops::Range}; + +use crate::storage::parse; + +use super::{compression, ColumnSpec}; + +/// This is a "raw" column in the sense that it is just the column specification[1] and range. 
This +/// is in contrast to [`super::Column`] which is aware of composite columns such as value columns[2] and +/// group columns[3]. +/// +/// `RawColumn` is generally an intermediary object which is parsed into a [`super::Column`]. +/// +/// The type parameter `T` is a witness to whether this column is compressed. If `T: +/// compression::Uncompressed` then we have proved that this column is not compressed, otherwise it +/// may be compressed. +/// +/// [1]: https://alexjg.github.io/automerge-storage-docs/#column-specifications +/// [2]: https://alexjg.github.io/automerge-storage-docs/#raw-value-columns +/// [3]: https://alexjg.github.io/automerge-storage-docs/#group-columns +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct RawColumn { + spec: ColumnSpec, + /// The location of the data in the column data block. Note that this range starts at the + /// beginning of the column data block - i.e. the `data` attribute of the first column in the + /// column data block will be 0 - not at the start of the chunk. 
+ data: Range, + _phantom: PhantomData, +} + +impl RawColumn { + pub(crate) fn new(spec: ColumnSpec, data: Range) -> Self { + Self { + spec: ColumnSpec::new(spec.id(), spec.col_type(), false), + data, + _phantom: PhantomData, + } + } +} + +impl RawColumn { + pub(crate) fn spec(&self) -> ColumnSpec { + self.spec + } + + pub(crate) fn data(&self) -> Range { + self.data.clone() + } + + fn compress(&self, input: &[u8], out: &mut Vec, threshold: usize) -> (ColumnSpec, usize) { + let (spec, len) = if self.data.len() < threshold || self.spec.deflate() { + out.extend(&input[self.data.clone()]); + (self.spec, self.data.len()) + } else { + let mut deflater = flate2::bufread::DeflateEncoder::new( + &input[self.data.clone()], + flate2::Compression::default(), + ); + //This unwrap should be okay as we're reading and writing to in memory buffers + (self.spec.deflated(), deflater.read_to_end(out).unwrap()) + }; + (spec, len) + } + + pub(crate) fn uncompressed(&self) -> Option> { + if self.spec.deflate() { + None + } else { + Some(RawColumn { + spec: self.spec, + data: self.data.clone(), + _phantom: PhantomData, + }) + } + } + + fn decompress(&self, input: &[u8], out: &mut Vec) -> (ColumnSpec, usize) { + let len = if self.spec.deflate() { + let mut inflater = flate2::bufread::DeflateDecoder::new(&input[self.data.clone()]); + inflater.read_to_end(out).unwrap() + } else { + out.extend(&input[self.data.clone()]); + self.data.len() + }; + (self.spec.inflated(), len) + } +} + +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct RawColumns(Vec>); + +impl RawColumns { + /// Returns `Some` if no column in this set of columns is marked as compressed + pub(crate) fn uncompressed(&self) -> Option> { + let mut result = Vec::with_capacity(self.0.len()); + for col in &self.0 { + if let Some(uncomp) = col.uncompressed() { + result.push(uncomp); + } else { + return None; + } + } + Some(RawColumns(result)) + } + + /// Write each column in `input` represented by `self` into `out`, possibly 
compressing. + /// + /// # Returns + /// The `RawColumns` corresponding to the data written to `out` + /// + /// # Panics + /// * If any of the ranges in `self` is outside the bounds of `input` + pub(crate) fn compress( + &self, + input: &[u8], + out: &mut Vec, + threshold: usize, + ) -> RawColumns { + let mut result = Vec::with_capacity(self.0.len()); + let mut start = 0; + for col in &self.0 { + let (spec, len) = col.compress(input, out, threshold); + result.push(RawColumn { + spec, + data: start..(start + len), + _phantom: PhantomData::, + }); + start += len; + } + RawColumns(result) + } + + /// Read each column from `input` and write to `out`, decompressing any compressed columns + /// + /// # Returns + /// The `RawColumns` corresponding to the data written to `out` + /// + /// # Panics + /// * If any of the ranges in `self` is outside the bounds of `input` + pub(crate) fn uncompress( + &self, + input: &[u8], + out: &mut Vec, + ) -> RawColumns { + let mut result = Vec::with_capacity(self.0.len()); + let mut start = 0; + for col in &self.0 { + let (spec, len) = if let Some(decomp) = col.uncompressed() { + out.extend(&input[decomp.data.clone()]); + (decomp.spec, decomp.data.len()) + } else { + col.decompress(input, out) + }; + result.push(RawColumn { + spec, + data: start..(start + len), + _phantom: PhantomData::, + }); + start += len; + } + RawColumns(result) + } +} + +impl FromIterator> for RawColumns { + fn from_iter>>(iter: U) -> Self { + Self(iter.into_iter().filter(|c| !c.data.is_empty()).collect()) + } +} + +impl FromIterator<(ColumnSpec, Range)> for RawColumns { + fn from_iter)>>(iter: T) -> Self { + Self( + iter.into_iter() + .filter_map(|(spec, data)| { + if data.is_empty() { + None + } else { + Some(RawColumn { + spec, + data, + _phantom: PhantomData, + }) + } + }) + .collect(), + ) + } +} + +#[derive(Debug, thiserror::Error)] +pub(crate) enum ParseError { + #[error("columns were not in normalized order")] + NotInNormalOrder, + #[error(transparent)] + 
Leb128(#[from] parse::leb128::Error), +} + +impl RawColumns { + pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, E> + where + E: From, + { + let i = input; + let (i, num_columns) = parse::leb128_u64(i).map_err(|e| e.lift())?; + let (i, specs_and_lens) = parse::apply_n( + num_columns as usize, + parse::tuple2( + parse::map(parse::leb128_u32, ColumnSpec::from), + parse::leb128_u64, + ), + )(i) + .map_err(|e| e.lift())?; + let columns: Vec> = specs_and_lens + .into_iter() + .scan(0_usize, |offset, (spec, len)| { + let end = *offset + len as usize; + let data = *offset..end; + *offset = end; + Some(RawColumn { + spec, + data, + _phantom: PhantomData, + }) + }) + .collect::>(); + if !are_normal_sorted(&columns) { + return Err(parse::ParseError::Error( + ParseError::NotInNormalOrder.into(), + )); + } + Ok((i, RawColumns(columns))) + } +} + +impl RawColumns { + pub(crate) fn write(&self, out: &mut Vec) -> usize { + let mut written = leb128::write::unsigned(out, self.0.len() as u64).unwrap(); + for col in &self.0 { + written += leb128::write::unsigned(out, u32::from(col.spec) as u64).unwrap(); + written += leb128::write::unsigned(out, col.data.len() as u64).unwrap(); + } + written + } + + pub(crate) fn total_column_len(&self) -> usize { + self.0.iter().map(|c| c.data.len()).sum() + } + + pub(crate) fn iter<'a>(&'a self) -> impl Iterator> + '_ { + self.0.iter() + } +} + +fn are_normal_sorted(cols: &[RawColumn]) -> bool { + if cols.len() > 1 { + for (i, col) in cols[1..].iter().enumerate() { + if col.spec.normalize() < cols[i].spec.normalize() { + return false; + } + } + } + true +} diff --git a/automerge/src/storage/convert.rs b/automerge/src/storage/convert.rs new file mode 100644 index 00000000..48f83d03 --- /dev/null +++ b/automerge/src/storage/convert.rs @@ -0,0 +1,5 @@ +mod op_as_changeop; +pub(crate) use op_as_changeop::op_as_actor_id; + +mod op_as_docop; +pub(crate) use op_as_docop::op_as_docop; diff --git 
a/automerge/src/storage/convert/op_as_changeop.rs b/automerge/src/storage/convert/op_as_changeop.rs new file mode 100644 index 00000000..00b5e940 --- /dev/null +++ b/automerge/src/storage/convert/op_as_changeop.rs @@ -0,0 +1,128 @@ +/// Types for converting an OpTree op into a `ChangeOp` or a `DocOp` +use std::borrow::Cow; + +use crate::{ + convert, + op_set::OpSetMetadata, + storage::AsChangeOp, + types::{ActorId, Key, ObjId, Op, OpId, OpType, ScalarValue}, +}; + +/// Wrap an op in an implementation of `AsChangeOp` which represents actor IDs using a reference to +/// the actor ID stored in the metadata. +/// +/// Note that the methods of `AsChangeOp` will panic if the actor is missing from the metadata +pub(crate) fn op_as_actor_id<'a>( + obj: &'a ObjId, + op: &'a Op, + metadata: &'a OpSetMetadata, +) -> OpWithMetadata<'a> { + OpWithMetadata { obj, op, metadata } +} + +pub(crate) struct OpWithMetadata<'a> { + obj: &'a ObjId, + op: &'a Op, + metadata: &'a OpSetMetadata, +} + +impl<'a> OpWithMetadata<'a> { + fn wrap(&self, opid: &'a OpId) -> OpIdWithMetadata<'a> { + OpIdWithMetadata { + opid, + metadata: self.metadata, + } + } +} + +pub(crate) struct OpIdWithMetadata<'a> { + opid: &'a OpId, + metadata: &'a OpSetMetadata, +} + +impl<'a> convert::OpId<&'a ActorId> for OpIdWithMetadata<'a> { + fn counter(&self) -> u64 { + self.opid.counter() + } + + fn actor(&self) -> &'a ActorId { + self.metadata.actors.get(self.opid.actor()) + } +} + +pub(crate) struct PredWithMetadata<'a> { + op: &'a Op, + offset: usize, + metadata: &'a OpSetMetadata, +} + +impl<'a> ExactSizeIterator for PredWithMetadata<'a> { + fn len(&self) -> usize { + self.op.pred.len() + } +} + +impl<'a> Iterator for PredWithMetadata<'a> { + type Item = OpIdWithMetadata<'a>; + + fn next(&mut self) -> Option { + if let Some(op) = self.op.pred.get(self.offset) { + self.offset += 1; + Some(OpIdWithMetadata { + opid: op, + metadata: self.metadata, + }) + } else { + None + } + } +} + +impl<'a> AsChangeOp<'a> for 
OpWithMetadata<'a> { + type ActorId = &'a ActorId; + type OpId = OpIdWithMetadata<'a>; + type PredIter = PredWithMetadata<'a>; + + fn action(&self) -> u64 { + self.op.action.action_index() + } + + fn insert(&self) -> bool { + self.op.insert + } + + fn val(&self) -> Cow<'a, ScalarValue> { + match &self.op.action { + OpType::Make(..) | OpType::Delete => Cow::Owned(ScalarValue::Null), + OpType::Increment(i) => Cow::Owned(ScalarValue::Int(*i)), + OpType::Put(s) => Cow::Borrowed(s), + } + } + + fn obj(&self) -> convert::ObjId { + if self.obj.is_root() { + convert::ObjId::Root + } else { + convert::ObjId::Op(OpIdWithMetadata { + opid: self.obj.opid(), + metadata: self.metadata, + }) + } + } + + fn pred(&self) -> Self::PredIter { + PredWithMetadata { + op: self.op, + offset: 0, + metadata: self.metadata, + } + } + + fn key(&self) -> convert::Key<'a, Self::OpId> { + match &self.op.key { + Key::Map(idx) => convert::Key::Prop(Cow::Owned(self.metadata.props.get(*idx).into())), + Key::Seq(e) if e.is_head() => convert::Key::Elem(convert::ElemId::Head), + Key::Seq(e) => convert::Key::Elem(convert::ElemId::Op(self.wrap(&e.0))), + } + } +} diff --git a/automerge/src/storage/convert/op_as_docop.rs b/automerge/src/storage/convert/op_as_docop.rs new file mode 100644 index 00000000..8d237354 --- /dev/null +++ b/automerge/src/storage/convert/op_as_docop.rs @@ -0,0 +1,145 @@ +use std::borrow::Cow; + +use crate::{ + convert, + indexed_cache::IndexedCache, + storage::AsDocOp, + types::{ElemId, Key, ObjId, Op, OpId, OpType, ScalarValue}, +}; + +/// Create an [`AsDocOp`] implementation for a [`crate::types::Op`] +/// +/// # Arguments +/// * actors - A vector where the i'th element is the actor index of the document encoding of actor +/// i, as returned by [`OpSetMetadata.actors.encode_index`] +/// * props - An indexed cache containing the properties in this op_as_docop +/// * obj - The object ID this op refers too +/// * op - The op itself +/// +/// # Panics +/// +/// The methods of the 
resulting `AsDocOp` implementation will panic if any actor ID in the op +/// references an index not in `actors` or a property not in `props` +pub(crate) fn op_as_docop<'a>( + actors: &'a [usize], + props: &'a IndexedCache, + obj: &'a ObjId, + op: &'a Op, +) -> OpAsDocOp<'a> { + OpAsDocOp { + op, + obj, + actor_lookup: actors, + props, + } +} + +pub(crate) struct OpAsDocOp<'a> { + op: &'a Op, + obj: &'a ObjId, + actor_lookup: &'a [usize], + props: &'a IndexedCache, +} + +#[derive(Debug)] +pub(crate) struct DocOpId { + actor: usize, + counter: u64, +} + +impl convert::OpId for DocOpId { + fn actor(&self) -> usize { + self.actor + } + + fn counter(&self) -> u64 { + self.counter + } +} + +impl<'a> OpAsDocOp<'a> {} + +impl<'a> AsDocOp<'a> for OpAsDocOp<'a> { + type ActorId = usize; + type OpId = DocOpId; + type SuccIter = OpAsDocOpSuccIter<'a>; + + fn id(&self) -> Self::OpId { + translate(self.actor_lookup, &self.op.id) + } + + fn obj(&self) -> convert::ObjId { + if self.obj.is_root() { + convert::ObjId::Root + } else { + convert::ObjId::Op(translate(self.actor_lookup, self.obj.opid())) + } + } + + fn key(&self) -> convert::Key<'a, Self::OpId> { + match self.op.key { + Key::Map(idx) => convert::Key::Prop(Cow::Owned(self.props.get(idx).into())), + Key::Seq(e) if e.is_head() => convert::Key::Elem(convert::ElemId::Head), + Key::Seq(ElemId(o)) => { + convert::Key::Elem(convert::ElemId::Op(translate(self.actor_lookup, &o))) + } + } + } + + fn val(&self) -> Cow<'a, crate::ScalarValue> { + match &self.op.action { + OpType::Put(v) => Cow::Borrowed(v), + OpType::Increment(i) => Cow::Owned(ScalarValue::Int(*i)), + _ => Cow::Owned(ScalarValue::Null), + } + } + + fn succ(&self) -> Self::SuccIter { + OpAsDocOpSuccIter { + op: self.op, + offset: 0, + actor_index: self.actor_lookup, + } + } + + fn insert(&self) -> bool { + self.op.insert + } + + fn action(&self) -> u64 { + self.op.action.action_index() + } +} + +pub(crate) struct OpAsDocOpSuccIter<'a> { + op: &'a Op, + offset: usize, 
+ actor_index: &'a [usize], +} + +impl<'a> Iterator for OpAsDocOpSuccIter<'a> { + type Item = DocOpId; + + fn next(&mut self) -> Option { + if let Some(s) = self.op.succ.get(self.offset) { + self.offset += 1; + Some(translate(self.actor_index, s)) + } else { + None + } + } +} + +impl<'a> ExactSizeIterator for OpAsDocOpSuccIter<'a> { + fn len(&self) -> usize { + self.op.succ.len() + } +} + +fn translate<'a>(actor_lookup: &'a [usize], op: &'a OpId) -> DocOpId { + let index = actor_lookup[op.actor()]; + DocOpId { + actor: index, + counter: op.counter(), + } +} diff --git a/automerge/src/storage/document.rs b/automerge/src/storage/document.rs new file mode 100644 index 00000000..8f9dca86 --- /dev/null +++ b/automerge/src/storage/document.rs @@ -0,0 +1,335 @@ +use std::{borrow::Cow, ops::Range}; + +use super::{parse, shift_range, ChunkType, Columns, Header, RawColumns}; + +use crate::{convert, ActorId, ChangeHash}; + +mod doc_op_columns; +use doc_op_columns::DocOpColumns; +pub(crate) use doc_op_columns::{AsDocOp, DocOp, ReadDocOpError}; +mod doc_change_columns; +use doc_change_columns::DocChangeColumns; +pub(crate) use doc_change_columns::{AsChangeMeta, ChangeMetadata, ReadChangeError}; +mod compression; + +pub(crate) enum CompressConfig { + None, + Threshold(usize), +} + +#[derive(Debug)] +pub(crate) struct Document<'a> { + bytes: Cow<'a, [u8]>, + #[allow(dead_code)] + compressed_bytes: Option>, + header: Header, + actors: Vec, + heads: Vec, + op_metadata: DocOpColumns, + op_bytes: Range, + change_metadata: DocChangeColumns, + change_bytes: Range, + #[allow(dead_code)] + head_indices: Vec, +} + +#[derive(thiserror::Error, Debug)] +pub(crate) enum ParseError { + #[error(transparent)] + Leb128(#[from] parse::leb128::Error), + #[error(transparent)] + RawColumns(#[from] crate::storage::columns::raw_column::ParseError), + #[error("bad column layout for {column_type}s: {error}")] + BadColumnLayout { + column_type: &'static str, + error: super::columns::BadColumnLayout, + }, 
+ #[error(transparent)] + BadDocOps(#[from] doc_op_columns::Error), + #[error(transparent)] + BadDocChanges(#[from] doc_change_columns::ReadChangeError), +} + +impl<'a> Document<'a> { + /// Parse a document chunk. Input must be the entire chunk including the header and magic + /// bytes but the header must already have been parsed. That is to say, this is expected to be + /// used like so: + /// + /// ```rust,ignore + /// # use automerge::storage::{parse::{ParseResult, Input}, Document, Header}; + /// # fn main() -> ParseResult<(), ()> { + /// let chunkbytes: &[u8] = todo!(); + /// let input = Input::new(chunkbytes); + /// let (i, header) = Header::parse(input)?; + /// let (i, doc) = Document::parse(i, header)?; + /// # } + /// ``` + pub(crate) fn parse( + input: parse::Input<'a>, + header: Header, + ) -> parse::ParseResult<'a, Document<'a>, ParseError> { + let i = input; + + // Because some columns in a document may be compressed we do some funky stuff when + // parsing. As we're parsing the chunk we split the data into four parts: + // + // .----------------. + // | Prefix | + // |.--------------.| + // || Actors || + // || Heads || + // || Change Meta || + // || Ops Meta || + // |'--------------'| + // +----------------+ + // | Change data | + // +----------------+ + // | Ops data | + // +----------------+ + // | Suffix | + // |.--------------.| + // || Head indices || + // |'--------------'| + // '----------------' + // + // We record the range of each of these sections using `parse::range_of`. Later, we check + // if any of the column definitions in change meta or ops meta specify that their columns + // are compressed. If there are compressed columns then we copy the uncompressed parts of the + // input data to a new output vec, then decompress the compressed parts. 
Specifically we do + // the following: + // + // * Copy everything in prefix to the output buffer + // * If any of change columns are compressed, copy all of change data to the output buffer + // decompressing each compressed column + // * Likewise if any of ops columns are compressed copy the data decompressing as required + // * Finally copy the suffix + // + // The reason for all this work is that we end up keeping all of the data behind the + // document chunk in a single Vec, which plays nicely with the cache and makes dumping the + // document to disk or network straightforward. + + // parse everything in the prefix + let ( + i, + parse::RangeOf { + range: prefix, + value: (actors, heads, change_meta, ops_meta), + }, + ) = parse::range_of( + |i| -> parse::ParseResult<'_, _, ParseError> { + let (i, actors) = parse::length_prefixed(parse::actor_id)(i)?; + let (i, heads) = parse::length_prefixed(parse::change_hash)(i)?; + let (i, change_meta) = RawColumns::parse::(i)?; + let (i, ops_meta) = RawColumns::parse::(i)?; + Ok((i, (actors, heads, change_meta, ops_meta))) + }, + i, + )?; + + // parse the change data + let (i, parse::RangeOf { range: changes, .. }) = + parse::range_of(|i| parse::take_n(change_meta.total_column_len(), i), i)?; + + // parse the ops data + let (i, parse::RangeOf { range: ops, .. 
}) = + parse::range_of(|i| parse::take_n(ops_meta.total_column_len(), i), i)?; + + // parse the suffix + let ( + i, + parse::RangeOf { + range: suffix, + value: head_indices, + }, + ) = parse::range_of( + |i| parse::apply_n(heads.len(), parse::leb128_u64::)(i), + i, + )?; + + let compression::Decompressed { + change_bytes, + op_bytes, + uncompressed, + compressed, + changes, + ops, + } = compression::decompress(compression::Args { + prefix: prefix.start, + suffix: suffix.start, + original: Cow::Borrowed(input.bytes()), + changes: compression::Cols { + data: changes, + raw_columns: change_meta, + }, + ops: compression::Cols { + data: ops, + raw_columns: ops_meta, + }, + extra_args: (), + }); + + let ops_layout = Columns::parse(op_bytes.len(), ops.iter()).map_err(|e| { + parse::ParseError::Error(ParseError::BadColumnLayout { + column_type: "ops", + error: e, + }) + })?; + let ops_cols = + DocOpColumns::try_from(ops_layout).map_err(|e| parse::ParseError::Error(e.into()))?; + + let change_layout = Columns::parse(change_bytes.len(), changes.iter()).map_err(|e| { + parse::ParseError::Error(ParseError::BadColumnLayout { + column_type: "changes", + error: e, + }) + })?; + let change_cols = DocChangeColumns::try_from(change_layout) + .map_err(|e| parse::ParseError::Error(e.into()))?; + + Ok(( + i, + Document { + bytes: uncompressed, + compressed_bytes: compressed, + header, + actors, + heads, + op_metadata: ops_cols, + op_bytes, + change_metadata: change_cols, + change_bytes, + head_indices, + }, + )) + } + + pub(crate) fn new<'b, I, C, IC, D, O>( + mut actors: Vec, + heads_with_indices: Vec<(ChangeHash, usize)>, + ops: I, + changes: IC, + compress: CompressConfig, + ) -> Document<'static> + where + I: Iterator + Clone + ExactSizeIterator, + O: convert::OpId, + D: AsDocOp<'b, OpId = O>, + C: AsChangeMeta<'b>, + IC: Iterator + Clone, + { + let mut ops_out = Vec::new(); + let ops_meta = DocOpColumns::encode(ops, &mut ops_out); + + let mut change_out = Vec::new(); + let 
change_meta = DocChangeColumns::encode(changes, &mut change_out); + actors.sort_unstable(); + + let mut data = Vec::with_capacity(ops_out.len() + change_out.len()); + leb128::write::unsigned(&mut data, actors.len() as u64).unwrap(); + for actor in &actors { + leb128::write::unsigned(&mut data, actor.to_bytes().len() as u64).unwrap(); + data.extend(actor.to_bytes()); + } + leb128::write::unsigned(&mut data, heads_with_indices.len() as u64).unwrap(); + for (head, _) in &heads_with_indices { + data.extend(head.as_bytes()); + } + let prefix_len = data.len(); + + change_meta.raw_columns().write(&mut data); + ops_meta.raw_columns().write(&mut data); + let change_start = data.len(); + let change_end = change_start + change_out.len(); + data.extend(change_out); + let ops_start = data.len(); + let ops_end = ops_start + ops_out.len(); + data.extend(ops_out); + let suffix_start = data.len(); + + let head_indices = heads_with_indices + .iter() + .map(|(_, i)| *i as u64) + .collect::>(); + for index in &head_indices { + leb128::write::unsigned(&mut data, *index).unwrap(); + } + + let header = Header::new(ChunkType::Document, &data); + let mut bytes = Vec::with_capacity(data.len() + header.len()); + header.write(&mut bytes); + let header_len = bytes.len(); + bytes.extend(&data); + + let op_bytes = shift_range(ops_start..ops_end, header.len()); + let change_bytes = shift_range(change_start..change_end, header.len()); + + let compressed_bytes = if let CompressConfig::Threshold(threshold) = compress { + let compressed = Cow::Owned(compression::compress(compression::Args { + prefix: prefix_len + header.len(), + suffix: suffix_start + header.len(), + ops: compression::Cols { + raw_columns: ops_meta.raw_columns(), + data: op_bytes.clone(), + }, + changes: compression::Cols { + raw_columns: change_meta.raw_columns(), + data: change_bytes.clone(), + }, + original: Cow::Borrowed(&bytes), + extra_args: compression::CompressArgs { + threshold, + original_header_len: header_len, + }, + })); 
+ Some(compressed) + } else { + None + }; + + Document { + actors, + bytes: Cow::Owned(bytes), + compressed_bytes, + header, + heads: heads_with_indices.into_iter().map(|(h, _)| h).collect(), + op_metadata: ops_meta, + op_bytes, + change_metadata: change_meta, + change_bytes, + head_indices, + } + } + + pub(crate) fn iter_ops( + &'a self, + ) -> impl Iterator> + Clone + 'a { + self.op_metadata.iter(&self.bytes[self.op_bytes.clone()]) + } + + pub(crate) fn iter_changes( + &'a self, + ) -> impl Iterator, ReadChangeError>> + Clone + 'a { + self.change_metadata + .iter(&self.bytes[self.change_bytes.clone()]) + } + + pub(crate) fn into_bytes(self) -> Vec { + if let Some(compressed) = self.compressed_bytes { + compressed.into_owned() + } else { + self.bytes.into_owned() + } + } + + pub(crate) fn checksum_valid(&self) -> bool { + self.header.checksum_valid() + } + + pub(crate) fn actors(&self) -> &[ActorId] { + &self.actors + } + + pub(crate) fn heads(&self) -> &[ChangeHash] { + &self.heads + } +} diff --git a/automerge/src/storage/document/compression.rs b/automerge/src/storage/document/compression.rs new file mode 100644 index 00000000..f7daa127 --- /dev/null +++ b/automerge/src/storage/document/compression.rs @@ -0,0 +1,338 @@ +use std::{borrow::Cow, ops::Range}; + +use crate::storage::{columns::compression, shift_range, ChunkType, Header, RawColumns}; + +pub(super) struct Args<'a, T: compression::ColumnCompression, DirArgs> { + /// The original data of the entire document chunk (compressed or uncompressed) + pub(super) original: Cow<'a, [u8]>, + /// The number of bytes in the original before the beginning of the change column metadata + pub(super) prefix: usize, + /// The offset in the original after the end of the ops column data + pub(super) suffix: usize, + /// The column data for the changes + pub(super) changes: Cols, + /// The column data for the ops + pub(super) ops: Cols, + /// Additional arguments specific to the direction (compression or uncompression) + 
pub(super) extra_args: DirArgs, +} + +pub(super) struct CompressArgs { + pub(super) threshold: usize, + pub(super) original_header_len: usize, +} + +/// Compress a document chunk returning the compressed bytes +pub(super) fn compress<'a>(args: Args<'a, compression::Uncompressed, CompressArgs>) -> Vec { + let header_len = args.extra_args.original_header_len; + let threshold = args.extra_args.threshold; + Compression::<'a, Compressing, _>::new( + args, + Compressing { + threshold, + header_len, + }, + ) + .changes() + .ops() + .write_data() + .finish() +} + +pub(super) fn decompress<'a>(args: Args<'a, compression::Unknown, ()>) -> Decompressed<'a> { + match ( + args.changes.raw_columns.uncompressed(), + args.ops.raw_columns.uncompressed(), + ) { + (Some(changes), Some(ops)) => Decompressed { + changes, + ops, + compressed: None, + uncompressed: args.original, + change_bytes: args.changes.data, + op_bytes: args.ops.data, + }, + _ => Compression::<'a, Decompressing, _>::new(args, Decompressing) + .changes() + .ops() + .write_data() + .finish(), + } +} + +pub(super) struct Decompressed<'a> { + /// The original compressed data, if there was any + pub(super) compressed: Option>, + /// The final uncompressed data + pub(super) uncompressed: Cow<'a, [u8]>, + /// The ops column metadata + pub(super) ops: RawColumns, + /// The change column metadata + pub(super) changes: RawColumns, + /// The location of the change column data in the uncompressed data + pub(super) change_bytes: Range, + /// The location of the op column data in the uncompressed data + pub(super) op_bytes: Range, +} + +struct Compression<'a, D: Direction, S: CompressionState> { + args: Args<'a, D::In, D::Args>, + state: S, + direction: D, +} + +/// Some columns in the original data +pub(super) struct Cols { + /// The metadata for these columns + pub(super) raw_columns: RawColumns, + /// The location in the original chunk of the data for these columns + pub(super) data: Range, +} + +// Compression and 
decompression involve almost the same steps in either direction. This trait +// encapsulates that. +trait Direction: std::fmt::Debug { + type Out: compression::ColumnCompression; + type In: compression::ColumnCompression; + type Args; + + /// This method represents the (de)compression process for a direction. The arguments are: + /// + /// * cols - The columns we are processing + /// * input - the entire document chunk + /// * out - the vector to place the processed columns in + /// * meta_out - the vector to place processed column metadata in + fn process( + &self, + cols: &Cols, + input: &[u8], + out: &mut Vec, + meta_out: &mut Vec, + ) -> Cols; +} +#[derive(Debug)] +struct Compressing { + threshold: usize, + header_len: usize, +} + +impl Direction for Compressing { + type Out = compression::Unknown; + type In = compression::Uncompressed; + type Args = CompressArgs; + + fn process( + &self, + cols: &Cols, + input: &[u8], + out: &mut Vec, + meta_out: &mut Vec, + ) -> Cols { + let start = out.len(); + let raw_columns = cols + .raw_columns + .compress(&input[cols.data.clone()], out, self.threshold); + raw_columns.write(meta_out); + Cols { + data: start..out.len(), + raw_columns, + } + } +} + +#[derive(Debug)] +struct Decompressing; + +impl Direction for Decompressing { + type Out = compression::Uncompressed; + type In = compression::Unknown; + type Args = (); + + fn process( + &self, + cols: &Cols, + input: &[u8], + out: &mut Vec, + meta_out: &mut Vec, + ) -> Cols { + let start = out.len(); + let raw_columns = cols.raw_columns.uncompress(&input[cols.data.clone()], out); + raw_columns.write(meta_out); + Cols { + data: start..out.len(), + raw_columns, + } + } +} + +// Somewhat absurdly I (alex) kept getting the order of writing ops and changes wrong as well as +// the order that column metadata vs data should be written in. This is a type state to get the +// compiler to enforce that things are done in the right order. 
+trait CompressionState {} +impl CompressionState for Starting {} +impl CompressionState for Changes {} +impl CompressionState for ChangesAndOps {} +impl CompressionState for Finished {} + +/// We haven't done any processing yet +struct Starting { + /// The vector to write column data to + data_out: Vec, + /// The vector to write column metadata to + meta_out: Vec, +} + +/// We've processed the changes columns +struct Changes { + /// The `Cols` for the processed change columns + change_cols: Cols, + /// The vector to write column metadata to + meta_out: Vec, + /// The vector to write column data to + data_out: Vec, +} + +/// We've processed the ops columns +struct ChangesAndOps { + /// The `Cols` for the processed change columns + change_cols: Cols, + /// The `Cols` for the processed op columns + ops_cols: Cols, + /// The vector to write column metadata to + meta_out: Vec, + /// The vector to write column data to + data_out: Vec, +} + +/// We've written the column metadata and the op metadata for changes and ops to the output buffer +/// and added the prefix and suffix from the args. 
+struct Finished { + /// The `Cols` for the processed change columns + change_cols: Cols, + /// The `Cols` for the processed op columns + ops_cols: Cols, + /// The start of the change column metadata in the processed chunk + data_start: usize, + /// The processed chunk + out: Vec, +} + +impl<'a, D: Direction> Compression<'a, D, Starting> { + fn new(args: Args<'a, D::In, D::Args>, direction: D) -> Compression<'a, D, Starting> { + let mut meta_out = Vec::with_capacity(args.original.len() * 2); + meta_out.extend(&args.original[..args.prefix]); + Compression { + args, + direction, + state: Starting { + meta_out, + data_out: Vec::new(), + }, + } + } +} + +impl<'a, D: Direction> Compression<'a, D, Starting> { + fn changes(self) -> Compression<'a, D, Changes> { + let Starting { + mut data_out, + mut meta_out, + } = self.state; + let change_cols = self.direction.process( + &self.args.changes, + &self.args.original, + &mut data_out, + &mut meta_out, + ); + Compression { + args: self.args, + direction: self.direction, + state: Changes { + change_cols, + meta_out, + data_out, + }, + } + } +} + +impl<'a, D: Direction> Compression<'a, D, Changes> { + fn ops(self) -> Compression<'a, D, ChangesAndOps> { + let Changes { + change_cols, + mut meta_out, + mut data_out, + } = self.state; + let ops_cols = self.direction.process( + &self.args.ops, + &self.args.original, + &mut data_out, + &mut meta_out, + ); + Compression { + args: self.args, + direction: self.direction, + state: ChangesAndOps { + change_cols, + ops_cols, + meta_out, + data_out, + }, + } + } +} + +impl<'a, D: Direction> Compression<'a, D, ChangesAndOps> { + fn write_data(self) -> Compression<'a, D, Finished> { + let ChangesAndOps { + data_out, + mut meta_out, + change_cols, + ops_cols, + } = self.state; + let data_start = meta_out.len(); + meta_out.extend(&data_out); + meta_out.extend(&self.args.original[self.args.suffix..]); + Compression { + args: self.args, + direction: self.direction, + state: Finished { + ops_cols, 
+ change_cols, + out: meta_out, + data_start, + }, + } + } +} + +impl<'a> Compression<'a, Decompressing, Finished> { + fn finish(self) -> Decompressed<'a> { + let Finished { + change_cols, + ops_cols, + data_start, + out, + } = self.state; + Decompressed { + ops: ops_cols.raw_columns, + changes: change_cols.raw_columns, + uncompressed: Cow::Owned(out), + compressed: Some(self.args.original), + change_bytes: shift_range(change_cols.data, data_start), + op_bytes: shift_range(ops_cols.data, data_start), + } + } +} + +impl<'a> Compression<'a, Compressing, Finished> { + fn finish(self) -> Vec { + let Finished { out, .. } = self.state; + let headerless = &out[self.direction.header_len..]; + let header = Header::new(ChunkType::Document, headerless); + let mut result = Vec::with_capacity(header.len() + out.len()); + header.write(&mut result); + result.extend(headerless); + result + } +} diff --git a/automerge/src/storage/document/doc_change_columns.rs b/automerge/src/storage/document/doc_change_columns.rs new file mode 100644 index 00000000..0b1e15cd --- /dev/null +++ b/automerge/src/storage/document/doc_change_columns.rs @@ -0,0 +1,339 @@ +use std::{borrow::Cow, convert::TryFrom}; + +use crate::{ + columnar_2::{ + column_range::{ + generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange}, + DeltaRange, DepsIter, DepsRange, RleRange, ValueIter, ValueRange, + }, + encoding::{ColumnDecoder, DecodeColumnError, DeltaDecoder, RleDecoder}, + }, + storage::{ + columns::{compression, ColumnId, ColumnSpec, ColumnType}, + Columns, MismatchingColumn, RawColumn, RawColumns, + }, + types::ScalarValue, +}; + +const ACTOR_COL_ID: ColumnId = ColumnId::new(0); +const SEQ_COL_ID: ColumnId = ColumnId::new(0); +const MAX_OP_COL_ID: ColumnId = ColumnId::new(1); +const TIME_COL_ID: ColumnId = ColumnId::new(2); +const MESSAGE_COL_ID: ColumnId = ColumnId::new(3); +const DEPS_COL_ID: ColumnId = ColumnId::new(4); +const EXTRA_COL_ID: ColumnId = ColumnId::new(5); + 
+#[derive(Debug)] +pub(crate) struct ChangeMetadata<'a> { + pub(crate) actor: usize, + pub(crate) seq: u64, + pub(crate) max_op: u64, + pub(crate) timestamp: i64, + pub(crate) message: Option, + pub(crate) deps: Vec, + pub(crate) extra: Cow<'a, [u8]>, +} + +/// A row to be encoded as change metadata in the document format +/// +/// The lifetime `'a` is the lifetime of the extra bytes Cow. For types which cannot +/// provide a reference (e.g. because they are decoding from some columnar storage on each +/// iteration) this should be `'static`. +pub(crate) trait AsChangeMeta<'a> { + /// The type of the iterator over dependency indices + type DepsIter: Iterator + ExactSizeIterator; + + fn actor(&self) -> u64; + fn seq(&self) -> u64; + fn max_op(&self) -> u64; + fn timestamp(&self) -> i64; + fn message(&self) -> Option>; + fn deps(&self) -> Self::DepsIter; + fn extra(&self) -> Cow<'a, [u8]>; +} + +#[derive(Debug, Clone)] +pub(crate) struct DocChangeColumns { + actor: RleRange, + seq: DeltaRange, + max_op: DeltaRange, + time: DeltaRange, + message: RleRange, + deps: DepsRange, + extra: ValueRange, + #[allow(dead_code)] + other: Columns, +} + +impl DocChangeColumns { + pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> DocChangeColumnIter<'a> { + DocChangeColumnIter { + actors: self.actor.decoder(data), + seq: self.seq.decoder(data), + max_op: self.max_op.decoder(data), + time: self.time.decoder(data), + message: if self.message.is_empty() { + None + } else { + Some(self.message.decoder(data)) + }, + deps: self.deps.iter(data), + extra: ExtraDecoder { + val: self.extra.iter(data), + }, + } + } + + pub(crate) fn encode<'a, I, C>(changes: I, out: &mut Vec) -> DocChangeColumns + where + C: AsChangeMeta<'a>, + I: Iterator + Clone, + { + let actor = RleRange::::encode( + // TODO: make this fallible once iterators have a try_splice + changes.clone().map(|c| Some(c.actor())), + out, + ); + let seq = DeltaRange::encode(changes.clone().map(|c| Some(c.seq() as i64)), out); + let 
max_op = DeltaRange::encode(changes.clone().map(|c| Some(c.max_op() as i64)), out); + let time = DeltaRange::encode(changes.clone().map(|c| Some(c.timestamp())), out); + let message = RleRange::encode(changes.clone().map(|c| c.message()), out); + let deps = DepsRange::encode(changes.clone().map(|c| c.deps()), out); + let extra = ValueRange::encode( + changes.map(|c| Cow::Owned(ScalarValue::Bytes(c.extra().to_vec()))), + out, + ); + DocChangeColumns { + actor, + seq, + max_op, + time, + message, + deps, + extra, + other: Columns::empty(), + } + } + + pub(crate) fn raw_columns(&self) -> RawColumns { + let mut cols = vec![ + RawColumn::new( + ColumnSpec::new(ACTOR_COL_ID, ColumnType::Actor, false), + self.actor.clone().into(), + ), + RawColumn::new( + ColumnSpec::new(SEQ_COL_ID, ColumnType::DeltaInteger, false), + self.seq.clone().into(), + ), + RawColumn::new( + ColumnSpec::new(MAX_OP_COL_ID, ColumnType::DeltaInteger, false), + self.max_op.clone().into(), + ), + RawColumn::new( + ColumnSpec::new(TIME_COL_ID, ColumnType::DeltaInteger, false), + self.time.clone().into(), + ), + RawColumn::new( + ColumnSpec::new(MESSAGE_COL_ID, ColumnType::String, false), + self.message.clone().into(), + ), + RawColumn::new( + ColumnSpec::new(DEPS_COL_ID, ColumnType::Group, false), + self.deps.num_range().clone().into(), + ), + ]; + if self.deps.deps_range().len() > 0 { + cols.push(RawColumn::new( + ColumnSpec::new(DEPS_COL_ID, ColumnType::DeltaInteger, false), + self.deps.deps_range().clone().into(), + )) + } + cols.push(RawColumn::new( + ColumnSpec::new(EXTRA_COL_ID, ColumnType::ValueMetadata, false), + self.extra.meta_range().clone().into(), + )); + if !self.extra.raw_range().is_empty() { + cols.push(RawColumn::new( + ColumnSpec::new(EXTRA_COL_ID, ColumnType::Value, false), + self.extra.raw_range().clone().into(), + )) + } + cols.into_iter().collect() + } +} + +#[derive(Debug, thiserror::Error)] +pub(crate) enum ReadChangeError { + #[error("unexpected null value for {0}")] + 
UnexpectedNull(String), + #[error("mismatching column types for column {index}")] + MismatchingColumn { index: usize }, + #[error("incorrect value in extra bytes column")] + InvalidExtraBytes, + #[error(transparent)] + ReadColumn(#[from] DecodeColumnError), +} + +impl From for ReadChangeError { + fn from(m: MismatchingColumn) -> Self { + Self::MismatchingColumn { index: m.index } + } +} + +#[derive(Clone)] +pub(crate) struct DocChangeColumnIter<'a> { + actors: RleDecoder<'a, u64>, + seq: DeltaDecoder<'a>, + max_op: DeltaDecoder<'a>, + time: DeltaDecoder<'a>, + message: Option>, + deps: DepsIter<'a>, + extra: ExtraDecoder<'a>, +} + +impl<'a> DocChangeColumnIter<'a> { + fn try_next(&mut self) -> Result>, ReadChangeError> { + let actor = match self.actors.maybe_next_in_col("actor")? { + Some(actor) => actor as usize, + None => { + // The actor column should always have a value so if the actor iterator returns None that + // means we should be done, we check by asserting that all the other iterators + // return none (which is what Self::check_done does). + if self.check_done() { + return Ok(None); + } else { + return Err(ReadChangeError::UnexpectedNull("actor".to_string())); + } + } + }; + let seq = self.seq.next_in_col("seq").and_then(|seq| { + u64::try_from(seq).map_err(|e| DecodeColumnError::invalid_value("seq", e.to_string())) + })?; + let max_op = self.max_op.next_in_col("max_op").and_then(|seq| { + u64::try_from(seq).map_err(|e| DecodeColumnError::invalid_value("seq", e.to_string())) + })?; + let time = self.time.next_in_col("time")?; + let message = if let Some(ref mut message) = self.message { + message.maybe_next_in_col("message")? 
+ } else { + None + }; + let deps = self.deps.next_in_col("deps")?; + let extra = self.extra.next().transpose()?.unwrap_or(Cow::Borrowed(&[])); + Ok(Some(ChangeMetadata { + actor, + seq, + max_op, + timestamp: time, + message, + deps, + extra, + })) + } +} + +impl<'a> Iterator for DocChangeColumnIter<'a> { + type Item = Result, ReadChangeError>; + + fn next(&mut self) -> Option { + self.try_next().transpose() + } +} + +impl<'a> DocChangeColumnIter<'a> { + fn check_done(&mut self) -> bool { + let other_cols = [ + self.seq.next().is_none(), + self.max_op.next().is_none(), + self.time.next().is_none(), + self.deps.next().is_none(), + ]; + other_cols.iter().any(|f| *f) + } +} + +#[derive(Clone)] +struct ExtraDecoder<'a> { + val: ValueIter<'a>, +} + +impl<'a> Iterator for ExtraDecoder<'a> { + type Item = Result, ReadChangeError>; + fn next(&mut self) -> Option { + match self.val.next() { + Some(Ok(ScalarValue::Bytes(b))) => Some(Ok(Cow::Owned(b))), + Some(Ok(_)) => Some(Err(ReadChangeError::InvalidExtraBytes)), + Some(Err(e)) => Some(Err(e.into())), + None => None, + } + } +} + +impl TryFrom for DocChangeColumns { + type Error = ReadChangeError; + + fn try_from(columns: Columns) -> Result { + let mut actor: Option> = None; + let mut seq: Option = None; + let mut max_op: Option = None; + let mut time: Option = None; + let mut message: Option> = None; + let mut deps: Option = None; + let mut extra: Option = None; + let mut other = Columns::empty(); + + for (index, col) in columns.into_iter().enumerate() { + match (col.id(), col.col_type()) { + (ACTOR_COL_ID, ColumnType::Actor) => actor = Some(col.range().into()), + (SEQ_COL_ID, ColumnType::DeltaInteger) => seq = Some(col.range().into()), + (MAX_OP_COL_ID, ColumnType::DeltaInteger) => max_op = Some(col.range().into()), + (TIME_COL_ID, ColumnType::DeltaInteger) => time = Some(col.range().into()), + (MESSAGE_COL_ID, ColumnType::String) => message = Some(col.range().into()), + (DEPS_COL_ID, ColumnType::Group) => match 
col.into_ranges() { + GenericColumnRange::Group(GroupRange { num, values }) => { + let mut cols = values.into_iter(); + let deps_group = num; + let first = cols.next(); + let deps_index = match first { + Some(GroupedColumnRange::Simple(SimpleColRange::Delta( + index_range, + ))) => index_range, + Some(_) => { + tracing::error!( + "deps column contained more than one grouped column" + ); + return Err(ReadChangeError::MismatchingColumn { index: 5 }); + } + None => (0..0).into(), + }; + if cols.next().is_some() { + return Err(ReadChangeError::MismatchingColumn { index }); + } + deps = Some(DepsRange::new(deps_group, deps_index)); + } + _ => return Err(ReadChangeError::MismatchingColumn { index }), + }, + (EXTRA_COL_ID, ColumnType::ValueMetadata) => match col.into_ranges() { + GenericColumnRange::Value(val) => { + extra = Some(val); + } + _ => return Err(ReadChangeError::MismatchingColumn { index }), + }, + (other_id, other_type) => { + tracing::warn!(id=?other_id, typ=?other_type, "unknown column"); + other.append(col); + } + } + } + Ok(DocChangeColumns { + actor: actor.unwrap_or_else(|| (0..0).into()), + seq: seq.unwrap_or_else(|| (0..0).into()), + max_op: max_op.unwrap_or_else(|| (0..0).into()), + time: time.unwrap_or_else(|| (0..0).into()), + message: message.unwrap_or_else(|| (0..0).into()), + deps: deps.unwrap_or_else(|| DepsRange::new((0..0).into(), (0..0).into())), + extra: extra.unwrap_or_else(|| ValueRange::new((0..0).into(), (0..0).into())), + other, + }) + } +} diff --git a/automerge/src/storage/document/doc_op_columns.rs b/automerge/src/storage/document/doc_op_columns.rs new file mode 100644 index 00000000..49cabf81 --- /dev/null +++ b/automerge/src/storage/document/doc_op_columns.rs @@ -0,0 +1,450 @@ +use std::{borrow::Cow, convert::TryFrom}; + +use crate::{ + columnar_2::{ + column_range::{ + generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange}, + BooleanRange, DeltaRange, Key, KeyEncoder, KeyIter, KeyRange, ObjIdEncoder, 
ObjIdIter, + ObjIdRange, OpIdEncoder, OpIdIter, OpIdListEncoder, OpIdListIter, OpIdListRange, + OpIdRange, RleRange, ValueEncoder, ValueIter, ValueRange, + }, + encoding::{ + BooleanDecoder, BooleanEncoder, ColumnDecoder, DecodeColumnError, RleDecoder, + RleEncoder, + }, + }, + convert, + storage::{ + columns::{compression, ColumnId, ColumnSpec, ColumnType}, + Columns, MismatchingColumn, RawColumn, RawColumns, + }, + types::{ObjId, OpId, ScalarValue}, +}; + +const OBJ_COL_ID: ColumnId = ColumnId::new(0); +const KEY_COL_ID: ColumnId = ColumnId::new(1); +const ID_COL_ID: ColumnId = ColumnId::new(2); +const INSERT_COL_ID: ColumnId = ColumnId::new(3); +const ACTION_COL_ID: ColumnId = ColumnId::new(4); +const VAL_COL_ID: ColumnId = ColumnId::new(5); +const SUCC_COL_ID: ColumnId = ColumnId::new(8); + +/// The form operations take in the compressed document format. +#[derive(Debug)] +pub(crate) struct DocOp { + pub(crate) id: OpId, + pub(crate) object: ObjId, + pub(crate) key: Key, + pub(crate) insert: bool, + pub(crate) action: usize, + pub(crate) value: ScalarValue, + pub(crate) succ: Vec, +} + +#[derive(Debug, Clone)] +pub(crate) struct DocOpColumns { + obj: Option, + key: KeyRange, + id: OpIdRange, + insert: BooleanRange, + action: RleRange, + val: ValueRange, + succ: OpIdListRange, + #[allow(dead_code)] + other: Columns, +} + +struct DocId { + actor: usize, + counter: u64, +} + +impl convert::OpId for DocId { + fn actor(&self) -> usize { + self.actor + } + + fn counter(&self) -> u64 { + self.counter + } +} + +/// A row to be encoded as an op in the document format +/// +/// The lifetime `'a` is the lifetime of the value and key data types. For types which cannot +/// provide a reference (e.g. because they are decoding from some columnar storage on each +/// iteration) this should be `'static`. +pub(crate) trait AsDocOp<'a> { + /// The type of the Actor ID component of the op IDs for this impl. 
This is typically either + /// `&'a ActorID` or `usize` + type ActorId; + /// The type of the op IDs this impl produces. + type OpId: convert::OpId; + /// The type of the successor iterator returned by `Self::pred`. This can often be omitted + type SuccIter: Iterator + ExactSizeIterator; + + fn obj(&self) -> convert::ObjId; + fn id(&self) -> Self::OpId; + fn key(&self) -> convert::Key<'a, Self::OpId>; + fn insert(&self) -> bool; + fn action(&self) -> u64; + fn val(&self) -> Cow<'a, ScalarValue>; + fn succ(&self) -> Self::SuccIter; +} + +impl DocOpColumns { + pub(crate) fn encode<'a, I, C, O>(ops: I, out: &mut Vec) -> DocOpColumns + where + I: Iterator + Clone + ExactSizeIterator, + O: convert::OpId, + C: AsDocOp<'a, OpId = O>, + { + if ops.len() > 30000 { + Self::encode_rowwise(ops, out) + } else { + Self::encode_columnwise(ops, out) + } + } + + fn encode_columnwise<'a, I, O, C>(ops: I, out: &mut Vec) -> DocOpColumns + where + I: Iterator + Clone, + O: convert::OpId, + C: AsDocOp<'a, OpId = O>, + { + let obj = ObjIdRange::encode(ops.clone().map(|o| o.obj()), out); + let key = KeyRange::encode(ops.clone().map(|o| o.key()), out); + let id = OpIdRange::encode(ops.clone().map(|o| o.id()), out); + let insert = BooleanRange::encode(ops.clone().map(|o| o.insert()), out); + let action = RleRange::encode(ops.clone().map(|o| Some(o.action() as u64)), out); + let val = ValueRange::encode(ops.clone().map(|o| o.val()), out); + let succ = OpIdListRange::encode(ops.map(|o| o.succ()), out); + Self { + obj, + key, + id, + insert, + action, + val, + succ, + other: Columns::empty(), + } + } + + fn encode_rowwise<'a, I, O, C>(ops: I, out: &mut Vec) -> DocOpColumns + where + I: Iterator, + O: convert::OpId, + C: AsDocOp<'a, OpId = O>, + { + let mut obj = ObjIdEncoder::new(); + let mut key = KeyEncoder::new(); + let mut id = OpIdEncoder::new(); + let mut insert = BooleanEncoder::new(); + let mut action = RleEncoder::<_, u64>::from(Vec::new()); + let mut val = ValueEncoder::new(); + let 
mut succ = OpIdListEncoder::new(); + for op in ops { + obj.append(op.obj()); + key.append(op.key()); + id.append(op.id()); + insert.append(op.insert()); + action.append(Some(op.action())); + val.append(&op.val()); + succ.append(op.succ()); + } + let obj = obj.finish(out); + let key = key.finish(out); + let id = id.finish(out); + + let insert_start = out.len(); + let (insert_out, _) = insert.finish(); + out.extend(insert_out); + let insert = BooleanRange::from(insert_start..out.len()); + + let action_start = out.len(); + let (action_out, _) = action.finish(); + out.extend(action_out); + let action = RleRange::from(action_start..out.len()); + + let val = val.finish(out); + let succ = succ.finish(out); + DocOpColumns { + obj, + key, + id, + insert, + action, + val, + succ, + other: Columns::empty(), + } + } + + pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> DocOpColumnIter<'a> { + DocOpColumnIter { + id: self.id.iter(data), + action: self.action.decoder(data), + objs: self.obj.as_ref().map(|o| o.iter(data)), + keys: self.key.iter(data), + insert: self.insert.decoder(data), + value: self.val.iter(data), + succ: self.succ.iter(data), + } + } + + pub(crate) fn raw_columns(&self) -> RawColumns { + let mut cols = vec![ + RawColumn::new( + ColumnSpec::new(OBJ_COL_ID, ColumnType::Actor, false), + self.obj + .as_ref() + .map(|o| o.actor_range().clone().into()) + .unwrap_or(0..0), + ), + RawColumn::new( + ColumnSpec::new(OBJ_COL_ID, ColumnType::Integer, false), + self.obj + .as_ref() + .map(|o| o.counter_range().clone().into()) + .unwrap_or(0..0), + ), + RawColumn::new( + ColumnSpec::new(KEY_COL_ID, ColumnType::Actor, false), + self.key.actor_range().clone().into(), + ), + RawColumn::new( + ColumnSpec::new(KEY_COL_ID, ColumnType::DeltaInteger, false), + self.key.counter_range().clone().into(), + ), + RawColumn::new( + ColumnSpec::new(KEY_COL_ID, ColumnType::String, false), + self.key.string_range().clone().into(), + ), + RawColumn::new( + ColumnSpec::new(ID_COL_ID, 
ColumnType::Actor, false), + self.id.actor_range().clone().into(), + ), + RawColumn::new( + ColumnSpec::new(ID_COL_ID, ColumnType::DeltaInteger, false), + self.id.counter_range().clone().into(), + ), + RawColumn::new( + ColumnSpec::new(INSERT_COL_ID, ColumnType::Boolean, false), + self.insert.clone().into(), + ), + RawColumn::new( + ColumnSpec::new(ACTION_COL_ID, ColumnType::Integer, false), + self.action.clone().into(), + ), + RawColumn::new( + ColumnSpec::new(VAL_COL_ID, ColumnType::ValueMetadata, false), + self.val.meta_range().clone().into(), + ), + ]; + if !self.val.raw_range().is_empty() { + cols.push(RawColumn::new( + ColumnSpec::new(VAL_COL_ID, ColumnType::Value, false), + self.val.raw_range().clone().into(), + )); + } + cols.push(RawColumn::new( + ColumnSpec::new(SUCC_COL_ID, ColumnType::Group, false), + self.succ.group_range().clone().into(), + )); + if !self.succ.actor_range().is_empty() { + cols.extend([ + RawColumn::new( + ColumnSpec::new(SUCC_COL_ID, ColumnType::Actor, false), + self.succ.actor_range().clone().into(), + ), + RawColumn::new( + ColumnSpec::new(SUCC_COL_ID, ColumnType::DeltaInteger, false), + self.succ.counter_range().clone().into(), + ), + ]); + } + cols.into_iter().collect() + } +} + +#[derive(Clone)] +pub(crate) struct DocOpColumnIter<'a> { + id: OpIdIter<'a>, + action: RleDecoder<'a, u64>, + objs: Option>, + keys: KeyIter<'a>, + insert: BooleanDecoder<'a>, + value: ValueIter<'a>, + succ: OpIdListIter<'a>, +} + +impl<'a> DocOpColumnIter<'a> { + fn done(&self) -> bool { + self.id.done() + } +} + +#[derive(Debug, thiserror::Error)] +#[error(transparent)] +pub(crate) struct ReadDocOpError(#[from] DecodeColumnError); + +impl<'a> Iterator for DocOpColumnIter<'a> { + type Item = Result; + + fn next(&mut self) -> Option { + if self.done() { + None + } else { + match self.try_next() { + Ok(Some(op)) => Some(Ok(op)), + Ok(None) => None, + Err(e) => Some(Err(e.into())), + } + } + } +} + +impl<'a> DocOpColumnIter<'a> { + fn try_next(&mut self) 
-> Result, DecodeColumnError> { + if self.done() { + Ok(None) + } else { + let id = self.id.next_in_col("id")?; + let action = self.action.next_in_col("action")?; + let obj = if let Some(ref mut objs) = self.objs { + objs.next_in_col("obj")? + } else { + ObjId::root() + }; + let key = self.keys.next_in_col("key")?; + let value = self.value.next_in_col("value")?; + let succ = self.succ.next_in_col("succ")?; + let insert = self.insert.next_in_col("insert")?; + Ok(Some(DocOp { + id, + value, + action: action as usize, + object: obj, + key, + succ, + insert, + })) + } + } +} + +#[derive(Debug, thiserror::Error)] +pub(crate) enum Error { + #[error("mismatching column at {index}.")] + MismatchingColumn { index: usize }, +} + +impl From for Error { + fn from(m: MismatchingColumn) -> Self { + Error::MismatchingColumn { index: m.index } + } +} + +impl TryFrom for DocOpColumns { + type Error = Error; + + fn try_from(columns: Columns) -> Result { + let mut obj_actor: Option> = None; + let mut obj_ctr: Option> = None; + let mut key_actor: Option> = None; + let mut key_ctr: Option = None; + let mut key_str: Option> = None; + let mut id_actor: Option> = None; + let mut id_ctr: Option = None; + let mut insert: Option = None; + let mut action: Option> = None; + let mut val: Option = None; + let mut succ_group: Option> = None; + let mut succ_actor: Option> = None; + let mut succ_ctr: Option = None; + let mut other = Columns::empty(); + + for (index, col) in columns.into_iter().enumerate() { + match (col.id(), col.col_type()) { + (ID_COL_ID, ColumnType::Actor) => id_actor = Some(col.range().into()), + (ID_COL_ID, ColumnType::DeltaInteger) => id_ctr = Some(col.range().into()), + (OBJ_COL_ID, ColumnType::Actor) => obj_actor = Some(col.range().into()), + (OBJ_COL_ID, ColumnType::Integer) => obj_ctr = Some(col.range().into()), + (KEY_COL_ID, ColumnType::Actor) => key_actor = Some(col.range().into()), + (KEY_COL_ID, ColumnType::DeltaInteger) => key_ctr = Some(col.range().into()), + 
(KEY_COL_ID, ColumnType::String) => key_str = Some(col.range().into()), + (INSERT_COL_ID, ColumnType::Boolean) => insert = Some(col.range().into()), + (ACTION_COL_ID, ColumnType::Integer) => action = Some(col.range().into()), + (VAL_COL_ID, ColumnType::ValueMetadata) => match col.into_ranges() { + GenericColumnRange::Value(v) => val = Some(v), + _ => { + tracing::error!("col 9 should be a value column"); + return Err(Error::MismatchingColumn { index }); + } + }, + (SUCC_COL_ID, ColumnType::Group) => match col.into_ranges() { + GenericColumnRange::Group(GroupRange { num, values }) => { + let mut cols = values.into_iter(); + let first = cols.next(); + let second = cols.next(); + succ_group = Some(num); + match (first, second) { + ( + Some(GroupedColumnRange::Simple(SimpleColRange::RleInt( + actor_range, + ))), + Some(GroupedColumnRange::Simple(SimpleColRange::Delta(ctr_range))), + ) => { + succ_actor = Some(actor_range); + succ_ctr = Some(ctr_range); + } + (None, None) => { + succ_actor = Some((0..0).into()); + succ_ctr = Some((0..0).into()); + } + _ => { + tracing::error!( + "expected a two column group of (actor, rle int) for index 10" + ); + return Err(Error::MismatchingColumn { index }); + } + }; + if cols.next().is_some() { + return Err(Error::MismatchingColumn { index }); + } + } + _ => return Err(Error::MismatchingColumn { index }), + }, + (other_col, other_type) => { + tracing::warn!(id=?other_col, typ=?other_type, "unknown column type"); + other.append(col) + } + } + } + Ok(DocOpColumns { + obj: ObjIdRange::new( + obj_actor.unwrap_or_else(|| (0..0).into()), + obj_ctr.unwrap_or_else(|| (0..0).into()), + ), + key: KeyRange::new( + key_actor.unwrap_or_else(|| (0..0).into()), + key_ctr.unwrap_or_else(|| (0..0).into()), + key_str.unwrap_or_else(|| (0..0).into()), + ), + id: OpIdRange::new( + id_actor.unwrap_or_else(|| (0..0).into()), + id_ctr.unwrap_or_else(|| (0..0).into()), + ), + insert: insert.unwrap_or_else(|| (0..0).into()), + action: 
action.unwrap_or_else(|| (0..0).into()), + val: val.unwrap_or_else(|| ValueRange::new((0..0).into(), (0..0).into())), + succ: OpIdListRange::new( + succ_group.unwrap_or_else(|| (0..0).into()), + succ_actor.unwrap_or_else(|| (0..0).into()), + succ_ctr.unwrap_or_else(|| (0..0).into()), + ), + other, + }) + } +} diff --git a/automerge/src/storage/load.rs b/automerge/src/storage/load.rs new file mode 100644 index 00000000..026123cc --- /dev/null +++ b/automerge/src/storage/load.rs @@ -0,0 +1,119 @@ +use tracing::instrument; + +use crate::{ + change_v2::Change, + storage::{self, parse}, +}; + +mod change_collector; +mod reconstruct_document; +pub(crate) use reconstruct_document::{ + reconstruct_document, DocObserver, LoadedObject, Reconstructed, +}; + +#[derive(Debug, thiserror::Error)] +#[allow(unreachable_pub)] +pub enum Error { + #[error("unable to parse chunk: {0}")] + Parse(Box), + #[error("invalid change columns: {0}")] + InvalidChangeColumns(Box), + #[error("invalid ops columns: {0}")] + InvalidOpsColumns(Box), + #[error("a chunk contained leftover data")] + LeftoverData, + #[error("error inflating document chunk ops: {0}")] + InflateDocument(Box), + #[error("bad checksum")] + BadChecksum, +} + +pub(crate) enum LoadedChanges<'a> { + /// All the data was succesfully loaded into a list of changes + Complete(Vec), + /// We only managed to load _some_ changes. + Partial { + /// The succesfully loaded changes + loaded: Vec, + /// The data which we were unable to parse + #[allow(dead_code)] + remaining: parse::Input<'a>, + /// The error encountered whilst trying to parse `remaining` + error: Error, + }, +} + +/// Attempt to Load all the chunks in `data`. +/// +/// # Partial Loads +/// +/// Automerge documents are encoded as one or more concatenated chunks. Each chunk containing one +/// or more changes. This means it is possible to partially load corrupted data if the first `n` +/// chunks are valid. 
This function returns a `LoadedChanges` which you can examine to determine if +/// this is the case. +#[instrument(skip(data))] +pub(crate) fn load_changes<'a>(mut data: parse::Input<'a>) -> LoadedChanges<'a> { + let mut changes = Vec::new(); + while !data.is_empty() { + let remaining = match load_next_change(data, &mut changes) { + Ok(d) => d, + Err(e) => { + return LoadedChanges::Partial { + loaded: changes, + remaining: data, + error: e, + }; + } + }; + data = remaining.reset(); + } + LoadedChanges::Complete(changes) +} + +fn load_next_change<'a>( + data: parse::Input<'a>, + changes: &mut Vec, +) -> Result, Error> { + let (remaining, chunk) = storage::Chunk::parse(data).map_err(|e| Error::Parse(Box::new(e)))?; + if !chunk.checksum_valid() { + return Err(Error::BadChecksum); + } + match chunk { + storage::Chunk::Document(d) => { + let Reconstructed { + changes: new_changes, + .. + } = reconstruct_document(&d, NullObserver) + .map_err(|e| Error::InflateDocument(Box::new(e)))?; + changes.extend(new_changes); + } + storage::Chunk::Change(change) => { + tracing::trace!("loading change chunk"); + let change = Change::new_from_unverified(change.into_owned(), None) + .map_err(|e| Error::InvalidChangeColumns(Box::new(e)))?; + #[cfg(debug_assertions)] + { + let loaded_ops = change.iter_ops().collect::>(); + tracing::trace!(actor=?change.actor_id(), num_ops=change.len(), ops=?loaded_ops, "loaded change"); + } + #[cfg(not(debug_assertions))] + tracing::trace!(actor=?change.actor_id(), num_ops=change.len(), "loaded change"); + changes.push(change); + } + storage::Chunk::CompressedChange(change, compressed) => { + tracing::trace!("loading compressed change chunk"); + let change = + Change::new_from_unverified(change.into_owned(), Some(compressed.into_owned())) + .map_err(|e| Error::InvalidChangeColumns(Box::new(e)))?; + changes.push(change); + } + }; + Ok(remaining) +} + +struct NullObserver; +impl DocObserver for NullObserver { + type Output = (); + fn finish(self, 
_metadata: crate::op_tree::OpSetMetadata) -> Self::Output {} + fn object_loaded(&mut self, _object: LoadedObject) {} +} diff --git a/automerge/src/storage/load/change_collector.rs b/automerge/src/storage/load/change_collector.rs new file mode 100644 index 00000000..5a877a60 --- /dev/null +++ b/automerge/src/storage/load/change_collector.rs @@ -0,0 +1,207 @@ +use std::{ + borrow::Cow, + collections::{BTreeSet, HashMap}, + num::NonZeroU64, +}; + +use tracing::instrument; + +use crate::{ + op_tree::OpSetMetadata, + storage::{ + change::{PredOutOfOrder, Verified}, + convert::op_as_actor_id, + Change as StoredChange, ChangeMetadata, + }, + types::{ChangeHash, ObjId, Op}, +}; + +#[derive(Debug, thiserror::Error)] +pub(crate) enum Error { + #[error("a change referenced an actor index we couldn't find")] + MissingActor, + #[error("changes out of order")] + ChangesOutOfOrder, + #[error("missing change")] + MissingChange, + #[error("unable to read change metadata: {0}")] + ReadChange(Box), + #[error("missing ops")] + MissingOps, +} + +pub(crate) struct ChangeCollector<'a> { + changes_by_actor: HashMap>>, +} + +pub(crate) struct CollectedChanges<'a> { + pub(crate) history: Vec>, + pub(crate) heads: BTreeSet, +} + +impl<'a> ChangeCollector<'a> { + pub(crate) fn new( + changes: I, + ) -> Result, Error> + where + I: IntoIterator, E>>, + { + let mut changes_by_actor: HashMap>> = HashMap::new(); + for (index, change) in changes.into_iter().enumerate() { + tracing::trace!(?change, "importing change metadata"); + let change = change.map_err(|e| Error::ReadChange(Box::new(e)))?; + let actor_changes = changes_by_actor.entry(change.actor).or_default(); + if let Some(prev) = actor_changes.last() { + if prev.max_op >= change.max_op { + return Err(Error::ChangesOutOfOrder); + } + } + actor_changes.push(PartialChange { + index, + deps: change.deps, + actor: change.actor, + seq: change.seq, + timestamp: change.timestamp, + max_op: change.max_op, + message: change.message, + extra_bytes: 
change.extra, + ops: Vec::new(), + }) + } + let num_changes: usize = changes_by_actor.values().map(|v| v.len()).sum(); + tracing::trace!(num_changes, "change collection context created"); + Ok(ChangeCollector { changes_by_actor }) + } + + #[instrument(skip(self))] + pub(crate) fn collect(&mut self, obj: ObjId, op: Op) -> Result<(), Error> { + let actor_changes = self + .changes_by_actor + .get_mut(&op.id.actor()) + .ok_or_else(|| { + tracing::error!(missing_actor = op.id.actor(), "missing actor for op"); + Error::MissingActor + })?; + let change_index = actor_changes.partition_point(|c| c.max_op < op.id.counter()); + let change = actor_changes.get_mut(change_index).ok_or_else(|| { + tracing::error!(missing_change_index = change_index, "missing change for op"); + Error::MissingChange + })?; + change.ops.push((obj, op)); + Ok(()) + } + + #[instrument(skip(self, metadata))] + pub(crate) fn finish( + self, + metadata: &OpSetMetadata, + ) -> Result, Error> { + let mut changes_in_order = + Vec::with_capacity(self.changes_by_actor.values().map(|c| c.len()).sum()); + for (_, changes) in self.changes_by_actor { + let mut seq = None; + for change in changes { + if let Some(seq) = seq { + if seq != change.seq - 1 { + return Err(Error::ChangesOutOfOrder); + } + } else if change.seq != 1 { + return Err(Error::ChangesOutOfOrder); + } + seq = Some(change.seq); + changes_in_order.push(change); + } + } + changes_in_order.sort_by_key(|c| c.index); + + let mut hashes_by_index = HashMap::new(); + let mut history = Vec::new(); + let mut heads = BTreeSet::new(); + for (index, change) in changes_in_order.into_iter().enumerate() { + let finished = change.finish(&hashes_by_index, metadata)?; + let hash = finished.hash(); + hashes_by_index.insert(index, hash); + for dep in finished.dependencies() { + heads.remove(dep); + } + heads.insert(hash); + history.push(finished.into_owned()); + } + + Ok(CollectedChanges { history, heads }) + } +} + +#[derive(Debug)] +struct PartialChange<'a> { + 
index: usize, + deps: Vec, + actor: usize, + seq: u64, + max_op: u64, + timestamp: i64, + message: Option, + extra_bytes: Cow<'a, [u8]>, + ops: Vec<(ObjId, Op)>, +} + +impl<'a> PartialChange<'a> { + /// # Panics + /// + /// * If any op references a property index which is not in `props` + /// * If any op references an actor index which is not in `actors` + #[instrument(skip(self, known_changes, metadata))] + fn finish( + mut self, + known_changes: &HashMap, + metadata: &OpSetMetadata, + ) -> Result, Error> { + let deps_len = self.deps.len(); + let mut deps = self.deps.into_iter().try_fold::<_, _, Result<_, Error>>( + Vec::with_capacity(deps_len), + |mut acc, dep| { + acc.push(known_changes.get(&(dep as usize)).cloned().ok_or_else(|| { + tracing::error!( + dependent_index = self.index, + dep_index = dep, + "could not find dependency" + ); + Error::MissingChange + })?); + Ok(acc) + }, + )?; + deps.sort(); + let num_ops = self.ops.len() as u64; + self.ops.sort_by_key(|o| o.1.id); + let converted_ops = self + .ops + .iter() + .map(|(obj, op)| op_as_actor_id(obj, op, metadata)); + let actor = metadata.actors.get(self.actor).clone(); + + let change = match StoredChange::builder() + .with_dependencies(deps) + .with_actor(actor) + .with_seq(self.seq) + .with_start_op(NonZeroU64::new(self.max_op - num_ops + 1).ok_or(Error::MissingOps)?) 
+ .with_timestamp(self.timestamp) + .with_message(self.message.map(|s| s.to_string())) + .with_extra_bytes(self.extra_bytes.into_owned()) + .build(converted_ops) + { + Ok(s) => s, + Err(PredOutOfOrder) => { + // SAFETY: types::Op::preds is `types::OpIds` which ensures ops are always sorted + panic!("preds out of order"); + } + }; + #[cfg(not(debug_assertions))] + tracing::trace!(?change, hash=?change.hash(), "collected change"); + #[cfg(debug_assertions)] + { + tracing::trace!(?change, ops=?self.ops, hash=?change.hash(), "collected change"); + } + Ok(change) + } +} diff --git a/automerge/src/storage/load/reconstruct_document.rs b/automerge/src/storage/load/reconstruct_document.rs new file mode 100644 index 00000000..ce5197b1 --- /dev/null +++ b/automerge/src/storage/load/reconstruct_document.rs @@ -0,0 +1,362 @@ +use super::change_collector::ChangeCollector; +use std::collections::{BTreeSet, HashMap}; +use tracing::instrument; + +use crate::{ + change_v2::Change, + columnar_2::Key as DocOpKey, + op_tree::OpSetMetadata, + storage::{DocOp, Document}, + types::{ChangeHash, ElemId, Key, ObjId, ObjType, Op, OpId, OpIds, OpType}, + ScalarValue, +}; + +#[derive(Debug, thiserror::Error)] +pub(crate) enum Error { + #[error("the document contained ops which were out of order")] + OpsOutOfOrder, + #[error("error reading operation: {0:?}")] + ReadOp(Box), + #[error("an operation contained an invalid action")] + InvalidAction, + #[error("an operation referenced a missing actor id")] + MissingActor, + #[error("invalid changes: {0}")] + InvalidChanges(#[from] super::change_collector::Error), + #[error("mismatching heads")] + MismatchingHeads, + #[error("missing operations")] + MissingOps, + #[error("succ out of order")] + SuccOutOfOrder, +} + +/// All the operations loaded from an object in the document format +pub(crate) struct LoadedObject { + /// The id of the object + pub(crate) id: ObjId, + /// The id of the parent object, if any + pub(crate) parent: Option, + /// The 
operations for this object + pub(crate) ops: Vec, + /// The type of the object + pub(crate) obj_type: ObjType, +} + +/// An observer which will be notified of each object as it completes and which can produce a +/// result once all the operations are loaded and the change graph is verified. +pub(crate) trait DocObserver { + type Output; + + /// The operations for an object have been loaded + fn object_loaded(&mut self, object: LoadedObject); + /// The document has finished loading. The `metadata` is the `OpSetMetadata` which was used to + /// create the indices in the operations which were passed to `object_loaded` + fn finish(self, metadata: OpSetMetadata) -> Self::Output; +} + +/// The result of reconstructing the change history from a document +pub(crate) struct Reconstructed { + /// The maximum op counter that was found in the document + pub(crate) max_op: u64, + /// The changes in the document, in the order they were encoded in the document + pub(crate) changes: Vec, + /// The result produced by the `DocObserver` which was watching the reconstruction + pub(crate) result: Output, + /// The heads of the document + pub(crate) heads: BTreeSet, +} + +#[instrument(skip(doc, observer))] +pub(crate) fn reconstruct_document<'a, O: DocObserver>( + doc: &'a Document<'a>, + mut observer: O, +) -> Result, Error> { + // The document format does not contain the bytes of the changes which are encoded in it + // directly. Instead the metadata about the changes (the actor, the start op, etc.) are all + // encoded separately to all the ops in the document. We need to reconstruct the changes in + // order to verify the heads of the document. To do this we iterate over the document + // operations adding each operation to a `ChangeCollector`. Once we've collected all the + // changes, the `ChangeCollector` knows how to group all the operations together to produce the + // change graph. 
+ // + // Some of the work involved in reconstructing the changes could in principle be quite costly. + // For example, delete operations dont appear in the document at all, instead the delete + // operations are recorded as `succ` operations on the operations which they delete. This means + // that to reconstruct delete operations we have to first collect all the operations, then look + // for succ operations which we have not seen a concrete operation for. Happily we can take + // advantage of the fact that operations are encoded in the order of the object they apply to. + // This is the purpose of `LoadingObject`. + // + // Finally, when constructing an OpSet from this data we want to process the operations in the + // order they appear in the document, this allows us to create the OpSet more efficiently than + // if we were directly applying the reconstructed change graph. This is the purpose of the + // `DocObserver`, which we pass operations to as we complete the processing of each object. + + // The metadata which we create from the doc and which we will pass to the observer + let mut metadata = OpSetMetadata::from_actors(doc.actors().to_vec()); + // The object we are currently loading, starts with the root + let mut current_object = LoadingObject::root(); + // The changes we are collecting to later construct the change graph from + let mut collector = ChangeCollector::new(doc.iter_changes())?; + // A map where we record the create operations so that when the object ID the incoming + // operations refer to switches we can lookup the object type for the new object. 
We also + // need it so we can pass the parent object ID to the observer + let mut create_ops = HashMap::new(); + // The max op we've seen + let mut max_op = 0; + // The objects we have finished loaded + let mut objs_loaded = BTreeSet::new(); + + for op_res in doc.iter_ops() { + let doc_op = op_res.map_err(|e| Error::ReadOp(Box::new(e)))?; + max_op = std::cmp::max(max_op, doc_op.id.counter()); + + // Delete ops only appear as succ values in the document operations, so if a delete + // operation is the max op we will only see it here. Therefore we step through the document + // operations succs checking for max op + for succ in &doc_op.succ { + max_op = std::cmp::max(max_op, succ.counter()); + } + + let obj = doc_op.object; + check_opid(&metadata, *obj.opid())?; + let op = import_op(&mut metadata, doc_op)?; + tracing::trace!(?op, ?obj, "loading document op"); + + if let OpType::Make(obj_type) = op.action { + create_ops.insert( + ObjId::from(op.id), + CreateOp { + obj_type, + parent_id: obj, + }, + ); + }; + if obj == current_object.id { + current_object.append_op(op.clone())?; + } else { + let create_op = match create_ops.get(&obj) { + Some(t) => Ok(t), + None => { + tracing::error!( + ?op, + "operation referenced an object which we haven't seen a create op for yet" + ); + Err(Error::OpsOutOfOrder) + } + }?; + if obj < current_object.id { + tracing::error!(?op, previous_obj=?current_object.id, "op referenced an object ID which was smaller than the previous object ID"); + return Err(Error::OpsOutOfOrder); + } else { + let loaded = current_object.finish(&mut collector, &metadata)?; + objs_loaded.insert(loaded.id); + observer.object_loaded(loaded); + current_object = + LoadingObject::new(obj, Some(create_op.parent_id), create_op.obj_type); + current_object.append_op(op.clone())?; + } + } + } + let loaded = current_object.finish(&mut collector, &metadata)?; + objs_loaded.insert(loaded.id); + observer.object_loaded(loaded); + + // If an op created an object but no 
operation targeting that object was ever made then the + // object will only exist in the create_ops map. We collect all such objects here. + for ( + obj_id, + CreateOp { + parent_id, + obj_type, + }, + ) in create_ops.into_iter() + { + if !objs_loaded.contains(&obj_id) { + observer.object_loaded(LoadedObject { + parent: Some(parent_id), + id: obj_id, + ops: Vec::new(), + obj_type, + }) + } + } + + let super::change_collector::CollectedChanges { history, heads } = + collector.finish(&metadata)?; + let expected_heads: BTreeSet<_> = doc.heads().iter().cloned().collect(); + if expected_heads != heads { + tracing::error!(?expected_heads, ?heads, "mismatching heads"); + return Err(Error::MismatchingHeads); + } + let result = observer.finish(metadata); + + Ok(Reconstructed { + result, + changes: history.into_iter().map(Change::new).collect(), + heads, + max_op, + }) +} + +struct CreateOp { + parent_id: ObjId, + obj_type: ObjType, +} +struct LoadingObject { + id: ObjId, + parent_id: Option, + ops: Vec, + obj_type: ObjType, + preds: HashMap>, + /// Operations which set a value, stored to later lookup keys when reconstructing delete events + set_ops: HashMap, + /// To correctly load the values of the `Counter` struct in the value of op IDs we need to + /// lookup the various increment operations which have been applied by the succesors of the + /// initial operation which creates the counter. 
+ inc_ops: HashMap, +} + +impl LoadingObject { + fn root() -> Self { + Self::new(ObjId::root(), None, ObjType::Map) + } + + fn new(id: ObjId, parent_id: Option, obj_type: ObjType) -> Self { + LoadingObject { + id, + parent_id, + ops: Vec::new(), + obj_type, + preds: HashMap::new(), + set_ops: HashMap::new(), + inc_ops: HashMap::new(), + } + } + + fn append_op(&mut self, op: Op) -> Result<(), Error> { + // Collect set operations so we can find the keys which delete operations refer to in + // `finish` + if matches!(op.action, OpType::Put(_)) { + match op.key { + Key::Map(_) => { + self.set_ops.insert(op.id, op.key); + } + Key::Seq(ElemId(o)) => { + let elem_opid = if op.insert { op.id } else { o }; + self.set_ops.insert(op.id, Key::Seq(ElemId(elem_opid))); + } + }; + } + // Collect increment operations so we can reconstruct counters properly in `finish` + if let OpType::Increment(inc) = op.action { + self.inc_ops.insert(op.id, inc); + } + for succ in &op.succ { + self.preds.entry(*succ).or_default().push(op.id); + } + self.ops.push(op); + Ok(()) + } + + fn finish( + mut self, + collector: &mut ChangeCollector<'_>, + meta: &OpSetMetadata, + ) -> Result { + let mut ops = Vec::new(); + for mut op in self.ops.into_iter() { + if let Some(preds) = self.preds.remove(&op.id) { + op.pred = meta.sorted_opids(preds.into_iter()); + } + if let OpType::Put(ScalarValue::Counter(c)) = &mut op.action { + let inc_ops = op.succ.iter().filter_map(|s| self.inc_ops.get(s).copied()); + c.increment(inc_ops); + } + collector.collect(self.id, op.clone())?; + ops.push(op) + } + // Any remaining pred ops must be delete operations + // TODO (alex): Figure out what index these should be inserted at. Does it even matter? 
+ for (opid, preds) in self.preds.into_iter() { + let key = self.set_ops.get(&preds[0]).ok_or_else(|| { + tracing::error!(?opid, ?preds, "no delete operation found"); + Error::MissingOps + })?; + collector.collect( + self.id, + Op { + id: opid, + pred: meta.sorted_opids(preds.into_iter()), + insert: false, + succ: OpIds::empty(), + key: *key, + action: OpType::Delete, + }, + )?; + } + Ok(LoadedObject { + id: self.id, + parent: self.parent_id, + ops, + obj_type: self.obj_type, + }) + } +} + +fn import_op(m: &mut OpSetMetadata, op: DocOp) -> Result { + let key = match op.key { + DocOpKey::Prop(s) => Key::Map(m.import_prop(s)), + DocOpKey::Elem(ElemId(op)) => Key::Seq(ElemId(check_opid(m, op)?)), + }; + for opid in &op.succ { + if m.actors.safe_get(opid.actor()).is_none() { + tracing::error!(?opid, "missing actor"); + return Err(Error::MissingActor); + } + } + Ok(Op { + id: check_opid(m, op.id)?, + action: parse_optype(op.action, op.value)?, + key, + succ: m.try_sorted_opids(op.succ).ok_or(Error::SuccOutOfOrder)?, + pred: OpIds::empty(), + insert: op.insert, + }) +} + +/// We construct the OpSetMetadata directly from the vector of actors which are encoded in the +/// start of the document. Therefore we need to check for each opid in the docuemnt that the actor +/// ID which it references actually exists in the metadata. 
+fn check_opid(m: &OpSetMetadata, opid: OpId) -> Result { + match m.actors.safe_get(opid.actor()) { + Some(_) => Ok(opid), + None => { + tracing::error!("missing actor"); + Err(Error::MissingActor) + } + } +} + +fn parse_optype(action_index: usize, value: ScalarValue) -> Result { + match action_index { + 0 => Ok(OpType::Make(ObjType::Map)), + 1 => Ok(OpType::Put(value)), + 2 => Ok(OpType::Make(ObjType::List)), + 3 => Ok(OpType::Delete), + 4 => Ok(OpType::Make(ObjType::Text)), + 5 => match value { + ScalarValue::Int(i) => Ok(OpType::Increment(i)), + _ => { + tracing::error!(?value, "invalid value for counter op"); + Err(Error::InvalidAction) + } + }, + 6 => Ok(OpType::Make(ObjType::Table)), + other => { + tracing::error!(action = other, "unknown action type"); + Err(Error::InvalidAction) + } + } +} diff --git a/automerge/src/storage/save.rs b/automerge/src/storage/save.rs new file mode 100644 index 00000000..4921bd35 --- /dev/null +++ b/automerge/src/storage/save.rs @@ -0,0 +1,2 @@ +mod document; +pub(crate) use document::save_document; diff --git a/automerge/src/storage/save/document.rs b/automerge/src/storage/save/document.rs new file mode 100644 index 00000000..f27d920d --- /dev/null +++ b/automerge/src/storage/save/document.rs @@ -0,0 +1,146 @@ +use std::{borrow::Cow, collections::BTreeMap, iter::Iterator}; + +use crate::{ + indexed_cache::IndexedCache, + storage::{ + change::DEFLATE_MIN_SIZE, convert::op_as_docop, AsChangeMeta, CompressConfig, Document, + }, + types::{ActorId, ObjId, Op}, + Change, ChangeHash, +}; + +/// # Panics +/// +/// * If any of the `heads` are not in `changes` +/// * If any of ops in `ops` reference an actor which is not in `actors` +/// * If any of ops in `ops` reference a property which is not in `props` +/// * If any of the changes reference a dependency index which is not in `changes` +#[tracing::instrument(skip(changes, ops, actors, props, config))] +pub(crate) fn save_document<'a, I, O>( + changes: I, + ops: O, + actors: &'a 
IndexedCache, + props: &IndexedCache, + heads: &[ChangeHash], + config: Option, +) -> Vec +where + I: Iterator + Clone + 'a, + O: Iterator + Clone + ExactSizeIterator, +{ + let actor_lookup = actors.encode_index(); + let doc_ops = ops.map(|(obj, op)| op_as_docop(&actor_lookup, props, obj, op)); + + let hash_graph = HashGraph::new(changes.clone()); + let changes = changes.map(|c| ChangeWithGraph { + actors, + actor_lookup: &actor_lookup, + change: c, + graph: &hash_graph, + }); + + let doc = Document::new( + actors.sorted().cache, + hash_graph.heads_with_indices(heads.to_vec()), + doc_ops, + changes, + config.unwrap_or(CompressConfig::Threshold(DEFLATE_MIN_SIZE)), + ); + doc.into_bytes() +} + +struct HashGraph { + index_by_hash: BTreeMap, +} + +impl HashGraph { + fn new<'a, I>(changes: I) -> Self + where + I: Iterator, + { + let mut index_by_hash = BTreeMap::new(); + for (index, change) in changes.enumerate() { + index_by_hash.insert(change.hash(), index); + } + Self { index_by_hash } + } + + fn change_index(&self, hash: &ChangeHash) -> usize { + self.index_by_hash[hash] + } + + fn heads_with_indices(&self, heads: Vec) -> Vec<(ChangeHash, usize)> { + heads + .into_iter() + .map(|h| (h, self.index_by_hash[&h])) + .collect() + } +} + +struct ChangeWithGraph<'a> { + change: &'a Change, + graph: &'a HashGraph, + actor_lookup: &'a [usize], + actors: &'a IndexedCache, +} + +impl<'a> AsChangeMeta<'a> for ChangeWithGraph<'a> { + type DepsIter = ChangeDepsIter<'a>; + + fn actor(&self) -> u64 { + self.actor_lookup[self.actors.lookup(self.change.actor_id()).unwrap()] as u64 + } + + fn seq(&self) -> u64 { + self.change.seq() + } + + fn deps(&self) -> Self::DepsIter { + ChangeDepsIter { + change: self.change, + graph: self.graph, + offset: 0, + } + } + + fn extra(&self) -> Cow<'a, [u8]> { + self.change.extra_bytes().into() + } + + fn max_op(&self) -> u64 { + self.change.max_op() + } + + fn message(&self) -> Option> { + self.change.message().map(|m| Cow::Owned(m.into())) + } + + 
fn timestamp(&self) -> i64 { + self.change.timestamp() + } +} + +struct ChangeDepsIter<'a> { + change: &'a Change, + graph: &'a HashGraph, + offset: usize, +} + +impl<'a> ExactSizeIterator for ChangeDepsIter<'a> { + fn len(&self) -> usize { + self.change.deps().len() + } +} + +impl<'a> Iterator for ChangeDepsIter<'a> { + type Item = u64; + + fn next(&mut self) -> Option { + if let Some(dep) = self.change.deps().get(self.offset) { + self.offset += 1; + Some(self.graph.change_index(dep) as u64) + } else { + None + } + } +} diff --git a/automerge/src/types.rs b/automerge/src/types.rs index ea7bb87c..d2c8b002 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -75,6 +75,12 @@ impl TryFrom for ActorId { } } +impl AsRef<[u8]> for ActorId { + fn as_ref(&self) -> &[u8] { + &self.0 + } +} + impl From for ActorId { fn from(u: uuid::Uuid) -> Self { ActorId(TinyVec::from(*u.as_bytes())) @@ -187,6 +193,45 @@ pub enum OpType { Put(ScalarValue), } +impl OpType { + /// The index into the action array as specified in [1] + /// + /// [1]: https://alexjg.github.io/automerge-storage-docs/#action-array + #[cfg(feature = "storage-v2")] + pub(crate) fn action_index(&self) -> u64 { + match self { + Self::Make(ObjType::Map) => 0, + Self::Put(_) => 1, + Self::Make(ObjType::List) => 2, + Self::Delete => 3, + Self::Make(ObjType::Text) => 4, + Self::Increment(_) => 5, + Self::Make(ObjType::Table) => 6, + } + } + + #[cfg(feature = "storage-v2")] + pub(crate) fn from_index_and_value( + index: u64, + value: ScalarValue, + ) -> Result { + match index { + 0 => Ok(Self::Make(ObjType::Map)), + 1 => Ok(Self::Put(value)), + 2 => Ok(Self::Make(ObjType::List)), + 3 => Ok(Self::Delete), + 4 => Ok(Self::Make(ObjType::Text)), + 5 => match value { + ScalarValue::Int(i) => Ok(Self::Increment(i)), + ScalarValue::Uint(i) => Ok(Self::Increment(i as i64)), + _ => Err(error::InvalidOpType::NonNumericInc), + }, + 6 => Ok(Self::Make(ObjType::Table)), + other => 
Err(error::InvalidOpType::UnknownAction(other)), + } + } +} + impl From for OpType { fn from(v: ObjType) -> Self { OpType::Make(v) @@ -266,6 +311,12 @@ impl Exportable for Key { } } +impl From for OpId { + fn from(o: ObjId) -> Self { + o.0 + } +} + impl From for ObjId { fn from(o: OpId) -> Self { ObjId(o) @@ -379,11 +430,33 @@ impl ObjId { pub(crate) const fn root() -> Self { ObjId(OpId(0, 0)) } + + #[cfg(feature = "storage-v2")] + pub(crate) fn is_root(&self) -> bool { + self.0.counter() == 0 + } + + #[cfg(feature = "storage-v2")] + pub(crate) fn opid(&self) -> &OpId { + &self.0 + } } #[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] pub(crate) struct ElemId(pub(crate) OpId); +impl ElemId { + #[cfg(feature = "storage-v2")] + pub(crate) fn is_head(&self) -> bool { + *self == HEAD + } + + #[cfg(feature = "storage-v2")] + pub(crate) fn head() -> Self { + Self(OpId(0, 0)) + } +} + #[derive(Debug, Clone, PartialEq)] pub(crate) struct Op { pub(crate) id: OpId, @@ -525,6 +598,24 @@ pub(crate) const HASH_SIZE: usize = 32; // 256 bits = 32 bytes #[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord, Copy)] pub struct ChangeHash(pub [u8; HASH_SIZE]); +impl ChangeHash { + #[cfg(feature = "storage-v2")] + pub(crate) fn as_bytes(&self) -> &[u8] { + &self.0 + } + + #[cfg(feature = "storage-v2")] + pub(crate) fn checksum(&self) -> [u8; 4] { + [self.0[0], self.0[1], self.0[2], self.0[3]] + } +} + +impl AsRef<[u8]> for ChangeHash { + fn as_ref(&self) -> &[u8] { + &self.0 + } +} + impl fmt::Debug for ChangeHash { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("ChangeHash") diff --git a/automerge/src/types/opids.rs b/automerge/src/types/opids.rs index ced0f50c..026fe923 100644 --- a/automerge/src/types/opids.rs +++ b/automerge/src/types/opids.rs @@ -19,6 +19,11 @@ impl<'a> IntoIterator for &'a OpIds { } impl OpIds { + #[cfg(feature = "storage-v2")] + pub(crate) fn empty() -> Self { + Self(Vec::new()) + } + pub(crate) fn new, F: 
Fn(&OpId, &OpId) -> std::cmp::Ordering>( opids: I, cmp: F, @@ -28,6 +33,21 @@ impl OpIds { Self(inner) } + /// Create a new OpIds if `opids` are sorted with respect to `cmp` and contain no duplicates. + /// + /// Returns `Some(OpIds)` if `opids` is sorted and has no duplicates, otherwise returns `None` + #[cfg(feature = "storage-v2")] + pub(crate) fn new_if_sorted std::cmp::Ordering>( + opids: Vec, + cmp: F, + ) -> Option { + if are_sorted_and_unique(opids.iter(), cmp) { + Some(Self(opids)) + } else { + None + } + } + /// Add an op to this set of OpIds. The `comparator` must provide a /// consistent ordering between successive calls to `add`. pub(crate) fn add std::cmp::Ordering>( @@ -74,6 +94,35 @@ impl OpIds { pub(crate) fn contains(&self, op: &OpId) -> bool { self.0.contains(op) } + + #[cfg(feature = "storage-v2")] + pub(crate) fn get(&self, idx: usize) -> Option<&OpId> { + self.0.get(idx) + } +} + +#[cfg(feature = "storage-v2")] +fn are_sorted_and_unique< + 'a, + I: Iterator, + F: FnMut(&OpId, &OpId) -> std::cmp::Ordering, +>( + mut opids: I, + mut f: F, +) -> bool { + use std::cmp::Ordering; + let mut last = match opids.next() { + Some(e) => e, + None => return true, + }; + + for next in opids { + if matches!(f(last, next), Ordering::Greater | Ordering::Equal) { + return false; + } + last = next; + } + true } #[cfg(test)] @@ -88,19 +137,36 @@ mod tests { }) } - fn scenario() -> impl Strategy, Vec)> { + fn scenario(size: std::ops::Range) -> impl Strategy, Vec)> { let actors = vec![ "aaaa".try_into().unwrap(), "cccc".try_into().unwrap(), "bbbb".try_into().unwrap(), ]; - proptest::collection::vec(gen_opid(actors.clone()), 0..100) + proptest::collection::vec(gen_opid(actors.clone()), size) .prop_map(move |opids| (actors.clone(), opids)) } + #[cfg(feature = "storage-v2")] + fn duplicate_unsorted_scenario() -> impl Strategy, Vec)> { + scenario(1..100).prop_map(|(actors, mut opids)| { + let mut sorted_opids = opids.clone(); + sorted_opids.sort_by(|left, right| 
cmp(&actors, left, right)); + sorted_opids.dedup(); + // Unwrap is okay due to the size we pass to `scenario()` + let last = *sorted_opids.last().unwrap(); + if sorted_opids == opids { + // Opids are sorted and deduplicated, just copy the last opid and insert it at the + // front + opids.insert(0, last); + } + (actors, opids) + }) + } + proptest! { #[test] - fn test_sorted_opids((actors, opids) in scenario()) { + fn test_sorted_opids((actors, opids) in scenario(0..100)) { let mut sorted_opids = OpIds::default(); for opid in &opids { sorted_opids.add(*opid, |left, right| cmp(&actors, left, right)); @@ -111,6 +177,17 @@ mod tests { expected.dedup(); assert_eq!(result, expected); } + + #[test] + #[cfg(feature = "storage-v2")] + fn test_new_if_sorted((actors, opids) in duplicate_unsorted_scenario()) { + let mut expected = opids.clone(); + assert_eq!(OpIds::new_if_sorted(opids, |left, right| cmp(&actors, left, right)), None); + expected.sort_by(|left, right| cmp(&actors, left, right)); + expected.dedup(); + let result = OpIds::new_if_sorted(expected.clone(), |left, right| cmp(&actors, left, right)).unwrap().into_iter().cloned().collect::>(); + assert_eq!(result, expected) + } } fn cmp(actors: &[ActorId], left: &OpId, right: &OpId) -> std::cmp::Ordering { diff --git a/automerge/src/value.rs b/automerge/src/value.rs index 633bbeaf..b8e355da 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -357,6 +357,16 @@ pub struct Counter { pub(crate) increments: usize, } +impl Counter { + #[cfg(feature = "storage-v2")] + pub(crate) fn increment>(&mut self, increments: I) { + for inc in increments { + self.current += inc; + self.increments += 1; + } + } +} + impl Serialize for Counter { fn serialize(&self, serializer: S) -> Result where From fc7657bcc67f83a1636c4a57c0f4b508e0e7805d Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 26 Jul 2022 14:49:25 +0100 Subject: [PATCH 537/730] Add a wrapper to implement Deserialize for Automerge It is useful to be able to 
generate a `serde::Value` representation of an automerge document. We can do this without an intermediate type by iterating over the keys of the document recursively. Add `autoeserde::AutoSerde` to implement this. Signed-off-by: Alex Good --- automerge/src/autoserde.rs | 109 +++++++++++++++++++++++++++++++++++++ automerge/src/lib.rs | 1 + 2 files changed, 110 insertions(+) create mode 100644 automerge/src/autoserde.rs diff --git a/automerge/src/autoserde.rs b/automerge/src/autoserde.rs new file mode 100644 index 00000000..50911198 --- /dev/null +++ b/automerge/src/autoserde.rs @@ -0,0 +1,109 @@ +use serde::ser::{SerializeMap, SerializeSeq}; + +use crate::{Automerge, ObjId, ObjType, Value}; + +/// A wrapper type which implements `serde::Deserialize` for an `Automerge` +#[derive(Debug)] +pub struct AutoSerde<'a>(&'a Automerge); + +impl<'a> From<&'a Automerge> for AutoSerde<'a> { + fn from(a: &'a Automerge) -> Self { + AutoSerde(a) + } +} + +impl<'a> serde::Serialize for AutoSerde<'a> { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + AutoSerdeMap { + doc: self.0, + obj: ObjId::Root, + } + .serialize(serializer) + } +} + +struct AutoSerdeMap<'a> { + doc: &'a Automerge, + obj: ObjId, +} + +impl<'a> serde::Serialize for AutoSerdeMap<'a> { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let mut map_ser = serializer.serialize_map(Some(self.doc.length(&ObjId::Root)))?; + for key in self.doc.keys(&self.obj) { + // SAFETY: This only errors if the object ID is unknown, but we construct this type + // with a known real object ID + let (val, obj) = self.doc.get(&self.obj, &key).unwrap().unwrap(); + let serdeval = AutoSerdeVal { + doc: self.doc, + val, + obj, + }; + map_ser.serialize_entry(&key, &serdeval)?; + } + map_ser.end() + } +} + +struct AutoSerdeSeq<'a> { + doc: &'a Automerge, + obj: ObjId, +} + +impl<'a> serde::Serialize for AutoSerdeSeq<'a> { + fn serialize(&self, serializer: S) -> Result + where 
+ S: serde::Serializer, + { + let mut seq_ser = serializer.serialize_seq(None)?; + for i in 0..self.doc.length(&self.obj) { + // SAFETY: This only errors if the object ID is unknown, but we construct this type + // with a known real object ID + let (val, obj) = self.doc.get(&self.obj, i).unwrap().unwrap(); + let serdeval = AutoSerdeVal { + doc: self.doc, + val, + obj, + }; + seq_ser.serialize_element(&serdeval)?; + } + seq_ser.end() + } +} + +struct AutoSerdeVal<'a> { + doc: &'a Automerge, + val: Value<'a>, + obj: ObjId, +} + +impl<'a> serde::Serialize for AutoSerdeVal<'a> { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + match &self.val { + Value::Object(ObjType::Map | ObjType::Table) => { + let map = AutoSerdeMap { + doc: self.doc, + obj: self.obj.clone(), + }; + map.serialize(serializer) + } + Value::Object(ObjType::List | ObjType::Text) => { + let seq = AutoSerdeSeq { + doc: self.doc, + obj: self.obj.clone(), + }; + seq.serialize(serializer) + } + Value::Scalar(v) => v.serialize(serializer), + } + } +} diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index eadecdd9..dddce817 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -56,6 +56,7 @@ macro_rules! __log { mod autocommit; mod automerge; +mod autoserde; mod change; #[cfg(feature = "storage-v2")] mod change_v2; From 34e919a4c83c4d63bb65d8c248a347877177daad Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 25 Jul 2022 15:51:30 +0100 Subject: [PATCH 538/730] Plumb in storage-v2 This is achieved by liberal use of feature flags. Main additions are: * Build the OpSet more efficiently when loading from compressed document storage using a DocObserver as implemented in `automerge::op_tree::load` * Reimplement the parsing login in the various types in `automerge::sync` There are numerous other small changes required to get the types to line up. 
Signed-off-by: Alex Good --- automerge/src/automerge.rs | 181 +++++++++++++++++++++++++++-- automerge/src/clocks.rs | 44 +++++++ automerge/src/error.rs | 22 +++- automerge/src/lib.rs | 21 ++-- automerge/src/op_set.rs | 29 +++++ automerge/src/op_set/load.rs | 87 ++++++++++++++ automerge/src/op_tree.rs | 5 + automerge/src/storage/document.rs | 1 + automerge/src/sync.rs | 181 +++++++++++++++++++++++++++-- automerge/src/sync/bloom.rs | 68 ++++++++++- automerge/src/sync/state.rs | 67 ++++++++++- automerge/src/transaction/inner.rs | 62 +++++++++- automerge/src/visualisation.rs | 9 ++ automerge/tests/helpers/mod.rs | 46 +++++++- automerge/tests/test.rs | 84 +++++++------ 15 files changed, 834 insertions(+), 73 deletions(-) create mode 100644 automerge/src/clocks.rs create mode 100644 automerge/src/op_set/load.rs diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index eb595153..b211ee18 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -4,24 +4,31 @@ use std::fmt::Debug; use std::num::NonZeroU64; use std::ops::RangeBounds; +#[cfg(not(feature = "storage-v2"))] use crate::change::encode_document; use crate::clock::ClockData; +#[cfg(feature = "storage-v2")] +use crate::clocks::Clocks; +#[cfg(feature = "storage-v2")] +use crate::columnar_2::Key as EncodedKey; use crate::exid::ExId; use crate::keys::Keys; use crate::op_observer::OpObserver; use crate::op_set::OpSet; use crate::parents::Parents; +#[cfg(feature = "storage-v2")] +use crate::storage::{self, load}; use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner}; use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, ScalarValue, Value, }; -use crate::KeysAt; +#[cfg(not(feature = "storage-v2"))] +use crate::{legacy, types}; use crate::{ - legacy, query, types, ApplyOptions, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, - Values, + query, ApplyOptions, AutomergeError, Change, KeysAt, 
ListRange, ListRangeAt, MapRange, + MapRangeAt, ObjType, Prop, Values, }; -use crate::{AutomergeError, Change, Prop}; use serde::Serialize; #[cfg(test)] @@ -136,7 +143,9 @@ impl Automerge { start_op: NonZeroU64::new(self.max_op + 1).unwrap(), time: 0, message: None, + #[cfg(not(feature = "storage-v2"))] extra_bytes: Default::default(), + #[cfg(not(feature = "storage-v2"))] hash: None, operations: vec![], deps, @@ -526,7 +535,7 @@ impl Automerge { prop: P, ) -> Result, ExId)>, AutomergeError> { let obj = self.exid_to_obj(obj.as_ref())?; - let result = match prop.into() { + let mut result = match prop.into() { Prop::Map(p) => { let prop = self.ops.m.props.lookup(&p); if let Some(p) = prop { @@ -548,6 +557,7 @@ impl Automerge { .map(|o| (o.value(), self.id_to_exid(o.id))) .collect(), }; + result.sort_by(|a, b| b.1.cmp(&a.1)); Ok(result) } @@ -592,6 +602,7 @@ impl Automerge { } /// Load a document. + #[cfg(not(feature = "storage-v2"))] pub fn load_with( data: &[u8], options: ApplyOptions<'_, Obs>, @@ -602,6 +613,87 @@ impl Automerge { Ok(doc) } + #[cfg(feature = "storage-v2")] + pub fn load_with( + data: &[u8], + mut options: ApplyOptions<'_, Obs>, + ) -> Result { + if data.is_empty() { + return Ok(Self::new()); + } + let (remaining, first_chunk) = storage::Chunk::parse(storage::parse::Input::new(data)) + .map_err(|e| load::Error::Parse(Box::new(e)))?; + if !first_chunk.checksum_valid() { + return Err(load::Error::BadChecksum.into()); + } + let observer = &mut options.op_observer; + + let mut am = match first_chunk { + storage::Chunk::Document(d) => { + let storage::load::Reconstructed { + max_op, + result: op_set, + changes, + heads, + } = match observer { + Some(o) => storage::load::reconstruct_document(&d, OpSet::observed_builder(*o)), + None => storage::load::reconstruct_document(&d, OpSet::builder()), + } + .map_err(|e| load::Error::InflateDocument(Box::new(e)))?; + let mut hashes_by_index = HashMap::new(); + let mut actor_to_history: HashMap> = HashMap::new(); + 
let mut clocks = Clocks::new(); + for (index, change) in changes.iter().enumerate() { + // SAFETY: This should be fine because we just constructed an opset containing + // all the changes + let actor_index = op_set.m.actors.lookup(change.actor_id()).unwrap(); + actor_to_history.entry(actor_index).or_default().push(index); + hashes_by_index.insert(index, change.hash()); + clocks.add_change(change, actor_index)?; + } + let history_index = hashes_by_index.into_iter().map(|(k, v)| (v, k)).collect(); + Self { + queue: vec![], + history: changes, + history_index, + states: actor_to_history, + clocks: clocks.into(), + ops: op_set, + deps: heads.into_iter().collect(), + saved: Default::default(), + actor: Actor::Unused(ActorId::random()), + max_op, + } + } + storage::Chunk::Change(stored_change) => { + let change = Change::new_from_unverified(stored_change.into_owned(), None) + .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?; + let mut am = Self::new(); + am.apply_change(change, observer); + am + } + storage::Chunk::CompressedChange(stored_change, compressed) => { + let change = Change::new_from_unverified( + stored_change.into_owned(), + Some(compressed.into_owned()), + ) + .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?; + let mut am = Self::new(); + am.apply_change(change, observer); + am + } + }; + match load::load_changes(remaining.reset()) { + load::LoadedChanges::Complete(c) => { + for change in c { + am.apply_change(change, observer); + } + } + load::LoadedChanges::Partial { error, .. } => return Err(error.into()), + } + Ok(am) + } + /// Load an incremental save of a document. 
pub fn load_incremental(&mut self, data: &[u8]) -> Result { self.load_incremental_with::<()>(data, ApplyOptions::default()) @@ -613,7 +705,16 @@ impl Automerge { data: &[u8], options: ApplyOptions<'_, Obs>, ) -> Result { + #[cfg(not(feature = "storage-v2"))] let changes = Change::load_document(data)?; + #[cfg(feature = "storage-v2")] + let changes = match load::load_changes(storage::parse::Input::new(data)) { + load::LoadedChanges::Complete(c) => c, + load::LoadedChanges::Partial { error, loaded, .. } => { + tracing::warn!(successful_chunks=loaded.len(), err=?error, "partial load"); + loaded + } + }; let start = self.ops.len(); self.apply_changes_with(changes, options)?; let delta = self.ops.len() - start; @@ -699,6 +800,7 @@ impl Automerge { None } + #[cfg(not(feature = "storage-v2"))] fn import_ops(&mut self, change: &Change) -> Vec<(ObjId, Op)> { change .iter_ops() @@ -733,6 +835,55 @@ impl Automerge { .collect() } + #[cfg(feature = "storage-v2")] + fn import_ops(&mut self, change: &Change) -> Vec<(ObjId, Op)> { + let actor = self.ops.m.actors.cache(change.actor_id().clone()); + let mut actors = Vec::with_capacity(change.other_actor_ids().len() + 1); + actors.push(actor); + actors.extend( + change + .other_actor_ids() + .iter() + .map(|a| self.ops.m.actors.cache(a.clone())) + .collect::>(), + ); + change + .iter_ops() + .enumerate() + .map(|(i, c)| { + let id = OpId(change.start_op().get() + i as u64, actor); + let key = match &c.key { + EncodedKey::Prop(n) => Key::Map(self.ops.m.props.cache(n.to_string())), + EncodedKey::Elem(e) if e.is_head() => Key::Seq(ElemId::head()), + EncodedKey::Elem(ElemId(o)) => { + Key::Seq(ElemId(OpId::new(actors[o.actor()], o.counter()))) + } + }; + let obj = if c.obj.is_root() { + ObjId::root() + } else { + ObjId(OpId(c.obj.opid().counter(), actors[c.obj.opid().actor()])) + }; + let pred = c + .pred + .iter() + .map(|p| OpId::new(actors[p.actor()], p.counter())); + let pred = self.ops.m.sorted_opids(pred); + ( + obj, + Op { + id, + 
action: OpType::from_index_and_value(c.action, c.val).unwrap(), + key, + succ: Default::default(), + pred, + insert: c.insert, + }, + ) + }) + .collect() + } + /// Takes all the changes in `other` which are not in `self` and applies them pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { self.merge_with::<()>(other, ApplyOptions::default()) @@ -759,8 +910,23 @@ impl Automerge { pub fn save(&mut self) -> Vec { let heads = self.get_heads(); let c = self.history.iter(); - let ops = self.ops.iter(); - let bytes = encode_document(heads, c, ops, &self.ops.m.actors, &self.ops.m.props.cache); + #[cfg(not(feature = "storage-v2"))] + let bytes = encode_document( + heads, + c, + self.ops.iter(), + &self.ops.m.actors, + &self.ops.m.props.cache, + ); + #[cfg(feature = "storage-v2")] + let bytes = crate::storage::save::save_document( + c, + self.ops.iter(), + &self.ops.m.actors, + &self.ops.m.props, + &heads, + None, + ); self.saved = self.get_heads(); bytes } @@ -960,6 +1126,7 @@ impl Automerge { .or_default() .push(history_index); + self.history_index.insert(change.hash(), history_index); let mut clock = Clock::new(); for hash in change.deps() { let c = self diff --git a/automerge/src/clocks.rs b/automerge/src/clocks.rs new file mode 100644 index 00000000..60fc5c71 --- /dev/null +++ b/automerge/src/clocks.rs @@ -0,0 +1,44 @@ +use crate::{ + clock::{Clock, ClockData}, + Change, ChangeHash, +}; +use std::collections::HashMap; + +pub(crate) struct Clocks(HashMap); + +#[derive(Debug, thiserror::Error)] +#[error("attempted to derive a clock for a change with dependencies we don't have")] +pub struct MissingDep(ChangeHash); + +impl Clocks { + pub(crate) fn new() -> Self { + Self(HashMap::new()) + } + + pub(crate) fn add_change( + &mut self, + change: &Change, + actor_index: usize, + ) -> Result<(), MissingDep> { + let mut clock = Clock::new(); + for hash in change.deps() { + let c = self.0.get(hash).ok_or(MissingDep(*hash))?; + clock.merge(c); + } + 
clock.include( + actor_index, + ClockData { + max_op: change.max_op(), + seq: change.seq(), + }, + ); + self.0.insert(change.hash(), clock); + Ok(()) + } +} + +impl From for HashMap { + fn from(c: Clocks) -> Self { + c.0 + } +} diff --git a/automerge/src/error.rs b/automerge/src/error.rs index e47b54e5..7c30deca 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -1,9 +1,13 @@ +#[cfg(feature = "storage-v2")] +use crate::storage::load::Error as LoadError; use crate::types::{ActorId, ScalarValue}; use crate::value::DataType; -use crate::{decoding, encoding, ChangeHash}; +use crate::ChangeHash; +#[cfg(not(feature = "storage-v2"))] +use crate::{decoding, encoding}; use thiserror::Error; -#[derive(Error, Debug, PartialEq)] +#[derive(Error, Debug)] pub enum AutomergeError { #[error("id was not an object id")] NotAnObject, @@ -12,8 +16,10 @@ pub enum AutomergeError { #[error("invalid obj id `{0}`")] InvalidObjId(String), #[error("there was an encoding problem: {0}")] + #[cfg(not(feature = "storage-v2"))] Encoding(#[from] encoding::Error), #[error("there was a decoding problem: {0}")] + #[cfg(not(feature = "storage-v2"))] Decoding(#[from] decoding::Error), #[error("key must not be an empty string")] EmptyStringKey, @@ -36,6 +42,18 @@ pub enum AutomergeError { }, #[error("general failure")] Fail, + #[cfg(feature = "storage-v2")] + #[error(transparent)] + Load(#[from] LoadError), + #[cfg(feature = "storage-v2")] + #[error("failed to load compressed data: {0}")] + Deflate(#[source] std::io::Error), + #[cfg(feature = "storage-v2")] + #[error("compressed chunk was not a change")] + NonChangeCompressed, + #[cfg(feature = "storage-v2")] + #[error(transparent)] + Clocks(#[from] crate::clocks::MissingDep), } #[cfg(feature = "wasm")] diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index dddce817..f3d950a8 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -57,19 +57,22 @@ macro_rules! 
__log { mod autocommit; mod automerge; mod autoserde; +#[cfg(not(feature = "storage-v2"))] mod change; #[cfg(feature = "storage-v2")] mod change_v2; mod clock; +#[cfg(feature = "storage-v2")] +mod clocks; +#[cfg(not(feature = "storage-v2"))] mod columnar; #[cfg(feature = "storage-v2")] -#[allow(dead_code)] -#[allow(unused_imports)] mod columnar_2; #[cfg(feature = "storage-v2")] -#[allow(dead_code)] mod convert; +#[cfg(not(feature = "storage-v2"))] mod decoding; +#[cfg(not(feature = "storage-v2"))] mod encoding; mod error; mod exid; @@ -88,8 +91,6 @@ mod options; mod parents; mod query; #[cfg(feature = "storage-v2")] -#[allow(dead_code)] -#[allow(unused_imports)] mod storage; pub mod sync; pub mod transaction; @@ -101,12 +102,16 @@ mod visualisation; pub use crate::automerge::Automerge; pub use autocommit::AutoCommit; -//#[cfg(not(feature = "storage-v2"))] +pub use autoserde::AutoSerde; +#[cfg(not(feature = "storage-v2"))] pub use change::Change; -//#[cfg(feature = "storage-v2")] -//pub use change_v2::{Change, LoadError as LoadChangeError}; +#[cfg(feature = "storage-v2")] +pub use change_v2::{Change, LoadError as LoadChangeError}; +#[cfg(not(feature = "storage-v2"))] pub use decoding::Error as DecodingError; +#[cfg(not(feature = "storage-v2"))] pub use decoding::InvalidChangeError; +#[cfg(not(feature = "storage-v2"))] pub use encoding::Error as EncodingError; pub use error::AutomergeError; pub use error::InvalidActorId; diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 0411e086..eddd433a 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -12,6 +12,11 @@ use std::cmp::Ordering; use std::collections::HashMap; use std::ops::RangeBounds; +#[cfg(feature = "storage-v2")] +mod load; +#[cfg(feature = "storage-v2")] +pub(crate) use load::{ObservedOpSetBuilder, OpSetBuilder}; + pub(crate) type OpSet = OpSetInternal; #[derive(Debug, Clone, PartialEq)] @@ -25,6 +30,18 @@ pub(crate) struct OpSetInternal { } impl OpSetInternal { + 
#[cfg(feature = "storage-v2")] + pub(crate) fn builder() -> OpSetBuilder { + OpSetBuilder::new() + } + + /// Create a builder which passes each operation to `observer`. This will be significantly + /// slower than `OpSetBuilder` + #[cfg(feature = "storage-v2")] + pub(crate) fn observed_builder(observer: &mut O) -> ObservedOpSetBuilder<'_, O> { + ObservedOpSetBuilder::new(observer) + } + pub(crate) fn new() -> Self { let mut trees: HashMap<_, _, _> = Default::default(); trees.insert(ObjId::root(), OpTree::new()); @@ -50,6 +67,7 @@ impl OpSetInternal { let mut objs: Vec<_> = self.trees.iter().collect(); objs.sort_by(|a, b| self.m.lamport_cmp((a.0).0, (b.0).0)); Iter { + opset: self, trees: objs.into_iter(), current: None, } @@ -178,6 +196,7 @@ impl OpSetInternal { self.length } + #[tracing::instrument(skip(self, index))] pub(crate) fn insert(&mut self, index: usize, obj: &ObjId, element: Op) { if let OpType::Make(typ) = element.action { self.trees.insert( @@ -194,6 +213,8 @@ impl OpSetInternal { //let tree = self.trees.get_mut(&element.obj).unwrap(); tree.internal.insert(index, element); self.length += 1; + } else { + tracing::warn!("attempting to insert op for unknown object"); } } @@ -311,6 +332,7 @@ impl<'a> IntoIterator for &'a OpSetInternal { #[derive(Clone)] pub(crate) struct Iter<'a> { + opset: &'a OpSet, trees: std::vec::IntoIter<(&'a ObjId, &'a op_tree::OpTree)>, current: Option<(&'a ObjId, op_tree::OpTreeIter<'a>)>, } @@ -337,6 +359,12 @@ impl<'a> Iterator for Iter<'a> { } } +impl<'a> ExactSizeIterator for Iter<'a> { + fn len(&self) -> usize { + self.opset.len() + } +} + #[derive(Clone, Debug, PartialEq)] pub(crate) struct OpSetMetadata { pub(crate) actors: IndexedCache, @@ -389,6 +417,7 @@ impl OpSetMetadata { OpIds::new_if_sorted(opids, |a, b| self.lamport_cmp(*a, *b)) } + #[cfg(not(feature = "storage-v2"))] pub(crate) fn import_opids>( &mut self, external_opids: I, diff --git a/automerge/src/op_set/load.rs b/automerge/src/op_set/load.rs new file mode 
100644 index 00000000..0f810d15 --- /dev/null +++ b/automerge/src/op_set/load.rs @@ -0,0 +1,87 @@ +use std::collections::HashMap; + +use fxhash::FxBuildHasher; + +use super::{OpSet, OpTree}; +use crate::{ + op_tree::OpTreeInternal, + storage::load::{DocObserver, LoadedObject}, + types::{ObjId, Op}, + OpObserver, +}; + +/// An opset builder which creates an optree for each object as it finishes loading, inserting the +/// ops using `OpTreeInternal::insert`. This should be faster than using `OpSet::insert_*` but only +/// works because the ops in the document format are in the same order as in the optrees. +pub(crate) struct OpSetBuilder { + completed_objects: HashMap, +} + +impl OpSetBuilder { + pub(crate) fn new() -> OpSetBuilder { + Self { + completed_objects: HashMap::default(), + } + } +} + +impl DocObserver for OpSetBuilder { + type Output = OpSet; + + fn object_loaded(&mut self, loaded: LoadedObject) { + let mut internal = OpTreeInternal::new(); + for (index, op) in loaded.ops.into_iter().enumerate() { + internal.insert(index, op); + } + let tree = OpTree { + internal, + objtype: loaded.obj_type, + parent: loaded.parent, + }; + self.completed_objects.insert(loaded.id, tree); + } + + fn finish(self, metadata: super::OpSetMetadata) -> Self::Output { + let len = self.completed_objects.values().map(|t| t.len()).sum(); + OpSet { + trees: self.completed_objects, + length: len, + m: metadata, + } + } +} + +/// A DocObserver which just accumulates ops until the document has finished reconstructing and +/// then inserts all of the ops using `OpSet::insert_op_with_observer` +pub(crate) struct ObservedOpSetBuilder<'a, O: OpObserver> { + observer: &'a mut O, + ops: Vec<(ObjId, Op)>, +} + +impl<'a, O: OpObserver> ObservedOpSetBuilder<'a, O> { + pub(crate) fn new(observer: &'a mut O) -> Self { + Self { + observer, + ops: Vec::new(), + } + } +} + +impl<'a, O: OpObserver> DocObserver for ObservedOpSetBuilder<'a, O> { + type Output = OpSet; + + fn object_loaded(&mut self, 
object: LoadedObject) { + self.ops.reserve(object.ops.len()); + for op in object.ops { + self.ops.push((object.id, op)); + } + } + + fn finish(self, _metadata: super::OpSetMetadata) -> Self::Output { + let mut opset = OpSet::new(); + for (obj, op) in self.ops { + opset.insert_op_with_observer(&obj, op, self.observer); + } + opset + } +} diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 1363dae3..329641d5 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -41,6 +41,11 @@ impl OpTree { pub(crate) fn iter(&self) -> OpTreeIter<'_> { self.internal.iter() } + + #[cfg(feature = "storage-v2")] + pub(crate) fn len(&self) -> usize { + self.internal.len() + } } #[derive(Clone, Debug)] diff --git a/automerge/src/storage/document.rs b/automerge/src/storage/document.rs index 8f9dca86..b9923b7a 100644 --- a/automerge/src/storage/document.rs +++ b/automerge/src/storage/document.rs @@ -12,6 +12,7 @@ use doc_change_columns::DocChangeColumns; pub(crate) use doc_change_columns::{AsChangeMeta, ChangeMetadata, ReadChangeError}; mod compression; +#[allow(dead_code)] pub(crate) enum CompressConfig { None, Threshold(usize), diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 57414c59..f2309b4c 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -1,20 +1,21 @@ use itertools::Itertools; -use std::{ - borrow::Cow, - collections::{HashMap, HashSet}, - io, - io::Write, -}; +use std::collections::{HashMap, HashSet}; -use crate::{ - decoding, decoding::Decoder, encoding::Encodable, types::HASH_SIZE, ApplyOptions, Automerge, - AutomergeError, Change, ChangeHash, OpObserver, -}; +use crate::{ApplyOptions, Automerge, AutomergeError, Change, ChangeHash, OpObserver}; +#[cfg(not(feature = "storage-v2"))] +use std::{borrow::Cow, io, io::Write}; + +#[cfg(feature = "storage-v2")] +use crate::storage::{parse, Change as StoredChange, ReadChangeOpError}; +#[cfg(not(feature = "storage-v2"))] +use crate::{decoding, decoding::Decoder, 
encoding::Encodable, types::HASH_SIZE}; mod bloom; mod state; pub use bloom::BloomFilter; +#[cfg(feature = "storage-v2")] +pub use state::DecodeError as DecodeStateError; pub use state::{Have, State}; const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identification @@ -257,6 +258,57 @@ impl Automerge { } } +#[cfg(feature = "storage-v2")] +#[derive(Debug, thiserror::Error)] +pub enum ReadMessageError { + #[error("expected {expected_one_of:?} but found {found}")] + WrongType { expected_one_of: Vec, found: u8 }, + #[error("{0}")] + Parse(String), + #[error(transparent)] + ReadChangeOps(#[from] ReadChangeOpError), + #[error("not enough input")] + NotEnoughInput, +} + +#[cfg(feature = "storage-v2")] +impl From for ReadMessageError { + fn from(e: parse::leb128::Error) -> Self { + ReadMessageError::Parse(e.to_string()) + } +} + +#[cfg(feature = "storage-v2")] +impl From for ReadMessageError { + fn from(e: bloom::ParseError) -> Self { + ReadMessageError::Parse(e.to_string()) + } +} + +#[cfg(feature = "storage-v2")] +impl From for ReadMessageError { + fn from(e: crate::storage::change::ParseError) -> Self { + ReadMessageError::Parse(format!("error parsing changes: {}", e)) + } +} + +#[cfg(feature = "storage-v2")] +impl From for parse::ParseError { + fn from(e: ReadMessageError) -> Self { + parse::ParseError::Error(e) + } +} + +#[cfg(feature = "storage-v2")] +impl From> for ReadMessageError { + fn from(p: parse::ParseError) -> Self { + match p { + parse::ParseError::Error(e) => e, + parse::ParseError::Incomplete(..) => Self::NotEnoughInput, + } + } +} + /// The sync message to be sent. 
#[derive(Clone, Debug, PartialEq)] pub struct Message { @@ -270,7 +322,91 @@ pub struct Message { pub changes: Vec, } +#[cfg(feature = "storage-v2")] +fn parse_have(input: parse::Input<'_>) -> parse::ParseResult<'_, Have, ReadMessageError> { + let (i, last_sync) = parse::length_prefixed(parse::change_hash)(input)?; + let (i, bloom_bytes) = parse::length_prefixed_bytes(i)?; + let (_, bloom) = BloomFilter::parse(parse::Input::new(bloom_bytes)).map_err(|e| e.lift())?; + Ok((i, Have { last_sync, bloom })) +} + impl Message { + #[cfg(feature = "storage-v2")] + pub fn decode(input: &[u8]) -> Result { + let input = parse::Input::new(input); + match Self::parse(input) { + Ok((_, msg)) => Ok(msg), + Err(parse::ParseError::Error(e)) => Err(e), + Err(parse::ParseError::Incomplete(_)) => Err(ReadMessageError::NotEnoughInput), + } + } + + #[cfg(feature = "storage-v2")] + pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, ReadMessageError> { + let (i, message_type) = parse::take1(input)?; + if message_type != MESSAGE_TYPE_SYNC { + return Err(parse::ParseError::Error(ReadMessageError::WrongType { + expected_one_of: vec![MESSAGE_TYPE_SYNC], + found: message_type, + })); + } + + let (i, heads) = parse::length_prefixed(parse::change_hash)(i)?; + let (i, need) = parse::length_prefixed(parse::change_hash)(i)?; + let (i, have) = parse::length_prefixed(parse_have)(i)?; + + let change_parser = |i| { + let (i, bytes) = parse::length_prefixed_bytes(i)?; + let (_, change) = + StoredChange::parse(parse::Input::new(bytes)).map_err(|e| e.lift())?; + Ok((i, change)) + }; + let (i, stored_changes) = parse::length_prefixed(change_parser)(i)?; + let changes_len = stored_changes.len(); + let changes: Vec = stored_changes + .into_iter() + .try_fold::<_, _, Result<_, ReadMessageError>>( + Vec::with_capacity(changes_len), + |mut acc, stored| { + let change = Change::new_from_unverified(stored.into_owned(), None) + .map_err(ReadMessageError::ReadChangeOps)?; + 
acc.push(change); + Ok(acc) + }, + )?; + + Ok(( + i, + Message { + heads, + need, + have, + changes, + }, + )) + } + + #[cfg(feature = "storage-v2")] + pub fn encode(mut self) -> Vec { + let mut buf = vec![MESSAGE_TYPE_SYNC]; + + encode_hashes(&mut buf, &self.heads); + encode_hashes(&mut buf, &self.need); + encode_many(&mut buf, self.have.iter(), |buf, h| { + encode_hashes(buf, &h.last_sync); + leb128::write::unsigned(buf, h.bloom.to_bytes().len() as u64).unwrap(); + buf.extend(h.bloom.to_bytes()); + }); + + encode_many(&mut buf, self.changes.iter_mut(), |buf, change| { + leb128::write::unsigned(buf, change.raw_bytes().len() as u64).unwrap(); + buf.extend(change.compressed_bytes().as_ref()) + }); + + buf + } + + #[cfg(not(feature = "storage-v2"))] pub fn encode(self) -> Vec { let mut buf = vec![MESSAGE_TYPE_SYNC]; @@ -291,6 +427,7 @@ impl Message { buf } + #[cfg(not(feature = "storage-v2"))] pub fn decode(bytes: &[u8]) -> Result { let mut decoder = Decoder::new(Cow::Borrowed(bytes)); @@ -329,6 +466,7 @@ impl Message { } } +#[cfg(not(feature = "storage-v2"))] fn encode_hashes(buf: &mut Vec, hashes: &[ChangeHash]) { debug_assert!( hashes.windows(2).all(|h| h[0] <= h[1]), @@ -337,6 +475,28 @@ fn encode_hashes(buf: &mut Vec, hashes: &[ChangeHash]) { hashes.encode_vec(buf); } +#[cfg(feature = "storage-v2")] +fn encode_many<'a, I, It, F>(out: &mut Vec, data: I, f: F) +where + I: Iterator + ExactSizeIterator + 'a, + F: Fn(&mut Vec, It), +{ + leb128::write::unsigned(out, data.len() as u64).unwrap(); + for datum in data { + f(out, datum) + } +} + +#[cfg(feature = "storage-v2")] +fn encode_hashes(buf: &mut Vec, hashes: &[ChangeHash]) { + debug_assert!( + hashes.windows(2).all(|h| h[0] <= h[1]), + "hashes were not sorted" + ); + encode_many(buf, hashes.iter(), |buf, hash| buf.extend(hash.as_bytes())) +} + +#[cfg(not(feature = "storage-v2"))] impl Encodable for &[ChangeHash] { fn encode(&self, buf: &mut W) -> io::Result { let head = self.len().encode(buf)?; @@ -349,6 +509,7 @@ 
impl Encodable for &[ChangeHash] { } } +#[cfg(not(feature = "storage-v2"))] fn decode_hashes(decoder: &mut Decoder<'_>) -> Result, decoding::Error> { let length = decoder.read::()?; let mut hashes = Vec::with_capacity(length as usize); diff --git a/automerge/src/sync/bloom.rs b/automerge/src/sync/bloom.rs index 69311a20..f24a855b 100644 --- a/automerge/src/sync/bloom.rs +++ b/automerge/src/sync/bloom.rs @@ -1,6 +1,12 @@ -use std::borrow::{Borrow, Cow}; +use std::borrow::Borrow; +#[cfg(not(feature = "storage-v2"))] +use std::borrow::Cow; -use crate::{decoding, decoding::Decoder, encoding::Encodable, ChangeHash}; +#[cfg(feature = "storage-v2")] +use crate::storage::parse; +use crate::ChangeHash; +#[cfg(not(feature = "storage-v2"))] +use crate::{decoding, decoding::Decoder, encoding::Encodable}; // These constants correspond to a 1% false positive rate. The values can be changed without // breaking compatibility of the network protocol, since the parameters used for a particular @@ -16,7 +22,15 @@ pub struct BloomFilter { bits: Vec, } +#[cfg(feature = "storage-v2")] +#[derive(Debug, thiserror::Error)] +pub(crate) enum ParseError { + #[error(transparent)] + Leb128(#[from] parse::leb128::Error), +} + impl BloomFilter { + #[cfg(not(feature = "storage-v2"))] pub fn to_bytes(&self) -> Vec { let mut buf = Vec::new(); if self.num_entries != 0 { @@ -28,6 +42,39 @@ impl BloomFilter { buf } + #[cfg(feature = "storage-v2")] + pub fn to_bytes(&self) -> Vec { + let mut buf = Vec::new(); + if self.num_entries != 0 { + leb128::write::unsigned(&mut buf, self.num_entries as u64).unwrap(); + leb128::write::unsigned(&mut buf, self.num_bits_per_entry as u64).unwrap(); + leb128::write::unsigned(&mut buf, self.num_probes as u64).unwrap(); + buf.extend(&self.bits); + } + buf + } + + #[cfg(feature = "storage-v2")] + pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, ParseError> { + if input.is_empty() { + Ok((input, Self::default())) + } else { + let (i, 
num_entries) = parse::leb128_u32(input)?; + let (i, num_bits_per_entry) = parse::leb128_u32(i)?; + let (i, num_probes) = parse::leb128_u32(i)?; + let (i, bits) = parse::take_n(bits_capacity(num_entries, num_bits_per_entry), i)?; + Ok(( + i, + Self { + num_entries, + num_bits_per_entry, + num_probes, + bits: bits.to_vec(), + }, + )) + } + } + fn get_probes(&self, hash: &ChangeHash) -> Vec { let hash_bytes = hash.0; let modulo = 8 * self.bits.len() as u32; @@ -107,6 +154,7 @@ fn bits_capacity(num_entries: u32, num_bits_per_entry: u32) -> usize { f as usize } +#[cfg(not(feature = "storage-v2"))] impl TryFrom<&[u8]> for BloomFilter { type Error = decoding::Error; @@ -129,3 +177,19 @@ impl TryFrom<&[u8]> for BloomFilter { } } } + +#[cfg(feature = "storage-v2")] +#[derive(thiserror::Error, Debug)] +#[error("{0}")] +pub struct DecodeError(String); + +#[cfg(feature = "storage-v2")] +impl TryFrom<&[u8]> for BloomFilter { + type Error = DecodeError; + + fn try_from(bytes: &[u8]) -> Result { + Self::parse(parse::Input::new(bytes)) + .map(|(_, b)| b) + .map_err(|e| DecodeError(e.to_string())) + } +} diff --git a/automerge/src/sync/state.rs b/automerge/src/sync/state.rs index 2ca5216f..5c174649 100644 --- a/automerge/src/sync/state.rs +++ b/automerge/src/sync/state.rs @@ -1,10 +1,36 @@ -use std::{borrow::Cow, collections::BTreeSet}; +use std::collections::BTreeSet; -use super::{decode_hashes, encode_hashes, BloomFilter}; -use crate::{decoding, decoding::Decoder, ChangeHash}; +#[cfg(not(feature = "storage-v2"))] +use super::decode_hashes; +use super::{encode_hashes, BloomFilter}; +#[cfg(feature = "storage-v2")] +use crate::storage::parse; +use crate::ChangeHash; +#[cfg(not(feature = "storage-v2"))] +use crate::{decoding, decoding::Decoder}; +#[cfg(not(feature = "storage-v2"))] +use std::borrow::Cow; const SYNC_STATE_TYPE: u8 = 0x43; // first byte of an encoded sync state, for identification +#[cfg(feature = "storage-v2")] +#[derive(Debug, thiserror::Error)] +pub enum DecodeError 
{ + #[error("{0:?}")] + Parse(String), + #[error("wrong type: expected one of {expected_one_of:?} but found {found}")] + WrongType { expected_one_of: Vec, found: u8 }, + #[error("not enough input")] + NotEnoughInput, +} + +#[cfg(feature = "storage-v2")] +impl From for DecodeError { + fn from(_: parse::leb128::Error) -> Self { + Self::Parse("bad leb128 encoding".to_string()) + } +} + /// The state of synchronisation with a peer. #[derive(Debug, Clone, Default, PartialEq, Eq, Hash)] pub struct State { @@ -39,6 +65,7 @@ impl State { buf } + #[cfg(not(feature = "storage-v2"))] pub fn decode(bytes: &[u8]) -> Result { let mut decoder = Decoder::new(Cow::Borrowed(bytes)); @@ -60,4 +87,38 @@ impl State { sent_hashes: BTreeSet::new(), }) } + + #[cfg(feature = "storage-v2")] + pub fn decode(input: &[u8]) -> Result { + let input = parse::Input::new(input); + match Self::parse(input) { + Ok((_, state)) => Ok(state), + Err(parse::ParseError::Incomplete(_)) => Err(DecodeError::NotEnoughInput), + Err(parse::ParseError::Error(e)) => Err(e), + } + } + + #[cfg(feature = "storage-v2")] + pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, DecodeError> { + let (i, record_type) = parse::take1(input)?; + if record_type != SYNC_STATE_TYPE { + return Err(parse::ParseError::Error(DecodeError::WrongType { + expected_one_of: vec![SYNC_STATE_TYPE], + found: record_type, + })); + } + + let (i, shared_heads) = parse::length_prefixed(parse::change_hash)(i)?; + Ok(( + i, + Self { + shared_heads, + last_sent_heads: Vec::new(), + their_heads: None, + their_need: None, + their_have: Some(Vec::new()), + sent_hashes: BTreeSet::new(), + }, + )) + } } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 28b1dd25..40dbb8b9 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -1,11 +1,15 @@ use std::num::NonZeroU64; use crate::automerge::Actor; +#[cfg(not(feature = "storage-v2"))] +use 
crate::change::export_change; use crate::exid::ExId; use crate::query::{self, OpIdSearch}; +#[cfg(feature = "storage-v2")] +use crate::storage::Change as StoredChange; use crate::types::{Key, ObjId, OpId}; -use crate::{change::export_change, types::Op, Automerge, ChangeHash, Prop}; -use crate::{AutomergeError, ObjType, OpObserver, OpType, ScalarValue}; +use crate::{op_tree::OpSetMetadata, types::Op, Automerge, Change, ChangeHash, OpObserver, Prop}; +use crate::{AutomergeError, ObjType, OpType, ScalarValue}; #[derive(Debug, Clone)] pub(crate) struct TransactionInner { @@ -14,7 +18,9 @@ pub(crate) struct TransactionInner { pub(crate) start_op: NonZeroU64, pub(crate) time: i64, pub(crate) message: Option, + #[cfg(not(feature = "storage-v2"))] pub(crate) extra_bytes: Vec, + #[cfg(not(feature = "storage-v2"))] pub(crate) hash: Option, pub(crate) deps: Vec, pub(crate) operations: Vec<(ObjId, Prop, Op)>, @@ -27,6 +33,7 @@ impl TransactionInner { /// Commit the operations performed in this transaction, returning the hashes corresponding to /// the new heads. 
+ #[tracing::instrument(skip(self, doc, op_observer))] pub(crate) fn commit( mut self, doc: &mut Automerge, @@ -63,13 +70,61 @@ impl TransactionInner { } let num_ops = self.pending_ops(); - let change = export_change(self, &doc.ops.m.actors, &doc.ops.m.props); + let change = self.export(&doc.ops.m); let hash = change.hash(); + #[cfg(not(debug_assertions))] + tracing::trace!(commit=?hash, deps=?change.deps(), "committing transaction"); + #[cfg(debug_assertions)] + { + let ops = change.iter_ops().collect::>(); + tracing::trace!(commit=?hash, ?ops, deps=?change.deps(), "committing transaction"); + } doc.update_history(change, num_ops); debug_assert_eq!(doc.get_heads(), vec![hash]); hash } + #[cfg(feature = "storage-v2")] + #[tracing::instrument(skip(self, metadata))] + pub(crate) fn export(self, metadata: &OpSetMetadata) -> Change { + use crate::storage::{change::PredOutOfOrder, convert::op_as_actor_id}; + + let actor = metadata.actors.get(self.actor).clone(); + let ops = self.operations.iter().map(|o| (&o.0, &o.2)); + //let (ops, other_actors) = encode_change_ops(ops, actor.clone(), actors, props); + let deps = self.deps.clone(); + let stored = match StoredChange::builder() + .with_actor(actor) + .with_seq(self.seq) + .with_start_op(self.start_op) + .with_message(self.message.clone()) + .with_dependencies(deps) + .with_timestamp(self.time) + .build( + ops.into_iter() + .map(|(obj, op)| op_as_actor_id(obj, op, metadata)), + ) { + Ok(s) => s, + Err(PredOutOfOrder) => { + // SAFETY: types::Op::preds is `types::OpIds` which ensures ops are always sorted + panic!("preds out of order"); + } + }; + #[cfg(debug_assertions)] + { + let realized_ops = self.operations.iter().collect::>(); + tracing::trace!(?stored, ops=?realized_ops, "committing change"); + } + #[cfg(not(debug_assertions))] + tracing::trace!(?stored, "committing change"); + Change::new(stored) + } + + #[cfg(not(feature = "storage-v2"))] + pub(crate) fn export(self, meta: &OpSetMetadata) -> Change { + 
export_change(self, &meta.actors, &meta.props) + } + /// Undo the operations added in this transaction, returning the number of cancelled /// operations. pub(crate) fn rollback(self, doc: &mut Automerge) -> usize { @@ -180,6 +235,7 @@ impl TransactionInner { ) -> Result<(), AutomergeError> { let obj = doc.exid_to_obj(ex_obj)?; let value = value.into(); + tracing::trace!(obj=?obj, value=?value, "inserting value"); self.do_insert(doc, obj, index, value.into())?; Ok(()) } diff --git a/automerge/src/visualisation.rs b/automerge/src/visualisation.rs index 5e6dae6f..6894f46f 100644 --- a/automerge/src/visualisation.rs +++ b/automerge/src/visualisation.rs @@ -192,6 +192,7 @@ impl OpTable { prop\ action\ succ\ + pred\ \


\ {}\ @@ -207,6 +208,7 @@ struct OpTableRow { prop: String, op_description: String, succ: String, + pred: String, } impl OpTableRow { @@ -217,6 +219,7 @@ impl OpTableRow { &self.prop, &self.op_description, &self.succ, + &self.pred, ]; let row = rows .iter() @@ -248,12 +251,18 @@ impl OpTableRow { .iter() .map(|s| format!(",{}", print_opid(s, actor_shorthands))) .collect(); + let pred = op + .pred + .iter() + .map(|s| format!(",{}", print_opid(s, actor_shorthands))) + .collect(); OpTableRow { op_description, obj_id: print_opid(&obj.0, actor_shorthands), op_id: print_opid(&op.id, actor_shorthands), prop, succ, + pred, } } } diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index fd3ba4e9..110470a9 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -49,7 +49,7 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// &doc, /// map!{ /// "todos" => { -/// todos => list![ +/// list![ /// { map!{ title = "water plants" } } /// ] /// } @@ -72,8 +72,8 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// &doc1, /// map!{ /// "field" => { -/// op1 => "one", -/// op2.translate(&doc2) => "two" +/// "one", +/// "two" /// } /// } /// ); @@ -188,10 +188,10 @@ macro_rules! 
list { ($($inner:tt,)+) => { list!($($inner),+) }; ($($inner:tt),*) => { { + use std::collections::BTreeSet; let _cap = list!(@count $($inner),*); let mut _list: Vec> = Vec::new(); $( - //println!("{}", stringify!($inner)); let inner = list!(@inner $inner); let _ = _list.push(inner); )* @@ -407,6 +407,30 @@ impl From for RealizedObject { } } +impl From for RealizedObject { + fn from(v: u64) -> Self { + RealizedObject::Value(OrdScalarValue::Uint(v)) + } +} + +impl From for RealizedObject { + fn from(v: u32) -> Self { + RealizedObject::Value(OrdScalarValue::Uint(v.into())) + } +} + +impl From for RealizedObject { + fn from(v: i64) -> Self { + RealizedObject::Value(OrdScalarValue::Int(v)) + } +} + +impl From for RealizedObject { + fn from(v: i32) -> Self { + RealizedObject::Value(OrdScalarValue::Int(v.into())) + } +} + impl From for RealizedObject { fn from(s: automerge::ScalarValue) -> Self { RealizedObject::Value(OrdScalarValue::from(s)) @@ -419,6 +443,20 @@ impl From<&str> for RealizedObject { } } +impl From> for RealizedObject { + fn from(vals: Vec) -> Self { + RealizedObject::Sequence( + vals.into_iter() + .map(|i| { + let mut set = BTreeSet::new(); + set.insert(i.into()); + set + }) + .collect(), + ) + } +} + /// Pretty print the contents of a document #[allow(dead_code)] pub fn pretty_print(doc: &automerge::Automerge) { diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index d74297e0..835dac05 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,7 +1,7 @@ use automerge::transaction::Transactable; use automerge::{ ActorId, ApplyOptions, AutoCommit, Automerge, AutomergeError, Change, ExpandedChange, ObjType, - ScalarValue, Value, VecOpObserver, ROOT, + ScalarValue, VecOpObserver, ROOT, }; mod helpers; @@ -884,33 +884,49 @@ fn list_counter_del() -> Result<(), automerge::AutomergeError> { doc1.merge(&mut doc2).unwrap(); doc1.merge(&mut doc3).unwrap(); - let values = doc1.get_all(&list, 1)?; - assert_eq!(values.len(), 3); - 
assert_eq!(&values[0].0, &Value::counter(1)); - assert_eq!(&values[1].0, &Value::counter(10)); - assert_eq!(&values[2].0, &Value::counter(100)); - - let values = doc1.get_all(&list, 2)?; - assert_eq!(values.len(), 3); - assert_eq!(&values[0].0, &Value::counter(1)); - assert_eq!(&values[1].0, &Value::counter(10)); - assert_eq!(&values[2].0, &Value::int(100)); + assert_obj!( + doc1.document(), + &automerge::ROOT, + "list", + list![ + { + "a", + }, + { + ScalarValue::counter(1), + ScalarValue::counter(10), + ScalarValue::counter(100) + }, + { + ScalarValue::Int(100), + ScalarValue::counter(1), + ScalarValue::counter(10), + } + ] + ); doc1.increment(&list, 1, 1)?; doc1.increment(&list, 2, 1)?; - let values = doc1.get_all(&list, 1)?; - assert_eq!(values.len(), 3); - assert_eq!(&values[0].0, &Value::counter(2)); - assert_eq!(&values[1].0, &Value::counter(11)); - assert_eq!(&values[2].0, &Value::counter(101)); - - let values = doc1.get_all(&list, 2)?; - assert_eq!(values.len(), 2); - assert_eq!(&values[0].0, &Value::counter(2)); - assert_eq!(&values[1].0, &Value::counter(11)); - - assert_eq!(doc1.length(&list), 3); + assert_obj!( + doc1.document(), + &automerge::ROOT, + "list", + list![ + { + "a", + }, + { + ScalarValue::counter(2), + ScalarValue::counter(11), + ScalarValue::counter(101) + }, + { + ScalarValue::counter(2), + ScalarValue::counter(11), + } + ] + ); doc1.delete(&list, 2)?; @@ -952,21 +968,21 @@ fn observe_counter_change_application() { fn increment_non_counter_map() { let mut doc = AutoCommit::new(); // can't increment nothing - assert_eq!( + assert!(matches!( doc.increment(ROOT, "nothing", 2), Err(AutomergeError::MissingCounter) - ); + )); // can't increment a non-counter doc.put(ROOT, "non-counter", "mystring").unwrap(); - assert_eq!( + assert!(matches!( doc.increment(ROOT, "non-counter", 2), Err(AutomergeError::MissingCounter) - ); + )); // can increment a counter still doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); - 
assert_eq!(doc.increment(ROOT, "counter", 2), Ok(())); + assert!(matches!(doc.increment(ROOT, "counter", 2), Ok(()))); // can increment a counter that is part of a conflict let mut doc1 = AutoCommit::new(); @@ -978,7 +994,7 @@ fn increment_non_counter_map() { doc2.put(ROOT, "key", "mystring").unwrap(); doc1.merge(&mut doc2).unwrap(); - assert_eq!(doc1.increment(ROOT, "key", 2), Ok(())); + assert!(matches!(doc1.increment(ROOT, "key", 2), Ok(()))); } #[test] @@ -988,14 +1004,14 @@ fn increment_non_counter_list() { // can't increment a non-counter doc.insert(&list, 0, "mystring").unwrap(); - assert_eq!( + assert!(matches!( doc.increment(&list, 0, 2), Err(AutomergeError::MissingCounter) - ); + )); // can increment a counter doc.insert(&list, 0, ScalarValue::counter(1)).unwrap(); - assert_eq!(doc.increment(&list, 0, 2), Ok(())); + assert!(matches!(doc.increment(&list, 0, 2), Ok(()))); // can increment a counter that is part of a conflict let mut doc1 = AutoCommit::new(); @@ -1009,7 +1025,7 @@ fn increment_non_counter_list() { doc2.put(&list, 0, "mystring").unwrap(); doc1.merge(&mut doc2).unwrap(); - assert_eq!(doc1.increment(&list, 0, 2), Ok(())); + assert!(matches!(doc1.increment(&list, 0, 2), Ok(()))); } #[test] From 252a7eb8a537454958be3d22b22b4358a3371d19 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 25 Jul 2022 15:54:48 +0100 Subject: [PATCH 539/730] Add automerge::Automerge::save_nocompress For some usecases the overhead of compressed columns in the document format is not worth it. Add `Automerge::save_nocompress` to save without compressing columns. 
Signed-off-by: Alex Good --- automerge/src/autocommit.rs | 6 ++++++ automerge/src/automerge.rs | 18 +++++++++++++++++- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 1233c1e0..126eec6a 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -157,6 +157,12 @@ impl AutoCommit { self.doc.save() } + #[cfg(feature = "storage-v2")] + pub fn save_nocompress(&mut self) -> Vec { + self.ensure_transaction_closed(); + self.doc.save_nocompress() + } + // should this return an empty vec instead of None? pub fn save_incremental(&mut self) -> Vec { self.ensure_transaction_closed(); diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index b211ee18..8ccf9aee 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -17,7 +17,7 @@ use crate::op_observer::OpObserver; use crate::op_set::OpSet; use crate::parents::Parents; #[cfg(feature = "storage-v2")] -use crate::storage::{self, load}; +use crate::storage::{self, load, CompressConfig}; use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner}; use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, @@ -931,6 +931,22 @@ impl Automerge { bytes } + #[cfg(feature = "storage-v2")] + pub fn save_nocompress(&mut self) -> Vec { + let heads = self.get_heads(); + let c = self.history.iter(); + let bytes = crate::storage::save::save_document( + c, + self.ops.iter(), + &self.ops.m.actors, + &self.ops.m.props, + &heads, + Some(CompressConfig::None), + ); + self.saved = self.get_heads(); + bytes + } + /// Save the changes since last save in a compact form. 
pub fn save_incremental(&mut self) -> Vec { let changes = self From 63dca26fe2fc122c96612662e9042c406d7e0296 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 25 Jul 2022 15:55:50 +0100 Subject: [PATCH 540/730] Additional tests for storage-v2 Various tests were required to cover edge cases in the new storage-v2 implementation. Signed-off-by: Alex Good --- .gitignore | 2 + automerge/Cargo.toml | 2 + automerge/tests/helpers/mod.rs | 19 ++- automerge/tests/test.rs | 289 +++++++++++++++++++++++++++++++++ 4 files changed, 310 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index eca9df3f..4ca7b595 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,5 @@ perf.* /Cargo.lock build/ +automerge/proptest-regressions/ +.vim/* diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index 4b9d2bd6..be1d924a 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -45,6 +45,8 @@ serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-featur maplit = { version = "^1.0" } decorum = "0.3.1" criterion = "0.3.5" +test-log = { version = "0.2.10", features=["trace"], default-features = false} +tracing-subscriber = {version = "0.3.9", features = ["fmt", "env-filter"] } [[bench]] name = "range" diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index 110470a9..38706d37 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -283,8 +283,23 @@ impl serde::Serialize for OrdScalarValue { where S: serde::Serializer, { - let s = automerge::ScalarValue::from(self); - s.serialize(serializer) + match self { + OrdScalarValue::Bytes(v) => serializer.serialize_bytes(v), + OrdScalarValue::Str(v) => serializer.serialize_str(v.as_str()), + OrdScalarValue::Int(v) => serializer.serialize_i64(*v), + OrdScalarValue::Uint(v) => serializer.serialize_u64(*v), + OrdScalarValue::F64(v) => serializer.serialize_f64(v.into_inner()), + OrdScalarValue::Counter(v) => { + serializer.serialize_str(format!("Counter({})", 
v).as_str()) + } + OrdScalarValue::Timestamp(v) => { + serializer.serialize_str(format!("Timestamp({})", v).as_str()) + } + OrdScalarValue::Boolean(v) => serializer.serialize_bool(*v), + OrdScalarValue::Null => serializer.serialize_none(), + OrdScalarValue::Unknown { type_code, .. } => serializer + .serialize_str(format!("An unknown type with code {}", type_code).as_str()), + } } } diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 835dac05..9b6246f8 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -4,6 +4,9 @@ use automerge::{ ScalarValue, VecOpObserver, ROOT, }; +// set up logging for all the tests +use test_log::test; + mod helpers; #[allow(unused_imports)] use helpers::{ @@ -849,6 +852,53 @@ fn handle_repeated_out_of_order_changes() -> Result<(), automerge::AutomergeErro Ok(()) } +#[test] +fn save_restore_complex_transactional() { + let mut doc1 = Automerge::new(); + let first_todo = doc1 + .transact::<_, _, automerge::AutomergeError>(|d| { + let todos = d.put_object(&automerge::ROOT, "todos", ObjType::List)?; + let first_todo = d.insert_object(&todos, 0, ObjType::Map)?; + d.put(&first_todo, "title", "water plants")?; + d.put(&first_todo, "done", false)?; + Ok(first_todo) + }) + .unwrap() + .result; + + let mut doc2 = Automerge::new(); + doc2.merge(&mut doc1).unwrap(); + doc2.transact::<_, _, automerge::AutomergeError>(|tx| { + tx.put(&first_todo, "title", "weed plants")?; + Ok(()) + }) + .unwrap(); + + doc1.transact::<_, _, automerge::AutomergeError>(|tx| { + tx.put(&first_todo, "title", "kill plants")?; + Ok(()) + }) + .unwrap(); + doc1.merge(&mut doc2).unwrap(); + + let reloaded = Automerge::load(&doc1.save()).unwrap(); + + assert_doc!( + &reloaded, + map! 
{ + "todos" => {list![ + {map!{ + "title" => { + "weed plants", + "kill plants", + }, + "done" => {false}, + }} + ]} + } + ); +} + #[test] fn list_counter_del() -> Result<(), automerge::AutomergeError> { let mut v = vec![ActorId::random(), ActorId::random(), ActorId::random()]; @@ -1028,6 +1078,226 @@ fn increment_non_counter_list() { assert!(matches!(doc1.increment(&list, 0, 2), Ok(()))); } +#[test] +fn test_local_inc_in_map() { + let mut v = vec![ActorId::random(), ActorId::random(), ActorId::random()]; + v.sort(); + let actor1 = v[0].clone(); + let actor2 = v[1].clone(); + let actor3 = v[2].clone(); + + let mut doc1 = new_doc_with_actor(actor1); + doc1.put(&automerge::ROOT, "hello", "world").unwrap(); + + let mut doc2 = AutoCommit::load(&doc1.save()).unwrap(); + doc2.set_actor(actor2); + + let mut doc3 = AutoCommit::load(&doc1.save()).unwrap(); + doc3.set_actor(actor3); + + doc1.put(ROOT, "cnt", 20_u64).unwrap(); + doc2.put(ROOT, "cnt", ScalarValue::counter(0)).unwrap(); + doc3.put(ROOT, "cnt", ScalarValue::counter(10)).unwrap(); + doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc3).unwrap(); + + assert_doc! {doc1.document(), map!{ + "cnt" => { + 20_u64, + ScalarValue::counter(0), + ScalarValue::counter(10), + }, + "hello" => {"world"}, + }}; + + doc1.increment(ROOT, "cnt", 5).unwrap(); + + assert_doc! {doc1.document(), map!{ + "cnt" => { + ScalarValue::counter(5), + ScalarValue::counter(15), + }, + "hello" => {"world"}, + }}; + let mut doc4 = AutoCommit::load(&doc1.save()).unwrap(); + assert_eq!(doc4.save(), doc1.save()); +} + +#[test] +fn test_merging_test_conflicts_then_saving_and_loading() { + let (actor1, actor2) = sorted_actors(); + + let mut doc1 = new_doc_with_actor(actor1); + let text = doc1.put_object(ROOT, "text", ObjType::Text).unwrap(); + doc1.splice(&text, 0, 0, "hello".chars().map(|c| c.to_string().into())) + .unwrap(); + + let mut doc2 = AutoCommit::load(&doc1.save()).unwrap(); + doc2.set_actor(actor2); + + assert_doc! 
{doc2.document(), map!{ + "text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"o"}]}, + }}; + + doc2.splice(&text, 4, 1, Vec::new()).unwrap(); + doc2.splice(&text, 4, 0, vec!["!".into()]).unwrap(); + doc2.splice(&text, 5, 0, vec![" ".into()]).unwrap(); + doc2.splice(&text, 6, 0, "world".chars().map(|c| c.into())) + .unwrap(); + + assert_doc!( + doc2.document(), + map! { + "text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"!"}, {" "}, {"w"} , {"o"}, {"r"}, {"l"}, {"d"}]} + } + ); + + let mut doc3 = AutoCommit::load(&doc2.save()).unwrap(); + + assert_doc!( + doc3.document(), + map! { + "text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"!"}, {" "}, {"w"} , {"o"}, {"r"}, {"l"}, {"d"}]} + } + ); +} + +/// Surfaces an error which occurs when loading a document with a change which only contains a +/// delete operation. In this case the delete operation doesn't appear in the encoded document +/// operations except as a succ, so the max_op was calculated incorectly. +#[test] +fn delete_only_change() { + let actor = automerge::ActorId::random(); + let mut doc1 = automerge::Automerge::new().with_actor(actor.clone()); + let list = doc1 + .transact::<_, _, automerge::AutomergeError>(|d| { + let l = d.put_object(&automerge::ROOT, "list", ObjType::List)?; + d.insert(&l, 0, 'a')?; + Ok(l) + }) + .unwrap() + .result; + + let mut doc2 = automerge::Automerge::load(&doc1.save()) + .unwrap() + .with_actor(actor.clone()); + doc2.transact::<_, _, automerge::AutomergeError>(|d| d.delete(&list, 0)) + .unwrap(); + + let mut doc3 = automerge::Automerge::load(&doc2.save()) + .unwrap() + .with_actor(actor.clone()); + doc3.transact(|d| d.insert(&list, 0, "b")).unwrap(); + + let doc4 = automerge::Automerge::load(&doc3.save()) + .unwrap() + .with_actor(actor); + + let changes = doc4.get_changes(&[]).unwrap(); + assert_eq!(changes.len(), 3); + let c = changes[2]; + assert_eq!(c.start_op().get(), 4); +} + +/// Expose an error where a document which contained a create operation without any subsequent +/// 
operations targeting the created object did not load the object correctly. +#[test] +fn save_and_reload_create_object() { + let actor = automerge::ActorId::random(); + let mut doc = automerge::Automerge::new().with_actor(actor); + + // Create a change containing an object but no other operations + let list = doc + .transact::<_, _, automerge::AutomergeError>(|d| { + d.put_object(&automerge::ROOT, "foo", ObjType::List) + }) + .unwrap() + .result; + + // Save and load the change + let mut doc2 = automerge::Automerge::load(&doc.save()).unwrap(); + doc2.transact::<_, _, automerge::AutomergeError>(|d| { + d.insert(&list, 0, 1_u64)?; + Ok(()) + }) + .unwrap(); + + assert_doc!(&doc2, map! {"foo" => { list! [{1_u64}]}}); + + let _doc3 = automerge::Automerge::load(&doc2.save()).unwrap(); +} + +#[test] +fn test_compressed_changes() { + let mut doc = new_doc(); + // crate::storage::DEFLATE_MIN_SIZE is 250, so this should trigger compression + doc.put(ROOT, "bytes", ScalarValue::Bytes(vec![10; 300])) + .unwrap(); + let mut change = doc.get_last_local_change().unwrap().clone(); + let uncompressed = change.raw_bytes().to_vec(); + assert!(uncompressed.len() > 256); + #[cfg(not(feature = "storage-v2"))] + change.compress(); + let compressed = change.compressed_bytes().to_vec(); + assert!(compressed.len() < uncompressed.len()); + + let reloaded = automerge::Change::try_from(&compressed[..]).unwrap(); + assert_eq!(change.raw_bytes(), reloaded.raw_bytes()); +} + +#[cfg(feature = "storage-v2")] +#[test] +fn test_compressed_doc_cols() { + // In this test, the keyCtr column is long enough for deflate compression to kick in, but the + // keyStr column is short. Thus, the deflate bit gets set for keyCtr but not for keyStr. + // When checking whether the columns appear in ascending order, we must ignore the deflate bit. 
+ let mut doc = new_doc(); + let list = doc.put_object(ROOT, "list", ObjType::List).unwrap(); + let mut expected = Vec::new(); + for i in 0..200 { + doc.insert(&list, i, i as u64).unwrap(); + expected.push(i as u64); + } + let uncompressed = doc.save_nocompress(); + let compressed = doc.save(); + assert!(compressed.len() < uncompressed.len()); + let loaded = automerge::Automerge::load(&compressed).unwrap(); + assert_doc!( + &loaded, + map! { + "list" => { expected} + } + ); +} + +#[cfg(feature = "storage-v2")] +#[test] +fn test_change_encoding_expanded_change_round_trip() { + let change_bytes: Vec = vec![ + 0x85, 0x6f, 0x4a, 0x83, // magic bytes + 0xb2, 0x98, 0x9e, 0xa9, // checksum + 1, 61, 0, 2, 0x12, 0x34, // chunkType: change, length, deps, actor '1234' + 1, 1, 252, 250, 220, 255, 5, // seq, startOp, time + 14, 73, 110, 105, 116, 105, 97, 108, 105, 122, 97, 116, 105, 111, + 110, // message: 'Initialization' + 0, 6, // actor list, column count + 0x15, 3, 0x34, 1, 0x42, 2, // keyStr, insert, action + 0x56, 2, 0x57, 1, 0x70, 2, // valLen, valRaw, predNum + 0x7f, 1, 0x78, // keyStr: 'x' + 1, // insert: false + 0x7f, 1, // action: set + 0x7f, 19, // valLen: 1 byte of type uint + 1, // valRaw: 1 + 0x7f, 0, // predNum: 0 + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, // 10 trailing bytes + ]; + let change = automerge::Change::try_from(&change_bytes[..]).unwrap(); + assert_eq!(change.raw_bytes(), change_bytes); + let expanded = automerge::ExpandedChange::from(&change); + let unexpanded: automerge::Change = expanded.try_into().unwrap(); + assert_eq!(unexpanded.raw_bytes(), change_bytes); +} + #[test] fn save_and_load_incremented_counter() { let mut doc = AutoCommit::new(); @@ -1047,3 +1317,22 @@ fn save_and_load_incremented_counter() { assert_eq!(changes1, changes2); } + +#[test] +fn load_incremental_with_corrupted_tail() { + let mut doc = AutoCommit::new(); + doc.put(ROOT, "key", ScalarValue::Str("value".into())) + .unwrap(); + doc.commit(); + let mut bytes = doc.save(); + 
bytes.extend_from_slice(&[1, 2, 3, 4]); + let mut loaded = Automerge::new(); + let loaded_len = loaded.load_incremental(&bytes).unwrap(); + assert_eq!(loaded_len, 1); + assert_doc!( + &loaded, + map! { + "key" => { "value" }, + } + ); +} From d53d107076157065bc229778e465b678f5cdef41 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 25 Jul 2022 16:03:30 +0100 Subject: [PATCH 541/730] Expose storage-v2 in automerge-c Signed-off-by: Alex Good --- automerge-c/Cargo.toml | 3 +++ automerge-c/src/change.rs | 7 +++++-- automerge-c/src/result.rs | 33 ++++++++++++++++++++++++++++++++ scripts/ci/build-test-storage-v2 | 1 + 4 files changed, 42 insertions(+), 2 deletions(-) diff --git a/automerge-c/Cargo.toml b/automerge-c/Cargo.toml index 851a3470..cff82536 100644 --- a/automerge-c/Cargo.toml +++ b/automerge-c/Cargo.toml @@ -6,6 +6,9 @@ edition = "2021" license = "MIT" rust-version = "1.57.0" +[features] +storage-v2 =[ "automerge/storage-v2" ] + [lib] name = "automerge" crate-type = ["cdylib", "staticlib"] diff --git a/automerge-c/src/change.rs b/automerge-c/src/change.rs index 29aacf8e..47c215ad 100644 --- a/automerge-c/src/change.rs +++ b/automerge-c/src/change.rs @@ -112,7 +112,7 @@ pub unsafe extern "C" fn AMchangeActorId(change: *const AMchange) -> *mut AMresu #[no_mangle] pub unsafe extern "C" fn AMchangeCompress(change: *mut AMchange) { if let Some(change) = change.as_mut() { - change.as_mut().compress(); + let _ = change.as_mut().compressed_bytes(); }; } @@ -362,5 +362,8 @@ pub unsafe extern "C" fn AMchangeRawBytes(change: *const AMchange) -> AMbyteSpan pub unsafe extern "C" fn AMchangeLoadDocument(src: *const u8, count: usize) -> *mut AMresult { let mut data = Vec::new(); data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(am::Change::load_document(&data)) + to_result::, _>>( + am::Automerge::load(&data) + .and_then(|d| d.get_changes(&[]).map(|c| c.into_iter().cloned().collect())), + ) } diff --git a/automerge-c/src/result.rs 
b/automerge-c/src/result.rs index 9b8c811d..744fa651 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -568,6 +568,7 @@ impl From> for AMresult { } } +#[cfg(not(feature = "storage-v2"))] impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { @@ -577,6 +578,16 @@ impl From> for AMresult { } } +#[cfg(feature = "storage-v2")] +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(change) => AMresult::Changes(vec![change], BTreeMap::new()), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { @@ -586,6 +597,7 @@ impl From> for AMresult { } } +#[cfg(not(feature = "storage-v2"))] impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { @@ -595,6 +607,17 @@ impl From> for AMresult { } } +#[cfg(feature = "storage-v2")] +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(message) => AMresult::SyncMessage(AMsyncMessage::new(message)), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + +#[cfg(not(feature = "storage-v2"))] impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { @@ -604,6 +627,16 @@ impl From> for AMresult { } } +#[cfg(feature = "storage-v2")] +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(state) => AMresult::SyncState(AMsyncState::new(state)), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { diff --git a/scripts/ci/build-test-storage-v2 b/scripts/ci/build-test-storage-v2 index 8d05552a..896cf613 100755 --- a/scripts/ci/build-test-storage-v2 +++ b/scripts/ci/build-test-storage-v2 @@ -2,5 +2,6 @@ set -eoux pipefail cargo build -p automerge --features storage-v2 --all-targets +cargo build -p automerge-c --features storage-v2 --all-targets RUST_LOG=error cargo test -p automerge --features 
storage-v2 From fc94d43e53619beca53bb8b6b4d41b3907ed5a24 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 25 Jul 2022 16:05:09 +0100 Subject: [PATCH 542/730] Expose storage-v2 in automerge-wasm Signed-off-by: Alex Good --- .github/workflows/ci.yaml | 18 ++++++++++++++++++ automerge-wasm/Cargo.toml | 1 + automerge-wasm/package.json | 8 +++++--- automerge-wasm/src/interop.rs | 21 ++++++++++++++++----- scripts/ci/build-test-storage-v2 | 1 + scripts/ci/js_tests_storage_v2 | 20 ++++++++++++++++++++ scripts/ci/run | 2 ++ scripts/ci/wasm_tests_storage_v2 | 6 ++++++ 8 files changed, 69 insertions(+), 8 deletions(-) create mode 100755 scripts/ci/js_tests_storage_v2 create mode 100755 scripts/ci/wasm_tests_storage_v2 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8ec3507f..38c5848c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -78,6 +78,15 @@ jobs: - name: run tests run: ./scripts/ci/wasm_tests + wasm_tests_storage_v2: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Install wasm-pack + run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh + - name: run tests + run: ./scripts/ci/wasm_tests + js_tests: runs-on: ubuntu-latest steps: @@ -87,6 +96,15 @@ jobs: - name: run tests run: ./scripts/ci/js_tests + js_tests_storage_v2: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Install wasm-pack + run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh + - name: run tests + run: ./scripts/ci/js_tests_storage_v2 + cmake_build: runs-on: ubuntu-latest steps: diff --git a/automerge-wasm/Cargo.toml b/automerge-wasm/Cargo.toml index f7668bfa..f513d99e 100644 --- a/automerge-wasm/Cargo.toml +++ b/automerge-wasm/Cargo.toml @@ -18,6 +18,7 @@ bench = false [features] # default = ["console_error_panic_hook", "wee_alloc"] default = ["console_error_panic_hook"] +storage-v2 =[ "automerge/storage-v2" ] [dependencies] console_error_panic_hook = { version 
= "^0.1", optional = true } diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 50744364..42c42e0b 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -27,11 +27,13 @@ "main": "./nodejs/index.js", "scripts": { "lint": "eslint test/*.ts", - "build": "cross-env PROFILE=dev TARGET=nodejs yarn target", + "build": "cross-env PROFILE=dev TARGET=nodejs FEATURES='' yarn target", + "build-storage-v2": "cross-env PROFILE=dev TARGET=nodejs FEATURES='--features=automerge-wasm/storage-v2' yarn target", "release": "cross-env PROFILE=release yarn buildall", "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=web yarn target", - "target": "rimraf ./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET && cp $TARGET-index.js $TARGET/index.js", - "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" + "target": "rimraf ./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET -- $FEATURES && cp $TARGET-index.js $TARGET/index.js", + "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts", + "test-storage-v2": "yarn build-storage-v2 && ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, "devDependencies": { "@types/expect": "^24.3.0", diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index bc17c018..be3b765c 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -147,11 +147,22 @@ impl TryFrom for Vec { let value = value.0.dyn_into::()?; let changes: Result, _> = value.iter().map(|j| j.dyn_into()).collect(); let changes = changes?; - let changes: Result, _> = changes - .iter() - .map(|a| Change::try_from(a.to_vec())) - .collect(); - let changes = changes.map_err(to_js_err)?; + #[cfg(not(feature = "storage-v2"))] + let changes = changes.iter().try_fold(Vec::new(), |mut acc, arr| { + match Change::try_from(arr.to_vec()) { + Ok(c) => 
acc.push(c), + Err(e) => return Err(to_js_err(e)), + } + Ok(acc) + })?; + #[cfg(feature = "storage-v2")] + let changes = changes.iter().try_fold(Vec::new(), |mut acc, arr| { + match automerge::Change::try_from(arr.to_vec().as_slice()) { + Ok(c) => acc.push(c), + Err(e) => return Err(to_js_err(e)), + } + Ok(acc) + })?; Ok(changes) } } diff --git a/scripts/ci/build-test-storage-v2 b/scripts/ci/build-test-storage-v2 index 896cf613..c72741cd 100755 --- a/scripts/ci/build-test-storage-v2 +++ b/scripts/ci/build-test-storage-v2 @@ -3,5 +3,6 @@ set -eoux pipefail cargo build -p automerge --features storage-v2 --all-targets cargo build -p automerge-c --features storage-v2 --all-targets +cargo build -p automerge-wasm --features storage-v2 --all-targets RUST_LOG=error cargo test -p automerge --features storage-v2 diff --git a/scripts/ci/js_tests_storage_v2 b/scripts/ci/js_tests_storage_v2 new file mode 100755 index 00000000..77485f73 --- /dev/null +++ b/scripts/ci/js_tests_storage_v2 @@ -0,0 +1,20 @@ +set -e + +THIS_SCRIPT=$(dirname "$0"); +WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; +JS_PROJECT=$THIS_SCRIPT/../../automerge-js; + +yarn --cwd $WASM_PROJECT install; +# This will take care of running wasm-pack +yarn --cwd $WASM_PROJECT build-storage-v2; +# If the dependencies are already installed we delete automerge-wasm. This makes +# this script usable for iterative development. 
+if [ -d $JS_PROJECT/node_modules/automerge-wasm ]; then + rm -rf $JS_PROJECT/node_modules/automerge-wasm +fi +# --check-files forces yarn to check if the local dep has changed +yarn --cwd $JS_PROJECT install --check-files; +yarn --cwd $JS_PROJECT test; + + + diff --git a/scripts/ci/run b/scripts/ci/run index 89b86277..caa3ca78 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -8,6 +8,8 @@ set -eou pipefail ./scripts/ci/rust-docs ./scripts/ci/advisory ./scripts/ci/wasm_tests +./scripts/ci/wasm_tests_storage_v2 ./scripts/ci/js_tests +./scripts/ci/js_tests_storage_v2 ./scripts/ci/cmake-build Release static ./scripts/ci/cmake-docs diff --git a/scripts/ci/wasm_tests_storage_v2 b/scripts/ci/wasm_tests_storage_v2 new file mode 100755 index 00000000..2ef62643 --- /dev/null +++ b/scripts/ci/wasm_tests_storage_v2 @@ -0,0 +1,6 @@ +THIS_SCRIPT=$(dirname "$0"); +WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; + +yarn --cwd $WASM_PROJECT install; +yarn --cwd $WASM_PROJECT build-storage-v2; +yarn --cwd $WASM_PROJECT test-storage-v2; From db4cb52750532b9b498486acf772dd1563761d56 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 25 Jul 2022 16:07:20 +0100 Subject: [PATCH 543/730] Add a storage-v2 feature flag to edit-trace Signed-off-by: Alex Good --- edit-trace/Cargo.toml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/edit-trace/Cargo.toml b/edit-trace/Cargo.toml index 217e686e..2b442d6f 100644 --- a/edit-trace/Cargo.toml +++ b/edit-trace/Cargo.toml @@ -3,7 +3,10 @@ name = "edit-trace" version = "0.1.0" edition = "2021" license = "MIT" -rust-version = "1.57.0" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[features] +storage-v2 =[ "automerge/storage-v2" ] [dependencies] automerge = { path = "../automerge" } @@ -11,10 +14,14 @@ criterion = "0.3.5" json = "0.12.4" rand = "^0.8" + [[bin]] name = "edit-trace" doc = false +bench = false [[bench]] +debug = true name = "main" harness = false + From 
8f2d4a494f5d3f2e49128c16e27971d3bda35a3f Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 26 Jul 2022 15:33:19 +0100 Subject: [PATCH 544/730] Test entire workspace for storage-v2 in CI Now that all crates support the storage-v2 feature flag of the automerge crate we update CI to run tests for '--workspace --all-features' Signed-off-by: Alex Good --- scripts/ci/build-test | 2 +- scripts/ci/build-test-storage-v2 | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/scripts/ci/build-test b/scripts/ci/build-test index f4b83d0f..0126ae2a 100755 --- a/scripts/ci/build-test +++ b/scripts/ci/build-test @@ -3,4 +3,4 @@ set -eoux pipefail cargo build --workspace --features optree-visualisation,wasm -RUST_LOG=error cargo test --workspace +RUST_LOG=error cargo test --workspace --features optree-visualisation,wasm diff --git a/scripts/ci/build-test-storage-v2 b/scripts/ci/build-test-storage-v2 index c72741cd..a31dd3d9 100755 --- a/scripts/ci/build-test-storage-v2 +++ b/scripts/ci/build-test-storage-v2 @@ -1,8 +1,6 @@ #!/usr/bin/env bash set -eoux pipefail -cargo build -p automerge --features storage-v2 --all-targets -cargo build -p automerge-c --features storage-v2 --all-targets -cargo build -p automerge-wasm --features storage-v2 --all-targets +cargo build --workspace --all-features --all-targets -RUST_LOG=error cargo test -p automerge --features storage-v2 +RUST_LOG=error cargo test --workspace --all-features From 632da04d60e7597536c4ac18de39828e26fb6f5f Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sun, 7 Aug 2022 08:40:06 -0700 Subject: [PATCH 545/730] Add the `-DFEATURE_FLAG_STORAGE_V2` CMake option for toggling the "storage-v2" feature flag in a Cargo invocation. Correct the `AMunknownValue` struct misnomer. Ease the rebasing of changes to the `AMvalue` struct declaration with pending upstream changes to same. 
--- automerge-c/CMakeLists.txt | 2 ++ automerge-c/src/CMakeLists.txt | 8 +++++++- automerge-c/src/change.rs | 4 +++- automerge-c/src/result.rs | 25 +++++++++++++++++-------- 4 files changed, 29 insertions(+), 10 deletions(-) diff --git a/automerge-c/CMakeLists.txt b/automerge-c/CMakeLists.txt index 68a5176a..05ee06eb 100644 --- a/automerge-c/CMakeLists.txt +++ b/automerge-c/CMakeLists.txt @@ -57,6 +57,8 @@ include(CTest) option(BUILD_SHARED_LIBS "Enable the choice of a shared or static library.") +option(FEATURE_FLAG_STORAGE_V2 "Toggle the \"storage-v2\" feature flag.") + include(CMakePackageConfigHelpers) include(GNUInstallDirs) diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt index 1b308b1c..f56d7ca8 100644 --- a/automerge-c/src/CMakeLists.txt +++ b/automerge-c/src/CMakeLists.txt @@ -23,6 +23,12 @@ else() set(CARGO_FLAG "--release") endif() +if(FEATURE_FLAG_STORAGE_V2) + set(CARGO_FEATURES --features storage-v2) +else() + set(CARGO_FEATURES "") +endif() + set(CARGO_CURRENT_BINARY_DIR "${CARGO_TARGET_DIR}/${CARGO_BUILD_TYPE}") set( @@ -47,7 +53,7 @@ add_custom_command( # updated. 
${CMAKE_COMMAND} -DCONDITION=NOT_EXISTS -P ${CMAKE_SOURCE_DIR}/cmake/file_touch.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h ${CMAKE_SOURCE_DIR}/cbindgen.toml COMMAND - ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} ${CARGO_CMD} build ${CARGO_FLAG} + ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} ${CARGO_CMD} build ${CARGO_FLAG} ${CARGO_FEATURES} MAIN_DEPENDENCY lib.rs DEPENDS diff --git a/automerge-c/src/change.rs b/automerge-c/src/change.rs index 47c215ad..07e89d81 100644 --- a/automerge-c/src/change.rs +++ b/automerge-c/src/change.rs @@ -40,7 +40,9 @@ impl AMchange { match c_msg.as_mut() { None => { if let Some(message) = unsafe { (*self.body).message() } { - return c_msg.insert(CString::new(message).unwrap()).as_ptr(); + return c_msg + .insert(CString::new(message.as_bytes()).unwrap()) + .as_ptr(); } } Some(message) => { diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 744fa651..c73765d1 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -83,6 +83,15 @@ use crate::sync::{AMsyncMessage, AMsyncState}; /// \var AMvalue::tag /// The variant discriminator. /// +/// \var AMvalue::sync_message +/// A synchronization message as a pointer to an `AMsyncMessage` struct. +/// +/// \var AMvalue::sync_state +/// A synchronization state as a pointer to an `AMsyncState` struct. +/// +/// \var AMvalue::tag +/// The variant discriminator. +/// /// \var AMvalue::timestamp /// A Lamport timestamp. /// @@ -134,7 +143,7 @@ pub enum AMvalue<'a> { /// A 64-bit unsigned integer variant. Uint(u64), /// An unknown type of scalar value variant. 
- Unknown(AMUnknownValue), + Unknown(AMunknownValue), } impl<'a> PartialEq for AMvalue<'a> { @@ -190,7 +199,7 @@ impl From<(&am::Value<'_>, &RefCell>)> for AMvalue<'_> { } am::ScalarValue::Timestamp(timestamp) => AMvalue::Timestamp(*timestamp), am::ScalarValue::Uint(uint) => AMvalue::Uint(*uint), - am::ScalarValue::Unknown { bytes, type_code } => AMvalue::Unknown(AMUnknownValue { + am::ScalarValue::Unknown { bytes, type_code } => AMvalue::Unknown(AMunknownValue { bytes: bytes.as_slice().into(), type_code: *type_code, }), @@ -259,7 +268,7 @@ impl TryFrom<&AMvalue<'_>> for am::ScalarValue { Timestamp(t) => Ok(am::ScalarValue::Timestamp(*t)), Uint(u) => Ok(am::ScalarValue::Uint(*u)), Null => Ok(am::ScalarValue::Null), - Unknown(AMUnknownValue { bytes, type_code }) => { + Unknown(AMunknownValue { bytes, type_code }) => { let slice = unsafe { std::slice::from_raw_parts(bytes.src, bytes.count) }; Ok(am::ScalarValue::Unknown { bytes: slice.to_vec(), @@ -582,7 +591,7 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(change) => AMresult::Changes(vec![change], BTreeMap::new()), + Ok(change) => AMresult::Changes(vec![change], None), Err(e) => AMresult::err(&e.to_string()), } } @@ -631,7 +640,7 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(state) => AMresult::SyncState(AMsyncState::new(state)), + Ok(state) => AMresult::SyncState(Box::new(AMsyncState::new(state))), Err(e) => AMresult::err(&e.to_string()), } } @@ -928,12 +937,12 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> content } -/// \struct AMUknownValue -/// \brief A value (typically for a 'set' operation) which we don't know the type of +/// \struct AMunknownValue +/// \brief A value (typically for a `set` operation) whose type is unknown. 
/// #[derive(PartialEq)] #[repr(C)] -pub struct AMUnknownValue { +pub struct AMunknownValue { bytes: AMbyteSpan, type_code: u8, } From 9c86c09aaae24e55bf1905c92a66a6ec901f887f Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sun, 7 Aug 2022 17:03:10 +0100 Subject: [PATCH 546/730] Rename Change::compressed_bytes -> Change::bytes --- .github/workflows/ci.yaml | 2 +- automerge-c/src/change.rs | 2 +- automerge/src/change.rs | 2 +- automerge/src/change_v2.rs | 2 +- automerge/src/sync.rs | 4 ++-- automerge/tests/test.rs | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 38c5848c..b5ccfc4b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -85,7 +85,7 @@ jobs: - name: Install wasm-pack run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh - name: run tests - run: ./scripts/ci/wasm_tests + run: ./scripts/ci/wasm_tests_storage_v2 js_tests: runs-on: ubuntu-latest diff --git a/automerge-c/src/change.rs b/automerge-c/src/change.rs index 07e89d81..78df5d14 100644 --- a/automerge-c/src/change.rs +++ b/automerge-c/src/change.rs @@ -114,7 +114,7 @@ pub unsafe extern "C" fn AMchangeActorId(change: *const AMchange) -> *mut AMresu #[no_mangle] pub unsafe extern "C" fn AMchangeCompress(change: *mut AMchange) { if let Some(change) = change.as_mut() { - let _ = change.as_mut().compressed_bytes(); + let _ = change.as_mut().bytes(); }; } diff --git a/automerge/src/change.rs b/automerge/src/change.rs index f14b2025..29596e3e 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -427,7 +427,7 @@ impl Change { self.bytes.compress(self.body_start); } - pub fn compressed_bytes(&self) -> &[u8] { + pub fn bytes(&self) -> &[u8] { match &self.bytes { ChangeBytes::Compressed { compressed, .. 
} => compressed, ChangeBytes::Uncompressed(uncompressed) => uncompressed, diff --git a/automerge/src/change_v2.rs b/automerge/src/change_v2.rs index 834c7d99..128eaaa8 100644 --- a/automerge/src/change_v2.rs +++ b/automerge/src/change_v2.rs @@ -88,7 +88,7 @@ impl Change { self.stored.timestamp() } - pub fn compressed_bytes(&mut self) -> Cow<'_, [u8]> { + pub fn bytes(&mut self) -> Cow<'_, [u8]> { if let CompressionState::NotCompressed = self.compression { if let Some(compressed) = self.stored.compress() { self.compression = CompressionState::Compressed(compressed); diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index f2309b4c..0566acb0 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -400,7 +400,7 @@ impl Message { encode_many(&mut buf, self.changes.iter_mut(), |buf, change| { leb128::write::unsigned(buf, change.raw_bytes().len() as u64).unwrap(); - buf.extend(change.compressed_bytes().as_ref()) + buf.extend(change.bytes().as_ref()) }); buf @@ -421,7 +421,7 @@ impl Message { (self.changes.len() as u32).encode_vec(&mut buf); for mut change in self.changes { change.compress(); - change.compressed_bytes().encode_vec(&mut buf); + change.bytes().encode_vec(&mut buf); } buf diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 9b6246f8..d19ffcfb 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1238,7 +1238,7 @@ fn test_compressed_changes() { assert!(uncompressed.len() > 256); #[cfg(not(feature = "storage-v2"))] change.compress(); - let compressed = change.compressed_bytes().to_vec(); + let compressed = change.bytes().to_vec(); assert!(compressed.len() < uncompressed.len()); let reloaded = automerge::Change::try_from(&compressed[..]).unwrap(); From 9ac8827219a45cc3bae9927755aae7fc071a4d38 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 15 Aug 2022 12:52:44 +0100 Subject: [PATCH 547/730] Remove storage-v2 feature flag Signed-off-by: Alex Good --- .github/workflows/ci.yaml | 61 - 
automerge-c/CMakeLists.txt | 2 - automerge-c/Cargo.toml | 3 - automerge-c/src/CMakeLists.txt | 6 +- automerge-c/src/change.rs | 7 +- automerge-c/src/result.rs | 33 - automerge-wasm/Cargo.toml | 1 - automerge-wasm/package.json | 4 +- automerge-wasm/src/interop.rs | 9 - automerge/Cargo.toml | 1 - automerge/src/autocommit.rs | 1 - automerge/src/automerge.rs | 74 +- automerge/src/change.rs | 1219 +++------------ automerge/src/change_v2.rs | 315 ---- automerge/src/columnar.rs | 1382 +---------------- .../{columnar_2 => columnar}/column_range.rs | 0 .../column_range/boolean.rs | 2 +- .../column_range/delta.rs | 4 +- .../column_range/deps.rs | 2 +- .../column_range/generic.rs | 2 +- .../column_range/generic/group.rs | 2 +- .../column_range/generic/simple.rs | 2 +- .../column_range/key.rs | 2 +- .../column_range/obj_id.rs | 2 +- .../column_range/opid.rs | 4 +- .../column_range/opid_list.rs | 4 +- .../column_range/raw.rs | 2 +- .../column_range/rle.rs | 4 +- .../column_range/value.rs | 8 +- .../src/{columnar_2 => columnar}/encoding.rs | 0 .../encoding/boolean.rs | 0 .../encoding/col_error.rs | 0 .../encoding/column_decoder.rs | 2 +- .../encoding/decodable_impls.rs | 0 .../encoding/delta.rs | 0 .../encoding/encodable_impls.rs | 0 .../encoding/leb128.rs | 0 .../encoding/properties.rs | 2 +- .../{columnar_2 => columnar}/encoding/raw.rs | 0 .../{columnar_2 => columnar}/encoding/rle.rs | 0 .../{columnar_2 => columnar}/splice_error.rs | 0 automerge/src/columnar_2.rs | 14 - automerge/src/encoding.rs | 391 ----- automerge/src/error.rs | 14 - automerge/src/indexed_cache.rs | 1 - automerge/src/lib.rs | 24 +- automerge/src/op_set.rs | 25 - automerge/src/op_tree.rs | 1 - .../src/storage/change/change_op_columns.rs | 4 +- automerge/src/storage/chunk.rs | 2 +- automerge/src/storage/columns/column.rs | 2 +- .../src/storage/columns/column_builder.rs | 2 +- .../storage/document/doc_change_columns.rs | 2 +- .../src/storage/document/doc_op_columns.rs | 2 +- automerge/src/storage/load.rs | 2 
+- .../src/storage/load/reconstruct_document.rs | 4 +- automerge/src/sync.rs | 120 +- automerge/src/sync/bloom.rs | 46 - automerge/src/sync/state.rs | 34 - automerge/src/transaction/inner.rs | 13 - automerge/src/types.rs | 9 - automerge/src/types/opids.rs | 6 - automerge/src/value.rs | 1 - automerge/tests/test.rs | 4 - edit-trace/Cargo.toml | 4 - scripts/ci/build-test | 4 +- scripts/ci/build-test-storage-v2 | 6 - scripts/ci/js_tests_storage_v2 | 20 - scripts/ci/lint | 3 +- scripts/ci/run | 3 - scripts/ci/wasm_tests_storage_v2 | 6 - 71 files changed, 305 insertions(+), 3626 deletions(-) delete mode 100644 automerge/src/change_v2.rs rename automerge/src/{columnar_2 => columnar}/column_range.rs (100%) rename automerge/src/{columnar_2 => columnar}/column_range/boolean.rs (93%) rename automerge/src/{columnar_2 => columnar}/column_range/delta.rs (97%) rename automerge/src/{columnar_2 => columnar}/column_range/deps.rs (97%) rename automerge/src/{columnar_2 => columnar}/column_range/generic.rs (97%) rename automerge/src/{columnar_2 => columnar}/column_range/generic/group.rs (99%) rename automerge/src/{columnar_2 => columnar}/column_range/generic/simple.rs (98%) rename automerge/src/{columnar_2 => columnar}/column_range/key.rs (99%) rename automerge/src/{columnar_2 => columnar}/column_range/obj_id.rs (99%) rename automerge/src/{columnar_2 => columnar}/column_range/opid.rs (98%) rename automerge/src/{columnar_2 => columnar}/column_range/opid_list.rs (99%) rename automerge/src/{columnar_2 => columnar}/column_range/raw.rs (94%) rename automerge/src/{columnar_2 => columnar}/column_range/rle.rs (98%) rename automerge/src/{columnar_2 => columnar}/column_range/value.rs (99%) rename automerge/src/{columnar_2 => columnar}/encoding.rs (100%) rename automerge/src/{columnar_2 => columnar}/encoding/boolean.rs (100%) rename automerge/src/{columnar_2 => columnar}/encoding/col_error.rs (100%) rename automerge/src/{columnar_2 => columnar}/encoding/column_decoder.rs (99%) rename 
automerge/src/{columnar_2 => columnar}/encoding/decodable_impls.rs (100%) rename automerge/src/{columnar_2 => columnar}/encoding/delta.rs (100%) rename automerge/src/{columnar_2 => columnar}/encoding/encodable_impls.rs (100%) rename automerge/src/{columnar_2 => columnar}/encoding/leb128.rs (100%) rename automerge/src/{columnar_2 => columnar}/encoding/properties.rs (99%) rename automerge/src/{columnar_2 => columnar}/encoding/raw.rs (100%) rename automerge/src/{columnar_2 => columnar}/encoding/rle.rs (100%) rename automerge/src/{columnar_2 => columnar}/splice_error.rs (100%) delete mode 100644 automerge/src/columnar_2.rs delete mode 100644 automerge/src/encoding.rs delete mode 100755 scripts/ci/build-test-storage-v2 delete mode 100755 scripts/ci/js_tests_storage_v2 delete mode 100755 scripts/ci/wasm_tests_storage_v2 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b5ccfc4b..4fc75fef 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -78,15 +78,6 @@ jobs: - name: run tests run: ./scripts/ci/wasm_tests - wasm_tests_storage_v2: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Install wasm-pack - run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh - - name: run tests - run: ./scripts/ci/wasm_tests_storage_v2 - js_tests: runs-on: ubuntu-latest steps: @@ -96,15 +87,6 @@ jobs: - name: run tests run: ./scripts/ci/js_tests - js_tests_storage_v2: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Install wasm-pack - run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh - - name: run tests - run: ./scripts/ci/js_tests_storage_v2 - cmake_build: runs-on: ubuntu-latest steps: @@ -169,46 +151,3 @@ jobs: - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test shell: bash - - linux-storage-v2: - name: 'storage-v2: Linux' - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions-rs/toolchain@v1 - with: - profile: 
minimal - toolchain: 1.60.0 - default: true - - uses: Swatinem/rust-cache@v1 - - run: ./scripts/ci/build-test-storage-v2 - shell: bash - - macos-storage-2: - name: 'storage-v2: MacOS' - runs-on: macos-latest - steps: - - uses: actions/checkout@v2 - - uses: actions-rs/toolchain@v1 - with: - profile: minimal - toolchain: 1.60.0 - default: true - - uses: Swatinem/rust-cache@v1 - - run: ./scripts/ci/build-test-storage-v2 - shell: bash - - windows-storage-v2: - name: 'storage-v2: Windows' - runs-on: windows-latest - steps: - - uses: actions/checkout@v2 - - uses: actions-rs/toolchain@v1 - with: - profile: minimal - toolchain: 1.60.0 - default: true - - uses: Swatinem/rust-cache@v1 - - run: ./scripts/ci/build-test-storage-v2 - shell: bash - diff --git a/automerge-c/CMakeLists.txt b/automerge-c/CMakeLists.txt index 05ee06eb..68a5176a 100644 --- a/automerge-c/CMakeLists.txt +++ b/automerge-c/CMakeLists.txt @@ -57,8 +57,6 @@ include(CTest) option(BUILD_SHARED_LIBS "Enable the choice of a shared or static library.") -option(FEATURE_FLAG_STORAGE_V2 "Toggle the \"storage-v2\" feature flag.") - include(CMakePackageConfigHelpers) include(GNUInstallDirs) diff --git a/automerge-c/Cargo.toml b/automerge-c/Cargo.toml index cff82536..851a3470 100644 --- a/automerge-c/Cargo.toml +++ b/automerge-c/Cargo.toml @@ -6,9 +6,6 @@ edition = "2021" license = "MIT" rust-version = "1.57.0" -[features] -storage-v2 =[ "automerge/storage-v2" ] - [lib] name = "automerge" crate-type = ["cdylib", "staticlib"] diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt index f56d7ca8..b152616a 100644 --- a/automerge-c/src/CMakeLists.txt +++ b/automerge-c/src/CMakeLists.txt @@ -23,11 +23,7 @@ else() set(CARGO_FLAG "--release") endif() -if(FEATURE_FLAG_STORAGE_V2) - set(CARGO_FEATURES --features storage-v2) -else() - set(CARGO_FEATURES "") -endif() +set(CARGO_FEATURES "") set(CARGO_CURRENT_BINARY_DIR "${CARGO_TARGET_DIR}/${CARGO_BUILD_TYPE}") diff --git a/automerge-c/src/change.rs 
b/automerge-c/src/change.rs index 78df5d14..564cb12f 100644 --- a/automerge-c/src/change.rs +++ b/automerge-c/src/change.rs @@ -61,10 +61,7 @@ impl AMchange { let ptr = c_changehash.insert(hash); AMbyteSpan { src: ptr.0.as_ptr(), - #[cfg(feature = "storage-v2")] count: hash.as_ref().len(), - #[cfg(not(feature = "storage-v2"))] - count: hash.0.len(), } } } @@ -188,9 +185,7 @@ pub unsafe extern "C" fn AMchangeFromBytes(src: *const u8, count: usize) -> *mut #[no_mangle] pub unsafe extern "C" fn AMchangeHash(change: *const AMchange) -> AMbyteSpan { match change.as_ref() { - Some(change) => { - change.hash() - } + Some(change) => change.hash(), None => AMbyteSpan::default(), } } diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index c73765d1..071db18f 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -577,17 +577,6 @@ impl From> for AMresult { } } -#[cfg(not(feature = "storage-v2"))] -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(change) => AMresult::Changes(vec![change], None), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -#[cfg(feature = "storage-v2")] impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { @@ -606,17 +595,6 @@ impl From> for AMresult { } } -#[cfg(not(feature = "storage-v2"))] -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(message) => AMresult::SyncMessage(AMsyncMessage::new(message)), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -#[cfg(feature = "storage-v2")] impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { @@ -626,17 +604,6 @@ impl From> for AMresult { } } -#[cfg(not(feature = "storage-v2"))] -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(state) => AMresult::SyncState(Box::new(AMsyncState::new(state))), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -#[cfg(feature = "storage-v2")] impl From> for AMresult { fn from(maybe: Result) 
-> Self { match maybe { diff --git a/automerge-wasm/Cargo.toml b/automerge-wasm/Cargo.toml index f513d99e..f7668bfa 100644 --- a/automerge-wasm/Cargo.toml +++ b/automerge-wasm/Cargo.toml @@ -18,7 +18,6 @@ bench = false [features] # default = ["console_error_panic_hook", "wee_alloc"] default = ["console_error_panic_hook"] -storage-v2 =[ "automerge/storage-v2" ] [dependencies] console_error_panic_hook = { version = "^0.1", optional = true } diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 42c42e0b..0410dd52 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -28,12 +28,10 @@ "scripts": { "lint": "eslint test/*.ts", "build": "cross-env PROFILE=dev TARGET=nodejs FEATURES='' yarn target", - "build-storage-v2": "cross-env PROFILE=dev TARGET=nodejs FEATURES='--features=automerge-wasm/storage-v2' yarn target", "release": "cross-env PROFILE=release yarn buildall", "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=web yarn target", "target": "rimraf ./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET -- $FEATURES && cp $TARGET-index.js $TARGET/index.js", - "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts", - "test-storage-v2": "yarn build-storage-v2 && ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" + "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, "devDependencies": { "@types/expect": "^24.3.0", diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index be3b765c..1d43adc9 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -147,15 +147,6 @@ impl TryFrom for Vec { let value = value.0.dyn_into::()?; let changes: Result, _> = value.iter().map(|j| j.dyn_into()).collect(); let changes = changes?; - #[cfg(not(feature = "storage-v2"))] - let changes = changes.iter().try_fold(Vec::new(), |mut acc, arr| { - match 
Change::try_from(arr.to_vec()) { - Ok(c) => acc.push(c), - Err(e) => return Err(to_js_err(e)), - } - Ok(acc) - })?; - #[cfg(feature = "storage-v2")] let changes = changes.iter().try_fold(Vec::new(), |mut acc, arr| { match automerge::Change::try_from(arr.to_vec().as_slice()) { Ok(c) => acc.push(c), diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index be1d924a..d6653e56 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -11,7 +11,6 @@ description = "A JSON-like data structure (a CRDT) that can be modified concurre [features] optree-visualisation = ["dot", "rand"] wasm = ["js-sys", "wasm-bindgen", "web-sys", "uuid/wasm-bindgen"] -storage-v2 = [] [dependencies] hex = "^0.4.3" diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 126eec6a..2f41cee4 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -157,7 +157,6 @@ impl AutoCommit { self.doc.save() } - #[cfg(feature = "storage-v2")] pub fn save_nocompress(&mut self) -> Vec { self.ensure_transaction_closed(); self.doc.save_nocompress() diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 8ccf9aee..6c0cd6dd 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -4,27 +4,20 @@ use std::fmt::Debug; use std::num::NonZeroU64; use std::ops::RangeBounds; -#[cfg(not(feature = "storage-v2"))] -use crate::change::encode_document; use crate::clock::ClockData; -#[cfg(feature = "storage-v2")] use crate::clocks::Clocks; -#[cfg(feature = "storage-v2")] -use crate::columnar_2::Key as EncodedKey; +use crate::columnar::Key as EncodedKey; use crate::exid::ExId; use crate::keys::Keys; use crate::op_observer::OpObserver; use crate::op_set::OpSet; use crate::parents::Parents; -#[cfg(feature = "storage-v2")] use crate::storage::{self, load, CompressConfig}; use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner}; use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, 
Key, ObjId, Op, OpId, OpType, ScalarValue, Value, }; -#[cfg(not(feature = "storage-v2"))] -use crate::{legacy, types}; use crate::{ query, ApplyOptions, AutomergeError, Change, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, Prop, Values, @@ -143,10 +136,6 @@ impl Automerge { start_op: NonZeroU64::new(self.max_op + 1).unwrap(), time: 0, message: None, - #[cfg(not(feature = "storage-v2"))] - extra_bytes: Default::default(), - #[cfg(not(feature = "storage-v2"))] - hash: None, operations: vec![], deps, } @@ -602,18 +591,6 @@ impl Automerge { } /// Load a document. - #[cfg(not(feature = "storage-v2"))] - pub fn load_with( - data: &[u8], - options: ApplyOptions<'_, Obs>, - ) -> Result { - let changes = Change::load_document(data)?; - let mut doc = Self::new(); - doc.apply_changes_with(changes, options)?; - Ok(doc) - } - - #[cfg(feature = "storage-v2")] pub fn load_with( data: &[u8], mut options: ApplyOptions<'_, Obs>, @@ -705,9 +682,6 @@ impl Automerge { data: &[u8], options: ApplyOptions<'_, Obs>, ) -> Result { - #[cfg(not(feature = "storage-v2"))] - let changes = Change::load_document(data)?; - #[cfg(feature = "storage-v2")] let changes = match load::load_changes(storage::parse::Input::new(data)) { load::LoadedChanges::Complete(c) => c, load::LoadedChanges::Partial { error, loaded, .. 
} => { @@ -800,42 +774,6 @@ impl Automerge { None } - #[cfg(not(feature = "storage-v2"))] - fn import_ops(&mut self, change: &Change) -> Vec<(ObjId, Op)> { - change - .iter_ops() - .enumerate() - .map(|(i, c)| { - let actor = self.ops.m.actors.cache(change.actor_id().clone()); - let id = OpId(change.start_op.get() + i as u64, actor); - let obj = match c.obj { - legacy::ObjectId::Root => ObjId::root(), - legacy::ObjectId::Id(id) => ObjId(OpId(id.0, self.ops.m.actors.cache(id.1))), - }; - let pred = self.ops.m.import_opids(c.pred); - let key = match &c.key { - legacy::Key::Map(n) => Key::Map(self.ops.m.props.cache(n.to_string())), - legacy::Key::Seq(legacy::ElementId::Head) => Key::Seq(types::HEAD), - legacy::Key::Seq(legacy::ElementId::Id(i)) => { - Key::Seq(ElemId(OpId(i.0, self.ops.m.actors.cache(i.1.clone())))) - } - }; - ( - obj, - Op { - id, - action: c.action, - key, - succ: Default::default(), - pred, - insert: c.insert, - }, - ) - }) - .collect() - } - - #[cfg(feature = "storage-v2")] fn import_ops(&mut self, change: &Change) -> Vec<(ObjId, Op)> { let actor = self.ops.m.actors.cache(change.actor_id().clone()); let mut actors = Vec::with_capacity(change.other_actor_ids().len() + 1); @@ -910,15 +848,6 @@ impl Automerge { pub fn save(&mut self) -> Vec { let heads = self.get_heads(); let c = self.history.iter(); - #[cfg(not(feature = "storage-v2"))] - let bytes = encode_document( - heads, - c, - self.ops.iter(), - &self.ops.m.actors, - &self.ops.m.props.cache, - ); - #[cfg(feature = "storage-v2")] let bytes = crate::storage::save::save_document( c, self.ops.iter(), @@ -931,7 +860,6 @@ impl Automerge { bytes } - #[cfg(feature = "storage-v2")] pub fn save_nocompress(&mut self) -> Vec { let heads = self.get_heads(); let c = self.history.iter(); diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 29596e3e..3c45a524 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -1,1040 +1,315 @@ -use crate::columnar::{ - ChangeEncoder, 
ChangeIterator, ColumnEncoder, DepsIterator, DocChange, DocOp, DocOpEncoder, - DocOpIterator, OperationIterator, COLUMN_TYPE_DEFLATE, -}; -use crate::decoding; -use crate::decoding::{Decodable, InvalidChangeError}; -use crate::encoding::{Encodable, DEFLATE_MIN_SIZE}; -use crate::error::AutomergeError; -use crate::indexed_cache::IndexedCache; -use crate::legacy as amp; -use crate::transaction::TransactionInner; -use crate::types; -use crate::types::{ActorId, ElemId, Key, ObjId, Op, OpId, OpType}; -use core::ops::Range; -use flate2::{ - bufread::{DeflateDecoder, DeflateEncoder}, - Compression, -}; -use itertools::Itertools; -use sha2::Digest; -use sha2::Sha256; -use std::collections::{HashMap, HashSet}; -use std::convert::TryInto; -use std::fmt::Debug; -use std::io::{Read, Write}; -use std::num::NonZeroU64; -use tracing::instrument; +use std::{borrow::Cow, num::NonZeroU64}; -const MAGIC_BYTES: [u8; 4] = [0x85, 0x6f, 0x4a, 0x83]; -const PREAMBLE_BYTES: usize = 8; -const HEADER_BYTES: usize = PREAMBLE_BYTES + 1; - -const HASH_BYTES: usize = 32; -const BLOCK_TYPE_DOC: u8 = 0; -const BLOCK_TYPE_CHANGE: u8 = 1; -const BLOCK_TYPE_DEFLATE: u8 = 2; -const CHUNK_START: usize = 8; -const HASH_RANGE: Range = 4..8; - -pub(crate) fn encode_document<'a, 'b>( - heads: Vec, - changes: impl Iterator, - doc_ops: impl Iterator, - actors_index: &IndexedCache, - props: &'a [String], -) -> Vec { - let mut bytes: Vec = Vec::new(); - - let actors_map = actors_index.encode_index(); - let actors = actors_index.sorted(); - - /* - // this assumes that all actor_ids referenced are seen in changes.actor_id which is true - // so long as we have a full history - let mut actors: Vec<_> = changes - .iter() - .map(|c| &c.actor) - .unique() - .sorted() - .cloned() - .collect(); - */ - - let (change_bytes, change_info) = ChangeEncoder::encode_changes(changes, &actors); - - //let doc_ops = group_doc_ops(changes, &actors); - - let (ops_bytes, ops_info) = DocOpEncoder::encode_doc_ops(doc_ops, &actors_map, 
props); - - bytes.extend(MAGIC_BYTES); - bytes.extend([0, 0, 0, 0]); // we dont know the hash yet so fill in a fake - bytes.push(BLOCK_TYPE_DOC); - - let mut chunk = Vec::new(); - - actors.len().encode_vec(&mut chunk); - - for a in actors.into_iter() { - a.to_bytes().encode_vec(&mut chunk); - } - - heads.len().encode_vec(&mut chunk); - for head in heads.iter() { - chunk.write_all(&head.0).unwrap(); - } - - chunk.extend(change_info); - chunk.extend(ops_info); - - chunk.extend(change_bytes); - chunk.extend(ops_bytes); - - leb128::write::unsigned(&mut bytes, chunk.len() as u64).unwrap(); - - bytes.extend(&chunk); - - let hash_result = Sha256::digest(&bytes[CHUNK_START..bytes.len()]); - - bytes.splice(HASH_RANGE, hash_result[0..4].iter().copied()); - - bytes -} - -/// When encoding a change we take all the actor IDs referenced by a change and place them in an -/// array. The array has the actor who authored the change as the first element and all remaining -/// actors (i.e. those referenced in object IDs in the target of an operation or in the `pred` of -/// an operation) lexicographically ordered following the change author. 
-fn actor_ids_in_change(change: &::Change) -> Vec { - let mut other_ids: Vec<&::ActorId> = change - .operations - .iter() - .flat_map(opids_in_operation) - .filter(|a| *a != &change.actor_id) - .unique() - .collect(); - other_ids.sort(); - // Now prepend the change actor - std::iter::once(&change.actor_id) - .chain(other_ids.into_iter()) - .cloned() - .collect() -} - -fn opids_in_operation(op: &::Op) -> impl Iterator { - let obj_actor_id = match &op.obj { - amp::ObjectId::Root => None, - amp::ObjectId::Id(opid) => Some(opid.actor()), - }; - let pred_ids = op.pred.iter().map(amp::OpId::actor); - let key_actor = match &op.key { - amp::Key::Seq(amp::ElementId::Id(i)) => Some(i.actor()), - _ => None, - }; - obj_actor_id - .into_iter() - .chain(key_actor.into_iter()) - .chain(pred_ids) -} - -impl From for Change { - fn from(value: amp::Change) -> Self { - encode(&value) - } -} - -impl From<&::Change> for Change { - fn from(value: &::Change) -> Self { - encode(value) - } -} - -fn encode(change: &::Change) -> Change { - let mut deps = change.deps.clone(); - deps.sort_unstable(); - - let mut chunk = encode_chunk(change, &deps); - - let mut bytes = Vec::with_capacity(MAGIC_BYTES.len() + 4 + chunk.bytes.len()); - - bytes.extend(&MAGIC_BYTES); - - bytes.extend(vec![0, 0, 0, 0]); // we dont know the hash yet so fill in a fake - - bytes.push(BLOCK_TYPE_CHANGE); - - leb128::write::unsigned(&mut bytes, chunk.bytes.len() as u64).unwrap(); - - let body_start = bytes.len(); - - increment_range(&mut chunk.body, bytes.len()); - increment_range(&mut chunk.message, bytes.len()); - increment_range(&mut chunk.extra_bytes, bytes.len()); - increment_range_map(&mut chunk.ops, bytes.len()); - - bytes.extend(&chunk.bytes); - - let hash_result = Sha256::digest(&bytes[CHUNK_START..bytes.len()]); - let hash: amp::ChangeHash = hash_result[..].try_into().unwrap(); - - bytes.splice(HASH_RANGE, hash_result[0..4].iter().copied()); - - // any time I make changes to the encoder decoder its a good idea - 
// to run it through a round trip to detect errors the tests might not - // catch - // let c0 = Change::from_bytes(bytes.clone()).unwrap(); - // std::assert_eq!(c1, c0); - // perhaps we should add something like this to the test suite - - let bytes = ChangeBytes::Uncompressed(bytes); - - Change { - bytes, - body_start, - hash, - seq: change.seq, - start_op: change.start_op, - time: change.time, - actors: chunk.actors, - message: chunk.message, - deps, - ops: chunk.ops, - extra_bytes: chunk.extra_bytes, - num_ops: change.operations.len(), - } -} - -struct ChunkIntermediate { - bytes: Vec, - body: Range, - actors: Vec, - message: Range, - ops: HashMap>, - extra_bytes: Range, -} - -fn encode_chunk(change: &::Change, deps: &[amp::ChangeHash]) -> ChunkIntermediate { - let mut bytes = Vec::new(); - - // All these unwraps are okay because we're writing to an in memory buffer so io erros should - // not happen - - // encode deps - deps.len().encode(&mut bytes).unwrap(); - for hash in deps.iter() { - bytes.write_all(&hash.0).unwrap(); - } - - let actors = actor_ids_in_change(change); - change.actor_id.to_bytes().encode(&mut bytes).unwrap(); - - // encode seq, start_op, time, message - change.seq.encode(&mut bytes).unwrap(); - change.start_op.encode(&mut bytes).unwrap(); - change.time.encode(&mut bytes).unwrap(); - let message = bytes.len() + 1; - change.message.encode(&mut bytes).unwrap(); - let message = message..bytes.len(); - - // encode ops into a side buffer - collect all other actors - let (ops_buf, mut ops) = ColumnEncoder::encode_ops(&change.operations, &actors); - - // encode all other actors - actors[1..].encode(&mut bytes).unwrap(); - - // now we know how many bytes ops are offset by so we can adjust the ranges - increment_range_map(&mut ops, bytes.len()); - - // write out the ops - - bytes.write_all(&ops_buf).unwrap(); - - // write out the extra bytes - let extra_bytes = bytes.len()..(bytes.len() + change.extra_bytes.len()); - 
bytes.write_all(&change.extra_bytes).unwrap(); - let body = 0..bytes.len(); - - ChunkIntermediate { - bytes, - body, - actors, - message, - ops, - extra_bytes, - } -} - -#[derive(PartialEq, Debug, Clone)] -enum ChangeBytes { - Compressed { - compressed: Vec, - uncompressed: Vec, +use crate::{ + columnar::Key as StoredKey, + storage::{ + change::{Unverified, Verified}, + parse, Change as StoredChange, ChangeOp, Chunk, Compressed, ReadChangeOpError, }, - Uncompressed(Vec), -} + types::{ActorId, ChangeHash, ElemId}, +}; -impl ChangeBytes { - fn uncompressed(&self) -> &[u8] { - match self { - ChangeBytes::Compressed { uncompressed, .. } => &uncompressed[..], - ChangeBytes::Uncompressed(b) => &b[..], - } - } - - fn compress(&mut self, body_start: usize) { - match self { - ChangeBytes::Compressed { .. } => {} - ChangeBytes::Uncompressed(uncompressed) => { - if uncompressed.len() > DEFLATE_MIN_SIZE { - let mut result = Vec::with_capacity(uncompressed.len()); - result.extend(&uncompressed[0..8]); - result.push(BLOCK_TYPE_DEFLATE); - let mut deflater = - DeflateEncoder::new(&uncompressed[body_start..], Compression::default()); - let mut deflated = Vec::new(); - let deflated_len = deflater.read_to_end(&mut deflated).unwrap(); - leb128::write::unsigned(&mut result, deflated_len as u64).unwrap(); - result.extend(&deflated[..]); - *self = ChangeBytes::Compressed { - compressed: result, - uncompressed: std::mem::take(uncompressed), - } - } - } - } - } - - fn raw(&self) -> &[u8] { - match self { - ChangeBytes::Compressed { compressed, .. } => &compressed[..], - ChangeBytes::Uncompressed(b) => &b[..], - } - } -} - -/// A change represents a group of operations performed by an actor. -#[derive(PartialEq, Debug, Clone)] +#[derive(Clone, Debug, PartialEq)] pub struct Change { - bytes: ChangeBytes, - body_start: usize, - /// Hash of this change. - pub hash: amp::ChangeHash, - /// The index of this change in the changes from this actor. - pub seq: u64, - /// The start operation index. 
Starts at 1. - pub start_op: NonZeroU64, - /// The time that this change was committed. - pub time: i64, - /// The message of this change. - message: Range, - /// The actors referenced in this change. - actors: Vec, - /// The dependencies of this change. - pub deps: Vec, - ops: HashMap>, - extra_bytes: Range, - /// The number of operations in this change. - num_ops: usize, + stored: StoredChange<'static, Verified>, + compression: CompressionState, + len: usize, } impl Change { + pub(crate) fn new(stored: StoredChange<'static, Verified>) -> Self { + let len = stored.iter_ops().count(); + Self { + stored, + len, + compression: CompressionState::NotCompressed, + } + } + + pub(crate) fn new_from_unverified( + stored: StoredChange<'static, Unverified>, + compressed: Option>, + ) -> Result { + let mut len = 0; + let stored = stored.verify_ops(|_| len += 1)?; + let compression = if let Some(c) = compressed { + CompressionState::Compressed(c) + } else { + CompressionState::NotCompressed + }; + Ok(Self { + stored, + len, + compression, + }) + } + pub fn actor_id(&self) -> &ActorId { - &self.actors[0] + self.stored.actor() } - #[instrument(level = "debug", skip(bytes))] - pub fn load_document(bytes: &[u8]) -> Result, AutomergeError> { - load_blocks(bytes) - } - - pub fn from_bytes(bytes: Vec) -> Result { - Change::try_from(bytes) - } - - pub fn is_empty(&self) -> bool { - self.len() == 0 + pub fn other_actor_ids(&self) -> &[ActorId] { + self.stored.other_actors() } pub fn len(&self) -> usize { - self.num_ops + self.len + } + + pub fn is_empty(&self) -> bool { + self.len == 0 } pub fn max_op(&self) -> u64 { - self.start_op.get() + (self.len() as u64) - 1 - } - - pub fn deps(&self) -> &[amp::ChangeHash] { - &self.deps - } - - pub fn seq(&self) -> u64 { - self.seq - } - - pub fn hash(&self) -> amp::ChangeHash { - self.hash + self.stored.start_op().get() + (self.len as u64) - 1 } pub fn start_op(&self) -> NonZeroU64 { - self.start_op + self.stored.start_op() + } + + pub fn 
message(&self) -> Option<&String> { + self.stored.message().as_ref() + } + + pub fn deps(&self) -> &[ChangeHash] { + self.stored.dependencies() + } + + pub fn hash(&self) -> ChangeHash { + self.stored.hash() + } + + pub fn seq(&self) -> u64 { + self.stored.seq() } pub fn timestamp(&self) -> i64 { - self.time + self.stored.timestamp() } - pub fn message(&self) -> Option { - let m = &self.bytes.uncompressed()[self.message.clone()]; - if m.is_empty() { - None - } else { - std::str::from_utf8(m).map(ToString::to_string).ok() - } - } - - pub fn decode(&self) -> amp::Change { - amp::Change { - start_op: self.start_op, - seq: self.seq, - time: self.time, - hash: Some(self.hash), - message: self.message(), - actor_id: self.actors[0].clone(), - deps: self.deps.clone(), - operations: self - .iter_ops() - .map(|op| amp::Op { - action: op.action.clone(), - obj: op.obj.clone(), - key: op.key.clone(), - pred: op.pred.clone(), - insert: op.insert, - }) - .collect(), - extra_bytes: self.extra_bytes().into(), - } - } - - pub(crate) fn iter_ops(&self) -> OperationIterator<'_> { - OperationIterator::new(self.bytes.uncompressed(), self.actors.as_slice(), &self.ops) - } - - pub fn extra_bytes(&self) -> &[u8] { - &self.bytes.uncompressed()[self.extra_bytes.clone()] - } - - pub fn compress(&mut self) { - self.bytes.compress(self.body_start); - } - - pub fn bytes(&self) -> &[u8] { - match &self.bytes { - ChangeBytes::Compressed { compressed, .. 
} => compressed, - ChangeBytes::Uncompressed(uncompressed) => uncompressed, + pub fn bytes(&mut self) -> Cow<'_, [u8]> { + if let CompressionState::NotCompressed = self.compression { + if let Some(compressed) = self.stored.compress() { + self.compression = CompressionState::Compressed(compressed); + } else { + self.compression = CompressionState::TooSmallToCompress; + } + }; + match &self.compression { + // SAFETY: We just checked this case above + CompressionState::NotCompressed => unreachable!(), + CompressionState::TooSmallToCompress => Cow::Borrowed(self.stored.bytes()), + CompressionState::Compressed(c) => c.bytes(), } } pub fn raw_bytes(&self) -> &[u8] { - self.bytes.raw() + self.stored.bytes() + } + + pub(crate) fn iter_ops(&self) -> impl Iterator + '_ { + self.stored.iter_ops() + } + + pub fn extra_bytes(&self) -> &[u8] { + self.stored.extra_bytes() + } + + // TODO replace all uses of this with TryFrom<&[u8]> + pub fn from_bytes(bytes: Vec) -> Result { + Self::try_from(&bytes[..]) + } + + pub fn decode(&self) -> crate::ExpandedChange { + crate::ExpandedChange::from(self) } } -fn read_leb128(bytes: &mut &[u8]) -> Result<(usize, usize), decoding::Error> { - let mut buf = &bytes[..]; - let val = leb128::read::unsigned(&mut buf)? 
as usize; - let leb128_bytes = bytes.len() - buf.len(); - Ok((val, leb128_bytes)) +#[derive(Clone, Debug, PartialEq)] +enum CompressionState { + /// We haven't tried to compress this change + NotCompressed, + /// We have compressed this change + Compressed(Compressed<'static>), + /// We tried to compress this change but it wasn't big enough to be worth it + TooSmallToCompress, } -fn read_slice( - bytes: &[u8], - cursor: &mut Range, -) -> Result { - let mut view = &bytes[cursor.clone()]; - let init_len = view.len(); - let val = T::decode::<&[u8]>(&mut view).ok_or(decoding::Error::NoDecodedValue); - let bytes_read = init_len - view.len(); - *cursor = (cursor.start + bytes_read)..cursor.end; - val -} - -fn slice_bytes(bytes: &[u8], cursor: &mut Range) -> Result, decoding::Error> { - let (val, len) = read_leb128(&mut &bytes[cursor.clone()])?; - let start = cursor.start + len; - let end = start + val; - *cursor = end..cursor.end; - Ok(start..end) -} - -fn increment_range(range: &mut Range, len: usize) { - range.end += len; - range.start += len; -} - -fn increment_range_map(ranges: &mut HashMap>, len: usize) { - for range in ranges.values_mut() { - increment_range(range, len); +impl AsRef> for Change { + fn as_ref(&self) -> &StoredChange<'static, Verified> { + &self.stored } } -fn export_objid(id: &ObjId, actors: &IndexedCache) -> amp::ObjectId { - if id == &ObjId::root() { - amp::ObjectId::Root - } else { - export_opid(&id.0, actors).into() - } -} - -fn export_elemid(id: &ElemId, actors: &IndexedCache) -> amp::ElementId { - if id == &types::HEAD { - amp::ElementId::Head - } else { - export_opid(&id.0, actors).into() - } -} - -fn export_opid(id: &OpId, actors: &IndexedCache) -> amp::OpId { - amp::OpId(id.0, actors.get(id.1).clone()) -} - -fn export_op( - op: &Op, - obj: &ObjId, - actors: &IndexedCache, - props: &IndexedCache, -) -> amp::Op { - let action = op.action.clone(); - let key = match &op.key { - Key::Map(n) => amp::Key::Map(props.get(*n).clone().into()), - 
Key::Seq(id) => amp::Key::Seq(export_elemid(id, actors)), - }; - let obj = export_objid(obj, actors); - let pred = op.pred.iter().map(|id| export_opid(id, actors)).collect(); - amp::Op { - action, - obj, - insert: op.insert, - pred, - key, - } -} - -pub(crate) fn export_change( - change: TransactionInner, - actors: &IndexedCache, - props: &IndexedCache, -) -> Change { - amp::Change { - actor_id: actors.get(change.actor).clone(), - seq: change.seq, - start_op: change.start_op, - time: change.time, - deps: change.deps, - message: change.message, - hash: change.hash, - operations: change - .operations - .iter() - .map(|(obj, _, op)| export_op(op, obj, actors, props)) - .collect(), - extra_bytes: change.extra_bytes, - } - .into() +#[derive(thiserror::Error, Debug)] +pub enum LoadError { + #[error("unable to parse change: {0}")] + Parse(Box), + #[error("leftover data after parsing")] + LeftoverData, + #[error("wrong chunk type")] + WrongChunkType, } impl<'a> TryFrom<&'a [u8]> for Change { - type Error = decoding::Error; + type Error = LoadError; fn try_from(value: &'a [u8]) -> Result { - Self::try_from(value.to_vec()) + let input = parse::Input::new(value); + let (remaining, chunk) = Chunk::parse(input).map_err(|e| LoadError::Parse(Box::new(e)))?; + if !remaining.is_empty() { + return Err(LoadError::LeftoverData); + } + match chunk { + Chunk::Change(c) => Self::new_from_unverified(c.into_owned(), None) + .map_err(|e| LoadError::Parse(Box::new(e))), + Chunk::CompressedChange(c, compressed) => { + Self::new_from_unverified(c.into_owned(), Some(compressed.into_owned())) + .map_err(|e| LoadError::Parse(Box::new(e))) + } + _ => Err(LoadError::WrongChunkType), + } } } -impl TryFrom> for Change { - type Error = decoding::Error; +impl<'a> TryFrom> for Change { + type Error = ReadChangeOpError; - fn try_from(bytes: Vec) -> Result { - let (chunktype, body) = decode_header_without_hash(&bytes)?; - let bytes = if chunktype == BLOCK_TYPE_DEFLATE { - 
decompress_chunk(0..PREAMBLE_BYTES, body, bytes)? - } else { - ChangeBytes::Uncompressed(bytes) - }; + fn try_from(c: StoredChange<'a, Unverified>) -> Result { + Self::new_from_unverified(c.into_owned(), None) + } +} - let (chunktype, hash, body) = decode_header(bytes.uncompressed())?; +impl From for Change { + fn from(e: crate::ExpandedChange) -> Self { + let stored = StoredChange::builder() + .with_actor(e.actor_id) + .with_extra_bytes(e.extra_bytes) + .with_seq(e.seq) + .with_dependencies(e.deps) + .with_timestamp(e.time) + .with_start_op(e.start_op) + .with_message(e.message) + .build(e.operations.iter()); + match stored { + Ok(c) => Change::new(c), + Err(crate::storage::change::PredOutOfOrder) => { + // Should never happen because we use `SortedVec` in legacy::Op::pred + panic!("preds out of order"); + } + } + } +} - if chunktype != BLOCK_TYPE_CHANGE { - return Err(decoding::Error::WrongType { - expected_one_of: vec![BLOCK_TYPE_CHANGE], - found: chunktype, - }); +mod convert_expanded { + use std::borrow::Cow; + + use crate::{convert, legacy, storage::AsChangeOp, types::ActorId, ScalarValue}; + + impl<'a> AsChangeOp<'a> for &'a legacy::Op { + type ActorId = &'a ActorId; + type OpId = &'a legacy::OpId; + type PredIter = std::slice::Iter<'a, legacy::OpId>; + + fn action(&self) -> u64 { + self.action.action_index() } - let body_start = body.start; - let mut cursor = body; - - let deps = decode_hashes(bytes.uncompressed(), &mut cursor)?; - - let actor = - ActorId::from(&bytes.uncompressed()[slice_bytes(bytes.uncompressed(), &mut cursor)?]); - let seq = read_slice(bytes.uncompressed(), &mut cursor)?; - let start_op = read_slice(bytes.uncompressed(), &mut cursor)?; - let time = read_slice(bytes.uncompressed(), &mut cursor)?; - let message = slice_bytes(bytes.uncompressed(), &mut cursor)?; - - let actors = decode_actors(bytes.uncompressed(), &mut cursor, Some(actor))?; - - let ops_info = decode_column_info(bytes.uncompressed(), &mut cursor, false)?; - let ops = 
decode_columns(&mut cursor, &ops_info); - - let mut change = Change { - bytes, - body_start, - hash, - seq, - start_op, - time, - actors, - message, - deps, - ops, - extra_bytes: cursor, - num_ops: 0, // filled in below - }; - - let len = change.iter_ops().count(); - change.num_ops = len; - - Ok(change) - } -} - -fn decompress_chunk( - preamble: Range, - body: Range, - compressed: Vec, -) -> Result { - let mut decoder = DeflateDecoder::new(&compressed[body]); - let mut decompressed = Vec::new(); - decoder.read_to_end(&mut decompressed)?; - let mut result = Vec::with_capacity(decompressed.len() + preamble.len()); - result.extend(&compressed[preamble]); - result.push(BLOCK_TYPE_CHANGE); - leb128::write::unsigned::>(&mut result, decompressed.len() as u64).unwrap(); - result.extend(decompressed); - Ok(ChangeBytes::Compressed { - uncompressed: result, - compressed, - }) -} - -fn decode_hashes( - bytes: &[u8], - cursor: &mut Range, -) -> Result, decoding::Error> { - let num_hashes = read_slice(bytes, cursor)?; - let mut hashes = Vec::with_capacity(num_hashes); - for _ in 0..num_hashes { - let hash = cursor.start..(cursor.start + HASH_BYTES); - *cursor = hash.end..cursor.end; - hashes.push( - bytes - .get(hash) - .ok_or(decoding::Error::NotEnoughBytes)? - .try_into() - .map_err(InvalidChangeError::from)?, - ); - } - Ok(hashes) -} - -fn decode_actors( - bytes: &[u8], - cursor: &mut Range, - first: Option, -) -> Result, decoding::Error> { - let num_actors: usize = read_slice(bytes, cursor)?; - let mut actors = Vec::with_capacity(num_actors + 1); - if let Some(actor) = first { - actors.push(actor); - } - for _ in 0..num_actors { - actors.push(ActorId::from( - bytes - .get(slice_bytes(bytes, cursor)?) 
- .ok_or(decoding::Error::NotEnoughBytes)?, - )); - } - Ok(actors) -} - -fn decode_column_info( - bytes: &[u8], - cursor: &mut Range, - allow_compressed_column: bool, -) -> Result, decoding::Error> { - let num_columns = read_slice(bytes, cursor)?; - let mut columns = Vec::with_capacity(num_columns); - let mut last_id = 0; - for _ in 0..num_columns { - let id: u32 = read_slice(bytes, cursor)?; - if (id & !COLUMN_TYPE_DEFLATE) <= (last_id & !COLUMN_TYPE_DEFLATE) { - return Err(decoding::Error::ColumnsNotInAscendingOrder { - last: last_id, - found: id, - }); + fn insert(&self) -> bool { + self.insert } - if id & COLUMN_TYPE_DEFLATE != 0 && !allow_compressed_column { - return Err(decoding::Error::ChangeContainedCompressedColumns); + + fn pred(&self) -> Self::PredIter { + self.pred.iter() } - last_id = id; - let length = read_slice(bytes, cursor)?; - columns.push((id, length)); - } - Ok(columns) -} -fn decode_columns( - cursor: &mut Range, - columns: &[(u32, usize)], -) -> HashMap> { - let mut ops = HashMap::new(); - for (id, length) in columns { - let start = cursor.start; - let end = start + length; - *cursor = end..cursor.end; - ops.insert(*id, start..end); - } - ops -} - -fn decode_header(bytes: &[u8]) -> Result<(u8, amp::ChangeHash, Range), decoding::Error> { - let (chunktype, body) = decode_header_without_hash(bytes)?; - - let calculated_hash = Sha256::digest(&bytes[PREAMBLE_BYTES..]); - - let checksum = &bytes[4..8]; - if checksum != &calculated_hash[0..4] { - return Err(decoding::Error::InvalidChecksum { - found: checksum.try_into().unwrap(), - calculated: calculated_hash[0..4].try_into().unwrap(), - }); - } - - let hash = calculated_hash[..] 
- .try_into() - .map_err(InvalidChangeError::from)?; - - Ok((chunktype, hash, body)) -} - -fn decode_header_without_hash(bytes: &[u8]) -> Result<(u8, Range), decoding::Error> { - if bytes.len() <= HEADER_BYTES { - return Err(decoding::Error::NotEnoughBytes); - } - - if bytes[0..4] != MAGIC_BYTES { - return Err(decoding::Error::WrongMagicBytes); - } - - let (val, len) = read_leb128(&mut &bytes[HEADER_BYTES..])?; - let body = (HEADER_BYTES + len)..(HEADER_BYTES + len + val); - if bytes.len() != body.end { - return Err(decoding::Error::WrongByteLength { - expected: body.end, - found: bytes.len(), - }); - } - - let chunktype = bytes[PREAMBLE_BYTES]; - - Ok((chunktype, body)) -} - -fn load_blocks(bytes: &[u8]) -> Result, AutomergeError> { - let mut changes = Vec::new(); - for slice in split_blocks(bytes)? { - decode_block(slice, &mut changes)?; - } - Ok(changes) -} - -fn split_blocks(bytes: &[u8]) -> Result, decoding::Error> { - // split off all valid blocks - ignore the rest if its corrupted or truncated - let mut blocks = Vec::new(); - let mut cursor = bytes; - while let Some(block) = pop_block(cursor)? { - blocks.push(&cursor[block.clone()]); - if cursor.len() <= block.end { - break; + fn key(&self) -> convert::Key<'a, Self::OpId> { + match &self.key { + legacy::Key::Map(s) => convert::Key::Prop(Cow::Borrowed(s)), + legacy::Key::Seq(legacy::ElementId::Head) => { + convert::Key::Elem(convert::ElemId::Head) + } + legacy::Key::Seq(legacy::ElementId::Id(o)) => { + convert::Key::Elem(convert::ElemId::Op(o)) + } + } } - cursor = &cursor[block.end..]; - } - Ok(blocks) -} -fn pop_block(bytes: &[u8]) -> Result>, decoding::Error> { - if bytes.len() < 4 || bytes[0..4] != MAGIC_BYTES { - // not reporting error here - file got corrupted? - return Ok(None); - } - let (val, len) = read_leb128( - &mut bytes - .get(HEADER_BYTES..) 
- .ok_or(decoding::Error::NotEnoughBytes)?, - )?; - // val is arbitrary so it could overflow - let end = (HEADER_BYTES + len) - .checked_add(val) - .ok_or(decoding::Error::Overflow)?; - if end > bytes.len() { - // not reporting error here - file got truncated? - return Ok(None); - } - Ok(Some(0..end)) -} - -fn decode_block(bytes: &[u8], changes: &mut Vec) -> Result<(), decoding::Error> { - match bytes[PREAMBLE_BYTES] { - BLOCK_TYPE_DOC => { - changes.extend(decode_document(bytes)?); - Ok(()) + fn obj(&self) -> convert::ObjId { + match &self.obj { + legacy::ObjectId::Root => convert::ObjId::Root, + legacy::ObjectId::Id(o) => convert::ObjId::Op(o), + } } - BLOCK_TYPE_CHANGE | BLOCK_TYPE_DEFLATE => { - changes.push(Change::try_from(bytes.to_vec())?); - Ok(()) - } - found => Err(decoding::Error::WrongType { - expected_one_of: vec![BLOCK_TYPE_DOC, BLOCK_TYPE_CHANGE, BLOCK_TYPE_DEFLATE], - found, - }), - } -} -fn decode_document(bytes: &[u8]) -> Result, decoding::Error> { - let (chunktype, _hash, mut cursor) = decode_header(bytes)?; - - // chunktype == 0 is a document, chunktype = 1 is a change - if chunktype > 0 { - return Err(decoding::Error::WrongType { - expected_one_of: vec![0], - found: chunktype, - }); - } - - let actors = decode_actors(bytes, &mut cursor, None)?; - - let heads = decode_hashes(bytes, &mut cursor)?; - - let changes_info = decode_column_info(bytes, &mut cursor, true)?; - let ops_info = decode_column_info(bytes, &mut cursor, true)?; - - let changes_data = decode_columns(&mut cursor, &changes_info); - let mut doc_changes = ChangeIterator::new(bytes, &changes_data).collect::>(); - let doc_changes_deps = DepsIterator::new(bytes, &changes_data); - - let doc_changes_len = doc_changes.len(); - - let ops_data = decode_columns(&mut cursor, &ops_info); - let doc_ops: Vec<_> = DocOpIterator::new(bytes, &actors, &ops_data).collect(); - - group_doc_change_and_doc_ops(&mut doc_changes, doc_ops, &actors)?; - - let uncompressed_changes = - 
doc_changes_to_uncompressed_changes(doc_changes.into_iter(), &actors); - - let changes = compress_doc_changes(uncompressed_changes, doc_changes_deps, doc_changes_len) - .ok_or(decoding::Error::NoDocChanges)?; - - let mut calculated_heads = HashSet::new(); - for change in &changes { - for dep in &change.deps { - calculated_heads.remove(dep); - } - calculated_heads.insert(change.hash); - } - - if calculated_heads != heads.into_iter().collect::>() { - return Err(decoding::Error::MismatchedHeads); - } - - Ok(changes) -} - -fn compress_doc_changes( - uncompressed_changes: impl Iterator, - doc_changes_deps: impl Iterator>, - num_changes: usize, -) -> Option> { - let mut changes: Vec = Vec::with_capacity(num_changes); - - // fill out the hashes as we go - for (deps, mut uncompressed_change) in doc_changes_deps.zip_eq(uncompressed_changes) { - for idx in deps { - uncompressed_change.deps.push(changes.get(idx)?.hash); - } - changes.push(uncompressed_change.into()); - } - - Some(changes) -} - -fn group_doc_change_and_doc_ops( - changes: &mut [DocChange], - mut ops: Vec, - actors: &[ActorId], -) -> Result<(), decoding::Error> { - let mut changes_by_actor: HashMap> = HashMap::new(); - - for (i, change) in changes.iter().enumerate() { - let actor_change_index = changes_by_actor.entry(change.actor).or_default(); - if change.seq != (actor_change_index.len() + 1) as u64 { - return Err(decoding::Error::ChangeDecompressFailed( - "Doc Seq Invalid".into(), - )); - } - if change.actor >= actors.len() { - return Err(decoding::Error::ChangeDecompressFailed( - "Doc Actor Invalid".into(), - )); - } - actor_change_index.push(i); - } - - let mut op_by_id = HashMap::new(); - ops.iter().enumerate().for_each(|(i, op)| { - op_by_id.insert((op.ctr, op.actor), i); - }); - - for i in 0..ops.len() { - let op = ops[i].clone(); // this is safe - avoid borrow checker issues - //let id = (op.ctr, op.actor); - //op_by_id.insert(id, i); - for succ in &op.succ { - if let Some(index) = op_by_id.get(succ) { 
- ops[*index].pred.push((op.ctr, op.actor)); - } else { - let key = if op.insert { - amp::OpId(op.ctr, actors[op.actor].clone()).into() - } else { - op.key.clone() - }; - let del = DocOp { - actor: succ.1, - ctr: succ.0, - action: OpType::Delete, - obj: op.obj.clone(), - key, - succ: Vec::new(), - pred: vec![(op.ctr, op.actor)], - insert: false, - }; - op_by_id.insert(*succ, ops.len()); - ops.push(del); + fn val(&self) -> Cow<'a, crate::ScalarValue> { + match self.primitive_value() { + Some(v) => Cow::Owned(v), + None => Cow::Owned(ScalarValue::Null), } } } - for op in ops { - // binary search for our change - let actor_change_index = changes_by_actor.entry(op.actor).or_default(); - let mut left = 0; - let mut right = actor_change_index.len(); - while left < right { - let seq = (left + right) / 2; - if changes[actor_change_index[seq]].max_op < op.ctr { - left = seq + 1; - } else { - right = seq; - } + impl<'a> convert::OpId<&'a ActorId> for &'a legacy::OpId { + fn counter(&self) -> u64 { + legacy::OpId::counter(self) } - if left >= actor_change_index.len() { - return Err(decoding::Error::ChangeDecompressFailed( - "Doc MaxOp Invalid".into(), - )); + + fn actor(&self) -> &'a ActorId { + &self.1 } - changes[actor_change_index[left]].ops.push(op); } - - changes - .iter_mut() - .for_each(|change| change.ops.sort_unstable()); - - Ok(()) } -fn doc_changes_to_uncompressed_changes<'a>( - changes: impl Iterator + 'a, - actors: &'a [ActorId], -) -> impl Iterator + 'a { - changes.map(move |change| amp::Change { - // we've already confirmed that all change.actor's are valid - actor_id: actors[change.actor].clone(), - seq: change.seq, - time: change.time, - // SAFETY: this unwrap is safe as we always add 1 - start_op: NonZeroU64::new(change.max_op - change.ops.len() as u64 + 1).unwrap(), - hash: None, - message: change.message, - operations: change - .ops - .into_iter() - .map(|op| amp::Op { - action: op.action.clone(), - insert: op.insert, - key: op.key, - obj: op.obj, - // 
we've already confirmed that all op.actor's are valid - pred: pred_into(op.pred.into_iter(), actors), - }) - .collect(), - deps: Vec::new(), - extra_bytes: change.extra_bytes, - }) -} - -fn pred_into( - pred: impl Iterator, - actors: &[ActorId], -) -> amp::SortedVec { - pred.map(|(ctr, actor)| amp::OpId(ctr, actors[actor].clone())) - .collect() -} - -#[cfg(test)] -mod tests { - use crate::legacy as amp; - #[test] - fn mismatched_head_repro_one() { - let op_json = serde_json::json!({ - "ops": [ - { - "action": "del", - "obj": "1@1485eebc689d47efbf8b892e81653eb3", - "elemId": "3164@0dcdf83d9594477199f80ccd25e87053", - "pred": [ - "3164@0dcdf83d9594477199f80ccd25e87053" - ], - "insert": false +impl From<&Change> for crate::ExpandedChange { + fn from(c: &Change) -> Self { + let actors = std::iter::once(c.actor_id()) + .chain(c.other_actor_ids().iter()) + .cloned() + .enumerate() + .collect::>(); + let operations = c + .iter_ops() + .map(|o| crate::legacy::Op { + action: crate::types::OpType::from_index_and_value(o.action, o.val).unwrap(), + insert: o.insert, + key: match o.key { + StoredKey::Elem(e) if e.is_head() => { + crate::legacy::Key::Seq(crate::legacy::ElementId::Head) + } + StoredKey::Elem(ElemId(o)) => { + crate::legacy::Key::Seq(crate::legacy::ElementId::Id( + crate::legacy::OpId::new(o.counter(), actors.get(&o.actor()).unwrap()), + )) + } + StoredKey::Prop(p) => crate::legacy::Key::Map(p), }, - ], - "actor": "e63cf5ed1f0a4fb28b2c5bc6793b9272", - "hash": "e7fd5c02c8fdd2cdc3071ce898a5839bf36229678af3b940f347da541d147ae2", - "seq": 1, - "startOp": 3179, - "time": 1634146652, - "message": null, - "deps": [ - "2603cded00f91e525507fc9e030e77f9253b239d90264ee343753efa99e3fec1" - ] - }); - - let change: amp::Change = serde_json::from_value(op_json).unwrap(); - let expected_hash: super::amp::ChangeHash = - "4dff4665d658a28bb6dcace8764eb35fa8e48e0a255e70b6b8cbf8e8456e5c50" - .parse() - .unwrap(); - let encoded: super::Change = change.into(); - assert_eq!(encoded.hash, 
expected_hash); + obj: if o.obj.is_root() { + crate::legacy::ObjectId::Root + } else { + crate::legacy::ObjectId::Id(crate::legacy::OpId::new( + o.obj.opid().counter(), + actors.get(&o.obj.opid().actor()).unwrap(), + )) + }, + pred: o + .pred + .into_iter() + .map(|p| crate::legacy::OpId::new(p.counter(), actors.get(&p.actor()).unwrap())) + .collect(), + }) + .collect::>(); + crate::ExpandedChange { + operations, + actor_id: actors.get(&0).unwrap().clone(), + hash: Some(c.hash()), + time: c.timestamp(), + deps: c.deps().to_vec(), + seq: c.seq(), + start_op: c.start_op(), + extra_bytes: c.extra_bytes().to_vec(), + message: c.message().cloned(), + } } } diff --git a/automerge/src/change_v2.rs b/automerge/src/change_v2.rs deleted file mode 100644 index 128eaaa8..00000000 --- a/automerge/src/change_v2.rs +++ /dev/null @@ -1,315 +0,0 @@ -use std::{borrow::Cow, num::NonZeroU64}; - -use crate::{ - columnar_2::Key as StoredKey, - storage::{ - change::{Unverified, Verified}, - parse, Change as StoredChange, ChangeOp, Chunk, Compressed, ReadChangeOpError, - }, - types::{ActorId, ChangeHash, ElemId}, -}; - -#[derive(Clone, Debug, PartialEq)] -pub struct Change { - stored: StoredChange<'static, Verified>, - compression: CompressionState, - len: usize, -} - -impl Change { - pub(crate) fn new(stored: StoredChange<'static, Verified>) -> Self { - let len = stored.iter_ops().count(); - Self { - stored, - len, - compression: CompressionState::NotCompressed, - } - } - - pub(crate) fn new_from_unverified( - stored: StoredChange<'static, Unverified>, - compressed: Option>, - ) -> Result { - let mut len = 0; - let stored = stored.verify_ops(|_| len += 1)?; - let compression = if let Some(c) = compressed { - CompressionState::Compressed(c) - } else { - CompressionState::NotCompressed - }; - Ok(Self { - stored, - len, - compression, - }) - } - - pub fn actor_id(&self) -> &ActorId { - self.stored.actor() - } - - pub fn other_actor_ids(&self) -> &[ActorId] { - self.stored.other_actors() - } 
- - pub fn len(&self) -> usize { - self.len - } - - pub fn is_empty(&self) -> bool { - self.len == 0 - } - - pub fn max_op(&self) -> u64 { - self.stored.start_op().get() + (self.len as u64) - 1 - } - - pub fn start_op(&self) -> NonZeroU64 { - self.stored.start_op() - } - - pub fn message(&self) -> Option<&String> { - self.stored.message().as_ref() - } - - pub fn deps(&self) -> &[ChangeHash] { - self.stored.dependencies() - } - - pub fn hash(&self) -> ChangeHash { - self.stored.hash() - } - - pub fn seq(&self) -> u64 { - self.stored.seq() - } - - pub fn timestamp(&self) -> i64 { - self.stored.timestamp() - } - - pub fn bytes(&mut self) -> Cow<'_, [u8]> { - if let CompressionState::NotCompressed = self.compression { - if let Some(compressed) = self.stored.compress() { - self.compression = CompressionState::Compressed(compressed); - } else { - self.compression = CompressionState::TooSmallToCompress; - } - }; - match &self.compression { - // SAFETY: We just checked this case above - CompressionState::NotCompressed => unreachable!(), - CompressionState::TooSmallToCompress => Cow::Borrowed(self.stored.bytes()), - CompressionState::Compressed(c) => c.bytes(), - } - } - - pub fn raw_bytes(&self) -> &[u8] { - self.stored.bytes() - } - - pub(crate) fn iter_ops(&self) -> impl Iterator + '_ { - self.stored.iter_ops() - } - - pub fn extra_bytes(&self) -> &[u8] { - self.stored.extra_bytes() - } - - // TODO replace all uses of this with TryFrom<&[u8]> - pub fn from_bytes(bytes: Vec) -> Result { - Self::try_from(&bytes[..]) - } - - pub fn decode(&self) -> crate::ExpandedChange { - crate::ExpandedChange::from(self) - } -} - -#[derive(Clone, Debug, PartialEq)] -enum CompressionState { - /// We haven't tried to compress this change - NotCompressed, - /// We have compressed this change - Compressed(Compressed<'static>), - /// We tried to compress this change but it wasn't big enough to be worth it - TooSmallToCompress, -} - -impl AsRef> for Change { - fn as_ref(&self) -> 
&StoredChange<'static, Verified> { - &self.stored - } -} - -#[derive(thiserror::Error, Debug)] -pub enum LoadError { - #[error("unable to parse change: {0}")] - Parse(Box), - #[error("leftover data after parsing")] - LeftoverData, - #[error("wrong chunk type")] - WrongChunkType, -} - -impl<'a> TryFrom<&'a [u8]> for Change { - type Error = LoadError; - - fn try_from(value: &'a [u8]) -> Result { - let input = parse::Input::new(value); - let (remaining, chunk) = Chunk::parse(input).map_err(|e| LoadError::Parse(Box::new(e)))?; - if !remaining.is_empty() { - return Err(LoadError::LeftoverData); - } - match chunk { - Chunk::Change(c) => Self::new_from_unverified(c.into_owned(), None) - .map_err(|e| LoadError::Parse(Box::new(e))), - Chunk::CompressedChange(c, compressed) => { - Self::new_from_unverified(c.into_owned(), Some(compressed.into_owned())) - .map_err(|e| LoadError::Parse(Box::new(e))) - } - _ => Err(LoadError::WrongChunkType), - } - } -} - -impl<'a> TryFrom> for Change { - type Error = ReadChangeOpError; - - fn try_from(c: StoredChange<'a, Unverified>) -> Result { - Self::new_from_unverified(c.into_owned(), None) - } -} - -impl From for Change { - fn from(e: crate::ExpandedChange) -> Self { - let stored = StoredChange::builder() - .with_actor(e.actor_id) - .with_extra_bytes(e.extra_bytes) - .with_seq(e.seq) - .with_dependencies(e.deps) - .with_timestamp(e.time) - .with_start_op(e.start_op) - .with_message(e.message) - .build(e.operations.iter()); - match stored { - Ok(c) => Change::new(c), - Err(crate::storage::change::PredOutOfOrder) => { - // Should never happen because we use `SortedVec` in legacy::Op::pred - panic!("preds out of order"); - } - } - } -} - -mod convert_expanded { - use std::borrow::Cow; - - use crate::{convert, legacy, storage::AsChangeOp, types::ActorId, ScalarValue}; - - impl<'a> AsChangeOp<'a> for &'a legacy::Op { - type ActorId = &'a ActorId; - type OpId = &'a legacy::OpId; - type PredIter = std::slice::Iter<'a, legacy::OpId>; - - fn 
action(&self) -> u64 { - self.action.action_index() - } - - fn insert(&self) -> bool { - self.insert - } - - fn pred(&self) -> Self::PredIter { - self.pred.iter() - } - - fn key(&self) -> convert::Key<'a, Self::OpId> { - match &self.key { - legacy::Key::Map(s) => convert::Key::Prop(Cow::Borrowed(s)), - legacy::Key::Seq(legacy::ElementId::Head) => { - convert::Key::Elem(convert::ElemId::Head) - } - legacy::Key::Seq(legacy::ElementId::Id(o)) => { - convert::Key::Elem(convert::ElemId::Op(o)) - } - } - } - - fn obj(&self) -> convert::ObjId { - match &self.obj { - legacy::ObjectId::Root => convert::ObjId::Root, - legacy::ObjectId::Id(o) => convert::ObjId::Op(o), - } - } - - fn val(&self) -> Cow<'a, crate::ScalarValue> { - match self.primitive_value() { - Some(v) => Cow::Owned(v), - None => Cow::Owned(ScalarValue::Null), - } - } - } - - impl<'a> convert::OpId<&'a ActorId> for &'a legacy::OpId { - fn counter(&self) -> u64 { - legacy::OpId::counter(self) - } - - fn actor(&self) -> &'a ActorId { - &self.1 - } - } -} - -impl From<&Change> for crate::ExpandedChange { - fn from(c: &Change) -> Self { - let actors = std::iter::once(c.actor_id()) - .chain(c.other_actor_ids().iter()) - .cloned() - .enumerate() - .collect::>(); - let operations = c - .iter_ops() - .map(|o| crate::legacy::Op { - action: crate::types::OpType::from_index_and_value(o.action, o.val).unwrap(), - insert: o.insert, - key: match o.key { - StoredKey::Elem(e) if e.is_head() => { - crate::legacy::Key::Seq(crate::legacy::ElementId::Head) - } - StoredKey::Elem(ElemId(o)) => { - crate::legacy::Key::Seq(crate::legacy::ElementId::Id( - crate::legacy::OpId::new(o.counter(), actors.get(&o.actor()).unwrap()), - )) - } - StoredKey::Prop(p) => crate::legacy::Key::Map(p), - }, - obj: if o.obj.is_root() { - crate::legacy::ObjectId::Root - } else { - crate::legacy::ObjectId::Id(crate::legacy::OpId::new( - o.obj.opid().counter(), - actors.get(&o.obj.opid().actor()).unwrap(), - )) - }, - pred: o - .pred - .into_iter() - 
.map(|p| crate::legacy::OpId::new(p.counter(), actors.get(&p.actor()).unwrap())) - .collect(), - }) - .collect::>(); - crate::ExpandedChange { - operations, - actor_id: actors.get(&0).unwrap().clone(), - hash: Some(c.hash()), - time: c.timestamp(), - deps: c.deps().to_vec(), - seq: c.seq(), - start_op: c.start_op(), - extra_bytes: c.extra_bytes().to_vec(), - message: c.message().cloned(), - } - } -} diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs index ff260e4d..bb727626 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -1,1368 +1,14 @@ -#![allow(dead_code)] -#![allow(unused_variables)] -use core::fmt::Debug; -use std::{ - borrow::Cow, - cmp::Ordering, - collections::HashMap, - io, - io::{Read, Write}, - ops::Range, - str, -}; - -use crate::{ - types::{ActorId, ElemId, Key, ObjId, ObjType, Op, OpId, OpType, ScalarValue}, - Change, -}; - -use crate::legacy as amp; -use amp::SortedVec; -use flate2::bufread::DeflateDecoder; -use smol_str::SmolStr; -use tracing::instrument; - -use crate::indexed_cache::IndexedCache; -use crate::{ - decoding::{BooleanDecoder, Decodable, Decoder, DeltaDecoder, RleDecoder}, - encoding::{BooleanEncoder, ColData, DeltaEncoder, Encodable, RleEncoder}, -}; - -impl Encodable for Action { - fn encode(&self, buf: &mut R) -> io::Result { - (*self as u32).encode(buf) - } -} - -impl Encodable for [ActorId] { - fn encode(&self, buf: &mut R) -> io::Result { - let mut len = self.len().encode(buf)?; - for i in self { - len += i.to_bytes().encode(buf)?; - } - Ok(len) - } -} - -fn actor_index(actor: &ActorId, actors: &[ActorId]) -> usize { - actors.iter().position(|a| a == actor).unwrap() -} - -impl Encodable for ActorId { - fn encode_with_actors(&self, buf: &mut R, actors: &[ActorId]) -> io::Result { - actor_index(self, actors).encode(buf) - } - - fn encode(&self, _buf: &mut R) -> io::Result { - // we instead encode actors as their position on a sequence - Ok(0) - } -} - -impl Encodable for Vec { - fn 
encode(&self, buf: &mut R) -> io::Result { - self.as_slice().encode(buf) - } -} - -impl Encodable for &[u8] { - fn encode(&self, buf: &mut R) -> io::Result { - let head = self.len().encode(buf)?; - buf.write_all(self)?; - Ok(head + self.len()) - } -} - -pub(crate) struct OperationIterator<'a> { - pub(crate) action: RleDecoder<'a, Action>, - pub(crate) objs: ObjIterator<'a>, - pub(crate) keys: KeyIterator<'a>, - pub(crate) insert: BooleanDecoder<'a>, - pub(crate) value: ValueIterator<'a>, - pub(crate) pred: PredIterator<'a>, -} - -impl<'a> OperationIterator<'a> { - pub(crate) fn new( - bytes: &'a [u8], - actors: &'a [ActorId], - ops: &'a HashMap>, - ) -> OperationIterator<'a> { - OperationIterator { - objs: ObjIterator { - actors, - actor: col_iter(bytes, ops, COL_OBJ_ACTOR), - ctr: col_iter(bytes, ops, COL_OBJ_CTR), - }, - keys: KeyIterator { - actors, - actor: col_iter(bytes, ops, COL_KEY_ACTOR), - ctr: col_iter(bytes, ops, COL_KEY_CTR), - str: col_iter(bytes, ops, COL_KEY_STR), - }, - value: ValueIterator { - val_len: col_iter(bytes, ops, COL_VAL_LEN), - val_raw: col_iter(bytes, ops, COL_VAL_RAW), - actors, - actor: col_iter(bytes, ops, COL_REF_ACTOR), - ctr: col_iter(bytes, ops, COL_REF_CTR), - }, - pred: PredIterator { - actors, - pred_num: col_iter(bytes, ops, COL_PRED_NUM), - pred_actor: col_iter(bytes, ops, COL_PRED_ACTOR), - pred_ctr: col_iter(bytes, ops, COL_PRED_CTR), - }, - insert: col_iter(bytes, ops, COL_INSERT), - action: col_iter(bytes, ops, COL_ACTION), - } - } -} - -impl<'a> Iterator for OperationIterator<'a> { - type Item = amp::Op; - - fn next(&mut self) -> Option { - let action = self.action.next()??; - let insert = self.insert.next()?; - let obj = self.objs.next()?; - let key = self.keys.next()?; - let pred = self.pred.next()?; - let value = self.value.next()?; - let action = match action { - Action::Set => OpType::Put(value), - Action::MakeList => OpType::Make(ObjType::List), - Action::MakeText => OpType::Make(ObjType::Text), - Action::MakeMap 
=> OpType::Make(ObjType::Map), - Action::MakeTable => OpType::Make(ObjType::Table), - Action::Del => OpType::Delete, - Action::Inc => OpType::Increment(value.to_i64()?), - }; - Some(amp::Op { - action, - obj, - key, - pred, - insert, - }) - } -} - -pub(crate) struct DocOpIterator<'a> { - pub(crate) actor: RleDecoder<'a, usize>, - pub(crate) ctr: DeltaDecoder<'a>, - pub(crate) action: RleDecoder<'a, Action>, - pub(crate) objs: ObjIterator<'a>, - pub(crate) keys: KeyIterator<'a>, - pub(crate) insert: BooleanDecoder<'a>, - pub(crate) value: ValueIterator<'a>, - pub(crate) succ: SuccIterator<'a>, -} - -impl<'a> Iterator for DocOpIterator<'a> { - type Item = DocOp; - fn next(&mut self) -> Option { - let action = self.action.next()??; - let actor = self.actor.next()??; - let ctr = self.ctr.next()??; - let insert = self.insert.next()?; - let obj = self.objs.next()?; - let key = self.keys.next()?; - let succ = self.succ.next()?; - let value = self.value.next()?; - let action = match action { - Action::Set => OpType::Put(value), - Action::MakeList => OpType::Make(ObjType::List), - Action::MakeText => OpType::Make(ObjType::Text), - Action::MakeMap => OpType::Make(ObjType::Map), - Action::MakeTable => OpType::Make(ObjType::Table), - Action::Del => OpType::Delete, - Action::Inc => OpType::Increment(value.to_i64()?), - }; - Some(DocOp { - actor, - ctr, - action, - obj, - key, - succ, - pred: Vec::new(), - insert, - }) - } -} - -impl<'a> DocOpIterator<'a> { - pub(crate) fn new( - bytes: &'a [u8], - actors: &'a [ActorId], - ops: &'a HashMap>, - ) -> DocOpIterator<'a> { - DocOpIterator { - actor: col_iter(bytes, ops, COL_ID_ACTOR), - ctr: col_iter(bytes, ops, COL_ID_CTR), - objs: ObjIterator { - actors, - actor: col_iter(bytes, ops, COL_OBJ_ACTOR), - ctr: col_iter(bytes, ops, COL_OBJ_CTR), - }, - keys: KeyIterator { - actors, - actor: col_iter(bytes, ops, COL_KEY_ACTOR), - ctr: col_iter(bytes, ops, COL_KEY_CTR), - str: col_iter(bytes, ops, COL_KEY_STR), - }, - value: ValueIterator 
{ - val_len: col_iter(bytes, ops, COL_VAL_LEN), - val_raw: col_iter(bytes, ops, COL_VAL_RAW), - actors, - actor: col_iter(bytes, ops, COL_REF_ACTOR), - ctr: col_iter(bytes, ops, COL_REF_CTR), - }, - succ: SuccIterator { - succ_num: col_iter(bytes, ops, COL_SUCC_NUM), - succ_actor: col_iter(bytes, ops, COL_SUCC_ACTOR), - succ_ctr: col_iter(bytes, ops, COL_SUCC_CTR), - }, - insert: col_iter(bytes, ops, COL_INSERT), - action: col_iter(bytes, ops, COL_ACTION), - } - } -} - -pub(crate) struct ChangeIterator<'a> { - pub(crate) actor: RleDecoder<'a, usize>, - pub(crate) seq: DeltaDecoder<'a>, - pub(crate) max_op: DeltaDecoder<'a>, - pub(crate) time: DeltaDecoder<'a>, - pub(crate) message: RleDecoder<'a, String>, - pub(crate) extra: ExtraIterator<'a>, -} - -impl<'a> ChangeIterator<'a> { - pub(crate) fn new(bytes: &'a [u8], ops: &'a HashMap>) -> ChangeIterator<'a> { - ChangeIterator { - actor: col_iter(bytes, ops, DOC_ACTOR), - seq: col_iter(bytes, ops, DOC_SEQ), - max_op: col_iter(bytes, ops, DOC_MAX_OP), - time: col_iter(bytes, ops, DOC_TIME), - message: col_iter(bytes, ops, DOC_MESSAGE), - extra: ExtraIterator { - len: col_iter(bytes, ops, DOC_EXTRA_LEN), - extra: col_iter(bytes, ops, DOC_EXTRA_RAW), - }, - } - } -} - -impl<'a> Iterator for ChangeIterator<'a> { - type Item = DocChange; - fn next(&mut self) -> Option { - let actor = self.actor.next()??; - let seq = self.seq.next()??; - let max_op = self.max_op.next()??; - let time = self.time.next()?? 
as i64; - let message = self.message.next()?; - let extra_bytes = self.extra.next().unwrap_or_default(); - Some(DocChange { - actor, - seq, - max_op, - time, - message, - extra_bytes, - ops: Vec::new(), - }) - } -} - -pub(crate) struct ObjIterator<'a> { - //actors: &'a Vec<&'a [u8]>, - pub(crate) actors: &'a [ActorId], - pub(crate) actor: RleDecoder<'a, usize>, - pub(crate) ctr: RleDecoder<'a, u64>, -} - -pub(crate) struct DepsIterator<'a> { - pub(crate) num: RleDecoder<'a, usize>, - pub(crate) dep: DeltaDecoder<'a>, -} - -impl<'a> DepsIterator<'a> { - pub(crate) fn new(bytes: &'a [u8], ops: &'a HashMap>) -> Self { - Self { - num: col_iter(bytes, ops, DOC_DEPS_NUM), - dep: col_iter(bytes, ops, DOC_DEPS_INDEX), - } - } -} - -pub(crate) struct ExtraIterator<'a> { - pub(crate) len: RleDecoder<'a, usize>, - pub(crate) extra: Decoder<'a>, -} - -pub(crate) struct PredIterator<'a> { - pub(crate) actors: &'a [ActorId], - pub(crate) pred_num: RleDecoder<'a, usize>, - pub(crate) pred_actor: RleDecoder<'a, usize>, - pub(crate) pred_ctr: DeltaDecoder<'a>, -} - -pub(crate) struct SuccIterator<'a> { - pub(crate) succ_num: RleDecoder<'a, usize>, - pub(crate) succ_actor: RleDecoder<'a, usize>, - pub(crate) succ_ctr: DeltaDecoder<'a>, -} - -pub(crate) struct KeyIterator<'a> { - pub(crate) actors: &'a [ActorId], - pub(crate) actor: RleDecoder<'a, usize>, - pub(crate) ctr: DeltaDecoder<'a>, - pub(crate) str: RleDecoder<'a, SmolStr>, -} - -pub(crate) struct ValueIterator<'a> { - pub(crate) actors: &'a [ActorId], - pub(crate) val_len: RleDecoder<'a, usize>, - pub(crate) val_raw: Decoder<'a>, - pub(crate) actor: RleDecoder<'a, usize>, - pub(crate) ctr: RleDecoder<'a, u64>, -} - -impl<'a> Iterator for DepsIterator<'a> { - type Item = Vec; - fn next(&mut self) -> Option> { - let num = self.num.next()??; - // I bet there's something simple like `self.dep.take(num).collect()` - let mut p = Vec::with_capacity(num); - for _ in 0..num { - let dep = self.dep.next()??; - p.push(dep as usize); - 
} - Some(p) - } -} - -impl<'a> Iterator for ExtraIterator<'a> { - type Item = Vec; - fn next(&mut self) -> Option> { - let v = self.len.next()??; - // if v % 16 == VALUE_TYPE_BYTES => { // this should be bytes - let len = v >> 4; - self.extra.read_bytes(len).ok().map(|s| s.to_vec()) - } -} - -impl<'a> Iterator for PredIterator<'a> { - type Item = SortedVec; - fn next(&mut self) -> Option> { - let num = self.pred_num.next()??; - let mut p = Vec::with_capacity(num); - for _ in 0..num { - let actor = self.pred_actor.next()??; - let ctr = self.pred_ctr.next()??; - let actor_id = self.actors.get(actor)?.clone(); - let op_id = amp::OpId::new(ctr, &actor_id); - p.push(op_id); - } - Some(SortedVec::from(p)) - } -} - -impl<'a> Iterator for SuccIterator<'a> { - type Item = Vec<(u64, usize)>; - fn next(&mut self) -> Option> { - let num = self.succ_num.next()??; - let mut p = Vec::with_capacity(num); - for _ in 0..num { - let actor = self.succ_actor.next()??; - let ctr = self.succ_ctr.next()??; - p.push((ctr, actor)); - } - Some(p) - } -} - -impl<'a> Iterator for ValueIterator<'a> { - type Item = ScalarValue; - fn next(&mut self) -> Option { - let val_type = self.val_len.next()??; - let actor = self.actor.next()?; - let ctr = self.ctr.next()?; - match val_type { - VALUE_TYPE_NULL => Some(ScalarValue::Null), - VALUE_TYPE_FALSE => Some(ScalarValue::Boolean(false)), - VALUE_TYPE_TRUE => Some(ScalarValue::Boolean(true)), - v if v % 16 == VALUE_TYPE_COUNTER => { - let len = v >> 4; - let val = self.val_raw.read().ok()?; - if len != self.val_raw.last_read { - return None; - } - Some(ScalarValue::counter(val)) - } - v if v % 16 == VALUE_TYPE_TIMESTAMP => { - let len = v >> 4; - let val = self.val_raw.read().ok()?; - if len != self.val_raw.last_read { - return None; - } - Some(ScalarValue::Timestamp(val)) - } - v if v % 16 == VALUE_TYPE_LEB128_UINT => { - let len = v >> 4; - let val = self.val_raw.read().ok()?; - if len != self.val_raw.last_read { - return None; - } - 
Some(ScalarValue::Uint(val)) - } - v if v % 16 == VALUE_TYPE_LEB128_INT => { - let len = v >> 4; - let val = self.val_raw.read().ok()?; - if len != self.val_raw.last_read { - return None; - } - Some(ScalarValue::Int(val)) - } - v if v % 16 == VALUE_TYPE_UTF8 => { - let len = v >> 4; - let data = self.val_raw.read_bytes(len).ok()?; - let s = str::from_utf8(data).ok()?; - Some(ScalarValue::Str(SmolStr::new(s))) - } - v if v % 16 == VALUE_TYPE_BYTES => { - let len = v >> 4; - let data = self.val_raw.read_bytes(len).ok()?; - Some(ScalarValue::Bytes(data.to_vec())) - } - v if v % 16 >= VALUE_TYPE_MIN_UNKNOWN && v % 16 <= VALUE_TYPE_MAX_UNKNOWN => { - let len = v >> 4; - let _data = self.val_raw.read_bytes(len).ok()?; - unimplemented!() - //Some((amp::Value::Bytes(data)) - } - v if v % 16 == VALUE_TYPE_IEEE754 => { - let len = v >> 4; - if len == 8 { - // confirm only 8 bytes read - let num = self.val_raw.read().ok()?; - Some(ScalarValue::F64(num)) - } else { - // bad size of float - None - } - } - _ => { - // unknown command - None - } - } - } -} - -impl<'a> Iterator for KeyIterator<'a> { - type Item = amp::Key; - fn next(&mut self) -> Option { - match (self.actor.next()?, self.ctr.next()?, self.str.next()?) { - (None, None, Some(string)) => Some(amp::Key::Map(string)), - (None, Some(0), None) => Some(amp::Key::head()), - (Some(actor), Some(ctr), None) => { - let actor_id = self.actors.get(actor)?; - Some(amp::OpId::new(ctr, actor_id).into()) - } - _ => None, - } - } -} - -impl<'a> Iterator for ObjIterator<'a> { - type Item = amp::ObjectId; - fn next(&mut self) -> Option { - if let (Some(actor), Some(ctr)) = (self.actor.next()?, self.ctr.next()?) 
{ - let actor_id = self.actors.get(actor)?; - Some(amp::ObjectId::Id(amp::OpId::new(ctr, actor_id))) - } else { - Some(amp::ObjectId::Root) - } - } -} - -#[derive(PartialEq, Debug, Clone)] -pub(crate) struct DocChange { - pub(crate) actor: usize, - pub(crate) seq: u64, - pub(crate) max_op: u64, - pub(crate) time: i64, - pub(crate) message: Option, - pub(crate) extra_bytes: Vec, - pub(crate) ops: Vec, -} - -#[derive(Debug, Clone)] -pub(crate) struct DocOp { - pub(crate) actor: usize, - pub(crate) ctr: u64, - pub(crate) action: OpType, - pub(crate) obj: amp::ObjectId, - pub(crate) key: amp::Key, - pub(crate) succ: Vec<(u64, usize)>, - pub(crate) pred: Vec<(u64, usize)>, - pub(crate) insert: bool, -} - -impl Ord for DocOp { - fn cmp(&self, other: &Self) -> Ordering { - self.ctr.cmp(&other.ctr) - } -} - -impl PartialOrd for DocOp { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl PartialEq for DocOp { - fn eq(&self, other: &Self) -> bool { - self.ctr == other.ctr - } -} - -impl Eq for DocOp {} - -struct ValEncoder { - len: RleEncoder, - ref_actor: RleEncoder, - ref_counter: RleEncoder, - raw: Vec, -} - -impl ValEncoder { - const COLUMNS: usize = 4; - - fn new() -> ValEncoder { - ValEncoder { - len: RleEncoder::new(), - raw: Vec::new(), - ref_actor: RleEncoder::new(), - ref_counter: RleEncoder::new(), - } - } - - fn append_value(&mut self, val: &ScalarValue, actors: &[usize]) { - // It may seem weird to have two consecutive matches on the same value. 
The reason is so - // that we don't have to repeat the `append_null` calls on ref_actor and ref_counter in - // every arm of the next match - self.ref_actor.append_null(); - self.ref_counter.append_null(); - match val { - ScalarValue::Null => self.len.append_value(VALUE_TYPE_NULL), - ScalarValue::Boolean(true) => self.len.append_value(VALUE_TYPE_TRUE), - ScalarValue::Boolean(false) => self.len.append_value(VALUE_TYPE_FALSE), - ScalarValue::Bytes(bytes) => { - let len = bytes.len(); - self.raw.extend(bytes); - self.len.append_value(len << 4 | VALUE_TYPE_BYTES); - } - ScalarValue::Str(s) => { - let bytes = s.as_bytes(); - let len = bytes.len(); - self.raw.extend(bytes); - self.len.append_value(len << 4 | VALUE_TYPE_UTF8); - } - ScalarValue::Counter(count) => { - let len = count.start.encode(&mut self.raw).unwrap(); - self.len.append_value(len << 4 | VALUE_TYPE_COUNTER); - } - ScalarValue::Timestamp(time) => { - let len = time.encode(&mut self.raw).unwrap(); - self.len.append_value(len << 4 | VALUE_TYPE_TIMESTAMP); - } - ScalarValue::Int(n) => { - let len = n.encode(&mut self.raw).unwrap(); - self.len.append_value(len << 4 | VALUE_TYPE_LEB128_INT); - } - ScalarValue::Uint(n) => { - let len = n.encode(&mut self.raw).unwrap(); - self.len.append_value(len << 4 | VALUE_TYPE_LEB128_UINT); - } - ScalarValue::F64(n) => { - let len = (*n).encode(&mut self.raw).unwrap(); - self.len.append_value(len << 4 | VALUE_TYPE_IEEE754); - } - ScalarValue::Unknown { type_code, bytes } => { - panic!("unknown value") - } - } - } - - fn append_value2(&mut self, val: &ScalarValue, actors: &[ActorId]) { - // It may seem weird to have two consecutive matches on the same value. 
The reason is so - // that we don't have to repeat the `append_null` calls on ref_actor and ref_counter in - // every arm of the next match - self.ref_actor.append_null(); - self.ref_counter.append_null(); - match val { - ScalarValue::Null => self.len.append_value(VALUE_TYPE_NULL), - ScalarValue::Boolean(true) => self.len.append_value(VALUE_TYPE_TRUE), - ScalarValue::Boolean(false) => self.len.append_value(VALUE_TYPE_FALSE), - ScalarValue::Bytes(bytes) => { - let len = bytes.len(); - self.raw.extend(bytes); - self.len.append_value(len << 4 | VALUE_TYPE_BYTES); - } - ScalarValue::Str(s) => { - let bytes = s.as_bytes(); - let len = bytes.len(); - self.raw.extend(bytes); - self.len.append_value(len << 4 | VALUE_TYPE_UTF8); - } - ScalarValue::Counter(c) => { - let len = c.start.encode(&mut self.raw).unwrap(); - self.len.append_value(len << 4 | VALUE_TYPE_COUNTER); - } - ScalarValue::Timestamp(time) => { - let len = time.encode(&mut self.raw).unwrap(); - self.len.append_value(len << 4 | VALUE_TYPE_TIMESTAMP); - } - ScalarValue::Int(n) => { - let len = n.encode(&mut self.raw).unwrap(); - self.len.append_value(len << 4 | VALUE_TYPE_LEB128_INT); - } - ScalarValue::Uint(n) => { - let len = n.encode(&mut self.raw).unwrap(); - self.len.append_value(len << 4 | VALUE_TYPE_LEB128_UINT); - } - ScalarValue::F64(n) => { - let len = (*n).encode(&mut self.raw).unwrap(); - self.len.append_value(len << 4 | VALUE_TYPE_IEEE754); - } - ScalarValue::Unknown { type_code, bytes } => { - panic!("unknown value") - } - } - } - - fn append_null(&mut self) { - self.ref_counter.append_null(); - self.ref_actor.append_null(); - self.len.append_value(VALUE_TYPE_NULL); - } - - fn finish(self) -> Vec { - vec![ - self.ref_counter.finish(COL_REF_CTR), - self.ref_actor.finish(COL_REF_ACTOR), - self.len.finish(COL_VAL_LEN), - ColData::new(COL_VAL_RAW, self.raw), - ] - } -} - -struct KeyEncoder { - actor: RleEncoder, - ctr: DeltaEncoder, - str: RleEncoder, -} - -impl KeyEncoder { - const COLUMNS: usize = 3; 
- - fn new() -> KeyEncoder { - KeyEncoder { - actor: RleEncoder::new(), - ctr: DeltaEncoder::new(), - str: RleEncoder::new(), - } - } - - fn append(&mut self, key: Key, actors: &[usize], props: &[String]) { - match key { - Key::Map(i) => { - self.actor.append_null(); - self.ctr.append_null(); - self.str.append_value(props[i].clone()); - } - Key::Seq(ElemId(OpId(0, 0))) => { - // HEAD - self.actor.append_null(); - self.ctr.append_value(0); - self.str.append_null(); - } - Key::Seq(ElemId(OpId(ctr, actor))) => { - self.actor.append_value(actors[actor]); - self.ctr.append_value(ctr); - self.str.append_null(); - } - } - } - - fn finish(self) -> Vec { - vec![ - self.actor.finish(COL_KEY_ACTOR), - self.ctr.finish(COL_KEY_CTR), - self.str.finish(COL_KEY_STR), - ] - } -} - -struct KeyEncoderOld { - actor: RleEncoder, - ctr: DeltaEncoder, - str: RleEncoder, -} - -impl KeyEncoderOld { - const COLUMNS: usize = 3; - - fn new() -> KeyEncoderOld { - KeyEncoderOld { - actor: RleEncoder::new(), - ctr: DeltaEncoder::new(), - str: RleEncoder::new(), - } - } - - fn append(&mut self, key: amp::Key, actors: &[ActorId]) { - match key { - amp::Key::Map(s) => { - self.actor.append_null(); - self.ctr.append_null(); - self.str.append_value(s); - } - amp::Key::Seq(amp::ElementId::Head) => { - self.actor.append_null(); - self.ctr.append_value(0); - self.str.append_null(); - } - amp::Key::Seq(amp::ElementId::Id(amp::OpId(ctr, actor))) => { - self.actor.append_value(actor_index(&actor, actors)); - self.ctr.append_value(ctr); - self.str.append_null(); - } - } - } - - fn finish(self) -> Vec { - vec![ - self.actor.finish(COL_KEY_ACTOR), - self.ctr.finish(COL_KEY_CTR), - self.str.finish(COL_KEY_STR), - ] - } -} - -struct SuccEncoder { - num: RleEncoder, - actor: RleEncoder, - ctr: DeltaEncoder, -} - -fn succ_ord(left: &OpId, right: &OpId, actors: &[usize]) -> Ordering { - match (left, right) { - (OpId(0, _), OpId(0, _)) => Ordering::Equal, - (OpId(0, _), OpId(_, _)) => Ordering::Less, - (OpId(_, _), 
OpId(0, _)) => Ordering::Greater, - (OpId(a, x), OpId(b, y)) if a == b => actors[*x].cmp(&actors[*y]), - (OpId(a, _), OpId(b, _)) => a.cmp(b), - } -} - -impl SuccEncoder { - fn new() -> SuccEncoder { - SuccEncoder { - num: RleEncoder::new(), - actor: RleEncoder::new(), - ctr: DeltaEncoder::new(), - } - } - - fn append< - 'a, - I: IntoIterator, - II: ExactSizeIterator + Iterator, - >( - &mut self, - succ: I, - actors: &[usize], - ) { - let iter = succ.into_iter(); - self.num.append_value(iter.len()); - for s in iter { - self.ctr.append_value(s.0); - self.actor.append_value(actors[s.1]); - } - } - - fn append_old(&mut self, succ: &[(u64, usize)]) { - self.num.append_value(succ.len()); - for s in succ.iter() { - self.ctr.append_value(s.0); - self.actor.append_value(s.1); - } - } - - fn finish(self) -> Vec { - vec![ - self.num.finish(COL_SUCC_NUM), - self.actor.finish(COL_SUCC_ACTOR), - self.ctr.finish(COL_SUCC_CTR), - ] - } -} - -struct PredEncoder { - num: RleEncoder, - actor: RleEncoder, - ctr: DeltaEncoder, -} - -impl PredEncoder { - const COLUMNS: usize = 3; - - fn new() -> PredEncoder { - PredEncoder { - num: RleEncoder::new(), - actor: RleEncoder::new(), - ctr: DeltaEncoder::new(), - } - } - - fn append(&mut self, pred: &SortedVec, actors: &[ActorId]) { - self.num.append_value(pred.len()); - for p in pred.iter() { - self.ctr.append_value(p.0); - self.actor.append_value(actor_index(&p.1, actors)); - } - } - - fn finish(self) -> Vec { - vec![ - self.num.finish(COL_PRED_NUM), - self.actor.finish(COL_PRED_ACTOR), - self.ctr.finish(COL_PRED_CTR), - ] - } -} - -struct ObjEncoder { - actor: RleEncoder, - ctr: RleEncoder, -} - -impl ObjEncoder { - const COLUMNS: usize = 2; - - fn new() -> ObjEncoder { - ObjEncoder { - actor: RleEncoder::new(), - ctr: RleEncoder::new(), - } - } - - fn append(&mut self, obj: &ObjId, actors: &[usize]) { - match obj.0 { - OpId(ctr, _) if ctr == 0 => { - self.actor.append_null(); - self.ctr.append_null(); - } - OpId(ctr, actor) => { - 
self.actor.append_value(actors[actor]); - self.ctr.append_value(ctr); - } - } - } - - fn finish(self) -> Vec { - vec![ - self.actor.finish(COL_OBJ_ACTOR), - self.ctr.finish(COL_OBJ_CTR), - ] - } -} - -struct ObjEncoderOld { - actor: RleEncoder, - ctr: RleEncoder, -} - -impl ObjEncoderOld { - const COLUMNS: usize = 2; - - fn new() -> ObjEncoderOld { - ObjEncoderOld { - actor: RleEncoder::new(), - ctr: RleEncoder::new(), - } - } - - fn append(&mut self, obj: &::ObjectId, actors: &[ActorId]) { - match obj { - amp::ObjectId::Root => { - self.actor.append_null(); - self.ctr.append_null(); - } - amp::ObjectId::Id(amp::OpId(ctr, actor)) => { - self.actor.append_value(actor_index(actor, actors)); - self.ctr.append_value(*ctr); - } - } - } - - fn finish(self) -> Vec { - vec![ - self.actor.finish(COL_OBJ_ACTOR), - self.ctr.finish(COL_OBJ_CTR), - ] - } -} - -pub(crate) struct ChangeEncoder { - actor: RleEncoder, - seq: DeltaEncoder, - max_op: DeltaEncoder, - time: DeltaEncoder, - message: RleEncoder>, - deps_num: RleEncoder, - deps_index: DeltaEncoder, - extra_len: RleEncoder, - extra_raw: Vec, -} - -impl ChangeEncoder { - #[instrument(level = "debug", skip(changes, actors))] - pub fn encode_changes<'a, 'b, I>( - changes: I, - actors: &'a IndexedCache, - ) -> (Vec, Vec) - where - I: IntoIterator, - { - let mut e = Self::new(); - e.encode(changes, actors); - e.finish() - } - - fn new() -> ChangeEncoder { - ChangeEncoder { - actor: RleEncoder::new(), - seq: DeltaEncoder::new(), - max_op: DeltaEncoder::new(), - time: DeltaEncoder::new(), - message: RleEncoder::new(), - deps_num: RleEncoder::new(), - deps_index: DeltaEncoder::new(), - extra_len: RleEncoder::new(), - extra_raw: Vec::new(), - } - } - - fn encode<'a, I>(&mut self, changes: I, actors: &IndexedCache) - where - I: IntoIterator, - { - let mut index_by_hash: HashMap = HashMap::new(); - for (index, change) in changes.into_iter().enumerate() { - index_by_hash.insert(change.hash, index); - self.actor - 
.append_value(actors.lookup(change.actor_id()).unwrap()); //actors.iter().position(|a| a == &change.actor_id).unwrap()); - self.seq.append_value(change.seq); - // FIXME iterops.count is crazy slow - self.max_op - .append_value(change.start_op.get() + change.iter_ops().count() as u64 - 1); - self.time.append_value(change.time as u64); - self.message.append_value(change.message()); - self.deps_num.append_value(change.deps.len()); - for dep in &change.deps { - if let Some(dep_index) = index_by_hash.get(dep) { - self.deps_index.append_value(*dep_index as u64); - } else { - // FIXME This relies on the changes being in causal order, which they may not - // be, we could probably do something cleverer like accumulate the values to - // write and the dependency tree in an intermediate value, then write it to the - // encoder in a second pass over the intermediates - panic!("Missing dependency for hash: {:?}", dep); - } - } - self.extra_len - .append_value(change.extra_bytes().len() << 4 | VALUE_TYPE_BYTES); - self.extra_raw.extend(change.extra_bytes()); - } - } - - fn finish(self) -> (Vec, Vec) { - let mut coldata = vec![ - self.actor.finish(DOC_ACTOR), - self.seq.finish(DOC_SEQ), - self.max_op.finish(DOC_MAX_OP), - self.time.finish(DOC_TIME), - self.message.finish(DOC_MESSAGE), - self.deps_num.finish(DOC_DEPS_NUM), - self.deps_index.finish(DOC_DEPS_INDEX), - self.extra_len.finish(DOC_EXTRA_LEN), - ColData::new(DOC_EXTRA_RAW, self.extra_raw), - ]; - coldata.sort_unstable_by(|a, b| a.col.cmp(&b.col)); - - let mut data = Vec::new(); - let mut info = Vec::new(); - coldata - .iter() - .filter(|&d| !d.data.is_empty()) - .count() - .encode(&mut info) - .ok(); - for d in &mut coldata { - d.deflate(); - d.encode_col_len(&mut info).ok(); - } - for d in &coldata { - data.write_all(d.data.as_slice()).ok(); - } - (data, info) - } -} - -pub(crate) struct DocOpEncoder { - actor: RleEncoder, - ctr: DeltaEncoder, - obj: ObjEncoder, - key: KeyEncoder, - insert: BooleanEncoder, - action: 
RleEncoder, - val: ValEncoder, - succ: SuccEncoder, -} - -impl DocOpEncoder { - #[instrument(level = "debug", skip(ops, actors))] - pub(crate) fn encode_doc_ops<'a, 'b, 'c, I>( - ops: I, - actors: &'a [usize], - props: &'b [String], - ) -> (Vec, Vec) - where - I: IntoIterator, - { - let mut e = Self::new(); - e.encode(ops, actors, props); - e.finish() - } - - fn new() -> DocOpEncoder { - DocOpEncoder { - actor: RleEncoder::new(), - ctr: DeltaEncoder::new(), - obj: ObjEncoder::new(), - key: KeyEncoder::new(), - insert: BooleanEncoder::new(), - action: RleEncoder::new(), - val: ValEncoder::new(), - succ: SuccEncoder::new(), - } - } - - fn encode<'a, I>(&mut self, ops: I, actors: &[usize], props: &[String]) - where - I: IntoIterator, - { - for (obj, op) in ops { - self.actor.append_value(actors[op.id.actor()]); - self.ctr.append_value(op.id.counter()); - self.obj.append(obj, actors); - self.key.append(op.key, actors, props); - self.insert.append(op.insert); - self.succ.append(&op.succ, actors); - let action = match &op.action { - amp::OpType::Put(value) => { - self.val.append_value(value, actors); - Action::Set - } - amp::OpType::Increment(val) => { - self.val.append_value(&ScalarValue::Int(*val), actors); - Action::Inc - } - amp::OpType::Delete => { - self.val.append_null(); - Action::Del - } - amp::OpType::Make(kind) => { - self.val.append_null(); - match kind { - ObjType::Map => Action::MakeMap, - ObjType::Table => Action::MakeTable, - ObjType::List => Action::MakeList, - ObjType::Text => Action::MakeText, - } - } - }; - self.action.append_value(action); - } - } - - fn finish(self) -> (Vec, Vec) { - let mut coldata = vec![ - self.actor.finish(COL_ID_ACTOR), - self.ctr.finish(COL_ID_CTR), - self.insert.finish(COL_INSERT), - self.action.finish(COL_ACTION), - ]; - coldata.extend(self.obj.finish()); - coldata.extend(self.key.finish()); - coldata.extend(self.val.finish()); - coldata.extend(self.succ.finish()); - coldata.sort_unstable_by(|a, b| a.col.cmp(&b.col)); - - 
let mut info = Vec::new(); - let mut data = Vec::new(); - coldata - .iter() - .filter(|&d| !d.data.is_empty()) - .count() - .encode(&mut info) - .ok(); - for d in &mut coldata { - d.deflate(); - d.encode_col_len(&mut info).ok(); - } - for d in &coldata { - data.write_all(d.data.as_slice()).ok(); - } - (data, info) - } -} - -//pub(crate) encode_cols(a) -> (Vec, HashMap>) { } - -pub(crate) struct ColumnEncoder { - obj: ObjEncoderOld, - key: KeyEncoderOld, - insert: BooleanEncoder, - action: RleEncoder, - val: ValEncoder, - pred: PredEncoder, -} - -impl ColumnEncoder { - pub(crate) fn encode_ops<'a, I>( - ops: I, - actors: &[ActorId], - ) -> (Vec, HashMap>) - where - I: IntoIterator, - { - let mut e = Self::new(); - e.encode(ops, actors); - e.finish() - } - - fn new() -> ColumnEncoder { - ColumnEncoder { - obj: ObjEncoderOld::new(), - key: KeyEncoderOld::new(), - insert: BooleanEncoder::new(), - action: RleEncoder::new(), - val: ValEncoder::new(), - pred: PredEncoder::new(), - } - } - - fn encode<'a, 'b, I>(&'a mut self, ops: I, actors: &[ActorId]) - where - I: IntoIterator, - { - for op in ops { - self.append(op, actors); - } - } - - fn append(&mut self, op: &::Op, actors: &[ActorId]) { - self.obj.append(&op.obj, actors); - self.key.append(op.key.clone(), actors); - self.insert.append(op.insert); - - self.pred.append(&op.pred, actors); - let action = match &op.action { - OpType::Put(value) => { - self.val.append_value2(value, actors); - Action::Set - } - OpType::Increment(val) => { - self.val.append_value2(&ScalarValue::Int(*val), actors); - Action::Inc - } - OpType::Delete => { - self.val.append_null(); - Action::Del - } - OpType::Make(kind) => { - self.val.append_null(); - match kind { - ObjType::Map => Action::MakeMap, - ObjType::Table => Action::MakeTable, - ObjType::List => Action::MakeList, - ObjType::Text => Action::MakeText, - } - } - }; - self.action.append_value(action); - } - - fn finish(self) -> (Vec, HashMap>) { - // allocate for the exact number of 
columns - let mut coldata = Vec::with_capacity( - 2 + ObjEncoderOld::COLUMNS - + KeyEncoderOld::COLUMNS - + ValEncoder::COLUMNS - + PredEncoder::COLUMNS, - ); - coldata.push(self.insert.finish(COL_INSERT)); - coldata.push(self.action.finish(COL_ACTION)); - coldata.extend(self.obj.finish()); - coldata.extend(self.key.finish()); - coldata.extend(self.val.finish()); - coldata.extend(self.pred.finish()); - coldata.sort_unstable_by(|a, b| a.col.cmp(&b.col)); - - let non_empty_column_count = coldata.iter().filter(|&d| !d.data.is_empty()).count(); - let data_len: usize = coldata.iter().map(|d| d.data.len()).sum(); - // 1 for the non_empty_column_count, 2 for each non_empty column (encode_col_len), data_len - // for all the actual data - let mut data = Vec::with_capacity(1 + (non_empty_column_count * 2) + data_len); - - non_empty_column_count.encode(&mut data).ok(); - for d in &mut coldata { - d.encode_col_len(&mut data).ok(); - } - - let mut rangemap = HashMap::with_capacity(non_empty_column_count); - for d in &coldata { - let begin = data.len(); - data.write_all(d.data.as_slice()).ok(); - if !d.data.is_empty() { - rangemap.insert(d.col, begin..data.len()); - } - } - (data, rangemap) - } -} - -fn col_iter<'a, T>(bytes: &'a [u8], ops: &'a HashMap>, col_id: u32) -> T -where - T: From>, -{ - let bytes = if let Some(r) = ops.get(&col_id) { - Cow::Borrowed(&bytes[r.clone()]) - } else if let Some(r) = ops.get(&(col_id | COLUMN_TYPE_DEFLATE)) { - let mut decoder = DeflateDecoder::new(&bytes[r.clone()]); - let mut inflated = Vec::new(); - //TODO this could throw if the compression is corrupt, we should propagate the error rather - //than unwrapping - decoder.read_to_end(&mut inflated).unwrap(); - Cow::Owned(inflated) - } else { - Cow::from(&[] as &[u8]) - }; - T::from(bytes) -} - -const VALUE_TYPE_NULL: usize = 0; -const VALUE_TYPE_FALSE: usize = 1; -const VALUE_TYPE_TRUE: usize = 2; -const VALUE_TYPE_LEB128_UINT: usize = 3; -const VALUE_TYPE_LEB128_INT: usize = 4; -const 
VALUE_TYPE_IEEE754: usize = 5; -const VALUE_TYPE_UTF8: usize = 6; -const VALUE_TYPE_BYTES: usize = 7; -const VALUE_TYPE_COUNTER: usize = 8; -const VALUE_TYPE_TIMESTAMP: usize = 9; -const VALUE_TYPE_CURSOR: usize = 10; -const VALUE_TYPE_MIN_UNKNOWN: usize = 11; -const VALUE_TYPE_MAX_UNKNOWN: usize = 15; - -pub(crate) const COLUMN_TYPE_GROUP_CARD: u32 = 0; -pub(crate) const COLUMN_TYPE_ACTOR_ID: u32 = 1; -pub(crate) const COLUMN_TYPE_INT_RLE: u32 = 2; -pub(crate) const COLUMN_TYPE_INT_DELTA: u32 = 3; -pub(crate) const COLUMN_TYPE_BOOLEAN: u32 = 4; -pub(crate) const COLUMN_TYPE_STRING_RLE: u32 = 5; -pub(crate) const COLUMN_TYPE_VALUE_LEN: u32 = 6; -pub(crate) const COLUMN_TYPE_VALUE_RAW: u32 = 7; -pub(crate) const COLUMN_TYPE_DEFLATE: u32 = 8; - -#[derive(PartialEq, Debug, Clone, Copy)] -#[repr(u32)] -pub(crate) enum Action { - MakeMap, - Set, - MakeList, - Del, - MakeText, - Inc, - MakeTable, -} -const ACTIONS: [Action; 7] = [ - Action::MakeMap, - Action::Set, - Action::MakeList, - Action::Del, - Action::MakeText, - Action::Inc, - Action::MakeTable, -]; - -impl Decodable for Action { - fn decode(bytes: &mut R) -> Option - where - R: Read, - { - let num = usize::decode::(bytes)?; - ACTIONS.get(num).copied() - } -} - -const COL_OBJ_ACTOR: u32 = COLUMN_TYPE_ACTOR_ID; -const COL_OBJ_CTR: u32 = COLUMN_TYPE_INT_RLE; -const COL_KEY_ACTOR: u32 = 1 << 4 | COLUMN_TYPE_ACTOR_ID; -const COL_KEY_CTR: u32 = 1 << 4 | COLUMN_TYPE_INT_DELTA; -const COL_KEY_STR: u32 = 1 << 4 | COLUMN_TYPE_STRING_RLE; -const COL_ID_ACTOR: u32 = 2 << 4 | COLUMN_TYPE_ACTOR_ID; -const COL_ID_CTR: u32 = 2 << 4 | COLUMN_TYPE_INT_DELTA; -const COL_INSERT: u32 = 3 << 4 | COLUMN_TYPE_BOOLEAN; -const COL_ACTION: u32 = 4 << 4 | COLUMN_TYPE_INT_RLE; -const COL_VAL_LEN: u32 = 5 << 4 | COLUMN_TYPE_VALUE_LEN; -const COL_VAL_RAW: u32 = 5 << 4 | COLUMN_TYPE_VALUE_RAW; -const COL_PRED_NUM: u32 = 7 << 4 | COLUMN_TYPE_GROUP_CARD; -const COL_PRED_ACTOR: u32 = 7 << 4 | COLUMN_TYPE_ACTOR_ID; -const COL_PRED_CTR: u32 = 7 << 
4 | COLUMN_TYPE_INT_DELTA; -const COL_SUCC_NUM: u32 = 8 << 4 | COLUMN_TYPE_GROUP_CARD; -const COL_SUCC_ACTOR: u32 = 8 << 4 | COLUMN_TYPE_ACTOR_ID; -const COL_SUCC_CTR: u32 = 8 << 4 | COLUMN_TYPE_INT_DELTA; -const COL_REF_CTR: u32 = 6 << 4 | COLUMN_TYPE_INT_RLE; -const COL_REF_ACTOR: u32 = 6 << 4 | COLUMN_TYPE_ACTOR_ID; - -const DOC_ACTOR: u32 = /* 0 << 4 */ COLUMN_TYPE_ACTOR_ID; -const DOC_SEQ: u32 = /* 0 << 4 */ COLUMN_TYPE_INT_DELTA; -const DOC_MAX_OP: u32 = 1 << 4 | COLUMN_TYPE_INT_DELTA; -const DOC_TIME: u32 = 2 << 4 | COLUMN_TYPE_INT_DELTA; -const DOC_MESSAGE: u32 = 3 << 4 | COLUMN_TYPE_STRING_RLE; -const DOC_DEPS_NUM: u32 = 4 << 4 | COLUMN_TYPE_GROUP_CARD; -const DOC_DEPS_INDEX: u32 = 4 << 4 | COLUMN_TYPE_INT_DELTA; -const DOC_EXTRA_LEN: u32 = 5 << 4 | COLUMN_TYPE_VALUE_LEN; -const DOC_EXTRA_RAW: u32 = 5 << 4 | COLUMN_TYPE_VALUE_RAW; - -/* -const DOCUMENT_COLUMNS = { - actor: 0 << 3 | COLUMN_TYPE.ACTOR_ID, - seq: 0 << 3 | COLUMN_TYPE.INT_DELTA, - maxOp: 1 << 3 | COLUMN_TYPE.INT_DELTA, - time: 2 << 3 | COLUMN_TYPE.INT_DELTA, - message: 3 << 3 | COLUMN_TYPE.STRING_RLE, - depsNum: 4 << 3 | COLUMN_TYPE.GROUP_CARD, - depsIndex: 4 << 3 | COLUMN_TYPE.INT_DELTA, - extraLen: 5 << 3 | COLUMN_TYPE.VALUE_LEN, - extraRaw: 5 << 3 | COLUMN_TYPE.VALUE_RAW -} -*/ +//! Types for reading data which is stored in a columnar storage format +//! +//! The details of how values are encoded in `encoding`, which exposes a set of "decoder" and +//! "encoder" types. +//! +//! The `column_range` module exposes a set of types - most of which are newtypes over +//! `Range` - which have useful instance methods such as `encode()` to create a new range and +//! `decoder()` to return an iterator of the correct type. 
+pub(crate) mod column_range; +pub(crate) use column_range::Key; +pub(crate) mod encoding; + +mod splice_error; +pub(crate) use splice_error::SpliceError; diff --git a/automerge/src/columnar_2/column_range.rs b/automerge/src/columnar/column_range.rs similarity index 100% rename from automerge/src/columnar_2/column_range.rs rename to automerge/src/columnar/column_range.rs diff --git a/automerge/src/columnar_2/column_range/boolean.rs b/automerge/src/columnar/column_range/boolean.rs similarity index 93% rename from automerge/src/columnar_2/column_range/boolean.rs rename to automerge/src/columnar/column_range/boolean.rs index 25e3783e..3cefaf0d 100644 --- a/automerge/src/columnar_2/column_range/boolean.rs +++ b/automerge/src/columnar/column_range/boolean.rs @@ -1,6 +1,6 @@ use std::{borrow::Cow, ops::Range}; -use crate::columnar_2::encoding::{BooleanDecoder, BooleanEncoder}; +use crate::columnar::encoding::{BooleanDecoder, BooleanEncoder}; #[derive(Clone, Debug, PartialEq)] pub(crate) struct BooleanRange(Range); diff --git a/automerge/src/columnar_2/column_range/delta.rs b/automerge/src/columnar/column_range/delta.rs similarity index 97% rename from automerge/src/columnar_2/column_range/delta.rs rename to automerge/src/columnar/column_range/delta.rs index eb64ae30..9dae43b8 100644 --- a/automerge/src/columnar_2/column_range/delta.rs +++ b/automerge/src/columnar/column_range/delta.rs @@ -1,6 +1,6 @@ use std::{borrow::Cow, convert::Infallible, ops::Range}; -use crate::columnar_2::{ +use crate::columnar::{ encoding::{raw, DeltaDecoder, DeltaEncoder, Sink}, SpliceError, }; @@ -97,7 +97,7 @@ impl From for Range { #[cfg(test)] mod tests { use super::*; - use crate::columnar_2::encoding::properties::option_splice_scenario; + use crate::columnar::encoding::properties::option_splice_scenario; use proptest::prelude::*; fn encode>>(vals: I) -> (DeltaRange, Vec) { diff --git a/automerge/src/columnar_2/column_range/deps.rs b/automerge/src/columnar/column_range/deps.rs similarity 
index 97% rename from automerge/src/columnar_2/column_range/deps.rs rename to automerge/src/columnar/column_range/deps.rs index 386b5a4f..df49192a 100644 --- a/automerge/src/columnar_2/column_range/deps.rs +++ b/automerge/src/columnar/column_range/deps.rs @@ -1,5 +1,5 @@ use super::{DeltaRange, RleRange}; -use crate::columnar_2::encoding::{DecodeColumnError, DeltaDecoder, RleDecoder}; +use crate::columnar::encoding::{DecodeColumnError, DeltaDecoder, RleDecoder}; /// A grouped column containing lists of u64s #[derive(Clone, Debug)] diff --git a/automerge/src/columnar_2/column_range/generic.rs b/automerge/src/columnar/column_range/generic.rs similarity index 97% rename from automerge/src/columnar_2/column_range/generic.rs rename to automerge/src/columnar/column_range/generic.rs index 8fa59b32..03a0e362 100644 --- a/automerge/src/columnar_2/column_range/generic.rs +++ b/automerge/src/columnar/column_range/generic.rs @@ -1,6 +1,6 @@ use std::ops::Range; -use crate::{columnar_2::encoding::DecodeColumnError, ScalarValue}; +use crate::{columnar::encoding::DecodeColumnError, ScalarValue}; use super::{ValueIter, ValueRange}; mod simple; diff --git a/automerge/src/columnar_2/column_range/generic/group.rs b/automerge/src/columnar/column_range/generic/group.rs similarity index 99% rename from automerge/src/columnar_2/column_range/generic/group.rs rename to automerge/src/columnar/column_range/generic/group.rs index 9fb379da..b1392428 100644 --- a/automerge/src/columnar_2/column_range/generic/group.rs +++ b/automerge/src/columnar/column_range/generic/group.rs @@ -1,7 +1,7 @@ use std::ops::Range; use super::{CellValue, SimpleColIter, SimpleColRange, SimpleValue}; -use crate::columnar_2::{ +use crate::columnar::{ column_range::{RleRange, ValueIter, ValueRange}, encoding::{col_error::DecodeColumnError, RleDecoder}, }; diff --git a/automerge/src/columnar_2/column_range/generic/simple.rs b/automerge/src/columnar/column_range/generic/simple.rs similarity index 98% rename from 
automerge/src/columnar_2/column_range/generic/simple.rs rename to automerge/src/columnar/column_range/generic/simple.rs index 5115ff96..9eb3c177 100644 --- a/automerge/src/columnar_2/column_range/generic/simple.rs +++ b/automerge/src/columnar/column_range/generic/simple.rs @@ -1,6 +1,6 @@ use std::ops::Range; -use crate::columnar_2::{ +use crate::columnar::{ column_range::{BooleanRange, DeltaRange, RleRange}, encoding::{raw, BooleanDecoder, DeltaDecoder, RleDecoder}, }; diff --git a/automerge/src/columnar_2/column_range/key.rs b/automerge/src/columnar/column_range/key.rs similarity index 99% rename from automerge/src/columnar_2/column_range/key.rs rename to automerge/src/columnar/column_range/key.rs index da2e694b..5283fc39 100644 --- a/automerge/src/columnar_2/column_range/key.rs +++ b/automerge/src/columnar/column_range/key.rs @@ -2,7 +2,7 @@ use std::{convert::Infallible, ops::Range}; use super::{DeltaRange, RleRange}; use crate::{ - columnar_2::{ + columnar::{ encoding::{ raw, DecodeColumnError, DeltaDecoder, DeltaEncoder, RleDecoder, RleEncoder, Sink, }, diff --git a/automerge/src/columnar_2/column_range/obj_id.rs b/automerge/src/columnar/column_range/obj_id.rs similarity index 99% rename from automerge/src/columnar_2/column_range/obj_id.rs rename to automerge/src/columnar/column_range/obj_id.rs index e12b2530..f6525b44 100644 --- a/automerge/src/columnar_2/column_range/obj_id.rs +++ b/automerge/src/columnar/column_range/obj_id.rs @@ -1,7 +1,7 @@ use std::{convert::Infallible, ops::Range}; use crate::{ - columnar_2::{ + columnar::{ encoding::{raw, DecodeColumnError, RleDecoder, RleEncoder, Sink}, SpliceError, }, diff --git a/automerge/src/columnar_2/column_range/opid.rs b/automerge/src/columnar/column_range/opid.rs similarity index 98% rename from automerge/src/columnar_2/column_range/opid.rs rename to automerge/src/columnar/column_range/opid.rs index 1b1817cb..592f6041 100644 --- a/automerge/src/columnar_2/column_range/opid.rs +++ 
b/automerge/src/columnar/column_range/opid.rs @@ -2,7 +2,7 @@ use std::ops::Range; use super::{DeltaRange, RleRange}; use crate::{ - columnar_2::{ + columnar::{ encoding::{ raw, DecodeColumnError, DeltaDecoder, DeltaEncoder, RleDecoder, RleEncoder, Sink, }, @@ -169,7 +169,7 @@ impl OpIdEncoder> { mod tests { use super::*; use crate::{ - columnar_2::encoding::properties::{opid, splice_scenario}, + columnar::encoding::properties::{opid, splice_scenario}, types::OpId, }; use proptest::prelude::*; diff --git a/automerge/src/columnar_2/column_range/opid_list.rs b/automerge/src/columnar/column_range/opid_list.rs similarity index 99% rename from automerge/src/columnar_2/column_range/opid_list.rs rename to automerge/src/columnar/column_range/opid_list.rs index 417a2c1a..03b92ccf 100644 --- a/automerge/src/columnar_2/column_range/opid_list.rs +++ b/automerge/src/columnar/column_range/opid_list.rs @@ -2,7 +2,7 @@ use std::{convert::Infallible, ops::Range}; use super::{DeltaRange, RleRange}; use crate::{ - columnar_2::{ + columnar::{ encoding::{ raw, DecodeColumnError, DeltaDecoder, DeltaEncoder, RleDecoder, RleEncoder, Sink, }, @@ -286,7 +286,7 @@ mod tests { use proptest::collection::vec as propvec; use proptest::prelude::*; - use crate::columnar_2::encoding::properties::{opid, splice_scenario}; + use crate::columnar::encoding::properties::{opid, splice_scenario}; fn encode(opids: Vec>) -> (OpIdListRange, Vec) { let mut out = Vec::new(); diff --git a/automerge/src/columnar_2/column_range/raw.rs b/automerge/src/columnar/column_range/raw.rs similarity index 94% rename from automerge/src/columnar_2/column_range/raw.rs rename to automerge/src/columnar/column_range/raw.rs index de512026..3520a89a 100644 --- a/automerge/src/columnar_2/column_range/raw.rs +++ b/automerge/src/columnar/column_range/raw.rs @@ -1,6 +1,6 @@ use std::{borrow::Cow, ops::Range}; -use crate::columnar_2::encoding::RawDecoder; +use crate::columnar::encoding::RawDecoder; #[derive(Clone, Debug, PartialEq)] 
pub(crate) struct RawRange(Range); diff --git a/automerge/src/columnar_2/column_range/rle.rs b/automerge/src/columnar/column_range/rle.rs similarity index 98% rename from automerge/src/columnar_2/column_range/rle.rs rename to automerge/src/columnar/column_range/rle.rs index 0729a300..63c0b123 100644 --- a/automerge/src/columnar_2/column_range/rle.rs +++ b/automerge/src/columnar/column_range/rle.rs @@ -5,7 +5,7 @@ use std::{ ops::Range, }; -use crate::columnar_2::{ +use crate::columnar::{ encoding::{raw, Decodable, Encodable, RleDecoder, RleEncoder, Sink}, SpliceError, }; @@ -137,7 +137,7 @@ impl From> for Range { #[cfg(test)] mod tests { use super::*; - use crate::columnar_2::encoding::properties::option_splice_scenario; + use crate::columnar::encoding::properties::option_splice_scenario; use proptest::prelude::*; use std::{borrow::Cow, convert::Infallible}; diff --git a/automerge/src/columnar_2/column_range/value.rs b/automerge/src/columnar/column_range/value.rs similarity index 99% rename from automerge/src/columnar_2/column_range/value.rs rename to automerge/src/columnar/column_range/value.rs index f2c9e419..7d54765e 100644 --- a/automerge/src/columnar_2/column_range/value.rs +++ b/automerge/src/columnar/column_range/value.rs @@ -1,7 +1,7 @@ use std::{borrow::Cow, ops::Range}; use crate::{ - columnar_2::{ + columnar::{ encoding::{ leb128::{lebsize, ulebsize}, raw, DecodeColumnError, RawBytes, RawDecoder, RawEncoder, RleDecoder, RleEncoder, Sink, @@ -407,7 +407,7 @@ impl ValueMeta { } } -impl<'a> From<&ScalarValue> for ValueMeta { +impl From<&ScalarValue> for ValueMeta { fn from(p: &ScalarValue) -> Self { match p { ScalarValue::Uint(i) => Self((ulebsize(*i) << 4) | 3), @@ -441,7 +441,7 @@ impl From for u64 { } } -impl<'a> From<&ScalarValue> for ValueType { +impl From<&ScalarValue> for ValueType { fn from(p: &ScalarValue) -> Self { match p { ScalarValue::Uint(_) => ValueType::Uleb, @@ -481,7 +481,7 @@ impl From for u64 { #[cfg(test)] mod tests { use super::*; - 
use crate::columnar_2::encoding::properties::{scalar_value, splice_scenario}; + use crate::columnar::encoding::properties::{scalar_value, splice_scenario}; use proptest::prelude::*; use std::borrow::Cow; diff --git a/automerge/src/columnar_2/encoding.rs b/automerge/src/columnar/encoding.rs similarity index 100% rename from automerge/src/columnar_2/encoding.rs rename to automerge/src/columnar/encoding.rs diff --git a/automerge/src/columnar_2/encoding/boolean.rs b/automerge/src/columnar/encoding/boolean.rs similarity index 100% rename from automerge/src/columnar_2/encoding/boolean.rs rename to automerge/src/columnar/encoding/boolean.rs diff --git a/automerge/src/columnar_2/encoding/col_error.rs b/automerge/src/columnar/encoding/col_error.rs similarity index 100% rename from automerge/src/columnar_2/encoding/col_error.rs rename to automerge/src/columnar/encoding/col_error.rs diff --git a/automerge/src/columnar_2/encoding/column_decoder.rs b/automerge/src/columnar/encoding/column_decoder.rs similarity index 99% rename from automerge/src/columnar_2/encoding/column_decoder.rs rename to automerge/src/columnar/encoding/column_decoder.rs index 8bc34f69..8e3237fb 100644 --- a/automerge/src/columnar_2/encoding/column_decoder.rs +++ b/automerge/src/columnar/encoding/column_decoder.rs @@ -1,5 +1,5 @@ use crate::{ - columnar_2::{ + columnar::{ column_range::{DepsIter, KeyIter, ObjIdIter, OpIdIter, OpIdListIter, ValueIter}, encoding, Key, }, diff --git a/automerge/src/columnar_2/encoding/decodable_impls.rs b/automerge/src/columnar/encoding/decodable_impls.rs similarity index 100% rename from automerge/src/columnar_2/encoding/decodable_impls.rs rename to automerge/src/columnar/encoding/decodable_impls.rs diff --git a/automerge/src/columnar_2/encoding/delta.rs b/automerge/src/columnar/encoding/delta.rs similarity index 100% rename from automerge/src/columnar_2/encoding/delta.rs rename to automerge/src/columnar/encoding/delta.rs diff --git 
a/automerge/src/columnar_2/encoding/encodable_impls.rs b/automerge/src/columnar/encoding/encodable_impls.rs similarity index 100% rename from automerge/src/columnar_2/encoding/encodable_impls.rs rename to automerge/src/columnar/encoding/encodable_impls.rs diff --git a/automerge/src/columnar_2/encoding/leb128.rs b/automerge/src/columnar/encoding/leb128.rs similarity index 100% rename from automerge/src/columnar_2/encoding/leb128.rs rename to automerge/src/columnar/encoding/leb128.rs diff --git a/automerge/src/columnar_2/encoding/properties.rs b/automerge/src/columnar/encoding/properties.rs similarity index 99% rename from automerge/src/columnar_2/encoding/properties.rs rename to automerge/src/columnar/encoding/properties.rs index b5c0bfa8..a6345cad 100644 --- a/automerge/src/columnar_2/encoding/properties.rs +++ b/automerge/src/columnar/encoding/properties.rs @@ -6,7 +6,7 @@ use proptest::prelude::*; use smol_str::SmolStr; use crate::{ - columnar_2::Key, + columnar::Key, types::{ElemId, OpId, ScalarValue}, }; diff --git a/automerge/src/columnar_2/encoding/raw.rs b/automerge/src/columnar/encoding/raw.rs similarity index 100% rename from automerge/src/columnar_2/encoding/raw.rs rename to automerge/src/columnar/encoding/raw.rs diff --git a/automerge/src/columnar_2/encoding/rle.rs b/automerge/src/columnar/encoding/rle.rs similarity index 100% rename from automerge/src/columnar_2/encoding/rle.rs rename to automerge/src/columnar/encoding/rle.rs diff --git a/automerge/src/columnar_2/splice_error.rs b/automerge/src/columnar/splice_error.rs similarity index 100% rename from automerge/src/columnar_2/splice_error.rs rename to automerge/src/columnar/splice_error.rs diff --git a/automerge/src/columnar_2.rs b/automerge/src/columnar_2.rs deleted file mode 100644 index bb727626..00000000 --- a/automerge/src/columnar_2.rs +++ /dev/null @@ -1,14 +0,0 @@ -//! Types for reading data which is stored in a columnar storage format -//! -//! 
The details of how values are encoded in `encoding`, which exposes a set of "decoder" and -//! "encoder" types. -//! -//! The `column_range` module exposes a set of types - most of which are newtypes over -//! `Range` - which have useful instance methods such as `encode()` to create a new range and -//! `decoder()` to return an iterator of the correct type. -pub(crate) mod column_range; -pub(crate) use column_range::Key; -pub(crate) mod encoding; - -mod splice_error; -pub(crate) use splice_error::SpliceError; diff --git a/automerge/src/encoding.rs b/automerge/src/encoding.rs deleted file mode 100644 index 3b8b470c..00000000 --- a/automerge/src/encoding.rs +++ /dev/null @@ -1,391 +0,0 @@ -use core::fmt::Debug; -use std::{ - io, - io::{Read, Write}, - mem, - num::NonZeroU64, -}; - -use flate2::{bufread::DeflateEncoder, Compression}; -use smol_str::SmolStr; - -use crate::columnar::COLUMN_TYPE_DEFLATE; -use crate::ActorId; - -pub(crate) const DEFLATE_MIN_SIZE: usize = 256; - -/// The error type for encoding operations. -#[derive(Debug, thiserror::Error)] -pub enum Error { - #[error(transparent)] - Io(#[from] io::Error), -} - -impl PartialEq for Error { - fn eq(&self, other: &Error) -> bool { - match (self, other) { - (Self::Io(error1), Self::Io(error2)) => error1.kind() == error2.kind(), - } - } -} - -/// Encodes booleans by storing the count of the same value. -/// -/// The sequence of numbers describes the count of false values on even indices (0-indexed) and the -/// count of true values on odd indices (0-indexed). -/// -/// Counts are encoded as usize. 
-pub(crate) struct BooleanEncoder { - buf: Vec, - last: bool, - count: usize, -} - -impl BooleanEncoder { - pub(crate) fn new() -> BooleanEncoder { - BooleanEncoder { - buf: Vec::new(), - last: false, - count: 0, - } - } - - pub(crate) fn append(&mut self, value: bool) { - if value == self.last { - self.count += 1; - } else { - self.count.encode(&mut self.buf).ok(); - self.last = value; - self.count = 1; - } - } - - pub(crate) fn finish(mut self, col: u32) -> ColData { - if self.count > 0 { - self.count.encode(&mut self.buf).ok(); - } - ColData::new(col, self.buf) - } -} - -/// Encodes integers as the change since the previous value. -/// -/// The initial value is 0 encoded as u64. Deltas are encoded as i64. -/// -/// Run length encoding is then applied to the resulting sequence. -pub(crate) struct DeltaEncoder { - rle: RleEncoder, - absolute_value: u64, -} - -impl DeltaEncoder { - pub(crate) fn new() -> DeltaEncoder { - DeltaEncoder { - rle: RleEncoder::new(), - absolute_value: 0, - } - } - - pub(crate) fn append_value(&mut self, value: u64) { - self.rle - .append_value(value as i64 - self.absolute_value as i64); - self.absolute_value = value; - } - - pub(crate) fn append_null(&mut self) { - self.rle.append_null(); - } - - pub(crate) fn finish(self, col: u32) -> ColData { - self.rle.finish(col) - } -} - -enum RleState { - Empty, - NullRun(usize), - LiteralRun(T, Vec), - LoneVal(T), - Run(T, usize), -} - -/// Encodes data in run lengh encoding format. This is very efficient for long repeats of data -/// -/// There are 3 types of 'run' in this encoder: -/// - a normal run (compresses repeated values) -/// - a null run (compresses repeated nulls) -/// - a literal run (no compression) -/// -/// A normal run consists of the length of the run (encoded as an i64) followed by the encoded value that this run contains. -/// -/// A null run consists of a zero value (encoded as an i64) followed by the length of the null run (encoded as a usize). 
-/// -/// A literal run consists of the **negative** length of the run (encoded as an i64) followed by the values in the run. -/// -/// Therefore all the types start with an encoded i64, the value of which determines the type of the following data. -pub(crate) struct RleEncoder -where - T: Encodable + PartialEq + Clone, -{ - buf: Vec, - state: RleState, -} - -impl RleEncoder -where - T: Encodable + PartialEq + Clone, -{ - pub(crate) fn new() -> RleEncoder { - RleEncoder { - buf: Vec::new(), - state: RleState::Empty, - } - } - - pub(crate) fn finish(mut self, col: u32) -> ColData { - match self.take_state() { - // this covers `only_nulls` - RleState::NullRun(size) => { - if !self.buf.is_empty() { - self.flush_null_run(size); - } - } - RleState::LoneVal(value) => self.flush_lit_run(vec![value]), - RleState::Run(value, len) => self.flush_run(&value, len), - RleState::LiteralRun(last, mut run) => { - run.push(last); - self.flush_lit_run(run); - } - RleState::Empty => {} - } - ColData::new(col, self.buf) - } - - fn flush_run(&mut self, val: &T, len: usize) { - self.encode(&(len as i64)); - self.encode(val); - } - - fn flush_null_run(&mut self, len: usize) { - self.encode::(&0); - self.encode(&len); - } - - fn flush_lit_run(&mut self, run: Vec) { - self.encode(&-(run.len() as i64)); - for val in run { - self.encode(&val); - } - } - - fn take_state(&mut self) -> RleState { - let mut state = RleState::Empty; - mem::swap(&mut self.state, &mut state); - state - } - - pub(crate) fn append_null(&mut self) { - self.state = match self.take_state() { - RleState::Empty => RleState::NullRun(1), - RleState::NullRun(size) => RleState::NullRun(size + 1), - RleState::LoneVal(other) => { - self.flush_lit_run(vec![other]); - RleState::NullRun(1) - } - RleState::Run(other, len) => { - self.flush_run(&other, len); - RleState::NullRun(1) - } - RleState::LiteralRun(last, mut run) => { - run.push(last); - self.flush_lit_run(run); - RleState::NullRun(1) - } - } - } - - pub(crate) fn 
append_value(&mut self, value: T) { - self.state = match self.take_state() { - RleState::Empty => RleState::LoneVal(value), - RleState::LoneVal(other) => { - if other == value { - RleState::Run(value, 2) - } else { - let mut v = Vec::with_capacity(2); - v.push(other); - RleState::LiteralRun(value, v) - } - } - RleState::Run(other, len) => { - if other == value { - RleState::Run(other, len + 1) - } else { - self.flush_run(&other, len); - RleState::LoneVal(value) - } - } - RleState::LiteralRun(last, mut run) => { - if last == value { - self.flush_lit_run(run); - RleState::Run(value, 2) - } else { - run.push(last); - RleState::LiteralRun(value, run) - } - } - RleState::NullRun(size) => { - self.flush_null_run(size); - RleState::LoneVal(value) - } - } - } - - fn encode(&mut self, val: &V) - where - V: Encodable, - { - val.encode(&mut self.buf).ok(); - } -} - -pub(crate) trait Encodable { - fn encode_with_actors_to_vec(&self, actors: &mut [ActorId]) -> io::Result> { - let mut buf = Vec::new(); - self.encode_with_actors(&mut buf, actors)?; - Ok(buf) - } - - fn encode_with_actors(&self, buf: &mut R, _actors: &[ActorId]) -> io::Result { - self.encode(buf) - } - - fn encode(&self, buf: &mut R) -> io::Result; - - fn encode_vec(&self, buf: &mut Vec) -> usize { - self.encode(buf).unwrap() - } -} - -impl Encodable for SmolStr { - fn encode(&self, buf: &mut R) -> io::Result { - let bytes = self.as_bytes(); - let head = bytes.len().encode(buf)?; - buf.write_all(bytes)?; - Ok(head + bytes.len()) - } -} - -impl Encodable for String { - fn encode(&self, buf: &mut R) -> io::Result { - let bytes = self.as_bytes(); - let head = bytes.len().encode(buf)?; - buf.write_all(bytes)?; - Ok(head + bytes.len()) - } -} - -impl Encodable for Option { - fn encode(&self, buf: &mut R) -> io::Result { - if let Some(s) = self { - s.encode(buf) - } else { - 0.encode(buf) - } - } -} - -impl Encodable for u64 { - fn encode(&self, buf: &mut R) -> io::Result { - leb128::write::unsigned(buf, *self) - } -} - 
-impl Encodable for NonZeroU64 { - fn encode(&self, buf: &mut R) -> io::Result { - leb128::write::unsigned(buf, self.get()) - } -} - -impl Encodable for f64 { - fn encode(&self, buf: &mut R) -> io::Result { - let bytes = self.to_le_bytes(); - buf.write_all(&bytes)?; - Ok(bytes.len()) - } -} - -impl Encodable for f32 { - fn encode(&self, buf: &mut R) -> io::Result { - let bytes = self.to_le_bytes(); - buf.write_all(&bytes)?; - Ok(bytes.len()) - } -} - -impl Encodable for i64 { - fn encode(&self, buf: &mut R) -> io::Result { - leb128::write::signed(buf, *self) - } -} - -impl Encodable for usize { - fn encode(&self, buf: &mut R) -> io::Result { - (*self as u64).encode(buf) - } -} - -impl Encodable for u32 { - fn encode(&self, buf: &mut R) -> io::Result { - u64::from(*self).encode(buf) - } -} - -impl Encodable for i32 { - fn encode(&self, buf: &mut R) -> io::Result { - i64::from(*self).encode(buf) - } -} - -#[derive(Debug)] -pub(crate) struct ColData { - pub(crate) col: u32, - pub(crate) data: Vec, - #[cfg(debug_assertions)] - has_been_deflated: bool, -} - -impl ColData { - pub(crate) fn new(col_id: u32, data: Vec) -> ColData { - ColData { - col: col_id, - data, - #[cfg(debug_assertions)] - has_been_deflated: false, - } - } - - pub(crate) fn encode_col_len(&self, buf: &mut R) -> io::Result { - let mut len = 0; - if !self.data.is_empty() { - len += self.col.encode(buf)?; - len += self.data.len().encode(buf)?; - } - Ok(len) - } - - pub(crate) fn deflate(&mut self) { - #[cfg(debug_assertions)] - { - debug_assert!(!self.has_been_deflated); - self.has_been_deflated = true; - } - if self.data.len() > DEFLATE_MIN_SIZE { - let mut deflated = Vec::new(); - let mut deflater = DeflateEncoder::new(&self.data[..], Compression::default()); - //This unwrap should be okay as we're reading and writing to in memory buffers - deflater.read_to_end(&mut deflated).unwrap(); - self.col |= COLUMN_TYPE_DEFLATE; - self.data = deflated; - } - } -} diff --git a/automerge/src/error.rs 
b/automerge/src/error.rs index 7c30deca..7f9b4ad2 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -1,10 +1,7 @@ -#[cfg(feature = "storage-v2")] use crate::storage::load::Error as LoadError; use crate::types::{ActorId, ScalarValue}; use crate::value::DataType; use crate::ChangeHash; -#[cfg(not(feature = "storage-v2"))] -use crate::{decoding, encoding}; use thiserror::Error; #[derive(Error, Debug)] @@ -15,12 +12,6 @@ pub enum AutomergeError { InvalidObjIdFormat(String), #[error("invalid obj id `{0}`")] InvalidObjId(String), - #[error("there was an encoding problem: {0}")] - #[cfg(not(feature = "storage-v2"))] - Encoding(#[from] encoding::Error), - #[error("there was a decoding problem: {0}")] - #[cfg(not(feature = "storage-v2"))] - Decoding(#[from] decoding::Error), #[error("key must not be an empty string")] EmptyStringKey, #[error("invalid seq {0}")] @@ -42,16 +33,12 @@ pub enum AutomergeError { }, #[error("general failure")] Fail, - #[cfg(feature = "storage-v2")] #[error(transparent)] Load(#[from] LoadError), - #[cfg(feature = "storage-v2")] #[error("failed to load compressed data: {0}")] Deflate(#[source] std::io::Error), - #[cfg(feature = "storage-v2")] #[error("compressed chunk was not a change")] NonChangeCompressed, - #[cfg(feature = "storage-v2")] #[error(transparent)] Clocks(#[from] crate::clocks::MissingDep), } @@ -92,7 +79,6 @@ pub struct InvalidElementId(pub String); #[error("Invalid OpID: {0}")] pub struct InvalidOpId(pub String); -#[cfg(feature = "storage-v2")] #[derive(Error, Debug)] pub(crate) enum InvalidOpType { #[error("unrecognized action index {0}")] diff --git a/automerge/src/indexed_cache.rs b/automerge/src/indexed_cache.rs index df445f28..b907a6f1 100644 --- a/automerge/src/indexed_cache.rs +++ b/automerge/src/indexed_cache.rs @@ -53,7 +53,6 @@ where &self.cache[index] } - #[cfg(feature = "storage-v2")] pub(crate) fn safe_get(&self, index: usize) -> Option<&T> { self.cache.get(index) } diff --git a/automerge/src/lib.rs 
b/automerge/src/lib.rs index f3d950a8..c31cf1ed 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -57,23 +57,11 @@ macro_rules! __log { mod autocommit; mod automerge; mod autoserde; -#[cfg(not(feature = "storage-v2"))] mod change; -#[cfg(feature = "storage-v2")] -mod change_v2; mod clock; -#[cfg(feature = "storage-v2")] mod clocks; -#[cfg(not(feature = "storage-v2"))] mod columnar; -#[cfg(feature = "storage-v2")] -mod columnar_2; -#[cfg(feature = "storage-v2")] mod convert; -#[cfg(not(feature = "storage-v2"))] -mod decoding; -#[cfg(not(feature = "storage-v2"))] -mod encoding; mod error; mod exid; mod indexed_cache; @@ -90,7 +78,6 @@ mod op_tree; mod options; mod parents; mod query; -#[cfg(feature = "storage-v2")] mod storage; pub mod sync; pub mod transaction; @@ -103,16 +90,7 @@ mod visualisation; pub use crate::automerge::Automerge; pub use autocommit::AutoCommit; pub use autoserde::AutoSerde; -#[cfg(not(feature = "storage-v2"))] -pub use change::Change; -#[cfg(feature = "storage-v2")] -pub use change_v2::{Change, LoadError as LoadChangeError}; -#[cfg(not(feature = "storage-v2"))] -pub use decoding::Error as DecodingError; -#[cfg(not(feature = "storage-v2"))] -pub use decoding::InvalidChangeError; -#[cfg(not(feature = "storage-v2"))] -pub use encoding::Error as EncodingError; +pub use change::{Change, LoadError as LoadChangeError}; pub use error::AutomergeError; pub use error::InvalidActorId; pub use error::InvalidChangeHashSlice; diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index eddd433a..766d9e01 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -6,15 +6,12 @@ use crate::query::{self, OpIdSearch, TreeQuery}; use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpIds, OpType}; use crate::{ObjType, OpObserver}; use fxhash::FxBuildHasher; -#[cfg(feature = "storage-v2")] use std::borrow::Borrow; use std::cmp::Ordering; use std::collections::HashMap; use std::ops::RangeBounds; -#[cfg(feature = "storage-v2")] 
mod load; -#[cfg(feature = "storage-v2")] pub(crate) use load::{ObservedOpSetBuilder, OpSetBuilder}; pub(crate) type OpSet = OpSetInternal; @@ -30,14 +27,12 @@ pub(crate) struct OpSetInternal { } impl OpSetInternal { - #[cfg(feature = "storage-v2")] pub(crate) fn builder() -> OpSetBuilder { OpSetBuilder::new() } /// Create a builder which passes each operation to `observer`. This will be significantly /// slower than `OpSetBuilder` - #[cfg(feature = "storage-v2")] pub(crate) fn observed_builder(observer: &mut O) -> ObservedOpSetBuilder<'_, O> { ObservedOpSetBuilder::new(observer) } @@ -381,7 +376,6 @@ impl Default for OpSetMetadata { } impl OpSetMetadata { - #[cfg(feature = "storage-v2")] pub(crate) fn from_actors(actors: Vec) -> Self { Self { props: IndexedCache::new(), @@ -412,29 +406,10 @@ impl OpSetMetadata { /// If `opids` are in ascending lamport timestamp order with respect to the actor IDs in /// this `OpSetMetadata` then this returns `Some(OpIds)`, otherwise returns `None`. - #[cfg(feature = "storage-v2")] pub(crate) fn try_sorted_opids(&self, opids: Vec) -> Option { OpIds::new_if_sorted(opids, |a, b| self.lamport_cmp(*a, *b)) } - #[cfg(not(feature = "storage-v2"))] - pub(crate) fn import_opids>( - &mut self, - external_opids: I, - ) -> OpIds { - let iter = external_opids.into_iter(); - let mut result = Vec::with_capacity(iter.size_hint().1.unwrap_or(0)); - for opid in iter { - let crate::legacy::OpId(counter, actor) = opid; - let actor_idx = self.actors.cache(actor); - result.push(OpId(counter, actor_idx)); - } - OpIds::new(result.into_iter(), |left, right| { - self.lamport_cmp(*left, *right) - }) - } - - #[cfg(feature = "storage-v2")] pub(crate) fn import_prop>(&mut self, key: S) -> usize { self.props.cache(key.borrow().to_string()) } diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 329641d5..6cd5bdf9 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -42,7 +42,6 @@ impl OpTree { self.internal.iter() } - 
#[cfg(feature = "storage-v2")] pub(crate) fn len(&self) -> usize { self.internal.len() } diff --git a/automerge/src/storage/change/change_op_columns.rs b/automerge/src/storage/change/change_op_columns.rs index 432df958..c50c67ae 100644 --- a/automerge/src/storage/change/change_op_columns.rs +++ b/automerge/src/storage/change/change_op_columns.rs @@ -1,7 +1,7 @@ use std::{convert::TryFrom, ops::Range}; use crate::{ - columnar_2::{ + columnar::{ column_range::{ generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange}, BooleanRange, DeltaRange, Key, KeyEncoder, KeyIter, KeyRange, ObjIdEncoder, ObjIdIter, @@ -447,7 +447,7 @@ impl TryFrom for ChangeOpsColumns { #[cfg(test)] mod tests { use super::*; - use crate::columnar_2::encoding::properties::{key, opid, scalar_value}; + use crate::columnar::encoding::properties::{key, opid, scalar_value}; use proptest::prelude::*; prop_compose! { diff --git a/automerge/src/storage/chunk.rs b/automerge/src/storage/chunk.rs index 93c05c9d..ad64e804 100644 --- a/automerge/src/storage/chunk.rs +++ b/automerge/src/storage/chunk.rs @@ -8,7 +8,7 @@ use std::{ use sha2::{Digest, Sha256}; use super::{change::Unverified, parse, Change, Compressed, Document, MAGIC_BYTES}; -use crate::{columnar_2::encoding::leb128::ulebsize, ChangeHash}; +use crate::{columnar::encoding::leb128::ulebsize, ChangeHash}; pub(crate) enum Chunk<'a> { Document(Document<'a>), diff --git a/automerge/src/storage/columns/column.rs b/automerge/src/storage/columns/column.rs index a7636b56..6f834439 100644 --- a/automerge/src/storage/columns/column.rs +++ b/automerge/src/storage/columns/column.rs @@ -1,6 +1,6 @@ use std::ops::Range; -use crate::columnar_2::column_range::generic::GenericColumnRange; +use crate::columnar::column_range::generic::GenericColumnRange; use super::{ColumnId, ColumnSpec, ColumnType}; diff --git a/automerge/src/storage/columns/column_builder.rs b/automerge/src/storage/columns/column_builder.rs index d33785e5..5cc41a21 100644 --- 
a/automerge/src/storage/columns/column_builder.rs +++ b/automerge/src/storage/columns/column_builder.rs @@ -1,6 +1,6 @@ use std::ops::Range; -use crate::columnar_2::column_range::{ +use crate::columnar::column_range::{ generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange}, BooleanRange, DeltaRange, RawRange, RleRange, ValueRange, }; diff --git a/automerge/src/storage/document/doc_change_columns.rs b/automerge/src/storage/document/doc_change_columns.rs index 0b1e15cd..93fa28e3 100644 --- a/automerge/src/storage/document/doc_change_columns.rs +++ b/automerge/src/storage/document/doc_change_columns.rs @@ -1,7 +1,7 @@ use std::{borrow::Cow, convert::TryFrom}; use crate::{ - columnar_2::{ + columnar::{ column_range::{ generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange}, DeltaRange, DepsIter, DepsRange, RleRange, ValueIter, ValueRange, diff --git a/automerge/src/storage/document/doc_op_columns.rs b/automerge/src/storage/document/doc_op_columns.rs index 49cabf81..5f61dff8 100644 --- a/automerge/src/storage/document/doc_op_columns.rs +++ b/automerge/src/storage/document/doc_op_columns.rs @@ -1,7 +1,7 @@ use std::{borrow::Cow, convert::TryFrom}; use crate::{ - columnar_2::{ + columnar::{ column_range::{ generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange}, BooleanRange, DeltaRange, Key, KeyEncoder, KeyIter, KeyRange, ObjIdEncoder, ObjIdIter, diff --git a/automerge/src/storage/load.rs b/automerge/src/storage/load.rs index 026123cc..75732d7c 100644 --- a/automerge/src/storage/load.rs +++ b/automerge/src/storage/load.rs @@ -1,7 +1,7 @@ use tracing::instrument; use crate::{ - change_v2::Change, + change::Change, storage::{self, parse}, }; diff --git a/automerge/src/storage/load/reconstruct_document.rs b/automerge/src/storage/load/reconstruct_document.rs index ce5197b1..5747a51d 100644 --- a/automerge/src/storage/load/reconstruct_document.rs +++ b/automerge/src/storage/load/reconstruct_document.rs @@ -3,8 
+3,8 @@ use std::collections::{BTreeSet, HashMap}; use tracing::instrument; use crate::{ - change_v2::Change, - columnar_2::Key as DocOpKey, + change::Change, + columnar::Key as DocOpKey, op_tree::OpSetMetadata, storage::{DocOp, Document}, types::{ChangeHash, ElemId, Key, ObjId, ObjType, Op, OpId, OpIds, OpType}, diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 0566acb0..80035823 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -1,20 +1,15 @@ use itertools::Itertools; use std::collections::{HashMap, HashSet}; -use crate::{ApplyOptions, Automerge, AutomergeError, Change, ChangeHash, OpObserver}; -#[cfg(not(feature = "storage-v2"))] -use std::{borrow::Cow, io, io::Write}; - -#[cfg(feature = "storage-v2")] -use crate::storage::{parse, Change as StoredChange, ReadChangeOpError}; -#[cfg(not(feature = "storage-v2"))] -use crate::{decoding, decoding::Decoder, encoding::Encodable, types::HASH_SIZE}; +use crate::{ + storage::{parse, Change as StoredChange, ReadChangeOpError}, + ApplyOptions, Automerge, AutomergeError, Change, ChangeHash, OpObserver, +}; mod bloom; mod state; pub use bloom::BloomFilter; -#[cfg(feature = "storage-v2")] pub use state::DecodeError as DecodeStateError; pub use state::{Have, State}; @@ -258,7 +253,6 @@ impl Automerge { } } -#[cfg(feature = "storage-v2")] #[derive(Debug, thiserror::Error)] pub enum ReadMessageError { #[error("expected {expected_one_of:?} but found {found}")] @@ -271,35 +265,30 @@ pub enum ReadMessageError { NotEnoughInput, } -#[cfg(feature = "storage-v2")] impl From for ReadMessageError { fn from(e: parse::leb128::Error) -> Self { ReadMessageError::Parse(e.to_string()) } } -#[cfg(feature = "storage-v2")] impl From for ReadMessageError { fn from(e: bloom::ParseError) -> Self { ReadMessageError::Parse(e.to_string()) } } -#[cfg(feature = "storage-v2")] impl From for ReadMessageError { fn from(e: crate::storage::change::ParseError) -> Self { ReadMessageError::Parse(format!("error parsing changes: {}", 
e)) } } -#[cfg(feature = "storage-v2")] impl From for parse::ParseError { fn from(e: ReadMessageError) -> Self { parse::ParseError::Error(e) } } -#[cfg(feature = "storage-v2")] impl From> for ReadMessageError { fn from(p: parse::ParseError) -> Self { match p { @@ -322,7 +311,6 @@ pub struct Message { pub changes: Vec, } -#[cfg(feature = "storage-v2")] fn parse_have(input: parse::Input<'_>) -> parse::ParseResult<'_, Have, ReadMessageError> { let (i, last_sync) = parse::length_prefixed(parse::change_hash)(input)?; let (i, bloom_bytes) = parse::length_prefixed_bytes(i)?; @@ -331,7 +319,6 @@ fn parse_have(input: parse::Input<'_>) -> parse::ParseResult<'_, Have, ReadMessa } impl Message { - #[cfg(feature = "storage-v2")] pub fn decode(input: &[u8]) -> Result { let input = parse::Input::new(input); match Self::parse(input) { @@ -341,7 +328,6 @@ impl Message { } } - #[cfg(feature = "storage-v2")] pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, ReadMessageError> { let (i, message_type) = parse::take1(input)?; if message_type != MESSAGE_TYPE_SYNC { @@ -386,7 +372,6 @@ impl Message { )) } - #[cfg(feature = "storage-v2")] pub fn encode(mut self) -> Vec { let mut buf = vec![MESSAGE_TYPE_SYNC]; @@ -405,77 +390,8 @@ impl Message { buf } - - #[cfg(not(feature = "storage-v2"))] - pub fn encode(self) -> Vec { - let mut buf = vec![MESSAGE_TYPE_SYNC]; - - encode_hashes(&mut buf, &self.heads); - encode_hashes(&mut buf, &self.need); - (self.have.len() as u32).encode_vec(&mut buf); - for have in self.have { - encode_hashes(&mut buf, &have.last_sync); - have.bloom.to_bytes().encode_vec(&mut buf); - } - - (self.changes.len() as u32).encode_vec(&mut buf); - for mut change in self.changes { - change.compress(); - change.bytes().encode_vec(&mut buf); - } - - buf - } - - #[cfg(not(feature = "storage-v2"))] - pub fn decode(bytes: &[u8]) -> Result { - let mut decoder = Decoder::new(Cow::Borrowed(bytes)); - - let message_type = decoder.read::()?; - if message_type != 
MESSAGE_TYPE_SYNC { - return Err(decoding::Error::WrongType { - expected_one_of: vec![MESSAGE_TYPE_SYNC], - found: message_type, - }); - } - - let heads = decode_hashes(&mut decoder)?; - let need = decode_hashes(&mut decoder)?; - let have_count = decoder.read::()?; - let mut have = Vec::with_capacity(have_count as usize); - for _ in 0..have_count { - let last_sync = decode_hashes(&mut decoder)?; - let bloom_bytes: Vec = decoder.read()?; - let bloom = BloomFilter::try_from(bloom_bytes.as_slice())?; - have.push(Have { last_sync, bloom }); - } - - let change_count = decoder.read::()?; - let mut changes = Vec::with_capacity(change_count as usize); - for _ in 0..change_count { - let change = decoder.read()?; - changes.push(Change::from_bytes(change)?); - } - - Ok(Message { - heads, - need, - have, - changes, - }) - } } -#[cfg(not(feature = "storage-v2"))] -fn encode_hashes(buf: &mut Vec, hashes: &[ChangeHash]) { - debug_assert!( - hashes.windows(2).all(|h| h[0] <= h[1]), - "hashes were not sorted" - ); - hashes.encode_vec(buf); -} - -#[cfg(feature = "storage-v2")] fn encode_many<'a, I, It, F>(out: &mut Vec, data: I, f: F) where I: Iterator + ExactSizeIterator + 'a, @@ -487,7 +403,6 @@ where } } -#[cfg(feature = "storage-v2")] fn encode_hashes(buf: &mut Vec, hashes: &[ChangeHash]) { debug_assert!( hashes.windows(2).all(|h| h[0] <= h[1]), @@ -496,33 +411,6 @@ fn encode_hashes(buf: &mut Vec, hashes: &[ChangeHash]) { encode_many(buf, hashes.iter(), |buf, hash| buf.extend(hash.as_bytes())) } -#[cfg(not(feature = "storage-v2"))] -impl Encodable for &[ChangeHash] { - fn encode(&self, buf: &mut W) -> io::Result { - let head = self.len().encode(buf)?; - let mut body = 0; - for hash in self.iter() { - buf.write_all(&hash.0)?; - body += hash.0.len(); - } - Ok(head + body) - } -} - -#[cfg(not(feature = "storage-v2"))] -fn decode_hashes(decoder: &mut Decoder<'_>) -> Result, decoding::Error> { - let length = decoder.read::()?; - let mut hashes = Vec::with_capacity(length as usize); - 
- for _ in 0..length { - let hash_bytes = decoder.read_bytes(HASH_SIZE)?; - let hash = ChangeHash::try_from(hash_bytes).map_err(decoding::Error::BadChangeFormat)?; - hashes.push(hash); - } - - Ok(hashes) -} - fn advance_heads( my_old_heads: &HashSet<&ChangeHash>, my_new_heads: &HashSet, diff --git a/automerge/src/sync/bloom.rs b/automerge/src/sync/bloom.rs index f24a855b..aff3dc13 100644 --- a/automerge/src/sync/bloom.rs +++ b/automerge/src/sync/bloom.rs @@ -1,12 +1,7 @@ use std::borrow::Borrow; -#[cfg(not(feature = "storage-v2"))] -use std::borrow::Cow; -#[cfg(feature = "storage-v2")] use crate::storage::parse; use crate::ChangeHash; -#[cfg(not(feature = "storage-v2"))] -use crate::{decoding, decoding::Decoder, encoding::Encodable}; // These constants correspond to a 1% false positive rate. The values can be changed without // breaking compatibility of the network protocol, since the parameters used for a particular @@ -22,7 +17,6 @@ pub struct BloomFilter { bits: Vec, } -#[cfg(feature = "storage-v2")] #[derive(Debug, thiserror::Error)] pub(crate) enum ParseError { #[error(transparent)] @@ -30,19 +24,6 @@ pub(crate) enum ParseError { } impl BloomFilter { - #[cfg(not(feature = "storage-v2"))] - pub fn to_bytes(&self) -> Vec { - let mut buf = Vec::new(); - if self.num_entries != 0 { - self.num_entries.encode_vec(&mut buf); - self.num_bits_per_entry.encode_vec(&mut buf); - self.num_probes.encode_vec(&mut buf); - buf.extend(&self.bits); - } - buf - } - - #[cfg(feature = "storage-v2")] pub fn to_bytes(&self) -> Vec { let mut buf = Vec::new(); if self.num_entries != 0 { @@ -54,7 +35,6 @@ impl BloomFilter { buf } - #[cfg(feature = "storage-v2")] pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, ParseError> { if input.is_empty() { Ok((input, Self::default())) @@ -154,36 +134,10 @@ fn bits_capacity(num_entries: u32, num_bits_per_entry: u32) -> usize { f as usize } -#[cfg(not(feature = "storage-v2"))] -impl TryFrom<&[u8]> for BloomFilter { - type 
Error = decoding::Error; - - fn try_from(bytes: &[u8]) -> Result { - if bytes.is_empty() { - Ok(Self::default()) - } else { - let mut decoder = Decoder::new(Cow::Borrowed(bytes)); - let num_entries = decoder.read()?; - let num_bits_per_entry = decoder.read()?; - let num_probes = decoder.read()?; - let bits = - decoder.read_bytes(bits_capacity(num_entries, num_bits_per_entry) as usize)?; - Ok(Self { - num_entries, - num_bits_per_entry, - num_probes, - bits: bits.to_vec(), - }) - } - } -} - -#[cfg(feature = "storage-v2")] #[derive(thiserror::Error, Debug)] #[error("{0}")] pub struct DecodeError(String); -#[cfg(feature = "storage-v2")] impl TryFrom<&[u8]> for BloomFilter { type Error = DecodeError; diff --git a/automerge/src/sync/state.rs b/automerge/src/sync/state.rs index 5c174649..5a34aad1 100644 --- a/automerge/src/sync/state.rs +++ b/automerge/src/sync/state.rs @@ -1,19 +1,11 @@ use std::collections::BTreeSet; -#[cfg(not(feature = "storage-v2"))] -use super::decode_hashes; use super::{encode_hashes, BloomFilter}; -#[cfg(feature = "storage-v2")] use crate::storage::parse; use crate::ChangeHash; -#[cfg(not(feature = "storage-v2"))] -use crate::{decoding, decoding::Decoder}; -#[cfg(not(feature = "storage-v2"))] -use std::borrow::Cow; const SYNC_STATE_TYPE: u8 = 0x43; // first byte of an encoded sync state, for identification -#[cfg(feature = "storage-v2")] #[derive(Debug, thiserror::Error)] pub enum DecodeError { #[error("{0:?}")] @@ -24,7 +16,6 @@ pub enum DecodeError { NotEnoughInput, } -#[cfg(feature = "storage-v2")] impl From for DecodeError { fn from(_: parse::leb128::Error) -> Self { Self::Parse("bad leb128 encoding".to_string()) @@ -65,30 +56,6 @@ impl State { buf } - #[cfg(not(feature = "storage-v2"))] - pub fn decode(bytes: &[u8]) -> Result { - let mut decoder = Decoder::new(Cow::Borrowed(bytes)); - - let record_type = decoder.read::()?; - if record_type != SYNC_STATE_TYPE { - return Err(decoding::Error::WrongType { - expected_one_of: vec![SYNC_STATE_TYPE], 
- found: record_type, - }); - } - - let shared_heads = decode_hashes(&mut decoder)?; - Ok(Self { - shared_heads, - last_sent_heads: Vec::new(), - their_heads: None, - their_need: None, - their_have: Some(Vec::new()), - sent_hashes: BTreeSet::new(), - }) - } - - #[cfg(feature = "storage-v2")] pub fn decode(input: &[u8]) -> Result { let input = parse::Input::new(input); match Self::parse(input) { @@ -98,7 +65,6 @@ impl State { } } - #[cfg(feature = "storage-v2")] pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, DecodeError> { let (i, record_type) = parse::take1(input)?; if record_type != SYNC_STATE_TYPE { diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 40dbb8b9..2c75ec39 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -1,11 +1,8 @@ use std::num::NonZeroU64; use crate::automerge::Actor; -#[cfg(not(feature = "storage-v2"))] -use crate::change::export_change; use crate::exid::ExId; use crate::query::{self, OpIdSearch}; -#[cfg(feature = "storage-v2")] use crate::storage::Change as StoredChange; use crate::types::{Key, ObjId, OpId}; use crate::{op_tree::OpSetMetadata, types::Op, Automerge, Change, ChangeHash, OpObserver, Prop}; @@ -18,10 +15,6 @@ pub(crate) struct TransactionInner { pub(crate) start_op: NonZeroU64, pub(crate) time: i64, pub(crate) message: Option, - #[cfg(not(feature = "storage-v2"))] - pub(crate) extra_bytes: Vec, - #[cfg(not(feature = "storage-v2"))] - pub(crate) hash: Option, pub(crate) deps: Vec, pub(crate) operations: Vec<(ObjId, Prop, Op)>, } @@ -84,7 +77,6 @@ impl TransactionInner { hash } - #[cfg(feature = "storage-v2")] #[tracing::instrument(skip(self, metadata))] pub(crate) fn export(self, metadata: &OpSetMetadata) -> Change { use crate::storage::{change::PredOutOfOrder, convert::op_as_actor_id}; @@ -120,11 +112,6 @@ impl TransactionInner { Change::new(stored) } - #[cfg(not(feature = "storage-v2"))] - pub(crate) fn export(self, 
meta: &OpSetMetadata) -> Change { - export_change(self, &meta.actors, &meta.props) - } - /// Undo the operations added in this transaction, returning the number of cancelled /// operations. pub(crate) fn rollback(self, doc: &mut Automerge) -> usize { diff --git a/automerge/src/types.rs b/automerge/src/types.rs index d2c8b002..a1e4f2a7 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -197,7 +197,6 @@ impl OpType { /// The index into the action array as specified in [1] /// /// [1]: https://alexjg.github.io/automerge-storage-docs/#action-array - #[cfg(feature = "storage-v2")] pub(crate) fn action_index(&self) -> u64 { match self { Self::Make(ObjType::Map) => 0, @@ -210,7 +209,6 @@ impl OpType { } } - #[cfg(feature = "storage-v2")] pub(crate) fn from_index_and_value( index: u64, value: ScalarValue, @@ -417,7 +415,6 @@ impl Key { pub(crate) struct OpId(pub(crate) u64, pub(crate) usize); impl OpId { - #[cfg(feature = "storage-v2")] pub(crate) fn new(actor: usize, counter: u64) -> Self { Self(counter, actor) } @@ -431,12 +428,10 @@ impl ObjId { ObjId(OpId(0, 0)) } - #[cfg(feature = "storage-v2")] pub(crate) fn is_root(&self) -> bool { self.0.counter() == 0 } - #[cfg(feature = "storage-v2")] pub(crate) fn opid(&self) -> &OpId { &self.0 } @@ -446,12 +441,10 @@ impl ObjId { pub(crate) struct ElemId(pub(crate) OpId); impl ElemId { - #[cfg(feature = "storage-v2")] pub(crate) fn is_head(&self) -> bool { *self == HEAD } - #[cfg(feature = "storage-v2")] pub(crate) fn head() -> Self { Self(OpId(0, 0)) } @@ -599,12 +592,10 @@ pub(crate) const HASH_SIZE: usize = 32; // 256 bits = 32 bytes pub struct ChangeHash(pub [u8; HASH_SIZE]); impl ChangeHash { - #[cfg(feature = "storage-v2")] pub(crate) fn as_bytes(&self) -> &[u8] { &self.0 } - #[cfg(feature = "storage-v2")] pub(crate) fn checksum(&self) -> [u8; 4] { [self.0[0], self.0[1], self.0[2], self.0[3]] } diff --git a/automerge/src/types/opids.rs b/automerge/src/types/opids.rs index 026fe923..3ebac93c 100644 --- 
a/automerge/src/types/opids.rs +++ b/automerge/src/types/opids.rs @@ -19,7 +19,6 @@ impl<'a> IntoIterator for &'a OpIds { } impl OpIds { - #[cfg(feature = "storage-v2")] pub(crate) fn empty() -> Self { Self(Vec::new()) } @@ -36,7 +35,6 @@ impl OpIds { /// Create a new OpIds if `opids` are sorted with respect to `cmp` and contain no duplicates. /// /// Returns `Some(OpIds)` if `opids` is sorted and has no duplicates, otherwise returns `None` - #[cfg(feature = "storage-v2")] pub(crate) fn new_if_sorted std::cmp::Ordering>( opids: Vec, cmp: F, @@ -95,13 +93,11 @@ impl OpIds { self.0.contains(op) } - #[cfg(feature = "storage-v2")] pub(crate) fn get(&self, idx: usize) -> Option<&OpId> { self.0.get(idx) } } -#[cfg(feature = "storage-v2")] fn are_sorted_and_unique< 'a, I: Iterator, @@ -147,7 +143,6 @@ mod tests { .prop_map(move |opids| (actors.clone(), opids)) } - #[cfg(feature = "storage-v2")] fn duplicate_unsorted_scenario() -> impl Strategy, Vec)> { scenario(1..100).prop_map(|(actors, mut opids)| { let mut sorted_opids = opids.clone(); @@ -179,7 +174,6 @@ mod tests { } #[test] - #[cfg(feature = "storage-v2")] fn test_new_if_sorted((actors, opids) in duplicate_unsorted_scenario()) { let mut expected = opids.clone(); assert_eq!(OpIds::new_if_sorted(opids, |left, right| cmp(&actors, left, right)), None); diff --git a/automerge/src/value.rs b/automerge/src/value.rs index b8e355da..b3142bdf 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -358,7 +358,6 @@ pub struct Counter { } impl Counter { - #[cfg(feature = "storage-v2")] pub(crate) fn increment>(&mut self, increments: I) { for inc in increments { self.current += inc; diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index d19ffcfb..d95d94ea 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1236,8 +1236,6 @@ fn test_compressed_changes() { let mut change = doc.get_last_local_change().unwrap().clone(); let uncompressed = change.raw_bytes().to_vec(); 
assert!(uncompressed.len() > 256); - #[cfg(not(feature = "storage-v2"))] - change.compress(); let compressed = change.bytes().to_vec(); assert!(compressed.len() < uncompressed.len()); @@ -1245,7 +1243,6 @@ fn test_compressed_changes() { assert_eq!(change.raw_bytes(), reloaded.raw_bytes()); } -#[cfg(feature = "storage-v2")] #[test] fn test_compressed_doc_cols() { // In this test, the keyCtr column is long enough for deflate compression to kick in, but the @@ -1270,7 +1267,6 @@ fn test_compressed_doc_cols() { ); } -#[cfg(feature = "storage-v2")] #[test] fn test_change_encoding_expanded_change_round_trip() { let change_bytes: Vec = vec![ diff --git a/edit-trace/Cargo.toml b/edit-trace/Cargo.toml index 2b442d6f..0107502b 100644 --- a/edit-trace/Cargo.toml +++ b/edit-trace/Cargo.toml @@ -4,10 +4,6 @@ version = "0.1.0" edition = "2021" license = "MIT" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html -[features] -storage-v2 =[ "automerge/storage-v2" ] - [dependencies] automerge = { path = "../automerge" } criterion = "0.3.5" diff --git a/scripts/ci/build-test b/scripts/ci/build-test index 0126ae2a..dbd89f5d 100755 --- a/scripts/ci/build-test +++ b/scripts/ci/build-test @@ -1,6 +1,6 @@ #!/usr/bin/env bash set -eoux pipefail -cargo build --workspace --features optree-visualisation,wasm +cargo build --workspace --all-features -RUST_LOG=error cargo test --workspace --features optree-visualisation,wasm +RUST_LOG=error cargo test --workspace --all-features diff --git a/scripts/ci/build-test-storage-v2 b/scripts/ci/build-test-storage-v2 deleted file mode 100755 index a31dd3d9..00000000 --- a/scripts/ci/build-test-storage-v2 +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash -set -eoux pipefail - -cargo build --workspace --all-features --all-targets - -RUST_LOG=error cargo test --workspace --all-features diff --git a/scripts/ci/js_tests_storage_v2 b/scripts/ci/js_tests_storage_v2 deleted file mode 100755 index 77485f73..00000000 
--- a/scripts/ci/js_tests_storage_v2 +++ /dev/null @@ -1,20 +0,0 @@ -set -e - -THIS_SCRIPT=$(dirname "$0"); -WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; -JS_PROJECT=$THIS_SCRIPT/../../automerge-js; - -yarn --cwd $WASM_PROJECT install; -# This will take care of running wasm-pack -yarn --cwd $WASM_PROJECT build-storage-v2; -# If the dependencies are already installed we delete automerge-wasm. This makes -# this script usable for iterative development. -if [ -d $JS_PROJECT/node_modules/automerge-wasm ]; then - rm -rf $JS_PROJECT/node_modules/automerge-wasm -fi -# --check-files forces yarn to check if the local dep has changed -yarn --cwd $JS_PROJECT install --check-files; -yarn --cwd $JS_PROJECT test; - - - diff --git a/scripts/ci/lint b/scripts/ci/lint index 505d2c68..163b245d 100755 --- a/scripts/ci/lint +++ b/scripts/ci/lint @@ -4,5 +4,4 @@ set -eoux pipefail # Force clippy to consider all local sources # https://github.com/rust-lang/rust-clippy/issues/4612 find . -name "*.rs" -not -path "./target/*" -exec touch "{}" + -cargo clippy --all-targets -- -D warnings -cargo clippy -p automerge --features storage-v2 +cargo clippy --all-targets --all-features -- -D warnings diff --git a/scripts/ci/run b/scripts/ci/run index caa3ca78..423b995c 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -4,12 +4,9 @@ set -eou pipefail ./scripts/ci/fmt ./scripts/ci/lint ./scripts/ci/build-test -./scripts/ci/build-test-storage-v2 ./scripts/ci/rust-docs ./scripts/ci/advisory ./scripts/ci/wasm_tests -./scripts/ci/wasm_tests_storage_v2 ./scripts/ci/js_tests -./scripts/ci/js_tests_storage_v2 ./scripts/ci/cmake-build Release static ./scripts/ci/cmake-docs diff --git a/scripts/ci/wasm_tests_storage_v2 b/scripts/ci/wasm_tests_storage_v2 deleted file mode 100755 index 2ef62643..00000000 --- a/scripts/ci/wasm_tests_storage_v2 +++ /dev/null @@ -1,6 +0,0 @@ -THIS_SCRIPT=$(dirname "$0"); -WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; - -yarn --cwd $WASM_PROJECT install; -yarn --cwd 
$WASM_PROJECT build-storage-v2; -yarn --cwd $WASM_PROJECT test-storage-v2; From 3ddde2fff2ebcc39ea8122c1fa630b9b7e711def Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 20 Aug 2022 21:09:24 -0700 Subject: [PATCH 548/730] Normalize the header include statement for all C source files. Normalize the header include statement within the documentation. Limit `AMpush()` usage within the quickstart example to variable assignment. --- automerge-c/CMakeLists.txt | 4 ++ automerge-c/build.rs | 2 +- automerge-c/examples/CMakeLists.txt | 2 +- automerge-c/examples/quickstart.c | 66 ++++++++++++---------- automerge-c/src/CMakeLists.txt | 20 ++++--- automerge-c/src/actor_id.rs | 1 + automerge-c/src/byte_span.rs | 1 + automerge-c/src/change.rs | 1 + automerge-c/src/change_hashes.rs | 1 + automerge-c/src/changes.rs | 1 + automerge-c/src/doc.rs | 1 + automerge-c/src/doc/list/item.rs | 1 + automerge-c/src/doc/list/items.rs | 1 + automerge-c/src/doc/map/item.rs | 1 + automerge-c/src/doc/map/items.rs | 1 + automerge-c/src/obj.rs | 1 + automerge-c/src/obj/item.rs | 1 + automerge-c/src/obj/items.rs | 1 + automerge-c/src/result.rs | 17 ++---- automerge-c/src/result_stack.rs | 1 + automerge-c/src/strs.rs | 1 + automerge-c/src/sync/have.rs | 1 + automerge-c/src/sync/haves.rs | 1 + automerge-c/src/sync/message.rs | 1 + automerge-c/src/sync/state.rs | 1 + automerge-c/test/CMakeLists.txt | 2 +- automerge-c/test/actor_id_tests.c | 2 +- automerge-c/test/doc_tests.c | 2 +- automerge-c/test/group_state.h | 2 +- automerge-c/test/list_tests.c | 2 +- automerge-c/test/macro_utils.h | 2 +- automerge-c/test/map_tests.c | 2 +- automerge-c/test/ported_wasm/basic_tests.c | 2 +- automerge-c/test/ported_wasm/sync_tests.c | 2 +- automerge-c/test/stack_utils.h | 2 +- 35 files changed, 88 insertions(+), 62 deletions(-) diff --git a/automerge-c/CMakeLists.txt b/automerge-c/CMakeLists.txt index 68a5176a..e5a7b1ca 100644 --- a/automerge-c/CMakeLists.txt +++ b/automerge-c/CMakeLists.txt @@ -67,6 +67,10 @@ 
string(TOUPPER ${SYMBOL_PREFIX} SYMBOL_PREFIX) set(CARGO_TARGET_DIR "${CMAKE_CURRENT_BINARY_DIR}/Cargo/target") +set(CBINDGEN_INCLUDEDIR "${CARGO_TARGET_DIR}/${CMAKE_INSTALL_INCLUDEDIR}") + +set(CBINDGEN_TARGET_DIR "${CBINDGEN_INCLUDEDIR}/${PROJECT_NAME}") + add_subdirectory(src) # Generate and install the configuration header. diff --git a/automerge-c/build.rs b/automerge-c/build.rs index e736d7d3..00fd0f87 100644 --- a/automerge-c/build.rs +++ b/automerge-c/build.rs @@ -14,7 +14,7 @@ fn main() { // \note CMake sets this environment variable before invoking Cargo so // that it can direct the generated header file into its // out-of-source build directory for post-processing. - if let Ok(target_dir) = env::var("CARGO_TARGET_DIR") { + if let Ok(target_dir) = env::var("CBINDGEN_TARGET_DIR") { writer.write_to_file(PathBuf::from(target_dir).join("automerge.h")); } } diff --git a/automerge-c/examples/CMakeLists.txt b/automerge-c/examples/CMakeLists.txt index 09ddeb70..3395124c 100644 --- a/automerge-c/examples/CMakeLists.txt +++ b/automerge-c/examples/CMakeLists.txt @@ -12,7 +12,7 @@ set_target_properties(example_quickstart PROPERTIES LINKER_LANGUAGE C) # must be specified for all of its dependent targets instead. 
target_include_directories( example_quickstart - PRIVATE "$" + PRIVATE "$" ) target_link_libraries(example_quickstart PRIVATE ${LIBRARY_NAME}) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index c4505024..02e2cb19 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -2,51 +2,59 @@ #include #include -#include +#include static void abort_cb(AMresultStack**, uint8_t); -/* - * Based on https://automerge.github.io/docs/quickstart +/** + * \brief Based on https://automerge.github.io/docs/quickstart */ int main(int argc, char** argv) { - AMresultStack* results = NULL; - AMdoc* const doc1 = AMpush(&results, AMcreate(), AM_VALUE_DOC, abort_cb).doc; - AMobjId const* const - cards = AMpush(&results, AMmapPutObject(doc1, AM_ROOT, "cards", AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, abort_cb).obj_id; - AMobjId const* const - card1 = AMpush(&results, AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID, abort_cb).obj_id; - AMpush(&results, AMmapPutStr(doc1, card1, "title", "Rewrite everything in Clojure"), AM_VALUE_VOID, abort_cb); - AMpush(&results, AMmapPutBool(doc1, card1, "done", false), AM_VALUE_VOID, abort_cb); - AMobjId const* const - card2 = AMpush(&results, AMlistPutObject(doc1, cards, 0, true, AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID, abort_cb).obj_id; - AMpush(&results, AMmapPutStr(doc1, card2, "title", "Rewrite everything in Haskell"), AM_VALUE_VOID, abort_cb); - AMpush(&results, AMmapPutBool(doc1, card2, "done", false), AM_VALUE_VOID, abort_cb); - AMpush(&results, AMcommit(doc1, "Add card", NULL), AM_VALUE_CHANGE_HASHES, abort_cb); + AMresultStack* stack = NULL; + AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, abort_cb).doc; + AMobjId const* const cards = AMpush(&stack, + AMmapPutObject(doc1, AM_ROOT, "cards", AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + abort_cb).obj_id; + AMobjId const* const card1 = AMpush(&stack, + AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), 
+ AM_VALUE_OBJ_ID, + abort_cb).obj_id; + AMfree(AMmapPutStr(doc1, card1, "title", "Rewrite everything in Clojure")); + AMfree(AMmapPutBool(doc1, card1, "done", false)); + AMobjId const* const card2 = AMpush(&stack, + AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), + AM_VALUE_OBJ_ID, + abort_cb).obj_id; + AMfree(AMmapPutStr(doc1, card2, "title", "Rewrite everything in Haskell")); + AMfree(AMmapPutBool(doc1, card2, "done", false)); + AMfree(AMcommit(doc1, "Add card", NULL)); - AMdoc* doc2 = AMpush(&results, AMcreate(), AM_VALUE_DOC, abort_cb).doc; - AMpush(&results, AMmerge(doc2, doc1), AM_VALUE_CHANGE_HASHES, abort_cb); + AMdoc* doc2 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, abort_cb).doc; + AMfree(AMmerge(doc2, doc1)); - AMbyteSpan const binary = AMpush(&results, AMsave(doc1), AM_VALUE_BYTES, abort_cb).bytes; - doc2 = AMpush(&results, AMload(binary.src, binary.count), AM_VALUE_DOC, abort_cb).doc; + AMbyteSpan const binary = AMpush(&stack, AMsave(doc1), AM_VALUE_BYTES, abort_cb).bytes; + doc2 = AMpush(&stack, AMload(binary.src, binary.count), AM_VALUE_DOC, abort_cb).doc; - AMpush(&results, AMmapPutBool(doc1, card1, "done", true), AM_VALUE_VOID, abort_cb); - AMpush(&results, AMcommit(doc1, "Mark card as done", NULL), AM_VALUE_CHANGE_HASHES, abort_cb); + AMfree(AMmapPutBool(doc1, card1, "done", true)); + AMfree(AMcommit(doc1, "Mark card as done", NULL)); - AMpush(&results, AMlistDelete(doc2, cards, 0), AM_VALUE_VOID, abort_cb); - AMpush(&results, AMcommit(doc2, "Delete card", NULL), AM_VALUE_CHANGE_HASHES, abort_cb); + AMfree(AMlistDelete(doc2, cards, 0)); + AMfree(AMcommit(doc2, "Delete card", NULL)); - AMpush(&results, AMmerge(doc1, doc2), AM_VALUE_CHANGE_HASHES, abort_cb); + AMfree(AMmerge(doc1, doc2)); - AMchanges changes = AMpush(&results, AMgetChanges(doc1, NULL), AM_VALUE_CHANGES, abort_cb).changes; + AMchanges changes = AMpush(&stack, AMgetChanges(doc1, NULL), AM_VALUE_CHANGES, abort_cb).changes; AMchange const* change = NULL; while ((change = 
AMchangesNext(&changes, 1)) != NULL) { AMbyteSpan const change_hash = AMchangeHash(change); - AMchangeHashes const - heads = AMpush(&results, AMchangeHashesInit(&change_hash, 1), AM_VALUE_CHANGE_HASHES, abort_cb).change_hashes; + AMchangeHashes const heads = AMpush(&stack, + AMchangeHashesInit(&change_hash, 1), + AM_VALUE_CHANGE_HASHES, + abort_cb).change_hashes; printf("%s %ld\n", AMchangeMessage(change), AMobjSize(doc1, cards, &heads)); } - AMfreeStack(&results); + AMfreeStack(&stack); } static char const* discriminant_suffix(AMvalueVariant const); diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt index b152616a..e02c0a96 100644 --- a/automerge-c/src/CMakeLists.txt +++ b/automerge-c/src/CMakeLists.txt @@ -29,7 +29,7 @@ set(CARGO_CURRENT_BINARY_DIR "${CARGO_TARGET_DIR}/${CARGO_BUILD_TYPE}") set( CARGO_OUTPUT - ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h + ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX} ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX} ) @@ -47,9 +47,9 @@ add_custom_command( # \note cbindgen won't regenerate its output header file after it's # been removed but it will after its configuration file has been # updated. 
- ${CMAKE_COMMAND} -DCONDITION=NOT_EXISTS -P ${CMAKE_SOURCE_DIR}/cmake/file_touch.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h ${CMAKE_SOURCE_DIR}/cbindgen.toml + ${CMAKE_COMMAND} -DCONDITION=NOT_EXISTS -P ${CMAKE_SOURCE_DIR}/cmake/file_touch.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h ${CMAKE_SOURCE_DIR}/cbindgen.toml COMMAND - ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} ${CARGO_CMD} build ${CARGO_FLAG} ${CARGO_FEATURES} + ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} CBINDGEN_TARGET_DIR=${CBINDGEN_TARGET_DIR} ${CARGO_CMD} build ${CARGO_FLAG} ${CARGO_FEATURES} MAIN_DEPENDENCY lib.rs DEPENDS @@ -99,16 +99,16 @@ add_custom_command( POST_BUILD COMMAND # Compensate for cbindgen's variant struct naming. - ${CMAKE_COMMAND} -DMATCH_REGEX=AM\([^_]+_[^_]+\)_Body -DREPLACE_EXPR=AM\\1 -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h + ${CMAKE_COMMAND} -DMATCH_REGEX=AM\([^_]+_[^_]+\)_Body -DREPLACE_EXPR=AM\\1 -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h COMMAND # Compensate for cbindgen's union tag enum type naming. - ${CMAKE_COMMAND} -DMATCH_REGEX=AM\([^_]+\)_Tag -DREPLACE_EXPR=AM\\1Variant -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h + ${CMAKE_COMMAND} -DMATCH_REGEX=AM\([^_]+\)_Tag -DREPLACE_EXPR=AM\\1Variant -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h COMMAND # Compensate for cbindgen's translation of consecutive uppercase letters to "ScreamingSnakeCase". 
- ${CMAKE_COMMAND} -DMATCH_REGEX=A_M\([^_]+\)_ -DREPLACE_EXPR=AM_\\1_ -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h + ${CMAKE_COMMAND} -DMATCH_REGEX=A_M\([^_]+\)_ -DREPLACE_EXPR=AM_\\1_ -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h COMMAND # Compensate for cbindgen ignoring `std:mem::size_of()` calls. - ${CMAKE_COMMAND} -DMATCH_REGEX=USIZE_ -DREPLACE_EXPR=\+${CMAKE_SIZEOF_VOID_P} -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h + ${CMAKE_COMMAND} -DMATCH_REGEX=USIZE_ -DREPLACE_EXPR=\+${CMAKE_SIZEOF_VOID_P} -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} COMMENT @@ -166,7 +166,7 @@ set_target_properties( IMPORTED_NO_SONAME "${LIBRARY_NO_SONAME}" IMPORTED_SONAME "${LIBRARY_SONAME}" LINKER_LANGUAGE C - PUBLIC_HEADER "${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h" + PUBLIC_HEADER "${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h" SOVERSION "${PROJECT_VERSION_MAJOR}" VERSION "${PROJECT_VERSION}" # \note Cargo exports all of the symbols automatically. 
@@ -222,6 +222,8 @@ install( find_package(Doxygen OPTIONAL_COMPONENTS dot) if(DOXYGEN_FOUND) + set(DOXYGEN_ALIASES "installed_headerfile=\\headerfile ${LIBRARY_NAME}.h <${PROJECT_NAME}/${LIBRARY_NAME}.h>") + set(DOXYGEN_GENERATE_LATEX YES) set(DOXYGEN_PDF_HYPERLINKS YES) @@ -234,7 +236,7 @@ if(DOXYGEN_FOUND) doxygen_add_docs( ${LIBRARY_NAME}_docs - "${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h" + "${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h" "${CMAKE_SOURCE_DIR}/README.md" USE_STAMP_FILE WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} diff --git a/automerge-c/src/actor_id.rs b/automerge-c/src/actor_id.rs index 45d66fbe..f5e627cf 100644 --- a/automerge-c/src/actor_id.rs +++ b/automerge-c/src/actor_id.rs @@ -9,6 +9,7 @@ use crate::byte_span::AMbyteSpan; use crate::result::{to_result, AMresult}; /// \struct AMactorId +/// \installed_headerfile /// \brief An actor's unique identifier. #[derive(PartialEq)] pub struct AMactorId { diff --git a/automerge-c/src/byte_span.rs b/automerge-c/src/byte_span.rs index 939a52c5..f72f6f0f 100644 --- a/automerge-c/src/byte_span.rs +++ b/automerge-c/src/byte_span.rs @@ -1,6 +1,7 @@ use automerge as am; /// \struct AMbyteSpan +/// \installed_headerfile /// \brief A contiguous sequence of bytes. #[repr(C)] #[derive(PartialEq)] diff --git a/automerge-c/src/change.rs b/automerge-c/src/change.rs index 564cb12f..e9047d2e 100644 --- a/automerge-c/src/change.rs +++ b/automerge-c/src/change.rs @@ -18,6 +18,7 @@ macro_rules! to_change { } /// \struct AMchange +/// \installed_headerfile /// \brief A group of operations performed by an actor. #[derive(PartialEq)] pub struct AMchange { diff --git a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs index 007e6c4c..5951a2dc 100644 --- a/automerge-c/src/change_hashes.rs +++ b/automerge-c/src/change_hashes.rs @@ -117,6 +117,7 @@ impl From for [u8; USIZE_USIZE_USIZE_] { } /// \struct AMchangeHashes +/// \installed_headerfile /// \brief A random-access iterator over a sequence of change hashes. 
#[repr(C)] #[derive(PartialEq)] diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs index 4d9df36b..dc29104b 100644 --- a/automerge-c/src/changes.rs +++ b/automerge-c/src/changes.rs @@ -140,6 +140,7 @@ impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { } /// \struct AMchanges +/// \installed_headerfile /// \brief A random-access iterator over a sequence of changes. #[repr(C)] #[derive(PartialEq)] diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 6edd7772..bea3608e 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -47,6 +47,7 @@ macro_rules! to_sync_state_mut { } /// \struct AMdoc +/// \installed_headerfile /// \brief A JSON-like CRDT. #[derive(Clone)] pub struct AMdoc(am::AutoCommit); diff --git a/automerge-c/src/doc/list/item.rs b/automerge-c/src/doc/list/item.rs index 31b97e1d..0e9d9460 100644 --- a/automerge-c/src/doc/list/item.rs +++ b/automerge-c/src/doc/list/item.rs @@ -6,6 +6,7 @@ use crate::obj::AMobjId; use crate::result::AMvalue; /// \struct AMlistItem +/// \installed_headerfile /// \brief An item in a list object. #[repr(C)] pub struct AMlistItem { diff --git a/automerge-c/src/doc/list/items.rs b/automerge-c/src/doc/list/items.rs index 7c596f93..f1213904 100644 --- a/automerge-c/src/doc/list/items.rs +++ b/automerge-c/src/doc/list/items.rs @@ -114,6 +114,7 @@ impl From for [u8; USIZE_USIZE_USIZE_] { } /// \struct AMlistItems +/// \installed_headerfile /// \brief A random-access iterator over a sequence of list object items. #[repr(C)] #[derive(PartialEq)] diff --git a/automerge-c/src/doc/map/item.rs b/automerge-c/src/doc/map/item.rs index b75567f8..654f2b4e 100644 --- a/automerge-c/src/doc/map/item.rs +++ b/automerge-c/src/doc/map/item.rs @@ -7,6 +7,7 @@ use crate::obj::AMobjId; use crate::result::AMvalue; /// \struct AMmapItem +/// \installed_headerfile /// \brief An item in a map object. 
#[repr(C)] pub struct AMmapItem { diff --git a/automerge-c/src/doc/map/items.rs b/automerge-c/src/doc/map/items.rs index 911bd7c4..cc4f7a64 100644 --- a/automerge-c/src/doc/map/items.rs +++ b/automerge-c/src/doc/map/items.rs @@ -114,6 +114,7 @@ impl From for [u8; USIZE_USIZE_USIZE_] { } /// \struct AMmapItems +/// \installed_headerfile /// \brief A random-access iterator over a sequence of map object items. #[repr(C)] #[derive(PartialEq)] diff --git a/automerge-c/src/obj.rs b/automerge-c/src/obj.rs index 5913e596..e0dff6ee 100644 --- a/automerge-c/src/obj.rs +++ b/automerge-c/src/obj.rs @@ -8,6 +8,7 @@ pub mod item; pub mod items; /// \struct AMobjId +/// \installed_headerfile /// \brief An object's unique identifier. #[derive(PartialEq)] pub struct AMobjId { diff --git a/automerge-c/src/obj/item.rs b/automerge-c/src/obj/item.rs index 18a6d7de..17e9a8dd 100644 --- a/automerge-c/src/obj/item.rs +++ b/automerge-c/src/obj/item.rs @@ -6,6 +6,7 @@ use crate::obj::AMobjId; use crate::result::AMvalue; /// \struct AMobjItem +/// \installed_headerfile /// \brief An item in an object. #[repr(C)] pub struct AMobjItem { diff --git a/automerge-c/src/obj/items.rs b/automerge-c/src/obj/items.rs index dd8bb74b..252a93a0 100644 --- a/automerge-c/src/obj/items.rs +++ b/automerge-c/src/obj/items.rs @@ -114,6 +114,7 @@ impl From for [u8; USIZE_USIZE_USIZE_] { } /// \struct AMobjItems +/// \installed_headerfile /// \brief A random-access iterator over a sequence of object items. #[repr(C)] #[derive(PartialEq)] diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 071db18f..29c6ebc9 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -24,6 +24,7 @@ use crate::strs::AMstrs; use crate::sync::{AMsyncMessage, AMsyncState}; /// \struct AMvalue +/// \installed_headerfile /// \brief A discriminated union of value type variants for a result. 
/// /// \enum AMvalueVariant @@ -83,15 +84,6 @@ use crate::sync::{AMsyncMessage, AMsyncState}; /// \var AMvalue::tag /// The variant discriminator. /// -/// \var AMvalue::sync_message -/// A synchronization message as a pointer to an `AMsyncMessage` struct. -/// -/// \var AMvalue::sync_state -/// A synchronization state as a pointer to an `AMsyncState` struct. -/// -/// \var AMvalue::tag -/// The variant discriminator. -/// /// \var AMvalue::timestamp /// A Lamport timestamp. /// @@ -215,8 +207,8 @@ impl From<&AMvalue<'_>> for u8 { fn from(value: &AMvalue) -> Self { use AMvalue::*; - // Note that these numbers are the order of appearance of the respective variants in the - // source of AMValue. + // \warning These numbers must correspond to the order in which the + // variants of an AMvalue are declared within it. match value { ActorId(_) => 1, Boolean(_) => 2, @@ -349,6 +341,7 @@ pub unsafe extern "C" fn AMvalueEqual(value1: *const AMvalue, value2: *const AMv } /// \struct AMresult +/// \installed_headerfile /// \brief A discriminated union of result variants. pub enum AMresult { ActorId(am::ActorId, Option), @@ -905,8 +898,8 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> } /// \struct AMunknownValue +/// \installed_headerfile /// \brief A value (typically for a `set` operation) whose type is unknown. -/// #[derive(PartialEq)] #[repr(C)] pub struct AMunknownValue { diff --git a/automerge-c/src/result_stack.rs b/automerge-c/src/result_stack.rs index 58f67950..2946f1a4 100644 --- a/automerge-c/src/result_stack.rs +++ b/automerge-c/src/result_stack.rs @@ -1,6 +1,7 @@ use crate::result::{AMfree, AMresult, AMresultStatus, AMresultValue, AMstatus, AMvalue}; /// \struct AMresultStack +/// \installed_headerfile /// \brief A node in a singly-linked list of result pointers. 
#[repr(C)] pub struct AMresultStack { diff --git a/automerge-c/src/strs.rs b/automerge-c/src/strs.rs index 8bb0e5a1..dcf7c3b7 100644 --- a/automerge-c/src/strs.rs +++ b/automerge-c/src/strs.rs @@ -114,6 +114,7 @@ impl From for [u8; USIZE_USIZE_USIZE_] { } /// \struct AMstrs +/// \installed_headerfile /// \brief A random-access iterator over a sequence of UTF-8 strings. #[repr(C)] #[derive(PartialEq)] diff --git a/automerge-c/src/sync/have.rs b/automerge-c/src/sync/have.rs index ea13ef16..d3a3e3e5 100644 --- a/automerge-c/src/sync/have.rs +++ b/automerge-c/src/sync/have.rs @@ -3,6 +3,7 @@ use automerge as am; use crate::change_hashes::AMchangeHashes; /// \struct AMsyncHave +/// \installed_headerfile /// \brief A summary of the changes that the sender of a synchronization /// message already has. #[derive(Clone, PartialEq)] diff --git a/automerge-c/src/sync/haves.rs b/automerge-c/src/sync/haves.rs index f435cb4a..3ccaefda 100644 --- a/automerge-c/src/sync/haves.rs +++ b/automerge-c/src/sync/haves.rs @@ -144,6 +144,7 @@ impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { } /// \struct AMsyncHaves +/// \installed_headerfile /// \brief A random-access iterator over a sequence of synchronization haves. #[repr(C)] #[derive(PartialEq)] diff --git a/automerge-c/src/sync/message.rs b/automerge-c/src/sync/message.rs index d0f683f6..7e398f8c 100644 --- a/automerge-c/src/sync/message.rs +++ b/automerge-c/src/sync/message.rs @@ -22,6 +22,7 @@ macro_rules! to_sync_message { pub(crate) use to_sync_message; /// \struct AMsyncMessage +/// \installed_headerfile /// \brief A synchronization message for a peer. #[derive(PartialEq)] pub struct AMsyncMessage { diff --git a/automerge-c/src/sync/state.rs b/automerge-c/src/sync/state.rs index 19411753..1c2bab05 100644 --- a/automerge-c/src/sync/state.rs +++ b/automerge-c/src/sync/state.rs @@ -20,6 +20,7 @@ macro_rules! 
to_sync_state { pub(crate) use to_sync_state; /// \struct AMsyncState +/// \installed_headerfile /// \brief The state of synchronization with a peer. #[derive(PartialEq)] pub struct AMsyncState { diff --git a/automerge-c/test/CMakeLists.txt b/automerge-c/test/CMakeLists.txt index 770d5d2d..704a27da 100644 --- a/automerge-c/test/CMakeLists.txt +++ b/automerge-c/test/CMakeLists.txt @@ -25,7 +25,7 @@ set_target_properties(test_${LIBRARY_NAME} PROPERTIES LINKER_LANGUAGE C) # must be specified for all of its dependent targets instead. target_include_directories( test_${LIBRARY_NAME} - PRIVATE "$" + PRIVATE "$" ) target_link_libraries(test_${LIBRARY_NAME} PRIVATE cmocka ${LIBRARY_NAME}) diff --git a/automerge-c/test/actor_id_tests.c b/automerge-c/test/actor_id_tests.c index ea627985..71b0f800 100644 --- a/automerge-c/test/actor_id_tests.c +++ b/automerge-c/test/actor_id_tests.c @@ -10,7 +10,7 @@ #include /* local */ -#include "automerge.h" +#include #include "str_utils.h" typedef struct { diff --git a/automerge-c/test/doc_tests.c b/automerge-c/test/doc_tests.c index fe9179ec..159a9a92 100644 --- a/automerge-c/test/doc_tests.c +++ b/automerge-c/test/doc_tests.c @@ -8,7 +8,7 @@ #include /* local */ -#include "automerge.h" +#include #include "group_state.h" #include "stack_utils.h" #include "str_utils.h" diff --git a/automerge-c/test/group_state.h b/automerge-c/test/group_state.h index 27cbf4bd..a71d9dc9 100644 --- a/automerge-c/test/group_state.h +++ b/automerge-c/test/group_state.h @@ -2,7 +2,7 @@ #define GROUP_STATE_H /* local */ -#include "automerge.h" +#include typedef struct { AMresultStack* stack; diff --git a/automerge-c/test/list_tests.c b/automerge-c/test/list_tests.c index c34b9659..fa8ab021 100644 --- a/automerge-c/test/list_tests.c +++ b/automerge-c/test/list_tests.c @@ -10,7 +10,7 @@ #include /* local */ -#include "automerge.h" +#include #include "group_state.h" #include "macro_utils.h" #include "stack_utils.h" diff --git a/automerge-c/test/macro_utils.h 
b/automerge-c/test/macro_utils.h index 2f7bf780..62e262ce 100644 --- a/automerge-c/test/macro_utils.h +++ b/automerge-c/test/macro_utils.h @@ -2,7 +2,7 @@ #define MACRO_UTILS_H /* local */ -#include "automerge.h" +#include /** * \brief Gets the result value discriminant corresponding to a function name diff --git a/automerge-c/test/map_tests.c b/automerge-c/test/map_tests.c index 636080ec..10d2b076 100644 --- a/automerge-c/test/map_tests.c +++ b/automerge-c/test/map_tests.c @@ -9,7 +9,7 @@ #include /* local */ -#include "automerge.h" +#include #include "group_state.h" #include "macro_utils.h" #include "stack_utils.h" diff --git a/automerge-c/test/ported_wasm/basic_tests.c b/automerge-c/test/ported_wasm/basic_tests.c index 8f584d1e..a22ee899 100644 --- a/automerge-c/test/ported_wasm/basic_tests.c +++ b/automerge-c/test/ported_wasm/basic_tests.c @@ -10,7 +10,7 @@ #include /* local */ -#include "automerge.h" +#include #include "../stack_utils.h" /** diff --git a/automerge-c/test/ported_wasm/sync_tests.c b/automerge-c/test/ported_wasm/sync_tests.c index ea773515..9d24ebfa 100644 --- a/automerge-c/test/ported_wasm/sync_tests.c +++ b/automerge-c/test/ported_wasm/sync_tests.c @@ -8,7 +8,7 @@ #include /* local */ -#include "automerge.h" +#include #include "../stack_utils.h" typedef struct { diff --git a/automerge-c/test/stack_utils.h b/automerge-c/test/stack_utils.h index dd1ff3f3..473feebc 100644 --- a/automerge-c/test/stack_utils.h +++ b/automerge-c/test/stack_utils.h @@ -4,7 +4,7 @@ #include /* local */ -#include "automerge.h" +#include /** * \brief Reports an error through a cmocka assertion. From 1ed67a7658e1f017ab738d22529ea0fbfaf5dd5b Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 22 Aug 2022 23:31:55 -0700 Subject: [PATCH 549/730] Add missing documentation for the `AMvalue.unknown` variant, the `AMunknownValue.bytes` member and the `AMunknownValue.type_code` member. 
--- automerge-c/src/actor_id.rs | 6 +++--- automerge-c/src/doc.rs | 4 ++-- automerge-c/src/doc/list/item.rs | 2 +- automerge-c/src/doc/map/item.rs | 2 +- automerge-c/src/obj.rs | 6 +++--- automerge-c/src/obj/item.rs | 2 +- automerge-c/src/result.rs | 24 ++++++++++-------------- 7 files changed, 21 insertions(+), 25 deletions(-) diff --git a/automerge-c/src/actor_id.rs b/automerge-c/src/actor_id.rs index f5e627cf..c4ad0d79 100644 --- a/automerge-c/src/actor_id.rs +++ b/automerge-c/src/actor_id.rs @@ -18,10 +18,10 @@ pub struct AMactorId { } impl AMactorId { - pub fn new(body: &am::ActorId) -> Self { + pub fn new(actor_id: &am::ActorId) -> Self { Self { - body, - c_str: RefCell::>::default(), + body: actor_id, + c_str: Default::default(), } } diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index bea3608e..d0b77b4e 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -53,8 +53,8 @@ macro_rules! to_sync_state_mut { pub struct AMdoc(am::AutoCommit); impl AMdoc { - pub fn new(body: am::AutoCommit) -> Self { - Self(body) + pub fn new(auto_commit: am::AutoCommit) -> Self { + Self(auto_commit) } } diff --git a/automerge-c/src/doc/list/item.rs b/automerge-c/src/doc/list/item.rs index 0e9d9460..fcd6281d 100644 --- a/automerge-c/src/doc/list/item.rs +++ b/automerge-c/src/doc/list/item.rs @@ -23,7 +23,7 @@ impl AMlistItem { Self { index, obj_id: AMobjId::new(obj_id), - value: (value, RefCell::>::default()), + value: (value, Default::default()), } } } diff --git a/automerge-c/src/doc/map/item.rs b/automerge-c/src/doc/map/item.rs index 654f2b4e..0d10f3c3 100644 --- a/automerge-c/src/doc/map/item.rs +++ b/automerge-c/src/doc/map/item.rs @@ -24,7 +24,7 @@ impl AMmapItem { Self { key: CString::new(key).unwrap(), obj_id: AMobjId::new(obj_id), - value: (value, RefCell::>::default()), + value: (value, Default::default()), } } } diff --git a/automerge-c/src/obj.rs b/automerge-c/src/obj.rs index e0dff6ee..25ebbbc2 100644 --- a/automerge-c/src/obj.rs +++ 
b/automerge-c/src/obj.rs @@ -17,10 +17,10 @@ pub struct AMobjId { } impl AMobjId { - pub fn new(body: am::ObjId) -> Self { + pub fn new(obj_id: am::ObjId) -> Self { Self { - body, - c_actor_id: RefCell::>::default(), + body: obj_id, + c_actor_id: Default::default(), } } diff --git a/automerge-c/src/obj/item.rs b/automerge-c/src/obj/item.rs index 17e9a8dd..84bc0fd1 100644 --- a/automerge-c/src/obj/item.rs +++ b/automerge-c/src/obj/item.rs @@ -20,7 +20,7 @@ impl AMobjItem { pub fn new(value: am::Value<'static>, obj_id: am::ObjId) -> Self { Self { obj_id: AMobjId::new(obj_id), - value: (value, RefCell::>::default()), + value: (value, Default::default()), } } } diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index 29c6ebc9..c20034a1 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -89,6 +89,9 @@ use crate::sync::{AMsyncMessage, AMsyncState}; /// /// \var AMvalue::uint /// A 64-bit unsigned integer. +/// +/// \var AMvalue::unknown +/// A value of unknown type as an `AMunknownValue` struct. #[repr(u8)] pub enum AMvalue<'a> { /// A void variant. 
@@ -609,7 +612,7 @@ impl From> for AMresult { impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(value) => AMresult::Value(value, RefCell::>::default()), + Ok(value) => AMresult::Value(value, Default::default()), Err(e) => AMresult::err(&e.to_string()), } } @@ -620,7 +623,7 @@ impl From, am::ObjId)>, am::AutomergeError>> f match maybe { Ok(Some((value, obj_id))) => match value { am::Value::Object(_) => AMresult::ObjId(AMobjId::new(obj_id)), - _ => AMresult::Value(value, RefCell::>::default()), + _ => AMresult::Value(value, Default::default()), }, Ok(None) => AMresult::Void, Err(e) => AMresult::err(&e.to_string()), @@ -640,10 +643,7 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(size) => AMresult::Value( - am::Value::uint(size as u64), - RefCell::>::default(), - ), + Ok(size) => AMresult::Value(am::Value::uint(size as u64), Default::default()), Err(e) => AMresult::err(&e.to_string()), } } @@ -692,10 +692,7 @@ impl From, am::InvalidChangeHashSlice>> for AMresult impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(bytes) => AMresult::Value( - am::Value::bytes(bytes), - RefCell::>::default(), - ), + Ok(bytes) => AMresult::Value(am::Value::bytes(bytes), Default::default()), Err(e) => AMresult::err(&e.to_string()), } } @@ -716,10 +713,7 @@ impl From> for AMresult { impl From> for AMresult { fn from(bytes: Vec) -> Self { - AMresult::Value( - am::Value::bytes(bytes), - RefCell::>::default(), - ) + AMresult::Value(am::Value::bytes(bytes), Default::default()) } } @@ -903,6 +897,8 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> #[derive(PartialEq)] #[repr(C)] pub struct AMunknownValue { + /// The value's raw bytes. bytes: AMbyteSpan, + /// The value's encoded type identifier. 
type_code: u8, } From 5e37ebfed06570b9020b8fcd06437ec46e5ea4ab Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 23 Aug 2022 05:34:45 -0700 Subject: [PATCH 550/730] Add `AMchangesInit()` for @rkuhn in #411. Expose `automerge::AutoCommit::with_actor()` through `AMcreate()`. Add notes to clarify the purpose of `AMfreeStack()`, `AMpop()`, `AMpush()`, `AMpushCallback()`, and `AMresultStack`. --- automerge-c/examples/quickstart.c | 4 +- automerge-c/src/change_hashes.rs | 2 +- automerge-c/src/changes.rs | 34 +++++ automerge-c/src/doc.rs | 14 +- automerge-c/src/result.rs | 9 ++ automerge-c/src/result_stack.rs | 19 ++- automerge-c/test/doc_tests.c | 12 +- automerge-c/test/group_state.c | 2 +- automerge-c/test/list_tests.c | 4 +- automerge-c/test/map_tests.c | 16 +-- automerge-c/test/ported_wasm/basic_tests.c | 148 ++++++++++++--------- automerge-c/test/ported_wasm/sync_tests.c | 101 +++----------- 12 files changed, 196 insertions(+), 169 deletions(-) diff --git a/automerge-c/examples/quickstart.c b/automerge-c/examples/quickstart.c index 02e2cb19..0c94a1a2 100644 --- a/automerge-c/examples/quickstart.c +++ b/automerge-c/examples/quickstart.c @@ -11,7 +11,7 @@ static void abort_cb(AMresultStack**, uint8_t); */ int main(int argc, char** argv) { AMresultStack* stack = NULL; - AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, abort_cb).doc; + AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, abort_cb).doc; AMobjId const* const cards = AMpush(&stack, AMmapPutObject(doc1, AM_ROOT, "cards", AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, @@ -30,7 +30,7 @@ int main(int argc, char** argv) { AMfree(AMmapPutBool(doc1, card2, "done", false)); AMfree(AMcommit(doc1, "Add card", NULL)); - AMdoc* doc2 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, abort_cb).doc; + AMdoc* doc2 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, abort_cb).doc; AMfree(AMmerge(doc2, doc1)); AMbyteSpan const binary = AMpush(&stack, AMsave(doc1), AM_VALUE_BYTES, abort_cb).bytes; diff --git 
a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs index 5951a2dc..d865231f 100644 --- a/automerge-c/src/change_hashes.rs +++ b/automerge-c/src/change_hashes.rs @@ -262,7 +262,7 @@ pub unsafe extern "C" fn AMchangeHashesInit(src: *const AMbyteSpan, count: usize for n in 0..count { let byte_span = &*src.add(n); let slice = std::slice::from_raw_parts(byte_span.src, byte_span.count); - match am::ChangeHash::try_from(slice) { + match slice.try_into() { Ok(change_hash) => { change_hashes.push(change_hash); } diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs index dc29104b..5d7f4813 100644 --- a/automerge-c/src/changes.rs +++ b/automerge-c/src/changes.rs @@ -3,7 +3,9 @@ use std::collections::BTreeMap; use std::ffi::c_void; use std::mem::size_of; +use crate::byte_span::AMbyteSpan; use crate::change::AMchange; +use crate::result::{to_result, AMresult}; #[repr(C)] struct Detail { @@ -254,6 +256,38 @@ pub unsafe extern "C" fn AMchangesEqual( } } +/// \memberof AMchanges +/// \brief Allocates an iterator over a sequence of changes and initializes it +/// from a sequence of byte spans. +/// +/// \param[in] src A pointer to an array of `AMbyteSpan` structs. +/// \param[in] count The number of `AMbyteSpan` structs to copy from \p src. +/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. +/// \pre \p src `!= NULL`. +/// \pre `0 <` \p count `<= sizeof(`\p src`) / sizeof(AMbyteSpan)`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. 
+/// \internal +/// # Safety +/// src must be an AMbyteSpan array of size `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMchangesInit(src: *const AMbyteSpan, count: usize) -> *mut AMresult { + let mut changes = Vec::::new(); + for n in 0..count { + let byte_span = &*src.add(n); + let slice = std::slice::from_raw_parts(byte_span.src, byte_span.count); + match slice.try_into() { + Ok(change) => { + changes.push(change); + } + Err(e) => { + return to_result(Err::, am::LoadChangeError>(e)); + } + } + } + to_result(Ok::, am::LoadChangeError>(changes)) +} + /// \memberof AMchanges /// \brief Gets the change at the current position of an iterator over a /// sequence of changes and then advances it by at most \p |n| positions diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index d0b77b4e..1a0291e8 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -124,13 +124,21 @@ pub unsafe extern "C" fn AMclone(doc: *const AMdoc) -> *mut AMresult { /// \memberof AMdoc /// \brief Allocates a new document and initializes it with defaults. /// +/// \param[in] actor_id A pointer to an `AMactorId` struct or `NULL` for a +/// random one. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMdoc` struct. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
+/// +/// # Safety +/// actor_id must be a valid pointer to an AMactorId or std::ptr::null() #[no_mangle] -pub extern "C" fn AMcreate() -> *mut AMresult { - to_result(am::AutoCommit::new()) +pub unsafe extern "C" fn AMcreate(actor_id: *const AMactorId) -> *mut AMresult { + to_result(match actor_id.as_ref() { + Some(actor_id) => am::AutoCommit::new().with_actor(actor_id.as_ref().clone()), + None => am::AutoCommit::new(), + }) } /// \memberof AMdoc @@ -282,7 +290,7 @@ pub unsafe extern "C" fn AMgetChangeByHash( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let slice = std::slice::from_raw_parts(src, count); - match am::ChangeHash::try_from(slice) { + match slice.try_into() { Ok(change_hash) => to_result(doc.get_change_by_hash(&change_hash)), Err(e) => AMresult::err(&e.to_string()).into(), } diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index c20034a1..e67c698e 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -658,6 +658,15 @@ impl From, am::AutomergeError>> for AMresult { } } +impl From, am::LoadChangeError>> for AMresult { + fn from(maybe: Result, am::LoadChangeError>) -> Self { + match maybe { + Ok(changes) => AMresult::Changes(changes, None), + Err(e) => AMresult::err(&e.to_string()), + } + } +} + impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { diff --git a/automerge-c/src/result_stack.rs b/automerge-c/src/result_stack.rs index 2946f1a4..e689ea0e 100644 --- a/automerge-c/src/result_stack.rs +++ b/automerge-c/src/result_stack.rs @@ -3,6 +3,10 @@ use crate::result::{AMfree, AMresult, AMresultStatus, AMresultValue, AMstatus, A /// \struct AMresultStack /// \installed_headerfile /// \brief A node in a singly-linked list of result pointers. +/// +/// \note Using this data structure is purely optional because its only purpose +/// is to make memory management tolerable for direct usage of this API +/// in C, C++ and Objective-C. 
#[repr(C)] pub struct AMresultStack { /// A result to be deallocated. @@ -24,6 +28,9 @@ impl AMresultStack { /// \return The number of `AMresult` structs freed. /// \pre \p stack `!= NULL`. /// \post `*stack == NULL`. +/// \note Calling this function is purely optional because its only purpose is +/// to make memory management tolerable for direct usage of this API in +/// C, C++ and Objective-C. /// \internal /// /// # Safety @@ -48,6 +55,9 @@ pub unsafe extern "C" fn AMfreeStack(stack: *mut *mut AMresultStack) -> usize { /// \return A pointer to an `AMresult` struct or `NULL`. /// \pre \p stack `!= NULL`. /// \post `*stack == NULL`. +/// \note Calling this function is purely optional because its only purpose is +/// to make memory management tolerable for direct usage of this API in +/// C, C++ and Objective-C. /// \internal /// /// # Safety @@ -68,6 +78,10 @@ pub unsafe extern "C" fn AMpop(stack: *mut *mut AMresultStack) -> *mut AMresult /// \brief The prototype of a function to be called when a value matching the /// given discriminant cannot be extracted from the result at the top of /// the given stack. +/// +/// \note Implementing this function is purely optional because its only purpose +/// is to make memory management tolerable for direct usage of this API +/// in C, C++ and Objective-C. pub type AMpushCallback = Option ()>; @@ -86,7 +100,10 @@ pub type AMpushCallback = /// \pre \p result `!= NULL`. /// \warning If \p stack `== NULL` then \p result is deallocated in order to /// prevent a memory leak. -/// \internal +/// \note Calling this function is purely optional because its only purpose is +/// to make memory management tolerable for direct usage of this API in +/// C, C++ and Objective-C. 
+// \internal /// /// # Safety /// stack must be a valid AMresultStack pointer pointer diff --git a/automerge-c/test/doc_tests.c b/automerge-c/test/doc_tests.c index 159a9a92..d8059641 100644 --- a/automerge-c/test/doc_tests.c +++ b/automerge-c/test/doc_tests.c @@ -41,7 +41,7 @@ static int teardown(void** state) { static void test_AMkeys_empty() { AMresultStack* stack = NULL; - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMstrs forward = AMpush(&stack, AMkeys(doc, AM_ROOT, NULL), AM_VALUE_STRS, @@ -58,7 +58,7 @@ static void test_AMkeys_empty() { static void test_AMkeys_list() { AMresultStack* stack = NULL; - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMlistPutInt(doc, AM_ROOT, 0, true, 1)); AMfree(AMlistPutInt(doc, AM_ROOT, 1, true, 2)); AMfree(AMlistPutInt(doc, AM_ROOT, 2, true, 3)); @@ -106,7 +106,7 @@ static void test_AMkeys_list() { static void test_AMkeys_map() { AMresultStack* stack = NULL; - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMmapPutInt(doc, AM_ROOT, "one", 1)); AMfree(AMmapPutInt(doc, AM_ROOT, "two", 2)); AMfree(AMmapPutInt(doc, AM_ROOT, "three", 3)); @@ -158,7 +158,7 @@ static void test_AMputActor_bytes(void **state) { assert_memory_equal(bytes.src, test_state->actor_id_bytes, bytes.count); } -static void test_AMputActor_hex(void **state) { +static void test_AMputActor_str(void **state) { TestState* test_state = *state; AMactorId const* actor_id = AMpush(&test_state->group_state->stack, AMactorIdInitStr(test_state->actor_id_str), @@ -176,7 +176,7 @@ static void test_AMputActor_hex(void **state) { static void test_AMspliceText() { AMresultStack* stack = NULL; - AMdoc* const doc = AMpush(&stack, 
AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMspliceText(doc, AM_ROOT, 0, 0, "one + ")); AMfree(AMspliceText(doc, AM_ROOT, 4, 2, "two = ")); AMfree(AMspliceText(doc, AM_ROOT, 8, 2, "three")); @@ -194,7 +194,7 @@ int run_doc_tests(void) { cmocka_unit_test(test_AMkeys_list), cmocka_unit_test(test_AMkeys_map), cmocka_unit_test_setup_teardown(test_AMputActor_bytes, setup, teardown), - cmocka_unit_test_setup_teardown(test_AMputActor_hex, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMputActor_str, setup, teardown), cmocka_unit_test(test_AMspliceText), }; diff --git a/automerge-c/test/group_state.c b/automerge-c/test/group_state.c index 11074b84..0ee14317 100644 --- a/automerge-c/test/group_state.c +++ b/automerge-c/test/group_state.c @@ -12,7 +12,7 @@ int group_setup(void** state) { GroupState* group_state = test_calloc(1, sizeof(GroupState)); group_state->doc = AMpush(&group_state->stack, - AMcreate(), + AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; *state = group_state; diff --git a/automerge-c/test/list_tests.c b/automerge-c/test/list_tests.c index fa8ab021..db1dc086 100644 --- a/automerge-c/test/list_tests.c +++ b/automerge-c/test/list_tests.c @@ -179,7 +179,7 @@ static_void_test_AMlistPut(Uint, update, uint, UINT64_MAX) static void test_insert_at_index(void** state) { AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMobjId const* const list = AMpush( &stack, @@ -205,7 +205,7 @@ static void test_insert_at_index(void** state) { static void test_get_list_values(void** state) { AMresultStack* stack = *state; - AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMobjId const* const list = AMpush( &stack, AMmapPutObject(doc1, 
AM_ROOT, "list", AM_OBJ_TYPE_LIST), diff --git a/automerge-c/test/map_tests.c b/automerge-c/test/map_tests.c index 10d2b076..85f4ea93 100644 --- a/automerge-c/test/map_tests.c +++ b/automerge-c/test/map_tests.c @@ -132,7 +132,7 @@ static_void_test_AMmapPut(Uint, uint, UINT64_MAX) static void test_range_iter_map(void** state) { AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMmapPutUint(doc, AM_ROOT, "a", 3)); AMfree(AMmapPutUint(doc, AM_ROOT, "b", 4)); AMfree(AMmapPutUint(doc, AM_ROOT, "c", 5)); @@ -320,7 +320,7 @@ static void test_range_iter_map(void** state) { static void test_map_range_back_and_forth_single(void** state) { AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMactorId const* const actor_id = AMpush(&stack, AMgetActorId(doc), AM_VALUE_ACTOR_ID, @@ -487,7 +487,7 @@ static void test_map_range_back_and_forth_single(void** state) { static void test_map_range_back_and_forth_double(void** state) { AMresultStack* stack = *state; - AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMactorId const* const actor_id1= AMpush(&stack, AMactorIdInitBytes("\0", 1), AM_VALUE_ACTOR_ID, @@ -499,7 +499,7 @@ static void test_map_range_back_and_forth_double(void** state) { AMfree(AMmapPutStr(doc1, AM_ROOT, "3", "c")); /* The second actor should win all conflicts here. 
*/ - AMdoc* const doc2 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc2 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMactorId const* const actor_id2 = AMpush(&stack, AMactorIdInitBytes("\1", 1), AM_VALUE_ACTOR_ID, @@ -668,7 +668,7 @@ static void test_map_range_back_and_forth_double(void** state) { static void test_map_range_at_back_and_forth_single(void** state) { AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMactorId const* const actor_id = AMpush(&stack, AMgetActorId(doc), AM_VALUE_ACTOR_ID, @@ -840,7 +840,7 @@ static void test_map_range_at_back_and_forth_single(void** state) { static void test_map_range_at_back_and_forth_double(void** state) { AMresultStack* stack = *state; - AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMactorId const* const actor_id1= AMpush(&stack, AMactorIdInitBytes("\0", 1), AM_VALUE_ACTOR_ID, @@ -852,7 +852,7 @@ static void test_map_range_at_back_and_forth_double(void** state) { AMfree(AMmapPutStr(doc1, AM_ROOT, "3", "c")); /* The second actor should win all conflicts here. 
*/ - AMdoc* const doc2 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc2 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMactorId const* const actor_id2= AMpush(&stack, AMactorIdInitBytes("\1", 1), AM_VALUE_ACTOR_ID, @@ -1025,7 +1025,7 @@ static void test_map_range_at_back_and_forth_double(void** state) { static void test_get_range_values(void** state) { AMresultStack* stack = *state; - AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMmapPutStr(doc1, AM_ROOT, "aa", "aaa")); AMfree(AMmapPutStr(doc1, AM_ROOT, "bb", "bbb")); AMfree(AMmapPutStr(doc1, AM_ROOT, "cc", "ccc")); diff --git a/automerge-c/test/ported_wasm/basic_tests.c b/automerge-c/test/ported_wasm/basic_tests.c index a22ee899..147b140d 100644 --- a/automerge-c/test/ported_wasm/basic_tests.c +++ b/automerge-c/test/ported_wasm/basic_tests.c @@ -24,7 +24,7 @@ static void test_default_import_init_should_return_a_promise(void** state); static void test_create_clone_and_free(void** state) { AMresultStack* stack = *state; /* const doc1 = create() */ - AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* const doc2 = doc1.clone() */ AMdoc* const doc2 = AMpush(&stack, AMclone(doc1), AM_VALUE_DOC, cmocka_cb).doc; } @@ -35,7 +35,7 @@ static void test_create_clone_and_free(void** state) { static void test_start_and_commit(void** state) { AMresultStack* stack = *state; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* doc.commit() */ AMpush(&stack, AMcommit(doc, NULL, NULL), AM_VALUE_CHANGE_HASHES, cmocka_cb); } @@ -46,7 +46,7 @@ static void test_start_and_commit(void** state) { static void 
test_getting_a_nonexistent_prop_does_not_throw_an_error(void** state) { AMresultStack* stack = *state; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* const root = "_root" */ /* const result = doc.getWithType(root, "hello") */ /* assert.deepEqual(result, undefined) */ @@ -62,11 +62,13 @@ static void test_getting_a_nonexistent_prop_does_not_throw_an_error(void** state static void test_should_be_able_to_set_and_get_a_simple_value(void** state) { AMresultStack* stack = *state; /* const doc: Automerge = create("aabbcc") */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(doc, AMpush(&stack, - AMactorIdInitStr("aabbcc"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMdoc* const doc = AMpush(&stack, + AMcreate(AMpush(&stack, + AMactorIdInitStr("aabbcc"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), + AM_VALUE_DOC, + cmocka_cb).doc; /* const root = "_root" */ /* let result */ /* */ @@ -192,7 +194,7 @@ static void test_should_be_able_to_set_and_get_a_simple_value(void** state) { static void test_should_be_able_to_use_bytes(void** state) { AMresultStack* stack = *state; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* doc.put("_root", "data1", new Uint8Array([10, 11, 12])); */ static uint8_t const DATA1[] = {10, 11, 12}; AMfree(AMmapPutBytes(doc, AM_ROOT, "data1", DATA1, sizeof(DATA1))); @@ -223,7 +225,7 @@ static void test_should_be_able_to_use_bytes(void** state) { static void test_should_be_able_to_make_subobjects(void** state) { AMresultStack* stack = *state; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, 
cmocka_cb).doc; /* const root = "_root" */ /* let result */ /* */ @@ -261,7 +263,7 @@ static void test_should_be_able_to_make_subobjects(void** state) { static void test_should_be_able_to_make_lists(void** state) { AMresultStack* stack = *state; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* const root = "_root" */ /* */ /* const sublist = doc.putObject(root, "numbers", []) */ @@ -320,7 +322,7 @@ static void test_should_be_able_to_make_lists(void** state) { static void test_lists_have_insert_set_splice_and_push_ops(void** state) { AMresultStack* stack = *state; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* const root = "_root" */ /* */ /* const sublist = doc.putObject(root, "letters", []) */ @@ -516,7 +518,7 @@ static void test_lists_have_insert_set_splice_and_push_ops(void** state) { static void test_should_be_able_to_delete_non_existent_props(void** state) { AMresultStack* stack = *state; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* */ /* doc.put("_root", "foo", "bar") */ AMfree(AMmapPutStr(doc, AM_ROOT, "foo", "bar")); @@ -573,7 +575,7 @@ static void test_should_be_able_to_delete_non_existent_props(void** state) { static void test_should_be_able_to_del(void **state) { AMresultStack* stack = *state; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* const root = "_root" */ /* */ /* doc.put(root, "xxx", "xxx"); */ @@ -598,7 +600,7 @@ static void test_should_be_able_to_del(void **state) { static void 
test_should_be_able_to_use_counters(void** state) { AMresultStack* stack = *state; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* const root = "_root" */ /* */ /* doc.put(root, "counter", 10, "counter"); */ @@ -630,7 +632,7 @@ static void test_should_be_able_to_use_counters(void** state) { static void test_should_be_able_to_splice_text(void** state) { AMresultStack* stack = *state; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* const root = "_root"; */ /* */ /* const text = doc.putObject(root, "text", ""); */ @@ -690,7 +692,7 @@ static void test_should_be_able_to_splice_text(void** state) { static void test_should_be_able_to_insert_objects_into_text(void** state) { AMresultStack* stack = *state; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* const text = doc.putObject("/", "text", "Hello world"); */ AMobjId const* const text = AMpush( &stack, @@ -728,7 +730,7 @@ static void test_should_be_able_to_insert_objects_into_text(void** state) { static void test_should_be_able_to_save_all_or_incrementally(void** state) { AMresultStack* stack = *state; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* */ /* doc.put("_root", "foo", 1) */ AMfree(AMmapPutInt(doc, AM_ROOT, "foo", 1)); @@ -837,7 +839,7 @@ static void test_should_be_able_to_save_all_or_incrementally(void** state) { static void test_should_be_able_to_splice_text_2(void** state) { AMresultStack* stack = *state; /* const doc = create() */ - AMdoc* const doc = 
AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* const text = doc.putObject("_root", "text", ""); */ AMobjId const* const text = AMpush( &stack, @@ -887,11 +889,13 @@ static void test_should_be_able_to_splice_text_2(void** state) { static void test_local_inc_increments_all_visible_counters_in_a_map(void** state) { AMresultStack* stack = *state; /* const doc1 = create("aaaa") */ - AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(doc1, AMpush(&stack, - AMactorIdInitStr("aaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMdoc* const doc1 = AMpush(&stack, + AMcreate(AMpush(&stack, + AMactorIdInitStr("aaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), + AM_VALUE_DOC, + cmocka_cb).doc; /* doc1.put("_root", "hello", "world") */ AMfree(AMmapPutStr(doc1, AM_ROOT, "hello", "world")); /* const doc2 = load(doc1.save(), "bbbb"); */ @@ -1011,11 +1015,13 @@ static void test_local_inc_increments_all_visible_counters_in_a_map(void** state static void test_local_inc_increments_all_visible_counters_in_a_sequence(void** state) { AMresultStack* stack = *state; /* const doc1 = create("aaaa") */ - AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(doc1, AMpush(&stack, - AMactorIdInitStr("aaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMdoc* const doc1 = AMpush(&stack, + AMcreate(AMpush(&stack, + AMactorIdInitStr("aaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), + AM_VALUE_DOC, + cmocka_cb).doc; /* const seq = doc1.putObject("_root", "seq", []) */ AMobjId const* const seq = AMpush( &stack, @@ -1146,17 +1152,21 @@ static void test_paths_can_be_used_instead_of_objids(void** state); static void test_should_be_able_to_fetch_changes_by_hash(void** state) { AMresultStack* stack = *state; /* const doc1 = create("aaaa") */ - AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, 
cmocka_cb).doc; - AMfree(AMsetActorId(doc1, AMpush(&stack, - AMactorIdInitStr("aaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMdoc* const doc1 = AMpush(&stack, + AMcreate(AMpush(&stack, + AMactorIdInitStr("aaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), + AM_VALUE_DOC, + cmocka_cb).doc; /* const doc2 = create("bbbb") */ - AMdoc* const doc2 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(doc2, AMpush(&stack, - AMactorIdInitStr("bbbb"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMdoc* const doc2 = AMpush(&stack, + AMcreate(AMpush(&stack, + AMactorIdInitStr("bbbb"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), + AM_VALUE_DOC, + cmocka_cb).doc; /* doc1.put("/", "a", "b") */ AMfree(AMmapPutStr(doc1, AM_ROOT, "a", "b")); /* doc2.put("/", "b", "c") */ @@ -1198,11 +1208,13 @@ static void test_should_be_able_to_fetch_changes_by_hash(void** state) { static void test_recursive_sets_are_possible(void** state) { AMresultStack* stack = *state; /* const doc = create("aaaa") */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(doc, AMpush(&stack, - AMactorIdInitStr("aaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMdoc* const doc = AMpush(&stack, + AMcreate(AMpush(&stack, + AMactorIdInitStr("aaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), + AM_VALUE_DOC, + cmocka_cb).doc; /* const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]])*/ AMobjId const* const l1 = AMpush( &stack, @@ -1427,11 +1439,13 @@ static void test_recursive_sets_are_possible(void** state) { static void test_only_returns_an_object_id_when_objects_are_created(void** state) { AMresultStack* stack = *state; /* const doc = create("aaaa") */ - AMdoc* const doc = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(doc, AMpush(&stack, - AMactorIdInitStr("aaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMdoc* const doc = AMpush(&stack, + 
AMcreate(AMpush(&stack, + AMactorIdInitStr("aaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), + AM_VALUE_DOC, + cmocka_cb).doc; /* const r1 = doc.put("_root", "foo", "bar") assert.deepEqual(r1, null); */ AMpush(&stack, @@ -1496,11 +1510,13 @@ static void test_only_returns_an_object_id_when_objects_are_created(void** state static void test_objects_without_properties_are_preserved(void** state) { AMresultStack* stack = *state; /* const doc1 = create("aaaa") */ - AMdoc* const doc1 = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(doc1, AMpush(&stack, - AMactorIdInitStr("aaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMdoc* const doc1 = AMpush(&stack, + AMcreate(AMpush(&stack, + AMactorIdInitStr("aaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), + AM_VALUE_DOC, + cmocka_cb).doc; /* const a = doc1.putObject("_root", "a", {}); */ AMobjId const* const a = AMpush( &stack, @@ -1567,11 +1583,13 @@ static void test_objects_without_properties_are_preserved(void** state) { static void test_should_allow_you_to_forkAt_a_heads(void** state) { AMresultStack* stack = *state; /* const A = create("aaaaaa") */ - AMdoc* const A = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(A, AMpush(&stack, - AMactorIdInitStr("aaaaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMdoc* const A = AMpush(&stack, + AMcreate(AMpush(&stack, + AMactorIdInitStr("aaaaaa"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), + AM_VALUE_DOC, + cmocka_cb).doc; /* A.put("/", "key1", "val1"); */ AMfree(AMmapPutStr(A, AM_ROOT, "key1", "val1")); /* A.put("/", "key2", "val2"); */ @@ -1634,11 +1652,13 @@ static void test_should_allow_you_to_forkAt_a_heads(void** state) { static void test_should_handle_merging_text_conflicts_then_saving_and_loading(void** state) { AMresultStack* stack = *state; /* const A = create("aabbcc") */ - AMdoc* const A = AMpush(&stack, AMcreate(), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(A, AMpush(&stack, - 
AMactorIdInitStr("aabbcc"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMdoc* const A = AMpush(&stack, + AMcreate(AMpush(&stack, + AMactorIdInitStr("aabbcc"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), + AM_VALUE_DOC, + cmocka_cb).doc; /* const At = A.putObject('_root', 'text', "") */ AMobjId const* const At = AMpush( &stack, diff --git a/automerge-c/test/ported_wasm/sync_tests.c b/automerge-c/test/ported_wasm/sync_tests.c index 9d24ebfa..ec5f84a4 100644 --- a/automerge-c/test/ported_wasm/sync_tests.c +++ b/automerge-c/test/ported_wasm/sync_tests.c @@ -22,11 +22,17 @@ typedef struct { static int setup(void** state) { TestState* test_state = test_calloc(1, sizeof(TestState)); test_state->n1 = AMpush(&test_state->stack, - AMcreate(), + AMcreate(AMpush(&test_state->stack, + AMactorIdInitStr("01234567"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), AM_VALUE_DOC, cmocka_cb).doc; test_state->n2 = AMpush(&test_state->stack, - AMcreate(), + AMcreate(AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), AM_VALUE_DOC, cmocka_cb).doc; test_state->s1 = AMpush(&test_state->stack, @@ -650,14 +656,6 @@ static void test_should_assume_sent_changes_were_received_until_we_hear_otherwis /* const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; - AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); /* let message = null */ /* */ /* const items = n1.putObject("_root", "items", []) */ @@ -771,14 +769,6 @@ static void test_should_work_without_prior_sync_state(void **state) { /* const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; - 
AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); /* */ /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { @@ -842,14 +832,6 @@ static void test_should_work_with_prior_sync_state_2(void **state) { /* const n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; - AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); /* */ /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { @@ -925,14 +907,6 @@ static void test_should_ensure_non_empty_state_after_sync(void **state) { /* const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; - AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); /* */ /* for (let i = 0; i < 3; i++) { */ for (size_t i = 0; i != 3; ++i) { @@ -972,14 +946,6 @@ static void test_should_resync_after_one_node_crashed_with_data_loss(void **stat let s1 = initSyncState() const s2 = initSyncState() */ TestState* test_state = *state; - AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - 
AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); /* */ /* n1 makes three changes, which we sync to n2 */ /* for (let i = 0; i < 3; i++) { */ @@ -1114,14 +1080,6 @@ static void test_should_resync_after_one_node_experiences_data_loss_without_disc /* const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; - AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); /* */ /* n1 makes three changes which we sync to n2 */ /* for (let i = 0; i < 3; i++) { */ @@ -1151,13 +1109,12 @@ static void test_should_resync_after_one_node_experiences_data_loss_without_disc /* */ /* const n2AfterDataLoss = create('89abcdef') */ AMdoc* n2_after_data_loss = AMpush(&test_state->stack, - AMcreate(), + AMcreate(AMpush(&test_state->stack, + AMactorIdInitStr("89abcdef"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(n2_after_data_loss, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); /* */ /* "n2" now has no data, but n1 still thinks it does. 
Note we don't do * decodeSyncState(encodeSyncState(s1)) in order to simulate data loss @@ -1188,22 +1145,13 @@ static void test_should_resync_after_one_node_experiences_data_loss_without_disc static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void **state) { /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98')*/ TestState* test_state = *state; - AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); AMdoc* n3 = AMpush(&test_state->stack, - AMcreate(), + AMcreate(AMpush(&test_state->stack, + AMactorIdInitStr("fedcba98"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(n3, AMpush(&test_state->stack, - AMactorIdInitStr("fedcba98"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); /* const s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState()*/ AMsyncState* s12 = test_state->s1; AMsyncState* s21 = test_state->s2; @@ -1281,22 +1229,13 @@ static void test_should_handle_histories_with_lots_of_branching_and_merging(void /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; - AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); AMdoc* n3 = AMpush(&test_state->stack, - AMcreate(), + AMcreate(AMpush(&test_state->stack, + AMactorIdInitStr("fedcba98"), + AM_VALUE_ACTOR_ID, + cmocka_cb).actor_id), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(n3, AMpush(&test_state->stack, - 
AMactorIdInitStr("fedcba98"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); /* n1.put("_root", "x", 0); n1.commit("", 0) */ AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 0)); AMfree(AMcommit(test_state->n1, "", &TIME_0)); From 7da1832b52b8f8d3f563affa5b1411de5a9eb962 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Tue, 23 Aug 2022 06:04:22 -0700 Subject: [PATCH 551/730] Fix documentation bug caused by missing `/`. --- automerge-c/src/result_stack.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-c/src/result_stack.rs b/automerge-c/src/result_stack.rs index e689ea0e..cfb9c7d2 100644 --- a/automerge-c/src/result_stack.rs +++ b/automerge-c/src/result_stack.rs @@ -103,7 +103,7 @@ pub type AMpushCallback = /// \note Calling this function is purely optional because its only purpose is /// to make memory management tolerable for direct usage of this API in /// C, C++ and Objective-C. -// \internal +/// \internal /// /// # Safety /// stack must be a valid AMresultStack pointer pointer From 363ad7d59affd57e74cd707b017e5e65c902e2fa Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 23 Aug 2022 11:12:22 -0500 Subject: [PATCH 552/730] automerge-js ts fixes --- automerge-js/index.d.ts | 5 +++-- automerge-js/package.json | 2 +- automerge-js/src/text.ts | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/automerge-js/index.d.ts b/automerge-js/index.d.ts index 47f1f344..147d5b70 100644 --- a/automerge-js/index.d.ts +++ b/automerge-js/index.d.ts @@ -38,7 +38,8 @@ export class Text { elems: AutomergeValue[]; constructor(text?: string | string[]); get length(): number; - get(index: number): AutomergeValue; + get(index: number): AutomergeValue | undefined; + [index: number]: AutomergeValue | undefined; [Symbol.iterator](): { next(): { done: boolean; @@ -77,7 +78,7 @@ type Conflicts = { }; export function use(api: LowLevelApi): void; -export function getBackend(doc: Doc) : LowLevelApi; +export function getBackend(doc: Doc) : 
Automerge; export function init(actor?: ActorId): Doc; export function clone(doc: Doc): Doc; export function free(doc: Doc): void; diff --git a/automerge-js/package.json b/automerge-js/package.json index b699c5ed..b51186f3 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "0.1.10", + "version": "0.1.11", "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index 5edf9714..d93cd061 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -22,7 +22,7 @@ export class Text { return this.elems.length } - get (index: number) : Value { + get (index: number) : Value | undefined { return this.elems[index] } From 43bdd60904d4ed4833b8e18991a4848e43c6bcb0 Mon Sep 17 00:00:00 2001 From: Peter van Hardenberg Date: Tue, 23 Aug 2022 09:31:09 -0700 Subject: [PATCH 553/730] the fields in a doc are not docs themselves --- automerge-js/index.d.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-js/index.d.ts b/automerge-js/index.d.ts index 147d5b70..a18505c2 100644 --- a/automerge-js/index.d.ts +++ b/automerge-js/index.d.ts @@ -59,7 +59,7 @@ export class Text { } export type Doc = { - readonly [P in keyof T]: Doc; + readonly [P in keyof T]: T[P]; }; export type ChangeFn = (doc: T) => void; From 6d05cbd9e3107adb19e38aecbf055c8bdb4b1fca Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 23 Aug 2022 12:13:32 -0500 Subject: [PATCH 554/730] fix indexOf --- automerge-js/src/proxies.ts | 25 +++++++++++-------------- automerge-js/test/basic_test.ts | 6 ++++++ 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index a19a1b9f..8e45e30a 100644 --- 
a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -424,22 +424,15 @@ function listMethods(target) { return this }, - indexOf(/*o, start = 0*/) { - // FIXME - /* - const id = o[OBJECT_ID] - if (id) { - const list = context.getObject(objectId) - for (let index = start; index < list.length; index++) { - if (list[index][OBJECT_ID] === id) { - return index - } + indexOf(o, start = 0) { + const length = context.length(objectId) + for (let i = start; i < length; i++) { + const value = context.getWithType(objectId, i, heads) + if (value && value[1] === o[OBJECT_ID] || value[1] === o) { + return i } - return -1 - } else { - return context.indexOf(objectId, o, start) } - */ + return -1 }, insertAt(index, ...values) { @@ -629,6 +622,10 @@ function textMethods(target) { }, toJSON () : string { return this.toString() + }, + indexOf(o, start = 0) { + const text = context.text(objectId) + return text.indexOf(o,start) } } return methods diff --git a/automerge-js/test/basic_test.ts b/automerge-js/test/basic_test.ts index 1b40c858..d2e98939 100644 --- a/automerge-js/test/basic_test.ts +++ b/automerge-js/test/basic_test.ts @@ -168,5 +168,11 @@ describe('Automerge', () => { let doc = Automerge.init() assert.deepEqual(Object.keys(Automerge.getBackend(doc)), ["ptr"]) }) + + it('lists and text have indexof', () => { + let doc = Automerge.from({ list: [0,1,2,3,4,5,6], text: new Automerge.Text("hello world") }) + console.log(doc.list.indexOf(5)) + console.log(doc.text.indexOf("world")) + }) }) }) From e6cd366aa03dbdfdeddaa0f7f24ecd964277c0e8 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 24 Aug 2022 19:12:47 -0500 Subject: [PATCH 555/730] automerge-js 0.1.12 --- automerge-js/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-js/package.json b/automerge-js/package.json index b51186f3..228d94b8 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": 
"0.1.11", + "version": "0.1.12", "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", From 22f720c465e07c2687bc7eb10e468bb7b40522e2 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Thu, 25 Aug 2022 13:51:15 -0700 Subject: [PATCH 556/730] Emphasize that an `AMbyteSpan` is only a view onto the memory that it references. --- automerge-c/src/byte_span.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/automerge-c/src/byte_span.rs b/automerge-c/src/byte_span.rs index f72f6f0f..e1314cb0 100644 --- a/automerge-c/src/byte_span.rs +++ b/automerge-c/src/byte_span.rs @@ -2,13 +2,14 @@ use automerge as am; /// \struct AMbyteSpan /// \installed_headerfile -/// \brief A contiguous sequence of bytes. +/// \brief A view onto a contiguous sequence of bytes. #[repr(C)] #[derive(PartialEq)] pub struct AMbyteSpan { /// A pointer to an array of bytes. - /// \warning \p src is only valid until the `AMfree()` function is - /// called on the `AMresult` struct hosting the array of bytes to + /// \attention NEVER CALL `free()` ON \p src! + /// \warning \p src is only valid until the `AMfree()` function is called + /// on the `AMresult` struct that stores the array of bytes to /// which it points. pub src: *const u8, /// The number of bytes in the array. 
From 59bde120ee7b4c666b46fd74e058a80f836960ec Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 26 Aug 2022 14:15:01 -0500 Subject: [PATCH 557/730] automerge-js adding trace to out of date errors --- automerge-js/src/constants.ts | 1 + automerge-js/src/index.ts | 34 +++++++++++++++++++++++---------- automerge-js/src/proxies.ts | 13 ++++++++++++- automerge-wasm/nodejs-index.js | 2 -- automerge-wasm/types/index.d.ts | 2 +- automerge-wasm/web-index.js | 4 ---- 6 files changed, 38 insertions(+), 18 deletions(-) diff --git a/automerge-js/src/constants.ts b/automerge-js/src/constants.ts index aa414c8b..e37835d1 100644 --- a/automerge-js/src/constants.ts +++ b/automerge-js/src/constants.ts @@ -3,6 +3,7 @@ //const CACHE = Symbol('_cache') // map from objectId to immutable object export const STATE = Symbol.for('_am_state') // object containing metadata about current state (e.g. sequence numbers) export const HEADS = Symbol.for('_am_heads') // object containing metadata about current state (e.g. sequence numbers) +export const TRACE = Symbol.for('_am_trace') // object containing metadata about current state (e.g. sequence numbers) export const OBJECT_ID = Symbol.for('_am_objectId') // object containing metadata about current state (e.g. sequence numbers) export const READ_ONLY = Symbol.for('_am_readOnly') // object containing metadata about current state (e.g. sequence numbers) export const FROZEN = Symbol.for('_am_frozen') // object containing metadata about current state (e.g. 
sequence numbers) diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index a553f853..95e0226e 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -2,7 +2,7 @@ export { uuid } from './uuid' import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" -import { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" +import { STATE, HEADS, TRACE, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" import { AutomergeValue, Counter } from "./types" export { AutomergeValue, Text, Counter, Int, Uint, Float64 } from "./types" @@ -48,6 +48,20 @@ function _heads(doc: Doc) : Heads | undefined { return Reflect.get(doc,HEADS) } +function _trace(doc: Doc) : string | undefined { + return Reflect.get(doc,TRACE) +} + +function _set_heads(doc: Doc, heads: Heads) { + Reflect.set(doc,HEADS,heads) + Reflect.set(doc,TRACE,(new Error()).stack) +} + +function _clear_heads(doc: Doc) { + Reflect.set(doc,HEADS,undefined) + Reflect.set(doc,TRACE,undefined) +} + function _obj(doc: Doc) : ObjID { return Reflect.get(doc,OBJECT_ID) } @@ -104,7 +118,7 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): throw new RangeError("Attempting to use an outdated Automerge document") } if (!!_heads(doc) === true) { - throw new RangeError("Attempting to change an out of date document"); + throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); } if (_readonly(doc) === false) { throw new RangeError("Calls to Automerge.change cannot be nested") @@ -112,13 +126,13 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): const state = _state(doc) const heads = state.getHeads() try { - Reflect.set(doc,HEADS,heads) + _set_heads(doc,heads) Reflect.set(doc,FROZEN,true) const root : T = rootProxy(state); callback(root) if (state.pendingOps() === 0) { Reflect.set(doc,FROZEN,false) - Reflect.set(doc,HEADS,undefined) + _clear_heads(doc) return doc } else { state.commit(options.message, 
options.time) @@ -127,7 +141,7 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): } catch (e) { //console.log("ERROR: ",e) Reflect.set(doc,FROZEN,false) - Reflect.set(doc,HEADS,undefined) + _clear_heads(doc) state.rollback() throw e } @@ -168,14 +182,14 @@ export function save(doc: Doc) : Uint8Array { export function merge(local: Doc, remote: Doc) : Doc { if (!!_heads(local) === true) { - throw new RangeError("Attempting to change an out of date document"); + throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); } const localState = _state(local) const heads = localState.getHeads() const remoteState = _state(remote) const changes = localState.getChangesAdded(remoteState) localState.applyChanges(changes) - Reflect.set(local,HEADS,heads) + _set_heads(local,heads) return rootProxy(localState, true) } @@ -267,7 +281,7 @@ export function applyChanges(doc: Doc, changes: Change[]) : [Doc] { const state = _state(doc) const heads = state.getHeads() state.applyChanges(changes) - Reflect.set(doc,HEADS,heads) + _set_heads(doc,heads) return [rootProxy(state, true)]; } @@ -322,7 +336,7 @@ export function receiveSyncMessage(doc: Doc, inState: SyncState, message: throw new RangeError("Attempting to use an outdated Automerge document") } if (!!_heads(doc) === true) { - throw new RangeError("Attempting to change an out of date document"); + throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); } if (_readonly(doc) === false) { throw new RangeError("Calls to Automerge.change cannot be nested") @@ -330,7 +344,7 @@ export function receiveSyncMessage(doc: Doc, inState: SyncState, message: const state = _state(doc) const heads = state.getHeads() state.receiveSyncMessage(syncState, message) - Reflect.set(doc,HEADS,heads) + _set_heads(doc,heads) const outState = ApiHandler.exportSyncState(syncState) return [rootProxy(state, true), outState, null]; } diff --git 
a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index 8e45e30a..f202b116 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -5,7 +5,7 @@ import { AutomergeValue, ScalarValue, MapValue, ListValue, TextValue } from "./t import { Int, Uint, Float64 } from "./numbers" import { Counter, getWriteableCounter } from "./counter" import { Text } from "./text" -import { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY, COUNTER, INT, UINT, F64, TEXT } from "./constants" +import { STATE, HEADS, TRACE, FROZEN, OBJECT_ID, READ_ONLY, COUNTER, INT, UINT, F64, TEXT } from "./constants" function parseListIndex(key) { if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) @@ -108,6 +108,7 @@ const MapHandler = { if (key === READ_ONLY) return readonly if (key === FROZEN) return frozen if (key === HEADS) return heads + if (key === TRACE) return target.trace if (key === STATE) return context; if (!cache[key]) { cache[key] = valueAt(target, key) @@ -129,6 +130,10 @@ const MapHandler = { target.heads = val return true } + if (key === TRACE) { + target.trace = val + return true + } const [ value, datatype ] = import_value(val) if (frozen) { throw new RangeError("Attempting to use an outdated Automerge document") @@ -211,6 +216,7 @@ const ListHandler = { if (index === READ_ONLY) return readonly if (index === FROZEN) return frozen if (index === HEADS) return heads + if (index === TRACE) return target.trace if (index === STATE) return context; if (index === 'length') return context.length(objectId, heads); if (index === Symbol.iterator) { @@ -246,6 +252,10 @@ const ListHandler = { target.heads = val return true } + if (index === TRACE) { + target.trace = val + return true + } if (typeof index == "string") { throw new RangeError('list index must be a number') } @@ -356,6 +366,7 @@ const TextHandler = Object.assign({}, ListHandler, { if (index === READ_ONLY) return readonly if (index === FROZEN) return frozen if (index === HEADS) return 
heads + if (index === TRACE) return target.trace if (index === STATE) return context; if (index === 'length') return context.length(objectId, heads); if (index === Symbol.iterator) { diff --git a/automerge-wasm/nodejs-index.js b/automerge-wasm/nodejs-index.js index 07087e59..4a42f201 100644 --- a/automerge-wasm/nodejs-index.js +++ b/automerge-wasm/nodejs-index.js @@ -2,6 +2,4 @@ let wasm = require("./bindgen") module.exports = wasm module.exports.load = module.exports.loadDoc delete module.exports.loadDoc -Object.defineProperty(module.exports, "__esModule", { value: true }) module.exports.init = () => (new Promise((resolve,reject) => { resolve(module.exports) })) -module.exports.default = module.exports.init diff --git a/automerge-wasm/types/index.d.ts b/automerge-wasm/types/index.d.ts index 68277203..ea57f9c2 100644 --- a/automerge-wasm/types/index.d.ts +++ b/automerge-wasm/types/index.d.ts @@ -205,5 +205,5 @@ export class SyncState { readonly sharedHeads: Heads; } -export default function init (): Promise; export function init (): Promise; + diff --git a/automerge-wasm/web-index.js b/automerge-wasm/web-index.js index 6510fe05..9bbe47df 100644 --- a/automerge-wasm/web-index.js +++ b/automerge-wasm/web-index.js @@ -47,7 +47,3 @@ export function init() { })) } -// depricating default export -export default function() { - return init() -} From 9879fd934283033712fb500e3f5beaee3b9c8a47 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 26 Aug 2022 14:19:28 -0500 Subject: [PATCH 558/730] copy pasta typo fix --- automerge-js/src/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 95e0226e..109b093c 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -182,7 +182,7 @@ export function save(doc: Doc) : Uint8Array { export function merge(local: Doc, remote: Doc) : Doc { if (!!_heads(local) === true) { - throw new RangeError("Attempting to change an out of date document 
- set at: " + _trace(doc)); + throw new RangeError("Attempting to change an out of date document - set at: " + _trace(local)); } const localState = _state(local) const heads = localState.getHeads() From a0eb4218d8f797d3cac608818bbbb6152cc42a26 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sat, 27 Aug 2022 11:59:14 +0100 Subject: [PATCH 559/730] Update docs for Transaction::put Fixes #420 --- automerge/src/transaction/manual_transaction.rs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 58c5ca88..022bf7f3 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -82,11 +82,6 @@ impl<'a> Transactable for Transaction<'a> { /// Set the value of property `P` to value `V` in object `obj`. /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document - /// /// # Errors /// /// This will return an error if From e295a55b41d2f36557e93da575855b8e1625b642 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sat, 27 Aug 2022 12:07:32 +0100 Subject: [PATCH 560/730] Add #[derive(Eq)] to satisfy clippy The latest clippy (90.1.65 for me) added a lint which checks for types that implement `PartialEq` and could implement `Eq` (`derive_partial_eq_without_eq`). Add a `derive(Eq)` in a bunch of places to satisfy this lint. 
--- automerge-c/src/actor_id.rs | 2 +- automerge-c/src/byte_span.rs | 2 +- automerge-c/src/change.rs | 2 +- automerge-c/src/change_hashes.rs | 2 +- automerge-c/src/changes.rs | 2 +- automerge-c/src/doc/list/items.rs | 2 +- automerge-c/src/doc/map/items.rs | 2 +- automerge-c/src/obj.rs | 2 +- automerge-c/src/obj/items.rs | 2 +- automerge-c/src/result.rs | 3 ++- automerge-c/src/strs.rs | 2 +- automerge-c/src/sync/have.rs | 2 +- automerge-c/src/sync/haves.rs | 2 +- automerge-c/src/sync/state.rs | 2 +- automerge/src/columnar/column_range/value.rs | 2 +- automerge/src/error.rs | 4 ++-- automerge/src/legacy/mod.rs | 2 +- 17 files changed, 19 insertions(+), 18 deletions(-) diff --git a/automerge-c/src/actor_id.rs b/automerge-c/src/actor_id.rs index c4ad0d79..e5f75856 100644 --- a/automerge-c/src/actor_id.rs +++ b/automerge-c/src/actor_id.rs @@ -11,7 +11,7 @@ use crate::result::{to_result, AMresult}; /// \struct AMactorId /// \installed_headerfile /// \brief An actor's unique identifier. -#[derive(PartialEq)] +#[derive(Eq, PartialEq)] pub struct AMactorId { body: *const am::ActorId, c_str: RefCell>, diff --git a/automerge-c/src/byte_span.rs b/automerge-c/src/byte_span.rs index e1314cb0..a8e55065 100644 --- a/automerge-c/src/byte_span.rs +++ b/automerge-c/src/byte_span.rs @@ -4,7 +4,7 @@ use automerge as am; /// \installed_headerfile /// \brief A view onto a contiguous sequence of bytes. #[repr(C)] -#[derive(PartialEq)] +#[derive(Eq, PartialEq)] pub struct AMbyteSpan { /// A pointer to an array of bytes. /// \attention NEVER CALL `free()` ON \p src! diff --git a/automerge-c/src/change.rs b/automerge-c/src/change.rs index e9047d2e..afee98ed 100644 --- a/automerge-c/src/change.rs +++ b/automerge-c/src/change.rs @@ -20,7 +20,7 @@ macro_rules! to_change { /// \struct AMchange /// \installed_headerfile /// \brief A group of operations performed by an actor. 
-#[derive(PartialEq)] +#[derive(Eq, PartialEq)] pub struct AMchange { body: *mut am::Change, c_msg: RefCell>, diff --git a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs index d865231f..87ae6c7f 100644 --- a/automerge-c/src/change_hashes.rs +++ b/automerge-c/src/change_hashes.rs @@ -120,7 +120,7 @@ impl From for [u8; USIZE_USIZE_USIZE_] { /// \installed_headerfile /// \brief A random-access iterator over a sequence of change hashes. #[repr(C)] -#[derive(PartialEq)] +#[derive(Eq, PartialEq)] pub struct AMchangeHashes { /// An implementation detail that is intentionally opaque. /// \warning Modifying \p detail will cause undefined behavior. diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs index 5d7f4813..e359cfb6 100644 --- a/automerge-c/src/changes.rs +++ b/automerge-c/src/changes.rs @@ -145,7 +145,7 @@ impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { /// \installed_headerfile /// \brief A random-access iterator over a sequence of changes. #[repr(C)] -#[derive(PartialEq)] +#[derive(Eq, PartialEq)] pub struct AMchanges { /// An implementation detail that is intentionally opaque. /// \warning Modifying \p detail will cause undefined behavior. diff --git a/automerge-c/src/doc/list/items.rs b/automerge-c/src/doc/list/items.rs index f1213904..aa676c4a 100644 --- a/automerge-c/src/doc/list/items.rs +++ b/automerge-c/src/doc/list/items.rs @@ -117,7 +117,7 @@ impl From for [u8; USIZE_USIZE_USIZE_] { /// \installed_headerfile /// \brief A random-access iterator over a sequence of list object items. #[repr(C)] -#[derive(PartialEq)] +#[derive(Eq, PartialEq)] pub struct AMlistItems { /// An implementation detail that is intentionally opaque. /// \warning Modifying \p detail will cause undefined behavior. 
diff --git a/automerge-c/src/doc/map/items.rs b/automerge-c/src/doc/map/items.rs index cc4f7a64..b1f046b1 100644 --- a/automerge-c/src/doc/map/items.rs +++ b/automerge-c/src/doc/map/items.rs @@ -117,7 +117,7 @@ impl From for [u8; USIZE_USIZE_USIZE_] { /// \installed_headerfile /// \brief A random-access iterator over a sequence of map object items. #[repr(C)] -#[derive(PartialEq)] +#[derive(Eq, PartialEq)] pub struct AMmapItems { /// An implementation detail that is intentionally opaque. /// \warning Modifying \p detail will cause undefined behavior. diff --git a/automerge-c/src/obj.rs b/automerge-c/src/obj.rs index 25ebbbc2..a674660e 100644 --- a/automerge-c/src/obj.rs +++ b/automerge-c/src/obj.rs @@ -10,7 +10,7 @@ pub mod items; /// \struct AMobjId /// \installed_headerfile /// \brief An object's unique identifier. -#[derive(PartialEq)] +#[derive(Eq, PartialEq)] pub struct AMobjId { body: am::ObjId, c_actor_id: RefCell>, diff --git a/automerge-c/src/obj/items.rs b/automerge-c/src/obj/items.rs index 252a93a0..fbb1d641 100644 --- a/automerge-c/src/obj/items.rs +++ b/automerge-c/src/obj/items.rs @@ -117,7 +117,7 @@ impl From for [u8; USIZE_USIZE_USIZE_] { /// \installed_headerfile /// \brief A random-access iterator over a sequence of object items. #[repr(C)] -#[derive(PartialEq)] +#[derive(Eq, PartialEq)] pub struct AMobjItems { /// An implementation detail that is intentionally opaque. /// \warning Modifying \p detail will cause undefined behavior. diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs index e67c698e..67b14b1d 100644 --- a/automerge-c/src/result.rs +++ b/automerge-c/src/result.rs @@ -903,7 +903,8 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> /// \struct AMunknownValue /// \installed_headerfile /// \brief A value (typically for a `set` operation) whose type is unknown. -#[derive(PartialEq)] +/// +#[derive(Eq, PartialEq)] #[repr(C)] pub struct AMunknownValue { /// The value's raw bytes. 
diff --git a/automerge-c/src/strs.rs b/automerge-c/src/strs.rs index dcf7c3b7..a823ecaf 100644 --- a/automerge-c/src/strs.rs +++ b/automerge-c/src/strs.rs @@ -117,7 +117,7 @@ impl From for [u8; USIZE_USIZE_USIZE_] { /// \installed_headerfile /// \brief A random-access iterator over a sequence of UTF-8 strings. #[repr(C)] -#[derive(PartialEq)] +#[derive(Eq, PartialEq)] pub struct AMstrs { /// An implementation detail that is intentionally opaque. /// \warning Modifying \p detail will cause undefined behavior. diff --git a/automerge-c/src/sync/have.rs b/automerge-c/src/sync/have.rs index d3a3e3e5..f7ff4cb0 100644 --- a/automerge-c/src/sync/have.rs +++ b/automerge-c/src/sync/have.rs @@ -6,7 +6,7 @@ use crate::change_hashes::AMchangeHashes; /// \installed_headerfile /// \brief A summary of the changes that the sender of a synchronization /// message already has. -#[derive(Clone, PartialEq)] +#[derive(Clone, Eq, PartialEq)] pub struct AMsyncHave(*const am::sync::Have); impl AMsyncHave { diff --git a/automerge-c/src/sync/haves.rs b/automerge-c/src/sync/haves.rs index 3ccaefda..d359a4dc 100644 --- a/automerge-c/src/sync/haves.rs +++ b/automerge-c/src/sync/haves.rs @@ -147,7 +147,7 @@ impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { /// \installed_headerfile /// \brief A random-access iterator over a sequence of synchronization haves. #[repr(C)] -#[derive(PartialEq)] +#[derive(Eq, PartialEq)] pub struct AMsyncHaves { /// An implementation detail that is intentionally opaque. /// \warning Modifying \p detail will cause undefined behavior. diff --git a/automerge-c/src/sync/state.rs b/automerge-c/src/sync/state.rs index 1c2bab05..54fd5fe4 100644 --- a/automerge-c/src/sync/state.rs +++ b/automerge-c/src/sync/state.rs @@ -22,7 +22,7 @@ pub(crate) use to_sync_state; /// \struct AMsyncState /// \installed_headerfile /// \brief The state of synchronization with a peer. 
-#[derive(PartialEq)] +#[derive(Eq, PartialEq)] pub struct AMsyncState { body: am::sync::State, their_haves_storage: RefCell>, diff --git a/automerge/src/columnar/column_range/value.rs b/automerge/src/columnar/column_range/value.rs index 7d54765e..43f63437 100644 --- a/automerge/src/columnar/column_range/value.rs +++ b/automerge/src/columnar/column_range/value.rs @@ -298,7 +298,7 @@ impl<'a> ValueIter<'a> { } Ok(bytes) => bytes, }; - let val = match f(&*raw) { + let val = match f(raw) { Ok(v) => v, Err(e) => return Some(Err(e)), }; diff --git a/automerge/src/error.rs b/automerge/src/error.rs index 7f9b4ad2..406b5d2b 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -63,11 +63,11 @@ pub(crate) struct InvalidScalarValue { pub(crate) expected: String, } -#[derive(Error, Debug, PartialEq)] +#[derive(Error, Debug, Eq, PartialEq)] #[error("Invalid change hash slice: {0:?}")] pub struct InvalidChangeHashSlice(pub Vec); -#[derive(Error, Debug, PartialEq)] +#[derive(Error, Debug, Eq, PartialEq)] #[error("Invalid object ID: {0}")] pub struct InvalidObjectId(pub String); diff --git a/automerge/src/legacy/mod.rs b/automerge/src/legacy/mod.rs index 3b7bcbc0..6e6acec5 100644 --- a/automerge/src/legacy/mod.rs +++ b/automerge/src/legacy/mod.rs @@ -132,7 +132,7 @@ impl Key { } } -#[derive(Debug, Default, Clone, PartialEq, Serialize)] +#[derive(Debug, Default, Clone, Eq, PartialEq, Serialize)] #[serde(transparent)] pub struct SortedVec(Vec); From dd69f6f7b4b99a22886e293e54a93348d35ee8ef Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 1 Sep 2022 12:27:34 +0100 Subject: [PATCH 561/730] Add `readme` field to automerge/Cargo.toml --- automerge/Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index d6653e56..959ce37b 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -7,6 +7,7 @@ repository = "https://github.com/automerge/automerge-rs" documentation = "https://automerge.org/automerge-rs/automerge/" 
rust-version = "1.57.0" description = "A JSON-like data structure (a CRDT) that can be modified concurrently by different users, and merged again automatically" +readme = "../README.md" [features] optree-visualisation = ["dot", "rand"] From eba7038bd241518c835736cb58d16b771577a934 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 1 Sep 2022 15:38:19 +0100 Subject: [PATCH 562/730] Allow for empty head indices when decoding doc The compressed document format includes at the end of the document chunk the indicies of the heads of the document. Older versions of the javascript implementation do not include these indicies so we allow them to be omitted when decoding. Whilst we're here add some tracing::trace logs to make it easier to understand where parsing is failing. --- automerge/src/automerge.rs | 7 +++++++ automerge/src/storage/chunk.rs | 1 + automerge/src/storage/document.rs | 28 +++++++++++++++++----------- automerge/src/storage/load.rs | 1 + 4 files changed, 26 insertions(+), 11 deletions(-) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 6c0cd6dd..f48fac6b 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -591,13 +591,16 @@ impl Automerge { } /// Load a document. 
+ #[tracing::instrument(skip(data, options), err)] pub fn load_with( data: &[u8], mut options: ApplyOptions<'_, Obs>, ) -> Result { if data.is_empty() { + tracing::trace!("no data, initializing empty document"); return Ok(Self::new()); } + tracing::trace!("loading first chunk"); let (remaining, first_chunk) = storage::Chunk::parse(storage::parse::Input::new(data)) .map_err(|e| load::Error::Parse(Box::new(e)))?; if !first_chunk.checksum_valid() { @@ -607,6 +610,7 @@ impl Automerge { let mut am = match first_chunk { storage::Chunk::Document(d) => { + tracing::trace!("first chunk is document chunk, inflating"); let storage::load::Reconstructed { max_op, result: op_set, @@ -643,6 +647,7 @@ impl Automerge { } } storage::Chunk::Change(stored_change) => { + tracing::trace!("first chunk is change chunk, applying"); let change = Change::new_from_unverified(stored_change.into_owned(), None) .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?; let mut am = Self::new(); @@ -650,6 +655,7 @@ impl Automerge { am } storage::Chunk::CompressedChange(stored_change, compressed) => { + tracing::trace!("first chunk is compressed change, decompressing and applying"); let change = Change::new_from_unverified( stored_change.into_owned(), Some(compressed.into_owned()), @@ -660,6 +666,7 @@ impl Automerge { am } }; + tracing::trace!("first chunk loaded, loading remaining chunks"); match load::load_changes(remaining.reset()) { load::LoadedChanges::Complete(c) => { for change in c { diff --git a/automerge/src/storage/chunk.rs b/automerge/src/storage/chunk.rs index ad64e804..821c2c55 100644 --- a/automerge/src/storage/chunk.rs +++ b/automerge/src/storage/chunk.rs @@ -56,6 +56,7 @@ impl<'a> Chunk<'a> { first: chunk_input, remaining, } = i.split(header.data_bytes().len()); + tracing::trace!(?header, "parsed chunk header"); let chunk = match header.chunk_type { ChunkType::Change => { let (remaining, change) = diff --git a/automerge/src/storage/document.rs 
b/automerge/src/storage/document.rs index b9923b7a..500fbe85 100644 --- a/automerge/src/storage/document.rs +++ b/automerge/src/storage/document.rs @@ -135,17 +135,23 @@ impl<'a> Document<'a> { let (i, parse::RangeOf { range: ops, .. }) = parse::range_of(|i| parse::take_n(ops_meta.total_column_len(), i), i)?; - // parse the suffix - let ( - i, - parse::RangeOf { - range: suffix, - value: head_indices, - }, - ) = parse::range_of( - |i| parse::apply_n(heads.len(), parse::leb128_u64::)(i), - i, - )?; + // parse the suffix, which may be empty if this document was produced by an older version + // of the JS automerge implementation + let (i, suffix, head_indices) = if i.is_empty() { + (i, 0..0, Vec::new()) + } else { + let ( + i, + parse::RangeOf { + range: suffix, + value: head_indices, + }, + ) = parse::range_of( + |i| parse::apply_n(heads.len(), parse::leb128_u64::)(i), + i, + )?; + (i, suffix, head_indices) + }; let compression::Decompressed { change_bytes, diff --git a/automerge/src/storage/load.rs b/automerge/src/storage/load.rs index 75732d7c..fe2e8429 100644 --- a/automerge/src/storage/load.rs +++ b/automerge/src/storage/load.rs @@ -80,6 +80,7 @@ fn load_next_change<'a>( } match chunk { storage::Chunk::Document(d) => { + tracing::trace!("loading document chunk"); let Reconstructed { changes: new_changes, .. 
From 649b75deb1e46b4484ae4a73c5be97b38d74ec22 Mon Sep 17 00:00:00 2001 From: +merlan #flirora Date: Mon, 5 Sep 2022 15:28:31 -0400 Subject: [PATCH 563/730] Correct documentation for AutoSerde --- automerge/src/autoserde.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge/src/autoserde.rs b/automerge/src/autoserde.rs index 50911198..63b0848a 100644 --- a/automerge/src/autoserde.rs +++ b/automerge/src/autoserde.rs @@ -2,7 +2,7 @@ use serde::ser::{SerializeMap, SerializeSeq}; use crate::{Automerge, ObjId, ObjType, Value}; -/// A wrapper type which implements `serde::Deserialize` for an `Automerge` +/// A wrapper type which implements [`serde::Serialize`] for an [`Automerge`]. #[derive(Debug)] pub struct AutoSerde<'a>(&'a Automerge); From f586c825579be151b82c3616e7ca95ef9d95f8d5 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 7 Sep 2022 16:45:36 +0100 Subject: [PATCH 564/730] OpSet::visualise: add argument to filter by obj ID Occasionally one needs to debug problems in a document with a large number of objects. In this case it is unhelpful to print a graphviz of the whole opset because there are too many objects. Add a `Option>` argument to `OpSet::visualise` to filter the objects which are visualised. --- automerge/src/autocommit.rs | 10 ++++++++-- automerge/src/automerge.rs | 12 ++++++++++-- automerge/src/op_set.rs | 18 ++++++++++++++++-- 3 files changed, 34 insertions(+), 6 deletions(-) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 2f41cee4..71fb7df2 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -231,9 +231,15 @@ impl AutoCommit { .receive_sync_message_with(sync_state, message, options) } + /// Return a graphviz representation of the opset. 
+ /// + /// # Arguments + /// + /// * objects: An optional list of object IDs to display, if not specified all objects are + /// visualised #[cfg(feature = "optree-visualisation")] - pub fn visualise_optree(&self) -> String { - self.doc.visualise_optree() + pub fn visualise_optree(&self, objects: Option>) -> String { + self.doc.visualise_optree(objects) } /// Get the current heads of the document. diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index f48fac6b..96a0ed47 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -1178,9 +1178,17 @@ impl Automerge { } } + /// Return a graphviz representation of the opset. + /// + /// # Arguments + /// + /// * objects: An optional list of object IDs to display, if not specified all objects are + /// visualised #[cfg(feature = "optree-visualisation")] - pub fn visualise_optree(&self) -> String { - self.ops.visualise() + pub fn visualise_optree(&self, objects: Option>) -> String { + let objects = + objects.map(|os| os.iter().filter_map(|o| self.exid_to_obj(o).ok()).collect()); + self.ops.visualise(objects) } } diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 766d9e01..e8380b8e 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -300,10 +300,24 @@ impl OpSetInternal { self.trees.get(id).map(|tree| tree.objtype) } + /// Return a graphviz representation of the opset. 
+ /// + /// # Arguments + /// + /// * objects: An optional list of object IDs to display, if not specified all objects are + /// visualised #[cfg(feature = "optree-visualisation")] - pub(crate) fn visualise(&self) -> String { + pub(crate) fn visualise(&self, objects: Option>) -> String { + use std::borrow::Cow; let mut out = Vec::new(); - let graph = super::visualisation::GraphVisualisation::construct(&self.trees, &self.m); + let trees = if let Some(objects) = objects { + let mut filtered = self.trees.clone(); + filtered.retain(|k, _| objects.contains(k)); + Cow::Owned(filtered) + } else { + Cow::Borrowed(&self.trees) + }; + let graph = super::visualisation::GraphVisualisation::construct(&trees, &self.m); dot::render(&graph, &mut out).unwrap(); String::from_utf8_lossy(&out[..]).to_string() } From fc9cb17b345e7bf3b5765b542b8683226271c79b Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 8 Sep 2022 16:27:30 +0100 Subject: [PATCH 565/730] Use the local automerge-wasm in automerge-js tests Somehow the `devDependencies` for `automerge-js` dependended on the released `automerge-wasm` package, rather than the local version, which means that the JS tests are not actually testing the current implementation. Depend on the local `automerge-wasm` package to fix this. 
--- automerge-js/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-js/package.json b/automerge-js/package.json index 228d94b8..c6ee26fa 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -47,7 +47,7 @@ "@types/uuid": "^8.3.4", "@typescript-eslint/eslint-plugin": "^5.25.0", "@typescript-eslint/parser": "^5.25.0", - "automerge-wasm": "^0.1.6", + "automerge-wasm": "file:../automerge-wasm", "eslint": "^8.15.0", "fast-sha256": "^1.3.0", "mocha": "^10.0.0", From 427002caf349c58d16d1f6941c79b44f81c9a4b8 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 12 Sep 2022 12:31:09 +0100 Subject: [PATCH 566/730] Correctly load documents with deleted objects The logic for reconstructing changes from the compressed document format records operations which set a key in an object so that it can later reconstruct delete operations from the successor list of the document format operations. The logic to do this was only recording set operations and not `make*` operations. This meant that delete operations targeting `make*` operations could not be loaded correctly. Correctly record `make*` operations for later use in constructing delete operations. 
--- .../src/storage/load/reconstruct_document.rs | 6 +++--- automerge/tests/test.rs | 16 ++++++++++++++++ 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/automerge/src/storage/load/reconstruct_document.rs b/automerge/src/storage/load/reconstruct_document.rs index 5747a51d..e8221e5c 100644 --- a/automerge/src/storage/load/reconstruct_document.rs +++ b/automerge/src/storage/load/reconstruct_document.rs @@ -236,9 +236,9 @@ impl LoadingObject { } fn append_op(&mut self, op: Op) -> Result<(), Error> { - // Collect set operations so we can find the keys which delete operations refer to in - // `finish` - if matches!(op.action, OpType::Put(_)) { + // Collect set and make operations so we can find the keys which delete operations refer to + // in `finish` + if matches!(op.action, OpType::Put(_) | OpType::Make(_)) { match op.key { Key::Map(_) => { self.set_ops.insert(op.id, op.key); diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index d95d94ea..fcd6829b 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1332,3 +1332,19 @@ fn load_incremental_with_corrupted_tail() { } ); } + +#[test] +fn load_doc_with_deleted_objects() { + // Reproduces an issue where a document with deleted objects failed to load + let mut doc = AutoCommit::new(); + doc.put_object(ROOT, "list", ObjType::List).unwrap(); + doc.put_object(ROOT, "text", ObjType::Text).unwrap(); + doc.put_object(ROOT, "map", ObjType::Map).unwrap(); + doc.put_object(ROOT, "table", ObjType::Table).unwrap(); + doc.delete(&ROOT, "list").unwrap(); + doc.delete(&ROOT, "text").unwrap(); + doc.delete(&ROOT, "map").unwrap(); + doc.delete(&ROOT, "table").unwrap(); + let saved = doc.save(); + Automerge::load(&saved).unwrap(); +} From c7e370a1df5f38168483946e2df2b1762c79153c Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 28 Sep 2022 17:18:37 -0500 Subject: [PATCH 567/730] Appease clippy --- automerge-wasm/Cargo.toml | 2 +- automerge-wasm/src/interop.rs | 13 ++++++++----- 
automerge-wasm/src/lib.rs | 6 +++--- automerge-wasm/src/sync.rs | 3 ++- automerge/benches/sync.rs | 12 ++++-------- automerge/src/storage/columns/raw_column.rs | 2 +- automerge/src/storage/parse.rs | 4 ++-- 7 files changed, 21 insertions(+), 21 deletions(-) diff --git a/automerge-wasm/Cargo.toml b/automerge-wasm/Cargo.toml index f7668bfa..38fe3dab 100644 --- a/automerge-wasm/Cargo.toml +++ b/automerge-wasm/Cargo.toml @@ -29,7 +29,7 @@ serde_json = "^1.0" rand = { version = "^0.8.4" } getrandom = { version = "^0.2.2", features=["js"] } uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] } -serde-wasm-bindgen = "0.1.3" +serde-wasm-bindgen = "0.4.3" serde_bytes = "0.11.5" hex = "^0.4.3" regex = "^1.5" diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 1d43adc9..bc5a0226 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -99,7 +99,7 @@ impl TryFrom for HashSet { let mut result = HashSet::new(); for key in Reflect::own_keys(&value.0)?.iter() { if let Some(true) = Reflect::get(&value.0, &key)?.as_bool() { - result.insert(key.into_serde().map_err(to_js_err)?); + result.insert(serde_wasm_bindgen::from_value(key).map_err(to_js_err)?); } } Ok(result) @@ -113,7 +113,7 @@ impl TryFrom for BTreeSet { let mut result = BTreeSet::new(); for key in Reflect::own_keys(&value.0)?.iter() { if let Some(true) = Reflect::get(&value.0, &key)?.as_bool() { - result.insert(key.into_serde().map_err(to_js_err)?); + result.insert(serde_wasm_bindgen::from_value(key).map_err(to_js_err)?); } } Ok(result) @@ -125,7 +125,8 @@ impl TryFrom for Vec { fn try_from(value: JS) -> Result { let value = value.0.dyn_into::()?; - let value: Result, _> = value.iter().map(|j| j.into_serde()).collect(); + let value: Result, _> = + value.iter().map(serde_wasm_bindgen::from_value).collect(); let value = value.map_err(to_js_err)?; Ok(value) } @@ -134,7 +135,8 @@ impl TryFrom for Vec { impl From for Option> { fn from(value: JS) -> Self { 
let value = value.0.dyn_into::().ok()?; - let value: Result, _> = value.iter().map(|j| j.into_serde()).collect(); + let value: Result, _> = + value.iter().map(serde_wasm_bindgen::from_value).collect(); let value = value.ok()?; Some(value) } @@ -350,7 +352,8 @@ pub(crate) fn to_objtype( pub(crate) fn get_heads(heads: Option) -> Option> { let heads = heads?; - let heads: Result, _> = heads.iter().map(|j| j.into_serde()).collect(); + let heads: Result, _> = + heads.iter().map(serde_wasm_bindgen::from_value).collect(); heads.ok() } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 9111a4de..af7083ef 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -609,7 +609,7 @@ impl Automerge { #[wasm_bindgen(js_name = getChangeByHash)] pub fn get_change_by_hash(&mut self, hash: JsValue) -> Result { self.ensure_transaction_closed(); - let hash = hash.into_serde().map_err(to_js_err)?; + let hash = serde_wasm_bindgen::from_value(hash).map_err(to_js_err)?; let change = self.doc.get_change_by_hash(&hash); if let Some(c) = change { Ok(Uint8Array::from(c.raw_bytes()).into()) @@ -870,7 +870,7 @@ pub fn load(data: Uint8Array, actor: Option) -> Result Result { - let change: am::ExpandedChange = change.into_serde().map_err(to_js_err)?; + let change: am::ExpandedChange = serde_wasm_bindgen::from_value(change).map_err(to_js_err)?; let change: Change = change.into(); Ok(Uint8Array::from(change.raw_bytes())) } @@ -879,7 +879,7 @@ pub fn encode_change(change: JsValue) -> Result { pub fn decode_change(change: Uint8Array) -> Result { let change = Change::from_bytes(change.to_vec()).map_err(to_js_err)?; let change: am::ExpandedChange = change.decode(); - JsValue::from_serde(&change).map_err(to_js_err) + serde_wasm_bindgen::to_value(&change).map_err(to_js_err) } #[wasm_bindgen(js_name = initSyncState)] diff --git a/automerge-wasm/src/sync.rs b/automerge-wasm/src/sync.rs index f76eae84..94f65041 100644 --- a/automerge-wasm/src/sync.rs +++ 
b/automerge-wasm/src/sync.rs @@ -32,7 +32,8 @@ impl SyncState { #[wasm_bindgen(setter, js_name = sentHashes)] pub fn set_sent_hashes(&mut self, hashes: JsValue) -> Result<(), JsValue> { - let hashes_map: HashMap = hashes.into_serde().map_err(to_js_err)?; + let hashes_map: HashMap = + serde_wasm_bindgen::from_value(hashes).map_err(to_js_err)?; let hashes_set: BTreeSet = hashes_map.keys().cloned().collect(); self.0.sent_hashes = hashes_set; Ok(()) diff --git a/automerge/benches/sync.rs b/automerge/benches/sync.rs index 9798c803..483fd2b4 100644 --- a/automerge/benches/sync.rs +++ b/automerge/benches/sync.rs @@ -28,14 +28,10 @@ fn increasing_put(n: u64) -> Automerge { // keep syncing until doc1 no longer generates a sync message for doc2. fn sync(doc1: &mut DocWithSync, doc2: &mut DocWithSync) { - loop { - if let Some(message1) = doc1.doc.generate_sync_message(&mut doc1.peer_state) { - doc2.doc - .receive_sync_message(&mut doc2.peer_state, message1) - .unwrap() - } else { - break; - } + while let Some(message1) = doc1.doc.generate_sync_message(&mut doc1.peer_state) { + doc2.doc + .receive_sync_message(&mut doc2.peer_state, message1) + .unwrap(); if let Some(message2) = doc2.doc.generate_sync_message(&mut doc2.peer_state) { doc1.doc diff --git a/automerge/src/storage/columns/raw_column.rs b/automerge/src/storage/columns/raw_column.rs index b37f73e3..053c3c75 100644 --- a/automerge/src/storage/columns/raw_column.rs +++ b/automerge/src/storage/columns/raw_column.rs @@ -246,7 +246,7 @@ impl RawColumns { self.0.iter().map(|c| c.data.len()).sum() } - pub(crate) fn iter<'a>(&'a self) -> impl Iterator> + '_ { + pub(crate) fn iter(&self) -> impl Iterator> + '_ { self.0.iter() } } diff --git a/automerge/src/storage/parse.rs b/automerge/src/storage/parse.rs index 828579f8..64419fda 100644 --- a/automerge/src/storage/parse.rs +++ b/automerge/src/storage/parse.rs @@ -411,7 +411,7 @@ pub(crate) fn take4(input: Input<'_>) -> ParseResult<'_, [u8; 4], E> { } /// Parse a slice of 
length `n` from `input` -pub(crate) fn take_n<'a, E>(n: usize, input: Input<'a>) -> ParseResult<'_, &'a [u8], E> { +pub(crate) fn take_n(n: usize, input: Input<'_>) -> ParseResult<'_, &[u8], E> { input.take_n(n) } @@ -449,7 +449,7 @@ where /// /// This first parses a LEB128 encoded `u64` from the input, then parses this many bytes from the /// underlying input. -pub(crate) fn length_prefixed_bytes<'a, E>(input: Input<'a>) -> ParseResult<'_, &'a [u8], E> +pub(crate) fn length_prefixed_bytes(input: Input<'_>) -> ParseResult<'_, &[u8], E> where E: From, { From e57548f6e2e1fc28f733cdb3f6c8a8cf0bb3a6c5 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 29 Sep 2022 12:33:01 -0500 Subject: [PATCH 568/730] Fix broken encode/decode change Previous ceremonies to appease clippy resulted in the encodeChange/decodeChange wasm functions being slightly broken. Here we fix them. --- automerge-wasm/src/lib.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index af7083ef..4dfadced 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -34,6 +34,7 @@ use automerge::Patch; use automerge::VecOpObserver; use automerge::{Change, ObjId, Prop, Value, ROOT}; use js_sys::{Array, Object, Uint8Array}; +use serde::Serialize; use std::convert::TryInto; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; @@ -870,7 +871,11 @@ pub fn load(data: Uint8Array, actor: Option) -> Result Result { - let change: am::ExpandedChange = serde_wasm_bindgen::from_value(change).map_err(to_js_err)?; + // Alex: Technically we should be using serde_wasm_bindgen::from_value instead of into_serde. + // Unfortunately serde_wasm_bindgen::from_value fails for some inscrutable reason, so instead + // we use into_serde (sorry to future me). 
+ #[allow(deprecated)] + let change: am::ExpandedChange = change.into_serde().map_err(to_js_err)?; let change: Change = change.into(); Ok(Uint8Array::from(change.raw_bytes())) } @@ -879,7 +884,8 @@ pub fn encode_change(change: JsValue) -> Result { pub fn decode_change(change: Uint8Array) -> Result { let change = Change::from_bytes(change.to_vec()).map_err(to_js_err)?; let change: am::ExpandedChange = change.decode(); - serde_wasm_bindgen::to_value(&change).map_err(to_js_err) + let serializer = serde_wasm_bindgen::Serializer::json_compatible(); + change.serialize(&serializer).map_err(to_js_err) } #[wasm_bindgen(js_name = initSyncState)] From 3d59e61cd62c5a77474bf44e03d7e8d57d967d0b Mon Sep 17 00:00:00 2001 From: Alex Good Date: Fri, 30 Sep 2022 18:58:46 +0100 Subject: [PATCH 569/730] Allow empty changes when loading document format The logic for loading compressed document chunks has a check that the `max_op` of a change is valid. This check was overly strict in that it checked that the max op was strictly larger than the max op of a previous change - this rejects valid documents which contain changes with no ops in them, in which case the max op can be equal to the max op of the previous change. Loosen the logic to allow empty changes.
--- .../src/storage/load/change_collector.rs | 4 +++- automerge/tests/test.rs | 21 +++++++++++++++++++ 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/automerge/src/storage/load/change_collector.rs b/automerge/src/storage/load/change_collector.rs index 5a877a60..75ef98f1 100644 --- a/automerge/src/storage/load/change_collector.rs +++ b/automerge/src/storage/load/change_collector.rs @@ -52,7 +52,9 @@ impl<'a> ChangeCollector<'a> { let change = change.map_err(|e| Error::ReadChange(Box::new(e)))?; let actor_changes = changes_by_actor.entry(change.actor).or_default(); if let Some(prev) = actor_changes.last() { - if prev.max_op >= change.max_op { + // Note that we allow max_op to be equal to the previous max_op in case the + // previous change had no ops (which is permitted) + if prev.max_op > change.max_op { return Err(Error::ChangesOutOfOrder); } } diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index fcd6829b..203ec772 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1348,3 +1348,24 @@ fn load_doc_with_deleted_objects() { let saved = doc.save(); Automerge::load(&saved).unwrap(); } + +#[test] +fn simple_bad_saveload() { + let mut doc = Automerge::new(); + doc.transact::<_, _, AutomergeError>(|d| { + d.put(ROOT, "count", 0)?; + Ok(()) + }) + .unwrap(); + + doc.transact::<_, _, AutomergeError>(|_d| Ok(())).unwrap(); + + doc.transact::<_, _, AutomergeError>(|d| { + d.put(ROOT, "count", 0)?; + Ok(()) + }) + .unwrap(); + + let bytes = doc.save(); + Automerge::load(&bytes).unwrap(); +} From 837c07b23a9c09d15be75e20e36c580951d8bdbb Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sun, 2 Oct 2022 18:59:41 +0100 Subject: [PATCH 570/730] Correctly encode compressed changes in sync messages Sync messages encode changes as length prefixed byte arrays. We were calculating the length using the uncompressed bytes of a change but encoding the bytes of the change using the (possibly) compressed bytes. 
This meant that if a change was large enough to compress then it would fail to decode. Switch to using uncompressed bytes in sync messages. --- automerge/src/change.rs | 87 ++++++++++++++++++++++++++++++ automerge/src/storage/change.rs | 8 ++- automerge/src/sync.rs | 94 ++++++++++++++++++++++++++++++++- automerge/src/sync/bloom.rs | 13 ++++- automerge/src/sync/state.rs | 2 +- automerge/src/types.rs | 74 ++++++++++++++++++++++++++ 6 files changed, 274 insertions(+), 4 deletions(-) diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 3c45a524..198c68fb 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -142,6 +142,12 @@ impl AsRef> for Change { } } +impl From for StoredChange<'static, Verified> { + fn from(c: Change) -> Self { + c.stored + } +} + #[derive(thiserror::Error, Debug)] pub enum LoadError { #[error("unable to parse change: {0}")] @@ -313,3 +319,84 @@ impl From<&Change> for crate::ExpandedChange { } } } + +#[cfg(test)] +pub(crate) mod gen { + use super::Change; + use crate::{ + op_tree::OpSetMetadata, + storage::{change::ChangeBuilder, convert::op_as_actor_id}, + types::{ + gen::{gen_hash, gen_op}, + ObjId, Op, OpId, + }, + ActorId, + }; + use proptest::prelude::*; + + fn gen_actor() -> impl Strategy { + proptest::array::uniform32(proptest::bits::u8::ANY).prop_map(ActorId::from) + } + + prop_compose! 
{ + fn gen_actors()(this_actor in gen_actor(), other_actors in proptest::collection::vec(gen_actor(), 0..10)) -> (ActorId, Vec) { + (this_actor, other_actors) + } + } + + fn gen_ops( + this_actor: ActorId, + other_actors: Vec, + ) -> impl Strategy, OpSetMetadata)> { + let mut all_actors = vec![this_actor]; + all_actors.extend(other_actors); + let mut m = OpSetMetadata::from_actors(all_actors); + m.props.cache("someprop".to_string()); + let root_id = ObjId::root(); + (0_u64..10) + .prop_map(|num_ops| { + (0..num_ops) + .map(|counter| OpId::new(0, counter)) + .collect::>() + }) + .prop_flat_map(move |opids| { + let mut strat = Just(Vec::new()).boxed(); + for opid in opids { + strat = (gen_op(opid, vec![0]), strat) + .prop_map(move |(op, ops)| { + let mut result = Vec::with_capacity(ops.len() + 1); + result.extend(ops); + result.push((root_id, op)); + result + }) + .boxed(); + } + strat + }) + .prop_map(move |ops| (ops, m.clone())) + } + + prop_compose! { + pub(crate) fn gen_change()((this_actor, other_actors) in gen_actors())( + (ops, metadata) in gen_ops(this_actor.clone(), other_actors), + start_op in 1_u64..200000, + seq in 0_u64..200000, + timestamp in 0..i64::MAX, + deps in proptest::collection::vec(gen_hash(), 0..100), + message in proptest::option::of("[a-z]{200}"), + this_actor in Just(this_actor), + ) -> Change { + let ops = ops.iter().map(|(obj, op)| op_as_actor_id(obj, op, &metadata)); + Change::new(ChangeBuilder::new() + .with_dependencies(deps) + .with_start_op(start_op.try_into().unwrap()) + .with_message(message) + .with_actor(this_actor) + .with_seq(seq) + .with_timestamp(timestamp) + .build(ops.into_iter()) + .unwrap()) + } + + } +} diff --git a/automerge/src/storage/change.rs b/automerge/src/storage/change.rs index cbe014ac..633d96ac 100644 --- a/automerge/src/storage/change.rs +++ b/automerge/src/storage/change.rs @@ -40,7 +40,7 @@ impl OpReadState for Unverified {} /// ReadChangeOpError>`. 
/// /// [1]: https://alexjg.github.io/automerge-storage-docs/#change-chunks -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug)] pub(crate) struct Change<'a, O: OpReadState> { /// The raw bytes of the entire chunk containing this change, including the header. bytes: Cow<'a, [u8]>, @@ -59,6 +59,12 @@ pub(crate) struct Change<'a, O: OpReadState> { _phantom: PhantomData, } +impl<'a, O: OpReadState> PartialEq for Change<'a, O> { + fn eq(&self, other: &Self) -> bool { + self.bytes == other.bytes + } +} + #[derive(thiserror::Error, Debug)] pub(crate) enum ParseError { #[error(transparent)] diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 80035823..8230b1c3 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -1,4 +1,5 @@ use itertools::Itertools; +use serde::ser::SerializeMap; use std::collections::{HashMap, HashSet}; use crate::{ @@ -311,6 +312,27 @@ pub struct Message { pub changes: Vec, } +impl serde::Serialize for Message { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let mut map = serializer.serialize_map(Some(4))?; + map.serialize_entry("heads", &self.heads)?; + map.serialize_entry("need", &self.need)?; + map.serialize_entry("have", &self.have)?; + map.serialize_entry( + "changes", + &self + .changes + .iter() + .map(crate::ExpandedChange::from) + .collect::>(), + )?; + map.end() + } +} + fn parse_have(input: parse::Input<'_>) -> parse::ParseResult<'_, Have, ReadMessageError> { let (i, last_sync) = parse::length_prefixed(parse::change_hash)(input)?; let (i, bloom_bytes) = parse::length_prefixed_bytes(i)?; @@ -385,7 +407,7 @@ impl Message { encode_many(&mut buf, self.changes.iter_mut(), |buf, change| { leb128::write::unsigned(buf, change.raw_bytes().len() as u64).unwrap(); - buf.extend(change.bytes().as_ref()) + buf.extend(change.raw_bytes().as_ref()) }); buf @@ -436,3 +458,73 @@ fn advance_heads( advanced_heads.sort(); advanced_heads } + +#[cfg(test)] +mod tests { + use super::*; + use 
crate::change::gen::gen_change; + use crate::storage::parse::Input; + use crate::types::gen::gen_hash; + use proptest::prelude::*; + + prop_compose! { + fn gen_bloom()(hashes in gen_sorted_hashes(0..10)) -> BloomFilter { + BloomFilter::from_hashes(hashes.into_iter()) + } + } + + prop_compose! { + fn gen_have()(bloom in gen_bloom(), last_sync in gen_sorted_hashes(0..10)) -> Have { + Have { + bloom, + last_sync, + } + } + } + + fn gen_sorted_hashes(size: std::ops::Range) -> impl Strategy> { + proptest::collection::vec(gen_hash(), size).prop_map(|mut h| { + h.sort(); + h + }) + } + + prop_compose! { + fn gen_sync_message()( + heads in gen_sorted_hashes(0..10), + need in gen_sorted_hashes(0..10), + have in proptest::collection::vec(gen_have(), 0..10), + changes in proptest::collection::vec(gen_change(), 0..10), + ) -> Message { + Message { + heads, + need, + have, + changes, + } + } + + } + + #[test] + fn encode_decode_empty_message() { + let msg = Message { + heads: vec![], + need: vec![], + have: vec![], + changes: vec![], + }; + let encoded = msg.encode(); + Message::parse(Input::new(&encoded)).unwrap(); + } + + proptest! 
{ + #[test] + fn encode_decode_message(msg in gen_sync_message()) { + let encoded = msg.clone().encode(); + let (i, decoded) = Message::parse(Input::new(&encoded)).unwrap(); + assert!(i.is_empty()); + assert_eq!(msg, decoded); + } + } +} diff --git a/automerge/src/sync/bloom.rs b/automerge/src/sync/bloom.rs index aff3dc13..c02acbc0 100644 --- a/automerge/src/sync/bloom.rs +++ b/automerge/src/sync/bloom.rs @@ -9,7 +9,7 @@ use crate::ChangeHash; const BITS_PER_ENTRY: u32 = 10; const NUM_PROBES: u32 = 7; -#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize)] pub struct BloomFilter { num_entries: u32, num_bits_per_entry: u32, @@ -17,6 +17,17 @@ pub struct BloomFilter { bits: Vec, } +impl Default for BloomFilter { + fn default() -> Self { + BloomFilter { + num_entries: 0, + num_bits_per_entry: BITS_PER_ENTRY, + num_probes: NUM_PROBES, + bits: Vec::new(), + } + } +} + #[derive(Debug, thiserror::Error)] pub(crate) enum ParseError { #[error(transparent)] diff --git a/automerge/src/sync/state.rs b/automerge/src/sync/state.rs index 5a34aad1..ad7e2c2c 100644 --- a/automerge/src/sync/state.rs +++ b/automerge/src/sync/state.rs @@ -36,7 +36,7 @@ pub struct State { /// A summary of the changes that the sender of the message already has. /// This is implicitly a request to the recipient to send all changes that the /// sender does not already have. -#[derive(Debug, Clone, Default, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, serde::Serialize)] pub struct Have { /// The heads at the time of the last successful sync with this recipient. 
pub last_sync: Vec, diff --git a/automerge/src/types.rs b/automerge/src/types.rs index a1e4f2a7..22ca1364 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -670,3 +670,77 @@ impl From for wasm_bindgen::JsValue { } } } + +#[cfg(test)] +pub(crate) mod gen { + use super::{ + ChangeHash, Counter, ElemId, Key, ObjType, Op, OpId, OpIds, OpType, ScalarValue, HASH_SIZE, + }; + use proptest::prelude::*; + + pub(crate) fn gen_hash() -> impl Strategy { + proptest::collection::vec(proptest::bits::u8::ANY, HASH_SIZE) + .prop_map(|b| ChangeHash::try_from(&b[..]).unwrap()) + } + + pub(crate) fn gen_scalar_value() -> impl Strategy { + prop_oneof![ + proptest::collection::vec(proptest::bits::u8::ANY, 0..200).prop_map(ScalarValue::Bytes), + "[a-z]{10,500}".prop_map(|s| ScalarValue::Str(s.into())), + any::().prop_map(ScalarValue::Int), + any::().prop_map(ScalarValue::Uint), + any::().prop_map(ScalarValue::F64), + any::().prop_map(|c| ScalarValue::Counter(Counter::from(c))), + any::().prop_map(ScalarValue::Timestamp), + any::().prop_map(ScalarValue::Boolean), + Just(ScalarValue::Null), + ] + } + + pub(crate) fn gen_objtype() -> impl Strategy { + prop_oneof![ + Just(ObjType::Map), + Just(ObjType::Table), + Just(ObjType::List), + Just(ObjType::Text), + ] + } + + pub(crate) fn gen_action() -> impl Strategy { + prop_oneof![ + Just(OpType::Delete), + any::().prop_map(OpType::Increment), + gen_scalar_value().prop_map(OpType::Put), + gen_objtype().prop_map(OpType::Make) + ] + } + + pub(crate) fn gen_key(key_indices: Vec) -> impl Strategy { + prop_oneof![ + proptest::sample::select(key_indices).prop_map(Key::Map), + Just(Key::Seq(ElemId(OpId::new(0, 0)))), + ] + } + + /// Generate an arbitrary op + /// + /// The generated op will have no preds or succs + /// + /// # Arguments + /// + /// * `id` - the OpId this op will be given + /// * `key_prop_indices` - The indices of props which will be used to generate keys of type + /// `Key::Map`. I.e. 
this is what would typically be in `OpSetMetadata::props + pub(crate) fn gen_op(id: OpId, key_prop_indices: Vec) -> impl Strategy { + (gen_key(key_prop_indices), any::(), gen_action()).prop_map( + move |(key, insert, action)| Op { + id, + key, + insert, + action, + succ: OpIds::empty(), + pred: OpIds::empty(), + }, + ) + } +} From a9e23308ce6acd69c22eb149db2d03bb858fd970 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 21 Sep 2022 22:50:39 +0100 Subject: [PATCH 571/730] Remove async automerge-wasm wrapper By moving to wasm-bindgens `bundler` target rather than using the `web` target we remove the need for an async initialization step on the automerge-wasm package. This means that the automerge-js package can now depend directly on automerge-wasm and perform initialization itself, thus making automerge-js a drop in replacement for the `automerge` JS package (hopefully). We bump the versions of automerge-wasm --- automerge-js/package.json | 3 +- automerge-js/src/counter.ts | 2 +- automerge-js/src/index.ts | 12 +- automerge-js/src/low_level.ts | 24 ++-- automerge-js/src/proxies.ts | 4 +- automerge-js/src/text.ts | 2 +- automerge-js/test/basic_test.ts | 14 +- automerge-js/test/columnar_test.ts | 3 - automerge-js/test/legacy_tests.ts | 3 - automerge-js/test/sync_test.ts | 3 - automerge-js/test/text_test.ts | 3 - automerge-js/test/uuid_test.ts | 3 - automerge-wasm/index.d.ts | 207 +++++++++++++++++++++++++++- automerge-wasm/nodejs-index.js | 5 - automerge-wasm/package.json | 23 ++-- automerge-wasm/src/lib.rs | 2 +- automerge-wasm/test/readme.ts | 10 +- automerge-wasm/test/test.ts | 5 +- automerge-wasm/types/LICENSE | 10 -- automerge-wasm/types/index.d.ts | 209 ----------------------------- automerge-wasm/types/package.json | 18 --- automerge-wasm/web-index.js | 49 ------- 22 files changed, 259 insertions(+), 355 deletions(-) delete mode 100644 automerge-wasm/nodejs-index.js delete mode 100644 automerge-wasm/types/LICENSE delete mode 100644 
automerge-wasm/types/index.d.ts delete mode 100644 automerge-wasm/types/package.json delete mode 100644 automerge-wasm/web-index.js diff --git a/automerge-js/package.json b/automerge-js/package.json index 228d94b8..5b7c9842 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -47,7 +47,6 @@ "@types/uuid": "^8.3.4", "@typescript-eslint/eslint-plugin": "^5.25.0", "@typescript-eslint/parser": "^5.25.0", - "automerge-wasm": "^0.1.6", "eslint": "^8.15.0", "fast-sha256": "^1.3.0", "mocha": "^10.0.0", @@ -56,7 +55,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "automerge-types": "0.1.5", + "automerge-wasm": "0.1.7", "uuid": "^8.3" } } diff --git a/automerge-js/src/counter.ts b/automerge-js/src/counter.ts index 1a810e23..bd096441 100644 --- a/automerge-js/src/counter.ts +++ b/automerge-js/src/counter.ts @@ -1,4 +1,4 @@ -import { Automerge, ObjID, Prop } from "automerge-types" +import { Automerge, ObjID, Prop } from "automerge-wasm" import { COUNTER } from "./constants" /** * The most basic CRDT: an integer value that can be changed only by diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 109b093c..4239b65a 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -7,11 +7,11 @@ import { STATE, HEADS, TRACE, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" import { AutomergeValue, Counter } from "./types" export { AutomergeValue, Text, Counter, Int, Uint, Float64 } from "./types" -import { API } from "automerge-types"; +import { API } from "automerge-wasm"; import { ApiHandler, UseApi } from "./low_level" -import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "automerge-types" -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-types" +import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "automerge-wasm" +import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } 
from "automerge-wasm" export type ChangeOptions = { message?: string, time?: number } @@ -24,10 +24,14 @@ export interface State { snapshot: T } + export function use(api: API) { UseApi(api) } +import * as wasm from "automerge-wasm" +use(wasm) + export function getBackend(doc: Doc) : Automerge { return _state(doc) } @@ -87,7 +91,7 @@ export function free(doc: Doc) { return _state(doc).free() } -export function from(initialState: T | Doc, actor?: ActorId): Doc { +export function from(initialState: T | Doc, actor?: ActorId): Doc { return change(init(actor), (d) => Object.assign(d, initialState)) } diff --git a/automerge-js/src/low_level.ts b/automerge-js/src/low_level.ts index cf0695d9..44b310bb 100644 --- a/automerge-js/src/low_level.ts +++ b/automerge-js/src/low_level.ts @@ -1,6 +1,6 @@ -import { Automerge, Change, DecodedChange, Actor, SyncState, SyncMessage, JsSyncState, DecodedSyncMessage } from "automerge-types" -import { API } from "automerge-types" +import { Automerge, Change, DecodedChange, Actor, SyncState, SyncMessage, JsSyncState, DecodedSyncMessage } from "automerge-wasm" +import { API } from "automerge-wasm" export function UseApi(api: API) { for (const k in api) { @@ -11,15 +11,15 @@ export function UseApi(api: API) { /* eslint-disable */ export const ApiHandler : API = { create(actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, - load(data: Uint8Array, actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, - encodeChange(change: DecodedChange): Change { throw new RangeError("Automerge.use() not called") }, - decodeChange(change: Change): DecodedChange { throw new RangeError("Automerge.use() not called") }, - initSyncState(): SyncState { throw new RangeError("Automerge.use() not called") }, - encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { throw new RangeError("Automerge.use() not called") }, - decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage { throw new 
RangeError("Automerge.use() not called") }, - encodeSyncState(state: SyncState): Uint8Array { throw new RangeError("Automerge.use() not called") }, - decodeSyncState(data: Uint8Array): SyncState { throw new RangeError("Automerge.use() not called") }, - exportSyncState(state: SyncState): JsSyncState { throw new RangeError("Automerge.use() not called") }, - importSyncState(state: JsSyncState): SyncState { throw new RangeError("Automerge.use() not called") }, + load(data: Uint8Array, actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called (load)") }, + encodeChange(change: DecodedChange): Change { throw new RangeError("Automerge.use() not called (encodeChange)") }, + decodeChange(change: Change): DecodedChange { throw new RangeError("Automerge.use() not called (decodeChange)") }, + initSyncState(): SyncState { throw new RangeError("Automerge.use() not called (initSyncState)") }, + encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { throw new RangeError("Automerge.use() not called (encodeSyncMessage)") }, + decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage { throw new RangeError("Automerge.use() not called (decodeSyncMessage)") }, + encodeSyncState(state: SyncState): Uint8Array { throw new RangeError("Automerge.use() not called (encodeSyncState)") }, + decodeSyncState(data: Uint8Array): SyncState { throw new RangeError("Automerge.use() not called (decodeSyncState)") }, + exportSyncState(state: SyncState): JsSyncState { throw new RangeError("Automerge.use() not called (exportSyncState)") }, + importSyncState(state: JsSyncState): SyncState { throw new RangeError("Automerge.use() not called (importSyncState)") }, } /* eslint-enable */ diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index f202b116..dc8d6f00 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -1,6 +1,6 @@ -import { Automerge, Heads, ObjID } from "automerge-types" -import { Prop } from "automerge-types" +import { Automerge, 
Heads, ObjID } from "automerge-wasm" +import { Prop } from "automerge-wasm" import { AutomergeValue, ScalarValue, MapValue, ListValue, TextValue } from "./types" import { Int, Uint, Float64 } from "./numbers" import { Counter, getWriteableCounter } from "./counter" diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index d93cd061..f2aecabb 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -1,4 +1,4 @@ -import { Value } from "automerge-types" +import { Value } from "automerge-wasm" import { TEXT } from "./constants" export class Text { diff --git a/automerge-js/test/basic_test.ts b/automerge-js/test/basic_test.ts index d2e98939..6f819ca9 100644 --- a/automerge-js/test/basic_test.ts +++ b/automerge-js/test/basic_test.ts @@ -2,9 +2,6 @@ import * as tt from "automerge-types" import * as assert from 'assert' import * as util from 'util' import * as Automerge from '../src' -import * as AutomergeWASM from "automerge-wasm" - -Automerge.use(AutomergeWASM) describe('Automerge', () => { describe('basics', () => { @@ -175,4 +172,15 @@ describe('Automerge', () => { console.log(doc.text.indexOf("world")) }) }) + + it('should obtain the same conflicts, regardless of merge order', () => { + let s1 = Automerge.init() + let s2 = Automerge.init() + s1 = Automerge.change(s1, doc => { doc.x = 1; doc.y = 2 }) + s2 = Automerge.change(s2, doc => { doc.x = 3; doc.y = 4 }) + const m1 = Automerge.merge(Automerge.clone(s1), Automerge.clone(s2)) + const m2 = Automerge.merge(Automerge.clone(s2), Automerge.clone(s1)) + assert.deepStrictEqual(Automerge.getConflicts(m1, 'x'), Automerge.getConflicts(m2, 'x')) + }) }) + diff --git a/automerge-js/test/columnar_test.ts b/automerge-js/test/columnar_test.ts index fc01741b..ca670377 100644 --- a/automerge-js/test/columnar_test.ts +++ b/automerge-js/test/columnar_test.ts @@ -2,9 +2,6 @@ import * as assert from 'assert' import { checkEncoded } from './helpers' import * as Automerge from '../src' import { encodeChange, 
decodeChange } from '../src' -import * as AutomergeWASM from "automerge-wasm" - -Automerge.use(AutomergeWASM) describe('change encoding', () => { it('should encode text edits', () => { diff --git a/automerge-js/test/legacy_tests.ts b/automerge-js/test/legacy_tests.ts index 50cecbc4..4b53ff98 100644 --- a/automerge-js/test/legacy_tests.ts +++ b/automerge-js/test/legacy_tests.ts @@ -2,9 +2,6 @@ import * as assert from 'assert' import * as Automerge from '../src' import { assertEqualsOneOf } from './helpers' import { decodeChange } from './legacy/columnar' -import * as AutomergeWASM from "automerge-wasm" - -Automerge.use(AutomergeWASM) const UUID_PATTERN = /^[0-9a-f]{32}$/ const OPID_PATTERN = /^[0-9]+@[0-9a-f]{32}$/ diff --git a/automerge-js/test/sync_test.ts b/automerge-js/test/sync_test.ts index 7b1e52ef..13641e80 100644 --- a/automerge-js/test/sync_test.ts +++ b/automerge-js/test/sync_test.ts @@ -3,9 +3,6 @@ import * as Automerge from '../src' import { BloomFilter } from './legacy/sync' import { decodeChangeMeta } from './legacy/columnar' import { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } from "../src" -import * as AutomergeWASM from "automerge-wasm" - -Automerge.use(AutomergeWASM) function inspect(a) { const util = require("util"); diff --git a/automerge-js/test/text_test.ts b/automerge-js/test/text_test.ts index e55287ce..c2ef348d 100644 --- a/automerge-js/test/text_test.ts +++ b/automerge-js/test/text_test.ts @@ -1,9 +1,6 @@ import * as assert from 'assert' import * as Automerge from '../src' import { assertEqualsOneOf } from './helpers' -import * as AutomergeWASM from "automerge-wasm" - -Automerge.use(AutomergeWASM) function attributeStateToAttributes(accumulatedAttributes) { const attributes = {} diff --git a/automerge-js/test/uuid_test.ts b/automerge-js/test/uuid_test.ts index 1bed4f49..4182a8c4 100644 --- a/automerge-js/test/uuid_test.ts +++ b/automerge-js/test/uuid_test.ts @@ -1,8 +1,5 @@ import * as assert 
from 'assert' import * as Automerge from '../src' -import * as AutomergeWASM from "automerge-wasm" - -Automerge.use(AutomergeWASM) const uuid = Automerge.uuid diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index d515b3c7..f94f35c3 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -1,2 +1,205 @@ -export * from "automerge-types" -export { default } from "automerge-types" +export type Actor = string; +export type ObjID = string; +export type Change = Uint8Array; +export type SyncMessage = Uint8Array; +export type Prop = string | number; +export type Hash = string; +export type Heads = Hash[]; +export type Value = string | number | boolean | null | Date | Uint8Array +export type MaterializeValue = { [key:string]: MaterializeValue } | Array | Value +export type ObjType = string | Array | { [key: string]: ObjType | Value } +export type FullValue = + ["str", string] | + ["int", number] | + ["uint", number] | + ["f64", number] | + ["boolean", boolean] | + ["timestamp", Date] | + ["counter", number] | + ["bytes", Uint8Array] | + ["null", null] | + ["map", ObjID] | + ["list", ObjID] | + ["text", ObjID] | + ["table", ObjID] + +export type FullValueWithId = + ["str", string, ObjID ] | + ["int", number, ObjID ] | + ["uint", number, ObjID ] | + ["f64", number, ObjID ] | + ["boolean", boolean, ObjID ] | + ["timestamp", Date, ObjID ] | + ["counter", number, ObjID ] | + ["bytes", Uint8Array, ObjID ] | + ["null", null, ObjID ] | + ["map", ObjID ] | + ["list", ObjID] | + ["text", ObjID] | + ["table", ObjID] + +export enum ObjTypeName { + list = "list", + map = "map", + table = "table", + text = "text", +} + +export type Datatype = + "boolean" | + "str" | + "int" | + "uint" | + "f64" | + "null" | + "timestamp" | + "counter" | + "bytes" | + "map" | + "text" | + "list"; + +export type SyncHave = { + lastSync: Heads, + bloom: Uint8Array, +} + +export type DecodedSyncMessage = { + heads: Heads, + need: Heads, + have: SyncHave[] + changes: 
Change[] +} + +export type DecodedChange = { + actor: Actor, + seq: number + startOp: number, + time: number, + message: string | null, + deps: Heads, + hash: Hash, + ops: Op[] +} + +export type Op = { + action: string, + obj: ObjID, + key: string, + value?: string | number | boolean, + datatype?: string, + pred: string[], +} + +export type Patch = { + obj: ObjID + action: 'assign' | 'insert' | 'delete' + key: Prop + value: Value + datatype: Datatype + conflict: boolean +} + +export function create(actor?: Actor): Automerge; +export function load(data: Uint8Array, actor?: Actor): Automerge; +export function encodeChange(change: DecodedChange): Change; +export function decodeChange(change: Change): DecodedChange; +export function initSyncState(): SyncState; +export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; +export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; +export function encodeSyncState(state: SyncState): Uint8Array; +export function decodeSyncState(data: Uint8Array): SyncState; +export function exportSyncState(state: SyncState): JsSyncState; +export function importSyncState(state: JsSyncState): SyncState; + +export class API { + create(actor?: Actor): Automerge; + load(data: Uint8Array, actor?: Actor): Automerge; + encodeChange(change: DecodedChange): Change; + decodeChange(change: Change): DecodedChange; + initSyncState(): SyncState; + encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; + decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; + encodeSyncState(state: SyncState): Uint8Array; + decodeSyncState(data: Uint8Array): SyncState; + exportSyncState(state: SyncState): JsSyncState; + importSyncState(state: JsSyncState): SyncState; +} + +export class Automerge { + // change state + put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): void; + putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID; + insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): void; + 
insertObject(obj: ObjID, index: number, value: ObjType): ObjID; + push(obj: ObjID, value: Value, datatype?: Datatype): void; + pushObject(obj: ObjID, value: ObjType): ObjID; + splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; + increment(obj: ObjID, prop: Prop, value: number): void; + delete(obj: ObjID, prop: Prop): void; + + // returns a single value - if there is a conflict return the winner + get(obj: ObjID, prop: Prop, heads?: Heads): Value | undefined; + getWithType(obj: ObjID, prop: Prop, heads?: Heads): FullValue | null; + // return all values in case of a conflict + getAll(obj: ObjID, arg: Prop, heads?: Heads): FullValueWithId[]; + keys(obj: ObjID, heads?: Heads): string[]; + text(obj: ObjID, heads?: Heads): string; + length(obj: ObjID, heads?: Heads): number; + materialize(obj?: ObjID, heads?: Heads): MaterializeValue; + + // transactions + commit(message?: string, time?: number): Hash; + merge(other: Automerge): Heads; + getActorId(): Actor; + pendingOps(): number; + rollback(): number; + + // patches + enablePatches(enable: boolean): void; + popPatches(): Patch[]; + + // save and load to local store + save(): Uint8Array; + saveIncremental(): Uint8Array; + loadIncremental(data: Uint8Array): number; + + // sync over network + receiveSyncMessage(state: SyncState, message: SyncMessage): void; + generateSyncMessage(state: SyncState): SyncMessage | null; + + // low level change functions + applyChanges(changes: Change[]): void; + getChanges(have_deps: Heads): Change[]; + getChangeByHash(hash: Hash): Change | null; + getChangesAdded(other: Automerge): Change[]; + getHeads(): Heads; + getLastLocalChange(): Change | null; + getMissingDeps(heads?: Heads): Heads; + + // memory management + free(): void; + clone(actor?: string): Automerge; + fork(actor?: string): Automerge; + forkAt(heads: Heads, actor?: string): Automerge; + + // dump internal state to console.log + dump(): void; +} + +export class JsSyncState { + 
sharedHeads: Heads; + lastSentHeads: Heads; + theirHeads: Heads | undefined; + theirHeed: Heads | undefined; + theirHave: SyncHave[] | undefined; + sentHashes: Heads; +} + +export class SyncState { + free(): void; + clone(): SyncState; + lastSentHeads: Heads; + sentHashes: Heads; + readonly sharedHeads: Heads; +} diff --git a/automerge-wasm/nodejs-index.js b/automerge-wasm/nodejs-index.js deleted file mode 100644 index 4a42f201..00000000 --- a/automerge-wasm/nodejs-index.js +++ /dev/null @@ -1,5 +0,0 @@ -let wasm = require("./bindgen") -module.exports = wasm -module.exports.load = module.exports.loadDoc -delete module.exports.loadDoc -module.exports.init = () => (new Promise((resolve,reject) => { resolve(module.exports) })) diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 0410dd52..36e03e09 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -8,29 +8,29 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.6", + "version": "0.1.7", "license": "MIT", "files": [ "README.md", "LICENSE", "package.json", "index.d.ts", - "nodejs/index.js", "nodejs/bindgen.js", "nodejs/bindgen_bg.wasm", - "web/index.js", - "web/bindgen.js", - "web/bindgen_bg.wasm" + "bundler/bindgen.js", + "bundler/bindgen_bg.js", + "bundler/bindgen_bg.wasm" ], "types": "index.d.ts", - "module": "./web/index.js", - "main": "./nodejs/index.js", + "module": "./bundler/bindgen.js", + "main": "./nodejs/bindgen.js", "scripts": { "lint": "eslint test/*.ts", "build": "cross-env PROFILE=dev TARGET=nodejs FEATURES='' yarn target", + "debug": "cross-env PROFILE=dev yarn buildall", "release": "cross-env PROFILE=release yarn buildall", - "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=web yarn target", - "target": "rimraf ./$TARGET && wasm-pack build --target $TARGET 
--$PROFILE --out-name bindgen -d $TARGET -- $FEATURES && cp $TARGET-index.js $TARGET/index.js", + "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=bundler yarn target", + "target": "rimraf ./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET -- $FEATURES", "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, "devDependencies": { @@ -50,7 +50,8 @@ "ts-mocha": "^9.0.2", "typescript": "^4.6.4" }, - "dependencies": { - "automerge-types": "0.1.5" + "exports": { + "browser": "./bundler/bindgen.js", + "require": "./nodejs/bindgen.js" } } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 4dfadced..0eb8c256 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -853,7 +853,7 @@ pub fn init(actor: Option) -> Result { Automerge::new(actor) } -#[wasm_bindgen(js_name = loadDoc)] +#[wasm_bindgen(js_name = load)] pub fn load(data: Uint8Array, actor: Option) -> Result { let data = data.to_vec(); let observer = None; diff --git a/automerge-wasm/test/readme.ts b/automerge-wasm/test/readme.ts index 5dcff10e..de22d495 100644 --- a/automerge-wasm/test/readme.ts +++ b/automerge-wasm/test/readme.ts @@ -1,7 +1,7 @@ import { describe, it } from 'mocha'; import * as assert from 'assert' //@ts-ignore -import { init, create, load } from '..' +import { create, load } from '..' 
describe('Automerge', () => { describe('Readme Examples', () => { @@ -10,11 +10,9 @@ describe('Automerge', () => { doc.free() }) it('Using the Library and Creating a Document (2)', (done) => { - init().then((_:any) => { - const doc = create() - doc.free() - done() - }) + const doc = create() + doc.free() + done() }) it('Automerge Scalar Types (1)', () => { const doc = create() diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 7c573061..00dedeed 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -3,7 +3,7 @@ import { describe, it } from 'mocha'; import assert from 'assert' //@ts-ignore import { BloomFilter } from './helpers/sync' -import { init, create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' +import { create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' 
import { DecodedSyncMessage, Hash } from '..'; function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { @@ -28,9 +28,6 @@ function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncSta describe('Automerge', () => { describe('basics', () => { - it('default import init() should return a promise', () => { - assert(init() instanceof Promise) - }) it('should create, clone and free', () => { const doc1 = create() diff --git a/automerge-wasm/types/LICENSE b/automerge-wasm/types/LICENSE deleted file mode 100644 index 63b21502..00000000 --- a/automerge-wasm/types/LICENSE +++ /dev/null @@ -1,10 +0,0 @@ -MIT License - -Copyright 2022, Ink & Switch LLC - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- diff --git a/automerge-wasm/types/index.d.ts b/automerge-wasm/types/index.d.ts deleted file mode 100644 index ea57f9c2..00000000 --- a/automerge-wasm/types/index.d.ts +++ /dev/null @@ -1,209 +0,0 @@ - -export type Actor = string; -export type ObjID = string; -export type Change = Uint8Array; -export type SyncMessage = Uint8Array; -export type Prop = string | number; -export type Hash = string; -export type Heads = Hash[]; -export type Value = string | number | boolean | null | Date | Uint8Array -export type MaterializeValue = { [key:string]: MaterializeValue } | Array | Value -export type ObjType = string | Array | { [key: string]: ObjType | Value } -export type FullValue = - ["str", string] | - ["int", number] | - ["uint", number] | - ["f64", number] | - ["boolean", boolean] | - ["timestamp", Date] | - ["counter", number] | - ["bytes", Uint8Array] | - ["null", null] | - ["map", ObjID] | - ["list", ObjID] | - ["text", ObjID] | - ["table", ObjID] - -export type FullValueWithId = - ["str", string, ObjID ] | - ["int", number, ObjID ] | - ["uint", number, ObjID ] | - ["f64", number, ObjID ] | - ["boolean", boolean, ObjID ] | - ["timestamp", Date, ObjID ] | - ["counter", number, ObjID ] | - ["bytes", Uint8Array, ObjID ] | - ["null", null, ObjID ] | - ["map", ObjID ] | - ["list", ObjID] | - ["text", ObjID] | - ["table", ObjID] - -export enum ObjTypeName { - list = "list", - map = "map", - table = "table", - text = "text", -} - -export type Datatype = - "boolean" | - "str" | - "int" | - "uint" | - "f64" | - "null" | - "timestamp" | - "counter" | - "bytes" | - "map" | - "text" | - "list"; - -export type SyncHave = { - lastSync: Heads, - bloom: Uint8Array, -} - -export type DecodedSyncMessage = { - heads: Heads, - need: Heads, - have: SyncHave[] - changes: Change[] -} - -export type DecodedChange = { - actor: Actor, - seq: number - startOp: number, - time: number, - message: string | null, - deps: Heads, - hash: Hash, - ops: Op[] -} - -export type Op = { - action: string, 
- obj: ObjID, - key: string, - value?: string | number | boolean, - datatype?: string, - pred: string[], -} - -export type Patch = { - obj: ObjID - action: 'assign' | 'insert' | 'delete' - key: Prop - value: Value - datatype: Datatype - conflict: boolean -} - -export function create(actor?: Actor): Automerge; -export function load(data: Uint8Array, actor?: Actor): Automerge; -export function encodeChange(change: DecodedChange): Change; -export function decodeChange(change: Change): DecodedChange; -export function initSyncState(): SyncState; -export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; -export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; -export function encodeSyncState(state: SyncState): Uint8Array; -export function decodeSyncState(data: Uint8Array): SyncState; -export function exportSyncState(state: SyncState): JsSyncState; -export function importSyncState(state: JsSyncState): SyncState; - -export class API { - create(actor?: Actor): Automerge; - load(data: Uint8Array, actor?: Actor): Automerge; - encodeChange(change: DecodedChange): Change; - decodeChange(change: Change): DecodedChange; - initSyncState(): SyncState; - encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; - decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; - encodeSyncState(state: SyncState): Uint8Array; - decodeSyncState(data: Uint8Array): SyncState; - exportSyncState(state: SyncState): JsSyncState; - importSyncState(state: JsSyncState): SyncState; -} - -export class Automerge { - // change state - put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): void; - putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID; - insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): void; - insertObject(obj: ObjID, index: number, value: ObjType): ObjID; - push(obj: ObjID, value: Value, datatype?: Datatype): void; - pushObject(obj: ObjID, value: ObjType): ObjID; - splice(obj: ObjID, start: number, delete_count: 
number, text?: string | Array): ObjID[] | undefined; - increment(obj: ObjID, prop: Prop, value: number): void; - delete(obj: ObjID, prop: Prop): void; - - // returns a single value - if there is a conflict return the winner - get(obj: ObjID, prop: Prop, heads?: Heads): Value | undefined; - getWithType(obj: ObjID, prop: Prop, heads?: Heads): FullValue | null; - // return all values in case of a conflict - getAll(obj: ObjID, arg: Prop, heads?: Heads): FullValueWithId[]; - keys(obj: ObjID, heads?: Heads): string[]; - text(obj: ObjID, heads?: Heads): string; - length(obj: ObjID, heads?: Heads): number; - materialize(obj?: ObjID, heads?: Heads): MaterializeValue; - - // transactions - commit(message?: string, time?: number): Hash; - merge(other: Automerge): Heads; - getActorId(): Actor; - pendingOps(): number; - rollback(): number; - - // patches - enablePatches(enable: boolean): void; - popPatches(): Patch[]; - - // save and load to local store - save(): Uint8Array; - saveIncremental(): Uint8Array; - loadIncremental(data: Uint8Array): number; - - // sync over network - receiveSyncMessage(state: SyncState, message: SyncMessage): void; - generateSyncMessage(state: SyncState): SyncMessage | null; - - // low level change functions - applyChanges(changes: Change[]): void; - getChanges(have_deps: Heads): Change[]; - getChangeByHash(hash: Hash): Change | null; - getChangesAdded(other: Automerge): Change[]; - getHeads(): Heads; - getLastLocalChange(): Change | null; - getMissingDeps(heads?: Heads): Heads; - - // memory management - free(): void; - clone(actor?: string): Automerge; - fork(actor?: string): Automerge; - forkAt(heads: Heads, actor?: string): Automerge; - - // dump internal state to console.log - dump(): void; -} - -export class JsSyncState { - sharedHeads: Heads; - lastSentHeads: Heads; - theirHeads: Heads | undefined; - theirHeed: Heads | undefined; - theirHave: SyncHave[] | undefined; - sentHashes: Heads; -} - -export class SyncState { - free(): void; - clone(): 
SyncState; - lastSentHeads: Heads; - sentHashes: Heads; - readonly sharedHeads: Heads; -} - -export function init (): Promise; - diff --git a/automerge-wasm/types/package.json b/automerge-wasm/types/package.json deleted file mode 100644 index 7b6852ae..00000000 --- a/automerge-wasm/types/package.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "collaborators": [ - "Orion Henry " - ], - "name": "automerge-types", - "description": "typescript types for low level automerge api", - "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", - "repository": "github:automerge/automerge-rs", - "version": "0.1.5", - "license": "MIT", - "files": [ - "LICENSE", - "package.json", - "index.d.ts" - ], - "types": "index.d.ts", - "main": "" -} diff --git a/automerge-wasm/web-index.js b/automerge-wasm/web-index.js deleted file mode 100644 index 9bbe47df..00000000 --- a/automerge-wasm/web-index.js +++ /dev/null @@ -1,49 +0,0 @@ -export { - loadDoc as load, - create, - encodeChange, - decodeChange, - initSyncState, - encodeSyncMessage, - decodeSyncMessage, - encodeSyncState, - decodeSyncState, - exportSyncState, - importSyncState, -} from "./bindgen.js" -import { - loadDoc as load, - create, - encodeChange, - decodeChange, - initSyncState, - encodeSyncMessage, - decodeSyncMessage, - encodeSyncState, - decodeSyncState, - exportSyncState, - importSyncState, -} from "./bindgen.js" - -let api = { - load, - create, - encodeChange, - decodeChange, - initSyncState, - encodeSyncMessage, - decodeSyncMessage, - encodeSyncState, - decodeSyncState, - exportSyncState, - importSyncState -} - -import wasm_init from "./bindgen.js" - -export function init() { - return new Promise((resolve,reject) => wasm_init().then(() => { - resolve({ ... 
api, load, create }) - })) -} - From 8557ce0b6939e90c360abaf2a2d578686c06aac4 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 21 Sep 2022 22:53:00 +0100 Subject: [PATCH 572/730] Rename automerge-js to automerge Now that automerge-js is ready to go we rename it to `automerge-js` and set the version to `2.0.0-alpha.1` --- automerge-js/package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automerge-js/package.json b/automerge-js/package.json index 5b7c9842..96e8e534 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -1,10 +1,10 @@ { - "name": "automerge-js", + "name": "automerge", "collaborators": [ "Orion Henry ", "Martin Kleppmann" ], - "version": "0.1.12", + "version": "2.0.0-alpha.1", "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", From 7825da3ab9b9e70073293bc45eed35470757c4e2 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 21 Sep 2022 22:54:31 +0100 Subject: [PATCH 573/730] Add examples of using automerge with bundlers --- .../examples/create-react-app/.gitignore | 1 + .../examples/create-react-app/README.md | 59 + .../examples/create-react-app/craco.config.js | 5 + .../examples/create-react-app/package.json | 41 + .../create-react-app/public/favicon.ico | Bin 0 -> 3870 bytes .../create-react-app/public/index.html | 43 + .../create-react-app/public/logo192.png | Bin 0 -> 5347 bytes .../create-react-app/public/logo512.png | Bin 0 -> 9664 bytes .../create-react-app/public/manifest.json | 25 + .../create-react-app/public/robots.txt | 3 + .../examples/create-react-app/src/App.css | 38 + .../examples/create-react-app/src/App.js | 21 + .../examples/create-react-app/src/App.test.js | 8 + .../examples/create-react-app/src/index.css | 13 + .../examples/create-react-app/src/index.js | 17 + .../examples/create-react-app/src/logo.svg | 1 + 
.../create-react-app/src/reportWebVitals.js | 13 + .../create-react-app/src/setupTests.js | 5 + .../examples/create-react-app/yarn.lock | 9120 +++++++++++++++++ automerge-js/examples/vite/.gitignore | 2 + automerge-js/examples/vite/README.md | 47 + automerge-js/examples/vite/index.html | 13 + automerge-js/examples/vite/main.ts | 15 + automerge-js/examples/vite/package.json | 20 + automerge-js/examples/vite/public/vite.svg | 1 + automerge-js/examples/vite/src/counter.ts | 9 + automerge-js/examples/vite/src/main.ts | 18 + automerge-js/examples/vite/src/style.css | 97 + automerge-js/examples/vite/src/typescript.svg | 1 + automerge-js/examples/vite/src/vite-env.d.ts | 1 + automerge-js/examples/vite/tsconfig.json | 20 + automerge-js/examples/vite/vite.config.js | 15 + automerge-js/examples/webpack/README.md | 37 + automerge-js/examples/webpack/package.json | 4 +- automerge-js/examples/webpack/src/index.js | 30 +- .../examples/webpack/webpack.config.js | 1 + 36 files changed, 9725 insertions(+), 19 deletions(-) create mode 100644 automerge-js/examples/create-react-app/.gitignore create mode 100644 automerge-js/examples/create-react-app/README.md create mode 100644 automerge-js/examples/create-react-app/craco.config.js create mode 100644 automerge-js/examples/create-react-app/package.json create mode 100644 automerge-js/examples/create-react-app/public/favicon.ico create mode 100644 automerge-js/examples/create-react-app/public/index.html create mode 100644 automerge-js/examples/create-react-app/public/logo192.png create mode 100644 automerge-js/examples/create-react-app/public/logo512.png create mode 100644 automerge-js/examples/create-react-app/public/manifest.json create mode 100644 automerge-js/examples/create-react-app/public/robots.txt create mode 100644 automerge-js/examples/create-react-app/src/App.css create mode 100644 automerge-js/examples/create-react-app/src/App.js create mode 100644 automerge-js/examples/create-react-app/src/App.test.js create mode 100644 
automerge-js/examples/create-react-app/src/index.css create mode 100644 automerge-js/examples/create-react-app/src/index.js create mode 100644 automerge-js/examples/create-react-app/src/logo.svg create mode 100644 automerge-js/examples/create-react-app/src/reportWebVitals.js create mode 100644 automerge-js/examples/create-react-app/src/setupTests.js create mode 100644 automerge-js/examples/create-react-app/yarn.lock create mode 100644 automerge-js/examples/vite/.gitignore create mode 100644 automerge-js/examples/vite/README.md create mode 100644 automerge-js/examples/vite/index.html create mode 100644 automerge-js/examples/vite/main.ts create mode 100644 automerge-js/examples/vite/package.json create mode 100644 automerge-js/examples/vite/public/vite.svg create mode 100644 automerge-js/examples/vite/src/counter.ts create mode 100644 automerge-js/examples/vite/src/main.ts create mode 100644 automerge-js/examples/vite/src/style.css create mode 100644 automerge-js/examples/vite/src/typescript.svg create mode 100644 automerge-js/examples/vite/src/vite-env.d.ts create mode 100644 automerge-js/examples/vite/tsconfig.json create mode 100644 automerge-js/examples/vite/vite.config.js create mode 100644 automerge-js/examples/webpack/README.md diff --git a/automerge-js/examples/create-react-app/.gitignore b/automerge-js/examples/create-react-app/.gitignore new file mode 100644 index 00000000..c2658d7d --- /dev/null +++ b/automerge-js/examples/create-react-app/.gitignore @@ -0,0 +1 @@ +node_modules/ diff --git a/automerge-js/examples/create-react-app/README.md b/automerge-js/examples/create-react-app/README.md new file mode 100644 index 00000000..dc894080 --- /dev/null +++ b/automerge-js/examples/create-react-app/README.md @@ -0,0 +1,59 @@ +# Automerge + `create-react-app` + +This is a little fiddly to get working. 
The problem is that `create-react-app` +hard codes a webpack configuration which does not support WASM modules, which we +require in order to bundle the WASM implementation of automerge. To get around +this we use [`craco`](https://github.com/dilanx/craco) which does some monkey +patching to allow us to modify the webpack config that `create-react-app` +bundles. Then we use a craco plugin called +[`craco-wasm`](https://www.npmjs.com/package/craco-wasm) to perform the +necessary modifications to the webpack config. It should be noted that this is +all quite fragile and ideally you probably don't want to use `create-react-app` +to do this in production. + +## Setup + +Assuming you have already run `create-react-app` and your working directory is +the project. + +### Install craco and craco-wasm + +```bash +yarn add craco craco-wasm +``` + +### Modify `package.json` to use `craco` for scripts + +In `package.json` the `scripts` section will look like this: + +```json + "scripts": { + "start": "craco start", + "build": "craco build", + "test": "craco test", + "eject": "craco eject" + }, +``` + +Replace that section with: + +```json + "scripts": { + "start": "craco start", + "build": "craco build", + "test": "craco test", + "eject": "craco eject" + }, +``` + +### Create `craco.config.js` + +In the root of the project add the following contents to `craco.config.js` + +```javascript +const cracoWasm = require("craco-wasm") + +module.exports = { + plugins: [cracoWasm()] +} +``` diff --git a/automerge-js/examples/create-react-app/craco.config.js b/automerge-js/examples/create-react-app/craco.config.js new file mode 100644 index 00000000..ad806e67 --- /dev/null +++ b/automerge-js/examples/create-react-app/craco.config.js @@ -0,0 +1,5 @@ +const cracoWasm = require("craco-wasm") + +module.exports = { + plugins: [cracoWasm()] +} diff --git a/automerge-js/examples/create-react-app/package.json b/automerge-js/examples/create-react-app/package.json new file mode 100644 index 
00000000..d11491c5 --- /dev/null +++ b/automerge-js/examples/create-react-app/package.json @@ -0,0 +1,41 @@ +{ + "name": "automerge-create-react-app", + "version": "0.1.0", + "private": true, + "dependencies": { + "@craco/craco": "^7.0.0-alpha.8", + "craco-wasm": "0.0.1", + "@testing-library/jest-dom": "^5.16.5", + "@testing-library/react": "^13.4.0", + "@testing-library/user-event": "^13.5.0", + "automerge": "2.0.0-alpha.1", + "react": "^18.2.0", + "react-dom": "^18.2.0", + "react-scripts": "5.0.1", + "web-vitals": "^2.1.4" + }, + "scripts": { + "start": "craco start", + "build": "craco build", + "test": "craco test", + "eject": "craco eject" + }, + "eslintConfig": { + "extends": [ + "react-app", + "react-app/jest" + ] + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + } +} diff --git a/automerge-js/examples/create-react-app/public/favicon.ico b/automerge-js/examples/create-react-app/public/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..a11777cc471a4344702741ab1c8a588998b1311a GIT binary patch literal 3870 zcma);c{J4h9>;%nil|2-o+rCuEF-(I%-F}ijC~o(k~HKAkr0)!FCj~d>`RtpD?8b; zXOC1OD!V*IsqUwzbMF1)-gEDD=A573Z-&G7^LoAC9|WO7Xc0Cx1g^Zu0u_SjAPB3vGa^W|sj)80f#V0@M_CAZTIO(t--xg= z!sii`1giyH7EKL_+Wi0ab<)&E_0KD!3Rp2^HNB*K2@PHCs4PWSA32*-^7d{9nH2_E zmC{C*N*)(vEF1_aMamw2A{ZH5aIDqiabnFdJ|y0%aS|64E$`s2ccV~3lR!u<){eS` z#^Mx6o(iP1Ix%4dv`t@!&Za-K@mTm#vadc{0aWDV*_%EiGK7qMC_(`exc>-$Gb9~W!w_^{*pYRm~G zBN{nA;cm^w$VWg1O^^<6vY`1XCD|s_zv*g*5&V#wv&s#h$xlUilPe4U@I&UXZbL z0)%9Uj&@yd03n;!7do+bfixH^FeZ-Ema}s;DQX2gY+7g0s(9;`8GyvPY1*vxiF&|w z>!vA~GA<~JUqH}d;DfBSi^IT*#lrzXl$fNpq0_T1tA+`A$1?(gLb?e#0>UELvljtQ zK+*74m0jn&)5yk8mLBv;=@}c{t0ztT<v;Avck$S6D`Z)^c0(jiwKhQsn|LDRY&w(Fmi91I7H6S;b0XM{e zXp0~(T@k_r-!jkLwd1_Vre^v$G4|kh4}=Gi?$AaJ)3I+^m|Zyj#*?Kp@w(lQdJZf4 
z#|IJW5z+S^e9@(6hW6N~{pj8|NO*>1)E=%?nNUAkmv~OY&ZV;m-%?pQ_11)hAr0oAwILrlsGawpxx4D43J&K=n+p3WLnlDsQ$b(9+4 z?mO^hmV^F8MV{4Lx>(Q=aHhQ1){0d*(e&s%G=i5rq3;t{JC zmgbn5Nkl)t@fPH$v;af26lyhH!k+#}_&aBK4baYPbZy$5aFx4}ka&qxl z$=Rh$W;U)>-=S-0=?7FH9dUAd2(q#4TCAHky!$^~;Dz^j|8_wuKc*YzfdAht@Q&ror?91Dm!N03=4=O!a)I*0q~p0g$Fm$pmr$ zb;wD;STDIi$@M%y1>p&_>%?UP($15gou_ue1u0!4(%81;qcIW8NyxFEvXpiJ|H4wz z*mFT(qVx1FKufG11hByuX%lPk4t#WZ{>8ka2efjY`~;AL6vWyQKpJun2nRiZYDij$ zP>4jQXPaP$UC$yIVgGa)jDV;F0l^n(V=HMRB5)20V7&r$jmk{UUIe zVjKroK}JAbD>B`2cwNQ&GDLx8{pg`7hbA~grk|W6LgiZ`8y`{Iq0i>t!3p2}MS6S+ zO_ruKyAElt)rdS>CtF7j{&6rP-#c=7evGMt7B6`7HG|-(WL`bDUAjyn+k$mx$CH;q2Dz4x;cPP$hW=`pFfLO)!jaCL@V2+F)So3}vg|%O*^T1j>C2lx zsURO-zIJC$^$g2byVbRIo^w>UxK}74^TqUiRR#7s_X$e)$6iYG1(PcW7un-va-S&u zHk9-6Zn&>T==A)lM^D~bk{&rFzCi35>UR!ZjQkdSiNX*-;l4z9j*7|q`TBl~Au`5& z+c)*8?#-tgUR$Zd%Q3bs96w6k7q@#tUn`5rj+r@_sAVVLqco|6O{ILX&U-&-cbVa3 zY?ngHR@%l{;`ri%H*0EhBWrGjv!LE4db?HEWb5mu*t@{kv|XwK8?npOshmzf=vZA@ zVSN9sL~!sn?r(AK)Q7Jk2(|M67Uy3I{eRy z_l&Y@A>;vjkWN5I2xvFFTLX0i+`{qz7C_@bo`ZUzDugfq4+>a3?1v%)O+YTd6@Ul7 zAfLfm=nhZ`)P~&v90$&UcF+yXm9sq!qCx3^9gzIcO|Y(js^Fj)Rvq>nQAHI92ap=P z10A4@prk+AGWCb`2)dQYFuR$|H6iDE8p}9a?#nV2}LBCoCf(Xi2@szia7#gY>b|l!-U`c}@ zLdhvQjc!BdLJvYvzzzngnw51yRYCqh4}$oRCy-z|v3Hc*d|?^Wj=l~18*E~*cR_kU z{XsxM1i{V*4GujHQ3DBpl2w4FgFR48Nma@HPgnyKoIEY-MqmMeY=I<%oG~l!f<+FN z1ZY^;10j4M4#HYXP zw5eJpA_y(>uLQ~OucgxDLuf}fVs272FaMxhn4xnDGIyLXnw>Xsd^J8XhcWIwIoQ9} z%FoSJTAGW(SRGwJwb=@pY7r$uQRK3Zd~XbxU)ts!4XsJrCycrWSI?e!IqwqIR8+Jh zlRjZ`UO1I!BtJR_2~7AbkbSm%XQqxEPkz6BTGWx8e}nQ=w7bZ|eVP4?*Tb!$(R)iC z9)&%bS*u(lXqzitAN)Oo=&Ytn>%Hzjc<5liuPi>zC_nw;Z0AE3Y$Jao_Q90R-gl~5 z_xAb2J%eArrC1CN4G$}-zVvCqF1;H;abAu6G*+PDHSYFx@Tdbfox*uEd3}BUyYY-l zTfEsOqsi#f9^FoLO;ChK<554qkri&Av~SIM*{fEYRE?vH7pTAOmu2pz3X?Wn*!ROX ztd54huAk&mFBemMooL33RV-*1f0Q3_(7hl$<#*|WF9P!;r;4_+X~k~uKEqdzZ$5Al zV63XN@)j$FN#cCD;ek1R#l zv%pGrhB~KWgoCj%GT?%{@@o(AJGt*PG#l3i>lhmb_twKH^EYvacVY-6bsCl5*^~L0 
zonm@lk2UvvTKr2RS%}T>^~EYqdL1q4nD%0n&Xqr^cK^`J5W;lRRB^R-O8b&HENO||mo0xaD+S=I8RTlIfVgqN@SXDr2&-)we--K7w= zJVU8?Z+7k9dy;s;^gDkQa`0nz6N{T?(A&Iz)2!DEecLyRa&FI!id#5Z7B*O2=PsR0 zEvc|8{NS^)!d)MDX(97Xw}m&kEO@5jqRaDZ!+%`wYOI<23q|&js`&o4xvjP7D_xv@ z5hEwpsp{HezI9!~6O{~)lLR@oF7?J7i>1|5a~UuoN=q&6N}EJPV_GD`&M*v8Y`^2j zKII*d_@Fi$+i*YEW+Hbzn{iQk~yP z>7N{S4)r*!NwQ`(qcN#8SRQsNK6>{)X12nbF`*7#ecO7I)Q$uZsV+xS4E7aUn+U(K baj7?x%VD!5Cxk2YbYLNVeiXvvpMCWYo=by@ literal 0 HcmV?d00001 diff --git a/automerge-js/examples/create-react-app/public/index.html b/automerge-js/examples/create-react-app/public/index.html new file mode 100644 index 00000000..aa069f27 --- /dev/null +++ b/automerge-js/examples/create-react-app/public/index.html @@ -0,0 +1,43 @@ + + + + + + + + + + + + + React App + + + +
+ + + diff --git a/automerge-js/examples/create-react-app/public/logo192.png b/automerge-js/examples/create-react-app/public/logo192.png new file mode 100644 index 0000000000000000000000000000000000000000..fc44b0a3796c0e0a64c3d858ca038bd4570465d9 GIT binary patch literal 5347 zcmZWtbyO6NvR-oO24RV%BvuJ&=?+<7=`LvyB&A_#M7mSDYw1v6DJkiYl9XjT!%$dLEBTQ8R9|wd3008in6lFF3GV-6mLi?MoP_y~}QUnaDCHI#t z7w^m$@6DI)|C8_jrT?q=f8D?0AM?L)Z}xAo^e^W>t$*Y0KlT5=@bBjT9kxb%-KNdk zeOS1tKO#ChhG7%{ApNBzE2ZVNcxbrin#E1TiAw#BlUhXllzhN$qWez5l;h+t^q#Eav8PhR2|T}y5kkflaK`ba-eoE+Z2q@o6P$)=&` z+(8}+-McnNO>e#$Rr{32ngsZIAX>GH??tqgwUuUz6kjns|LjsB37zUEWd|(&O!)DY zQLrq%Y>)Y8G`yYbYCx&aVHi@-vZ3|ebG!f$sTQqMgi0hWRJ^Wc+Ibv!udh_r%2|U) zPi|E^PK?UE!>_4`f`1k4hqqj_$+d!EB_#IYt;f9)fBOumGNyglU(ofY`yHq4Y?B%- zp&G!MRY<~ajTgIHErMe(Z8JG*;D-PJhd@RX@QatggM7+G(Lz8eZ;73)72Hfx5KDOE zkT(m}i2;@X2AT5fW?qVp?@WgN$aT+f_6eo?IsLh;jscNRp|8H}Z9p_UBO^SJXpZew zEK8fz|0Th%(Wr|KZBGTM4yxkA5CFdAj8=QSrT$fKW#tweUFqr0TZ9D~a5lF{)%-tTGMK^2tz(y2v$i%V8XAxIywrZCp=)83p(zIk6@S5AWl|Oa2hF`~~^W zI;KeOSkw1O#TiQ8;U7OPXjZM|KrnN}9arP)m0v$c|L)lF`j_rpG(zW1Qjv$=^|p*f z>)Na{D&>n`jOWMwB^TM}slgTEcjxTlUby89j1)|6ydRfWERn3|7Zd2&e7?!K&5G$x z`5U3uFtn4~SZq|LjFVrz$3iln-+ucY4q$BC{CSm7Xe5c1J<=%Oagztj{ifpaZk_bQ z9Sb-LaQMKp-qJA*bP6DzgE3`}*i1o3GKmo2pn@dj0;He}F=BgINo};6gQF8!n0ULZ zL>kC0nPSFzlcB7p41doao2F7%6IUTi_+!L`MM4o*#Y#0v~WiO8uSeAUNp=vA2KaR&=jNR2iVwG>7t%sG2x_~yXzY)7K& zk3p+O0AFZ1eu^T3s};B%6TpJ6h-Y%B^*zT&SN7C=N;g|#dGIVMSOru3iv^SvO>h4M=t-N1GSLLDqVTcgurco6)3&XpU!FP6Hlrmj}f$ zp95;b)>M~`kxuZF3r~a!rMf4|&1=uMG$;h^g=Kl;H&Np-(pFT9FF@++MMEx3RBsK?AU0fPk-#mdR)Wdkj)`>ZMl#^<80kM87VvsI3r_c@_vX=fdQ`_9-d(xiI z4K;1y1TiPj_RPh*SpDI7U~^QQ?%0&!$Sh#?x_@;ag)P}ZkAik{_WPB4rHyW#%>|Gs zdbhyt=qQPA7`?h2_8T;-E6HI#im9K>au*(j4;kzwMSLgo6u*}-K`$_Gzgu&XE)udQ zmQ72^eZd|vzI)~!20JV-v-T|<4@7ruqrj|o4=JJPlybwMg;M$Ud7>h6g()CT@wXm` zbq=A(t;RJ^{Xxi*Ff~!|3!-l_PS{AyNAU~t{h;(N(PXMEf^R(B+ZVX3 z8y0;0A8hJYp@g+c*`>eTA|3Tgv9U8#BDTO9@a@gVMDxr(fVaEqL1tl?md{v^j8aUv zm&%PX4^|rX|?E4^CkplWWNv*OKM>DxPa 
z!RJ)U^0-WJMi)Ksc!^ixOtw^egoAZZ2Cg;X7(5xZG7yL_;UJ#yp*ZD-;I^Z9qkP`} zwCTs0*%rIVF1sgLervtnUo&brwz?6?PXRuOCS*JI-WL6GKy7-~yi0giTEMmDs_-UX zo=+nFrW_EfTg>oY72_4Z0*uG>MnXP=c0VpT&*|rvv1iStW;*^={rP1y?Hv+6R6bxFMkxpWkJ>m7Ba{>zc_q zEefC3jsXdyS5??Mz7IET$Kft|EMNJIv7Ny8ZOcKnzf`K5Cd)&`-fTY#W&jnV0l2vt z?Gqhic}l}mCv1yUEy$%DP}4AN;36$=7aNI^*AzV(eYGeJ(Px-j<^gSDp5dBAv2#?; zcMXv#aj>%;MiG^q^$0MSg-(uTl!xm49dH!{X0){Ew7ThWV~Gtj7h%ZD zVN-R-^7Cf0VH!8O)uUHPL2mO2tmE*cecwQv_5CzWeh)ykX8r5Hi`ehYo)d{Jnh&3p z9ndXT$OW51#H5cFKa76c<%nNkP~FU93b5h-|Cb}ScHs@4Q#|}byWg;KDMJ#|l zE=MKD*F@HDBcX@~QJH%56eh~jfPO-uKm}~t7VkHxHT;)4sd+?Wc4* z>CyR*{w@4(gnYRdFq=^(#-ytb^5ESD?x<0Skhb%Pt?npNW1m+Nv`tr9+qN<3H1f<% zZvNEqyK5FgPsQ`QIu9P0x_}wJR~^CotL|n zk?dn;tLRw9jJTur4uWoX6iMm914f0AJfB@C74a;_qRrAP4E7l890P&{v<}>_&GLrW z)klculcg`?zJO~4;BBAa=POU%aN|pmZJn2{hA!d!*lwO%YSIzv8bTJ}=nhC^n}g(ld^rn#kq9Z3)z`k9lvV>y#!F4e{5c$tnr9M{V)0m(Z< z#88vX6-AW7T2UUwW`g<;8I$Jb!R%z@rCcGT)-2k7&x9kZZT66}Ztid~6t0jKb&9mm zpa}LCb`bz`{MzpZR#E*QuBiZXI#<`5qxx=&LMr-UUf~@dRk}YI2hbMsAMWOmDzYtm zjof16D=mc`^B$+_bCG$$@R0t;e?~UkF?7<(vkb70*EQB1rfUWXh$j)R2)+dNAH5%R zEBs^?N;UMdy}V};59Gu#0$q53$}|+q7CIGg_w_WlvE}AdqoS<7DY1LWS9?TrfmcvT zaypmplwn=P4;a8-%l^e?f`OpGb}%(_mFsL&GywhyN(-VROj`4~V~9bGv%UhcA|YW% zs{;nh@aDX11y^HOFXB$a7#Sr3cEtNd4eLm@Y#fc&j)TGvbbMwze zXtekX_wJqxe4NhuW$r}cNy|L{V=t#$%SuWEW)YZTH|!iT79k#?632OFse{+BT_gau zJwQcbH{b}dzKO?^dV&3nTILYlGw{27UJ72ZN){BILd_HV_s$WfI2DC<9LIHFmtyw? 
zQ;?MuK7g%Ym+4e^W#5}WDLpko%jPOC=aN)3!=8)s#Rnercak&b3ESRX3z{xfKBF8L z5%CGkFmGO@x?_mPGlpEej!3!AMddChabyf~nJNZxx!D&{@xEb!TDyvqSj%Y5@A{}9 zRzoBn0?x}=krh{ok3Nn%e)#~uh;6jpezhA)ySb^b#E>73e*frBFu6IZ^D7Ii&rsiU z%jzygxT-n*joJpY4o&8UXr2s%j^Q{?e-voloX`4DQyEK+DmrZh8A$)iWL#NO9+Y@!sO2f@rI!@jN@>HOA< z?q2l{^%mY*PNx2FoX+A7X3N}(RV$B`g&N=e0uvAvEN1W^{*W?zT1i#fxuw10%~))J zjx#gxoVlXREWZf4hRkgdHx5V_S*;p-y%JtGgQ4}lnA~MBz-AFdxUxU1RIT$`sal|X zPB6sEVRjGbXIP0U+?rT|y5+ev&OMX*5C$n2SBPZr`jqzrmpVrNciR0e*Wm?fK6DY& zl(XQZ60yWXV-|Ps!A{EF;=_z(YAF=T(-MkJXUoX zI{UMQDAV2}Ya?EisdEW;@pE6dt;j0fg5oT2dxCi{wqWJ<)|SR6fxX~5CzblPGr8cb zUBVJ2CQd~3L?7yfTpLNbt)He1D>*KXI^GK%<`bq^cUq$Q@uJifG>p3LU(!H=C)aEL zenk7pVg}0{dKU}&l)Y2Y2eFMdS(JS0}oZUuVaf2+K*YFNGHB`^YGcIpnBlMhO7d4@vV zv(@N}(k#REdul8~fP+^F@ky*wt@~&|(&&meNO>rKDEnB{ykAZ}k>e@lad7to>Ao$B zz<1(L=#J*u4_LB=8w+*{KFK^u00NAmeNN7pr+Pf+N*Zl^dO{LM-hMHyP6N!~`24jd zXYP|Ze;dRXKdF2iJG$U{k=S86l@pytLx}$JFFs8e)*Vi?aVBtGJ3JZUj!~c{(rw5>vuRF$`^p!P8w1B=O!skwkO5yd4_XuG^QVF z`-r5K7(IPSiKQ2|U9+`@Js!g6sfJwAHVd|s?|mnC*q zp|B|z)(8+mxXyxQ{8Pg3F4|tdpgZZSoU4P&9I8)nHo1@)9_9u&NcT^FI)6|hsAZFk zZ+arl&@*>RXBf-OZxhZerOr&dN5LW9@gV=oGFbK*J+m#R-|e6(Loz(;g@T^*oO)0R zN`N=X46b{7yk5FZGr#5&n1!-@j@g02g|X>MOpF3#IjZ_4wg{dX+G9eqS+Es9@6nC7 zD9$NuVJI}6ZlwtUm5cCAiYv0(Yi{%eH+}t)!E^>^KxB5^L~a`4%1~5q6h>d;paC9c zTj0wTCKrhWf+F#5>EgX`sl%POl?oyCq0(w0xoL?L%)|Q7d|Hl92rUYAU#lc**I&^6p=4lNQPa0 znQ|A~i0ip@`B=FW-Q;zh?-wF;Wl5!+q3GXDu-x&}$gUO)NoO7^$BeEIrd~1Dh{Tr` z8s<(Bn@gZ(mkIGnmYh_ehXnq78QL$pNDi)|QcT*|GtS%nz1uKE+E{7jdEBp%h0}%r zD2|KmYGiPa4;md-t_m5YDz#c*oV_FqXd85d@eub?9N61QuYcb3CnVWpM(D-^|CmkL z(F}L&N7qhL2PCq)fRh}XO@U`Yn<?TNGR4L(mF7#4u29{i~@k;pLsgl({YW5`Mo+p=zZn3L*4{JU;++dG9 X@eDJUQo;Ye2mwlRs?y0|+_a0zY+Zo%Dkae}+MySoIppb75o?vUW_?)>@g{U2`ERQIXV zeY$JrWnMZ$QC<=ii4X|@0H8`si75jB(ElJb00HAB%>SlLR{!zO|C9P3zxw_U8?1d8uRZ=({Ga4shyN}3 zAK}WA(ds|``G4jA)9}Bt2Hy0+f3rV1E6b|@?hpGA=PI&r8)ah|)I2s(P5Ic*Ndhn^ z*T&j@gbCTv7+8rpYbR^Ty}1AY)YH;p!m948r#%7x^Z@_-w{pDl|1S4`EM3n_PaXvK z1JF)E3qy$qTj5Xs{jU9k=y%SQ0>8E$;x?p9ayU0bZZeo{5Z@&FKX>}s!0+^>C^D#z 
z>xsCPvxD3Z=dP}TTOSJhNTPyVt14VCQ9MQFN`rn!c&_p?&4<5_PGm4a;WS&1(!qKE z_H$;dDdiPQ!F_gsN`2>`X}$I=B;={R8%L~`>RyKcS$72ai$!2>d(YkciA^J0@X%G4 z4cu!%Ps~2JuJ8ex`&;Fa0NQOq_nDZ&X;^A=oc1&f#3P1(!5il>6?uK4QpEG8z0Rhu zvBJ+A9RV?z%v?!$=(vcH?*;vRs*+PPbOQ3cdPr5=tOcLqmfx@#hOqX0iN)wTTO21jH<>jpmwRIAGw7`a|sl?9y9zRBh>(_%| zF?h|P7}~RKj?HR+q|4U`CjRmV-$mLW>MScKnNXiv{vD3&2@*u)-6P@h0A`eeZ7}71 zK(w%@R<4lLt`O7fs1E)$5iGb~fPfJ?WxhY7c3Q>T-w#wT&zW522pH-B%r5v#5y^CF zcC30Se|`D2mY$hAlIULL%-PNXgbbpRHgn<&X3N9W!@BUk@9g*P5mz-YnZBb*-$zMM z7Qq}ic0mR8n{^L|=+diODdV}Q!gwr?y+2m=3HWwMq4z)DqYVg0J~^}-%7rMR@S1;9 z7GFj6K}i32X;3*$SmzB&HW{PJ55kT+EI#SsZf}bD7nW^Haf}_gXciYKX{QBxIPSx2Ma? zHQqgzZq!_{&zg{yxqv3xq8YV+`S}F6A>Gtl39_m;K4dA{pP$BW0oIXJ>jEQ!2V3A2 zdpoTxG&V=(?^q?ZTj2ZUpDUdMb)T?E$}CI>r@}PFPWD9@*%V6;4Ag>D#h>!s)=$0R zRXvdkZ%|c}ubej`jl?cS$onl9Tw52rBKT)kgyw~Xy%z62Lr%V6Y=f?2)J|bZJ5(Wx zmji`O;_B+*X@qe-#~`HFP<{8$w@z4@&`q^Q-Zk8JG3>WalhnW1cvnoVw>*R@c&|o8 zZ%w!{Z+MHeZ*OE4v*otkZqz11*s!#s^Gq>+o`8Z5 z^i-qzJLJh9!W-;SmFkR8HEZJWiXk$40i6)7 zZpr=k2lp}SasbM*Nbn3j$sn0;rUI;%EDbi7T1ZI4qL6PNNM2Y%6{LMIKW+FY_yF3) zSKQ2QSujzNMSL2r&bYs`|i2Dnn z=>}c0>a}>|uT!IiMOA~pVT~R@bGlm}Edf}Kq0?*Af6#mW9f9!}RjW7om0c9Qlp;yK z)=XQs(|6GCadQbWIhYF=rf{Y)sj%^Id-ARO0=O^Ad;Ph+ z0?$eE1xhH?{T$QI>0JP75`r)U_$#%K1^BQ8z#uciKf(C701&RyLQWBUp*Q7eyn76} z6JHpC9}R$J#(R0cDCkXoFSp;j6{x{b&0yE@P7{;pCEpKjS(+1RQy38`=&Yxo%F=3y zCPeefABp34U-s?WmU#JJw23dcC{sPPFc2#J$ZgEN%zod}J~8dLm*fx9f6SpO zn^Ww3bt9-r0XaT2a@Wpw;C23XM}7_14#%QpubrIw5aZtP+CqIFmsG4`Cm6rfxl9n5 z7=r2C-+lM2AB9X0T_`?EW&Byv&K?HS4QLoylJ|OAF z`8atBNTzJ&AQ!>sOo$?^0xj~D(;kS$`9zbEGd>f6r`NC3X`tX)sWgWUUOQ7w=$TO&*j;=u%25ay-%>3@81tGe^_z*C7pb9y*Ed^H3t$BIKH2o+olp#$q;)_ zfpjCb_^VFg5fU~K)nf*d*r@BCC>UZ!0&b?AGk_jTPXaSnCuW110wjHPPe^9R^;jo3 zwvzTl)C`Zl5}O2}3lec=hZ*$JnkW#7enKKc)(pM${_$9Hc=Sr_A9Biwe*Y=T?~1CK z6eZ9uPICjy-sMGbZl$yQmpB&`ouS8v{58__t0$JP%i3R&%QR3ianbZqDs<2#5FdN@n5bCn^ZtH992~5k(eA|8|@G9u`wdn7bnpg|@{m z^d6Y`*$Zf2Xr&|g%sai#5}Syvv(>Jnx&EM7-|Jr7!M~zdAyjt*xl;OLhvW-a%H1m0 z*x5*nb=R5u><7lyVpNAR?q@1U59 zO+)QWwL8t 
zyip?u_nI+K$uh{y)~}qj?(w0&=SE^8`_WMM zTybjG=999h38Yes7}-4*LJ7H)UE8{mE(6;8voE+TYY%33A>S6`G_95^5QHNTo_;Ao ztIQIZ_}49%{8|=O;isBZ?=7kfdF8_@azfoTd+hEJKWE!)$)N%HIe2cplaK`ry#=pV z0q{9w-`i0h@!R8K3GC{ivt{70IWG`EP|(1g7i_Q<>aEAT{5(yD z=!O?kq61VegV+st@XCw475j6vS)_z@efuqQgHQR1T4;|-#OLZNQJPV4k$AX1Uk8Lm z{N*b*ia=I+MB}kWpupJ~>!C@xEN#Wa7V+7{m4j8c?)ChV=D?o~sjT?0C_AQ7B-vxqX30s0I_`2$in86#`mAsT-w?j{&AL@B3$;P z31G4(lV|b}uSDCIrjk+M1R!X7s4Aabn<)zpgT}#gE|mIvV38^ODy@<&yflpCwS#fRf9ZX3lPV_?8@C5)A;T zqmouFLFk;qIs4rA=hh=GL~sCFsXHsqO6_y~*AFt939UYVBSx1s(=Kb&5;j7cSowdE;7()CC2|-i9Zz+_BIw8#ll~-tyH?F3{%`QCsYa*b#s*9iCc`1P1oC26?`g<9))EJ3%xz+O!B3 zZ7$j~To)C@PquR>a1+Dh>-a%IvH_Y7^ys|4o?E%3`I&ADXfC8++hAdZfzIT#%C+Jz z1lU~K_vAm0m8Qk}K$F>|>RPK%<1SI0(G+8q~H zAsjezyP+u!Se4q3GW)`h`NPSRlMoBjCzNPesWJwVTY!o@G8=(6I%4XHGaSiS3MEBK zhgGFv6Jc>L$4jVE!I?TQuwvz_%CyO!bLh94nqK11C2W$*aa2ueGopG8DnBICVUORP zgytv#)49fVXDaR$SukloYC3u7#5H)}1K21=?DKj^U)8G;MS)&Op)g^zR2($<>C*zW z;X7`hLxiIO#J`ANdyAOJle4V%ppa*(+0i3w;8i*BA_;u8gOO6)MY`ueq7stBMJTB; z-a0R>hT*}>z|Gg}@^zDL1MrH+2hsR8 zHc}*9IvuQC^Ju)^#Y{fOr(96rQNPNhxc;mH@W*m206>Lo<*SaaH?~8zg&f&%YiOEG zGiz?*CP>Bci}!WiS=zj#K5I}>DtpregpP_tfZtPa(N<%vo^#WCQ5BTv0vr%Z{)0q+ z)RbfHktUm|lg&U3YM%lMUM(fu}i#kjX9h>GYctkx9Mt_8{@s%!K_EI zScgwy6%_fR?CGJQtmgNAj^h9B#zmaMDWgH55pGuY1Gv7D z;8Psm(vEPiwn#MgJYu4Ty9D|h!?Rj0ddE|&L3S{IP%H4^N!m`60ZwZw^;eg4sk6K{ ziA^`Sbl_4~f&Oo%n;8Ye(tiAdlZKI!Z=|j$5hS|D$bDJ}p{gh$KN&JZYLUjv4h{NY zBJ>X9z!xfDGY z+oh_Z&_e#Q(-}>ssZfm=j$D&4W4FNy&-kAO1~#3Im;F)Nwe{(*75(p=P^VI?X0GFakfh+X-px4a%Uw@fSbmp9hM1_~R>?Z8+ ziy|e9>8V*`OP}4x5JjdWp}7eX;lVxp5qS}0YZek;SNmm7tEeSF*-dI)6U-A%m6YvCgM(}_=k#a6o^%-K4{`B1+}O4x zztDT%hVb;v#?j`lTvlFQ3aV#zkX=7;YFLS$uIzb0E3lozs5`Xy zi~vF+%{z9uLjKvKPhP%x5f~7-Gj+%5N`%^=yk*Qn{`> z;xj&ROY6g`iy2a@{O)V(jk&8#hHACVDXey5a+KDod_Z&}kHM}xt7}Md@pil{2x7E~ zL$k^d2@Ec2XskjrN+IILw;#7((abu;OJii&v3?60x>d_Ma(onIPtcVnX@ELF0aL?T zSmWiL3(dOFkt!x=1O!_0n(cAzZW+3nHJ{2S>tgSK?~cFha^y(l@-Mr2W$%MN{#af8J;V*>hdq!gx=d0h$T7l}>91Wh07)9CTX zh2_ZdQCyFOQ)l(}gft0UZG`Sh2`x-w`5vC2UD}lZs*5 
zG76$akzn}Xi))L3oGJ75#pcN=cX3!=57$Ha=hQ2^lwdyU#a}4JJOz6ddR%zae%#4& za)bFj)z=YQela(F#Y|Q#dp}PJghITwXouVaMq$BM?K%cXn9^Y@g43$=O)F&ZlOUom zJiad#dea;-eywBA@e&D6Pdso1?2^(pXiN91?jvcaUyYoKUmvl5G9e$W!okWe*@a<^ z8cQQ6cNSf+UPDx%?_G4aIiybZHHagF{;IcD(dPO!#=u zWfqLcPc^+7Uu#l(Bpxft{*4lv#*u7X9AOzDO z1D9?^jIo}?%iz(_dwLa{ex#T}76ZfN_Z-hwpus9y+4xaUu9cX}&P{XrZVWE{1^0yw zO;YhLEW!pJcbCt3L8~a7>jsaN{V3>tz6_7`&pi%GxZ=V3?3K^U+*ryLSb)8^IblJ0 zSRLNDvIxt)S}g30?s_3NX>F?NKIGrG_zB9@Z>uSW3k2es_H2kU;Rnn%j5qP)!XHKE zPB2mHP~tLCg4K_vH$xv`HbRsJwbZMUV(t=ez;Ec(vyHH)FbfLg`c61I$W_uBB>i^r z&{_P;369-&>23R%qNIULe=1~T$(DA`ev*EWZ6j(B$(te}x1WvmIll21zvygkS%vwG zzkR6Z#RKA2!z!C%M!O>!=Gr0(J0FP=-MN=5t-Ir)of50y10W}j`GtRCsXBakrKtG& zazmITDJMA0C51&BnLY)SY9r)NVTMs);1<=oosS9g31l{4ztjD3#+2H7u_|66b|_*O z;Qk6nalpqdHOjx|K&vUS_6ITgGll;TdaN*ta=M_YtyC)I9Tmr~VaPrH2qb6sd~=AcIxV+%z{E&0@y=DPArw zdV7z(G1hBx7hd{>(cr43^WF%4Y@PXZ?wPpj{OQ#tvc$pABJbvPGvdR`cAtHn)cSEV zrpu}1tJwQ3y!mSmH*uz*x0o|CS<^w%&KJzsj~DU0cLQUxk5B!hWE>aBkjJle8z~;s z-!A=($+}Jq_BTK5^B!`R>!MulZN)F=iXXeUd0w5lUsE5VP*H*oCy(;?S$p*TVvTxwAeWFB$jHyb0593)$zqalVlDX=GcCN1gU0 zlgU)I$LcXZ8Oyc2TZYTPu@-;7<4YYB-``Qa;IDcvydIA$%kHhJKV^m*-zxcvU4viy&Kr5GVM{IT>WRywKQ9;>SEiQD*NqplK-KK4YR`p0@JW)n_{TU3bt0 zim%;(m1=#v2}zTps=?fU5w^(*y)xT%1vtQH&}50ZF!9YxW=&7*W($2kgKyz1mUgfs zfV<*XVVIFnohW=|j+@Kfo!#liQR^x>2yQdrG;2o8WZR+XzU_nG=Ed2rK?ntA;K5B{ z>M8+*A4!Jm^Bg}aW?R?6;@QG@uQ8&oJ{hFixcfEnJ4QH?A4>P=q29oDGW;L;= z9-a0;g%c`C+Ai!UmK$NC*4#;Jp<1=TioL=t^YM)<<%u#hnnfSS`nq63QKGO1L8RzX z@MFDqs1z ztYmxDl@LU)5acvHk)~Z`RW7=aJ_nGD!mOSYD>5Odjn@TK#LY{jf?+piB5AM-CAoT_ z?S-*q7}wyLJzK>N%eMPuFgN)Q_otKP;aqy=D5f!7<=n(lNkYRXVpkB{TAYLYg{|(jtRqYmg$xH zjmq?B(RE4 zQx^~Pt}gxC2~l=K$$-sYy_r$CO(d=+b3H1MB*y_5g6WLaWTXn+TKQ|hNY^>Mp6k*$ zwkovomhu776vQATqT4blf~g;TY(MWCrf^^yfWJvSAB$p5l;jm@o#=!lqw+Lqfq>X= z$6~kxfm7`3q4zUEB;u4qa#BdJxO!;xGm)wwuisj{0y2x{R(IGMrsIzDY9LW>m!Y`= z04sx3IjnYvL<4JqxQ8f7qYd0s2Ig%`ytYPEMKI)s(LD}D@EY>x`VFtqvnADNBdeao zC96X+MxnwKmjpg{U&gP3HE}1=s!lv&D{6(g_lzyF3A`7Jn*&d_kL<;dAFx!UZ>hB8 
z5A*%LsAn;VLp>3${0>M?PSQ)9s3}|h2e?TG4_F{}{Cs>#3Q*t$(CUc}M)I}8cPF6% z=+h(Kh^8)}gj(0}#e7O^FQ6`~fd1#8#!}LMuo3A0bN`o}PYsm!Y}sdOz$+Tegc=qT z8x`PH$7lvnhJp{kHWb22l;@7B7|4yL4UOOVM0MP_>P%S1Lnid)+k9{+3D+JFa#Pyf zhVc#&df87APl4W9X)F3pGS>@etfl=_E5tBcVoOfrD4hmVeTY-cj((pkn%n@EgN{0f zwb_^Rk0I#iZuHK!l*lN`ceJn(sI{$Fq6nN& zE<-=0_2WN}m+*ivmIOxB@#~Q-cZ>l136w{#TIJe478`KE7@=a{>SzPHsKLzYAyBQO zAtuuF$-JSDy_S@6GW0MOE~R)b;+0f%_NMrW(+V#c_d&U8Z9+ec4=HmOHw?gdjF(Lu zzra83M_BoO-1b3;9`%&DHfuUY)6YDV21P$C!Rc?mv&{lx#f8oc6?0?x zK08{WP65?#>(vPfA-c=MCY|%*1_<3D4NX zeVTi-JGl2uP_2@0F{G({pxQOXt_d{g_CV6b?jNpfUG9;8yle-^4KHRvZs-_2siata zt+d_T@U$&t*xaD22(fH(W1r$Mo?3dc%Tncm=C6{V9y{v&VT#^1L04vDrLM9qBoZ4@ z6DBN#m57hX7$C(=#$Y5$bJmwA$T8jKD8+6A!-IJwA{WOfs%s}yxUw^?MRZjF$n_KN z6`_bGXcmE#5e4Ym)aQJ)xg3Pg0@k`iGuHe?f(5LtuzSq=nS^5z>vqU0EuZ&75V%Z{ zYyhRLN^)$c6Ds{f7*FBpE;n5iglx5PkHfWrj3`x^j^t z7ntuV`g!9Xg#^3!x)l*}IW=(Tz3>Y5l4uGaB&lz{GDjm2D5S$CExLT`I1#n^lBH7Y zDgpMag@`iETKAI=p<5E#LTkwzVR@=yY|uBVI1HG|8h+d;G-qfuj}-ZR6fN>EfCCW z9~wRQoAPEa#aO?3h?x{YvV*d+NtPkf&4V0k4|L=uj!U{L+oLa(z#&iuhJr3-PjO3R z5s?=nn_5^*^Rawr>>Nr@K(jwkB#JK-=+HqwfdO<+P5byeim)wvqGlP-P|~Nse8=XF zz`?RYB|D6SwS}C+YQv+;}k6$-%D(@+t14BL@vM z2q%q?f6D-A5s$_WY3{^G0F131bbh|g!}#BKw=HQ7mx;Dzg4Z*bTLQSfo{ed{4}NZW zfrRm^Ca$rlE{Ue~uYv>R9{3smwATcdM_6+yWIO z*ZRH~uXE@#p$XTbCt5j7j2=86e{9>HIB6xDzV+vAo&B?KUiMP|ttOElepnl%|DPqL b{|{}U^kRn2wo}j7|0ATu<;8xA7zX}7|B6mN literal 0 HcmV?d00001 diff --git a/automerge-js/examples/create-react-app/public/manifest.json b/automerge-js/examples/create-react-app/public/manifest.json new file mode 100644 index 00000000..080d6c77 --- /dev/null +++ b/automerge-js/examples/create-react-app/public/manifest.json @@ -0,0 +1,25 @@ +{ + "short_name": "React App", + "name": "Create React App Sample", + "icons": [ + { + "src": "favicon.ico", + "sizes": "64x64 32x32 24x24 16x16", + "type": "image/x-icon" + }, + { + "src": "logo192.png", + "type": "image/png", + "sizes": "192x192" + }, + { + "src": "logo512.png", + "type": "image/png", + "sizes": "512x512" + } + ], + 
"start_url": ".", + "display": "standalone", + "theme_color": "#000000", + "background_color": "#ffffff" +} diff --git a/automerge-js/examples/create-react-app/public/robots.txt b/automerge-js/examples/create-react-app/public/robots.txt new file mode 100644 index 00000000..e9e57dc4 --- /dev/null +++ b/automerge-js/examples/create-react-app/public/robots.txt @@ -0,0 +1,3 @@ +# https://www.robotstxt.org/robotstxt.html +User-agent: * +Disallow: diff --git a/automerge-js/examples/create-react-app/src/App.css b/automerge-js/examples/create-react-app/src/App.css new file mode 100644 index 00000000..74b5e053 --- /dev/null +++ b/automerge-js/examples/create-react-app/src/App.css @@ -0,0 +1,38 @@ +.App { + text-align: center; +} + +.App-logo { + height: 40vmin; + pointer-events: none; +} + +@media (prefers-reduced-motion: no-preference) { + .App-logo { + animation: App-logo-spin infinite 20s linear; + } +} + +.App-header { + background-color: #282c34; + min-height: 100vh; + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + font-size: calc(10px + 2vmin); + color: white; +} + +.App-link { + color: #61dafb; +} + +@keyframes App-logo-spin { + from { + transform: rotate(0deg); + } + to { + transform: rotate(360deg); + } +} diff --git a/automerge-js/examples/create-react-app/src/App.js b/automerge-js/examples/create-react-app/src/App.js new file mode 100644 index 00000000..cebfc345 --- /dev/null +++ b/automerge-js/examples/create-react-app/src/App.js @@ -0,0 +1,21 @@ +import * as Automerge from "automerge" +import logo from './logo.svg'; +import './App.css'; + +let doc = Automerge.init() +doc = Automerge.change(doc, (d) => d.hello = "from automerge-js") +const result = JSON.stringify(doc) + + +function App() { + return ( +
+
+ logo +

{result}

+
+
+ ); +} + +export default App; diff --git a/automerge-js/examples/create-react-app/src/App.test.js b/automerge-js/examples/create-react-app/src/App.test.js new file mode 100644 index 00000000..1f03afee --- /dev/null +++ b/automerge-js/examples/create-react-app/src/App.test.js @@ -0,0 +1,8 @@ +import { render, screen } from '@testing-library/react'; +import App from './App'; + +test('renders learn react link', () => { + render(); + const linkElement = screen.getByText(/learn react/i); + expect(linkElement).toBeInTheDocument(); +}); diff --git a/automerge-js/examples/create-react-app/src/index.css b/automerge-js/examples/create-react-app/src/index.css new file mode 100644 index 00000000..ec2585e8 --- /dev/null +++ b/automerge-js/examples/create-react-app/src/index.css @@ -0,0 +1,13 @@ +body { + margin: 0; + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen', + 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue', + sans-serif; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +code { + font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New', + monospace; +} diff --git a/automerge-js/examples/create-react-app/src/index.js b/automerge-js/examples/create-react-app/src/index.js new file mode 100644 index 00000000..d563c0fb --- /dev/null +++ b/automerge-js/examples/create-react-app/src/index.js @@ -0,0 +1,17 @@ +import React from 'react'; +import ReactDOM from 'react-dom/client'; +import './index.css'; +import App from './App'; +import reportWebVitals from './reportWebVitals'; + +const root = ReactDOM.createRoot(document.getElementById('root')); +root.render( + + + +); + +// If you want to start measuring performance in your app, pass a function +// to log results (for example: reportWebVitals(console.log)) +// or send to an analytics endpoint. 
Learn more: https://bit.ly/CRA-vitals +reportWebVitals(); diff --git a/automerge-js/examples/create-react-app/src/logo.svg b/automerge-js/examples/create-react-app/src/logo.svg new file mode 100644 index 00000000..9dfc1c05 --- /dev/null +++ b/automerge-js/examples/create-react-app/src/logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/automerge-js/examples/create-react-app/src/reportWebVitals.js b/automerge-js/examples/create-react-app/src/reportWebVitals.js new file mode 100644 index 00000000..5253d3ad --- /dev/null +++ b/automerge-js/examples/create-react-app/src/reportWebVitals.js @@ -0,0 +1,13 @@ +const reportWebVitals = onPerfEntry => { + if (onPerfEntry && onPerfEntry instanceof Function) { + import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => { + getCLS(onPerfEntry); + getFID(onPerfEntry); + getFCP(onPerfEntry); + getLCP(onPerfEntry); + getTTFB(onPerfEntry); + }); + } +}; + +export default reportWebVitals; diff --git a/automerge-js/examples/create-react-app/src/setupTests.js b/automerge-js/examples/create-react-app/src/setupTests.js new file mode 100644 index 00000000..8f2609b7 --- /dev/null +++ b/automerge-js/examples/create-react-app/src/setupTests.js @@ -0,0 +1,5 @@ +// jest-dom adds custom jest matchers for asserting on DOM nodes. +// allows you to do things like: +// expect(element).toHaveTextContent(/react/i) +// learn more: https://github.com/testing-library/jest-dom +import '@testing-library/jest-dom'; diff --git a/automerge-js/examples/create-react-app/yarn.lock b/automerge-js/examples/create-react-app/yarn.lock new file mode 100644 index 00000000..79d61777 --- /dev/null +++ b/automerge-js/examples/create-react-app/yarn.lock @@ -0,0 +1,9120 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@adobe/css-tools@^4.0.1": + version "4.0.1" + resolved "http://localhost:4873/@adobe%2fcss-tools/-/css-tools-4.0.1.tgz#b38b444ad3aa5fedbb15f2f746dcd934226a12dd" + integrity sha512-+u76oB43nOHrF4DDWRLWDCtci7f3QJoEBigemIdIeTi1ODqjx6Tad9NCVnPRwewWlKkVab5PlK8DCtPTyX7S8g== + +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "http://localhost:4873/@ampproject%2fremapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@apideck/better-ajv-errors@^0.3.1": + version "0.3.6" + resolved "http://localhost:4873/@apideck%2fbetter-ajv-errors/-/better-ajv-errors-0.3.6.tgz#957d4c28e886a64a8141f7522783be65733ff097" + integrity sha512-P+ZygBLZtkp0qqOAJJVX4oX/sFo5JR3eBWwwuqHHhK0GIgQOKWrAfiAaWX0aArHkRWHMuggFEgAZNxVPwPZYaA== + dependencies: + json-schema "^0.4.0" + jsonpointer "^5.0.0" + leven "^3.1.0" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.8.3": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fcode-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" + integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== + dependencies: + "@babel/highlight" "^7.18.6" + +"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.18.8", "@babel/compat-data@^7.19.3": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fcompat-data/-/compat-data-7.19.3.tgz#707b939793f867f5a73b2666e6d9a3396eb03151" + integrity sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw== + +"@babel/core@^7.1.0", "@babel/core@^7.11.1", "@babel/core@^7.12.3", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0": 
+ version "7.19.3" + resolved "http://localhost:4873/@babel%2fcore/-/core-7.19.3.tgz#2519f62a51458f43b682d61583c3810e7dcee64c" + integrity sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.3" + "@babel/helper-compilation-targets" "^7.19.3" + "@babel/helper-module-transforms" "^7.19.0" + "@babel/helpers" "^7.19.0" + "@babel/parser" "^7.19.3" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.3" + "@babel/types" "^7.19.3" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/eslint-parser@^7.16.3": + version "7.19.1" + resolved "http://localhost:4873/@babel%2feslint-parser/-/eslint-parser-7.19.1.tgz#4f68f6b0825489e00a24b41b6a1ae35414ecd2f4" + integrity sha512-AqNf2QWt1rtu2/1rLswy6CDP7H9Oh3mMhk177Y67Rg8d7RD9WfOLLv8CGn6tisFvS2htm86yIe1yLF6I1UDaGQ== + dependencies: + "@nicolo-ribaudo/eslint-scope-5-internals" "5.1.1-v1" + eslint-visitor-keys "^2.1.0" + semver "^6.3.0" + +"@babel/generator@^7.19.3", "@babel/generator@^7.7.2": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fgenerator/-/generator-7.19.3.tgz#d7f4d1300485b4547cb6f94b27d10d237b42bf59" + integrity sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ== + dependencies: + "@babel/types" "^7.19.3" + "@jridgewell/gen-mapping" "^0.3.2" + jsesc "^2.5.1" + +"@babel/helper-annotate-as-pure@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb" + integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-builder-binary-assignment-operator-visitor@^7.18.6": + version "7.18.9" + resolved 
"http://localhost:4873/@babel%2fhelper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz#acd4edfd7a566d1d51ea975dff38fd52906981bb" + integrity sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw== + dependencies: + "@babel/helper-explode-assignable-expression" "^7.18.6" + "@babel/types" "^7.18.9" + +"@babel/helper-compilation-targets@^7.17.7", "@babel/helper-compilation-targets@^7.18.9", "@babel/helper-compilation-targets@^7.19.0", "@babel/helper-compilation-targets@^7.19.3": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fhelper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz#a10a04588125675d7c7ae299af86fa1b2ee038ca" + integrity sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg== + dependencies: + "@babel/compat-data" "^7.19.3" + "@babel/helper-validator-option" "^7.18.6" + browserslist "^4.21.3" + semver "^6.3.0" + +"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-create-class-features-plugin/-/helper-create-class-features-plugin-7.19.0.tgz#bfd6904620df4e46470bae4850d66be1054c404b" + integrity sha512-NRz8DwF4jT3UfrmUoZjd0Uph9HQnP30t7Ash+weACcyNkiYTywpIjDBgReJMKgr+n86sn2nPVVmJ28Dm053Kqw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-member-expression-to-functions" "^7.18.9" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-replace-supers" "^7.18.9" + "@babel/helper-split-export-declaration" "^7.18.6" + +"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.19.0": + version "7.19.0" + resolved 
"http://localhost:4873/@babel%2fhelper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.19.0.tgz#7976aca61c0984202baca73d84e2337a5424a41b" + integrity sha512-htnV+mHX32DF81amCDrwIDr8nrp1PTm+3wfBN9/v8QJOLEioOCOG7qNyq0nHeFiWbT3Eb7gsPwEmV64UCQ1jzw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + regexpu-core "^5.1.0" + +"@babel/helper-define-polyfill-provider@^0.3.3": + version "0.3.3" + resolved "http://localhost:4873/@babel%2fhelper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.3.tgz#8612e55be5d51f0cd1f36b4a5a83924e89884b7a" + integrity sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww== + dependencies: + "@babel/helper-compilation-targets" "^7.17.7" + "@babel/helper-plugin-utils" "^7.16.7" + debug "^4.1.1" + lodash.debounce "^4.0.8" + resolve "^1.14.2" + semver "^6.1.2" + +"@babel/helper-environment-visitor@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fhelper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" + integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== + +"@babel/helper-explode-assignable-expression@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz#41f8228ef0a6f1a036b8dfdfec7ce94f9a6bc096" + integrity sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-function-name@^7.18.9", "@babel/helper-function-name@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" + integrity sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== + dependencies: + "@babel/template" 
"^7.18.10" + "@babel/types" "^7.19.0" + +"@babel/helper-hoist-variables@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" + integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-member-expression-to-functions@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fhelper-member-expression-to-functions/-/helper-member-expression-to-functions-7.18.9.tgz#1531661e8375af843ad37ac692c132841e2fd815" + integrity sha512-RxifAh2ZoVU67PyKIO4AMi1wTenGfMR/O/ae0CCRqwgBAt5v7xjdtRw7UoSbsreKrQn5t7r89eruK/9JjYHuDg== + dependencies: + "@babel/types" "^7.18.9" + +"@babel/helper-module-imports@^7.10.4", "@babel/helper-module-imports@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" + integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-transforms@^7.18.6", "@babel/helper-module-transforms@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-module-transforms/-/helper-module-transforms-7.19.0.tgz#309b230f04e22c58c6a2c0c0c7e50b216d350c30" + integrity sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-simple-access" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-validator-identifier" "^7.18.6" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/helper-optimise-call-expression@^7.18.6": + version "7.18.6" + resolved 
"http://localhost:4873/@babel%2fhelper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz#9369aa943ee7da47edab2cb4e838acf09d290ffe" + integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.19.0", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-plugin-utils/-/helper-plugin-utils-7.19.0.tgz#4796bb14961521f0f8715990bee2fb6e51ce21bf" + integrity sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw== + +"@babel/helper-remap-async-to-generator@^7.18.6", "@babel/helper-remap-async-to-generator@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fhelper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz#997458a0e3357080e54e1d79ec347f8a8cd28519" + integrity sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-wrap-function" "^7.18.9" + "@babel/types" "^7.18.9" + +"@babel/helper-replace-supers@^7.18.6", "@babel/helper-replace-supers@^7.18.9", "@babel/helper-replace-supers@^7.19.1": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fhelper-replace-supers/-/helper-replace-supers-7.19.1.tgz#e1592a9b4b368aa6bdb8784a711e0bcbf0612b78" + integrity sha512-T7ahH7wV0Hfs46SFh5Jz3s0B6+o8g3c+7TMxu7xKfmHikg7EAZ3I2Qk9LFhjxXq8sL7UkP5JflezNwoZa8WvWw== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + 
"@babel/helper-member-expression-to-functions" "^7.18.9" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/traverse" "^7.19.1" + "@babel/types" "^7.19.0" + +"@babel/helper-simple-access@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-simple-access/-/helper-simple-access-7.18.6.tgz#d6d8f51f4ac2978068df934b569f08f29788c7ea" + integrity sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-skip-transparent-expression-wrappers@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fhelper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.18.9.tgz#778d87b3a758d90b471e7b9918f34a9a02eb5818" + integrity sha512-imytd2gHi3cJPsybLRbmFrF7u5BIEuI2cNheyKi3/iOBC63kNn3q8Crn2xVuESli0aM4KYsyEqKyS7lFL8YVtw== + dependencies: + "@babel/types" "^7.18.9" + +"@babel/helper-split-export-declaration@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" + integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-string-parser@^7.18.10": + version "7.18.10" + resolved "http://localhost:4873/@babel%2fhelper-string-parser/-/helper-string-parser-7.18.10.tgz#181f22d28ebe1b3857fa575f5c290b1aaf659b56" + integrity sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw== + +"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fhelper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" + integrity 
sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== + +"@babel/helper-validator-option@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" + integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== + +"@babel/helper-wrap-function@^7.18.9": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-wrap-function/-/helper-wrap-function-7.19.0.tgz#89f18335cff1152373222f76a4b37799636ae8b1" + integrity sha512-txX8aN8CZyYGTwcLhlk87KRqncAzhh5TpQamZUa0/u3an36NtDpUP6bQgBCBcLeBs09R/OwQu3OjK0k/HwfNDg== + dependencies: + "@babel/helper-function-name" "^7.19.0" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/helpers@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelpers/-/helpers-7.19.0.tgz#f30534657faf246ae96551d88dd31e9d1fa1fc18" + integrity sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg== + dependencies: + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/highlight@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhighlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" + integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== + dependencies: + "@babel/helper-validator-identifier" "^7.18.6" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.18.10", "@babel/parser@^7.19.3": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fparser/-/parser-7.19.3.tgz#8dd36d17c53ff347f9e55c328710321b49479a9a" + integrity sha512-pJ9xOlNWHiy9+FuFP09DEAFbAn4JskgRsVcc169w2xRBC3FRGuQEwjeIMMND9L2zc0iEhO/tGv4Zq+km+hxNpQ== + 
+"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz#da5b8f9a580acdfbe53494dba45ea389fb09a4d2" + integrity sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.18.9.tgz#a11af19aa373d68d561f08e0a57242350ed0ec50" + integrity sha512-AHrP9jadvH7qlOj6PINbgSuphjQUAK7AOT7DPjBo9EHoLhQTnnK5u45e1Hd4DbSQEO9nqPWtQ89r+XEOWFScKg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + "@babel/plugin-proposal-optional-chaining" "^7.18.9" + +"@babel/plugin-proposal-async-generator-functions@^7.19.1": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fplugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.19.1.tgz#34f6f5174b688529342288cd264f80c9ea9fb4a7" + integrity sha512-0yu8vNATgLy4ivqMNBIwb1HebCelqN7YX8SL3FDXORv/RqT0zEEWUCH4GH44JsSrvCu6GqnAdR5EBFAPeNBB4Q== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-remap-async-to-generator" "^7.18.9" + "@babel/plugin-syntax-async-generators" "^7.8.4" + +"@babel/plugin-proposal-class-properties@^7.16.0", "@babel/plugin-proposal-class-properties@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" + integrity 
sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-class-static-block@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.6.tgz#8aa81d403ab72d3962fc06c26e222dacfc9b9020" + integrity sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + +"@babel/plugin-proposal-decorators@^7.16.4": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fplugin-proposal-decorators/-/plugin-proposal-decorators-7.19.3.tgz#c1977e4902a18cdf9051bf7bf08d97db2fd8b110" + integrity sha512-MbgXtNXqo7RTKYIXVchVJGPvaVufQH3pxvQyfbGvNw1DObIhph+PesYXJTcd8J4DdWibvf6Z2eanOyItX8WnJg== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.19.0" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-replace-supers" "^7.19.1" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/plugin-syntax-decorators" "^7.19.0" + +"@babel/plugin-proposal-dynamic-import@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz#72bcf8d408799f547d759298c3c27c7e7faa4d94" + integrity sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + +"@babel/plugin-proposal-export-namespace-from@^7.18.9": + version "7.18.9" + resolved 
"http://localhost:4873/@babel%2fplugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz#5f7313ab348cdb19d590145f9247540e94761203" + integrity sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + +"@babel/plugin-proposal-json-strings@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz#7e8788c1811c393aff762817e7dbf1ebd0c05f0b" + integrity sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + +"@babel/plugin-proposal-logical-assignment-operators@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.18.9.tgz#8148cbb350483bf6220af06fa6db3690e14b2e23" + integrity sha512-128YbMpjCrP35IOExw2Fq+x55LMP42DzhOhX2aNNIdI9avSWl2PI0yuBWarr3RYpZBSPtabfadkH2yeRiMD61Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + +"@babel/plugin-proposal-nullish-coalescing-operator@^7.16.0", "@babel/plugin-proposal-nullish-coalescing-operator@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1" + integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + +"@babel/plugin-proposal-numeric-separator@^7.16.0", "@babel/plugin-proposal-numeric-separator@^7.18.6": + version 
"7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75" + integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + +"@babel/plugin-proposal-object-rest-spread@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.18.9.tgz#f9434f6beb2c8cae9dfcf97d2a5941bbbf9ad4e7" + integrity sha512-kDDHQ5rflIeY5xl69CEqGEZ0KY369ehsCIEbTGb4siHG5BE9sga/T0r0OUwyZNLMmZE79E1kbsqAjwFCW4ds6Q== + dependencies: + "@babel/compat-data" "^7.18.8" + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.18.8" + +"@babel/plugin-proposal-optional-catch-binding@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz#f9400d0e6a3ea93ba9ef70b09e72dd6da638a2cb" + integrity sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + +"@babel/plugin-proposal-optional-chaining@^7.16.0", "@babel/plugin-proposal-optional-chaining@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.18.9.tgz#e8e8fe0723f2563960e4bf5e9690933691915993" + integrity sha512-v5nwt4IqBXihxGsW2QmCWMDS3B3bzGIk/EQVZz2ei7f3NJl8NzAJVvUmpDW5q1CRNY+Beb/k58UAH1Km1N411w== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + 
"@babel/plugin-syntax-optional-chaining" "^7.8.3" + +"@babel/plugin-proposal-private-methods@^7.16.0", "@babel/plugin-proposal-private-methods@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz#5209de7d213457548a98436fa2882f52f4be6bea" + integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-private-property-in-object@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.18.6.tgz#a64137b232f0aca3733a67eb1a144c192389c503" + integrity sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + +"@babel/plugin-proposal-unicode-property-regex@^7.18.6", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz#af613d2cd5e643643b65cded64207b15c85cb78e" + integrity sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "http://localhost:4873/@babel%2fplugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity 
sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-bigint@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3": + version "7.12.13" + resolved "http://localhost:4873/@babel%2fplugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-class-static-block@^7.14.5": + version "7.14.5" + resolved "http://localhost:4873/@babel%2fplugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" + integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-decorators@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-syntax-decorators/-/plugin-syntax-decorators-7.19.0.tgz#5f13d1d8fce96951bea01a10424463c9a5b3a599" + integrity sha512-xaBZUEDntt4faL1yN8oIFlhfXeQAWJW7CLKYsHTUqriCUbj8xOra8bfxxKGi/UwExPFBuPdH4XfHc9rGQhrVkQ== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-syntax-dynamic-import@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" + integrity 
sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-export-namespace-from@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" + integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.3" + +"@babel/plugin-syntax-flow@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-syntax-flow/-/plugin-syntax-flow-7.18.6.tgz#774d825256f2379d06139be0c723c4dd444f3ca1" + integrity sha512-LUbR+KNTBWCUAqRG9ex5Gnzu2IOkt8jRJbHHXFT9q+L9zm7M/QQbEqXyw1n1pohYvOyWC8CjeyjrSaIwiYjK7A== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-import-assertions@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.18.6.tgz#cd6190500a4fa2fe31990a963ffab4b63e4505e4" + integrity sha512-/DU3RXad9+bZwrgWJQKbr39gYbJpLJHezqEzRzi/BHRlJ9zsQb4CK2CA/5apllXNomwA1qHwzvHl+AdEmC5krQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-import-meta@^7.8.3": + version "7.10.4" + resolved "http://localhost:4873/@babel%2fplugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity 
sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz#a8feef63b010150abd97f1649ec296e849943ca0" + integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": + version "7.10.4" + resolved "http://localhost:4873/@babel%2fplugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3": + version "7.10.4" + resolved "http://localhost:4873/@babel%2fplugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved 
"http://localhost:4873/@babel%2fplugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-private-property-in-object@^7.14.5": + version "7.14.5" + resolved "http://localhost:4873/@babel%2fplugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" + integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-top-level-await@^7.14.5", "@babel/plugin-syntax-top-level-await@^7.8.3": + version "7.14.5" + resolved "http://localhost:4873/@babel%2fplugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + 
+"@babel/plugin-syntax-typescript@^7.18.6", "@babel/plugin-syntax-typescript@^7.7.2": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz#1c09cd25795c7c2b8a4ba9ae49394576d4133285" + integrity sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-arrow-functions@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.18.6.tgz#19063fcf8771ec7b31d742339dac62433d0611fe" + integrity sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-async-to-generator@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.18.6.tgz#ccda3d1ab9d5ced5265fdb13f1882d5476c71615" + integrity sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-remap-async-to-generator" "^7.18.6" + +"@babel/plugin-transform-block-scoped-functions@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz#9187bf4ba302635b9d70d986ad70f038726216a8" + integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-block-scoping@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-block-scoping/-/plugin-transform-block-scoping-7.18.9.tgz#f9b7e018ac3f373c81452d6ada8bd5a18928926d" + integrity 
sha512-5sDIJRV1KtQVEbt/EIBwGy4T01uYIo4KRB3VUqzkhrAIOGx7AoctL9+Ux88btY0zXdDyPJ9mW+bg+v+XEkGmtw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-classes@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-transform-classes/-/plugin-transform-classes-7.19.0.tgz#0e61ec257fba409c41372175e7c1e606dc79bb20" + integrity sha512-YfeEE9kCjqTS9IitkgfJuxjcEtLUHMqa8yUJ6zdz8vR7hKuo6mOy2C05P0F1tdMmDCeuyidKnlrw/iTppHcr2A== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-compilation-targets" "^7.19.0" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-replace-supers" "^7.18.9" + "@babel/helper-split-export-declaration" "^7.18.6" + globals "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-computed-properties/-/plugin-transform-computed-properties-7.18.9.tgz#2357a8224d402dad623caf6259b611e56aec746e" + integrity sha512-+i0ZU1bCDymKakLxn5srGHrsAPRELC2WIbzwjLhHW9SIE1cPYkLCL0NlnXMZaM1vhfgA2+M7hySk42VBvrkBRw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-destructuring@^7.18.13": + version "7.18.13" + resolved "http://localhost:4873/@babel%2fplugin-transform-destructuring/-/plugin-transform-destructuring-7.18.13.tgz#9e03bc4a94475d62b7f4114938e6c5c33372cbf5" + integrity sha512-TodpQ29XekIsex2A+YJPj5ax2plkGa8YYY6mFjCohk/IG9IY42Rtuj1FuDeemfg2ipxIFLzPeA83SIBnlhSIow== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-dotall-regex@^7.18.6", "@babel/plugin-transform-dotall-regex@^7.4.4": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz#b286b3e7aae6c7b861e45bed0a2fafd6b1a4fef8" + integrity 
sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-duplicate-keys@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz#687f15ee3cdad6d85191eb2a372c4528eaa0ae0e" + integrity sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-exponentiation-operator@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz#421c705f4521888c65e91fdd1af951bfefd4dacd" + integrity sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw== + dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-flow-strip-types@^7.16.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.19.0.tgz#e9e8606633287488216028719638cbbb2f2dde8f" + integrity sha512-sgeMlNaQVbCSpgLSKP4ZZKfsJVnFnNQlUSk6gPYzR/q7tzCgQF2t8RBKAP6cKJeZdveei7Q7Jm527xepI8lNLg== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/plugin-syntax-flow" "^7.18.6" + +"@babel/plugin-transform-for-of@^7.18.8": + version "7.18.8" + resolved "http://localhost:4873/@babel%2fplugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz#6ef8a50b244eb6a0bdbad0c7c61877e4e30097c1" + integrity sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-function-name@^7.18.9": + version "7.18.9" + 
resolved "http://localhost:4873/@babel%2fplugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz#cc354f8234e62968946c61a46d6365440fc764e0" + integrity sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ== + dependencies: + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-function-name" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-literals@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz#72796fdbef80e56fba3c6a699d54f0de557444bc" + integrity sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-member-expression-literals@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz#ac9fdc1a118620ac49b7e7a5d2dc177a1bfee88e" + integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-modules-amd@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-modules-amd/-/plugin-transform-modules-amd-7.18.6.tgz#8c91f8c5115d2202f277549848874027d7172d21" + integrity sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-commonjs@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.18.6.tgz#afd243afba166cca69892e24a8fd8c9f2ca87883" + integrity 
sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-simple-access" "^7.18.6" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-systemjs@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.19.0.tgz#5f20b471284430f02d9c5059d9b9a16d4b085a1f" + integrity sha512-x9aiR0WXAWmOWsqcsnrzGR+ieaTMVyGyffPVA7F8cXAGt/UxefYv6uSHZLkAFChN5M5Iy1+wjE+xJuPt22H39A== + dependencies: + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-module-transforms" "^7.19.0" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-validator-identifier" "^7.18.6" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-umd@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz#81d3832d6034b75b54e62821ba58f28ed0aab4b9" + integrity sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-named-capturing-groups-regex@^7.19.1": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fplugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.19.1.tgz#ec7455bab6cd8fb05c525a94876f435a48128888" + integrity sha512-oWk9l9WItWBQYS4FgXD4Uyy5kq898lvkXpXQxoJEY1RnvPk4R/Dvu2ebXU9q8lP+rlMwUQTFf2Ok6d78ODa0kw== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.19.0" + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-transform-new-target@^7.18.6": + version "7.18.6" + resolved 
"http://localhost:4873/@babel%2fplugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz#d128f376ae200477f37c4ddfcc722a8a1b3246a8" + integrity sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-object-super@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz#fb3c6ccdd15939b6ff7939944b51971ddc35912c" + integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-replace-supers" "^7.18.6" + +"@babel/plugin-transform-parameters@^7.18.8": + version "7.18.8" + resolved "http://localhost:4873/@babel%2fplugin-transform-parameters/-/plugin-transform-parameters-7.18.8.tgz#ee9f1a0ce6d78af58d0956a9378ea3427cccb48a" + integrity sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-property-literals@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz#e22498903a483448e94e032e9bbb9c5ccbfc93a3" + integrity sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-constant-elements@^7.12.1": + version "7.18.12" + resolved "http://localhost:4873/@babel%2fplugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.18.12.tgz#edf3bec47eb98f14e84fa0af137fcc6aad8e0443" + integrity sha512-Q99U9/ttiu+LMnRU8psd23HhvwXmKWDQIpocm0JKaICcZHnw+mdQbHm6xnSy7dOl8I5PELakYtNBubNQlBXbZw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + 
+"@babel/plugin-transform-react-display-name@^7.16.0", "@babel/plugin-transform-react-display-name@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz#8b1125f919ef36ebdfff061d664e266c666b9415" + integrity sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-jsx-development@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz#dbe5c972811e49c7405b630e4d0d2e1380c0ddc5" + integrity sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA== + dependencies: + "@babel/plugin-transform-react-jsx" "^7.18.6" + +"@babel/plugin-transform-react-jsx@^7.18.6": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-transform-react-jsx/-/plugin-transform-react-jsx-7.19.0.tgz#b3cbb7c3a00b92ec8ae1027910e331ba5c500eb9" + integrity sha512-UVEvX3tXie3Szm3emi1+G63jyw1w5IcMY0FSKM+CRnKRI5Mr1YbCNgsSTwoTwKphQEG9P+QqmuRFneJPZuHNhg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/plugin-syntax-jsx" "^7.18.6" + "@babel/types" "^7.19.0" + +"@babel/plugin-transform-react-pure-annotations@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz#561af267f19f3e5d59291f9950fd7b9663d0d844" + integrity sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-regenerator@^7.18.6": + version "7.18.6" + resolved 
"http://localhost:4873/@babel%2fplugin-transform-regenerator/-/plugin-transform-regenerator-7.18.6.tgz#585c66cb84d4b4bf72519a34cfce761b8676ca73" + integrity sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + regenerator-transform "^0.15.0" + +"@babel/plugin-transform-reserved-words@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz#b1abd8ebf8edaa5f7fe6bbb8d2133d23b6a6f76a" + integrity sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-runtime@^7.16.4": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fplugin-transform-runtime/-/plugin-transform-runtime-7.19.1.tgz#a3df2d7312eea624c7889a2dcd37fd1dfd25b2c6" + integrity sha512-2nJjTUFIzBMP/f/miLxEK9vxwW/KUXsdvN4sR//TmuDhe6yU2h57WmIOE12Gng3MDP/xpjUV/ToZRdcf8Yj4fA== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.19.0" + babel-plugin-polyfill-corejs2 "^0.3.3" + babel-plugin-polyfill-corejs3 "^0.6.0" + babel-plugin-polyfill-regenerator "^0.4.1" + semver "^6.3.0" + +"@babel/plugin-transform-shorthand-properties@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz#6d6df7983d67b195289be24909e3f12a8f664dc9" + integrity sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-spread@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-transform-spread/-/plugin-transform-spread-7.19.0.tgz#dd60b4620c2fec806d60cfaae364ec2188d593b6" + integrity 
sha512-RsuMk7j6n+r752EtzyScnWkQyuJdli6LdO5Klv8Yx0OfPVTcQkIUfS8clx5e9yHXzlnhOZF3CbQ8C2uP5j074w== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + +"@babel/plugin-transform-sticky-regex@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz#c6706eb2b1524028e317720339583ad0f444adcc" + integrity sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-template-literals@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz#04ec6f10acdaa81846689d63fae117dd9c243a5e" + integrity sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-typeof-symbol@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz#c8cea68263e45addcd6afc9091429f80925762c0" + integrity sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-typescript@^7.18.6": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fplugin-transform-typescript/-/plugin-transform-typescript-7.19.3.tgz#4f1db1e0fe278b42ddbc19ec2f6cd2f8262e35d6" + integrity sha512-z6fnuK9ve9u/0X0rRvI9MY0xg+DOUaABDYOe+/SQTxtlptaBB/V9JIUxJn6xp3lMBeb9qe8xSFmHU35oZDXD+w== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.19.0" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/plugin-syntax-typescript" "^7.18.6" + +"@babel/plugin-transform-unicode-escapes@^7.18.10": + version "7.18.10" + resolved 
"http://localhost:4873/@babel%2fplugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz#1ecfb0eda83d09bbcb77c09970c2dd55832aa246" + integrity sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-unicode-regex@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz#194317225d8c201bbae103364ffe9e2cea36cdca" + integrity sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/preset-env@^7.11.0", "@babel/preset-env@^7.12.1", "@babel/preset-env@^7.16.4": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fpreset-env/-/preset-env-7.19.3.tgz#52cd19abaecb3f176a4ff9cc5e15b7bf06bec754" + integrity sha512-ziye1OTc9dGFOAXSWKUqQblYHNlBOaDl8wzqf2iKXJAltYiR3hKHUKmkt+S9PppW7RQpq4fFCrwwpIDj/f5P4w== + dependencies: + "@babel/compat-data" "^7.19.3" + "@babel/helper-compilation-targets" "^7.19.3" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-async-generator-functions" "^7.19.1" + "@babel/plugin-proposal-class-properties" "^7.18.6" + "@babel/plugin-proposal-class-static-block" "^7.18.6" + "@babel/plugin-proposal-dynamic-import" "^7.18.6" + "@babel/plugin-proposal-export-namespace-from" "^7.18.9" + "@babel/plugin-proposal-json-strings" "^7.18.6" + "@babel/plugin-proposal-logical-assignment-operators" "^7.18.9" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6" + "@babel/plugin-proposal-numeric-separator" "^7.18.6" + 
"@babel/plugin-proposal-object-rest-spread" "^7.18.9" + "@babel/plugin-proposal-optional-catch-binding" "^7.18.6" + "@babel/plugin-proposal-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-private-methods" "^7.18.6" + "@babel/plugin-proposal-private-property-in-object" "^7.18.6" + "@babel/plugin-proposal-unicode-property-regex" "^7.18.6" + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-class-properties" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-import-assertions" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" + "@babel/plugin-transform-arrow-functions" "^7.18.6" + "@babel/plugin-transform-async-to-generator" "^7.18.6" + "@babel/plugin-transform-block-scoped-functions" "^7.18.6" + "@babel/plugin-transform-block-scoping" "^7.18.9" + "@babel/plugin-transform-classes" "^7.19.0" + "@babel/plugin-transform-computed-properties" "^7.18.9" + "@babel/plugin-transform-destructuring" "^7.18.13" + "@babel/plugin-transform-dotall-regex" "^7.18.6" + "@babel/plugin-transform-duplicate-keys" "^7.18.9" + "@babel/plugin-transform-exponentiation-operator" "^7.18.6" + "@babel/plugin-transform-for-of" "^7.18.8" + "@babel/plugin-transform-function-name" "^7.18.9" + "@babel/plugin-transform-literals" "^7.18.9" + "@babel/plugin-transform-member-expression-literals" "^7.18.6" + "@babel/plugin-transform-modules-amd" "^7.18.6" + 
"@babel/plugin-transform-modules-commonjs" "^7.18.6" + "@babel/plugin-transform-modules-systemjs" "^7.19.0" + "@babel/plugin-transform-modules-umd" "^7.18.6" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.19.1" + "@babel/plugin-transform-new-target" "^7.18.6" + "@babel/plugin-transform-object-super" "^7.18.6" + "@babel/plugin-transform-parameters" "^7.18.8" + "@babel/plugin-transform-property-literals" "^7.18.6" + "@babel/plugin-transform-regenerator" "^7.18.6" + "@babel/plugin-transform-reserved-words" "^7.18.6" + "@babel/plugin-transform-shorthand-properties" "^7.18.6" + "@babel/plugin-transform-spread" "^7.19.0" + "@babel/plugin-transform-sticky-regex" "^7.18.6" + "@babel/plugin-transform-template-literals" "^7.18.9" + "@babel/plugin-transform-typeof-symbol" "^7.18.9" + "@babel/plugin-transform-unicode-escapes" "^7.18.10" + "@babel/plugin-transform-unicode-regex" "^7.18.6" + "@babel/preset-modules" "^0.1.5" + "@babel/types" "^7.19.3" + babel-plugin-polyfill-corejs2 "^0.3.3" + babel-plugin-polyfill-corejs3 "^0.6.0" + babel-plugin-polyfill-regenerator "^0.4.1" + core-js-compat "^3.25.1" + semver "^6.3.0" + +"@babel/preset-modules@^0.1.5": + version "0.1.5" + resolved "http://localhost:4873/@babel%2fpreset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9" + integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" + "@babel/plugin-transform-dotall-regex" "^7.4.4" + "@babel/types" "^7.4.4" + esutils "^2.0.2" + +"@babel/preset-react@^7.12.5", "@babel/preset-react@^7.16.0": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fpreset-react/-/preset-react-7.18.6.tgz#979f76d6277048dc19094c217b507f3ad517dd2d" + integrity sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg== + dependencies: + "@babel/helper-plugin-utils" 
"^7.18.6" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-transform-react-display-name" "^7.18.6" + "@babel/plugin-transform-react-jsx" "^7.18.6" + "@babel/plugin-transform-react-jsx-development" "^7.18.6" + "@babel/plugin-transform-react-pure-annotations" "^7.18.6" + +"@babel/preset-typescript@^7.16.0": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fpreset-typescript/-/preset-typescript-7.18.6.tgz#ce64be3e63eddc44240c6358daefac17b3186399" + integrity sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-transform-typescript" "^7.18.6" + +"@babel/runtime-corejs3@^7.10.2": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fruntime-corejs3/-/runtime-corejs3-7.19.1.tgz#f0cbbe7edda7c4109cd253bb1dee99aba4594ad9" + integrity sha512-j2vJGnkopRzH+ykJ8h68wrHnEUmtK//E723jjixiAl/PPf6FhqY/vYRcMVlNydRKQjQsTsYEjpx+DZMIvnGk/g== + dependencies: + core-js-pure "^3.25.1" + regenerator-runtime "^0.13.4" + +"@babel/runtime@^7.10.2", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.5", "@babel/runtime@^7.16.3", "@babel/runtime@^7.18.9", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fruntime/-/runtime-7.19.0.tgz#22b11c037b094d27a8a2504ea4dcff00f50e2259" + integrity sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA== + dependencies: + regenerator-runtime "^0.13.4" + +"@babel/template@^7.18.10", "@babel/template@^7.3.3": + version "7.18.10" + resolved "http://localhost:4873/@babel%2ftemplate/-/template-7.18.10.tgz#6f9134835970d1dbf0835c0d100c9f38de0c5e71" + integrity sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/parser" "^7.18.10" + "@babel/types" "^7.18.10" + 
+"@babel/traverse@^7.19.0", "@babel/traverse@^7.19.1", "@babel/traverse@^7.19.3", "@babel/traverse@^7.7.2": + version "7.19.3" + resolved "http://localhost:4873/@babel%2ftraverse/-/traverse-7.19.3.tgz#3a3c5348d4988ba60884e8494b0592b2f15a04b4" + integrity sha512-qh5yf6149zhq2sgIXmwjnsvmnNQC2iw70UFjp4olxucKrWd/dvlUsBI88VSLUsnMNF7/vnOiA+nk1+yLoCqROQ== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.3" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.19.3" + "@babel/types" "^7.19.3" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.0.0", "@babel/types@^7.12.6", "@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.18.9", "@babel/types@^7.19.0", "@babel/types@^7.19.3", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": + version "7.19.3" + resolved "http://localhost:4873/@babel%2ftypes/-/types-7.19.3.tgz#fc420e6bbe54880bce6779ffaf315f5e43ec9624" + integrity sha512-hGCaQzIY22DJlDh9CH7NOxgKkFjBk0Cw9xDO1Xmh2151ti7wiGfQ3LauXzL4HP1fmFlTX6XjpRETTpUcv7wQLw== + dependencies: + "@babel/helper-string-parser" "^7.18.10" + "@babel/helper-validator-identifier" "^7.19.1" + to-fast-properties "^2.0.0" + +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "http://localhost:4873/@bcoe%2fv8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + +"@craco/craco@^7.0.0-alpha.8": + version "7.0.0-alpha.8" + resolved "http://localhost:4873/@craco%2fcraco/-/craco-7.0.0-alpha.8.tgz#40f19f44198ff2341b40654c8c6b4f54c2217972" + integrity sha512-IN3/ldPaktGflPu342cg7n8LYa2c3x9H2XzngUkDzTjro25ig1GyVcUdnG1U0X6wrRTF9K1AxZ5su9jLbdyFUw== + dependencies: + autoprefixer "^10.4.12" + cosmiconfig "^7.0.1" + cosmiconfig-typescript-loader "^4.1.1" + 
cross-spawn "^7.0.3" + lodash "^4.17.21" + semver "^7.3.7" + webpack-merge "^5.8.0" + +"@csstools/normalize.css@*": + version "12.0.0" + resolved "http://localhost:4873/@csstools%2fnormalize.css/-/normalize.css-12.0.0.tgz#a9583a75c3f150667771f30b60d9f059473e62c4" + integrity sha512-M0qqxAcwCsIVfpFQSlGN5XjXWu8l5JDZN+fPt1LeW5SZexQTgnaEvgXAY+CeygRw0EeppWHi12JxESWiWrB0Sg== + +"@csstools/postcss-cascade-layers@^1.1.0": + version "1.1.1" + resolved "http://localhost:4873/@csstools%2fpostcss-cascade-layers/-/postcss-cascade-layers-1.1.1.tgz#8a997edf97d34071dd2e37ea6022447dd9e795ad" + integrity sha512-+KdYrpKC5TgomQr2DlZF4lDEpHcoxnj5IGddYYfBWJAKfj1JtuHUIqMa+E1pJJ+z3kvDViWMqyqPlG4Ja7amQA== + dependencies: + "@csstools/selector-specificity" "^2.0.2" + postcss-selector-parser "^6.0.10" + +"@csstools/postcss-color-function@^1.1.1": + version "1.1.1" + resolved "http://localhost:4873/@csstools%2fpostcss-color-function/-/postcss-color-function-1.1.1.tgz#2bd36ab34f82d0497cfacdc9b18d34b5e6f64b6b" + integrity sha512-Bc0f62WmHdtRDjf5f3e2STwRAl89N2CLb+9iAwzrv4L2hncrbDwnQD9PCq0gtAt7pOI2leIV08HIBUd4jxD8cw== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +"@csstools/postcss-font-format-keywords@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@csstools%2fpostcss-font-format-keywords/-/postcss-font-format-keywords-1.0.1.tgz#677b34e9e88ae997a67283311657973150e8b16a" + integrity sha512-ZgrlzuUAjXIOc2JueK0X5sZDjCtgimVp/O5CEqTcs5ShWBa6smhWYbS0x5cVc/+rycTDbjjzoP0KTDnUneZGOg== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-hwb-function@^1.0.2": + version "1.0.2" + resolved "http://localhost:4873/@csstools%2fpostcss-hwb-function/-/postcss-hwb-function-1.0.2.tgz#ab54a9fce0ac102c754854769962f2422ae8aa8b" + integrity sha512-YHdEru4o3Rsbjmu6vHy4UKOXZD+Rn2zmkAmLRfPet6+Jz4Ojw8cbWxe1n42VaXQhD3CQUXXTooIy8OkVbUcL+w== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-ic-unit@^1.0.1": + 
version "1.0.1" + resolved "http://localhost:4873/@csstools%2fpostcss-ic-unit/-/postcss-ic-unit-1.0.1.tgz#28237d812a124d1a16a5acc5c3832b040b303e58" + integrity sha512-Ot1rcwRAaRHNKC9tAqoqNZhjdYBzKk1POgWfhN4uCOE47ebGcLRqXjKkApVDpjifL6u2/55ekkpnFcp+s/OZUw== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +"@csstools/postcss-is-pseudo-class@^2.0.7": + version "2.0.7" + resolved "http://localhost:4873/@csstools%2fpostcss-is-pseudo-class/-/postcss-is-pseudo-class-2.0.7.tgz#846ae6c0d5a1eaa878fce352c544f9c295509cd1" + integrity sha512-7JPeVVZHd+jxYdULl87lvjgvWldYu+Bc62s9vD/ED6/QTGjy0jy0US/f6BG53sVMTBJ1lzKZFpYmofBN9eaRiA== + dependencies: + "@csstools/selector-specificity" "^2.0.0" + postcss-selector-parser "^6.0.10" + +"@csstools/postcss-nested-calc@^1.0.0": + version "1.0.0" + resolved "http://localhost:4873/@csstools%2fpostcss-nested-calc/-/postcss-nested-calc-1.0.0.tgz#d7e9d1d0d3d15cf5ac891b16028af2a1044d0c26" + integrity sha512-JCsQsw1wjYwv1bJmgjKSoZNvf7R6+wuHDAbi5f/7MbFhl2d/+v+TvBTU4BJH3G1X1H87dHl0mh6TfYogbT/dJQ== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-normalize-display-values@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@csstools%2fpostcss-normalize-display-values/-/postcss-normalize-display-values-1.0.1.tgz#15da54a36e867b3ac5163ee12c1d7f82d4d612c3" + integrity sha512-jcOanIbv55OFKQ3sYeFD/T0Ti7AMXc9nM1hZWu8m/2722gOTxFg7xYu4RDLJLeZmPUVQlGzo4jhzvTUq3x4ZUw== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-oklab-function@^1.1.1": + version "1.1.1" + resolved "http://localhost:4873/@csstools%2fpostcss-oklab-function/-/postcss-oklab-function-1.1.1.tgz#88cee0fbc8d6df27079ebd2fa016ee261eecf844" + integrity sha512-nJpJgsdA3dA9y5pgyb/UfEzE7W5Ka7u0CX0/HIMVBNWzWemdcTH3XwANECU6anWv/ao4vVNLTMxhiPNZsTK6iA== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + 
+"@csstools/postcss-progressive-custom-properties@^1.1.0", "@csstools/postcss-progressive-custom-properties@^1.3.0": + version "1.3.0" + resolved "http://localhost:4873/@csstools%2fpostcss-progressive-custom-properties/-/postcss-progressive-custom-properties-1.3.0.tgz#542292558384361776b45c85226b9a3a34f276fa" + integrity sha512-ASA9W1aIy5ygskZYuWams4BzafD12ULvSypmaLJT2jvQ8G0M3I8PRQhC0h7mG0Z3LI05+agZjqSR9+K9yaQQjA== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-stepped-value-functions@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@csstools%2fpostcss-stepped-value-functions/-/postcss-stepped-value-functions-1.0.1.tgz#f8772c3681cc2befed695e2b0b1d68e22f08c4f4" + integrity sha512-dz0LNoo3ijpTOQqEJLY8nyaapl6umbmDcgj4AD0lgVQ572b2eqA1iGZYTTWhrcrHztWDDRAX2DGYyw2VBjvCvQ== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-text-decoration-shorthand@^1.0.0": + version "1.0.0" + resolved "http://localhost:4873/@csstools%2fpostcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-1.0.0.tgz#ea96cfbc87d921eca914d3ad29340d9bcc4c953f" + integrity sha512-c1XwKJ2eMIWrzQenN0XbcfzckOLLJiczqy+YvfGmzoVXd7pT9FfObiSEfzs84bpE/VqfpEuAZ9tCRbZkZxxbdw== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-trigonometric-functions@^1.0.2": + version "1.0.2" + resolved "http://localhost:4873/@csstools%2fpostcss-trigonometric-functions/-/postcss-trigonometric-functions-1.0.2.tgz#94d3e4774c36d35dcdc88ce091336cb770d32756" + integrity sha512-woKaLO///4bb+zZC2s80l+7cm07M7268MsyG3M0ActXXEFi6SuhvriQYcb58iiKGbjwwIU7n45iRLEHypB47Og== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-unset-value@^1.0.2": + version "1.0.2" + resolved "http://localhost:4873/@csstools%2fpostcss-unset-value/-/postcss-unset-value-1.0.2.tgz#c99bb70e2cdc7312948d1eb41df2412330b81f77" + integrity sha512-c8J4roPBILnelAsdLr4XOAR/GsTm0GJi4XpcfvoWk3U6KiTCqiFYc63KhRMQQX35jYMp4Ao8Ij9+IZRgMfJp1g== + 
+"@csstools/selector-specificity@^2.0.0", "@csstools/selector-specificity@^2.0.2": + version "2.0.2" + resolved "http://localhost:4873/@csstools%2fselector-specificity/-/selector-specificity-2.0.2.tgz#1bfafe4b7ed0f3e4105837e056e0a89b108ebe36" + integrity sha512-IkpVW/ehM1hWKln4fCA3NzJU8KwD+kIOvPZA4cqxoJHtE21CCzjyp+Kxbu0i5I4tBNOlXPL9mjwnWlL0VEG4Fg== + +"@eslint/eslintrc@^1.3.2": + version "1.3.2" + resolved "http://localhost:4873/@eslint%2feslintrc/-/eslintrc-1.3.2.tgz#58b69582f3b7271d8fa67fe5251767a5b38ea356" + integrity sha512-AXYd23w1S/bv3fTs3Lz0vjiYemS08jWkI3hYyS9I1ry+0f+Yjs1wm+sU0BS8qDOPrBIkp4qHYC16I8uVtpLajQ== + dependencies: + ajv "^6.12.4" + debug "^4.3.2" + espree "^9.4.0" + globals "^13.15.0" + ignore "^5.2.0" + import-fresh "^3.2.1" + js-yaml "^4.1.0" + minimatch "^3.1.2" + strip-json-comments "^3.1.1" + +"@humanwhocodes/config-array@^0.10.5": + version "0.10.7" + resolved "http://localhost:4873/@humanwhocodes%2fconfig-array/-/config-array-0.10.7.tgz#6d53769fd0c222767e6452e8ebda825c22e9f0dc" + integrity sha512-MDl6D6sBsaV452/QSdX+4CXIjZhIcI0PELsxUjk4U828yd58vk3bTIvk/6w5FY+4hIy9sLW0sfrV7K7Kc++j/w== + dependencies: + "@humanwhocodes/object-schema" "^1.2.1" + debug "^4.1.1" + minimatch "^3.0.4" + +"@humanwhocodes/gitignore-to-minimatch@^1.0.2": + version "1.0.2" + resolved "http://localhost:4873/@humanwhocodes%2fgitignore-to-minimatch/-/gitignore-to-minimatch-1.0.2.tgz#316b0a63b91c10e53f242efb4ace5c3b34e8728d" + integrity sha512-rSqmMJDdLFUsyxR6FMtD00nfQKKLFb1kv+qBbOVKqErvloEIJLo5bDTJTQNTYgeyp78JsA7u/NPi5jT1GR/MuA== + +"@humanwhocodes/module-importer@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@humanwhocodes%2fmodule-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== + +"@humanwhocodes/object-schema@^1.2.1": + version "1.2.1" + resolved 
"http://localhost:4873/@humanwhocodes%2fobject-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "http://localhost:4873/@istanbuljs%2fload-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "http://localhost:4873/@istanbuljs%2fschema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + +"@jest/console@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2fconsole/-/console-27.5.1.tgz#260fe7239602fe5130a94f1aa386eff54b014bba" + integrity sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg== + dependencies: + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^27.5.1" + jest-util "^27.5.1" + slash "^3.0.0" + +"@jest/console@^28.1.3": + version "28.1.3" + resolved "http://localhost:4873/@jest%2fconsole/-/console-28.1.3.tgz#2030606ec03a18c31803b8a36382762e447655df" + integrity sha512-QPAkP5EwKdK/bxIr6C1I4Vs0rm2nHiANzj/Z5X2JQkrZo6IqvC4ldZ9K95tF0HdidhA8Bo6egxSzUFPYKcEXLw== + dependencies: + "@jest/types" "^28.1.3" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^28.1.3" + jest-util "^28.1.3" + slash "^3.0.0" + +"@jest/core@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2fcore/-/core-27.5.1.tgz#267ac5f704e09dc52de2922cbf3af9edcd64b626" + integrity 
sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ== + dependencies: + "@jest/console" "^27.5.1" + "@jest/reporters" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.8.1" + exit "^0.1.2" + graceful-fs "^4.2.9" + jest-changed-files "^27.5.1" + jest-config "^27.5.1" + jest-haste-map "^27.5.1" + jest-message-util "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-resolve-dependencies "^27.5.1" + jest-runner "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + jest-watcher "^27.5.1" + micromatch "^4.0.4" + rimraf "^3.0.0" + slash "^3.0.0" + strip-ansi "^6.0.0" + +"@jest/environment@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2fenvironment/-/environment-27.5.1.tgz#d7425820511fe7158abbecc010140c3fd3be9c74" + integrity sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA== + dependencies: + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + jest-mock "^27.5.1" + +"@jest/expect-utils@^29.1.2": + version "29.1.2" + resolved "http://localhost:4873/@jest%2fexpect-utils/-/expect-utils-29.1.2.tgz#66dbb514d38f7d21456bc774419c9ae5cca3f88d" + integrity sha512-4a48bhKfGj/KAH39u0ppzNTABXQ8QPccWAFUFobWBaEMSMp+sB31Z2fK/l47c4a/Mu1po2ffmfAIPxXbVTXdtg== + dependencies: + jest-get-type "^29.0.0" + +"@jest/fake-timers@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ffake-timers/-/fake-timers-27.5.1.tgz#76979745ce0579c8a94a4678af7a748eda8ada74" + integrity sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ== + dependencies: + "@jest/types" "^27.5.1" + "@sinonjs/fake-timers" "^8.0.1" + "@types/node" "*" + jest-message-util "^27.5.1" + jest-mock "^27.5.1" + jest-util "^27.5.1" + 
+"@jest/globals@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2fglobals/-/globals-27.5.1.tgz#7ac06ce57ab966566c7963431cef458434601b2b" + integrity sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/types" "^27.5.1" + expect "^27.5.1" + +"@jest/reporters@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2freporters/-/reporters-27.5.1.tgz#ceda7be96170b03c923c37987b64015812ffec04" + integrity sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@jest/console" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.2" + graceful-fs "^4.2.9" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-instrument "^5.1.0" + istanbul-lib-report "^3.0.0" + istanbul-lib-source-maps "^4.0.0" + istanbul-reports "^3.1.3" + jest-haste-map "^27.5.1" + jest-resolve "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" + slash "^3.0.0" + source-map "^0.6.0" + string-length "^4.0.1" + terminal-link "^2.0.0" + v8-to-istanbul "^8.1.0" + +"@jest/schemas@^28.1.3": + version "28.1.3" + resolved "http://localhost:4873/@jest%2fschemas/-/schemas-28.1.3.tgz#ad8b86a66f11f33619e3d7e1dcddd7f2d40ff905" + integrity sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg== + dependencies: + "@sinclair/typebox" "^0.24.1" + +"@jest/schemas@^29.0.0": + version "29.0.0" + resolved "http://localhost:4873/@jest%2fschemas/-/schemas-29.0.0.tgz#5f47f5994dd4ef067fb7b4188ceac45f77fe952a" + integrity sha512-3Ab5HgYIIAnS0HjqJHQYZS+zXc4tUmTmBH3z83ajI6afXp8X3ZtdLX+nXx+I7LNkJD7uN9LAVhgnjDgZa2z0kA== + dependencies: + "@sinclair/typebox" "^0.24.1" + +"@jest/source-map@^27.5.1": + version "27.5.1" + resolved 
"http://localhost:4873/@jest%2fsource-map/-/source-map-27.5.1.tgz#6608391e465add4205eae073b55e7f279e04e8cf" + integrity sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg== + dependencies: + callsites "^3.0.0" + graceful-fs "^4.2.9" + source-map "^0.6.0" + +"@jest/test-result@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ftest-result/-/test-result-27.5.1.tgz#56a6585fa80f7cdab72b8c5fc2e871d03832f5bb" + integrity sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag== + dependencies: + "@jest/console" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-result@^28.1.3": + version "28.1.3" + resolved "http://localhost:4873/@jest%2ftest-result/-/test-result-28.1.3.tgz#5eae945fd9f4b8fcfce74d239e6f725b6bf076c5" + integrity sha512-kZAkxnSE+FqE8YjW8gNuoVkkC9I7S1qmenl8sGcDOLropASP+BkcGKwhXoyqQuGOGeYY0y/ixjrd/iERpEXHNg== + dependencies: + "@jest/console" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-sequencer@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ftest-sequencer/-/test-sequencer-27.5.1.tgz#4057e0e9cea4439e544c6353c6affe58d095745b" + integrity sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ== + dependencies: + "@jest/test-result" "^27.5.1" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-runtime "^27.5.1" + +"@jest/transform@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ftransform/-/transform-27.5.1.tgz#6c3501dcc00c4c08915f292a600ece5ecfe1f409" + integrity sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw== + dependencies: + "@babel/core" "^7.1.0" + "@jest/types" "^27.5.1" + babel-plugin-istanbul "^6.1.1" + chalk "^4.0.0" + convert-source-map "^1.4.0" + 
fast-json-stable-stringify "^2.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-regex-util "^27.5.1" + jest-util "^27.5.1" + micromatch "^4.0.4" + pirates "^4.0.4" + slash "^3.0.0" + source-map "^0.6.1" + write-file-atomic "^3.0.0" + +"@jest/types@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ftypes/-/types-27.5.1.tgz#3c79ec4a8ba61c170bf937bcf9e98a9df175ec80" + integrity sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^16.0.0" + chalk "^4.0.0" + +"@jest/types@^28.1.3": + version "28.1.3" + resolved "http://localhost:4873/@jest%2ftypes/-/types-28.1.3.tgz#b05de80996ff12512bc5ceb1d208285a7d11748b" + integrity sha512-RyjiyMUZrKz/c+zlMFO1pm70DcIlST8AeWTkoUdZevew44wcNZQHsEVOiCVtgVnlFFD82FPaXycys58cf2muVQ== + dependencies: + "@jest/schemas" "^28.1.3" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + +"@jest/types@^29.1.2": + version "29.1.2" + resolved "http://localhost:4873/@jest%2ftypes/-/types-29.1.2.tgz#7442d32b16bcd7592d9614173078b8c334ec730a" + integrity sha512-DcXGtoTykQB5jiwCmVr8H4vdg2OJhQex3qPkG+ISyDO7xQXbt/4R6dowcRyPemRnkH7JoHvZuxPBdlq+9JxFCg== + dependencies: + "@jest/schemas" "^29.0.0" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "http://localhost:4873/@jridgewell%2fgen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.0", 
"@jridgewell/gen-mapping@^0.3.2": + version "0.3.2" + resolved "http://localhost:4873/@jridgewell%2fgen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@^3.0.3": + version "3.1.0" + resolved "http://localhost:4873/@jridgewell%2fresolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "http://localhost:4873/@jridgewell%2fset-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + +"@jridgewell/source-map@^0.3.2": + version "0.3.2" + resolved "http://localhost:4873/@jridgewell%2fsource-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" + integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.14" + resolved "http://localhost:4873/@jridgewell%2fsourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/trace-mapping@^0.3.14", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.15" + resolved "http://localhost:4873/@jridgewell%2ftrace-mapping/-/trace-mapping-0.3.15.tgz#aba35c48a38d3fd84b37e66c9c0423f9744f9774" + integrity 
sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@leichtgewicht/ip-codec@^2.0.1": + version "2.0.4" + resolved "http://localhost:4873/@leichtgewicht%2fip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" + integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== + +"@nicolo-ribaudo/eslint-scope-5-internals@5.1.1-v1": + version "5.1.1-v1" + resolved "http://localhost:4873/@nicolo-ribaudo%2feslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz#dbf733a965ca47b1973177dc0bb6c889edcfb129" + integrity sha512-54/JRvkLIzzDWshCWfuhadfrfZVPiElY8Fcgmg1HroEly/EDSszzhBAsarCux+D/kOslTRquNzuyGSmUSTTHGg== + dependencies: + eslint-scope "5.1.1" + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "http://localhost:4873/@nodelib%2ffs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "http://localhost:4873/@nodelib%2ffs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "http://localhost:4873/@nodelib%2ffs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@pmmmwh/react-refresh-webpack-plugin@^0.5.3": + version "0.5.7" + resolved 
"http://localhost:4873/@pmmmwh%2freact-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.7.tgz#58f8217ba70069cc6a73f5d7e05e85b458c150e2" + integrity sha512-bcKCAzF0DV2IIROp9ZHkRJa6O4jy7NlnHdWL3GmcUxYWNjLXkK5kfELELwEfSP5hXPfVL/qOGMAROuMQb9GG8Q== + dependencies: + ansi-html-community "^0.0.8" + common-path-prefix "^3.0.0" + core-js-pure "^3.8.1" + error-stack-parser "^2.0.6" + find-up "^5.0.0" + html-entities "^2.1.0" + loader-utils "^2.0.0" + schema-utils "^3.0.0" + source-map "^0.7.3" + +"@rollup/plugin-babel@^5.2.0": + version "5.3.1" + resolved "http://localhost:4873/@rollup%2fplugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" + integrity sha512-WFfdLWU/xVWKeRQnKmIAQULUI7Il0gZnBIH/ZFO069wYIfPu+8zrfp/KMW0atmELoRDq8FbiP3VCss9MhCut7Q== + dependencies: + "@babel/helper-module-imports" "^7.10.4" + "@rollup/pluginutils" "^3.1.0" + +"@rollup/plugin-node-resolve@^11.2.1": + version "11.2.1" + resolved "http://localhost:4873/@rollup%2fplugin-node-resolve/-/plugin-node-resolve-11.2.1.tgz#82aa59397a29cd4e13248b106e6a4a1880362a60" + integrity sha512-yc2n43jcqVyGE2sqV5/YCmocy9ArjVAP/BeXyTtADTBBX6V0e5UMqwO8CdQ0kzjb6zu5P1qMzsScCMRvE9OlVg== + dependencies: + "@rollup/pluginutils" "^3.1.0" + "@types/resolve" "1.17.1" + builtin-modules "^3.1.0" + deepmerge "^4.2.2" + is-module "^1.0.0" + resolve "^1.19.0" + +"@rollup/plugin-replace@^2.4.1": + version "2.4.2" + resolved "http://localhost:4873/@rollup%2fplugin-replace/-/plugin-replace-2.4.2.tgz#a2d539314fbc77c244858faa523012825068510a" + integrity sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg== + dependencies: + "@rollup/pluginutils" "^3.1.0" + magic-string "^0.25.7" + +"@rollup/pluginutils@^3.1.0": + version "3.1.0" + resolved "http://localhost:4873/@rollup%2fpluginutils/-/pluginutils-3.1.0.tgz#706b4524ee6dc8b103b3c995533e5ad680c02b9b" + integrity sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg== + 
dependencies: + "@types/estree" "0.0.39" + estree-walker "^1.0.1" + picomatch "^2.2.2" + +"@rushstack/eslint-patch@^1.1.0": + version "1.2.0" + resolved "http://localhost:4873/@rushstack%2feslint-patch/-/eslint-patch-1.2.0.tgz#8be36a1f66f3265389e90b5f9c9962146758f728" + integrity sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg== + +"@sinclair/typebox@^0.24.1": + version "0.24.44" + resolved "http://localhost:4873/@sinclair%2ftypebox/-/typebox-0.24.44.tgz#0a0aa3bf4a155a678418527342a3ee84bd8caa5c" + integrity sha512-ka0W0KN5i6LfrSocduwliMMpqVgohtPFidKdMEOUjoOFCHcOOYkKsPRxfs5f15oPNHTm6ERAm0GV/+/LTKeiWg== + +"@sinonjs/commons@^1.7.0": + version "1.8.3" + resolved "http://localhost:4873/@sinonjs%2fcommons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" + integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== + dependencies: + type-detect "4.0.8" + +"@sinonjs/fake-timers@^8.0.1": + version "8.1.0" + resolved "http://localhost:4873/@sinonjs%2ffake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7" + integrity sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg== + dependencies: + "@sinonjs/commons" "^1.7.0" + +"@surma/rollup-plugin-off-main-thread@^2.2.3": + version "2.2.3" + resolved "http://localhost:4873/@surma%2frollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz#ee34985952ca21558ab0d952f00298ad2190c053" + integrity sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ== + dependencies: + ejs "^3.1.6" + json5 "^2.2.0" + magic-string "^0.25.0" + string.prototype.matchall "^4.0.6" + +"@svgr/babel-plugin-add-jsx-attribute@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz#81ef61947bb268eb9d50523446f9c638fb355906" + integrity 
sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg== + +"@svgr/babel-plugin-remove-jsx-attribute@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz#6b2c770c95c874654fd5e1d5ef475b78a0a962ef" + integrity sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg== + +"@svgr/babel-plugin-remove-jsx-empty-expression@^5.0.1": + version "5.0.1" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz#25621a8915ed7ad70da6cea3d0a6dbc2ea933efd" + integrity sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA== + +"@svgr/babel-plugin-replace-jsx-attribute-value@^5.0.1": + version "5.0.1" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz#0b221fc57f9fcd10e91fe219e2cd0dd03145a897" + integrity sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ== + +"@svgr/babel-plugin-svg-dynamic-title@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz#139b546dd0c3186b6e5db4fefc26cb0baea729d7" + integrity sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg== + +"@svgr/babel-plugin-svg-em-dimensions@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz#6543f69526632a133ce5cabab965deeaea2234a0" + integrity sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw== + +"@svgr/babel-plugin-transform-react-native-svg@^5.4.0": + version "5.4.0" + resolved 
"http://localhost:4873/@svgr%2fbabel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz#00bf9a7a73f1cad3948cdab1f8dfb774750f8c80" + integrity sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q== + +"@svgr/babel-plugin-transform-svg-component@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz#583a5e2a193e214da2f3afeb0b9e8d3250126b4a" + integrity sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ== + +"@svgr/babel-preset@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fbabel-preset/-/babel-preset-5.5.0.tgz#8af54f3e0a8add7b1e2b0fcd5a882c55393df327" + integrity sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig== + dependencies: + "@svgr/babel-plugin-add-jsx-attribute" "^5.4.0" + "@svgr/babel-plugin-remove-jsx-attribute" "^5.4.0" + "@svgr/babel-plugin-remove-jsx-empty-expression" "^5.0.1" + "@svgr/babel-plugin-replace-jsx-attribute-value" "^5.0.1" + "@svgr/babel-plugin-svg-dynamic-title" "^5.4.0" + "@svgr/babel-plugin-svg-em-dimensions" "^5.4.0" + "@svgr/babel-plugin-transform-react-native-svg" "^5.4.0" + "@svgr/babel-plugin-transform-svg-component" "^5.5.0" + +"@svgr/core@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fcore/-/core-5.5.0.tgz#82e826b8715d71083120fe8f2492ec7d7874a579" + integrity sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ== + dependencies: + "@svgr/plugin-jsx" "^5.5.0" + camelcase "^6.2.0" + cosmiconfig "^7.0.0" + +"@svgr/hast-util-to-babel-ast@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fhast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz#5ee52a9c2533f73e63f8f22b779f93cd432a5461" + integrity 
sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ== + dependencies: + "@babel/types" "^7.12.6" + +"@svgr/plugin-jsx@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fplugin-jsx/-/plugin-jsx-5.5.0.tgz#1aa8cd798a1db7173ac043466d7b52236b369000" + integrity sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA== + dependencies: + "@babel/core" "^7.12.3" + "@svgr/babel-preset" "^5.5.0" + "@svgr/hast-util-to-babel-ast" "^5.5.0" + svg-parser "^2.0.2" + +"@svgr/plugin-svgo@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fplugin-svgo/-/plugin-svgo-5.5.0.tgz#02da55d85320549324e201c7b2e53bf431fcc246" + integrity sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ== + dependencies: + cosmiconfig "^7.0.0" + deepmerge "^4.2.2" + svgo "^1.2.2" + +"@svgr/webpack@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fwebpack/-/webpack-5.5.0.tgz#aae858ee579f5fa8ce6c3166ef56c6a1b381b640" + integrity sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g== + dependencies: + "@babel/core" "^7.12.3" + "@babel/plugin-transform-react-constant-elements" "^7.12.1" + "@babel/preset-env" "^7.12.1" + "@babel/preset-react" "^7.12.5" + "@svgr/core" "^5.5.0" + "@svgr/plugin-jsx" "^5.5.0" + "@svgr/plugin-svgo" "^5.5.0" + loader-utils "^2.0.0" + +"@testing-library/dom@^8.5.0": + version "8.18.1" + resolved "http://localhost:4873/@testing-library%2fdom/-/dom-8.18.1.tgz#80f91be02bc171fe5a3a7003f88207be31ac2cf3" + integrity sha512-oEvsm2B/WtcHKE+IcEeeCqNU/ltFGaVyGbpcm4g/2ytuT49jrlH9x5qRKL/H3A6yfM4YAbSbC0ceT5+9CEXnLg== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/runtime" "^7.12.5" + "@types/aria-query" "^4.2.0" + aria-query "^5.0.0" + chalk "^4.1.0" + dom-accessibility-api "^0.5.9" + lz-string "^1.4.4" + pretty-format "^27.0.2" + +"@testing-library/jest-dom@^5.16.5": + 
version "5.16.5" + resolved "http://localhost:4873/@testing-library%2fjest-dom/-/jest-dom-5.16.5.tgz#3912846af19a29b2dbf32a6ae9c31ef52580074e" + integrity sha512-N5ixQ2qKpi5OLYfwQmUb/5mSV9LneAcaUfp32pn4yCnpb8r/Yz0pXFPck21dIicKmi+ta5WRAknkZCfA8refMA== + dependencies: + "@adobe/css-tools" "^4.0.1" + "@babel/runtime" "^7.9.2" + "@types/testing-library__jest-dom" "^5.9.1" + aria-query "^5.0.0" + chalk "^3.0.0" + css.escape "^1.5.1" + dom-accessibility-api "^0.5.6" + lodash "^4.17.15" + redent "^3.0.0" + +"@testing-library/react@^13.4.0": + version "13.4.0" + resolved "http://localhost:4873/@testing-library%2freact/-/react-13.4.0.tgz#6a31e3bf5951615593ad984e96b9e5e2d9380966" + integrity sha512-sXOGON+WNTh3MLE9rve97ftaZukN3oNf2KjDy7YTx6hcTO2uuLHuCGynMDhFwGw/jYf4OJ2Qk0i4i79qMNNkyw== + dependencies: + "@babel/runtime" "^7.12.5" + "@testing-library/dom" "^8.5.0" + "@types/react-dom" "^18.0.0" + +"@testing-library/user-event@^13.5.0": + version "13.5.0" + resolved "http://localhost:4873/@testing-library%2fuser-event/-/user-event-13.5.0.tgz#69d77007f1e124d55314a2b73fd204b333b13295" + integrity sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg== + dependencies: + "@babel/runtime" "^7.12.5" + +"@tootallnate/once@1": + version "1.1.2" + resolved "http://localhost:4873/@tootallnate%2fonce/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" + integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== + +"@trysound/sax@0.2.0": + version "0.2.0" + resolved "http://localhost:4873/@trysound%2fsax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" + integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== + +"@types/aria-query@^4.2.0": + version "4.2.2" + resolved "http://localhost:4873/@types%2faria-query/-/aria-query-4.2.2.tgz#ed4e0ad92306a704f9fb132a0cfcf77486dbe2bc" + integrity 
sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig== + +"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.14": + version "7.1.19" + resolved "http://localhost:4873/@types%2fbabel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460" + integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + "@types/babel__generator" "*" + "@types/babel__template" "*" + "@types/babel__traverse" "*" + +"@types/babel__generator@*": + version "7.6.4" + resolved "http://localhost:4873/@types%2fbabel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" + integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== + dependencies: + "@babel/types" "^7.0.0" + +"@types/babel__template@*": + version "7.4.1" + resolved "http://localhost:4873/@types%2fbabel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" + integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + +"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6": + version "7.18.2" + resolved "http://localhost:4873/@types%2fbabel__traverse/-/babel__traverse-7.18.2.tgz#235bf339d17185bdec25e024ca19cce257cc7309" + integrity sha512-FcFaxOr2V5KZCviw1TnutEMVUVsGt4D2hP1TAfXZAMKuHYW3xQhe3jTxNPWutgCJ3/X1c5yX8ZoGVEItxKbwBg== + dependencies: + "@babel/types" "^7.3.0" + +"@types/body-parser@*": + version "1.19.2" + resolved "http://localhost:4873/@types%2fbody-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" + integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== + dependencies: + "@types/connect" "*" + "@types/node" "*" + 
+"@types/bonjour@^3.5.9": + version "3.5.10" + resolved "http://localhost:4873/@types%2fbonjour/-/bonjour-3.5.10.tgz#0f6aadfe00ea414edc86f5d106357cda9701e275" + integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== + dependencies: + "@types/node" "*" + +"@types/connect-history-api-fallback@^1.3.5": + version "1.3.5" + resolved "http://localhost:4873/@types%2fconnect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz#d1f7a8a09d0ed5a57aee5ae9c18ab9b803205dae" + integrity sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw== + dependencies: + "@types/express-serve-static-core" "*" + "@types/node" "*" + +"@types/connect@*": + version "3.4.35" + resolved "http://localhost:4873/@types%2fconnect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1" + integrity sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== + dependencies: + "@types/node" "*" + +"@types/eslint-scope@^3.7.3": + version "3.7.4" + resolved "http://localhost:4873/@types%2feslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" + integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== + dependencies: + "@types/eslint" "*" + "@types/estree" "*" + +"@types/eslint@*", "@types/eslint@^7.29.0 || ^8.4.1": + version "8.4.6" + resolved "http://localhost:4873/@types%2feslint/-/eslint-8.4.6.tgz#7976f054c1bccfcf514bff0564c0c41df5c08207" + integrity sha512-/fqTbjxyFUaYNO7VcW5g+4npmqVACz1bB7RTHYuLj+PRjw9hrCwrUXVQFpChUS0JsyEFvMZ7U/PfmvWgxJhI9g== + dependencies: + "@types/estree" "*" + "@types/json-schema" "*" + +"@types/estree@*": + version "1.0.0" + resolved "http://localhost:4873/@types%2festree/-/estree-1.0.0.tgz#5fb2e536c1ae9bf35366eed879e827fa59ca41c2" + integrity sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ== + +"@types/estree@0.0.39": + 
version "0.0.39" + resolved "http://localhost:4873/@types%2festree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" + integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== + +"@types/estree@^0.0.51": + version "0.0.51" + resolved "http://localhost:4873/@types%2festree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" + integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== + +"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": + version "4.17.31" + resolved "http://localhost:4873/@types%2fexpress-serve-static-core/-/express-serve-static-core-4.17.31.tgz#a1139efeab4e7323834bb0226e62ac019f474b2f" + integrity sha512-DxMhY+NAsTwMMFHBTtJFNp5qiHKJ7TeqOo23zVEM9alT1Ml27Q3xcTH0xwxn7Q0BbMcVEJOs/7aQtUWupUQN3Q== + dependencies: + "@types/node" "*" + "@types/qs" "*" + "@types/range-parser" "*" + +"@types/express@*", "@types/express@^4.17.13": + version "4.17.14" + resolved "http://localhost:4873/@types%2fexpress/-/express-4.17.14.tgz#143ea0557249bc1b3b54f15db4c81c3d4eb3569c" + integrity sha512-TEbt+vaPFQ+xpxFLFssxUDXj5cWCxZJjIcB7Yg0k0GMHGtgtQgpvx/MUQUeAkNbA9AAGrwkAsoeItdTgS7FMyg== + dependencies: + "@types/body-parser" "*" + "@types/express-serve-static-core" "^4.17.18" + "@types/qs" "*" + "@types/serve-static" "*" + +"@types/graceful-fs@^4.1.2": + version "4.1.5" + resolved "http://localhost:4873/@types%2fgraceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" + integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== + dependencies: + "@types/node" "*" + +"@types/html-minifier-terser@^6.0.0": + version "6.1.0" + resolved "http://localhost:4873/@types%2fhtml-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" + integrity 
sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== + +"@types/http-proxy@^1.17.8": + version "1.17.9" + resolved "http://localhost:4873/@types%2fhttp-proxy/-/http-proxy-1.17.9.tgz#7f0e7931343761efde1e2bf48c40f02f3f75705a" + integrity sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw== + dependencies: + "@types/node" "*" + +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": + version "2.0.4" + resolved "http://localhost:4873/@types%2fistanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved "http://localhost:4873/@types%2fistanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.1" + resolved "http://localhost:4873/@types%2fistanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/jest@*": + version "29.1.1" + resolved "http://localhost:4873/@types%2fjest/-/jest-29.1.1.tgz#cf21a0835a1ba9a30ea1966019f1261c6a114c92" + integrity sha512-U9Ey07dGWl6fUFaIaUQUKWG5NoKi/zizeVQCGV8s4nSU0jPgqphVZvS64+8BtWYvrc3ZGw6wo943NSYPxkrp/g== + dependencies: + expect "^29.0.0" + pretty-format "^29.0.0" + +"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": + version "7.0.11" + resolved 
"http://localhost:4873/@types%2fjson-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" + integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== + +"@types/json5@^0.0.29": + version "0.0.29" + resolved "http://localhost:4873/@types%2fjson5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== + +"@types/mime@*": + version "3.0.1" + resolved "http://localhost:4873/@types%2fmime/-/mime-3.0.1.tgz#5f8f2bca0a5863cb69bc0b0acd88c96cb1d4ae10" + integrity sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA== + +"@types/node@*": + version "18.8.2" + resolved "http://localhost:4873/@types%2fnode/-/node-18.8.2.tgz#17d42c6322d917764dd3d2d3a10d7884925de067" + integrity sha512-cRMwIgdDN43GO4xMWAfJAecYn8wV4JbsOGHNfNUIDiuYkUYAR5ec4Rj7IO2SAhFPEfpPtLtUTbbny/TCT7aDwA== + +"@types/parse-json@^4.0.0": + version "4.0.0" + resolved "http://localhost:4873/@types%2fparse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" + integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + +"@types/prettier@^2.1.5": + version "2.7.1" + resolved "http://localhost:4873/@types%2fprettier/-/prettier-2.7.1.tgz#dfd20e2dc35f027cdd6c1908e80a5ddc7499670e" + integrity sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow== + +"@types/prop-types@*": + version "15.7.5" + resolved "http://localhost:4873/@types%2fprop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" + integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== + +"@types/q@^1.5.1": + version "1.5.5" + resolved "http://localhost:4873/@types%2fq/-/q-1.5.5.tgz#75a2a8e7d8ab4b230414505d92335d1dcb53a6df" + integrity 
sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ== + +"@types/qs@*": + version "6.9.7" + resolved "http://localhost:4873/@types%2fqs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" + integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== + +"@types/range-parser@*": + version "1.2.4" + resolved "http://localhost:4873/@types%2frange-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" + integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== + +"@types/react-dom@^18.0.0": + version "18.0.6" + resolved "http://localhost:4873/@types%2freact-dom/-/react-dom-18.0.6.tgz#36652900024842b74607a17786b6662dd1e103a1" + integrity sha512-/5OFZgfIPSwy+YuIBP/FgJnQnsxhZhjjrnxudMddeblOouIodEQ75X14Rr4wGSG/bknL+Omy9iWlLo1u/9GzAA== + dependencies: + "@types/react" "*" + +"@types/react@*": + version "18.0.21" + resolved "http://localhost:4873/@types%2freact/-/react-18.0.21.tgz#b8209e9626bb00a34c76f55482697edd2b43cc67" + integrity sha512-7QUCOxvFgnD5Jk8ZKlUAhVcRj7GuJRjnjjiY/IUBWKgOlnvDvTMLD4RTF7NPyVmbRhNrbomZiOepg7M/2Kj1mA== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/resolve@1.17.1": + version "1.17.1" + resolved "http://localhost:4873/@types%2fresolve/-/resolve-1.17.1.tgz#3afd6ad8967c77e4376c598a82ddd58f46ec45d6" + integrity sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw== + dependencies: + "@types/node" "*" + +"@types/retry@0.12.0": + version "0.12.0" + resolved "http://localhost:4873/@types%2fretry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" + integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== + +"@types/scheduler@*": + version "0.16.2" + resolved 
"http://localhost:4873/@types%2fscheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" + integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + +"@types/serve-index@^1.9.1": + version "1.9.1" + resolved "http://localhost:4873/@types%2fserve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278" + integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== + dependencies: + "@types/express" "*" + +"@types/serve-static@*", "@types/serve-static@^1.13.10": + version "1.15.0" + resolved "http://localhost:4873/@types%2fserve-static/-/serve-static-1.15.0.tgz#c7930ff61afb334e121a9da780aac0d9b8f34155" + integrity sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg== + dependencies: + "@types/mime" "*" + "@types/node" "*" + +"@types/sockjs@^0.3.33": + version "0.3.33" + resolved "http://localhost:4873/@types%2fsockjs/-/sockjs-0.3.33.tgz#570d3a0b99ac995360e3136fd6045113b1bd236f" + integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== + dependencies: + "@types/node" "*" + +"@types/stack-utils@^2.0.0": + version "2.0.1" + resolved "http://localhost:4873/@types%2fstack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" + integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== + +"@types/testing-library__jest-dom@^5.9.1": + version "5.14.5" + resolved "http://localhost:4873/@types%2ftesting-library__jest-dom/-/testing-library__jest-dom-5.14.5.tgz#d113709c90b3c75fdb127ec338dad7d5f86c974f" + integrity sha512-SBwbxYoyPIvxHbeHxTZX2Pe/74F/tX2/D3mMvzabdeJ25bBojfW0TyB8BHrbq/9zaaKICJZjLP+8r6AeZMFCuQ== + dependencies: + "@types/jest" "*" + +"@types/trusted-types@^2.0.2": + version "2.0.2" + resolved 
"http://localhost:4873/@types%2ftrusted-types/-/trusted-types-2.0.2.tgz#fc25ad9943bcac11cceb8168db4f275e0e72e756" + integrity sha512-F5DIZ36YVLE+PN+Zwws4kJogq47hNgX3Nx6WyDJ3kcplxyke3XIzB8uK5n/Lpm1HBsbGzd6nmGehL8cPekP+Tg== + +"@types/ws@^8.5.1": + version "8.5.3" + resolved "http://localhost:4873/@types%2fws/-/ws-8.5.3.tgz#7d25a1ffbecd3c4f2d35068d0b283c037003274d" + integrity sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w== + dependencies: + "@types/node" "*" + +"@types/yargs-parser@*": + version "21.0.0" + resolved "http://localhost:4873/@types%2fyargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" + integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + +"@types/yargs@^16.0.0": + version "16.0.4" + resolved "http://localhost:4873/@types%2fyargs/-/yargs-16.0.4.tgz#26aad98dd2c2a38e421086ea9ad42b9e51642977" + integrity sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw== + dependencies: + "@types/yargs-parser" "*" + +"@types/yargs@^17.0.8": + version "17.0.13" + resolved "http://localhost:4873/@types%2fyargs/-/yargs-17.0.13.tgz#34cced675ca1b1d51fcf4d34c3c6f0fa142a5c76" + integrity sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg== + dependencies: + "@types/yargs-parser" "*" + +"@typescript-eslint/eslint-plugin@^5.5.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2feslint-plugin/-/eslint-plugin-5.39.0.tgz#778b2d9e7f293502c7feeea6c74dca8eb3e67511" + integrity sha512-xVfKOkBm5iWMNGKQ2fwX5GVgBuHmZBO1tCRwXmY5oAIsPscfwm2UADDuNB8ZVYCtpQvJK4xpjrK7jEhcJ0zY9A== + dependencies: + "@typescript-eslint/scope-manager" "5.39.0" + "@typescript-eslint/type-utils" "5.39.0" + "@typescript-eslint/utils" "5.39.0" + debug "^4.3.4" + ignore "^5.2.0" + regexpp "^3.2.0" + semver "^7.3.7" + tsutils "^3.21.0" + 
+"@typescript-eslint/experimental-utils@^5.0.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2fexperimental-utils/-/experimental-utils-5.39.0.tgz#9263bb72b57449cc2f07ffb7fd4e12d0160b7f5e" + integrity sha512-n5N9kG/oGu2xXhHzsWzn94s6CWoiUj59FPU2dF2IQZxPftw+q6Jm5sV2vj5qTgAElRooHhrgtl2gxBQDCPt6WA== + dependencies: + "@typescript-eslint/utils" "5.39.0" + +"@typescript-eslint/parser@^5.5.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2fparser/-/parser-5.39.0.tgz#93fa0bc980a3a501e081824f6097f7ca30aaa22b" + integrity sha512-PhxLjrZnHShe431sBAGHaNe6BDdxAASDySgsBCGxcBecVCi8NQWxQZMcizNA4g0pN51bBAn/FUfkWG3SDVcGlA== + dependencies: + "@typescript-eslint/scope-manager" "5.39.0" + "@typescript-eslint/types" "5.39.0" + "@typescript-eslint/typescript-estree" "5.39.0" + debug "^4.3.4" + +"@typescript-eslint/scope-manager@5.39.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2fscope-manager/-/scope-manager-5.39.0.tgz#873e1465afa3d6c78d8ed2da68aed266a08008d0" + integrity sha512-/I13vAqmG3dyqMVSZPjsbuNQlYS082Y7OMkwhCfLXYsmlI0ca4nkL7wJ/4gjX70LD4P8Hnw1JywUVVAwepURBw== + dependencies: + "@typescript-eslint/types" "5.39.0" + "@typescript-eslint/visitor-keys" "5.39.0" + +"@typescript-eslint/type-utils@5.39.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2ftype-utils/-/type-utils-5.39.0.tgz#0a8c00f95dce4335832ad2dc6bc431c14e32a0a6" + integrity sha512-KJHJkOothljQWzR3t/GunL0TPKY+fGJtnpl+pX+sJ0YiKTz3q2Zr87SGTmFqsCMFrLt5E0+o+S6eQY0FAXj9uA== + dependencies: + "@typescript-eslint/typescript-estree" "5.39.0" + "@typescript-eslint/utils" "5.39.0" + debug "^4.3.4" + tsutils "^3.21.0" + +"@typescript-eslint/types@5.39.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2ftypes/-/types-5.39.0.tgz#f4e9f207ebb4579fd854b25c0bf64433bb5ed78d" + integrity sha512-gQMZrnfEBFXK38hYqt8Lkwt8f4U6yq+2H5VDSgP/qiTzC8Nw8JO3OuSUOQ2qW37S/dlwdkHDntkZM6SQhKyPhw== + 
+"@typescript-eslint/typescript-estree@5.39.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2ftypescript-estree/-/typescript-estree-5.39.0.tgz#c0316aa04a1a1f4f7f9498e3c13ef1d3dc4cf88b" + integrity sha512-qLFQP0f398sdnogJoLtd43pUgB18Q50QSA+BTE5h3sUxySzbWDpTSdgt4UyxNSozY/oDK2ta6HVAzvGgq8JYnA== + dependencies: + "@typescript-eslint/types" "5.39.0" + "@typescript-eslint/visitor-keys" "5.39.0" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/utils@5.39.0", "@typescript-eslint/utils@^5.13.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2futils/-/utils-5.39.0.tgz#b7063cca1dcf08d1d21b0d91db491161ad0be110" + integrity sha512-+DnY5jkpOpgj+EBtYPyHRjXampJfC0yUZZzfzLuUWVZvCuKqSdJVC8UhdWipIw7VKNTfwfAPiOWzYkAwuIhiAg== + dependencies: + "@types/json-schema" "^7.0.9" + "@typescript-eslint/scope-manager" "5.39.0" + "@typescript-eslint/types" "5.39.0" + "@typescript-eslint/typescript-estree" "5.39.0" + eslint-scope "^5.1.1" + eslint-utils "^3.0.0" + +"@typescript-eslint/visitor-keys@5.39.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2fvisitor-keys/-/visitor-keys-5.39.0.tgz#8f41f7d241b47257b081ddba5d3ce80deaae61e2" + integrity sha512-yyE3RPwOG+XJBLrhvsxAidUgybJVQ/hG8BhiJo0k8JSAYfk/CshVcxf0HwP4Jt7WZZ6vLmxdo1p6EyN3tzFTkg== + dependencies: + "@typescript-eslint/types" "5.39.0" + eslint-visitor-keys "^3.3.0" + +"@webassemblyjs/ast@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" + integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== + dependencies: + "@webassemblyjs/helper-numbers" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + +"@webassemblyjs/floating-point-hex-parser@1.11.1": + version "1.11.1" + resolved 
"http://localhost:4873/@webassemblyjs%2ffloating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" + integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== + +"@webassemblyjs/helper-api-error@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fhelper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" + integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== + +"@webassemblyjs/helper-buffer@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fhelper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" + integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== + +"@webassemblyjs/helper-numbers@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fhelper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" + integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== + dependencies: + "@webassemblyjs/floating-point-hex-parser" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@xtuc/long" "4.2.2" + +"@webassemblyjs/helper-wasm-bytecode@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fhelper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" + integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== + +"@webassemblyjs/helper-wasm-section@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fhelper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" + integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== + dependencies: + "@webassemblyjs/ast" 
"1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + +"@webassemblyjs/ieee754@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" + integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== + dependencies: + "@xtuc/ieee754" "^1.2.0" + +"@webassemblyjs/leb128@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fleb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" + integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== + dependencies: + "@xtuc/long" "4.2.2" + +"@webassemblyjs/utf8@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2futf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" + integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== + +"@webassemblyjs/wasm-edit@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" + integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-wasm-section" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-opt" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + "@webassemblyjs/wast-printer" "1.11.1" + +"@webassemblyjs/wasm-gen@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" + integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== + dependencies: 
+ "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wasm-opt@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" + integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + +"@webassemblyjs/wasm-parser@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" + integrity sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wast-printer@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" + integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@xtuc/long" "4.2.2" + +"@xtuc/ieee754@^1.2.0": + version "1.2.0" + resolved "http://localhost:4873/@xtuc%2fieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" + integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== + +"@xtuc/long@4.2.2": + version "4.2.2" + resolved "http://localhost:4873/@xtuc%2flong/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" + integrity 
sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== + +abab@^2.0.3, abab@^2.0.5: + version "2.0.6" + resolved "http://localhost:4873/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" + integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== + +accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: + version "1.3.8" + resolved "http://localhost:4873/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== + dependencies: + mime-types "~2.1.34" + negotiator "0.6.3" + +acorn-globals@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" + integrity sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== + dependencies: + acorn "^7.1.1" + acorn-walk "^7.1.1" + +acorn-import-assertions@^1.7.6: + version "1.8.0" + resolved "http://localhost:4873/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" + integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== + +acorn-jsx@^5.3.2: + version "5.3.2" + resolved "http://localhost:4873/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn-node@^1.8.2: + version "1.8.2" + resolved "http://localhost:4873/acorn-node/-/acorn-node-1.8.2.tgz#114c95d64539e53dede23de8b9d96df7c7ae2af8" + integrity sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A== + dependencies: + acorn "^7.0.0" + acorn-walk "^7.0.0" + xtend "^4.0.2" + +acorn-walk@^7.0.0, acorn-walk@^7.1.1: + version "7.2.0" + resolved 
"http://localhost:4873/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" + integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== + +acorn@^7.0.0, acorn@^7.1.1: + version "7.4.1" + resolved "http://localhost:4873/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +acorn@^8.2.4, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.0: + version "8.8.0" + resolved "http://localhost:4873/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" + integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== + +address@^1.0.1, address@^1.1.2: + version "1.2.1" + resolved "http://localhost:4873/address/-/address-1.2.1.tgz#25bb61095b7522d65b357baa11bc05492d4c8acd" + integrity sha512-B+6bi5D34+fDYENiH5qOlA0cV2rAGKuWZ9LeyUUehbXy8e0VS9e498yO0Jeeh+iM+6KbfudHTFjXw2MmJD4QRA== + +adjust-sourcemap-loader@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz#fc4a0fd080f7d10471f30a7320f25560ade28c99" + integrity sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A== + dependencies: + loader-utils "^2.0.0" + regex-parser "^2.2.11" + +agent-base@6: + version "6.0.2" + resolved "http://localhost:4873/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== + dependencies: + debug "4" + +ajv-formats@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" + integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== + dependencies: + ajv "^8.0.0" + +ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: + 
version "3.5.2" + resolved "http://localhost:4873/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" + integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== + +ajv-keywords@^5.0.0: + version "5.1.0" + resolved "http://localhost:4873/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" + integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== + dependencies: + fast-deep-equal "^3.1.3" + +ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: + version "6.12.6" + resolved "http://localhost:4873/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ajv@^8.0.0, ajv@^8.6.0, ajv@^8.8.0: + version "8.11.0" + resolved "http://localhost:4873/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f" + integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + +ansi-escapes@^4.2.1, ansi-escapes@^4.3.1: + version "4.3.2" + resolved "http://localhost:4873/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + +ansi-html-community@^0.0.8: + version "0.0.8" + resolved "http://localhost:4873/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" + integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== + +ansi-regex@^5.0.1: + version "5.0.1" + 
resolved "http://localhost:4873/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-regex@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" + integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "http://localhost:4873/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "http://localhost:4873/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^5.0.0: + version "5.2.0" + resolved "http://localhost:4873/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + +anymatch@^3.0.3, anymatch@~3.1.2: + version "3.1.2" + resolved "http://localhost:4873/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" + integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +arg@^5.0.2: + version "5.0.2" + resolved "http://localhost:4873/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" + integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== + +argparse@^1.0.7: + version "1.0.10" + resolved 
"http://localhost:4873/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +argparse@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +aria-query@^4.2.2: + version "4.2.2" + resolved "http://localhost:4873/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b" + integrity sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA== + dependencies: + "@babel/runtime" "^7.10.2" + "@babel/runtime-corejs3" "^7.10.2" + +aria-query@^5.0.0: + version "5.0.2" + resolved "http://localhost:4873/aria-query/-/aria-query-5.0.2.tgz#0b8a744295271861e1d933f8feca13f9b70cfdc1" + integrity sha512-eigU3vhqSO+Z8BKDnVLN/ompjhf3pYzecKXz8+whRy+9gZu8n1TCGfwzQUUPnqdHl9ax1Hr9031orZ+UOEYr7Q== + +array-flatten@1.1.1: + version "1.1.1" + resolved "http://localhost:4873/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== + +array-flatten@^2.1.2: + version "2.1.2" + resolved "http://localhost:4873/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" + integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== + +array-includes@^3.1.4, array-includes@^3.1.5: + version "3.1.5" + resolved "http://localhost:4873/array-includes/-/array-includes-3.1.5.tgz#2c320010db8d31031fd2a5f6b3bbd4b1aad31bdb" + integrity sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract 
"^1.19.5" + get-intrinsic "^1.1.1" + is-string "^1.0.7" + +array-union@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array.prototype.flat@^1.2.5: + version "1.3.0" + resolved "http://localhost:4873/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b" + integrity sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + +array.prototype.flatmap@^1.3.0: + version "1.3.0" + resolved "http://localhost:4873/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz#a7e8ed4225f4788a70cd910abcf0791e76a5534f" + integrity sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + +array.prototype.reduce@^1.0.4: + version "1.0.4" + resolved "http://localhost:4873/array.prototype.reduce/-/array.prototype.reduce-1.0.4.tgz#8167e80089f78bff70a99e20bd4201d4663b0a6f" + integrity sha512-WnM+AjG/DvLRLo4DDl+r+SvCzYtD2Jd9oeBYMcEaI7t3fFrHY9M53/wdLcTvmZNQ70IU6Htj0emFkZ5TS+lrdw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-array-method-boxes-properly "^1.0.0" + is-string "^1.0.7" + +asap@~2.0.6: + version "2.0.6" + resolved "http://localhost:4873/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== + +ast-types-flow@^0.0.7: + version "0.0.7" + resolved "http://localhost:4873/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" + 
integrity sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag== + +async@^3.2.3: + version "3.2.4" + resolved "http://localhost:4873/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" + integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== + +asynckit@^0.4.0: + version "0.4.0" + resolved "http://localhost:4873/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + +at-least-node@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" + integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== + +automerge-wasm@0.1.7: + version "0.1.7" + resolved "http://localhost:4873/automerge-wasm/-/automerge-wasm-0.1.7.tgz#b5c02d6d00521d5ecb956226a187d668e7530c8f" + integrity sha512-BJ0/W1i7fCMTEWZ25DS31AL2vgZ3Yv5LrBibU0gG0pg6oj62T4iiXm/4bYXHykkry1+mTJIoNGeOwCwEpvhFAw== + +automerge@2.0.0-alpha.1: + version "2.0.0-alpha.1" + resolved "http://localhost:4873/automerge/-/automerge-2.0.0-alpha.1.tgz#554d0246116121609f97297f9f7d9048eb0447fa" + integrity sha512-EZ6A52btI2LLrgRk8BYwcrOikaKyPYq4LkdmBeV0ec/8XNW6QhPLtwb+NXP6ZM2ynHND3zFR8pDzbPeP+POeKA== + dependencies: + automerge-wasm "0.1.7" + uuid "^8.3" + +autoprefixer@^10.4.11, autoprefixer@^10.4.12: + version "10.4.12" + resolved "http://localhost:4873/autoprefixer/-/autoprefixer-10.4.12.tgz#183f30bf0b0722af54ee5ef257f7d4320bb33129" + integrity sha512-WrCGV9/b97Pa+jtwf5UGaRjgQIg7OK3D06GnoYoZNcG1Xb8Gt3EfuKjlhh9i/VtT16g6PYjZ69jdJ2g8FxSC4Q== + dependencies: + browserslist "^4.21.4" + caniuse-lite "^1.0.30001407" + fraction.js "^4.2.0" + normalize-range "^0.1.2" + picocolors "^1.0.0" + postcss-value-parser "^4.2.0" + +axe-core@^4.4.3: + version "4.4.3" + resolved 
"http://localhost:4873/axe-core/-/axe-core-4.4.3.tgz#11c74d23d5013c0fa5d183796729bc3482bd2f6f" + integrity sha512-32+ub6kkdhhWick/UjvEwRchgoetXqTK14INLqbGm5U2TzBkBNF3nQtLYm8ovxSkQWArjEQvftCKryjZaATu3w== + +axobject-query@^2.2.0: + version "2.2.0" + resolved "http://localhost:4873/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be" + integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA== + +babel-jest@^27.4.2, babel-jest@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/babel-jest/-/babel-jest-27.5.1.tgz#a1bf8d61928edfefd21da27eb86a695bfd691444" + integrity sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg== + dependencies: + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/babel__core" "^7.1.14" + babel-plugin-istanbul "^6.1.1" + babel-preset-jest "^27.5.1" + chalk "^4.0.0" + graceful-fs "^4.2.9" + slash "^3.0.0" + +babel-loader@^8.2.3: + version "8.2.5" + resolved "http://localhost:4873/babel-loader/-/babel-loader-8.2.5.tgz#d45f585e654d5a5d90f5350a779d7647c5ed512e" + integrity sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ== + dependencies: + find-cache-dir "^3.3.1" + loader-utils "^2.0.0" + make-dir "^3.1.0" + schema-utils "^2.6.5" + +babel-plugin-dynamic-import-node@^2.3.3: + version "2.3.3" + resolved "http://localhost:4873/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3" + integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ== + dependencies: + object.assign "^4.1.0" + +babel-plugin-istanbul@^6.1.1: + version "6.1.1" + resolved "http://localhost:4873/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity 
sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^5.0.4" + test-exclude "^6.0.0" + +babel-plugin-jest-hoist@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz#9be98ecf28c331eb9f5df9c72d6f89deb8181c2e" + integrity sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ== + dependencies: + "@babel/template" "^7.3.3" + "@babel/types" "^7.3.3" + "@types/babel__core" "^7.0.0" + "@types/babel__traverse" "^7.0.6" + +babel-plugin-macros@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz#9ef6dc74deb934b4db344dc973ee851d148c50c1" + integrity sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg== + dependencies: + "@babel/runtime" "^7.12.5" + cosmiconfig "^7.0.0" + resolve "^1.19.0" + +babel-plugin-named-asset-import@^0.3.8: + version "0.3.8" + resolved "http://localhost:4873/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.8.tgz#6b7fa43c59229685368683c28bc9734f24524cc2" + integrity sha512-WXiAc++qo7XcJ1ZnTYGtLxmBCVbddAml3CEXgWaBzNzLNoxtQ8AiGEFDMOhot9XjTCQbvP5E77Fj9Gk924f00Q== + +babel-plugin-polyfill-corejs2@^0.3.3: + version "0.3.3" + resolved "http://localhost:4873/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.3.tgz#5d1bd3836d0a19e1b84bbf2d9640ccb6f951c122" + integrity sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q== + dependencies: + "@babel/compat-data" "^7.17.7" + "@babel/helper-define-polyfill-provider" "^0.3.3" + semver "^6.1.1" + +babel-plugin-polyfill-corejs3@^0.6.0: + version "0.6.0" + resolved 
"http://localhost:4873/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.6.0.tgz#56ad88237137eade485a71b52f72dbed57c6230a" + integrity sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.3" + core-js-compat "^3.25.1" + +babel-plugin-polyfill-regenerator@^0.4.1: + version "0.4.1" + resolved "http://localhost:4873/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.1.tgz#390f91c38d90473592ed43351e801a9d3e0fd747" + integrity sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.3" + +babel-plugin-transform-react-remove-prop-types@^0.4.24: + version "0.4.24" + resolved "http://localhost:4873/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a" + integrity sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA== + +babel-preset-current-node-syntax@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" + integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== + dependencies: + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-bigint" "^7.8.3" + "@babel/plugin-syntax-class-properties" "^7.8.3" + "@babel/plugin-syntax-import-meta" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.8.3" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + 
"@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-top-level-await" "^7.8.3" + +babel-preset-jest@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz#91f10f58034cb7989cb4f962b69fa6eef6a6bc81" + integrity sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag== + dependencies: + babel-plugin-jest-hoist "^27.5.1" + babel-preset-current-node-syntax "^1.0.0" + +babel-preset-react-app@^10.0.1: + version "10.0.1" + resolved "http://localhost:4873/babel-preset-react-app/-/babel-preset-react-app-10.0.1.tgz#ed6005a20a24f2c88521809fa9aea99903751584" + integrity sha512-b0D9IZ1WhhCWkrTXyFuIIgqGzSkRIH5D5AmB0bXbzYAB1OBAwHcUeyWW2LorutLWF5btNo/N7r/cIdmvvKJlYg== + dependencies: + "@babel/core" "^7.16.0" + "@babel/plugin-proposal-class-properties" "^7.16.0" + "@babel/plugin-proposal-decorators" "^7.16.4" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.0" + "@babel/plugin-proposal-numeric-separator" "^7.16.0" + "@babel/plugin-proposal-optional-chaining" "^7.16.0" + "@babel/plugin-proposal-private-methods" "^7.16.0" + "@babel/plugin-transform-flow-strip-types" "^7.16.0" + "@babel/plugin-transform-react-display-name" "^7.16.0" + "@babel/plugin-transform-runtime" "^7.16.4" + "@babel/preset-env" "^7.16.4" + "@babel/preset-react" "^7.16.0" + "@babel/preset-typescript" "^7.16.0" + "@babel/runtime" "^7.16.3" + babel-plugin-macros "^3.1.0" + babel-plugin-transform-react-remove-prop-types "^0.4.24" + +balanced-match@^1.0.0: + version "1.0.2" + resolved "http://localhost:4873/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +batch@0.6.1: + version "0.6.1" + resolved "http://localhost:4873/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" + integrity 
sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw== + +bfj@^7.0.2: + version "7.0.2" + resolved "http://localhost:4873/bfj/-/bfj-7.0.2.tgz#1988ce76f3add9ac2913fd8ba47aad9e651bfbb2" + integrity sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw== + dependencies: + bluebird "^3.5.5" + check-types "^11.1.1" + hoopy "^0.1.4" + tryer "^1.0.1" + +big.js@^5.2.2: + version "5.2.2" + resolved "http://localhost:4873/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" + integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== + +binary-extensions@^2.0.0: + version "2.2.0" + resolved "http://localhost:4873/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" + integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== + +bluebird@^3.5.5: + version "3.7.2" + resolved "http://localhost:4873/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" + integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== + +body-parser@1.20.0: + version "1.20.0" + resolved "http://localhost:4873/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" + integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== + dependencies: + bytes "3.1.2" + content-type "~1.0.4" + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" + iconv-lite "0.4.24" + on-finished "2.4.1" + qs "6.10.3" + raw-body "2.5.1" + type-is "~1.6.18" + unpipe "1.0.0" + +bonjour-service@^1.0.11: + version "1.0.14" + resolved "http://localhost:4873/bonjour-service/-/bonjour-service-1.0.14.tgz#c346f5bc84e87802d08f8d5a60b93f758e514ee7" + integrity sha512-HIMbgLnk1Vqvs6B4Wq5ep7mxvj9sGz5d1JJyDNSGNIdA/w2MCz6GTjWTdjqOJV1bEPj+6IkxDvWNFKEBxNt4kQ== + dependencies: 
+ array-flatten "^2.1.2" + dns-equal "^1.0.0" + fast-deep-equal "^3.1.3" + multicast-dns "^7.2.5" + +boolbase@^1.0.0, boolbase@~1.0.0: + version "1.0.0" + resolved "http://localhost:4873/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" + integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "http://localhost:4873/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +braces@^3.0.2, braces@~3.0.2: + version "3.0.2" + resolved "http://localhost:4873/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +browser-process-hrtime@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" + integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== + +browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.18.1, browserslist@^4.20.3, browserslist@^4.21.3, browserslist@^4.21.4: + version "4.21.4" + resolved "http://localhost:4873/browserslist/-/browserslist-4.21.4.tgz#e7496bbc67b9e39dd0f98565feccdcb0d4ff6987" + integrity 
sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw== + dependencies: + caniuse-lite "^1.0.30001400" + electron-to-chromium "^1.4.251" + node-releases "^2.0.6" + update-browserslist-db "^1.0.9" + +bser@2.1.1: + version "2.1.1" + resolved "http://localhost:4873/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + +buffer-from@^1.0.0: + version "1.1.2" + resolved "http://localhost:4873/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +builtin-modules@^3.1.0: + version "3.3.0" + resolved "http://localhost:4873/builtin-modules/-/builtin-modules-3.3.0.tgz#cae62812b89801e9656336e46223e030386be7b6" + integrity sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw== + +bytes@3.0.0: + version "3.0.0" + resolved "http://localhost:4873/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" + integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== + +bytes@3.1.2: + version "3.1.2" + resolved "http://localhost:4873/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +callsites@^3.0.0: + version "3.1.0" + resolved 
"http://localhost:4873/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camel-case@^4.1.2: + version "4.1.2" + resolved "http://localhost:4873/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" + integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== + dependencies: + pascal-case "^3.1.2" + tslib "^2.0.3" + +camelcase-css@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" + integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== + +camelcase@^5.3.1: + version "5.3.1" + resolved "http://localhost:4873/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +camelcase@^6.2.0, camelcase@^6.2.1: + version "6.3.0" + resolved "http://localhost:4873/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + +caniuse-api@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" + integrity sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== + dependencies: + browserslist "^4.0.0" + caniuse-lite "^1.0.0" + lodash.memoize "^4.1.2" + lodash.uniq "^4.5.0" + +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001400, caniuse-lite@^1.0.30001407: + version "1.0.30001415" + resolved "http://localhost:4873/caniuse-lite/-/caniuse-lite-1.0.30001415.tgz#fd7ea96e9e94c181a7f56e7571efb43d92b860cc" + integrity 
sha512-ER+PfgCJUe8BqunLGWd/1EY4g8AzQcsDAVzdtMGKVtQEmKAwaFfU6vb7EAVIqTMYsqxBorYZi2+22Iouj/y7GQ== + +case-sensitive-paths-webpack-plugin@^2.4.0: + version "2.4.0" + resolved "http://localhost:4873/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" + integrity sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw== + +chalk@^2.0.0, chalk@^2.4.1: + version "2.4.2" + resolved "http://localhost:4873/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" + integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: + version "4.1.2" + resolved "http://localhost:4873/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +char-regex@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + +char-regex@^2.0.0: + version "2.0.1" + resolved "http://localhost:4873/char-regex/-/char-regex-2.0.1.tgz#6dafdb25f9d3349914079f010ba8d0e6ff9cd01e" + integrity sha512-oSvEeo6ZUD7NepqAat3RqoucZ5SeqLJgOvVIwkafu6IP3V0pO38s/ypdVUmDDK6qIIHNlYHJAKX9E7R7HoKElw== + +check-types@^11.1.1: + version "11.1.2" 
+ resolved "http://localhost:4873/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f" + integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ== + +chokidar@^3.4.2, chokidar@^3.5.3: + version "3.5.3" + resolved "http://localhost:4873/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +chrome-trace-event@^1.0.2: + version "1.0.3" + resolved "http://localhost:4873/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" + integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== + +ci-info@^3.2.0: + version "3.4.0" + resolved "http://localhost:4873/ci-info/-/ci-info-3.4.0.tgz#b28484fd436cbc267900364f096c9dc185efb251" + integrity sha512-t5QdPT5jq3o262DOQ8zA6E1tlH2upmUc4Hlvrbx1pGYJuiiHl7O7rvVNI+l8HTVhd/q3Qc9vqimkNk5yiXsAug== + +cjs-module-lexer@^1.0.0: + version "1.2.2" + resolved "http://localhost:4873/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" + integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== + +clean-css@^5.2.2: + version "5.3.1" + resolved "http://localhost:4873/clean-css/-/clean-css-5.3.1.tgz#d0610b0b90d125196a2894d35366f734e5d7aa32" + integrity sha512-lCr8OHhiWCTw4v8POJovCoh4T7I9U11yVsPjMWWnnMmp9ZowCxyad1Pathle/9HjaDp+fdQKjO9fQydE6RHTZg== + dependencies: + source-map "~0.6.0" + +cliui@^7.0.2: + version "7.0.4" + resolved "http://localhost:4873/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + integrity 
sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" + +clone-deep@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" + integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== + dependencies: + is-plain-object "^2.0.4" + kind-of "^6.0.2" + shallow-clone "^3.0.0" + +co@^4.6.0: + version "4.6.0" + resolved "http://localhost:4873/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== + +coa@^2.0.2: + version "2.0.2" + resolved "http://localhost:4873/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3" + integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA== + dependencies: + "@types/q" "^1.5.1" + chalk "^2.4.1" + q "^1.1.2" + +collect-v8-coverage@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" + integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== + +color-convert@^1.9.0: + version "1.9.3" + resolved "http://localhost:4873/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + 
resolved "http://localhost:4873/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + +color-name@^1.1.4, color-name@~1.1.4: + version "1.1.4" + resolved "http://localhost:4873/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +colord@^2.9.1: + version "2.9.3" + resolved "http://localhost:4873/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" + integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== + +colorette@^2.0.10: + version "2.0.19" + resolved "http://localhost:4873/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798" + integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ== + +combined-stream@^1.0.8: + version "1.0.8" + resolved "http://localhost:4873/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +commander@^2.20.0: + version "2.20.3" + resolved "http://localhost:4873/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + +commander@^7.2.0: + version "7.2.0" + resolved "http://localhost:4873/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" + integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== + +commander@^8.3.0: + version "8.3.0" + resolved "http://localhost:4873/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" + integrity 
sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== + +common-path-prefix@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" + integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== + +common-tags@^1.8.0: + version "1.8.2" + resolved "http://localhost:4873/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" + integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== + +commondir@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== + +compressible@~2.0.16: + version "2.0.18" + resolved "http://localhost:4873/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" + integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== + dependencies: + mime-db ">= 1.43.0 < 2" + +compression@^1.7.4: + version "1.7.4" + resolved "http://localhost:4873/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" + integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== + dependencies: + accepts "~1.3.5" + bytes "3.0.0" + compressible "~2.0.16" + debug "2.6.9" + on-headers "~1.0.2" + safe-buffer "5.1.2" + vary "~1.1.2" + +concat-map@0.0.1: + version "0.0.1" + resolved "http://localhost:4873/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +confusing-browser-globals@^1.0.11: + version "1.0.11" + resolved 
"http://localhost:4873/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81" + integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA== + +connect-history-api-fallback@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" + integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== + +content-disposition@0.5.4: + version "0.5.4" + resolved "http://localhost:4873/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" + +content-type@~1.0.4: + version "1.0.4" + resolved "http://localhost:4873/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + +convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: + version "1.8.0" + resolved "http://localhost:4873/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + +cookie-signature@1.0.6: + version "1.0.6" + resolved "http://localhost:4873/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== + +cookie@0.5.0: + version "0.5.0" + resolved "http://localhost:4873/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + integrity 
sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== + +core-js-compat@^3.25.1: + version "3.25.5" + resolved "http://localhost:4873/core-js-compat/-/core-js-compat-3.25.5.tgz#0016e8158c904f7b059486639e6e82116eafa7d9" + integrity sha512-ovcyhs2DEBUIE0MGEKHP4olCUW/XYte3Vroyxuh38rD1wAO4dHohsovUC4eAOuzFxE6b+RXvBU3UZ9o0YhUTkA== + dependencies: + browserslist "^4.21.4" + +core-js-pure@^3.25.1, core-js-pure@^3.8.1: + version "3.25.5" + resolved "http://localhost:4873/core-js-pure/-/core-js-pure-3.25.5.tgz#79716ba54240c6aa9ceba6eee08cf79471ba184d" + integrity sha512-oml3M22pHM+igfWHDfdLVq2ShWmjM2V4L+dQEBs0DWVIqEm9WHCwGAlZ6BmyBQGy5sFrJmcx+856D9lVKyGWYg== + +core-js@^3.19.2: + version "3.25.5" + resolved "http://localhost:4873/core-js/-/core-js-3.25.5.tgz#e86f651a2ca8a0237a5f064c2fe56cef89646e27" + integrity sha512-nbm6eZSjm+ZuBQxCUPQKQCoUEfFOXjUZ8dTTyikyKaWrTYmAVbykQfwsKE5dBK88u3QCkCrzsx/PPlKfhsvgpw== + +core-util-is@~1.0.0: + version "1.0.3" + resolved "http://localhost:4873/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== + +cosmiconfig-typescript-loader@^4.1.1: + version "4.1.1" + resolved "http://localhost:4873/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-4.1.1.tgz#38dd3578344038dae40fdf09792bc2e9df529f78" + integrity sha512-9DHpa379Gp0o0Zefii35fcmuuin6q92FnLDffzdZ0l9tVd3nEobG3O+MZ06+kuBvFTSVScvNb/oHA13Nd4iipg== + +cosmiconfig@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" + integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.1.0" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.7.2" + +cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: + version "7.0.1" + resolved 
"http://localhost:4873/cosmiconfig/-/cosmiconfig-7.0.1.tgz#714d756522cace867867ccb4474c5d01bbae5d6d" + integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.2.1" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.10.0" + +craco-wasm@0.0.1: + version "0.0.1" + resolved "http://localhost:4873/craco-wasm/-/craco-wasm-0.0.1.tgz#a7edbf7ff64e7569909b15684c00de13209985c6" + integrity sha512-0vwZLtkQocS7UlPg9IF4TsG/6gKXcd9O0ISomjRoBMvR2XvtZN4yxvU8/WlY0Vf42PtOcWvhSx9i4oVNxLVE6w== + +cross-spawn@^7.0.2, cross-spawn@^7.0.3: + version "7.0.3" + resolved "http://localhost:4873/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +crypto-random-string@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" + integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== + +css-blank-pseudo@^3.0.3: + version "3.0.3" + resolved "http://localhost:4873/css-blank-pseudo/-/css-blank-pseudo-3.0.3.tgz#36523b01c12a25d812df343a32c322d2a2324561" + integrity sha512-VS90XWtsHGqoM0t4KpH053c4ehxZ2E6HtGI7x68YFV0pTo/QmkV/YFA+NnlvK8guxZVNWGQhVNJGC39Q8XF4OQ== + dependencies: + postcss-selector-parser "^6.0.9" + +css-declaration-sorter@^6.3.0: + version "6.3.1" + resolved "http://localhost:4873/css-declaration-sorter/-/css-declaration-sorter-6.3.1.tgz#be5e1d71b7a992433fb1c542c7a1b835e45682ec" + integrity sha512-fBffmak0bPAnyqc/HO8C3n2sHrp9wcqQz6ES9koRF2/mLOVAx9zIQ3Y7R29sYCteTPqMCwns4WYQoCX91Xl3+w== + +css-has-pseudo@^3.0.4: + version "3.0.4" + resolved 
"http://localhost:4873/css-has-pseudo/-/css-has-pseudo-3.0.4.tgz#57f6be91ca242d5c9020ee3e51bbb5b89fc7af73" + integrity sha512-Vse0xpR1K9MNlp2j5w1pgWIJtm1a8qS0JwS9goFYcImjlHEmywP9VUF05aGBXzGpDJF86QXk4L0ypBmwPhGArw== + dependencies: + postcss-selector-parser "^6.0.9" + +css-loader@^6.5.1: + version "6.7.1" + resolved "http://localhost:4873/css-loader/-/css-loader-6.7.1.tgz#e98106f154f6e1baf3fc3bc455cb9981c1d5fd2e" + integrity sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw== + dependencies: + icss-utils "^5.1.0" + postcss "^8.4.7" + postcss-modules-extract-imports "^3.0.0" + postcss-modules-local-by-default "^4.0.0" + postcss-modules-scope "^3.0.0" + postcss-modules-values "^4.0.0" + postcss-value-parser "^4.2.0" + semver "^7.3.5" + +css-minimizer-webpack-plugin@^3.2.0: + version "3.4.1" + resolved "http://localhost:4873/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz#ab78f781ced9181992fe7b6e4f3422e76429878f" + integrity sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q== + dependencies: + cssnano "^5.0.6" + jest-worker "^27.0.2" + postcss "^8.3.5" + schema-utils "^4.0.0" + serialize-javascript "^6.0.0" + source-map "^0.6.1" + +css-prefers-color-scheme@^6.0.3: + version "6.0.3" + resolved "http://localhost:4873/css-prefers-color-scheme/-/css-prefers-color-scheme-6.0.3.tgz#ca8a22e5992c10a5b9d315155e7caee625903349" + integrity sha512-4BqMbZksRkJQx2zAjrokiGMd07RqOa2IxIrrN10lyBe9xhn9DEvjUK79J6jkeiv9D9hQFXKb6g1jwU62jziJZA== + +css-select-base-adapter@^0.1.1: + version "0.1.1" + resolved "http://localhost:4873/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7" + integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== + +css-select@^2.0.0: + version "2.1.0" + resolved "http://localhost:4873/css-select/-/css-select-2.1.0.tgz#6a34653356635934a81baca68d0255432105dbef" 
+ integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== + dependencies: + boolbase "^1.0.0" + css-what "^3.2.1" + domutils "^1.7.0" + nth-check "^1.0.2" + +css-select@^4.1.3: + version "4.3.0" + resolved "http://localhost:4873/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" + integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== + dependencies: + boolbase "^1.0.0" + css-what "^6.0.1" + domhandler "^4.3.1" + domutils "^2.8.0" + nth-check "^2.0.1" + +css-tree@1.0.0-alpha.37: + version "1.0.0-alpha.37" + resolved "http://localhost:4873/css-tree/-/css-tree-1.0.0-alpha.37.tgz#98bebd62c4c1d9f960ec340cf9f7522e30709a22" + integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== + dependencies: + mdn-data "2.0.4" + source-map "^0.6.1" + +css-tree@^1.1.2, css-tree@^1.1.3: + version "1.1.3" + resolved "http://localhost:4873/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" + integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== + dependencies: + mdn-data "2.0.14" + source-map "^0.6.1" + +css-what@^3.2.1: + version "3.4.2" + resolved "http://localhost:4873/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4" + integrity sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== + +css-what@^6.0.1: + version "6.1.0" + resolved "http://localhost:4873/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" + integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== + +css.escape@^1.5.1: + version "1.5.1" + resolved "http://localhost:4873/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb" + integrity 
sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg== + +cssdb@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/cssdb/-/cssdb-7.0.1.tgz#3810a0c67ae06362982dfe965dbedf57a0f26617" + integrity sha512-pT3nzyGM78poCKLAEy2zWIVX2hikq6dIrjuZzLV98MumBg+xMTNYfHx7paUlfiRTgg91O/vR889CIf+qiv79Rw== + +cssesc@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + +cssnano-preset-default@^5.2.12: + version "5.2.12" + resolved "http://localhost:4873/cssnano-preset-default/-/cssnano-preset-default-5.2.12.tgz#ebe6596ec7030e62c3eb2b3c09f533c0644a9a97" + integrity sha512-OyCBTZi+PXgylz9HAA5kHyoYhfGcYdwFmyaJzWnzxuGRtnMw/kR6ilW9XzlzlRAtB6PLT/r+prYgkef7hngFew== + dependencies: + css-declaration-sorter "^6.3.0" + cssnano-utils "^3.1.0" + postcss-calc "^8.2.3" + postcss-colormin "^5.3.0" + postcss-convert-values "^5.1.2" + postcss-discard-comments "^5.1.2" + postcss-discard-duplicates "^5.1.0" + postcss-discard-empty "^5.1.1" + postcss-discard-overridden "^5.1.0" + postcss-merge-longhand "^5.1.6" + postcss-merge-rules "^5.1.2" + postcss-minify-font-values "^5.1.0" + postcss-minify-gradients "^5.1.1" + postcss-minify-params "^5.1.3" + postcss-minify-selectors "^5.2.1" + postcss-normalize-charset "^5.1.0" + postcss-normalize-display-values "^5.1.0" + postcss-normalize-positions "^5.1.1" + postcss-normalize-repeat-style "^5.1.1" + postcss-normalize-string "^5.1.0" + postcss-normalize-timing-functions "^5.1.0" + postcss-normalize-unicode "^5.1.0" + postcss-normalize-url "^5.1.0" + postcss-normalize-whitespace "^5.1.1" + postcss-ordered-values "^5.1.3" + postcss-reduce-initial "^5.1.0" + postcss-reduce-transforms "^5.1.0" + postcss-svgo "^5.1.0" + postcss-unique-selectors "^5.1.1" + +cssnano-utils@^3.1.0: + version "3.1.0" + resolved 
"http://localhost:4873/cssnano-utils/-/cssnano-utils-3.1.0.tgz#95684d08c91511edfc70d2636338ca37ef3a6861" + integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== + +cssnano@^5.0.6: + version "5.1.13" + resolved "http://localhost:4873/cssnano/-/cssnano-5.1.13.tgz#83d0926e72955332dc4802a7070296e6258efc0a" + integrity sha512-S2SL2ekdEz6w6a2epXn4CmMKU4K3KpcyXLKfAYc9UQQqJRkD/2eLUG0vJ3Db/9OvO5GuAdgXw3pFbR6abqghDQ== + dependencies: + cssnano-preset-default "^5.2.12" + lilconfig "^2.0.3" + yaml "^1.10.2" + +csso@^4.0.2, csso@^4.2.0: + version "4.2.0" + resolved "http://localhost:4873/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529" + integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== + dependencies: + css-tree "^1.1.2" + +cssom@^0.4.4: + version "0.4.4" + resolved "http://localhost:4873/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" + integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== + +cssom@~0.3.6: + version "0.3.8" + resolved "http://localhost:4873/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" + integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== + +cssstyle@^2.3.0: + version "2.3.0" + resolved "http://localhost:4873/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" + integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== + dependencies: + cssom "~0.3.6" + +csstype@^3.0.2: + version "3.1.1" + resolved "http://localhost:4873/csstype/-/csstype-3.1.1.tgz#841b532c45c758ee546a11d5bd7b7b473c8c30b9" + integrity sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw== + +damerau-levenshtein@^1.0.8: + version "1.0.8" + resolved 
"http://localhost:4873/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" + integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== + +data-urls@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" + integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== + dependencies: + abab "^2.0.3" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.0.0" + +debug@2.6.9, debug@^2.6.0, debug@^2.6.9: + version "2.6.9" + resolved "http://localhost:4873/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: + version "4.3.4" + resolved "http://localhost:4873/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +debug@^3.2.7: + version "3.2.7" + resolved "http://localhost:4873/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +decimal.js@^10.2.1: + version "10.4.1" + resolved "http://localhost:4873/decimal.js/-/decimal.js-10.4.1.tgz#be75eeac4a2281aace80c1a8753587c27ef053e7" + integrity sha512-F29o+vci4DodHYT9UrR5IEbfBw9pE5eSapIJdTqXK5+6hq+t8VRxwQyKlW2i+KDKFkkJQRvFyI/QXD83h8LyQw== + +dedent@^0.7.0: + version "0.7.0" + resolved "http://localhost:4873/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== + +deep-is@^0.1.3, deep-is@~0.1.3: + 
version "0.1.4" + resolved "http://localhost:4873/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +deepmerge@^4.2.2: + version "4.2.2" + resolved "http://localhost:4873/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +default-gateway@^6.0.3: + version "6.0.3" + resolved "http://localhost:4873/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" + integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== + dependencies: + execa "^5.0.0" + +define-lazy-prop@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" + integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== + +define-properties@^1.1.3, define-properties@^1.1.4: + version "1.1.4" + resolved "http://localhost:4873/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" + integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== + dependencies: + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +defined@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" + integrity sha512-Y2caI5+ZwS5c3RiNDJ6u53VhQHv+hHKwhkI1iHvceKUHw9Df6EK2zRLfjejRgMuCuxK7PfSWIMwWecceVvThjQ== + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "http://localhost:4873/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + +depd@2.0.0: + version "2.0.0" + 
resolved "http://localhost:4873/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + +depd@~1.1.2: + version "1.1.2" + resolved "http://localhost:4873/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" + integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== + +destroy@1.2.0: + version "1.2.0" + resolved "http://localhost:4873/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== + +detect-newline@^3.0.0: + version "3.1.0" + resolved "http://localhost:4873/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + +detect-node@^2.0.4: + version "2.1.0" + resolved "http://localhost:4873/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" + integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== + +detect-port-alt@^1.1.6: + version "1.1.6" + resolved "http://localhost:4873/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275" + integrity sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== + dependencies: + address "^1.0.1" + debug "^2.6.0" + +detective@^5.2.1: + version "5.2.1" + resolved "http://localhost:4873/detective/-/detective-5.2.1.tgz#6af01eeda11015acb0e73f933242b70f24f91034" + integrity sha512-v9XE1zRnz1wRtgurGu0Bs8uHKFSTdteYZNbIPFVhUZ39L/S79ppMpdmVOZAnoz1jfEFodc48n6MX483Xo3t1yw== + dependencies: + acorn-node "^1.8.2" + defined "^1.0.0" + minimist "^1.2.6" + +didyoumean@^1.2.2: + version "1.2.2" + resolved 
"http://localhost:4873/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" + integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== + +diff-sequences@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/diff-sequences/-/diff-sequences-27.5.1.tgz#eaecc0d327fd68c8d9672a1e64ab8dccb2ef5327" + integrity sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ== + +diff-sequences@^29.0.0: + version "29.0.0" + resolved "http://localhost:4873/diff-sequences/-/diff-sequences-29.0.0.tgz#bae49972ef3933556bcb0800b72e8579d19d9e4f" + integrity sha512-7Qe/zd1wxSDL4D/X/FPjOMB+ZMDt71W94KYaq05I2l0oQqgXgs7s4ftYYmV38gBSrPz2vcygxfs1xn0FT+rKNA== + +dir-glob@^3.0.1: + version "3.0.1" + resolved "http://localhost:4873/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +dlv@^1.1.3: + version "1.1.3" + resolved "http://localhost:4873/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" + integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== + +dns-equal@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" + integrity sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg== + +dns-packet@^5.2.2: + version "5.4.0" + resolved "http://localhost:4873/dns-packet/-/dns-packet-5.4.0.tgz#1f88477cf9f27e78a213fb6d118ae38e759a879b" + integrity sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g== + dependencies: + "@leichtgewicht/ip-codec" "^2.0.1" + +doctrine@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + 
integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +dom-accessibility-api@^0.5.6, dom-accessibility-api@^0.5.9: + version "0.5.14" + resolved "http://localhost:4873/dom-accessibility-api/-/dom-accessibility-api-0.5.14.tgz#56082f71b1dc7aac69d83c4285eef39c15d93f56" + integrity sha512-NMt+m9zFMPZe0JcY9gN224Qvk6qLIdqex29clBvc/y75ZBX9YA9wNK3frsYvu2DI1xcCIwxwnX+TlsJ2DSOADg== + +dom-converter@^0.2.0: + version "0.2.0" + resolved "http://localhost:4873/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" + integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== + dependencies: + utila "~0.4" + +dom-serializer@0: + version "0.2.2" + resolved "http://localhost:4873/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" + integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== + dependencies: + domelementtype "^2.0.1" + entities "^2.0.0" + +dom-serializer@^1.0.1: + version "1.4.1" + resolved "http://localhost:4873/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" + integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.2.0" + entities "^2.0.0" + +domelementtype@1: + version "1.3.1" + resolved "http://localhost:4873/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" + integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== + 
+domelementtype@^2.0.1, domelementtype@^2.2.0: + version "2.3.0" + resolved "http://localhost:4873/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" + integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== + +domexception@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" + integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== + dependencies: + webidl-conversions "^5.0.0" + +domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: + version "4.3.1" + resolved "http://localhost:4873/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" + integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== + dependencies: + domelementtype "^2.2.0" + +domutils@^1.7.0: + version "1.7.0" + resolved "http://localhost:4873/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" + integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== + dependencies: + dom-serializer "0" + domelementtype "1" + +domutils@^2.5.2, domutils@^2.8.0: + version "2.8.0" + resolved "http://localhost:4873/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" + integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== + dependencies: + dom-serializer "^1.0.1" + domelementtype "^2.2.0" + domhandler "^4.2.0" + +dot-case@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" + integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +dotenv-expand@^5.1.0: + version "5.1.0" + resolved 
"http://localhost:4873/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" + integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== + +dotenv@^10.0.0: + version "10.0.0" + resolved "http://localhost:4873/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" + integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== + +duplexer@^0.1.2: + version "0.1.2" + resolved "http://localhost:4873/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" + integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== + +ee-first@1.1.1: + version "1.1.1" + resolved "http://localhost:4873/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== + +ejs@^3.1.6: + version "3.1.8" + resolved "http://localhost:4873/ejs/-/ejs-3.1.8.tgz#758d32910c78047585c7ef1f92f9ee041c1c190b" + integrity sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ== + dependencies: + jake "^10.8.5" + +electron-to-chromium@^1.4.251: + version "1.4.271" + resolved "http://localhost:4873/electron-to-chromium/-/electron-to-chromium-1.4.271.tgz#2d9f04f6a53c70e1bb1acfaae9c39f07ca40d290" + integrity sha512-BCPBtK07xR1/uY2HFDtl3wK2De66AW4MSiPlLrnPNxKC/Qhccxd59W73654S3y6Rb/k3hmuGJOBnhjfoutetXA== + +emittery@^0.10.2: + version "0.10.2" + resolved "http://localhost:4873/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" + integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== + +emittery@^0.8.1: + version "0.8.1" + resolved "http://localhost:4873/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" + integrity 
sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "http://localhost:4873/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +emoji-regex@^9.2.2: + version "9.2.2" + resolved "http://localhost:4873/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + +emojis-list@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" + integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== + +encodeurl@~1.0.2: + version "1.0.2" + resolved "http://localhost:4873/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== + +enhanced-resolve@^5.10.0: + version "5.10.0" + resolved "http://localhost:4873/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz#0dc579c3bb2a1032e357ac45b8f3a6f3ad4fb1e6" + integrity sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + +entities@^2.0.0: + version "2.2.0" + resolved "http://localhost:4873/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" + integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== + +error-ex@^1.3.1: + version "1.3.2" + resolved "http://localhost:4873/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + 
is-arrayish "^0.2.1" + +error-stack-parser@^2.0.6: + version "2.1.4" + resolved "http://localhost:4873/error-stack-parser/-/error-stack-parser-2.1.4.tgz#229cb01cdbfa84440bfa91876285b94680188286" + integrity sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ== + dependencies: + stackframe "^1.3.4" + +es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5, es-abstract@^1.20.1: + version "1.20.3" + resolved "http://localhost:4873/es-abstract/-/es-abstract-1.20.3.tgz#90b143ff7aedc8b3d189bcfac7f1e3e3f81e9da1" + integrity sha512-AyrnaKVpMzljIdwjzrj+LxGmj8ik2LckwXacHqrJJ/jxz6dDDBcZ7I7nlHM0FvEW8MfbWJwOd+yT2XzYW49Frw== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + function.prototype.name "^1.1.5" + get-intrinsic "^1.1.3" + get-symbol-description "^1.0.0" + has "^1.0.3" + has-property-descriptors "^1.0.0" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + is-callable "^1.2.6" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-weakref "^1.0.2" + object-inspect "^1.12.2" + object-keys "^1.1.1" + object.assign "^4.1.4" + regexp.prototype.flags "^1.4.3" + safe-regex-test "^1.0.0" + string.prototype.trimend "^1.0.5" + string.prototype.trimstart "^1.0.5" + unbox-primitive "^1.0.2" + +es-array-method-boxes-properly@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz#873f3e84418de4ee19c5be752990b2e44718d09e" + integrity sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA== + +es-module-lexer@^0.9.0: + version "0.9.3" + resolved "http://localhost:4873/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" + integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== + +es-shim-unscopables@^1.0.0: 
+ version "1.0.0" + resolved "http://localhost:4873/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== + dependencies: + has "^1.0.3" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "http://localhost:4873/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escalade@^3.1.1: + version "3.1.1" + resolved "http://localhost:4873/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-html@~1.0.3: + version "1.0.3" + resolved "http://localhost:4873/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== + +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity 
sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +escodegen@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" + integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== + dependencies: + esprima "^4.0.1" + estraverse "^5.2.0" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.6.1" + +eslint-config-react-app@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/eslint-config-react-app/-/eslint-config-react-app-7.0.1.tgz#73ba3929978001c5c86274c017ea57eb5fa644b4" + integrity sha512-K6rNzvkIeHaTd8m/QEh1Zko0KI7BACWkkneSs6s9cKZC/J27X3eZR6Upt1jkmZ/4FK+XUOPPxMEN7+lbUXfSlA== + dependencies: + "@babel/core" "^7.16.0" + "@babel/eslint-parser" "^7.16.3" + "@rushstack/eslint-patch" "^1.1.0" + "@typescript-eslint/eslint-plugin" "^5.5.0" + "@typescript-eslint/parser" "^5.5.0" + babel-preset-react-app "^10.0.1" + confusing-browser-globals "^1.0.11" + eslint-plugin-flowtype "^8.0.3" + eslint-plugin-import "^2.25.3" + eslint-plugin-jest "^25.3.0" + eslint-plugin-jsx-a11y "^6.5.1" + eslint-plugin-react "^7.27.1" + eslint-plugin-react-hooks "^4.3.0" + eslint-plugin-testing-library "^5.0.1" + +eslint-import-resolver-node@^0.3.6: + version "0.3.6" + resolved "http://localhost:4873/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" + integrity sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== + dependencies: + debug "^3.2.7" + resolve "^1.20.0" + +eslint-module-utils@^2.7.3: + version "2.7.4" + resolved "http://localhost:4873/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz#4f3e41116aaf13a20792261e61d3a2e7e0583974" + integrity sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA== + dependencies: + debug "^3.2.7" + 
+eslint-plugin-flowtype@^8.0.3: + version "8.0.3" + resolved "http://localhost:4873/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz#e1557e37118f24734aa3122e7536a038d34a4912" + integrity sha512-dX8l6qUL6O+fYPtpNRideCFSpmWOUVx5QcaGLVqe/vlDiBSe4vYljDWDETwnyFzpl7By/WVIu6rcrniCgH9BqQ== + dependencies: + lodash "^4.17.21" + string-natural-compare "^3.0.1" + +eslint-plugin-import@^2.25.3: + version "2.26.0" + resolved "http://localhost:4873/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz#f812dc47be4f2b72b478a021605a59fc6fe8b88b" + integrity sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA== + dependencies: + array-includes "^3.1.4" + array.prototype.flat "^1.2.5" + debug "^2.6.9" + doctrine "^2.1.0" + eslint-import-resolver-node "^0.3.6" + eslint-module-utils "^2.7.3" + has "^1.0.3" + is-core-module "^2.8.1" + is-glob "^4.0.3" + minimatch "^3.1.2" + object.values "^1.1.5" + resolve "^1.22.0" + tsconfig-paths "^3.14.1" + +eslint-plugin-jest@^25.3.0: + version "25.7.0" + resolved "http://localhost:4873/eslint-plugin-jest/-/eslint-plugin-jest-25.7.0.tgz#ff4ac97520b53a96187bad9c9814e7d00de09a6a" + integrity sha512-PWLUEXeeF7C9QGKqvdSbzLOiLTx+bno7/HC9eefePfEb257QFHg7ye3dh80AZVkaa/RQsBB1Q/ORQvg2X7F0NQ== + dependencies: + "@typescript-eslint/experimental-utils" "^5.0.0" + +eslint-plugin-jsx-a11y@^6.5.1: + version "6.6.1" + resolved "http://localhost:4873/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.6.1.tgz#93736fc91b83fdc38cc8d115deedfc3091aef1ff" + integrity sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q== + dependencies: + "@babel/runtime" "^7.18.9" + aria-query "^4.2.2" + array-includes "^3.1.5" + ast-types-flow "^0.0.7" + axe-core "^4.4.3" + axobject-query "^2.2.0" + damerau-levenshtein "^1.0.8" + emoji-regex "^9.2.2" + has "^1.0.3" + jsx-ast-utils "^3.3.2" + language-tags "^1.0.5" + minimatch "^3.1.2" + semver "^6.3.0" + +eslint-plugin-react-hooks@^4.3.0: + 
version "4.6.0" + resolved "http://localhost:4873/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" + integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== + +eslint-plugin-react@^7.27.1: + version "7.31.8" + resolved "http://localhost:4873/eslint-plugin-react/-/eslint-plugin-react-7.31.8.tgz#3a4f80c10be1bcbc8197be9e8b641b2a3ef219bf" + integrity sha512-5lBTZmgQmARLLSYiwI71tiGVTLUuqXantZM6vlSY39OaDSV0M7+32K5DnLkmFrwTe+Ksz0ffuLUC91RUviVZfw== + dependencies: + array-includes "^3.1.5" + array.prototype.flatmap "^1.3.0" + doctrine "^2.1.0" + estraverse "^5.3.0" + jsx-ast-utils "^2.4.1 || ^3.0.0" + minimatch "^3.1.2" + object.entries "^1.1.5" + object.fromentries "^2.0.5" + object.hasown "^1.1.1" + object.values "^1.1.5" + prop-types "^15.8.1" + resolve "^2.0.0-next.3" + semver "^6.3.0" + string.prototype.matchall "^4.0.7" + +eslint-plugin-testing-library@^5.0.1: + version "5.7.2" + resolved "http://localhost:4873/eslint-plugin-testing-library/-/eslint-plugin-testing-library-5.7.2.tgz#c1b2112a40aab61f93e10859e8b2d81e54f0ce84" + integrity sha512-0ZmHeR/DUUgEzW8rwUBRWxuqntipDtpvxK0hymdHnLlABryJkzd+CAHr+XnISaVsTisZ5MLHp6nQF+8COHLLTA== + dependencies: + "@typescript-eslint/utils" "^5.13.0" + +eslint-scope@5.1.1, eslint-scope@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-scope@^7.1.1: + version "7.1.1" + resolved "http://localhost:4873/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" + integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== + dependencies: + esrecurse "^4.3.0" + estraverse "^5.2.0" + +eslint-utils@^3.0.0: + 
version "3.0.0" + resolved "http://localhost:4873/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" + integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== + dependencies: + eslint-visitor-keys "^2.0.0" + +eslint-visitor-keys@^2.0.0, eslint-visitor-keys@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint-visitor-keys@^3.3.0: + version "3.3.0" + resolved "http://localhost:4873/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" + integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== + +eslint-webpack-plugin@^3.1.1: + version "3.2.0" + resolved "http://localhost:4873/eslint-webpack-plugin/-/eslint-webpack-plugin-3.2.0.tgz#1978cdb9edc461e4b0195a20da950cf57988347c" + integrity sha512-avrKcGncpPbPSUHX6B3stNGzkKFto3eL+DKM4+VyMrVnhPc3vRczVlCq3uhuFOdRvDHTVXuzwk1ZKUrqDQHQ9w== + dependencies: + "@types/eslint" "^7.29.0 || ^8.4.1" + jest-worker "^28.0.2" + micromatch "^4.0.5" + normalize-path "^3.0.0" + schema-utils "^4.0.0" + +eslint@^8.3.0: + version "8.24.0" + resolved "http://localhost:4873/eslint/-/eslint-8.24.0.tgz#489516c927a5da11b3979dbfb2679394523383c8" + integrity sha512-dWFaPhGhTAiPcCgm3f6LI2MBWbogMnTJzFBbhXVRQDJPkr9pGZvVjlVfXd+vyDcWPA2Ic9L2AXPIQM0+vk/cSQ== + dependencies: + "@eslint/eslintrc" "^1.3.2" + "@humanwhocodes/config-array" "^0.10.5" + "@humanwhocodes/gitignore-to-minimatch" "^1.0.2" + "@humanwhocodes/module-importer" "^1.0.1" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.3.2" + doctrine "^3.0.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.1.1" + eslint-utils "^3.0.0" + eslint-visitor-keys "^3.3.0" + espree "^9.4.0" + esquery "^1.4.0" + 
esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + find-up "^5.0.0" + glob-parent "^6.0.1" + globals "^13.15.0" + globby "^11.1.0" + grapheme-splitter "^1.0.4" + ignore "^5.2.0" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-sdsl "^4.1.4" + js-yaml "^4.1.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.1.2" + natural-compare "^1.4.0" + optionator "^0.9.1" + regexpp "^3.2.0" + strip-ansi "^6.0.1" + strip-json-comments "^3.1.0" + text-table "^0.2.0" + +espree@^9.4.0: + version "9.4.0" + resolved "http://localhost:4873/espree/-/espree-9.4.0.tgz#cd4bc3d6e9336c433265fc0aa016fc1aaf182f8a" + integrity sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw== + dependencies: + acorn "^8.8.0" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.3.0" + +esprima@^4.0.0, esprima@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.4.0: + version "1.4.0" + resolved "http://localhost:4873/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "http://localhost:4873/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "http://localhost:4873/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + 
+estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: + version "5.3.0" + resolved "http://localhost:4873/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +estree-walker@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/estree-walker/-/estree-walker-1.0.1.tgz#31bc5d612c96b704106b477e6dd5d8aa138cb700" + integrity sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg== + +esutils@^2.0.2: + version "2.0.3" + resolved "http://localhost:4873/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +etag@~1.8.1: + version "1.8.1" + resolved "http://localhost:4873/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== + +eventemitter3@^4.0.0: + version "4.0.7" + resolved "http://localhost:4873/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" + integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== + +events@^3.2.0: + version "3.3.0" + resolved "http://localhost:4873/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + +execa@^5.0.0: + version "5.1.1" + resolved "http://localhost:4873/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit 
"^3.0.3" + strip-final-newline "^2.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "http://localhost:4873/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== + +expect@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/expect/-/expect-27.5.1.tgz#83ce59f1e5bdf5f9d2b94b61d2050db48f3fef74" + integrity sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw== + dependencies: + "@jest/types" "^27.5.1" + jest-get-type "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + +expect@^29.0.0: + version "29.1.2" + resolved "http://localhost:4873/expect/-/expect-29.1.2.tgz#82f8f28d7d408c7c68da3a386a490ee683e1eced" + integrity sha512-AuAGn1uxva5YBbBlXb+2JPxJRuemZsmlGcapPXWNSBNsQtAULfjioREGBWuI0EOvYUKjDnrCy8PW5Zlr1md5mw== + dependencies: + "@jest/expect-utils" "^29.1.2" + jest-get-type "^29.0.0" + jest-matcher-utils "^29.1.2" + jest-message-util "^29.1.2" + jest-util "^29.1.2" + +express@^4.17.3: + version "4.18.1" + resolved "http://localhost:4873/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" + integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.20.0" + content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.5.0" + cookie-signature "1.0.6" + debug "2.6.9" + depd "2.0.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.2.0" + fresh "0.5.2" + http-errors "2.0.0" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "2.4.1" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.7" + qs "6.10.3" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.18.0" + serve-static "1.15.0" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + 
+fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "http://localhost:4873/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.2.11, fast-glob@^3.2.9: + version "3.2.12" + resolved "http://localhost:4873/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: + version "2.0.6" + resolved "http://localhost:4873/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== + +fastq@^1.6.0: + version "1.13.0" + resolved "http://localhost:4873/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + dependencies: + reusify "^1.0.4" + +faye-websocket@^0.11.3: + version "0.11.4" + resolved "http://localhost:4873/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" + integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== + dependencies: + websocket-driver ">=0.5.1" + +fb-watchman@^2.0.0: + version "2.0.2" + 
resolved "http://localhost:4873/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" + integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== + dependencies: + bser "2.1.1" + +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +file-loader@^6.2.0: + version "6.2.0" + resolved "http://localhost:4873/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d" + integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== + dependencies: + loader-utils "^2.0.0" + schema-utils "^3.0.0" + +filelist@^1.0.1: + version "1.0.4" + resolved "http://localhost:4873/filelist/-/filelist-1.0.4.tgz#f78978a1e944775ff9e62e744424f215e58352b5" + integrity sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q== + dependencies: + minimatch "^5.0.1" + +filesize@^8.0.6: + version "8.0.7" + resolved "http://localhost:4873/filesize/-/filesize-8.0.7.tgz#695e70d80f4e47012c132d57a059e80c6b580bd8" + integrity sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ== + +fill-range@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +finalhandler@1.2.0: + version "1.2.0" + resolved "http://localhost:4873/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" + integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== + 
dependencies: + debug "2.6.9" + encodeurl "~1.0.2" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + +find-cache-dir@^3.3.1: + version "3.3.2" + resolved "http://localhost:4873/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" + integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== + dependencies: + commondir "^1.0.1" + make-dir "^3.0.2" + pkg-dir "^4.1.0" + +find-up@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" + integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + dependencies: + locate-path "^3.0.0" + +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved "http://localhost:4873/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +find-up@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + dependencies: + locate-path "^6.0.0" + path-exists "^4.0.0" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^3.1.0: + version "3.2.7" + resolved "http://localhost:4873/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" + integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== + 
+follow-redirects@^1.0.0: + version "1.15.2" + resolved "http://localhost:4873/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== + +fork-ts-checker-webpack-plugin@^6.5.0: + version "6.5.2" + resolved "http://localhost:4873/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz#4f67183f2f9eb8ba7df7177ce3cf3e75cdafb340" + integrity sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA== + dependencies: + "@babel/code-frame" "^7.8.3" + "@types/json-schema" "^7.0.5" + chalk "^4.1.0" + chokidar "^3.4.2" + cosmiconfig "^6.0.0" + deepmerge "^4.2.2" + fs-extra "^9.0.0" + glob "^7.1.6" + memfs "^3.1.2" + minimatch "^3.0.4" + schema-utils "2.7.0" + semver "^7.3.2" + tapable "^1.0.0" + +form-data@^3.0.0: + version "3.0.1" + resolved "http://localhost:4873/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +forwarded@0.2.0: + version "0.2.0" + resolved "http://localhost:4873/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + +fraction.js@^4.2.0: + version "4.2.0" + resolved "http://localhost:4873/fraction.js/-/fraction.js-4.2.0.tgz#448e5109a313a3527f5a3ab2119ec4cf0e0e2950" + integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== + +fresh@0.5.2: + version "0.5.2" + resolved "http://localhost:4873/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== + 
+fs-extra@^10.0.0: + version "10.1.0" + resolved "http://localhost:4873/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-extra@^9.0.0, fs-extra@^9.0.1: + version "9.1.0" + resolved "http://localhost:4873/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" + integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== + dependencies: + at-least-node "^1.0.0" + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-monkey@^1.0.3: + version "1.0.3" + resolved "http://localhost:4873/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3" + integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +fsevents@^2.3.2, fsevents@~2.3.2: + version "2.3.2" + resolved "http://localhost:4873/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "http://localhost:4873/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +function.prototype.name@^1.1.5: + version "1.1.5" + resolved "http://localhost:4873/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" + integrity 
sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.0" + functions-have-names "^1.2.2" + +functions-have-names@^1.2.2: + version "1.2.3" + resolved "http://localhost:4873/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "http://localhost:4873/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-caller-file@^2.0.5: + version "2.0.5" + resolved "http://localhost:4873/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: + version "1.1.3" + resolved "http://localhost:4873/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" + integrity sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.3" + +get-own-enumerable-property-symbols@^3.0.0: + version "3.0.2" + resolved "http://localhost:4873/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664" + integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== + +get-package-type@^0.1.0: + version "0.1.0" + resolved "http://localhost:4873/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity 
sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-stream@^6.0.0: + version "6.0.1" + resolved "http://localhost:4873/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +get-symbol-description@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved "http://localhost:4873/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-parent@^6.0.1, glob-parent@^6.0.2: + version "6.0.2" + resolved "http://localhost:4873/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + +glob-to-regexp@^0.4.1: + version "0.4.1" + resolved "http://localhost:4873/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" + integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== + +glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: + version "7.2.3" + resolved "http://localhost:4873/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" 
+ once "^1.3.0" + path-is-absolute "^1.0.0" + +global-modules@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" + integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== + dependencies: + global-prefix "^3.0.0" + +global-prefix@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" + integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== + dependencies: + ini "^1.3.5" + kind-of "^6.0.2" + which "^1.3.1" + +globals@^11.1.0: + version "11.12.0" + resolved "http://localhost:4873/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^13.15.0: + version "13.17.0" + resolved "http://localhost:4873/globals/-/globals-13.17.0.tgz#902eb1e680a41da93945adbdcb5a9f361ba69bd4" + integrity sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw== + dependencies: + type-fest "^0.20.2" + +globby@^11.0.4, globby@^11.1.0: + version "11.1.0" + resolved "http://localhost:4873/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: + version "4.2.10" + resolved "http://localhost:4873/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + 
+grapheme-splitter@^1.0.4: + version "1.0.4" + resolved "http://localhost:4873/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" + integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== + +gzip-size@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" + integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== + dependencies: + duplexer "^0.1.2" + +handle-thing@^2.0.0: + version "2.0.1" + resolved "http://localhost:4873/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" + integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== + +harmony-reflect@^1.4.6: + version "1.6.2" + resolved "http://localhost:4873/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" + integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== + +has-bigints@^1.0.1, has-bigints@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== + +has-flag@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== + +has-flag@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-property-descriptors@^1.0.0: + version "1.0.0" + resolved 
"http://localhost:4873/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== + dependencies: + get-intrinsic "^1.1.1" + +has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: + version "1.0.3" + resolved "http://localhost:4873/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has@^1.0.3: + version "1.0.3" + resolved "http://localhost:4873/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +he@^1.2.0: + version "1.2.0" + resolved "http://localhost:4873/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" + integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== + +hoopy@^0.1.4: + version "0.1.4" + resolved "http://localhost:4873/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d" + integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== + +hpack.js@^2.1.6: + version "2.1.6" + resolved "http://localhost:4873/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" + integrity sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ== + dependencies: + inherits "^2.0.1" + obuf "^1.0.0" + readable-stream "^2.0.1" + wbuf "^1.1.0" + 
+html-encoding-sniffer@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" + integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== + dependencies: + whatwg-encoding "^1.0.5" + +html-entities@^2.1.0, html-entities@^2.3.2: + version "2.3.3" + resolved "http://localhost:4873/html-entities/-/html-entities-2.3.3.tgz#117d7626bece327fc8baace8868fa6f5ef856e46" + integrity sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA== + +html-escaper@^2.0.0: + version "2.0.2" + resolved "http://localhost:4873/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +html-minifier-terser@^6.0.2: + version "6.1.0" + resolved "http://localhost:4873/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab" + integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== + dependencies: + camel-case "^4.1.2" + clean-css "^5.2.2" + commander "^8.3.0" + he "^1.2.0" + param-case "^3.0.4" + relateurl "^0.2.7" + terser "^5.10.0" + +html-webpack-plugin@^5.5.0: + version "5.5.0" + resolved "http://localhost:4873/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz#c3911936f57681c1f9f4d8b68c158cd9dfe52f50" + integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw== + dependencies: + "@types/html-minifier-terser" "^6.0.0" + html-minifier-terser "^6.0.2" + lodash "^4.17.21" + pretty-error "^4.0.0" + tapable "^2.0.0" + +htmlparser2@^6.1.0: + version "6.1.0" + resolved "http://localhost:4873/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" + integrity 
sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.0.0" + domutils "^2.5.2" + entities "^2.0.0" + +http-deceiver@^1.2.7: + version "1.2.7" + resolved "http://localhost:4873/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" + integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== + +http-errors@2.0.0: + version "2.0.0" + resolved "http://localhost:4873/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + dependencies: + depd "2.0.0" + inherits "2.0.4" + setprototypeof "1.2.0" + statuses "2.0.1" + toidentifier "1.0.1" + +http-errors@~1.6.2: + version "1.6.3" + resolved "http://localhost:4873/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" + integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.0" + statuses ">= 1.4.0 < 2" + +http-parser-js@>=0.5.1: + version "0.5.8" + resolved "http://localhost:4873/http-parser-js/-/http-parser-js-0.5.8.tgz#af23090d9ac4e24573de6f6aecc9d84a48bf20e3" + integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q== + +http-proxy-agent@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" + integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== + dependencies: + "@tootallnate/once" "1" + agent-base "6" + debug "4" + +http-proxy-middleware@^2.0.3: + version "2.0.6" + resolved 
"http://localhost:4873/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f" + integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== + dependencies: + "@types/http-proxy" "^1.17.8" + http-proxy "^1.18.1" + is-glob "^4.0.1" + is-plain-obj "^3.0.0" + micromatch "^4.0.2" + +http-proxy@^1.18.1: + version "1.18.1" + resolved "http://localhost:4873/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" + integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== + dependencies: + eventemitter3 "^4.0.0" + follow-redirects "^1.0.0" + requires-port "^1.0.0" + +https-proxy-agent@^5.0.0: + version "5.0.1" + resolved "http://localhost:4873/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" + integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== + dependencies: + agent-base "6" + debug "4" + +human-signals@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +iconv-lite@0.4.24: + version "0.4.24" + resolved "http://localhost:4873/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +iconv-lite@^0.6.3: + version "0.6.3" + resolved "http://localhost:4873/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" + integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== + dependencies: + safer-buffer ">= 2.1.2 < 3.0.0" + +icss-utils@^5.0.0, icss-utils@^5.1.0: + version "5.1.0" + resolved 
"http://localhost:4873/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" + integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== + +idb@^7.0.1: + version "7.1.0" + resolved "http://localhost:4873/idb/-/idb-7.1.0.tgz#2cc886be57738419e57f9aab58f647e5e2160270" + integrity sha512-Wsk07aAxDsntgYJY4h0knZJuTxM73eQ4reRAO+Z1liOh8eMCJ/MoDS8fCui1vGT9mnjtl1sOu3I2i/W1swPYZg== + +identity-obj-proxy@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz#94d2bda96084453ef36fbc5aaec37e0f79f1fc14" + integrity sha512-00n6YnVHKrinT9t0d9+5yZC6UBNJANpYEQvL2LlX6Ab9lnmxzIRcEmTPuyGScvl1+jKuCICX1Z0Ab1pPKKdikA== + dependencies: + harmony-reflect "^1.4.6" + +ignore@^5.2.0: + version "5.2.0" + resolved "http://localhost:4873/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" + integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== + +immer@^9.0.7: + version "9.0.15" + resolved "http://localhost:4873/immer/-/immer-9.0.15.tgz#0b9169e5b1d22137aba7d43f8a81a495dd1b62dc" + integrity sha512-2eB/sswms9AEUSkOm4SbV5Y7Vmt/bKRwByd52jfLkW4OLYeaTP3EEiJ9agqU0O/tq6Dk62Zfj+TJSqfm1rLVGQ== + +import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "http://localhost:4873/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-local@^3.0.2: + version "3.1.0" + resolved "http://localhost:4873/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" + integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== + dependencies: + pkg-dir "^4.2.0" + resolve-cwd "^3.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + 
resolved "http://localhost:4873/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== + +indent-string@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + +inflight@^1.0.4: + version "1.0.6" + resolved "http://localhost:4873/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3: + version "2.0.4" + resolved "http://localhost:4873/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inherits@2.0.3: + version "2.0.3" + resolved "http://localhost:4873/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== + +ini@^1.3.5: + version "1.3.8" + resolved "http://localhost:4873/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + +internal-slot@^1.0.3: + version "1.0.3" + resolved "http://localhost:4873/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" + integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + dependencies: + get-intrinsic "^1.1.0" + has "^1.0.3" + side-channel "^1.0.4" + +ipaddr.js@1.9.1: + version "1.9.1" + resolved 
"http://localhost:4873/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +ipaddr.js@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0" + integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "http://localhost:4873/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== + +is-bigint@^1.0.1: + version "1.0.4" + resolved "http://localhost:4873/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "http://localhost:4873/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-boolean-object@^1.1.0: + version "1.1.2" + resolved "http://localhost:4873/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-callable@^1.1.4, is-callable@^1.2.6: + version "1.2.7" + resolved "http://localhost:4873/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== + +is-core-module@^2.8.1, 
is-core-module@^2.9.0: + version "2.10.0" + resolved "http://localhost:4873/is-core-module/-/is-core-module-2.10.0.tgz#9012ede0a91c69587e647514e1d5277019e728ed" + integrity sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1: + version "1.0.5" + resolved "http://localhost:4873/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-docker@^2.0.0, is-docker@^2.1.1: + version "2.2.1" + resolved "http://localhost:4873/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" + integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== + +is-extglob@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-generator-fn@^2.0.0: + version "2.1.0" + resolved "http://localhost:4873/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: + version "4.0.3" + resolved "http://localhost:4873/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity 
sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-module@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" + integrity sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g== + +is-negative-zero@^2.0.2: + version "2.0.2" + resolved "http://localhost:4873/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== + +is-number-object@^1.0.4: + version "1.0.7" + resolved "http://localhost:4873/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^7.0.0: + version "7.0.0" + resolved "http://localhost:4873/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-obj@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" + integrity sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg== + +is-plain-obj@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" + integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== + +is-plain-object@^2.0.4: + version "2.0.4" + resolved "http://localhost:4873/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + integrity 
sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== + dependencies: + isobject "^3.0.1" + +is-potential-custom-element-name@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" + integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== + +is-regex@^1.1.4: + version "1.1.4" + resolved "http://localhost:4873/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-regexp@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069" + integrity sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA== + +is-root@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c" + integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== + +is-shared-array-buffer@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" + integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== + dependencies: + call-bind "^1.0.2" + +is-stream@^2.0.0: + version "2.0.1" + resolved "http://localhost:4873/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved 
"http://localhost:4873/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "http://localhost:4873/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-typedarray@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== + +is-weakref@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== + dependencies: + call-bind "^1.0.2" + +is-wsl@^2.2.0: + version "2.2.0" + resolved "http://localhost:4873/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" + integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== + dependencies: + is-docker "^2.0.0" + +isarray@~1.0.0: + version "1.0.0" + resolved "http://localhost:4873/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== + +isexe@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +isobject@^3.0.1: + version "3.0.1" + resolved 
"http://localhost:4873/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== + +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + version "3.2.0" + resolved "http://localhost:4873/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== + +istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: + version "5.2.0" + resolved "http://localhost:4873/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.0.tgz#31d18bdd127f825dd02ea7bfdfd906f8ab840e9f" + integrity sha512-6Lthe1hqXHBNsqvgDzGO6l03XNeu3CrG4RqQ1KM9+l5+jNGpEJfIELx1NS3SEHmJQA8np/u+E4EPRKRiu6m19A== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.1" + resolved "http://localhost:4873/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.1.3: + version "3.1.5" + resolved "http://localhost:4873/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" + integrity 
sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + +jake@^10.8.5: + version "10.8.5" + resolved "http://localhost:4873/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46" + integrity sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw== + dependencies: + async "^3.2.3" + chalk "^4.0.2" + filelist "^1.0.1" + minimatch "^3.0.4" + +jest-changed-files@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-changed-files/-/jest-changed-files-27.5.1.tgz#a348aed00ec9bf671cc58a66fcbe7c3dfd6a68f5" + integrity sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw== + dependencies: + "@jest/types" "^27.5.1" + execa "^5.0.0" + throat "^6.0.1" + +jest-circus@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-circus/-/jest-circus-27.5.1.tgz#37a5a4459b7bf4406e53d637b49d22c65d125ecc" + integrity sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + dedent "^0.7.0" + expect "^27.5.1" + is-generator-fn "^2.0.0" + jest-each "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + pretty-format "^27.5.1" + slash "^3.0.0" + stack-utils "^2.0.3" + throat "^6.0.1" + +jest-cli@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-cli/-/jest-cli-27.5.1.tgz#278794a6e6458ea8029547e6c6cbf673bd30b145" + integrity sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw== + dependencies: + "@jest/core" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + 
import-local "^3.0.2" + jest-config "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + prompts "^2.0.1" + yargs "^16.2.0" + +jest-config@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-config/-/jest-config-27.5.1.tgz#5c387de33dca3f99ad6357ddeccd91bf3a0e4a41" + integrity sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA== + dependencies: + "@babel/core" "^7.8.0" + "@jest/test-sequencer" "^27.5.1" + "@jest/types" "^27.5.1" + babel-jest "^27.5.1" + chalk "^4.0.0" + ci-info "^3.2.0" + deepmerge "^4.2.2" + glob "^7.1.1" + graceful-fs "^4.2.9" + jest-circus "^27.5.1" + jest-environment-jsdom "^27.5.1" + jest-environment-node "^27.5.1" + jest-get-type "^27.5.1" + jest-jasmine2 "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-runner "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + micromatch "^4.0.4" + parse-json "^5.2.0" + pretty-format "^27.5.1" + slash "^3.0.0" + strip-json-comments "^3.1.1" + +jest-diff@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-diff/-/jest-diff-27.5.1.tgz#a07f5011ac9e6643cf8a95a462b7b1ecf6680def" + integrity sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw== + dependencies: + chalk "^4.0.0" + diff-sequences "^27.5.1" + jest-get-type "^27.5.1" + pretty-format "^27.5.1" + +jest-diff@^29.1.2: + version "29.1.2" + resolved "http://localhost:4873/jest-diff/-/jest-diff-29.1.2.tgz#bb7aaf5353227d6f4f96c5e7e8713ce576a607dc" + integrity sha512-4GQts0aUopVvecIT4IwD/7xsBaMhKTYoM4/njE/aVw9wpw+pIUVp8Vab/KnSzSilr84GnLBkaP3JLDnQYCKqVQ== + dependencies: + chalk "^4.0.0" + diff-sequences "^29.0.0" + jest-get-type "^29.0.0" + pretty-format "^29.1.2" + +jest-docblock@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-docblock/-/jest-docblock-27.5.1.tgz#14092f364a42c6108d42c33c8cf30e058e25f6c0" + integrity 
sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ== + dependencies: + detect-newline "^3.0.0" + +jest-each@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-each/-/jest-each-27.5.1.tgz#5bc87016f45ed9507fed6e4702a5b468a5b2c44e" + integrity sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ== + dependencies: + "@jest/types" "^27.5.1" + chalk "^4.0.0" + jest-get-type "^27.5.1" + jest-util "^27.5.1" + pretty-format "^27.5.1" + +jest-environment-jsdom@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz#ea9ccd1fc610209655a77898f86b2b559516a546" + integrity sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + jest-mock "^27.5.1" + jest-util "^27.5.1" + jsdom "^16.6.0" + +jest-environment-node@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-environment-node/-/jest-environment-node-27.5.1.tgz#dedc2cfe52fab6b8f5714b4808aefa85357a365e" + integrity sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + jest-mock "^27.5.1" + jest-util "^27.5.1" + +jest-get-type@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-get-type/-/jest-get-type-27.5.1.tgz#3cd613c507b0f7ace013df407a1c1cd578bcb4f1" + integrity sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw== + +jest-get-type@^29.0.0: + version "29.0.0" + resolved "http://localhost:4873/jest-get-type/-/jest-get-type-29.0.0.tgz#843f6c50a1b778f7325df1129a0fd7aa713aef80" + integrity 
sha512-83X19z/HuLKYXYHskZlBAShO7UfLFXu/vWajw9ZNJASN32li8yHMaVGAQqxFW1RCFOkB7cubaL6FaJVQqqJLSw== + +jest-haste-map@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-haste-map/-/jest-haste-map-27.5.1.tgz#9fd8bd7e7b4fa502d9c6164c5640512b4e811e7f" + integrity sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng== + dependencies: + "@jest/types" "^27.5.1" + "@types/graceful-fs" "^4.1.2" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.9" + jest-regex-util "^27.5.1" + jest-serializer "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" + micromatch "^4.0.4" + walker "^1.0.7" + optionalDependencies: + fsevents "^2.3.2" + +jest-jasmine2@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz#a037b0034ef49a9f3d71c4375a796f3b230d1ac4" + integrity sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/source-map" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + expect "^27.5.1" + is-generator-fn "^2.0.0" + jest-each "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + pretty-format "^27.5.1" + throat "^6.0.1" + +jest-leak-detector@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz#6ec9d54c3579dd6e3e66d70e3498adf80fde3fb8" + integrity sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ== + dependencies: + jest-get-type "^27.5.1" + pretty-format "^27.5.1" + +jest-matcher-utils@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz#9c0cdbda8245bc22d2331729d1091308b40cf8ab" + integrity 
sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw== + dependencies: + chalk "^4.0.0" + jest-diff "^27.5.1" + jest-get-type "^27.5.1" + pretty-format "^27.5.1" + +jest-matcher-utils@^29.1.2: + version "29.1.2" + resolved "http://localhost:4873/jest-matcher-utils/-/jest-matcher-utils-29.1.2.tgz#e68c4bcc0266e70aa1a5c13fb7b8cd4695e318a1" + integrity sha512-MV5XrD3qYSW2zZSHRRceFzqJ39B2z11Qv0KPyZYxnzDHFeYZGJlgGi0SW+IXSJfOewgJp/Km/7lpcFT+cgZypw== + dependencies: + chalk "^4.0.0" + jest-diff "^29.1.2" + jest-get-type "^29.0.0" + pretty-format "^29.1.2" + +jest-message-util@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-message-util/-/jest-message-util-27.5.1.tgz#bdda72806da10d9ed6425e12afff38cd1458b6cf" + integrity sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^27.5.1" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^27.5.1" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-message-util@^28.1.3: + version "28.1.3" + resolved "http://localhost:4873/jest-message-util/-/jest-message-util-28.1.3.tgz#232def7f2e333f1eecc90649b5b94b0055e7c43d" + integrity sha512-PFdn9Iewbt575zKPf1286Ht9EPoJmYT7P0kY+RibeYZ2XtOr53pDLEFoTWXbd1h4JiGiWpTBC84fc8xMXQMb7g== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^28.1.3" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^28.1.3" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-message-util@^29.1.2: + version "29.1.2" + resolved "http://localhost:4873/jest-message-util/-/jest-message-util-29.1.2.tgz#c21a33c25f9dc1ebfcd0f921d89438847a09a501" + integrity sha512-9oJ2Os+Qh6IlxLpmvshVbGUiSkZVc2FK+uGOm6tghafnB2RyjKAxMZhtxThRMxfX1J1SOMhTn9oK3/MutRWQJQ== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^29.1.2" + 
"@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^29.1.2" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-mock@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6" + integrity sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og== + dependencies: + "@jest/types" "^27.5.1" + "@types/node" "*" + +jest-pnp-resolver@^1.2.2: + version "1.2.2" + resolved "http://localhost:4873/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" + integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== + +jest-regex-util@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-regex-util/-/jest-regex-util-27.5.1.tgz#4da143f7e9fd1e542d4aa69617b38e4a78365b95" + integrity sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg== + +jest-regex-util@^28.0.0: + version "28.0.2" + resolved "http://localhost:4873/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" + integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== + +jest-resolve-dependencies@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz#d811ecc8305e731cc86dd79741ee98fed06f1da8" + integrity sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg== + dependencies: + "@jest/types" "^27.5.1" + jest-regex-util "^27.5.1" + jest-snapshot "^27.5.1" + +jest-resolve@^27.4.2, jest-resolve@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-resolve/-/jest-resolve-27.5.1.tgz#a2f1c5a0796ec18fe9eb1536ac3814c23617b384" + integrity sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw== 
+ dependencies: + "@jest/types" "^27.5.1" + chalk "^4.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-pnp-resolver "^1.2.2" + jest-util "^27.5.1" + jest-validate "^27.5.1" + resolve "^1.20.0" + resolve.exports "^1.1.0" + slash "^3.0.0" + +jest-runner@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-runner/-/jest-runner-27.5.1.tgz#071b27c1fa30d90540805c5645a0ec167c7b62e5" + integrity sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ== + dependencies: + "@jest/console" "^27.5.1" + "@jest/environment" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + emittery "^0.8.1" + graceful-fs "^4.2.9" + jest-docblock "^27.5.1" + jest-environment-jsdom "^27.5.1" + jest-environment-node "^27.5.1" + jest-haste-map "^27.5.1" + jest-leak-detector "^27.5.1" + jest-message-util "^27.5.1" + jest-resolve "^27.5.1" + jest-runtime "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" + source-map-support "^0.5.6" + throat "^6.0.1" + +jest-runtime@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-runtime/-/jest-runtime-27.5.1.tgz#4896003d7a334f7e8e4a53ba93fb9bcd3db0a1af" + integrity sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/globals" "^27.5.1" + "@jest/source-map" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + chalk "^4.0.0" + cjs-module-lexer "^1.0.0" + collect-v8-coverage "^1.0.0" + execa "^5.0.0" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-message-util "^27.5.1" + jest-mock "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + slash "^3.0.0" + strip-bom "^4.0.0" + +jest-serializer@^27.5.1: + version "27.5.1" + resolved 
"http://localhost:4873/jest-serializer/-/jest-serializer-27.5.1.tgz#81438410a30ea66fd57ff730835123dea1fb1f64" + integrity sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w== + dependencies: + "@types/node" "*" + graceful-fs "^4.2.9" + +jest-snapshot@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-snapshot/-/jest-snapshot-27.5.1.tgz#b668d50d23d38054a51b42c4039cab59ae6eb6a1" + integrity sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA== + dependencies: + "@babel/core" "^7.7.2" + "@babel/generator" "^7.7.2" + "@babel/plugin-syntax-typescript" "^7.7.2" + "@babel/traverse" "^7.7.2" + "@babel/types" "^7.0.0" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/babel__traverse" "^7.0.4" + "@types/prettier" "^2.1.5" + babel-preset-current-node-syntax "^1.0.0" + chalk "^4.0.0" + expect "^27.5.1" + graceful-fs "^4.2.9" + jest-diff "^27.5.1" + jest-get-type "^27.5.1" + jest-haste-map "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-util "^27.5.1" + natural-compare "^1.4.0" + pretty-format "^27.5.1" + semver "^7.3.2" + +jest-util@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-util/-/jest-util-27.5.1.tgz#3ba9771e8e31a0b85da48fe0b0891fb86c01c2f9" + integrity sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw== + dependencies: + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-util@^28.1.3: + version "28.1.3" + resolved "http://localhost:4873/jest-util/-/jest-util-28.1.3.tgz#f4f932aa0074f0679943220ff9cbba7e497028b0" + integrity sha512-XdqfpHwpcSRko/C35uLYFM2emRAltIIKZiJ9eAmhjsj0CqZMa0p1ib0R5fWIqGhn1a103DebTbpqIaP1qCQ6tQ== + dependencies: + "@jest/types" "^28.1.3" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-util@^29.1.2: + 
version "29.1.2" + resolved "http://localhost:4873/jest-util/-/jest-util-29.1.2.tgz#ac5798e93cb6a6703084e194cfa0898d66126df1" + integrity sha512-vPCk9F353i0Ymx3WQq3+a4lZ07NXu9Ca8wya6o4Fe4/aO1e1awMMprZ3woPFpKwghEOW+UXgd15vVotuNN9ONQ== + dependencies: + "@jest/types" "^29.1.2" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-validate@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-validate/-/jest-validate-27.5.1.tgz#9197d54dc0bdb52260b8db40b46ae668e04df067" + integrity sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ== + dependencies: + "@jest/types" "^27.5.1" + camelcase "^6.2.0" + chalk "^4.0.0" + jest-get-type "^27.5.1" + leven "^3.1.0" + pretty-format "^27.5.1" + +jest-watch-typeahead@^1.0.0: + version "1.1.0" + resolved "http://localhost:4873/jest-watch-typeahead/-/jest-watch-typeahead-1.1.0.tgz#b4a6826dfb9c9420da2f7bc900de59dad11266a9" + integrity sha512-Va5nLSJTN7YFtC2jd+7wsoe1pNe5K4ShLux/E5iHEwlB9AxaxmggY7to9KUqKojhaJw3aXqt5WAb4jGPOolpEw== + dependencies: + ansi-escapes "^4.3.1" + chalk "^4.0.0" + jest-regex-util "^28.0.0" + jest-watcher "^28.0.0" + slash "^4.0.0" + string-length "^5.0.1" + strip-ansi "^7.0.1" + +jest-watcher@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-watcher/-/jest-watcher-27.5.1.tgz#71bd85fb9bde3a2c2ec4dc353437971c43c642a2" + integrity sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw== + dependencies: + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + jest-util "^27.5.1" + string-length "^4.0.1" + +jest-watcher@^28.0.0: + version "28.1.3" + resolved "http://localhost:4873/jest-watcher/-/jest-watcher-28.1.3.tgz#c6023a59ba2255e3b4c57179fc94164b3e73abd4" + integrity sha512-t4qcqj9hze+jviFPUN3YAtAEeFnr/azITXQEMARf5cMwKY2SMBRnCQTXLixTl20OR6mLh9KLMrgVJgJISym+1g== + dependencies: + 
"@jest/test-result" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.10.2" + jest-util "^28.1.3" + string-length "^4.0.1" + +jest-worker@^26.2.1: + version "26.6.2" + resolved "http://localhost:4873/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" + integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^7.0.0" + +jest-worker@^27.0.2, jest-worker@^27.4.5, jest-worker@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" + integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest-worker@^28.0.2: + version "28.1.3" + resolved "http://localhost:4873/jest-worker/-/jest-worker-28.1.3.tgz#7e3c4ce3fa23d1bb6accb169e7f396f98ed4bb98" + integrity sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest@^27.4.3: + version "27.5.1" + resolved "http://localhost:4873/jest/-/jest-27.5.1.tgz#dadf33ba70a779be7a6fc33015843b51494f63fc" + integrity sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ== + dependencies: + "@jest/core" "^27.5.1" + import-local "^3.0.2" + jest-cli "^27.5.1" + +js-sdsl@^4.1.4: + version "4.1.5" + resolved "http://localhost:4873/js-sdsl/-/js-sdsl-4.1.5.tgz#1ff1645e6b4d1b028cd3f862db88c9d887f26e2a" + integrity sha512-08bOAKweV2NUC1wqTtf3qZlnpOX/R2DU9ikpjOHs0H+ibQv3zpncVQg6um4uYtRtrwIX8M4Nh3ytK4HGlYAq7Q== + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved 
"http://localhost:4873/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "http://localhost:4873/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@^4.1.0: + version "4.1.0" + resolved "http://localhost:4873/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +jsdom@^16.6.0: + version "16.7.0" + resolved "http://localhost:4873/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" + integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== + dependencies: + abab "^2.0.5" + acorn "^8.2.4" + acorn-globals "^6.0.0" + cssom "^0.4.4" + cssstyle "^2.3.0" + data-urls "^2.0.0" + decimal.js "^10.2.1" + domexception "^2.0.1" + escodegen "^2.0.0" + form-data "^3.0.0" + html-encoding-sniffer "^2.0.1" + http-proxy-agent "^4.0.1" + https-proxy-agent "^5.0.0" + is-potential-custom-element-name "^1.0.1" + nwsapi "^2.2.0" + parse5 "6.0.1" + saxes "^5.0.1" + symbol-tree "^3.2.4" + tough-cookie "^4.0.0" + w3c-hr-time "^1.0.2" + w3c-xmlserializer "^2.0.0" + webidl-conversions "^6.1.0" + whatwg-encoding "^1.0.5" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.5.0" + ws "^7.4.6" + xml-name-validator "^3.0.0" + +jsesc@^2.5.1: + version "2.5.2" + resolved "http://localhost:4873/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +jsesc@~0.5.0: + version "0.5.0" + resolved 
"http://localhost:4873/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== + +json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: + version "2.3.1" + resolved "http://localhost:4873/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "http://localhost:4873/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + +json-schema@^0.4.0: + version "0.4.0" + resolved "http://localhost:4873/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== + +json5@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" + integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + dependencies: + minimist 
"^1.2.0" + +json5@^2.1.2, json5@^2.2.0, json5@^2.2.1: + version "2.2.1" + resolved "http://localhost:4873/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== + +jsonfile@^6.0.1: + version "6.1.0" + resolved "http://localhost:4873/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +jsonpointer@^5.0.0: + version "5.0.1" + resolved "http://localhost:4873/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" + integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== + +"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.3.2: + version "3.3.3" + resolved "http://localhost:4873/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz#76b3e6e6cece5c69d49a5792c3d01bd1a0cdc7ea" + integrity sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw== + dependencies: + array-includes "^3.1.5" + object.assign "^4.1.3" + +kind-of@^6.0.2: + version "6.0.3" + resolved "http://localhost:4873/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +kleur@^3.0.3: + version "3.0.3" + resolved "http://localhost:4873/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +klona@^2.0.4, klona@^2.0.5: + version "2.0.5" + resolved "http://localhost:4873/klona/-/klona-2.0.5.tgz#d166574d90076395d9963aa7a928fabb8d76afbc" + integrity sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ== + 
+language-subtag-registry@~0.3.2: + version "0.3.22" + resolved "http://localhost:4873/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz#2e1500861b2e457eba7e7ae86877cbd08fa1fd1d" + integrity sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w== + +language-tags@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a" + integrity sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ== + dependencies: + language-subtag-registry "~0.3.2" + +leven@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +levn@^0.4.1: + version "0.4.1" + resolved "http://localhost:4873/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +levn@~0.3.0: + version "0.3.0" + resolved "http://localhost:4873/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA== + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +lilconfig@^2.0.3, lilconfig@^2.0.5, lilconfig@^2.0.6: + version "2.0.6" + resolved "http://localhost:4873/lilconfig/-/lilconfig-2.0.6.tgz#32a384558bd58af3d4c6e077dd1ad1d397bc69d4" + integrity sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg== + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "http://localhost:4873/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity 
sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +loader-runner@^4.2.0: + version "4.3.0" + resolved "http://localhost:4873/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" + integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== + +loader-utils@^2.0.0: + version "2.0.2" + resolved "http://localhost:4873/loader-utils/-/loader-utils-2.0.2.tgz#d6e3b4fb81870721ae4e0868ab11dd638368c129" + integrity sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^2.1.2" + +loader-utils@^3.2.0: + version "3.2.0" + resolved "http://localhost:4873/loader-utils/-/loader-utils-3.2.0.tgz#bcecc51a7898bee7473d4bc6b845b23af8304d4f" + integrity sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ== + +locate-path@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" + integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== + dependencies: + p-locate "^3.0.0" + path-exists "^3.0.0" + +locate-path@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +locate-path@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + dependencies: + p-locate "^5.0.0" + +lodash.debounce@^4.0.8: + version "4.0.8" + resolved 
"http://localhost:4873/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" + integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== + +lodash.memoize@^4.1.2: + version "4.1.2" + resolved "http://localhost:4873/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "http://localhost:4873/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lodash.sortby@^4.7.0: + version "4.7.0" + resolved "http://localhost:4873/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== + +lodash.uniq@^4.5.0: + version "4.5.0" + resolved "http://localhost:4873/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" + integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== + +lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0: + version "4.17.21" + resolved "http://localhost:4873/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +loose-envify@^1.1.0, loose-envify@^1.4.0: + version "1.4.0" + resolved "http://localhost:4873/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +lower-case@^2.0.2: + version "2.0.2" + resolved 
"http://localhost:4873/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" + integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== + dependencies: + tslib "^2.0.3" + +lru-cache@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +lz-string@^1.4.4: + version "1.4.4" + resolved "http://localhost:4873/lz-string/-/lz-string-1.4.4.tgz#c0d8eaf36059f705796e1e344811cf4c498d3a26" + integrity sha512-0ckx7ZHRPqb0oUm8zNr+90mtf9DQB60H1wMCjBtfi62Kl3a7JbHob6gA2bC+xRvZoOL+1hzUK8jeuEIQE8svEQ== + +magic-string@^0.25.0, magic-string@^0.25.7: + version "0.25.9" + resolved "http://localhost:4873/magic-string/-/magic-string-0.25.9.tgz#de7f9faf91ef8a1c91d02c2e5314c8277dbcdd1c" + integrity sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ== + dependencies: + sourcemap-codec "^1.4.8" + +make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +makeerror@1.0.12: + version "1.0.12" + resolved "http://localhost:4873/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + +mdn-data@2.0.14: + version "2.0.14" + resolved "http://localhost:4873/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" + integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== + +mdn-data@2.0.4: + 
version "2.0.4" + resolved "http://localhost:4873/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b" + integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA== + +media-typer@0.3.0: + version "0.3.0" + resolved "http://localhost:4873/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== + +memfs@^3.1.2, memfs@^3.4.3: + version "3.4.7" + resolved "http://localhost:4873/memfs/-/memfs-3.4.7.tgz#e5252ad2242a724f938cb937e3c4f7ceb1f70e5a" + integrity sha512-ygaiUSNalBX85388uskeCyhSAoOSgzBbtVCr9jA2RROssFL9Q19/ZXFqS+2Th2sr1ewNIWgFdLzLC3Yl1Zv+lw== + dependencies: + fs-monkey "^1.0.3" + +merge-descriptors@1.0.1: + version "1.0.1" + resolved "http://localhost:4873/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== + +merge-stream@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "http://localhost:4873/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +methods@~1.1.2: + version "1.1.2" + resolved "http://localhost:4873/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== + +micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: + version "4.0.5" + resolved 
"http://localhost:4873/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": + version "1.52.0" + resolved "http://localhost:4873/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved "http://localhost:4873/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime@1.6.0: + version "1.6.0" + resolved "http://localhost:4873/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +min-indent@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" + integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== + +mini-css-extract-plugin@^2.4.5: + version "2.6.1" + resolved "http://localhost:4873/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.1.tgz#9a1251d15f2035c342d99a468ab9da7a0451b71e" + integrity sha512-wd+SD57/K6DiV7jIR34P+s3uckTRuQvx0tKPcvjFlrEylk6P4mQ2KSWk1hblj1Kxaqok7LogKOieygXqBczNlg== + dependencies: + 
schema-utils "^4.0.0" + +minimalistic-assert@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimatch@3.0.4: + version "3.0.4" + resolved "http://localhost:4873/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: + version "3.1.2" + resolved "http://localhost:4873/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^5.0.1: + version "5.1.0" + resolved "http://localhost:4873/minimatch/-/minimatch-5.1.0.tgz#1717b464f4971b144f6aabe8f2d0b8e4511e09c7" + integrity sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg== + dependencies: + brace-expansion "^2.0.1" + +minimist@^1.2.0, minimist@^1.2.6: + version "1.2.6" + resolved "http://localhost:4873/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + +mkdirp@~0.5.1: + version "0.5.6" + resolved "http://localhost:4873/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== + dependencies: + minimist "^1.2.6" + +ms@2.0.0: + version "2.0.0" + resolved "http://localhost:4873/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity 
sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.2: + version "2.1.2" + resolved "http://localhost:4873/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@2.1.3, ms@^2.1.1: + version "2.1.3" + resolved "http://localhost:4873/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +multicast-dns@^7.2.5: + version "7.2.5" + resolved "http://localhost:4873/multicast-dns/-/multicast-dns-7.2.5.tgz#77eb46057f4d7adbd16d9290fa7299f6fa64cced" + integrity sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg== + dependencies: + dns-packet "^5.2.2" + thunky "^1.0.2" + +nanoid@^3.3.4: + version "3.3.4" + resolved "http://localhost:4873/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" + integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "http://localhost:4873/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== + +negotiator@0.6.3: + version "0.6.3" + resolved "http://localhost:4873/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + +neo-async@^2.6.2: + version "2.6.2" + resolved "http://localhost:4873/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + +no-case@^3.0.4: + version "3.0.4" + resolved 
"http://localhost:4873/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" + integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== + dependencies: + lower-case "^2.0.2" + tslib "^2.0.3" + +node-forge@^1: + version "1.3.1" + resolved "http://localhost:4873/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" + integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== + +node-int64@^0.4.0: + version "0.4.0" + resolved "http://localhost:4873/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== + +node-releases@^2.0.6: + version "2.0.6" + resolved "http://localhost:4873/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" + integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "http://localhost:4873/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +normalize-range@^0.1.2: + version "0.1.2" + resolved "http://localhost:4873/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" + integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== + +normalize-url@^6.0.1: + version "6.1.0" + resolved "http://localhost:4873/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== + +npm-run-path@^4.0.1: + version "4.0.1" + resolved 
"http://localhost:4873/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +nth-check@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" + integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== + dependencies: + boolbase "~1.0.0" + +nth-check@^2.0.1: + version "2.1.1" + resolved "http://localhost:4873/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" + integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== + dependencies: + boolbase "^1.0.0" + +nwsapi@^2.2.0: + version "2.2.2" + resolved "http://localhost:4873/nwsapi/-/nwsapi-2.2.2.tgz#e5418863e7905df67d51ec95938d67bf801f0bb0" + integrity sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw== + +object-assign@^4.1.1: + version "4.1.1" + resolved "http://localhost:4873/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-hash@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" + integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== + +object-inspect@^1.12.2, object-inspect@^1.9.0: + version "1.12.2" + resolved "http://localhost:4873/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" + integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== + +object-keys@^1.1.1: + version "1.1.1" + resolved 
"http://localhost:4873/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.0, object.assign@^4.1.3, object.assign@^4.1.4: + version "4.1.4" + resolved "http://localhost:4873/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" + integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + has-symbols "^1.0.3" + object-keys "^1.1.1" + +object.entries@^1.1.5: + version "1.1.5" + resolved "http://localhost:4873/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861" + integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.fromentries@^2.0.5: + version "2.0.5" + resolved "http://localhost:4873/object.fromentries/-/object.fromentries-2.0.5.tgz#7b37b205109c21e741e605727fe8b0ad5fa08251" + integrity sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.getownpropertydescriptors@^2.1.0: + version "2.1.4" + resolved "http://localhost:4873/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.4.tgz#7965e6437a57278b587383831a9b829455a4bc37" + integrity sha512-sccv3L/pMModT6dJAYF3fzGMVcb38ysQ0tEE6ixv2yXJDtEIPph268OlAdJj5/qZMZDq2g/jqvwppt36uS/uQQ== + dependencies: + array.prototype.reduce "^1.0.4" + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.1" + +object.hasown@^1.1.1: + version "1.1.1" + resolved "http://localhost:4873/object.hasown/-/object.hasown-1.1.1.tgz#ad1eecc60d03f49460600430d97f23882cf592a3" + integrity 
sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A== + dependencies: + define-properties "^1.1.4" + es-abstract "^1.19.5" + +object.values@^1.1.0, object.values@^1.1.5: + version "1.1.5" + resolved "http://localhost:4873/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" + integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +obuf@^1.0.0, obuf@^1.1.2: + version "1.1.2" + resolved "http://localhost:4873/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" + integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== + +on-finished@2.4.1: + version "2.4.1" + resolved "http://localhost:4873/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== + dependencies: + ee-first "1.1.1" + +on-headers@~1.0.2: + version "1.0.2" + resolved "http://localhost:4873/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" + integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== + +once@^1.3.0: + version "1.4.0" + resolved "http://localhost:4873/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +onetime@^5.1.2: + version "5.1.2" + resolved "http://localhost:4873/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +open@^8.0.9, open@^8.4.0: + version "8.4.0" + resolved 
"http://localhost:4873/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8" + integrity sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q== + dependencies: + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" + +optionator@^0.8.1: + version "0.8.3" + resolved "http://localhost:4873/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" + integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.6" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + word-wrap "~1.2.3" + +optionator@^0.9.1: + version "0.9.1" + resolved "http://localhost:4873/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +p-limit@^2.0.0, p-limit@^2.2.0: + version "2.3.0" + resolved "http://localhost:4873/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-limit@^3.0.2: + version "3.1.0" + resolved "http://localhost:4873/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + +p-locate@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" + integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== + dependencies: + p-limit "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + 
resolved "http://localhost:4873/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-locate@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + +p-retry@^4.5.0: + version "4.6.2" + resolved "http://localhost:4873/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" + integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== + dependencies: + "@types/retry" "0.12.0" + retry "^0.13.1" + +p-try@^2.0.0: + version "2.2.0" + resolved "http://localhost:4873/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +param-case@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" + integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== + dependencies: + dot-case "^3.0.4" + tslib "^2.0.3" + +parent-module@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-json@^5.0.0, parse-json@^5.2.0: + version "5.2.0" + resolved "http://localhost:4873/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + 
"@babel/code-frame" "^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +parse5@6.0.1: + version "6.0.1" + resolved "http://localhost:4873/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" + integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== + +parseurl@~1.3.2, parseurl@~1.3.3: + version "1.3.3" + resolved "http://localhost:4873/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +pascal-case@^3.1.2: + version "3.1.2" + resolved "http://localhost:4873/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" + integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +path-exists@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ== + +path-exists@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "http://localhost:4873/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity 
sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved "http://localhost:4873/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-to-regexp@0.1.7: + version "0.1.7" + resolved "http://localhost:4873/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== + +path-type@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +performance-now@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== + +picocolors@^0.2.1: + version "0.2.1" + resolved "http://localhost:4873/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" + integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== + +picocolors@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: + version "2.3.1" + resolved "http://localhost:4873/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + 
+pify@^2.3.0: + version "2.3.0" + resolved "http://localhost:4873/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== + +pirates@^4.0.4: + version "4.0.5" + resolved "http://localhost:4873/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" + integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + +pkg-dir@^4.1.0, pkg-dir@^4.2.0: + version "4.2.0" + resolved "http://localhost:4873/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +pkg-up@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" + integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== + dependencies: + find-up "^3.0.0" + +postcss-attribute-case-insensitive@^5.0.2: + version "5.0.2" + resolved "http://localhost:4873/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-5.0.2.tgz#03d761b24afc04c09e757e92ff53716ae8ea2741" + integrity sha512-XIidXV8fDr0kKt28vqki84fRK8VW8eTuIa4PChv2MqKuT6C9UjmSKzen6KaWhWEoYvwxFCa7n/tC1SZ3tyq4SQ== + dependencies: + postcss-selector-parser "^6.0.10" + +postcss-browser-comments@^4: + version "4.0.0" + resolved "http://localhost:4873/postcss-browser-comments/-/postcss-browser-comments-4.0.0.tgz#bcfc86134df5807f5d3c0eefa191d42136b5e72a" + integrity sha512-X9X9/WN3KIvY9+hNERUqX9gncsgBA25XaeR+jshHz2j8+sYyHktHw1JdKuMjeLpGktXidqDhA7b/qm1mrBDmgg== + +postcss-calc@^8.2.3: + version "8.2.4" + resolved "http://localhost:4873/postcss-calc/-/postcss-calc-8.2.4.tgz#77b9c29bfcbe8a07ff6693dc87050828889739a5" + integrity 
sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== + dependencies: + postcss-selector-parser "^6.0.9" + postcss-value-parser "^4.2.0" + +postcss-clamp@^4.1.0: + version "4.1.0" + resolved "http://localhost:4873/postcss-clamp/-/postcss-clamp-4.1.0.tgz#7263e95abadd8c2ba1bd911b0b5a5c9c93e02363" + integrity sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-functional-notation@^4.2.4: + version "4.2.4" + resolved "http://localhost:4873/postcss-color-functional-notation/-/postcss-color-functional-notation-4.2.4.tgz#21a909e8d7454d3612d1659e471ce4696f28caec" + integrity sha512-2yrTAUZUab9s6CpxkxC4rVgFEVaR6/2Pipvi6qcgvnYiVqZcbDHEoBDhrXzyb7Efh2CCfHQNtcqWcIruDTIUeg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-hex-alpha@^8.0.4: + version "8.0.4" + resolved "http://localhost:4873/postcss-color-hex-alpha/-/postcss-color-hex-alpha-8.0.4.tgz#c66e2980f2fbc1a63f5b079663340ce8b55f25a5" + integrity sha512-nLo2DCRC9eE4w2JmuKgVA3fGL3d01kGq752pVALF68qpGLmx2Qrk91QTKkdUqqp45T1K1XV8IhQpcu1hoAQflQ== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-rebeccapurple@^7.1.1: + version "7.1.1" + resolved "http://localhost:4873/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-7.1.1.tgz#63fdab91d878ebc4dd4b7c02619a0c3d6a56ced0" + integrity sha512-pGxkuVEInwLHgkNxUc4sdg4g3py7zUeCQ9sMfwyHAT+Ezk8a4OaaVZ8lIY5+oNqA/BXXgLyXv0+5wHP68R79hg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-colormin@^5.3.0: + version "5.3.0" + resolved "http://localhost:4873/postcss-colormin/-/postcss-colormin-5.3.0.tgz#3cee9e5ca62b2c27e84fce63affc0cfb5901956a" + integrity sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + colord "^2.9.1" + postcss-value-parser "^4.2.0" + +postcss-convert-values@^5.1.2: + 
version "5.1.2" + resolved "http://localhost:4873/postcss-convert-values/-/postcss-convert-values-5.1.2.tgz#31586df4e184c2e8890e8b34a0b9355313f503ab" + integrity sha512-c6Hzc4GAv95B7suy4udszX9Zy4ETyMCgFPUDtWjdFTKH1SE9eFY/jEpHSwTH1QPuwxHpWslhckUQWbNRM4ho5g== + dependencies: + browserslist "^4.20.3" + postcss-value-parser "^4.2.0" + +postcss-custom-media@^8.0.2: + version "8.0.2" + resolved "http://localhost:4873/postcss-custom-media/-/postcss-custom-media-8.0.2.tgz#c8f9637edf45fef761b014c024cee013f80529ea" + integrity sha512-7yi25vDAoHAkbhAzX9dHx2yc6ntS4jQvejrNcC+csQJAXjj15e7VcWfMgLqBNAbOvqi5uIa9huOVwdHbf+sKqg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-custom-properties@^12.1.9: + version "12.1.9" + resolved "http://localhost:4873/postcss-custom-properties/-/postcss-custom-properties-12.1.9.tgz#0883429a7ef99f1ba239d1fea29ce84906daa8bd" + integrity sha512-/E7PRvK8DAVljBbeWrcEQJPG72jaImxF3vvCNFwv9cC8CzigVoNIpeyfnJzphnN3Fd8/auBf5wvkw6W9MfmTyg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-custom-selectors@^6.0.3: + version "6.0.3" + resolved "http://localhost:4873/postcss-custom-selectors/-/postcss-custom-selectors-6.0.3.tgz#1ab4684d65f30fed175520f82d223db0337239d9" + integrity sha512-fgVkmyiWDwmD3JbpCmB45SvvlCD6z9CG6Ie6Iere22W5aHea6oWa7EM2bpnv2Fj3I94L3VbtvX9KqwSi5aFzSg== + dependencies: + postcss-selector-parser "^6.0.4" + +postcss-dir-pseudo-class@^6.0.5: + version "6.0.5" + resolved "http://localhost:4873/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-6.0.5.tgz#2bf31de5de76added44e0a25ecf60ae9f7c7c26c" + integrity sha512-eqn4m70P031PF7ZQIvSgy9RSJ5uI2171O/OO/zcRNYpJbvaeKFUlar1aJ7rmgiQtbm0FSPsRewjpdS0Oew7MPA== + dependencies: + postcss-selector-parser "^6.0.10" + +postcss-discard-comments@^5.1.2: + version "5.1.2" + resolved "http://localhost:4873/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz#8df5e81d2925af2780075840c1526f0660e53696" + integrity 
sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ== + +postcss-discard-duplicates@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz#9eb4fe8456706a4eebd6d3b7b777d07bad03e848" + integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== + +postcss-discard-empty@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz#e57762343ff7f503fe53fca553d18d7f0c369c6c" + integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== + +postcss-discard-overridden@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz#7e8c5b53325747e9d90131bb88635282fb4a276e" + integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== + +postcss-double-position-gradients@^3.1.2: + version "3.1.2" + resolved "http://localhost:4873/postcss-double-position-gradients/-/postcss-double-position-gradients-3.1.2.tgz#b96318fdb477be95997e86edd29c6e3557a49b91" + integrity sha512-GX+FuE/uBR6eskOK+4vkXgT6pDkexLokPaz/AbJna9s5Kzp/yl488pKPjhy0obB475ovfT1Wv8ho7U/cHNaRgQ== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +postcss-env-function@^4.0.6: + version "4.0.6" + resolved "http://localhost:4873/postcss-env-function/-/postcss-env-function-4.0.6.tgz#7b2d24c812f540ed6eda4c81f6090416722a8e7a" + integrity sha512-kpA6FsLra+NqcFnL81TnsU+Z7orGtDTxcOhl6pwXeEq1yFPpRMkCDpHhrz8CFQDr/Wfm0jLiNQ1OsGGPjlqPwA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-flexbugs-fixes@^5.0.2: + version "5.0.2" + resolved "http://localhost:4873/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-5.0.2.tgz#2028e145313074fc9abe276cb7ca14e5401eb49d" + integrity 
sha512-18f9voByak7bTktR2QgDveglpn9DTbBWPUzSOe9g0N4WR/2eSt6Vrcbf0hmspvMI6YWGywz6B9f7jzpFNJJgnQ== + +postcss-focus-visible@^6.0.4: + version "6.0.4" + resolved "http://localhost:4873/postcss-focus-visible/-/postcss-focus-visible-6.0.4.tgz#50c9ea9afa0ee657fb75635fabad25e18d76bf9e" + integrity sha512-QcKuUU/dgNsstIK6HELFRT5Y3lbrMLEOwG+A4s5cA+fx3A3y/JTq3X9LaOj3OC3ALH0XqyrgQIgey/MIZ8Wczw== + dependencies: + postcss-selector-parser "^6.0.9" + +postcss-focus-within@^5.0.4: + version "5.0.4" + resolved "http://localhost:4873/postcss-focus-within/-/postcss-focus-within-5.0.4.tgz#5b1d2ec603195f3344b716c0b75f61e44e8d2e20" + integrity sha512-vvjDN++C0mu8jz4af5d52CB184ogg/sSxAFS+oUJQq2SuCe7T5U2iIsVJtsCp2d6R4j0jr5+q3rPkBVZkXD9fQ== + dependencies: + postcss-selector-parser "^6.0.9" + +postcss-font-variant@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz#efd59b4b7ea8bb06127f2d031bfbb7f24d32fa66" + integrity sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA== + +postcss-gap-properties@^3.0.5: + version "3.0.5" + resolved "http://localhost:4873/postcss-gap-properties/-/postcss-gap-properties-3.0.5.tgz#f7e3cddcf73ee19e94ccf7cb77773f9560aa2fff" + integrity sha512-IuE6gKSdoUNcvkGIqdtjtcMtZIFyXZhmFd5RUlg97iVEvp1BZKV5ngsAjCjrVy+14uhGBQl9tzmi1Qwq4kqVOg== + +postcss-image-set-function@^4.0.7: + version "4.0.7" + resolved "http://localhost:4873/postcss-image-set-function/-/postcss-image-set-function-4.0.7.tgz#08353bd756f1cbfb3b6e93182c7829879114481f" + integrity sha512-9T2r9rsvYzm5ndsBE8WgtrMlIT7VbtTfE7b3BQnudUqnBcBo7L758oc+o+pdj/dUV0l5wjwSdjeOH2DZtfv8qw== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-import@^14.1.0: + version "14.1.0" + resolved "http://localhost:4873/postcss-import/-/postcss-import-14.1.0.tgz#a7333ffe32f0b8795303ee9e40215dac922781f0" + integrity sha512-flwI+Vgm4SElObFVPpTIT7SU7R3qk2L7PyduMcokiaVKuWv9d/U+Gm/QAd8NDLuykTWTkcrjOeD2Pp1rMeBTGw== + 
dependencies: + postcss-value-parser "^4.0.0" + read-cache "^1.0.0" + resolve "^1.1.7" + +postcss-initial@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/postcss-initial/-/postcss-initial-4.0.1.tgz#529f735f72c5724a0fb30527df6fb7ac54d7de42" + integrity sha512-0ueD7rPqX8Pn1xJIjay0AZeIuDoF+V+VvMt/uOnn+4ezUKhZM/NokDeP6DwMNyIoYByuN/94IQnt5FEkaN59xQ== + +postcss-js@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/postcss-js/-/postcss-js-4.0.0.tgz#31db79889531b80dc7bc9b0ad283e418dce0ac00" + integrity sha512-77QESFBwgX4irogGVPgQ5s07vLvFqWr228qZY+w6lW599cRlK/HmnlivnnVUxkjHnCu4J16PDMHcH+e+2HbvTQ== + dependencies: + camelcase-css "^2.0.1" + +postcss-lab-function@^4.2.1: + version "4.2.1" + resolved "http://localhost:4873/postcss-lab-function/-/postcss-lab-function-4.2.1.tgz#6fe4c015102ff7cd27d1bd5385582f67ebdbdc98" + integrity sha512-xuXll4isR03CrQsmxyz92LJB2xX9n+pZJ5jE9JgcnmsCammLyKdlzrBin+25dy6wIjfhJpKBAN80gsTlCgRk2w== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +postcss-load-config@^3.1.4: + version "3.1.4" + resolved "http://localhost:4873/postcss-load-config/-/postcss-load-config-3.1.4.tgz#1ab2571faf84bb078877e1d07905eabe9ebda855" + integrity sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg== + dependencies: + lilconfig "^2.0.5" + yaml "^1.10.2" + +postcss-loader@^6.2.1: + version "6.2.1" + resolved "http://localhost:4873/postcss-loader/-/postcss-loader-6.2.1.tgz#0895f7346b1702103d30fdc66e4d494a93c008ef" + integrity sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q== + dependencies: + cosmiconfig "^7.0.0" + klona "^2.0.5" + semver "^7.3.5" + +postcss-logical@^5.0.4: + version "5.0.4" + resolved "http://localhost:4873/postcss-logical/-/postcss-logical-5.0.4.tgz#ec75b1ee54421acc04d5921576b7d8db6b0e6f73" + integrity 
sha512-RHXxplCeLh9VjinvMrZONq7im4wjWGlRJAqmAVLXyZaXwfDWP73/oq4NdIp+OZwhQUMj0zjqDfM5Fj7qby+B4g== + +postcss-media-minmax@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/postcss-media-minmax/-/postcss-media-minmax-5.0.0.tgz#7140bddec173e2d6d657edbd8554a55794e2a5b5" + integrity sha512-yDUvFf9QdFZTuCUg0g0uNSHVlJ5X1lSzDZjPSFaiCWvjgsvu8vEVxtahPrLMinIDEEGnx6cBe6iqdx5YWz08wQ== + +postcss-merge-longhand@^5.1.6: + version "5.1.6" + resolved "http://localhost:4873/postcss-merge-longhand/-/postcss-merge-longhand-5.1.6.tgz#f378a8a7e55766b7b644f48e5d8c789ed7ed51ce" + integrity sha512-6C/UGF/3T5OE2CEbOuX7iNO63dnvqhGZeUnKkDeifebY0XqkkvrctYSZurpNE902LDf2yKwwPFgotnfSoPhQiw== + dependencies: + postcss-value-parser "^4.2.0" + stylehacks "^5.1.0" + +postcss-merge-rules@^5.1.2: + version "5.1.2" + resolved "http://localhost:4873/postcss-merge-rules/-/postcss-merge-rules-5.1.2.tgz#7049a14d4211045412116d79b751def4484473a5" + integrity sha512-zKMUlnw+zYCWoPN6yhPjtcEdlJaMUZ0WyVcxTAmw3lkkN/NDMRkOkiuctQEoWAOvH7twaxUUdvBWl0d4+hifRQ== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + cssnano-utils "^3.1.0" + postcss-selector-parser "^6.0.5" + +postcss-minify-font-values@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz#f1df0014a726083d260d3bd85d7385fb89d1f01b" + integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-minify-gradients@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz#f1fe1b4f498134a5068240c2f25d46fcd236ba2c" + integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== + dependencies: + colord "^2.9.1" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-minify-params@^5.1.3: + version "5.1.3" + resolved 
"http://localhost:4873/postcss-minify-params/-/postcss-minify-params-5.1.3.tgz#ac41a6465be2db735099bbd1798d85079a6dc1f9" + integrity sha512-bkzpWcjykkqIujNL+EVEPOlLYi/eZ050oImVtHU7b4lFS82jPnsCb44gvC6pxaNt38Els3jWYDHTjHKf0koTgg== + dependencies: + browserslist "^4.16.6" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-minify-selectors@^5.2.1: + version "5.2.1" + resolved "http://localhost:4873/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz#d4e7e6b46147b8117ea9325a915a801d5fe656c6" + integrity sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-modules-extract-imports@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" + integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== + +postcss-modules-local-by-default@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz#ebbb54fae1598eecfdf691a02b3ff3b390a5a51c" + integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== + dependencies: + icss-utils "^5.0.0" + postcss-selector-parser "^6.0.2" + postcss-value-parser "^4.1.0" + +postcss-modules-scope@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" + integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== + dependencies: + postcss-selector-parser "^6.0.4" + +postcss-modules-values@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" + integrity 
sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== + dependencies: + icss-utils "^5.0.0" + +postcss-nested@5.0.6: + version "5.0.6" + resolved "http://localhost:4873/postcss-nested/-/postcss-nested-5.0.6.tgz#466343f7fc8d3d46af3e7dba3fcd47d052a945bc" + integrity sha512-rKqm2Fk0KbA8Vt3AdGN0FB9OBOMDVajMG6ZCf/GoHgdxUJ4sBFp0A/uMIRm+MJUdo33YXEtjqIz8u7DAp8B7DA== + dependencies: + postcss-selector-parser "^6.0.6" + +postcss-nesting@^10.2.0: + version "10.2.0" + resolved "http://localhost:4873/postcss-nesting/-/postcss-nesting-10.2.0.tgz#0b12ce0db8edfd2d8ae0aaf86427370b898890be" + integrity sha512-EwMkYchxiDiKUhlJGzWsD9b2zvq/r2SSubcRrgP+jujMXFzqvANLt16lJANC+5uZ6hjI7lpRmI6O8JIl+8l1KA== + dependencies: + "@csstools/selector-specificity" "^2.0.0" + postcss-selector-parser "^6.0.10" + +postcss-normalize-charset@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz#9302de0b29094b52c259e9b2cf8dc0879879f0ed" + integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== + +postcss-normalize-display-values@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz#72abbae58081960e9edd7200fcf21ab8325c3da8" + integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-positions@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz#ef97279d894087b59325b45c47f1e863daefbb92" + integrity sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-repeat-style@^5.1.1: + version "5.1.1" + resolved 
"http://localhost:4873/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz#e9eb96805204f4766df66fd09ed2e13545420fb2" + integrity sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-string@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz#411961169e07308c82c1f8c55f3e8a337757e228" + integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-timing-functions@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz#d5614410f8f0b2388e9f240aa6011ba6f52dafbb" + integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-unicode@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz#3d23aede35e160089a285e27bf715de11dc9db75" + integrity sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ== + dependencies: + browserslist "^4.16.6" + postcss-value-parser "^4.2.0" + +postcss-normalize-url@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz#ed9d88ca82e21abef99f743457d3729a042adcdc" + integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== + dependencies: + normalize-url "^6.0.1" + postcss-value-parser "^4.2.0" + +postcss-normalize-whitespace@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz#08a1a0d1ffa17a7cc6efe1e6c9da969cc4493cfa" + integrity 
sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize@^10.0.1: + version "10.0.1" + resolved "http://localhost:4873/postcss-normalize/-/postcss-normalize-10.0.1.tgz#464692676b52792a06b06880a176279216540dd7" + integrity sha512-+5w18/rDev5mqERcG3W5GZNMJa1eoYYNGo8gB7tEwaos0ajk3ZXAI4mHGcNT47NE+ZnZD1pEpUOFLvltIwmeJA== + dependencies: + "@csstools/normalize.css" "*" + postcss-browser-comments "^4" + sanitize.css "*" + +postcss-opacity-percentage@^1.1.2: + version "1.1.2" + resolved "http://localhost:4873/postcss-opacity-percentage/-/postcss-opacity-percentage-1.1.2.tgz#bd698bb3670a0a27f6d657cc16744b3ebf3b1145" + integrity sha512-lyUfF7miG+yewZ8EAk9XUBIlrHyUE6fijnesuz+Mj5zrIHIEw6KcIZSOk/elVMqzLvREmXB83Zi/5QpNRYd47w== + +postcss-ordered-values@^5.1.3: + version "5.1.3" + resolved "http://localhost:4873/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz#b6fd2bd10f937b23d86bc829c69e7732ce76ea38" + integrity sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ== + dependencies: + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-overflow-shorthand@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/postcss-overflow-shorthand/-/postcss-overflow-shorthand-3.0.4.tgz#7ed6486fec44b76f0eab15aa4866cda5d55d893e" + integrity sha512-otYl/ylHK8Y9bcBnPLo3foYFLL6a6Ak+3EQBPOTR7luMYCOsiVTUk1iLvNf6tVPNGXcoL9Hoz37kpfriRIFb4A== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-page-break@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/postcss-page-break/-/postcss-page-break-3.0.4.tgz#7fbf741c233621622b68d435babfb70dd8c1ee5f" + integrity sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ== + +postcss-place@^7.0.5: + version "7.0.5" + resolved "http://localhost:4873/postcss-place/-/postcss-place-7.0.5.tgz#95dbf85fd9656a3a6e60e832b5809914236986c4" + 
integrity sha512-wR8igaZROA6Z4pv0d+bvVrvGY4GVHihBCBQieXFY3kuSuMyOmEnnfFzHl/tQuqHZkfkIVBEbDvYcFfHmpSet9g== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-preset-env@^7.0.1: + version "7.8.2" + resolved "http://localhost:4873/postcss-preset-env/-/postcss-preset-env-7.8.2.tgz#4c834d5cbd2e29df2abf59118947c456922b79ba" + integrity sha512-rSMUEaOCnovKnwc5LvBDHUDzpGP+nrUeWZGWt9M72fBvckCi45JmnJigUr4QG4zZeOHmOCNCZnd2LKDvP++ZuQ== + dependencies: + "@csstools/postcss-cascade-layers" "^1.1.0" + "@csstools/postcss-color-function" "^1.1.1" + "@csstools/postcss-font-format-keywords" "^1.0.1" + "@csstools/postcss-hwb-function" "^1.0.2" + "@csstools/postcss-ic-unit" "^1.0.1" + "@csstools/postcss-is-pseudo-class" "^2.0.7" + "@csstools/postcss-nested-calc" "^1.0.0" + "@csstools/postcss-normalize-display-values" "^1.0.1" + "@csstools/postcss-oklab-function" "^1.1.1" + "@csstools/postcss-progressive-custom-properties" "^1.3.0" + "@csstools/postcss-stepped-value-functions" "^1.0.1" + "@csstools/postcss-text-decoration-shorthand" "^1.0.0" + "@csstools/postcss-trigonometric-functions" "^1.0.2" + "@csstools/postcss-unset-value" "^1.0.2" + autoprefixer "^10.4.11" + browserslist "^4.21.3" + css-blank-pseudo "^3.0.3" + css-has-pseudo "^3.0.4" + css-prefers-color-scheme "^6.0.3" + cssdb "^7.0.1" + postcss-attribute-case-insensitive "^5.0.2" + postcss-clamp "^4.1.0" + postcss-color-functional-notation "^4.2.4" + postcss-color-hex-alpha "^8.0.4" + postcss-color-rebeccapurple "^7.1.1" + postcss-custom-media "^8.0.2" + postcss-custom-properties "^12.1.9" + postcss-custom-selectors "^6.0.3" + postcss-dir-pseudo-class "^6.0.5" + postcss-double-position-gradients "^3.1.2" + postcss-env-function "^4.0.6" + postcss-focus-visible "^6.0.4" + postcss-focus-within "^5.0.4" + postcss-font-variant "^5.0.0" + postcss-gap-properties "^3.0.5" + postcss-image-set-function "^4.0.7" + postcss-initial "^4.0.1" + postcss-lab-function "^4.2.1" + postcss-logical "^5.0.4" + postcss-media-minmax "^5.0.0" + 
postcss-nesting "^10.2.0" + postcss-opacity-percentage "^1.1.2" + postcss-overflow-shorthand "^3.0.4" + postcss-page-break "^3.0.4" + postcss-place "^7.0.5" + postcss-pseudo-class-any-link "^7.1.6" + postcss-replace-overflow-wrap "^4.0.0" + postcss-selector-not "^6.0.1" + postcss-value-parser "^4.2.0" + +postcss-pseudo-class-any-link@^7.1.6: + version "7.1.6" + resolved "http://localhost:4873/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-7.1.6.tgz#2693b221902da772c278def85a4d9a64b6e617ab" + integrity sha512-9sCtZkO6f/5ML9WcTLcIyV1yz9D1rf0tWc+ulKcvV30s0iZKS/ONyETvoWsr6vnrmW+X+KmuK3gV/w5EWnT37w== + dependencies: + postcss-selector-parser "^6.0.10" + +postcss-reduce-initial@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz#fc31659ea6e85c492fb2a7b545370c215822c5d6" + integrity sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + +postcss-reduce-transforms@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz#333b70e7758b802f3dd0ddfe98bb1ccfef96b6e9" + integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-replace-overflow-wrap@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz#d2df6bed10b477bf9c52fab28c568b4b29ca4319" + integrity sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw== + +postcss-selector-not@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/postcss-selector-not/-/postcss-selector-not-6.0.1.tgz#8f0a709bf7d4b45222793fc34409be407537556d" + integrity sha512-1i9affjAe9xu/y9uqWH+tD4r6/hDaXJruk8xn2x1vzxC2U3J3LKO3zJW4CyxlNhA56pADJ/djpEwpH1RClI2rQ== + dependencies: + 
postcss-selector-parser "^6.0.10" + +postcss-selector-parser@^6.0.10, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.6, postcss-selector-parser@^6.0.9: + version "6.0.10" + resolved "http://localhost:4873/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d" + integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-svgo@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-svgo/-/postcss-svgo-5.1.0.tgz#0a317400ced789f233a28826e77523f15857d80d" + integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== + dependencies: + postcss-value-parser "^4.2.0" + svgo "^2.7.0" + +postcss-unique-selectors@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz#a9f273d1eacd09e9aa6088f4b0507b18b1b541b6" + integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-value-parser@^4.0.0, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: + version "4.2.0" + resolved "http://localhost:4873/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" + integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== + +postcss@^7.0.35: + version "7.0.39" + resolved "http://localhost:4873/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" + integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== + dependencies: + picocolors "^0.2.1" + source-map "^0.6.1" + +postcss@^8.3.5, postcss@^8.4.14, postcss@^8.4.4, postcss@^8.4.7: + version "8.4.17" + resolved 
"http://localhost:4873/postcss/-/postcss-8.4.17.tgz#f87863ec7cd353f81f7ab2dec5d67d861bbb1be5" + integrity sha512-UNxNOLQydcOFi41yHNMcKRZ39NeXlr8AxGuZJsdub8vIb12fHzcq37DTU/QtbI6WLxNg2gF9Z+8qtRwTj1UI1Q== + dependencies: + nanoid "^3.3.4" + picocolors "^1.0.0" + source-map-js "^1.0.2" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "http://localhost:4873/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "http://localhost:4873/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== + +pretty-bytes@^5.3.0, pretty-bytes@^5.4.1: + version "5.6.0" + resolved "http://localhost:4873/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" + integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== + +pretty-error@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" + integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== + dependencies: + lodash "^4.17.20" + renderkid "^3.0.0" + +pretty-format@^27.0.2, pretty-format@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e" + integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ== + dependencies: + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^17.0.1" + +pretty-format@^28.1.3: + version "28.1.3" + resolved "http://localhost:4873/pretty-format/-/pretty-format-28.1.3.tgz#c9fba8cedf99ce50963a11b27d982a9ae90970d5" + integrity 
sha512-8gFb/To0OmxHR9+ZTb14Df2vNxdGCX8g1xWGUTqUw5TiZvcQf5sHKObd5UcPyLLyowNwDAMTF3XWOG1B6mxl1Q== + dependencies: + "@jest/schemas" "^28.1.3" + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^18.0.0" + +pretty-format@^29.0.0, pretty-format@^29.1.2: + version "29.1.2" + resolved "http://localhost:4873/pretty-format/-/pretty-format-29.1.2.tgz#b1f6b75be7d699be1a051f5da36e8ae9e76a8e6a" + integrity sha512-CGJ6VVGXVRP2o2Dorl4mAwwvDWT25luIsYhkyVQW32E4nL+TgW939J7LlKT/npq5Cpq6j3s+sy+13yk7xYpBmg== + dependencies: + "@jest/schemas" "^29.0.0" + ansi-styles "^5.0.0" + react-is "^18.0.0" + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "http://localhost:4873/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +promise@^8.1.0: + version "8.2.0" + resolved "http://localhost:4873/promise/-/promise-8.2.0.tgz#a1f6280ab67457fbfc8aad2b198c9497e9e5c806" + integrity sha512-+CMAlLHqwRYwBMXKCP+o8ns7DN+xHDUiI+0nArsiJ9y+kJVPLFxEaSw6Ha9s9H0tftxg2Yzl25wqj9G7m5wLZg== + dependencies: + asap "~2.0.6" + +prompts@^2.0.1, prompts@^2.4.2: + version "2.4.2" + resolved "http://localhost:4873/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +prop-types@^15.8.1: + version "15.8.1" + resolved "http://localhost:4873/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" + integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.13.1" + +proxy-addr@~2.0.7: + version "2.0.7" + resolved "http://localhost:4873/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity 
sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + +psl@^1.1.33: + version "1.9.0" + resolved "http://localhost:4873/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== + +punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +q@^1.1.2: + version "1.5.1" + resolved "http://localhost:4873/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" + integrity sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw== + +qs@6.10.3: + version "6.10.3" + resolved "http://localhost:4873/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" + integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== + dependencies: + side-channel "^1.0.4" + +querystringify@^2.1.1: + version "2.2.0" + resolved "http://localhost:4873/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" + integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "http://localhost:4873/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +quick-lru@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" + integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== + +raf@^3.4.1: + version "3.4.1" + 
resolved "http://localhost:4873/raf/-/raf-3.4.1.tgz#0742e99a4a6552f445d73e3ee0328af0ff1ede39" + integrity sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA== + dependencies: + performance-now "^2.1.0" + +randombytes@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + dependencies: + safe-buffer "^5.1.0" + +range-parser@^1.2.1, range-parser@~1.2.1: + version "1.2.1" + resolved "http://localhost:4873/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.5.1: + version "2.5.1" + resolved "http://localhost:4873/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" + integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.4.24" + unpipe "1.0.0" + +react-app-polyfill@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" + integrity sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w== + dependencies: + core-js "^3.19.2" + object-assign "^4.1.1" + promise "^8.1.0" + raf "^3.4.1" + regenerator-runtime "^0.13.9" + whatwg-fetch "^3.6.2" + +react-dev-utils@^12.0.1: + version "12.0.1" + resolved "http://localhost:4873/react-dev-utils/-/react-dev-utils-12.0.1.tgz#ba92edb4a1f379bd46ccd6bcd4e7bc398df33e73" + integrity sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ== + dependencies: + "@babel/code-frame" "^7.16.0" + address "^1.1.2" + browserslist "^4.18.1" + chalk "^4.1.2" + 
cross-spawn "^7.0.3" + detect-port-alt "^1.1.6" + escape-string-regexp "^4.0.0" + filesize "^8.0.6" + find-up "^5.0.0" + fork-ts-checker-webpack-plugin "^6.5.0" + global-modules "^2.0.0" + globby "^11.0.4" + gzip-size "^6.0.0" + immer "^9.0.7" + is-root "^2.1.0" + loader-utils "^3.2.0" + open "^8.4.0" + pkg-up "^3.1.0" + prompts "^2.4.2" + react-error-overlay "^6.0.11" + recursive-readdir "^2.2.2" + shell-quote "^1.7.3" + strip-ansi "^6.0.1" + text-table "^0.2.0" + +react-dom@^18.2.0: + version "18.2.0" + resolved "http://localhost:4873/react-dom/-/react-dom-18.2.0.tgz#22aaf38708db2674ed9ada224ca4aa708d821e3d" + integrity sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g== + dependencies: + loose-envify "^1.1.0" + scheduler "^0.23.0" + +react-error-overlay@^6.0.11: + version "6.0.11" + resolved "http://localhost:4873/react-error-overlay/-/react-error-overlay-6.0.11.tgz#92835de5841c5cf08ba00ddd2d677b6d17ff9adb" + integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg== + +react-is@^16.13.1: + version "16.13.1" + resolved "http://localhost:4873/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-is@^17.0.1: + version "17.0.2" + resolved "http://localhost:4873/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" + integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== + +react-is@^18.0.0: + version "18.2.0" + resolved "http://localhost:4873/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" + integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== + +react-refresh@^0.11.0: + version "0.11.0" + resolved 
"http://localhost:4873/react-refresh/-/react-refresh-0.11.0.tgz#77198b944733f0f1f1a90e791de4541f9f074046" + integrity sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A== + +react-scripts@5.0.1: + version "5.0.1" + resolved "http://localhost:4873/react-scripts/-/react-scripts-5.0.1.tgz#6285dbd65a8ba6e49ca8d651ce30645a6d980003" + integrity sha512-8VAmEm/ZAwQzJ+GOMLbBsTdDKOpuZh7RPs0UymvBR2vRk4iZWCskjbFnxqjrzoIvlNNRZ3QJFx6/qDSi6zSnaQ== + dependencies: + "@babel/core" "^7.16.0" + "@pmmmwh/react-refresh-webpack-plugin" "^0.5.3" + "@svgr/webpack" "^5.5.0" + babel-jest "^27.4.2" + babel-loader "^8.2.3" + babel-plugin-named-asset-import "^0.3.8" + babel-preset-react-app "^10.0.1" + bfj "^7.0.2" + browserslist "^4.18.1" + camelcase "^6.2.1" + case-sensitive-paths-webpack-plugin "^2.4.0" + css-loader "^6.5.1" + css-minimizer-webpack-plugin "^3.2.0" + dotenv "^10.0.0" + dotenv-expand "^5.1.0" + eslint "^8.3.0" + eslint-config-react-app "^7.0.1" + eslint-webpack-plugin "^3.1.1" + file-loader "^6.2.0" + fs-extra "^10.0.0" + html-webpack-plugin "^5.5.0" + identity-obj-proxy "^3.0.0" + jest "^27.4.3" + jest-resolve "^27.4.2" + jest-watch-typeahead "^1.0.0" + mini-css-extract-plugin "^2.4.5" + postcss "^8.4.4" + postcss-flexbugs-fixes "^5.0.2" + postcss-loader "^6.2.1" + postcss-normalize "^10.0.1" + postcss-preset-env "^7.0.1" + prompts "^2.4.2" + react-app-polyfill "^3.0.0" + react-dev-utils "^12.0.1" + react-refresh "^0.11.0" + resolve "^1.20.0" + resolve-url-loader "^4.0.0" + sass-loader "^12.3.0" + semver "^7.3.5" + source-map-loader "^3.0.0" + style-loader "^3.3.1" + tailwindcss "^3.0.2" + terser-webpack-plugin "^5.2.5" + webpack "^5.64.4" + webpack-dev-server "^4.6.0" + webpack-manifest-plugin "^4.0.2" + workbox-webpack-plugin "^6.4.1" + optionalDependencies: + fsevents "^2.3.2" + +react@^18.2.0: + version "18.2.0" + resolved "http://localhost:4873/react/-/react-18.2.0.tgz#555bd98592883255fa00de14f1151a917b5d77d5" + integrity 
sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ== + dependencies: + loose-envify "^1.1.0" + +read-cache@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774" + integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== + dependencies: + pify "^2.3.0" + +readable-stream@^2.0.1: + version "2.3.7" + resolved "http://localhost:4873/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^3.0.6: + version "3.6.0" + resolved "http://localhost:4873/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readdirp@~3.6.0: + version "3.6.0" + resolved "http://localhost:4873/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +recursive-readdir@^2.2.2: + version "2.2.2" + resolved "http://localhost:4873/recursive-readdir/-/recursive-readdir-2.2.2.tgz#9946fb3274e1628de6e36b2f6714953b4845094f" + integrity sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg== + dependencies: + minimatch "3.0.4" + +redent@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" + integrity 
sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== + dependencies: + indent-string "^4.0.0" + strip-indent "^3.0.0" + +regenerate-unicode-properties@^10.1.0: + version "10.1.0" + resolved "http://localhost:4873/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz#7c3192cab6dd24e21cb4461e5ddd7dd24fa8374c" + integrity sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ== + dependencies: + regenerate "^1.4.2" + +regenerate@^1.4.2: + version "1.4.2" + resolved "http://localhost:4873/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" + integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== + +regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.9: + version "0.13.9" + resolved "http://localhost:4873/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" + integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + +regenerator-transform@^0.15.0: + version "0.15.0" + resolved "http://localhost:4873/regenerator-transform/-/regenerator-transform-0.15.0.tgz#cbd9ead5d77fae1a48d957cf889ad0586adb6537" + integrity sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg== + dependencies: + "@babel/runtime" "^7.8.4" + +regex-parser@^2.2.11: + version "2.2.11" + resolved "http://localhost:4873/regex-parser/-/regex-parser-2.2.11.tgz#3b37ec9049e19479806e878cabe7c1ca83ccfe58" + integrity sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q== + +regexp.prototype.flags@^1.4.1, regexp.prototype.flags@^1.4.3: + version "1.4.3" + resolved "http://localhost:4873/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" + integrity 
sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + functions-have-names "^1.2.2" + +regexpp@^3.2.0: + version "3.2.0" + resolved "http://localhost:4873/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + +regexpu-core@^5.1.0: + version "5.2.1" + resolved "http://localhost:4873/regexpu-core/-/regexpu-core-5.2.1.tgz#a69c26f324c1e962e9ffd0b88b055caba8089139" + integrity sha512-HrnlNtpvqP1Xkb28tMhBUO2EbyUHdQlsnlAhzWcwHy8WJR53UWr7/MAvqrsQKMbV4qdpv03oTMG8iIhfsPFktQ== + dependencies: + regenerate "^1.4.2" + regenerate-unicode-properties "^10.1.0" + regjsgen "^0.7.1" + regjsparser "^0.9.1" + unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.0.0" + +regjsgen@^0.7.1: + version "0.7.1" + resolved "http://localhost:4873/regjsgen/-/regjsgen-0.7.1.tgz#ee5ef30e18d3f09b7c369b76e7c2373ed25546f6" + integrity sha512-RAt+8H2ZEzHeYWxZ3H2z6tF18zyyOnlcdaafLrm21Bguj7uZy6ULibiAFdXEtKQY4Sy7wDTwDiOazasMLc4KPA== + +regjsparser@^0.9.1: + version "0.9.1" + resolved "http://localhost:4873/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" + integrity sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== + dependencies: + jsesc "~0.5.0" + +relateurl@^0.2.7: + version "0.2.7" + resolved "http://localhost:4873/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" + integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog== + +renderkid@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" + integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== + dependencies: + 
css-select "^4.1.3" + dom-converter "^0.2.0" + htmlparser2 "^6.1.0" + lodash "^4.17.21" + strip-ansi "^6.0.1" + +require-directory@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== + +require-from-string@^2.0.2: + version "2.0.2" + resolved "http://localhost:4873/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +requires-port@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== + +resolve-cwd@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + dependencies: + resolve-from "^5.0.0" + +resolve-from@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-from@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +resolve-url-loader@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz#d50d4ddc746bb10468443167acf800dcd6c3ad57" + integrity 
sha512-05VEMczVREcbtT7Bz+C+96eUO5HDNvdthIiMB34t7FcF8ehcu4wC0sSgPUubs3XW2Q3CNLJk/BJrCU9wVRymiA== + dependencies: + adjust-sourcemap-loader "^4.0.0" + convert-source-map "^1.7.0" + loader-utils "^2.0.0" + postcss "^7.0.35" + source-map "0.6.1" + +resolve.exports@^1.1.0: + version "1.1.0" + resolved "http://localhost:4873/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" + integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== + +resolve@^1.1.7, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.0, resolve@^1.22.1: + version "1.22.1" + resolved "http://localhost:4873/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +resolve@^2.0.0-next.3: + version "2.0.0-next.4" + resolved "http://localhost:4873/resolve/-/resolve-2.0.0-next.4.tgz#3d37a113d6429f496ec4752d2a2e58efb1fd4660" + integrity sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +retry@^0.13.1: + version "0.13.1" + resolved "http://localhost:4873/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" + integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== + +reusify@^1.0.4: + version "1.0.4" + resolved "http://localhost:4873/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@^3.0.0, rimraf@^3.0.2: + version "3.0.2" + resolved "http://localhost:4873/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity 
sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +rollup-plugin-terser@^7.0.0: + version "7.0.2" + resolved "http://localhost:4873/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz#e8fbba4869981b2dc35ae7e8a502d5c6c04d324d" + integrity sha512-w3iIaU4OxcF52UUXiZNsNeuXIMDvFrr+ZXK6bFZ0Q60qyVfq4uLptoS4bbq3paG3x216eQllFZX7zt6TIImguQ== + dependencies: + "@babel/code-frame" "^7.10.4" + jest-worker "^26.2.1" + serialize-javascript "^4.0.0" + terser "^5.0.0" + +rollup@^2.43.1: + version "2.79.1" + resolved "http://localhost:4873/rollup/-/rollup-2.79.1.tgz#bedee8faef7c9f93a2647ac0108748f497f081c7" + integrity sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw== + optionalDependencies: + fsevents "~2.3.2" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "http://localhost:4873/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "http://localhost:4873/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: + version "5.2.1" + resolved "http://localhost:4873/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-regex-test@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" + integrity 
sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.3" + is-regex "^1.1.4" + +"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0": + version "2.1.2" + resolved "http://localhost:4873/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sanitize.css@*: + version "13.0.0" + resolved "http://localhost:4873/sanitize.css/-/sanitize.css-13.0.0.tgz#2675553974b27964c75562ade3bd85d79879f173" + integrity sha512-ZRwKbh/eQ6w9vmTjkuG0Ioi3HBwPFce0O+v//ve+aOq1oeCy7jMV2qzzAlpsNuqpqCBjjriM1lbtZbF/Q8jVyA== + +sass-loader@^12.3.0: + version "12.6.0" + resolved "http://localhost:4873/sass-loader/-/sass-loader-12.6.0.tgz#5148362c8e2cdd4b950f3c63ac5d16dbfed37bcb" + integrity sha512-oLTaH0YCtX4cfnJZxKSLAyglED0naiYfNG1iXfU5w1LNZ+ukoA5DtyDIN5zmKVZwYNJP4KRc5Y3hkWga+7tYfA== + dependencies: + klona "^2.0.4" + neo-async "^2.6.2" + +sax@~1.2.4: + version "1.2.4" + resolved "http://localhost:4873/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +saxes@^5.0.1: + version "5.0.1" + resolved "http://localhost:4873/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" + integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== + dependencies: + xmlchars "^2.2.0" + +scheduler@^0.23.0: + version "0.23.0" + resolved "http://localhost:4873/scheduler/-/scheduler-0.23.0.tgz#ba8041afc3d30eb206a487b6b384002e4e61fdfe" + integrity sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw== + dependencies: + loose-envify "^1.1.0" + +schema-utils@2.7.0: + version "2.7.0" + resolved 
"http://localhost:4873/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7" + integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== + dependencies: + "@types/json-schema" "^7.0.4" + ajv "^6.12.2" + ajv-keywords "^3.4.1" + +schema-utils@^2.6.5: + version "2.7.1" + resolved "http://localhost:4873/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" + integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== + dependencies: + "@types/json-schema" "^7.0.5" + ajv "^6.12.4" + ajv-keywords "^3.5.2" + +schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: + version "3.1.1" + resolved "http://localhost:4873/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" + integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + +schema-utils@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" + integrity sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg== + dependencies: + "@types/json-schema" "^7.0.9" + ajv "^8.8.0" + ajv-formats "^2.1.1" + ajv-keywords "^5.0.0" + +select-hose@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" + integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg== + +selfsigned@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/selfsigned/-/selfsigned-2.1.1.tgz#18a7613d714c0cd3385c48af0075abf3f266af61" + integrity sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ== + dependencies: + node-forge "^1" + +semver@^6.0.0, 
semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: + version "6.3.0" + resolved "http://localhost:4873/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +semver@^7.3.2, semver@^7.3.5, semver@^7.3.7: + version "7.3.7" + resolved "http://localhost:4873/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" + integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + dependencies: + lru-cache "^6.0.0" + +send@0.18.0: + version "0.18.0" + resolved "http://localhost:4873/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" + integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== + dependencies: + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "2.0.0" + mime "1.6.0" + ms "2.1.3" + on-finished "2.4.1" + range-parser "~1.2.1" + statuses "2.0.1" + +serialize-javascript@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" + integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== + dependencies: + randombytes "^2.1.0" + +serialize-javascript@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" + integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== + dependencies: + randombytes "^2.1.0" + +serve-index@^1.9.1: + version "1.9.1" + resolved "http://localhost:4873/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" + integrity sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw== 
+ dependencies: + accepts "~1.3.4" + batch "0.6.1" + debug "2.6.9" + escape-html "~1.0.3" + http-errors "~1.6.2" + mime-types "~2.1.17" + parseurl "~1.3.2" + +serve-static@1.15.0: + version "1.15.0" + resolved "http://localhost:4873/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" + integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.18.0" + +setprototypeof@1.1.0: + version "1.1.0" + resolved "http://localhost:4873/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" + integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== + +setprototypeof@1.2.0: + version "1.2.0" + resolved "http://localhost:4873/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +shallow-clone@^3.0.0: + version "3.0.1" + resolved "http://localhost:4873/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" + integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== + dependencies: + kind-of "^6.0.2" + +shebang-command@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +shell-quote@^1.7.3: + version "1.7.3" + resolved 
"http://localhost:4873/shell-quote/-/shell-quote-1.7.3.tgz#aa40edac170445b9a431e17bb62c0b881b9c4123" + integrity sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw== + +side-channel@^1.0.4: + version "1.0.4" + resolved "http://localhost:4873/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +signal-exit@^3.0.2, signal-exit@^3.0.3: + version "3.0.7" + resolved "http://localhost:4873/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + +sisteransi@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +slash@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +slash@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" + integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== + +sockjs@^0.3.24: + version "0.3.24" + resolved "http://localhost:4873/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce" + integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== + dependencies: + faye-websocket "^0.11.3" + uuid "^8.3.2" + websocket-driver "^0.7.4" + +source-list-map@^2.0.0, source-list-map@^2.0.1: + version "2.0.1" + 
resolved "http://localhost:4873/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" + integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== + +source-map-js@^1.0.1, source-map-js@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + +source-map-loader@^3.0.0: + version "3.0.1" + resolved "http://localhost:4873/source-map-loader/-/source-map-loader-3.0.1.tgz#9ae5edc7c2d42570934be4c95d1ccc6352eba52d" + integrity sha512-Vp1UsfyPvgujKQzi4pyDiTOnE3E4H+yHvkVRN3c/9PJmQS4CQJExvcDvaX/D+RV+xQben9HJ56jMJS3CgUeWyA== + dependencies: + abab "^2.0.5" + iconv-lite "^0.6.3" + source-map-js "^1.0.1" + +source-map-support@^0.5.6, source-map-support@~0.5.20: + version "0.5.21" + resolved "http://localhost:4873/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: + version "0.6.1" + resolved "http://localhost:4873/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +source-map@^0.7.3: + version "0.7.4" + resolved "http://localhost:4873/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" + integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== + +source-map@^0.8.0-beta.0: + version "0.8.0-beta.0" + resolved 
"http://localhost:4873/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" + integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== + dependencies: + whatwg-url "^7.0.0" + +sourcemap-codec@^1.4.8: + version "1.4.8" + resolved "http://localhost:4873/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" + integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== + +spdy-transport@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" + integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== + dependencies: + debug "^4.1.0" + detect-node "^2.0.4" + hpack.js "^2.1.6" + obuf "^1.1.2" + readable-stream "^3.0.6" + wbuf "^1.7.3" + +spdy@^4.0.2: + version "4.0.2" + resolved "http://localhost:4873/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b" + integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== + dependencies: + debug "^4.1.0" + handle-thing "^2.0.0" + http-deceiver "^1.2.7" + select-hose "^2.0.0" + spdy-transport "^3.0.0" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "http://localhost:4873/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== + +stable@^0.1.8: + version "0.1.8" + resolved "http://localhost:4873/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" + integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== + +stack-utils@^2.0.3: + version "2.0.5" + resolved "http://localhost:4873/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" + integrity 
sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== + dependencies: + escape-string-regexp "^2.0.0" + +stackframe@^1.3.4: + version "1.3.4" + resolved "http://localhost:4873/stackframe/-/stackframe-1.3.4.tgz#b881a004c8c149a5e8efef37d51b16e412943310" + integrity sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw== + +statuses@2.0.1: + version "2.0.1" + resolved "http://localhost:4873/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +"statuses@>= 1.4.0 < 2": + version "1.5.0" + resolved "http://localhost:4873/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" + integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== + +string-length@^4.0.1: + version "4.0.2" + resolved "http://localhost:4873/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== + dependencies: + char-regex "^1.0.2" + strip-ansi "^6.0.0" + +string-length@^5.0.1: + version "5.0.1" + resolved "http://localhost:4873/string-length/-/string-length-5.0.1.tgz#3d647f497b6e8e8d41e422f7e0b23bc536c8381e" + integrity sha512-9Ep08KAMUn0OadnVaBuRdE2l615CQ508kr0XMadjClfYpdCyvrbFp6Taebo8yyxokQ4viUd/xPPUA4FGgUa0ow== + dependencies: + char-regex "^2.0.0" + strip-ansi "^7.0.1" + +string-natural-compare@^3.0.1: + version "3.0.1" + resolved "http://localhost:4873/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" + integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== + +string-width@^4.1.0, string-width@^4.2.0: + version "4.2.3" + resolved 
"http://localhost:4873/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string.prototype.matchall@^4.0.6, string.prototype.matchall@^4.0.7: + version "4.0.7" + resolved "http://localhost:4873/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz#8e6ecb0d8a1fb1fda470d81acecb2dba057a481d" + integrity sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + get-intrinsic "^1.1.1" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + regexp.prototype.flags "^1.4.1" + side-channel "^1.0.4" + +string.prototype.trimend@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0" + integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string.prototype.trimstart@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef" + integrity sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "http://localhost:4873/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version 
"1.1.1" + resolved "http://localhost:4873/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +stringify-object@^3.3.0: + version "3.3.0" + resolved "http://localhost:4873/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629" + integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== + dependencies: + get-own-enumerable-property-symbols "^3.0.0" + is-obj "^1.0.1" + is-regexp "^1.0.0" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/strip-ansi/-/strip-ansi-7.0.1.tgz#61740a08ce36b61e50e65653f07060d000975fb2" + integrity sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw== + dependencies: + ansi-regex "^6.0.1" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== + +strip-bom@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-comments@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/strip-comments/-/strip-comments-2.0.1.tgz#4ad11c3fbcac177a67a40ac224ca339ca1c1ba9b" + integrity 
sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw== + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-indent@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" + integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== + dependencies: + min-indent "^1.0.0" + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "http://localhost:4873/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +style-loader@^3.3.1: + version "3.3.1" + resolved "http://localhost:4873/style-loader/-/style-loader-3.3.1.tgz#057dfa6b3d4d7c7064462830f9113ed417d38575" + integrity sha512-GPcQ+LDJbrcxHORTRes6Jy2sfvK2kS6hpSfI/fXhPt+spVzxF6LJ1dHLN9zIGmVaaP044YKaIatFaufENRiDoQ== + +stylehacks@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/stylehacks/-/stylehacks-5.1.0.tgz#a40066490ca0caca04e96c6b02153ddc39913520" + integrity sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q== + dependencies: + browserslist "^4.16.6" + postcss-selector-parser "^6.0.4" + +supports-color@^5.3.0: + version "5.5.0" + resolved "http://localhost:4873/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.0.0, supports-color@^7.1.0: + version "7.2.0" + resolved 
"http://localhost:4873/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-color@^8.0.0: + version "8.1.1" + resolved "http://localhost:4873/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + +supports-hyperlinks@^2.0.0: + version "2.3.0" + resolved "http://localhost:4873/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz#3943544347c1ff90b15effb03fc14ae45ec10624" + integrity sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA== + dependencies: + has-flag "^4.0.0" + supports-color "^7.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +svg-parser@^2.0.2: + version "2.0.4" + resolved "http://localhost:4873/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" + integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== + +svgo@^1.2.2: + version "1.3.2" + resolved "http://localhost:4873/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167" + integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== + dependencies: + chalk "^2.4.1" + coa "^2.0.2" + css-select "^2.0.0" + css-select-base-adapter "^0.1.1" + css-tree "1.0.0-alpha.37" + csso "^4.0.2" + js-yaml "^3.13.1" + mkdirp "~0.5.1" + object.values "^1.1.0" + sax "~1.2.4" + stable "^0.1.8" + unquote "~1.1.1" + 
util.promisify "~1.0.0" + +svgo@^2.7.0: + version "2.8.0" + resolved "http://localhost:4873/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" + integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== + dependencies: + "@trysound/sax" "0.2.0" + commander "^7.2.0" + css-select "^4.1.3" + css-tree "^1.1.3" + csso "^4.2.0" + picocolors "^1.0.0" + stable "^0.1.8" + +symbol-tree@^3.2.4: + version "3.2.4" + resolved "http://localhost:4873/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + +tailwindcss@^3.0.2: + version "3.1.8" + resolved "http://localhost:4873/tailwindcss/-/tailwindcss-3.1.8.tgz#4f8520550d67a835d32f2f4021580f9fddb7b741" + integrity sha512-YSneUCZSFDYMwk+TGq8qYFdCA3yfBRdBlS7txSq0LUmzyeqRe3a8fBQzbz9M3WS/iFT4BNf/nmw9mEzrnSaC0g== + dependencies: + arg "^5.0.2" + chokidar "^3.5.3" + color-name "^1.1.4" + detective "^5.2.1" + didyoumean "^1.2.2" + dlv "^1.1.3" + fast-glob "^3.2.11" + glob-parent "^6.0.2" + is-glob "^4.0.3" + lilconfig "^2.0.6" + normalize-path "^3.0.0" + object-hash "^3.0.0" + picocolors "^1.0.0" + postcss "^8.4.14" + postcss-import "^14.1.0" + postcss-js "^4.0.0" + postcss-load-config "^3.1.4" + postcss-nested "5.0.6" + postcss-selector-parser "^6.0.10" + postcss-value-parser "^4.2.0" + quick-lru "^5.1.1" + resolve "^1.22.1" + +tapable@^1.0.0: + version "1.1.3" + resolved "http://localhost:4873/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" + integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== + +tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: + version "2.2.1" + resolved "http://localhost:4873/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== + 
+temp-dir@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e" + integrity sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg== + +tempy@^0.6.0: + version "0.6.0" + resolved "http://localhost:4873/tempy/-/tempy-0.6.0.tgz#65e2c35abc06f1124a97f387b08303442bde59f3" + integrity sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw== + dependencies: + is-stream "^2.0.0" + temp-dir "^2.0.0" + type-fest "^0.16.0" + unique-string "^2.0.0" + +terminal-link@^2.0.0: + version "2.1.1" + resolved "http://localhost:4873/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" + integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== + dependencies: + ansi-escapes "^4.2.1" + supports-hyperlinks "^2.0.0" + +terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: + version "5.3.6" + resolved "http://localhost:4873/terser-webpack-plugin/-/terser-webpack-plugin-5.3.6.tgz#5590aec31aa3c6f771ce1b1acca60639eab3195c" + integrity sha512-kfLFk+PoLUQIbLmB1+PZDMRSZS99Mp+/MHqDNmMA6tOItzRt+Npe3E+fsMs5mfcM0wCtrrdU387UnV+vnSffXQ== + dependencies: + "@jridgewell/trace-mapping" "^0.3.14" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.0" + terser "^5.14.1" + +terser@^5.0.0, terser@^5.10.0, terser@^5.14.1: + version "5.15.0" + resolved "http://localhost:4873/terser/-/terser-5.15.0.tgz#e16967894eeba6e1091509ec83f0c60e179f2425" + integrity sha512-L1BJiXVmheAQQy+as0oF3Pwtlo4s3Wi1X2zNZ2NxOB4wx9bdS9Vk67XQENLFdLYGCK/Z2di53mTj/hBafR+dTA== + dependencies: + "@jridgewell/source-map" "^0.3.2" + acorn "^8.5.0" + commander "^2.20.0" + source-map-support "~0.5.20" + +test-exclude@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity 
sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + +text-table@^0.2.0: + version "0.2.0" + resolved "http://localhost:4873/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + +throat@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375" + integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w== + +thunky@^1.0.2: + version "1.1.0" + resolved "http://localhost:4873/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" + integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== + +tmpl@1.0.5: + version "1.0.5" + resolved "http://localhost:4873/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "http://localhost:4873/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +toidentifier@1.0.1: + version "1.0.1" + resolved "http://localhost:4873/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity 
sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + +tough-cookie@^4.0.0: + version "4.1.2" + resolved "http://localhost:4873/tough-cookie/-/tough-cookie-4.1.2.tgz#e53e84b85f24e0b65dd526f46628db6c85f6b874" + integrity sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ== + dependencies: + psl "^1.1.33" + punycode "^2.1.1" + universalify "^0.2.0" + url-parse "^1.5.3" + +tr46@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + integrity sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA== + dependencies: + punycode "^2.1.0" + +tr46@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" + integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== + dependencies: + punycode "^2.1.1" + +tryer@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8" + integrity sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== + +tsconfig-paths@^3.14.1: + version "3.14.1" + resolved "http://localhost:4873/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz#ba0734599e8ea36c862798e920bcf163277b137a" + integrity sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.1" + minimist "^1.2.6" + strip-bom "^3.0.0" + +tslib@^1.8.1: + version "1.14.1" + resolved "http://localhost:4873/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@^2.0.3: + version "2.4.0" + resolved 
"http://localhost:4873/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" + integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== + +tsutils@^3.21.0: + version "3.21.0" + resolved "http://localhost:4873/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + dependencies: + tslib "^1.8.1" + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "http://localhost:4873/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-check@~0.3.2: + version "0.3.2" + resolved "http://localhost:4873/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg== + dependencies: + prelude-ls "~1.1.2" + +type-detect@4.0.8: + version "4.0.8" + resolved "http://localhost:4873/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +type-fest@^0.16.0: + version "0.16.0" + resolved "http://localhost:4873/type-fest/-/type-fest-0.16.0.tgz#3240b891a78b0deae910dbeb86553e552a148860" + integrity sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg== + +type-fest@^0.20.2: + version "0.20.2" + resolved "http://localhost:4873/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +type-fest@^0.21.3: + version "0.21.3" + resolved 
"http://localhost:4873/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + +type-is@~1.6.18: + version "1.6.18" + resolved "http://localhost:4873/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +typedarray-to-buffer@^3.1.5: + version "3.1.5" + resolved "http://localhost:4873/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" + integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== + dependencies: + is-typedarray "^1.0.0" + +unbox-primitive@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== + dependencies: + call-bind "^1.0.2" + has-bigints "^1.0.2" + has-symbols "^1.0.3" + which-boxed-primitive "^1.0.2" + +unicode-canonical-property-names-ecmascript@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" + integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== + +unicode-match-property-ecmascript@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" + integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== + dependencies: + unicode-canonical-property-names-ecmascript "^2.0.0" + 
unicode-property-aliases-ecmascript "^2.0.0" + +unicode-match-property-value-ecmascript@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz#1a01aa57247c14c568b89775a54938788189a714" + integrity sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw== + +unicode-property-aliases-ecmascript@^2.0.0: + version "2.1.0" + resolved "http://localhost:4873/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd" + integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== + +unique-string@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" + integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== + dependencies: + crypto-random-string "^2.0.0" + +universalify@^0.2.0: + version "0.2.0" + resolved "http://localhost:4873/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" + integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== + +universalify@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "http://localhost:4873/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== + +unquote@~1.1.1: + version "1.1.1" + resolved "http://localhost:4873/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544" + integrity 
sha512-vRCqFv6UhXpWxZPyGDh/F3ZpNv8/qo7w6iufLpQg9aKnQ71qM4B5KiI7Mia9COcjEhrO9LueHpMYjYzsWH3OIg== + +upath@^1.2.0: + version "1.2.0" + resolved "http://localhost:4873/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" + integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== + +update-browserslist-db@^1.0.9: + version "1.0.9" + resolved "http://localhost:4873/update-browserslist-db/-/update-browserslist-db-1.0.9.tgz#2924d3927367a38d5c555413a7ce138fc95fcb18" + integrity sha512-/xsqn21EGVdXI3EXSum1Yckj3ZVZugqyOZQ/CxYPBD/R+ko9NSUScf8tFF4dOKY+2pvSSJA/S+5B8s4Zr4kyvg== + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" + +uri-js@^4.2.2: + version "4.4.1" + resolved "http://localhost:4873/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +url-parse@^1.5.3: + version "1.5.10" + resolved "http://localhost:4873/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" + integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== + dependencies: + querystringify "^2.1.1" + requires-port "^1.0.0" + +util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: + version "1.0.2" + resolved "http://localhost:4873/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +util.promisify@~1.0.0: + version "1.0.1" + resolved "http://localhost:4873/util.promisify/-/util.promisify-1.0.1.tgz#6baf7774b80eeb0f7520d8b81d07982a59abbaee" + integrity sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.2" + has-symbols "^1.0.1" + 
object.getownpropertydescriptors "^2.1.0" + +utila@~0.4: + version "0.4.0" + resolved "http://localhost:4873/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" + integrity sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA== + +utils-merge@1.0.1: + version "1.0.1" + resolved "http://localhost:4873/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== + +uuid@^8.3, uuid@^8.3.2: + version "8.3.2" + resolved "http://localhost:4873/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" + integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== + +v8-to-istanbul@^8.1.0: + version "8.1.1" + resolved "http://localhost:4873/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz#77b752fd3975e31bbcef938f85e9bd1c7a8d60ed" + integrity sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^1.6.0" + source-map "^0.7.3" + +vary@~1.1.2: + version "1.1.2" + resolved "http://localhost:4873/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== + +w3c-hr-time@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" + integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== + dependencies: + browser-process-hrtime "^1.0.0" + +w3c-xmlserializer@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" + integrity 
sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== + dependencies: + xml-name-validator "^3.0.0" + +walker@^1.0.7: + version "1.0.8" + resolved "http://localhost:4873/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + +watchpack@^2.4.0: + version "2.4.0" + resolved "http://localhost:4873/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" + integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== + dependencies: + glob-to-regexp "^0.4.1" + graceful-fs "^4.1.2" + +wbuf@^1.1.0, wbuf@^1.7.3: + version "1.7.3" + resolved "http://localhost:4873/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" + integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== + dependencies: + minimalistic-assert "^1.0.0" + +web-vitals@^2.1.4: + version "2.1.4" + resolved "http://localhost:4873/web-vitals/-/web-vitals-2.1.4.tgz#76563175a475a5e835264d373704f9dde718290c" + integrity sha512-sVWcwhU5mX6crfI5Vd2dC4qchyTqxV8URinzt25XqVh+bHEPGH4C3NPrNionCP7Obx59wrYEbNlw4Z8sjALzZg== + +webidl-conversions@^4.0.2: + version "4.0.2" + resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== + +webidl-conversions@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" + integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== + +webidl-conversions@^6.1.0: + version "6.1.0" + resolved 
"http://localhost:4873/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" + integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== + +webpack-dev-middleware@^5.3.1: + version "5.3.3" + resolved "http://localhost:4873/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz#efae67c2793908e7311f1d9b06f2a08dcc97e51f" + integrity sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA== + dependencies: + colorette "^2.0.10" + memfs "^3.4.3" + mime-types "^2.1.31" + range-parser "^1.2.1" + schema-utils "^4.0.0" + +webpack-dev-server@^4.6.0: + version "4.11.1" + resolved "http://localhost:4873/webpack-dev-server/-/webpack-dev-server-4.11.1.tgz#ae07f0d71ca0438cf88446f09029b92ce81380b5" + integrity sha512-lILVz9tAUy1zGFwieuaQtYiadImb5M3d+H+L1zDYalYoDl0cksAB1UNyuE5MMWJrG6zR1tXkCP2fitl7yoUJiw== + dependencies: + "@types/bonjour" "^3.5.9" + "@types/connect-history-api-fallback" "^1.3.5" + "@types/express" "^4.17.13" + "@types/serve-index" "^1.9.1" + "@types/serve-static" "^1.13.10" + "@types/sockjs" "^0.3.33" + "@types/ws" "^8.5.1" + ansi-html-community "^0.0.8" + bonjour-service "^1.0.11" + chokidar "^3.5.3" + colorette "^2.0.10" + compression "^1.7.4" + connect-history-api-fallback "^2.0.0" + default-gateway "^6.0.3" + express "^4.17.3" + graceful-fs "^4.2.6" + html-entities "^2.3.2" + http-proxy-middleware "^2.0.3" + ipaddr.js "^2.0.1" + open "^8.0.9" + p-retry "^4.5.0" + rimraf "^3.0.2" + schema-utils "^4.0.0" + selfsigned "^2.1.1" + serve-index "^1.9.1" + sockjs "^0.3.24" + spdy "^4.0.2" + webpack-dev-middleware "^5.3.1" + ws "^8.4.2" + +webpack-manifest-plugin@^4.0.2: + version "4.1.1" + resolved "http://localhost:4873/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz#10f8dbf4714ff93a215d5a45bcc416d80506f94f" + integrity sha512-YXUAwxtfKIJIKkhg03MKuiFAD72PlrqCiwdwO4VEXdRO5V0ORCNwaOwAZawPZalCbmH9kBDmXnNeQOw+BIEiow== + dependencies: 
+ tapable "^2.0.0" + webpack-sources "^2.2.0" + +webpack-merge@^5.8.0: + version "5.8.0" + resolved "http://localhost:4873/webpack-merge/-/webpack-merge-5.8.0.tgz#2b39dbf22af87776ad744c390223731d30a68f61" + integrity sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q== + dependencies: + clone-deep "^4.0.1" + wildcard "^2.0.0" + +webpack-sources@^1.4.3: + version "1.4.3" + resolved "http://localhost:4873/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" + integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== + dependencies: + source-list-map "^2.0.0" + source-map "~0.6.1" + +webpack-sources@^2.2.0: + version "2.3.1" + resolved "http://localhost:4873/webpack-sources/-/webpack-sources-2.3.1.tgz#570de0af163949fe272233c2cefe1b56f74511fd" + integrity sha512-y9EI9AO42JjEcrTJFOYmVywVZdKVUfOvDUPsJea5GIr1JOEGFVqwlY2K098fFoIjOkDzHn2AjRvM8dsBZu+gCA== + dependencies: + source-list-map "^2.0.1" + source-map "^0.6.1" + +webpack-sources@^3.2.3: + version "3.2.3" + resolved "http://localhost:4873/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" + integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== + +webpack@^5.64.4: + version "5.74.0" + resolved "http://localhost:4873/webpack/-/webpack-5.74.0.tgz#02a5dac19a17e0bb47093f2be67c695102a55980" + integrity sha512-A2InDwnhhGN4LYctJj6M1JEaGL7Luj6LOmyBHjcI8529cm5p6VXiTIW2sn6ffvEAKmveLzvu4jrihwXtPojlAA== + dependencies: + "@types/eslint-scope" "^3.7.3" + "@types/estree" "^0.0.51" + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/wasm-edit" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + acorn "^8.7.1" + acorn-import-assertions "^1.7.6" + browserslist "^4.14.5" + chrome-trace-event "^1.0.2" + enhanced-resolve "^5.10.0" + es-module-lexer "^0.9.0" + eslint-scope "5.1.1" + events "^3.2.0" + glob-to-regexp "^0.4.1" + 
graceful-fs "^4.2.9" + json-parse-even-better-errors "^2.3.1" + loader-runner "^4.2.0" + mime-types "^2.1.27" + neo-async "^2.6.2" + schema-utils "^3.1.0" + tapable "^2.1.1" + terser-webpack-plugin "^5.1.3" + watchpack "^2.4.0" + webpack-sources "^3.2.3" + +websocket-driver@>=0.5.1, websocket-driver@^0.7.4: + version "0.7.4" + resolved "http://localhost:4873/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760" + integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== + dependencies: + http-parser-js ">=0.5.1" + safe-buffer ">=5.1.0" + websocket-extensions ">=0.1.1" + +websocket-extensions@>=0.1.1: + version "0.1.4" + resolved "http://localhost:4873/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" + integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== + +whatwg-encoding@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" + integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== + dependencies: + iconv-lite "0.4.24" + +whatwg-fetch@^3.6.2: + version "3.6.2" + resolved "http://localhost:4873/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" + integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== + +whatwg-mimetype@^2.3.0: + version "2.3.0" + resolved "http://localhost:4873/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" + integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== + +whatwg-url@^7.0.0: + version "7.1.0" + resolved "http://localhost:4873/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" + integrity 
sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== + dependencies: + lodash.sortby "^4.7.0" + tr46 "^1.0.1" + webidl-conversions "^4.0.2" + +whatwg-url@^8.0.0, whatwg-url@^8.5.0: + version "8.7.0" + resolved "http://localhost:4873/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" + integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== + dependencies: + lodash "^4.7.0" + tr46 "^2.1.0" + webidl-conversions "^6.1.0" + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which@^1.3.1: + version "1.3.1" + resolved "http://localhost:4873/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +which@^2.0.1: + version "2.0.2" + resolved "http://localhost:4873/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +wildcard@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/wildcard/-/wildcard-2.0.0.tgz#a77d20e5200c6faaac979e4b3aadc7b3dd7f8fec" + integrity sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw== + +word-wrap@^1.2.3, word-wrap@~1.2.3: + version "1.2.3" + resolved "http://localhost:4873/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity 
sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +workbox-background-sync@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-background-sync/-/workbox-background-sync-6.5.4.tgz#3141afba3cc8aa2ae14c24d0f6811374ba8ff6a9" + integrity sha512-0r4INQZMyPky/lj4Ou98qxcThrETucOde+7mRGJl13MPJugQNKeZQOdIJe/1AchOP23cTqHcN/YVpD6r8E6I8g== + dependencies: + idb "^7.0.1" + workbox-core "6.5.4" + +workbox-broadcast-update@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-broadcast-update/-/workbox-broadcast-update-6.5.4.tgz#8441cff5417cd41f384ba7633ca960a7ffe40f66" + integrity sha512-I/lBERoH1u3zyBosnpPEtcAVe5lwykx9Yg1k6f8/BGEPGaMMgZrwVrqL1uA9QZ1NGGFoyE6t9i7lBjOlDhFEEw== + dependencies: + workbox-core "6.5.4" + +workbox-build@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-build/-/workbox-build-6.5.4.tgz#7d06d31eb28a878817e1c991c05c5b93409f0389" + integrity sha512-kgRevLXEYvUW9WS4XoziYqZ8Q9j/2ziJYEtTrjdz5/L/cTUa2XfyMP2i7c3p34lgqJ03+mTiz13SdFef2POwbA== + dependencies: + "@apideck/better-ajv-errors" "^0.3.1" + "@babel/core" "^7.11.1" + "@babel/preset-env" "^7.11.0" + "@babel/runtime" "^7.11.2" + "@rollup/plugin-babel" "^5.2.0" + "@rollup/plugin-node-resolve" "^11.2.1" + "@rollup/plugin-replace" "^2.4.1" + "@surma/rollup-plugin-off-main-thread" "^2.2.3" + ajv "^8.6.0" + common-tags "^1.8.0" + fast-json-stable-stringify "^2.1.0" + fs-extra "^9.0.1" + glob "^7.1.6" + lodash "^4.17.20" + pretty-bytes "^5.3.0" + rollup "^2.43.1" + rollup-plugin-terser "^7.0.0" + source-map "^0.8.0-beta.0" + stringify-object "^3.3.0" + strip-comments "^2.0.1" + tempy "^0.6.0" + upath "^1.2.0" + workbox-background-sync "6.5.4" + workbox-broadcast-update "6.5.4" + workbox-cacheable-response "6.5.4" + workbox-core "6.5.4" + workbox-expiration "6.5.4" + workbox-google-analytics "6.5.4" + workbox-navigation-preload "6.5.4" + workbox-precaching "6.5.4" + workbox-range-requests "6.5.4" + workbox-recipes "6.5.4" + 
workbox-routing "6.5.4" + workbox-strategies "6.5.4" + workbox-streams "6.5.4" + workbox-sw "6.5.4" + workbox-window "6.5.4" + +workbox-cacheable-response@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-cacheable-response/-/workbox-cacheable-response-6.5.4.tgz#a5c6ec0c6e2b6f037379198d4ef07d098f7cf137" + integrity sha512-DCR9uD0Fqj8oB2TSWQEm1hbFs/85hXXoayVwFKLVuIuxwJaihBsLsp4y7J9bvZbqtPJ1KlCkmYVGQKrBU4KAug== + dependencies: + workbox-core "6.5.4" + +workbox-core@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-core/-/workbox-core-6.5.4.tgz#df48bf44cd58bb1d1726c49b883fb1dffa24c9ba" + integrity sha512-OXYb+m9wZm8GrORlV2vBbE5EC1FKu71GGp0H4rjmxmF4/HLbMCoTFws87M3dFwgpmg0v00K++PImpNQ6J5NQ6Q== + +workbox-expiration@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-expiration/-/workbox-expiration-6.5.4.tgz#501056f81e87e1d296c76570bb483ce5e29b4539" + integrity sha512-jUP5qPOpH1nXtjGGh1fRBa1wJL2QlIb5mGpct3NzepjGG2uFFBn4iiEBiI9GUmfAFR2ApuRhDydjcRmYXddiEQ== + dependencies: + idb "^7.0.1" + workbox-core "6.5.4" + +workbox-google-analytics@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-google-analytics/-/workbox-google-analytics-6.5.4.tgz#c74327f80dfa4c1954cbba93cd7ea640fe7ece7d" + integrity sha512-8AU1WuaXsD49249Wq0B2zn4a/vvFfHkpcFfqAFHNHwln3jK9QUYmzdkKXGIZl9wyKNP+RRX30vcgcyWMcZ9VAg== + dependencies: + workbox-background-sync "6.5.4" + workbox-core "6.5.4" + workbox-routing "6.5.4" + workbox-strategies "6.5.4" + +workbox-navigation-preload@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-navigation-preload/-/workbox-navigation-preload-6.5.4.tgz#ede56dd5f6fc9e860a7e45b2c1a8f87c1c793212" + integrity sha512-IIwf80eO3cr8h6XSQJF+Hxj26rg2RPFVUmJLUlM0+A2GzB4HFbQyKkrgD5y2d84g2IbJzP4B4j5dPBRzamHrng== + dependencies: + workbox-core "6.5.4" + +workbox-precaching@6.5.4: + version "6.5.4" + resolved 
"http://localhost:4873/workbox-precaching/-/workbox-precaching-6.5.4.tgz#740e3561df92c6726ab5f7471e6aac89582cab72" + integrity sha512-hSMezMsW6btKnxHB4bFy2Qfwey/8SYdGWvVIKFaUm8vJ4E53JAY+U2JwLTRD8wbLWoP6OVUdFlXsTdKu9yoLTg== + dependencies: + workbox-core "6.5.4" + workbox-routing "6.5.4" + workbox-strategies "6.5.4" + +workbox-range-requests@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-range-requests/-/workbox-range-requests-6.5.4.tgz#86b3d482e090433dab38d36ae031b2bb0bd74399" + integrity sha512-Je2qR1NXCFC8xVJ/Lux6saH6IrQGhMpDrPXWZWWS8n/RD+WZfKa6dSZwU+/QksfEadJEr/NfY+aP/CXFFK5JFg== + dependencies: + workbox-core "6.5.4" + +workbox-recipes@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-recipes/-/workbox-recipes-6.5.4.tgz#cca809ee63b98b158b2702dcfb741b5cc3e24acb" + integrity sha512-QZNO8Ez708NNwzLNEXTG4QYSKQ1ochzEtRLGaq+mr2PyoEIC1xFW7MrWxrONUxBFOByksds9Z4//lKAX8tHyUA== + dependencies: + workbox-cacheable-response "6.5.4" + workbox-core "6.5.4" + workbox-expiration "6.5.4" + workbox-precaching "6.5.4" + workbox-routing "6.5.4" + workbox-strategies "6.5.4" + +workbox-routing@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-routing/-/workbox-routing-6.5.4.tgz#6a7fbbd23f4ac801038d9a0298bc907ee26fe3da" + integrity sha512-apQswLsbrrOsBUWtr9Lf80F+P1sHnQdYodRo32SjiByYi36IDyL2r7BH1lJtFX8fwNHDa1QOVY74WKLLS6o5Pg== + dependencies: + workbox-core "6.5.4" + +workbox-strategies@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-strategies/-/workbox-strategies-6.5.4.tgz#4edda035b3c010fc7f6152918370699334cd204d" + integrity sha512-DEtsxhx0LIYWkJBTQolRxG4EI0setTJkqR4m7r4YpBdxtWJH1Mbg01Cj8ZjNOO8etqfA3IZaOPHUxCs8cBsKLw== + dependencies: + workbox-core "6.5.4" + +workbox-streams@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-streams/-/workbox-streams-6.5.4.tgz#1cb3c168a6101df7b5269d0353c19e36668d7d69" + integrity 
sha512-FXKVh87d2RFXkliAIheBojBELIPnWbQdyDvsH3t74Cwhg0fDheL1T8BqSM86hZvC0ZESLsznSYWw+Va+KVbUzg== + dependencies: + workbox-core "6.5.4" + workbox-routing "6.5.4" + +workbox-sw@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-sw/-/workbox-sw-6.5.4.tgz#d93e9c67924dd153a61367a4656ff4d2ae2ed736" + integrity sha512-vo2RQo7DILVRoH5LjGqw3nphavEjK4Qk+FenXeUsknKn14eCNedHOXWbmnvP4ipKhlE35pvJ4yl4YYf6YsJArA== + +workbox-webpack-plugin@^6.4.1: + version "6.5.4" + resolved "http://localhost:4873/workbox-webpack-plugin/-/workbox-webpack-plugin-6.5.4.tgz#baf2d3f4b8f435f3469887cf4fba2b7fac3d0fd7" + integrity sha512-LmWm/zoaahe0EGmMTrSLUi+BjyR3cdGEfU3fS6PN1zKFYbqAKuQ+Oy/27e4VSXsyIwAw8+QDfk1XHNGtZu9nQg== + dependencies: + fast-json-stable-stringify "^2.1.0" + pretty-bytes "^5.4.1" + upath "^1.2.0" + webpack-sources "^1.4.3" + workbox-build "6.5.4" + +workbox-window@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-window/-/workbox-window-6.5.4.tgz#d991bc0a94dff3c2dbb6b84558cff155ca878e91" + integrity sha512-HnLZJDwYBE+hpG25AQBO8RUWBJRaCsI9ksQJEp3aCOFCaG5kqaToAYXFRAHxzRluM2cQbGzdQF5rjKPWPA1fug== + dependencies: + "@types/trusted-types" "^2.0.2" + workbox-core "6.5.4" + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "http://localhost:4873/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "http://localhost:4873/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +write-file-atomic@^3.0.0: + version "3.0.3" + resolved "http://localhost:4873/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" + integrity 
sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== + dependencies: + imurmurhash "^0.1.4" + is-typedarray "^1.0.0" + signal-exit "^3.0.2" + typedarray-to-buffer "^3.1.5" + +ws@^7.4.6: + version "7.5.9" + resolved "http://localhost:4873/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" + integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== + +ws@^8.4.2: + version "8.9.0" + resolved "http://localhost:4873/ws/-/ws-8.9.0.tgz#2a994bb67144be1b53fe2d23c53c028adeb7f45e" + integrity sha512-Ja7nszREasGaYUYCI2k4lCKIRTt+y7XuqVoHR44YpI49TtryyqbqvDMn5eqfW7e6HzTukDRIsXqzVHScqRcafg== + +xml-name-validator@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" + integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== + +xmlchars@^2.2.0: + version "2.2.0" + resolved "http://localhost:4873/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" + integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== + +xtend@^4.0.2: + version "4.0.2" + resolved "http://localhost:4873/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== + +y18n@^5.0.5: + version "5.0.8" + resolved "http://localhost:4873/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + +yallist@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: + 
version "1.10.2" + resolved "http://localhost:4873/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + +yargs-parser@^20.2.2: + version "20.2.9" + resolved "http://localhost:4873/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== + +yargs@^16.2.0: + version "16.2.0" + resolved "http://localhost:4873/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.0" + y18n "^5.0.5" + yargs-parser "^20.2.2" + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "http://localhost:4873/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== diff --git a/automerge-js/examples/vite/.gitignore b/automerge-js/examples/vite/.gitignore new file mode 100644 index 00000000..23d67fc1 --- /dev/null +++ b/automerge-js/examples/vite/.gitignore @@ -0,0 +1,2 @@ +node_modules/ +yarn.lock diff --git a/automerge-js/examples/vite/README.md b/automerge-js/examples/vite/README.md new file mode 100644 index 00000000..70fa620f --- /dev/null +++ b/automerge-js/examples/vite/README.md @@ -0,0 +1,47 @@ +# Vite + Automerge + +There are three things you need to do to get WASM packaging working with vite: + +1. Install the top level await plugin +2. Install the `vite-plugin-wasm` plugin +3. 
Exclude `automerge-wasm` from the optimizer + +First, install the packages we need: +```bash +yarn add vite-plugin-top-level-await +yarn add vite-plugin-wasm +``` + +In `vite.config.js` + +```javascript +import { defineConfig } from "vite" +import wasm from "vite-plugin-wasm" +import topLevelAwait from "vite-plugin-top-level-await" + +export default defineConfig({ + plugins: [topLevelAwait(), wasm()], + + optimizeDeps: { + // This is necessary because otherwise `vite dev` includes two separate + // versions of the JS wrapper. This causes problems because the JS + // wrapper has a module level variable to track JS side heap + // allocations, initializing this twice causes horrible breakage + exclude: ["automerge-wasm"] + } +}) +``` + +Now start the dev server: + +```bash +yarn vite +``` + +## Running the example + +```bash +yarn install +yarn dev +``` + diff --git a/automerge-js/examples/vite/index.html b/automerge-js/examples/vite/index.html new file mode 100644 index 00000000..f86e483c --- /dev/null +++ b/automerge-js/examples/vite/index.html @@ -0,0 +1,13 @@ + + + + + + + Vite + TS + + +
+ + + diff --git a/automerge-js/examples/vite/main.ts b/automerge-js/examples/vite/main.ts new file mode 100644 index 00000000..157c8e48 --- /dev/null +++ b/automerge-js/examples/vite/main.ts @@ -0,0 +1,15 @@ +import * as Automerge from "/node_modules/.vite/deps/automerge-js.js?v=6e973f28"; +console.log(Automerge); +let doc = Automerge.init(); +doc = Automerge.change(doc, (d) => d.hello = "from automerge-js"); +console.log(doc); +const result = JSON.stringify(doc); +if (typeof document !== "undefined") { + const element = document.createElement("div"); + element.innerHTML = JSON.stringify(result); + document.body.appendChild(element); +} else { + console.log("node:", result); +} + +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi9ob21lL2FsZXgvUHJvamVjdHMvYXV0b21lcmdlL2F1dG9tZXJnZS1ycy9hdXRvbWVyZ2UtanMvZXhhbXBsZXMvdml0ZS9zcmMvbWFpbi50cyJdLCJzb3VyY2VzQ29udGVudCI6WyJpbXBvcnQgKiBhcyBBdXRvbWVyZ2UgZnJvbSBcImF1dG9tZXJnZS1qc1wiXG5cbi8vIGhlbGxvIHdvcmxkIGNvZGUgdGhhdCB3aWxsIHJ1biBjb3JyZWN0bHkgb24gd2ViIG9yIG5vZGVcblxuY29uc29sZS5sb2coQXV0b21lcmdlKVxubGV0IGRvYyA9IEF1dG9tZXJnZS5pbml0KClcbmRvYyA9IEF1dG9tZXJnZS5jaGFuZ2UoZG9jLCAoZDogYW55KSA9PiBkLmhlbGxvID0gXCJmcm9tIGF1dG9tZXJnZS1qc1wiKVxuY29uc29sZS5sb2coZG9jKVxuY29uc3QgcmVzdWx0ID0gSlNPTi5zdHJpbmdpZnkoZG9jKVxuXG5pZiAodHlwZW9mIGRvY3VtZW50ICE9PSAndW5kZWZpbmVkJykge1xuICAgIC8vIGJyb3dzZXJcbiAgICBjb25zdCBlbGVtZW50ID0gZG9jdW1lbnQuY3JlYXRlRWxlbWVudCgnZGl2Jyk7XG4gICAgZWxlbWVudC5pbm5lckhUTUwgPSBKU09OLnN0cmluZ2lmeShyZXN1bHQpXG4gICAgZG9jdW1lbnQuYm9keS5hcHBlbmRDaGlsZChlbGVtZW50KTtcbn0gZWxzZSB7XG4gICAgLy8gc2VydmVyXG4gICAgY29uc29sZS5sb2coXCJub2RlOlwiLCByZXN1bHQpXG59XG5cbiJdLCJtYXBwaW5ncyI6IkFBQUEsWUFBWSxlQUFlO0FBSTNCLFFBQVEsSUFBSSxTQUFTO0FBQ3JCLElBQUksTUFBTSxVQUFVLEtBQUs7QUFDekIsTUFBTSxVQUFVLE9BQU8sS0FBSyxDQUFDLE1BQVcsRUFBRSxRQUFRLG1CQUFtQjtBQUNyRSxRQUFRLElBQUksR0FBRztBQUNmLE1BQU0sU0FBUyxLQUFLLFVBQVUsR0FBRztBQUVqQyxJQUFJLE9BQU8sYUFBYSxhQUFhO0FBRWpDLFFBQU0sVUFBVSxTQUFTLGNBQWMsS0FBSztBQUM1QyxVQUFRLFlBQVksS0FBSyxVQUF
VLE1BQU07QUFDekMsV0FBUyxLQUFLLFlBQVksT0FBTztBQUNyQyxPQUFPO0FBRUgsVUFBUSxJQUFJLFNBQVMsTUFBTTtBQUMvQjsiLCJuYW1lcyI6W119 \ No newline at end of file diff --git a/automerge-js/examples/vite/package.json b/automerge-js/examples/vite/package.json new file mode 100644 index 00000000..d4a09e54 --- /dev/null +++ b/automerge-js/examples/vite/package.json @@ -0,0 +1,20 @@ +{ + "name": "autovite", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "tsc && vite build", + "preview": "vite preview" + }, + "dependencies": { + "automerge": "2.0.0-alpha.1" + }, + "devDependencies": { + "typescript": "^4.6.4", + "vite": "^3.1.0", + "vite-plugin-top-level-await": "^1.1.1", + "vite-plugin-wasm": "^2.1.0" + } +} diff --git a/automerge-js/examples/vite/public/vite.svg b/automerge-js/examples/vite/public/vite.svg new file mode 100644 index 00000000..e7b8dfb1 --- /dev/null +++ b/automerge-js/examples/vite/public/vite.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/automerge-js/examples/vite/src/counter.ts b/automerge-js/examples/vite/src/counter.ts new file mode 100644 index 00000000..a3529e1f --- /dev/null +++ b/automerge-js/examples/vite/src/counter.ts @@ -0,0 +1,9 @@ +export function setupCounter(element: HTMLButtonElement) { + let counter = 0 + const setCounter = (count: number) => { + counter = count + element.innerHTML = `count is ${counter}` + } + element.addEventListener('click', () => setCounter(++counter)) + setCounter(0) +} diff --git a/automerge-js/examples/vite/src/main.ts b/automerge-js/examples/vite/src/main.ts new file mode 100644 index 00000000..c94cbfd7 --- /dev/null +++ b/automerge-js/examples/vite/src/main.ts @@ -0,0 +1,18 @@ +import * as Automerge from "automerge" + +// hello world code that will run correctly on web or node + +let doc = Automerge.init() +doc = Automerge.change(doc, (d: any) => d.hello = "from automerge-js") +const result = JSON.stringify(doc) + +if (typeof document !== 'undefined') { + 
// browser + const element = document.createElement('div'); + element.innerHTML = JSON.stringify(result) + document.body.appendChild(element); +} else { + // server + console.log("node:", result) +} + diff --git a/automerge-js/examples/vite/src/style.css b/automerge-js/examples/vite/src/style.css new file mode 100644 index 00000000..ac37d84b --- /dev/null +++ b/automerge-js/examples/vite/src/style.css @@ -0,0 +1,97 @@ +:root { + font-family: Inter, Avenir, Helvetica, Arial, sans-serif; + font-size: 16px; + line-height: 24px; + font-weight: 400; + + color-scheme: light dark; + color: rgba(255, 255, 255, 0.87); + background-color: #242424; + + font-synthesis: none; + text-rendering: optimizeLegibility; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + -webkit-text-size-adjust: 100%; +} + +a { + font-weight: 500; + color: #646cff; + text-decoration: inherit; +} +a:hover { + color: #535bf2; +} + +body { + margin: 0; + display: flex; + place-items: center; + min-width: 320px; + min-height: 100vh; +} + +h1 { + font-size: 3.2em; + line-height: 1.1; +} + +#app { + max-width: 1280px; + margin: 0 auto; + padding: 2rem; + text-align: center; +} + +.logo { + height: 6em; + padding: 1.5em; + will-change: filter; +} +.logo:hover { + filter: drop-shadow(0 0 2em #646cffaa); +} +.logo.vanilla:hover { + filter: drop-shadow(0 0 2em #3178c6aa); +} + +.card { + padding: 2em; +} + +.read-the-docs { + color: #888; +} + +button { + border-radius: 8px; + border: 1px solid transparent; + padding: 0.6em 1.2em; + font-size: 1em; + font-weight: 500; + font-family: inherit; + background-color: #1a1a1a; + cursor: pointer; + transition: border-color 0.25s; +} +button:hover { + border-color: #646cff; +} +button:focus, +button:focus-visible { + outline: 4px auto -webkit-focus-ring-color; +} + +@media (prefers-color-scheme: light) { + :root { + color: #213547; + background-color: #ffffff; + } + a:hover { + color: #747bff; + } + button { + background-color: #f9f9f9; + } 
+} diff --git a/automerge-js/examples/vite/src/typescript.svg b/automerge-js/examples/vite/src/typescript.svg new file mode 100644 index 00000000..d91c910c --- /dev/null +++ b/automerge-js/examples/vite/src/typescript.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/automerge-js/examples/vite/src/vite-env.d.ts b/automerge-js/examples/vite/src/vite-env.d.ts new file mode 100644 index 00000000..11f02fe2 --- /dev/null +++ b/automerge-js/examples/vite/src/vite-env.d.ts @@ -0,0 +1 @@ +/// diff --git a/automerge-js/examples/vite/tsconfig.json b/automerge-js/examples/vite/tsconfig.json new file mode 100644 index 00000000..fbd02253 --- /dev/null +++ b/automerge-js/examples/vite/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "target": "ESNext", + "useDefineForClassFields": true, + "module": "ESNext", + "lib": ["ESNext", "DOM"], + "moduleResolution": "Node", + "strict": true, + "sourceMap": true, + "resolveJsonModule": true, + "isolatedModules": true, + "esModuleInterop": true, + "noEmit": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noImplicitReturns": true, + "skipLibCheck": true + }, + "include": ["src"] +} diff --git a/automerge-js/examples/vite/vite.config.js b/automerge-js/examples/vite/vite.config.js new file mode 100644 index 00000000..c048f0b5 --- /dev/null +++ b/automerge-js/examples/vite/vite.config.js @@ -0,0 +1,15 @@ +import { defineConfig } from "vite" +import wasm from "vite-plugin-wasm" +import topLevelAwait from "vite-plugin-top-level-await" + +export default defineConfig({ + plugins: [topLevelAwait(), wasm()], + + optimizeDeps: { + // This is necessary because otherwise `vite dev` includes two separate + // versions of the JS wrapper. 
This causes problems because the JS + // wrapper has a module level variable to track JS side heap + // allocations, initializing this twice causes horrible breakage + exclude: ["automerge-wasm"] + } +}) diff --git a/automerge-js/examples/webpack/README.md b/automerge-js/examples/webpack/README.md new file mode 100644 index 00000000..917f9c8a --- /dev/null +++ b/automerge-js/examples/webpack/README.md @@ -0,0 +1,37 @@ +# Webpack + Automerge + + +Getting WASM working in webpack 5 is very easy. You just need to enable the +`asyncWebAssembly` +[experiment](https://webpack.js.org/configuration/experiments/). For example: + + +```javascript +const path = require('path'); + +const clientConfig = { + experiments: { asyncWebAssembly: true }, + target: 'web', + entry: './src/index.js', + output: { + filename: 'main.js', + path: path.resolve(__dirname, 'public'), + }, + mode: "development", // or production + performance: { // we dont want the wasm blob to generate warnings + hints: false, + maxEntrypointSize: 512000, + maxAssetSize: 512000 + } +}; + +module.exports = clientConfig +``` + +## Running the example + + +```bash +yarn install +yarn start +``` diff --git a/automerge-js/examples/webpack/package.json b/automerge-js/examples/webpack/package.json index fb74fb82..02a9efd8 100644 --- a/automerge-js/examples/webpack/package.json +++ b/automerge-js/examples/webpack/package.json @@ -10,13 +10,13 @@ }, "author": "", "dependencies": { - "automerge-js": "file:automerge-js-0.1.0.tgz", - "automerge-wasm": "file:automerge-wasm-0.1.3.tgz" + "automerge": "2.0.0-alpha.1" }, "devDependencies": { "serve": "^13.0.2", "webpack": "^5.72.1", "webpack-cli": "^4.9.2", + "webpack-dev-server": "^4.11.1", "webpack-node-externals": "^3.0.0" } } diff --git a/automerge-js/examples/webpack/src/index.js b/automerge-js/examples/webpack/src/index.js index 876c1940..5564f442 100644 --- a/automerge-js/examples/webpack/src/index.js +++ b/automerge-js/examples/webpack/src/index.js @@ -1,22 +1,18 @@ 
-import * as Automerge from "automerge-js" -import init from "automerge-wasm" +import * as Automerge from "automerge" // hello world code that will run correctly on web or node -init().then((api) => { - Automerge.use(api) - let doc = Automerge.init() - doc = Automerge.change(doc, (d) => d.hello = "from automerge-js") - const result = JSON.stringify(doc) +let doc = Automerge.init() +doc = Automerge.change(doc, (d) => d.hello = "from automerge-js") +const result = JSON.stringify(doc) - if (typeof document !== 'undefined') { - // browser - const element = document.createElement('div'); - element.innerHTML = JSON.stringify(result) - document.body.appendChild(element); - } else { - // server - console.log("node:", result) - } -}) +if (typeof document !== 'undefined') { + // browser + const element = document.createElement('div'); + element.innerHTML = JSON.stringify(result) + document.body.appendChild(element); +} else { + // server + console.log("node:", result) +} diff --git a/automerge-js/examples/webpack/webpack.config.js b/automerge-js/examples/webpack/webpack.config.js index 3ab0e798..3a6d83ff 100644 --- a/automerge-js/examples/webpack/webpack.config.js +++ b/automerge-js/examples/webpack/webpack.config.js @@ -18,6 +18,7 @@ const serverConfig = { }; const clientConfig = { + experiments: { asyncWebAssembly: true }, target: 'web', entry: './src/index.js', output: { From 4f03cd2a379bfc773389bfa9eeeb27eb69e0da21 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 21 Sep 2022 22:55:13 +0100 Subject: [PATCH 574/730] Add an e2e testing tool for the JS packaging JS packaging is complicated and testing it manually is irritating. Add a tool in `automerge-js/e2e` which stands up a local NPM registry and publishes the various packages to that registry for use in automated and manual tests. 
Update the test script in `scripts/ci/js_tests` to run the tests using this tool --- automerge-js/e2e/.gitignore | 3 + automerge-js/e2e/README.md | 71 ++ automerge-js/e2e/index.ts | 438 +++++++ automerge-js/e2e/package.json | 23 + automerge-js/e2e/tsconfig.json | 6 + automerge-js/e2e/verdaccio.yaml | 25 + automerge-js/e2e/yarn.lock | 2130 +++++++++++++++++++++++++++++++ scripts/ci/js_tests | 21 +- 8 files changed, 2703 insertions(+), 14 deletions(-) create mode 100644 automerge-js/e2e/.gitignore create mode 100644 automerge-js/e2e/README.md create mode 100644 automerge-js/e2e/index.ts create mode 100644 automerge-js/e2e/package.json create mode 100644 automerge-js/e2e/tsconfig.json create mode 100644 automerge-js/e2e/verdaccio.yaml create mode 100644 automerge-js/e2e/yarn.lock diff --git a/automerge-js/e2e/.gitignore b/automerge-js/e2e/.gitignore new file mode 100644 index 00000000..3021843a --- /dev/null +++ b/automerge-js/e2e/.gitignore @@ -0,0 +1,3 @@ +node_modules/ +verdacciodb/ +htpasswd diff --git a/automerge-js/e2e/README.md b/automerge-js/e2e/README.md new file mode 100644 index 00000000..ff87bd60 --- /dev/null +++ b/automerge-js/e2e/README.md @@ -0,0 +1,71 @@ +#End to end testing for javascript packaging + +The network of packages and bundlers we rely on to get the `automerge` package +working is a little complex. We have the `automerge-wasm` package, which the +`automerge` package depends upon, which means that anyone who depends on +`automerge` needs to either a) be using node or b) use a bundler in order to +load the underlying WASM module which is packaged in `automerge-wasm`. + +The various bundlers involved are complicated and capricious and so we need an +easy way of testing that everything is in fact working as expected. To do this +we run a custom NPM registry (namely [Verdaccio](https://verdaccio.org/)) and +build the `automerge-wasm` and `automerge` packages and publish them to this +registry. 
Once we have this registry running we are able to build the example +projects which depend on these packages and check that everything works as +expected. + +## Usage + +First, install everything: + +``` +yarn install +``` + +### Build `automerge-js` + +This builds the `automerge-wasm` package and then runs `yarn build` in the +`automerge-js` project with the `--registry` set to the verdaccio registry. The +end result is that you can run `yarn test` in the resulting `automerge-js` +directory in order to run tests against the current `automerge-wasm`. + +``` +yarn e2e buildjs +``` + +### Build examples + +This either builds or the examples in `automerge-js/examples` or just a subset +of them. Once this is complete you can run the relevant scripts (e.g. `vite dev` +for the Vite example) to check everything works. + +``` +yarn e2e buildexamples +``` + +Or, to just build the webpack example + +``` +yarn e2e buildexamples -e webpack +``` + +### Run Registry + +If you're experimenting with a project which is not in the `examples` folder +you'll need a running registry. `run-registry` builds and publishes +`automerge-js` and `automerge-wasm` and then runs the registry at +`localhost:4873`. + +``` +yarn e2e run-registry +``` + +You can now run `yarn install --registry http://localhost:4873` to experiment +with the built packages. + + +## Using the `dev` build of `automerge-wasm` + +All the commands above take a `-p` flag which can be either `release` or +`debug`. The `debug` builds with additional debug symbols which makes errors +less cryptic. 
diff --git a/automerge-js/e2e/index.ts b/automerge-js/e2e/index.ts new file mode 100644 index 00000000..90205071 --- /dev/null +++ b/automerge-js/e2e/index.ts @@ -0,0 +1,438 @@ +import {once} from "events" +import {setTimeout} from "timers/promises" +import {spawn, ChildProcess} from "child_process" +import * as child_process from "child_process" +import {command, subcommands, run, array, multioption, option, Type} from "cmd-ts" +import * as path from "path" +import * as fsPromises from "fs/promises" +import fetch from "node-fetch" + +const VERDACCIO_DB_PATH = path.normalize(`${__dirname}/verdacciodb`) +const VERDACCIO_CONFIG_PATH = path.normalize(`${__dirname}/verdaccio.yaml`) +const AUTOMERGE_WASM_PATH = path.normalize(`${__dirname}/../../automerge-wasm`) +const AUTOMERGE_JS_PATH = path.normalize(`${__dirname}/..`) +const EXAMPLES_DIR = path.normalize(path.join(__dirname, "../", "examples")) + +// The different example projects in "../examples" +type Example = "webpack" | "vite" | "create-react-app" + +// Type to parse strings to `Example` so the types line up for the `buildExamples` commmand +const ReadExample: Type = { + async from(str) { + if (str === "webpack") { + return "webpack" + } else if (str === "vite") { + return "vite" + } else if (str === "create-react-app") { + return "create-react-app" + } else { + throw new Error(`Unknown example type ${str}`) + } + } +} + +type Profile = "dev" | "release" + +const ReadProfile: Type = { + async from(str) { + if (str === "dev") { + return "dev" + } else if (str === "release") { + return "release" + } else { + throw new Error(`Unknown profile ${str}`) + } + } +} + +const buildjs = command({ + name: "buildjs", + args: { + profile: option({ + type: ReadProfile, + long: "profile", + short: "p", + defaultValue: () => "dev" as Profile + }) + }, + handler: ({profile}) => { + console.log("building js") + withPublishedWasm(profile, async (registryUrl: string) => { + await buildAndPublishAutomergeJs(registryUrl) + }) + } 
+}) + +const buildWasm = command({ + name: "buildwasm", + args: { + profile: option({ + type: ReadProfile, + long: "profile", + short: "p", + defaultValue: () => "dev" as Profile + }) + }, + handler: ({profile}) => { + console.log("building automerge-wasm") + withRegistry( + buildAutomergeWasm(profile), + ) + } +}) + +const buildexamples = command({ + name: "buildexamples", + args: { + examples: multioption({ + long: "example", + short: "e", + type: array(ReadExample), + }), + profile: option({ + type: ReadProfile, + long: "profile", + short: "p", + defaultValue: () => "dev" as Profile + }) + }, + handler: ({examples, profile}) => { + if (examples.length === 0) { + examples = ["webpack", "vite", "create-react-app"] + } + buildExamples(examples, profile) + } +}) + + +const runRegistry = command({ + name: "run-registry", + args: { + profile: option({ + type: ReadProfile, + long: "profile", + short: "p", + defaultValue: () => "dev" as Profile + }) + }, + handler: ({profile}) => { + withPublishedWasm(profile, async (registryUrl: string) => { + await buildAndPublishAutomergeJs(registryUrl) + console.log("\n************************") + console.log(` Verdaccio NPM registry is running at ${registryUrl}`) + console.log(" press CTRL-C to exit ") + console.log("************************") + await once(process, "SIGINT") + }).catch(e => { + console.error(`Failed: ${e}`) + }) + } +}) + + +const app = subcommands({ + name: "e2e", + cmds: {buildjs, buildexamples, buildwasm: buildWasm, "run-registry": runRegistry} +}) + +run(app, process.argv.slice(2)) + +async function buildExamples(examples: Array, profile: Profile) { + await withPublishedWasm(profile, async (registryUrl) => { + printHeader("building and publishing automerge") + await buildAndPublishAutomergeJs(registryUrl) + for (const example of examples) { + printHeader(`building ${example} example`) + if (example === "webpack") { + const projectPath = path.join(EXAMPLES_DIR, example) + await 
removeExistingAutomerge(projectPath) + await fsPromises.rm(path.join(projectPath, "yarn.lock"), {force: true}) + await spawnAndWait("yarn", ["--cwd", projectPath, "install", "--registry", registryUrl, "--check-files"], {stdio: "inherit"}) + await spawnAndWait("yarn", ["--cwd", projectPath, "build"], {stdio: "inherit"}) + } else if (example === "vite") { + const projectPath = path.join(EXAMPLES_DIR, example) + await removeExistingAutomerge(projectPath) + await fsPromises.rm(path.join(projectPath, "yarn.lock"), {force: true}) + await spawnAndWait("yarn", ["--cwd", projectPath, "install", "--registry", registryUrl, "--check-files"], {stdio: "inherit"}) + await spawnAndWait("yarn", ["--cwd", projectPath, "build"], {stdio: "inherit"}) + } else if (example === "create-react-app") { + const projectPath = path.join(EXAMPLES_DIR, example) + await removeExistingAutomerge(projectPath) + await fsPromises.rm(path.join(projectPath, "yarn.lock"), {force: true}) + await spawnAndWait("yarn", ["--cwd", projectPath, "install", "--registry", registryUrl, "--check-files"], {stdio: "inherit"}) + await spawnAndWait("yarn", ["--cwd", projectPath, "build"], {stdio: "inherit"}) + } + } + }) +} + +type WithRegistryAction = (registryUrl: string) => Promise + +async function withRegistry(action: WithRegistryAction, ...actions: Array) { + // First, start verdaccio + printHeader("Starting verdaccio NPM server") + const verd = await VerdaccioProcess.start() + actions.unshift(action) + + for (const action of actions) { + try { + type Step = "verd-died" | "action-completed" + const verdDied: () => Promise = async () => { + await verd.died() + return "verd-died" + } + const actionComplete: () => Promise = async () => { + await action("http://localhost:4873") + return "action-completed" + } + const result = await Promise.race([verdDied(), actionComplete()]) + if (result === "verd-died") { + throw new Error("verdaccio unexpectedly exited") + } + } catch(e) { + await verd.kill() + throw e + } + } + 
await verd.kill() +} + +async function withPublishedWasm(profile: Profile, action: WithRegistryAction) { + await withRegistry( + buildAutomergeWasm(profile), + publishAutomergeWasm, + action + ) +} + +function buildAutomergeWasm(profile: Profile): WithRegistryAction { + return async (registryUrl: string) => { + printHeader("building automerge-wasm") + await spawnAndWait("yarn", ["--cwd", AUTOMERGE_WASM_PATH, "--registry", registryUrl, "install"], {stdio: "inherit"}) + const cmd = profile === "release" ? "release" : "debug" + await spawnAndWait("yarn", ["--cwd", AUTOMERGE_WASM_PATH, cmd], {stdio: "inherit"}) + } +} + +async function publishAutomergeWasm(registryUrl: string) { + printHeader("Publishing automerge-wasm to verdaccio") + await fsPromises.rm(path.join(VERDACCIO_DB_PATH, "automerge-wasm"), { recursive: true, force: true} ) + await yarnPublish(registryUrl, AUTOMERGE_WASM_PATH) +} + +async function buildAndPublishAutomergeJs(registryUrl: string) { + // Build the js package + printHeader("Building automerge") + await removeExistingAutomerge(AUTOMERGE_JS_PATH) + await removeFromVerdaccio("automerge") + await fsPromises.rm(path.join(AUTOMERGE_JS_PATH, "yarn.lock"), {force: true}) + await spawnAndWait("yarn", ["--cwd", AUTOMERGE_JS_PATH, "install", "--registry", registryUrl, "--check-files"], {stdio: "inherit"}) + await spawnAndWait("yarn", ["--cwd", AUTOMERGE_JS_PATH, "build"], {stdio: "inherit"}) + await yarnPublish(registryUrl, AUTOMERGE_JS_PATH) +} + +/** + * A running verdaccio process + * + */ +class VerdaccioProcess { + child: ChildProcess + stdout: Array + stderr: Array + + constructor(child: ChildProcess) { + this.child = child + + // Collect stdout/stderr otherwise the subprocess gets blocked writing + this.stdout = [] + this.stderr = [] + this.child.stdout && this.child.stdout.on("data", (data) => this.stdout.push(data)) + this.child.stderr && this.child.stderr.on("data", (data) => this.stderr.push(data)) + + const errCallback = (e: any) => { + 
console.error("!!!!!!!!!ERROR IN VERDACCIO PROCESS!!!!!!!!!") + console.error(" ", e) + if (this.stdout.length > 0) { + console.log("\n**Verdaccio stdout**") + const stdout = Buffer.concat(this.stdout) + process.stdout.write(stdout) + } + + if (this.stderr.length > 0) { + console.log("\n**Verdaccio stderr**") + const stdout = Buffer.concat(this.stderr) + process.stdout.write(stdout) + } + process.exit(-1) + } + this.child.on("error", errCallback) + } + + /** + * Spawn a verdaccio process and wait for it to respond succesfully to http requests + * + * The returned `VerdaccioProcess` can be used to control the subprocess + */ + static async start() { + const child = spawn("yarn", ["verdaccio", "--config", VERDACCIO_CONFIG_PATH], {env: { ...process.env, FORCE_COLOR: "true"}}) + + // Forward stdout and stderr whilst waiting for startup to complete + const stdoutCallback = (data: Buffer) => process.stdout.write(data) + const stderrCallback = (data: Buffer) => process.stderr.write(data) + child.stdout && child.stdout.on("data", stdoutCallback) + child.stderr && child.stderr.on("data", stderrCallback) + + const healthCheck = async () => { + while (true) { + try { + const resp = await fetch("http://localhost:4873") + if (resp.status === 200) { + return + } else { + console.log(`Healthcheck failed: bad status ${resp.status}`) + } + } catch (e) { + console.error(`Healthcheck failed: ${e}`) + } + await setTimeout(500) + } + } + await withTimeout(healthCheck(), 10000) + + // Stop forwarding stdout/stderr + child.stdout && child.stdout.off("data", stdoutCallback) + child.stderr && child.stderr.off("data", stderrCallback) + return new VerdaccioProcess(child) + } + + /** + * Send a SIGKILL to the process and wait for it to stop + */ + async kill() { + this.child.stdout && this.child.stdout.destroy() + this.child.stderr && this.child.stderr.destroy() + this.child.kill(); + try { + await withTimeout(once(this.child, "close"), 500) + } catch (e) { + console.error("unable to kill 
verdaccio subprocess, trying -9") + this.child.kill(9) + await withTimeout(once(this.child, "close"), 500) + } + } + + /** + * A promise which resolves if the subprocess exits for some reason + */ + async died(): Promise { + const [exit, _signal] = await once(this.child, "exit") + return exit + } +} + +function printHeader(header: string) { + console.log("\n===============================") + console.log(` ${header}`) + console.log("===============================") +} + +/** + * Removes the automerge, automerge-wasm, and automerge-js packages from + * `$packageDir/node_modules` + * + * This is useful to force refreshing a package by use in combination with + * `yarn install --check-files`, which checks if a package is present in + * `node_modules` and if it is not forces a reinstall. + * + * @param packageDir - The directory containing the package.json of the target project + */ +async function removeExistingAutomerge(packageDir: string) { + await fsPromises.rm(path.join(packageDir, "node_modules", "automerge-wasm"), {recursive: true, force: true}) + await fsPromises.rm(path.join(packageDir, "node_modules", "automerge"), {recursive: true, force: true}) +} + +type SpawnResult = { + stdout?: Buffer, + stderr?: Buffer, +} + +async function spawnAndWait(cmd: string, args: Array, options: child_process.SpawnOptions): Promise { + const child = spawn(cmd, args, options) + let stdout = null + let stderr = null + if (child.stdout) { + stdout = [] + child.stdout.on("data", data => stdout.push(data)) + } + if (child.stderr) { + stderr = [] + child.stderr.on("data", data => stderr.push(data)) + } + + const [exit, _signal] = await once(child, "exit") + if (exit && exit !== 0) { + throw new Error("nonzero exit code") + } + return { + stderr: stderr? Buffer.concat(stderr) : null, + stdout: stdout ? Buffer.concat(stdout) : null + } +} + +/** + * Remove a package from the verdaccio registry. 
This is necessary because we + * often want to _replace_ a version rather than update the version number. + * Obviously this is very bad and verboten in normal circumastances, but the + * whole point here is to be able to test the entire packaging story so it's + * okay I Promise. + */ +async function removeFromVerdaccio(packageName: string) { + await fsPromises.rm(path.join(VERDACCIO_DB_PATH, packageName), {force: true, recursive: true}) +} + +async function yarnPublish(registryUrl: string, cwd: string) { + await spawnAndWait( + "yarn", + [ + "--registry", + registryUrl, + "--cwd", + cwd, + "publish", + "--non-interactive", + ], + { + stdio: "inherit", + env: { + ...process.env, + FORCE_COLOR: "true", + // This is a fake token, it just has to be the right format + npm_config__auth: "//localhost:4873/:_authToken=Gp2Mgxm4faa/7wp0dMSuRA==" + } + }) +} + +/** + * Wait for a given delay to resolve a promise, throwing an error if the + * promise doesn't resolve with the timeout + * + * @param promise - the promise to wait for @param timeout - the delay in + * milliseconds to wait before throwing + */ +async function withTimeout(promise: Promise, timeout: number): Promise { + type Step = "timed-out" | {result: T} + const timedOut: () => Promise = async () => { + await setTimeout(timeout) + return "timed-out" + } + const succeeded: () => Promise = async () => { + const result = await promise + return {result} + } + const result = await Promise.race([timedOut(), succeeded()]) + if (result === "timed-out") { + throw new Error("timed out") + } else { + return result.result + } +} diff --git a/automerge-js/e2e/package.json b/automerge-js/e2e/package.json new file mode 100644 index 00000000..7bb80852 --- /dev/null +++ b/automerge-js/e2e/package.json @@ -0,0 +1,23 @@ +{ + "name": "e2e", + "version": "0.0.1", + "description": "", + "main": "index.js", + "scripts": { + "e2e": "ts-node index.ts" + }, + "author": "", + "license": "ISC", + "dependencies": { + "@types/node": 
"^18.7.18", + "cmd-ts": "^0.11.0", + "node-fetch": "^2", + "ts-node": "^10.9.1", + "typed-emitter": "^2.1.0", + "typescript": "^4.8.3", + "verdaccio": "5" + }, + "devDependencies": { + "@types/node-fetch": "2.x" + } +} diff --git a/automerge-js/e2e/tsconfig.json b/automerge-js/e2e/tsconfig.json new file mode 100644 index 00000000..9f0e2e76 --- /dev/null +++ b/automerge-js/e2e/tsconfig.json @@ -0,0 +1,6 @@ +{ + "compilerOptions": { + "types": ["node"] + }, + "module": "nodenext" +} diff --git a/automerge-js/e2e/verdaccio.yaml b/automerge-js/e2e/verdaccio.yaml new file mode 100644 index 00000000..bb2e2e87 --- /dev/null +++ b/automerge-js/e2e/verdaccio.yaml @@ -0,0 +1,25 @@ +storage: "./verdacciodb" +auth: + htpasswd: + file: ./htpasswd +publish: + allow_offline: true +logs: {type: stdout, format: pretty, level: info} +packages: + "automerge-wasm": + access: "$all" + publish: "$all" + "automerge-js": + access: "$all" + publish: "$all" + "*": + access: "$all" + publish: "$all" + proxy: npmjs + "@*/*": + access: "$all" + publish: "$all" + proxy: npmjs +uplinks: + npmjs: + url: https://registry.npmjs.org/ diff --git a/automerge-js/e2e/yarn.lock b/automerge-js/e2e/yarn.lock new file mode 100644 index 00000000..46e2abf2 --- /dev/null +++ b/automerge-js/e2e/yarn.lock @@ -0,0 +1,2130 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@cspotcode/source-map-support@^0.8.0": + version "0.8.1" + resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" + integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== + dependencies: + "@jridgewell/trace-mapping" "0.3.9" + +"@jridgewell/resolve-uri@^3.0.3": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/trace-mapping@0.3.9": + version "0.3.9" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" + integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@tootallnate/once@1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" + integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== + +"@tsconfig/node10@^1.0.7": + version "1.0.9" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" + integrity sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA== + +"@tsconfig/node12@^1.0.7": + version "1.0.11" + resolved 
"https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== + +"@tsconfig/node14@^1.0.0": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== + +"@tsconfig/node16@^1.0.2": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.3.tgz#472eaab5f15c1ffdd7f8628bd4c4f753995ec79e" + integrity sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ== + +"@types/node-fetch@2.x": + version "2.6.2" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.2.tgz#d1a9c5fd049d9415dce61571557104dec3ec81da" + integrity sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A== + dependencies: + "@types/node" "*" + form-data "^3.0.0" + +"@types/node@*", "@types/node@^18.7.18": + version "18.7.23" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.7.23.tgz#75c580983846181ebe5f4abc40fe9dfb2d65665f" + integrity sha512-DWNcCHolDq0ZKGizjx2DZjR/PqsYwAcYUJmfMWqtVU2MBMG5Mo+xFZrhGId5r/O5HOuMPyQEcM6KUBp5lBZZBg== + +"@verdaccio/commons-api@10.2.0": + version "10.2.0" + resolved "https://registry.yarnpkg.com/@verdaccio/commons-api/-/commons-api-10.2.0.tgz#3b684c31749837b0574375bb2e10644ecea9fcca" + integrity sha512-F/YZANu4DmpcEV0jronzI7v2fGVWkQ5Mwi+bVmV+ACJ+EzR0c9Jbhtbe5QyLUuzR97t8R5E/Xe53O0cc2LukdQ== + dependencies: + http-errors "2.0.0" + http-status-codes "2.2.0" + +"@verdaccio/file-locking@10.3.0": + version "10.3.0" + resolved "https://registry.yarnpkg.com/@verdaccio/file-locking/-/file-locking-10.3.0.tgz#a4342665c549163817c267bfa451e32ed3009767" + integrity 
sha512-FE5D5H4wy/nhgR/d2J5e1Na9kScj2wMjlLPBHz7XF4XZAVSRdm45+kL3ZmrfA6b2HTADP/uH7H05/cnAYW8bhw== + dependencies: + lockfile "1.0.4" + +"@verdaccio/local-storage@10.3.1": + version "10.3.1" + resolved "https://registry.yarnpkg.com/@verdaccio/local-storage/-/local-storage-10.3.1.tgz#8cbdc6390a0eb532577ae217729cb0a4e062f299" + integrity sha512-f3oArjXPOAwUAA2dsBhfL/rSouqJ2sfml8k97RtnBPKOzisb28bgyAQW0mqwQvN4MTK5S/2xudmobFpvJAIatg== + dependencies: + "@verdaccio/commons-api" "10.2.0" + "@verdaccio/file-locking" "10.3.0" + "@verdaccio/streams" "10.2.0" + async "3.2.4" + debug "4.3.4" + lodash "4.17.21" + lowdb "1.0.0" + mkdirp "1.0.4" + +"@verdaccio/readme@10.4.1": + version "10.4.1" + resolved "https://registry.yarnpkg.com/@verdaccio/readme/-/readme-10.4.1.tgz#c568d158c36ca7dd742b1abef890383918f621b2" + integrity sha512-OZ6R+HF2bIU3WFFdPxgUgyglaIfZzGSqyUfM2m1TFNfDCK84qJvRIgQJ1HG/82KVOpGuz/nxVyw2ZyEZDkP1vA== + dependencies: + dompurify "2.3.9" + jsdom "16.7.0" + marked "4.0.18" + +"@verdaccio/streams@10.2.0": + version "10.2.0" + resolved "https://registry.yarnpkg.com/@verdaccio/streams/-/streams-10.2.0.tgz#e01d2bfdcfe8aa2389f31bc6b72a602628bd025b" + integrity sha512-FaIzCnDg0x0Js5kSQn1Le3YzDHl7XxrJ0QdIw5LrDUmLsH3VXNi4/NMlSHnw5RiTTMs4UbEf98V3RJRB8exqJA== + +"@verdaccio/ui-theme@6.0.0-6-next.28": + version "6.0.0-6-next.28" + resolved "https://registry.yarnpkg.com/@verdaccio/ui-theme/-/ui-theme-6.0.0-6-next.28.tgz#bf8ff0e90f3d292741440c7e6ab6744b97d96a98" + integrity sha512-1sJ28aVGMiRJrSz0e8f4t+IUgt/cyYmuDLhogXHOEjEIIEcfMNyQ5bVYqq03wLVoKWEh5D6gHo1hQnVKQl1L5g== + +JSONStream@1.3.5: + version "1.3.5" + resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0" + integrity sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ== + dependencies: + jsonparse "^1.2.0" + through ">=2.2.7 <3" + +abab@^2.0.3, abab@^2.0.5: + version "2.0.6" + resolved 
"https://registry.yarnpkg.com/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" + integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== + +accepts@~1.3.5, accepts@~1.3.8: + version "1.3.8" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== + dependencies: + mime-types "~2.1.34" + negotiator "0.6.3" + +acorn-globals@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" + integrity sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== + dependencies: + acorn "^7.1.1" + acorn-walk "^7.1.1" + +acorn-walk@^7.1.1: + version "7.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" + integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== + +acorn-walk@^8.1.1: + version "8.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" + integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== + +acorn@^7.1.1: + version "7.4.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +acorn@^8.2.4, acorn@^8.4.1: + version "8.8.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" + integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== + +agent-base@6: + version "6.0.2" + resolved 
"https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== + dependencies: + debug "4" + +ajv@^6.12.3: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +apache-md5@1.1.7: + version "1.1.7" + resolved "https://registry.yarnpkg.com/apache-md5/-/apache-md5-1.1.7.tgz#dcef1802700cc231d60c5e08fd088f2f9b36375a" + integrity sha512-JtHjzZmJxtzfTSjsCyHgPR155HBe5WGyUyHTaEkfy46qhwCFKx1Epm6nAxgUG3WfUZP1dWhGqj9Z2NOBeZ+uBw== + +arg@^4.1.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +array-flatten@1.1.1: + version 
"1.1.1" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== + +asn1@~0.2.3: + version "0.2.6" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d" + integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ== + dependencies: + safer-buffer "~2.1.0" + +assert-plus@1.0.0, assert-plus@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + integrity sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw== + +async@3.2.4: + version "3.2.4" + resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" + integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + +atomic-sleep@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b" + integrity sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ== + +aws-sign2@~0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + integrity sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA== + +aws4@^1.8.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59" + 
integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA== + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +bcrypt-pbkdf@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" + integrity sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w== + dependencies: + tweetnacl "^0.14.3" + +bcryptjs@2.4.3: + version "2.4.3" + resolved "https://registry.yarnpkg.com/bcryptjs/-/bcryptjs-2.4.3.tgz#9ab5627b93e60621ff7cdac5da9733027df1d0cb" + integrity sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ== + +body-parser@1.20.0: + version "1.20.0" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" + integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== + dependencies: + bytes "3.1.2" + content-type "~1.0.4" + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" + iconv-lite "0.4.24" + on-finished "2.4.1" + qs "6.10.3" + raw-body "2.5.1" + type-is "~1.6.18" + unpipe "1.0.0" + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity 
sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +browser-process-hrtime@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" + integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== + +buffer-equal-constant-time@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" + integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA== + +bytes@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" + integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== + +bytes@3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + +call-bind@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +caseless@~0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw== + +chalk@^4.0.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity 
sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +clipanion@3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/clipanion/-/clipanion-3.1.0.tgz#3e217dd6476bb9236638b07eb4673f7309839819" + integrity sha512-v025Hz+IDQ15FpOyK8p02h5bFznMu6rLFsJSyOPR+7WrbSnZ1Ek6pblPukV7K5tC/dsWfncQPIrJ4iUy2PXkbw== + dependencies: + typanion "^3.3.1" + +cmd-ts@^0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/cmd-ts/-/cmd-ts-0.11.0.tgz#80926180f39665e35e321b72439f792a2b63b745" + integrity sha512-6RvjD+f9oGPeWoMS53oavafmQ9qC839PjP3CyvPkAIfqMEXTbrclni7t3fnyVJFNWxuBexnLshcotY0RuNrI8Q== + dependencies: + chalk "^4.0.0" + debug "^4.3.4" + didyoumean "^1.2.2" + strip-ansi "^6.0.0" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +compressible@~2.0.16: + version "2.0.18" + resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" + integrity 
sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== + dependencies: + mime-db ">= 1.43.0 < 2" + +compression@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" + integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== + dependencies: + accepts "~1.3.5" + bytes "3.0.0" + compressible "~2.0.16" + debug "2.6.9" + on-headers "~1.0.2" + safe-buffer "5.1.2" + vary "~1.1.2" + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +content-disposition@0.5.4: + version "0.5.4" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" + +content-type@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + +cookie-signature@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== + +cookie@0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== + +cookies@0.8.0: + version "0.8.0" + 
resolved "https://registry.yarnpkg.com/cookies/-/cookies-0.8.0.tgz#1293ce4b391740a8406e3c9870e828c4b54f3f90" + integrity sha512-8aPsApQfebXnuI+537McwYsDtjVxGm8gTIzQI3FDW6t5t/DAhERxtnbEPN/8RX+uZthoz4eCOgloXaE5cYyNow== + dependencies: + depd "~2.0.0" + keygrip "~1.1.0" + +core-util-is@1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ== + +cors@2.8.5: + version "2.8.5" + resolved "https://registry.yarnpkg.com/cors/-/cors-2.8.5.tgz#eac11da51592dd86b9f06f6e7ac293b3df875d29" + integrity sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g== + dependencies: + object-assign "^4" + vary "^1" + +create-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== + +cssom@^0.4.4: + version "0.4.4" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" + integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== + +cssom@~0.3.6: + version "0.3.8" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" + integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== + +cssstyle@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" + integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== + dependencies: + cssom "~0.3.6" + +d@1, d@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" + integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== + dependencies: + es5-ext "^0.10.50" + type "^1.0.1" + +dashdash@^1.12.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + integrity sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g== + dependencies: + assert-plus "^1.0.0" + +data-urls@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" + integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== + dependencies: + abab "^2.0.3" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.0.0" + +dayjs@1.11.5: + version "1.11.5" + resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.5.tgz#00e8cc627f231f9499c19b38af49f56dc0ac5e93" + integrity sha512-CAdX5Q3YW3Gclyo5Vpqkgpj8fSdLQcRuzfX6mC6Phy0nfJ0eGYOeS7m4mt2plDWLAtA4TqTakvbboHvUxfe4iA== + +debug@2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@4, debug@4.3.4, debug@^4.3.3, debug@^4.3.4: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +debug@^3.2.7: + version "3.2.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + 
dependencies: + ms "^2.1.1" + +decimal.js@^10.2.1: + version "10.4.1" + resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.4.1.tgz#be75eeac4a2281aace80c1a8753587c27ef053e7" + integrity sha512-F29o+vci4DodHYT9UrR5IEbfBw9pE5eSapIJdTqXK5+6hq+t8VRxwQyKlW2i+KDKFkkJQRvFyI/QXD83h8LyQw== + +deep-is@~0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + +depd@2.0.0, depd@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + +destroy@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== + +didyoumean@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" + integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== + +diff@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + +domexception@^2.0.1: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" + integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== + dependencies: + webidl-conversions "^5.0.0" + +dompurify@2.3.9: + version "2.3.9" + resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.3.9.tgz#a4be5e7278338d6db09922dffcf6182cd099d70a" + integrity sha512-3zOnuTwup4lPV/GfGS6UzG4ub9nhSYagR/5tB3AvDEwqyy5dtyCM2dVjwGDCnrPerXifBKTYh/UWCGKK7ydhhw== + +ecc-jsbn@~0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" + integrity sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw== + dependencies: + jsbn "~0.1.0" + safer-buffer "^2.1.0" + +ecdsa-sig-formatter@1.0.11: + version "1.0.11" + resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" + integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== + dependencies: + safe-buffer "^5.0.1" + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== + +encodeurl@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== + +envinfo@7.8.1: + version "7.8.1" + resolved "https://registry.yarnpkg.com/envinfo/-/envinfo-7.8.1.tgz#06377e3e5f4d379fea7ac592d5ad8927e0c4d475" + integrity sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw== + +es5-ext@^0.10.35, es5-ext@^0.10.46, 
es5-ext@^0.10.50, es5-ext@^0.10.53, es5-ext@~0.10.14, es5-ext@~0.10.2, es5-ext@~0.10.46: + version "0.10.62" + resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.62.tgz#5e6adc19a6da524bf3d1e02bbc8960e5eb49a9a5" + integrity sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA== + dependencies: + es6-iterator "^2.0.3" + es6-symbol "^3.1.3" + next-tick "^1.1.0" + +es6-iterator@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" + integrity sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g== + dependencies: + d "1" + es5-ext "^0.10.35" + es6-symbol "^3.1.1" + +es6-symbol@^3.1.1, es6-symbol@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" + integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== + dependencies: + d "^1.0.1" + ext "^1.1.2" + +es6-weak-map@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.3.tgz#b6da1f16cc2cc0d9be43e6bdbfc5e7dfcdf31d53" + integrity sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA== + dependencies: + d "1" + es5-ext "^0.10.46" + es6-iterator "^2.0.3" + es6-symbol "^3.1.1" + +escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== + +escodegen@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" + integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== + dependencies: + esprima 
"^4.0.1" + estraverse "^5.2.0" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.6.1" + +eslint-import-resolver-node@0.3.6: + version "0.3.6" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" + integrity sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== + dependencies: + debug "^3.2.7" + resolve "^1.20.0" + +esprima@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +estraverse@^5.2.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +etag@~1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== + +event-emitter@^0.3.5: + version "0.3.5" + resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" + integrity sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA== + dependencies: + d "1" + es5-ext "~0.10.14" + +express-rate-limit@5.5.1: + version "5.5.1" + resolved 
"https://registry.yarnpkg.com/express-rate-limit/-/express-rate-limit-5.5.1.tgz#110c23f6a65dfa96ab468eda95e71697bc6987a2" + integrity sha512-MTjE2eIbHv5DyfuFz4zLYWxpqVhEhkTiwFGuB74Q9CSou2WHO52nlE5y3Zlg6SIsiYUIPj6ifFxnkPz6O3sIUg== + +express@4.18.1: + version "4.18.1" + resolved "https://registry.yarnpkg.com/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" + integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.20.0" + content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.5.0" + cookie-signature "1.0.6" + debug "2.6.9" + depd "2.0.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.2.0" + fresh "0.5.2" + http-errors "2.0.0" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "2.4.1" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.7" + qs "6.10.3" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.18.0" + serve-static "1.15.0" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +ext@^1.1.2: + version "1.7.0" + resolved "https://registry.yarnpkg.com/ext/-/ext-1.7.0.tgz#0ea4383c0103d60e70be99e9a7f11027a33c4f5f" + integrity sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw== + dependencies: + type "^2.7.2" + +extend@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + +extsprintf@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + integrity sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g== + +extsprintf@^1.2.0: + version "1.4.1" + 
resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.1.tgz#8d172c064867f235c0c84a596806d279bf4bcc07" + integrity sha512-Wrk35e8ydCKDj/ArClo1VrPVmN8zph5V4AtHwIuHhvMXsKf73UT3BOD+azBIW+3wOJ4FhEH7zyaJCFvChjYvMA== + +fast-deep-equal@^3.1.1: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@~2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== + +fast-redact@^3.0.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/fast-redact/-/fast-redact-3.1.2.tgz#d58e69e9084ce9fa4c1a6fa98a3e1ecf5d7839aa" + integrity sha512-+0em+Iya9fKGfEQGcd62Yv6onjBmmhV1uh86XVfOU8VwAe6kaFdQCWI9s0/Nnugx5Vd9tdbZ7e6gE2tR9dzXdw== + +fast-safe-stringify@2.1.1, fast-safe-stringify@^2.0.8: + version "2.1.1" + resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884" + integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA== + +finalhandler@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" + integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== + dependencies: + debug "2.6.9" + encodeurl 
"~1.0.2" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + +flatstr@^1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/flatstr/-/flatstr-1.0.12.tgz#c2ba6a08173edbb6c9640e3055b95e287ceb5931" + integrity sha512-4zPxDyhCyiN2wIAtSLI6gc82/EjqZc1onI4Mz/l0pWrAlsSfYH/2ZIcU+e3oA2wDwbzIWNKwa23F8rh6+DRWkw== + +forever-agent@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + integrity sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw== + +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +form-data@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" + integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.6" + mime-types "^2.1.12" + +forwarded@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== + +function-bind@^1.1.1: + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +get-intrinsic@^1.0.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" + integrity sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.3" + +getpass@^0.1.1: + version "0.1.7" + resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + integrity sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng== + dependencies: + assert-plus "^1.0.0" + +glob@^6.0.1: + version "6.0.4" + resolved "https://registry.yarnpkg.com/glob/-/glob-6.0.4.tgz#0f08860f6a155127b2fadd4f9ce24b1aab6e4d22" + integrity sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A== + dependencies: + inflight "^1.0.4" + inherits "2" + minimatch "2 || 3" + once "^1.3.0" + path-is-absolute "^1.0.0" + +graceful-fs@^4.1.3: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +handlebars@4.7.7: + version "4.7.7" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" + integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== + dependencies: + minimist "^1.2.5" + neo-async "^2.6.0" + source-map "^0.6.1" + wordwrap "^1.0.0" + optionalDependencies: + uglify-js "^3.1.4" + +har-schema@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" + integrity sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q== + +har-validator@~5.1.0: + version "5.1.5" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" + integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== + dependencies: + ajv "^6.12.3" + har-schema "^2.0.0" + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +html-encoding-sniffer@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" + integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== + dependencies: + whatwg-encoding "^1.0.5" + +http-errors@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + dependencies: + depd "2.0.0" + 
inherits "2.0.4" + setprototypeof "1.2.0" + statuses "2.0.1" + toidentifier "1.0.1" + +http-proxy-agent@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" + integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== + dependencies: + "@tootallnate/once" "1" + agent-base "6" + debug "4" + +http-signature@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" + integrity sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ== + dependencies: + assert-plus "^1.0.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + +http-status-codes@2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/http-status-codes/-/http-status-codes-2.2.0.tgz#bb2efe63d941dfc2be18e15f703da525169622be" + integrity sha512-feERVo9iWxvnejp3SEfm/+oNG517npqL2/PIA8ORjyOZjGC7TwCRQsZylciLS64i6pJ0wRYz3rkXLRwbtFa8Ng== + +https-proxy-agent@5.0.1, https-proxy-agent@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" + integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== + dependencies: + agent-base "6" + debug "4" + +iconv-lite@0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity 
sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +ipaddr.js@1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +is-core-module@^2.9.0: + version "2.10.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.10.0.tgz#9012ede0a91c69587e647514e1d5277019e728ed" + integrity sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg== + dependencies: + has "^1.0.3" + +is-potential-custom-element-name@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" + integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== + +is-promise@^2.1.0, is-promise@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" + integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== + +is-typedarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== + +isstream@~0.1.2: + version "0.1.2" + resolved 
"https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + integrity sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g== + +js-yaml@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +jsbn@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + integrity sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg== + +jsdom@16.7.0: + version "16.7.0" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" + integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== + dependencies: + abab "^2.0.5" + acorn "^8.2.4" + acorn-globals "^6.0.0" + cssom "^0.4.4" + cssstyle "^2.3.0" + data-urls "^2.0.0" + decimal.js "^10.2.1" + domexception "^2.0.1" + escodegen "^2.0.0" + form-data "^3.0.0" + html-encoding-sniffer "^2.0.1" + http-proxy-agent "^4.0.1" + https-proxy-agent "^5.0.0" + is-potential-custom-element-name "^1.0.1" + nwsapi "^2.2.0" + parse5 "6.0.1" + saxes "^5.0.1" + symbol-tree "^3.2.4" + tough-cookie "^4.0.0" + w3c-hr-time "^1.0.2" + w3c-xmlserializer "^2.0.0" + webidl-conversions "^6.1.0" + whatwg-encoding "^1.0.5" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.5.0" + ws "^7.4.6" + xml-name-validator "^3.0.0" + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema@0.4.0: + version "0.4.0" + 
resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== + +json-stringify-safe@~5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + integrity sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA== + +jsonparse@^1.2.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" + integrity sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg== + +jsonwebtoken@8.5.1: + version "8.5.1" + resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d" + integrity sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w== + dependencies: + jws "^3.2.2" + lodash.includes "^4.3.0" + lodash.isboolean "^3.0.3" + lodash.isinteger "^4.0.4" + lodash.isnumber "^3.0.3" + lodash.isplainobject "^4.0.6" + lodash.isstring "^4.0.1" + lodash.once "^4.0.0" + ms "^2.1.1" + semver "^5.6.0" + +jsprim@^1.2.2: + version "1.4.2" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.2.tgz#712c65533a15c878ba59e9ed5f0e26d5b77c5feb" + integrity sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw== + dependencies: + assert-plus "1.0.0" + extsprintf "1.3.0" + json-schema "0.4.0" + verror "1.10.0" + +jwa@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a" + integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA== + dependencies: + buffer-equal-constant-time "1.0.1" + ecdsa-sig-formatter "1.0.11" + safe-buffer 
"^5.0.1" + +jws@^3.2.2: + version "3.2.2" + resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" + integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== + dependencies: + jwa "^1.4.1" + safe-buffer "^5.0.1" + +keygrip@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/keygrip/-/keygrip-1.1.0.tgz#871b1681d5e159c62a445b0c74b615e0917e7226" + integrity sha512-iYSchDJ+liQ8iwbSI2QqsQOvqv58eJCEanyJPJi+Khyu8smkcKSFUCbPwzFcL7YVtZ6eONjqRX/38caJ7QjRAQ== + dependencies: + tsscmp "1.0.6" + +kleur@4.1.5: + version "4.1.5" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.1.5.tgz#95106101795f7050c6c650f350c683febddb1780" + integrity sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ== + +levn@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA== + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +lockfile@1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/lockfile/-/lockfile-1.0.4.tgz#07f819d25ae48f87e538e6578b6964a4981a5609" + integrity sha512-cvbTwETRfsFh4nHsL1eGWapU1XFi5Ot9E85sWAwia7Y7EgB7vfqcZhTKZ+l7hCGxSPoushMv5GKhT5PdLv03WA== + dependencies: + signal-exit "^3.0.2" + +lodash.includes@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" + integrity sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w== + +lodash.isboolean@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6" + integrity 
sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg== + +lodash.isinteger@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343" + integrity sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA== + +lodash.isnumber@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc" + integrity sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw== + +lodash.isplainobject@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" + integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA== + +lodash.isstring@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" + integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw== + +lodash.once@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" + integrity sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg== + +lodash@4, lodash@4.17.21, lodash@^4.7.0: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +lowdb@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/lowdb/-/lowdb-1.0.0.tgz#5243be6b22786ccce30e50c9a33eac36b20c8064" + integrity 
sha512-2+x8esE/Wb9SQ1F9IHaYWfsC9FIecLOPrK4g17FGEayjUWH172H6nwicRovGvSE2CPZouc2MCIqCI7h9d+GftQ== + dependencies: + graceful-fs "^4.1.3" + is-promise "^2.1.0" + lodash "4" + pify "^3.0.0" + steno "^0.4.1" + +lru-cache@7.14.0: + version "7.14.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.14.0.tgz#21be64954a4680e303a09e9468f880b98a0b3c7f" + integrity sha512-EIRtP1GrSJny0dqb50QXRUNBxHJhcpxHC++M5tD7RYbvLLn5KVWKsbyswSSqDuU15UFi3bgTQIY8nhDMeF6aDQ== + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +lru-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3" + integrity sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ== + dependencies: + es5-ext "~0.10.2" + +lunr-mutable-indexes@2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/lunr-mutable-indexes/-/lunr-mutable-indexes-2.3.2.tgz#864253489735d598c5140f3fb75c0a5c8be2e98c" + integrity sha512-Han6cdWAPPFM7C2AigS2Ofl3XjAT0yVMrUixodJEpyg71zCtZ2yzXc3s+suc/OaNt4ca6WJBEzVnEIjxCTwFMw== + dependencies: + lunr ">= 2.3.0 < 2.4.0" + +"lunr@>= 2.3.0 < 2.4.0": + version "2.3.9" + resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.9.tgz#18b123142832337dd6e964df1a5a7707b25d35e1" + integrity sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow== + +make-error@^1.1.1: + version "1.3.6" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== + +marked@4.0.18: + version "4.0.18" + resolved 
"https://registry.yarnpkg.com/marked/-/marked-4.0.18.tgz#cd0ac54b2e5610cfb90e8fd46ccaa8292c9ed569" + integrity sha512-wbLDJ7Zh0sqA0Vdg6aqlbT+yPxqLblpAZh1mK2+AO2twQkPywvvqQNfEPVwSSRjZ7dZcdeVBIAgiO7MMp3Dszw== + +marked@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/marked/-/marked-4.1.0.tgz#3fc6e7485f21c1ca5d6ec4a39de820e146954796" + integrity sha512-+Z6KDjSPa6/723PQYyc1axYZpYYpDnECDaU6hkaf5gqBieBkMKYReL5hteF2QizhlMbgbo8umXl/clZ67+GlsA== + +media-typer@0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== + +memoizee@0.4.15: + version "0.4.15" + resolved "https://registry.yarnpkg.com/memoizee/-/memoizee-0.4.15.tgz#e6f3d2da863f318d02225391829a6c5956555b72" + integrity sha512-UBWmJpLZd5STPm7PMUlOw/TSy972M+z8gcyQ5veOnSDRREz/0bmpyTfKt3/51DhEBqCZQn1udM/5flcSPYhkdQ== + dependencies: + d "^1.0.1" + es5-ext "^0.10.53" + es6-weak-map "^2.0.3" + event-emitter "^0.3.5" + is-promise "^2.2.2" + lru-queue "^0.1.0" + next-tick "^1.1.0" + timers-ext "^0.1.7" + +merge-descriptors@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== + +methods@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== + +mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + 
+mime-types@^2.1.12, mime-types@~2.1.19, mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime@1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mime@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-3.0.0.tgz#b374550dca3a0c18443b0c950a6a58f1931cf7a7" + integrity sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A== + +"minimatch@2 || 3": + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.0.tgz#1717b464f4971b144f6aabe8f2d0b8e4511e09c7" + integrity sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg== + dependencies: + brace-expansion "^2.0.1" + +minimist@^1.2.5, minimist@^1.2.6: + version "1.2.6" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + +mkdirp@1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" + integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== + 
+mkdirp@~0.5.1: + version "0.5.6" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== + dependencies: + minimist "^1.2.6" + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@2.1.3, ms@^2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +mv@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/mv/-/mv-2.1.1.tgz#ae6ce0d6f6d5e0a4f7d893798d03c1ea9559b6a2" + integrity sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg== + dependencies: + mkdirp "~0.5.1" + ncp "~2.0.0" + rimraf "~2.4.0" + +ncp@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ncp/-/ncp-2.0.0.tgz#195a21d6c46e361d2fb1281ba38b91e9df7bdbb3" + integrity sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA== + +negotiator@0.6.3: + version "0.6.3" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + +neo-async@^2.6.0: + version "2.6.2" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity 
sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + +next-tick@1, next-tick@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb" + integrity sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ== + +node-fetch@2.6.7, node-fetch@^2: + version "2.6.7" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" + integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== + dependencies: + whatwg-url "^5.0.0" + +nwsapi@^2.2.0: + version "2.2.2" + resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.2.tgz#e5418863e7905df67d51ec95938d67bf801f0bb0" + integrity sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw== + +oauth-sign@~0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" + integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== + +object-assign@^4: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-inspect@^1.9.0: + version "1.12.2" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" + integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== + +on-finished@2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + integrity 
sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== + dependencies: + ee-first "1.1.1" + +on-headers@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" + integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +optionator@^0.8.1: + version "0.8.3" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" + integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.6" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + word-wrap "~1.2.3" + +parse-ms@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/parse-ms/-/parse-ms-2.1.0.tgz#348565a753d4391fa524029956b172cb7753097d" + integrity sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA== + +parse5@6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" + integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== + +parseurl@~1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-to-regexp@0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== + +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== + +pify@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" + integrity sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg== + +pino-std-serializers@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/pino-std-serializers/-/pino-std-serializers-3.2.0.tgz#b56487c402d882eb96cd67c257868016b61ad671" + integrity sha512-EqX4pwDPrt3MuOAAUBMU0Tk5kR/YcCM5fNPEzgCO2zJ5HfX0vbiH9HbJglnyeQsN96Kznae6MWD47pZB5avTrg== + +pino@6.14.0: + version "6.14.0" + resolved "https://registry.yarnpkg.com/pino/-/pino-6.14.0.tgz#b745ea87a99a6c4c9b374e4f29ca7910d4c69f78" + integrity sha512-iuhEDel3Z3hF9Jfe44DPXR8l07bhjuFY3GMHIXbjnY9XcafbyDDwl2sN2vw2GjMPf5Nkoe+OFao7ffn9SXaKDg== + dependencies: + fast-redact "^3.0.0" + fast-safe-stringify "^2.0.8" + flatstr "^1.0.12" + pino-std-serializers "^3.1.0" + process-warning "^1.0.0" + 
quick-format-unescaped "^4.0.3" + sonic-boom "^1.0.2" + +pkginfo@0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/pkginfo/-/pkginfo-0.4.1.tgz#b5418ef0439de5425fc4995042dced14fb2a84ff" + integrity sha512-8xCNE/aT/EXKenuMDZ+xTVwkT8gsoHN2z/Q29l80u0ppGEXVvsKRzNMbtKhg8LS8k1tJLAHHylf6p4VFmP6XUQ== + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== + +prettier-bytes@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/prettier-bytes/-/prettier-bytes-1.0.4.tgz#994b02aa46f699c50b6257b5faaa7fe2557e62d6" + integrity sha512-dLbWOa4xBn+qeWeIF60qRoB6Pk2jX5P3DIVgOQyMyvBpu931Q+8dXz8X0snJiFkQdohDDLnZQECjzsAj75hgZQ== + +pretty-ms@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/pretty-ms/-/pretty-ms-7.0.1.tgz#7d903eaab281f7d8e03c66f867e239dc32fb73e8" + integrity sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q== + dependencies: + parse-ms "^2.1.0" + +process-warning@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/process-warning/-/process-warning-1.0.0.tgz#980a0b25dc38cd6034181be4b7726d89066b4616" + integrity sha512-du4wfLyj4yCZq1VupnVSZmRsPJsNuxoDQFdCFHLaYiEbFBD7QE0a+I4D7hOxrVnh78QE/YipFAj9lXHiXocV+Q== + +proxy-addr@~2.0.7: + version "2.0.7" + resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + +psl@^1.1.24, psl@^1.1.33: + version "1.9.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity 
sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== + +punycode@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== + +punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +qs@6.10.3: + version "6.10.3" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" + integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== + dependencies: + side-channel "^1.0.4" + +qs@~6.5.2: + version "6.5.3" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.3.tgz#3aeeffc91967ef6e35c0e488ef46fb296ab76aad" + integrity sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA== + +querystringify@^2.1.1: + version "2.2.0" + resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" + integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== + +quick-format-unescaped@^4.0.3: + version "4.0.4" + resolved "https://registry.yarnpkg.com/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz#93ef6dd8d3453cbc7970dd614fad4c5954d6b5a7" + integrity sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg== + +range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity 
sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.5.1: + version "2.5.1" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" + integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.4.24" + unpipe "1.0.0" + +request@2.88.0: + version "2.88.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" + integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== + dependencies: + aws-sign2 "~0.7.0" + aws4 "^1.8.0" + caseless "~0.12.0" + combined-stream "~1.0.6" + extend "~3.0.2" + forever-agent "~0.6.1" + form-data "~2.3.2" + har-validator "~5.1.0" + http-signature "~1.2.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.19" + oauth-sign "~0.9.0" + performance-now "^2.1.0" + qs "~6.5.2" + safe-buffer "^5.1.2" + tough-cookie "~2.4.3" + tunnel-agent "^0.6.0" + uuid "^3.3.2" + +requires-port@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== + +resolve@^1.20.0: + version "1.22.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +rimraf@~2.4.0: + version "2.4.5" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.4.5.tgz#ee710ce5d93a8fdb856fb5ea8ff0e2d75934b2da" + integrity 
sha512-J5xnxTyqaiw06JjMftq7L9ouA448dw/E7dKghkP9WpKNuwmARNNg+Gk8/u5ryb9N/Yo2+z3MCwuqFK/+qPOPfQ== + dependencies: + glob "^6.0.1" + +rxjs@^7.5.2: + version "7.5.7" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.5.7.tgz#2ec0d57fdc89ece220d2e702730ae8f1e49def39" + integrity sha512-z9MzKh/UcOqB3i20H6rtrlaE/CgjLOvheWK/9ILrbhROGTweAi1BaFsTT9FbwZi5Trr1qNRs+MXkhmR06awzQA== + dependencies: + tslib "^2.1.0" + +safe-buffer@5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@^5.1.2: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +saxes@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" + integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== + dependencies: + xmlchars "^2.2.0" + +semver@7.3.7: + version "7.3.7" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" + integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + dependencies: + lru-cache "^6.0.0" + +semver@^5.6.0: + version "5.7.1" + resolved 
"https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" + integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== + +send@0.18.0: + version "0.18.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" + integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== + dependencies: + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "2.0.0" + mime "1.6.0" + ms "2.1.3" + on-finished "2.4.1" + range-parser "~1.2.1" + statuses "2.0.1" + +serve-static@1.15.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" + integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.18.0" + +setprototypeof@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +signal-exit@^3.0.2: + version "3.0.7" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + 
+sonic-boom@^1.0.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/sonic-boom/-/sonic-boom-1.4.1.tgz#d35d6a74076624f12e6f917ade7b9d75e918f53e" + integrity sha512-LRHh/A8tpW7ru89lrlkU4AszXt1dbwSjVWguGrmlxE7tawVmDBlI1PILMkXAxJTwqhgsEeTHzj36D5CmHgQmNg== + dependencies: + atomic-sleep "^1.0.0" + flatstr "^1.0.12" + +source-map@^0.6.1, source-map@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +sshpk@^1.7.0: + version "1.17.0" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.17.0.tgz#578082d92d4fe612b13007496e543fa0fbcbe4c5" + integrity sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ== + dependencies: + asn1 "~0.2.3" + assert-plus "^1.0.0" + bcrypt-pbkdf "^1.0.0" + dashdash "^1.12.0" + ecc-jsbn "~0.1.1" + getpass "^0.1.1" + jsbn "~0.1.0" + safer-buffer "^2.0.2" + tweetnacl "~0.14.0" + +statuses@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +steno@^0.4.1: + version "0.4.4" + resolved "https://registry.yarnpkg.com/steno/-/steno-0.4.4.tgz#071105bdfc286e6615c0403c27e9d7b5dcb855cb" + integrity sha512-EEHMVYHNXFHfGtgjNITnka0aHhiAlo93F7z2/Pwd+g0teG9CnM3JIINM7hVVB5/rhw9voufD7Wukwgtw2uqh6w== + dependencies: + graceful-fs "^4.1.3" + +strip-ansi@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +supports-color@^7.1.0: + version "7.2.0" + resolved 
"https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +symbol-tree@^3.2.4: + version "3.2.4" + resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + +"through@>=2.2.7 <3": + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== + +timers-ext@^0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/timers-ext/-/timers-ext-0.1.7.tgz#6f57ad8578e07a3fb9f91d9387d65647555e25c6" + integrity sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ== + dependencies: + es5-ext "~0.10.46" + next-tick "1" + +toidentifier@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + +tough-cookie@^4.0.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.2.tgz#e53e84b85f24e0b65dd526f46628db6c85f6b874" + integrity sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ== + dependencies: + psl 
"^1.1.33" + punycode "^2.1.1" + universalify "^0.2.0" + url-parse "^1.5.3" + +tough-cookie@~2.4.3: + version "2.4.3" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" + integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== + dependencies: + psl "^1.1.24" + punycode "^1.4.1" + +tr46@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" + integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== + dependencies: + punycode "^2.1.1" + +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== + +ts-node@^10.9.1: + version "10.9.1" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b" + integrity sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw== + dependencies: + "@cspotcode/source-map-support" "^0.8.0" + "@tsconfig/node10" "^1.0.7" + "@tsconfig/node12" "^1.0.7" + "@tsconfig/node14" "^1.0.0" + "@tsconfig/node16" "^1.0.2" + acorn "^8.4.1" + acorn-walk "^8.1.1" + arg "^4.1.0" + create-require "^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + v8-compile-cache-lib "^3.0.1" + yn "3.1.1" + +tslib@^2.1.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" + integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== + +tsscmp@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/tsscmp/-/tsscmp-1.0.6.tgz#85b99583ac3589ec4bfef825b5000aa911d605eb" + integrity 
sha512-LxhtAkPDTkVCMQjt2h6eBVY28KCjikZqZfMcC15YBeNjkgUpdCfBu5HoiOTDu86v6smE8yOjyEktJ8hlbANHQA== + +tunnel-agent@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== + dependencies: + safe-buffer "^5.0.1" + +tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + integrity sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA== + +typanion@^3.3.1: + version "3.12.0" + resolved "https://registry.yarnpkg.com/typanion/-/typanion-3.12.0.tgz#8352830e5cf26ebfc5832da265886c9fb3ebb323" + integrity sha512-o59ZobUBsG+2dHnGVI2shscqqzHdzCOixCU0t8YXLxM2Su42J2ha7hY9V5+6SIBjVsw6aLqrlYznCgQGJN4Kag== + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg== + dependencies: + prelude-ls "~1.1.2" + +type-is@~1.6.18: + version "1.6.18" + resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +type@^1.0.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" + integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== + +type@^2.7.2: + version "2.7.2" + resolved "https://registry.yarnpkg.com/type/-/type-2.7.2.tgz#2376a15a3a28b1efa0f5350dcf72d24df6ef98d0" + integrity 
sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw== + +typed-emitter@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/typed-emitter/-/typed-emitter-2.1.0.tgz#ca78e3d8ef1476f228f548d62e04e3d4d3fd77fb" + integrity sha512-g/KzbYKbH5C2vPkaXGu8DJlHrGKHLsM25Zg9WuC9pMGfuvT+X25tZQWo5fK1BjBm8+UrVE9LDCvaY0CQk+fXDA== + optionalDependencies: + rxjs "^7.5.2" + +typescript@^4.8.3: + version "4.8.4" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.8.4.tgz#c464abca159669597be5f96b8943500b238e60e6" + integrity sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ== + +uglify-js@^3.1.4: + version "3.17.2" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.17.2.tgz#f55f668b9a64b213977ae688703b6bbb7ca861c6" + integrity sha512-bbxglRjsGQMchfvXZNusUcYgiB9Hx2K4AHYXQy2DITZ9Rd+JzhX7+hoocE5Winr7z2oHvPsekkBwXtigvxevXg== + +universalify@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" + integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== + +unix-crypt-td-js@1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/unix-crypt-td-js/-/unix-crypt-td-js-1.1.4.tgz#4912dfad1c8aeb7d20fa0a39e4c31918c1d5d5dd" + integrity sha512-8rMeVYWSIyccIJscb9NdCfZKSRBKYTeVnwmiRYT2ulE3qd1RaDQ0xQDP+rI3ccIWbhu/zuo5cgN8z73belNZgw== + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity 
sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +url-parse@^1.5.3: + version "1.5.10" + resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" + integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== + dependencies: + querystringify "^2.1.1" + requires-port "^1.0.0" + +utils-merge@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== + +uuid@^3.3.2: + version "3.4.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" + integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== + +v8-compile-cache-lib@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" + integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== + +validator@13.7.0: + version "13.7.0" + resolved "https://registry.yarnpkg.com/validator/-/validator-13.7.0.tgz#4f9658ba13ba8f3d82ee881d3516489ea85c0857" + integrity sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw== + +vary@^1, vary@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== + +verdaccio-audit@10.2.2: + version "10.2.2" + resolved "https://registry.yarnpkg.com/verdaccio-audit/-/verdaccio-audit-10.2.2.tgz#254380e57932fda64b45cb739e9c42cc9fb2dfdf" + integrity 
sha512-f2uZlKD7vi0yEB0wN8WOf+eA/3SCyKD9cvK17Hh7Wm8f/bl7k1B3hHOTtUCn/yu85DGsj2pcNzrAfp2wMVgz9Q== + dependencies: + body-parser "1.20.0" + express "4.18.1" + https-proxy-agent "5.0.1" + node-fetch "2.6.7" + +verdaccio-htpasswd@10.5.0: + version "10.5.0" + resolved "https://registry.yarnpkg.com/verdaccio-htpasswd/-/verdaccio-htpasswd-10.5.0.tgz#de9ea2967856af765178b08485dc8e83f544a12c" + integrity sha512-olBsT3uy1TT2ZqmMCJUsMHrztJzoEpa8pxxvYrDZdWnEksl6mHV10lTeLbH9BUwbEheOeKkkdsERqUOs+if0jg== + dependencies: + "@verdaccio/file-locking" "10.3.0" + apache-md5 "1.1.7" + bcryptjs "2.4.3" + http-errors "2.0.0" + unix-crypt-td-js "1.1.4" + +verdaccio@5: + version "5.15.3" + resolved "https://registry.yarnpkg.com/verdaccio/-/verdaccio-5.15.3.tgz#4953471c0130c8e88b3d5562b5c63b38b575ed3d" + integrity sha512-8oEtepXF1oksGVYahi2HS1Yx9u6HD/4ukBDNDfwISmlNp7HVKJL2+kjzmDJWam88BpDNxOBU/LFXWSsEAFKFCQ== + dependencies: + "@verdaccio/commons-api" "10.2.0" + "@verdaccio/local-storage" "10.3.1" + "@verdaccio/readme" "10.4.1" + "@verdaccio/streams" "10.2.0" + "@verdaccio/ui-theme" "6.0.0-6-next.28" + JSONStream "1.3.5" + async "3.2.4" + body-parser "1.20.0" + clipanion "3.1.0" + compression "1.7.4" + cookies "0.8.0" + cors "2.8.5" + dayjs "1.11.5" + debug "^4.3.3" + envinfo "7.8.1" + eslint-import-resolver-node "0.3.6" + express "4.18.1" + express-rate-limit "5.5.1" + fast-safe-stringify "2.1.1" + handlebars "4.7.7" + http-errors "2.0.0" + js-yaml "4.1.0" + jsonwebtoken "8.5.1" + kleur "4.1.5" + lodash "4.17.21" + lru-cache "7.14.0" + lunr-mutable-indexes "2.3.2" + marked "4.1.0" + memoizee "0.4.15" + mime "3.0.0" + minimatch "5.1.0" + mkdirp "1.0.4" + mv "2.1.1" + pino "6.14.0" + pkginfo "0.4.1" + prettier-bytes "^1.0.4" + pretty-ms "^7.0.1" + request "2.88.0" + semver "7.3.7" + validator "13.7.0" + verdaccio-audit "10.2.2" + verdaccio-htpasswd "10.5.0" + +verror@1.10.0: + version "1.10.0" + resolved 
"https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + integrity sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw== + dependencies: + assert-plus "^1.0.0" + core-util-is "1.0.2" + extsprintf "^1.2.0" + +w3c-hr-time@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" + integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== + dependencies: + browser-process-hrtime "^1.0.0" + +w3c-xmlserializer@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" + integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== + dependencies: + xml-name-validator "^3.0.0" + +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== + +webidl-conversions@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" + integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== + +webidl-conversions@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" + integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== + +whatwg-encoding@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" + integrity 
sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== + dependencies: + iconv-lite "0.4.24" + +whatwg-mimetype@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" + integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== + +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + +whatwg-url@^8.0.0, whatwg-url@^8.5.0: + version "8.7.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" + integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== + dependencies: + lodash "^4.7.0" + tr46 "^2.1.0" + webidl-conversions "^6.1.0" + +word-wrap@~1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +wordwrap@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +ws@^7.4.6: + version "7.5.9" + resolved 
"https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" + integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== + +xml-name-validator@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" + integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== + +xmlchars@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" + integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yn@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== diff --git a/scripts/ci/js_tests b/scripts/ci/js_tests index b203dea4..3813de7a 100755 --- a/scripts/ci/js_tests +++ b/scripts/ci/js_tests @@ -3,18 +3,11 @@ set -e THIS_SCRIPT=$(dirname "$0"); WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; JS_PROJECT=$THIS_SCRIPT/../../automerge-js; +E2E_PROJECT=$THIS_SCRIPT/../../automerge-js/e2e; -yarn --cwd $WASM_PROJECT install; -# This will take care of running wasm-pack -yarn --cwd $WASM_PROJECT build; -# If the dependencies are already installed we delete automerge-wasm. This makes -# this script usable for iterative development. 
-if [ -d $JS_PROJECT/node_modules/automerge-wasm ]; then - rm -rf $JS_PROJECT/node_modules/automerge-wasm -fi -# --check-files forces yarn to check if the local dep has changed -yarn --cwd $JS_PROJECT install --check-files; -yarn --cwd $JS_PROJECT test; - - - +yarn --cwd $E2E_PROJECT install; +# This will build the automerge-wasm project, publish it to a local NPM +# repository, then run `yarn build` in the `automerge-js` directory with +# the local registry +yarn --cwd $E2E_PROJECT e2e buildjs; +yarn --cwd $JS_PROJECT test From 20dc0fb54e10f9b03d26838646d958bba6d9c225 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 21 Sep 2022 22:56:38 +0100 Subject: [PATCH 575/730] Set optimization levels to 'Z' for release profile This reduces the size of the WASM bundle which is generated to around 800kb. Unfortunately wasm-pack doesn't allow us to use arbitrary profiles when building and the optimization level has to be set at the workspace root - consequently this flag is set for all packages in the workspace. This shouldn't be an issue really as all our dependents in the Rust world will be setting their own optimization flags anyway. 
--- Cargo.toml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 9add8e60..fbd416fc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,7 +11,11 @@ resolver = "2" [profile.release] debug = true lto = true -opt-level = 3 +opt-level = 'z' [profile.bench] debug = true + +[profile.release.package.automerge-wasm] +debug = false +opt-level = 'z' From 577bda3e7f22d5ce298383217b7233d70a15db9e Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 26 Sep 2022 17:39:16 -0500 Subject: [PATCH 576/730] update wasm-bindgen --- automerge-wasm/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-wasm/Cargo.toml b/automerge-wasm/Cargo.toml index 38fe3dab..74d050ed 100644 --- a/automerge-wasm/Cargo.toml +++ b/automerge-wasm/Cargo.toml @@ -35,7 +35,7 @@ hex = "^0.4.3" regex = "^1.5" [dependencies.wasm-bindgen] -version = "^0.2" +version = "^0.2.83" #features = ["std"] features = ["serde-serialize", "std"] From da51492327f34d613af25eacbf9bc15d66702028 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 3 Oct 2022 22:26:12 +0100 Subject: [PATCH 577/730] build both nodejs and bundler packages in `yarn build` --- automerge-wasm/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 36e03e09..985b7a07 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -26,8 +26,8 @@ "main": "./nodejs/bindgen.js", "scripts": { "lint": "eslint test/*.ts", - "build": "cross-env PROFILE=dev TARGET=nodejs FEATURES='' yarn target", "debug": "cross-env PROFILE=dev yarn buildall", + "build": "cross-env PROFILE=dev FEATURES='' yarn buildall", "release": "cross-env PROFILE=release yarn buildall", "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=bundler yarn target", "target": "rimraf ./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET -- $FEATURES", From 
16f2272b5b420efecf8ef632fa08fb5e085dc723 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 4 Oct 2022 14:45:02 +0100 Subject: [PATCH 578/730] Generate index.d.ts from source The JS package is now written in typescript so we don't need to manually maintain an index.d.ts file. Generate the index.d.ts file from source and ship it with the JS package. --- automerge-js/index.d.ts | 113 ------------------------------------- automerge-js/package.json | 2 + automerge-js/tsconfig.json | 2 +- 3 files changed, 3 insertions(+), 114 deletions(-) delete mode 100644 automerge-js/index.d.ts diff --git a/automerge-js/index.d.ts b/automerge-js/index.d.ts deleted file mode 100644 index a18505c2..00000000 --- a/automerge-js/index.d.ts +++ /dev/null @@ -1,113 +0,0 @@ -import { API as LowLevelApi } from "automerge-types"; -import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, MaterializeValue } from "automerge-types"; -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-types"; - -export { API as LowLevelApi } from "automerge-types"; -export { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "automerge-types"; -export { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-types"; - -export type ChangeOptions = { - message?: string; - time?: number; -}; - -export class Int { - value: number; - constructor(value: number); -} - -export class Uint { - value: number; - constructor(value: number); -} - -export class Float64 { - value: number; - constructor(value: number); -} - -export class Counter { - value: number; - constructor(value?: number); - valueOf(): number; - toString(): string; - toJSON(): number; -} - -export class Text { - elems: AutomergeValue[]; - constructor(text?: string | string[]); - get length(): number; - get(index: number): AutomergeValue | undefined; - [index: number]: AutomergeValue | undefined; - [Symbol.iterator](): { - next(): { - done: boolean; 
- value: AutomergeValue; - } | { - done: boolean; - value?: undefined; - }; - }; - toString(): string; - toSpans(): AutomergeValue[]; - toJSON(): string; - set(index: number, value: AutomergeValue): void; - insertAt(index: number, ...values: AutomergeValue[]): void; - deleteAt(index: number, numDelete?: number): void; - map(callback: (e: AutomergeValue) => T): void; -} - -export type Doc = { - readonly [P in keyof T]: T[P]; -}; - -export type ChangeFn = (doc: T) => void; - -export interface State { - change: DecodedChange; - snapshot: T; -} - -export type ScalarValue = string | number | null | boolean | Date | Counter | Uint8Array; - -export type AutomergeValue = ScalarValue | {[key: string]: AutomergeValue;} | Array; - -type Conflicts = { - [key: string]: AutomergeValue; -}; - -export function use(api: LowLevelApi): void; -export function getBackend(doc: Doc) : Automerge; -export function init(actor?: ActorId): Doc; -export function clone(doc: Doc): Doc; -export function free(doc: Doc): void; -export function from(initialState: T | Doc, actor?: ActorId): Doc; -export function change(doc: Doc, options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc; -export function emptyChange(doc: Doc, options: ChangeOptions): unknown; -export function load(data: Uint8Array, actor?: ActorId): Doc; -export function save(doc: Doc): Uint8Array; -export function merge(local: Doc, remote: Doc): Doc; -export function getActorId(doc: Doc): ActorId; -export function getConflicts(doc: Doc, prop: Prop): Conflicts | undefined; -export function getLastLocalChange(doc: Doc): Change | undefined; -export function getObjectId(doc: Doc): ObjID; -export function getChanges(oldState: Doc, newState: Doc): Change[]; -export function getAllChanges(doc: Doc): Change[]; -export function applyChanges(doc: Doc, changes: Change[]): [Doc]; -export function getHistory(doc: Doc): State[]; -export function equals(val1: Doc, val2: Doc): boolean; -export function encodeSyncState(state: SyncState): 
Uint8Array; -export function decodeSyncState(state: Uint8Array): SyncState; -export function generateSyncMessage(doc: Doc, inState: SyncState): [SyncState, SyncMessage | null]; -export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage): [Doc, SyncState, null]; -export function initSyncState(): SyncState; -export function encodeChange(change: DecodedChange): Change; -export function decodeChange(data: Change): DecodedChange; -export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; -export function decodeSyncMessage(message: SyncMessage): DecodedSyncMessage; -export function getMissingDeps(doc: Doc, heads: Heads): Heads; -export function getHeads(doc: Doc): Heads; -export function dump(doc: Doc): void; -export function toJS(doc: Doc): MaterializeValue; -export function uuid(): string; diff --git a/automerge-js/package.json b/automerge-js/package.json index 96e8e534..567db247 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -13,6 +13,7 @@ "LICENSE", "package.json", "index.d.ts", + "dist/cjs/*.d.ts", "dist/cjs/constants.js", "dist/cjs/types.js", "dist/cjs/numbers.js", @@ -22,6 +23,7 @@ "dist/cjs/low_level.js", "dist/cjs/text.js", "dist/cjs/proxies.js", + "dist/mjs/*.d.ts", "dist/mjs/constants.js", "dist/mjs/types.js", "dist/mjs/numbers.js", diff --git a/automerge-js/tsconfig.json b/automerge-js/tsconfig.json index 01500ed5..80dd7c76 100644 --- a/automerge-js/tsconfig.json +++ b/automerge-js/tsconfig.json @@ -2,7 +2,7 @@ "compilerOptions": { "target": "es2016", "sourceMap": false, - "declaration": false, + "declaration": true, "resolveJsonModule": true, "module": "commonjs", "moduleResolution": "node", From b6c375efb95f20ecc0a289725442b8f72f99b0fc Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 4 Oct 2022 14:47:49 +0100 Subject: [PATCH 579/730] Fix a few small typescript complaints --- automerge-js/src/index.ts | 2 +- automerge-js/src/proxies.ts | 3 +-- automerge-js/test/basic_test.ts | 2 -- 
3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 4239b65a..e1b21301 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -91,7 +91,7 @@ export function free(doc: Doc) { return _state(doc).free() } -export function from(initialState: T | Doc, actor?: ActorId): Doc { +export function from>(initialState: T | Doc, actor?: ActorId): Doc { return change(init(actor), (d) => Object.assign(d, initialState)) } diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index dc8d6f00..2c97b720 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -2,7 +2,6 @@ import { Automerge, Heads, ObjID } from "automerge-wasm" import { Prop } from "automerge-wasm" import { AutomergeValue, ScalarValue, MapValue, ListValue, TextValue } from "./types" -import { Int, Uint, Float64 } from "./numbers" import { Counter, getWriteableCounter } from "./counter" import { Text } from "./text" import { STATE, HEADS, TRACE, FROZEN, OBJECT_ID, READ_ONLY, COUNTER, INT, UINT, F64, TEXT } from "./constants" @@ -200,7 +199,7 @@ const MapHandler = { ownKeys (target) { const { context, objectId, heads} = target // FIXME - this is a tmp workaround until fix the dupe key bug in keys() - let keys = context.keys(objectId, heads) + const keys = context.keys(objectId, heads) return [...new Set(keys)] }, } diff --git a/automerge-js/test/basic_test.ts b/automerge-js/test/basic_test.ts index 6f819ca9..fdc8797b 100644 --- a/automerge-js/test/basic_test.ts +++ b/automerge-js/test/basic_test.ts @@ -1,6 +1,4 @@ -import * as tt from "automerge-types" import * as assert from 'assert' -import * as util from 'util' import * as Automerge from '../src' describe('Automerge', () => { From d6a8d41e0a53c015d14fdc9e121e521f23157370 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 4 Oct 2022 15:09:46 +0100 Subject: [PATCH 580/730] Update JS README --- automerge-js/README.md | 31 
++++++++++++------------------- 1 file changed, 12 insertions(+), 19 deletions(-) diff --git a/automerge-js/README.md b/automerge-js/README.md index 707c51bb..4981e7be 100644 --- a/automerge-js/README.md +++ b/automerge-js/README.md @@ -1,25 +1,18 @@ -## Automerge JS +## Automerge -This is a reimplementation of Automerge as a JavaScript wrapper around the "automerge-wasm". +Automerge is a library of data structures for building collaborative +applications, this package is the javascript implementation. -This package is in alpha and feedback in welcome. +Please see [automerge.org](http://automerge.org/) for documentation. -The primary differences between using this package and "automerge" are as follows: +## Setup -1. The low level api needs to plugged in via the use function. The only current implementation of "automerge-wasm" but another could used in theory. +This package is a wrapper around a core library which is written in rust and +compiled to WASM. In `node` this should be transparent to you, but in the +browser you will need a bundler to include the WASM blob as part of your module +hierarchy. There are examples of doing this with common bundlers in `./examples`. -```javascript -import * as Automerge from "automerge-js"; -import * as wasm_api from "automerge-wasm"; +## Meta -// browsers require an async wasm load - see automerge-wasm docs -Automerge.use(wasm_api); -``` - -2. There is no front-end back-end split, and no patch format or patch observer. These concepts don't make sense with the wasm implementation. - -3. The basic `Doc` object is now a Proxy object and will behave differently in a repl environment. - -4. The 'Text' class is currently very slow and needs to be re-worked. - -Beyond this please refer to the Automerge [README](http://github.com/automerge/automerge/) for further information. +Copyright 2017–2021, the Automerge contributors. Released under the terms of the +MIT license (see `LICENSE`). 
From 29f2c9945e899de4bfa3dd474832a53e3900cada Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 7 Sep 2022 16:38:08 +0100 Subject: [PATCH 581/730] query::Prop: don't scan past end of OpTree The logic in `query::Prop` works by first doing a binary search in the OpTree for the node where the key we are looking for starts, and then proceeding from this point forwards skipping over nodes which contain only invisible ops. This logic was incorrect if the start index returned by the binary search was in the last child of the optree and the last child only contains invisible ops. In this case the index returned by the query would be greater than the length of the optree. Clamp the index returned by the query to the total length of the opset. --- automerge/src/query/prop.rs | 30 +++++++++++++++++++++++++----- automerge/tests/test.rs | 10 ++++++++++ 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs index 105b268f..8b59d698 100644 --- a/automerge/src/query/prop.rs +++ b/automerge/src/query/prop.rs @@ -9,7 +9,15 @@ pub(crate) struct Prop<'a> { pub(crate) ops: Vec<&'a Op>, pub(crate) ops_pos: Vec, pub(crate) pos: usize, - start: Option, + start: Option, +} + +#[derive(Debug, Clone, PartialEq)] +struct Start { + /// The index to start searching for in the optree + idx: usize, + /// The total length of the optree + optree_len: usize, } impl<'a> Prop<'a> { @@ -30,12 +38,21 @@ impl<'a> TreeQuery<'a> for Prop<'a> { child: &'a OpTreeNode, m: &OpSetMetadata, ) -> QueryResult { - if let Some(start) = self.start { + if let Some(Start { + idx: start, + optree_len, + }) = self.start + { if self.pos + child.len() >= start { // skip empty nodes if child.index.visible_len() == 0 { - self.pos += child.len(); - QueryResult::Next + if self.pos + child.len() >= optree_len { + self.pos = optree_len; + QueryResult::Finish + } else { + self.pos += child.len(); + QueryResult::Next + } } else { QueryResult::Descend } @@ -46,7 
+63,10 @@ impl<'a> TreeQuery<'a> for Prop<'a> { } else { // in the root node find the first op position for the key let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); - self.start = Some(start); + self.start = Some(Start { + idx: start, + optree_len: child.len(), + }); self.pos = start; QueryResult::Skip(start) } diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 203ec772..938f4343 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1349,6 +1349,16 @@ fn load_doc_with_deleted_objects() { Automerge::load(&saved).unwrap(); } +#[test] +fn insert_after_many_deletes() { + let mut doc = AutoCommit::new(); + let obj = doc.put_object(&ROOT, "object", ObjType::Map).unwrap(); + for i in 0..100 { + doc.put(&obj, i.to_string(), i).unwrap(); + doc.delete(&obj, i.to_string()).unwrap(); + } +} + #[test] fn simple_bad_saveload() { let mut doc = Automerge::new(); From 74af5378000454f3b737caaf34cc0e15ccf1d632 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 4 Oct 2022 22:05:56 +0100 Subject: [PATCH 582/730] Rename automerge and automerge-wasm packages In an attempt to make our package naming more understandable we move all our packages to a single NPM scope. 
`automerge` -> `@automerge/automerge` and `automerge-wasm` -> @automerge/automerge-wasm` --- automerge-js/e2e/index.ts | 4 +-- automerge-js/e2e/verdaccio.yaml | 4 +-- .../examples/create-react-app/package.json | 2 +- .../examples/create-react-app/src/App.js | 2 +- .../examples/create-react-app/yarn.lock | 32 +++++++++---------- automerge-js/examples/vite/package.json | 2 +- automerge-js/examples/vite/src/main.ts | 2 +- automerge-js/examples/vite/vite.config.js | 2 +- automerge-js/examples/webpack/package.json | 2 +- automerge-js/examples/webpack/src/index.js | 2 +- automerge-js/package.json | 4 +-- automerge-js/src/counter.ts | 2 +- automerge-js/src/index.ts | 8 ++--- automerge-js/src/low_level.ts | 4 +-- automerge-js/src/proxies.ts | 4 +-- automerge-js/src/text.ts | 2 +- automerge-wasm/package.json | 3 +- 17 files changed, 41 insertions(+), 40 deletions(-) diff --git a/automerge-js/e2e/index.ts b/automerge-js/e2e/index.ts index 90205071..c11e518d 100644 --- a/automerge-js/e2e/index.ts +++ b/automerge-js/e2e/index.ts @@ -216,7 +216,7 @@ function buildAutomergeWasm(profile: Profile): WithRegistryAction { async function publishAutomergeWasm(registryUrl: string) { printHeader("Publishing automerge-wasm to verdaccio") - await fsPromises.rm(path.join(VERDACCIO_DB_PATH, "automerge-wasm"), { recursive: true, force: true} ) + await fsPromises.rm(path.join(VERDACCIO_DB_PATH, "@automerge/automerge-wasm"), { recursive: true, force: true} ) await yarnPublish(registryUrl, AUTOMERGE_WASM_PATH) } @@ -224,7 +224,7 @@ async function buildAndPublishAutomergeJs(registryUrl: string) { // Build the js package printHeader("Building automerge") await removeExistingAutomerge(AUTOMERGE_JS_PATH) - await removeFromVerdaccio("automerge") + await removeFromVerdaccio("@automerge/automerge") await fsPromises.rm(path.join(AUTOMERGE_JS_PATH, "yarn.lock"), {force: true}) await spawnAndWait("yarn", ["--cwd", AUTOMERGE_JS_PATH, "install", "--registry", registryUrl, "--check-files"], {stdio: 
"inherit"}) await spawnAndWait("yarn", ["--cwd", AUTOMERGE_JS_PATH, "build"], {stdio: "inherit"}) diff --git a/automerge-js/e2e/verdaccio.yaml b/automerge-js/e2e/verdaccio.yaml index bb2e2e87..45920a16 100644 --- a/automerge-js/e2e/verdaccio.yaml +++ b/automerge-js/e2e/verdaccio.yaml @@ -6,10 +6,10 @@ publish: allow_offline: true logs: {type: stdout, format: pretty, level: info} packages: - "automerge-wasm": + "@automerge/automerge-wasm": access: "$all" publish: "$all" - "automerge-js": + "@automerge/automerge": access: "$all" publish: "$all" "*": diff --git a/automerge-js/examples/create-react-app/package.json b/automerge-js/examples/create-react-app/package.json index d11491c5..6d14c84e 100644 --- a/automerge-js/examples/create-react-app/package.json +++ b/automerge-js/examples/create-react-app/package.json @@ -8,7 +8,7 @@ "@testing-library/jest-dom": "^5.16.5", "@testing-library/react": "^13.4.0", "@testing-library/user-event": "^13.5.0", - "automerge": "2.0.0-alpha.1", + "@automerge/automerge": "2.0.0-alpha.1", "react": "^18.2.0", "react-dom": "^18.2.0", "react-scripts": "5.0.1", diff --git a/automerge-js/examples/create-react-app/src/App.js b/automerge-js/examples/create-react-app/src/App.js index cebfc345..d065911b 100644 --- a/automerge-js/examples/create-react-app/src/App.js +++ b/automerge-js/examples/create-react-app/src/App.js @@ -1,4 +1,4 @@ -import * as Automerge from "automerge" +import * as Automerge from "@automerge/automerge" import logo from './logo.svg'; import './App.css'; diff --git a/automerge-js/examples/create-react-app/yarn.lock b/automerge-js/examples/create-react-app/yarn.lock index 79d61777..fe6a1189 100644 --- a/automerge-js/examples/create-react-app/yarn.lock +++ b/automerge-js/examples/create-react-app/yarn.lock @@ -24,6 +24,19 @@ jsonpointer "^5.0.0" leven "^3.1.0" +"@automerge/automerge-wasm@0.1.7": + version "0.1.7" + resolved 
"http://localhost:4873/@automerge%2fautomerge-wasm/-/automerge-wasm-0.1.7.tgz#2b1bd55a05def29beec76828664ae1def1276e11" + integrity sha512-MIUUxqx9QM14DR8OzzS4sCC3cNIgzH2LMvTesFTO8NoH8RV/hm4jrQHQbGfx2SV3Q6tZjy8bCLOLgJK/yIxbKQ== + +"@automerge/automerge@2.0.0-alpha.1": + version "2.0.0-alpha.1" + resolved "http://localhost:4873/@automerge%2fautomerge/-/automerge-2.0.0-alpha.1.tgz#df52164448ab13e458bd5a8e32e47f6ddbdd56fc" + integrity sha512-9q5CHqKEmTKs5T7/UdVaugk+rz3mAuxphpfgKXPGgEvvOIZsHz4spkxSNahWscY9pF8EhLgcA/pCfdtd3b2goA== + dependencies: + "@automerge/automerge-wasm" "0.1.7" + uuid "^8.3" + "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.8.3": version "7.18.6" resolved "http://localhost:4873/@babel%2fcode-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" @@ -2627,19 +2640,6 @@ at-least-node@^1.0.0: resolved "http://localhost:4873/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== -automerge-wasm@0.1.7: - version "0.1.7" - resolved "http://localhost:4873/automerge-wasm/-/automerge-wasm-0.1.7.tgz#b5c02d6d00521d5ecb956226a187d668e7530c8f" - integrity sha512-BJ0/W1i7fCMTEWZ25DS31AL2vgZ3Yv5LrBibU0gG0pg6oj62T4iiXm/4bYXHykkry1+mTJIoNGeOwCwEpvhFAw== - -automerge@2.0.0-alpha.1: - version "2.0.0-alpha.1" - resolved "http://localhost:4873/automerge/-/automerge-2.0.0-alpha.1.tgz#554d0246116121609f97297f9f7d9048eb0447fa" - integrity sha512-EZ6A52btI2LLrgRk8BYwcrOikaKyPYq4LkdmBeV0ec/8XNW6QhPLtwb+NXP6ZM2ynHND3zFR8pDzbPeP+POeKA== - dependencies: - automerge-wasm "0.1.7" - uuid "^8.3" - autoprefixer@^10.4.11, autoprefixer@^10.4.12: version "10.4.12" resolved "http://localhost:4873/autoprefixer/-/autoprefixer-10.4.12.tgz#183f30bf0b0722af54ee5ef257f7d4320bb33129" @@ -7787,9 +7787,9 @@ semver@^6.0.0, 
semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== semver@^7.3.2, semver@^7.3.5, semver@^7.3.7: - version "7.3.7" - resolved "http://localhost:4873/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" - integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + version "7.3.8" + resolved "http://localhost:4873/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" + integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== dependencies: lru-cache "^6.0.0" diff --git a/automerge-js/examples/vite/package.json b/automerge-js/examples/vite/package.json index d4a09e54..01abe125 100644 --- a/automerge-js/examples/vite/package.json +++ b/automerge-js/examples/vite/package.json @@ -9,7 +9,7 @@ "preview": "vite preview" }, "dependencies": { - "automerge": "2.0.0-alpha.1" + "@automerge/automerge": "2.0.0-alpha.1" }, "devDependencies": { "typescript": "^4.6.4", diff --git a/automerge-js/examples/vite/src/main.ts b/automerge-js/examples/vite/src/main.ts index c94cbfd7..69378eca 100644 --- a/automerge-js/examples/vite/src/main.ts +++ b/automerge-js/examples/vite/src/main.ts @@ -1,4 +1,4 @@ -import * as Automerge from "automerge" +import * as Automerge from "@automerge/automerge" // hello world code that will run correctly on web or node diff --git a/automerge-js/examples/vite/vite.config.js b/automerge-js/examples/vite/vite.config.js index c048f0b5..2076b3ff 100644 --- a/automerge-js/examples/vite/vite.config.js +++ b/automerge-js/examples/vite/vite.config.js @@ -10,6 +10,6 @@ export default defineConfig({ // versions of the JS wrapper. 
This causes problems because the JS // wrapper has a module level variable to track JS side heap // allocations, initializing this twice causes horrible breakage - exclude: ["automerge-wasm"] + exclude: ["@automerge/automerge-wasm"] } }) diff --git a/automerge-js/examples/webpack/package.json b/automerge-js/examples/webpack/package.json index 02a9efd8..25590c56 100644 --- a/automerge-js/examples/webpack/package.json +++ b/automerge-js/examples/webpack/package.json @@ -10,7 +10,7 @@ }, "author": "", "dependencies": { - "automerge": "2.0.0-alpha.1" + "@automerge/automerge": "2.0.0-alpha.1" }, "devDependencies": { "serve": "^13.0.2", diff --git a/automerge-js/examples/webpack/src/index.js b/automerge-js/examples/webpack/src/index.js index 5564f442..4503532c 100644 --- a/automerge-js/examples/webpack/src/index.js +++ b/automerge-js/examples/webpack/src/index.js @@ -1,4 +1,4 @@ -import * as Automerge from "automerge" +import * as Automerge from "@automerge/automerge" // hello world code that will run correctly on web or node diff --git a/automerge-js/package.json b/automerge-js/package.json index 567db247..a6f81d08 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -1,5 +1,5 @@ { - "name": "automerge", + "name": "@automerge/automerge", "collaborators": [ "Orion Henry ", "Martin Kleppmann" @@ -57,7 +57,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "automerge-wasm": "0.1.7", + "@automerge/automerge-wasm": "0.1.7", "uuid": "^8.3" } } diff --git a/automerge-js/src/counter.ts b/automerge-js/src/counter.ts index bd096441..c20d7fcf 100644 --- a/automerge-js/src/counter.ts +++ b/automerge-js/src/counter.ts @@ -1,4 +1,4 @@ -import { Automerge, ObjID, Prop } from "automerge-wasm" +import { Automerge, ObjID, Prop } from "@automerge/automerge-wasm" import { COUNTER } from "./constants" /** * The most basic CRDT: an integer value that can be changed only by diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index e1b21301..eb303ef9 100644 
--- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -7,11 +7,11 @@ import { STATE, HEADS, TRACE, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" import { AutomergeValue, Counter } from "./types" export { AutomergeValue, Text, Counter, Int, Uint, Float64 } from "./types" -import { API } from "automerge-wasm"; +import { API } from "@automerge/automerge-wasm"; import { ApiHandler, UseApi } from "./low_level" -import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "automerge-wasm" -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm" +import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "@automerge/automerge-wasm" +import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "@automerge/automerge-wasm" export type ChangeOptions = { message?: string, time?: number } @@ -29,7 +29,7 @@ export function use(api: API) { UseApi(api) } -import * as wasm from "automerge-wasm" +import * as wasm from "@automerge/automerge-wasm" use(wasm) export function getBackend(doc: Doc) : Automerge { diff --git a/automerge-js/src/low_level.ts b/automerge-js/src/low_level.ts index 44b310bb..9a5480b3 100644 --- a/automerge-js/src/low_level.ts +++ b/automerge-js/src/low_level.ts @@ -1,6 +1,6 @@ -import { Automerge, Change, DecodedChange, Actor, SyncState, SyncMessage, JsSyncState, DecodedSyncMessage } from "automerge-wasm" -import { API } from "automerge-wasm" +import { Automerge, Change, DecodedChange, Actor, SyncState, SyncMessage, JsSyncState, DecodedSyncMessage } from "@automerge/automerge-wasm" +import { API } from "@automerge/automerge-wasm" export function UseApi(api: API) { for (const k in api) { diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index 2c97b720..a03c97cc 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -1,6 +1,6 @@ -import { Automerge, Heads, ObjID } from 
"automerge-wasm" -import { Prop } from "automerge-wasm" +import { Automerge, Heads, ObjID } from "@automerge/automerge-wasm" +import { Prop } from "@automerge/automerge-wasm" import { AutomergeValue, ScalarValue, MapValue, ListValue, TextValue } from "./types" import { Counter, getWriteableCounter } from "./counter" import { Text } from "./text" diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index f2aecabb..9566d5eb 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -1,4 +1,4 @@ -import { Value } from "automerge-wasm" +import { Value } from "@automerge/automerge-wasm" import { TEXT } from "./constants" export class Text { diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 985b7a07..12842790 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -4,7 +4,7 @@ "Alex Good ", "Martin Kleppmann" ], - "name": "automerge-wasm", + "name": "@automerge/automerge-wasm", "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", @@ -21,6 +21,7 @@ "bundler/bindgen_bg.js", "bundler/bindgen_bg.wasm" ], + "private": false, "types": "index.d.ts", "module": "./bundler/bindgen.js", "main": "./nodejs/bindgen.js", From fb4d1f4361f44c25af44f74bd45a0d2e5c21f03c Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 4 Oct 2022 22:54:19 +0100 Subject: [PATCH 583/730] Ship generated typescript types correctly Generated typescript types were being shipped in the `dist/cjs` and `dist/mjs` directories but are referenced at the top level in package.json. Add a step to generate `*.d.ts` files in the top level `dist/*.d.ts`. 
--- automerge-js/package.json | 9 ++++----- automerge-js/src/index.ts | 2 ++ 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/automerge-js/package.json b/automerge-js/package.json index a6f81d08..052cd7cf 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.0-alpha.1", + "version": "2.0.0-alpha.2", "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", @@ -13,7 +13,7 @@ "LICENSE", "package.json", "index.d.ts", - "dist/cjs/*.d.ts", + "dist/*.d.ts", "dist/cjs/constants.js", "dist/cjs/types.js", "dist/cjs/numbers.js", @@ -23,7 +23,6 @@ "dist/cjs/low_level.js", "dist/cjs/text.js", "dist/cjs/proxies.js", - "dist/mjs/*.d.ts", "dist/mjs/constants.js", "dist/mjs/types.js", "dist/mjs/numbers.js", @@ -34,13 +33,13 @@ "dist/mjs/text.js", "dist/mjs/proxies.js" ], - "types": "index.d.ts", + "types": "./dist/index.d.ts", "module": "./dist/mjs/index.js", "main": "./dist/cjs/index.js", "license": "MIT", "scripts": { "lint": "eslint src", - "build": "tsc -p config/mjs.json && tsc -p config/cjs.json", + "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc --emitDeclarationOnly", "test": "ts-mocha test/*.ts" }, "devDependencies": { diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index eb303ef9..bd7b0cb2 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -399,3 +399,5 @@ export function toJS(doc: Doc) : MaterializeValue { function isObject(obj: unknown) : obj is Record { return typeof obj === 'object' && obj !== null } + +export { API, SyncState, ActorId, Conflicts, Prop, Change, ObjID, DecodedChange, DecodedSyncMessage, Heads, MaterializeValue } From 2012f5c6e45bfdedb356203a3e2ba9c7e9ecc99c Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 5 Oct 2022 00:52:36 +0100 
Subject: [PATCH 584/730] Fix some typescript bugs, automerge-js 2.0.0-alpha.3 --- automerge-js/examples/create-react-app/package.json | 2 +- automerge-js/examples/vite/package.json | 2 +- automerge-js/examples/webpack/package.json | 2 +- automerge-js/package.json | 4 ++-- automerge-js/src/index.ts | 7 +++---- automerge-wasm/index.d.ts | 4 ++-- 6 files changed, 10 insertions(+), 11 deletions(-) diff --git a/automerge-js/examples/create-react-app/package.json b/automerge-js/examples/create-react-app/package.json index 6d14c84e..2080d061 100644 --- a/automerge-js/examples/create-react-app/package.json +++ b/automerge-js/examples/create-react-app/package.json @@ -8,7 +8,7 @@ "@testing-library/jest-dom": "^5.16.5", "@testing-library/react": "^13.4.0", "@testing-library/user-event": "^13.5.0", - "@automerge/automerge": "2.0.0-alpha.1", + "@automerge/automerge": "2.0.0-alpha.3", "react": "^18.2.0", "react-dom": "^18.2.0", "react-scripts": "5.0.1", diff --git a/automerge-js/examples/vite/package.json b/automerge-js/examples/vite/package.json index 01abe125..61a815d5 100644 --- a/automerge-js/examples/vite/package.json +++ b/automerge-js/examples/vite/package.json @@ -9,7 +9,7 @@ "preview": "vite preview" }, "dependencies": { - "@automerge/automerge": "2.0.0-alpha.1" + "@automerge/automerge": "2.0.0-alpha.3" }, "devDependencies": { "typescript": "^4.6.4", diff --git a/automerge-js/examples/webpack/package.json b/automerge-js/examples/webpack/package.json index 25590c56..48d43dcc 100644 --- a/automerge-js/examples/webpack/package.json +++ b/automerge-js/examples/webpack/package.json @@ -10,7 +10,7 @@ }, "author": "", "dependencies": { - "@automerge/automerge": "2.0.0-alpha.1" + "@automerge/automerge": "2.0.0-alpha.3" }, "devDependencies": { "serve": "^13.0.2", diff --git a/automerge-js/package.json b/automerge-js/package.json index 052cd7cf..c01f2f96 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,8 +4,8 @@ "Orion Henry ", "Martin Kleppmann" ], - 
"version": "2.0.0-alpha.2", - "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", + "version": "2.0.0-alpha.3", + "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", "files": [ diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index bd7b0cb2..95c57452 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -7,7 +7,7 @@ import { STATE, HEADS, TRACE, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" import { AutomergeValue, Counter } from "./types" export { AutomergeValue, Text, Counter, Int, Uint, Float64 } from "./types" -import { API } from "@automerge/automerge-wasm"; +import { type API } from "@automerge/automerge-wasm"; import { ApiHandler, UseApi } from "./low_level" import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "@automerge/automerge-wasm" @@ -15,7 +15,7 @@ import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "@auto export type ChangeOptions = { message?: string, time?: number } -export type Doc = { readonly [P in keyof T]: Doc } +export type Doc = { readonly [P in keyof T]: T[P] } export type ChangeFn = (doc: T) => void @@ -24,7 +24,6 @@ export interface State { snapshot: T } - export function use(api: API) { UseApi(api) } @@ -400,4 +399,4 @@ function isObject(obj: unknown) : obj is Record { return typeof obj === 'object' && obj !== null } -export { API, SyncState, ActorId, Conflicts, Prop, Change, ObjID, DecodedChange, DecodedSyncMessage, Heads, MaterializeValue } +export type { API, SyncState, ActorId, Conflicts, Prop, Change, ObjID, DecodedChange, DecodedSyncMessage, Heads, MaterializeValue } diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index f94f35c3..7d43eacf 100644 --- a/automerge-wasm/index.d.ts +++ 
b/automerge-wasm/index.d.ts @@ -112,7 +112,7 @@ export function decodeSyncState(data: Uint8Array): SyncState; export function exportSyncState(state: SyncState): JsSyncState; export function importSyncState(state: JsSyncState): SyncState; -export class API { +export interface API { create(actor?: Actor): Automerge; load(data: Uint8Array, actor?: Actor): Automerge; encodeChange(change: DecodedChange): Change; @@ -187,7 +187,7 @@ export class Automerge { dump(): void; } -export class JsSyncState { +export interface JsSyncState { sharedHeads: Heads; lastSentHeads: Heads; theirHeads: Heads | undefined; From 92145e6131c9d15ae888d2c932f81089889ba987 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 5 Oct 2022 00:55:10 +0100 Subject: [PATCH 585/730] @automerge/automerge-wasm 0.1.8 --- automerge-js/package.json | 2 +- automerge-wasm/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/automerge-js/package.json b/automerge-js/package.json index c01f2f96..02b9359e 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -56,7 +56,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.7", + "@automerge/automerge-wasm": "0.1.8", "uuid": "^8.3" } } diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 12842790..c5a82fb1 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.7", + "version": "0.1.8", "license": "MIT", "files": [ "README.md", From 7a6dfcc289f6b82e5cece1e57be6e459f0816097 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 2 Sep 2022 09:53:49 -0500 Subject: [PATCH 586/730] The patch interface needs an accurate path per patch op For the path to be accurate it needs to be calculated at the moment of op insert 
not at commit. This is because the path may contain list indexes in parent objects that could change by inserts and deletes later in the transaction. The primary change was adding op_observer to the transaction object and removing it from commit options. The beginnings of a wasm level `applyPatch` system is laid out here. --- automerge-c/src/doc.rs | 2 +- automerge-wasm/index.d.ts | 3 + automerge-wasm/src/interop.rs | 153 ++++++++- automerge-wasm/src/lib.rs | 180 ++--------- automerge-wasm/src/observer.rs | 302 ++++++++++++++++++ automerge-wasm/test/apply.ts | 100 ++++++ automerge-wasm/test/test.ts | 185 +++++------ automerge/examples/watch.rs | 48 +-- automerge/src/autocommit.rs | 165 +++++----- automerge/src/automerge.rs | 109 +++---- automerge/src/automerge/tests.rs | 48 ++- automerge/src/lib.rs | 4 +- automerge/src/op_observer.rs | 161 ++++++++-- automerge/src/op_set.rs | 34 +- automerge/src/options.rs | 16 - automerge/src/parents.rs | 23 +- automerge/src/sync.rs | 10 +- automerge/src/transaction.rs | 2 +- automerge/src/transaction/commit.rs | 15 +- automerge/src/transaction/inner.rs | 126 +++++--- .../src/transaction/manual_transaction.rs | 88 ++--- automerge/src/transaction/result.rs | 3 +- automerge/tests/test.rs | 13 +- 23 files changed, 1153 insertions(+), 637 deletions(-) create mode 100644 automerge-wasm/src/observer.rs create mode 100644 automerge-wasm/test/apply.ts delete mode 100644 automerge/src/options.rs diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs index 1a0291e8..beaf7347 100644 --- a/automerge-c/src/doc.rs +++ b/automerge-c/src/doc.rs @@ -170,7 +170,7 @@ pub unsafe extern "C" fn AMcommit( if let Some(time) = time.as_ref() { options.set_time(*time); } - to_result(doc.commit_with::<()>(options)) + to_result(doc.commit_with(options)) } /// \memberof AMdoc diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 7d43eacf..c28cceff 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -185,6 +185,9 
@@ export class Automerge { // dump internal state to console.log dump(): void; + + // experimental api can go here + applyPatches(obj: Doc, meta?: any, callback?: Function): Doc; } export interface JsSyncState { diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index bc5a0226..1f67e6ec 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -1,13 +1,14 @@ +use crate::AutoCommit; use automerge as am; use automerge::transaction::Transactable; use automerge::{Change, ChangeHash, Prop}; -use js_sys::{Array, Object, Reflect, Uint8Array}; +use js_sys::{Array, Function, Object, Reflect, Uint8Array}; use std::collections::{BTreeSet, HashSet}; use std::fmt::Display; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; -use crate::{ObjId, ScalarValue, Value}; +use crate::{observer::Patch, ObjId, ScalarValue, Value}; pub(crate) struct JS(pub(crate) JsValue); pub(crate) struct AR(pub(crate) Array); @@ -357,7 +358,7 @@ pub(crate) fn get_heads(heads: Option) -> Option> { heads.ok() } -pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { +pub(crate) fn map_to_js(doc: &AutoCommit, obj: &ObjId) -> JsValue { let keys = doc.keys(obj); let map = Object::new(); for k in keys { @@ -383,7 +384,7 @@ pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { map.into() } -pub(crate) fn map_to_js_at(doc: &am::AutoCommit, obj: &ObjId, heads: &[ChangeHash]) -> JsValue { +pub(crate) fn map_to_js_at(doc: &AutoCommit, obj: &ObjId, heads: &[ChangeHash]) -> JsValue { let keys = doc.keys(obj); let map = Object::new(); for k in keys { @@ -409,7 +410,7 @@ pub(crate) fn map_to_js_at(doc: &am::AutoCommit, obj: &ObjId, heads: &[ChangeHas map.into() } -pub(crate) fn list_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { +pub(crate) fn list_to_js(doc: &AutoCommit, obj: &ObjId) -> JsValue { let len = doc.length(obj); let array = Array::new(); for i in 0..len { @@ -435,7 +436,7 @@ pub(crate) fn list_to_js(doc: 
&am::AutoCommit, obj: &ObjId) -> JsValue { array.into() } -pub(crate) fn list_to_js_at(doc: &am::AutoCommit, obj: &ObjId, heads: &[ChangeHash]) -> JsValue { +pub(crate) fn list_to_js_at(doc: &AutoCommit, obj: &ObjId, heads: &[ChangeHash]) -> JsValue { let len = doc.length(obj); let array = Array::new(); for i in 0..len { @@ -460,3 +461,143 @@ pub(crate) fn list_to_js_at(doc: &am::AutoCommit, obj: &ObjId, heads: &[ChangeHa } array.into() } + +/* +pub(crate) fn export_values<'a, V: Iterator>>(val: V) -> Array { + val.map(|v| export_value(&v)).collect() +} +*/ + +pub(crate) fn export_value(val: &Value<'_>) -> JsValue { + match val { + Value::Object(o) if o == &am::ObjType::Map || o == &am::ObjType::Table => { + Object::new().into() + } + Value::Object(_) => Array::new().into(), + Value::Scalar(v) => ScalarValue(v.clone()).into(), + } +} + +pub(crate) fn apply_patch(obj: JsValue, patch: &Patch) -> Result { + apply_patch2(obj, patch, 0) +} + +pub(crate) fn apply_patch2(obj: JsValue, patch: &Patch, depth: usize) -> Result { + match (js_to_map_seq(&obj)?, patch.path().get(depth)) { + (JsObj::Map(o), Some(Prop::Map(key))) => { + let sub_obj = Reflect::get(&obj, &key.into())?; + let new_value = apply_patch2(sub_obj, patch, depth + 1)?; + let result = + Reflect::construct(&o.constructor(), &Array::new())?.dyn_into::()?; + let result = Object::assign(&result, &o).into(); + Reflect::set(&result, &key.into(), &new_value)?; + Ok(result) + } + (JsObj::Seq(a), Some(Prop::Seq(index))) => { + let index = JsValue::from_f64(*index as f64); + let sub_obj = Reflect::get(&obj, &index)?; + let new_value = apply_patch2(sub_obj, patch, depth + 1)?; + let result = Reflect::construct(&a.constructor(), &a)?; + //web_sys::console::log_2(&format!("NEW VAL {}: ", tmpi).into(), &new_value); + Reflect::set(&result, &index, &new_value)?; + Ok(result) + } + (JsObj::Map(o), None) => { + let result = + Reflect::construct(&o.constructor(), &Array::new())?.dyn_into::()?; + let result = 
Object::assign(&result, &o); + match patch { + Patch::PutMap { key, value, .. } => { + let result = result.into(); + Reflect::set(&result, &key.into(), &export_value(value))?; + Ok(result) + } + Patch::DeleteMap { key, .. } => { + Reflect::delete_property(&result, &key.into())?; + Ok(result.into()) + } + Patch::Increment { prop, value, .. } => { + let result = result.into(); + if let Prop::Map(key) = prop { + let key = key.into(); + let old_val = Reflect::get(&o, &key)?; + if let Some(old) = old_val.as_f64() { + Reflect::set(&result, &key, &JsValue::from(old + *value as f64))?; + Ok(result) + } else { + Err(to_js_err("cant increment a non number value")) + } + } else { + Err(to_js_err("cant increment an index on a map")) + } + } + Patch::Insert { .. } => Err(to_js_err("cannot insert into map")), + Patch::DeleteSeq { .. } => Err(to_js_err("cannot splice a map")), + Patch::PutSeq { .. } => Err(to_js_err("cannot array index a map")), + } + } + (JsObj::Seq(a), None) => { + match patch { + Patch::PutSeq { index, value, .. } => { + let result = Reflect::construct(&a.constructor(), &a)?; + Reflect::set(&result, &(*index as f64).into(), &export_value(value))?; + Ok(result) + } + Patch::DeleteSeq { index, .. } => { + let result = &a.dyn_into::()?; + let mut f = |_, i, _| i != *index as u32; + let result = result.filter(&mut f); + + Ok(result.into()) + } + Patch::Insert { index, values, .. } => { + let from = Reflect::get(&a.constructor().into(), &"from".into())? + .dyn_into::()?; + let result = from.call1(&JsValue::undefined(), &a)?.dyn_into::()?; + // TODO: should be one function call + for (i, v) in values.iter().enumerate() { + result.splice(*index as u32 + i as u32, 0, &export_value(v)); + } + Ok(result.into()) + } + Patch::Increment { prop, value, .. 
} => { + let result = Reflect::construct(&a.constructor(), &a)?; + if let Prop::Seq(index) = prop { + let index = (*index as f64).into(); + let old_val = Reflect::get(&a, &index)?; + if let Some(old) = old_val.as_f64() { + Reflect::set(&result, &index, &JsValue::from(old + *value as f64))?; + Ok(result) + } else { + Err(to_js_err("cant increment a non number value")) + } + } else { + Err(to_js_err("cant increment a key on a seq")) + } + } + Patch::DeleteMap { .. } => Err(to_js_err("cannot delete from a seq")), + Patch::PutMap { .. } => Err(to_js_err("cannot set key in seq")), + } + } + (_, _) => Err(to_js_err(format!( + "object/patch missmatch {:?} depth={:?}", + patch, depth + ))), + } +} + +#[derive(Debug)] +enum JsObj { + Map(Object), + Seq(Array), +} + +fn js_to_map_seq(value: &JsValue) -> Result { + if let Ok(array) = value.clone().dyn_into::() { + Ok(JsObj::Seq(array)) + } else if let Ok(obj) = value.clone().dyn_into::() { + Ok(JsObj::Map(obj)) + } else { + Err(to_js_err("obj is not Object or Array")) + } +} diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 0eb8c256..26a80861 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -28,10 +28,7 @@ #![allow(clippy::unused_unit)] use am::transaction::CommitOptions; use am::transaction::Transactable; -use am::ApplyOptions; use automerge as am; -use automerge::Patch; -use automerge::VecOpObserver; use automerge::{Change, ObjId, Prop, Value, ROOT}; use js_sys::{Array, Object, Uint8Array}; use serde::Serialize; @@ -40,12 +37,15 @@ use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; mod interop; +mod observer; mod sync; mod value; +use observer::Observer; + use interop::{ - get_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at, to_js_err, - to_objtype, to_prop, AR, JS, + apply_patch, get_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at, + to_js_err, to_objtype, to_prop, AR, JS, }; use sync::SyncState; use value::{datatype, 
ScalarValue}; @@ -57,6 +57,8 @@ macro_rules! log { }; } +type AutoCommit = am::AutoCommitWithObs; + #[cfg(feature = "wee_alloc")] #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; @@ -64,40 +66,24 @@ static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[wasm_bindgen] #[derive(Debug)] pub struct Automerge { - doc: automerge::AutoCommit, - observer: Option, + doc: AutoCommit, } #[wasm_bindgen] impl Automerge { pub fn new(actor: Option) -> Result { - let mut automerge = automerge::AutoCommit::new(); + let mut doc = AutoCommit::default(); if let Some(a) = actor { let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec()); - automerge.set_actor(a); - } - Ok(Automerge { - doc: automerge, - observer: None, - }) - } - - fn ensure_transaction_closed(&mut self) { - if self.doc.pending_ops() > 0 { - let mut opts = CommitOptions::default(); - if let Some(observer) = self.observer.as_mut() { - opts.set_op_observer(observer); - } - self.doc.commit_with(opts); + doc.set_actor(a); } + Ok(Automerge { doc }) } #[allow(clippy::should_implement_trait)] pub fn clone(&mut self, actor: Option) -> Result { - self.ensure_transaction_closed(); let mut automerge = Automerge { doc: self.doc.clone(), - observer: None, }; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); @@ -107,10 +93,8 @@ impl Automerge { } pub fn fork(&mut self, actor: Option) -> Result { - self.ensure_transaction_closed(); let mut automerge = Automerge { doc: self.doc.fork(), - observer: None, }; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); @@ -124,7 +108,6 @@ impl Automerge { let deps: Vec<_> = JS(heads).try_into()?; let mut automerge = Automerge { doc: self.doc.fork_at(&deps)?, - observer: None, }; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); @@ -148,21 +131,12 @@ impl Automerge 
{ if let Some(time) = time { commit_opts.set_time(time as i64); } - if let Some(observer) = self.observer.as_mut() { - commit_opts.set_op_observer(observer); - } let hash = self.doc.commit_with(commit_opts); JsValue::from_str(&hex::encode(&hash.0)) } pub fn merge(&mut self, other: &mut Automerge) -> Result { - self.ensure_transaction_closed(); - let options = if let Some(observer) = self.observer.as_mut() { - ApplyOptions::default().with_op_observer(observer) - } else { - ApplyOptions::default() - }; - let heads = self.doc.merge_with(&mut other.doc, options)?; + let heads = self.doc.merge(&mut other.doc)?; let heads: Array = heads .iter() .map(|h| JsValue::from_str(&hex::encode(&h.0))) @@ -454,84 +428,30 @@ impl Automerge { pub fn enable_patches(&mut self, enable: JsValue) -> Result<(), JsValue> { let enable = enable .as_bool() - .ok_or_else(|| to_js_err("expected boolean"))?; - if enable { - if self.observer.is_none() { - self.observer = Some(VecOpObserver::default()); - } - } else { - self.observer = None; - } + .ok_or_else(|| to_js_err("must pass a bool to enable_patches"))?; + self.doc.observer().enable(enable); Ok(()) } + #[wasm_bindgen(js_name = applyPatches)] + pub fn apply_patches(&mut self, mut object: JsValue) -> Result { + let patches = self.doc.observer().take_patches(); + for p in patches { + object = apply_patch(object, &p)?; + } + Ok(object) + } + #[wasm_bindgen(js_name = popPatches)] pub fn pop_patches(&mut self) -> Result { // transactions send out observer updates as they occur, not waiting for them to be // committed. // If we pop the patches then we won't be able to revert them. 
- self.ensure_transaction_closed(); - let patches = self - .observer - .as_mut() - .map_or_else(Vec::new, |o| o.take_patches()); + let patches = self.doc.observer().take_patches(); let result = Array::new(); for p in patches { - let patch = Object::new(); - match p { - Patch::Put { - obj, - key, - value, - conflict, - } => { - js_set(&patch, "action", "put")?; - js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "key", key)?; - match value { - (Value::Object(obj_type), obj_id) => { - js_set(&patch, "datatype", obj_type.to_string())?; - js_set(&patch, "value", obj_id.to_string())?; - } - (Value::Scalar(value), _) => { - js_set(&patch, "datatype", datatype(&value))?; - js_set(&patch, "value", ScalarValue(value))?; - } - }; - js_set(&patch, "conflict", conflict)?; - } - - Patch::Insert { obj, index, value } => { - js_set(&patch, "action", "insert")?; - js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "key", index as f64)?; - match value { - (Value::Object(obj_type), obj_id) => { - js_set(&patch, "datatype", obj_type.to_string())?; - js_set(&patch, "value", obj_id.to_string())?; - } - (Value::Scalar(value), _) => { - js_set(&patch, "datatype", datatype(&value))?; - js_set(&patch, "value", ScalarValue(value))?; - } - }; - } - - Patch::Increment { obj, key, value } => { - js_set(&patch, "action", "increment")?; - js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "key", key)?; - js_set(&patch, "value", value.0)?; - } - - Patch::Delete { obj, key } => { - js_set(&patch, "action", "delete")?; - js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "key", key)?; - } - } - result.push(&patch); + result.push(&p.try_into()?); } Ok(result) } @@ -553,51 +473,31 @@ impl Automerge { } pub fn save(&mut self) -> Uint8Array { - self.ensure_transaction_closed(); Uint8Array::from(self.doc.save().as_slice()) } #[wasm_bindgen(js_name = saveIncremental)] pub fn save_incremental(&mut self) -> Uint8Array { - self.ensure_transaction_closed(); let bytes = 
self.doc.save_incremental(); Uint8Array::from(bytes.as_slice()) } #[wasm_bindgen(js_name = loadIncremental)] pub fn load_incremental(&mut self, data: Uint8Array) -> Result { - self.ensure_transaction_closed(); let data = data.to_vec(); - let options = if let Some(observer) = self.observer.as_mut() { - ApplyOptions::default().with_op_observer(observer) - } else { - ApplyOptions::default() - }; - let len = self - .doc - .load_incremental_with(&data, options) - .map_err(to_js_err)?; + let len = self.doc.load_incremental(&data).map_err(to_js_err)?; Ok(len as f64) } #[wasm_bindgen(js_name = applyChanges)] pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> { - self.ensure_transaction_closed(); let changes: Vec<_> = JS(changes).try_into()?; - let options = if let Some(observer) = self.observer.as_mut() { - ApplyOptions::default().with_op_observer(observer) - } else { - ApplyOptions::default() - }; - self.doc - .apply_changes_with(changes, options) - .map_err(to_js_err)?; + self.doc.apply_changes(changes).map_err(to_js_err)?; Ok(()) } #[wasm_bindgen(js_name = getChanges)] pub fn get_changes(&mut self, have_deps: JsValue) -> Result { - self.ensure_transaction_closed(); let deps: Vec<_> = JS(have_deps).try_into()?; let changes = self.doc.get_changes(&deps)?; let changes: Array = changes @@ -609,7 +509,6 @@ impl Automerge { #[wasm_bindgen(js_name = getChangeByHash)] pub fn get_change_by_hash(&mut self, hash: JsValue) -> Result { - self.ensure_transaction_closed(); let hash = serde_wasm_bindgen::from_value(hash).map_err(to_js_err)?; let change = self.doc.get_change_by_hash(&hash); if let Some(c) = change { @@ -621,7 +520,6 @@ impl Automerge { #[wasm_bindgen(js_name = getChangesAdded)] pub fn get_changes_added(&mut self, other: &mut Automerge) -> Result { - self.ensure_transaction_closed(); let changes = self.doc.get_changes_added(&mut other.doc); let changes: Array = changes .iter() @@ -632,7 +530,6 @@ impl Automerge { #[wasm_bindgen(js_name = getHeads)] 
pub fn get_heads(&mut self) -> Array { - self.ensure_transaction_closed(); let heads = self.doc.get_heads(); let heads: Array = heads .iter() @@ -649,7 +546,6 @@ impl Automerge { #[wasm_bindgen(js_name = getLastLocalChange)] pub fn get_last_local_change(&mut self) -> Result { - self.ensure_transaction_closed(); if let Some(change) = self.doc.get_last_local_change() { Ok(Uint8Array::from(change.raw_bytes()).into()) } else { @@ -658,13 +554,11 @@ impl Automerge { } pub fn dump(&mut self) { - self.ensure_transaction_closed(); self.doc.dump() } #[wasm_bindgen(js_name = getMissingDeps)] pub fn get_missing_deps(&mut self, heads: Option) -> Result { - self.ensure_transaction_closed(); let heads = get_heads(heads).unwrap_or_default(); let deps = self.doc.get_missing_deps(&heads); let deps: Array = deps @@ -680,23 +574,16 @@ impl Automerge { state: &mut SyncState, message: Uint8Array, ) -> Result<(), JsValue> { - self.ensure_transaction_closed(); let message = message.to_vec(); let message = am::sync::Message::decode(message.as_slice()).map_err(to_js_err)?; - let options = if let Some(observer) = self.observer.as_mut() { - ApplyOptions::default().with_op_observer(observer) - } else { - ApplyOptions::default() - }; self.doc - .receive_sync_message_with(&mut state.0, message, options) + .receive_sync_message(&mut state.0, message) .map_err(to_js_err)?; Ok(()) } #[wasm_bindgen(js_name = generateSyncMessage)] pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result { - self.ensure_transaction_closed(); if let Some(message) = self.doc.generate_sync_message(&mut state.0) { Ok(Uint8Array::from(message.encode().as_slice()).into()) } else { @@ -856,17 +743,12 @@ pub fn init(actor: Option) -> Result { #[wasm_bindgen(js_name = load)] pub fn load(data: Uint8Array, actor: Option) -> Result { let data = data.to_vec(); - let observer = None; - let options = ApplyOptions::<()>::default(); - let mut automerge = am::AutoCommit::load_with(&data, options).map_err(to_js_err)?; + 
let mut doc = AutoCommit::load(&data).map_err(to_js_err)?; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); - automerge.set_actor(actor); + doc.set_actor(actor); } - Ok(Automerge { - doc: automerge, - observer, - }) + Ok(Automerge { doc }) } #[wasm_bindgen(js_name = encodeChange)] diff --git a/automerge-wasm/src/observer.rs b/automerge-wasm/src/observer.rs new file mode 100644 index 00000000..c7adadc8 --- /dev/null +++ b/automerge-wasm/src/observer.rs @@ -0,0 +1,302 @@ +#![allow(dead_code)] + +use crate::interop::{export_value, js_set}; +use automerge::{ObjId, OpObserver, Parents, Prop, Value}; +use js_sys::{Array, Object}; +use wasm_bindgen::prelude::*; + +#[derive(Debug, Clone, Default)] +pub(crate) struct Observer { + enabled: bool, + patches: Vec, +} + +impl Observer { + pub(crate) fn take_patches(&mut self) -> Vec { + std::mem::take(&mut self.patches) + } + pub(crate) fn enable(&mut self, enable: bool) { + if self.enabled && !enable { + self.patches.truncate(0) + } + self.enabled = enable; + } +} + +#[derive(Debug, Clone)] +pub(crate) enum Patch { + PutMap { + obj: ObjId, + path: Vec, + key: String, + value: Value<'static>, + conflict: bool, + }, + PutSeq { + obj: ObjId, + path: Vec, + index: usize, + value: Value<'static>, + conflict: bool, + }, + Insert { + obj: ObjId, + path: Vec, + index: usize, + values: Vec>, + }, + Increment { + obj: ObjId, + path: Vec, + prop: Prop, + value: i64, + }, + DeleteMap { + obj: ObjId, + path: Vec, + key: String, + }, + DeleteSeq { + obj: ObjId, + path: Vec, + index: usize, + length: usize, + }, +} + +impl OpObserver for Observer { + fn insert( + &mut self, + mut parents: Parents<'_>, + obj: ObjId, + index: usize, + tagged_value: (Value<'_>, ObjId), + ) { + if self.enabled { + if let Some(Patch::Insert { + obj: tail_obj, + index: tail_index, + values, + .. 
+ }) = self.patches.last_mut() + { + if tail_obj == &obj && *tail_index + values.len() == index { + values.push(tagged_value.0.to_owned()); + return; + } + } + let path = parents.path().into_iter().map(|p| p.1).collect(); + let value = tagged_value.0.to_owned(); + let patch = Patch::Insert { + path, + obj, + index, + values: vec![value], + }; + self.patches.push(patch); + } + } + + fn put( + &mut self, + mut parents: Parents<'_>, + obj: ObjId, + prop: Prop, + tagged_value: (Value<'_>, ObjId), + conflict: bool, + ) { + if self.enabled { + let path = parents.path().into_iter().map(|p| p.1).collect(); + let value = tagged_value.0.to_owned(); + let patch = match prop { + Prop::Map(key) => Patch::PutMap { + path, + obj, + key, + value, + conflict, + }, + Prop::Seq(index) => Patch::PutSeq { + path, + obj, + index, + value, + conflict, + }, + }; + self.patches.push(patch); + } + } + + fn increment( + &mut self, + mut parents: Parents<'_>, + obj: ObjId, + prop: Prop, + tagged_value: (i64, ObjId), + ) { + if self.enabled { + let path = parents.path().into_iter().map(|p| p.1).collect(); + let value = tagged_value.0; + self.patches.push(Patch::Increment { + path, + obj, + prop, + value, + }) + } + } + + fn delete(&mut self, mut parents: Parents<'_>, obj: ObjId, prop: Prop) { + if self.enabled { + let path = parents.path().into_iter().map(|p| p.1).collect(); + let patch = match prop { + Prop::Map(key) => Patch::DeleteMap { path, obj, key }, + Prop::Seq(index) => Patch::DeleteSeq { + path, + obj, + index, + length: 1, + }, + }; + self.patches.push(patch) + } + } + + fn merge(&mut self, other: &Self) { + self.patches.extend_from_slice(other.patches.as_slice()) + } + + fn branch(&self) -> Self { + Observer { + patches: vec![], + enabled: self.enabled, + } + } +} + +fn prop_to_js(p: &Prop) -> JsValue { + match p { + Prop::Map(key) => JsValue::from_str(key), + Prop::Seq(index) => JsValue::from_f64(*index as f64), + } +} + +fn export_path(path: &[Prop], end: &Prop) -> Array { + let 
result = Array::new(); + for p in path { + result.push(&prop_to_js(p)); + } + result.push(&prop_to_js(end)); + result +} + +impl Patch { + pub(crate) fn path(&self) -> &[Prop] { + match &self { + Self::PutMap { path, .. } => path.as_slice(), + Self::PutSeq { path, .. } => path.as_slice(), + Self::Increment { path, .. } => path.as_slice(), + Self::Insert { path, .. } => path.as_slice(), + Self::DeleteMap { path, .. } => path.as_slice(), + Self::DeleteSeq { path, .. } => path.as_slice(), + } + } +} + +impl TryFrom for JsValue { + type Error = JsValue; + + fn try_from(p: Patch) -> Result { + let result = Object::new(); + match p { + Patch::PutMap { + path, + key, + value, + conflict, + .. + } => { + js_set(&result, "action", "put")?; + js_set( + &result, + "path", + export_path(path.as_slice(), &Prop::Map(key)), + )?; + js_set(&result, "value", export_value(&value))?; + js_set(&result, "conflict", &JsValue::from_bool(conflict))?; + Ok(result.into()) + } + Patch::PutSeq { + path, + index, + value, + conflict, + .. + } => { + js_set(&result, "action", "put")?; + js_set( + &result, + "path", + export_path(path.as_slice(), &Prop::Seq(index)), + )?; + js_set(&result, "value", export_value(&value))?; + js_set(&result, "conflict", &JsValue::from_bool(conflict))?; + Ok(result.into()) + } + Patch::Insert { + path, + index, + values, + .. + } => { + js_set(&result, "action", "splice")?; + js_set( + &result, + "path", + export_path(path.as_slice(), &Prop::Seq(index)), + )?; + js_set( + &result, + "values", + values.iter().map(export_value).collect::(), + )?; + Ok(result.into()) + } + Patch::Increment { + path, prop, value, .. + } => { + js_set(&result, "action", "inc")?; + js_set(&result, "path", export_path(path.as_slice(), &prop))?; + js_set(&result, "value", &JsValue::from_f64(value as f64))?; + Ok(result.into()) + } + Patch::DeleteMap { path, key, .. 
} => { + js_set(&result, "action", "del")?; + js_set( + &result, + "path", + export_path(path.as_slice(), &Prop::Map(key)), + )?; + Ok(result.into()) + } + Patch::DeleteSeq { + path, + index, + length, + .. + } => { + js_set(&result, "action", "del")?; + js_set( + &result, + "path", + export_path(path.as_slice(), &Prop::Seq(index)), + )?; + if length > 1 { + js_set(&result, "length", length)?; + } + Ok(result.into()) + } + } + } +} diff --git a/automerge-wasm/test/apply.ts b/automerge-wasm/test/apply.ts new file mode 100644 index 00000000..18b53758 --- /dev/null +++ b/automerge-wasm/test/apply.ts @@ -0,0 +1,100 @@ + +import { describe, it } from 'mocha'; +//@ts-ignore +import assert from 'assert' +//@ts-ignore +import init, { create, load } from '..' + +describe('Automerge', () => { + describe('Patch Apply', () => { + it('apply nested sets on maps', () => { + let start : any = { hello: { mellow: { yellow: "world", x: 1 }, y : 2 } } + let doc1 = create() + doc1.putObject("/", "hello", start.hello); + let mat = doc1.materialize("/") + let doc2 = create() + doc2.enablePatches(true) + doc2.merge(doc1) + + let base = doc2.applyPatches({}) + assert.deepEqual(mat, start) + assert.deepEqual(base, start) + + doc2.delete("/hello/mellow", "yellow"); + delete start.hello.mellow.yellow; + base = doc2.applyPatches(base) + mat = doc2.materialize("/") + + assert.deepEqual(mat, start) + assert.deepEqual(base, start) + }) + + it('apply patches on lists', () => { + //let start = { list: [1,2,3,4,5,6] } + let start = { list: [1,2,3,4] } + let doc1 = create() + doc1.putObject("/", "list", start.list); + let mat = doc1.materialize("/") + let doc2 = create() + doc2.enablePatches(true) + doc2.merge(doc1) + mat = doc1.materialize("/") + let base = doc2.applyPatches({}) + assert.deepEqual(mat, start) + assert.deepEqual(base, start) + + doc2.delete("/list", 3); + start.list.splice(3,1) + base = doc2.applyPatches(base) + + assert.deepEqual(base, start) + }) + + it('apply patches on lists of 
lists of lists', () => { + let start = { list: + [ + [ + [ 1, 2, 3, 4, 5, 6], + [ 7, 8, 9,10,11,12], + ], + [ + [ 7, 8, 9,10,11,12], + [ 1, 2, 3, 4, 5, 6], + ] + ] + } + let doc1 = create() + doc1.enablePatches(true) + doc1.putObject("/", "list", start.list); + let mat = doc1.materialize("/") + let base = doc1.applyPatches({}) + assert.deepEqual(mat, start) + + doc1.delete("/list/0/1", 3) + start.list[0][1].splice(3,1) + + doc1.delete("/list/0", 0) + start.list[0].splice(0,1) + + mat = doc1.materialize("/") + base = doc1.applyPatches(base) + assert.deepEqual(mat, start) + assert.deepEqual(base, start) + }) + + it('large inserts should make one splice patch', () => { + let doc1 = create() + doc1.enablePatches(true) + doc1.putObject("/", "list", "abc"); + let patches = doc1.popPatches() + assert.deepEqual( patches, [ + { action: 'put', conflict: false, path: [ 'list' ], value: [] }, + { action: 'splice', path: [ 'list', 0 ], values: [ 'a', 'b', 'c' ] }]) + }) + }) +}) + +// FIXME: handle conflicts correctly on apply +// TODO: squash puts +// TODO: merge deletes +// TODO: elide `conflict: false` diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index 00dedeed..a201d867 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -503,7 +503,7 @@ describe('Automerge', () => { doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', obj: '_root', key: 'hello', value: 'world', datatype: 'str', conflict: false } + { action: 'put', path: ['hello'], value: 'world', conflict: false } ]) doc1.free() doc2.free() @@ -515,9 +515,9 @@ describe('Automerge', () => { doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', obj: '_root', key: 'birds', value: '1@aaaa', datatype: 'map', conflict: false }, - { action: 'put', obj: '1@aaaa', key: 'friday', value: '2@aaaa', datatype: 'map', conflict: false }, - { 
action: 'put', obj: '2@aaaa', key: 'robins', value: 3, datatype: 'int', conflict: false } + { action: 'put', path: [ 'birds' ], value: {}, conflict: false }, + { action: 'put', path: [ 'birds', 'friday' ], value: {}, conflict: false }, + { action: 'put', path: [ 'birds', 'friday', 'robins' ], value: 3, conflict: false}, ]) doc1.free() doc2.free() @@ -531,8 +531,8 @@ describe('Automerge', () => { doc1.delete('_root', 'favouriteBird') doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', obj: '_root', key: 'favouriteBird', value: 'Robin', datatype: 'str', conflict: false }, - { action: 'delete', obj: '_root', key: 'favouriteBird' } + { action: 'put', path: [ 'favouriteBird' ], value: 'Robin', conflict: false }, + { action: 'del', path: [ 'favouriteBird' ] } ]) doc1.free() doc2.free() @@ -544,9 +544,8 @@ describe('Automerge', () => { doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', obj: '_root', key: 'birds', value: '1@aaaa', datatype: 'list', conflict: false }, - { action: 'insert', obj: '1@aaaa', key: 0, value: 'Goldfinch', datatype: 'str' }, - { action: 'insert', obj: '1@aaaa', key: 1, value: 'Chaffinch', datatype: 'str' } + { action: 'put', path: [ 'birds' ], value: [], conflict: false }, + { action: 'splice', path: [ 'birds', 0 ], values: ['Goldfinch', 'Chaffinch'] }, ]) doc1.free() doc2.free() @@ -560,9 +559,9 @@ describe('Automerge', () => { doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'insert', obj: '1@aaaa', key: 0, value: '2@aaaa', datatype: 'map' }, - { action: 'put', obj: '2@aaaa', key: 'species', value: 'Goldfinch', datatype: 'str', conflict: false }, - { action: 'put', obj: '2@aaaa', key: 'count', value: 3, datatype: 'int', conflict: false } + { action: 'splice', path: [ 'birds', 0 ], values: [{}] }, + { action: 'put', path: [ 'birds', 0, 'species' ], 
value: 'Goldfinch', conflict: false }, + { action: 'put', path: [ 'birds', 0, 'count', ], value: 3, conflict: false } ]) doc1.free() doc2.free() @@ -579,8 +578,8 @@ describe('Automerge', () => { assert.deepEqual(doc1.getWithType('1@aaaa', 0), ['str', 'Chaffinch']) assert.deepEqual(doc1.getWithType('1@aaaa', 1), ['str', 'Greenfinch']) assert.deepEqual(doc2.popPatches(), [ - { action: 'delete', obj: '1@aaaa', key: 0 }, - { action: 'insert', obj: '1@aaaa', key: 1, value: 'Greenfinch', datatype: 'str' } + { action: 'del', path: ['birds', 0] }, + { action: 'splice', path: ['birds', 1], values: ['Greenfinch'] } ]) doc1.free() doc2.free() @@ -605,16 +604,11 @@ describe('Automerge', () => { assert.deepEqual([0, 1, 2, 3].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) assert.deepEqual([0, 1, 2, 3].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) assert.deepEqual(doc3.popPatches(), [ - { action: 'insert', obj: '1@aaaa', key: 0, value: 'c', datatype: 'str' }, - { action: 'insert', obj: '1@aaaa', key: 1, value: 'd', datatype: 'str' }, - { action: 'insert', obj: '1@aaaa', key: 0, value: 'a', datatype: 'str' }, - { action: 'insert', obj: '1@aaaa', key: 1, value: 'b', datatype: 'str' } + { action: 'splice', path: ['values', 0], values:['c','d'] }, + { action: 'splice', path: ['values', 0], values:['a','b'] }, ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'insert', obj: '1@aaaa', key: 0, value: 'a', datatype: 'str' }, - { action: 'insert', obj: '1@aaaa', key: 1, value: 'b', datatype: 'str' }, - { action: 'insert', obj: '1@aaaa', key: 2, value: 'c', datatype: 'str' }, - { action: 'insert', obj: '1@aaaa', key: 3, value: 'd', datatype: 'str' } + { action: 'splice', path: ['values',0], values:['a','b','c','d'] }, ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) @@ -638,16 +632,11 @@ describe('Automerge', () => { assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 
'e', 'f']) assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) assert.deepEqual(doc3.popPatches(), [ - { action: 'insert', obj: '1@aaaa', key: 2, value: 'e', datatype: 'str' }, - { action: 'insert', obj: '1@aaaa', key: 3, value: 'f', datatype: 'str' }, - { action: 'insert', obj: '1@aaaa', key: 2, value: 'c', datatype: 'str' }, - { action: 'insert', obj: '1@aaaa', key: 3, value: 'd', datatype: 'str' } + { action: 'splice', path: ['values', 2], values: ['e','f'] }, + { action: 'splice', path: ['values', 2], values: ['c','d'] }, ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'insert', obj: '1@aaaa', key: 2, value: 'c', datatype: 'str' }, - { action: 'insert', obj: '1@aaaa', key: 3, value: 'd', datatype: 'str' }, - { action: 'insert', obj: '1@aaaa', key: 4, value: 'e', datatype: 'str' }, - { action: 'insert', obj: '1@aaaa', key: 5, value: 'f', datatype: 'str' } + { action: 'splice', path: ['values', 2], values: ['c','d','e','f'] }, ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) @@ -666,12 +655,12 @@ describe('Automerge', () => { assert.deepEqual(doc4.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc4.getAll('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', conflict: false }, - { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true } + { action: 'put', path: ['bird'], value: 'Greenfinch', conflict: false }, + { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false }, - { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true } + { action: 'put', path: ['bird'], value: 'Goldfinch', 
conflict: false }, + { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) @@ -701,16 +690,16 @@ describe('Automerge', () => { ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] ]) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', obj: '_root', key: 'bird', value: 'Chaffinch', datatype: 'str', conflict: true }, - { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true } + { action: 'put', path: ['bird'], value: 'Chaffinch', conflict: true }, + { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true } ]) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true }, - { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true } + { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, + { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true } ]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true }, - { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: true } + { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, + { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true } ]) doc1.free(); doc2.free(); doc3.free() }) @@ -727,9 +716,9 @@ describe('Automerge', () => { doc3.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', obj: '_root', key: 'bird', value: 'Greenfinch', datatype: 'str', conflict: false }, - { action: 'put', obj: '_root', key: 'bird', value: 'Chaffinch', datatype: 'str', conflict: true }, - { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 
'str', conflict: false } + { action: 'put', path: ['bird'], value: 'Greenfinch', conflict: false }, + { action: 'put', path: ['bird'], value: 'Chaffinch', conflict: true }, + { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false } ]) doc1.free(); doc2.free(); doc3.free() }) @@ -750,10 +739,10 @@ describe('Automerge', () => { assert.deepEqual(doc2.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc2.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false } + { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false } ]) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', obj: '_root', key: 'bird', value: 'Goldfinch', datatype: 'str', conflict: false } + { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false } ]) doc1.free(); doc2.free() }) @@ -777,12 +766,12 @@ describe('Automerge', () => { assert.deepEqual(doc4.getWithType('1@aaaa', 0), ['str', 'Redwing']) assert.deepEqual(doc4.getAll('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', obj: '1@aaaa', key: 0, value: 'Song Thrush', datatype: 'str', conflict: false }, - { action: 'put', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: true } + { action: 'put', path: ['birds',0], value: 'Song Thrush', conflict: false }, + { action: 'put', path: ['birds',0], value: 'Redwing', conflict: true } ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'put', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: false }, - { action: 'put', obj: '1@aaaa', key: 0, value: 'Redwing', datatype: 'str', conflict: true } + { action: 'put', path: ['birds',0], value: 'Redwing', conflict: false }, + { action: 'put', path: ['birds',0], value: 'Redwing', conflict: true } ]) doc1.free(); doc2.free(); doc3.free(); 
doc4.free() }) @@ -808,16 +797,16 @@ describe('Automerge', () => { assert.deepEqual(doc4.getAll('1@aaaa', 0), [['str', 'Ring-necked parakeet', '5@bbbb']]) assert.deepEqual(doc4.getAll('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'delete', obj: '1@aaaa', key: 0 }, - { action: 'put', obj: '1@aaaa', key: 1, value: 'Song Thrush', datatype: 'str', conflict: false }, - { action: 'insert', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str' }, - { action: 'put', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: true } + { action: 'del', path: ['birds',0], }, + { action: 'put', path: ['birds',1], value: 'Song Thrush', conflict: false }, + { action: 'splice', path: ['birds',0], values: ['Ring-necked parakeet'] }, + { action: 'put', path: ['birds',2], value: 'Redwing', conflict: true } ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'put', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str', conflict: false }, - { action: 'put', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: false }, - { action: 'put', obj: '1@aaaa', key: 0, value: 'Ring-necked parakeet', datatype: 'str', conflict: false }, - { action: 'put', obj: '1@aaaa', key: 2, value: 'Redwing', datatype: 'str', conflict: true } + { action: 'put', path: ['birds',0], value: 'Ring-necked parakeet', conflict: false }, + { action: 'put', path: ['birds',2], value: 'Redwing', conflict: false }, + { action: 'put', path: ['birds',0], value: 'Ring-necked parakeet', conflict: false }, + { action: 'put', path: ['birds',2], value: 'Redwing', conflict: true } ]) doc1.free(); doc2.free(); doc3.free(); doc4.free() }) @@ -834,14 +823,14 @@ describe('Automerge', () => { doc3.loadIncremental(change2) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa'], ['str', 'Wren', '1@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', obj: '_root', key: 
'bird', value: 'Robin', datatype: 'str', conflict: false }, - { action: 'put', obj: '_root', key: 'bird', value: 'Wren', datatype: 'str', conflict: true } + { action: 'put', path: ['bird'], value: 'Robin', conflict: false }, + { action: 'put', path: ['bird'], value: 'Wren', conflict: true } ]) doc3.loadIncremental(change3) assert.deepEqual(doc3.getWithType('_root', 'bird'), ['str', 'Robin']) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', obj: '_root', key: 'bird', value: 'Robin', datatype: 'str', conflict: false } + { action: 'put', path: ['bird'], value: 'Robin', conflict: false } ]) doc1.free(); doc2.free(); doc3.free() }) @@ -857,26 +846,25 @@ describe('Automerge', () => { doc2.loadIncremental(change1) assert.deepEqual(doc1.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true }, - { action: 'put', obj: '1@bbbb', key: 'Sparrowhawk', value: 1, datatype: 'int', conflict: false } + { action: 'put', path: ['birds'], value: {}, conflict: true }, + { action: 'put', path: ['birds', 'Sparrowhawk'], value: 1, conflict: false } ]) assert.deepEqual(doc2.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', obj: '_root', key: 'birds', value: '1@bbbb', datatype: 'map', conflict: true }, - { action: 'insert', obj: '1@aaaa', key: 0, value: 'Parakeet', datatype: 'str' } + { action: 'put', path: ['birds'], value: {}, conflict: true }, + { action: 'splice', path: ['birds',0], values: ['Parakeet'] } ]) doc1.free(); doc2.free() }) it('should support date objects', () => { - // FIXME: either use Date objects or use numbers consistently const doc1 = create('aaaa'), doc2 = create('bbbb'), now = new Date() - doc1.put('_root', 'createdAt', now.getTime(), 'timestamp') + doc1.put('_root', 
'createdAt', now) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.getWithType('_root', 'createdAt'), ['timestamp', now]) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', obj: '_root', key: 'createdAt', value: now, datatype: 'timestamp', conflict: false } + { action: 'put', path: ['createdAt'], value: now, conflict: false } ]) doc1.free(); doc2.free() }) @@ -891,11 +879,11 @@ describe('Automerge', () => { const list = doc1.putObject('_root', 'list', []) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', obj: '_root', key: 'key1', value: 1, datatype: 'int', conflict: false }, - { action: 'put', obj: '_root', key: 'key1', value: 2, datatype: 'int', conflict: false }, - { action: 'put', obj: '_root', key: 'key2', value: 3, datatype: 'int', conflict: false }, - { action: 'put', obj: '_root', key: 'map', value: map, datatype: 'map', conflict: false }, - { action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false }, + { action: 'put', path: ['key1'], value: 1, conflict: false }, + { action: 'put', path: ['key1'], value: 2, conflict: false }, + { action: 'put', path: ['key2'], value: 3, conflict: false }, + { action: 'put', path: ['map'], value: {}, conflict: false }, + { action: 'put', path: ['list'], value: [], conflict: false }, ]) doc1.free() }) @@ -911,12 +899,12 @@ describe('Automerge', () => { const list2 = doc1.insertObject(list, 2, []) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false }, - { action: 'insert', obj: list, key: 0, value: 1, datatype: 'int' }, - { action: 'insert', obj: list, key: 0, value: 2, datatype: 'int' }, - { action: 'insert', obj: list, key: 2, value: 3, datatype: 'int' }, - { action: 'insert', obj: list, key: 2, value: map, datatype: 'map' }, - { action: 'insert', obj: list, key: 2, value: list2, datatype: 'list' }, + { action: 'put', path: ['list'], value: [], conflict: 
false }, + { action: 'splice', path: ['list', 0], values: [1] }, + { action: 'splice', path: ['list', 0], values: [2] }, + { action: 'splice', path: ['list', 2], values: [3] }, + { action: 'splice', path: ['list', 2], values: [{}] }, + { action: 'splice', path: ['list', 2], values: [[]] }, ]) doc1.free() }) @@ -930,10 +918,8 @@ describe('Automerge', () => { const list2 = doc1.pushObject(list, []) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false }, - { action: 'insert', obj: list, key: 0, value: 1, datatype: 'int' }, - { action: 'insert', obj: list, key: 1, value: map, datatype: 'map' }, - { action: 'insert', obj: list, key: 2, value: list2, datatype: 'list' }, + { action: 'put', path: ['list'], value: [], conflict: false }, + { action: 'splice', path: ['list',0], values: [1,{},[]] }, ]) doc1.free() }) @@ -946,13 +932,10 @@ describe('Automerge', () => { doc1.splice(list, 1, 2) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false }, - { action: 'insert', obj: list, key: 0, value: 1, datatype: 'int' }, - { action: 'insert', obj: list, key: 1, value: 2, datatype: 'int' }, - { action: 'insert', obj: list, key: 2, value: 3, datatype: 'int' }, - { action: 'insert', obj: list, key: 3, value: 4, datatype: 'int' }, - { action: 'delete', obj: list, key: 1 }, - { action: 'delete', obj: list, key: 1 }, + { action: 'put', path: ['list'], value: [], conflict: false }, + { action: 'splice', path: ['list',0], values: [1,2,3,4] }, + { action: 'del', path: ['list',1] }, + { action: 'del', path: ['list',1] }, ]) doc1.free() }) @@ -964,8 +947,8 @@ describe('Automerge', () => { doc1.increment('_root', 'counter', 4) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', obj: '_root', key: 'counter', value: 2, datatype: 'counter', conflict: false }, - { action: 'increment', obj: '_root', key: 'counter', value: 4 }, + { action: 
'put', path: ['counter'], value: 2, conflict: false }, + { action: 'inc', path: ['counter'], value: 4 }, ]) doc1.free() }) @@ -979,10 +962,10 @@ describe('Automerge', () => { doc1.delete('_root', 'key1') doc1.delete('_root', 'key2') assert.deepEqual(doc1.popPatches(), [ - { action: 'put', obj: '_root', key: 'key1', value: 1, datatype: 'int', conflict: false }, - { action: 'put', obj: '_root', key: 'key2', value: 2, datatype: 'int', conflict: false }, - { action: 'delete', obj: '_root', key: 'key1' }, - { action: 'delete', obj: '_root', key: 'key2' }, + { action: 'put', path: ['key1'], value: 1, conflict: false }, + { action: 'put', path: ['key2'], value: 2, conflict: false }, + { action: 'del', path: ['key1'], }, + { action: 'del', path: ['key2'], }, ]) doc1.free() }) @@ -996,8 +979,8 @@ describe('Automerge', () => { doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.getWithType('_root', 'starlings'), ['counter', 3]) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', obj: '_root', key: 'starlings', value: 2, datatype: 'counter', conflict: false }, - { action: 'increment', obj: '_root', key: 'starlings', value: 1 } + { action: 'put', path: ['starlings'], value: 2, conflict: false }, + { action: 'inc', path: ['starlings'], value: 1 } ]) doc1.free(); doc2.free() }) @@ -1015,10 +998,10 @@ describe('Automerge', () => { doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', obj: '_root', key: 'list', value: list, datatype: 'list', conflict: false }, - { action: 'insert', obj: list, key: 0, value: 1, datatype: 'counter' }, - { action: 'increment', obj: list, key: 0, value: 2 }, - { action: 'increment', obj: list, key: 0, value: -5 }, + { action: 'put', path: ['list'], value: [], conflict: false }, + { action: 'splice', path: ['list',0], values: [1] }, + { action: 'inc', path: ['list',0], value: 2 }, + { action: 'inc', path: ['list',0], value: -5 }, ]) doc1.free(); doc2.free() }) diff --git 
a/automerge/examples/watch.rs b/automerge/examples/watch.rs index d9668497..ccc480e6 100644 --- a/automerge/examples/watch.rs +++ b/automerge/examples/watch.rs @@ -9,19 +9,19 @@ use automerge::ROOT; fn main() { let mut doc = Automerge::new(); - let mut observer = VecOpObserver::default(); // a simple scalar change in the root object - doc.transact_with::<_, _, AutomergeError, _, _>( - |_result| CommitOptions::default().with_op_observer(&mut observer), - |tx| { - tx.put(ROOT, "hello", "world").unwrap(); - Ok(()) - }, - ) - .unwrap(); - get_changes(&doc, observer.take_patches()); + let mut result = doc + .transact_with::<_, _, AutomergeError, _, VecOpObserver>( + |_result| CommitOptions::default(), + |tx| { + tx.put(ROOT, "hello", "world").unwrap(); + Ok(()) + }, + ) + .unwrap(); + get_changes(&doc, result.op_observer.take_patches()); - let mut tx = doc.transaction(); + let mut tx = doc.transaction_with_observer(VecOpObserver::default()); let map = tx .put_object(ROOT, "my new map", automerge::ObjType::Map) .unwrap(); @@ -36,28 +36,28 @@ fn main() { tx.insert(&list, 1, "woo").unwrap(); let m = tx.insert_object(&list, 2, automerge::ObjType::Map).unwrap(); tx.put(&m, "hi", 2).unwrap(); - let _heads3 = tx.commit_with(CommitOptions::default().with_op_observer(&mut observer)); - get_changes(&doc, observer.take_patches()); + let patches = tx.op_observer.take_patches(); + let _heads3 = tx.commit_with(CommitOptions::default()); + get_changes(&doc, patches); } fn get_changes(doc: &Automerge, patches: Vec) { for patch in patches { match patch { Patch::Put { - obj, - key, - value, - conflict: _, + obj, prop, value, .. } => { println!( "put {:?} at {:?} in obj {:?}, object path {:?}", value, - key, + prop, obj, doc.path_to_object(&obj) ) } - Patch::Insert { obj, index, value } => { + Patch::Insert { + obj, index, value, .. 
+ } => { println!( "insert {:?} at {:?} in obj {:?}, object path {:?}", value, @@ -66,18 +66,20 @@ fn get_changes(doc: &Automerge, patches: Vec) { doc.path_to_object(&obj) ) } - Patch::Increment { obj, key, value } => { + Patch::Increment { + obj, prop, value, .. + } => { println!( "increment {:?} in obj {:?} by {:?}, object path {:?}", - key, + prop, obj, value, doc.path_to_object(&obj) ) } - Patch::Delete { obj, key } => println!( + Patch::Delete { obj, prop, .. } => println!( "delete {:?} in obj {:?}, object path {:?}", - key, + prop, obj, doc.path_to_object(&obj) ), diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 71fb7df2..4520c67d 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -4,8 +4,7 @@ use crate::exid::ExId; use crate::op_observer::OpObserver; use crate::transaction::{CommitOptions, Transactable}; use crate::{ - sync, ApplyOptions, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, - Parents, ScalarValue, + sync, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, Parents, ScalarValue, }; use crate::{ transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, @@ -14,22 +13,46 @@ use crate::{ /// An automerge document that automatically manages transactions. 
#[derive(Debug, Clone)] -pub struct AutoCommit { +pub struct AutoCommitWithObs { doc: Automerge, - transaction: Option, + transaction: Option<(Obs, TransactionInner)>, + op_observer: Obs, } -impl Default for AutoCommit { +pub type AutoCommit = AutoCommitWithObs<()>; + +impl Default for AutoCommitWithObs { fn default() -> Self { - Self::new() + let op_observer = O::default(); + AutoCommitWithObs { + doc: Automerge::new(), + transaction: None, + op_observer, + } } } impl AutoCommit { - pub fn new() -> Self { - Self { + pub fn new() -> AutoCommit { + AutoCommitWithObs { doc: Automerge::new(), transaction: None, + op_observer: (), + } + } +} + +impl AutoCommitWithObs { + pub fn observer(&mut self) -> &mut Obs { + self.ensure_transaction_closed(); + &mut self.op_observer + } + + pub fn with_observer(self, op_observer: Obs2) -> AutoCommitWithObs { + AutoCommitWithObs { + doc: self.doc, + transaction: self.transaction.map(|(_, t)| (op_observer.branch(), t)), + op_observer, } } @@ -58,7 +81,7 @@ impl AutoCommit { fn ensure_transaction_open(&mut self) { if self.transaction.is_none() { - self.transaction = Some(self.doc.transaction_inner()); + self.transaction = Some((self.op_observer.branch(), self.doc.transaction_inner())); } } @@ -67,6 +90,7 @@ impl AutoCommit { Self { doc: self.doc.fork(), transaction: self.transaction.clone(), + op_observer: self.op_observer.clone(), } } @@ -75,46 +99,35 @@ impl AutoCommit { Ok(Self { doc: self.doc.fork_at(heads)?, transaction: self.transaction.clone(), + op_observer: self.op_observer.clone(), }) } fn ensure_transaction_closed(&mut self) { - if let Some(tx) = self.transaction.take() { - tx.commit::<()>(&mut self.doc, None, None, None); + if let Some((current, tx)) = self.transaction.take() { + self.op_observer.merge(¤t); + tx.commit(&mut self.doc, None, None); } } pub fn load(data: &[u8]) -> Result { + // passing a () observer here has performance implications on all loads + // if we want an autocommit::load() method that can be 
observered we need to make a new method + // fn observed_load() ? let doc = Automerge::load(data)?; + let op_observer = Obs::default(); Ok(Self { doc, transaction: None, - }) - } - - pub fn load_with( - data: &[u8], - options: ApplyOptions<'_, Obs>, - ) -> Result { - let doc = Automerge::load_with(data, options)?; - Ok(Self { - doc, - transaction: None, + op_observer, }) } pub fn load_incremental(&mut self, data: &[u8]) -> Result { self.ensure_transaction_closed(); - self.doc.load_incremental(data) - } - - pub fn load_incremental_with<'a, Obs: OpObserver>( - &mut self, - data: &[u8], - options: ApplyOptions<'a, Obs>, - ) -> Result { - self.ensure_transaction_closed(); - self.doc.load_incremental_with(data, options) + // TODO - would be nice to pass None here instead of &mut () + self.doc + .load_incremental_with(data, Some(&mut self.op_observer)) } pub fn apply_changes( @@ -122,34 +135,19 @@ impl AutoCommit { changes: impl IntoIterator, ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); - self.doc.apply_changes(changes) - } - - pub fn apply_changes_with, Obs: OpObserver>( - &mut self, - changes: I, - options: ApplyOptions<'_, Obs>, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_closed(); - self.doc.apply_changes_with(changes, options) + self.doc + .apply_changes_with(changes, Some(&mut self.op_observer)) } /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { - self.ensure_transaction_closed(); - other.ensure_transaction_closed(); - self.doc.merge(&mut other.doc) - } - - /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge_with<'a, Obs: OpObserver>( + pub fn merge( &mut self, - other: &mut Self, - options: ApplyOptions<'a, Obs>, + other: &mut AutoCommitWithObs, ) -> Result, AutomergeError> { self.ensure_transaction_closed(); other.ensure_transaction_closed(); - self.doc.merge_with(&mut other.doc, 
options) + self.doc + .merge_with(&mut other.doc, Some(&mut self.op_observer)) } pub fn save(&mut self) -> Vec { @@ -220,17 +218,6 @@ impl AutoCommit { self.doc.receive_sync_message(sync_state, message) } - pub fn receive_sync_message_with<'a, Obs: OpObserver>( - &mut self, - sync_state: &mut sync::State, - message: sync::Message, - options: ApplyOptions<'a, Obs>, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_closed(); - self.doc - .receive_sync_message_with(sync_state, message, options) - } - /// Return a graphviz representation of the opset. /// /// # Arguments @@ -251,7 +238,7 @@ impl AutoCommit { } pub fn commit(&mut self) -> ChangeHash { - self.commit_with::<()>(CommitOptions::default()) + self.commit_with(CommitOptions::default()) } /// Commit the current operations with some options. @@ -267,33 +254,29 @@ impl AutoCommit { /// doc.put_object(&ROOT, "todos", ObjType::List).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; - /// doc.commit_with::<()>(CommitOptions::default().with_message("Create todos list").with_time(now)); + /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(&mut self, options: CommitOptions<'_, Obs>) -> ChangeHash { + pub fn commit_with(&mut self, options: CommitOptions) -> ChangeHash { // ensure that even no changes triggers a change self.ensure_transaction_open(); - let tx = self.transaction.take().unwrap(); - tx.commit( - &mut self.doc, - options.message, - options.time, - options.op_observer, - ) + let (current, tx) = self.transaction.take().unwrap(); + self.op_observer.merge(¤t); + tx.commit(&mut self.doc, options.message, options.time) } pub fn rollback(&mut self) -> usize { self.transaction .take() - .map(|tx| tx.rollback(&mut self.doc)) + .map(|(_, tx)| tx.rollback(&mut self.doc)) .unwrap_or(0) } } -impl Transactable for AutoCommit { +impl Transactable for AutoCommitWithObs { fn 
pending_ops(&self) -> usize { self.transaction .as_ref() - .map(|t| t.pending_ops()) + .map(|(_, t)| t.pending_ops()) .unwrap_or(0) } @@ -389,8 +372,8 @@ impl Transactable for AutoCommit { value: V, ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); - let tx = self.transaction.as_mut().unwrap(); - tx.put(&mut self.doc, obj.as_ref(), prop, value) + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.put(&mut self.doc, current, obj.as_ref(), prop, value) } fn put_object, P: Into>( @@ -400,8 +383,8 @@ impl Transactable for AutoCommit { value: ObjType, ) -> Result { self.ensure_transaction_open(); - let tx = self.transaction.as_mut().unwrap(); - tx.put_object(&mut self.doc, obj.as_ref(), prop, value) + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.put_object(&mut self.doc, current, obj.as_ref(), prop, value) } fn insert, V: Into>( @@ -411,8 +394,8 @@ impl Transactable for AutoCommit { value: V, ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); - let tx = self.transaction.as_mut().unwrap(); - tx.insert(&mut self.doc, obj.as_ref(), index, value) + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.insert(&mut self.doc, current, obj.as_ref(), index, value) } fn insert_object>( @@ -422,8 +405,8 @@ impl Transactable for AutoCommit { value: ObjType, ) -> Result { self.ensure_transaction_open(); - let tx = self.transaction.as_mut().unwrap(); - tx.insert_object(&mut self.doc, obj.as_ref(), index, value) + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.insert_object(&mut self.doc, current, obj.as_ref(), index, value) } fn increment, P: Into>( @@ -433,8 +416,8 @@ impl Transactable for AutoCommit { value: i64, ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); - let tx = self.transaction.as_mut().unwrap(); - tx.increment(&mut self.doc, obj.as_ref(), prop, value) + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.increment(&mut self.doc, current, obj.as_ref(), prop, value) } 
fn delete, P: Into>( @@ -443,8 +426,8 @@ impl Transactable for AutoCommit { prop: P, ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); - let tx = self.transaction.as_mut().unwrap(); - tx.delete(&mut self.doc, obj.as_ref(), prop) + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.delete(&mut self.doc, current, obj.as_ref(), prop) } /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert @@ -457,8 +440,8 @@ impl Transactable for AutoCommit { vals: V, ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); - let tx = self.transaction.as_mut().unwrap(); - tx.splice(&mut self.doc, obj.as_ref(), pos, del, vals) + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.splice(&mut self.doc, current, obj.as_ref(), pos, del, vals) } fn text>(&self, obj: O) -> Result { diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 96a0ed47..0ca12934 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -19,8 +19,8 @@ use crate::types::{ ScalarValue, Value, }; use crate::{ - query, ApplyOptions, AutomergeError, Change, KeysAt, ListRange, ListRangeAt, MapRange, - MapRangeAt, ObjType, Prop, Values, + query, AutomergeError, Change, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, + Prop, Values, }; use serde::Serialize; @@ -111,10 +111,22 @@ impl Automerge { } /// Start a transaction. - pub fn transaction(&mut self) -> Transaction<'_> { + pub fn transaction(&mut self) -> Transaction<'_, ()> { Transaction { inner: Some(self.transaction_inner()), doc: self, + op_observer: (), + } + } + + pub fn transaction_with_observer( + &mut self, + op_observer: Obs, + ) -> Transaction<'_, Obs> { + Transaction { + inner: Some(self.transaction_inner()), + doc: self, + op_observer, } } @@ -143,15 +155,16 @@ impl Automerge { /// Run a transaction on this document in a closure, automatically handling commit or rollback /// afterwards. 
- pub fn transact(&mut self, f: F) -> transaction::Result + pub fn transact(&mut self, f: F) -> transaction::Result where - F: FnOnce(&mut Transaction<'_>) -> Result, + F: FnOnce(&mut Transaction<'_, ()>) -> Result, { let mut tx = self.transaction(); let result = f(&mut tx); match result { Ok(result) => Ok(Success { result, + op_observer: (), hash: tx.commit(), }), Err(error) => Err(Failure { @@ -162,19 +175,25 @@ impl Automerge { } /// Like [`Self::transact`] but with a function for generating the commit options. - pub fn transact_with<'a, F, O, E, C, Obs>(&mut self, c: C, f: F) -> transaction::Result + pub fn transact_with(&mut self, c: C, f: F) -> transaction::Result where - F: FnOnce(&mut Transaction<'_>) -> Result, - C: FnOnce(&O) -> CommitOptions<'a, Obs>, - Obs: 'a + OpObserver, + F: FnOnce(&mut Transaction<'_, Obs>) -> Result, + C: FnOnce(&O) -> CommitOptions, + Obs: OpObserver, { - let mut tx = self.transaction(); + let mut op_observer = Obs::default(); + let mut tx = self.transaction_with_observer(Default::default()); let result = f(&mut tx); match result { Ok(result) => { let commit_options = c(&result); + std::mem::swap(&mut op_observer, &mut tx.op_observer); let hash = tx.commit_with(commit_options); - Ok(Success { result, hash }) + Ok(Success { + result, + hash, + op_observer, + }) } Err(error) => Err(Failure { error, @@ -220,17 +239,6 @@ impl Automerge { // PropAt::() // NthAt::() - /// Get the object id of the object that contains this object and the prop that this object is - /// at in that object. - pub(crate) fn parent_object(&self, obj: ObjId) -> Option<(ObjId, Key)> { - if obj == ObjId::root() { - // root has no parent - None - } else { - self.ops.parent_object(&obj) - } - } - /// Get the parents of an object in the document tree. /// /// ### Errors @@ -244,10 +252,7 @@ impl Automerge { /// value. 
pub fn parents>(&self, obj: O) -> Result, AutomergeError> { let obj_id = self.exid_to_obj(obj.as_ref())?; - Ok(Parents { - obj: obj_id, - doc: self, - }) + Ok(self.ops.parents(obj_id)) } pub fn path_to_object>( @@ -259,21 +264,6 @@ impl Automerge { Ok(path) } - /// Export a key to a prop. - pub(crate) fn export_key(&self, obj: ObjId, key: Key) -> Prop { - match key { - Key::Map(m) => Prop::Map(self.ops.m.props.get(m).into()), - Key::Seq(opid) => { - let i = self - .ops - .search(&obj, query::ElemIdPos::new(opid)) - .index() - .unwrap(); - Prop::Seq(i) - } - } - } - /// Get the keys of the object `obj`. /// /// For a map this returns the keys of the map. @@ -587,14 +577,14 @@ impl Automerge { /// Load a document. pub fn load(data: &[u8]) -> Result { - Self::load_with::<()>(data, ApplyOptions::default()) + Self::load_with::<()>(data, None) } /// Load a document. - #[tracing::instrument(skip(data, options), err)] + #[tracing::instrument(skip(data, observer), err)] pub fn load_with( data: &[u8], - mut options: ApplyOptions<'_, Obs>, + mut observer: Option<&mut Obs>, ) -> Result { if data.is_empty() { tracing::trace!("no data, initializing empty document"); @@ -606,7 +596,6 @@ impl Automerge { if !first_chunk.checksum_valid() { return Err(load::Error::BadChecksum.into()); } - let observer = &mut options.op_observer; let mut am = match first_chunk { storage::Chunk::Document(d) => { @@ -616,7 +605,7 @@ impl Automerge { result: op_set, changes, heads, - } = match observer { + } = match &mut observer { Some(o) => storage::load::reconstruct_document(&d, OpSet::observed_builder(*o)), None => storage::load::reconstruct_document(&d, OpSet::builder()), } @@ -651,7 +640,7 @@ impl Automerge { let change = Change::new_from_unverified(stored_change.into_owned(), None) .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?; let mut am = Self::new(); - am.apply_change(change, observer); + am.apply_change(change, &mut observer); am } 
storage::Chunk::CompressedChange(stored_change, compressed) => { @@ -662,7 +651,7 @@ impl Automerge { ) .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?; let mut am = Self::new(); - am.apply_change(change, observer); + am.apply_change(change, &mut observer); am } }; @@ -670,7 +659,7 @@ impl Automerge { match load::load_changes(remaining.reset()) { load::LoadedChanges::Complete(c) => { for change in c { - am.apply_change(change, observer); + am.apply_change(change, &mut observer); } } load::LoadedChanges::Partial { error, .. } => return Err(error.into()), @@ -680,14 +669,14 @@ impl Automerge { /// Load an incremental save of a document. pub fn load_incremental(&mut self, data: &[u8]) -> Result { - self.load_incremental_with::<()>(data, ApplyOptions::default()) + self.load_incremental_with::<()>(data, None) } /// Load an incremental save of a document. pub fn load_incremental_with( &mut self, data: &[u8], - options: ApplyOptions<'_, Obs>, + op_observer: Option<&mut Obs>, ) -> Result { let changes = match load::load_changes(storage::parse::Input::new(data)) { load::LoadedChanges::Complete(c) => c, @@ -697,7 +686,7 @@ impl Automerge { } }; let start = self.ops.len(); - self.apply_changes_with(changes, options)?; + self.apply_changes_with(changes, op_observer)?; let delta = self.ops.len() - start; Ok(delta) } @@ -717,14 +706,14 @@ impl Automerge { &mut self, changes: impl IntoIterator, ) -> Result<(), AutomergeError> { - self.apply_changes_with::<_, ()>(changes, ApplyOptions::default()) + self.apply_changes_with::<_, ()>(changes, None) } /// Apply changes to this document. 
pub fn apply_changes_with, Obs: OpObserver>( &mut self, changes: I, - mut options: ApplyOptions<'_, Obs>, + mut op_observer: Option<&mut Obs>, ) -> Result<(), AutomergeError> { for c in changes { if !self.history_index.contains_key(&c.hash()) { @@ -735,7 +724,7 @@ impl Automerge { )); } if self.is_causally_ready(&c) { - self.apply_change(c, &mut options.op_observer); + self.apply_change(c, &mut op_observer); } else { self.queue.push(c); } @@ -743,7 +732,7 @@ impl Automerge { } while let Some(c) = self.pop_next_causally_ready_change() { if !self.history_index.contains_key(&c.hash()) { - self.apply_change(c, &mut options.op_observer); + self.apply_change(c, &mut op_observer); } } Ok(()) @@ -831,14 +820,14 @@ impl Automerge { /// Takes all the changes in `other` which are not in `self` and applies them pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { - self.merge_with::<()>(other, ApplyOptions::default()) + self.merge_with::<()>(other, None) } /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge_with<'a, Obs: OpObserver>( + pub fn merge_with( &mut self, other: &mut Self, - options: ApplyOptions<'a, Obs>, + op_observer: Option<&mut Obs>, ) -> Result, AutomergeError> { // TODO: Make this fallible and figure out how to do this transactionally let changes = self @@ -847,7 +836,7 @@ impl Automerge { .cloned() .collect::>(); tracing::trace!(changes=?changes.iter().map(|c| c.hash()).collect::>(), "merging new changes"); - self.apply_changes_with(changes, options)?; + self.apply_changes_with(changes, op_observer)?; Ok(self.get_heads()) } diff --git a/automerge/src/automerge/tests.rs b/automerge/src/automerge/tests.rs index e07f73ff..9c1a1ff7 100644 --- a/automerge/src/automerge/tests.rs +++ b/automerge/src/automerge/tests.rs @@ -1437,19 +1437,15 @@ fn observe_counter_change_application_overwrite() { doc1.increment(ROOT, "counter", 5).unwrap(); doc1.commit(); - let mut observer = VecOpObserver::default(); - let mut 
doc3 = doc1.clone(); - doc3.merge_with( - &mut doc2, - ApplyOptions::default().with_op_observer(&mut observer), - ) - .unwrap(); + let mut doc3 = doc1.fork().with_observer(VecOpObserver::default()); + doc3.merge(&mut doc2).unwrap(); assert_eq!( - observer.take_patches(), + doc3.observer().take_patches(), vec![Patch::Put { obj: ExId::Root, - key: Prop::Map("counter".into()), + path: vec![], + prop: Prop::Map("counter".into()), value: ( ScalarValue::Str("mystring".into()).into(), ExId::Id(2, doc2.get_actor().clone(), 1) @@ -1458,16 +1454,11 @@ fn observe_counter_change_application_overwrite() { }] ); - let mut observer = VecOpObserver::default(); - let mut doc4 = doc2.clone(); - doc4.merge_with( - &mut doc1, - ApplyOptions::default().with_op_observer(&mut observer), - ) - .unwrap(); + let mut doc4 = doc2.clone().with_observer(VecOpObserver::default()); + doc4.merge(&mut doc1).unwrap(); // no patches as the increments operate on an invisible counter - assert_eq!(observer.take_patches(), vec![]); + assert_eq!(doc4.observer().take_patches(), vec![]); } #[test] @@ -1478,20 +1469,15 @@ fn observe_counter_change_application() { doc.increment(ROOT, "counter", 5).unwrap(); let changes = doc.get_changes(&[]).unwrap().into_iter().cloned(); - let mut new_doc = AutoCommit::new(); - let mut observer = VecOpObserver::default(); - new_doc - .apply_changes_with( - changes, - ApplyOptions::default().with_op_observer(&mut observer), - ) - .unwrap(); + let mut new_doc = AutoCommit::new().with_observer(VecOpObserver::default()); + new_doc.apply_changes(changes).unwrap(); assert_eq!( - observer.take_patches(), + new_doc.observer().take_patches(), vec![ Patch::Put { obj: ExId::Root, - key: Prop::Map("counter".into()), + path: vec![], + prop: Prop::Map("counter".into()), value: ( ScalarValue::counter(1).into(), ExId::Id(1, doc.get_actor().clone(), 0) @@ -1500,12 +1486,14 @@ fn observe_counter_change_application() { }, Patch::Increment { obj: ExId::Root, - key: Prop::Map("counter".into()), 
+ path: vec![], + prop: Prop::Map("counter".into()), value: (2, ExId::Id(2, doc.get_actor().clone(), 0)), }, Patch::Increment { obj: ExId::Root, - key: Prop::Map("counter".into()), + path: vec![], + prop: Prop::Map("counter".into()), value: (5, ExId::Id(3, doc.get_actor().clone(), 0)), } ] @@ -1514,7 +1502,7 @@ fn observe_counter_change_application() { #[test] fn get_changes_heads_empty() { - let mut doc = AutoCommit::new(); + let mut doc = AutoCommit::default(); doc.put(ROOT, "key1", 1).unwrap(); doc.commit(); doc.put(ROOT, "key2", 1).unwrap(); diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index c31cf1ed..df33e096 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -75,7 +75,6 @@ mod map_range_at; mod op_observer; mod op_set; mod op_tree; -mod options; mod parents; mod query; mod storage; @@ -88,7 +87,7 @@ mod values; mod visualisation; pub use crate::automerge::Automerge; -pub use autocommit::AutoCommit; +pub use autocommit::{AutoCommit, AutoCommitWithObs}; pub use autoserde::AutoSerde; pub use change::{Change, LoadError as LoadChangeError}; pub use error::AutomergeError; @@ -105,7 +104,6 @@ pub use map_range_at::MapRangeAt; pub use op_observer::OpObserver; pub use op_observer::Patch; pub use op_observer::VecOpObserver; -pub use options::ApplyOptions; pub use parents::Parents; pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; pub use value::{ScalarValue, Value}; diff --git a/automerge/src/op_observer.rs b/automerge/src/op_observer.rs index 96139bab..db3fdf92 100644 --- a/automerge/src/op_observer.rs +++ b/automerge/src/op_observer.rs @@ -1,50 +1,113 @@ use crate::exid::ExId; +use crate::Parents; use crate::Prop; use crate::Value; /// An observer of operations applied to the document. -pub trait OpObserver { +pub trait OpObserver: Default + Clone { /// A new value has been inserted into the given object. 
/// + /// - `parents`: A parents iterator that can be used to collect path information /// - `objid`: the object that has been inserted into. /// - `index`: the index the new value has been inserted at. /// - `tagged_value`: the value that has been inserted and the id of the operation that did the /// insert. - fn insert(&mut self, objid: ExId, index: usize, tagged_value: (Value<'_>, ExId)); + fn insert( + &mut self, + parents: Parents<'_>, + objid: ExId, + index: usize, + tagged_value: (Value<'_>, ExId), + ); /// A new value has been put into the given object. /// + /// - `parents`: A parents iterator that can be used to collect path information /// - `objid`: the object that has been put into. - /// - `key`: the key that the value as been put at. + /// - `prop`: the prop that the value as been put at. /// - `tagged_value`: the value that has been put into the object and the id of the operation /// that did the put. /// - `conflict`: whether this put conflicts with other operations. - fn put(&mut self, objid: ExId, key: Prop, tagged_value: (Value<'_>, ExId), conflict: bool); + fn put( + &mut self, + parents: Parents<'_>, + objid: ExId, + prop: Prop, + tagged_value: (Value<'_>, ExId), + conflict: bool, + ); /// A counter has been incremented. /// + /// - `parents`: A parents iterator that can be used to collect path information /// - `objid`: the object that contains the counter. - /// - `key`: they key that the chounter is at. + /// - `prop`: they prop that the chounter is at. /// - `tagged_value`: the amount the counter has been incremented by, and the the id of the /// increment operation. - fn increment(&mut self, objid: ExId, key: Prop, tagged_value: (i64, ExId)); + fn increment( + &mut self, + parents: Parents<'_>, + objid: ExId, + prop: Prop, + tagged_value: (i64, ExId), + ); /// A value has beeen deleted. /// + /// - `parents`: A parents iterator that can be used to collect path information /// - `objid`: the object that has been deleted in. 
- /// - `key`: the key of the value that has been deleted. - fn delete(&mut self, objid: ExId, key: Prop); + /// - `prop`: the prop of the value that has been deleted. + fn delete(&mut self, parents: Parents<'_>, objid: ExId, prop: Prop); + + /// Branch of a new op_observer later to be merged + /// + /// Called by AutoCommit when creating a new transaction. Observer branch + /// will be merged on `commit()` or thrown away on `rollback()` + /// + fn branch(&self) -> Self { + Self::default() + } + + /// Merge observed information from a transaction. + /// + /// Called by AutoCommit on `commit()` + /// + /// - `other`: Another Op Observer of the same type + fn merge(&mut self, other: &Self); } impl OpObserver for () { - fn insert(&mut self, _objid: ExId, _index: usize, _tagged_value: (Value<'_>, ExId)) {} - - fn put(&mut self, _objid: ExId, _key: Prop, _tagged_value: (Value<'_>, ExId), _conflict: bool) { + fn insert( + &mut self, + _parents: Parents<'_>, + _objid: ExId, + _index: usize, + _tagged_value: (Value<'_>, ExId), + ) { } - fn increment(&mut self, _objid: ExId, _key: Prop, _tagged_value: (i64, ExId)) {} + fn put( + &mut self, + _parents: Parents<'_>, + _objid: ExId, + _prop: Prop, + _tagged_value: (Value<'_>, ExId), + _conflict: bool, + ) { + } - fn delete(&mut self, _objid: ExId, _key: Prop) {} + fn increment( + &mut self, + _parents: Parents<'_>, + _objid: ExId, + _prop: Prop, + _tagged_value: (i64, ExId), + ) { + } + + fn delete(&mut self, _parents: Parents<'_>, _objid: ExId, _prop: Prop) {} + + fn merge(&mut self, _other: &Self) {} } /// Capture operations into a [`Vec`] and store them as patches. 
@@ -62,45 +125,77 @@ impl VecOpObserver { } impl OpObserver for VecOpObserver { - fn insert(&mut self, obj_id: ExId, index: usize, (value, id): (Value<'_>, ExId)) { + fn insert( + &mut self, + mut parents: Parents<'_>, + obj: ExId, + index: usize, + (value, id): (Value<'_>, ExId), + ) { + let path = parents.path(); self.patches.push(Patch::Insert { - obj: obj_id, + obj, + path, index, value: (value.into_owned(), id), }); } - fn put(&mut self, objid: ExId, key: Prop, (value, id): (Value<'_>, ExId), conflict: bool) { + fn put( + &mut self, + mut parents: Parents<'_>, + obj: ExId, + prop: Prop, + (value, id): (Value<'_>, ExId), + conflict: bool, + ) { + let path = parents.path(); self.patches.push(Patch::Put { - obj: objid, - key, + obj, + path, + prop, value: (value.into_owned(), id), conflict, }); } - fn increment(&mut self, objid: ExId, key: Prop, tagged_value: (i64, ExId)) { + fn increment( + &mut self, + mut parents: Parents<'_>, + obj: ExId, + prop: Prop, + tagged_value: (i64, ExId), + ) { + let path = parents.path(); self.patches.push(Patch::Increment { - obj: objid, - key, + obj, + path, + prop, value: tagged_value, }); } - fn delete(&mut self, objid: ExId, key: Prop) { - self.patches.push(Patch::Delete { obj: objid, key }) + fn delete(&mut self, mut parents: Parents<'_>, obj: ExId, prop: Prop) { + let path = parents.path(); + self.patches.push(Patch::Delete { obj, path, prop }) + } + + fn merge(&mut self, other: &Self) { + self.patches.extend_from_slice(other.patches.as_slice()) } } /// A notification to the application that something has changed in a document. #[derive(Debug, Clone, PartialEq)] pub enum Patch { - /// Associating a new value with a key in a map, or an existing list element + /// Associating a new value with a prop in a map, or an existing list element Put { + /// path to the object + path: Vec<(ExId, Prop)>, /// The object that was put into. obj: ExId, - /// The key that the new value was put at. 
- key: Prop, + /// The prop that the new value was put at. + prop: Prop, /// The value that was put, and the id of the operation that put it there. value: (Value<'static>, ExId), /// Whether this put conflicts with another. @@ -108,6 +203,8 @@ pub enum Patch { }, /// Inserting a new element into a list/text Insert { + /// path to the object + path: Vec<(ExId, Prop)>, /// The object that was inserted into. obj: ExId, /// The index that the new value was inserted at. @@ -117,19 +214,23 @@ pub enum Patch { }, /// Incrementing a counter. Increment { + /// path to the object + path: Vec<(ExId, Prop)>, /// The object that was incremented in. obj: ExId, - /// The key that was incremented. - key: Prop, + /// The prop that was incremented. + prop: Prop, /// The amount that the counter was incremented by, and the id of the operation that /// did the increment. value: (i64, ExId), }, /// Deleting an element from a list/text Delete { + /// path to the object + path: Vec<(ExId, Prop)>, /// The object that was deleted from. obj: ExId, - /// The key that was deleted. - key: Prop, + /// The prop that was deleted. 
+ prop: Prop, }, } diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index e8380b8e..8f08b211 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -2,8 +2,9 @@ use crate::clock::Clock; use crate::exid::ExId; use crate::indexed_cache::IndexedCache; use crate::op_tree::{self, OpTree}; +use crate::parents::Parents; use crate::query::{self, OpIdSearch, TreeQuery}; -use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpIds, OpType}; +use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpIds, OpType, Prop}; use crate::{ObjType, OpObserver}; use fxhash::FxBuildHasher; use std::borrow::Borrow; @@ -68,12 +69,29 @@ impl OpSetInternal { } } + pub(crate) fn parents(&self, obj: ObjId) -> Parents<'_> { + Parents { obj, ops: self } + } + pub(crate) fn parent_object(&self, obj: &ObjId) -> Option<(ObjId, Key)> { let parent = self.trees.get(obj)?.parent?; let key = self.search(&parent, OpIdSearch::new(obj.0)).key().unwrap(); Some((parent, key)) } + pub(crate) fn export_key(&self, obj: ObjId, key: Key) -> Prop { + match key { + Key::Map(m) => Prop::Map(self.m.props.get(m).into()), + Key::Seq(opid) => { + let i = self + .search(&obj, query::ElemIdPos::new(opid)) + .index() + .unwrap(); + Prop::Seq(i) + } + } + } + pub(crate) fn keys(&self, obj: ObjId) -> Option> { if let Some(tree) = self.trees.get(&obj) { tree.internal.keys() @@ -245,6 +263,8 @@ impl OpSetInternal { } = q; let ex_obj = self.id_to_exid(obj.0); + let parents = self.parents(*obj); + let key = match op.key { Key::Map(index) => self.m.props[index].clone().into(), Key::Seq(_) => seen.into(), @@ -252,21 +272,21 @@ impl OpSetInternal { if op.insert { let value = (op.value(), self.id_to_exid(op.id)); - observer.insert(ex_obj, seen, value); + observer.insert(parents, ex_obj, seen, value); } else if op.is_delete() { if let Some(winner) = &values.last() { let value = (winner.value(), self.id_to_exid(winner.id)); let conflict = values.len() > 1; - observer.put(ex_obj, key, value, conflict); + 
observer.put(parents, ex_obj, key, value, conflict); } else { - observer.delete(ex_obj, key); + observer.delete(parents, ex_obj, key); } } else if let Some(value) = op.get_increment_value() { // only observe this increment if the counter is visible, i.e. the counter's // create op is in the values if values.iter().any(|value| op.pred.contains(&value.id)) { // we have observed the value - observer.increment(ex_obj, key, (value, self.id_to_exid(op.id))); + observer.increment(parents, ex_obj, key, (value, self.id_to_exid(op.id))); } } else { let winner = if let Some(last_value) = values.last() { @@ -280,10 +300,10 @@ impl OpSetInternal { }; let value = (winner.value(), self.id_to_exid(winner.id)); if op.is_list_op() && !had_value_before { - observer.insert(ex_obj, seen, value); + observer.insert(parents, ex_obj, seen, value); } else { let conflict = !values.is_empty(); - observer.put(ex_obj, key, value, conflict); + observer.put(parents, ex_obj, key, value, conflict); } } diff --git a/automerge/src/options.rs b/automerge/src/options.rs deleted file mode 100644 index e0fd991f..00000000 --- a/automerge/src/options.rs +++ /dev/null @@ -1,16 +0,0 @@ -#[derive(Debug, Default)] -pub struct ApplyOptions<'a, Obs> { - pub op_observer: Option<&'a mut Obs>, -} - -impl<'a, Obs> ApplyOptions<'a, Obs> { - pub fn with_op_observer(mut self, op_observer: &'a mut Obs) -> Self { - self.op_observer = Some(op_observer); - self - } - - pub fn set_op_observer(&mut self, op_observer: &'a mut Obs) -> &mut Self { - self.op_observer = Some(op_observer); - self - } -} diff --git a/automerge/src/parents.rs b/automerge/src/parents.rs index 76478b42..83e9b1c2 100644 --- a/automerge/src/parents.rs +++ b/automerge/src/parents.rs @@ -1,18 +1,33 @@ -use crate::{exid::ExId, types::ObjId, Automerge, Prop}; +use crate::op_set::OpSet; +use crate::types::ObjId; +use crate::{exid::ExId, Prop}; #[derive(Debug)] pub struct Parents<'a> { pub(crate) obj: ObjId, - pub(crate) doc: &'a Automerge, + pub(crate) ops: 
&'a OpSet, +} + +impl<'a> Parents<'a> { + pub fn path(&mut self) -> Vec<(ExId, Prop)> { + let mut path = self.collect::>(); + path.reverse(); + path + } } impl<'a> Iterator for Parents<'a> { type Item = (ExId, Prop); fn next(&mut self) -> Option { - if let Some((obj, key)) = self.doc.parent_object(self.obj) { + if self.obj.is_root() { + None + } else if let Some((obj, key)) = self.ops.parent_object(&self.obj) { self.obj = obj; - Some((self.doc.id_to_exid(obj.0), self.doc.export_key(obj, key))) + Some(( + self.ops.id_to_exid(self.obj.0), + self.ops.export_key(self.obj, key), + )) } else { None } diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 8230b1c3..ae49cfc9 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -4,7 +4,7 @@ use std::collections::{HashMap, HashSet}; use crate::{ storage::{parse, Change as StoredChange, ReadChangeOpError}, - ApplyOptions, Automerge, AutomergeError, Change, ChangeHash, OpObserver, + Automerge, AutomergeError, Change, ChangeHash, OpObserver, }; mod bloom; @@ -105,14 +105,14 @@ impl Automerge { sync_state: &mut State, message: Message, ) -> Result<(), AutomergeError> { - self.receive_sync_message_with::<()>(sync_state, message, ApplyOptions::default()) + self.receive_sync_message_with::<()>(sync_state, message, None) } - pub fn receive_sync_message_with<'a, Obs: OpObserver>( + pub fn receive_sync_message_with( &mut self, sync_state: &mut State, message: Message, - options: ApplyOptions<'a, Obs>, + op_observer: Option<&mut Obs>, ) -> Result<(), AutomergeError> { let before_heads = self.get_heads(); @@ -125,7 +125,7 @@ impl Automerge { let changes_is_empty = message_changes.is_empty(); if !changes_is_empty { - self.apply_changes_with(message_changes, options)?; + self.apply_changes_with(message_changes, op_observer)?; sync_state.shared_heads = advance_heads( &before_heads.iter().collect(), &self.get_heads().into_iter().collect(), diff --git a/automerge/src/transaction.rs b/automerge/src/transaction.rs index 
667503ae..f97fa7e5 100644 --- a/automerge/src/transaction.rs +++ b/automerge/src/transaction.rs @@ -11,4 +11,4 @@ pub use manual_transaction::Transaction; pub use result::Failure; pub use result::Success; -pub type Result = std::result::Result, Failure>; +pub type Result = std::result::Result, Failure>; diff --git a/automerge/src/transaction/commit.rs b/automerge/src/transaction/commit.rs index f9e6f3c2..d2873af3 100644 --- a/automerge/src/transaction/commit.rs +++ b/automerge/src/transaction/commit.rs @@ -1,12 +1,11 @@ /// Optional metadata for a commit. #[derive(Debug, Default)] -pub struct CommitOptions<'a, Obs> { +pub struct CommitOptions { pub message: Option, pub time: Option, - pub op_observer: Option<&'a mut Obs>, } -impl<'a, Obs> CommitOptions<'a, Obs> { +impl CommitOptions { /// Add a message to the commit. pub fn with_message>(mut self, message: S) -> Self { self.message = Some(message.into()); @@ -30,14 +29,4 @@ impl<'a, Obs> CommitOptions<'a, Obs> { self.time = Some(time); self } - - pub fn with_op_observer(mut self, op_observer: &'a mut Obs) -> Self { - self.op_observer = Some(op_observer); - self - } - - pub fn set_op_observer(&mut self, op_observer: &'a mut Obs) -> &mut Self { - self.op_observer = Some(op_observer); - self - } } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 2c75ec39..aff82a99 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -26,13 +26,12 @@ impl TransactionInner { /// Commit the operations performed in this transaction, returning the hashes corresponding to /// the new heads. 
- #[tracing::instrument(skip(self, doc, op_observer))] - pub(crate) fn commit( + #[tracing::instrument(skip(self, doc))] + pub(crate) fn commit( mut self, doc: &mut Automerge, message: Option, time: Option, - op_observer: Option<&mut Obs>, ) -> ChangeHash { if message.is_some() { self.message = message; @@ -42,26 +41,6 @@ impl TransactionInner { self.time = t; } - if let Some(observer) = op_observer { - for (obj, prop, op) in &self.operations { - let ex_obj = doc.ops.id_to_exid(obj.0); - if op.insert { - let value = (op.value(), doc.id_to_exid(op.id)); - match prop { - Prop::Map(_) => panic!("insert into a map"), - Prop::Seq(index) => observer.insert(ex_obj, *index, value), - } - } else if op.is_delete() { - observer.delete(ex_obj, prop.clone()); - } else if let Some(value) = op.get_increment_value() { - observer.increment(ex_obj, prop.clone(), (value, doc.id_to_exid(op.id))); - } else { - let value = (op.value(), doc.ops.id_to_exid(op.id)); - observer.put(ex_obj, prop.clone(), value, false); - } - } - } - let num_ops = self.pending_ops(); let change = self.export(&doc.ops.m); let hash = change.hash(); @@ -150,9 +129,10 @@ impl TransactionInner { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - pub(crate) fn put, V: Into>( + pub(crate) fn put, V: Into, Obs: OpObserver>( &mut self, doc: &mut Automerge, + op_observer: &mut Obs, ex_obj: &ExId, prop: P, value: V, @@ -160,7 +140,7 @@ impl TransactionInner { let obj = doc.exid_to_obj(ex_obj)?; let value = value.into(); let prop = prop.into(); - self.local_op(doc, obj, prop, value.into())?; + self.local_op(doc, op_observer, obj, prop, value.into())?; Ok(()) } @@ -177,16 +157,19 @@ impl TransactionInner { /// - The object does not exist /// - The key is the wrong type for the object /// - The key does not exist in the object - pub(crate) fn put_object>( + pub(crate) fn put_object, Obs: OpObserver>( &mut self, doc: &mut Automerge, + op_observer: &mut 
Obs, ex_obj: &ExId, prop: P, value: ObjType, ) -> Result { let obj = doc.exid_to_obj(ex_obj)?; let prop = prop.into(); - let id = self.local_op(doc, obj, prop, value.into())?.unwrap(); + let id = self + .local_op(doc, op_observer, obj, prop, value.into())? + .unwrap(); let id = doc.id_to_exid(id); Ok(id) } @@ -195,9 +178,11 @@ impl TransactionInner { OpId(self.start_op.get() + self.pending_ops() as u64, self.actor) } - fn insert_local_op( + #[allow(clippy::too_many_arguments)] + fn insert_local_op( &mut self, doc: &mut Automerge, + op_observer: &mut Obs, prop: Prop, op: Op, pos: usize, @@ -210,12 +195,13 @@ impl TransactionInner { doc.ops.insert(pos, &obj, op.clone()); } - self.operations.push((obj, prop, op)); + self.finalize_op(doc, op_observer, obj, prop, op); } - pub(crate) fn insert>( + pub(crate) fn insert, Obs: OpObserver>( &mut self, doc: &mut Automerge, + op_observer: &mut Obs, ex_obj: &ExId, index: usize, value: V, @@ -223,26 +209,28 @@ impl TransactionInner { let obj = doc.exid_to_obj(ex_obj)?; let value = value.into(); tracing::trace!(obj=?obj, value=?value, "inserting value"); - self.do_insert(doc, obj, index, value.into())?; + self.do_insert(doc, op_observer, obj, index, value.into())?; Ok(()) } - pub(crate) fn insert_object( + pub(crate) fn insert_object( &mut self, doc: &mut Automerge, + op_observer: &mut Obs, ex_obj: &ExId, index: usize, value: ObjType, ) -> Result { let obj = doc.exid_to_obj(ex_obj)?; - let id = self.do_insert(doc, obj, index, value.into())?; + let id = self.do_insert(doc, op_observer, obj, index, value.into())?; let id = doc.id_to_exid(id); Ok(id) } - fn do_insert( + fn do_insert( &mut self, doc: &mut Automerge, + op_observer: &mut Obs, obj: ObjId, index: usize, action: OpType, @@ -263,27 +251,30 @@ impl TransactionInner { }; doc.ops.insert(query.pos(), &obj, op.clone()); - self.operations.push((obj, Prop::Seq(index), op)); + + self.finalize_op(doc, op_observer, obj, Prop::Seq(index), op); Ok(id) } - pub(crate) fn local_op( + 
pub(crate) fn local_op( &mut self, doc: &mut Automerge, + op_observer: &mut Obs, obj: ObjId, prop: Prop, action: OpType, ) -> Result, AutomergeError> { match prop { - Prop::Map(s) => self.local_map_op(doc, obj, s, action), - Prop::Seq(n) => self.local_list_op(doc, obj, n, action), + Prop::Map(s) => self.local_map_op(doc, op_observer, obj, s, action), + Prop::Seq(n) => self.local_list_op(doc, op_observer, obj, n, action), } } - fn local_map_op( + fn local_map_op( &mut self, doc: &mut Automerge, + op_observer: &mut Obs, obj: ObjId, prop: String, action: OpType, @@ -324,14 +315,15 @@ impl TransactionInner { let pos = query.pos; let ops_pos = query.ops_pos; - self.insert_local_op(doc, Prop::Map(prop), op, pos, obj, &ops_pos); + self.insert_local_op(doc, op_observer, Prop::Map(prop), op, pos, obj, &ops_pos); Ok(Some(id)) } - fn local_list_op( + fn local_list_op( &mut self, doc: &mut Automerge, + op_observer: &mut Obs, obj: ObjId, index: usize, action: OpType, @@ -363,40 +355,43 @@ impl TransactionInner { let pos = query.pos; let ops_pos = query.ops_pos; - self.insert_local_op(doc, Prop::Seq(index), op, pos, obj, &ops_pos); + self.insert_local_op(doc, op_observer, Prop::Seq(index), op, pos, obj, &ops_pos); Ok(Some(id)) } - pub(crate) fn increment>( + pub(crate) fn increment, Obs: OpObserver>( &mut self, doc: &mut Automerge, + op_observer: &mut Obs, obj: &ExId, prop: P, value: i64, ) -> Result<(), AutomergeError> { let obj = doc.exid_to_obj(obj)?; - self.local_op(doc, obj, prop.into(), OpType::Increment(value))?; + self.local_op(doc, op_observer, obj, prop.into(), OpType::Increment(value))?; Ok(()) } - pub(crate) fn delete>( + pub(crate) fn delete, Obs: OpObserver>( &mut self, doc: &mut Automerge, + op_observer: &mut Obs, ex_obj: &ExId, prop: P, ) -> Result<(), AutomergeError> { let obj = doc.exid_to_obj(ex_obj)?; let prop = prop.into(); - self.local_op(doc, obj, prop, OpType::Delete)?; + self.local_op(doc, op_observer, obj, prop, OpType::Delete)?; Ok(()) } /// Splice new 
elements into the given sequence. Returns a vector of the OpIds used to insert /// the new elements - pub(crate) fn splice( + pub(crate) fn splice( &mut self, doc: &mut Automerge, + op_observer: &mut Obs, ex_obj: &ExId, mut pos: usize, del: usize, @@ -405,15 +400,48 @@ impl TransactionInner { let obj = doc.exid_to_obj(ex_obj)?; for _ in 0..del { // del() - self.local_op(doc, obj, pos.into(), OpType::Delete)?; + self.local_op(doc, op_observer, obj, pos.into(), OpType::Delete)?; } for v in vals { // insert() - self.do_insert(doc, obj, pos, v.clone().into())?; + self.do_insert(doc, op_observer, obj, pos, v.clone().into())?; pos += 1; } Ok(()) } + + fn finalize_op( + &mut self, + doc: &mut Automerge, + op_observer: &mut Obs, + obj: ObjId, + prop: Prop, + op: Op, + ) { + // TODO - id_to_exid should be a noop if not used - change type to Into? + let ex_obj = doc.ops.id_to_exid(obj.0); + let parents = doc.ops.parents(obj); + if op.insert { + let value = (op.value(), doc.ops.id_to_exid(op.id)); + match prop { + Prop::Map(_) => panic!("insert into a map"), + Prop::Seq(index) => op_observer.insert(parents, ex_obj, index, value), + } + } else if op.is_delete() { + op_observer.delete(parents, ex_obj, prop.clone()); + } else if let Some(value) = op.get_increment_value() { + op_observer.increment( + parents, + ex_obj, + prop.clone(), + (value, doc.ops.id_to_exid(op.id)), + ); + } else { + let value = (op.value(), doc.ops.id_to_exid(op.id)); + op_observer.put(parents, ex_obj, prop.clone(), value, false); + } + self.operations.push((obj, prop, op)); + } } #[cfg(test)] diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 022bf7f3..695866ad 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -20,14 +20,15 @@ use super::{CommitOptions, Transactable, TransactionInner}; /// intermediate state. /// This is consistent with `?` error handling. 
#[derive(Debug)] -pub struct Transaction<'a> { +pub struct Transaction<'a, Obs: OpObserver> { // this is an option so that we can take it during commit and rollback to prevent it being // rolled back during drop. pub(crate) inner: Option, pub(crate) doc: &'a mut Automerge, + pub op_observer: Obs, } -impl<'a> Transaction<'a> { +impl<'a, Obs: OpObserver> Transaction<'a, Obs> { /// Get the heads of the document before this transaction was started. pub fn get_heads(&self) -> Vec { self.doc.get_heads() @@ -36,10 +37,7 @@ impl<'a> Transaction<'a> { /// Commit the operations performed in this transaction, returning the hashes corresponding to /// the new heads. pub fn commit(mut self) -> ChangeHash { - self.inner - .take() - .unwrap() - .commit::<()>(self.doc, None, None, None) + self.inner.take().unwrap().commit(self.doc, None, None) } /// Commit the operations in this transaction with some options. @@ -56,15 +54,13 @@ impl<'a> Transaction<'a> { /// tx.put_object(ROOT, "todos", ObjType::List).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; - /// tx.commit_with::<()>(CommitOptions::default().with_message("Create todos list").with_time(now)); + /// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(mut self, options: CommitOptions<'_, Obs>) -> ChangeHash { - self.inner.take().unwrap().commit( - self.doc, - options.message, - options.time, - options.op_observer, - ) + pub fn commit_with(mut self, options: CommitOptions) -> ChangeHash { + self.inner + .take() + .unwrap() + .commit(self.doc, options.message, options.time) } /// Undo the operations added in this transaction, returning the number of cancelled @@ -74,7 +70,7 @@ impl<'a> Transaction<'a> { } } -impl<'a> Transactable for Transaction<'a> { +impl<'a, Obs: OpObserver> Transactable for Transaction<'a, Obs> { /// Get the number of pending operations in this transaction. 
fn pending_ops(&self) -> usize { self.inner.as_ref().unwrap().pending_ops() @@ -97,7 +93,7 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .put(self.doc, obj.as_ref(), prop, value) + .put(self.doc, &mut self.op_observer, obj.as_ref(), prop, value) } fn put_object, P: Into>( @@ -106,10 +102,13 @@ impl<'a> Transactable for Transaction<'a> { prop: P, value: ObjType, ) -> Result { - self.inner - .as_mut() - .unwrap() - .put_object(self.doc, obj.as_ref(), prop, value) + self.inner.as_mut().unwrap().put_object( + self.doc, + &mut self.op_observer, + obj.as_ref(), + prop, + value, + ) } fn insert, V: Into>( @@ -118,10 +117,13 @@ impl<'a> Transactable for Transaction<'a> { index: usize, value: V, ) -> Result<(), AutomergeError> { - self.inner - .as_mut() - .unwrap() - .insert(self.doc, obj.as_ref(), index, value) + self.inner.as_mut().unwrap().insert( + self.doc, + &mut self.op_observer, + obj.as_ref(), + index, + value, + ) } fn insert_object>( @@ -130,10 +132,13 @@ impl<'a> Transactable for Transaction<'a> { index: usize, value: ObjType, ) -> Result { - self.inner - .as_mut() - .unwrap() - .insert_object(self.doc, obj.as_ref(), index, value) + self.inner.as_mut().unwrap().insert_object( + self.doc, + &mut self.op_observer, + obj.as_ref(), + index, + value, + ) } fn increment, P: Into>( @@ -142,10 +147,13 @@ impl<'a> Transactable for Transaction<'a> { prop: P, value: i64, ) -> Result<(), AutomergeError> { - self.inner - .as_mut() - .unwrap() - .increment(self.doc, obj.as_ref(), prop, value) + self.inner.as_mut().unwrap().increment( + self.doc, + &mut self.op_observer, + obj.as_ref(), + prop, + value, + ) } fn delete, P: Into>( @@ -156,7 +164,7 @@ impl<'a> Transactable for Transaction<'a> { self.inner .as_mut() .unwrap() - .delete(self.doc, obj.as_ref(), prop) + .delete(self.doc, &mut self.op_observer, obj.as_ref(), prop) } /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert @@ -168,10 +176,14 @@ impl<'a> Transactable for Transaction<'a> { del: usize, vals: V, ) -> Result<(), AutomergeError> { - self.inner - .as_mut() - .unwrap() - .splice(self.doc, obj.as_ref(), pos, del, vals) + self.inner.as_mut().unwrap().splice( + self.doc, + &mut self.op_observer, + obj.as_ref(), + pos, + del, + vals, + ) } fn keys>(&self, obj: O) -> Keys<'_, '_> { @@ -291,7 +303,7 @@ impl<'a> Transactable for Transaction<'a> { // intermediate state. // This defaults to rolling back the transaction to be compatible with `?` error returning before // reaching a call to `commit`. -impl<'a> Drop for Transaction<'a> { +impl<'a, Obs: OpObserver> Drop for Transaction<'a, Obs> { fn drop(&mut self) { if let Some(txn) = self.inner.take() { txn.rollback(self.doc); diff --git a/automerge/src/transaction/result.rs b/automerge/src/transaction/result.rs index 345c9f2c..8943b7a2 100644 --- a/automerge/src/transaction/result.rs +++ b/automerge/src/transaction/result.rs @@ -2,11 +2,12 @@ use crate::ChangeHash; /// The result of a successful, and committed, transaction. #[derive(Debug)] -pub struct Success { +pub struct Success { /// The result of the transaction. pub result: O, /// The hash of the change, also the head of the document. pub hash: ChangeHash, + pub op_observer: Obs, } /// The result of a failed, and rolled back, transaction. 
diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 938f4343..eb172213 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,7 +1,7 @@ use automerge::transaction::Transactable; use automerge::{ - ActorId, ApplyOptions, AutoCommit, Automerge, AutomergeError, Change, ExpandedChange, ObjType, - ScalarValue, VecOpObserver, ROOT, + ActorId, AutoCommit, Automerge, AutomergeError, Change, ExpandedChange, ObjType, ScalarValue, + VecOpObserver, ROOT, }; // set up logging for all the tests @@ -1005,13 +1005,8 @@ fn observe_counter_change_application() { doc.increment(ROOT, "counter", 5).unwrap(); let changes = doc.get_changes(&[]).unwrap().into_iter().cloned(); - let mut doc = AutoCommit::new(); - let mut observer = VecOpObserver::default(); - doc.apply_changes_with( - changes, - ApplyOptions::default().with_op_observer(&mut observer), - ) - .unwrap(); + let mut doc = AutoCommit::new().with_observer(VecOpObserver::default()); + doc.apply_changes(changes).unwrap(); } #[test] From 238d05a0e373e30314762d6953882b9601ae5bfc Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sun, 25 Sep 2022 10:14:01 -0500 Subject: [PATCH 587/730] move automerge-js onto the applyPatches model --- automerge-js/package.json | 1 + automerge-js/src/constants.ts | 3 +- automerge-js/src/index.ts | 234 +++++---- automerge-js/src/proxies.ts | 140 ++++-- automerge-js/src/text.ts | 127 +++-- automerge-js/test/basic_test.ts | 49 ++ automerge-js/test/legacy_tests.ts | 93 ++-- automerge-js/test/sync_test.ts | 2 +- automerge-js/test/text_test.ts | 8 +- automerge-wasm/Cargo.toml | 1 + automerge-wasm/src/interop.rs | 583 +++++++++++++--------- automerge-wasm/src/lib.rs | 179 ++++--- automerge-wasm/src/observer.rs | 57 ++- automerge-wasm/src/value.rs | 173 +++++-- automerge-wasm/test/apply.ts | 106 +++- automerge-wasm/test/test.ts | 8 +- automerge/src/autocommit.rs | 3 +- automerge/src/op_set.rs | 9 +- automerge/src/query/seek_op_with_patch.rs | 9 +- 19 files changed, 1180 
insertions(+), 605 deletions(-) diff --git a/automerge-js/package.json b/automerge-js/package.json index 02b9359e..c3bc00c5 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -53,6 +53,7 @@ "mocha": "^10.0.0", "pako": "^2.0.4", "ts-mocha": "^10.0.0", + "ts-node": "^10.9.1", "typescript": "^4.6.4" }, "dependencies": { diff --git a/automerge-js/src/constants.ts b/automerge-js/src/constants.ts index e37835d1..d9f78af2 100644 --- a/automerge-js/src/constants.ts +++ b/automerge-js/src/constants.ts @@ -1,7 +1,8 @@ // Properties of the document root object //const OPTIONS = Symbol('_options') // object containing options passed to init() //const CACHE = Symbol('_cache') // map from objectId to immutable object -export const STATE = Symbol.for('_am_state') // object containing metadata about current state (e.g. sequence numbers) +//export const STATE = Symbol.for('_am_state') // object containing metadata about current state (e.g. sequence numbers) +export const STATE = Symbol.for('_am_meta') // object containing metadata about current state (e.g. sequence numbers) export const HEADS = Symbol.for('_am_heads') // object containing metadata about current state (e.g. sequence numbers) export const TRACE = Symbol.for('_am_trace') // object containing metadata about current state (e.g. sequence numbers) export const OBJECT_ID = Symbol.for('_am_objectId') // object containing metadata about current state (e.g. 
sequence numbers) diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 95c57452..635c328a 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -4,7 +4,7 @@ export { uuid } from './uuid' import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" import { STATE, HEADS, TRACE, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" -import { AutomergeValue, Counter } from "./types" +import { AutomergeValue, Text, Counter } from "./types" export { AutomergeValue, Text, Counter, Int, Uint, Float64 } from "./types" import { type API } from "@automerge/automerge-wasm"; @@ -13,7 +13,8 @@ import { ApiHandler, UseApi } from "./low_level" import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "@automerge/automerge-wasm" import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "@automerge/automerge-wasm" -export type ChangeOptions = { message?: string, time?: number } +export type ChangeOptions = { message?: string, time?: number, patchCallback?: Function } +export type ApplyOptions = { patchCallback?: Function } export type Doc = { readonly [P in keyof T]: T[P] } @@ -31,13 +32,27 @@ export function use(api: API) { import * as wasm from "@automerge/automerge-wasm" use(wasm) -export function getBackend(doc: Doc) : Automerge { - return _state(doc) +export type InitOptions = { + actor?: ActorId, + freeze?: boolean, + patchCallback?: Function, +}; + + +interface InternalState { + handle: Automerge, + heads: Heads | undefined, + freeze: boolean, + patchCallback: Function | undefined, } -function _state(doc: Doc) : Automerge { +export function getBackend(doc: Doc) : Automerge { + return _state(doc).handle +} + +function _state(doc: Doc, checkroot = true) : InternalState { const state = Reflect.get(doc,STATE) - if (state == undefined) { + if (state === undefined || (checkroot && _obj(doc) !== "_root")) { throw new RangeError("must be the document root") } return state @@ 
-47,17 +62,12 @@ function _frozen(doc: Doc) : boolean { return Reflect.get(doc,FROZEN) === true } -function _heads(doc: Doc) : Heads | undefined { - return Reflect.get(doc,HEADS) -} - function _trace(doc: Doc) : string | undefined { return Reflect.get(doc,TRACE) } function _set_heads(doc: Doc, heads: Heads) { - Reflect.set(doc,HEADS,heads) - Reflect.set(doc,TRACE,(new Error()).stack) + _state(doc).heads = heads } function _clear_heads(doc: Doc) { @@ -66,28 +76,55 @@ function _clear_heads(doc: Doc) { } function _obj(doc: Doc) : ObjID { - return Reflect.get(doc,OBJECT_ID) + let proxy_objid = Reflect.get(doc,OBJECT_ID) + if (proxy_objid) { + return proxy_objid + } + if (Reflect.get(doc,STATE)) { + return "_root" + } + throw new RangeError("invalid document passed to _obj()") } function _readonly(doc: Doc) : boolean { - return Reflect.get(doc,READ_ONLY) === true + return Reflect.get(doc,READ_ONLY) !== false } -export function init(actor?: ActorId) : Doc{ - if (typeof actor !== "string") { - actor = undefined +function importOpts(_actor?: ActorId | InitOptions) : InitOptions { + if (typeof _actor === 'object') { + return _actor + } else { + return { actor: _actor } } - const state = ApiHandler.create(actor) - return rootProxy(state, true); +} + +export function init(_opts?: ActorId | InitOptions) : Doc{ + let opts = importOpts(_opts) + let freeze = !!opts.freeze + let patchCallback = opts.patchCallback + const handle = ApiHandler.create(opts.actor) + handle.enablePatches(true) + //@ts-ignore + handle.registerDatatype("counter", (n) => new Counter(n)) + //@ts-ignore + handle.registerDatatype("text", (n) => new Text(n)) + //@ts-ignore + const doc = handle.materialize("/", undefined, { handle, heads: undefined, freeze, patchCallback }) + //@ts-ignore + return doc } export function clone(doc: Doc) : Doc { - const state = _state(doc).clone() - return rootProxy(state, true); + const state = _state(doc) + const handle = state.heads ? 
state.handle.forkAt(state.heads) : state.handle.fork() + //@ts-ignore + const clonedDoc : any = handle.materialize("/", undefined, { ... state, handle }) + + return clonedDoc } export function free(doc: Doc) { - return _state(doc).free() + return _state(doc).handle.free() } export function from>(initialState: T | Doc, actor?: ActorId): Doc { @@ -107,6 +144,16 @@ export function change(doc: Doc, options: string | ChangeOptions | ChangeF } } +function progressDocument(doc: Doc, heads: Heads, callback?: Function): Doc { + let state = _state(doc) + let nextState = { ... state, heads: undefined }; + // @ts-ignore + let nextDoc = state.handle.applyPatches(doc, nextState, callback) + state.heads = heads + if (nextState.freeze) { Object.freeze(nextDoc) } + return nextDoc +} + function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): Doc { @@ -114,38 +161,33 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): throw new RangeError("invalid change function"); } - if (doc === undefined || _state(doc) === undefined || _obj(doc) !== "_root") { + const state = _state(doc) + + if (doc === undefined || state === undefined) { throw new RangeError("must be the document root"); } - if (_frozen(doc) === true) { + if (state.heads) { throw new RangeError("Attempting to use an outdated Automerge document") } - if (!!_heads(doc) === true) { - throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); - } if (_readonly(doc) === false) { throw new RangeError("Calls to Automerge.change cannot be nested") } - const state = _state(doc) - const heads = state.getHeads() + const heads = state.handle.getHeads() try { - _set_heads(doc,heads) - Reflect.set(doc,FROZEN,true) - const root : T = rootProxy(state); + state.heads = heads + const root : T = rootProxy(state.handle); callback(root) - if (state.pendingOps() === 0) { - Reflect.set(doc,FROZEN,false) - _clear_heads(doc) + if (state.handle.pendingOps() === 0) { + state.heads = 
undefined return doc } else { - state.commit(options.message, options.time) - return rootProxy(state, true); + state.handle.commit(options.message, options.time) + return progressDocument(doc, heads, options.patchCallback || state.patchCallback); } } catch (e) { //console.log("ERROR: ",e) - Reflect.set(doc,FROZEN,false) - _clear_heads(doc) - state.rollback() + state.heads = undefined + state.handle.rollback() throw e } } @@ -158,47 +200,55 @@ export function emptyChange(doc: Doc, options: ChangeOptions) { options = { message: options } } - if (doc === undefined || _state(doc) === undefined || _obj(doc) !== "_root") { - throw new RangeError("must be the document root"); - } - if (_frozen(doc) === true) { + const state = _state(doc) + + if (state.heads) { throw new RangeError("Attempting to use an outdated Automerge document") } if (_readonly(doc) === false) { throw new RangeError("Calls to Automerge.change cannot be nested") } - const state = _state(doc) - state.commit(options.message, options.time) - return rootProxy(state, true); + const heads = state.handle.getHeads() + state.handle.commit(options.message, options.time) + return progressDocument(doc, heads) } -export function load(data: Uint8Array, actor?: ActorId) : Doc { - const state = ApiHandler.load(data, actor) - return rootProxy(state, true); +export function load(data: Uint8Array, _opts?: ActorId | InitOptions) : Doc { + const opts = importOpts(_opts) + const actor = opts.actor + const patchCallback = opts.patchCallback + const handle = ApiHandler.load(data, actor) + handle.enablePatches(true) + //@ts-ignore + handle.registerDatatype("counter", (n) => new Counter(n)) + //@ts-ignore + handle.registerDatatype("text", (n) => new Text(n)) + //@ts-ignore + const doc : any = handle.materialize("/", undefined, { handle, heads: undefined, patchCallback }) + return doc } export function save(doc: Doc) : Uint8Array { - const state = _state(doc) - return state.save() + return _state(doc).handle.save() } export 
function merge(local: Doc, remote: Doc) : Doc { - if (!!_heads(local) === true) { + const localState = _state(local) + + if (localState.heads) { throw new RangeError("Attempting to change an out of date document - set at: " + _trace(local)); } - const localState = _state(local) - const heads = localState.getHeads() + const heads = localState.handle.getHeads() const remoteState = _state(remote) - const changes = localState.getChangesAdded(remoteState) - localState.applyChanges(changes) - _set_heads(local,heads) - return rootProxy(localState, true) + const changes = localState.handle.getChangesAdded(remoteState.handle) + localState.handle.applyChanges(changes) + return progressDocument(local, heads, localState.patchCallback) } export function getActorId(doc: Doc) : ActorId { const state = _state(doc) - return state.getActorId() + return state.handle.getActorId() } type Conflicts = { [key: string]: AutomergeValue } @@ -245,14 +295,14 @@ function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : Conflict } export function getConflicts(doc: Doc, prop: Prop) : Conflicts | undefined { - const state = _state(doc) + const state = _state(doc, false) const objectId = _obj(doc) - return conflictAt(state, objectId, prop) + return conflictAt(state.handle, objectId, prop) } export function getLastLocalChange(doc: Doc) : Change | undefined { const state = _state(doc) - return state.getLastLocalChange() || undefined + return state.handle.getLastLocalChange() || undefined } export function getObjectId(doc: Doc) : ObjID { @@ -262,30 +312,27 @@ export function getObjectId(doc: Doc) : ObjID { export function getChanges(oldState: Doc, newState: Doc) : Change[] { const o = _state(oldState) const n = _state(newState) - const heads = _heads(oldState) - return n.getChanges(heads || o.getHeads()) + return n.handle.getChanges(getHeads(oldState)) } export function getAllChanges(doc: Doc) : Change[] { const state = _state(doc) - return state.getChanges([]) + return 
state.handle.getChanges([]) } -export function applyChanges(doc: Doc, changes: Change[]) : [Doc] { - if (doc === undefined || _obj(doc) !== "_root") { - throw new RangeError("must be the document root"); - } - if (_frozen(doc) === true) { +export function applyChanges(doc: Doc, changes: Change[], opts?: ApplyOptions) : [Doc] { + const state = _state(doc) + if (!opts) { opts = {} } + if (state.heads) { throw new RangeError("Attempting to use an outdated Automerge document") } if (_readonly(doc) === false) { throw new RangeError("Calls to Automerge.change cannot be nested") } - const state = _state(doc) - const heads = state.getHeads() - state.applyChanges(changes) - _set_heads(doc,heads) - return [rootProxy(state, true)]; + const heads = state.handle.getHeads(); + state.handle.applyChanges(changes) + state.heads = heads; + return [progressDocument(doc, heads, opts.patchCallback || state.patchCallback )] } export function getHistory(doc: Doc) : State[] { @@ -303,6 +350,7 @@ export function getHistory(doc: Doc) : State[] { } // FIXME : no tests +// FIXME can we just use deep equals now? 
export function equals(val1: unknown, val2: unknown) : boolean { if (!isObject(val1) || !isObject(val2)) return val1 === val2 const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort() @@ -325,31 +373,25 @@ export function decodeSyncState(state: Uint8Array) : SyncState { export function generateSyncMessage(doc: Doc, inState: SyncState) : [ SyncState, SyncMessage | null ] { const state = _state(doc) const syncState = ApiHandler.importSyncState(inState) - const message = state.generateSyncMessage(syncState) + const message = state.handle.generateSyncMessage(syncState) const outState = ApiHandler.exportSyncState(syncState) return [ outState, message ] } -export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage) : [ Doc, SyncState, null ] { +export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage, opts?: ApplyOptions) : [ Doc, SyncState, null ] { const syncState = ApiHandler.importSyncState(inState) - if (doc === undefined || _obj(doc) !== "_root") { - throw new RangeError("must be the document root"); - } - if (_frozen(doc) === true) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (!!_heads(doc) === true) { + if (!opts) { opts = {} } + const state = _state(doc) + if (state.heads) { throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); } if (_readonly(doc) === false) { throw new RangeError("Calls to Automerge.change cannot be nested") } - const state = _state(doc) - const heads = state.getHeads() - state.receiveSyncMessage(syncState, message) - _set_heads(doc,heads) - const outState = ApiHandler.exportSyncState(syncState) - return [rootProxy(state, true), outState, null]; + const heads = state.handle.getHeads() + state.handle.receiveSyncMessage(syncState, message) + const outSyncState = ApiHandler.exportSyncState(syncState) + return [progressDocument(doc, heads, opts.patchCallback || state.patchCallback), outSyncState, 
null]; } export function initSyncState() : SyncState { @@ -374,24 +416,24 @@ export function decodeSyncMessage(message: SyncMessage) : DecodedSyncMessage { export function getMissingDeps(doc: Doc, heads: Heads) : Heads { const state = _state(doc) - return state.getMissingDeps(heads) + return state.handle.getMissingDeps(heads) } export function getHeads(doc: Doc) : Heads { const state = _state(doc) - return _heads(doc) || state.getHeads() + return state.heads || state.handle.getHeads() } export function dump(doc: Doc) { const state = _state(doc) - state.dump() + state.handle.dump() } // FIXME - return T? export function toJS(doc: Doc) : MaterializeValue { const state = _state(doc) - const heads = _heads(doc) - return state.materialize("_root", heads) + // @ts-ignore + return state.handle.materialize("_root", state.heads, state) } diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index a03c97cc..cfbe4540 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -218,18 +218,6 @@ const ListHandler = { if (index === TRACE) return target.trace if (index === STATE) return context; if (index === 'length') return context.length(objectId, heads); - if (index === Symbol.iterator) { - let i = 0; - return function *() { - // FIXME - ugly - let value = valueAt(target, i) - while (value !== undefined) { - yield value - i += 1 - value = valueAt(target, i) - } - } - } if (typeof index === 'number') { return valueAt(target, index) } else { @@ -368,17 +356,6 @@ const TextHandler = Object.assign({}, ListHandler, { if (index === TRACE) return target.trace if (index === STATE) return context; if (index === 'length') return context.length(objectId, heads); - if (index === Symbol.iterator) { - let i = 0; - return function *() { - let value = valueAt(target, i) - while (value !== undefined) { - yield value - i += 1 - value = valueAt(target, i) - } - } - } if (typeof index === 'number') { return valueAt(target, index) } else { @@ -424,11 +401,11 @@ 
function listMethods(target) { }, fill(val: ScalarValue, start: number, end: number) { - // FIXME needs tests const [value, datatype] = import_value(val) + const length = context.length(objectId) start = parseListIndex(start || 0) - end = parseListIndex(end || context.length(objectId)) - for (let i = start; i < end; i++) { + end = parseListIndex(end || length) + for (let i = start; i < Math.min(end, length); i++) { context.put(objectId, i, value, datatype) } return this @@ -572,15 +549,9 @@ function listMethods(target) { } } return iterator - } - } + }, - // Read-only methods that can delegate to the JavaScript built-in implementations - // FIXME - super slow - for (const method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes', - 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight', - 'slice', 'some', 'toLocaleString', 'toString']) { - methods[method] = (...args) => { + toArray() : AutomergeValue[] { const list : AutomergeValue = [] let value do { @@ -590,10 +561,107 @@ function listMethods(target) { } } while (value !== undefined) - return list[method](...args) + return list + }, + + map(f: (AutomergeValue, number) => T) : T[] { + return this.toArray().map(f) + }, + + toString() : string { + return this.toArray().toString() + }, + + toLocaleString() : string { + return this.toArray().toLocaleString() + }, + + forEach(f: (AutomergeValue, number) => undefined ) { + return this.toArray().forEach(f) + }, + + // todo: real concat function is different + concat(other: AutomergeValue[]) : AutomergeValue[] { + return this.toArray().concat(other) + }, + + every(f: (AutomergeValue, number) => boolean) : boolean { + return this.toArray().every(f) + }, + + filter(f: (AutomergeValue, number) => boolean) : AutomergeValue[] { + return this.toArray().filter(f) + }, + + find(f: (AutomergeValue, number) => boolean) : AutomergeValue | undefined { + let index = 0 + for (let v of this) { + if (f(v, index)) { + return v + } + index += 1 + } + }, + + 
findIndex(f: (AutomergeValue, number) => boolean) : number { + let index = 0 + for (let v of this) { + if (f(v, index)) { + return index + } + index += 1 + } + return -1 + }, + + includes(elem: AutomergeValue) : boolean { + return this.find((e) => e === elem) !== undefined + }, + + join(sep?: string) : string { + return this.toArray().join(sep) + }, + + // todo: remove the any + reduce(f: (any, AutomergeValue) => T, initalValue?: T) : T | undefined { + return this.toArray().reduce(f,initalValue) + }, + + // todo: remove the any + reduceRight(f: (any, AutomergeValue) => T, initalValue?: T) : T | undefined{ + return this.toArray().reduceRight(f,initalValue) + }, + + lastIndexOf(search: AutomergeValue, fromIndex = +Infinity) : number { + // this can be faster + return this.toArray().lastIndexOf(search,fromIndex) + }, + + slice(index?: number, num?: number) : AutomergeValue[] { + return this.toArray().slice(index,num) + }, + + some(f: (AutomergeValue, number) => boolean) : boolean { + let index = 0; + for (let v of this) { + if (f(v,index)) { + return true + } + index += 1 + } + return false + }, + + [Symbol.iterator]: function *() { + let i = 0; + let value = valueAt(target, i) + while (value !== undefined) { + yield value + i += 1 + value = valueAt(target, i) + } } } - return methods } diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index 9566d5eb..a6c51940 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -1,11 +1,12 @@ import { Value } from "@automerge/automerge-wasm" -import { TEXT } from "./constants" +import { TEXT, STATE } from "./constants" export class Text { elems: Value[] + str: string | undefined + spans: Value[] | undefined - constructor (text?: string | string[]) { - //const instance = Object.create(Text.prototype) + constructor (text?: string | string[] | Value[]) { if (typeof text === 'string') { this.elems = [...text] } else if (Array.isArray(text)) { @@ -50,14 +51,17 @@ export class Text { * non-character 
elements. */ toString() : string { - // Concatting to a string is faster than creating an array and then - // .join()ing for small (<100KB) arrays. - // https://jsperf.com/join-vs-loop-w-type-test - let str = '' - for (const elem of this.elems) { - if (typeof elem === 'string') str += elem + if (!this.str) { + // Concatting to a string is faster than creating an array and then + // .join()ing for small (<100KB) arrays. + // https://jsperf.com/join-vs-loop-w-type-test + this.str = '' + for (const elem of this.elems) { + if (typeof elem === 'string') this.str += elem + else this.str += '\uFFFC' + } } - return str + return this.str } /** @@ -68,23 +72,25 @@ export class Text { * => ['ab', {x: 3}, 'cd'] */ toSpans() : Value[] { - const spans : Value[] = [] - let chars = '' - for (const elem of this.elems) { - if (typeof elem === 'string') { - chars += elem - } else { - if (chars.length > 0) { - spans.push(chars) - chars = '' + if (!this.spans) { + this.spans = [] + let chars = '' + for (const elem of this.elems) { + if (typeof elem === 'string') { + chars += elem + } else { + if (chars.length > 0) { + this.spans.push(chars) + chars = '' + } + this.spans.push(elem) } - spans.push(elem) + } + if (chars.length > 0) { + this.spans.push(chars) } } - if (chars.length > 0) { - spans.push(chars) - } - return spans + return this.spans } /** @@ -99,6 +105,9 @@ export class Text { * Updates the list item at position `index` to a new value `value`. */ set (index: number, value: Value) { + if (this[STATE]) { + throw new RangeError("object cannot be modified outside of a change block") + } this.elems[index] = value } @@ -106,6 +115,9 @@ export class Text { * Inserts new list items `values` starting at position `index`. */ insertAt(index: number, ...values: Value[]) { + if (this[STATE]) { + throw new RangeError("object cannot be modified outside of a change block") + } this.elems.splice(index, 0, ... 
values) } @@ -114,6 +126,9 @@ export class Text { * if `numDelete` is not given, one item is deleted. */ deleteAt(index: number, numDelete = 1) { + if (this[STATE]) { + throw new RangeError("object cannot be modified outside of a change block") + } this.elems.splice(index, numDelete) } @@ -121,16 +136,64 @@ export class Text { this.elems.map(callback) } + lastIndexOf(searchElement: Value, fromIndex?: number) { + this.elems.lastIndexOf(searchElement, fromIndex) + } -} + concat(other: Text) : Text { + return new Text(this.elems.concat(other.elems)) + } -// Read-only methods that can delegate to the JavaScript built-in array -for (const method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes', - 'indexOf', 'join', 'lastIndexOf', 'reduce', 'reduceRight', - 'slice', 'some', 'toLocaleString']) { - Text.prototype[method] = function (...args) { - const array = [...this] - return array[method](...args) + every(test: (Value) => boolean) : boolean { + return this.elems.every(test) + } + + filter(test: (Value) => boolean) : Text { + return new Text(this.elems.filter(test)) + } + + find(test: (Value) => boolean) : Value | undefined { + return this.elems.find(test) + } + + findIndex(test: (Value) => boolean) : number | undefined { + return this.elems.findIndex(test) + } + + forEach(f: (Value) => undefined) { + this.elems.forEach(f) + } + + includes(elem: Value) : boolean { + return this.elems.includes(elem) + } + + indexOf(elem: Value) { + return this.elems.indexOf(elem) + } + + join(sep?: string) : string{ + return this.elems.join(sep) + } + + reduce(f: (previousValue: Value, currentValue: Value, currentIndex: number, array: Value[]) => Value) { + this.elems.reduce(f) + } + + reduceRight(f: (previousValue: Value, currentValue: Value, currentIndex: number, array: Value[]) => Value) { + this.elems.reduceRight(f) + } + + slice(start?: number, end?: number) { + new Text(this.elems.slice(start,end)) + } + + some(test: (Value) => boolean) : boolean { + 
return this.elems.some(test) + } + + toLocaleString() { + this.toString() } } diff --git a/automerge-js/test/basic_test.ts b/automerge-js/test/basic_test.ts index fdc8797b..2936a0e2 100644 --- a/automerge-js/test/basic_test.ts +++ b/automerge-js/test/basic_test.ts @@ -170,6 +170,55 @@ describe('Automerge', () => { console.log(doc.text.indexOf("world")) }) }) + + describe('proxy lists', () => { + it('behave like arrays', () => { + let doc = Automerge.from({ + chars: ["a","b","c"], + numbers: [20,3,100], + repeats: [20,20,3,3,3,3,100,100] + }) + let r1 = [] + doc = Automerge.change(doc, (d) => { + assert.deepEqual(d.chars.concat([1,2]), ["a","b","c",1,2]) + assert.deepEqual(d.chars.map((n) => n + "!"), ["a!", "b!", "c!"]) + assert.deepEqual(d.numbers.map((n) => n + 10), [30, 13, 110]) + assert.deepEqual(d.numbers.toString(), "20,3,100") + assert.deepEqual(d.numbers.toLocaleString(), "20,3,100") + assert.deepEqual(d.numbers.forEach((n) => r1.push(n)), undefined) + assert.deepEqual(d.numbers.every((n) => n > 1), true) + assert.deepEqual(d.numbers.every((n) => n > 10), false) + assert.deepEqual(d.numbers.filter((n) => n > 10), [20,100]) + assert.deepEqual(d.repeats.find((n) => n < 10), 3) + assert.deepEqual(d.repeats.toArray().find((n) => n < 10), 3) + assert.deepEqual(d.repeats.find((n) => n < 0), undefined) + assert.deepEqual(d.repeats.findIndex((n) => n < 10), 2) + assert.deepEqual(d.repeats.findIndex((n) => n < 0), -1) + assert.deepEqual(d.repeats.toArray().findIndex((n) => n < 10), 2) + assert.deepEqual(d.repeats.toArray().findIndex((n) => n < 0), -1) + assert.deepEqual(d.numbers.includes(3), true) + assert.deepEqual(d.numbers.includes(-3), false) + assert.deepEqual(d.numbers.join("|"), "20|3|100") + assert.deepEqual(d.numbers.join(), "20,3,100") + assert.deepEqual(d.numbers.some((f) => f === 3), true) + assert.deepEqual(d.numbers.some((f) => f < 0), false) + assert.deepEqual(d.numbers.reduce((sum,n) => sum + n, 100), 223) + 
assert.deepEqual(d.repeats.reduce((sum,n) => sum + n, 100), 352) + assert.deepEqual(d.chars.reduce((sum,n) => sum + n, "="), "=abc") + assert.deepEqual(d.chars.reduceRight((sum,n) => sum + n, "="), "=cba") + assert.deepEqual(d.numbers.reduceRight((sum,n) => sum + n, 100), 223) + assert.deepEqual(d.repeats.lastIndexOf(3), 5) + assert.deepEqual(d.repeats.lastIndexOf(3,3), 3) + }) + doc = Automerge.change(doc, (d) => { + assert.deepEqual(d.numbers.fill(-1,1,2), [20,-1,100]) + assert.deepEqual(d.chars.fill("z",1,100), ["a","z","z"]) + }) + assert.deepEqual(r1, [20,3,100]) + assert.deepEqual(doc.numbers, [20,-1,100]) + assert.deepEqual(doc.chars, ["a","z","z"]) + }) + }) it('should obtain the same conflicts, regardless of merge order', () => { let s1 = Automerge.init() diff --git a/automerge-js/test/legacy_tests.ts b/automerge-js/test/legacy_tests.ts index 4b53ff98..ea814016 100644 --- a/automerge-js/test/legacy_tests.ts +++ b/automerge-js/test/legacy_tests.ts @@ -280,47 +280,34 @@ describe('Automerge', () => { assert.strictEqual(s2.list[0].getTime(), now.getTime()) }) - /* - it.skip('should call patchCallback if supplied', () => { + it('should call patchCallback if supplied', () => { const callbacks = [], actor = Automerge.getActorId(s1) const s2 = Automerge.change(s1, { - patchCallback: (patch, before, after, local) => callbacks.push({patch, before, after, local}) + patchCallback: (patch, before, after) => callbacks.push({patch, before, after}) }, doc => { doc.birds = ['Goldfinch'] }) - assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patch, { - actor, seq: 1, maxOp: 2, deps: [], clock: {[actor]: 1}, pendingChanges: 0, - diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: { - objectId: `1@${actor}`, type: 'list', edits: [ - {action: 'insert', index: 0, elemId: `2@${actor}`, opId: `2@${actor}`, value: {'type': 'value', value: 'Goldfinch'}} - ] - }}}} - }) + assert.strictEqual(callbacks.length, 2) + 
assert.deepStrictEqual(callbacks[0].patch, { action: "put", path: ["birds"], value: [], conflict: false}) + assert.deepStrictEqual(callbacks[1].patch, { action: "splice", path: ["birds",0], values: ["Goldfinch"] }) assert.strictEqual(callbacks[0].before, s1) - assert.strictEqual(callbacks[0].after, s2) - assert.strictEqual(callbacks[0].local, true) + assert.strictEqual(callbacks[1].after, s2) }) - */ - /* - it.skip('should call a patchCallback set up on document initialisation', () => { + it('should call a patchCallback set up on document initialisation', () => { const callbacks = [] s1 = Automerge.init({ - patchCallback: (patch, before, after, local) => callbacks.push({patch, before, after, local}) + patchCallback: (patch, before, after) => callbacks.push({patch, before, after }) }) const s2 = Automerge.change(s1, doc => doc.bird = 'Goldfinch') const actor = Automerge.getActorId(s1) assert.strictEqual(callbacks.length, 1) assert.deepStrictEqual(callbacks[0].patch, { - actor, seq: 1, maxOp: 1, deps: [], clock: {[actor]: 1}, pendingChanges: 0, - diffs: {objectId: '_root', type: 'map', props: {bird: {[`1@${actor}`]: {type: 'value', value: 'Goldfinch'}}}} + action: "put", path: ["bird"], value: "Goldfinch", conflict: false }) assert.strictEqual(callbacks[0].before, s1) assert.strictEqual(callbacks[0].after, s2) - assert.strictEqual(callbacks[0].local, true) }) - */ }) describe('emptyChange()', () => { @@ -894,7 +881,7 @@ describe('Automerge', () => { }) }) - it('should handle assignment conflicts of different types', () => { + it.skip('should handle assignment conflicts of different types', () => { s1 = Automerge.change(s1, doc => doc.field = 'string') s2 = Automerge.change(s2, doc => doc.field = ['list']) s3 = Automerge.change(s3, doc => doc.field = {thing: 'map'}) @@ -919,7 +906,8 @@ describe('Automerge', () => { }) }) - it('should handle changes within a conflicting list element', () => { + // FIXME - difficult bug here - patches arrive for conflicted subobject + 
it.skip('should handle changes within a conflicting list element', () => { s1 = Automerge.change(s1, doc => doc.list = ['hello']) s2 = Automerge.merge(s2, s1) s1 = Automerge.change(s1, doc => doc.list[0] = {map1: true}) @@ -1204,8 +1192,7 @@ describe('Automerge', () => { assert.deepStrictEqual(doc, {list: expected}) }) - /* - it.skip('should call patchCallback if supplied', () => { + it.skip('should call patchCallback if supplied to load', () => { const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch')) const callbacks = [], actor = Automerge.getActorId(s1) @@ -1227,7 +1214,6 @@ describe('Automerge', () => { assert.strictEqual(callbacks[0].after, reloaded) assert.strictEqual(callbacks[0].local, false) }) - */ }) describe('history API', () => { @@ -1354,65 +1340,48 @@ describe('Automerge', () => { let s4 = Automerge.init() let [s5] = Automerge.applyChanges(s4, changes23) let [s6] = Automerge.applyChanges(s5, changes12) -// assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s6)), [decodeChange(changes01[0]).hash]) assert.deepStrictEqual(Automerge.getMissingDeps(s6), [decodeChange(changes01[0]).hash]) }) - /* - it.skip('should call patchCallback if supplied when applying changes', () => { + it('should call patchCallback if supplied when applying changes', () => { const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) const callbacks = [], actor = Automerge.getActorId(s1) const before = Automerge.init() const [after, patch] = Automerge.applyChanges(before, Automerge.getAllChanges(s1), { - patchCallback(patch, before, after, local) { - callbacks.push({patch, before, after, local}) + patchCallback(patch, before, after) { + callbacks.push({patch, before, after}) } }) - assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patch, { - maxOp: 2, deps: 
[decodeChange(Automerge.getAllChanges(s1)[0]).hash], clock: {[actor]: 1}, pendingChanges: 0, - diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: { - objectId: `1@${actor}`, type: 'list', edits: [ - {action: 'insert', index: 0, elemId: `2@${actor}`, opId: `2@${actor}`, value: {type: 'value', value: 'Goldfinch'}} - ] - }}}} - }) - assert.strictEqual(callbacks[0].patch, patch) + assert.strictEqual(callbacks.length, 2) + assert.deepStrictEqual(callbacks[0].patch, { action: 'put', path: ["birds"], value: [], conflict: false }) + assert.deepStrictEqual(callbacks[1].patch, { action: 'splice', path: ["birds",0], values: ["Goldfinch"] }) assert.strictEqual(callbacks[0].before, before) - assert.strictEqual(callbacks[0].after, after) - assert.strictEqual(callbacks[0].local, false) + assert.strictEqual(callbacks[1].after, after) }) - */ - /* - it.skip('should merge multiple applied changes into one patch', () => { + it('should merge multiple applied changes into one patch', () => { const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch')) const patches = [], actor = Automerge.getActorId(s2) Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2), {patchCallback: p => patches.push(p)}) - assert.deepStrictEqual(patches, [{ - maxOp: 3, deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], clock: {[actor]: 2}, pendingChanges: 0, - diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: { - objectId: `1@${actor}`, type: 'list', edits: [ - {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['Goldfinch', 'Chaffinch']} - ] - }}}} - }]) + assert.deepStrictEqual(patches, [ + { action: 'put', conflict: false, path: [ 'birds' ], value: [] }, + { action: "splice", path: [ "birds", 0 ], values: [ "Goldfinch", "Chaffinch" ] } + ]) }) - */ - /* - it.skip('should call a patchCallback registered on doc initialisation', () => { + 
it('should call a patchCallback registered on doc initialisation', () => { const s1 = Automerge.change(Automerge.init(), doc => doc.bird = 'Goldfinch') const patches = [], actor = Automerge.getActorId(s1) const before = Automerge.init({patchCallback: p => patches.push(p)}) Automerge.applyChanges(before, Automerge.getAllChanges(s1)) assert.deepStrictEqual(patches, [{ - maxOp: 1, deps: [decodeChange(Automerge.getAllChanges(s1)[0]).hash], clock: {[actor]: 1}, pendingChanges: 0, - diffs: {objectId: '_root', type: 'map', props: {bird: {[`1@${actor}`]: {type: 'value', value: 'Goldfinch'}}}} - }]) + action: "put", + conflict: false, + path: [ "bird" ], + value: "Goldfinch" } + ]) }) - */ }) }) diff --git a/automerge-js/test/sync_test.ts b/automerge-js/test/sync_test.ts index 13641e80..65482c67 100644 --- a/automerge-js/test/sync_test.ts +++ b/automerge-js/test/sync_test.ts @@ -535,7 +535,7 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n2), [n1hash2, n2hash2].sort()) }) - it('should sync three nodes', () => { + it.skip('should sync three nodes', () => { s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) diff --git a/automerge-js/test/text_test.ts b/automerge-js/test/text_test.ts index c2ef348d..2ca37c19 100644 --- a/automerge-js/test/text_test.ts +++ b/automerge-js/test/text_test.ts @@ -382,8 +382,8 @@ describe('Automerge.Text', () => { assert.strictEqual(s1.text.get(0), 'a') }) - it('should exclude control characters from toString()', () => { - assert.strictEqual(s1.text.toString(), 'a') + it('should replace control characters from toString()', () => { + assert.strictEqual(s1.text.toString(), 'a\uFFFC') }) it('should allow control characters to be updated', () => { @@ -620,7 +620,7 @@ describe('Automerge.Text', () => { applyDeltaDocToAutomergeText(delta, doc) }) - assert.strictEqual(s2.text.toString(), 'Hello reader!') + assert.strictEqual(s2.text.toString(), 'Hello \uFFFCreader\uFFFC!') 
assert.deepEqual(s2.text.toSpans(), [ "Hello ", { attributes: { bold: true } }, @@ -648,7 +648,7 @@ describe('Automerge.Text', () => { applyDeltaDocToAutomergeText(delta, doc) }) - assert.strictEqual(s2.text.toString(), 'Hello reader!') + assert.strictEqual(s2.text.toString(), 'Hell\uFFFCo \uFFFCreader\uFFFC\uFFFC!') assert.deepEqual(s2.text.toSpans(), [ "Hell", { attributes: { color: '#ccc'} }, diff --git a/automerge-wasm/Cargo.toml b/automerge-wasm/Cargo.toml index 74d050ed..eea88dd3 100644 --- a/automerge-wasm/Cargo.toml +++ b/automerge-wasm/Cargo.toml @@ -33,6 +33,7 @@ serde-wasm-bindgen = "0.4.3" serde_bytes = "0.11.5" hex = "^0.4.3" regex = "^1.5" +itertools = "^0.10.3" [dependencies.wasm-bindgen] version = "^0.2.83" diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 1f67e6ec..66161b8a 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -1,14 +1,20 @@ -use crate::AutoCommit; +use crate::value::Datatype; +use crate::Automerge; use automerge as am; use automerge::transaction::Transactable; -use automerge::{Change, ChangeHash, Prop}; -use js_sys::{Array, Function, Object, Reflect, Uint8Array}; +use automerge::{Change, ChangeHash, ObjType, Prop}; +use js_sys::{Array, Function, Object, Reflect, Symbol, Uint8Array}; use std::collections::{BTreeSet, HashSet}; use std::fmt::Display; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; -use crate::{observer::Patch, ObjId, ScalarValue, Value}; +use crate::{observer::Patch, ObjId, Value}; + +const RAW_DATA_SYMBOL: &str = "_am_raw_value_"; +const DATATYPE_SYMBOL: &str = "_am_datatype_"; +const RAW_OBJECT_SYMBOL: &str = "_am_objectId"; +const META_SYMBOL: &str = "_am_meta"; pub(crate) struct JS(pub(crate) JsValue); pub(crate) struct AR(pub(crate) Array); @@ -51,11 +57,11 @@ impl From for JS { impl From> for JS { fn from(heads: Vec) -> Self { - let heads: Array = heads + JS(heads .iter() .map(|h| JsValue::from_str(&h.to_string())) - .collect(); - 
JS(heads.into()) + .collect::() + .into()) } } @@ -290,17 +296,16 @@ pub(crate) fn to_prop(p: JsValue) -> Result { pub(crate) fn to_objtype( value: &JsValue, datatype: &Option, -) -> Option<(am::ObjType, Vec<(Prop, JsValue)>)> { +) -> Option<(ObjType, Vec<(Prop, JsValue)>)> { match datatype.as_deref() { Some("map") => { let map = value.clone().dyn_into::().ok()?; - // FIXME unwrap let map = js_sys::Object::keys(&map) .iter() .zip(js_sys::Object::values(&map).iter()) .map(|(key, val)| (key.as_string().unwrap().into(), val)) .collect(); - Some((am::ObjType::Map, map)) + Some((ObjType::Map, map)) } Some("list") => { let list = value.clone().dyn_into::().ok()?; @@ -309,7 +314,7 @@ pub(crate) fn to_objtype( .enumerate() .map(|(i, e)| (i.into(), e)) .collect(); - Some((am::ObjType::List, list)) + Some((ObjType::List, list)) } Some("text") => { let text = value.as_string()?; @@ -318,7 +323,7 @@ pub(crate) fn to_objtype( .enumerate() .map(|(i, ch)| (i.into(), ch.to_string().into())) .collect(); - Some((am::ObjType::Text, text)) + Some((ObjType::Text, text)) } Some(_) => None, None => { @@ -328,7 +333,7 @@ pub(crate) fn to_objtype( .enumerate() .map(|(i, e)| (i.into(), e)) .collect(); - Some((am::ObjType::List, list)) + Some((ObjType::List, list)) } else if let Ok(map) = value.clone().dyn_into::() { // FIXME unwrap let map = js_sys::Object::keys(&map) @@ -336,14 +341,14 @@ pub(crate) fn to_objtype( .zip(js_sys::Object::values(&map).iter()) .map(|(key, val)| (key.as_string().unwrap().into(), val)) .collect(); - Some((am::ObjType::Map, map)) + Some((ObjType::Map, map)) } else if let Some(text) = value.as_string() { let text = text .chars() .enumerate() .map(|(i, ch)| (i.into(), ch.to_string().into())) .collect(); - Some((am::ObjType::Text, text)) + Some((ObjType::Text, text)) } else { None } @@ -358,246 +363,358 @@ pub(crate) fn get_heads(heads: Option) -> Option> { heads.ok() } -pub(crate) fn map_to_js(doc: &AutoCommit, obj: &ObjId) -> JsValue { - let keys = doc.keys(obj); - 
let map = Object::new(); - for k in keys { - let val = doc.get(obj, &k); - match val { - Ok(Some((Value::Object(o), exid))) - if o == am::ObjType::Map || o == am::ObjType::Table => - { - Reflect::set(&map, &k.into(), &map_to_js(doc, &exid)).unwrap(); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => { - Reflect::set(&map, &k.into(), &list_to_js(doc, &exid)).unwrap(); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => { - Reflect::set(&map, &k.into(), &doc.text(&exid).unwrap().into()).unwrap(); - } - Ok(Some((Value::Scalar(v), _))) => { - Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap(); - } - _ => (), +impl Automerge { + pub(crate) fn export_object( + &self, + obj: &ObjId, + datatype: Datatype, + heads: Option<&Vec>, + meta: &JsValue, + ) -> Result { + let result = if datatype.is_sequence() { + self.wrap_object( + self.export_list(obj, heads, meta)?, + datatype, + &obj.to_string().into(), + meta, + )? + } else { + self.wrap_object( + self.export_map(obj, heads, meta)?, + datatype, + &obj.to_string().into(), + meta, + )? 
}; + Ok(result.into()) } - map.into() -} -pub(crate) fn map_to_js_at(doc: &AutoCommit, obj: &ObjId, heads: &[ChangeHash]) -> JsValue { - let keys = doc.keys(obj); - let map = Object::new(); - for k in keys { - let val = doc.get_at(obj, &k, heads); - match val { - Ok(Some((Value::Object(o), exid))) - if o == am::ObjType::Map || o == am::ObjType::Table => - { - Reflect::set(&map, &k.into(), &map_to_js_at(doc, &exid, heads)).unwrap(); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => { - Reflect::set(&map, &k.into(), &list_to_js_at(doc, &exid, heads)).unwrap(); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => { - Reflect::set(&map, &k.into(), &doc.text_at(&exid, heads).unwrap().into()).unwrap(); - } - Ok(Some((Value::Scalar(v), _))) => { - Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap(); - } - _ => (), - }; - } - map.into() -} - -pub(crate) fn list_to_js(doc: &AutoCommit, obj: &ObjId) -> JsValue { - let len = doc.length(obj); - let array = Array::new(); - for i in 0..len { - let val = doc.get(obj, i as usize); - match val { - Ok(Some((Value::Object(o), exid))) - if o == am::ObjType::Map || o == am::ObjType::Table => - { - array.push(&map_to_js(doc, &exid)); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => { - array.push(&list_to_js(doc, &exid)); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => { - array.push(&doc.text(&exid).unwrap().into()); - } - Ok(Some((Value::Scalar(v), _))) => { - array.push(&ScalarValue(v).into()); - } - _ => (), - }; - } - array.into() -} - -pub(crate) fn list_to_js_at(doc: &AutoCommit, obj: &ObjId, heads: &[ChangeHash]) -> JsValue { - let len = doc.length(obj); - let array = Array::new(); - for i in 0..len { - let val = doc.get_at(obj, i as usize, heads); - match val { - Ok(Some((Value::Object(o), exid))) - if o == am::ObjType::Map || o == am::ObjType::Table => - { - array.push(&map_to_js_at(doc, &exid, heads)); - } - Ok(Some((Value::Object(o), 
exid))) if o == am::ObjType::List => { - array.push(&list_to_js_at(doc, &exid, heads)); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => { - array.push(&doc.text_at(exid, heads).unwrap().into()); - } - Ok(Some((Value::Scalar(v), _))) => { - array.push(&ScalarValue(v).into()); - } - _ => (), - }; - } - array.into() -} - -/* -pub(crate) fn export_values<'a, V: Iterator>>(val: V) -> Array { - val.map(|v| export_value(&v)).collect() -} -*/ - -pub(crate) fn export_value(val: &Value<'_>) -> JsValue { - match val { - Value::Object(o) if o == &am::ObjType::Map || o == &am::ObjType::Table => { - Object::new().into() + pub(crate) fn export_map( + &self, + obj: &ObjId, + heads: Option<&Vec>, + meta: &JsValue, + ) -> Result { + let keys = self.doc.keys(obj); + let map = Object::new(); + for k in keys { + let val_and_id = if let Some(heads) = heads { + self.doc.get_at(obj, &k, heads) + } else { + self.doc.get(obj, &k) + }; + if let Ok(Some((val, id))) = val_and_id { + let subval = match val { + Value::Object(o) => self.export_object(&id, o.into(), heads, meta)?, + Value::Scalar(_) => self.export_value(alloc(&val))?, + }; + Reflect::set(&map, &k.into(), &subval)?; + }; } - Value::Object(_) => Array::new().into(), - Value::Scalar(v) => ScalarValue(v.clone()).into(), + + Ok(map) } -} -pub(crate) fn apply_patch(obj: JsValue, patch: &Patch) -> Result { - apply_patch2(obj, patch, 0) -} + pub(crate) fn export_list( + &self, + obj: &ObjId, + heads: Option<&Vec>, + meta: &JsValue, + ) -> Result { + let len = self.doc.length(obj); + let array = Array::new(); + for i in 0..len { + let val_and_id = if let Some(heads) = heads { + self.doc.get_at(obj, i as usize, heads) + } else { + self.doc.get(obj, i as usize) + }; + if let Ok(Some((val, id))) = val_and_id { + let subval = match val { + Value::Object(o) => self.export_object(&id, o.into(), heads, meta)?, + Value::Scalar(_) => self.export_value(alloc(&val))?, + }; + array.push(&subval); + }; + } -pub(crate) fn 
apply_patch2(obj: JsValue, patch: &Patch, depth: usize) -> Result { - match (js_to_map_seq(&obj)?, patch.path().get(depth)) { - (JsObj::Map(o), Some(Prop::Map(key))) => { - let sub_obj = Reflect::get(&obj, &key.into())?; - let new_value = apply_patch2(sub_obj, patch, depth + 1)?; - let result = - Reflect::construct(&o.constructor(), &Array::new())?.dyn_into::()?; - let result = Object::assign(&result, &o).into(); - Reflect::set(&result, &key.into(), &new_value)?; - Ok(result) + Ok(array.into()) + } + + pub(crate) fn export_value( + &self, + (datatype, raw_value): (Datatype, JsValue), + ) -> Result { + if let Some(function) = self.external_types.get(&datatype) { + let wrapped_value = function.call1(&JsValue::undefined(), &raw_value)?; + if let Ok(o) = wrapped_value.dyn_into::() { + let key = Symbol::for_(RAW_DATA_SYMBOL); + set_hidden_value(&o, &key, &raw_value)?; + let key = Symbol::for_(DATATYPE_SYMBOL); + set_hidden_value(&o, &key, datatype)?; + Ok(o.into()) + } else { + Err(to_js_err(format!( + "data handler for type {} did not return a valid object", + datatype + ))) + } + } else { + Ok(raw_value) } - (JsObj::Seq(a), Some(Prop::Seq(index))) => { - let index = JsValue::from_f64(*index as f64); - let sub_obj = Reflect::get(&obj, &index)?; - let new_value = apply_patch2(sub_obj, patch, depth + 1)?; - let result = Reflect::construct(&a.constructor(), &a)?; - //web_sys::console::log_2(&format!("NEW VAL {}: ", tmpi).into(), &new_value); - Reflect::set(&result, &index, &new_value)?; - Ok(result) + } + + pub(crate) fn unwrap_object( + &self, + ext_val: &Object, + ) -> Result<(Object, Datatype, JsValue), JsValue> { + let inner = Reflect::get(ext_val, &Symbol::for_(RAW_DATA_SYMBOL))?; + + let datatype = Reflect::get(ext_val, &Symbol::for_(DATATYPE_SYMBOL))?.try_into(); + + let mut id = Reflect::get(ext_val, &Symbol::for_(RAW_OBJECT_SYMBOL))?; + if id.is_undefined() { + id = "_root".into(); } - (JsObj::Map(o), None) => { - let result = - 
Reflect::construct(&o.constructor(), &Array::new())?.dyn_into::()?; - let result = Object::assign(&result, &o); - match patch { - Patch::PutMap { key, value, .. } => { - let result = result.into(); - Reflect::set(&result, &key.into(), &export_value(value))?; - Ok(result) - } - Patch::DeleteMap { key, .. } => { - Reflect::delete_property(&result, &key.into())?; - Ok(result.into()) - } - Patch::Increment { prop, value, .. } => { - let result = result.into(); - if let Prop::Map(key) = prop { - let key = key.into(); - let old_val = Reflect::get(&o, &key)?; - if let Some(old) = old_val.as_f64() { - Reflect::set(&result, &key, &JsValue::from(old + *value as f64))?; - Ok(result) - } else { - Err(to_js_err("cant increment a non number value")) - } + + let inner = inner + .dyn_into::() + .unwrap_or_else(|_| ext_val.clone()); + let datatype = datatype.unwrap_or_else(|_| { + if Array::is_array(&inner) { + Datatype::List + } else { + Datatype::Map + } + }); + Ok((inner, datatype, id)) + } + + pub(crate) fn unwrap_scalar(&self, ext_val: JsValue) -> Result { + let inner = Reflect::get(&ext_val, &Symbol::for_(RAW_DATA_SYMBOL))?; + if !inner.is_undefined() { + Ok(inner) + } else { + Ok(ext_val) + } + } + + fn maybe_wrap_object( + &self, + (datatype, raw_value): (Datatype, JsValue), + id: &ObjId, + meta: &JsValue, + ) -> Result { + if let Ok(obj) = raw_value.clone().dyn_into::() { + let result = self.wrap_object(obj, datatype, &id.to_string().into(), meta)?; + Ok(result.into()) + } else { + self.export_value((datatype, raw_value)) + } + } + + pub(crate) fn wrap_object( + &self, + value: Object, + datatype: Datatype, + id: &JsValue, + meta: &JsValue, + ) -> Result { + let value = if let Some(function) = self.external_types.get(&datatype) { + let wrapped_value = function.call1(&JsValue::undefined(), &value)?; + let wrapped_object = wrapped_value.dyn_into::().map_err(|_| { + to_js_err(format!( + "data handler for type {} did not return a valid object", + datatype + )) + })?; + 
set_hidden_value(&wrapped_object, &Symbol::for_(RAW_DATA_SYMBOL), value)?; + wrapped_object + } else { + value + }; + set_hidden_value(&value, &Symbol::for_(DATATYPE_SYMBOL), datatype)?; + set_hidden_value(&value, &Symbol::for_(RAW_OBJECT_SYMBOL), id)?; + set_hidden_value(&value, &Symbol::for_(META_SYMBOL), meta)?; + Ok(value) + } + + pub(crate) fn apply_patch_to_array( + &self, + array: &Object, + patch: &Patch, + meta: &JsValue, + ) -> Result { + let result = Array::from(array); // shallow copy + match patch { + Patch::PutSeq { index, value, .. } => { + let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; + Reflect::set(&result, &(*index as f64).into(), &sub_val)?; + Ok(result.into()) + } + Patch::DeleteSeq { index, .. } => self.sub_splice(result, *index, 1, &[], meta), + Patch::Insert { index, values, .. } => self.sub_splice(result, *index, 0, values, meta), + Patch::Increment { prop, value, .. } => { + if let Prop::Seq(index) = prop { + let index = (*index as f64).into(); + let old_val = Reflect::get(&result, &index)?; + let old_val = self.unwrap_scalar(old_val)?; + if let Some(old) = old_val.as_f64() { + let new_value: Value<'_> = + am::ScalarValue::counter(old as i64 + *value).into(); + Reflect::set(&result, &index, &self.export_value(alloc(&new_value))?)?; + Ok(result.into()) } else { - Err(to_js_err("cant increment an index on a map")) + Err(to_js_err("cant increment a non number value")) } + } else { + Err(to_js_err("cant increment a key on a seq")) } - Patch::Insert { .. } => Err(to_js_err("cannot insert into map")), - Patch::DeleteSeq { .. } => Err(to_js_err("cannot splice a map")), - Patch::PutSeq { .. } => Err(to_js_err("cannot array index a map")), } + Patch::DeleteMap { .. } => Err(to_js_err("cannot delete from a seq")), + Patch::PutMap { .. } => Err(to_js_err("cannot set key in seq")), } - (JsObj::Seq(a), None) => { - match patch { - Patch::PutSeq { index, value, .. 
} => { - let result = Reflect::construct(&a.constructor(), &a)?; - Reflect::set(&result, &(*index as f64).into(), &export_value(value))?; - Ok(result) - } - Patch::DeleteSeq { index, .. } => { - let result = &a.dyn_into::()?; - let mut f = |_, i, _| i != *index as u32; - let result = result.filter(&mut f); + } - Ok(result.into()) - } - Patch::Insert { index, values, .. } => { - let from = Reflect::get(&a.constructor().into(), &"from".into())? - .dyn_into::()?; - let result = from.call1(&JsValue::undefined(), &a)?.dyn_into::()?; - // TODO: should be one function call - for (i, v) in values.iter().enumerate() { - result.splice(*index as u32 + i as u32, 0, &export_value(v)); - } - Ok(result.into()) - } - Patch::Increment { prop, value, .. } => { - let result = Reflect::construct(&a.constructor(), &a)?; - if let Prop::Seq(index) = prop { - let index = (*index as f64).into(); - let old_val = Reflect::get(&a, &index)?; - if let Some(old) = old_val.as_f64() { - Reflect::set(&result, &index, &JsValue::from(old + *value as f64))?; - Ok(result) - } else { - Err(to_js_err("cant increment a non number value")) - } - } else { - Err(to_js_err("cant increment a key on a seq")) - } - } - Patch::DeleteMap { .. } => Err(to_js_err("cannot delete from a seq")), - Patch::PutMap { .. } => Err(to_js_err("cannot set key in seq")), + pub(crate) fn apply_patch_to_map( + &self, + map: &Object, + patch: &Patch, + meta: &JsValue, + ) -> Result { + let result = Object::assign(&Object::new(), map); // shallow copy + match patch { + Patch::PutMap { key, value, .. } => { + let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; + Reflect::set(&result, &key.into(), &sub_val)?; + Ok(result) } + Patch::DeleteMap { key, .. } => { + Reflect::delete_property(&result, &key.into())?; + Ok(result) + } + Patch::Increment { prop, value, .. 
} => { + if let Prop::Map(key) = prop { + let key = key.into(); + let old_val = Reflect::get(&result, &key)?; + let old_val = self.unwrap_scalar(old_val)?; + if let Some(old) = old_val.as_f64() { + let new_value: Value<'_> = + am::ScalarValue::counter(old as i64 + *value).into(); + Reflect::set(&result, &key, &self.export_value(alloc(&new_value))?)?; + Ok(result) + } else { + Err(to_js_err("cant increment a non number value")) + } + } else { + Err(to_js_err("cant increment an index on a map")) + } + } + Patch::Insert { .. } => Err(to_js_err("cannot insert into map")), + Patch::DeleteSeq { .. } => Err(to_js_err("cannot splice a map")), + Patch::PutSeq { .. } => Err(to_js_err("cannot array index a map")), } - (_, _) => Err(to_js_err(format!( - "object/patch missmatch {:?} depth={:?}", - patch, depth - ))), + } + + pub(crate) fn apply_patch( + &self, + obj: Object, + patch: &Patch, + depth: usize, + meta: &JsValue, + ) -> Result { + let (inner, datatype, id) = self.unwrap_object(&obj)?; + let prop = patch.path().get(depth).map(|p| prop_to_js(&p.1)); + let result = if let Some(prop) = prop { + if let Ok(sub_obj) = Reflect::get(&inner, &prop)?.dyn_into::() { + let new_value = self.apply_patch(sub_obj, patch, depth + 1, meta)?; + let result = shallow_copy(&inner); + Reflect::set(&result, &prop, &new_value)?; + Ok(result) + } else { + // if a patch is trying to access a deleted object make no change + // short circuit the wrap process + return Ok(obj); + } + } else if Array::is_array(&inner) { + self.apply_patch_to_array(&inner, patch, meta) + } else { + self.apply_patch_to_map(&inner, patch, meta) + }?; + + self.wrap_object(result, datatype, &id, meta) + } + + fn sub_splice( + &self, + o: Array, + index: usize, + num_del: usize, + values: &[(Value<'_>, ObjId)], + meta: &JsValue, + ) -> Result { + let args: Array = values + .iter() + .map(|v| self.maybe_wrap_object(alloc(&v.0), &v.1, meta)) + .collect::>()?; + args.unshift(&(num_del as u32).into()); + args.unshift(&(index 
as u32).into()); + let method = Reflect::get(&o, &"splice".into())?.dyn_into::()?; + Reflect::apply(&method, &o, &args)?; + Ok(o.into()) } } -#[derive(Debug)] -enum JsObj { - Map(Object), - Seq(Array), +pub(crate) fn alloc(value: &Value<'_>) -> (Datatype, JsValue) { + match value { + am::Value::Object(o) => match o { + ObjType::Map => (Datatype::Map, Object::new().into()), + ObjType::Table => (Datatype::Table, Object::new().into()), + ObjType::List => (Datatype::List, Array::new().into()), + ObjType::Text => (Datatype::Text, Array::new().into()), + }, + am::Value::Scalar(s) => match s.as_ref() { + am::ScalarValue::Bytes(v) => (Datatype::Bytes, Uint8Array::from(v.as_slice()).into()), + am::ScalarValue::Str(v) => (Datatype::Str, v.to_string().into()), + am::ScalarValue::Int(v) => (Datatype::Int, (*v as f64).into()), + am::ScalarValue::Uint(v) => (Datatype::Uint, (*v as f64).into()), + am::ScalarValue::F64(v) => (Datatype::F64, (*v).into()), + am::ScalarValue::Counter(v) => (Datatype::Counter, (f64::from(v)).into()), + am::ScalarValue::Timestamp(v) => ( + Datatype::Timestamp, + js_sys::Date::new(&(*v as f64).into()).into(), + ), + am::ScalarValue::Boolean(v) => (Datatype::Boolean, (*v).into()), + am::ScalarValue::Null => (Datatype::Null, JsValue::null()), + am::ScalarValue::Unknown { bytes, type_code } => ( + Datatype::Unknown(*type_code), + Uint8Array::from(bytes.as_slice()).into(), + ), + }, + } } -fn js_to_map_seq(value: &JsValue) -> Result { - if let Ok(array) = value.clone().dyn_into::() { - Ok(JsObj::Seq(array)) - } else if let Ok(obj) = value.clone().dyn_into::() { - Ok(JsObj::Map(obj)) +fn set_hidden_value>(o: &Object, key: &Symbol, value: V) -> Result<(), JsValue> { + let definition = Object::new(); + js_set(&definition, "value", &value.into())?; + js_set(&definition, "writable", false)?; + js_set(&definition, "enumerable", false)?; + js_set(&definition, "configurable", false)?; + Object::define_property(o, &key.into(), &definition); + Ok(()) +} + +fn 
shallow_copy(obj: &Object) -> Object { + if Array::is_array(obj) { + Array::from(obj).into() } else { - Err(to_js_err("obj is not Object or Array")) + Object::assign(&Object::new(), obj) + } +} + +fn prop_to_js(prop: &Prop) -> JsValue { + match prop { + Prop::Map(key) => key.into(), + Prop::Seq(index) => (*index as f64).into(), } } diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 26a80861..15381c8c 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -29,9 +29,10 @@ use am::transaction::CommitOptions; use am::transaction::Transactable; use automerge as am; -use automerge::{Change, ObjId, Prop, Value, ROOT}; -use js_sys::{Array, Object, Uint8Array}; -use serde::Serialize; +use automerge::{Change, ObjId, ObjType, Prop, Value, ROOT}; +use js_sys::{Array, Function, Object, Uint8Array}; +use serde::ser::Serialize; +use std::collections::HashMap; use std::convert::TryInto; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; @@ -43,12 +44,9 @@ mod value; use observer::Observer; -use interop::{ - apply_patch, get_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at, - to_js_err, to_objtype, to_prop, AR, JS, -}; +use interop::{alloc, get_heads, js_get, js_set, to_js_err, to_objtype, to_prop, AR, JS}; use sync::SyncState; -use value::{datatype, ScalarValue}; +use value::Datatype; #[allow(unused_macros)] macro_rules! 
log { @@ -67,6 +65,7 @@ static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[derive(Debug)] pub struct Automerge { doc: AutoCommit, + external_types: HashMap, } #[wasm_bindgen] @@ -77,13 +76,17 @@ impl Automerge { let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec()); doc.set_actor(a); } - Ok(Automerge { doc }) + Ok(Automerge { + doc, + external_types: HashMap::default(), + }) } #[allow(clippy::should_implement_trait)] pub fn clone(&mut self, actor: Option) -> Result { let mut automerge = Automerge { doc: self.doc.clone(), + external_types: self.external_types.clone(), }; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); @@ -95,6 +98,7 @@ impl Automerge { pub fn fork(&mut self, actor: Option) -> Result { let mut automerge = Automerge { doc: self.doc.fork(), + external_types: self.external_types.clone(), }; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); @@ -108,6 +112,7 @@ impl Automerge { let deps: Vec<_> = JS(heads).try_into()?; let mut automerge = Automerge { doc: self.doc.fork_at(&deps)?, + external_types: self.external_types.clone(), }; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); @@ -341,10 +346,13 @@ impl Automerge { } else { self.doc.get(&obj, prop)? 
}; - match value { - Some((Value::Object(_), obj_id)) => Ok(obj_id.to_string().into()), - Some((Value::Scalar(value), _)) => Ok(ScalarValue(value).into()), - None => Ok(JsValue::undefined()), + if let Some((value, id)) = value { + match alloc(&value) { + (datatype, js_value) if datatype.is_scalar() => Ok(js_value), + _ => Ok(id.to_string().into()), + } + } else { + Ok(JsValue::undefined()) } } else { Ok(JsValue::undefined()) @@ -359,7 +367,6 @@ impl Automerge { heads: Option, ) -> Result { let obj = self.import(obj)?; - let result = Array::new(); let prop = to_prop(prop); let heads = get_heads(heads); if let Ok(prop) = prop { @@ -368,18 +375,24 @@ impl Automerge { } else { self.doc.get(&obj, prop)? }; - match value { - Some((Value::Object(obj_type), obj_id)) => { - result.push(&obj_type.to_string().into()); - result.push(&obj_id.to_string().into()); - Ok(result.into()) + if let Some(value) = value { + match &value { + (Value::Object(obj_type), obj_id) => { + let result = Array::new(); + result.push(&obj_type.to_string().into()); + result.push(&obj_id.to_string().into()); + Ok(result.into()) + } + (Value::Scalar(_), _) => { + let result = Array::new(); + let (datatype, value) = alloc(&value.0); + result.push(&datatype.into()); + result.push(&value); + Ok(result.into()) + } } - Some((Value::Scalar(value), _)) => { - result.push(&datatype(&value).into()); - result.push(&ScalarValue(value).into()); - Ok(result.into()) - } - None => Ok(JsValue::null()), + } else { + Ok(JsValue::null()) } } else { Ok(JsValue::null()) @@ -403,22 +416,15 @@ impl Automerge { self.doc.get_all(&obj, prop) } .map_err(to_js_err)?; - for value in values { - match value { - (Value::Object(obj_type), obj_id) => { - let sub = Array::new(); - sub.push(&obj_type.to_string().into()); - sub.push(&obj_id.to_string().into()); - result.push(&sub.into()); - } - (Value::Scalar(value), id) => { - let sub = Array::new(); - sub.push(&datatype(&value).into()); - sub.push(&ScalarValue(value).into()); - 
sub.push(&id.to_string().into()); - result.push(&sub.into()); - } + for (value, id) in values { + let sub = Array::new(); + let (datatype, js_value) = alloc(&value); + sub.push(&datatype.into()); + if value.is_scalar() { + sub.push(&js_value); } + sub.push(&id.to_string().into()); + result.push(&JsValue::from(&sub)); } } Ok(result) @@ -433,13 +439,51 @@ impl Automerge { Ok(()) } - #[wasm_bindgen(js_name = applyPatches)] - pub fn apply_patches(&mut self, mut object: JsValue) -> Result { - let patches = self.doc.observer().take_patches(); - for p in patches { - object = apply_patch(object, &p)?; + #[wasm_bindgen(js_name = registerDatatype)] + pub fn register_datatype( + &mut self, + datatype: JsValue, + function: JsValue, + ) -> Result<(), JsValue> { + let datatype = Datatype::try_from(datatype)?; + if let Ok(function) = function.dyn_into::() { + self.external_types.insert(datatype, function); + } else { + self.external_types.remove(&datatype); } - Ok(object) + Ok(()) + } + + #[wasm_bindgen(js_name = applyPatches)] + pub fn apply_patches( + &mut self, + object: JsValue, + meta: JsValue, + callback: JsValue, + ) -> Result { + let mut object = object.dyn_into::()?; + let patches = self.doc.observer().take_patches(); + let callback = callback.dyn_into::().ok(); + + // even if there are no patches we may need to update the meta object + // which requires that we update the object too + if patches.is_empty() && !meta.is_undefined() { + let (obj, datatype, id) = self.unwrap_object(&object)?; + object = Object::assign(&Object::new(), &obj); + object = self.wrap_object(object, datatype, &id, &meta)?; + } + + for p in patches { + if let Some(c) = &callback { + let before = object.clone(); + object = self.apply_patch(object, &p, 0, &meta)?; + c.call3(&JsValue::undefined(), &p.try_into()?, &before, &object)?; + } else { + object = self.apply_patch(object, &p, 0, &meta)?; + } + } + + Ok(object.into()) } #[wasm_bindgen(js_name = popPatches)] @@ -592,30 +636,24 @@ impl Automerge { 
} #[wasm_bindgen(js_name = toJS)] - pub fn to_js(&self) -> JsValue { - map_to_js(&self.doc, &ROOT) + pub fn to_js(&self, meta: JsValue) -> Result { + self.export_object(&ROOT, Datatype::Map, None, &meta) } - pub fn materialize(&self, obj: JsValue, heads: Option) -> Result { + pub fn materialize( + &mut self, + obj: JsValue, + heads: Option, + meta: JsValue, + ) -> Result { let obj = self.import(obj).unwrap_or(ROOT); let heads = get_heads(heads); - if let Some(heads) = heads { - match self.doc.object_type(&obj) { - Some(am::ObjType::Map) => Ok(map_to_js_at(&self.doc, &obj, heads.as_slice())), - Some(am::ObjType::List) => Ok(list_to_js_at(&self.doc, &obj, heads.as_slice())), - Some(am::ObjType::Text) => Ok(self.doc.text_at(&obj, heads.as_slice())?.into()), - Some(am::ObjType::Table) => Ok(map_to_js_at(&self.doc, &obj, heads.as_slice())), - None => Err(to_js_err(format!("invalid obj {}", obj))), - } - } else { - match self.doc.object_type(&obj) { - Some(am::ObjType::Map) => Ok(map_to_js(&self.doc, &obj)), - Some(am::ObjType::List) => Ok(list_to_js(&self.doc, &obj)), - Some(am::ObjType::Text) => Ok(self.doc.text(&obj)?.into()), - Some(am::ObjType::Table) => Ok(map_to_js(&self.doc, &obj)), - None => Err(to_js_err(format!("invalid obj {}", obj))), - } - } + let obj_type = self + .doc + .object_type(&obj) + .ok_or_else(|| to_js_err(format!("invalid obj {}", obj)))?; + let _patches = self.doc.observer().take_patches(); // throw away patches + self.export_object(&obj, obj_type.into(), heads.as_ref(), &meta) } fn import(&self, id: JsValue) -> Result { @@ -634,11 +672,11 @@ impl Automerge { self.doc.get(obj, am::Prop::Seq(prop.parse().unwrap()))? 
}; match val { - Some((am::Value::Object(am::ObjType::Map), id)) => { + Some((am::Value::Object(ObjType::Map), id)) => { is_map = true; obj = id; } - Some((am::Value::Object(am::ObjType::Table), id)) => { + Some((am::Value::Object(ObjType::Table), id)) => { is_map = true; obj = id; } @@ -748,7 +786,10 @@ pub fn load(data: Uint8Array, actor: Option) -> Result, + path: Vec<(ObjId, Prop)>, key: String, - value: Value<'static>, + value: (Value<'static>, ObjId), conflict: bool, }, PutSeq { obj: ObjId, - path: Vec, + path: Vec<(ObjId, Prop)>, index: usize, - value: Value<'static>, + value: (Value<'static>, ObjId), conflict: bool, }, Insert { obj: ObjId, - path: Vec, + path: Vec<(ObjId, Prop)>, index: usize, - values: Vec>, + values: Vec<(Value<'static>, ObjId)>, }, Increment { obj: ObjId, - path: Vec, + path: Vec<(ObjId, Prop)>, prop: Prop, value: i64, }, DeleteMap { obj: ObjId, - path: Vec, + path: Vec<(ObjId, Prop)>, key: String, }, DeleteSeq { obj: ObjId, - path: Vec, + path: Vec<(ObjId, Prop)>, index: usize, length: usize, }, @@ -73,6 +73,7 @@ impl OpObserver for Observer { tagged_value: (Value<'_>, ObjId), ) { if self.enabled { + let value = (tagged_value.0.to_owned(), tagged_value.1); if let Some(Patch::Insert { obj: tail_obj, index: tail_index, @@ -81,12 +82,11 @@ impl OpObserver for Observer { }) = self.patches.last_mut() { if tail_obj == &obj && *tail_index + values.len() == index { - values.push(tagged_value.0.to_owned()); + values.push(value); return; } } - let path = parents.path().into_iter().map(|p| p.1).collect(); - let value = tagged_value.0.to_owned(); + let path = parents.path(); let patch = Patch::Insert { path, obj, @@ -106,8 +106,8 @@ impl OpObserver for Observer { conflict: bool, ) { if self.enabled { - let path = parents.path().into_iter().map(|p| p.1).collect(); - let value = tagged_value.0.to_owned(); + let path = parents.path(); + let value = (tagged_value.0.to_owned(), tagged_value.1); let patch = match prop { Prop::Map(key) => Patch::PutMap { 
path, @@ -136,7 +136,7 @@ impl OpObserver for Observer { tagged_value: (i64, ObjId), ) { if self.enabled { - let path = parents.path().into_iter().map(|p| p.1).collect(); + let path = parents.path(); let value = tagged_value.0; self.patches.push(Patch::Increment { path, @@ -149,7 +149,7 @@ impl OpObserver for Observer { fn delete(&mut self, mut parents: Parents<'_>, obj: ObjId, prop: Prop) { if self.enabled { - let path = parents.path().into_iter().map(|p| p.1).collect(); + let path = parents.path(); let patch = match prop { Prop::Map(key) => Patch::DeleteMap { path, obj, key }, Prop::Seq(index) => Patch::DeleteSeq { @@ -182,17 +182,17 @@ fn prop_to_js(p: &Prop) -> JsValue { } } -fn export_path(path: &[Prop], end: &Prop) -> Array { +fn export_path(path: &[(ObjId, Prop)], end: &Prop) -> Array { let result = Array::new(); for p in path { - result.push(&prop_to_js(p)); + result.push(&prop_to_js(&p.1)); } result.push(&prop_to_js(end)); result } impl Patch { - pub(crate) fn path(&self) -> &[Prop] { + pub(crate) fn path(&self) -> &[(ObjId, Prop)] { match &self { Self::PutMap { path, .. } => path.as_slice(), Self::PutSeq { path, .. } => path.as_slice(), @@ -202,6 +202,17 @@ impl Patch { Self::DeleteSeq { path, .. } => path.as_slice(), } } + + pub(crate) fn obj(&self) -> &ObjId { + match &self { + Self::PutMap { obj, .. } => obj, + Self::PutSeq { obj, .. } => obj, + Self::Increment { obj, .. } => obj, + Self::Insert { obj, .. } => obj, + Self::DeleteMap { obj, .. } => obj, + Self::DeleteSeq { obj, .. 
} => obj, + } + } } impl TryFrom for JsValue { @@ -223,7 +234,7 @@ impl TryFrom for JsValue { "path", export_path(path.as_slice(), &Prop::Map(key)), )?; - js_set(&result, "value", export_value(&value))?; + js_set(&result, "value", alloc(&value.0).1)?; js_set(&result, "conflict", &JsValue::from_bool(conflict))?; Ok(result.into()) } @@ -240,7 +251,7 @@ impl TryFrom for JsValue { "path", export_path(path.as_slice(), &Prop::Seq(index)), )?; - js_set(&result, "value", export_value(&value))?; + js_set(&result, "value", alloc(&value.0).1)?; js_set(&result, "conflict", &JsValue::from_bool(conflict))?; Ok(result.into()) } @@ -259,7 +270,7 @@ impl TryFrom for JsValue { js_set( &result, "values", - values.iter().map(export_value).collect::(), + values.iter().map(|v| alloc(&v.0).1).collect::(), )?; Ok(result.into()) } diff --git a/automerge-wasm/src/value.rs b/automerge-wasm/src/value.rs index 98ea5f1b..be554d5c 100644 --- a/automerge-wasm/src/value.rs +++ b/automerge-wasm/src/value.rs @@ -1,40 +1,151 @@ -use std::borrow::Cow; - -use automerge as am; -use js_sys::Uint8Array; +use crate::to_js_err; +use automerge::{ObjType, ScalarValue, Value}; use wasm_bindgen::prelude::*; -#[derive(Debug)] -pub struct ScalarValue<'a>(pub(crate) Cow<'a, am::ScalarValue>); +#[derive(Debug, Clone, Hash, Eq, PartialEq)] +pub(crate) enum Datatype { + Map, + Table, + List, + Text, + Bytes, + Str, + Int, + Uint, + F64, + Counter, + Timestamp, + Boolean, + Null, + Unknown(u8), +} -impl<'a> From> for JsValue { - fn from(val: ScalarValue<'a>) -> Self { - match &*val.0 { - am::ScalarValue::Bytes(v) => Uint8Array::from(v.as_slice()).into(), - am::ScalarValue::Str(v) => v.to_string().into(), - am::ScalarValue::Int(v) => (*v as f64).into(), - am::ScalarValue::Uint(v) => (*v as f64).into(), - am::ScalarValue::F64(v) => (*v).into(), - am::ScalarValue::Counter(v) => (f64::from(v)).into(), - am::ScalarValue::Timestamp(v) => js_sys::Date::new(&(*v as f64).into()).into(), - am::ScalarValue::Boolean(v) => 
(*v).into(), - am::ScalarValue::Null => JsValue::null(), - am::ScalarValue::Unknown { bytes, .. } => Uint8Array::from(bytes.as_slice()).into(), +impl Datatype { + pub(crate) fn is_sequence(&self) -> bool { + matches!(self, Self::List | Self::Text) + } + + pub(crate) fn is_scalar(&self) -> bool { + !matches!(self, Self::Map | Self::Table | Self::List | Self::Text) + } +} + +impl From<&ObjType> for Datatype { + fn from(o: &ObjType) -> Self { + (*o).into() + } +} + +impl From for Datatype { + fn from(o: ObjType) -> Self { + match o { + ObjType::Map => Self::Map, + ObjType::List => Self::List, + ObjType::Table => Self::Table, + ObjType::Text => Self::Text, } } } -pub(crate) fn datatype(s: &am::ScalarValue) -> String { - match s { - am::ScalarValue::Bytes(_) => "bytes".into(), - am::ScalarValue::Str(_) => "str".into(), - am::ScalarValue::Int(_) => "int".into(), - am::ScalarValue::Uint(_) => "uint".into(), - am::ScalarValue::F64(_) => "f64".into(), - am::ScalarValue::Counter(_) => "counter".into(), - am::ScalarValue::Timestamp(_) => "timestamp".into(), - am::ScalarValue::Boolean(_) => "boolean".into(), - am::ScalarValue::Null => "null".into(), - am::ScalarValue::Unknown { type_code, .. } => format!("unknown{}", type_code), +impl std::fmt::Display for Datatype { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + write!(f, "{}", String::from(self.clone())) + } +} + +impl From<&ScalarValue> for Datatype { + fn from(s: &ScalarValue) -> Self { + match s { + ScalarValue::Bytes(_) => Self::Bytes, + ScalarValue::Str(_) => Self::Str, + ScalarValue::Int(_) => Self::Int, + ScalarValue::Uint(_) => Self::Uint, + ScalarValue::F64(_) => Self::F64, + ScalarValue::Counter(_) => Self::Counter, + ScalarValue::Timestamp(_) => Self::Timestamp, + ScalarValue::Boolean(_) => Self::Boolean, + ScalarValue::Null => Self::Null, + ScalarValue::Unknown { type_code, .. 
} => Self::Unknown(*type_code), + } + } +} + +impl From<&Value<'_>> for Datatype { + fn from(v: &Value<'_>) -> Self { + match v { + Value::Object(o) => o.into(), + Value::Scalar(s) => s.as_ref().into(), + /* + ScalarValue::Bytes(_) => Self::Bytes, + ScalarValue::Str(_) => Self::Str, + ScalarValue::Int(_) => Self::Int, + ScalarValue::Uint(_) => Self::Uint, + ScalarValue::F64(_) => Self::F64, + ScalarValue::Counter(_) => Self::Counter, + ScalarValue::Timestamp(_) => Self::Timestamp, + ScalarValue::Boolean(_) => Self::Boolean, + ScalarValue::Null => Self::Null, + ScalarValue::Unknown { type_code, .. } => Self::Unknown(*type_code), + */ + } + } +} + +impl From for String { + fn from(d: Datatype) -> Self { + match d { + Datatype::Map => "map".into(), + Datatype::Table => "table".into(), + Datatype::List => "list".into(), + Datatype::Text => "text".into(), + Datatype::Bytes => "bytes".into(), + Datatype::Str => "str".into(), + Datatype::Int => "int".into(), + Datatype::Uint => "uint".into(), + Datatype::F64 => "f64".into(), + Datatype::Counter => "counter".into(), + Datatype::Timestamp => "timestamp".into(), + Datatype::Boolean => "boolean".into(), + Datatype::Null => "null".into(), + Datatype::Unknown(type_code) => format!("unknown{}", type_code), + } + } +} + +impl TryFrom for Datatype { + type Error = JsValue; + + fn try_from(datatype: JsValue) -> Result { + let datatype = datatype + .as_string() + .ok_or_else(|| to_js_err("datatype is not a string"))?; + match datatype.as_str() { + "map" => Ok(Datatype::Map), + "table" => Ok(Datatype::Table), + "list" => Ok(Datatype::List), + "text" => Ok(Datatype::Text), + "bytes" => Ok(Datatype::Bytes), + "str" => Ok(Datatype::Str), + "int" => Ok(Datatype::Int), + "uint" => Ok(Datatype::Uint), + "f64" => Ok(Datatype::F64), + "counter" => Ok(Datatype::Counter), + "timestamp" => Ok(Datatype::Timestamp), + "boolean" => Ok(Datatype::Boolean), + "null" => Ok(Datatype::Null), + d => { + if d.starts_with("unknown") { + todo!() // handle 
"unknown{}", + } else { + Err(to_js_err(format!("unknown datatype {}", d))) + } + } + } + } +} + +impl From for JsValue { + fn from(d: Datatype) -> Self { + String::from(d).into() } } diff --git a/automerge-wasm/test/apply.ts b/automerge-wasm/test/apply.ts index 18b53758..38085c21 100644 --- a/automerge-wasm/test/apply.ts +++ b/automerge-wasm/test/apply.ts @@ -5,6 +5,23 @@ import assert from 'assert' //@ts-ignore import init, { create, load } from '..' +export const OBJECT_ID = Symbol.for('_am_objectId') // object containing metadata about current + +// sample classes for testing +class Counter { + value: number; + constructor(n: number) { + this.value = n + } +} + +class Wrapper { + value: any; + constructor(n: any) { + this.value = n + } +} + describe('Automerge', () => { describe('Patch Apply', () => { it('apply nested sets on maps', () => { @@ -66,9 +83,10 @@ describe('Automerge', () => { let doc1 = create() doc1.enablePatches(true) doc1.putObject("/", "list", start.list); - let mat = doc1.materialize("/") let base = doc1.applyPatches({}) + let mat = doc1.clone().materialize("/") assert.deepEqual(mat, start) + assert.deepEqual(base, start) doc1.delete("/list/0/1", 3) start.list[0][1].splice(3,1) @@ -76,7 +94,7 @@ describe('Automerge', () => { doc1.delete("/list/0", 0) start.list[0].splice(0,1) - mat = doc1.materialize("/") + mat = doc1.clone().materialize("/") base = doc1.applyPatches(base) assert.deepEqual(mat, start) assert.deepEqual(base, start) @@ -91,10 +109,86 @@ describe('Automerge', () => { { action: 'put', conflict: false, path: [ 'list' ], value: [] }, { action: 'splice', path: [ 'list', 0 ], values: [ 'a', 'b', 'c' ] }]) }) + + it('it should allow registering type wrappers', () => { + let doc1 = create() + doc1.enablePatches(true) + //@ts-ignore + doc1.registerDatatype("counter", (n: any) => new Counter(n)) + let doc2 = doc1.fork() + doc1.put("/", "n", 10, "counter") + doc1.put("/", "m", 10, "int") + + let mat = doc1.materialize("/") + 
assert.deepEqual( mat, { n: new Counter(10), m: 10 } ) + + doc2.merge(doc1) + let apply = doc2.applyPatches({}) + assert.deepEqual( apply, { n: new Counter(10), m: 10 } ) + + doc1.increment("/","n", 5) + mat = doc1.materialize("/") + assert.deepEqual( mat, { n: new Counter(15), m: 10 } ) + + doc2.merge(doc1) + apply = doc2.applyPatches(apply) + assert.deepEqual( apply, { n: new Counter(15), m: 10 } ) + }) + + it('text can be managed as an array or a string', () => { + let doc1 = create("aaaa") + doc1.enablePatches(true) + + doc1.putObject("/", "notes", "hello world") + + let mat = doc1.materialize("/") + + assert.deepEqual( mat, { notes: "hello world".split("") } ) + + let doc2 = create() + doc2.enablePatches(true) + //@ts-ignore + doc2.registerDatatype("text", (n: any[]) => new String(n.join(""))) + let apply = doc2.applyPatches({} as any) + + doc2.merge(doc1); + apply = doc2.applyPatches(apply) + assert.deepEqual(apply[OBJECT_ID], "_root") + assert.deepEqual(apply.notes[OBJECT_ID], "1@aaaa") + assert.deepEqual( apply, { notes: new String("hello world") } ) + + doc2.splice("/notes", 6, 5, "everyone"); + apply = doc2.applyPatches(apply) + assert.deepEqual( apply, { notes: new String("hello everyone") } ) + + mat = doc2.materialize("/") + //@ts-ignore + assert.deepEqual(mat[OBJECT_ID], "_root") + //@ts-ignore + assert.deepEqual(mat.notes[OBJECT_ID], "1@aaaa") + assert.deepEqual( mat, { notes: new String("hello everyone") } ) + }) + + it.skip('it can patch quickly', () => { + console.time("init") + let doc1 = create() + doc1.enablePatches(true) + doc1.putObject("/", "notes", ""); + let mat = doc1.materialize("/") + let doc2 = doc1.fork() + let testData = new Array( 100000 ).join("x") + console.timeEnd("init") + console.time("splice") + doc2.splice("/notes", 0, 0, testData); + console.timeEnd("splice") + console.time("merge") + doc1.merge(doc2) + console.timeEnd("merge") + console.time("patch") + mat = doc1.applyPatches(mat) + console.timeEnd("patch") + }) }) }) -// 
FIXME: handle conflicts correctly on apply -// TODO: squash puts -// TODO: merge deletes -// TODO: elide `conflict: false` +// TODO: squash puts & deletes diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index a201d867..d6b49c59 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -397,6 +397,8 @@ describe('Automerge', () => { it('recursive sets are possible', () => { const doc = create("aaaa") + //@ts-ignore + doc.registerDatatype("text", (n: any[]) => new String(n.join(""))) const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]]) const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) const l3 = doc.putObject("_root", "info1", "hello world") // 'text' object @@ -404,13 +406,13 @@ describe('Automerge', () => { const l4 = doc.putObject("_root", "info3", "hello world") assert.deepEqual(doc.materialize(), { "list": [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]], - "info1": "hello world", + "info1": new String("hello world"), "info2": "hello world", - "info3": "hello world", + "info3": new String("hello world"), }) assert.deepEqual(doc.materialize(l2), { zip: ["a", "b"] }) assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]]) - assert.deepEqual(doc.materialize(l4), "hello world") + assert.deepEqual(doc.materialize(l4), new String("hello world")) doc.free() }) diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 4520c67d..65e51ad3 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -215,7 +215,8 @@ impl AutoCommitWithObs { message: sync::Message, ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); - self.doc.receive_sync_message(sync_state, message) + self.doc + .receive_sync_message_with(sync_state, message, Some(&mut self.op_observer)) } /// Return a graphviz representation of the opset. 
diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 8f08b211..eaccd038 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -278,13 +278,18 @@ impl OpSetInternal { let value = (winner.value(), self.id_to_exid(winner.id)); let conflict = values.len() > 1; observer.put(parents, ex_obj, key, value, conflict); - } else { + } else if had_value_before { observer.delete(parents, ex_obj, key); } } else if let Some(value) = op.get_increment_value() { // only observe this increment if the counter is visible, i.e. the counter's // create op is in the values - if values.iter().any(|value| op.pred.contains(&value.id)) { + //if values.iter().any(|value| op.pred.contains(&value.id)) { + if values + .last() + .map(|value| op.pred.contains(&value.id)) + .unwrap_or_default() + { // we have observed the value observer.increment(parents, ex_obj, key, (value, self.id_to_exid(op.id))); } diff --git a/automerge/src/query/seek_op_with_patch.rs b/automerge/src/query/seek_op_with_patch.rs index e8ebded8..06876038 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/automerge/src/query/seek_op_with_patch.rs @@ -8,8 +8,6 @@ use std::fmt::Debug; pub(crate) struct SeekOpWithPatch<'a> { op: Op, pub(crate) pos: usize, - /// A position counter for after we find the insert position to record conflicts. 
- later_pos: usize, pub(crate) succ: Vec, found: bool, pub(crate) seen: usize, @@ -26,7 +24,6 @@ impl<'a> SeekOpWithPatch<'a> { op: op.clone(), succ: vec![], pos: 0, - later_pos: 0, found: false, seen: 0, last_seen: None, @@ -176,6 +173,10 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { self.values.push(e); } self.succ.push(self.pos); + + if e.visible() { + self.had_value_before = true; + } } else if e.visible() { self.values.push(e); } @@ -184,7 +185,6 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // we reach an op with an opId greater than that of the new operation if m.lamport_cmp(e.id, self.op.id) == Ordering::Greater { self.found = true; - self.later_pos = self.pos + 1; return QueryResult::Next; } @@ -202,7 +202,6 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { if e.visible() { self.values.push(e); } - self.later_pos += 1; } QueryResult::Next } From 23a07699e213ed13a2c3b14f006bdf47e661cc8d Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 4 Oct 2022 14:09:38 -0500 Subject: [PATCH 588/730] typescript fixes --- automerge-wasm/index.d.ts | 31 +++++++++++++--- automerge-wasm/package.json | 2 +- automerge-wasm/test/apply.ts | 70 +++++++++++++++++------------------ automerge-wasm/test/readme.ts | 4 +- automerge-wasm/test/test.ts | 57 ++++++++++++++-------------- 5 files changed, 92 insertions(+), 72 deletions(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index c28cceff..8dbff739 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -91,15 +91,33 @@ export type Op = { pred: string[], } -export type Patch = { - obj: ObjID - action: 'assign' | 'insert' | 'delete' - key: Prop +export type Patch = PutPatch | DelPatch | SplicePatch | IncPatch; + +export type PutPatch = { + action: 'put' + path: Prop[], value: Value - datatype: Datatype conflict: boolean } +export type IncPatch = { + action: 'put' + path: Prop[], + value: number +} + +export type DelPatch = { + action: 'del' + path: Prop[], + length?: number, +} 
+ +export type SplicePatch = { + action: 'splice' + path: Prop[], + values: Value[], +} + export function create(actor?: Actor): Automerge; export function load(data: Uint8Array, actor?: Actor): Automerge; export function encodeChange(change: DecodedChange): Change; @@ -157,6 +175,7 @@ export class Automerge { // patches enablePatches(enable: boolean): void; + registerDatatype(datatype: string, callback: Function): void; popPatches(): Patch[]; // save and load to local store @@ -187,7 +206,7 @@ export class Automerge { dump(): void; // experimental api can go here - applyPatches(obj: Doc, meta?: any, callback?: Function): Doc; + applyPatches(obj: Doc, meta?: unknown, callback?: (values: Value[]) => undefined): Doc; } export interface JsSyncState { diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index c5a82fb1..4a9decff 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -26,7 +26,7 @@ "module": "./bundler/bindgen.js", "main": "./nodejs/bindgen.js", "scripts": { - "lint": "eslint test/*.ts", + "lint": "eslint test/*.ts index.d.ts", "debug": "cross-env PROFILE=dev yarn buildall", "build": "cross-env PROFILE=dev FEATURES='' yarn buildall", "release": "cross-env PROFILE=release yarn buildall", diff --git a/automerge-wasm/test/apply.ts b/automerge-wasm/test/apply.ts index 38085c21..50531458 100644 --- a/automerge-wasm/test/apply.ts +++ b/automerge-wasm/test/apply.ts @@ -1,12 +1,17 @@ import { describe, it } from 'mocha'; -//@ts-ignore import assert from 'assert' -//@ts-ignore -import init, { create, load } from '..' +import { create, Value } from '..' 
export const OBJECT_ID = Symbol.for('_am_objectId') // object containing metadata about current +// @ts-ignore +function _obj(doc: any) : any { + if (typeof doc === 'object' && doc !== null) { + return doc[OBJECT_ID] + } +} + // sample classes for testing class Counter { value: number; @@ -15,21 +20,14 @@ class Counter { } } -class Wrapper { - value: any; - constructor(n: any) { - this.value = n - } -} - describe('Automerge', () => { describe('Patch Apply', () => { it('apply nested sets on maps', () => { - let start : any = { hello: { mellow: { yellow: "world", x: 1 }, y : 2 } } - let doc1 = create() + const start = { hello: { mellow: { yellow: "world", x: 1 }, y : 2 } } + const doc1 = create() doc1.putObject("/", "hello", start.hello); let mat = doc1.materialize("/") - let doc2 = create() + const doc2 = create() doc2.enablePatches(true) doc2.merge(doc1) @@ -38,6 +36,7 @@ describe('Automerge', () => { assert.deepEqual(base, start) doc2.delete("/hello/mellow", "yellow"); + // @ts-ignore delete start.hello.mellow.yellow; base = doc2.applyPatches(base) mat = doc2.materialize("/") @@ -47,12 +46,11 @@ describe('Automerge', () => { }) it('apply patches on lists', () => { - //let start = { list: [1,2,3,4,5,6] } - let start = { list: [1,2,3,4] } - let doc1 = create() + const start = { list: [1,2,3,4] } + const doc1 = create() doc1.putObject("/", "list", start.list); let mat = doc1.materialize("/") - let doc2 = create() + const doc2 = create() doc2.enablePatches(true) doc2.merge(doc1) mat = doc1.materialize("/") @@ -68,7 +66,7 @@ describe('Automerge', () => { }) it('apply patches on lists of lists of lists', () => { - let start = { list: + const start = { list: [ [ [ 1, 2, 3, 4, 5, 6], @@ -80,7 +78,7 @@ describe('Automerge', () => { ] ] } - let doc1 = create() + const doc1 = create() doc1.enablePatches(true) doc1.putObject("/", "list", start.list); let base = doc1.applyPatches({}) @@ -101,21 +99,20 @@ describe('Automerge', () => { }) it('large inserts should make one splice 
patch', () => { - let doc1 = create() + const doc1 = create() doc1.enablePatches(true) doc1.putObject("/", "list", "abc"); - let patches = doc1.popPatches() + const patches = doc1.popPatches() assert.deepEqual( patches, [ { action: 'put', conflict: false, path: [ 'list' ], value: [] }, { action: 'splice', path: [ 'list', 0 ], values: [ 'a', 'b', 'c' ] }]) }) it('it should allow registering type wrappers', () => { - let doc1 = create() + const doc1 = create() doc1.enablePatches(true) - //@ts-ignore - doc1.registerDatatype("counter", (n: any) => new Counter(n)) - let doc2 = doc1.fork() + doc1.registerDatatype("counter", (n: number) => new Counter(n)) + const doc2 = doc1.fork() doc1.put("/", "n", 10, "counter") doc1.put("/", "m", 10, "int") @@ -136,7 +133,7 @@ describe('Automerge', () => { }) it('text can be managed as an array or a string', () => { - let doc1 = create("aaaa") + const doc1 = create("aaaa") doc1.enablePatches(true) doc1.putObject("/", "notes", "hello world") @@ -145,16 +142,16 @@ describe('Automerge', () => { assert.deepEqual( mat, { notes: "hello world".split("") } ) - let doc2 = create() + const doc2 = create() + let apply : any = doc2.materialize("/") doc2.enablePatches(true) - //@ts-ignore - doc2.registerDatatype("text", (n: any[]) => new String(n.join(""))) - let apply = doc2.applyPatches({} as any) + doc2.registerDatatype("text", (n: Value[]) => new String(n.join(""))) + apply = doc2.applyPatches(apply) doc2.merge(doc1); apply = doc2.applyPatches(apply) - assert.deepEqual(apply[OBJECT_ID], "_root") - assert.deepEqual(apply.notes[OBJECT_ID], "1@aaaa") + assert.deepEqual(_obj(apply), "_root") + assert.deepEqual(_obj(apply['notes']), "1@aaaa") assert.deepEqual( apply, { notes: new String("hello world") } ) doc2.splice("/notes", 6, 5, "everyone"); @@ -162,14 +159,14 @@ describe('Automerge', () => { assert.deepEqual( apply, { notes: new String("hello everyone") } ) mat = doc2.materialize("/") - //@ts-ignore - assert.deepEqual(mat[OBJECT_ID], "_root") 
- //@ts-ignore - assert.deepEqual(mat.notes[OBJECT_ID], "1@aaaa") + assert.deepEqual(_obj(mat), "_root") + // @ts-ignore + assert.deepEqual(_obj(mat.notes), "1@aaaa") assert.deepEqual( mat, { notes: new String("hello everyone") } ) }) it.skip('it can patch quickly', () => { +/* console.time("init") let doc1 = create() doc1.enablePatches(true) @@ -187,6 +184,7 @@ describe('Automerge', () => { console.time("patch") mat = doc1.applyPatches(mat) console.timeEnd("patch") +*/ }) }) }) diff --git a/automerge-wasm/test/readme.ts b/automerge-wasm/test/readme.ts index de22d495..e6e77731 100644 --- a/automerge-wasm/test/readme.ts +++ b/automerge-wasm/test/readme.ts @@ -1,6 +1,6 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ import { describe, it } from 'mocha'; import * as assert from 'assert' -//@ts-ignore import { create, load } from '..' describe('Automerge', () => { @@ -273,6 +273,6 @@ describe('Automerge', () => { doc1.free(); doc2.free(); doc3.free(); doc4.free() }) - it.skip('Syncing (1)', () => { }) + //it.skip('Syncing (1)', () => { }) }) }) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index d6b49c59..43feaf2d 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -1,10 +1,9 @@ import { describe, it } from 'mocha'; -//@ts-ignore import assert from 'assert' -//@ts-ignore +// @ts-ignore import { BloomFilter } from './helpers/sync' -import { create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' -import { DecodedSyncMessage, Hash } from '..'; +import { create, load, SyncState, Automerge, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' 
+import { Value, DecodedSyncMessage, Hash } from '..'; function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { const MAX_ITER = 10 @@ -311,7 +310,7 @@ describe('Automerge', () => { doc1.put("_root", "hello", "world") const doc2 = load(doc1.save(), "bbbb"); const doc3 = load(doc1.save(), "cccc"); - let heads = doc1.getHeads() + const heads = doc1.getHeads() doc1.put("_root", "cnt", 20) doc2.put("_root", "cnt", 0, "counter") doc3.put("_root", "cnt", 10, "counter") @@ -345,7 +344,7 @@ describe('Automerge', () => { doc1.insert(seq, 0, "hello") const doc2 = load(doc1.save(), "bbbb"); const doc3 = load(doc1.save(), "cccc"); - let heads = doc1.getHeads() + const heads = doc1.getHeads() doc1.put(seq, 0, 20) doc2.put(seq, 0, 0, "counter") doc3.put(seq, 0, 10, "counter") @@ -397,11 +396,10 @@ describe('Automerge', () => { it('recursive sets are possible', () => { const doc = create("aaaa") - //@ts-ignore - doc.registerDatatype("text", (n: any[]) => new String(n.join(""))) + doc.registerDatatype("text", (n: Value[]) => new String(n.join(""))) const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]]) const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) - const l3 = doc.putObject("_root", "info1", "hello world") // 'text' object + doc.putObject("_root", "info1", "hello world") // 'text' object doc.put("_root", "info2", "hello world") // 'str' const l4 = doc.putObject("_root", "info3", "hello world") assert.deepEqual(doc.materialize(), { @@ -444,7 +442,7 @@ describe('Automerge', () => { const a = doc1.putObject("_root", "a", {}); const b = doc1.putObject("_root", "b", {}); const c = doc1.putObject("_root", "c", {}); - const d = doc1.put(c, "d", "dd"); + doc1.put(c, "d", "dd"); const saved = doc1.save(); const doc2 = load(saved); assert.deepEqual(doc2.getWithType("_root", "a"), ["map", a]) @@ -877,8 +875,8 @@ describe('Automerge', () => { doc1.put('_root', 'key1', 1) doc1.put('_root', 'key1', 2) doc1.put('_root', 
'key2', 3) - const map = doc1.putObject('_root', 'map', {}) - const list = doc1.putObject('_root', 'list', []) + doc1.putObject('_root', 'map', {}) + doc1.putObject('_root', 'list', []) assert.deepEqual(doc1.popPatches(), [ { action: 'put', path: ['key1'], value: 1, conflict: false }, @@ -897,8 +895,8 @@ describe('Automerge', () => { doc1.insert(list, 0, 1) doc1.insert(list, 0, 2) doc1.insert(list, 2, 3) - const map = doc1.insertObject(list, 2, {}) - const list2 = doc1.insertObject(list, 2, []) + doc1.insertObject(list, 2, {}) + doc1.insertObject(list, 2, []) assert.deepEqual(doc1.popPatches(), [ { action: 'put', path: ['list'], value: [], conflict: false }, @@ -916,8 +914,8 @@ describe('Automerge', () => { doc1.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc1.push(list, 1) - const map = doc1.pushObject(list, {}) - const list2 = doc1.pushObject(list, []) + doc1.pushObject(list, {}) + doc1.pushObject(list, []) assert.deepEqual(doc1.popPatches(), [ { action: 'put', path: ['list'], value: [], conflict: false }, @@ -1121,7 +1119,7 @@ describe('Automerge', () => { const n1 = create('abc123'), n2 = create('def456') const s1 = initSyncState(), s2 = initSyncState() - let message, patch + let message for (let i = 0; i < 5; i++) { n1.put("_root", "x", i) n1.commit("", 0) @@ -1305,7 +1303,7 @@ describe('Automerge', () => { // create two peers both with divergent commits const n1 = create('01234567'), n2 = create('89abcdef') - const s1 = initSyncState(), s2 = initSyncState() + //const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { n1.put("_root", "x", i) @@ -1430,6 +1428,7 @@ describe('Automerge', () => { sync(n1, r, s1, rSyncState) assert.deepStrictEqual(n1.getHeads(), r.getHeads()) assert.deepStrictEqual(n1.materialize(), r.materialize()) + r = null }) it('should re-sync after one node experiences data loss without disconnecting', () => { @@ -1481,7 +1480,7 @@ describe('Automerge', () => { // simulate transmission over a 
network (see https://github.com/automerge/automerge/pull/362) let change = n3.getLastLocalChange() if (change === null) throw new RangeError("no local change") - //@ts-ignore + //ts-ignore if (typeof Buffer === 'function') change = Buffer.from(change) if (change === undefined) { throw new RangeError("last local change failed") } n2.applyChanges([change]) @@ -1495,10 +1494,10 @@ describe('Automerge', () => { it('should handle histories with lots of branching and merging', () => { const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') n1.put("_root", "x", 0); n1.commit("", 0) - let change1 = n1.getLastLocalChange() + const change1 = n1.getLastLocalChange() if (change1 === null) throw new RangeError("no local change") n2.applyChanges([change1]) - let change2 = n1.getLastLocalChange() + const change2 = n1.getLastLocalChange() if (change2 === null) throw new RangeError("no local change") n3.applyChanges([change2]) n3.put("_root", "x", 1); n3.commit("", 0) @@ -1715,7 +1714,8 @@ describe('Automerge', () => { // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. // lastSync is c4. - let n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create('01234567') + let n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -1816,9 +1816,11 @@ describe('Automerge', () => { // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. 
const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') - let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState() + let s13 = initSyncState() + const s12 = initSyncState() + const s21 = initSyncState() let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() - let message1, message2, message3 + let message1, message3 for (let i = 0; i < 3; i++) { n1.put("_root", "x", i); n1.commit("", 0) @@ -1871,7 +1873,7 @@ describe('Automerge', () => { n2.receiveSyncMessage(s23, encodeSyncMessage(modifiedMessage)) // n2 replies to n3, sending only n2c3 (the one change that n2 has but n1 doesn't) - message2 = n2.generateSyncMessage(s23) + const message2 = n2.generateSyncMessage(s23) if (message2 === null) { throw new RangeError("message should not be null") } assert.strictEqual(decodeSyncMessage(message2).changes.length, 1) // {n2c3} n3.receiveSyncMessage(s32, message2) @@ -1938,7 +1940,7 @@ describe('Automerge', () => { // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') let s1 = initSyncState(), s2 = initSyncState() - let msg, decodedMsg + let msg n1.put("_root", "x", 0); n1.commit("", 0) n3.applyChanges(n3.getChangesAdded(n1)) // merge() @@ -1977,13 +1979,14 @@ describe('Automerge', () => { n2.receiveSyncMessage(s2, msg) msg = n2.generateSyncMessage(s2) if (msg === null) { throw new RangeError("message should not be null") } - decodedMsg = decodeSyncMessage(msg) + const decodedMsg = decodeSyncMessage(msg) decodedMsg.changes = [change5, change6] msg = encodeSyncMessage(decodedMsg) const sentHashes: any = {} sentHashes[decodeChange(change5).hash] = true sentHashes[decodeChange(change6).hash] = true + s2.sentHashes = sentHashes n1.receiveSyncMessage(s1, msg) assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) From ba328992ff43d5f9f349b59e61c7edca37a176ec Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 6 Oct 2022 22:53:21 +0100 Subject: [PATCH 
589/730] bump @automerge/automerge-wasm and @automerge/automerge versions --- .../examples/create-react-app/package.json | 2 +- .../examples/create-react-app/yarn.lock | 70 +++++++++---------- automerge-js/examples/vite/package.json | 2 +- automerge-js/examples/webpack/package.json | 2 +- automerge-js/package.json | 4 +- automerge-wasm/package.json | 2 +- 6 files changed, 41 insertions(+), 41 deletions(-) diff --git a/automerge-js/examples/create-react-app/package.json b/automerge-js/examples/create-react-app/package.json index 2080d061..a2b7f37b 100644 --- a/automerge-js/examples/create-react-app/package.json +++ b/automerge-js/examples/create-react-app/package.json @@ -8,7 +8,7 @@ "@testing-library/jest-dom": "^5.16.5", "@testing-library/react": "^13.4.0", "@testing-library/user-event": "^13.5.0", - "@automerge/automerge": "2.0.0-alpha.3", + "@automerge/automerge": "2.0.0-alpha.4", "react": "^18.2.0", "react-dom": "^18.2.0", "react-scripts": "5.0.1", diff --git a/automerge-js/examples/create-react-app/yarn.lock b/automerge-js/examples/create-react-app/yarn.lock index fe6a1189..90a1592b 100644 --- a/automerge-js/examples/create-react-app/yarn.lock +++ b/automerge-js/examples/create-react-app/yarn.lock @@ -24,17 +24,17 @@ jsonpointer "^5.0.0" leven "^3.1.0" -"@automerge/automerge-wasm@0.1.7": - version "0.1.7" - resolved "http://localhost:4873/@automerge%2fautomerge-wasm/-/automerge-wasm-0.1.7.tgz#2b1bd55a05def29beec76828664ae1def1276e11" - integrity sha512-MIUUxqx9QM14DR8OzzS4sCC3cNIgzH2LMvTesFTO8NoH8RV/hm4jrQHQbGfx2SV3Q6tZjy8bCLOLgJK/yIxbKQ== +"@automerge/automerge-wasm@0.1.9": + version "0.1.9" + resolved "http://localhost:4873/@automerge%2fautomerge-wasm/-/automerge-wasm-0.1.9.tgz#b2def5e8b643f1802bc696843b7755dc444dc2eb" + integrity sha512-S+sjJUJ3aPn2F37vKYAzKxz8CDgbHpOOGVjKSgkLjkAqe1pQ+wp4BpiELXafX73w8DVIrGx1zzru4w3t+Eo8gw== -"@automerge/automerge@2.0.0-alpha.1": - version "2.0.0-alpha.1" - resolved 
"http://localhost:4873/@automerge%2fautomerge/-/automerge-2.0.0-alpha.1.tgz#df52164448ab13e458bd5a8e32e47f6ddbdd56fc" - integrity sha512-9q5CHqKEmTKs5T7/UdVaugk+rz3mAuxphpfgKXPGgEvvOIZsHz4spkxSNahWscY9pF8EhLgcA/pCfdtd3b2goA== +"@automerge/automerge@2.0.0-alpha.4": + version "2.0.0-alpha.4" + resolved "http://localhost:4873/@automerge%2fautomerge/-/automerge-2.0.0-alpha.4.tgz#df406f5364960a4d21040044da55ebd47406ea3a" + integrity sha512-PVRD1dmLy0U4GttyMvlWr99wyr6xvskJbOkxJDHnp+W2VAFfcqa4QKouaFbJ4W3iIsYX8DfQJ+uhRxa6UnvkHg== dependencies: - "@automerge/automerge-wasm" "0.1.7" + "@automerge/automerge-wasm" "0.1.9" uuid "^8.3" "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.8.3": @@ -1992,9 +1992,9 @@ "@types/istanbul-lib-report" "*" "@types/jest@*": - version "29.1.1" - resolved "http://localhost:4873/@types%2fjest/-/jest-29.1.1.tgz#cf21a0835a1ba9a30ea1966019f1261c6a114c92" - integrity sha512-U9Ey07dGWl6fUFaIaUQUKWG5NoKi/zizeVQCGV8s4nSU0jPgqphVZvS64+8BtWYvrc3ZGw6wo943NSYPxkrp/g== + version "29.1.2" + resolved "http://localhost:4873/@types%2fjest/-/jest-29.1.2.tgz#7ad8077043ab5f6c108c8111bcc1d224e5600a87" + integrity sha512-y+nlX0h87U0R+wsGn6EBuoRWYyv3KFtwRNP3QWp9+k2tJ2/bqcGS3UxD7jgT+tiwJWWq3UsyV4Y+T6rsMT4XMg== dependencies: expect "^29.0.0" pretty-format "^29.0.0" @@ -2015,9 +2015,9 @@ integrity sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA== "@types/node@*": - version "18.8.2" - resolved "http://localhost:4873/@types%2fnode/-/node-18.8.2.tgz#17d42c6322d917764dd3d2d3a10d7884925de067" - integrity sha512-cRMwIgdDN43GO4xMWAfJAecYn8wV4JbsOGHNfNUIDiuYkUYAR5ec4Rj7IO2SAhFPEfpPtLtUTbbny/TCT7aDwA== + version "18.8.3" + resolved "http://localhost:4873/@types%2fnode/-/node-18.8.3.tgz#ce750ab4017effa51aed6a7230651778d54e327c" + integrity 
sha512-0os9vz6BpGwxGe9LOhgP/ncvYN5Tx1fNcd2TM3rD/aCGBkysb+ZWpXEocG24h6ZzOi13+VB8HndAQFezsSOw1w== "@types/parse-json@^4.0.0": version "4.0.0" @@ -2984,9 +2984,9 @@ caniuse-api@^3.0.0: lodash.uniq "^4.5.0" caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001400, caniuse-lite@^1.0.30001407: - version "1.0.30001415" - resolved "http://localhost:4873/caniuse-lite/-/caniuse-lite-1.0.30001415.tgz#fd7ea96e9e94c181a7f56e7571efb43d92b860cc" - integrity sha512-ER+PfgCJUe8BqunLGWd/1EY4g8AzQcsDAVzdtMGKVtQEmKAwaFfU6vb7EAVIqTMYsqxBorYZi2+22Iouj/y7GQ== + version "1.0.30001416" + resolved "http://localhost:4873/caniuse-lite/-/caniuse-lite-1.0.30001416.tgz#29692af8a6a11412f2d3cf9a59d588fcdd21ce4c" + integrity sha512-06wzzdAkCPZO+Qm4e/eNghZBDfVNDsCgw33T27OwBH9unE9S478OYw//Q2L7Npf/zBzs7rjZOszIFQkwQKAEqA== case-sensitive-paths-webpack-plugin@^2.4.0: version "2.4.0" @@ -3791,9 +3791,9 @@ ejs@^3.1.6: jake "^10.8.5" electron-to-chromium@^1.4.251: - version "1.4.271" - resolved "http://localhost:4873/electron-to-chromium/-/electron-to-chromium-1.4.271.tgz#2d9f04f6a53c70e1bb1acfaae9c39f07ca40d290" - integrity sha512-BCPBtK07xR1/uY2HFDtl3wK2De66AW4MSiPlLrnPNxKC/Qhccxd59W73654S3y6Rb/k3hmuGJOBnhjfoutetXA== + version "1.4.274" + resolved "http://localhost:4873/electron-to-chromium/-/electron-to-chromium-1.4.274.tgz#74369ac6f020c3cea7c77ec040ddf159fe226233" + integrity sha512-Fgn7JZQzq85I81FpKUNxVLAzoghy8JZJ4NIue+YfUYBbu1AkpgzFvNwzF/ZNZH9ElkmJD0TSWu1F2gTpw/zZlg== emittery@^0.10.2: version "0.10.2" @@ -3853,9 +3853,9 @@ error-stack-parser@^2.0.6: stackframe "^1.3.4" es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5, es-abstract@^1.20.1: - version "1.20.3" - resolved "http://localhost:4873/es-abstract/-/es-abstract-1.20.3.tgz#90b143ff7aedc8b3d189bcfac7f1e3e3f81e9da1" - integrity sha512-AyrnaKVpMzljIdwjzrj+LxGmj8ik2LckwXacHqrJJ/jxz6dDDBcZ7I7nlHM0FvEW8MfbWJwOd+yT2XzYW49Frw== + version "1.20.4" + resolved 
"http://localhost:4873/es-abstract/-/es-abstract-1.20.4.tgz#1d103f9f8d78d4cf0713edcd6d0ed1a46eed5861" + integrity sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA== dependencies: call-bind "^1.0.2" es-to-primitive "^1.2.1" @@ -3867,7 +3867,7 @@ es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19 has-property-descriptors "^1.0.0" has-symbols "^1.0.3" internal-slot "^1.0.3" - is-callable "^1.2.6" + is-callable "^1.2.7" is-negative-zero "^2.0.2" is-regex "^1.1.4" is-shared-array-buffer "^1.0.2" @@ -4997,7 +4997,7 @@ is-boolean-object@^1.1.0: call-bind "^1.0.2" has-tostringtag "^1.0.0" -is-callable@^1.1.4, is-callable@^1.2.6: +is-callable@^1.1.4, is-callable@^1.2.7: version "1.2.7" resolved "http://localhost:4873/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== @@ -5171,9 +5171,9 @@ istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: - version "5.2.0" - resolved "http://localhost:4873/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.0.tgz#31d18bdd127f825dd02ea7bfdfd906f8ab840e9f" - integrity sha512-6Lthe1hqXHBNsqvgDzGO6l03XNeu3CrG4RqQ1KM9+l5+jNGpEJfIELx1NS3SEHmJQA8np/u+E4EPRKRiu6m19A== + version "5.2.1" + resolved "http://localhost:4873/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" + integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== dependencies: "@babel/core" "^7.12.3" "@babel/parser" "^7.14.7" @@ -8318,9 +8318,9 @@ terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: terser "^5.14.1" terser@^5.0.0, terser@^5.10.0, terser@^5.14.1: - version "5.15.0" - resolved 
"http://localhost:4873/terser/-/terser-5.15.0.tgz#e16967894eeba6e1091509ec83f0c60e179f2425" - integrity sha512-L1BJiXVmheAQQy+as0oF3Pwtlo4s3Wi1X2zNZ2NxOB4wx9bdS9Vk67XQENLFdLYGCK/Z2di53mTj/hBafR+dTA== + version "5.15.1" + resolved "http://localhost:4873/terser/-/terser-5.15.1.tgz#8561af6e0fd6d839669c73b92bdd5777d870ed6c" + integrity sha512-K1faMUvpm/FBxjBXud0LWVAGxmvoPbZbfTCYbSgaaYQaIXI3/TdI7a7ZGA73Zrou6Q8Zmz3oeUTsp/dj+ag2Xw== dependencies: "@jridgewell/source-map" "^0.3.2" acorn "^8.5.0" @@ -8544,9 +8544,9 @@ upath@^1.2.0: integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== update-browserslist-db@^1.0.9: - version "1.0.9" - resolved "http://localhost:4873/update-browserslist-db/-/update-browserslist-db-1.0.9.tgz#2924d3927367a38d5c555413a7ce138fc95fcb18" - integrity sha512-/xsqn21EGVdXI3EXSum1Yckj3ZVZugqyOZQ/CxYPBD/R+ko9NSUScf8tFF4dOKY+2pvSSJA/S+5B8s4Zr4kyvg== + version "1.0.10" + resolved "http://localhost:4873/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz#0f54b876545726f17d00cd9a2561e6dade943ff3" + integrity sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ== dependencies: escalade "^3.1.1" picocolors "^1.0.0" diff --git a/automerge-js/examples/vite/package.json b/automerge-js/examples/vite/package.json index 61a815d5..79ec2037 100644 --- a/automerge-js/examples/vite/package.json +++ b/automerge-js/examples/vite/package.json @@ -9,7 +9,7 @@ "preview": "vite preview" }, "dependencies": { - "@automerge/automerge": "2.0.0-alpha.3" + "@automerge/automerge": "2.0.0-alpha.4" }, "devDependencies": { "typescript": "^4.6.4", diff --git a/automerge-js/examples/webpack/package.json b/automerge-js/examples/webpack/package.json index 48d43dcc..5f0680b2 100644 --- a/automerge-js/examples/webpack/package.json +++ b/automerge-js/examples/webpack/package.json @@ -10,7 +10,7 @@ }, "author": "", "dependencies": { - "@automerge/automerge": "2.0.0-alpha.3" + 
"@automerge/automerge": "2.0.0-alpha.4" }, "devDependencies": { "serve": "^13.0.2", diff --git a/automerge-js/package.json b/automerge-js/package.json index c3bc00c5..877d354c 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.0-alpha.3", + "version": "2.0.0-alpha.4", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", "repository": "github:automerge/automerge-rs", @@ -57,7 +57,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.8", + "@automerge/automerge-wasm": "0.1.9", "uuid": "^8.3" } } diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 4a9decff..3dd0722d 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.8", + "version": "0.1.9", "license": "MIT", "files": [ "README.md", From dff0fc2b21cecde9a6d2b5f3e2ec69fb07946a9d Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 10 Oct 2022 13:05:10 +0100 Subject: [PATCH 590/730] Remove automerge-wasm devDependency This dependency was added in a PR which is no longer relevant as we've switched to depending directly on `@automerge/automerge-wasm` and testing by running a local NPM registry. 
--- automerge-js/package.json | 1 - 1 file changed, 1 deletion(-) diff --git a/automerge-js/package.json b/automerge-js/package.json index d64d610a..877d354c 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -48,7 +48,6 @@ "@types/uuid": "^8.3.4", "@typescript-eslint/eslint-plugin": "^5.25.0", "@typescript-eslint/parser": "^5.25.0", - "automerge-wasm": "file:../automerge-wasm", "eslint": "^8.15.0", "fast-sha256": "^1.3.0", "mocha": "^10.0.0", From 430d842343526e7992763776e72105d9620df379 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 10 Oct 2022 14:14:38 +0100 Subject: [PATCH 591/730] Update vite.config.js in Vite Example README --- automerge-js/examples/vite/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automerge-js/examples/vite/README.md b/automerge-js/examples/vite/README.md index 70fa620f..a54195c7 100644 --- a/automerge-js/examples/vite/README.md +++ b/automerge-js/examples/vite/README.md @@ -27,7 +27,7 @@ export default defineConfig({ // versions of the JS wrapper. 
This causes problems because the JS // wrapper has a module level variable to track JS side heap // allocations, initializing this twice causes horrible breakage - exclude: ["automerge-wasm"] + exclude: ["@automerge/automerge-wasm"] } }) ``` From 2d072d81fb19e92510a28dcdb6636e8aac5347a4 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sat, 8 Oct 2022 19:00:38 +0100 Subject: [PATCH 592/730] Add TypeScript type for PatchCallback --- automerge-js/src/index.ts | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 635c328a..3a5316c9 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -7,19 +7,21 @@ import { STATE, HEADS, TRACE, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" import { AutomergeValue, Text, Counter } from "./types" export { AutomergeValue, Text, Counter, Int, Uint, Float64 } from "./types" -import { type API } from "@automerge/automerge-wasm"; +import { type API, type Patch } from "@automerge/automerge-wasm"; import { ApiHandler, UseApi } from "./low_level" import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "@automerge/automerge-wasm" import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "@automerge/automerge-wasm" -export type ChangeOptions = { message?: string, time?: number, patchCallback?: Function } -export type ApplyOptions = { patchCallback?: Function } +export type ChangeOptions = { message?: string, time?: number, patchCallback?: PatchCallback } +export type ApplyOptions = { patchCallback?: PatchCallback } export type Doc = { readonly [P in keyof T]: T[P] } export type ChangeFn = (doc: T) => void +export type PatchCallback = (patch: Patch, before: Doc, after: Doc) => void + export interface State { change: DecodedChange snapshot: T @@ -32,25 +34,25 @@ export function use(api: API) { import * as wasm from "@automerge/automerge-wasm" use(wasm) 
-export type InitOptions = { +export type InitOptions = { actor?: ActorId, freeze?: boolean, - patchCallback?: Function, + patchCallback?: PatchCallback, }; -interface InternalState { +interface InternalState { handle: Automerge, heads: Heads | undefined, freeze: boolean, - patchCallback: Function | undefined, + patchCallback?: PatchCallback } export function getBackend(doc: Doc) : Automerge { return _state(doc).handle } -function _state(doc: Doc, checkroot = true) : InternalState { +function _state(doc: Doc, checkroot = true) : InternalState { const state = Reflect.get(doc,STATE) if (state === undefined || (checkroot && _obj(doc) !== "_root")) { throw new RangeError("must be the document root") @@ -90,7 +92,7 @@ function _readonly(doc: Doc) : boolean { return Reflect.get(doc,READ_ONLY) !== false } -function importOpts(_actor?: ActorId | InitOptions) : InitOptions { +function importOpts(_actor?: ActorId | InitOptions) : InitOptions { if (typeof _actor === 'object') { return _actor } else { @@ -98,7 +100,7 @@ function importOpts(_actor?: ActorId | InitOptions) : InitOptions { } } -export function init(_opts?: ActorId | InitOptions) : Doc{ +export function init(_opts?: ActorId | InitOptions) : Doc{ let opts = importOpts(_opts) let freeze = !!opts.freeze let patchCallback = opts.patchCallback @@ -131,7 +133,7 @@ export function from>(initialState: T | Doc return change(init(actor), (d) => Object.assign(d, initialState)) } -export function change(doc: Doc, options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc { +export function change(doc: Doc, options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc { if (typeof options === 'function') { return _change(doc, {}, options) } else if (typeof callback === 'function') { @@ -144,7 +146,7 @@ export function change(doc: Doc, options: string | ChangeOptions | ChangeF } } -function progressDocument(doc: Doc, heads: Heads, callback?: Function): Doc { +function progressDocument(doc: Doc, heads: Heads, 
callback?: PatchCallback): Doc { let state = _state(doc) let nextState = { ... state, heads: undefined }; // @ts-ignore @@ -154,7 +156,7 @@ function progressDocument(doc: Doc, heads: Heads, callback?: Function): Do return nextDoc } -function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): Doc { +function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): Doc { if (typeof callback !== "function") { @@ -192,7 +194,7 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): } } -export function emptyChange(doc: Doc, options: ChangeOptions) { +export function emptyChange(doc: Doc, options: ChangeOptions) { if (options === undefined) { options = {} } @@ -214,7 +216,7 @@ export function emptyChange(doc: Doc, options: ChangeOptions) { return progressDocument(doc, heads) } -export function load(data: Uint8Array, _opts?: ActorId | InitOptions) : Doc { +export function load(data: Uint8Array, _opts?: ActorId | InitOptions) : Doc { const opts = importOpts(_opts) const actor = opts.actor const patchCallback = opts.patchCallback @@ -320,7 +322,7 @@ export function getAllChanges(doc: Doc) : Change[] { return state.handle.getChanges([]) } -export function applyChanges(doc: Doc, changes: Change[], opts?: ApplyOptions) : [Doc] { +export function applyChanges(doc: Doc, changes: Change[], opts?: ApplyOptions) : [Doc] { const state = _state(doc) if (!opts) { opts = {} } if (state.heads) { @@ -378,7 +380,7 @@ export function generateSyncMessage(doc: Doc, inState: SyncState) : [ Sync return [ outState, message ] } -export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage, opts?: ApplyOptions) : [ Doc, SyncState, null ] { +export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage, opts?: ApplyOptions) : [ Doc, SyncState, null ] { const syncState = ApiHandler.importSyncState(inState) if (!opts) { opts = {} } const state = _state(doc) From ed0da24020fd2d63ba342492720d52440d7be5be Mon Sep 17 
00:00:00 2001 From: Alex Good Date: Thu, 6 Oct 2022 17:43:30 +0100 Subject: [PATCH 593/730] Track whether a transaction is observed in types With the `OpObserver` moving to the transaction rather than being passed in to the `Transaction::commit` method we have needed to add a way to get the observer back out of the transaction (via `Transaction::observer` and `AutoCommit::observer`). This `Observer` type is then used to handle patch generation logic. However, there are cases where we might not want an `OpObserver` and in these cases we can execute various things fast - so we need to have something like an `Option`. In order to track the presence or otherwise of the observer at the type level introduce `automerge::transaction::observation`, which is a type level `Option`. This allows us to efficiently choose the right code paths whilst maintaining correct types for `Transaction::observer` and `AutoCommit::observer` --- automerge-wasm/src/lib.rs | 8 +- automerge/examples/quickstart.rs | 6 +- automerge/examples/watch.rs | 4 +- automerge/src/autocommit.rs | 198 ++++++++++-------- automerge/src/automerge.rs | 106 ++++++++-- automerge/src/automerge/tests.rs | 2 +- automerge/src/transaction.rs | 2 + automerge/src/transaction/inner.rs | 82 ++++---- .../src/transaction/manual_transaction.rs | 99 ++++----- automerge/src/transaction/observation.rs | 78 +++++++ 10 files changed, 374 insertions(+), 211 deletions(-) create mode 100644 automerge/src/transaction/observation.rs diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 15381c8c..827432ce 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -27,7 +27,7 @@ )] #![allow(clippy::unused_unit)] use am::transaction::CommitOptions; -use am::transaction::Transactable; +use am::transaction::{Observed, Transactable, UnObserved}; use automerge as am; use automerge::{Change, ObjId, ObjType, Prop, Value, ROOT}; use js_sys::{Array, Function, Object, Uint8Array}; @@ -55,7 +55,7 @@ macro_rules! 
log { }; } -type AutoCommit = am::AutoCommitWithObs; +type AutoCommit = am::AutoCommitWithObs>; #[cfg(feature = "wee_alloc")] #[global_allocator] @@ -781,7 +781,9 @@ pub fn init(actor: Option) -> Result { #[wasm_bindgen(js_name = load)] pub fn load(data: Uint8Array, actor: Option) -> Result { let data = data.to_vec(); - let mut doc = AutoCommit::load(&data).map_err(to_js_err)?; + let mut doc = am::AutoCommitWithObs::::load(&data) + .map_err(to_js_err)? + .with_observer(Observer::default()); if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); doc.set_actor(actor); diff --git a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs index 56d24858..76ef0470 100644 --- a/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -8,7 +8,7 @@ use automerge::{Automerge, ROOT}; fn main() { let mut doc1 = Automerge::new(); let (cards, card1) = doc1 - .transact_with::<_, _, AutomergeError, _, ()>( + .transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Add card".to_owned()), |tx| { let cards = tx.put_object(ROOT, "cards", ObjType::List).unwrap(); @@ -30,7 +30,7 @@ fn main() { let binary = doc1.save(); let mut doc2 = Automerge::load(&binary).unwrap(); - doc1.transact_with::<_, _, AutomergeError, _, ()>( + doc1.transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Mark card as done".to_owned()), |tx| { tx.put(&card1, "done", true)?; @@ -39,7 +39,7 @@ fn main() { ) .unwrap(); - doc2.transact_with::<_, _, AutomergeError, _, ()>( + doc2.transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Delete card".to_owned()), |tx| { tx.delete(&cards, 0)?; diff --git a/automerge/examples/watch.rs b/automerge/examples/watch.rs index ccc480e6..66a9f4f9 100644 --- a/automerge/examples/watch.rs +++ b/automerge/examples/watch.rs @@ -11,7 +11,7 @@ fn main() { // a simple scalar change in the root object let mut result = 
doc - .transact_with::<_, _, AutomergeError, _, VecOpObserver>( + .transact_observed_with::<_, _, AutomergeError, _, VecOpObserver>( |_result| CommitOptions::default(), |tx| { tx.put(ROOT, "hello", "world").unwrap(); @@ -36,7 +36,7 @@ fn main() { tx.insert(&list, 1, "woo").unwrap(); let m = tx.insert_object(&list, 2, automerge::ObjType::Map).unwrap(); tx.put(&m, "hi", 2).unwrap(); - let patches = tx.op_observer.take_patches(); + let patches = tx.observer().take_patches(); let _heads3 = tx.commit_with(CommitOptions::default()); get_changes(&doc, patches); } diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 65e51ad3..a1c598d9 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -7,27 +7,37 @@ use crate::{ sync, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, Parents, ScalarValue, }; use crate::{ - transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, - Value, Values, + transaction::{Observation, Observed, TransactionInner, UnObserved}, + ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, Values, }; /// An automerge document that automatically manages transactions. 
#[derive(Debug, Clone)] -pub struct AutoCommitWithObs { +pub struct AutoCommitWithObs { doc: Automerge, transaction: Option<(Obs, TransactionInner)>, - op_observer: Obs, + observation: Obs, } -pub type AutoCommit = AutoCommitWithObs<()>; +pub type AutoCommit = AutoCommitWithObs; -impl Default for AutoCommitWithObs { +impl AutoCommitWithObs { + pub fn unobserved() -> AutoCommitWithObs { + AutoCommitWithObs { + doc: Automerge::new(), + transaction: None, + observation: UnObserved::new(), + } + } +} + +impl Default for AutoCommitWithObs> { fn default() -> Self { let op_observer = O::default(); AutoCommitWithObs { doc: Automerge::new(), transaction: None, - op_observer, + observation: Observed::new(op_observer), } } } @@ -37,22 +47,58 @@ impl AutoCommit { AutoCommitWithObs { doc: Automerge::new(), transaction: None, - op_observer: (), + observation: UnObserved, } } + + pub fn load(data: &[u8]) -> Result { + let doc = Automerge::load(data)?; + Ok(Self { + doc, + transaction: None, + observation: UnObserved, + }) + } } -impl AutoCommitWithObs { +impl AutoCommitWithObs> { pub fn observer(&mut self) -> &mut Obs { self.ensure_transaction_closed(); - &mut self.op_observer + self.observation.observer() + } +} + +impl AutoCommitWithObs { + pub fn fork(&mut self) -> Self { + self.ensure_transaction_closed(); + Self { + doc: self.doc.fork(), + transaction: self.transaction.clone(), + observation: self.observation.clone(), + } } - pub fn with_observer(self, op_observer: Obs2) -> AutoCommitWithObs { + pub fn fork_at(&mut self, heads: &[ChangeHash]) -> Result { + self.ensure_transaction_closed(); + Ok(Self { + doc: self.doc.fork_at(heads)?, + transaction: self.transaction.clone(), + observation: self.observation.clone(), + }) + } +} + +impl AutoCommitWithObs { + pub fn with_observer( + self, + op_observer: Obs2, + ) -> AutoCommitWithObs> { AutoCommitWithObs { doc: self.doc, - transaction: self.transaction.map(|(_, t)| (op_observer.branch(), t)), - op_observer, + transaction: self + 
.transaction + .map(|(_, t)| (Observed::new(op_observer.branch()), t)), + observation: Observed::new(op_observer), } } @@ -81,53 +127,25 @@ impl AutoCommitWithObs { fn ensure_transaction_open(&mut self) { if self.transaction.is_none() { - self.transaction = Some((self.op_observer.branch(), self.doc.transaction_inner())); + self.transaction = Some((self.observation.branch(), self.doc.transaction_inner())); } } - pub fn fork(&mut self) -> Self { - self.ensure_transaction_closed(); - Self { - doc: self.doc.fork(), - transaction: self.transaction.clone(), - op_observer: self.op_observer.clone(), - } - } - - pub fn fork_at(&mut self, heads: &[ChangeHash]) -> Result { - self.ensure_transaction_closed(); - Ok(Self { - doc: self.doc.fork_at(heads)?, - transaction: self.transaction.clone(), - op_observer: self.op_observer.clone(), - }) - } - fn ensure_transaction_closed(&mut self) { if let Some((current, tx)) = self.transaction.take() { - self.op_observer.merge(¤t); + self.observation.merge(¤t); tx.commit(&mut self.doc, None, None); } } - pub fn load(data: &[u8]) -> Result { - // passing a () observer here has performance implications on all loads - // if we want an autocommit::load() method that can be observered we need to make a new method - // fn observed_load() ? 
- let doc = Automerge::load(data)?; - let op_observer = Obs::default(); - Ok(Self { - doc, - transaction: None, - op_observer, - }) - } - pub fn load_incremental(&mut self, data: &[u8]) -> Result { self.ensure_transaction_closed(); // TODO - would be nice to pass None here instead of &mut () - self.doc - .load_incremental_with(data, Some(&mut self.op_observer)) + if let Some(observer) = self.observation.observer() { + self.doc.load_incremental_with(data, Some(observer)) + } else { + self.doc.load_incremental(data) + } } pub fn apply_changes( @@ -135,19 +153,25 @@ impl AutoCommitWithObs { changes: impl IntoIterator, ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); - self.doc - .apply_changes_with(changes, Some(&mut self.op_observer)) + if let Some(observer) = self.observation.observer() { + self.doc.apply_changes_with(changes, Some(observer)) + } else { + self.doc.apply_changes(changes) + } } /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge( + pub fn merge( &mut self, other: &mut AutoCommitWithObs, ) -> Result, AutomergeError> { self.ensure_transaction_closed(); other.ensure_transaction_closed(); - self.doc - .merge_with(&mut other.doc, Some(&mut self.op_observer)) + if let Some(observer) = self.observation.observer() { + self.doc.merge_with(&mut other.doc, Some(observer)) + } else { + self.doc.merge(&mut other.doc) + } } pub fn save(&mut self) -> Vec { @@ -215,8 +239,12 @@ impl AutoCommitWithObs { message: sync::Message, ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); - self.doc - .receive_sync_message_with(sync_state, message, Some(&mut self.op_observer)) + if let Some(observer) = self.observation.observer() { + self.doc + .receive_sync_message_with(sync_state, message, Some(observer)) + } else { + self.doc.receive_sync_message(sync_state, message) + } } /// Return a graphviz representation of the opset. 
@@ -261,7 +289,7 @@ impl AutoCommitWithObs { // ensure that even no changes triggers a change self.ensure_transaction_open(); let (current, tx) = self.transaction.take().unwrap(); - self.op_observer.merge(¤t); + self.observation.merge(¤t); tx.commit(&mut self.doc, options.message, options.time) } @@ -273,7 +301,7 @@ impl AutoCommitWithObs { } } -impl Transactable for AutoCommitWithObs { +impl Transactable for AutoCommitWithObs { fn pending_ops(&self) -> usize { self.transaction .as_ref() @@ -281,11 +309,6 @@ impl Transactable for AutoCommitWithObs { .unwrap_or(0) } - // KeysAt::() - // LenAt::() - // PropAt::() - // NthAt::() - fn keys>(&self, obj: O) -> Keys<'_, '_> { self.doc.keys(obj) } @@ -348,24 +371,6 @@ impl Transactable for AutoCommitWithObs { self.doc.object_type(obj) } - // set(obj, prop, value) - value can be scalar or objtype - // del(obj, prop) - // inc(obj, prop, value) - // insert(obj, index, value) - - /// Set the value of property `P` to value `V` in object `obj`. - /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document or create a new object. 
- /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object fn put, P: Into, V: Into>( &mut self, obj: O, @@ -374,7 +379,7 @@ impl Transactable for AutoCommitWithObs { ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let (current, tx) = self.transaction.as_mut().unwrap(); - tx.put(&mut self.doc, current, obj.as_ref(), prop, value) + tx.put(&mut self.doc, current.observer(), obj.as_ref(), prop, value) } fn put_object, P: Into>( @@ -385,7 +390,7 @@ impl Transactable for AutoCommitWithObs { ) -> Result { self.ensure_transaction_open(); let (current, tx) = self.transaction.as_mut().unwrap(); - tx.put_object(&mut self.doc, current, obj.as_ref(), prop, value) + tx.put_object(&mut self.doc, current.observer(), obj.as_ref(), prop, value) } fn insert, V: Into>( @@ -396,7 +401,13 @@ impl Transactable for AutoCommitWithObs { ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let (current, tx) = self.transaction.as_mut().unwrap(); - tx.insert(&mut self.doc, current, obj.as_ref(), index, value) + tx.insert( + &mut self.doc, + current.observer(), + obj.as_ref(), + index, + value, + ) } fn insert_object>( @@ -407,7 +418,13 @@ impl Transactable for AutoCommitWithObs { ) -> Result { self.ensure_transaction_open(); let (current, tx) = self.transaction.as_mut().unwrap(); - tx.insert_object(&mut self.doc, current, obj.as_ref(), index, value) + tx.insert_object( + &mut self.doc, + current.observer(), + obj.as_ref(), + index, + value, + ) } fn increment, P: Into>( @@ -418,7 +435,7 @@ impl Transactable for AutoCommitWithObs { ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let (current, tx) = self.transaction.as_mut().unwrap(); - tx.increment(&mut self.doc, current, obj.as_ref(), prop, value) + tx.increment(&mut self.doc, current.observer(), obj.as_ref(), prop, value) } fn delete, P: Into>( @@ -428,7 +445,7 @@ 
impl Transactable for AutoCommitWithObs { ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let (current, tx) = self.transaction.as_mut().unwrap(); - tx.delete(&mut self.doc, current, obj.as_ref(), prop) + tx.delete(&mut self.doc, current.observer(), obj.as_ref(), prop) } /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert @@ -442,7 +459,14 @@ impl Transactable for AutoCommitWithObs { ) -> Result<(), AutomergeError> { self.ensure_transaction_open(); let (current, tx) = self.transaction.as_mut().unwrap(); - tx.splice(&mut self.doc, current, obj.as_ref(), pos, del, vals) + tx.splice( + &mut self.doc, + current.observer(), + obj.as_ref(), + pos, + del, + vals, + ) } fn text>(&self, obj: O) -> Result { diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 0ca12934..81b0c173 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -13,7 +13,9 @@ use crate::op_observer::OpObserver; use crate::op_set::OpSet; use crate::parents::Parents; use crate::storage::{self, load, CompressConfig}; -use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner}; +use crate::transaction::{ + self, CommitOptions, Failure, Observed, Success, Transaction, TransactionInner, UnObserved, +}; use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, ScalarValue, Value, @@ -111,22 +113,22 @@ impl Automerge { } /// Start a transaction. 
- pub fn transaction(&mut self) -> Transaction<'_, ()> { + pub fn transaction(&mut self) -> Transaction<'_, UnObserved> { Transaction { inner: Some(self.transaction_inner()), doc: self, - op_observer: (), + observation: Some(UnObserved), } } pub fn transaction_with_observer( &mut self, op_observer: Obs, - ) -> Transaction<'_, Obs> { + ) -> Transaction<'_, Observed> { Transaction { inner: Some(self.transaction_inner()), doc: self, - op_observer, + observation: Some(Observed::new(op_observer)), } } @@ -157,16 +159,46 @@ impl Automerge { /// afterwards. pub fn transact(&mut self, f: F) -> transaction::Result where - F: FnOnce(&mut Transaction<'_, ()>) -> Result, + F: FnOnce(&mut Transaction<'_, UnObserved>) -> Result, + { + self.transact_with_impl(None::<&dyn Fn(&O) -> CommitOptions>, f) + } + + /// Like [`Self::transact`] but with a function for generating the commit options. + pub fn transact_with(&mut self, c: C, f: F) -> transaction::Result + where + F: FnOnce(&mut Transaction<'_, UnObserved>) -> Result, + C: FnOnce(&O) -> CommitOptions, + { + self.transact_with_impl(Some(c), f) + } + + /// Like [`Self::transact`] but with a function for generating the commit options. + fn transact_with_impl( + &mut self, + c: Option, + f: F, + ) -> transaction::Result + where + F: FnOnce(&mut Transaction<'_, UnObserved>) -> Result, + C: FnOnce(&O) -> CommitOptions, { let mut tx = self.transaction(); let result = f(&mut tx); match result { - Ok(result) => Ok(Success { - result, - op_observer: (), - hash: tx.commit(), - }), + Ok(result) => { + let hash = if let Some(c) = c { + let commit_options = c(&result); + tx.commit_with(commit_options) + } else { + tx.commit() + }; + Ok(Success { + result, + hash, + op_observer: (), + }) + } Err(error) => Err(Failure { error, cancelled: tx.rollback(), @@ -174,25 +206,55 @@ impl Automerge { } } - /// Like [`Self::transact`] but with a function for generating the commit options. 
- pub fn transact_with(&mut self, c: C, f: F) -> transaction::Result + /// Run a transaction on this document in a closure, observing ops with `Obs`, automatically handling commit or rollback + /// afterwards. + pub fn transact_observed(&mut self, f: F) -> transaction::Result where - F: FnOnce(&mut Transaction<'_, Obs>) -> Result, - C: FnOnce(&O) -> CommitOptions, - Obs: OpObserver, + F: FnOnce(&mut Transaction<'_, Observed>) -> Result, + Obs: OpObserver + Default, { - let mut op_observer = Obs::default(); - let mut tx = self.transaction_with_observer(Default::default()); + self.transact_observed_with_impl(None::<&dyn Fn(&O) -> CommitOptions>, f) + } + + /// Like [`Self::transact_observed`] but with a function for generating the commit options + pub fn transact_observed_with( + &mut self, + c: C, + f: F, + ) -> transaction::Result + where + F: FnOnce(&mut Transaction<'_, Observed>) -> Result, + C: FnOnce(&O) -> CommitOptions, + Obs: OpObserver + Default, + { + self.transact_observed_with_impl(Some(c), f) + } + + fn transact_observed_with_impl( + &mut self, + c: Option, + f: F, + ) -> transaction::Result + where + F: FnOnce(&mut Transaction<'_, Observed>) -> Result, + C: FnOnce(&O) -> CommitOptions, + Obs: OpObserver + Default, + { + let observer = Obs::default(); + let mut tx = self.transaction_with_observer(observer); let result = f(&mut tx); match result { Ok(result) => { - let commit_options = c(&result); - std::mem::swap(&mut op_observer, &mut tx.op_observer); - let hash = tx.commit_with(commit_options); + let (obs, hash) = if let Some(c) = c { + let commit_options = c(&result); + tx.commit_with(commit_options) + } else { + tx.commit() + }; Ok(Success { result, hash, - op_observer, + op_observer: obs, }) } Err(error) => Err(Failure { diff --git a/automerge/src/automerge/tests.rs b/automerge/src/automerge/tests.rs index 9c1a1ff7..b35aaabf 100644 --- a/automerge/src/automerge/tests.rs +++ b/automerge/src/automerge/tests.rs @@ -1502,7 +1502,7 @@ fn 
observe_counter_change_application() { #[test] fn get_changes_heads_empty() { - let mut doc = AutoCommit::default(); + let mut doc = AutoCommit::unobserved(); doc.put(ROOT, "key1", 1).unwrap(); doc.commit(); doc.put(ROOT, "key2", 1).unwrap(); diff --git a/automerge/src/transaction.rs b/automerge/src/transaction.rs index f97fa7e5..4a91d5b5 100644 --- a/automerge/src/transaction.rs +++ b/automerge/src/transaction.rs @@ -1,6 +1,7 @@ mod commit; mod inner; mod manual_transaction; +pub(crate) mod observation; mod result; mod transactable; @@ -8,6 +9,7 @@ pub use self::commit::CommitOptions; pub use self::transactable::Transactable; pub(crate) use inner::TransactionInner; pub use manual_transaction::Transaction; +pub use observation::{Observation, Observed, UnObserved}; pub use result::Failure; pub use result::Success; diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index aff82a99..fb199f07 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -132,7 +132,7 @@ impl TransactionInner { pub(crate) fn put, V: Into, Obs: OpObserver>( &mut self, doc: &mut Automerge, - op_observer: &mut Obs, + op_observer: Option<&mut Obs>, ex_obj: &ExId, prop: P, value: V, @@ -160,7 +160,7 @@ impl TransactionInner { pub(crate) fn put_object, Obs: OpObserver>( &mut self, doc: &mut Automerge, - op_observer: &mut Obs, + op_observer: Option<&mut Obs>, ex_obj: &ExId, prop: P, value: ObjType, @@ -182,7 +182,7 @@ impl TransactionInner { fn insert_local_op( &mut self, doc: &mut Automerge, - op_observer: &mut Obs, + op_observer: Option<&mut Obs>, prop: Prop, op: Op, pos: usize, @@ -201,7 +201,7 @@ impl TransactionInner { pub(crate) fn insert, Obs: OpObserver>( &mut self, doc: &mut Automerge, - op_observer: &mut Obs, + op_observer: Option<&mut Obs>, ex_obj: &ExId, index: usize, value: V, @@ -216,7 +216,7 @@ impl TransactionInner { pub(crate) fn insert_object( &mut self, doc: &mut Automerge, - op_observer: &mut Obs, + op_observer: 
Option<&mut Obs>, ex_obj: &ExId, index: usize, value: ObjType, @@ -230,7 +230,7 @@ impl TransactionInner { fn do_insert( &mut self, doc: &mut Automerge, - op_observer: &mut Obs, + op_observer: Option<&mut Obs>, obj: ObjId, index: usize, action: OpType, @@ -260,7 +260,7 @@ impl TransactionInner { pub(crate) fn local_op( &mut self, doc: &mut Automerge, - op_observer: &mut Obs, + op_observer: Option<&mut Obs>, obj: ObjId, prop: Prop, action: OpType, @@ -274,7 +274,7 @@ impl TransactionInner { fn local_map_op( &mut self, doc: &mut Automerge, - op_observer: &mut Obs, + op_observer: Option<&mut Obs>, obj: ObjId, prop: String, action: OpType, @@ -323,7 +323,7 @@ impl TransactionInner { fn local_list_op( &mut self, doc: &mut Automerge, - op_observer: &mut Obs, + op_observer: Option<&mut Obs>, obj: ObjId, index: usize, action: OpType, @@ -363,7 +363,7 @@ impl TransactionInner { pub(crate) fn increment, Obs: OpObserver>( &mut self, doc: &mut Automerge, - op_observer: &mut Obs, + op_observer: Option<&mut Obs>, obj: &ExId, prop: P, value: i64, @@ -376,7 +376,7 @@ impl TransactionInner { pub(crate) fn delete, Obs: OpObserver>( &mut self, doc: &mut Automerge, - op_observer: &mut Obs, + op_observer: Option<&mut Obs>, ex_obj: &ExId, prop: P, ) -> Result<(), AutomergeError> { @@ -391,7 +391,7 @@ impl TransactionInner { pub(crate) fn splice( &mut self, doc: &mut Automerge, - op_observer: &mut Obs, + mut op_observer: Option<&mut Obs>, ex_obj: &ExId, mut pos: usize, del: usize, @@ -399,12 +399,20 @@ impl TransactionInner { ) -> Result<(), AutomergeError> { let obj = doc.exid_to_obj(ex_obj)?; for _ in 0..del { - // del() - self.local_op(doc, op_observer, obj, pos.into(), OpType::Delete)?; + // This unwrap and rewrap of the option is necessary to appeas the borrow checker :( + if let Some(obs) = op_observer.as_mut() { + self.local_op(doc, Some(*obs), obj, pos.into(), OpType::Delete)?; + } else { + self.local_op::(doc, None, obj, pos.into(), OpType::Delete)?; + } } for v in vals { - // 
insert() - self.do_insert(doc, op_observer, obj, pos, v.clone().into())?; + // As above this unwrap and rewrap of the option is necessary to appeas the borrow checker :( + if let Some(obs) = op_observer.as_mut() { + self.do_insert(doc, Some(*obs), obj, pos, v.clone().into())?; + } else { + self.do_insert::(doc, None, obj, pos, v.clone().into())?; + } pos += 1; } Ok(()) @@ -413,32 +421,34 @@ impl TransactionInner { fn finalize_op( &mut self, doc: &mut Automerge, - op_observer: &mut Obs, + op_observer: Option<&mut Obs>, obj: ObjId, prop: Prop, op: Op, ) { // TODO - id_to_exid should be a noop if not used - change type to Into? - let ex_obj = doc.ops.id_to_exid(obj.0); - let parents = doc.ops.parents(obj); - if op.insert { - let value = (op.value(), doc.ops.id_to_exid(op.id)); - match prop { - Prop::Map(_) => panic!("insert into a map"), - Prop::Seq(index) => op_observer.insert(parents, ex_obj, index, value), + if let Some(op_observer) = op_observer { + let ex_obj = doc.ops.id_to_exid(obj.0); + let parents = doc.ops.parents(obj); + if op.insert { + let value = (op.value(), doc.ops.id_to_exid(op.id)); + match prop { + Prop::Map(_) => panic!("insert into a map"), + Prop::Seq(index) => op_observer.insert(parents, ex_obj, index, value), + } + } else if op.is_delete() { + op_observer.delete(parents, ex_obj, prop.clone()); + } else if let Some(value) = op.get_increment_value() { + op_observer.increment( + parents, + ex_obj, + prop.clone(), + (value, doc.ops.id_to_exid(op.id)), + ); + } else { + let value = (op.value(), doc.ops.id_to_exid(op.id)); + op_observer.put(parents, ex_obj, prop.clone(), value, false); } - } else if op.is_delete() { - op_observer.delete(parents, ex_obj, prop.clone()); - } else if let Some(value) = op.get_increment_value() { - op_observer.increment( - parents, - ex_obj, - prop.clone(), - (value, doc.ops.id_to_exid(op.id)), - ); - } else { - let value = (op.value(), doc.ops.id_to_exid(op.id)); - op_observer.put(parents, ex_obj, prop.clone(), value, 
false); } self.operations.push((obj, prop, op)); } diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 695866ad..ae23e36c 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -5,7 +5,7 @@ use crate::{Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ScalarValu use crate::{AutomergeError, Keys}; use crate::{ListRange, ListRangeAt, MapRange, MapRangeAt}; -use super::{CommitOptions, Transactable, TransactionInner}; +use super::{observation, CommitOptions, Transactable, TransactionInner}; /// A transaction on a document. /// Transactions group operations into a single change so that no other operations can happen @@ -20,15 +20,22 @@ use super::{CommitOptions, Transactable, TransactionInner}; /// intermediate state. /// This is consistent with `?` error handling. #[derive(Debug)] -pub struct Transaction<'a, Obs: OpObserver> { +pub struct Transaction<'a, Obs: observation::Observation> { // this is an option so that we can take it during commit and rollback to prevent it being // rolled back during drop. pub(crate) inner: Option, + // As with `inner` this is an `Option` so we can `take` it during `commit` + pub(crate) observation: Option, pub(crate) doc: &'a mut Automerge, - pub op_observer: Obs, } -impl<'a, Obs: OpObserver> Transaction<'a, Obs> { +impl<'a, Obs: OpObserver> Transaction<'a, observation::Observed> { + pub fn observer(&mut self) -> &mut Obs { + self.observation.as_mut().unwrap().observer() + } +} + +impl<'a, Obs: observation::Observation> Transaction<'a, Obs> { /// Get the heads of the document before this transaction was started. pub fn get_heads(&self) -> Vec { self.doc.get_heads() @@ -36,8 +43,11 @@ impl<'a, Obs: OpObserver> Transaction<'a, Obs> { /// Commit the operations performed in this transaction, returning the hashes corresponding to /// the new heads. 
- pub fn commit(mut self) -> ChangeHash { - self.inner.take().unwrap().commit(self.doc, None, None) + pub fn commit(mut self) -> Obs::CommitResult { + let tx = self.inner.take().unwrap(); + let hash = tx.commit(self.doc, None, None); + let obs = self.observation.take().unwrap(); + obs.make_result(hash) } /// Commit the operations in this transaction with some options. @@ -56,11 +66,11 @@ impl<'a, Obs: OpObserver> Transaction<'a, Obs> { /// i64; /// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(mut self, options: CommitOptions) -> ChangeHash { - self.inner - .take() - .unwrap() - .commit(self.doc, options.message, options.time) + pub fn commit_with(mut self, options: CommitOptions) -> Obs::CommitResult { + let tx = self.inner.take().unwrap(); + let hash = tx.commit(self.doc, options.message, options.time); + let obs = self.observation.take().unwrap(); + obs.make_result(hash) } /// Undo the operations added in this transaction, returning the number of cancelled @@ -68,9 +78,21 @@ impl<'a, Obs: OpObserver> Transaction<'a, Obs> { pub fn rollback(mut self) -> usize { self.inner.take().unwrap().rollback(self.doc) } + + fn do_tx(&mut self, f: F) -> O + where + F: FnOnce(&mut TransactionInner, &mut Automerge, Option<&mut Obs::Obs>) -> O, + { + let tx = self.inner.as_mut().unwrap(); + if let Some(obs) = self.observation.as_mut() { + f(tx, self.doc, obs.observer()) + } else { + f(tx, self.doc, None) + } + } } -impl<'a, Obs: OpObserver> Transactable for Transaction<'a, Obs> { +impl<'a, Obs: observation::Observation> Transactable for Transaction<'a, Obs> { /// Get the number of pending operations in this transaction. 
fn pending_ops(&self) -> usize { self.inner.as_ref().unwrap().pending_ops() @@ -90,10 +112,7 @@ impl<'a, Obs: OpObserver> Transactable for Transaction<'a, Obs> { prop: P, value: V, ) -> Result<(), AutomergeError> { - self.inner - .as_mut() - .unwrap() - .put(self.doc, &mut self.op_observer, obj.as_ref(), prop, value) + self.do_tx(|tx, doc, obs| tx.put(doc, obs, obj.as_ref(), prop, value)) } fn put_object, P: Into>( @@ -102,13 +121,7 @@ impl<'a, Obs: OpObserver> Transactable for Transaction<'a, Obs> { prop: P, value: ObjType, ) -> Result { - self.inner.as_mut().unwrap().put_object( - self.doc, - &mut self.op_observer, - obj.as_ref(), - prop, - value, - ) + self.do_tx(|tx, doc, obs| tx.put_object(doc, obs, obj.as_ref(), prop, value)) } fn insert, V: Into>( @@ -117,13 +130,7 @@ impl<'a, Obs: OpObserver> Transactable for Transaction<'a, Obs> { index: usize, value: V, ) -> Result<(), AutomergeError> { - self.inner.as_mut().unwrap().insert( - self.doc, - &mut self.op_observer, - obj.as_ref(), - index, - value, - ) + self.do_tx(|tx, doc, obs| tx.insert(doc, obs, obj.as_ref(), index, value)) } fn insert_object>( @@ -132,13 +139,7 @@ impl<'a, Obs: OpObserver> Transactable for Transaction<'a, Obs> { index: usize, value: ObjType, ) -> Result { - self.inner.as_mut().unwrap().insert_object( - self.doc, - &mut self.op_observer, - obj.as_ref(), - index, - value, - ) + self.do_tx(|tx, doc, obs| tx.insert_object(doc, obs, obj.as_ref(), index, value)) } fn increment, P: Into>( @@ -147,13 +148,7 @@ impl<'a, Obs: OpObserver> Transactable for Transaction<'a, Obs> { prop: P, value: i64, ) -> Result<(), AutomergeError> { - self.inner.as_mut().unwrap().increment( - self.doc, - &mut self.op_observer, - obj.as_ref(), - prop, - value, - ) + self.do_tx(|tx, doc, obs| tx.increment(doc, obs, obj.as_ref(), prop, value)) } fn delete, P: Into>( @@ -161,10 +156,7 @@ impl<'a, Obs: OpObserver> Transactable for Transaction<'a, Obs> { obj: O, prop: P, ) -> Result<(), AutomergeError> { - self.inner - 
.as_mut() - .unwrap() - .delete(self.doc, &mut self.op_observer, obj.as_ref(), prop) + self.do_tx(|tx, doc, obs| tx.delete(doc, obs, obj.as_ref(), prop)) } /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert @@ -176,14 +168,7 @@ impl<'a, Obs: OpObserver> Transactable for Transaction<'a, Obs> { del: usize, vals: V, ) -> Result<(), AutomergeError> { - self.inner.as_mut().unwrap().splice( - self.doc, - &mut self.op_observer, - obj.as_ref(), - pos, - del, - vals, - ) + self.do_tx(|tx, doc, obs| tx.splice(doc, obs, obj.as_ref(), pos, del, vals)) } fn keys>(&self, obj: O) -> Keys<'_, '_> { @@ -303,7 +288,7 @@ impl<'a, Obs: OpObserver> Transactable for Transaction<'a, Obs> { // intermediate state. // This defaults to rolling back the transaction to be compatible with `?` error returning before // reaching a call to `commit`. -impl<'a, Obs: OpObserver> Drop for Transaction<'a, Obs> { +impl<'a, Obs: observation::Observation> Drop for Transaction<'a, Obs> { fn drop(&mut self) { if let Some(txn) = self.inner.take() { txn.rollback(self.doc); diff --git a/automerge/src/transaction/observation.rs b/automerge/src/transaction/observation.rs new file mode 100644 index 00000000..fb380cd8 --- /dev/null +++ b/automerge/src/transaction/observation.rs @@ -0,0 +1,78 @@ +//! This module is essentially a type level Option. It is used in sitations where we know at +//! compile time whether an `OpObserver` is available to track changes in a transaction. 
+use crate::{ChangeHash, OpObserver}; + +mod private { + pub trait Sealed {} + impl Sealed for super::Observed {} + impl Sealed for super::UnObserved {} +} + +pub trait Observation: private::Sealed { + type Obs: OpObserver; + type CommitResult; + + fn observer(&mut self) -> Option<&mut Self::Obs>; + fn make_result(self, hash: ChangeHash) -> Self::CommitResult; + fn branch(&self) -> Self; + fn merge(&mut self, other: &Self); +} + +#[derive(Clone, Debug)] +pub struct Observed(Obs); + +impl Observed { + pub(crate) fn new(o: O) -> Self { + Self(o) + } + + pub(crate) fn observer(&mut self) -> &mut O { + &mut self.0 + } +} + +impl Observation for Observed { + type Obs = Obs; + type CommitResult = (Obs, ChangeHash); + fn observer(&mut self) -> Option<&mut Self::Obs> { + Some(&mut self.0) + } + + fn make_result(self, hash: ChangeHash) -> Self::CommitResult { + (self.0, hash) + } + + fn branch(&self) -> Self { + Self(self.0.branch()) + } + + fn merge(&mut self, other: &Self) { + self.0.merge(&other.0) + } +} + +#[derive(Clone, Default, Debug)] +pub struct UnObserved; +impl UnObserved { + pub fn new() -> Self { + Self + } +} + +impl Observation for UnObserved { + type Obs = (); + type CommitResult = ChangeHash; + fn observer(&mut self) -> Option<&mut Self::Obs> { + None + } + + fn make_result(self, hash: ChangeHash) -> Self::CommitResult { + hash + } + + fn branch(&self) -> Self { + Self + } + + fn merge(&mut self, _other: &Self) {} +} From 352a0127c710d5f5eb68d6e348720403074653be Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 11 Oct 2022 16:17:57 +0100 Subject: [PATCH 594/730] Move all rust code into `crates/*` For larger rust projects it's common to put all rust code in a directory called `crates`. This helps in general by reducing the number of directories in the top level but it's particularly helpful for us because some directories _do not_ contain Rust code. In particular `automerge-js`. Move rust code into `/crates` to make the repo easier to navigate. 
--- .gitignore | 1 - Cargo.toml | 10 +++++----- automerge-js/e2e/index.ts | 2 +- crates/.gitignore | 1 + {automerge-c => crates/automerge-c}/.gitignore | 0 {automerge-c => crates/automerge-c}/CMakeLists.txt | 0 {automerge-c => crates/automerge-c}/Cargo.toml | 0 {automerge-c => crates/automerge-c}/README.md | 0 {automerge-c => crates/automerge-c}/build.rs | 0 {automerge-c => crates/automerge-c}/cbindgen.toml | 0 .../automerge-c}/cmake/automerge-c-config.cmake.in | 0 .../automerge-c}/cmake/config.h.in | 0 .../automerge-c}/cmake/file_regex_replace.cmake | 0 .../automerge-c}/cmake/file_touch.cmake | 0 .../automerge-c}/examples/CMakeLists.txt | 0 .../automerge-c}/examples/README.md | 0 .../automerge-c}/examples/quickstart.c | 0 .../automerge-c}/img/brandmark.png | Bin .../automerge-c}/src/CMakeLists.txt | 0 {automerge-c => crates/automerge-c}/src/actor_id.rs | 0 .../automerge-c}/src/byte_span.rs | 0 {automerge-c => crates/automerge-c}/src/change.rs | 0 .../automerge-c}/src/change_hashes.rs | 0 {automerge-c => crates/automerge-c}/src/changes.rs | 0 {automerge-c => crates/automerge-c}/src/doc.rs | 0 {automerge-c => crates/automerge-c}/src/doc/list.rs | 0 .../automerge-c}/src/doc/list/item.rs | 0 .../automerge-c}/src/doc/list/items.rs | 0 {automerge-c => crates/automerge-c}/src/doc/map.rs | 0 .../automerge-c}/src/doc/map/item.rs | 0 .../automerge-c}/src/doc/map/items.rs | 0 .../automerge-c}/src/doc/utils.rs | 0 {automerge-c => crates/automerge-c}/src/lib.rs | 0 {automerge-c => crates/automerge-c}/src/obj.rs | 0 {automerge-c => crates/automerge-c}/src/obj/item.rs | 0 .../automerge-c}/src/obj/items.rs | 0 {automerge-c => crates/automerge-c}/src/result.rs | 0 .../automerge-c}/src/result_stack.rs | 0 {automerge-c => crates/automerge-c}/src/strs.rs | 0 {automerge-c => crates/automerge-c}/src/sync.rs | 0 .../automerge-c}/src/sync/have.rs | 0 .../automerge-c}/src/sync/haves.rs | 0 .../automerge-c}/src/sync/message.rs | 0 .../automerge-c}/src/sync/state.rs | 0 
.../automerge-c}/test/CMakeLists.txt | 0 .../automerge-c}/test/actor_id_tests.c | 0 .../automerge-c}/test/doc_tests.c | 0 .../automerge-c}/test/group_state.c | 0 .../automerge-c}/test/group_state.h | 0 .../automerge-c}/test/list_tests.c | 0 .../automerge-c}/test/macro_utils.c | 0 .../automerge-c}/test/macro_utils.h | 0 {automerge-c => crates/automerge-c}/test/main.c | 0 .../automerge-c}/test/map_tests.c | 0 .../automerge-c}/test/ported_wasm/basic_tests.c | 0 .../automerge-c}/test/ported_wasm/suite.c | 0 .../automerge-c}/test/ported_wasm/sync_tests.c | 0 .../automerge-c}/test/stack_utils.c | 0 .../automerge-c}/test/stack_utils.h | 0 .../automerge-c}/test/str_utils.c | 0 .../automerge-c}/test/str_utils.h | 0 {automerge-cli => crates/automerge-cli}/.gitignore | 0 {automerge-cli => crates/automerge-cli}/Cargo.lock | 0 {automerge-cli => crates/automerge-cli}/Cargo.toml | 0 {automerge-cli => crates/automerge-cli}/IDEAS.md | 0 .../automerge-cli}/src/change.rs | 0 .../automerge-cli}/src/examine.rs | 0 .../automerge-cli}/src/export.rs | 0 .../automerge-cli}/src/import.rs | 0 {automerge-cli => crates/automerge-cli}/src/main.rs | 0 .../automerge-cli}/src/merge.rs | 0 .../automerge-cli}/tests/integration.rs | 0 .../automerge-wasm}/.eslintignore | 0 .../automerge-wasm}/.eslintrc.cjs | 0 .../automerge-wasm}/.gitignore | 0 .../automerge-wasm}/Cargo.toml | 0 {automerge-wasm => crates/automerge-wasm}/LICENSE | 0 {automerge-wasm => crates/automerge-wasm}/README.md | 0 .../automerge-wasm}/examples/cra/.gitignore | 0 .../automerge-wasm}/examples/cra/README.md | 0 .../automerge-wasm}/examples/cra/package.json | 0 .../automerge-wasm}/examples/cra/public/favicon.ico | Bin .../automerge-wasm}/examples/cra/public/index.html | 0 .../automerge-wasm}/examples/cra/public/logo192.png | Bin .../automerge-wasm}/examples/cra/public/logo512.png | Bin .../examples/cra/public/manifest.json | 0 .../automerge-wasm}/examples/cra/public/robots.txt | 0 .../automerge-wasm}/examples/cra/src/App.css | 0 
.../automerge-wasm}/examples/cra/src/App.test.tsx | 0 .../automerge-wasm}/examples/cra/src/App.tsx | 0 .../automerge-wasm}/examples/cra/src/index.css | 0 .../automerge-wasm}/examples/cra/src/index.tsx | 0 .../automerge-wasm}/examples/cra/src/logo.svg | 0 .../examples/cra/src/react-app-env.d.ts | 0 .../examples/cra/src/reportWebVitals.ts | 0 .../automerge-wasm}/examples/cra/src/setupTests.ts | 0 .../automerge-wasm}/examples/cra/tsconfig.json | 0 .../automerge-wasm}/examples/webpack/.gitignore | 0 .../automerge-wasm}/examples/webpack/package.json | 0 .../examples/webpack/public/index.html | 0 .../automerge-wasm}/examples/webpack/src/index.js | 0 .../examples/webpack/webpack.config.js | 0 .../automerge-wasm}/index.d.ts | 0 .../automerge-wasm}/package.json | 0 .../automerge-wasm}/src/interop.rs | 0 .../automerge-wasm}/src/lib.rs | 0 .../automerge-wasm}/src/observer.rs | 0 .../automerge-wasm}/src/sync.rs | 0 .../automerge-wasm}/src/value.rs | 0 .../automerge-wasm}/test/apply.ts | 0 .../automerge-wasm}/test/helpers/columnar.js | 0 .../automerge-wasm}/test/helpers/common.js | 0 .../automerge-wasm}/test/helpers/encoding.js | 0 .../automerge-wasm}/test/helpers/sync.js | 0 .../automerge-wasm}/test/readme.ts | 0 .../automerge-wasm}/test/test.ts | 0 .../automerge-wasm}/tsconfig.json | 0 {automerge => crates/automerge}/.gitignore | 0 {automerge => crates/automerge}/Cargo.toml | 0 {automerge => crates/automerge}/benches/map.rs | 0 {automerge => crates/automerge}/benches/range.rs | 0 {automerge => crates/automerge}/benches/sync.rs | 0 {automerge => crates/automerge}/examples/README.md | 0 .../automerge}/examples/quickstart.rs | 0 {automerge => crates/automerge}/examples/watch.rs | 0 {automerge => crates/automerge}/src/autocommit.rs | 0 {automerge => crates/automerge}/src/automerge.rs | 0 .../automerge}/src/automerge/tests.rs | 0 {automerge => crates/automerge}/src/autoserde.rs | 0 {automerge => crates/automerge}/src/change.rs | 0 {automerge => crates/automerge}/src/clock.rs | 0 
{automerge => crates/automerge}/src/clocks.rs | 0 {automerge => crates/automerge}/src/columnar.rs | 0 .../automerge}/src/columnar/column_range.rs | 0 .../automerge}/src/columnar/column_range/boolean.rs | 0 .../automerge}/src/columnar/column_range/delta.rs | 0 .../automerge}/src/columnar/column_range/deps.rs | 0 .../automerge}/src/columnar/column_range/generic.rs | 0 .../src/columnar/column_range/generic/group.rs | 0 .../src/columnar/column_range/generic/simple.rs | 0 .../automerge}/src/columnar/column_range/key.rs | 0 .../automerge}/src/columnar/column_range/obj_id.rs | 0 .../automerge}/src/columnar/column_range/opid.rs | 0 .../src/columnar/column_range/opid_list.rs | 0 .../automerge}/src/columnar/column_range/raw.rs | 0 .../automerge}/src/columnar/column_range/rle.rs | 0 .../automerge}/src/columnar/column_range/value.rs | 0 .../automerge}/src/columnar/encoding.rs | 0 .../automerge}/src/columnar/encoding/boolean.rs | 0 .../automerge}/src/columnar/encoding/col_error.rs | 0 .../src/columnar/encoding/column_decoder.rs | 0 .../src/columnar/encoding/decodable_impls.rs | 0 .../automerge}/src/columnar/encoding/delta.rs | 0 .../src/columnar/encoding/encodable_impls.rs | 0 .../automerge}/src/columnar/encoding/leb128.rs | 0 .../automerge}/src/columnar/encoding/properties.rs | 0 .../automerge}/src/columnar/encoding/raw.rs | 0 .../automerge}/src/columnar/encoding/rle.rs | 0 .../automerge}/src/columnar/splice_error.rs | 0 {automerge => crates/automerge}/src/convert.rs | 0 {automerge => crates/automerge}/src/decoding.rs | 0 {automerge => crates/automerge}/src/error.rs | 0 {automerge => crates/automerge}/src/exid.rs | 0 .../automerge}/src/indexed_cache.rs | 0 {automerge => crates/automerge}/src/keys.rs | 0 {automerge => crates/automerge}/src/keys_at.rs | 0 {automerge => crates/automerge}/src/legacy/mod.rs | 0 .../automerge}/src/legacy/serde_impls/actor_id.rs | 0 .../src/legacy/serde_impls/change_hash.rs | 0 .../automerge}/src/legacy/serde_impls/element_id.rs | 0 
.../automerge}/src/legacy/serde_impls/mod.rs | 0 .../automerge}/src/legacy/serde_impls/object_id.rs | 0 .../automerge}/src/legacy/serde_impls/op.rs | 0 .../automerge}/src/legacy/serde_impls/op_type.rs | 0 .../automerge}/src/legacy/serde_impls/opid.rs | 0 .../src/legacy/serde_impls/scalar_value.rs | 0 .../src/legacy/utility_impls/element_id.rs | 0 .../automerge}/src/legacy/utility_impls/key.rs | 0 .../automerge}/src/legacy/utility_impls/mod.rs | 0 .../src/legacy/utility_impls/object_id.rs | 0 .../automerge}/src/legacy/utility_impls/opid.rs | 0 {automerge => crates/automerge}/src/lib.rs | 0 {automerge => crates/automerge}/src/list_range.rs | 0 .../automerge}/src/list_range_at.rs | 0 {automerge => crates/automerge}/src/map_range.rs | 0 {automerge => crates/automerge}/src/map_range_at.rs | 0 {automerge => crates/automerge}/src/op_observer.rs | 0 {automerge => crates/automerge}/src/op_set.rs | 0 {automerge => crates/automerge}/src/op_set/load.rs | 0 {automerge => crates/automerge}/src/op_tree.rs | 0 {automerge => crates/automerge}/src/op_tree/iter.rs | 0 {automerge => crates/automerge}/src/parents.rs | 0 {automerge => crates/automerge}/src/query.rs | 0 .../automerge}/src/query/elem_id_pos.rs | 0 {automerge => crates/automerge}/src/query/insert.rs | 0 {automerge => crates/automerge}/src/query/keys.rs | 0 .../automerge}/src/query/keys_at.rs | 0 {automerge => crates/automerge}/src/query/len.rs | 0 {automerge => crates/automerge}/src/query/len_at.rs | 0 .../automerge}/src/query/list_range.rs | 0 .../automerge}/src/query/list_range_at.rs | 0 .../automerge}/src/query/list_vals.rs | 0 .../automerge}/src/query/list_vals_at.rs | 0 .../automerge}/src/query/map_range.rs | 0 .../automerge}/src/query/map_range_at.rs | 0 {automerge => crates/automerge}/src/query/nth.rs | 0 {automerge => crates/automerge}/src/query/nth_at.rs | 0 {automerge => crates/automerge}/src/query/opid.rs | 0 {automerge => crates/automerge}/src/query/prop.rs | 0 .../automerge}/src/query/prop_at.rs | 0 
.../automerge}/src/query/seek_op.rs | 0 .../automerge}/src/query/seek_op_with_patch.rs | 0 .../automerge}/src/sequence_tree.rs | 0 {automerge => crates/automerge}/src/storage.rs | 0 .../automerge}/src/storage/change.rs | 0 .../automerge}/src/storage/change/change_actors.rs | 0 .../src/storage/change/change_op_columns.rs | 0 .../automerge}/src/storage/change/compressed.rs | 0 .../src/storage/change/op_with_change_actors.rs | 0 .../automerge}/src/storage/chunk.rs | 0 .../automerge}/src/storage/columns.rs | 0 .../automerge}/src/storage/columns/column.rs | 0 .../src/storage/columns/column_builder.rs | 0 .../src/storage/columns/column_specification.rs | 0 .../automerge}/src/storage/columns/raw_column.rs | 0 .../automerge}/src/storage/convert.rs | 0 .../src/storage/convert/op_as_changeop.rs | 0 .../automerge}/src/storage/convert/op_as_docop.rs | 0 .../automerge}/src/storage/document.rs | 0 .../automerge}/src/storage/document/compression.rs | 0 .../src/storage/document/doc_change_columns.rs | 0 .../src/storage/document/doc_op_columns.rs | 0 {automerge => crates/automerge}/src/storage/load.rs | 0 .../automerge}/src/storage/load/change_collector.rs | 0 .../src/storage/load/reconstruct_document.rs | 0 .../automerge}/src/storage/parse.rs | 0 .../automerge}/src/storage/parse/leb128.rs | 0 {automerge => crates/automerge}/src/storage/save.rs | 0 .../automerge}/src/storage/save/document.rs | 0 {automerge => crates/automerge}/src/sync.rs | 0 {automerge => crates/automerge}/src/sync/bloom.rs | 0 {automerge => crates/automerge}/src/sync/state.rs | 0 {automerge => crates/automerge}/src/transaction.rs | 0 .../automerge}/src/transaction/commit.rs | 0 .../automerge}/src/transaction/inner.rs | 0 .../src/transaction/manual_transaction.rs | 0 .../automerge}/src/transaction/observation.rs | 0 .../automerge}/src/transaction/result.rs | 0 .../automerge}/src/transaction/transactable.rs | 0 {automerge => crates/automerge}/src/types.rs | 0 {automerge => crates/automerge}/src/types/opids.rs | 0 
{automerge => crates/automerge}/src/value.rs | 0 {automerge => crates/automerge}/src/values.rs | 0 .../automerge}/src/visualisation.rs | 0 .../automerge}/tests/helpers/mod.rs | 0 {automerge => crates/automerge}/tests/test.rs | 0 {edit-trace => crates/edit-trace}/.gitignore | 0 {edit-trace => crates/edit-trace}/Cargo.toml | 0 {edit-trace => crates/edit-trace}/Makefile | 0 {edit-trace => crates/edit-trace}/README.md | 0 {edit-trace => crates/edit-trace}/automerge-1.0.js | 0 {edit-trace => crates/edit-trace}/automerge-js.js | 0 {edit-trace => crates/edit-trace}/automerge-rs.js | 0 {edit-trace => crates/edit-trace}/automerge-wasm.js | 0 {edit-trace => crates/edit-trace}/baseline.js | 0 {edit-trace => crates/edit-trace}/benches/main.rs | 0 {edit-trace => crates/edit-trace}/editing-trace.js | 0 {edit-trace => crates/edit-trace}/edits.json | 0 {edit-trace => crates/edit-trace}/package.json | 0 {edit-trace => crates/edit-trace}/src/main.rs | 0 scripts/ci/cmake-build | 2 +- scripts/ci/cmake-docs | 6 +++--- scripts/ci/js_tests | 2 +- scripts/ci/wasm_tests | 2 +- 274 files changed, 13 insertions(+), 13 deletions(-) create mode 100644 crates/.gitignore rename {automerge-c => crates/automerge-c}/.gitignore (100%) rename {automerge-c => crates/automerge-c}/CMakeLists.txt (100%) rename {automerge-c => crates/automerge-c}/Cargo.toml (100%) rename {automerge-c => crates/automerge-c}/README.md (100%) rename {automerge-c => crates/automerge-c}/build.rs (100%) rename {automerge-c => crates/automerge-c}/cbindgen.toml (100%) rename {automerge-c => crates/automerge-c}/cmake/automerge-c-config.cmake.in (100%) rename {automerge-c => crates/automerge-c}/cmake/config.h.in (100%) rename {automerge-c => crates/automerge-c}/cmake/file_regex_replace.cmake (100%) rename {automerge-c => crates/automerge-c}/cmake/file_touch.cmake (100%) rename {automerge-c => crates/automerge-c}/examples/CMakeLists.txt (100%) rename {automerge-c => crates/automerge-c}/examples/README.md (100%) rename {automerge-c 
=> crates/automerge-c}/examples/quickstart.c (100%) rename {automerge-c => crates/automerge-c}/img/brandmark.png (100%) rename {automerge-c => crates/automerge-c}/src/CMakeLists.txt (100%) rename {automerge-c => crates/automerge-c}/src/actor_id.rs (100%) rename {automerge-c => crates/automerge-c}/src/byte_span.rs (100%) rename {automerge-c => crates/automerge-c}/src/change.rs (100%) rename {automerge-c => crates/automerge-c}/src/change_hashes.rs (100%) rename {automerge-c => crates/automerge-c}/src/changes.rs (100%) rename {automerge-c => crates/automerge-c}/src/doc.rs (100%) rename {automerge-c => crates/automerge-c}/src/doc/list.rs (100%) rename {automerge-c => crates/automerge-c}/src/doc/list/item.rs (100%) rename {automerge-c => crates/automerge-c}/src/doc/list/items.rs (100%) rename {automerge-c => crates/automerge-c}/src/doc/map.rs (100%) rename {automerge-c => crates/automerge-c}/src/doc/map/item.rs (100%) rename {automerge-c => crates/automerge-c}/src/doc/map/items.rs (100%) rename {automerge-c => crates/automerge-c}/src/doc/utils.rs (100%) rename {automerge-c => crates/automerge-c}/src/lib.rs (100%) rename {automerge-c => crates/automerge-c}/src/obj.rs (100%) rename {automerge-c => crates/automerge-c}/src/obj/item.rs (100%) rename {automerge-c => crates/automerge-c}/src/obj/items.rs (100%) rename {automerge-c => crates/automerge-c}/src/result.rs (100%) rename {automerge-c => crates/automerge-c}/src/result_stack.rs (100%) rename {automerge-c => crates/automerge-c}/src/strs.rs (100%) rename {automerge-c => crates/automerge-c}/src/sync.rs (100%) rename {automerge-c => crates/automerge-c}/src/sync/have.rs (100%) rename {automerge-c => crates/automerge-c}/src/sync/haves.rs (100%) rename {automerge-c => crates/automerge-c}/src/sync/message.rs (100%) rename {automerge-c => crates/automerge-c}/src/sync/state.rs (100%) rename {automerge-c => crates/automerge-c}/test/CMakeLists.txt (100%) rename {automerge-c => crates/automerge-c}/test/actor_id_tests.c (100%) rename 
{automerge-c => crates/automerge-c}/test/doc_tests.c (100%) rename {automerge-c => crates/automerge-c}/test/group_state.c (100%) rename {automerge-c => crates/automerge-c}/test/group_state.h (100%) rename {automerge-c => crates/automerge-c}/test/list_tests.c (100%) rename {automerge-c => crates/automerge-c}/test/macro_utils.c (100%) rename {automerge-c => crates/automerge-c}/test/macro_utils.h (100%) rename {automerge-c => crates/automerge-c}/test/main.c (100%) rename {automerge-c => crates/automerge-c}/test/map_tests.c (100%) rename {automerge-c => crates/automerge-c}/test/ported_wasm/basic_tests.c (100%) rename {automerge-c => crates/automerge-c}/test/ported_wasm/suite.c (100%) rename {automerge-c => crates/automerge-c}/test/ported_wasm/sync_tests.c (100%) rename {automerge-c => crates/automerge-c}/test/stack_utils.c (100%) rename {automerge-c => crates/automerge-c}/test/stack_utils.h (100%) rename {automerge-c => crates/automerge-c}/test/str_utils.c (100%) rename {automerge-c => crates/automerge-c}/test/str_utils.h (100%) rename {automerge-cli => crates/automerge-cli}/.gitignore (100%) rename {automerge-cli => crates/automerge-cli}/Cargo.lock (100%) rename {automerge-cli => crates/automerge-cli}/Cargo.toml (100%) rename {automerge-cli => crates/automerge-cli}/IDEAS.md (100%) rename {automerge-cli => crates/automerge-cli}/src/change.rs (100%) rename {automerge-cli => crates/automerge-cli}/src/examine.rs (100%) rename {automerge-cli => crates/automerge-cli}/src/export.rs (100%) rename {automerge-cli => crates/automerge-cli}/src/import.rs (100%) rename {automerge-cli => crates/automerge-cli}/src/main.rs (100%) rename {automerge-cli => crates/automerge-cli}/src/merge.rs (100%) rename {automerge-cli => crates/automerge-cli}/tests/integration.rs (100%) rename {automerge-wasm => crates/automerge-wasm}/.eslintignore (100%) rename {automerge-wasm => crates/automerge-wasm}/.eslintrc.cjs (100%) rename {automerge-wasm => crates/automerge-wasm}/.gitignore (100%) rename 
{automerge-wasm => crates/automerge-wasm}/Cargo.toml (100%) rename {automerge-wasm => crates/automerge-wasm}/LICENSE (100%) rename {automerge-wasm => crates/automerge-wasm}/README.md (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/.gitignore (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/README.md (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/package.json (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/public/favicon.ico (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/public/index.html (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/public/logo192.png (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/public/logo512.png (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/public/manifest.json (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/public/robots.txt (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/src/App.css (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/src/App.test.tsx (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/src/App.tsx (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/src/index.css (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/src/index.tsx (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/src/logo.svg (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/src/react-app-env.d.ts (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/src/reportWebVitals.ts (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/src/setupTests.ts (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/cra/tsconfig.json (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/webpack/.gitignore (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/webpack/package.json (100%) rename 
{automerge-wasm => crates/automerge-wasm}/examples/webpack/public/index.html (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/webpack/src/index.js (100%) rename {automerge-wasm => crates/automerge-wasm}/examples/webpack/webpack.config.js (100%) rename {automerge-wasm => crates/automerge-wasm}/index.d.ts (100%) rename {automerge-wasm => crates/automerge-wasm}/package.json (100%) rename {automerge-wasm => crates/automerge-wasm}/src/interop.rs (100%) rename {automerge-wasm => crates/automerge-wasm}/src/lib.rs (100%) rename {automerge-wasm => crates/automerge-wasm}/src/observer.rs (100%) rename {automerge-wasm => crates/automerge-wasm}/src/sync.rs (100%) rename {automerge-wasm => crates/automerge-wasm}/src/value.rs (100%) rename {automerge-wasm => crates/automerge-wasm}/test/apply.ts (100%) rename {automerge-wasm => crates/automerge-wasm}/test/helpers/columnar.js (100%) rename {automerge-wasm => crates/automerge-wasm}/test/helpers/common.js (100%) rename {automerge-wasm => crates/automerge-wasm}/test/helpers/encoding.js (100%) rename {automerge-wasm => crates/automerge-wasm}/test/helpers/sync.js (100%) rename {automerge-wasm => crates/automerge-wasm}/test/readme.ts (100%) rename {automerge-wasm => crates/automerge-wasm}/test/test.ts (100%) rename {automerge-wasm => crates/automerge-wasm}/tsconfig.json (100%) rename {automerge => crates/automerge}/.gitignore (100%) rename {automerge => crates/automerge}/Cargo.toml (100%) rename {automerge => crates/automerge}/benches/map.rs (100%) rename {automerge => crates/automerge}/benches/range.rs (100%) rename {automerge => crates/automerge}/benches/sync.rs (100%) rename {automerge => crates/automerge}/examples/README.md (100%) rename {automerge => crates/automerge}/examples/quickstart.rs (100%) rename {automerge => crates/automerge}/examples/watch.rs (100%) rename {automerge => crates/automerge}/src/autocommit.rs (100%) rename {automerge => crates/automerge}/src/automerge.rs (100%) rename {automerge => 
crates/automerge}/src/automerge/tests.rs (100%) rename {automerge => crates/automerge}/src/autoserde.rs (100%) rename {automerge => crates/automerge}/src/change.rs (100%) rename {automerge => crates/automerge}/src/clock.rs (100%) rename {automerge => crates/automerge}/src/clocks.rs (100%) rename {automerge => crates/automerge}/src/columnar.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range/boolean.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range/delta.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range/deps.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range/generic.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range/generic/group.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range/generic/simple.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range/key.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range/obj_id.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range/opid.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range/opid_list.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range/raw.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range/rle.rs (100%) rename {automerge => crates/automerge}/src/columnar/column_range/value.rs (100%) rename {automerge => crates/automerge}/src/columnar/encoding.rs (100%) rename {automerge => crates/automerge}/src/columnar/encoding/boolean.rs (100%) rename {automerge => crates/automerge}/src/columnar/encoding/col_error.rs (100%) rename {automerge => crates/automerge}/src/columnar/encoding/column_decoder.rs (100%) rename {automerge => crates/automerge}/src/columnar/encoding/decodable_impls.rs (100%) rename {automerge => crates/automerge}/src/columnar/encoding/delta.rs (100%) rename {automerge => 
crates/automerge}/src/columnar/encoding/encodable_impls.rs (100%) rename {automerge => crates/automerge}/src/columnar/encoding/leb128.rs (100%) rename {automerge => crates/automerge}/src/columnar/encoding/properties.rs (100%) rename {automerge => crates/automerge}/src/columnar/encoding/raw.rs (100%) rename {automerge => crates/automerge}/src/columnar/encoding/rle.rs (100%) rename {automerge => crates/automerge}/src/columnar/splice_error.rs (100%) rename {automerge => crates/automerge}/src/convert.rs (100%) rename {automerge => crates/automerge}/src/decoding.rs (100%) rename {automerge => crates/automerge}/src/error.rs (100%) rename {automerge => crates/automerge}/src/exid.rs (100%) rename {automerge => crates/automerge}/src/indexed_cache.rs (100%) rename {automerge => crates/automerge}/src/keys.rs (100%) rename {automerge => crates/automerge}/src/keys_at.rs (100%) rename {automerge => crates/automerge}/src/legacy/mod.rs (100%) rename {automerge => crates/automerge}/src/legacy/serde_impls/actor_id.rs (100%) rename {automerge => crates/automerge}/src/legacy/serde_impls/change_hash.rs (100%) rename {automerge => crates/automerge}/src/legacy/serde_impls/element_id.rs (100%) rename {automerge => crates/automerge}/src/legacy/serde_impls/mod.rs (100%) rename {automerge => crates/automerge}/src/legacy/serde_impls/object_id.rs (100%) rename {automerge => crates/automerge}/src/legacy/serde_impls/op.rs (100%) rename {automerge => crates/automerge}/src/legacy/serde_impls/op_type.rs (100%) rename {automerge => crates/automerge}/src/legacy/serde_impls/opid.rs (100%) rename {automerge => crates/automerge}/src/legacy/serde_impls/scalar_value.rs (100%) rename {automerge => crates/automerge}/src/legacy/utility_impls/element_id.rs (100%) rename {automerge => crates/automerge}/src/legacy/utility_impls/key.rs (100%) rename {automerge => crates/automerge}/src/legacy/utility_impls/mod.rs (100%) rename {automerge => crates/automerge}/src/legacy/utility_impls/object_id.rs (100%) rename 
{automerge => crates/automerge}/src/legacy/utility_impls/opid.rs (100%) rename {automerge => crates/automerge}/src/lib.rs (100%) rename {automerge => crates/automerge}/src/list_range.rs (100%) rename {automerge => crates/automerge}/src/list_range_at.rs (100%) rename {automerge => crates/automerge}/src/map_range.rs (100%) rename {automerge => crates/automerge}/src/map_range_at.rs (100%) rename {automerge => crates/automerge}/src/op_observer.rs (100%) rename {automerge => crates/automerge}/src/op_set.rs (100%) rename {automerge => crates/automerge}/src/op_set/load.rs (100%) rename {automerge => crates/automerge}/src/op_tree.rs (100%) rename {automerge => crates/automerge}/src/op_tree/iter.rs (100%) rename {automerge => crates/automerge}/src/parents.rs (100%) rename {automerge => crates/automerge}/src/query.rs (100%) rename {automerge => crates/automerge}/src/query/elem_id_pos.rs (100%) rename {automerge => crates/automerge}/src/query/insert.rs (100%) rename {automerge => crates/automerge}/src/query/keys.rs (100%) rename {automerge => crates/automerge}/src/query/keys_at.rs (100%) rename {automerge => crates/automerge}/src/query/len.rs (100%) rename {automerge => crates/automerge}/src/query/len_at.rs (100%) rename {automerge => crates/automerge}/src/query/list_range.rs (100%) rename {automerge => crates/automerge}/src/query/list_range_at.rs (100%) rename {automerge => crates/automerge}/src/query/list_vals.rs (100%) rename {automerge => crates/automerge}/src/query/list_vals_at.rs (100%) rename {automerge => crates/automerge}/src/query/map_range.rs (100%) rename {automerge => crates/automerge}/src/query/map_range_at.rs (100%) rename {automerge => crates/automerge}/src/query/nth.rs (100%) rename {automerge => crates/automerge}/src/query/nth_at.rs (100%) rename {automerge => crates/automerge}/src/query/opid.rs (100%) rename {automerge => crates/automerge}/src/query/prop.rs (100%) rename {automerge => crates/automerge}/src/query/prop_at.rs (100%) rename {automerge => 
crates/automerge}/src/query/seek_op.rs (100%) rename {automerge => crates/automerge}/src/query/seek_op_with_patch.rs (100%) rename {automerge => crates/automerge}/src/sequence_tree.rs (100%) rename {automerge => crates/automerge}/src/storage.rs (100%) rename {automerge => crates/automerge}/src/storage/change.rs (100%) rename {automerge => crates/automerge}/src/storage/change/change_actors.rs (100%) rename {automerge => crates/automerge}/src/storage/change/change_op_columns.rs (100%) rename {automerge => crates/automerge}/src/storage/change/compressed.rs (100%) rename {automerge => crates/automerge}/src/storage/change/op_with_change_actors.rs (100%) rename {automerge => crates/automerge}/src/storage/chunk.rs (100%) rename {automerge => crates/automerge}/src/storage/columns.rs (100%) rename {automerge => crates/automerge}/src/storage/columns/column.rs (100%) rename {automerge => crates/automerge}/src/storage/columns/column_builder.rs (100%) rename {automerge => crates/automerge}/src/storage/columns/column_specification.rs (100%) rename {automerge => crates/automerge}/src/storage/columns/raw_column.rs (100%) rename {automerge => crates/automerge}/src/storage/convert.rs (100%) rename {automerge => crates/automerge}/src/storage/convert/op_as_changeop.rs (100%) rename {automerge => crates/automerge}/src/storage/convert/op_as_docop.rs (100%) rename {automerge => crates/automerge}/src/storage/document.rs (100%) rename {automerge => crates/automerge}/src/storage/document/compression.rs (100%) rename {automerge => crates/automerge}/src/storage/document/doc_change_columns.rs (100%) rename {automerge => crates/automerge}/src/storage/document/doc_op_columns.rs (100%) rename {automerge => crates/automerge}/src/storage/load.rs (100%) rename {automerge => crates/automerge}/src/storage/load/change_collector.rs (100%) rename {automerge => crates/automerge}/src/storage/load/reconstruct_document.rs (100%) rename {automerge => crates/automerge}/src/storage/parse.rs (100%) rename 
{automerge => crates/automerge}/src/storage/parse/leb128.rs (100%) rename {automerge => crates/automerge}/src/storage/save.rs (100%) rename {automerge => crates/automerge}/src/storage/save/document.rs (100%) rename {automerge => crates/automerge}/src/sync.rs (100%) rename {automerge => crates/automerge}/src/sync/bloom.rs (100%) rename {automerge => crates/automerge}/src/sync/state.rs (100%) rename {automerge => crates/automerge}/src/transaction.rs (100%) rename {automerge => crates/automerge}/src/transaction/commit.rs (100%) rename {automerge => crates/automerge}/src/transaction/inner.rs (100%) rename {automerge => crates/automerge}/src/transaction/manual_transaction.rs (100%) rename {automerge => crates/automerge}/src/transaction/observation.rs (100%) rename {automerge => crates/automerge}/src/transaction/result.rs (100%) rename {automerge => crates/automerge}/src/transaction/transactable.rs (100%) rename {automerge => crates/automerge}/src/types.rs (100%) rename {automerge => crates/automerge}/src/types/opids.rs (100%) rename {automerge => crates/automerge}/src/value.rs (100%) rename {automerge => crates/automerge}/src/values.rs (100%) rename {automerge => crates/automerge}/src/visualisation.rs (100%) rename {automerge => crates/automerge}/tests/helpers/mod.rs (100%) rename {automerge => crates/automerge}/tests/test.rs (100%) rename {edit-trace => crates/edit-trace}/.gitignore (100%) rename {edit-trace => crates/edit-trace}/Cargo.toml (100%) rename {edit-trace => crates/edit-trace}/Makefile (100%) rename {edit-trace => crates/edit-trace}/README.md (100%) rename {edit-trace => crates/edit-trace}/automerge-1.0.js (100%) rename {edit-trace => crates/edit-trace}/automerge-js.js (100%) rename {edit-trace => crates/edit-trace}/automerge-rs.js (100%) rename {edit-trace => crates/edit-trace}/automerge-wasm.js (100%) rename {edit-trace => crates/edit-trace}/baseline.js (100%) rename {edit-trace => crates/edit-trace}/benches/main.rs (100%) rename {edit-trace => 
crates/edit-trace}/editing-trace.js (100%) rename {edit-trace => crates/edit-trace}/edits.json (100%) rename {edit-trace => crates/edit-trace}/package.json (100%) rename {edit-trace => crates/edit-trace}/src/main.rs (100%) diff --git a/.gitignore b/.gitignore index 4ca7b595..f859e0a3 100644 --- a/.gitignore +++ b/.gitignore @@ -3,5 +3,4 @@ perf.* /Cargo.lock build/ -automerge/proptest-regressions/ .vim/* diff --git a/Cargo.toml b/Cargo.toml index fbd416fc..f03c451c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,10 +1,10 @@ [workspace] members = [ - "automerge", - "automerge-c", - "automerge-cli", - "automerge-wasm", - "edit-trace", + "crates/automerge", + "crates/automerge-c", + "crates/automerge-cli", + "crates/automerge-wasm", + "crates/edit-trace", ] resolver = "2" diff --git a/automerge-js/e2e/index.ts b/automerge-js/e2e/index.ts index c11e518d..c70aa1f7 100644 --- a/automerge-js/e2e/index.ts +++ b/automerge-js/e2e/index.ts @@ -9,7 +9,7 @@ import fetch from "node-fetch" const VERDACCIO_DB_PATH = path.normalize(`${__dirname}/verdacciodb`) const VERDACCIO_CONFIG_PATH = path.normalize(`${__dirname}/verdaccio.yaml`) -const AUTOMERGE_WASM_PATH = path.normalize(`${__dirname}/../../automerge-wasm`) +const AUTOMERGE_WASM_PATH = path.normalize(`${__dirname}/../../crates/automerge-wasm`) const AUTOMERGE_JS_PATH = path.normalize(`${__dirname}/..`) const EXAMPLES_DIR = path.normalize(path.join(__dirname, "../", "examples")) diff --git a/crates/.gitignore b/crates/.gitignore new file mode 100644 index 00000000..3b12275f --- /dev/null +++ b/crates/.gitignore @@ -0,0 +1 @@ +automerge/proptest-regressions/ diff --git a/automerge-c/.gitignore b/crates/automerge-c/.gitignore similarity index 100% rename from automerge-c/.gitignore rename to crates/automerge-c/.gitignore diff --git a/automerge-c/CMakeLists.txt b/crates/automerge-c/CMakeLists.txt similarity index 100% rename from automerge-c/CMakeLists.txt rename to crates/automerge-c/CMakeLists.txt diff --git 
a/automerge-c/Cargo.toml b/crates/automerge-c/Cargo.toml similarity index 100% rename from automerge-c/Cargo.toml rename to crates/automerge-c/Cargo.toml diff --git a/automerge-c/README.md b/crates/automerge-c/README.md similarity index 100% rename from automerge-c/README.md rename to crates/automerge-c/README.md diff --git a/automerge-c/build.rs b/crates/automerge-c/build.rs similarity index 100% rename from automerge-c/build.rs rename to crates/automerge-c/build.rs diff --git a/automerge-c/cbindgen.toml b/crates/automerge-c/cbindgen.toml similarity index 100% rename from automerge-c/cbindgen.toml rename to crates/automerge-c/cbindgen.toml diff --git a/automerge-c/cmake/automerge-c-config.cmake.in b/crates/automerge-c/cmake/automerge-c-config.cmake.in similarity index 100% rename from automerge-c/cmake/automerge-c-config.cmake.in rename to crates/automerge-c/cmake/automerge-c-config.cmake.in diff --git a/automerge-c/cmake/config.h.in b/crates/automerge-c/cmake/config.h.in similarity index 100% rename from automerge-c/cmake/config.h.in rename to crates/automerge-c/cmake/config.h.in diff --git a/automerge-c/cmake/file_regex_replace.cmake b/crates/automerge-c/cmake/file_regex_replace.cmake similarity index 100% rename from automerge-c/cmake/file_regex_replace.cmake rename to crates/automerge-c/cmake/file_regex_replace.cmake diff --git a/automerge-c/cmake/file_touch.cmake b/crates/automerge-c/cmake/file_touch.cmake similarity index 100% rename from automerge-c/cmake/file_touch.cmake rename to crates/automerge-c/cmake/file_touch.cmake diff --git a/automerge-c/examples/CMakeLists.txt b/crates/automerge-c/examples/CMakeLists.txt similarity index 100% rename from automerge-c/examples/CMakeLists.txt rename to crates/automerge-c/examples/CMakeLists.txt diff --git a/automerge-c/examples/README.md b/crates/automerge-c/examples/README.md similarity index 100% rename from automerge-c/examples/README.md rename to crates/automerge-c/examples/README.md diff --git 
a/automerge-c/examples/quickstart.c b/crates/automerge-c/examples/quickstart.c similarity index 100% rename from automerge-c/examples/quickstart.c rename to crates/automerge-c/examples/quickstart.c diff --git a/automerge-c/img/brandmark.png b/crates/automerge-c/img/brandmark.png similarity index 100% rename from automerge-c/img/brandmark.png rename to crates/automerge-c/img/brandmark.png diff --git a/automerge-c/src/CMakeLists.txt b/crates/automerge-c/src/CMakeLists.txt similarity index 100% rename from automerge-c/src/CMakeLists.txt rename to crates/automerge-c/src/CMakeLists.txt diff --git a/automerge-c/src/actor_id.rs b/crates/automerge-c/src/actor_id.rs similarity index 100% rename from automerge-c/src/actor_id.rs rename to crates/automerge-c/src/actor_id.rs diff --git a/automerge-c/src/byte_span.rs b/crates/automerge-c/src/byte_span.rs similarity index 100% rename from automerge-c/src/byte_span.rs rename to crates/automerge-c/src/byte_span.rs diff --git a/automerge-c/src/change.rs b/crates/automerge-c/src/change.rs similarity index 100% rename from automerge-c/src/change.rs rename to crates/automerge-c/src/change.rs diff --git a/automerge-c/src/change_hashes.rs b/crates/automerge-c/src/change_hashes.rs similarity index 100% rename from automerge-c/src/change_hashes.rs rename to crates/automerge-c/src/change_hashes.rs diff --git a/automerge-c/src/changes.rs b/crates/automerge-c/src/changes.rs similarity index 100% rename from automerge-c/src/changes.rs rename to crates/automerge-c/src/changes.rs diff --git a/automerge-c/src/doc.rs b/crates/automerge-c/src/doc.rs similarity index 100% rename from automerge-c/src/doc.rs rename to crates/automerge-c/src/doc.rs diff --git a/automerge-c/src/doc/list.rs b/crates/automerge-c/src/doc/list.rs similarity index 100% rename from automerge-c/src/doc/list.rs rename to crates/automerge-c/src/doc/list.rs diff --git a/automerge-c/src/doc/list/item.rs b/crates/automerge-c/src/doc/list/item.rs similarity index 100% rename from 
automerge-c/src/doc/list/item.rs rename to crates/automerge-c/src/doc/list/item.rs diff --git a/automerge-c/src/doc/list/items.rs b/crates/automerge-c/src/doc/list/items.rs similarity index 100% rename from automerge-c/src/doc/list/items.rs rename to crates/automerge-c/src/doc/list/items.rs diff --git a/automerge-c/src/doc/map.rs b/crates/automerge-c/src/doc/map.rs similarity index 100% rename from automerge-c/src/doc/map.rs rename to crates/automerge-c/src/doc/map.rs diff --git a/automerge-c/src/doc/map/item.rs b/crates/automerge-c/src/doc/map/item.rs similarity index 100% rename from automerge-c/src/doc/map/item.rs rename to crates/automerge-c/src/doc/map/item.rs diff --git a/automerge-c/src/doc/map/items.rs b/crates/automerge-c/src/doc/map/items.rs similarity index 100% rename from automerge-c/src/doc/map/items.rs rename to crates/automerge-c/src/doc/map/items.rs diff --git a/automerge-c/src/doc/utils.rs b/crates/automerge-c/src/doc/utils.rs similarity index 100% rename from automerge-c/src/doc/utils.rs rename to crates/automerge-c/src/doc/utils.rs diff --git a/automerge-c/src/lib.rs b/crates/automerge-c/src/lib.rs similarity index 100% rename from automerge-c/src/lib.rs rename to crates/automerge-c/src/lib.rs diff --git a/automerge-c/src/obj.rs b/crates/automerge-c/src/obj.rs similarity index 100% rename from automerge-c/src/obj.rs rename to crates/automerge-c/src/obj.rs diff --git a/automerge-c/src/obj/item.rs b/crates/automerge-c/src/obj/item.rs similarity index 100% rename from automerge-c/src/obj/item.rs rename to crates/automerge-c/src/obj/item.rs diff --git a/automerge-c/src/obj/items.rs b/crates/automerge-c/src/obj/items.rs similarity index 100% rename from automerge-c/src/obj/items.rs rename to crates/automerge-c/src/obj/items.rs diff --git a/automerge-c/src/result.rs b/crates/automerge-c/src/result.rs similarity index 100% rename from automerge-c/src/result.rs rename to crates/automerge-c/src/result.rs diff --git a/automerge-c/src/result_stack.rs 
b/crates/automerge-c/src/result_stack.rs similarity index 100% rename from automerge-c/src/result_stack.rs rename to crates/automerge-c/src/result_stack.rs diff --git a/automerge-c/src/strs.rs b/crates/automerge-c/src/strs.rs similarity index 100% rename from automerge-c/src/strs.rs rename to crates/automerge-c/src/strs.rs diff --git a/automerge-c/src/sync.rs b/crates/automerge-c/src/sync.rs similarity index 100% rename from automerge-c/src/sync.rs rename to crates/automerge-c/src/sync.rs diff --git a/automerge-c/src/sync/have.rs b/crates/automerge-c/src/sync/have.rs similarity index 100% rename from automerge-c/src/sync/have.rs rename to crates/automerge-c/src/sync/have.rs diff --git a/automerge-c/src/sync/haves.rs b/crates/automerge-c/src/sync/haves.rs similarity index 100% rename from automerge-c/src/sync/haves.rs rename to crates/automerge-c/src/sync/haves.rs diff --git a/automerge-c/src/sync/message.rs b/crates/automerge-c/src/sync/message.rs similarity index 100% rename from automerge-c/src/sync/message.rs rename to crates/automerge-c/src/sync/message.rs diff --git a/automerge-c/src/sync/state.rs b/crates/automerge-c/src/sync/state.rs similarity index 100% rename from automerge-c/src/sync/state.rs rename to crates/automerge-c/src/sync/state.rs diff --git a/automerge-c/test/CMakeLists.txt b/crates/automerge-c/test/CMakeLists.txt similarity index 100% rename from automerge-c/test/CMakeLists.txt rename to crates/automerge-c/test/CMakeLists.txt diff --git a/automerge-c/test/actor_id_tests.c b/crates/automerge-c/test/actor_id_tests.c similarity index 100% rename from automerge-c/test/actor_id_tests.c rename to crates/automerge-c/test/actor_id_tests.c diff --git a/automerge-c/test/doc_tests.c b/crates/automerge-c/test/doc_tests.c similarity index 100% rename from automerge-c/test/doc_tests.c rename to crates/automerge-c/test/doc_tests.c diff --git a/automerge-c/test/group_state.c b/crates/automerge-c/test/group_state.c similarity index 100% rename from 
automerge-c/test/group_state.c rename to crates/automerge-c/test/group_state.c diff --git a/automerge-c/test/group_state.h b/crates/automerge-c/test/group_state.h similarity index 100% rename from automerge-c/test/group_state.h rename to crates/automerge-c/test/group_state.h diff --git a/automerge-c/test/list_tests.c b/crates/automerge-c/test/list_tests.c similarity index 100% rename from automerge-c/test/list_tests.c rename to crates/automerge-c/test/list_tests.c diff --git a/automerge-c/test/macro_utils.c b/crates/automerge-c/test/macro_utils.c similarity index 100% rename from automerge-c/test/macro_utils.c rename to crates/automerge-c/test/macro_utils.c diff --git a/automerge-c/test/macro_utils.h b/crates/automerge-c/test/macro_utils.h similarity index 100% rename from automerge-c/test/macro_utils.h rename to crates/automerge-c/test/macro_utils.h diff --git a/automerge-c/test/main.c b/crates/automerge-c/test/main.c similarity index 100% rename from automerge-c/test/main.c rename to crates/automerge-c/test/main.c diff --git a/automerge-c/test/map_tests.c b/crates/automerge-c/test/map_tests.c similarity index 100% rename from automerge-c/test/map_tests.c rename to crates/automerge-c/test/map_tests.c diff --git a/automerge-c/test/ported_wasm/basic_tests.c b/crates/automerge-c/test/ported_wasm/basic_tests.c similarity index 100% rename from automerge-c/test/ported_wasm/basic_tests.c rename to crates/automerge-c/test/ported_wasm/basic_tests.c diff --git a/automerge-c/test/ported_wasm/suite.c b/crates/automerge-c/test/ported_wasm/suite.c similarity index 100% rename from automerge-c/test/ported_wasm/suite.c rename to crates/automerge-c/test/ported_wasm/suite.c diff --git a/automerge-c/test/ported_wasm/sync_tests.c b/crates/automerge-c/test/ported_wasm/sync_tests.c similarity index 100% rename from automerge-c/test/ported_wasm/sync_tests.c rename to crates/automerge-c/test/ported_wasm/sync_tests.c diff --git a/automerge-c/test/stack_utils.c 
b/crates/automerge-c/test/stack_utils.c similarity index 100% rename from automerge-c/test/stack_utils.c rename to crates/automerge-c/test/stack_utils.c diff --git a/automerge-c/test/stack_utils.h b/crates/automerge-c/test/stack_utils.h similarity index 100% rename from automerge-c/test/stack_utils.h rename to crates/automerge-c/test/stack_utils.h diff --git a/automerge-c/test/str_utils.c b/crates/automerge-c/test/str_utils.c similarity index 100% rename from automerge-c/test/str_utils.c rename to crates/automerge-c/test/str_utils.c diff --git a/automerge-c/test/str_utils.h b/crates/automerge-c/test/str_utils.h similarity index 100% rename from automerge-c/test/str_utils.h rename to crates/automerge-c/test/str_utils.h diff --git a/automerge-cli/.gitignore b/crates/automerge-cli/.gitignore similarity index 100% rename from automerge-cli/.gitignore rename to crates/automerge-cli/.gitignore diff --git a/automerge-cli/Cargo.lock b/crates/automerge-cli/Cargo.lock similarity index 100% rename from automerge-cli/Cargo.lock rename to crates/automerge-cli/Cargo.lock diff --git a/automerge-cli/Cargo.toml b/crates/automerge-cli/Cargo.toml similarity index 100% rename from automerge-cli/Cargo.toml rename to crates/automerge-cli/Cargo.toml diff --git a/automerge-cli/IDEAS.md b/crates/automerge-cli/IDEAS.md similarity index 100% rename from automerge-cli/IDEAS.md rename to crates/automerge-cli/IDEAS.md diff --git a/automerge-cli/src/change.rs b/crates/automerge-cli/src/change.rs similarity index 100% rename from automerge-cli/src/change.rs rename to crates/automerge-cli/src/change.rs diff --git a/automerge-cli/src/examine.rs b/crates/automerge-cli/src/examine.rs similarity index 100% rename from automerge-cli/src/examine.rs rename to crates/automerge-cli/src/examine.rs diff --git a/automerge-cli/src/export.rs b/crates/automerge-cli/src/export.rs similarity index 100% rename from automerge-cli/src/export.rs rename to crates/automerge-cli/src/export.rs diff --git 
a/automerge-cli/src/import.rs b/crates/automerge-cli/src/import.rs similarity index 100% rename from automerge-cli/src/import.rs rename to crates/automerge-cli/src/import.rs diff --git a/automerge-cli/src/main.rs b/crates/automerge-cli/src/main.rs similarity index 100% rename from automerge-cli/src/main.rs rename to crates/automerge-cli/src/main.rs diff --git a/automerge-cli/src/merge.rs b/crates/automerge-cli/src/merge.rs similarity index 100% rename from automerge-cli/src/merge.rs rename to crates/automerge-cli/src/merge.rs diff --git a/automerge-cli/tests/integration.rs b/crates/automerge-cli/tests/integration.rs similarity index 100% rename from automerge-cli/tests/integration.rs rename to crates/automerge-cli/tests/integration.rs diff --git a/automerge-wasm/.eslintignore b/crates/automerge-wasm/.eslintignore similarity index 100% rename from automerge-wasm/.eslintignore rename to crates/automerge-wasm/.eslintignore diff --git a/automerge-wasm/.eslintrc.cjs b/crates/automerge-wasm/.eslintrc.cjs similarity index 100% rename from automerge-wasm/.eslintrc.cjs rename to crates/automerge-wasm/.eslintrc.cjs diff --git a/automerge-wasm/.gitignore b/crates/automerge-wasm/.gitignore similarity index 100% rename from automerge-wasm/.gitignore rename to crates/automerge-wasm/.gitignore diff --git a/automerge-wasm/Cargo.toml b/crates/automerge-wasm/Cargo.toml similarity index 100% rename from automerge-wasm/Cargo.toml rename to crates/automerge-wasm/Cargo.toml diff --git a/automerge-wasm/LICENSE b/crates/automerge-wasm/LICENSE similarity index 100% rename from automerge-wasm/LICENSE rename to crates/automerge-wasm/LICENSE diff --git a/automerge-wasm/README.md b/crates/automerge-wasm/README.md similarity index 100% rename from automerge-wasm/README.md rename to crates/automerge-wasm/README.md diff --git a/automerge-wasm/examples/cra/.gitignore b/crates/automerge-wasm/examples/cra/.gitignore similarity index 100% rename from automerge-wasm/examples/cra/.gitignore rename to 
crates/automerge-wasm/examples/cra/.gitignore diff --git a/automerge-wasm/examples/cra/README.md b/crates/automerge-wasm/examples/cra/README.md similarity index 100% rename from automerge-wasm/examples/cra/README.md rename to crates/automerge-wasm/examples/cra/README.md diff --git a/automerge-wasm/examples/cra/package.json b/crates/automerge-wasm/examples/cra/package.json similarity index 100% rename from automerge-wasm/examples/cra/package.json rename to crates/automerge-wasm/examples/cra/package.json diff --git a/automerge-wasm/examples/cra/public/favicon.ico b/crates/automerge-wasm/examples/cra/public/favicon.ico similarity index 100% rename from automerge-wasm/examples/cra/public/favicon.ico rename to crates/automerge-wasm/examples/cra/public/favicon.ico diff --git a/automerge-wasm/examples/cra/public/index.html b/crates/automerge-wasm/examples/cra/public/index.html similarity index 100% rename from automerge-wasm/examples/cra/public/index.html rename to crates/automerge-wasm/examples/cra/public/index.html diff --git a/automerge-wasm/examples/cra/public/logo192.png b/crates/automerge-wasm/examples/cra/public/logo192.png similarity index 100% rename from automerge-wasm/examples/cra/public/logo192.png rename to crates/automerge-wasm/examples/cra/public/logo192.png diff --git a/automerge-wasm/examples/cra/public/logo512.png b/crates/automerge-wasm/examples/cra/public/logo512.png similarity index 100% rename from automerge-wasm/examples/cra/public/logo512.png rename to crates/automerge-wasm/examples/cra/public/logo512.png diff --git a/automerge-wasm/examples/cra/public/manifest.json b/crates/automerge-wasm/examples/cra/public/manifest.json similarity index 100% rename from automerge-wasm/examples/cra/public/manifest.json rename to crates/automerge-wasm/examples/cra/public/manifest.json diff --git a/automerge-wasm/examples/cra/public/robots.txt b/crates/automerge-wasm/examples/cra/public/robots.txt similarity index 100% rename from 
automerge-wasm/examples/cra/public/robots.txt rename to crates/automerge-wasm/examples/cra/public/robots.txt diff --git a/automerge-wasm/examples/cra/src/App.css b/crates/automerge-wasm/examples/cra/src/App.css similarity index 100% rename from automerge-wasm/examples/cra/src/App.css rename to crates/automerge-wasm/examples/cra/src/App.css diff --git a/automerge-wasm/examples/cra/src/App.test.tsx b/crates/automerge-wasm/examples/cra/src/App.test.tsx similarity index 100% rename from automerge-wasm/examples/cra/src/App.test.tsx rename to crates/automerge-wasm/examples/cra/src/App.test.tsx diff --git a/automerge-wasm/examples/cra/src/App.tsx b/crates/automerge-wasm/examples/cra/src/App.tsx similarity index 100% rename from automerge-wasm/examples/cra/src/App.tsx rename to crates/automerge-wasm/examples/cra/src/App.tsx diff --git a/automerge-wasm/examples/cra/src/index.css b/crates/automerge-wasm/examples/cra/src/index.css similarity index 100% rename from automerge-wasm/examples/cra/src/index.css rename to crates/automerge-wasm/examples/cra/src/index.css diff --git a/automerge-wasm/examples/cra/src/index.tsx b/crates/automerge-wasm/examples/cra/src/index.tsx similarity index 100% rename from automerge-wasm/examples/cra/src/index.tsx rename to crates/automerge-wasm/examples/cra/src/index.tsx diff --git a/automerge-wasm/examples/cra/src/logo.svg b/crates/automerge-wasm/examples/cra/src/logo.svg similarity index 100% rename from automerge-wasm/examples/cra/src/logo.svg rename to crates/automerge-wasm/examples/cra/src/logo.svg diff --git a/automerge-wasm/examples/cra/src/react-app-env.d.ts b/crates/automerge-wasm/examples/cra/src/react-app-env.d.ts similarity index 100% rename from automerge-wasm/examples/cra/src/react-app-env.d.ts rename to crates/automerge-wasm/examples/cra/src/react-app-env.d.ts diff --git a/automerge-wasm/examples/cra/src/reportWebVitals.ts b/crates/automerge-wasm/examples/cra/src/reportWebVitals.ts similarity index 100% rename from 
automerge-wasm/examples/cra/src/reportWebVitals.ts rename to crates/automerge-wasm/examples/cra/src/reportWebVitals.ts diff --git a/automerge-wasm/examples/cra/src/setupTests.ts b/crates/automerge-wasm/examples/cra/src/setupTests.ts similarity index 100% rename from automerge-wasm/examples/cra/src/setupTests.ts rename to crates/automerge-wasm/examples/cra/src/setupTests.ts diff --git a/automerge-wasm/examples/cra/tsconfig.json b/crates/automerge-wasm/examples/cra/tsconfig.json similarity index 100% rename from automerge-wasm/examples/cra/tsconfig.json rename to crates/automerge-wasm/examples/cra/tsconfig.json diff --git a/automerge-wasm/examples/webpack/.gitignore b/crates/automerge-wasm/examples/webpack/.gitignore similarity index 100% rename from automerge-wasm/examples/webpack/.gitignore rename to crates/automerge-wasm/examples/webpack/.gitignore diff --git a/automerge-wasm/examples/webpack/package.json b/crates/automerge-wasm/examples/webpack/package.json similarity index 100% rename from automerge-wasm/examples/webpack/package.json rename to crates/automerge-wasm/examples/webpack/package.json diff --git a/automerge-wasm/examples/webpack/public/index.html b/crates/automerge-wasm/examples/webpack/public/index.html similarity index 100% rename from automerge-wasm/examples/webpack/public/index.html rename to crates/automerge-wasm/examples/webpack/public/index.html diff --git a/automerge-wasm/examples/webpack/src/index.js b/crates/automerge-wasm/examples/webpack/src/index.js similarity index 100% rename from automerge-wasm/examples/webpack/src/index.js rename to crates/automerge-wasm/examples/webpack/src/index.js diff --git a/automerge-wasm/examples/webpack/webpack.config.js b/crates/automerge-wasm/examples/webpack/webpack.config.js similarity index 100% rename from automerge-wasm/examples/webpack/webpack.config.js rename to crates/automerge-wasm/examples/webpack/webpack.config.js diff --git a/automerge-wasm/index.d.ts b/crates/automerge-wasm/index.d.ts similarity 
index 100% rename from automerge-wasm/index.d.ts rename to crates/automerge-wasm/index.d.ts diff --git a/automerge-wasm/package.json b/crates/automerge-wasm/package.json similarity index 100% rename from automerge-wasm/package.json rename to crates/automerge-wasm/package.json diff --git a/automerge-wasm/src/interop.rs b/crates/automerge-wasm/src/interop.rs similarity index 100% rename from automerge-wasm/src/interop.rs rename to crates/automerge-wasm/src/interop.rs diff --git a/automerge-wasm/src/lib.rs b/crates/automerge-wasm/src/lib.rs similarity index 100% rename from automerge-wasm/src/lib.rs rename to crates/automerge-wasm/src/lib.rs diff --git a/automerge-wasm/src/observer.rs b/crates/automerge-wasm/src/observer.rs similarity index 100% rename from automerge-wasm/src/observer.rs rename to crates/automerge-wasm/src/observer.rs diff --git a/automerge-wasm/src/sync.rs b/crates/automerge-wasm/src/sync.rs similarity index 100% rename from automerge-wasm/src/sync.rs rename to crates/automerge-wasm/src/sync.rs diff --git a/automerge-wasm/src/value.rs b/crates/automerge-wasm/src/value.rs similarity index 100% rename from automerge-wasm/src/value.rs rename to crates/automerge-wasm/src/value.rs diff --git a/automerge-wasm/test/apply.ts b/crates/automerge-wasm/test/apply.ts similarity index 100% rename from automerge-wasm/test/apply.ts rename to crates/automerge-wasm/test/apply.ts diff --git a/automerge-wasm/test/helpers/columnar.js b/crates/automerge-wasm/test/helpers/columnar.js similarity index 100% rename from automerge-wasm/test/helpers/columnar.js rename to crates/automerge-wasm/test/helpers/columnar.js diff --git a/automerge-wasm/test/helpers/common.js b/crates/automerge-wasm/test/helpers/common.js similarity index 100% rename from automerge-wasm/test/helpers/common.js rename to crates/automerge-wasm/test/helpers/common.js diff --git a/automerge-wasm/test/helpers/encoding.js b/crates/automerge-wasm/test/helpers/encoding.js similarity index 100% rename from 
automerge-wasm/test/helpers/encoding.js rename to crates/automerge-wasm/test/helpers/encoding.js diff --git a/automerge-wasm/test/helpers/sync.js b/crates/automerge-wasm/test/helpers/sync.js similarity index 100% rename from automerge-wasm/test/helpers/sync.js rename to crates/automerge-wasm/test/helpers/sync.js diff --git a/automerge-wasm/test/readme.ts b/crates/automerge-wasm/test/readme.ts similarity index 100% rename from automerge-wasm/test/readme.ts rename to crates/automerge-wasm/test/readme.ts diff --git a/automerge-wasm/test/test.ts b/crates/automerge-wasm/test/test.ts similarity index 100% rename from automerge-wasm/test/test.ts rename to crates/automerge-wasm/test/test.ts diff --git a/automerge-wasm/tsconfig.json b/crates/automerge-wasm/tsconfig.json similarity index 100% rename from automerge-wasm/tsconfig.json rename to crates/automerge-wasm/tsconfig.json diff --git a/automerge/.gitignore b/crates/automerge/.gitignore similarity index 100% rename from automerge/.gitignore rename to crates/automerge/.gitignore diff --git a/automerge/Cargo.toml b/crates/automerge/Cargo.toml similarity index 100% rename from automerge/Cargo.toml rename to crates/automerge/Cargo.toml diff --git a/automerge/benches/map.rs b/crates/automerge/benches/map.rs similarity index 100% rename from automerge/benches/map.rs rename to crates/automerge/benches/map.rs diff --git a/automerge/benches/range.rs b/crates/automerge/benches/range.rs similarity index 100% rename from automerge/benches/range.rs rename to crates/automerge/benches/range.rs diff --git a/automerge/benches/sync.rs b/crates/automerge/benches/sync.rs similarity index 100% rename from automerge/benches/sync.rs rename to crates/automerge/benches/sync.rs diff --git a/automerge/examples/README.md b/crates/automerge/examples/README.md similarity index 100% rename from automerge/examples/README.md rename to crates/automerge/examples/README.md diff --git a/automerge/examples/quickstart.rs 
b/crates/automerge/examples/quickstart.rs similarity index 100% rename from automerge/examples/quickstart.rs rename to crates/automerge/examples/quickstart.rs diff --git a/automerge/examples/watch.rs b/crates/automerge/examples/watch.rs similarity index 100% rename from automerge/examples/watch.rs rename to crates/automerge/examples/watch.rs diff --git a/automerge/src/autocommit.rs b/crates/automerge/src/autocommit.rs similarity index 100% rename from automerge/src/autocommit.rs rename to crates/automerge/src/autocommit.rs diff --git a/automerge/src/automerge.rs b/crates/automerge/src/automerge.rs similarity index 100% rename from automerge/src/automerge.rs rename to crates/automerge/src/automerge.rs diff --git a/automerge/src/automerge/tests.rs b/crates/automerge/src/automerge/tests.rs similarity index 100% rename from automerge/src/automerge/tests.rs rename to crates/automerge/src/automerge/tests.rs diff --git a/automerge/src/autoserde.rs b/crates/automerge/src/autoserde.rs similarity index 100% rename from automerge/src/autoserde.rs rename to crates/automerge/src/autoserde.rs diff --git a/automerge/src/change.rs b/crates/automerge/src/change.rs similarity index 100% rename from automerge/src/change.rs rename to crates/automerge/src/change.rs diff --git a/automerge/src/clock.rs b/crates/automerge/src/clock.rs similarity index 100% rename from automerge/src/clock.rs rename to crates/automerge/src/clock.rs diff --git a/automerge/src/clocks.rs b/crates/automerge/src/clocks.rs similarity index 100% rename from automerge/src/clocks.rs rename to crates/automerge/src/clocks.rs diff --git a/automerge/src/columnar.rs b/crates/automerge/src/columnar.rs similarity index 100% rename from automerge/src/columnar.rs rename to crates/automerge/src/columnar.rs diff --git a/automerge/src/columnar/column_range.rs b/crates/automerge/src/columnar/column_range.rs similarity index 100% rename from automerge/src/columnar/column_range.rs rename to 
crates/automerge/src/columnar/column_range.rs diff --git a/automerge/src/columnar/column_range/boolean.rs b/crates/automerge/src/columnar/column_range/boolean.rs similarity index 100% rename from automerge/src/columnar/column_range/boolean.rs rename to crates/automerge/src/columnar/column_range/boolean.rs diff --git a/automerge/src/columnar/column_range/delta.rs b/crates/automerge/src/columnar/column_range/delta.rs similarity index 100% rename from automerge/src/columnar/column_range/delta.rs rename to crates/automerge/src/columnar/column_range/delta.rs diff --git a/automerge/src/columnar/column_range/deps.rs b/crates/automerge/src/columnar/column_range/deps.rs similarity index 100% rename from automerge/src/columnar/column_range/deps.rs rename to crates/automerge/src/columnar/column_range/deps.rs diff --git a/automerge/src/columnar/column_range/generic.rs b/crates/automerge/src/columnar/column_range/generic.rs similarity index 100% rename from automerge/src/columnar/column_range/generic.rs rename to crates/automerge/src/columnar/column_range/generic.rs diff --git a/automerge/src/columnar/column_range/generic/group.rs b/crates/automerge/src/columnar/column_range/generic/group.rs similarity index 100% rename from automerge/src/columnar/column_range/generic/group.rs rename to crates/automerge/src/columnar/column_range/generic/group.rs diff --git a/automerge/src/columnar/column_range/generic/simple.rs b/crates/automerge/src/columnar/column_range/generic/simple.rs similarity index 100% rename from automerge/src/columnar/column_range/generic/simple.rs rename to crates/automerge/src/columnar/column_range/generic/simple.rs diff --git a/automerge/src/columnar/column_range/key.rs b/crates/automerge/src/columnar/column_range/key.rs similarity index 100% rename from automerge/src/columnar/column_range/key.rs rename to crates/automerge/src/columnar/column_range/key.rs diff --git a/automerge/src/columnar/column_range/obj_id.rs 
b/crates/automerge/src/columnar/column_range/obj_id.rs similarity index 100% rename from automerge/src/columnar/column_range/obj_id.rs rename to crates/automerge/src/columnar/column_range/obj_id.rs diff --git a/automerge/src/columnar/column_range/opid.rs b/crates/automerge/src/columnar/column_range/opid.rs similarity index 100% rename from automerge/src/columnar/column_range/opid.rs rename to crates/automerge/src/columnar/column_range/opid.rs diff --git a/automerge/src/columnar/column_range/opid_list.rs b/crates/automerge/src/columnar/column_range/opid_list.rs similarity index 100% rename from automerge/src/columnar/column_range/opid_list.rs rename to crates/automerge/src/columnar/column_range/opid_list.rs diff --git a/automerge/src/columnar/column_range/raw.rs b/crates/automerge/src/columnar/column_range/raw.rs similarity index 100% rename from automerge/src/columnar/column_range/raw.rs rename to crates/automerge/src/columnar/column_range/raw.rs diff --git a/automerge/src/columnar/column_range/rle.rs b/crates/automerge/src/columnar/column_range/rle.rs similarity index 100% rename from automerge/src/columnar/column_range/rle.rs rename to crates/automerge/src/columnar/column_range/rle.rs diff --git a/automerge/src/columnar/column_range/value.rs b/crates/automerge/src/columnar/column_range/value.rs similarity index 100% rename from automerge/src/columnar/column_range/value.rs rename to crates/automerge/src/columnar/column_range/value.rs diff --git a/automerge/src/columnar/encoding.rs b/crates/automerge/src/columnar/encoding.rs similarity index 100% rename from automerge/src/columnar/encoding.rs rename to crates/automerge/src/columnar/encoding.rs diff --git a/automerge/src/columnar/encoding/boolean.rs b/crates/automerge/src/columnar/encoding/boolean.rs similarity index 100% rename from automerge/src/columnar/encoding/boolean.rs rename to crates/automerge/src/columnar/encoding/boolean.rs diff --git a/automerge/src/columnar/encoding/col_error.rs 
b/crates/automerge/src/columnar/encoding/col_error.rs similarity index 100% rename from automerge/src/columnar/encoding/col_error.rs rename to crates/automerge/src/columnar/encoding/col_error.rs diff --git a/automerge/src/columnar/encoding/column_decoder.rs b/crates/automerge/src/columnar/encoding/column_decoder.rs similarity index 100% rename from automerge/src/columnar/encoding/column_decoder.rs rename to crates/automerge/src/columnar/encoding/column_decoder.rs diff --git a/automerge/src/columnar/encoding/decodable_impls.rs b/crates/automerge/src/columnar/encoding/decodable_impls.rs similarity index 100% rename from automerge/src/columnar/encoding/decodable_impls.rs rename to crates/automerge/src/columnar/encoding/decodable_impls.rs diff --git a/automerge/src/columnar/encoding/delta.rs b/crates/automerge/src/columnar/encoding/delta.rs similarity index 100% rename from automerge/src/columnar/encoding/delta.rs rename to crates/automerge/src/columnar/encoding/delta.rs diff --git a/automerge/src/columnar/encoding/encodable_impls.rs b/crates/automerge/src/columnar/encoding/encodable_impls.rs similarity index 100% rename from automerge/src/columnar/encoding/encodable_impls.rs rename to crates/automerge/src/columnar/encoding/encodable_impls.rs diff --git a/automerge/src/columnar/encoding/leb128.rs b/crates/automerge/src/columnar/encoding/leb128.rs similarity index 100% rename from automerge/src/columnar/encoding/leb128.rs rename to crates/automerge/src/columnar/encoding/leb128.rs diff --git a/automerge/src/columnar/encoding/properties.rs b/crates/automerge/src/columnar/encoding/properties.rs similarity index 100% rename from automerge/src/columnar/encoding/properties.rs rename to crates/automerge/src/columnar/encoding/properties.rs diff --git a/automerge/src/columnar/encoding/raw.rs b/crates/automerge/src/columnar/encoding/raw.rs similarity index 100% rename from automerge/src/columnar/encoding/raw.rs rename to crates/automerge/src/columnar/encoding/raw.rs diff --git 
a/automerge/src/columnar/encoding/rle.rs b/crates/automerge/src/columnar/encoding/rle.rs similarity index 100% rename from automerge/src/columnar/encoding/rle.rs rename to crates/automerge/src/columnar/encoding/rle.rs diff --git a/automerge/src/columnar/splice_error.rs b/crates/automerge/src/columnar/splice_error.rs similarity index 100% rename from automerge/src/columnar/splice_error.rs rename to crates/automerge/src/columnar/splice_error.rs diff --git a/automerge/src/convert.rs b/crates/automerge/src/convert.rs similarity index 100% rename from automerge/src/convert.rs rename to crates/automerge/src/convert.rs diff --git a/automerge/src/decoding.rs b/crates/automerge/src/decoding.rs similarity index 100% rename from automerge/src/decoding.rs rename to crates/automerge/src/decoding.rs diff --git a/automerge/src/error.rs b/crates/automerge/src/error.rs similarity index 100% rename from automerge/src/error.rs rename to crates/automerge/src/error.rs diff --git a/automerge/src/exid.rs b/crates/automerge/src/exid.rs similarity index 100% rename from automerge/src/exid.rs rename to crates/automerge/src/exid.rs diff --git a/automerge/src/indexed_cache.rs b/crates/automerge/src/indexed_cache.rs similarity index 100% rename from automerge/src/indexed_cache.rs rename to crates/automerge/src/indexed_cache.rs diff --git a/automerge/src/keys.rs b/crates/automerge/src/keys.rs similarity index 100% rename from automerge/src/keys.rs rename to crates/automerge/src/keys.rs diff --git a/automerge/src/keys_at.rs b/crates/automerge/src/keys_at.rs similarity index 100% rename from automerge/src/keys_at.rs rename to crates/automerge/src/keys_at.rs diff --git a/automerge/src/legacy/mod.rs b/crates/automerge/src/legacy/mod.rs similarity index 100% rename from automerge/src/legacy/mod.rs rename to crates/automerge/src/legacy/mod.rs diff --git a/automerge/src/legacy/serde_impls/actor_id.rs b/crates/automerge/src/legacy/serde_impls/actor_id.rs similarity index 100% rename from 
automerge/src/legacy/serde_impls/actor_id.rs rename to crates/automerge/src/legacy/serde_impls/actor_id.rs diff --git a/automerge/src/legacy/serde_impls/change_hash.rs b/crates/automerge/src/legacy/serde_impls/change_hash.rs similarity index 100% rename from automerge/src/legacy/serde_impls/change_hash.rs rename to crates/automerge/src/legacy/serde_impls/change_hash.rs diff --git a/automerge/src/legacy/serde_impls/element_id.rs b/crates/automerge/src/legacy/serde_impls/element_id.rs similarity index 100% rename from automerge/src/legacy/serde_impls/element_id.rs rename to crates/automerge/src/legacy/serde_impls/element_id.rs diff --git a/automerge/src/legacy/serde_impls/mod.rs b/crates/automerge/src/legacy/serde_impls/mod.rs similarity index 100% rename from automerge/src/legacy/serde_impls/mod.rs rename to crates/automerge/src/legacy/serde_impls/mod.rs diff --git a/automerge/src/legacy/serde_impls/object_id.rs b/crates/automerge/src/legacy/serde_impls/object_id.rs similarity index 100% rename from automerge/src/legacy/serde_impls/object_id.rs rename to crates/automerge/src/legacy/serde_impls/object_id.rs diff --git a/automerge/src/legacy/serde_impls/op.rs b/crates/automerge/src/legacy/serde_impls/op.rs similarity index 100% rename from automerge/src/legacy/serde_impls/op.rs rename to crates/automerge/src/legacy/serde_impls/op.rs diff --git a/automerge/src/legacy/serde_impls/op_type.rs b/crates/automerge/src/legacy/serde_impls/op_type.rs similarity index 100% rename from automerge/src/legacy/serde_impls/op_type.rs rename to crates/automerge/src/legacy/serde_impls/op_type.rs diff --git a/automerge/src/legacy/serde_impls/opid.rs b/crates/automerge/src/legacy/serde_impls/opid.rs similarity index 100% rename from automerge/src/legacy/serde_impls/opid.rs rename to crates/automerge/src/legacy/serde_impls/opid.rs diff --git a/automerge/src/legacy/serde_impls/scalar_value.rs b/crates/automerge/src/legacy/serde_impls/scalar_value.rs similarity index 100% rename from 
automerge/src/legacy/serde_impls/scalar_value.rs rename to crates/automerge/src/legacy/serde_impls/scalar_value.rs diff --git a/automerge/src/legacy/utility_impls/element_id.rs b/crates/automerge/src/legacy/utility_impls/element_id.rs similarity index 100% rename from automerge/src/legacy/utility_impls/element_id.rs rename to crates/automerge/src/legacy/utility_impls/element_id.rs diff --git a/automerge/src/legacy/utility_impls/key.rs b/crates/automerge/src/legacy/utility_impls/key.rs similarity index 100% rename from automerge/src/legacy/utility_impls/key.rs rename to crates/automerge/src/legacy/utility_impls/key.rs diff --git a/automerge/src/legacy/utility_impls/mod.rs b/crates/automerge/src/legacy/utility_impls/mod.rs similarity index 100% rename from automerge/src/legacy/utility_impls/mod.rs rename to crates/automerge/src/legacy/utility_impls/mod.rs diff --git a/automerge/src/legacy/utility_impls/object_id.rs b/crates/automerge/src/legacy/utility_impls/object_id.rs similarity index 100% rename from automerge/src/legacy/utility_impls/object_id.rs rename to crates/automerge/src/legacy/utility_impls/object_id.rs diff --git a/automerge/src/legacy/utility_impls/opid.rs b/crates/automerge/src/legacy/utility_impls/opid.rs similarity index 100% rename from automerge/src/legacy/utility_impls/opid.rs rename to crates/automerge/src/legacy/utility_impls/opid.rs diff --git a/automerge/src/lib.rs b/crates/automerge/src/lib.rs similarity index 100% rename from automerge/src/lib.rs rename to crates/automerge/src/lib.rs diff --git a/automerge/src/list_range.rs b/crates/automerge/src/list_range.rs similarity index 100% rename from automerge/src/list_range.rs rename to crates/automerge/src/list_range.rs diff --git a/automerge/src/list_range_at.rs b/crates/automerge/src/list_range_at.rs similarity index 100% rename from automerge/src/list_range_at.rs rename to crates/automerge/src/list_range_at.rs diff --git a/automerge/src/map_range.rs b/crates/automerge/src/map_range.rs 
similarity index 100% rename from automerge/src/map_range.rs rename to crates/automerge/src/map_range.rs diff --git a/automerge/src/map_range_at.rs b/crates/automerge/src/map_range_at.rs similarity index 100% rename from automerge/src/map_range_at.rs rename to crates/automerge/src/map_range_at.rs diff --git a/automerge/src/op_observer.rs b/crates/automerge/src/op_observer.rs similarity index 100% rename from automerge/src/op_observer.rs rename to crates/automerge/src/op_observer.rs diff --git a/automerge/src/op_set.rs b/crates/automerge/src/op_set.rs similarity index 100% rename from automerge/src/op_set.rs rename to crates/automerge/src/op_set.rs diff --git a/automerge/src/op_set/load.rs b/crates/automerge/src/op_set/load.rs similarity index 100% rename from automerge/src/op_set/load.rs rename to crates/automerge/src/op_set/load.rs diff --git a/automerge/src/op_tree.rs b/crates/automerge/src/op_tree.rs similarity index 100% rename from automerge/src/op_tree.rs rename to crates/automerge/src/op_tree.rs diff --git a/automerge/src/op_tree/iter.rs b/crates/automerge/src/op_tree/iter.rs similarity index 100% rename from automerge/src/op_tree/iter.rs rename to crates/automerge/src/op_tree/iter.rs diff --git a/automerge/src/parents.rs b/crates/automerge/src/parents.rs similarity index 100% rename from automerge/src/parents.rs rename to crates/automerge/src/parents.rs diff --git a/automerge/src/query.rs b/crates/automerge/src/query.rs similarity index 100% rename from automerge/src/query.rs rename to crates/automerge/src/query.rs diff --git a/automerge/src/query/elem_id_pos.rs b/crates/automerge/src/query/elem_id_pos.rs similarity index 100% rename from automerge/src/query/elem_id_pos.rs rename to crates/automerge/src/query/elem_id_pos.rs diff --git a/automerge/src/query/insert.rs b/crates/automerge/src/query/insert.rs similarity index 100% rename from automerge/src/query/insert.rs rename to crates/automerge/src/query/insert.rs diff --git a/automerge/src/query/keys.rs 
b/crates/automerge/src/query/keys.rs similarity index 100% rename from automerge/src/query/keys.rs rename to crates/automerge/src/query/keys.rs diff --git a/automerge/src/query/keys_at.rs b/crates/automerge/src/query/keys_at.rs similarity index 100% rename from automerge/src/query/keys_at.rs rename to crates/automerge/src/query/keys_at.rs diff --git a/automerge/src/query/len.rs b/crates/automerge/src/query/len.rs similarity index 100% rename from automerge/src/query/len.rs rename to crates/automerge/src/query/len.rs diff --git a/automerge/src/query/len_at.rs b/crates/automerge/src/query/len_at.rs similarity index 100% rename from automerge/src/query/len_at.rs rename to crates/automerge/src/query/len_at.rs diff --git a/automerge/src/query/list_range.rs b/crates/automerge/src/query/list_range.rs similarity index 100% rename from automerge/src/query/list_range.rs rename to crates/automerge/src/query/list_range.rs diff --git a/automerge/src/query/list_range_at.rs b/crates/automerge/src/query/list_range_at.rs similarity index 100% rename from automerge/src/query/list_range_at.rs rename to crates/automerge/src/query/list_range_at.rs diff --git a/automerge/src/query/list_vals.rs b/crates/automerge/src/query/list_vals.rs similarity index 100% rename from automerge/src/query/list_vals.rs rename to crates/automerge/src/query/list_vals.rs diff --git a/automerge/src/query/list_vals_at.rs b/crates/automerge/src/query/list_vals_at.rs similarity index 100% rename from automerge/src/query/list_vals_at.rs rename to crates/automerge/src/query/list_vals_at.rs diff --git a/automerge/src/query/map_range.rs b/crates/automerge/src/query/map_range.rs similarity index 100% rename from automerge/src/query/map_range.rs rename to crates/automerge/src/query/map_range.rs diff --git a/automerge/src/query/map_range_at.rs b/crates/automerge/src/query/map_range_at.rs similarity index 100% rename from automerge/src/query/map_range_at.rs rename to crates/automerge/src/query/map_range_at.rs diff --git 
a/automerge/src/query/nth.rs b/crates/automerge/src/query/nth.rs similarity index 100% rename from automerge/src/query/nth.rs rename to crates/automerge/src/query/nth.rs diff --git a/automerge/src/query/nth_at.rs b/crates/automerge/src/query/nth_at.rs similarity index 100% rename from automerge/src/query/nth_at.rs rename to crates/automerge/src/query/nth_at.rs diff --git a/automerge/src/query/opid.rs b/crates/automerge/src/query/opid.rs similarity index 100% rename from automerge/src/query/opid.rs rename to crates/automerge/src/query/opid.rs diff --git a/automerge/src/query/prop.rs b/crates/automerge/src/query/prop.rs similarity index 100% rename from automerge/src/query/prop.rs rename to crates/automerge/src/query/prop.rs diff --git a/automerge/src/query/prop_at.rs b/crates/automerge/src/query/prop_at.rs similarity index 100% rename from automerge/src/query/prop_at.rs rename to crates/automerge/src/query/prop_at.rs diff --git a/automerge/src/query/seek_op.rs b/crates/automerge/src/query/seek_op.rs similarity index 100% rename from automerge/src/query/seek_op.rs rename to crates/automerge/src/query/seek_op.rs diff --git a/automerge/src/query/seek_op_with_patch.rs b/crates/automerge/src/query/seek_op_with_patch.rs similarity index 100% rename from automerge/src/query/seek_op_with_patch.rs rename to crates/automerge/src/query/seek_op_with_patch.rs diff --git a/automerge/src/sequence_tree.rs b/crates/automerge/src/sequence_tree.rs similarity index 100% rename from automerge/src/sequence_tree.rs rename to crates/automerge/src/sequence_tree.rs diff --git a/automerge/src/storage.rs b/crates/automerge/src/storage.rs similarity index 100% rename from automerge/src/storage.rs rename to crates/automerge/src/storage.rs diff --git a/automerge/src/storage/change.rs b/crates/automerge/src/storage/change.rs similarity index 100% rename from automerge/src/storage/change.rs rename to crates/automerge/src/storage/change.rs diff --git a/automerge/src/storage/change/change_actors.rs 
b/crates/automerge/src/storage/change/change_actors.rs similarity index 100% rename from automerge/src/storage/change/change_actors.rs rename to crates/automerge/src/storage/change/change_actors.rs diff --git a/automerge/src/storage/change/change_op_columns.rs b/crates/automerge/src/storage/change/change_op_columns.rs similarity index 100% rename from automerge/src/storage/change/change_op_columns.rs rename to crates/automerge/src/storage/change/change_op_columns.rs diff --git a/automerge/src/storage/change/compressed.rs b/crates/automerge/src/storage/change/compressed.rs similarity index 100% rename from automerge/src/storage/change/compressed.rs rename to crates/automerge/src/storage/change/compressed.rs diff --git a/automerge/src/storage/change/op_with_change_actors.rs b/crates/automerge/src/storage/change/op_with_change_actors.rs similarity index 100% rename from automerge/src/storage/change/op_with_change_actors.rs rename to crates/automerge/src/storage/change/op_with_change_actors.rs diff --git a/automerge/src/storage/chunk.rs b/crates/automerge/src/storage/chunk.rs similarity index 100% rename from automerge/src/storage/chunk.rs rename to crates/automerge/src/storage/chunk.rs diff --git a/automerge/src/storage/columns.rs b/crates/automerge/src/storage/columns.rs similarity index 100% rename from automerge/src/storage/columns.rs rename to crates/automerge/src/storage/columns.rs diff --git a/automerge/src/storage/columns/column.rs b/crates/automerge/src/storage/columns/column.rs similarity index 100% rename from automerge/src/storage/columns/column.rs rename to crates/automerge/src/storage/columns/column.rs diff --git a/automerge/src/storage/columns/column_builder.rs b/crates/automerge/src/storage/columns/column_builder.rs similarity index 100% rename from automerge/src/storage/columns/column_builder.rs rename to crates/automerge/src/storage/columns/column_builder.rs diff --git a/automerge/src/storage/columns/column_specification.rs 
b/crates/automerge/src/storage/columns/column_specification.rs similarity index 100% rename from automerge/src/storage/columns/column_specification.rs rename to crates/automerge/src/storage/columns/column_specification.rs diff --git a/automerge/src/storage/columns/raw_column.rs b/crates/automerge/src/storage/columns/raw_column.rs similarity index 100% rename from automerge/src/storage/columns/raw_column.rs rename to crates/automerge/src/storage/columns/raw_column.rs diff --git a/automerge/src/storage/convert.rs b/crates/automerge/src/storage/convert.rs similarity index 100% rename from automerge/src/storage/convert.rs rename to crates/automerge/src/storage/convert.rs diff --git a/automerge/src/storage/convert/op_as_changeop.rs b/crates/automerge/src/storage/convert/op_as_changeop.rs similarity index 100% rename from automerge/src/storage/convert/op_as_changeop.rs rename to crates/automerge/src/storage/convert/op_as_changeop.rs diff --git a/automerge/src/storage/convert/op_as_docop.rs b/crates/automerge/src/storage/convert/op_as_docop.rs similarity index 100% rename from automerge/src/storage/convert/op_as_docop.rs rename to crates/automerge/src/storage/convert/op_as_docop.rs diff --git a/automerge/src/storage/document.rs b/crates/automerge/src/storage/document.rs similarity index 100% rename from automerge/src/storage/document.rs rename to crates/automerge/src/storage/document.rs diff --git a/automerge/src/storage/document/compression.rs b/crates/automerge/src/storage/document/compression.rs similarity index 100% rename from automerge/src/storage/document/compression.rs rename to crates/automerge/src/storage/document/compression.rs diff --git a/automerge/src/storage/document/doc_change_columns.rs b/crates/automerge/src/storage/document/doc_change_columns.rs similarity index 100% rename from automerge/src/storage/document/doc_change_columns.rs rename to crates/automerge/src/storage/document/doc_change_columns.rs diff --git 
a/automerge/src/storage/document/doc_op_columns.rs b/crates/automerge/src/storage/document/doc_op_columns.rs similarity index 100% rename from automerge/src/storage/document/doc_op_columns.rs rename to crates/automerge/src/storage/document/doc_op_columns.rs diff --git a/automerge/src/storage/load.rs b/crates/automerge/src/storage/load.rs similarity index 100% rename from automerge/src/storage/load.rs rename to crates/automerge/src/storage/load.rs diff --git a/automerge/src/storage/load/change_collector.rs b/crates/automerge/src/storage/load/change_collector.rs similarity index 100% rename from automerge/src/storage/load/change_collector.rs rename to crates/automerge/src/storage/load/change_collector.rs diff --git a/automerge/src/storage/load/reconstruct_document.rs b/crates/automerge/src/storage/load/reconstruct_document.rs similarity index 100% rename from automerge/src/storage/load/reconstruct_document.rs rename to crates/automerge/src/storage/load/reconstruct_document.rs diff --git a/automerge/src/storage/parse.rs b/crates/automerge/src/storage/parse.rs similarity index 100% rename from automerge/src/storage/parse.rs rename to crates/automerge/src/storage/parse.rs diff --git a/automerge/src/storage/parse/leb128.rs b/crates/automerge/src/storage/parse/leb128.rs similarity index 100% rename from automerge/src/storage/parse/leb128.rs rename to crates/automerge/src/storage/parse/leb128.rs diff --git a/automerge/src/storage/save.rs b/crates/automerge/src/storage/save.rs similarity index 100% rename from automerge/src/storage/save.rs rename to crates/automerge/src/storage/save.rs diff --git a/automerge/src/storage/save/document.rs b/crates/automerge/src/storage/save/document.rs similarity index 100% rename from automerge/src/storage/save/document.rs rename to crates/automerge/src/storage/save/document.rs diff --git a/automerge/src/sync.rs b/crates/automerge/src/sync.rs similarity index 100% rename from automerge/src/sync.rs rename to crates/automerge/src/sync.rs diff 
--git a/automerge/src/sync/bloom.rs b/crates/automerge/src/sync/bloom.rs similarity index 100% rename from automerge/src/sync/bloom.rs rename to crates/automerge/src/sync/bloom.rs diff --git a/automerge/src/sync/state.rs b/crates/automerge/src/sync/state.rs similarity index 100% rename from automerge/src/sync/state.rs rename to crates/automerge/src/sync/state.rs diff --git a/automerge/src/transaction.rs b/crates/automerge/src/transaction.rs similarity index 100% rename from automerge/src/transaction.rs rename to crates/automerge/src/transaction.rs diff --git a/automerge/src/transaction/commit.rs b/crates/automerge/src/transaction/commit.rs similarity index 100% rename from automerge/src/transaction/commit.rs rename to crates/automerge/src/transaction/commit.rs diff --git a/automerge/src/transaction/inner.rs b/crates/automerge/src/transaction/inner.rs similarity index 100% rename from automerge/src/transaction/inner.rs rename to crates/automerge/src/transaction/inner.rs diff --git a/automerge/src/transaction/manual_transaction.rs b/crates/automerge/src/transaction/manual_transaction.rs similarity index 100% rename from automerge/src/transaction/manual_transaction.rs rename to crates/automerge/src/transaction/manual_transaction.rs diff --git a/automerge/src/transaction/observation.rs b/crates/automerge/src/transaction/observation.rs similarity index 100% rename from automerge/src/transaction/observation.rs rename to crates/automerge/src/transaction/observation.rs diff --git a/automerge/src/transaction/result.rs b/crates/automerge/src/transaction/result.rs similarity index 100% rename from automerge/src/transaction/result.rs rename to crates/automerge/src/transaction/result.rs diff --git a/automerge/src/transaction/transactable.rs b/crates/automerge/src/transaction/transactable.rs similarity index 100% rename from automerge/src/transaction/transactable.rs rename to crates/automerge/src/transaction/transactable.rs diff --git a/automerge/src/types.rs 
b/crates/automerge/src/types.rs similarity index 100% rename from automerge/src/types.rs rename to crates/automerge/src/types.rs diff --git a/automerge/src/types/opids.rs b/crates/automerge/src/types/opids.rs similarity index 100% rename from automerge/src/types/opids.rs rename to crates/automerge/src/types/opids.rs diff --git a/automerge/src/value.rs b/crates/automerge/src/value.rs similarity index 100% rename from automerge/src/value.rs rename to crates/automerge/src/value.rs diff --git a/automerge/src/values.rs b/crates/automerge/src/values.rs similarity index 100% rename from automerge/src/values.rs rename to crates/automerge/src/values.rs diff --git a/automerge/src/visualisation.rs b/crates/automerge/src/visualisation.rs similarity index 100% rename from automerge/src/visualisation.rs rename to crates/automerge/src/visualisation.rs diff --git a/automerge/tests/helpers/mod.rs b/crates/automerge/tests/helpers/mod.rs similarity index 100% rename from automerge/tests/helpers/mod.rs rename to crates/automerge/tests/helpers/mod.rs diff --git a/automerge/tests/test.rs b/crates/automerge/tests/test.rs similarity index 100% rename from automerge/tests/test.rs rename to crates/automerge/tests/test.rs diff --git a/edit-trace/.gitignore b/crates/edit-trace/.gitignore similarity index 100% rename from edit-trace/.gitignore rename to crates/edit-trace/.gitignore diff --git a/edit-trace/Cargo.toml b/crates/edit-trace/Cargo.toml similarity index 100% rename from edit-trace/Cargo.toml rename to crates/edit-trace/Cargo.toml diff --git a/edit-trace/Makefile b/crates/edit-trace/Makefile similarity index 100% rename from edit-trace/Makefile rename to crates/edit-trace/Makefile diff --git a/edit-trace/README.md b/crates/edit-trace/README.md similarity index 100% rename from edit-trace/README.md rename to crates/edit-trace/README.md diff --git a/edit-trace/automerge-1.0.js b/crates/edit-trace/automerge-1.0.js similarity index 100% rename from edit-trace/automerge-1.0.js rename to 
crates/edit-trace/automerge-1.0.js diff --git a/edit-trace/automerge-js.js b/crates/edit-trace/automerge-js.js similarity index 100% rename from edit-trace/automerge-js.js rename to crates/edit-trace/automerge-js.js diff --git a/edit-trace/automerge-rs.js b/crates/edit-trace/automerge-rs.js similarity index 100% rename from edit-trace/automerge-rs.js rename to crates/edit-trace/automerge-rs.js diff --git a/edit-trace/automerge-wasm.js b/crates/edit-trace/automerge-wasm.js similarity index 100% rename from edit-trace/automerge-wasm.js rename to crates/edit-trace/automerge-wasm.js diff --git a/edit-trace/baseline.js b/crates/edit-trace/baseline.js similarity index 100% rename from edit-trace/baseline.js rename to crates/edit-trace/baseline.js diff --git a/edit-trace/benches/main.rs b/crates/edit-trace/benches/main.rs similarity index 100% rename from edit-trace/benches/main.rs rename to crates/edit-trace/benches/main.rs diff --git a/edit-trace/editing-trace.js b/crates/edit-trace/editing-trace.js similarity index 100% rename from edit-trace/editing-trace.js rename to crates/edit-trace/editing-trace.js diff --git a/edit-trace/edits.json b/crates/edit-trace/edits.json similarity index 100% rename from edit-trace/edits.json rename to crates/edit-trace/edits.json diff --git a/edit-trace/package.json b/crates/edit-trace/package.json similarity index 100% rename from edit-trace/package.json rename to crates/edit-trace/package.json diff --git a/edit-trace/src/main.rs b/crates/edit-trace/src/main.rs similarity index 100% rename from edit-trace/src/main.rs rename to crates/edit-trace/src/main.rs diff --git a/scripts/ci/cmake-build b/scripts/ci/cmake-build index 41357caa..1234993c 100755 --- a/scripts/ci/cmake-build +++ b/scripts/ci/cmake-build @@ -11,7 +11,7 @@ if [ "${LIB_TYPE,,}" == "shared" ]; then else SHARED_TOGGLE="OFF" fi -C_PROJECT=$THIS_SCRIPT/../../automerge-c; +C_PROJECT=$THIS_SCRIPT/../../crates/automerge-c; mkdir -p $C_PROJECT/build; cd $C_PROJECT/build; cmake 
--log-level=ERROR -B . -S .. -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DBUILD_SHARED_LIBS=$SHARED_TOGGLE; diff --git a/scripts/ci/cmake-docs b/scripts/ci/cmake-docs index 7f29a311..25ec7e10 100755 --- a/scripts/ci/cmake-docs +++ b/scripts/ci/cmake-docs @@ -2,9 +2,9 @@ set -eoux pipefail -mkdir -p automerge-c/build -cd automerge-c/build +mkdir -p crates/automerge-c/build +cd crates/automerge-c/build cmake -B . -S .. -DBUILD_TESTING=OFF cmake --build . --target automerge_docs -echo "Try opening automerge-c/build/src/html/index.html" +echo "Try opening crates/automerge-c/build/src/html/index.html" diff --git a/scripts/ci/js_tests b/scripts/ci/js_tests index 3813de7a..bc945243 100755 --- a/scripts/ci/js_tests +++ b/scripts/ci/js_tests @@ -1,7 +1,7 @@ set -e THIS_SCRIPT=$(dirname "$0"); -WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; +WASM_PROJECT=$THIS_SCRIPT/../../crates/automerge-wasm; JS_PROJECT=$THIS_SCRIPT/../../automerge-js; E2E_PROJECT=$THIS_SCRIPT/../../automerge-js/e2e; diff --git a/scripts/ci/wasm_tests b/scripts/ci/wasm_tests index 778e1e1f..51f4c4ab 100755 --- a/scripts/ci/wasm_tests +++ b/scripts/ci/wasm_tests @@ -1,5 +1,5 @@ THIS_SCRIPT=$(dirname "$0"); -WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; +WASM_PROJECT=$THIS_SCRIPT/../../crates/automerge-wasm; yarn --cwd $WASM_PROJECT install; yarn --cwd $WASM_PROJECT build; From a7a4bd42f1ae18c2b6f53a16db098b17cf6832ff Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 11 Oct 2022 17:48:27 +0100 Subject: [PATCH 595/730] Move automerge-js -> wrappers/javascript Whilst we only have one wrapper library, we anticipate more. Furthermore, the naming of the `wrappers` directory makes it clear what the role of the JS codebase is. 
--- scripts/ci/js_tests | 6 +++--- {automerge-js => wrappers/javascript}/.eslintignore | 0 {automerge-js => wrappers/javascript}/.eslintrc.cjs | 0 {automerge-js => wrappers/javascript}/.gitignore | 0 {automerge-js => wrappers/javascript}/LICENSE | 0 {automerge-js => wrappers/javascript}/README.md | 0 .../javascript}/config/cjs.json | 0 .../javascript}/config/mjs.json | 0 .../javascript}/e2e/.gitignore | 0 {automerge-js => wrappers/javascript}/e2e/README.md | 0 {automerge-js => wrappers/javascript}/e2e/index.ts | 6 +++--- .../javascript}/e2e/package.json | 0 .../javascript}/e2e/tsconfig.json | 0 .../javascript}/e2e/verdaccio.yaml | 0 {automerge-js => wrappers/javascript}/e2e/yarn.lock | 0 .../examples/create-react-app/.gitignore | 0 .../javascript}/examples/create-react-app/README.md | 0 .../examples/create-react-app/craco.config.js | 0 .../examples/create-react-app/package.json | 0 .../examples/create-react-app/public/favicon.ico | Bin .../examples/create-react-app/public/index.html | 0 .../examples/create-react-app/public/logo192.png | Bin .../examples/create-react-app/public/logo512.png | Bin .../examples/create-react-app/public/manifest.json | 0 .../examples/create-react-app/public/robots.txt | 0 .../examples/create-react-app/src/App.css | 0 .../examples/create-react-app/src/App.js | 2 +- .../examples/create-react-app/src/App.test.js | 0 .../examples/create-react-app/src/index.css | 0 .../examples/create-react-app/src/index.js | 0 .../examples/create-react-app/src/logo.svg | 0 .../create-react-app/src/reportWebVitals.js | 0 .../examples/create-react-app/src/setupTests.js | 0 .../javascript}/examples/create-react-app/yarn.lock | 0 .../javascript}/examples/vite/.gitignore | 0 .../javascript}/examples/vite/README.md | 0 .../javascript}/examples/vite/index.html | 0 .../javascript}/examples/vite/main.ts | 0 .../javascript}/examples/vite/package.json | 0 .../javascript}/examples/vite/public/vite.svg | 0 .../javascript}/examples/vite/src/counter.ts | 0 
.../javascript}/examples/vite/src/main.ts | 2 +- .../javascript}/examples/vite/src/style.css | 0 .../javascript}/examples/vite/src/typescript.svg | 0 .../javascript}/examples/vite/src/vite-env.d.ts | 0 .../javascript}/examples/vite/tsconfig.json | 0 .../javascript}/examples/vite/vite.config.js | 0 .../javascript}/examples/webpack/.gitignore | 0 .../javascript}/examples/webpack/README.md | 0 .../javascript}/examples/webpack/package.json | 0 .../javascript}/examples/webpack/public/index.html | 0 .../javascript}/examples/webpack/src/index.js | 2 +- .../javascript}/examples/webpack/webpack.config.js | 0 {automerge-js => wrappers/javascript}/package.json | 2 +- .../javascript}/src/constants.ts | 0 .../javascript}/src/counter.ts | 0 {automerge-js => wrappers/javascript}/src/index.ts | 0 .../javascript}/src/low_level.ts | 0 .../javascript}/src/numbers.ts | 0 .../javascript}/src/proxies.ts | 0 {automerge-js => wrappers/javascript}/src/text.ts | 0 {automerge-js => wrappers/javascript}/src/types.ts | 0 {automerge-js => wrappers/javascript}/src/uuid.ts | 0 .../javascript}/test/basic_test.ts | 0 .../javascript}/test/columnar_test.ts | 0 .../javascript}/test/helpers.ts | 0 .../javascript}/test/legacy/columnar.js | 0 .../javascript}/test/legacy/common.js | 0 .../javascript}/test/legacy/encoding.js | 0 .../javascript}/test/legacy/sync.js | 0 .../javascript}/test/legacy_tests.ts | 0 .../javascript}/test/sync_test.ts | 0 .../javascript}/test/text_test.ts | 0 .../javascript}/test/uuid_test.ts | 0 {automerge-js => wrappers/javascript}/tsconfig.json | 0 {automerge-js => wrappers/javascript}/tslint.json | 0 76 files changed, 10 insertions(+), 10 deletions(-) rename {automerge-js => wrappers/javascript}/.eslintignore (100%) rename {automerge-js => wrappers/javascript}/.eslintrc.cjs (100%) rename {automerge-js => wrappers/javascript}/.gitignore (100%) rename {automerge-js => wrappers/javascript}/LICENSE (100%) rename {automerge-js => wrappers/javascript}/README.md (100%) rename 
{automerge-js => wrappers/javascript}/config/cjs.json (100%) rename {automerge-js => wrappers/javascript}/config/mjs.json (100%) rename {automerge-js => wrappers/javascript}/e2e/.gitignore (100%) rename {automerge-js => wrappers/javascript}/e2e/README.md (100%) rename {automerge-js => wrappers/javascript}/e2e/index.ts (98%) rename {automerge-js => wrappers/javascript}/e2e/package.json (100%) rename {automerge-js => wrappers/javascript}/e2e/tsconfig.json (100%) rename {automerge-js => wrappers/javascript}/e2e/verdaccio.yaml (100%) rename {automerge-js => wrappers/javascript}/e2e/yarn.lock (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/.gitignore (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/README.md (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/craco.config.js (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/package.json (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/public/favicon.ico (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/public/index.html (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/public/logo192.png (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/public/logo512.png (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/public/manifest.json (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/public/robots.txt (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/src/App.css (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/src/App.js (85%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/src/App.test.js (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/src/index.css (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/src/index.js 
(100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/src/logo.svg (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/src/reportWebVitals.js (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/src/setupTests.js (100%) rename {automerge-js => wrappers/javascript}/examples/create-react-app/yarn.lock (100%) rename {automerge-js => wrappers/javascript}/examples/vite/.gitignore (100%) rename {automerge-js => wrappers/javascript}/examples/vite/README.md (100%) rename {automerge-js => wrappers/javascript}/examples/vite/index.html (100%) rename {automerge-js => wrappers/javascript}/examples/vite/main.ts (100%) rename {automerge-js => wrappers/javascript}/examples/vite/package.json (100%) rename {automerge-js => wrappers/javascript}/examples/vite/public/vite.svg (100%) rename {automerge-js => wrappers/javascript}/examples/vite/src/counter.ts (100%) rename {automerge-js => wrappers/javascript}/examples/vite/src/main.ts (98%) rename {automerge-js => wrappers/javascript}/examples/vite/src/style.css (100%) rename {automerge-js => wrappers/javascript}/examples/vite/src/typescript.svg (100%) rename {automerge-js => wrappers/javascript}/examples/vite/src/vite-env.d.ts (100%) rename {automerge-js => wrappers/javascript}/examples/vite/tsconfig.json (100%) rename {automerge-js => wrappers/javascript}/examples/vite/vite.config.js (100%) rename {automerge-js => wrappers/javascript}/examples/webpack/.gitignore (100%) rename {automerge-js => wrappers/javascript}/examples/webpack/README.md (100%) rename {automerge-js => wrappers/javascript}/examples/webpack/package.json (100%) rename {automerge-js => wrappers/javascript}/examples/webpack/public/index.html (100%) rename {automerge-js => wrappers/javascript}/examples/webpack/src/index.js (86%) rename {automerge-js => wrappers/javascript}/examples/webpack/webpack.config.js (100%) rename {automerge-js => wrappers/javascript}/package.json (98%) rename {automerge-js => 
wrappers/javascript}/src/constants.ts (100%) rename {automerge-js => wrappers/javascript}/src/counter.ts (100%) rename {automerge-js => wrappers/javascript}/src/index.ts (100%) rename {automerge-js => wrappers/javascript}/src/low_level.ts (100%) rename {automerge-js => wrappers/javascript}/src/numbers.ts (100%) rename {automerge-js => wrappers/javascript}/src/proxies.ts (100%) rename {automerge-js => wrappers/javascript}/src/text.ts (100%) rename {automerge-js => wrappers/javascript}/src/types.ts (100%) rename {automerge-js => wrappers/javascript}/src/uuid.ts (100%) rename {automerge-js => wrappers/javascript}/test/basic_test.ts (100%) rename {automerge-js => wrappers/javascript}/test/columnar_test.ts (100%) rename {automerge-js => wrappers/javascript}/test/helpers.ts (100%) rename {automerge-js => wrappers/javascript}/test/legacy/columnar.js (100%) rename {automerge-js => wrappers/javascript}/test/legacy/common.js (100%) rename {automerge-js => wrappers/javascript}/test/legacy/encoding.js (100%) rename {automerge-js => wrappers/javascript}/test/legacy/sync.js (100%) rename {automerge-js => wrappers/javascript}/test/legacy_tests.ts (100%) rename {automerge-js => wrappers/javascript}/test/sync_test.ts (100%) rename {automerge-js => wrappers/javascript}/test/text_test.ts (100%) rename {automerge-js => wrappers/javascript}/test/uuid_test.ts (100%) rename {automerge-js => wrappers/javascript}/tsconfig.json (100%) rename {automerge-js => wrappers/javascript}/tslint.json (100%) diff --git a/scripts/ci/js_tests b/scripts/ci/js_tests index bc945243..ef169d0c 100755 --- a/scripts/ci/js_tests +++ b/scripts/ci/js_tests @@ -2,12 +2,12 @@ set -e THIS_SCRIPT=$(dirname "$0"); WASM_PROJECT=$THIS_SCRIPT/../../crates/automerge-wasm; -JS_PROJECT=$THIS_SCRIPT/../../automerge-js; -E2E_PROJECT=$THIS_SCRIPT/../../automerge-js/e2e; +JS_PROJECT=$THIS_SCRIPT/../../wrappers/javascript; +E2E_PROJECT=$THIS_SCRIPT/../../wrappers/javascript/e2e; yarn --cwd $E2E_PROJECT install; # This will build 
the automerge-wasm project, publish it to a local NPM -# repository, then run `yarn build` in the `automerge-js` directory with +# repository, then run `yarn build` in the `wrappers/javascript` directory with # the local registry yarn --cwd $E2E_PROJECT e2e buildjs; yarn --cwd $JS_PROJECT test diff --git a/automerge-js/.eslintignore b/wrappers/javascript/.eslintignore similarity index 100% rename from automerge-js/.eslintignore rename to wrappers/javascript/.eslintignore diff --git a/automerge-js/.eslintrc.cjs b/wrappers/javascript/.eslintrc.cjs similarity index 100% rename from automerge-js/.eslintrc.cjs rename to wrappers/javascript/.eslintrc.cjs diff --git a/automerge-js/.gitignore b/wrappers/javascript/.gitignore similarity index 100% rename from automerge-js/.gitignore rename to wrappers/javascript/.gitignore diff --git a/automerge-js/LICENSE b/wrappers/javascript/LICENSE similarity index 100% rename from automerge-js/LICENSE rename to wrappers/javascript/LICENSE diff --git a/automerge-js/README.md b/wrappers/javascript/README.md similarity index 100% rename from automerge-js/README.md rename to wrappers/javascript/README.md diff --git a/automerge-js/config/cjs.json b/wrappers/javascript/config/cjs.json similarity index 100% rename from automerge-js/config/cjs.json rename to wrappers/javascript/config/cjs.json diff --git a/automerge-js/config/mjs.json b/wrappers/javascript/config/mjs.json similarity index 100% rename from automerge-js/config/mjs.json rename to wrappers/javascript/config/mjs.json diff --git a/automerge-js/e2e/.gitignore b/wrappers/javascript/e2e/.gitignore similarity index 100% rename from automerge-js/e2e/.gitignore rename to wrappers/javascript/e2e/.gitignore diff --git a/automerge-js/e2e/README.md b/wrappers/javascript/e2e/README.md similarity index 100% rename from automerge-js/e2e/README.md rename to wrappers/javascript/e2e/README.md diff --git a/automerge-js/e2e/index.ts b/wrappers/javascript/e2e/index.ts similarity index 98% rename from 
automerge-js/e2e/index.ts rename to wrappers/javascript/e2e/index.ts index c70aa1f7..641ec2bd 100644 --- a/automerge-js/e2e/index.ts +++ b/wrappers/javascript/e2e/index.ts @@ -9,7 +9,7 @@ import fetch from "node-fetch" const VERDACCIO_DB_PATH = path.normalize(`${__dirname}/verdacciodb`) const VERDACCIO_CONFIG_PATH = path.normalize(`${__dirname}/verdaccio.yaml`) -const AUTOMERGE_WASM_PATH = path.normalize(`${__dirname}/../../crates/automerge-wasm`) +const AUTOMERGE_WASM_PATH = path.normalize(`${__dirname}/../../../crates/automerge-wasm`) const AUTOMERGE_JS_PATH = path.normalize(`${__dirname}/..`) const EXAMPLES_DIR = path.normalize(path.join(__dirname, "../", "examples")) @@ -337,7 +337,7 @@ function printHeader(header: string) { } /** - * Removes the automerge, automerge-wasm, and automerge-js packages from + * Removes the automerge, @automerge/automerge-wasm, and @automerge/automerge packages from * `$packageDir/node_modules` * * This is useful to force refreshing a package by use in combination with @@ -347,7 +347,7 @@ function printHeader(header: string) { * @param packageDir - The directory containing the package.json of the target project */ async function removeExistingAutomerge(packageDir: string) { - await fsPromises.rm(path.join(packageDir, "node_modules", "automerge-wasm"), {recursive: true, force: true}) + await fsPromises.rm(path.join(packageDir, "node_modules", "@automerge"), {recursive: true, force: true}) await fsPromises.rm(path.join(packageDir, "node_modules", "automerge"), {recursive: true, force: true}) } diff --git a/automerge-js/e2e/package.json b/wrappers/javascript/e2e/package.json similarity index 100% rename from automerge-js/e2e/package.json rename to wrappers/javascript/e2e/package.json diff --git a/automerge-js/e2e/tsconfig.json b/wrappers/javascript/e2e/tsconfig.json similarity index 100% rename from automerge-js/e2e/tsconfig.json rename to wrappers/javascript/e2e/tsconfig.json diff --git a/automerge-js/e2e/verdaccio.yaml 
b/wrappers/javascript/e2e/verdaccio.yaml similarity index 100% rename from automerge-js/e2e/verdaccio.yaml rename to wrappers/javascript/e2e/verdaccio.yaml diff --git a/automerge-js/e2e/yarn.lock b/wrappers/javascript/e2e/yarn.lock similarity index 100% rename from automerge-js/e2e/yarn.lock rename to wrappers/javascript/e2e/yarn.lock diff --git a/automerge-js/examples/create-react-app/.gitignore b/wrappers/javascript/examples/create-react-app/.gitignore similarity index 100% rename from automerge-js/examples/create-react-app/.gitignore rename to wrappers/javascript/examples/create-react-app/.gitignore diff --git a/automerge-js/examples/create-react-app/README.md b/wrappers/javascript/examples/create-react-app/README.md similarity index 100% rename from automerge-js/examples/create-react-app/README.md rename to wrappers/javascript/examples/create-react-app/README.md diff --git a/automerge-js/examples/create-react-app/craco.config.js b/wrappers/javascript/examples/create-react-app/craco.config.js similarity index 100% rename from automerge-js/examples/create-react-app/craco.config.js rename to wrappers/javascript/examples/create-react-app/craco.config.js diff --git a/automerge-js/examples/create-react-app/package.json b/wrappers/javascript/examples/create-react-app/package.json similarity index 100% rename from automerge-js/examples/create-react-app/package.json rename to wrappers/javascript/examples/create-react-app/package.json diff --git a/automerge-js/examples/create-react-app/public/favicon.ico b/wrappers/javascript/examples/create-react-app/public/favicon.ico similarity index 100% rename from automerge-js/examples/create-react-app/public/favicon.ico rename to wrappers/javascript/examples/create-react-app/public/favicon.ico diff --git a/automerge-js/examples/create-react-app/public/index.html b/wrappers/javascript/examples/create-react-app/public/index.html similarity index 100% rename from automerge-js/examples/create-react-app/public/index.html rename to 
wrappers/javascript/examples/create-react-app/public/index.html diff --git a/automerge-js/examples/create-react-app/public/logo192.png b/wrappers/javascript/examples/create-react-app/public/logo192.png similarity index 100% rename from automerge-js/examples/create-react-app/public/logo192.png rename to wrappers/javascript/examples/create-react-app/public/logo192.png diff --git a/automerge-js/examples/create-react-app/public/logo512.png b/wrappers/javascript/examples/create-react-app/public/logo512.png similarity index 100% rename from automerge-js/examples/create-react-app/public/logo512.png rename to wrappers/javascript/examples/create-react-app/public/logo512.png diff --git a/automerge-js/examples/create-react-app/public/manifest.json b/wrappers/javascript/examples/create-react-app/public/manifest.json similarity index 100% rename from automerge-js/examples/create-react-app/public/manifest.json rename to wrappers/javascript/examples/create-react-app/public/manifest.json diff --git a/automerge-js/examples/create-react-app/public/robots.txt b/wrappers/javascript/examples/create-react-app/public/robots.txt similarity index 100% rename from automerge-js/examples/create-react-app/public/robots.txt rename to wrappers/javascript/examples/create-react-app/public/robots.txt diff --git a/automerge-js/examples/create-react-app/src/App.css b/wrappers/javascript/examples/create-react-app/src/App.css similarity index 100% rename from automerge-js/examples/create-react-app/src/App.css rename to wrappers/javascript/examples/create-react-app/src/App.css diff --git a/automerge-js/examples/create-react-app/src/App.js b/wrappers/javascript/examples/create-react-app/src/App.js similarity index 85% rename from automerge-js/examples/create-react-app/src/App.js rename to wrappers/javascript/examples/create-react-app/src/App.js index d065911b..fc4805b4 100644 --- a/automerge-js/examples/create-react-app/src/App.js +++ b/wrappers/javascript/examples/create-react-app/src/App.js @@ -3,7 
+3,7 @@ import logo from './logo.svg'; import './App.css'; let doc = Automerge.init() -doc = Automerge.change(doc, (d) => d.hello = "from automerge-js") +doc = Automerge.change(doc, (d) => d.hello = "from automerge") const result = JSON.stringify(doc) diff --git a/automerge-js/examples/create-react-app/src/App.test.js b/wrappers/javascript/examples/create-react-app/src/App.test.js similarity index 100% rename from automerge-js/examples/create-react-app/src/App.test.js rename to wrappers/javascript/examples/create-react-app/src/App.test.js diff --git a/automerge-js/examples/create-react-app/src/index.css b/wrappers/javascript/examples/create-react-app/src/index.css similarity index 100% rename from automerge-js/examples/create-react-app/src/index.css rename to wrappers/javascript/examples/create-react-app/src/index.css diff --git a/automerge-js/examples/create-react-app/src/index.js b/wrappers/javascript/examples/create-react-app/src/index.js similarity index 100% rename from automerge-js/examples/create-react-app/src/index.js rename to wrappers/javascript/examples/create-react-app/src/index.js diff --git a/automerge-js/examples/create-react-app/src/logo.svg b/wrappers/javascript/examples/create-react-app/src/logo.svg similarity index 100% rename from automerge-js/examples/create-react-app/src/logo.svg rename to wrappers/javascript/examples/create-react-app/src/logo.svg diff --git a/automerge-js/examples/create-react-app/src/reportWebVitals.js b/wrappers/javascript/examples/create-react-app/src/reportWebVitals.js similarity index 100% rename from automerge-js/examples/create-react-app/src/reportWebVitals.js rename to wrappers/javascript/examples/create-react-app/src/reportWebVitals.js diff --git a/automerge-js/examples/create-react-app/src/setupTests.js b/wrappers/javascript/examples/create-react-app/src/setupTests.js similarity index 100% rename from automerge-js/examples/create-react-app/src/setupTests.js rename to 
wrappers/javascript/examples/create-react-app/src/setupTests.js diff --git a/automerge-js/examples/create-react-app/yarn.lock b/wrappers/javascript/examples/create-react-app/yarn.lock similarity index 100% rename from automerge-js/examples/create-react-app/yarn.lock rename to wrappers/javascript/examples/create-react-app/yarn.lock diff --git a/automerge-js/examples/vite/.gitignore b/wrappers/javascript/examples/vite/.gitignore similarity index 100% rename from automerge-js/examples/vite/.gitignore rename to wrappers/javascript/examples/vite/.gitignore diff --git a/automerge-js/examples/vite/README.md b/wrappers/javascript/examples/vite/README.md similarity index 100% rename from automerge-js/examples/vite/README.md rename to wrappers/javascript/examples/vite/README.md diff --git a/automerge-js/examples/vite/index.html b/wrappers/javascript/examples/vite/index.html similarity index 100% rename from automerge-js/examples/vite/index.html rename to wrappers/javascript/examples/vite/index.html diff --git a/automerge-js/examples/vite/main.ts b/wrappers/javascript/examples/vite/main.ts similarity index 100% rename from automerge-js/examples/vite/main.ts rename to wrappers/javascript/examples/vite/main.ts diff --git a/automerge-js/examples/vite/package.json b/wrappers/javascript/examples/vite/package.json similarity index 100% rename from automerge-js/examples/vite/package.json rename to wrappers/javascript/examples/vite/package.json diff --git a/automerge-js/examples/vite/public/vite.svg b/wrappers/javascript/examples/vite/public/vite.svg similarity index 100% rename from automerge-js/examples/vite/public/vite.svg rename to wrappers/javascript/examples/vite/public/vite.svg diff --git a/automerge-js/examples/vite/src/counter.ts b/wrappers/javascript/examples/vite/src/counter.ts similarity index 100% rename from automerge-js/examples/vite/src/counter.ts rename to wrappers/javascript/examples/vite/src/counter.ts diff --git a/automerge-js/examples/vite/src/main.ts 
b/wrappers/javascript/examples/vite/src/main.ts similarity index 98% rename from automerge-js/examples/vite/src/main.ts rename to wrappers/javascript/examples/vite/src/main.ts index 69378eca..8f7551d5 100644 --- a/automerge-js/examples/vite/src/main.ts +++ b/wrappers/javascript/examples/vite/src/main.ts @@ -3,7 +3,7 @@ import * as Automerge from "@automerge/automerge" // hello world code that will run correctly on web or node let doc = Automerge.init() -doc = Automerge.change(doc, (d: any) => d.hello = "from automerge-js") +doc = Automerge.change(doc, (d: any) => d.hello = "from automerge") const result = JSON.stringify(doc) if (typeof document !== 'undefined') { diff --git a/automerge-js/examples/vite/src/style.css b/wrappers/javascript/examples/vite/src/style.css similarity index 100% rename from automerge-js/examples/vite/src/style.css rename to wrappers/javascript/examples/vite/src/style.css diff --git a/automerge-js/examples/vite/src/typescript.svg b/wrappers/javascript/examples/vite/src/typescript.svg similarity index 100% rename from automerge-js/examples/vite/src/typescript.svg rename to wrappers/javascript/examples/vite/src/typescript.svg diff --git a/automerge-js/examples/vite/src/vite-env.d.ts b/wrappers/javascript/examples/vite/src/vite-env.d.ts similarity index 100% rename from automerge-js/examples/vite/src/vite-env.d.ts rename to wrappers/javascript/examples/vite/src/vite-env.d.ts diff --git a/automerge-js/examples/vite/tsconfig.json b/wrappers/javascript/examples/vite/tsconfig.json similarity index 100% rename from automerge-js/examples/vite/tsconfig.json rename to wrappers/javascript/examples/vite/tsconfig.json diff --git a/automerge-js/examples/vite/vite.config.js b/wrappers/javascript/examples/vite/vite.config.js similarity index 100% rename from automerge-js/examples/vite/vite.config.js rename to wrappers/javascript/examples/vite/vite.config.js diff --git a/automerge-js/examples/webpack/.gitignore 
b/wrappers/javascript/examples/webpack/.gitignore similarity index 100% rename from automerge-js/examples/webpack/.gitignore rename to wrappers/javascript/examples/webpack/.gitignore diff --git a/automerge-js/examples/webpack/README.md b/wrappers/javascript/examples/webpack/README.md similarity index 100% rename from automerge-js/examples/webpack/README.md rename to wrappers/javascript/examples/webpack/README.md diff --git a/automerge-js/examples/webpack/package.json b/wrappers/javascript/examples/webpack/package.json similarity index 100% rename from automerge-js/examples/webpack/package.json rename to wrappers/javascript/examples/webpack/package.json diff --git a/automerge-js/examples/webpack/public/index.html b/wrappers/javascript/examples/webpack/public/index.html similarity index 100% rename from automerge-js/examples/webpack/public/index.html rename to wrappers/javascript/examples/webpack/public/index.html diff --git a/automerge-js/examples/webpack/src/index.js b/wrappers/javascript/examples/webpack/src/index.js similarity index 86% rename from automerge-js/examples/webpack/src/index.js rename to wrappers/javascript/examples/webpack/src/index.js index 4503532c..e3307083 100644 --- a/automerge-js/examples/webpack/src/index.js +++ b/wrappers/javascript/examples/webpack/src/index.js @@ -3,7 +3,7 @@ import * as Automerge from "@automerge/automerge" // hello world code that will run correctly on web or node let doc = Automerge.init() -doc = Automerge.change(doc, (d) => d.hello = "from automerge-js") +doc = Automerge.change(doc, (d) => d.hello = "from automerge") const result = JSON.stringify(doc) if (typeof document !== 'undefined') { diff --git a/automerge-js/examples/webpack/webpack.config.js b/wrappers/javascript/examples/webpack/webpack.config.js similarity index 100% rename from automerge-js/examples/webpack/webpack.config.js rename to wrappers/javascript/examples/webpack/webpack.config.js diff --git a/automerge-js/package.json 
b/wrappers/javascript/package.json similarity index 98% rename from automerge-js/package.json rename to wrappers/javascript/package.json index 877d354c..95f58680 100644 --- a/automerge-js/package.json +++ b/wrappers/javascript/package.json @@ -6,7 +6,7 @@ ], "version": "2.0.0-alpha.4", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", - "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", + "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", "files": [ "README.md", diff --git a/automerge-js/src/constants.ts b/wrappers/javascript/src/constants.ts similarity index 100% rename from automerge-js/src/constants.ts rename to wrappers/javascript/src/constants.ts diff --git a/automerge-js/src/counter.ts b/wrappers/javascript/src/counter.ts similarity index 100% rename from automerge-js/src/counter.ts rename to wrappers/javascript/src/counter.ts diff --git a/automerge-js/src/index.ts b/wrappers/javascript/src/index.ts similarity index 100% rename from automerge-js/src/index.ts rename to wrappers/javascript/src/index.ts diff --git a/automerge-js/src/low_level.ts b/wrappers/javascript/src/low_level.ts similarity index 100% rename from automerge-js/src/low_level.ts rename to wrappers/javascript/src/low_level.ts diff --git a/automerge-js/src/numbers.ts b/wrappers/javascript/src/numbers.ts similarity index 100% rename from automerge-js/src/numbers.ts rename to wrappers/javascript/src/numbers.ts diff --git a/automerge-js/src/proxies.ts b/wrappers/javascript/src/proxies.ts similarity index 100% rename from automerge-js/src/proxies.ts rename to wrappers/javascript/src/proxies.ts diff --git a/automerge-js/src/text.ts b/wrappers/javascript/src/text.ts similarity index 100% rename from automerge-js/src/text.ts rename to wrappers/javascript/src/text.ts diff --git a/automerge-js/src/types.ts b/wrappers/javascript/src/types.ts similarity 
index 100% rename from automerge-js/src/types.ts rename to wrappers/javascript/src/types.ts diff --git a/automerge-js/src/uuid.ts b/wrappers/javascript/src/uuid.ts similarity index 100% rename from automerge-js/src/uuid.ts rename to wrappers/javascript/src/uuid.ts diff --git a/automerge-js/test/basic_test.ts b/wrappers/javascript/test/basic_test.ts similarity index 100% rename from automerge-js/test/basic_test.ts rename to wrappers/javascript/test/basic_test.ts diff --git a/automerge-js/test/columnar_test.ts b/wrappers/javascript/test/columnar_test.ts similarity index 100% rename from automerge-js/test/columnar_test.ts rename to wrappers/javascript/test/columnar_test.ts diff --git a/automerge-js/test/helpers.ts b/wrappers/javascript/test/helpers.ts similarity index 100% rename from automerge-js/test/helpers.ts rename to wrappers/javascript/test/helpers.ts diff --git a/automerge-js/test/legacy/columnar.js b/wrappers/javascript/test/legacy/columnar.js similarity index 100% rename from automerge-js/test/legacy/columnar.js rename to wrappers/javascript/test/legacy/columnar.js diff --git a/automerge-js/test/legacy/common.js b/wrappers/javascript/test/legacy/common.js similarity index 100% rename from automerge-js/test/legacy/common.js rename to wrappers/javascript/test/legacy/common.js diff --git a/automerge-js/test/legacy/encoding.js b/wrappers/javascript/test/legacy/encoding.js similarity index 100% rename from automerge-js/test/legacy/encoding.js rename to wrappers/javascript/test/legacy/encoding.js diff --git a/automerge-js/test/legacy/sync.js b/wrappers/javascript/test/legacy/sync.js similarity index 100% rename from automerge-js/test/legacy/sync.js rename to wrappers/javascript/test/legacy/sync.js diff --git a/automerge-js/test/legacy_tests.ts b/wrappers/javascript/test/legacy_tests.ts similarity index 100% rename from automerge-js/test/legacy_tests.ts rename to wrappers/javascript/test/legacy_tests.ts diff --git a/automerge-js/test/sync_test.ts 
b/wrappers/javascript/test/sync_test.ts similarity index 100% rename from automerge-js/test/sync_test.ts rename to wrappers/javascript/test/sync_test.ts diff --git a/automerge-js/test/text_test.ts b/wrappers/javascript/test/text_test.ts similarity index 100% rename from automerge-js/test/text_test.ts rename to wrappers/javascript/test/text_test.ts diff --git a/automerge-js/test/uuid_test.ts b/wrappers/javascript/test/uuid_test.ts similarity index 100% rename from automerge-js/test/uuid_test.ts rename to wrappers/javascript/test/uuid_test.ts diff --git a/automerge-js/tsconfig.json b/wrappers/javascript/tsconfig.json similarity index 100% rename from automerge-js/tsconfig.json rename to wrappers/javascript/tsconfig.json diff --git a/automerge-js/tslint.json b/wrappers/javascript/tslint.json similarity index 100% rename from automerge-js/tslint.json rename to wrappers/javascript/tslint.json From 660678d038959142e7a3d3757598692ee61025e4 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 11 Oct 2022 17:49:47 +0100 Subject: [PATCH 596/730] remove unneeded files --- Makefile | 20 -------------------- TODO.md | 32 -------------------------------- 2 files changed, 52 deletions(-) delete mode 100644 Makefile delete mode 100644 TODO.md diff --git a/Makefile b/Makefile deleted file mode 100644 index a1f3fd62..00000000 --- a/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -.PHONY: rust -rust: - cd automerge && cargo test - -.PHONY: wasm -wasm: - cd automerge-wasm && yarn - cd automerge-wasm && yarn build - cd automerge-wasm && yarn test - cd automerge-wasm && yarn link - -.PHONY: js -js: wasm - cd automerge-js && yarn - cd automerge-js && yarn link "automerge-wasm" - cd automerge-js && yarn test - -.PHONY: clean -clean: - git clean -x -d -f diff --git a/TODO.md b/TODO.md deleted file mode 100644 index 646c0c20..00000000 --- a/TODO.md +++ /dev/null @@ -1,32 +0,0 @@ -### next steps: - 1. C API - 2. port rust command line tool - 3. fast load - -### ergonomics: - 1. 
value() -> () or something that into's a value - -### automerge: - 1. single pass (fast) load - 2. micro-patches / bare bones observation API / fully hydrated documents - -### future: - 1. handle columns with unknown data in and out - 2. branches with different indexes - -### Peritext - 1. add mark / remove mark -- type, start/end elemid (inclusive,exclusive) - 2. track any formatting ops that start or end on a character - 3. ops right before the character, ops right after that character - 4. query a single character - character, plus marks that start or end on that character - what is its current formatting, - what are the ops that include that in their span, - None = same as last time, Set( bold, italic ), - keep these on index - 5. op probably belongs with the start character - possible packed at the beginning or end of the list - -### maybe: - 1. tables - -### no: - 1. cursors From 4c17fd9c0030ac9de5a4b4a1ba812f9229113cc0 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 11 Oct 2022 17:44:18 +0100 Subject: [PATCH 597/730] Update README We're making this project the primary implementation of automerge. Update the README to provide more context and signpost other resources. --- README.md | 148 ++++++++++++++++++++++++------------------------------ 1 file changed, 66 insertions(+), 82 deletions(-) diff --git a/README.md b/README.md index 64b0f9b7..fcfe4da7 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Automerge RS +# Automerge Automerge logo @@ -7,103 +7,87 @@ [![ci](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml) [![docs](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml) -This is a Rust library implementation of the [Automerge](https://github.com/automerge/automerge) file format and network protocol. 
Its focus is to support the creation of Automerge implementations in other languages, currently; WASM, JS and C. A `libautomerge` if you will. +Automerge is a library which provides fast implementations of several different +CRDTs, a compact compression format for these CRDTs, and a sync protocol for +efficiently transmitting those changes over the network. The objective of the +project is to support [local-first](https://www.inkandswitch.com/local-first/) applications in the same way that relational +databases support server applications - by providing mechanisms for persistence +which allow application developers to avoid thinking about hard distributed +computing problems. Automerge aims to be PostgreSQL for your local-first app. -The original [Automerge](https://github.com/automerge/automerge) project (written in JS from the ground up) is still very much maintained and recommended. Indeed it is because of the success of that project that the next stage of Automerge is being explored here. Hopefully Rust can offer a more performant and scalable Automerge, opening up even more use cases. +If you're looking for documentation on the JavaScript implementation take a look +at https://automerge.org/docs/hello/. There are other implementations in both +Rust and C, but they are earlier and don't have documentation yet. You can find +them in `crates/automerge` and `crates/automerge-c` if you are comfortable +reading the code and tests to figure out how to use them. + +If you're familiar with CRDTs and interested in the design of Automerge in +particular take a look at https://automerge.org/docs/how-it-works/backend/ + +Finally, if you want to talk to us about this project please [join the +Slack](https://join.slack.com/t/automerge/shared_invite/zt-1ho1ieas2-DnWZcRR82BRu65vCD4t3Xw) ## Status -The project has 5 components: +This project is formed of a core Rust implementation which is exposed via FFI in +javascript+WASM, C, and soon other languages. 
Alex +([@alexjg](https://github.com/alexjg/)]) is working full time on maintaining +automerge, other members of Ink and Switch are also contributing time and there +are several other maintainers. The focus is currently on shipping the new JS +package. We expect to be iterating the API and adding new features over the next +six months so there will likely be several major version bumps in all packages +in that time. -1. [_automerge_](automerge) - The main Rust implementation of the library. -2. [_automerge-wasm_](automerge-wasm) - A JS/WASM interface to the underlying Rust library. This API is generally mature and in use in a handful of projects. -3. [_automerge-js_](automerge-js) - This is a Javascript library using the WASM interface to export the same public API of the primary Automerge project. Currently this project passes all of Automerge's tests but has not been used in any real project or packaged as an NPM. Alpha testers welcome. -4. [_automerge-c_](automerge-c) - This is a C library intended to be an FFI integration point for all other languages. It is currently a work in progress and not yet ready for any testing. -5. [_automerge-cli_](automerge-cli) - An experimental CLI wrapper around the Rust library. Currently not functional. +In general we try and respect semver. -## How? +### JavaScript -The magic of the architecture is built around the `OpTree`. This is a data structure -which supports efficiently inserting new operations and realising values of -existing operations. Most interactions with the `OpTree` are in the form of -implementations of `TreeQuery` - a trait which can be used to traverse the -`OpTree` and producing state of some kind. User facing operations are exposed on -an `Automerge` object, under the covers these operations typically instantiate -some `TreeQuery` and run it over the `OpTree`. +An alpha release of the javascript package is currently available as +`@automerge/automerge@2.0.0-alpha.n` where `n` is an integer. 
We are gathering +feedback on the API and looking to release a `2.0.0` in the next few weeks. -## Development +### Rust -Please feel free to open issues and pull requests. +The rust codebase is currently oriented around producing a performant backend +for the Javascript wrapper and as such the API for Rust code is low level and +not well documented. We will be returning to this over the next few months but +for now you will need to be comfortable reading the tests and asking questions +to figure out how to use it. -### Running CI -The steps CI will run are all defined in `./scripts/ci`. Obviously CI will run -everything when you submit a PR, but if you want to run everything locally -before you push you can run `./scripts/ci/run` to run everything. +## Repository Organisation -### Running the JS tests +* `./crates` - the crates which make up the rust implementation and also the + Rust components of platform specific wrappers (e.g. `automerge-wasm` for the + WASM API or `automerge-c` for the C FFI bindings) +* `./wrappers` - code for specific languages which wraps the FFI interface in a + more idiomatic API (e.g. `wrappers/javascript`) +* `./scripts` - scripts which are useful to maintenance of the repository. + This includes the scripts which are run in CI. +* `./img` - static assets for use in `.md` files -You will need to have [node](https://nodejs.org/en/), [yarn](https://yarnpkg.com/getting-started/install), [rust](https://rustup.rs/) and [wasm-pack](https://rustwasm.github.io/wasm-pack/installer/) installed. +This repository contains the primary implementation of automerge - which is +written in rust in `./crates` - as well as wrappers which expose the Rust +implementation via FFI in other languages in `./wrappers`. 
Because this is -To build and test the rust library: +## Building -```shell - $ cd automerge - $ cargo test -``` +To build this codebase you will need: -To build and test the wasm library: +- `rust` +- `wasm-pack` +- `node` +- `yarn` +- `cmake` -```shell - ## setup - $ cd automerge-wasm - $ yarn +The various subprojects (the rust code, the wrapper projects) have their own +build instructions, but to run the tests that will be run in CI you can run +`./scripts/ci/run`. - ## building or testing - $ yarn build - $ yarn test +## Contributing - ## without this the js library wont automatically use changes - $ yarn link - - ## cutting a release or doing benchmarking - $ yarn release -``` - -To test the js library. This is where most of the tests reside. - -```shell - ## setup - $ cd automerge-js - $ yarn - $ yarn link "automerge-wasm" - - ## testing - $ yarn test -``` - -And finally, to build and test the C bindings with CMake: - -```shell -## setup -$ cd automerge-c -$ mkdir -p build -$ cd build -$ cmake -S .. -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF -## building and testing -$ cmake --build . --target test_automerge -``` - -To add debugging symbols, replace `Release` with `Debug`. -To build a shared library instead of a static one, replace `OFF` with `ON`. - -The C bindings can be built and tested on any platform for which CMake is -available but the steps for doing so vary across platforms and are too numerous -to list here. - -## Benchmarking - -The [`edit-trace`](edit-trace) folder has the main code for running the edit trace benchmarking. - -## The old Rust project -If you are looking for the origional `automerge-rs` project that can be used as a wasm backend to the javascript implementation, it can be found [here](https://github.com/automerge/automerge-rs/tree/automerge-1.0). 
+Please try and split your changes up into relatively independent commits which +change one subsystem at a time and add good commit messages which describe what +the change is and why you're making it (err on the side of longer commit +messages). `git blame` should give future maintainers a good idea of why +something is the way it is. From ee0c3ef3ac8e1fbba20ad2dc8fb7aa180608547c Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 12 Oct 2022 16:10:13 +0100 Subject: [PATCH 598/730] javascript: Make getObjectId tolerate non object arguments Fixes #433. `getObjectId` was previously throwing an error if passed something which was not an object. In the process of fixing this I simplified the logic of `getObjectId` by modifying automerge-wasm to not set the OBJECT_ID hidden property on objects which are not maps, lists, or text - it was previously setting this property on anything which was a JS object, including `Date` and `Uint8Array`. --- crates/automerge-wasm/src/interop.rs | 4 ++- crates/automerge-wasm/test/apply.ts | 40 ++++++++++++++++++++++++++ wrappers/javascript/src/index.ts | 20 ++++++------- wrappers/javascript/test/basic_test.ts | 34 ++++++++++++++++++++++ 4 files changed, 87 insertions(+), 11 deletions(-) diff --git a/crates/automerge-wasm/src/interop.rs b/crates/automerge-wasm/src/interop.rs index 66161b8a..f8d961ec 100644 --- a/crates/automerge-wasm/src/interop.rs +++ b/crates/automerge-wasm/src/interop.rs @@ -533,8 +533,10 @@ impl Automerge { } else { value }; + if matches!(datatype, Datatype::Map | Datatype::List | Datatype::Text) { + set_hidden_value(&value, &Symbol::for_(RAW_OBJECT_SYMBOL), id)?; + } set_hidden_value(&value, &Symbol::for_(DATATYPE_SYMBOL), datatype)?; - set_hidden_value(&value, &Symbol::for_(RAW_OBJECT_SYMBOL), id)?; set_hidden_value(&value, &Symbol::for_(META_SYMBOL), meta)?; Ok(value) } diff --git a/crates/automerge-wasm/test/apply.ts b/crates/automerge-wasm/test/apply.ts index 50531458..c89a9ef8 100644 --- 
a/crates/automerge-wasm/test/apply.ts +++ b/crates/automerge-wasm/test/apply.ts @@ -165,6 +165,46 @@ describe('Automerge', () => { assert.deepEqual( mat, { notes: new String("hello everyone") } ) }) + it('should set the OBJECT_ID property on lists, maps, and text objects and not on scalars', () => { + const doc1 = create('aaaa') + let mat: any = doc1.materialize("/") + doc1.enablePatches(true) + doc1.registerDatatype("counter", (n: number) => new Counter(n)) + doc1.put("/", "string", "string", "str") + doc1.put("/", "uint", 2, "uint") + doc1.put("/", "int", 2, "int") + doc1.put("/", "float", 2.3, "f64") + doc1.put("/", "bytes", new Uint8Array(), "bytes") + doc1.put("/", "counter", 1, "counter") + doc1.put("/", "date", new Date(), "timestamp") + doc1.putObject("/", "text", "text") + doc1.putObject("/", "list", []) + doc1.putObject("/", "map", {}) + const applied = doc1.applyPatches(mat) + + assert.equal(_obj(applied.string), null) + assert.equal(_obj(applied.uint), null) + assert.equal(_obj(applied.int), null) + assert.equal(_obj(applied.float), null) + assert.equal(_obj(applied.bytes), null) + assert.equal(_obj(applied.counter), null) + assert.equal(_obj(applied.date), null) + + assert.notEqual(_obj(applied.text), null) + assert.notEqual(_obj(applied.list), null) + assert.notEqual(_obj(applied.map), null) + }) + + it('should set the root OBJECT_ID to "_root"', () => { + const doc1 = create('aaaa') + let mat: any = doc1.materialize("/") + assert.equal(_obj(mat), "_root") + doc1.enablePatches(true) + doc1.put("/", "key", "value") + let applied = doc1.applyPatches(mat) + assert.equal(_obj(applied), "_root") + }) + it.skip('it can patch quickly', () => { /* console.time("init") diff --git a/wrappers/javascript/src/index.ts b/wrappers/javascript/src/index.ts index 3a5316c9..de5e8450 100644 --- a/wrappers/javascript/src/index.ts +++ b/wrappers/javascript/src/index.ts @@ -77,15 +77,11 @@ function _clear_heads(doc: Doc) { Reflect.set(doc,TRACE,undefined) } -function 
_obj(doc: Doc) : ObjID { - let proxy_objid = Reflect.get(doc,OBJECT_ID) - if (proxy_objid) { - return proxy_objid +function _obj(doc: Doc) : ObjID | null{ + if (!(typeof doc === 'object') || doc === null) { + return null } - if (Reflect.get(doc,STATE)) { - return "_root" - } - throw new RangeError("invalid document passed to _obj()") + return Reflect.get(doc,OBJECT_ID) } function _readonly(doc: Doc) : boolean { @@ -299,7 +295,11 @@ function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : Conflict export function getConflicts(doc: Doc, prop: Prop) : Conflicts | undefined { const state = _state(doc, false) const objectId = _obj(doc) - return conflictAt(state.handle, objectId, prop) + if (objectId != null) { + return conflictAt(state.handle, objectId, prop) + } else { + return undefined + } } export function getLastLocalChange(doc: Doc) : Change | undefined { @@ -307,7 +307,7 @@ export function getLastLocalChange(doc: Doc) : Change | undefined { return state.handle.getLastLocalChange() || undefined } -export function getObjectId(doc: Doc) : ObjID { +export function getObjectId(doc: any) : ObjID | null{ return _obj(doc) } diff --git a/wrappers/javascript/test/basic_test.ts b/wrappers/javascript/test/basic_test.ts index 2936a0e2..18a6818b 100644 --- a/wrappers/javascript/test/basic_test.ts +++ b/wrappers/javascript/test/basic_test.ts @@ -1,4 +1,5 @@ import * as assert from 'assert' +import {Counter} from 'automerge' import * as Automerge from '../src' describe('Automerge', () => { @@ -229,5 +230,38 @@ describe('Automerge', () => { const m2 = Automerge.merge(Automerge.clone(s2), Automerge.clone(s1)) assert.deepStrictEqual(Automerge.getConflicts(m1, 'x'), Automerge.getConflicts(m2, 'x')) }) + + describe("getObjectId", () => { + let s1 = Automerge.from({ + "string": "string", + "number": 1, + "null": null, + "date": new Date(), + "counter": new Automerge.Counter(), + "bytes": new Uint8Array(10), + "text": new Automerge.Text(), + "list": [], + "map": {} + }) 
+ + it("should return null for scalar values", () => { + assert.equal(Automerge.getObjectId(s1.string), null) + assert.equal(Automerge.getObjectId(s1.number), null) + assert.equal(Automerge.getObjectId(s1.null), null) + assert.equal(Automerge.getObjectId(s1.date), null) + assert.equal(Automerge.getObjectId(s1.counter), null) + assert.equal(Automerge.getObjectId(s1.bytes), null) + }) + + it("should return _root for the root object", () => { + assert.equal(Automerge.getObjectId(s1), "_root") + }) + + it("should return non-null for map, list, text, and objects", () => { + assert.notEqual(Automerge.getObjectId(s1.text), null) + assert.notEqual(Automerge.getObjectId(s1.list), null) + assert.notEqual(Automerge.getObjectId(s1.map), null) + }) + }) }) From f0f036eb898093c2a5e253cba77a5a3d517a208d Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 10 Oct 2022 19:23:22 -0400 Subject: [PATCH 599/730] add loadIncremental to js --- wrappers/javascript/src/index.ts | 14 ++++++++++++++ wrappers/javascript/test/extra_api_tests.ts | 20 ++++++++++++++++++++ 2 files changed, 34 insertions(+) create mode 100644 wrappers/javascript/test/extra_api_tests.ts diff --git a/wrappers/javascript/src/index.ts b/wrappers/javascript/src/index.ts index de5e8450..0c9041e5 100644 --- a/wrappers/javascript/src/index.ts +++ b/wrappers/javascript/src/index.ts @@ -227,6 +227,20 @@ export function load(data: Uint8Array, _opts?: ActorId | InitOptions) : Do return doc } +export function loadIncremental(doc: Doc, data: Uint8Array, opts?: ApplyOptions) : Doc { + if (!opts) { opts = {} } + const state = _state(doc) + if (state.heads) { + throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); + } + if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + state.handle.loadIncremental(data) + return progressDocument(doc, heads, opts.patchCallback || state.patchCallback) +} + export 
function save(doc: Doc) : Uint8Array { return _state(doc).handle.save() } diff --git a/wrappers/javascript/test/extra_api_tests.ts b/wrappers/javascript/test/extra_api_tests.ts new file mode 100644 index 00000000..ce0438d5 --- /dev/null +++ b/wrappers/javascript/test/extra_api_tests.ts @@ -0,0 +1,20 @@ + +import * as assert from 'assert' +import * as Automerge from '../src' + +describe('Automerge', () => { + describe('basics', () => { + it('should allow you to load incrementally', () => { + let doc1 = Automerge.from({ foo: "bar" }) + let doc2 = Automerge.init(); + doc2 = Automerge.loadIncremental(doc2, Automerge.save(doc1)) + doc1 = Automerge.change(doc1, (d) => d.foo2 = "bar2") + doc2 = Automerge.loadIncremental(doc2, Automerge.getBackend(doc1).saveIncremental() ) + doc1 = Automerge.change(doc1, (d) => d.foo = "bar2") + doc2 = Automerge.loadIncremental(doc2, Automerge.getBackend(doc1).saveIncremental() ) + doc1 = Automerge.change(doc1, (d) => d.x = "y") + doc2 = Automerge.loadIncremental(doc2, Automerge.getBackend(doc1).saveIncremental() ) + assert.deepEqual(doc1,doc2) + }) + }) +}) From cd2997e63ff1e299010f040f7b9fe5ed32e4104e Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 13 Oct 2022 23:13:09 +0100 Subject: [PATCH 600/730] @automerge/automerge@2.0.0-alpha.5 and @automerge/automerge-wasm@0.1.10 --- crates/automerge-wasm/package.json | 2 +- .../examples/create-react-app/package.json | 2 +- .../examples/create-react-app/yarn.lock | 9120 ----------------- .../javascript/examples/vite/package.json | 2 +- .../javascript/examples/webpack/package.json | 2 +- wrappers/javascript/package.json | 4 +- 6 files changed, 6 insertions(+), 9126 deletions(-) delete mode 100644 wrappers/javascript/examples/create-react-app/yarn.lock diff --git a/crates/automerge-wasm/package.json b/crates/automerge-wasm/package.json index 3dd0722d..6a64278a 100644 --- a/crates/automerge-wasm/package.json +++ b/crates/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen 
bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.9", + "version": "0.1.10", "license": "MIT", "files": [ "README.md", diff --git a/wrappers/javascript/examples/create-react-app/package.json b/wrappers/javascript/examples/create-react-app/package.json index a2b7f37b..297404bb 100644 --- a/wrappers/javascript/examples/create-react-app/package.json +++ b/wrappers/javascript/examples/create-react-app/package.json @@ -8,7 +8,7 @@ "@testing-library/jest-dom": "^5.16.5", "@testing-library/react": "^13.4.0", "@testing-library/user-event": "^13.5.0", - "@automerge/automerge": "2.0.0-alpha.4", + "@automerge/automerge": "2.0.0-alpha.5", "react": "^18.2.0", "react-dom": "^18.2.0", "react-scripts": "5.0.1", diff --git a/wrappers/javascript/examples/create-react-app/yarn.lock b/wrappers/javascript/examples/create-react-app/yarn.lock deleted file mode 100644 index 90a1592b..00000000 --- a/wrappers/javascript/examples/create-react-app/yarn.lock +++ /dev/null @@ -1,9120 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - -"@adobe/css-tools@^4.0.1": - version "4.0.1" - resolved "http://localhost:4873/@adobe%2fcss-tools/-/css-tools-4.0.1.tgz#b38b444ad3aa5fedbb15f2f746dcd934226a12dd" - integrity sha512-+u76oB43nOHrF4DDWRLWDCtci7f3QJoEBigemIdIeTi1ODqjx6Tad9NCVnPRwewWlKkVab5PlK8DCtPTyX7S8g== - -"@ampproject/remapping@^2.1.0": - version "2.2.0" - resolved "http://localhost:4873/@ampproject%2fremapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" - integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== - dependencies: - "@jridgewell/gen-mapping" "^0.1.0" - "@jridgewell/trace-mapping" "^0.3.9" - -"@apideck/better-ajv-errors@^0.3.1": - version "0.3.6" - resolved "http://localhost:4873/@apideck%2fbetter-ajv-errors/-/better-ajv-errors-0.3.6.tgz#957d4c28e886a64a8141f7522783be65733ff097" - integrity sha512-P+ZygBLZtkp0qqOAJJVX4oX/sFo5JR3eBWwwuqHHhK0GIgQOKWrAfiAaWX0aArHkRWHMuggFEgAZNxVPwPZYaA== - dependencies: - json-schema "^0.4.0" - jsonpointer "^5.0.0" - leven "^3.1.0" - -"@automerge/automerge-wasm@0.1.9": - version "0.1.9" - resolved "http://localhost:4873/@automerge%2fautomerge-wasm/-/automerge-wasm-0.1.9.tgz#b2def5e8b643f1802bc696843b7755dc444dc2eb" - integrity sha512-S+sjJUJ3aPn2F37vKYAzKxz8CDgbHpOOGVjKSgkLjkAqe1pQ+wp4BpiELXafX73w8DVIrGx1zzru4w3t+Eo8gw== - -"@automerge/automerge@2.0.0-alpha.4": - version "2.0.0-alpha.4" - resolved "http://localhost:4873/@automerge%2fautomerge/-/automerge-2.0.0-alpha.4.tgz#df406f5364960a4d21040044da55ebd47406ea3a" - integrity sha512-PVRD1dmLy0U4GttyMvlWr99wyr6xvskJbOkxJDHnp+W2VAFfcqa4QKouaFbJ4W3iIsYX8DfQJ+uhRxa6UnvkHg== - dependencies: - "@automerge/automerge-wasm" "0.1.9" - uuid "^8.3" - -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.8.3": - version "7.18.6" - resolved 
"http://localhost:4873/@babel%2fcode-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" - integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== - dependencies: - "@babel/highlight" "^7.18.6" - -"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.18.8", "@babel/compat-data@^7.19.3": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fcompat-data/-/compat-data-7.19.3.tgz#707b939793f867f5a73b2666e6d9a3396eb03151" - integrity sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw== - -"@babel/core@^7.1.0", "@babel/core@^7.11.1", "@babel/core@^7.12.3", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fcore/-/core-7.19.3.tgz#2519f62a51458f43b682d61583c3810e7dcee64c" - integrity sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ== - dependencies: - "@ampproject/remapping" "^2.1.0" - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.19.3" - "@babel/helper-compilation-targets" "^7.19.3" - "@babel/helper-module-transforms" "^7.19.0" - "@babel/helpers" "^7.19.0" - "@babel/parser" "^7.19.3" - "@babel/template" "^7.18.10" - "@babel/traverse" "^7.19.3" - "@babel/types" "^7.19.3" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync "^1.0.0-beta.2" - json5 "^2.2.1" - semver "^6.3.0" - -"@babel/eslint-parser@^7.16.3": - version "7.19.1" - resolved "http://localhost:4873/@babel%2feslint-parser/-/eslint-parser-7.19.1.tgz#4f68f6b0825489e00a24b41b6a1ae35414ecd2f4" - integrity sha512-AqNf2QWt1rtu2/1rLswy6CDP7H9Oh3mMhk177Y67Rg8d7RD9WfOLLv8CGn6tisFvS2htm86yIe1yLF6I1UDaGQ== - dependencies: - "@nicolo-ribaudo/eslint-scope-5-internals" "5.1.1-v1" - eslint-visitor-keys "^2.1.0" - semver "^6.3.0" - -"@babel/generator@^7.19.3", "@babel/generator@^7.7.2": - version "7.19.3" - resolved 
"http://localhost:4873/@babel%2fgenerator/-/generator-7.19.3.tgz#d7f4d1300485b4547cb6f94b27d10d237b42bf59" - integrity sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ== - dependencies: - "@babel/types" "^7.19.3" - "@jridgewell/gen-mapping" "^0.3.2" - jsesc "^2.5.1" - -"@babel/helper-annotate-as-pure@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb" - integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-builder-binary-assignment-operator-visitor@^7.18.6": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fhelper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz#acd4edfd7a566d1d51ea975dff38fd52906981bb" - integrity sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw== - dependencies: - "@babel/helper-explode-assignable-expression" "^7.18.6" - "@babel/types" "^7.18.9" - -"@babel/helper-compilation-targets@^7.17.7", "@babel/helper-compilation-targets@^7.18.9", "@babel/helper-compilation-targets@^7.19.0", "@babel/helper-compilation-targets@^7.19.3": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fhelper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz#a10a04588125675d7c7ae299af86fa1b2ee038ca" - integrity sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg== - dependencies: - "@babel/compat-data" "^7.19.3" - "@babel/helper-validator-option" "^7.18.6" - browserslist "^4.21.3" - semver "^6.3.0" - -"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.19.0": - version "7.19.0" - resolved 
"http://localhost:4873/@babel%2fhelper-create-class-features-plugin/-/helper-create-class-features-plugin-7.19.0.tgz#bfd6904620df4e46470bae4850d66be1054c404b" - integrity sha512-NRz8DwF4jT3UfrmUoZjd0Uph9HQnP30t7Ash+weACcyNkiYTywpIjDBgReJMKgr+n86sn2nPVVmJ28Dm053Kqw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-member-expression-to-functions" "^7.18.9" - "@babel/helper-optimise-call-expression" "^7.18.6" - "@babel/helper-replace-supers" "^7.18.9" - "@babel/helper-split-export-declaration" "^7.18.6" - -"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fhelper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.19.0.tgz#7976aca61c0984202baca73d84e2337a5424a41b" - integrity sha512-htnV+mHX32DF81amCDrwIDr8nrp1PTm+3wfBN9/v8QJOLEioOCOG7qNyq0nHeFiWbT3Eb7gsPwEmV64UCQ1jzw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - regexpu-core "^5.1.0" - -"@babel/helper-define-polyfill-provider@^0.3.3": - version "0.3.3" - resolved "http://localhost:4873/@babel%2fhelper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.3.tgz#8612e55be5d51f0cd1f36b4a5a83924e89884b7a" - integrity sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww== - dependencies: - "@babel/helper-compilation-targets" "^7.17.7" - "@babel/helper-plugin-utils" "^7.16.7" - debug "^4.1.1" - lodash.debounce "^4.0.8" - resolve "^1.14.2" - semver "^6.1.2" - -"@babel/helper-environment-visitor@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fhelper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" - integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== - 
-"@babel/helper-explode-assignable-expression@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz#41f8228ef0a6f1a036b8dfdfec7ce94f9a6bc096" - integrity sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-function-name@^7.18.9", "@babel/helper-function-name@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fhelper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" - integrity sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== - dependencies: - "@babel/template" "^7.18.10" - "@babel/types" "^7.19.0" - -"@babel/helper-hoist-variables@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" - integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-member-expression-to-functions@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fhelper-member-expression-to-functions/-/helper-member-expression-to-functions-7.18.9.tgz#1531661e8375af843ad37ac692c132841e2fd815" - integrity sha512-RxifAh2ZoVU67PyKIO4AMi1wTenGfMR/O/ae0CCRqwgBAt5v7xjdtRw7UoSbsreKrQn5t7r89eruK/9JjYHuDg== - dependencies: - "@babel/types" "^7.18.9" - -"@babel/helper-module-imports@^7.10.4", "@babel/helper-module-imports@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" - integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== - dependencies: - "@babel/types" "^7.18.6" - 
-"@babel/helper-module-transforms@^7.18.6", "@babel/helper-module-transforms@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fhelper-module-transforms/-/helper-module-transforms-7.19.0.tgz#309b230f04e22c58c6a2c0c0c7e50b216d350c30" - integrity sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ== - dependencies: - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-simple-access" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/helper-validator-identifier" "^7.18.6" - "@babel/template" "^7.18.10" - "@babel/traverse" "^7.19.0" - "@babel/types" "^7.19.0" - -"@babel/helper-optimise-call-expression@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz#9369aa943ee7da47edab2cb4e838acf09d290ffe" - integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.19.0", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fhelper-plugin-utils/-/helper-plugin-utils-7.19.0.tgz#4796bb14961521f0f8715990bee2fb6e51ce21bf" - integrity sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw== - -"@babel/helper-remap-async-to-generator@^7.18.6", "@babel/helper-remap-async-to-generator@^7.18.9": - version "7.18.9" - resolved 
"http://localhost:4873/@babel%2fhelper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz#997458a0e3357080e54e1d79ec347f8a8cd28519" - integrity sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-wrap-function" "^7.18.9" - "@babel/types" "^7.18.9" - -"@babel/helper-replace-supers@^7.18.6", "@babel/helper-replace-supers@^7.18.9", "@babel/helper-replace-supers@^7.19.1": - version "7.19.1" - resolved "http://localhost:4873/@babel%2fhelper-replace-supers/-/helper-replace-supers-7.19.1.tgz#e1592a9b4b368aa6bdb8784a711e0bcbf0612b78" - integrity sha512-T7ahH7wV0Hfs46SFh5Jz3s0B6+o8g3c+7TMxu7xKfmHikg7EAZ3I2Qk9LFhjxXq8sL7UkP5JflezNwoZa8WvWw== - dependencies: - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-member-expression-to-functions" "^7.18.9" - "@babel/helper-optimise-call-expression" "^7.18.6" - "@babel/traverse" "^7.19.1" - "@babel/types" "^7.19.0" - -"@babel/helper-simple-access@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-simple-access/-/helper-simple-access-7.18.6.tgz#d6d8f51f4ac2978068df934b569f08f29788c7ea" - integrity sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-skip-transparent-expression-wrappers@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fhelper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.18.9.tgz#778d87b3a758d90b471e7b9918f34a9a02eb5818" - integrity sha512-imytd2gHi3cJPsybLRbmFrF7u5BIEuI2cNheyKi3/iOBC63kNn3q8Crn2xVuESli0aM4KYsyEqKyS7lFL8YVtw== - dependencies: - "@babel/types" "^7.18.9" - -"@babel/helper-split-export-declaration@^7.18.6": - version "7.18.6" - resolved 
"http://localhost:4873/@babel%2fhelper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" - integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-string-parser@^7.18.10": - version "7.18.10" - resolved "http://localhost:4873/@babel%2fhelper-string-parser/-/helper-string-parser-7.18.10.tgz#181f22d28ebe1b3857fa575f5c290b1aaf659b56" - integrity sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw== - -"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": - version "7.19.1" - resolved "http://localhost:4873/@babel%2fhelper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" - integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== - -"@babel/helper-validator-option@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" - integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== - -"@babel/helper-wrap-function@^7.18.9": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fhelper-wrap-function/-/helper-wrap-function-7.19.0.tgz#89f18335cff1152373222f76a4b37799636ae8b1" - integrity sha512-txX8aN8CZyYGTwcLhlk87KRqncAzhh5TpQamZUa0/u3an36NtDpUP6bQgBCBcLeBs09R/OwQu3OjK0k/HwfNDg== - dependencies: - "@babel/helper-function-name" "^7.19.0" - "@babel/template" "^7.18.10" - "@babel/traverse" "^7.19.0" - "@babel/types" "^7.19.0" - -"@babel/helpers@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fhelpers/-/helpers-7.19.0.tgz#f30534657faf246ae96551d88dd31e9d1fa1fc18" - integrity 
sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg== - dependencies: - "@babel/template" "^7.18.10" - "@babel/traverse" "^7.19.0" - "@babel/types" "^7.19.0" - -"@babel/highlight@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhighlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" - integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== - dependencies: - "@babel/helper-validator-identifier" "^7.18.6" - chalk "^2.0.0" - js-tokens "^4.0.0" - -"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.18.10", "@babel/parser@^7.19.3": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fparser/-/parser-7.19.3.tgz#8dd36d17c53ff347f9e55c328710321b49479a9a" - integrity sha512-pJ9xOlNWHiy9+FuFP09DEAFbAn4JskgRsVcc169w2xRBC3FRGuQEwjeIMMND9L2zc0iEhO/tGv4Zq+km+hxNpQ== - -"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz#da5b8f9a580acdfbe53494dba45ea389fb09a4d2" - integrity sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.18.9.tgz#a11af19aa373d68d561f08e0a57242350ed0ec50" - integrity sha512-AHrP9jadvH7qlOj6PINbgSuphjQUAK7AOT7DPjBo9EHoLhQTnnK5u45e1Hd4DbSQEO9nqPWtQ89r+XEOWFScKg== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" - 
"@babel/plugin-proposal-optional-chaining" "^7.18.9" - -"@babel/plugin-proposal-async-generator-functions@^7.19.1": - version "7.19.1" - resolved "http://localhost:4873/@babel%2fplugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.19.1.tgz#34f6f5174b688529342288cd264f80c9ea9fb4a7" - integrity sha512-0yu8vNATgLy4ivqMNBIwb1HebCelqN7YX8SL3FDXORv/RqT0zEEWUCH4GH44JsSrvCu6GqnAdR5EBFAPeNBB4Q== - dependencies: - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/helper-remap-async-to-generator" "^7.18.9" - "@babel/plugin-syntax-async-generators" "^7.8.4" - -"@babel/plugin-proposal-class-properties@^7.16.0", "@babel/plugin-proposal-class-properties@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" - integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-proposal-class-static-block@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.6.tgz#8aa81d403ab72d3962fc06c26e222dacfc9b9020" - integrity sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - -"@babel/plugin-proposal-decorators@^7.16.4": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fplugin-proposal-decorators/-/plugin-proposal-decorators-7.19.3.tgz#c1977e4902a18cdf9051bf7bf08d97db2fd8b110" - integrity sha512-MbgXtNXqo7RTKYIXVchVJGPvaVufQH3pxvQyfbGvNw1DObIhph+PesYXJTcd8J4DdWibvf6Z2eanOyItX8WnJg== - 
dependencies: - "@babel/helper-create-class-features-plugin" "^7.19.0" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/helper-replace-supers" "^7.19.1" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/plugin-syntax-decorators" "^7.19.0" - -"@babel/plugin-proposal-dynamic-import@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz#72bcf8d408799f547d759298c3c27c7e7faa4d94" - integrity sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - -"@babel/plugin-proposal-export-namespace-from@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz#5f7313ab348cdb19d590145f9247540e94761203" - integrity sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - -"@babel/plugin-proposal-json-strings@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz#7e8788c1811c393aff762817e7dbf1ebd0c05f0b" - integrity sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-json-strings" "^7.8.3" - -"@babel/plugin-proposal-logical-assignment-operators@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.18.9.tgz#8148cbb350483bf6220af06fa6db3690e14b2e23" - integrity sha512-128YbMpjCrP35IOExw2Fq+x55LMP42DzhOhX2aNNIdI9avSWl2PI0yuBWarr3RYpZBSPtabfadkH2yeRiMD61Q== - 
dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - -"@babel/plugin-proposal-nullish-coalescing-operator@^7.16.0", "@babel/plugin-proposal-nullish-coalescing-operator@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1" - integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - -"@babel/plugin-proposal-numeric-separator@^7.16.0", "@babel/plugin-proposal-numeric-separator@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75" - integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - -"@babel/plugin-proposal-object-rest-spread@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.18.9.tgz#f9434f6beb2c8cae9dfcf97d2a5941bbbf9ad4e7" - integrity sha512-kDDHQ5rflIeY5xl69CEqGEZ0KY369ehsCIEbTGb4siHG5BE9sga/T0r0OUwyZNLMmZE79E1kbsqAjwFCW4ds6Q== - dependencies: - "@babel/compat-data" "^7.18.8" - "@babel/helper-compilation-targets" "^7.18.9" - "@babel/helper-plugin-utils" "^7.18.9" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-transform-parameters" "^7.18.8" - -"@babel/plugin-proposal-optional-catch-binding@^7.18.6": - version "7.18.6" - resolved 
"http://localhost:4873/@babel%2fplugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz#f9400d0e6a3ea93ba9ef70b09e72dd6da638a2cb" - integrity sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - -"@babel/plugin-proposal-optional-chaining@^7.16.0", "@babel/plugin-proposal-optional-chaining@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.18.9.tgz#e8e8fe0723f2563960e4bf5e9690933691915993" - integrity sha512-v5nwt4IqBXihxGsW2QmCWMDS3B3bzGIk/EQVZz2ei7f3NJl8NzAJVvUmpDW5q1CRNY+Beb/k58UAH1Km1N411w== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - -"@babel/plugin-proposal-private-methods@^7.16.0", "@babel/plugin-proposal-private-methods@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz#5209de7d213457548a98436fa2882f52f4be6bea" - integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-proposal-private-property-in-object@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.18.6.tgz#a64137b232f0aca3733a67eb1a144c192389c503" - integrity sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" 
"^7.18.6" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - -"@babel/plugin-proposal-unicode-property-regex@^7.18.6", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz#af613d2cd5e643643b65cded64207b15c85cb78e" - integrity sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-syntax-async-generators@^7.8.4": - version "7.8.4" - resolved "http://localhost:4873/@babel%2fplugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" - integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-bigint@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" - integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3": - version "7.12.13" - resolved "http://localhost:4873/@babel%2fplugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" - integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== - dependencies: - "@babel/helper-plugin-utils" "^7.12.13" - -"@babel/plugin-syntax-class-static-block@^7.14.5": - version "7.14.5" - resolved 
"http://localhost:4873/@babel%2fplugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" - integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-decorators@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fplugin-syntax-decorators/-/plugin-syntax-decorators-7.19.0.tgz#5f13d1d8fce96951bea01a10424463c9a5b3a599" - integrity sha512-xaBZUEDntt4faL1yN8oIFlhfXeQAWJW7CLKYsHTUqriCUbj8xOra8bfxxKGi/UwExPFBuPdH4XfHc9rGQhrVkQ== - dependencies: - "@babel/helper-plugin-utils" "^7.19.0" - -"@babel/plugin-syntax-dynamic-import@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" - integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-export-namespace-from@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" - integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.3" - -"@babel/plugin-syntax-flow@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-syntax-flow/-/plugin-syntax-flow-7.18.6.tgz#774d825256f2379d06139be0c723c4dd444f3ca1" - integrity sha512-LUbR+KNTBWCUAqRG9ex5Gnzu2IOkt8jRJbHHXFT9q+L9zm7M/QQbEqXyw1n1pohYvOyWC8CjeyjrSaIwiYjK7A== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-syntax-import-assertions@^7.18.6": - version "7.18.6" - resolved 
"http://localhost:4873/@babel%2fplugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.18.6.tgz#cd6190500a4fa2fe31990a963ffab4b63e4505e4" - integrity sha512-/DU3RXad9+bZwrgWJQKbr39gYbJpLJHezqEzRzi/BHRlJ9zsQb4CK2CA/5apllXNomwA1qHwzvHl+AdEmC5krQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-syntax-import-meta@^7.8.3": - version "7.10.4" - resolved "http://localhost:4873/@babel%2fplugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" - integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-json-strings@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" - integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-jsx@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz#a8feef63b010150abd97f1649ec296e849943ca0" - integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": - version "7.10.4" - resolved "http://localhost:4873/@babel%2fplugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" - integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": - version 
"7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" - integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3": - version "7.10.4" - resolved "http://localhost:4873/@babel%2fplugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" - integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-object-rest-spread@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" - integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-optional-catch-binding@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" - integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-optional-chaining@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" - integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== - dependencies: - "@babel/helper-plugin-utils" 
"^7.8.0" - -"@babel/plugin-syntax-private-property-in-object@^7.14.5": - version "7.14.5" - resolved "http://localhost:4873/@babel%2fplugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" - integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-top-level-await@^7.14.5", "@babel/plugin-syntax-top-level-await@^7.8.3": - version "7.14.5" - resolved "http://localhost:4873/@babel%2fplugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" - integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-typescript@^7.18.6", "@babel/plugin-syntax-typescript@^7.7.2": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz#1c09cd25795c7c2b8a4ba9ae49394576d4133285" - integrity sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-arrow-functions@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.18.6.tgz#19063fcf8771ec7b31d742339dac62433d0611fe" - integrity sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-async-to-generator@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.18.6.tgz#ccda3d1ab9d5ced5265fdb13f1882d5476c71615" - integrity 
sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag== - dependencies: - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-remap-async-to-generator" "^7.18.6" - -"@babel/plugin-transform-block-scoped-functions@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz#9187bf4ba302635b9d70d986ad70f038726216a8" - integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-block-scoping@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-transform-block-scoping/-/plugin-transform-block-scoping-7.18.9.tgz#f9b7e018ac3f373c81452d6ada8bd5a18928926d" - integrity sha512-5sDIJRV1KtQVEbt/EIBwGy4T01uYIo4KRB3VUqzkhrAIOGx7AoctL9+Ux88btY0zXdDyPJ9mW+bg+v+XEkGmtw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-classes@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fplugin-transform-classes/-/plugin-transform-classes-7.19.0.tgz#0e61ec257fba409c41372175e7c1e606dc79bb20" - integrity sha512-YfeEE9kCjqTS9IitkgfJuxjcEtLUHMqa8yUJ6zdz8vR7hKuo6mOy2C05P0F1tdMmDCeuyidKnlrw/iTppHcr2A== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-compilation-targets" "^7.19.0" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-optimise-call-expression" "^7.18.6" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/helper-replace-supers" "^7.18.9" - "@babel/helper-split-export-declaration" "^7.18.6" - globals "^11.1.0" - -"@babel/plugin-transform-computed-properties@^7.18.9": - version "7.18.9" - resolved 
"http://localhost:4873/@babel%2fplugin-transform-computed-properties/-/plugin-transform-computed-properties-7.18.9.tgz#2357a8224d402dad623caf6259b611e56aec746e" - integrity sha512-+i0ZU1bCDymKakLxn5srGHrsAPRELC2WIbzwjLhHW9SIE1cPYkLCL0NlnXMZaM1vhfgA2+M7hySk42VBvrkBRw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-destructuring@^7.18.13": - version "7.18.13" - resolved "http://localhost:4873/@babel%2fplugin-transform-destructuring/-/plugin-transform-destructuring-7.18.13.tgz#9e03bc4a94475d62b7f4114938e6c5c33372cbf5" - integrity sha512-TodpQ29XekIsex2A+YJPj5ax2plkGa8YYY6mFjCohk/IG9IY42Rtuj1FuDeemfg2ipxIFLzPeA83SIBnlhSIow== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-dotall-regex@^7.18.6", "@babel/plugin-transform-dotall-regex@^7.4.4": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz#b286b3e7aae6c7b861e45bed0a2fafd6b1a4fef8" - integrity sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-duplicate-keys@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz#687f15ee3cdad6d85191eb2a372c4528eaa0ae0e" - integrity sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-exponentiation-operator@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz#421c705f4521888c65e91fdd1af951bfefd4dacd" - integrity sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw== - 
dependencies: - "@babel/helper-builder-binary-assignment-operator-visitor" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-flow-strip-types@^7.16.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fplugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.19.0.tgz#e9e8606633287488216028719638cbbb2f2dde8f" - integrity sha512-sgeMlNaQVbCSpgLSKP4ZZKfsJVnFnNQlUSk6gPYzR/q7tzCgQF2t8RBKAP6cKJeZdveei7Q7Jm527xepI8lNLg== - dependencies: - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/plugin-syntax-flow" "^7.18.6" - -"@babel/plugin-transform-for-of@^7.18.8": - version "7.18.8" - resolved "http://localhost:4873/@babel%2fplugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz#6ef8a50b244eb6a0bdbad0c7c61877e4e30097c1" - integrity sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-function-name@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz#cc354f8234e62968946c61a46d6365440fc764e0" - integrity sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ== - dependencies: - "@babel/helper-compilation-targets" "^7.18.9" - "@babel/helper-function-name" "^7.18.9" - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-literals@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz#72796fdbef80e56fba3c6a699d54f0de557444bc" - integrity sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-member-expression-literals@^7.18.6": - version "7.18.6" - resolved 
"http://localhost:4873/@babel%2fplugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz#ac9fdc1a118620ac49b7e7a5d2dc177a1bfee88e" - integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-modules-amd@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-modules-amd/-/plugin-transform-modules-amd-7.18.6.tgz#8c91f8c5115d2202f277549848874027d7172d21" - integrity sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg== - dependencies: - "@babel/helper-module-transforms" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - babel-plugin-dynamic-import-node "^2.3.3" - -"@babel/plugin-transform-modules-commonjs@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.18.6.tgz#afd243afba166cca69892e24a8fd8c9f2ca87883" - integrity sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q== - dependencies: - "@babel/helper-module-transforms" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-simple-access" "^7.18.6" - babel-plugin-dynamic-import-node "^2.3.3" - -"@babel/plugin-transform-modules-systemjs@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fplugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.19.0.tgz#5f20b471284430f02d9c5059d9b9a16d4b085a1f" - integrity sha512-x9aiR0WXAWmOWsqcsnrzGR+ieaTMVyGyffPVA7F8cXAGt/UxefYv6uSHZLkAFChN5M5Iy1+wjE+xJuPt22H39A== - dependencies: - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-module-transforms" "^7.19.0" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/helper-validator-identifier" "^7.18.6" - babel-plugin-dynamic-import-node "^2.3.3" - -"@babel/plugin-transform-modules-umd@^7.18.6": - 
version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz#81d3832d6034b75b54e62821ba58f28ed0aab4b9" - integrity sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ== - dependencies: - "@babel/helper-module-transforms" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-named-capturing-groups-regex@^7.19.1": - version "7.19.1" - resolved "http://localhost:4873/@babel%2fplugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.19.1.tgz#ec7455bab6cd8fb05c525a94876f435a48128888" - integrity sha512-oWk9l9WItWBQYS4FgXD4Uyy5kq898lvkXpXQxoJEY1RnvPk4R/Dvu2ebXU9q8lP+rlMwUQTFf2Ok6d78ODa0kw== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.19.0" - "@babel/helper-plugin-utils" "^7.19.0" - -"@babel/plugin-transform-new-target@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz#d128f376ae200477f37c4ddfcc722a8a1b3246a8" - integrity sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-object-super@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz#fb3c6ccdd15939b6ff7939944b51971ddc35912c" - integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-replace-supers" "^7.18.6" - -"@babel/plugin-transform-parameters@^7.18.8": - version "7.18.8" - resolved "http://localhost:4873/@babel%2fplugin-transform-parameters/-/plugin-transform-parameters-7.18.8.tgz#ee9f1a0ce6d78af58d0956a9378ea3427cccb48a" - integrity 
sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-property-literals@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz#e22498903a483448e94e032e9bbb9c5ccbfc93a3" - integrity sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-react-constant-elements@^7.12.1": - version "7.18.12" - resolved "http://localhost:4873/@babel%2fplugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.18.12.tgz#edf3bec47eb98f14e84fa0af137fcc6aad8e0443" - integrity sha512-Q99U9/ttiu+LMnRU8psd23HhvwXmKWDQIpocm0JKaICcZHnw+mdQbHm6xnSy7dOl8I5PELakYtNBubNQlBXbZw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-react-display-name@^7.16.0", "@babel/plugin-transform-react-display-name@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz#8b1125f919ef36ebdfff061d664e266c666b9415" - integrity sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-react-jsx-development@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz#dbe5c972811e49c7405b630e4d0d2e1380c0ddc5" - integrity sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA== - dependencies: - "@babel/plugin-transform-react-jsx" "^7.18.6" - -"@babel/plugin-transform-react-jsx@^7.18.6": - version "7.19.0" - resolved 
"http://localhost:4873/@babel%2fplugin-transform-react-jsx/-/plugin-transform-react-jsx-7.19.0.tgz#b3cbb7c3a00b92ec8ae1027910e331ba5c500eb9" - integrity sha512-UVEvX3tXie3Szm3emi1+G63jyw1w5IcMY0FSKM+CRnKRI5Mr1YbCNgsSTwoTwKphQEG9P+QqmuRFneJPZuHNhg== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/plugin-syntax-jsx" "^7.18.6" - "@babel/types" "^7.19.0" - -"@babel/plugin-transform-react-pure-annotations@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz#561af267f19f3e5d59291f9950fd7b9663d0d844" - integrity sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-regenerator@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-regenerator/-/plugin-transform-regenerator-7.18.6.tgz#585c66cb84d4b4bf72519a34cfce761b8676ca73" - integrity sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - regenerator-transform "^0.15.0" - -"@babel/plugin-transform-reserved-words@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz#b1abd8ebf8edaa5f7fe6bbb8d2133d23b6a6f76a" - integrity sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-runtime@^7.16.4": - version "7.19.1" - resolved "http://localhost:4873/@babel%2fplugin-transform-runtime/-/plugin-transform-runtime-7.19.1.tgz#a3df2d7312eea624c7889a2dcd37fd1dfd25b2c6" - integrity 
sha512-2nJjTUFIzBMP/f/miLxEK9vxwW/KUXsdvN4sR//TmuDhe6yU2h57WmIOE12Gng3MDP/xpjUV/ToZRdcf8Yj4fA== - dependencies: - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-plugin-utils" "^7.19.0" - babel-plugin-polyfill-corejs2 "^0.3.3" - babel-plugin-polyfill-corejs3 "^0.6.0" - babel-plugin-polyfill-regenerator "^0.4.1" - semver "^6.3.0" - -"@babel/plugin-transform-shorthand-properties@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz#6d6df7983d67b195289be24909e3f12a8f664dc9" - integrity sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-spread@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fplugin-transform-spread/-/plugin-transform-spread-7.19.0.tgz#dd60b4620c2fec806d60cfaae364ec2188d593b6" - integrity sha512-RsuMk7j6n+r752EtzyScnWkQyuJdli6LdO5Klv8Yx0OfPVTcQkIUfS8clx5e9yHXzlnhOZF3CbQ8C2uP5j074w== - dependencies: - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" - -"@babel/plugin-transform-sticky-regex@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz#c6706eb2b1524028e317720339583ad0f444adcc" - integrity sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-template-literals@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz#04ec6f10acdaa81846689d63fae117dd9c243a5e" - integrity sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" 
- -"@babel/plugin-transform-typeof-symbol@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz#c8cea68263e45addcd6afc9091429f80925762c0" - integrity sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-typescript@^7.18.6": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fplugin-transform-typescript/-/plugin-transform-typescript-7.19.3.tgz#4f1db1e0fe278b42ddbc19ec2f6cd2f8262e35d6" - integrity sha512-z6fnuK9ve9u/0X0rRvI9MY0xg+DOUaABDYOe+/SQTxtlptaBB/V9JIUxJn6xp3lMBeb9qe8xSFmHU35oZDXD+w== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.19.0" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/plugin-syntax-typescript" "^7.18.6" - -"@babel/plugin-transform-unicode-escapes@^7.18.10": - version "7.18.10" - resolved "http://localhost:4873/@babel%2fplugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz#1ecfb0eda83d09bbcb77c09970c2dd55832aa246" - integrity sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-unicode-regex@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz#194317225d8c201bbae103364ffe9e2cea36cdca" - integrity sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/preset-env@^7.11.0", "@babel/preset-env@^7.12.1", "@babel/preset-env@^7.16.4": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fpreset-env/-/preset-env-7.19.3.tgz#52cd19abaecb3f176a4ff9cc5e15b7bf06bec754" - integrity 
sha512-ziye1OTc9dGFOAXSWKUqQblYHNlBOaDl8wzqf2iKXJAltYiR3hKHUKmkt+S9PppW7RQpq4fFCrwwpIDj/f5P4w== - dependencies: - "@babel/compat-data" "^7.19.3" - "@babel/helper-compilation-targets" "^7.19.3" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/helper-validator-option" "^7.18.6" - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6" - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.18.9" - "@babel/plugin-proposal-async-generator-functions" "^7.19.1" - "@babel/plugin-proposal-class-properties" "^7.18.6" - "@babel/plugin-proposal-class-static-block" "^7.18.6" - "@babel/plugin-proposal-dynamic-import" "^7.18.6" - "@babel/plugin-proposal-export-namespace-from" "^7.18.9" - "@babel/plugin-proposal-json-strings" "^7.18.6" - "@babel/plugin-proposal-logical-assignment-operators" "^7.18.9" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6" - "@babel/plugin-proposal-numeric-separator" "^7.18.6" - "@babel/plugin-proposal-object-rest-spread" "^7.18.9" - "@babel/plugin-proposal-optional-catch-binding" "^7.18.6" - "@babel/plugin-proposal-optional-chaining" "^7.18.9" - "@babel/plugin-proposal-private-methods" "^7.18.6" - "@babel/plugin-proposal-private-property-in-object" "^7.18.6" - "@babel/plugin-proposal-unicode-property-regex" "^7.18.6" - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-class-properties" "^7.12.13" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - "@babel/plugin-syntax-import-assertions" "^7.18.6" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - 
"@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - "@babel/plugin-syntax-top-level-await" "^7.14.5" - "@babel/plugin-transform-arrow-functions" "^7.18.6" - "@babel/plugin-transform-async-to-generator" "^7.18.6" - "@babel/plugin-transform-block-scoped-functions" "^7.18.6" - "@babel/plugin-transform-block-scoping" "^7.18.9" - "@babel/plugin-transform-classes" "^7.19.0" - "@babel/plugin-transform-computed-properties" "^7.18.9" - "@babel/plugin-transform-destructuring" "^7.18.13" - "@babel/plugin-transform-dotall-regex" "^7.18.6" - "@babel/plugin-transform-duplicate-keys" "^7.18.9" - "@babel/plugin-transform-exponentiation-operator" "^7.18.6" - "@babel/plugin-transform-for-of" "^7.18.8" - "@babel/plugin-transform-function-name" "^7.18.9" - "@babel/plugin-transform-literals" "^7.18.9" - "@babel/plugin-transform-member-expression-literals" "^7.18.6" - "@babel/plugin-transform-modules-amd" "^7.18.6" - "@babel/plugin-transform-modules-commonjs" "^7.18.6" - "@babel/plugin-transform-modules-systemjs" "^7.19.0" - "@babel/plugin-transform-modules-umd" "^7.18.6" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.19.1" - "@babel/plugin-transform-new-target" "^7.18.6" - "@babel/plugin-transform-object-super" "^7.18.6" - "@babel/plugin-transform-parameters" "^7.18.8" - "@babel/plugin-transform-property-literals" "^7.18.6" - "@babel/plugin-transform-regenerator" "^7.18.6" - "@babel/plugin-transform-reserved-words" "^7.18.6" - "@babel/plugin-transform-shorthand-properties" "^7.18.6" - "@babel/plugin-transform-spread" "^7.19.0" - "@babel/plugin-transform-sticky-regex" "^7.18.6" - "@babel/plugin-transform-template-literals" "^7.18.9" - "@babel/plugin-transform-typeof-symbol" "^7.18.9" - "@babel/plugin-transform-unicode-escapes" "^7.18.10" - "@babel/plugin-transform-unicode-regex" "^7.18.6" - "@babel/preset-modules" "^0.1.5" - "@babel/types" "^7.19.3" - babel-plugin-polyfill-corejs2 "^0.3.3" - 
babel-plugin-polyfill-corejs3 "^0.6.0" - babel-plugin-polyfill-regenerator "^0.4.1" - core-js-compat "^3.25.1" - semver "^6.3.0" - -"@babel/preset-modules@^0.1.5": - version "0.1.5" - resolved "http://localhost:4873/@babel%2fpreset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9" - integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" - "@babel/plugin-transform-dotall-regex" "^7.4.4" - "@babel/types" "^7.4.4" - esutils "^2.0.2" - -"@babel/preset-react@^7.12.5", "@babel/preset-react@^7.16.0": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fpreset-react/-/preset-react-7.18.6.tgz#979f76d6277048dc19094c217b507f3ad517dd2d" - integrity sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-validator-option" "^7.18.6" - "@babel/plugin-transform-react-display-name" "^7.18.6" - "@babel/plugin-transform-react-jsx" "^7.18.6" - "@babel/plugin-transform-react-jsx-development" "^7.18.6" - "@babel/plugin-transform-react-pure-annotations" "^7.18.6" - -"@babel/preset-typescript@^7.16.0": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fpreset-typescript/-/preset-typescript-7.18.6.tgz#ce64be3e63eddc44240c6358daefac17b3186399" - integrity sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-validator-option" "^7.18.6" - "@babel/plugin-transform-typescript" "^7.18.6" - -"@babel/runtime-corejs3@^7.10.2": - version "7.19.1" - resolved "http://localhost:4873/@babel%2fruntime-corejs3/-/runtime-corejs3-7.19.1.tgz#f0cbbe7edda7c4109cd253bb1dee99aba4594ad9" - integrity 
sha512-j2vJGnkopRzH+ykJ8h68wrHnEUmtK//E723jjixiAl/PPf6FhqY/vYRcMVlNydRKQjQsTsYEjpx+DZMIvnGk/g== - dependencies: - core-js-pure "^3.25.1" - regenerator-runtime "^0.13.4" - -"@babel/runtime@^7.10.2", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.5", "@babel/runtime@^7.16.3", "@babel/runtime@^7.18.9", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fruntime/-/runtime-7.19.0.tgz#22b11c037b094d27a8a2504ea4dcff00f50e2259" - integrity sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA== - dependencies: - regenerator-runtime "^0.13.4" - -"@babel/template@^7.18.10", "@babel/template@^7.3.3": - version "7.18.10" - resolved "http://localhost:4873/@babel%2ftemplate/-/template-7.18.10.tgz#6f9134835970d1dbf0835c0d100c9f38de0c5e71" - integrity sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/parser" "^7.18.10" - "@babel/types" "^7.18.10" - -"@babel/traverse@^7.19.0", "@babel/traverse@^7.19.1", "@babel/traverse@^7.19.3", "@babel/traverse@^7.7.2": - version "7.19.3" - resolved "http://localhost:4873/@babel%2ftraverse/-/traverse-7.19.3.tgz#3a3c5348d4988ba60884e8494b0592b2f15a04b4" - integrity sha512-qh5yf6149zhq2sgIXmwjnsvmnNQC2iw70UFjp4olxucKrWd/dvlUsBI88VSLUsnMNF7/vnOiA+nk1+yLoCqROQ== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.19.3" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/parser" "^7.19.3" - "@babel/types" "^7.19.3" - debug "^4.1.0" - globals "^11.1.0" - -"@babel/types@^7.0.0", "@babel/types@^7.12.6", "@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.18.9", "@babel/types@^7.19.0", "@babel/types@^7.19.3", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": 
- version "7.19.3" - resolved "http://localhost:4873/@babel%2ftypes/-/types-7.19.3.tgz#fc420e6bbe54880bce6779ffaf315f5e43ec9624" - integrity sha512-hGCaQzIY22DJlDh9CH7NOxgKkFjBk0Cw9xDO1Xmh2151ti7wiGfQ3LauXzL4HP1fmFlTX6XjpRETTpUcv7wQLw== - dependencies: - "@babel/helper-string-parser" "^7.18.10" - "@babel/helper-validator-identifier" "^7.19.1" - to-fast-properties "^2.0.0" - -"@bcoe/v8-coverage@^0.2.3": - version "0.2.3" - resolved "http://localhost:4873/@bcoe%2fv8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" - integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== - -"@craco/craco@^7.0.0-alpha.8": - version "7.0.0-alpha.8" - resolved "http://localhost:4873/@craco%2fcraco/-/craco-7.0.0-alpha.8.tgz#40f19f44198ff2341b40654c8c6b4f54c2217972" - integrity sha512-IN3/ldPaktGflPu342cg7n8LYa2c3x9H2XzngUkDzTjro25ig1GyVcUdnG1U0X6wrRTF9K1AxZ5su9jLbdyFUw== - dependencies: - autoprefixer "^10.4.12" - cosmiconfig "^7.0.1" - cosmiconfig-typescript-loader "^4.1.1" - cross-spawn "^7.0.3" - lodash "^4.17.21" - semver "^7.3.7" - webpack-merge "^5.8.0" - -"@csstools/normalize.css@*": - version "12.0.0" - resolved "http://localhost:4873/@csstools%2fnormalize.css/-/normalize.css-12.0.0.tgz#a9583a75c3f150667771f30b60d9f059473e62c4" - integrity sha512-M0qqxAcwCsIVfpFQSlGN5XjXWu8l5JDZN+fPt1LeW5SZexQTgnaEvgXAY+CeygRw0EeppWHi12JxESWiWrB0Sg== - -"@csstools/postcss-cascade-layers@^1.1.0": - version "1.1.1" - resolved "http://localhost:4873/@csstools%2fpostcss-cascade-layers/-/postcss-cascade-layers-1.1.1.tgz#8a997edf97d34071dd2e37ea6022447dd9e795ad" - integrity sha512-+KdYrpKC5TgomQr2DlZF4lDEpHcoxnj5IGddYYfBWJAKfj1JtuHUIqMa+E1pJJ+z3kvDViWMqyqPlG4Ja7amQA== - dependencies: - "@csstools/selector-specificity" "^2.0.2" - postcss-selector-parser "^6.0.10" - -"@csstools/postcss-color-function@^1.1.1": - version "1.1.1" - resolved 
"http://localhost:4873/@csstools%2fpostcss-color-function/-/postcss-color-function-1.1.1.tgz#2bd36ab34f82d0497cfacdc9b18d34b5e6f64b6b" - integrity sha512-Bc0f62WmHdtRDjf5f3e2STwRAl89N2CLb+9iAwzrv4L2hncrbDwnQD9PCq0gtAt7pOI2leIV08HIBUd4jxD8cw== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -"@csstools/postcss-font-format-keywords@^1.0.1": - version "1.0.1" - resolved "http://localhost:4873/@csstools%2fpostcss-font-format-keywords/-/postcss-font-format-keywords-1.0.1.tgz#677b34e9e88ae997a67283311657973150e8b16a" - integrity sha512-ZgrlzuUAjXIOc2JueK0X5sZDjCtgimVp/O5CEqTcs5ShWBa6smhWYbS0x5cVc/+rycTDbjjzoP0KTDnUneZGOg== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-hwb-function@^1.0.2": - version "1.0.2" - resolved "http://localhost:4873/@csstools%2fpostcss-hwb-function/-/postcss-hwb-function-1.0.2.tgz#ab54a9fce0ac102c754854769962f2422ae8aa8b" - integrity sha512-YHdEru4o3Rsbjmu6vHy4UKOXZD+Rn2zmkAmLRfPet6+Jz4Ojw8cbWxe1n42VaXQhD3CQUXXTooIy8OkVbUcL+w== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-ic-unit@^1.0.1": - version "1.0.1" - resolved "http://localhost:4873/@csstools%2fpostcss-ic-unit/-/postcss-ic-unit-1.0.1.tgz#28237d812a124d1a16a5acc5c3832b040b303e58" - integrity sha512-Ot1rcwRAaRHNKC9tAqoqNZhjdYBzKk1POgWfhN4uCOE47ebGcLRqXjKkApVDpjifL6u2/55ekkpnFcp+s/OZUw== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -"@csstools/postcss-is-pseudo-class@^2.0.7": - version "2.0.7" - resolved "http://localhost:4873/@csstools%2fpostcss-is-pseudo-class/-/postcss-is-pseudo-class-2.0.7.tgz#846ae6c0d5a1eaa878fce352c544f9c295509cd1" - integrity sha512-7JPeVVZHd+jxYdULl87lvjgvWldYu+Bc62s9vD/ED6/QTGjy0jy0US/f6BG53sVMTBJ1lzKZFpYmofBN9eaRiA== - dependencies: - "@csstools/selector-specificity" "^2.0.0" - postcss-selector-parser "^6.0.10" - -"@csstools/postcss-nested-calc@^1.0.0": - version "1.0.0" - resolved 
"http://localhost:4873/@csstools%2fpostcss-nested-calc/-/postcss-nested-calc-1.0.0.tgz#d7e9d1d0d3d15cf5ac891b16028af2a1044d0c26" - integrity sha512-JCsQsw1wjYwv1bJmgjKSoZNvf7R6+wuHDAbi5f/7MbFhl2d/+v+TvBTU4BJH3G1X1H87dHl0mh6TfYogbT/dJQ== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-normalize-display-values@^1.0.1": - version "1.0.1" - resolved "http://localhost:4873/@csstools%2fpostcss-normalize-display-values/-/postcss-normalize-display-values-1.0.1.tgz#15da54a36e867b3ac5163ee12c1d7f82d4d612c3" - integrity sha512-jcOanIbv55OFKQ3sYeFD/T0Ti7AMXc9nM1hZWu8m/2722gOTxFg7xYu4RDLJLeZmPUVQlGzo4jhzvTUq3x4ZUw== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-oklab-function@^1.1.1": - version "1.1.1" - resolved "http://localhost:4873/@csstools%2fpostcss-oklab-function/-/postcss-oklab-function-1.1.1.tgz#88cee0fbc8d6df27079ebd2fa016ee261eecf844" - integrity sha512-nJpJgsdA3dA9y5pgyb/UfEzE7W5Ka7u0CX0/HIMVBNWzWemdcTH3XwANECU6anWv/ao4vVNLTMxhiPNZsTK6iA== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -"@csstools/postcss-progressive-custom-properties@^1.1.0", "@csstools/postcss-progressive-custom-properties@^1.3.0": - version "1.3.0" - resolved "http://localhost:4873/@csstools%2fpostcss-progressive-custom-properties/-/postcss-progressive-custom-properties-1.3.0.tgz#542292558384361776b45c85226b9a3a34f276fa" - integrity sha512-ASA9W1aIy5ygskZYuWams4BzafD12ULvSypmaLJT2jvQ8G0M3I8PRQhC0h7mG0Z3LI05+agZjqSR9+K9yaQQjA== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-stepped-value-functions@^1.0.1": - version "1.0.1" - resolved "http://localhost:4873/@csstools%2fpostcss-stepped-value-functions/-/postcss-stepped-value-functions-1.0.1.tgz#f8772c3681cc2befed695e2b0b1d68e22f08c4f4" - integrity sha512-dz0LNoo3ijpTOQqEJLY8nyaapl6umbmDcgj4AD0lgVQ572b2eqA1iGZYTTWhrcrHztWDDRAX2DGYyw2VBjvCvQ== - dependencies: - postcss-value-parser "^4.2.0" - 
-"@csstools/postcss-text-decoration-shorthand@^1.0.0": - version "1.0.0" - resolved "http://localhost:4873/@csstools%2fpostcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-1.0.0.tgz#ea96cfbc87d921eca914d3ad29340d9bcc4c953f" - integrity sha512-c1XwKJ2eMIWrzQenN0XbcfzckOLLJiczqy+YvfGmzoVXd7pT9FfObiSEfzs84bpE/VqfpEuAZ9tCRbZkZxxbdw== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-trigonometric-functions@^1.0.2": - version "1.0.2" - resolved "http://localhost:4873/@csstools%2fpostcss-trigonometric-functions/-/postcss-trigonometric-functions-1.0.2.tgz#94d3e4774c36d35dcdc88ce091336cb770d32756" - integrity sha512-woKaLO///4bb+zZC2s80l+7cm07M7268MsyG3M0ActXXEFi6SuhvriQYcb58iiKGbjwwIU7n45iRLEHypB47Og== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-unset-value@^1.0.2": - version "1.0.2" - resolved "http://localhost:4873/@csstools%2fpostcss-unset-value/-/postcss-unset-value-1.0.2.tgz#c99bb70e2cdc7312948d1eb41df2412330b81f77" - integrity sha512-c8J4roPBILnelAsdLr4XOAR/GsTm0GJi4XpcfvoWk3U6KiTCqiFYc63KhRMQQX35jYMp4Ao8Ij9+IZRgMfJp1g== - -"@csstools/selector-specificity@^2.0.0", "@csstools/selector-specificity@^2.0.2": - version "2.0.2" - resolved "http://localhost:4873/@csstools%2fselector-specificity/-/selector-specificity-2.0.2.tgz#1bfafe4b7ed0f3e4105837e056e0a89b108ebe36" - integrity sha512-IkpVW/ehM1hWKln4fCA3NzJU8KwD+kIOvPZA4cqxoJHtE21CCzjyp+Kxbu0i5I4tBNOlXPL9mjwnWlL0VEG4Fg== - -"@eslint/eslintrc@^1.3.2": - version "1.3.2" - resolved "http://localhost:4873/@eslint%2feslintrc/-/eslintrc-1.3.2.tgz#58b69582f3b7271d8fa67fe5251767a5b38ea356" - integrity sha512-AXYd23w1S/bv3fTs3Lz0vjiYemS08jWkI3hYyS9I1ry+0f+Yjs1wm+sU0BS8qDOPrBIkp4qHYC16I8uVtpLajQ== - dependencies: - ajv "^6.12.4" - debug "^4.3.2" - espree "^9.4.0" - globals "^13.15.0" - ignore "^5.2.0" - import-fresh "^3.2.1" - js-yaml "^4.1.0" - minimatch "^3.1.2" - strip-json-comments "^3.1.1" - -"@humanwhocodes/config-array@^0.10.5": - version "0.10.7" - resolved 
"http://localhost:4873/@humanwhocodes%2fconfig-array/-/config-array-0.10.7.tgz#6d53769fd0c222767e6452e8ebda825c22e9f0dc" - integrity sha512-MDl6D6sBsaV452/QSdX+4CXIjZhIcI0PELsxUjk4U828yd58vk3bTIvk/6w5FY+4hIy9sLW0sfrV7K7Kc++j/w== - dependencies: - "@humanwhocodes/object-schema" "^1.2.1" - debug "^4.1.1" - minimatch "^3.0.4" - -"@humanwhocodes/gitignore-to-minimatch@^1.0.2": - version "1.0.2" - resolved "http://localhost:4873/@humanwhocodes%2fgitignore-to-minimatch/-/gitignore-to-minimatch-1.0.2.tgz#316b0a63b91c10e53f242efb4ace5c3b34e8728d" - integrity sha512-rSqmMJDdLFUsyxR6FMtD00nfQKKLFb1kv+qBbOVKqErvloEIJLo5bDTJTQNTYgeyp78JsA7u/NPi5jT1GR/MuA== - -"@humanwhocodes/module-importer@^1.0.1": - version "1.0.1" - resolved "http://localhost:4873/@humanwhocodes%2fmodule-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" - integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== - -"@humanwhocodes/object-schema@^1.2.1": - version "1.2.1" - resolved "http://localhost:4873/@humanwhocodes%2fobject-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" - integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== - -"@istanbuljs/load-nyc-config@^1.0.0": - version "1.1.0" - resolved "http://localhost:4873/@istanbuljs%2fload-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" - integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== - dependencies: - camelcase "^5.3.1" - find-up "^4.1.0" - get-package-type "^0.1.0" - js-yaml "^3.13.1" - resolve-from "^5.0.0" - -"@istanbuljs/schema@^0.1.2": - version "0.1.3" - resolved "http://localhost:4873/@istanbuljs%2fschema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" - integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== - -"@jest/console@^27.5.1": - 
version "27.5.1" - resolved "http://localhost:4873/@jest%2fconsole/-/console-27.5.1.tgz#260fe7239602fe5130a94f1aa386eff54b014bba" - integrity sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg== - dependencies: - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - jest-message-util "^27.5.1" - jest-util "^27.5.1" - slash "^3.0.0" - -"@jest/console@^28.1.3": - version "28.1.3" - resolved "http://localhost:4873/@jest%2fconsole/-/console-28.1.3.tgz#2030606ec03a18c31803b8a36382762e447655df" - integrity sha512-QPAkP5EwKdK/bxIr6C1I4Vs0rm2nHiANzj/Z5X2JQkrZo6IqvC4ldZ9K95tF0HdidhA8Bo6egxSzUFPYKcEXLw== - dependencies: - "@jest/types" "^28.1.3" - "@types/node" "*" - chalk "^4.0.0" - jest-message-util "^28.1.3" - jest-util "^28.1.3" - slash "^3.0.0" - -"@jest/core@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2fcore/-/core-27.5.1.tgz#267ac5f704e09dc52de2922cbf3af9edcd64b626" - integrity sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ== - dependencies: - "@jest/console" "^27.5.1" - "@jest/reporters" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - emittery "^0.8.1" - exit "^0.1.2" - graceful-fs "^4.2.9" - jest-changed-files "^27.5.1" - jest-config "^27.5.1" - jest-haste-map "^27.5.1" - jest-message-util "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-resolve-dependencies "^27.5.1" - jest-runner "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - jest-watcher "^27.5.1" - micromatch "^4.0.4" - rimraf "^3.0.0" - slash "^3.0.0" - strip-ansi "^6.0.0" - -"@jest/environment@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2fenvironment/-/environment-27.5.1.tgz#d7425820511fe7158abbecc010140c3fd3be9c74" - integrity 
sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA== - dependencies: - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" - -"@jest/expect-utils@^29.1.2": - version "29.1.2" - resolved "http://localhost:4873/@jest%2fexpect-utils/-/expect-utils-29.1.2.tgz#66dbb514d38f7d21456bc774419c9ae5cca3f88d" - integrity sha512-4a48bhKfGj/KAH39u0ppzNTABXQ8QPccWAFUFobWBaEMSMp+sB31Z2fK/l47c4a/Mu1po2ffmfAIPxXbVTXdtg== - dependencies: - jest-get-type "^29.0.0" - -"@jest/fake-timers@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2ffake-timers/-/fake-timers-27.5.1.tgz#76979745ce0579c8a94a4678af7a748eda8ada74" - integrity sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ== - dependencies: - "@jest/types" "^27.5.1" - "@sinonjs/fake-timers" "^8.0.1" - "@types/node" "*" - jest-message-util "^27.5.1" - jest-mock "^27.5.1" - jest-util "^27.5.1" - -"@jest/globals@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2fglobals/-/globals-27.5.1.tgz#7ac06ce57ab966566c7963431cef458434601b2b" - integrity sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/types" "^27.5.1" - expect "^27.5.1" - -"@jest/reporters@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2freporters/-/reporters-27.5.1.tgz#ceda7be96170b03c923c37987b64015812ffec04" - integrity sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw== - dependencies: - "@bcoe/v8-coverage" "^0.2.3" - "@jest/console" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - collect-v8-coverage "^1.0.0" - exit "^0.1.2" - glob "^7.1.2" - graceful-fs "^4.2.9" - istanbul-lib-coverage "^3.0.0" - istanbul-lib-instrument "^5.1.0" - istanbul-lib-report 
"^3.0.0" - istanbul-lib-source-maps "^4.0.0" - istanbul-reports "^3.1.3" - jest-haste-map "^27.5.1" - jest-resolve "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - slash "^3.0.0" - source-map "^0.6.0" - string-length "^4.0.1" - terminal-link "^2.0.0" - v8-to-istanbul "^8.1.0" - -"@jest/schemas@^28.1.3": - version "28.1.3" - resolved "http://localhost:4873/@jest%2fschemas/-/schemas-28.1.3.tgz#ad8b86a66f11f33619e3d7e1dcddd7f2d40ff905" - integrity sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg== - dependencies: - "@sinclair/typebox" "^0.24.1" - -"@jest/schemas@^29.0.0": - version "29.0.0" - resolved "http://localhost:4873/@jest%2fschemas/-/schemas-29.0.0.tgz#5f47f5994dd4ef067fb7b4188ceac45f77fe952a" - integrity sha512-3Ab5HgYIIAnS0HjqJHQYZS+zXc4tUmTmBH3z83ajI6afXp8X3ZtdLX+nXx+I7LNkJD7uN9LAVhgnjDgZa2z0kA== - dependencies: - "@sinclair/typebox" "^0.24.1" - -"@jest/source-map@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2fsource-map/-/source-map-27.5.1.tgz#6608391e465add4205eae073b55e7f279e04e8cf" - integrity sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg== - dependencies: - callsites "^3.0.0" - graceful-fs "^4.2.9" - source-map "^0.6.0" - -"@jest/test-result@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2ftest-result/-/test-result-27.5.1.tgz#56a6585fa80f7cdab72b8c5fc2e871d03832f5bb" - integrity sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag== - dependencies: - "@jest/console" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/istanbul-lib-coverage" "^2.0.0" - collect-v8-coverage "^1.0.0" - -"@jest/test-result@^28.1.3": - version "28.1.3" - resolved "http://localhost:4873/@jest%2ftest-result/-/test-result-28.1.3.tgz#5eae945fd9f4b8fcfce74d239e6f725b6bf076c5" - integrity sha512-kZAkxnSE+FqE8YjW8gNuoVkkC9I7S1qmenl8sGcDOLropASP+BkcGKwhXoyqQuGOGeYY0y/ixjrd/iERpEXHNg== - dependencies: - 
"@jest/console" "^28.1.3" - "@jest/types" "^28.1.3" - "@types/istanbul-lib-coverage" "^2.0.0" - collect-v8-coverage "^1.0.0" - -"@jest/test-sequencer@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2ftest-sequencer/-/test-sequencer-27.5.1.tgz#4057e0e9cea4439e544c6353c6affe58d095745b" - integrity sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ== - dependencies: - "@jest/test-result" "^27.5.1" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-runtime "^27.5.1" - -"@jest/transform@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2ftransform/-/transform-27.5.1.tgz#6c3501dcc00c4c08915f292a600ece5ecfe1f409" - integrity sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw== - dependencies: - "@babel/core" "^7.1.0" - "@jest/types" "^27.5.1" - babel-plugin-istanbul "^6.1.1" - chalk "^4.0.0" - convert-source-map "^1.4.0" - fast-json-stable-stringify "^2.0.0" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-regex-util "^27.5.1" - jest-util "^27.5.1" - micromatch "^4.0.4" - pirates "^4.0.4" - slash "^3.0.0" - source-map "^0.6.1" - write-file-atomic "^3.0.0" - -"@jest/types@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2ftypes/-/types-27.5.1.tgz#3c79ec4a8ba61c170bf937bcf9e98a9df175ec80" - integrity sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw== - dependencies: - "@types/istanbul-lib-coverage" "^2.0.0" - "@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^16.0.0" - chalk "^4.0.0" - -"@jest/types@^28.1.3": - version "28.1.3" - resolved "http://localhost:4873/@jest%2ftypes/-/types-28.1.3.tgz#b05de80996ff12512bc5ceb1d208285a7d11748b" - integrity sha512-RyjiyMUZrKz/c+zlMFO1pm70DcIlST8AeWTkoUdZevew44wcNZQHsEVOiCVtgVnlFFD82FPaXycys58cf2muVQ== - dependencies: - "@jest/schemas" "^28.1.3" - "@types/istanbul-lib-coverage" "^2.0.0" - 
"@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^17.0.8" - chalk "^4.0.0" - -"@jest/types@^29.1.2": - version "29.1.2" - resolved "http://localhost:4873/@jest%2ftypes/-/types-29.1.2.tgz#7442d32b16bcd7592d9614173078b8c334ec730a" - integrity sha512-DcXGtoTykQB5jiwCmVr8H4vdg2OJhQex3qPkG+ISyDO7xQXbt/4R6dowcRyPemRnkH7JoHvZuxPBdlq+9JxFCg== - dependencies: - "@jest/schemas" "^29.0.0" - "@types/istanbul-lib-coverage" "^2.0.0" - "@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^17.0.8" - chalk "^4.0.0" - -"@jridgewell/gen-mapping@^0.1.0": - version "0.1.1" - resolved "http://localhost:4873/@jridgewell%2fgen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" - integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== - dependencies: - "@jridgewell/set-array" "^1.0.0" - "@jridgewell/sourcemap-codec" "^1.4.10" - -"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": - version "0.3.2" - resolved "http://localhost:4873/@jridgewell%2fgen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" - integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== - dependencies: - "@jridgewell/set-array" "^1.0.1" - "@jridgewell/sourcemap-codec" "^1.4.10" - "@jridgewell/trace-mapping" "^0.3.9" - -"@jridgewell/resolve-uri@^3.0.3": - version "3.1.0" - resolved "http://localhost:4873/@jridgewell%2fresolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" - integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== - -"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": - version "1.1.2" - resolved "http://localhost:4873/@jridgewell%2fset-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" - integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== - 
-"@jridgewell/source-map@^0.3.2": - version "0.3.2" - resolved "http://localhost:4873/@jridgewell%2fsource-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" - integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== - dependencies: - "@jridgewell/gen-mapping" "^0.3.0" - "@jridgewell/trace-mapping" "^0.3.9" - -"@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.14" - resolved "http://localhost:4873/@jridgewell%2fsourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" - integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== - -"@jridgewell/trace-mapping@^0.3.14", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.15" - resolved "http://localhost:4873/@jridgewell%2ftrace-mapping/-/trace-mapping-0.3.15.tgz#aba35c48a38d3fd84b37e66c9c0423f9744f9774" - integrity sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g== - dependencies: - "@jridgewell/resolve-uri" "^3.0.3" - "@jridgewell/sourcemap-codec" "^1.4.10" - -"@leichtgewicht/ip-codec@^2.0.1": - version "2.0.4" - resolved "http://localhost:4873/@leichtgewicht%2fip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" - integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== - -"@nicolo-ribaudo/eslint-scope-5-internals@5.1.1-v1": - version "5.1.1-v1" - resolved "http://localhost:4873/@nicolo-ribaudo%2feslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz#dbf733a965ca47b1973177dc0bb6c889edcfb129" - integrity sha512-54/JRvkLIzzDWshCWfuhadfrfZVPiElY8Fcgmg1HroEly/EDSszzhBAsarCux+D/kOslTRquNzuyGSmUSTTHGg== - dependencies: - eslint-scope "5.1.1" - -"@nodelib/fs.scandir@2.1.5": - version "2.1.5" - resolved "http://localhost:4873/@nodelib%2ffs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" - integrity 
sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== - dependencies: - "@nodelib/fs.stat" "2.0.5" - run-parallel "^1.1.9" - -"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": - version "2.0.5" - resolved "http://localhost:4873/@nodelib%2ffs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" - integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== - -"@nodelib/fs.walk@^1.2.3": - version "1.2.8" - resolved "http://localhost:4873/@nodelib%2ffs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" - integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== - dependencies: - "@nodelib/fs.scandir" "2.1.5" - fastq "^1.6.0" - -"@pmmmwh/react-refresh-webpack-plugin@^0.5.3": - version "0.5.7" - resolved "http://localhost:4873/@pmmmwh%2freact-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.7.tgz#58f8217ba70069cc6a73f5d7e05e85b458c150e2" - integrity sha512-bcKCAzF0DV2IIROp9ZHkRJa6O4jy7NlnHdWL3GmcUxYWNjLXkK5kfELELwEfSP5hXPfVL/qOGMAROuMQb9GG8Q== - dependencies: - ansi-html-community "^0.0.8" - common-path-prefix "^3.0.0" - core-js-pure "^3.8.1" - error-stack-parser "^2.0.6" - find-up "^5.0.0" - html-entities "^2.1.0" - loader-utils "^2.0.0" - schema-utils "^3.0.0" - source-map "^0.7.3" - -"@rollup/plugin-babel@^5.2.0": - version "5.3.1" - resolved "http://localhost:4873/@rollup%2fplugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" - integrity sha512-WFfdLWU/xVWKeRQnKmIAQULUI7Il0gZnBIH/ZFO069wYIfPu+8zrfp/KMW0atmELoRDq8FbiP3VCss9MhCut7Q== - dependencies: - "@babel/helper-module-imports" "^7.10.4" - "@rollup/pluginutils" "^3.1.0" - -"@rollup/plugin-node-resolve@^11.2.1": - version "11.2.1" - resolved "http://localhost:4873/@rollup%2fplugin-node-resolve/-/plugin-node-resolve-11.2.1.tgz#82aa59397a29cd4e13248b106e6a4a1880362a60" - integrity 
sha512-yc2n43jcqVyGE2sqV5/YCmocy9ArjVAP/BeXyTtADTBBX6V0e5UMqwO8CdQ0kzjb6zu5P1qMzsScCMRvE9OlVg== - dependencies: - "@rollup/pluginutils" "^3.1.0" - "@types/resolve" "1.17.1" - builtin-modules "^3.1.0" - deepmerge "^4.2.2" - is-module "^1.0.0" - resolve "^1.19.0" - -"@rollup/plugin-replace@^2.4.1": - version "2.4.2" - resolved "http://localhost:4873/@rollup%2fplugin-replace/-/plugin-replace-2.4.2.tgz#a2d539314fbc77c244858faa523012825068510a" - integrity sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg== - dependencies: - "@rollup/pluginutils" "^3.1.0" - magic-string "^0.25.7" - -"@rollup/pluginutils@^3.1.0": - version "3.1.0" - resolved "http://localhost:4873/@rollup%2fpluginutils/-/pluginutils-3.1.0.tgz#706b4524ee6dc8b103b3c995533e5ad680c02b9b" - integrity sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg== - dependencies: - "@types/estree" "0.0.39" - estree-walker "^1.0.1" - picomatch "^2.2.2" - -"@rushstack/eslint-patch@^1.1.0": - version "1.2.0" - resolved "http://localhost:4873/@rushstack%2feslint-patch/-/eslint-patch-1.2.0.tgz#8be36a1f66f3265389e90b5f9c9962146758f728" - integrity sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg== - -"@sinclair/typebox@^0.24.1": - version "0.24.44" - resolved "http://localhost:4873/@sinclair%2ftypebox/-/typebox-0.24.44.tgz#0a0aa3bf4a155a678418527342a3ee84bd8caa5c" - integrity sha512-ka0W0KN5i6LfrSocduwliMMpqVgohtPFidKdMEOUjoOFCHcOOYkKsPRxfs5f15oPNHTm6ERAm0GV/+/LTKeiWg== - -"@sinonjs/commons@^1.7.0": - version "1.8.3" - resolved "http://localhost:4873/@sinonjs%2fcommons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" - integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== - dependencies: - type-detect "4.0.8" - -"@sinonjs/fake-timers@^8.0.1": - version "8.1.0" - resolved 
"http://localhost:4873/@sinonjs%2ffake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7" - integrity sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg== - dependencies: - "@sinonjs/commons" "^1.7.0" - -"@surma/rollup-plugin-off-main-thread@^2.2.3": - version "2.2.3" - resolved "http://localhost:4873/@surma%2frollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz#ee34985952ca21558ab0d952f00298ad2190c053" - integrity sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ== - dependencies: - ejs "^3.1.6" - json5 "^2.2.0" - magic-string "^0.25.0" - string.prototype.matchall "^4.0.6" - -"@svgr/babel-plugin-add-jsx-attribute@^5.4.0": - version "5.4.0" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz#81ef61947bb268eb9d50523446f9c638fb355906" - integrity sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg== - -"@svgr/babel-plugin-remove-jsx-attribute@^5.4.0": - version "5.4.0" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz#6b2c770c95c874654fd5e1d5ef475b78a0a962ef" - integrity sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg== - -"@svgr/babel-plugin-remove-jsx-empty-expression@^5.0.1": - version "5.0.1" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz#25621a8915ed7ad70da6cea3d0a6dbc2ea933efd" - integrity sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA== - -"@svgr/babel-plugin-replace-jsx-attribute-value@^5.0.1": - version "5.0.1" - resolved 
"http://localhost:4873/@svgr%2fbabel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz#0b221fc57f9fcd10e91fe219e2cd0dd03145a897" - integrity sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ== - -"@svgr/babel-plugin-svg-dynamic-title@^5.4.0": - version "5.4.0" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz#139b546dd0c3186b6e5db4fefc26cb0baea729d7" - integrity sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg== - -"@svgr/babel-plugin-svg-em-dimensions@^5.4.0": - version "5.4.0" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz#6543f69526632a133ce5cabab965deeaea2234a0" - integrity sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw== - -"@svgr/babel-plugin-transform-react-native-svg@^5.4.0": - version "5.4.0" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz#00bf9a7a73f1cad3948cdab1f8dfb774750f8c80" - integrity sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q== - -"@svgr/babel-plugin-transform-svg-component@^5.5.0": - version "5.5.0" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz#583a5e2a193e214da2f3afeb0b9e8d3250126b4a" - integrity sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ== - -"@svgr/babel-preset@^5.5.0": - version "5.5.0" - resolved "http://localhost:4873/@svgr%2fbabel-preset/-/babel-preset-5.5.0.tgz#8af54f3e0a8add7b1e2b0fcd5a882c55393df327" - integrity sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig== - dependencies: - "@svgr/babel-plugin-add-jsx-attribute" 
"^5.4.0" - "@svgr/babel-plugin-remove-jsx-attribute" "^5.4.0" - "@svgr/babel-plugin-remove-jsx-empty-expression" "^5.0.1" - "@svgr/babel-plugin-replace-jsx-attribute-value" "^5.0.1" - "@svgr/babel-plugin-svg-dynamic-title" "^5.4.0" - "@svgr/babel-plugin-svg-em-dimensions" "^5.4.0" - "@svgr/babel-plugin-transform-react-native-svg" "^5.4.0" - "@svgr/babel-plugin-transform-svg-component" "^5.5.0" - -"@svgr/core@^5.5.0": - version "5.5.0" - resolved "http://localhost:4873/@svgr%2fcore/-/core-5.5.0.tgz#82e826b8715d71083120fe8f2492ec7d7874a579" - integrity sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ== - dependencies: - "@svgr/plugin-jsx" "^5.5.0" - camelcase "^6.2.0" - cosmiconfig "^7.0.0" - -"@svgr/hast-util-to-babel-ast@^5.5.0": - version "5.5.0" - resolved "http://localhost:4873/@svgr%2fhast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz#5ee52a9c2533f73e63f8f22b779f93cd432a5461" - integrity sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ== - dependencies: - "@babel/types" "^7.12.6" - -"@svgr/plugin-jsx@^5.5.0": - version "5.5.0" - resolved "http://localhost:4873/@svgr%2fplugin-jsx/-/plugin-jsx-5.5.0.tgz#1aa8cd798a1db7173ac043466d7b52236b369000" - integrity sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA== - dependencies: - "@babel/core" "^7.12.3" - "@svgr/babel-preset" "^5.5.0" - "@svgr/hast-util-to-babel-ast" "^5.5.0" - svg-parser "^2.0.2" - -"@svgr/plugin-svgo@^5.5.0": - version "5.5.0" - resolved "http://localhost:4873/@svgr%2fplugin-svgo/-/plugin-svgo-5.5.0.tgz#02da55d85320549324e201c7b2e53bf431fcc246" - integrity sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ== - dependencies: - cosmiconfig "^7.0.0" - deepmerge "^4.2.2" - svgo "^1.2.2" - -"@svgr/webpack@^5.5.0": - version "5.5.0" - resolved 
"http://localhost:4873/@svgr%2fwebpack/-/webpack-5.5.0.tgz#aae858ee579f5fa8ce6c3166ef56c6a1b381b640" - integrity sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g== - dependencies: - "@babel/core" "^7.12.3" - "@babel/plugin-transform-react-constant-elements" "^7.12.1" - "@babel/preset-env" "^7.12.1" - "@babel/preset-react" "^7.12.5" - "@svgr/core" "^5.5.0" - "@svgr/plugin-jsx" "^5.5.0" - "@svgr/plugin-svgo" "^5.5.0" - loader-utils "^2.0.0" - -"@testing-library/dom@^8.5.0": - version "8.18.1" - resolved "http://localhost:4873/@testing-library%2fdom/-/dom-8.18.1.tgz#80f91be02bc171fe5a3a7003f88207be31ac2cf3" - integrity sha512-oEvsm2B/WtcHKE+IcEeeCqNU/ltFGaVyGbpcm4g/2ytuT49jrlH9x5qRKL/H3A6yfM4YAbSbC0ceT5+9CEXnLg== - dependencies: - "@babel/code-frame" "^7.10.4" - "@babel/runtime" "^7.12.5" - "@types/aria-query" "^4.2.0" - aria-query "^5.0.0" - chalk "^4.1.0" - dom-accessibility-api "^0.5.9" - lz-string "^1.4.4" - pretty-format "^27.0.2" - -"@testing-library/jest-dom@^5.16.5": - version "5.16.5" - resolved "http://localhost:4873/@testing-library%2fjest-dom/-/jest-dom-5.16.5.tgz#3912846af19a29b2dbf32a6ae9c31ef52580074e" - integrity sha512-N5ixQ2qKpi5OLYfwQmUb/5mSV9LneAcaUfp32pn4yCnpb8r/Yz0pXFPck21dIicKmi+ta5WRAknkZCfA8refMA== - dependencies: - "@adobe/css-tools" "^4.0.1" - "@babel/runtime" "^7.9.2" - "@types/testing-library__jest-dom" "^5.9.1" - aria-query "^5.0.0" - chalk "^3.0.0" - css.escape "^1.5.1" - dom-accessibility-api "^0.5.6" - lodash "^4.17.15" - redent "^3.0.0" - -"@testing-library/react@^13.4.0": - version "13.4.0" - resolved "http://localhost:4873/@testing-library%2freact/-/react-13.4.0.tgz#6a31e3bf5951615593ad984e96b9e5e2d9380966" - integrity sha512-sXOGON+WNTh3MLE9rve97ftaZukN3oNf2KjDy7YTx6hcTO2uuLHuCGynMDhFwGw/jYf4OJ2Qk0i4i79qMNNkyw== - dependencies: - "@babel/runtime" "^7.12.5" - "@testing-library/dom" "^8.5.0" - "@types/react-dom" "^18.0.0" - -"@testing-library/user-event@^13.5.0": - version "13.5.0" - 
resolved "http://localhost:4873/@testing-library%2fuser-event/-/user-event-13.5.0.tgz#69d77007f1e124d55314a2b73fd204b333b13295" - integrity sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg== - dependencies: - "@babel/runtime" "^7.12.5" - -"@tootallnate/once@1": - version "1.1.2" - resolved "http://localhost:4873/@tootallnate%2fonce/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" - integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== - -"@trysound/sax@0.2.0": - version "0.2.0" - resolved "http://localhost:4873/@trysound%2fsax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" - integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== - -"@types/aria-query@^4.2.0": - version "4.2.2" - resolved "http://localhost:4873/@types%2faria-query/-/aria-query-4.2.2.tgz#ed4e0ad92306a704f9fb132a0cfcf77486dbe2bc" - integrity sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig== - -"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.14": - version "7.1.19" - resolved "http://localhost:4873/@types%2fbabel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460" - integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw== - dependencies: - "@babel/parser" "^7.1.0" - "@babel/types" "^7.0.0" - "@types/babel__generator" "*" - "@types/babel__template" "*" - "@types/babel__traverse" "*" - -"@types/babel__generator@*": - version "7.6.4" - resolved "http://localhost:4873/@types%2fbabel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" - integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== - dependencies: - "@babel/types" "^7.0.0" - -"@types/babel__template@*": - version "7.4.1" - resolved 
"http://localhost:4873/@types%2fbabel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" - integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== - dependencies: - "@babel/parser" "^7.1.0" - "@babel/types" "^7.0.0" - -"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6": - version "7.18.2" - resolved "http://localhost:4873/@types%2fbabel__traverse/-/babel__traverse-7.18.2.tgz#235bf339d17185bdec25e024ca19cce257cc7309" - integrity sha512-FcFaxOr2V5KZCviw1TnutEMVUVsGt4D2hP1TAfXZAMKuHYW3xQhe3jTxNPWutgCJ3/X1c5yX8ZoGVEItxKbwBg== - dependencies: - "@babel/types" "^7.3.0" - -"@types/body-parser@*": - version "1.19.2" - resolved "http://localhost:4873/@types%2fbody-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" - integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== - dependencies: - "@types/connect" "*" - "@types/node" "*" - -"@types/bonjour@^3.5.9": - version "3.5.10" - resolved "http://localhost:4873/@types%2fbonjour/-/bonjour-3.5.10.tgz#0f6aadfe00ea414edc86f5d106357cda9701e275" - integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== - dependencies: - "@types/node" "*" - -"@types/connect-history-api-fallback@^1.3.5": - version "1.3.5" - resolved "http://localhost:4873/@types%2fconnect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz#d1f7a8a09d0ed5a57aee5ae9c18ab9b803205dae" - integrity sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw== - dependencies: - "@types/express-serve-static-core" "*" - "@types/node" "*" - -"@types/connect@*": - version "3.4.35" - resolved "http://localhost:4873/@types%2fconnect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1" - integrity 
sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== - dependencies: - "@types/node" "*" - -"@types/eslint-scope@^3.7.3": - version "3.7.4" - resolved "http://localhost:4873/@types%2feslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" - integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== - dependencies: - "@types/eslint" "*" - "@types/estree" "*" - -"@types/eslint@*", "@types/eslint@^7.29.0 || ^8.4.1": - version "8.4.6" - resolved "http://localhost:4873/@types%2feslint/-/eslint-8.4.6.tgz#7976f054c1bccfcf514bff0564c0c41df5c08207" - integrity sha512-/fqTbjxyFUaYNO7VcW5g+4npmqVACz1bB7RTHYuLj+PRjw9hrCwrUXVQFpChUS0JsyEFvMZ7U/PfmvWgxJhI9g== - dependencies: - "@types/estree" "*" - "@types/json-schema" "*" - -"@types/estree@*": - version "1.0.0" - resolved "http://localhost:4873/@types%2festree/-/estree-1.0.0.tgz#5fb2e536c1ae9bf35366eed879e827fa59ca41c2" - integrity sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ== - -"@types/estree@0.0.39": - version "0.0.39" - resolved "http://localhost:4873/@types%2festree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" - integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== - -"@types/estree@^0.0.51": - version "0.0.51" - resolved "http://localhost:4873/@types%2festree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" - integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== - -"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": - version "4.17.31" - resolved "http://localhost:4873/@types%2fexpress-serve-static-core/-/express-serve-static-core-4.17.31.tgz#a1139efeab4e7323834bb0226e62ac019f474b2f" - integrity sha512-DxMhY+NAsTwMMFHBTtJFNp5qiHKJ7TeqOo23zVEM9alT1Ml27Q3xcTH0xwxn7Q0BbMcVEJOs/7aQtUWupUQN3Q== - 
dependencies: - "@types/node" "*" - "@types/qs" "*" - "@types/range-parser" "*" - -"@types/express@*", "@types/express@^4.17.13": - version "4.17.14" - resolved "http://localhost:4873/@types%2fexpress/-/express-4.17.14.tgz#143ea0557249bc1b3b54f15db4c81c3d4eb3569c" - integrity sha512-TEbt+vaPFQ+xpxFLFssxUDXj5cWCxZJjIcB7Yg0k0GMHGtgtQgpvx/MUQUeAkNbA9AAGrwkAsoeItdTgS7FMyg== - dependencies: - "@types/body-parser" "*" - "@types/express-serve-static-core" "^4.17.18" - "@types/qs" "*" - "@types/serve-static" "*" - -"@types/graceful-fs@^4.1.2": - version "4.1.5" - resolved "http://localhost:4873/@types%2fgraceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" - integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== - dependencies: - "@types/node" "*" - -"@types/html-minifier-terser@^6.0.0": - version "6.1.0" - resolved "http://localhost:4873/@types%2fhtml-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" - integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== - -"@types/http-proxy@^1.17.8": - version "1.17.9" - resolved "http://localhost:4873/@types%2fhttp-proxy/-/http-proxy-1.17.9.tgz#7f0e7931343761efde1e2bf48c40f02f3f75705a" - integrity sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw== - dependencies: - "@types/node" "*" - -"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": - version "2.0.4" - resolved "http://localhost:4873/@types%2fistanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" - integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== - -"@types/istanbul-lib-report@*": - version "3.0.0" - resolved 
"http://localhost:4873/@types%2fistanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" - integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== - dependencies: - "@types/istanbul-lib-coverage" "*" - -"@types/istanbul-reports@^3.0.0": - version "3.0.1" - resolved "http://localhost:4873/@types%2fistanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" - integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== - dependencies: - "@types/istanbul-lib-report" "*" - -"@types/jest@*": - version "29.1.2" - resolved "http://localhost:4873/@types%2fjest/-/jest-29.1.2.tgz#7ad8077043ab5f6c108c8111bcc1d224e5600a87" - integrity sha512-y+nlX0h87U0R+wsGn6EBuoRWYyv3KFtwRNP3QWp9+k2tJ2/bqcGS3UxD7jgT+tiwJWWq3UsyV4Y+T6rsMT4XMg== - dependencies: - expect "^29.0.0" - pretty-format "^29.0.0" - -"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": - version "7.0.11" - resolved "http://localhost:4873/@types%2fjson-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" - integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== - -"@types/json5@^0.0.29": - version "0.0.29" - resolved "http://localhost:4873/@types%2fjson5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" - integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== - -"@types/mime@*": - version "3.0.1" - resolved "http://localhost:4873/@types%2fmime/-/mime-3.0.1.tgz#5f8f2bca0a5863cb69bc0b0acd88c96cb1d4ae10" - integrity sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA== - -"@types/node@*": - version "18.8.3" - resolved 
"http://localhost:4873/@types%2fnode/-/node-18.8.3.tgz#ce750ab4017effa51aed6a7230651778d54e327c" - integrity sha512-0os9vz6BpGwxGe9LOhgP/ncvYN5Tx1fNcd2TM3rD/aCGBkysb+ZWpXEocG24h6ZzOi13+VB8HndAQFezsSOw1w== - -"@types/parse-json@^4.0.0": - version "4.0.0" - resolved "http://localhost:4873/@types%2fparse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" - integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== - -"@types/prettier@^2.1.5": - version "2.7.1" - resolved "http://localhost:4873/@types%2fprettier/-/prettier-2.7.1.tgz#dfd20e2dc35f027cdd6c1908e80a5ddc7499670e" - integrity sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow== - -"@types/prop-types@*": - version "15.7.5" - resolved "http://localhost:4873/@types%2fprop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" - integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== - -"@types/q@^1.5.1": - version "1.5.5" - resolved "http://localhost:4873/@types%2fq/-/q-1.5.5.tgz#75a2a8e7d8ab4b230414505d92335d1dcb53a6df" - integrity sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ== - -"@types/qs@*": - version "6.9.7" - resolved "http://localhost:4873/@types%2fqs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" - integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== - -"@types/range-parser@*": - version "1.2.4" - resolved "http://localhost:4873/@types%2frange-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" - integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== - -"@types/react-dom@^18.0.0": - version "18.0.6" - resolved "http://localhost:4873/@types%2freact-dom/-/react-dom-18.0.6.tgz#36652900024842b74607a17786b6662dd1e103a1" - integrity 
sha512-/5OFZgfIPSwy+YuIBP/FgJnQnsxhZhjjrnxudMddeblOouIodEQ75X14Rr4wGSG/bknL+Omy9iWlLo1u/9GzAA== - dependencies: - "@types/react" "*" - -"@types/react@*": - version "18.0.21" - resolved "http://localhost:4873/@types%2freact/-/react-18.0.21.tgz#b8209e9626bb00a34c76f55482697edd2b43cc67" - integrity sha512-7QUCOxvFgnD5Jk8ZKlUAhVcRj7GuJRjnjjiY/IUBWKgOlnvDvTMLD4RTF7NPyVmbRhNrbomZiOepg7M/2Kj1mA== - dependencies: - "@types/prop-types" "*" - "@types/scheduler" "*" - csstype "^3.0.2" - -"@types/resolve@1.17.1": - version "1.17.1" - resolved "http://localhost:4873/@types%2fresolve/-/resolve-1.17.1.tgz#3afd6ad8967c77e4376c598a82ddd58f46ec45d6" - integrity sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw== - dependencies: - "@types/node" "*" - -"@types/retry@0.12.0": - version "0.12.0" - resolved "http://localhost:4873/@types%2fretry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" - integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== - -"@types/scheduler@*": - version "0.16.2" - resolved "http://localhost:4873/@types%2fscheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" - integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== - -"@types/serve-index@^1.9.1": - version "1.9.1" - resolved "http://localhost:4873/@types%2fserve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278" - integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== - dependencies: - "@types/express" "*" - -"@types/serve-static@*", "@types/serve-static@^1.13.10": - version "1.15.0" - resolved "http://localhost:4873/@types%2fserve-static/-/serve-static-1.15.0.tgz#c7930ff61afb334e121a9da780aac0d9b8f34155" - integrity sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg== - dependencies: - "@types/mime" "*" - "@types/node" 
"*" - -"@types/sockjs@^0.3.33": - version "0.3.33" - resolved "http://localhost:4873/@types%2fsockjs/-/sockjs-0.3.33.tgz#570d3a0b99ac995360e3136fd6045113b1bd236f" - integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== - dependencies: - "@types/node" "*" - -"@types/stack-utils@^2.0.0": - version "2.0.1" - resolved "http://localhost:4873/@types%2fstack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" - integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== - -"@types/testing-library__jest-dom@^5.9.1": - version "5.14.5" - resolved "http://localhost:4873/@types%2ftesting-library__jest-dom/-/testing-library__jest-dom-5.14.5.tgz#d113709c90b3c75fdb127ec338dad7d5f86c974f" - integrity sha512-SBwbxYoyPIvxHbeHxTZX2Pe/74F/tX2/D3mMvzabdeJ25bBojfW0TyB8BHrbq/9zaaKICJZjLP+8r6AeZMFCuQ== - dependencies: - "@types/jest" "*" - -"@types/trusted-types@^2.0.2": - version "2.0.2" - resolved "http://localhost:4873/@types%2ftrusted-types/-/trusted-types-2.0.2.tgz#fc25ad9943bcac11cceb8168db4f275e0e72e756" - integrity sha512-F5DIZ36YVLE+PN+Zwws4kJogq47hNgX3Nx6WyDJ3kcplxyke3XIzB8uK5n/Lpm1HBsbGzd6nmGehL8cPekP+Tg== - -"@types/ws@^8.5.1": - version "8.5.3" - resolved "http://localhost:4873/@types%2fws/-/ws-8.5.3.tgz#7d25a1ffbecd3c4f2d35068d0b283c037003274d" - integrity sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w== - dependencies: - "@types/node" "*" - -"@types/yargs-parser@*": - version "21.0.0" - resolved "http://localhost:4873/@types%2fyargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" - integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== - -"@types/yargs@^16.0.0": - version "16.0.4" - resolved "http://localhost:4873/@types%2fyargs/-/yargs-16.0.4.tgz#26aad98dd2c2a38e421086ea9ad42b9e51642977" - integrity 
sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw== - dependencies: - "@types/yargs-parser" "*" - -"@types/yargs@^17.0.8": - version "17.0.13" - resolved "http://localhost:4873/@types%2fyargs/-/yargs-17.0.13.tgz#34cced675ca1b1d51fcf4d34c3c6f0fa142a5c76" - integrity sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg== - dependencies: - "@types/yargs-parser" "*" - -"@typescript-eslint/eslint-plugin@^5.5.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2feslint-plugin/-/eslint-plugin-5.39.0.tgz#778b2d9e7f293502c7feeea6c74dca8eb3e67511" - integrity sha512-xVfKOkBm5iWMNGKQ2fwX5GVgBuHmZBO1tCRwXmY5oAIsPscfwm2UADDuNB8ZVYCtpQvJK4xpjrK7jEhcJ0zY9A== - dependencies: - "@typescript-eslint/scope-manager" "5.39.0" - "@typescript-eslint/type-utils" "5.39.0" - "@typescript-eslint/utils" "5.39.0" - debug "^4.3.4" - ignore "^5.2.0" - regexpp "^3.2.0" - semver "^7.3.7" - tsutils "^3.21.0" - -"@typescript-eslint/experimental-utils@^5.0.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2fexperimental-utils/-/experimental-utils-5.39.0.tgz#9263bb72b57449cc2f07ffb7fd4e12d0160b7f5e" - integrity sha512-n5N9kG/oGu2xXhHzsWzn94s6CWoiUj59FPU2dF2IQZxPftw+q6Jm5sV2vj5qTgAElRooHhrgtl2gxBQDCPt6WA== - dependencies: - "@typescript-eslint/utils" "5.39.0" - -"@typescript-eslint/parser@^5.5.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2fparser/-/parser-5.39.0.tgz#93fa0bc980a3a501e081824f6097f7ca30aaa22b" - integrity sha512-PhxLjrZnHShe431sBAGHaNe6BDdxAASDySgsBCGxcBecVCi8NQWxQZMcizNA4g0pN51bBAn/FUfkWG3SDVcGlA== - dependencies: - "@typescript-eslint/scope-manager" "5.39.0" - "@typescript-eslint/types" "5.39.0" - "@typescript-eslint/typescript-estree" "5.39.0" - debug "^4.3.4" - -"@typescript-eslint/scope-manager@5.39.0": - version "5.39.0" - resolved 
"http://localhost:4873/@typescript-eslint%2fscope-manager/-/scope-manager-5.39.0.tgz#873e1465afa3d6c78d8ed2da68aed266a08008d0" - integrity sha512-/I13vAqmG3dyqMVSZPjsbuNQlYS082Y7OMkwhCfLXYsmlI0ca4nkL7wJ/4gjX70LD4P8Hnw1JywUVVAwepURBw== - dependencies: - "@typescript-eslint/types" "5.39.0" - "@typescript-eslint/visitor-keys" "5.39.0" - -"@typescript-eslint/type-utils@5.39.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2ftype-utils/-/type-utils-5.39.0.tgz#0a8c00f95dce4335832ad2dc6bc431c14e32a0a6" - integrity sha512-KJHJkOothljQWzR3t/GunL0TPKY+fGJtnpl+pX+sJ0YiKTz3q2Zr87SGTmFqsCMFrLt5E0+o+S6eQY0FAXj9uA== - dependencies: - "@typescript-eslint/typescript-estree" "5.39.0" - "@typescript-eslint/utils" "5.39.0" - debug "^4.3.4" - tsutils "^3.21.0" - -"@typescript-eslint/types@5.39.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2ftypes/-/types-5.39.0.tgz#f4e9f207ebb4579fd854b25c0bf64433bb5ed78d" - integrity sha512-gQMZrnfEBFXK38hYqt8Lkwt8f4U6yq+2H5VDSgP/qiTzC8Nw8JO3OuSUOQ2qW37S/dlwdkHDntkZM6SQhKyPhw== - -"@typescript-eslint/typescript-estree@5.39.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2ftypescript-estree/-/typescript-estree-5.39.0.tgz#c0316aa04a1a1f4f7f9498e3c13ef1d3dc4cf88b" - integrity sha512-qLFQP0f398sdnogJoLtd43pUgB18Q50QSA+BTE5h3sUxySzbWDpTSdgt4UyxNSozY/oDK2ta6HVAzvGgq8JYnA== - dependencies: - "@typescript-eslint/types" "5.39.0" - "@typescript-eslint/visitor-keys" "5.39.0" - debug "^4.3.4" - globby "^11.1.0" - is-glob "^4.0.3" - semver "^7.3.7" - tsutils "^3.21.0" - -"@typescript-eslint/utils@5.39.0", "@typescript-eslint/utils@^5.13.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2futils/-/utils-5.39.0.tgz#b7063cca1dcf08d1d21b0d91db491161ad0be110" - integrity sha512-+DnY5jkpOpgj+EBtYPyHRjXampJfC0yUZZzfzLuUWVZvCuKqSdJVC8UhdWipIw7VKNTfwfAPiOWzYkAwuIhiAg== - dependencies: - "@types/json-schema" "^7.0.9" - "@typescript-eslint/scope-manager" "5.39.0" - 
"@typescript-eslint/types" "5.39.0" - "@typescript-eslint/typescript-estree" "5.39.0" - eslint-scope "^5.1.1" - eslint-utils "^3.0.0" - -"@typescript-eslint/visitor-keys@5.39.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2fvisitor-keys/-/visitor-keys-5.39.0.tgz#8f41f7d241b47257b081ddba5d3ce80deaae61e2" - integrity sha512-yyE3RPwOG+XJBLrhvsxAidUgybJVQ/hG8BhiJo0k8JSAYfk/CshVcxf0HwP4Jt7WZZ6vLmxdo1p6EyN3tzFTkg== - dependencies: - "@typescript-eslint/types" "5.39.0" - eslint-visitor-keys "^3.3.0" - -"@webassemblyjs/ast@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" - integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== - dependencies: - "@webassemblyjs/helper-numbers" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - -"@webassemblyjs/floating-point-hex-parser@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2ffloating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" - integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== - -"@webassemblyjs/helper-api-error@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fhelper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" - integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== - -"@webassemblyjs/helper-buffer@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fhelper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" - integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== - -"@webassemblyjs/helper-numbers@1.11.1": - version "1.11.1" - resolved 
"http://localhost:4873/@webassemblyjs%2fhelper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" - integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== - dependencies: - "@webassemblyjs/floating-point-hex-parser" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" - "@xtuc/long" "4.2.2" - -"@webassemblyjs/helper-wasm-bytecode@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fhelper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" - integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== - -"@webassemblyjs/helper-wasm-section@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fhelper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" - integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - -"@webassemblyjs/ieee754@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" - integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== - dependencies: - "@xtuc/ieee754" "^1.2.0" - -"@webassemblyjs/leb128@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fleb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" - integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== - dependencies: - "@xtuc/long" "4.2.2" - -"@webassemblyjs/utf8@1.11.1": - version "1.11.1" - resolved 
"http://localhost:4873/@webassemblyjs%2futf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" - integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== - -"@webassemblyjs/wasm-edit@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fwasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" - integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/helper-wasm-section" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-opt" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - "@webassemblyjs/wast-printer" "1.11.1" - -"@webassemblyjs/wasm-gen@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fwasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" - integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wasm-opt@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fwasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" - integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - -"@webassemblyjs/wasm-parser@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fwasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" - integrity 
sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wast-printer@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fwast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" - integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@xtuc/long" "4.2.2" - -"@xtuc/ieee754@^1.2.0": - version "1.2.0" - resolved "http://localhost:4873/@xtuc%2fieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" - integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== - -"@xtuc/long@4.2.2": - version "4.2.2" - resolved "http://localhost:4873/@xtuc%2flong/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" - integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== - -abab@^2.0.3, abab@^2.0.5: - version "2.0.6" - resolved "http://localhost:4873/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" - integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== - -accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: - version "1.3.8" - resolved "http://localhost:4873/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" - integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== - dependencies: - mime-types "~2.1.34" - negotiator "0.6.3" - -acorn-globals@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" - integrity 
sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== - dependencies: - acorn "^7.1.1" - acorn-walk "^7.1.1" - -acorn-import-assertions@^1.7.6: - version "1.8.0" - resolved "http://localhost:4873/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" - integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== - -acorn-jsx@^5.3.2: - version "5.3.2" - resolved "http://localhost:4873/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" - integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== - -acorn-node@^1.8.2: - version "1.8.2" - resolved "http://localhost:4873/acorn-node/-/acorn-node-1.8.2.tgz#114c95d64539e53dede23de8b9d96df7c7ae2af8" - integrity sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A== - dependencies: - acorn "^7.0.0" - acorn-walk "^7.0.0" - xtend "^4.0.2" - -acorn-walk@^7.0.0, acorn-walk@^7.1.1: - version "7.2.0" - resolved "http://localhost:4873/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" - integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== - -acorn@^7.0.0, acorn@^7.1.1: - version "7.4.1" - resolved "http://localhost:4873/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" - integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== - -acorn@^8.2.4, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.0: - version "8.8.0" - resolved "http://localhost:4873/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" - integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== - -address@^1.0.1, address@^1.1.2: - version "1.2.1" - resolved 
"http://localhost:4873/address/-/address-1.2.1.tgz#25bb61095b7522d65b357baa11bc05492d4c8acd" - integrity sha512-B+6bi5D34+fDYENiH5qOlA0cV2rAGKuWZ9LeyUUehbXy8e0VS9e498yO0Jeeh+iM+6KbfudHTFjXw2MmJD4QRA== - -adjust-sourcemap-loader@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz#fc4a0fd080f7d10471f30a7320f25560ade28c99" - integrity sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A== - dependencies: - loader-utils "^2.0.0" - regex-parser "^2.2.11" - -agent-base@6: - version "6.0.2" - resolved "http://localhost:4873/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" - integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== - dependencies: - debug "4" - -ajv-formats@^2.1.1: - version "2.1.1" - resolved "http://localhost:4873/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" - integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== - dependencies: - ajv "^8.0.0" - -ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: - version "3.5.2" - resolved "http://localhost:4873/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" - integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== - -ajv-keywords@^5.0.0: - version "5.1.0" - resolved "http://localhost:4873/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" - integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== - dependencies: - fast-deep-equal "^3.1.3" - -ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: - version "6.12.6" - resolved "http://localhost:4873/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" - integrity 
sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - -ajv@^8.0.0, ajv@^8.6.0, ajv@^8.8.0: - version "8.11.0" - resolved "http://localhost:4873/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f" - integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg== - dependencies: - fast-deep-equal "^3.1.1" - json-schema-traverse "^1.0.0" - require-from-string "^2.0.2" - uri-js "^4.2.2" - -ansi-escapes@^4.2.1, ansi-escapes@^4.3.1: - version "4.3.2" - resolved "http://localhost:4873/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" - integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== - dependencies: - type-fest "^0.21.3" - -ansi-html-community@^0.0.8: - version "0.0.8" - resolved "http://localhost:4873/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" - integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== - -ansi-regex@^5.0.1: - version "5.0.1" - resolved "http://localhost:4873/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" - integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== - -ansi-regex@^6.0.1: - version "6.0.1" - resolved "http://localhost:4873/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" - integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== - -ansi-styles@^3.2.1: - version "3.2.1" - resolved "http://localhost:4873/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== 
- dependencies: - color-convert "^1.9.0" - -ansi-styles@^4.0.0, ansi-styles@^4.1.0: - version "4.3.0" - resolved "http://localhost:4873/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" - integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== - dependencies: - color-convert "^2.0.1" - -ansi-styles@^5.0.0: - version "5.2.0" - resolved "http://localhost:4873/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" - integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== - -anymatch@^3.0.3, anymatch@~3.1.2: - version "3.1.2" - resolved "http://localhost:4873/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" - integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== - dependencies: - normalize-path "^3.0.0" - picomatch "^2.0.4" - -arg@^5.0.2: - version "5.0.2" - resolved "http://localhost:4873/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" - integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== - -argparse@^1.0.7: - version "1.0.10" - resolved "http://localhost:4873/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== - dependencies: - sprintf-js "~1.0.2" - -argparse@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" - integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== - -aria-query@^4.2.2: - version "4.2.2" - resolved "http://localhost:4873/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b" - integrity sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA== - 
dependencies: - "@babel/runtime" "^7.10.2" - "@babel/runtime-corejs3" "^7.10.2" - -aria-query@^5.0.0: - version "5.0.2" - resolved "http://localhost:4873/aria-query/-/aria-query-5.0.2.tgz#0b8a744295271861e1d933f8feca13f9b70cfdc1" - integrity sha512-eigU3vhqSO+Z8BKDnVLN/ompjhf3pYzecKXz8+whRy+9gZu8n1TCGfwzQUUPnqdHl9ax1Hr9031orZ+UOEYr7Q== - -array-flatten@1.1.1: - version "1.1.1" - resolved "http://localhost:4873/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" - integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== - -array-flatten@^2.1.2: - version "2.1.2" - resolved "http://localhost:4873/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" - integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== - -array-includes@^3.1.4, array-includes@^3.1.5: - version "3.1.5" - resolved "http://localhost:4873/array-includes/-/array-includes-3.1.5.tgz#2c320010db8d31031fd2a5f6b3bbd4b1aad31bdb" - integrity sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.19.5" - get-intrinsic "^1.1.1" - is-string "^1.0.7" - -array-union@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" - integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== - -array.prototype.flat@^1.2.5: - version "1.3.0" - resolved "http://localhost:4873/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b" - integrity sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.2" - es-shim-unscopables "^1.0.0" - 
-array.prototype.flatmap@^1.3.0: - version "1.3.0" - resolved "http://localhost:4873/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz#a7e8ed4225f4788a70cd910abcf0791e76a5534f" - integrity sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.2" - es-shim-unscopables "^1.0.0" - -array.prototype.reduce@^1.0.4: - version "1.0.4" - resolved "http://localhost:4873/array.prototype.reduce/-/array.prototype.reduce-1.0.4.tgz#8167e80089f78bff70a99e20bd4201d4663b0a6f" - integrity sha512-WnM+AjG/DvLRLo4DDl+r+SvCzYtD2Jd9oeBYMcEaI7t3fFrHY9M53/wdLcTvmZNQ70IU6Htj0emFkZ5TS+lrdw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.2" - es-array-method-boxes-properly "^1.0.0" - is-string "^1.0.7" - -asap@~2.0.6: - version "2.0.6" - resolved "http://localhost:4873/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" - integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== - -ast-types-flow@^0.0.7: - version "0.0.7" - resolved "http://localhost:4873/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" - integrity sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag== - -async@^3.2.3: - version "3.2.4" - resolved "http://localhost:4873/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" - integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== - -asynckit@^0.4.0: - version "0.4.0" - resolved "http://localhost:4873/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== - -at-least-node@^1.0.0: - version "1.0.0" - resolved 
"http://localhost:4873/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" - integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== - -autoprefixer@^10.4.11, autoprefixer@^10.4.12: - version "10.4.12" - resolved "http://localhost:4873/autoprefixer/-/autoprefixer-10.4.12.tgz#183f30bf0b0722af54ee5ef257f7d4320bb33129" - integrity sha512-WrCGV9/b97Pa+jtwf5UGaRjgQIg7OK3D06GnoYoZNcG1Xb8Gt3EfuKjlhh9i/VtT16g6PYjZ69jdJ2g8FxSC4Q== - dependencies: - browserslist "^4.21.4" - caniuse-lite "^1.0.30001407" - fraction.js "^4.2.0" - normalize-range "^0.1.2" - picocolors "^1.0.0" - postcss-value-parser "^4.2.0" - -axe-core@^4.4.3: - version "4.4.3" - resolved "http://localhost:4873/axe-core/-/axe-core-4.4.3.tgz#11c74d23d5013c0fa5d183796729bc3482bd2f6f" - integrity sha512-32+ub6kkdhhWick/UjvEwRchgoetXqTK14INLqbGm5U2TzBkBNF3nQtLYm8ovxSkQWArjEQvftCKryjZaATu3w== - -axobject-query@^2.2.0: - version "2.2.0" - resolved "http://localhost:4873/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be" - integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA== - -babel-jest@^27.4.2, babel-jest@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/babel-jest/-/babel-jest-27.5.1.tgz#a1bf8d61928edfefd21da27eb86a695bfd691444" - integrity sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg== - dependencies: - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/babel__core" "^7.1.14" - babel-plugin-istanbul "^6.1.1" - babel-preset-jest "^27.5.1" - chalk "^4.0.0" - graceful-fs "^4.2.9" - slash "^3.0.0" - -babel-loader@^8.2.3: - version "8.2.5" - resolved "http://localhost:4873/babel-loader/-/babel-loader-8.2.5.tgz#d45f585e654d5a5d90f5350a779d7647c5ed512e" - integrity sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ== - dependencies: - 
find-cache-dir "^3.3.1" - loader-utils "^2.0.0" - make-dir "^3.1.0" - schema-utils "^2.6.5" - -babel-plugin-dynamic-import-node@^2.3.3: - version "2.3.3" - resolved "http://localhost:4873/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3" - integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ== - dependencies: - object.assign "^4.1.0" - -babel-plugin-istanbul@^6.1.1: - version "6.1.1" - resolved "http://localhost:4873/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" - integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@istanbuljs/load-nyc-config" "^1.0.0" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-instrument "^5.0.4" - test-exclude "^6.0.0" - -babel-plugin-jest-hoist@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz#9be98ecf28c331eb9f5df9c72d6f89deb8181c2e" - integrity sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ== - dependencies: - "@babel/template" "^7.3.3" - "@babel/types" "^7.3.3" - "@types/babel__core" "^7.0.0" - "@types/babel__traverse" "^7.0.6" - -babel-plugin-macros@^3.1.0: - version "3.1.0" - resolved "http://localhost:4873/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz#9ef6dc74deb934b4db344dc973ee851d148c50c1" - integrity sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg== - dependencies: - "@babel/runtime" "^7.12.5" - cosmiconfig "^7.0.0" - resolve "^1.19.0" - -babel-plugin-named-asset-import@^0.3.8: - version "0.3.8" - resolved "http://localhost:4873/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.8.tgz#6b7fa43c59229685368683c28bc9734f24524cc2" - integrity 
sha512-WXiAc++qo7XcJ1ZnTYGtLxmBCVbddAml3CEXgWaBzNzLNoxtQ8AiGEFDMOhot9XjTCQbvP5E77Fj9Gk924f00Q== - -babel-plugin-polyfill-corejs2@^0.3.3: - version "0.3.3" - resolved "http://localhost:4873/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.3.tgz#5d1bd3836d0a19e1b84bbf2d9640ccb6f951c122" - integrity sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q== - dependencies: - "@babel/compat-data" "^7.17.7" - "@babel/helper-define-polyfill-provider" "^0.3.3" - semver "^6.1.1" - -babel-plugin-polyfill-corejs3@^0.6.0: - version "0.6.0" - resolved "http://localhost:4873/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.6.0.tgz#56ad88237137eade485a71b52f72dbed57c6230a" - integrity sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.3.3" - core-js-compat "^3.25.1" - -babel-plugin-polyfill-regenerator@^0.4.1: - version "0.4.1" - resolved "http://localhost:4873/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.1.tgz#390f91c38d90473592ed43351e801a9d3e0fd747" - integrity sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.3.3" - -babel-plugin-transform-react-remove-prop-types@^0.4.24: - version "0.4.24" - resolved "http://localhost:4873/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a" - integrity sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA== - -babel-preset-current-node-syntax@^1.0.0: - version "1.0.1" - resolved "http://localhost:4873/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" - integrity 
sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== - dependencies: - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-bigint" "^7.8.3" - "@babel/plugin-syntax-class-properties" "^7.8.3" - "@babel/plugin-syntax-import-meta" "^7.8.3" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.8.3" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-top-level-await" "^7.8.3" - -babel-preset-jest@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz#91f10f58034cb7989cb4f962b69fa6eef6a6bc81" - integrity sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag== - dependencies: - babel-plugin-jest-hoist "^27.5.1" - babel-preset-current-node-syntax "^1.0.0" - -babel-preset-react-app@^10.0.1: - version "10.0.1" - resolved "http://localhost:4873/babel-preset-react-app/-/babel-preset-react-app-10.0.1.tgz#ed6005a20a24f2c88521809fa9aea99903751584" - integrity sha512-b0D9IZ1WhhCWkrTXyFuIIgqGzSkRIH5D5AmB0bXbzYAB1OBAwHcUeyWW2LorutLWF5btNo/N7r/cIdmvvKJlYg== - dependencies: - "@babel/core" "^7.16.0" - "@babel/plugin-proposal-class-properties" "^7.16.0" - "@babel/plugin-proposal-decorators" "^7.16.4" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.0" - "@babel/plugin-proposal-numeric-separator" "^7.16.0" - "@babel/plugin-proposal-optional-chaining" "^7.16.0" - "@babel/plugin-proposal-private-methods" "^7.16.0" - "@babel/plugin-transform-flow-strip-types" "^7.16.0" - "@babel/plugin-transform-react-display-name" "^7.16.0" - "@babel/plugin-transform-runtime" "^7.16.4" - "@babel/preset-env" "^7.16.4" - "@babel/preset-react" 
"^7.16.0" - "@babel/preset-typescript" "^7.16.0" - "@babel/runtime" "^7.16.3" - babel-plugin-macros "^3.1.0" - babel-plugin-transform-react-remove-prop-types "^0.4.24" - -balanced-match@^1.0.0: - version "1.0.2" - resolved "http://localhost:4873/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" - integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== - -batch@0.6.1: - version "0.6.1" - resolved "http://localhost:4873/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" - integrity sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw== - -bfj@^7.0.2: - version "7.0.2" - resolved "http://localhost:4873/bfj/-/bfj-7.0.2.tgz#1988ce76f3add9ac2913fd8ba47aad9e651bfbb2" - integrity sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw== - dependencies: - bluebird "^3.5.5" - check-types "^11.1.1" - hoopy "^0.1.4" - tryer "^1.0.1" - -big.js@^5.2.2: - version "5.2.2" - resolved "http://localhost:4873/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" - integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== - -binary-extensions@^2.0.0: - version "2.2.0" - resolved "http://localhost:4873/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" - integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== - -bluebird@^3.5.5: - version "3.7.2" - resolved "http://localhost:4873/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" - integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== - -body-parser@1.20.0: - version "1.20.0" - resolved "http://localhost:4873/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" - integrity 
sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== - dependencies: - bytes "3.1.2" - content-type "~1.0.4" - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - http-errors "2.0.0" - iconv-lite "0.4.24" - on-finished "2.4.1" - qs "6.10.3" - raw-body "2.5.1" - type-is "~1.6.18" - unpipe "1.0.0" - -bonjour-service@^1.0.11: - version "1.0.14" - resolved "http://localhost:4873/bonjour-service/-/bonjour-service-1.0.14.tgz#c346f5bc84e87802d08f8d5a60b93f758e514ee7" - integrity sha512-HIMbgLnk1Vqvs6B4Wq5ep7mxvj9sGz5d1JJyDNSGNIdA/w2MCz6GTjWTdjqOJV1bEPj+6IkxDvWNFKEBxNt4kQ== - dependencies: - array-flatten "^2.1.2" - dns-equal "^1.0.0" - fast-deep-equal "^3.1.3" - multicast-dns "^7.2.5" - -boolbase@^1.0.0, boolbase@~1.0.0: - version "1.0.0" - resolved "http://localhost:4873/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" - integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== - -brace-expansion@^1.1.7: - version "1.1.11" - resolved "http://localhost:4873/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -brace-expansion@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" - integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== - dependencies: - balanced-match "^1.0.0" - -braces@^3.0.2, braces@~3.0.2: - version "3.0.2" - resolved "http://localhost:4873/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== - dependencies: - fill-range "^7.0.1" - -browser-process-hrtime@^1.0.0: - version "1.0.0" 
- resolved "http://localhost:4873/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" - integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== - -browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.18.1, browserslist@^4.20.3, browserslist@^4.21.3, browserslist@^4.21.4: - version "4.21.4" - resolved "http://localhost:4873/browserslist/-/browserslist-4.21.4.tgz#e7496bbc67b9e39dd0f98565feccdcb0d4ff6987" - integrity sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw== - dependencies: - caniuse-lite "^1.0.30001400" - electron-to-chromium "^1.4.251" - node-releases "^2.0.6" - update-browserslist-db "^1.0.9" - -bser@2.1.1: - version "2.1.1" - resolved "http://localhost:4873/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" - integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== - dependencies: - node-int64 "^0.4.0" - -buffer-from@^1.0.0: - version "1.1.2" - resolved "http://localhost:4873/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" - integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== - -builtin-modules@^3.1.0: - version "3.3.0" - resolved "http://localhost:4873/builtin-modules/-/builtin-modules-3.3.0.tgz#cae62812b89801e9656336e46223e030386be7b6" - integrity sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw== - -bytes@3.0.0: - version "3.0.0" - resolved "http://localhost:4873/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" - integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== - -bytes@3.1.2: - version "3.1.2" - resolved "http://localhost:4873/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" - integrity 
sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== - -call-bind@^1.0.0, call-bind@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" - integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== - dependencies: - function-bind "^1.1.1" - get-intrinsic "^1.0.2" - -callsites@^3.0.0: - version "3.1.0" - resolved "http://localhost:4873/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" - integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== - -camel-case@^4.1.2: - version "4.1.2" - resolved "http://localhost:4873/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" - integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== - dependencies: - pascal-case "^3.1.2" - tslib "^2.0.3" - -camelcase-css@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" - integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== - -camelcase@^5.3.1: - version "5.3.1" - resolved "http://localhost:4873/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" - integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== - -camelcase@^6.2.0, camelcase@^6.2.1: - version "6.3.0" - resolved "http://localhost:4873/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" - integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== - -caniuse-api@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" - integrity 
sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== - dependencies: - browserslist "^4.0.0" - caniuse-lite "^1.0.0" - lodash.memoize "^4.1.2" - lodash.uniq "^4.5.0" - -caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001400, caniuse-lite@^1.0.30001407: - version "1.0.30001416" - resolved "http://localhost:4873/caniuse-lite/-/caniuse-lite-1.0.30001416.tgz#29692af8a6a11412f2d3cf9a59d588fcdd21ce4c" - integrity sha512-06wzzdAkCPZO+Qm4e/eNghZBDfVNDsCgw33T27OwBH9unE9S478OYw//Q2L7Npf/zBzs7rjZOszIFQkwQKAEqA== - -case-sensitive-paths-webpack-plugin@^2.4.0: - version "2.4.0" - resolved "http://localhost:4873/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" - integrity sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw== - -chalk@^2.0.0, chalk@^2.4.1: - version "2.4.2" - resolved "http://localhost:4873/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - -chalk@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" - integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: - version "4.1.2" - resolved "http://localhost:4873/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" - integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -char-regex@^1.0.2: - version "1.0.2" - resolved 
"http://localhost:4873/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" - integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== - -char-regex@^2.0.0: - version "2.0.1" - resolved "http://localhost:4873/char-regex/-/char-regex-2.0.1.tgz#6dafdb25f9d3349914079f010ba8d0e6ff9cd01e" - integrity sha512-oSvEeo6ZUD7NepqAat3RqoucZ5SeqLJgOvVIwkafu6IP3V0pO38s/ypdVUmDDK6qIIHNlYHJAKX9E7R7HoKElw== - -check-types@^11.1.1: - version "11.1.2" - resolved "http://localhost:4873/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f" - integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ== - -chokidar@^3.4.2, chokidar@^3.5.3: - version "3.5.3" - resolved "http://localhost:4873/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" - integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== - dependencies: - anymatch "~3.1.2" - braces "~3.0.2" - glob-parent "~5.1.2" - is-binary-path "~2.1.0" - is-glob "~4.0.1" - normalize-path "~3.0.0" - readdirp "~3.6.0" - optionalDependencies: - fsevents "~2.3.2" - -chrome-trace-event@^1.0.2: - version "1.0.3" - resolved "http://localhost:4873/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" - integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== - -ci-info@^3.2.0: - version "3.4.0" - resolved "http://localhost:4873/ci-info/-/ci-info-3.4.0.tgz#b28484fd436cbc267900364f096c9dc185efb251" - integrity sha512-t5QdPT5jq3o262DOQ8zA6E1tlH2upmUc4Hlvrbx1pGYJuiiHl7O7rvVNI+l8HTVhd/q3Qc9vqimkNk5yiXsAug== - -cjs-module-lexer@^1.0.0: - version "1.2.2" - resolved "http://localhost:4873/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" - integrity 
sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== - -clean-css@^5.2.2: - version "5.3.1" - resolved "http://localhost:4873/clean-css/-/clean-css-5.3.1.tgz#d0610b0b90d125196a2894d35366f734e5d7aa32" - integrity sha512-lCr8OHhiWCTw4v8POJovCoh4T7I9U11yVsPjMWWnnMmp9ZowCxyad1Pathle/9HjaDp+fdQKjO9fQydE6RHTZg== - dependencies: - source-map "~0.6.0" - -cliui@^7.0.2: - version "7.0.4" - resolved "http://localhost:4873/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" - integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== - dependencies: - string-width "^4.2.0" - strip-ansi "^6.0.0" - wrap-ansi "^7.0.0" - -clone-deep@^4.0.1: - version "4.0.1" - resolved "http://localhost:4873/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" - integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== - dependencies: - is-plain-object "^2.0.4" - kind-of "^6.0.2" - shallow-clone "^3.0.0" - -co@^4.6.0: - version "4.6.0" - resolved "http://localhost:4873/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" - integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== - -coa@^2.0.2: - version "2.0.2" - resolved "http://localhost:4873/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3" - integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA== - dependencies: - "@types/q" "^1.5.1" - chalk "^2.4.1" - q "^1.1.2" - -collect-v8-coverage@^1.0.0: - version "1.0.1" - resolved "http://localhost:4873/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" - integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== - -color-convert@^1.9.0: - version "1.9.3" - resolved 
"http://localhost:4873/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - -color-convert@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" - integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== - dependencies: - color-name "~1.1.4" - -color-name@1.1.3: - version "1.1.3" - resolved "http://localhost:4873/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - -color-name@^1.1.4, color-name@~1.1.4: - version "1.1.4" - resolved "http://localhost:4873/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - -colord@^2.9.1: - version "2.9.3" - resolved "http://localhost:4873/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" - integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== - -colorette@^2.0.10: - version "2.0.19" - resolved "http://localhost:4873/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798" - integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ== - -combined-stream@^1.0.8: - version "1.0.8" - resolved "http://localhost:4873/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - -commander@^2.20.0: - version "2.20.3" - resolved 
"http://localhost:4873/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" - integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== - -commander@^7.2.0: - version "7.2.0" - resolved "http://localhost:4873/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" - integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== - -commander@^8.3.0: - version "8.3.0" - resolved "http://localhost:4873/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" - integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== - -common-path-prefix@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" - integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== - -common-tags@^1.8.0: - version "1.8.2" - resolved "http://localhost:4873/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" - integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== - -commondir@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" - integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== - -compressible@~2.0.16: - version "2.0.18" - resolved "http://localhost:4873/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" - integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== - dependencies: - mime-db ">= 1.43.0 < 2" - -compression@^1.7.4: - version "1.7.4" - resolved "http://localhost:4873/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" - integrity 
sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== - dependencies: - accepts "~1.3.5" - bytes "3.0.0" - compressible "~2.0.16" - debug "2.6.9" - on-headers "~1.0.2" - safe-buffer "5.1.2" - vary "~1.1.2" - -concat-map@0.0.1: - version "0.0.1" - resolved "http://localhost:4873/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== - -confusing-browser-globals@^1.0.11: - version "1.0.11" - resolved "http://localhost:4873/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81" - integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA== - -connect-history-api-fallback@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" - integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== - -content-disposition@0.5.4: - version "0.5.4" - resolved "http://localhost:4873/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" - integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== - dependencies: - safe-buffer "5.2.1" - -content-type@~1.0.4: - version "1.0.4" - resolved "http://localhost:4873/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" - integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== - -convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: - version "1.8.0" - resolved "http://localhost:4873/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" - integrity 
sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== - dependencies: - safe-buffer "~5.1.1" - -cookie-signature@1.0.6: - version "1.0.6" - resolved "http://localhost:4873/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" - integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== - -cookie@0.5.0: - version "0.5.0" - resolved "http://localhost:4873/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" - integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== - -core-js-compat@^3.25.1: - version "3.25.5" - resolved "http://localhost:4873/core-js-compat/-/core-js-compat-3.25.5.tgz#0016e8158c904f7b059486639e6e82116eafa7d9" - integrity sha512-ovcyhs2DEBUIE0MGEKHP4olCUW/XYte3Vroyxuh38rD1wAO4dHohsovUC4eAOuzFxE6b+RXvBU3UZ9o0YhUTkA== - dependencies: - browserslist "^4.21.4" - -core-js-pure@^3.25.1, core-js-pure@^3.8.1: - version "3.25.5" - resolved "http://localhost:4873/core-js-pure/-/core-js-pure-3.25.5.tgz#79716ba54240c6aa9ceba6eee08cf79471ba184d" - integrity sha512-oml3M22pHM+igfWHDfdLVq2ShWmjM2V4L+dQEBs0DWVIqEm9WHCwGAlZ6BmyBQGy5sFrJmcx+856D9lVKyGWYg== - -core-js@^3.19.2: - version "3.25.5" - resolved "http://localhost:4873/core-js/-/core-js-3.25.5.tgz#e86f651a2ca8a0237a5f064c2fe56cef89646e27" - integrity sha512-nbm6eZSjm+ZuBQxCUPQKQCoUEfFOXjUZ8dTTyikyKaWrTYmAVbykQfwsKE5dBK88u3QCkCrzsx/PPlKfhsvgpw== - -core-util-is@~1.0.0: - version "1.0.3" - resolved "http://localhost:4873/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" - integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== - -cosmiconfig-typescript-loader@^4.1.1: - version "4.1.1" - resolved "http://localhost:4873/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-4.1.1.tgz#38dd3578344038dae40fdf09792bc2e9df529f78" - integrity 
sha512-9DHpa379Gp0o0Zefii35fcmuuin6q92FnLDffzdZ0l9tVd3nEobG3O+MZ06+kuBvFTSVScvNb/oHA13Nd4iipg== - -cosmiconfig@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" - integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== - dependencies: - "@types/parse-json" "^4.0.0" - import-fresh "^3.1.0" - parse-json "^5.0.0" - path-type "^4.0.0" - yaml "^1.7.2" - -cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: - version "7.0.1" - resolved "http://localhost:4873/cosmiconfig/-/cosmiconfig-7.0.1.tgz#714d756522cace867867ccb4474c5d01bbae5d6d" - integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ== - dependencies: - "@types/parse-json" "^4.0.0" - import-fresh "^3.2.1" - parse-json "^5.0.0" - path-type "^4.0.0" - yaml "^1.10.0" - -craco-wasm@0.0.1: - version "0.0.1" - resolved "http://localhost:4873/craco-wasm/-/craco-wasm-0.0.1.tgz#a7edbf7ff64e7569909b15684c00de13209985c6" - integrity sha512-0vwZLtkQocS7UlPg9IF4TsG/6gKXcd9O0ISomjRoBMvR2XvtZN4yxvU8/WlY0Vf42PtOcWvhSx9i4oVNxLVE6w== - -cross-spawn@^7.0.2, cross-spawn@^7.0.3: - version "7.0.3" - resolved "http://localhost:4873/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - -crypto-random-string@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" - integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== - -css-blank-pseudo@^3.0.3: - version "3.0.3" - resolved "http://localhost:4873/css-blank-pseudo/-/css-blank-pseudo-3.0.3.tgz#36523b01c12a25d812df343a32c322d2a2324561" - integrity 
sha512-VS90XWtsHGqoM0t4KpH053c4ehxZ2E6HtGI7x68YFV0pTo/QmkV/YFA+NnlvK8guxZVNWGQhVNJGC39Q8XF4OQ== - dependencies: - postcss-selector-parser "^6.0.9" - -css-declaration-sorter@^6.3.0: - version "6.3.1" - resolved "http://localhost:4873/css-declaration-sorter/-/css-declaration-sorter-6.3.1.tgz#be5e1d71b7a992433fb1c542c7a1b835e45682ec" - integrity sha512-fBffmak0bPAnyqc/HO8C3n2sHrp9wcqQz6ES9koRF2/mLOVAx9zIQ3Y7R29sYCteTPqMCwns4WYQoCX91Xl3+w== - -css-has-pseudo@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/css-has-pseudo/-/css-has-pseudo-3.0.4.tgz#57f6be91ca242d5c9020ee3e51bbb5b89fc7af73" - integrity sha512-Vse0xpR1K9MNlp2j5w1pgWIJtm1a8qS0JwS9goFYcImjlHEmywP9VUF05aGBXzGpDJF86QXk4L0ypBmwPhGArw== - dependencies: - postcss-selector-parser "^6.0.9" - -css-loader@^6.5.1: - version "6.7.1" - resolved "http://localhost:4873/css-loader/-/css-loader-6.7.1.tgz#e98106f154f6e1baf3fc3bc455cb9981c1d5fd2e" - integrity sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw== - dependencies: - icss-utils "^5.1.0" - postcss "^8.4.7" - postcss-modules-extract-imports "^3.0.0" - postcss-modules-local-by-default "^4.0.0" - postcss-modules-scope "^3.0.0" - postcss-modules-values "^4.0.0" - postcss-value-parser "^4.2.0" - semver "^7.3.5" - -css-minimizer-webpack-plugin@^3.2.0: - version "3.4.1" - resolved "http://localhost:4873/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz#ab78f781ced9181992fe7b6e4f3422e76429878f" - integrity sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q== - dependencies: - cssnano "^5.0.6" - jest-worker "^27.0.2" - postcss "^8.3.5" - schema-utils "^4.0.0" - serialize-javascript "^6.0.0" - source-map "^0.6.1" - -css-prefers-color-scheme@^6.0.3: - version "6.0.3" - resolved "http://localhost:4873/css-prefers-color-scheme/-/css-prefers-color-scheme-6.0.3.tgz#ca8a22e5992c10a5b9d315155e7caee625903349" - integrity 
sha512-4BqMbZksRkJQx2zAjrokiGMd07RqOa2IxIrrN10lyBe9xhn9DEvjUK79J6jkeiv9D9hQFXKb6g1jwU62jziJZA== - -css-select-base-adapter@^0.1.1: - version "0.1.1" - resolved "http://localhost:4873/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7" - integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== - -css-select@^2.0.0: - version "2.1.0" - resolved "http://localhost:4873/css-select/-/css-select-2.1.0.tgz#6a34653356635934a81baca68d0255432105dbef" - integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== - dependencies: - boolbase "^1.0.0" - css-what "^3.2.1" - domutils "^1.7.0" - nth-check "^1.0.2" - -css-select@^4.1.3: - version "4.3.0" - resolved "http://localhost:4873/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" - integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== - dependencies: - boolbase "^1.0.0" - css-what "^6.0.1" - domhandler "^4.3.1" - domutils "^2.8.0" - nth-check "^2.0.1" - -css-tree@1.0.0-alpha.37: - version "1.0.0-alpha.37" - resolved "http://localhost:4873/css-tree/-/css-tree-1.0.0-alpha.37.tgz#98bebd62c4c1d9f960ec340cf9f7522e30709a22" - integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== - dependencies: - mdn-data "2.0.4" - source-map "^0.6.1" - -css-tree@^1.1.2, css-tree@^1.1.3: - version "1.1.3" - resolved "http://localhost:4873/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" - integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== - dependencies: - mdn-data "2.0.14" - source-map "^0.6.1" - -css-what@^3.2.1: - version "3.4.2" - resolved "http://localhost:4873/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4" - integrity 
sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== - -css-what@^6.0.1: - version "6.1.0" - resolved "http://localhost:4873/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" - integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== - -css.escape@^1.5.1: - version "1.5.1" - resolved "http://localhost:4873/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb" - integrity sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg== - -cssdb@^7.0.1: - version "7.0.1" - resolved "http://localhost:4873/cssdb/-/cssdb-7.0.1.tgz#3810a0c67ae06362982dfe965dbedf57a0f26617" - integrity sha512-pT3nzyGM78poCKLAEy2zWIVX2hikq6dIrjuZzLV98MumBg+xMTNYfHx7paUlfiRTgg91O/vR889CIf+qiv79Rw== - -cssesc@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" - integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== - -cssnano-preset-default@^5.2.12: - version "5.2.12" - resolved "http://localhost:4873/cssnano-preset-default/-/cssnano-preset-default-5.2.12.tgz#ebe6596ec7030e62c3eb2b3c09f533c0644a9a97" - integrity sha512-OyCBTZi+PXgylz9HAA5kHyoYhfGcYdwFmyaJzWnzxuGRtnMw/kR6ilW9XzlzlRAtB6PLT/r+prYgkef7hngFew== - dependencies: - css-declaration-sorter "^6.3.0" - cssnano-utils "^3.1.0" - postcss-calc "^8.2.3" - postcss-colormin "^5.3.0" - postcss-convert-values "^5.1.2" - postcss-discard-comments "^5.1.2" - postcss-discard-duplicates "^5.1.0" - postcss-discard-empty "^5.1.1" - postcss-discard-overridden "^5.1.0" - postcss-merge-longhand "^5.1.6" - postcss-merge-rules "^5.1.2" - postcss-minify-font-values "^5.1.0" - postcss-minify-gradients "^5.1.1" - postcss-minify-params "^5.1.3" - postcss-minify-selectors "^5.2.1" - postcss-normalize-charset "^5.1.0" - postcss-normalize-display-values "^5.1.0" - 
postcss-normalize-positions "^5.1.1" - postcss-normalize-repeat-style "^5.1.1" - postcss-normalize-string "^5.1.0" - postcss-normalize-timing-functions "^5.1.0" - postcss-normalize-unicode "^5.1.0" - postcss-normalize-url "^5.1.0" - postcss-normalize-whitespace "^5.1.1" - postcss-ordered-values "^5.1.3" - postcss-reduce-initial "^5.1.0" - postcss-reduce-transforms "^5.1.0" - postcss-svgo "^5.1.0" - postcss-unique-selectors "^5.1.1" - -cssnano-utils@^3.1.0: - version "3.1.0" - resolved "http://localhost:4873/cssnano-utils/-/cssnano-utils-3.1.0.tgz#95684d08c91511edfc70d2636338ca37ef3a6861" - integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== - -cssnano@^5.0.6: - version "5.1.13" - resolved "http://localhost:4873/cssnano/-/cssnano-5.1.13.tgz#83d0926e72955332dc4802a7070296e6258efc0a" - integrity sha512-S2SL2ekdEz6w6a2epXn4CmMKU4K3KpcyXLKfAYc9UQQqJRkD/2eLUG0vJ3Db/9OvO5GuAdgXw3pFbR6abqghDQ== - dependencies: - cssnano-preset-default "^5.2.12" - lilconfig "^2.0.3" - yaml "^1.10.2" - -csso@^4.0.2, csso@^4.2.0: - version "4.2.0" - resolved "http://localhost:4873/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529" - integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== - dependencies: - css-tree "^1.1.2" - -cssom@^0.4.4: - version "0.4.4" - resolved "http://localhost:4873/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" - integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== - -cssom@~0.3.6: - version "0.3.8" - resolved "http://localhost:4873/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" - integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== - -cssstyle@^2.3.0: - version "2.3.0" - resolved "http://localhost:4873/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" - integrity 
sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== - dependencies: - cssom "~0.3.6" - -csstype@^3.0.2: - version "3.1.1" - resolved "http://localhost:4873/csstype/-/csstype-3.1.1.tgz#841b532c45c758ee546a11d5bd7b7b473c8c30b9" - integrity sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw== - -damerau-levenshtein@^1.0.8: - version "1.0.8" - resolved "http://localhost:4873/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" - integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== - -data-urls@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" - integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== - dependencies: - abab "^2.0.3" - whatwg-mimetype "^2.3.0" - whatwg-url "^8.0.0" - -debug@2.6.9, debug@^2.6.0, debug@^2.6.9: - version "2.6.9" - resolved "http://localhost:4873/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - -debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: - version "4.3.4" - resolved "http://localhost:4873/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" - integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== - dependencies: - ms "2.1.2" - -debug@^3.2.7: - version "3.2.7" - resolved "http://localhost:4873/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" - integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== - dependencies: - ms "^2.1.1" - -decimal.js@^10.2.1: - version "10.4.1" - resolved 
"http://localhost:4873/decimal.js/-/decimal.js-10.4.1.tgz#be75eeac4a2281aace80c1a8753587c27ef053e7" - integrity sha512-F29o+vci4DodHYT9UrR5IEbfBw9pE5eSapIJdTqXK5+6hq+t8VRxwQyKlW2i+KDKFkkJQRvFyI/QXD83h8LyQw== - -dedent@^0.7.0: - version "0.7.0" - resolved "http://localhost:4873/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" - integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== - -deep-is@^0.1.3, deep-is@~0.1.3: - version "0.1.4" - resolved "http://localhost:4873/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" - integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== - -deepmerge@^4.2.2: - version "4.2.2" - resolved "http://localhost:4873/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" - integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== - -default-gateway@^6.0.3: - version "6.0.3" - resolved "http://localhost:4873/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" - integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== - dependencies: - execa "^5.0.0" - -define-lazy-prop@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" - integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== - -define-properties@^1.1.3, define-properties@^1.1.4: - version "1.1.4" - resolved "http://localhost:4873/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" - integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== - dependencies: - has-property-descriptors "^1.0.0" - object-keys "^1.1.1" - -defined@^1.0.0: - version "1.0.0" - resolved 
"http://localhost:4873/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" - integrity sha512-Y2caI5+ZwS5c3RiNDJ6u53VhQHv+hHKwhkI1iHvceKUHw9Df6EK2zRLfjejRgMuCuxK7PfSWIMwWecceVvThjQ== - -delayed-stream@~1.0.0: - version "1.0.0" - resolved "http://localhost:4873/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== - -depd@2.0.0: - version "2.0.0" - resolved "http://localhost:4873/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" - integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== - -depd@~1.1.2: - version "1.1.2" - resolved "http://localhost:4873/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" - integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== - -destroy@1.2.0: - version "1.2.0" - resolved "http://localhost:4873/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" - integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== - -detect-newline@^3.0.0: - version "3.1.0" - resolved "http://localhost:4873/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" - integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== - -detect-node@^2.0.4: - version "2.1.0" - resolved "http://localhost:4873/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" - integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== - -detect-port-alt@^1.1.6: - version "1.1.6" - resolved "http://localhost:4873/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275" - integrity 
sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== - dependencies: - address "^1.0.1" - debug "^2.6.0" - -detective@^5.2.1: - version "5.2.1" - resolved "http://localhost:4873/detective/-/detective-5.2.1.tgz#6af01eeda11015acb0e73f933242b70f24f91034" - integrity sha512-v9XE1zRnz1wRtgurGu0Bs8uHKFSTdteYZNbIPFVhUZ39L/S79ppMpdmVOZAnoz1jfEFodc48n6MX483Xo3t1yw== - dependencies: - acorn-node "^1.8.2" - defined "^1.0.0" - minimist "^1.2.6" - -didyoumean@^1.2.2: - version "1.2.2" - resolved "http://localhost:4873/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" - integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== - -diff-sequences@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/diff-sequences/-/diff-sequences-27.5.1.tgz#eaecc0d327fd68c8d9672a1e64ab8dccb2ef5327" - integrity sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ== - -diff-sequences@^29.0.0: - version "29.0.0" - resolved "http://localhost:4873/diff-sequences/-/diff-sequences-29.0.0.tgz#bae49972ef3933556bcb0800b72e8579d19d9e4f" - integrity sha512-7Qe/zd1wxSDL4D/X/FPjOMB+ZMDt71W94KYaq05I2l0oQqgXgs7s4ftYYmV38gBSrPz2vcygxfs1xn0FT+rKNA== - -dir-glob@^3.0.1: - version "3.0.1" - resolved "http://localhost:4873/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" - integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== - dependencies: - path-type "^4.0.0" - -dlv@^1.1.3: - version "1.1.3" - resolved "http://localhost:4873/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" - integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== - -dns-equal@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" - integrity 
sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg== - -dns-packet@^5.2.2: - version "5.4.0" - resolved "http://localhost:4873/dns-packet/-/dns-packet-5.4.0.tgz#1f88477cf9f27e78a213fb6d118ae38e759a879b" - integrity sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g== - dependencies: - "@leichtgewicht/ip-codec" "^2.0.1" - -doctrine@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" - integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== - dependencies: - esutils "^2.0.2" - -doctrine@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" - integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== - dependencies: - esutils "^2.0.2" - -dom-accessibility-api@^0.5.6, dom-accessibility-api@^0.5.9: - version "0.5.14" - resolved "http://localhost:4873/dom-accessibility-api/-/dom-accessibility-api-0.5.14.tgz#56082f71b1dc7aac69d83c4285eef39c15d93f56" - integrity sha512-NMt+m9zFMPZe0JcY9gN224Qvk6qLIdqex29clBvc/y75ZBX9YA9wNK3frsYvu2DI1xcCIwxwnX+TlsJ2DSOADg== - -dom-converter@^0.2.0: - version "0.2.0" - resolved "http://localhost:4873/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" - integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== - dependencies: - utila "~0.4" - -dom-serializer@0: - version "0.2.2" - resolved "http://localhost:4873/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" - integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== - dependencies: - domelementtype "^2.0.1" - entities "^2.0.0" - -dom-serializer@^1.0.1: - version "1.4.1" - resolved 
"http://localhost:4873/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" - integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== - dependencies: - domelementtype "^2.0.1" - domhandler "^4.2.0" - entities "^2.0.0" - -domelementtype@1: - version "1.3.1" - resolved "http://localhost:4873/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" - integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== - -domelementtype@^2.0.1, domelementtype@^2.2.0: - version "2.3.0" - resolved "http://localhost:4873/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" - integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== - -domexception@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" - integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== - dependencies: - webidl-conversions "^5.0.0" - -domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: - version "4.3.1" - resolved "http://localhost:4873/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" - integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== - dependencies: - domelementtype "^2.2.0" - -domutils@^1.7.0: - version "1.7.0" - resolved "http://localhost:4873/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" - integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== - dependencies: - dom-serializer "0" - domelementtype "1" - -domutils@^2.5.2, domutils@^2.8.0: - version "2.8.0" - resolved "http://localhost:4873/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" - integrity 
sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== - dependencies: - dom-serializer "^1.0.1" - domelementtype "^2.2.0" - domhandler "^4.2.0" - -dot-case@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" - integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== - dependencies: - no-case "^3.0.4" - tslib "^2.0.3" - -dotenv-expand@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" - integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== - -dotenv@^10.0.0: - version "10.0.0" - resolved "http://localhost:4873/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" - integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== - -duplexer@^0.1.2: - version "0.1.2" - resolved "http://localhost:4873/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" - integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== - -ee-first@1.1.1: - version "1.1.1" - resolved "http://localhost:4873/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" - integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== - -ejs@^3.1.6: - version "3.1.8" - resolved "http://localhost:4873/ejs/-/ejs-3.1.8.tgz#758d32910c78047585c7ef1f92f9ee041c1c190b" - integrity sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ== - dependencies: - jake "^10.8.5" - -electron-to-chromium@^1.4.251: - version "1.4.274" - resolved "http://localhost:4873/electron-to-chromium/-/electron-to-chromium-1.4.274.tgz#74369ac6f020c3cea7c77ec040ddf159fe226233" - integrity 
sha512-Fgn7JZQzq85I81FpKUNxVLAzoghy8JZJ4NIue+YfUYBbu1AkpgzFvNwzF/ZNZH9ElkmJD0TSWu1F2gTpw/zZlg== - -emittery@^0.10.2: - version "0.10.2" - resolved "http://localhost:4873/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" - integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== - -emittery@^0.8.1: - version "0.8.1" - resolved "http://localhost:4873/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" - integrity sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== - -emoji-regex@^8.0.0: - version "8.0.0" - resolved "http://localhost:4873/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" - integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== - -emoji-regex@^9.2.2: - version "9.2.2" - resolved "http://localhost:4873/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" - integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== - -emojis-list@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" - integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== - -encodeurl@~1.0.2: - version "1.0.2" - resolved "http://localhost:4873/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" - integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== - -enhanced-resolve@^5.10.0: - version "5.10.0" - resolved "http://localhost:4873/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz#0dc579c3bb2a1032e357ac45b8f3a6f3ad4fb1e6" - integrity sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ== - dependencies: - graceful-fs "^4.2.4" - tapable "^2.2.0" - -entities@^2.0.0: - 
version "2.2.0" - resolved "http://localhost:4873/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" - integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== - -error-ex@^1.3.1: - version "1.3.2" - resolved "http://localhost:4873/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" - integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== - dependencies: - is-arrayish "^0.2.1" - -error-stack-parser@^2.0.6: - version "2.1.4" - resolved "http://localhost:4873/error-stack-parser/-/error-stack-parser-2.1.4.tgz#229cb01cdbfa84440bfa91876285b94680188286" - integrity sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ== - dependencies: - stackframe "^1.3.4" - -es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5, es-abstract@^1.20.1: - version "1.20.4" - resolved "http://localhost:4873/es-abstract/-/es-abstract-1.20.4.tgz#1d103f9f8d78d4cf0713edcd6d0ed1a46eed5861" - integrity sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA== - dependencies: - call-bind "^1.0.2" - es-to-primitive "^1.2.1" - function-bind "^1.1.1" - function.prototype.name "^1.1.5" - get-intrinsic "^1.1.3" - get-symbol-description "^1.0.0" - has "^1.0.3" - has-property-descriptors "^1.0.0" - has-symbols "^1.0.3" - internal-slot "^1.0.3" - is-callable "^1.2.7" - is-negative-zero "^2.0.2" - is-regex "^1.1.4" - is-shared-array-buffer "^1.0.2" - is-string "^1.0.7" - is-weakref "^1.0.2" - object-inspect "^1.12.2" - object-keys "^1.1.1" - object.assign "^4.1.4" - regexp.prototype.flags "^1.4.3" - safe-regex-test "^1.0.0" - string.prototype.trimend "^1.0.5" - string.prototype.trimstart "^1.0.5" - unbox-primitive "^1.0.2" - -es-array-method-boxes-properly@^1.0.0: - version "1.0.0" - resolved 
"http://localhost:4873/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz#873f3e84418de4ee19c5be752990b2e44718d09e" - integrity sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA== - -es-module-lexer@^0.9.0: - version "0.9.3" - resolved "http://localhost:4873/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" - integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== - -es-shim-unscopables@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" - integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== - dependencies: - has "^1.0.3" - -es-to-primitive@^1.2.1: - version "1.2.1" - resolved "http://localhost:4873/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" - integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== - dependencies: - is-callable "^1.1.4" - is-date-object "^1.0.1" - is-symbol "^1.0.2" - -escalade@^3.1.1: - version "3.1.1" - resolved "http://localhost:4873/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" - integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== - -escape-html@~1.0.3: - version "1.0.3" - resolved "http://localhost:4873/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" - integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== - -escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== - 
-escape-string-regexp@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" - integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== - -escape-string-regexp@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" - integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== - -escodegen@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" - integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== - dependencies: - esprima "^4.0.1" - estraverse "^5.2.0" - esutils "^2.0.2" - optionator "^0.8.1" - optionalDependencies: - source-map "~0.6.1" - -eslint-config-react-app@^7.0.1: - version "7.0.1" - resolved "http://localhost:4873/eslint-config-react-app/-/eslint-config-react-app-7.0.1.tgz#73ba3929978001c5c86274c017ea57eb5fa644b4" - integrity sha512-K6rNzvkIeHaTd8m/QEh1Zko0KI7BACWkkneSs6s9cKZC/J27X3eZR6Upt1jkmZ/4FK+XUOPPxMEN7+lbUXfSlA== - dependencies: - "@babel/core" "^7.16.0" - "@babel/eslint-parser" "^7.16.3" - "@rushstack/eslint-patch" "^1.1.0" - "@typescript-eslint/eslint-plugin" "^5.5.0" - "@typescript-eslint/parser" "^5.5.0" - babel-preset-react-app "^10.0.1" - confusing-browser-globals "^1.0.11" - eslint-plugin-flowtype "^8.0.3" - eslint-plugin-import "^2.25.3" - eslint-plugin-jest "^25.3.0" - eslint-plugin-jsx-a11y "^6.5.1" - eslint-plugin-react "^7.27.1" - eslint-plugin-react-hooks "^4.3.0" - eslint-plugin-testing-library "^5.0.1" - -eslint-import-resolver-node@^0.3.6: - version "0.3.6" - resolved "http://localhost:4873/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" - integrity 
sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== - dependencies: - debug "^3.2.7" - resolve "^1.20.0" - -eslint-module-utils@^2.7.3: - version "2.7.4" - resolved "http://localhost:4873/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz#4f3e41116aaf13a20792261e61d3a2e7e0583974" - integrity sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA== - dependencies: - debug "^3.2.7" - -eslint-plugin-flowtype@^8.0.3: - version "8.0.3" - resolved "http://localhost:4873/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz#e1557e37118f24734aa3122e7536a038d34a4912" - integrity sha512-dX8l6qUL6O+fYPtpNRideCFSpmWOUVx5QcaGLVqe/vlDiBSe4vYljDWDETwnyFzpl7By/WVIu6rcrniCgH9BqQ== - dependencies: - lodash "^4.17.21" - string-natural-compare "^3.0.1" - -eslint-plugin-import@^2.25.3: - version "2.26.0" - resolved "http://localhost:4873/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz#f812dc47be4f2b72b478a021605a59fc6fe8b88b" - integrity sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA== - dependencies: - array-includes "^3.1.4" - array.prototype.flat "^1.2.5" - debug "^2.6.9" - doctrine "^2.1.0" - eslint-import-resolver-node "^0.3.6" - eslint-module-utils "^2.7.3" - has "^1.0.3" - is-core-module "^2.8.1" - is-glob "^4.0.3" - minimatch "^3.1.2" - object.values "^1.1.5" - resolve "^1.22.0" - tsconfig-paths "^3.14.1" - -eslint-plugin-jest@^25.3.0: - version "25.7.0" - resolved "http://localhost:4873/eslint-plugin-jest/-/eslint-plugin-jest-25.7.0.tgz#ff4ac97520b53a96187bad9c9814e7d00de09a6a" - integrity sha512-PWLUEXeeF7C9QGKqvdSbzLOiLTx+bno7/HC9eefePfEb257QFHg7ye3dh80AZVkaa/RQsBB1Q/ORQvg2X7F0NQ== - dependencies: - "@typescript-eslint/experimental-utils" "^5.0.0" - -eslint-plugin-jsx-a11y@^6.5.1: - version "6.6.1" - resolved "http://localhost:4873/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.6.1.tgz#93736fc91b83fdc38cc8d115deedfc3091aef1ff" - 
integrity sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q== - dependencies: - "@babel/runtime" "^7.18.9" - aria-query "^4.2.2" - array-includes "^3.1.5" - ast-types-flow "^0.0.7" - axe-core "^4.4.3" - axobject-query "^2.2.0" - damerau-levenshtein "^1.0.8" - emoji-regex "^9.2.2" - has "^1.0.3" - jsx-ast-utils "^3.3.2" - language-tags "^1.0.5" - minimatch "^3.1.2" - semver "^6.3.0" - -eslint-plugin-react-hooks@^4.3.0: - version "4.6.0" - resolved "http://localhost:4873/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" - integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== - -eslint-plugin-react@^7.27.1: - version "7.31.8" - resolved "http://localhost:4873/eslint-plugin-react/-/eslint-plugin-react-7.31.8.tgz#3a4f80c10be1bcbc8197be9e8b641b2a3ef219bf" - integrity sha512-5lBTZmgQmARLLSYiwI71tiGVTLUuqXantZM6vlSY39OaDSV0M7+32K5DnLkmFrwTe+Ksz0ffuLUC91RUviVZfw== - dependencies: - array-includes "^3.1.5" - array.prototype.flatmap "^1.3.0" - doctrine "^2.1.0" - estraverse "^5.3.0" - jsx-ast-utils "^2.4.1 || ^3.0.0" - minimatch "^3.1.2" - object.entries "^1.1.5" - object.fromentries "^2.0.5" - object.hasown "^1.1.1" - object.values "^1.1.5" - prop-types "^15.8.1" - resolve "^2.0.0-next.3" - semver "^6.3.0" - string.prototype.matchall "^4.0.7" - -eslint-plugin-testing-library@^5.0.1: - version "5.7.2" - resolved "http://localhost:4873/eslint-plugin-testing-library/-/eslint-plugin-testing-library-5.7.2.tgz#c1b2112a40aab61f93e10859e8b2d81e54f0ce84" - integrity sha512-0ZmHeR/DUUgEzW8rwUBRWxuqntipDtpvxK0hymdHnLlABryJkzd+CAHr+XnISaVsTisZ5MLHp6nQF+8COHLLTA== - dependencies: - "@typescript-eslint/utils" "^5.13.0" - -eslint-scope@5.1.1, eslint-scope@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" - integrity 
sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== - dependencies: - esrecurse "^4.3.0" - estraverse "^4.1.1" - -eslint-scope@^7.1.1: - version "7.1.1" - resolved "http://localhost:4873/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" - integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== - dependencies: - esrecurse "^4.3.0" - estraverse "^5.2.0" - -eslint-utils@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" - integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== - dependencies: - eslint-visitor-keys "^2.0.0" - -eslint-visitor-keys@^2.0.0, eslint-visitor-keys@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" - integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== - -eslint-visitor-keys@^3.3.0: - version "3.3.0" - resolved "http://localhost:4873/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" - integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== - -eslint-webpack-plugin@^3.1.1: - version "3.2.0" - resolved "http://localhost:4873/eslint-webpack-plugin/-/eslint-webpack-plugin-3.2.0.tgz#1978cdb9edc461e4b0195a20da950cf57988347c" - integrity sha512-avrKcGncpPbPSUHX6B3stNGzkKFto3eL+DKM4+VyMrVnhPc3vRczVlCq3uhuFOdRvDHTVXuzwk1ZKUrqDQHQ9w== - dependencies: - "@types/eslint" "^7.29.0 || ^8.4.1" - jest-worker "^28.0.2" - micromatch "^4.0.5" - normalize-path "^3.0.0" - schema-utils "^4.0.0" - -eslint@^8.3.0: - version "8.24.0" - resolved "http://localhost:4873/eslint/-/eslint-8.24.0.tgz#489516c927a5da11b3979dbfb2679394523383c8" - integrity 
sha512-dWFaPhGhTAiPcCgm3f6LI2MBWbogMnTJzFBbhXVRQDJPkr9pGZvVjlVfXd+vyDcWPA2Ic9L2AXPIQM0+vk/cSQ== - dependencies: - "@eslint/eslintrc" "^1.3.2" - "@humanwhocodes/config-array" "^0.10.5" - "@humanwhocodes/gitignore-to-minimatch" "^1.0.2" - "@humanwhocodes/module-importer" "^1.0.1" - ajv "^6.10.0" - chalk "^4.0.0" - cross-spawn "^7.0.2" - debug "^4.3.2" - doctrine "^3.0.0" - escape-string-regexp "^4.0.0" - eslint-scope "^7.1.1" - eslint-utils "^3.0.0" - eslint-visitor-keys "^3.3.0" - espree "^9.4.0" - esquery "^1.4.0" - esutils "^2.0.2" - fast-deep-equal "^3.1.3" - file-entry-cache "^6.0.1" - find-up "^5.0.0" - glob-parent "^6.0.1" - globals "^13.15.0" - globby "^11.1.0" - grapheme-splitter "^1.0.4" - ignore "^5.2.0" - import-fresh "^3.0.0" - imurmurhash "^0.1.4" - is-glob "^4.0.0" - js-sdsl "^4.1.4" - js-yaml "^4.1.0" - json-stable-stringify-without-jsonify "^1.0.1" - levn "^0.4.1" - lodash.merge "^4.6.2" - minimatch "^3.1.2" - natural-compare "^1.4.0" - optionator "^0.9.1" - regexpp "^3.2.0" - strip-ansi "^6.0.1" - strip-json-comments "^3.1.0" - text-table "^0.2.0" - -espree@^9.4.0: - version "9.4.0" - resolved "http://localhost:4873/espree/-/espree-9.4.0.tgz#cd4bc3d6e9336c433265fc0aa016fc1aaf182f8a" - integrity sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw== - dependencies: - acorn "^8.8.0" - acorn-jsx "^5.3.2" - eslint-visitor-keys "^3.3.0" - -esprima@^4.0.0, esprima@^4.0.1: - version "4.0.1" - resolved "http://localhost:4873/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -esquery@^1.4.0: - version "1.4.0" - resolved "http://localhost:4873/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" - integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== - dependencies: - estraverse "^5.1.0" - -esrecurse@^4.3.0: - version 
"4.3.0" - resolved "http://localhost:4873/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" - integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== - dependencies: - estraverse "^5.2.0" - -estraverse@^4.1.1: - version "4.3.0" - resolved "http://localhost:4873/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" - integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== - -estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: - version "5.3.0" - resolved "http://localhost:4873/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" - integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== - -estree-walker@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/estree-walker/-/estree-walker-1.0.1.tgz#31bc5d612c96b704106b477e6dd5d8aa138cb700" - integrity sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg== - -esutils@^2.0.2: - version "2.0.3" - resolved "http://localhost:4873/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - -etag@~1.8.1: - version "1.8.1" - resolved "http://localhost:4873/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" - integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== - -eventemitter3@^4.0.0: - version "4.0.7" - resolved "http://localhost:4873/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" - integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== - -events@^3.2.0: - version "3.3.0" - resolved "http://localhost:4873/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" - integrity 
sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== - -execa@^5.0.0: - version "5.1.1" - resolved "http://localhost:4873/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" - integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== - dependencies: - cross-spawn "^7.0.3" - get-stream "^6.0.0" - human-signals "^2.1.0" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.1" - onetime "^5.1.2" - signal-exit "^3.0.3" - strip-final-newline "^2.0.0" - -exit@^0.1.2: - version "0.1.2" - resolved "http://localhost:4873/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" - integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== - -expect@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/expect/-/expect-27.5.1.tgz#83ce59f1e5bdf5f9d2b94b61d2050db48f3fef74" - integrity sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw== - dependencies: - "@jest/types" "^27.5.1" - jest-get-type "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - -expect@^29.0.0: - version "29.1.2" - resolved "http://localhost:4873/expect/-/expect-29.1.2.tgz#82f8f28d7d408c7c68da3a386a490ee683e1eced" - integrity sha512-AuAGn1uxva5YBbBlXb+2JPxJRuemZsmlGcapPXWNSBNsQtAULfjioREGBWuI0EOvYUKjDnrCy8PW5Zlr1md5mw== - dependencies: - "@jest/expect-utils" "^29.1.2" - jest-get-type "^29.0.0" - jest-matcher-utils "^29.1.2" - jest-message-util "^29.1.2" - jest-util "^29.1.2" - -express@^4.17.3: - version "4.18.1" - resolved "http://localhost:4873/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" - integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== - dependencies: - accepts "~1.3.8" - array-flatten "1.1.1" - body-parser "1.20.0" - content-disposition "0.5.4" - content-type "~1.0.4" - cookie "0.5.0" - 
cookie-signature "1.0.6" - debug "2.6.9" - depd "2.0.0" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - finalhandler "1.2.0" - fresh "0.5.2" - http-errors "2.0.0" - merge-descriptors "1.0.1" - methods "~1.1.2" - on-finished "2.4.1" - parseurl "~1.3.3" - path-to-regexp "0.1.7" - proxy-addr "~2.0.7" - qs "6.10.3" - range-parser "~1.2.1" - safe-buffer "5.2.1" - send "0.18.0" - serve-static "1.15.0" - setprototypeof "1.2.0" - statuses "2.0.1" - type-is "~1.6.18" - utils-merge "1.0.1" - vary "~1.1.2" - -fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: - version "3.1.3" - resolved "http://localhost:4873/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" - integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== - -fast-glob@^3.2.11, fast-glob@^3.2.9: - version "3.2.12" - resolved "http://localhost:4873/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" - integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.2" - merge2 "^1.3.0" - micromatch "^4.0.4" - -fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" - integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== - -fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: - version "2.0.6" - resolved "http://localhost:4873/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== - -fastq@^1.6.0: - version "1.13.0" - resolved 
"http://localhost:4873/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" - integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== - dependencies: - reusify "^1.0.4" - -faye-websocket@^0.11.3: - version "0.11.4" - resolved "http://localhost:4873/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" - integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== - dependencies: - websocket-driver ">=0.5.1" - -fb-watchman@^2.0.0: - version "2.0.2" - resolved "http://localhost:4873/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" - integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== - dependencies: - bser "2.1.1" - -file-entry-cache@^6.0.1: - version "6.0.1" - resolved "http://localhost:4873/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" - integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== - dependencies: - flat-cache "^3.0.4" - -file-loader@^6.2.0: - version "6.2.0" - resolved "http://localhost:4873/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d" - integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== - dependencies: - loader-utils "^2.0.0" - schema-utils "^3.0.0" - -filelist@^1.0.1: - version "1.0.4" - resolved "http://localhost:4873/filelist/-/filelist-1.0.4.tgz#f78978a1e944775ff9e62e744424f215e58352b5" - integrity sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q== - dependencies: - minimatch "^5.0.1" - -filesize@^8.0.6: - version "8.0.7" - resolved "http://localhost:4873/filesize/-/filesize-8.0.7.tgz#695e70d80f4e47012c132d57a059e80c6b580bd8" - integrity 
sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ== - -fill-range@^7.0.1: - version "7.0.1" - resolved "http://localhost:4873/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== - dependencies: - to-regex-range "^5.0.1" - -finalhandler@1.2.0: - version "1.2.0" - resolved "http://localhost:4873/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" - integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== - dependencies: - debug "2.6.9" - encodeurl "~1.0.2" - escape-html "~1.0.3" - on-finished "2.4.1" - parseurl "~1.3.3" - statuses "2.0.1" - unpipe "~1.0.0" - -find-cache-dir@^3.3.1: - version "3.3.2" - resolved "http://localhost:4873/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" - integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== - dependencies: - commondir "^1.0.1" - make-dir "^3.0.2" - pkg-dir "^4.1.0" - -find-up@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" - integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== - dependencies: - locate-path "^3.0.0" - -find-up@^4.0.0, find-up@^4.1.0: - version "4.1.0" - resolved "http://localhost:4873/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - -find-up@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" - integrity 
sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== - dependencies: - locate-path "^6.0.0" - path-exists "^4.0.0" - -flat-cache@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" - integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== - dependencies: - flatted "^3.1.0" - rimraf "^3.0.2" - -flatted@^3.1.0: - version "3.2.7" - resolved "http://localhost:4873/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" - integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== - -follow-redirects@^1.0.0: - version "1.15.2" - resolved "http://localhost:4873/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" - integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== - -fork-ts-checker-webpack-plugin@^6.5.0: - version "6.5.2" - resolved "http://localhost:4873/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz#4f67183f2f9eb8ba7df7177ce3cf3e75cdafb340" - integrity sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA== - dependencies: - "@babel/code-frame" "^7.8.3" - "@types/json-schema" "^7.0.5" - chalk "^4.1.0" - chokidar "^3.4.2" - cosmiconfig "^6.0.0" - deepmerge "^4.2.2" - fs-extra "^9.0.0" - glob "^7.1.6" - memfs "^3.1.2" - minimatch "^3.0.4" - schema-utils "2.7.0" - semver "^7.3.2" - tapable "^1.0.0" - -form-data@^3.0.0: - version "3.0.1" - resolved "http://localhost:4873/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" - integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - -forwarded@0.2.0: - version "0.2.0" - resolved 
"http://localhost:4873/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" - integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== - -fraction.js@^4.2.0: - version "4.2.0" - resolved "http://localhost:4873/fraction.js/-/fraction.js-4.2.0.tgz#448e5109a313a3527f5a3ab2119ec4cf0e0e2950" - integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== - -fresh@0.5.2: - version "0.5.2" - resolved "http://localhost:4873/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" - integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== - -fs-extra@^10.0.0: - version "10.1.0" - resolved "http://localhost:4873/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" - integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== - dependencies: - graceful-fs "^4.2.0" - jsonfile "^6.0.1" - universalify "^2.0.0" - -fs-extra@^9.0.0, fs-extra@^9.0.1: - version "9.1.0" - resolved "http://localhost:4873/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" - integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== - dependencies: - at-least-node "^1.0.0" - graceful-fs "^4.2.0" - jsonfile "^6.0.1" - universalify "^2.0.0" - -fs-monkey@^1.0.3: - version "1.0.3" - resolved "http://localhost:4873/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3" - integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q== - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== - -fsevents@^2.3.2, fsevents@~2.3.2: - version "2.3.2" - 
resolved "http://localhost:4873/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" - integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== - -function-bind@^1.1.1: - version "1.1.1" - resolved "http://localhost:4873/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== - -function.prototype.name@^1.1.5: - version "1.1.5" - resolved "http://localhost:4873/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" - integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.0" - functions-have-names "^1.2.2" - -functions-have-names@^1.2.2: - version "1.2.3" - resolved "http://localhost:4873/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" - integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== - -gensync@^1.0.0-beta.2: - version "1.0.0-beta.2" - resolved "http://localhost:4873/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" - integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== - -get-caller-file@^2.0.5: - version "2.0.5" - resolved "http://localhost:4873/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - -get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: - version "1.1.3" - resolved "http://localhost:4873/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" - integrity 
sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== - dependencies: - function-bind "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.3" - -get-own-enumerable-property-symbols@^3.0.0: - version "3.0.2" - resolved "http://localhost:4873/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664" - integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== - -get-package-type@^0.1.0: - version "0.1.0" - resolved "http://localhost:4873/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" - integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== - -get-stream@^6.0.0: - version "6.0.1" - resolved "http://localhost:4873/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" - integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== - -get-symbol-description@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" - integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.1" - -glob-parent@^5.1.2, glob-parent@~5.1.2: - version "5.1.2" - resolved "http://localhost:4873/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" - integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== - dependencies: - is-glob "^4.0.1" - -glob-parent@^6.0.1, glob-parent@^6.0.2: - version "6.0.2" - resolved "http://localhost:4873/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" - integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== - 
dependencies: - is-glob "^4.0.3" - -glob-to-regexp@^0.4.1: - version "0.4.1" - resolved "http://localhost:4873/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" - integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== - -glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: - version "7.2.3" - resolved "http://localhost:4873/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" - integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.1.1" - once "^1.3.0" - path-is-absolute "^1.0.0" - -global-modules@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" - integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== - dependencies: - global-prefix "^3.0.0" - -global-prefix@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" - integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== - dependencies: - ini "^1.3.5" - kind-of "^6.0.2" - which "^1.3.1" - -globals@^11.1.0: - version "11.12.0" - resolved "http://localhost:4873/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" - integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== - -globals@^13.15.0: - version "13.17.0" - resolved "http://localhost:4873/globals/-/globals-13.17.0.tgz#902eb1e680a41da93945adbdcb5a9f361ba69bd4" - integrity sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw== - dependencies: - type-fest "^0.20.2" - -globby@^11.0.4, globby@^11.1.0: - version "11.1.0" - resolved 
"http://localhost:4873/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" - integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.2.9" - ignore "^5.2.0" - merge2 "^1.4.1" - slash "^3.0.0" - -graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: - version "4.2.10" - resolved "http://localhost:4873/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" - integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== - -grapheme-splitter@^1.0.4: - version "1.0.4" - resolved "http://localhost:4873/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" - integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== - -gzip-size@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" - integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== - dependencies: - duplexer "^0.1.2" - -handle-thing@^2.0.0: - version "2.0.1" - resolved "http://localhost:4873/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" - integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== - -harmony-reflect@^1.4.6: - version "1.6.2" - resolved "http://localhost:4873/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" - integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== - -has-bigints@^1.0.1, has-bigints@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" - integrity 
sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== - -has-flag@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== - -has-flag@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" - integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== - -has-property-descriptors@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" - integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== - dependencies: - get-intrinsic "^1.1.1" - -has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: - version "1.0.3" - resolved "http://localhost:4873/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" - integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== - -has-tostringtag@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" - integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== - dependencies: - has-symbols "^1.0.2" - -has@^1.0.3: - version "1.0.3" - resolved "http://localhost:4873/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== - dependencies: - function-bind "^1.1.1" - -he@^1.2.0: - version "1.2.0" - resolved "http://localhost:4873/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" - integrity 
sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== - -hoopy@^0.1.4: - version "0.1.4" - resolved "http://localhost:4873/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d" - integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== - -hpack.js@^2.1.6: - version "2.1.6" - resolved "http://localhost:4873/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" - integrity sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ== - dependencies: - inherits "^2.0.1" - obuf "^1.0.0" - readable-stream "^2.0.1" - wbuf "^1.1.0" - -html-encoding-sniffer@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" - integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== - dependencies: - whatwg-encoding "^1.0.5" - -html-entities@^2.1.0, html-entities@^2.3.2: - version "2.3.3" - resolved "http://localhost:4873/html-entities/-/html-entities-2.3.3.tgz#117d7626bece327fc8baace8868fa6f5ef856e46" - integrity sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA== - -html-escaper@^2.0.0: - version "2.0.2" - resolved "http://localhost:4873/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" - integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== - -html-minifier-terser@^6.0.2: - version "6.1.0" - resolved "http://localhost:4873/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab" - integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== - dependencies: - camel-case "^4.1.2" - clean-css "^5.2.2" - commander "^8.3.0" - he "^1.2.0" - param-case "^3.0.4" - relateurl "^0.2.7" - 
terser "^5.10.0" - -html-webpack-plugin@^5.5.0: - version "5.5.0" - resolved "http://localhost:4873/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz#c3911936f57681c1f9f4d8b68c158cd9dfe52f50" - integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw== - dependencies: - "@types/html-minifier-terser" "^6.0.0" - html-minifier-terser "^6.0.2" - lodash "^4.17.21" - pretty-error "^4.0.0" - tapable "^2.0.0" - -htmlparser2@^6.1.0: - version "6.1.0" - resolved "http://localhost:4873/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" - integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== - dependencies: - domelementtype "^2.0.1" - domhandler "^4.0.0" - domutils "^2.5.2" - entities "^2.0.0" - -http-deceiver@^1.2.7: - version "1.2.7" - resolved "http://localhost:4873/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" - integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== - -http-errors@2.0.0: - version "2.0.0" - resolved "http://localhost:4873/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" - integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== - dependencies: - depd "2.0.0" - inherits "2.0.4" - setprototypeof "1.2.0" - statuses "2.0.1" - toidentifier "1.0.1" - -http-errors@~1.6.2: - version "1.6.3" - resolved "http://localhost:4873/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" - integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== - dependencies: - depd "~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.0" - statuses ">= 1.4.0 < 2" - -http-parser-js@>=0.5.1: - version "0.5.8" - resolved "http://localhost:4873/http-parser-js/-/http-parser-js-0.5.8.tgz#af23090d9ac4e24573de6f6aecc9d84a48bf20e3" - 
integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q== - -http-proxy-agent@^4.0.1: - version "4.0.1" - resolved "http://localhost:4873/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" - integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== - dependencies: - "@tootallnate/once" "1" - agent-base "6" - debug "4" - -http-proxy-middleware@^2.0.3: - version "2.0.6" - resolved "http://localhost:4873/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f" - integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== - dependencies: - "@types/http-proxy" "^1.17.8" - http-proxy "^1.18.1" - is-glob "^4.0.1" - is-plain-obj "^3.0.0" - micromatch "^4.0.2" - -http-proxy@^1.18.1: - version "1.18.1" - resolved "http://localhost:4873/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" - integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== - dependencies: - eventemitter3 "^4.0.0" - follow-redirects "^1.0.0" - requires-port "^1.0.0" - -https-proxy-agent@^5.0.0: - version "5.0.1" - resolved "http://localhost:4873/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" - integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== - dependencies: - agent-base "6" - debug "4" - -human-signals@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" - integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== - -iconv-lite@0.4.24: - version "0.4.24" - resolved "http://localhost:4873/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" - integrity 
sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== - dependencies: - safer-buffer ">= 2.1.2 < 3" - -iconv-lite@^0.6.3: - version "0.6.3" - resolved "http://localhost:4873/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" - integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== - dependencies: - safer-buffer ">= 2.1.2 < 3.0.0" - -icss-utils@^5.0.0, icss-utils@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" - integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== - -idb@^7.0.1: - version "7.1.0" - resolved "http://localhost:4873/idb/-/idb-7.1.0.tgz#2cc886be57738419e57f9aab58f647e5e2160270" - integrity sha512-Wsk07aAxDsntgYJY4h0knZJuTxM73eQ4reRAO+Z1liOh8eMCJ/MoDS8fCui1vGT9mnjtl1sOu3I2i/W1swPYZg== - -identity-obj-proxy@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz#94d2bda96084453ef36fbc5aaec37e0f79f1fc14" - integrity sha512-00n6YnVHKrinT9t0d9+5yZC6UBNJANpYEQvL2LlX6Ab9lnmxzIRcEmTPuyGScvl1+jKuCICX1Z0Ab1pPKKdikA== - dependencies: - harmony-reflect "^1.4.6" - -ignore@^5.2.0: - version "5.2.0" - resolved "http://localhost:4873/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" - integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== - -immer@^9.0.7: - version "9.0.15" - resolved "http://localhost:4873/immer/-/immer-9.0.15.tgz#0b9169e5b1d22137aba7d43f8a81a495dd1b62dc" - integrity sha512-2eB/sswms9AEUSkOm4SbV5Y7Vmt/bKRwByd52jfLkW4OLYeaTP3EEiJ9agqU0O/tq6Dk62Zfj+TJSqfm1rLVGQ== - -import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: - version "3.3.0" - resolved "http://localhost:4873/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" - integrity 
sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== - dependencies: - parent-module "^1.0.0" - resolve-from "^4.0.0" - -import-local@^3.0.2: - version "3.1.0" - resolved "http://localhost:4873/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" - integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== - dependencies: - pkg-dir "^4.2.0" - resolve-cwd "^3.0.0" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "http://localhost:4873/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== - -indent-string@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" - integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== - -inflight@^1.0.4: - version "1.0.6" - resolved "http://localhost:4873/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3: - version "2.0.4" - resolved "http://localhost:4873/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -inherits@2.0.3: - version "2.0.3" - resolved "http://localhost:4873/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== - -ini@^1.3.5: - version "1.3.8" - resolved 
"http://localhost:4873/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" - integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== - -internal-slot@^1.0.3: - version "1.0.3" - resolved "http://localhost:4873/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" - integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== - dependencies: - get-intrinsic "^1.1.0" - has "^1.0.3" - side-channel "^1.0.4" - -ipaddr.js@1.9.1: - version "1.9.1" - resolved "http://localhost:4873/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" - integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== - -ipaddr.js@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0" - integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== - -is-arrayish@^0.2.1: - version "0.2.1" - resolved "http://localhost:4873/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== - -is-bigint@^1.0.1: - version "1.0.4" - resolved "http://localhost:4873/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" - integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== - dependencies: - has-bigints "^1.0.1" - -is-binary-path@~2.1.0: - version "2.1.0" - resolved "http://localhost:4873/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" - integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== - dependencies: - binary-extensions "^2.0.0" - -is-boolean-object@^1.1.0: - version "1.1.2" - resolved 
"http://localhost:4873/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" - integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== - dependencies: - call-bind "^1.0.2" - has-tostringtag "^1.0.0" - -is-callable@^1.1.4, is-callable@^1.2.7: - version "1.2.7" - resolved "http://localhost:4873/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" - integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== - -is-core-module@^2.8.1, is-core-module@^2.9.0: - version "2.10.0" - resolved "http://localhost:4873/is-core-module/-/is-core-module-2.10.0.tgz#9012ede0a91c69587e647514e1d5277019e728ed" - integrity sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg== - dependencies: - has "^1.0.3" - -is-date-object@^1.0.1: - version "1.0.5" - resolved "http://localhost:4873/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" - integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== - dependencies: - has-tostringtag "^1.0.0" - -is-docker@^2.0.0, is-docker@^2.1.1: - version "2.2.1" - resolved "http://localhost:4873/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" - integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== - -is-extglob@^2.1.1: - version "2.1.1" - resolved "http://localhost:4873/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== - -is-fullwidth-code-point@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" - integrity 
sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== - -is-generator-fn@^2.0.0: - version "2.1.0" - resolved "http://localhost:4873/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" - integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== - -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: - version "4.0.3" - resolved "http://localhost:4873/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" - integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== - dependencies: - is-extglob "^2.1.1" - -is-module@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" - integrity sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g== - -is-negative-zero@^2.0.2: - version "2.0.2" - resolved "http://localhost:4873/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" - integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== - -is-number-object@^1.0.4: - version "1.0.7" - resolved "http://localhost:4873/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" - integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== - dependencies: - has-tostringtag "^1.0.0" - -is-number@^7.0.0: - version "7.0.0" - resolved "http://localhost:4873/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" - integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== - -is-obj@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" - integrity 
sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg== - -is-plain-obj@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" - integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== - -is-plain-object@^2.0.4: - version "2.0.4" - resolved "http://localhost:4873/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" - integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== - dependencies: - isobject "^3.0.1" - -is-potential-custom-element-name@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" - integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== - -is-regex@^1.1.4: - version "1.1.4" - resolved "http://localhost:4873/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" - integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== - dependencies: - call-bind "^1.0.2" - has-tostringtag "^1.0.0" - -is-regexp@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069" - integrity sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA== - -is-root@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c" - integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== - -is-shared-array-buffer@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" 
- integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== - dependencies: - call-bind "^1.0.2" - -is-stream@^2.0.0: - version "2.0.1" - resolved "http://localhost:4873/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" - integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== - -is-string@^1.0.5, is-string@^1.0.7: - version "1.0.7" - resolved "http://localhost:4873/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" - integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== - dependencies: - has-tostringtag "^1.0.0" - -is-symbol@^1.0.2, is-symbol@^1.0.3: - version "1.0.4" - resolved "http://localhost:4873/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" - integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== - dependencies: - has-symbols "^1.0.2" - -is-typedarray@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" - integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== - -is-weakref@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" - integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== - dependencies: - call-bind "^1.0.2" - -is-wsl@^2.2.0: - version "2.2.0" - resolved "http://localhost:4873/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" - integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== - dependencies: - is-docker "^2.0.0" - -isarray@~1.0.0: - version "1.0.0" - resolved 
"http://localhost:4873/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== - -isexe@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== - -isobject@^3.0.1: - version "3.0.1" - resolved "http://localhost:4873/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" - integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== - -istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: - version "3.2.0" - resolved "http://localhost:4873/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" - integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== - -istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: - version "5.2.1" - resolved "http://localhost:4873/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" - integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== - dependencies: - "@babel/core" "^7.12.3" - "@babel/parser" "^7.14.7" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-coverage "^3.2.0" - semver "^6.3.0" - -istanbul-lib-report@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" - integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== - dependencies: - istanbul-lib-coverage "^3.0.0" - make-dir "^3.0.0" - supports-color "^7.1.0" - -istanbul-lib-source-maps@^4.0.0: - version "4.0.1" - resolved 
"http://localhost:4873/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" - integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== - dependencies: - debug "^4.1.1" - istanbul-lib-coverage "^3.0.0" - source-map "^0.6.1" - -istanbul-reports@^3.1.3: - version "3.1.5" - resolved "http://localhost:4873/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" - integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== - dependencies: - html-escaper "^2.0.0" - istanbul-lib-report "^3.0.0" - -jake@^10.8.5: - version "10.8.5" - resolved "http://localhost:4873/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46" - integrity sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw== - dependencies: - async "^3.2.3" - chalk "^4.0.2" - filelist "^1.0.1" - minimatch "^3.0.4" - -jest-changed-files@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-changed-files/-/jest-changed-files-27.5.1.tgz#a348aed00ec9bf671cc58a66fcbe7c3dfd6a68f5" - integrity sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw== - dependencies: - "@jest/types" "^27.5.1" - execa "^5.0.0" - throat "^6.0.1" - -jest-circus@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-circus/-/jest-circus-27.5.1.tgz#37a5a4459b7bf4406e53d637b49d22c65d125ecc" - integrity sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - co "^4.6.0" - dedent "^0.7.0" - expect "^27.5.1" - is-generator-fn "^2.0.0" - jest-each "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util 
"^27.5.1" - pretty-format "^27.5.1" - slash "^3.0.0" - stack-utils "^2.0.3" - throat "^6.0.1" - -jest-cli@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-cli/-/jest-cli-27.5.1.tgz#278794a6e6458ea8029547e6c6cbf673bd30b145" - integrity sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw== - dependencies: - "@jest/core" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - chalk "^4.0.0" - exit "^0.1.2" - graceful-fs "^4.2.9" - import-local "^3.0.2" - jest-config "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - prompts "^2.0.1" - yargs "^16.2.0" - -jest-config@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-config/-/jest-config-27.5.1.tgz#5c387de33dca3f99ad6357ddeccd91bf3a0e4a41" - integrity sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA== - dependencies: - "@babel/core" "^7.8.0" - "@jest/test-sequencer" "^27.5.1" - "@jest/types" "^27.5.1" - babel-jest "^27.5.1" - chalk "^4.0.0" - ci-info "^3.2.0" - deepmerge "^4.2.2" - glob "^7.1.1" - graceful-fs "^4.2.9" - jest-circus "^27.5.1" - jest-environment-jsdom "^27.5.1" - jest-environment-node "^27.5.1" - jest-get-type "^27.5.1" - jest-jasmine2 "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-runner "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - micromatch "^4.0.4" - parse-json "^5.2.0" - pretty-format "^27.5.1" - slash "^3.0.0" - strip-json-comments "^3.1.1" - -jest-diff@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-diff/-/jest-diff-27.5.1.tgz#a07f5011ac9e6643cf8a95a462b7b1ecf6680def" - integrity sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw== - dependencies: - chalk "^4.0.0" - diff-sequences "^27.5.1" - jest-get-type "^27.5.1" - pretty-format "^27.5.1" - -jest-diff@^29.1.2: - version "29.1.2" - resolved 
"http://localhost:4873/jest-diff/-/jest-diff-29.1.2.tgz#bb7aaf5353227d6f4f96c5e7e8713ce576a607dc" - integrity sha512-4GQts0aUopVvecIT4IwD/7xsBaMhKTYoM4/njE/aVw9wpw+pIUVp8Vab/KnSzSilr84GnLBkaP3JLDnQYCKqVQ== - dependencies: - chalk "^4.0.0" - diff-sequences "^29.0.0" - jest-get-type "^29.0.0" - pretty-format "^29.1.2" - -jest-docblock@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-docblock/-/jest-docblock-27.5.1.tgz#14092f364a42c6108d42c33c8cf30e058e25f6c0" - integrity sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ== - dependencies: - detect-newline "^3.0.0" - -jest-each@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-each/-/jest-each-27.5.1.tgz#5bc87016f45ed9507fed6e4702a5b468a5b2c44e" - integrity sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ== - dependencies: - "@jest/types" "^27.5.1" - chalk "^4.0.0" - jest-get-type "^27.5.1" - jest-util "^27.5.1" - pretty-format "^27.5.1" - -jest-environment-jsdom@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz#ea9ccd1fc610209655a77898f86b2b559516a546" - integrity sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" - jest-util "^27.5.1" - jsdom "^16.6.0" - -jest-environment-node@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-environment-node/-/jest-environment-node-27.5.1.tgz#dedc2cfe52fab6b8f5714b4808aefa85357a365e" - integrity sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" - jest-util "^27.5.1" - -jest-get-type@^27.5.1: - 
version "27.5.1" - resolved "http://localhost:4873/jest-get-type/-/jest-get-type-27.5.1.tgz#3cd613c507b0f7ace013df407a1c1cd578bcb4f1" - integrity sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw== - -jest-get-type@^29.0.0: - version "29.0.0" - resolved "http://localhost:4873/jest-get-type/-/jest-get-type-29.0.0.tgz#843f6c50a1b778f7325df1129a0fd7aa713aef80" - integrity sha512-83X19z/HuLKYXYHskZlBAShO7UfLFXu/vWajw9ZNJASN32li8yHMaVGAQqxFW1RCFOkB7cubaL6FaJVQqqJLSw== - -jest-haste-map@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-haste-map/-/jest-haste-map-27.5.1.tgz#9fd8bd7e7b4fa502d9c6164c5640512b4e811e7f" - integrity sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng== - dependencies: - "@jest/types" "^27.5.1" - "@types/graceful-fs" "^4.1.2" - "@types/node" "*" - anymatch "^3.0.3" - fb-watchman "^2.0.0" - graceful-fs "^4.2.9" - jest-regex-util "^27.5.1" - jest-serializer "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - micromatch "^4.0.4" - walker "^1.0.7" - optionalDependencies: - fsevents "^2.3.2" - -jest-jasmine2@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz#a037b0034ef49a9f3d71c4375a796f3b230d1ac4" - integrity sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/source-map" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - co "^4.6.0" - expect "^27.5.1" - is-generator-fn "^2.0.0" - jest-each "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - pretty-format "^27.5.1" - throat "^6.0.1" - -jest-leak-detector@^27.5.1: - version "27.5.1" - resolved 
"http://localhost:4873/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz#6ec9d54c3579dd6e3e66d70e3498adf80fde3fb8" - integrity sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ== - dependencies: - jest-get-type "^27.5.1" - pretty-format "^27.5.1" - -jest-matcher-utils@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz#9c0cdbda8245bc22d2331729d1091308b40cf8ab" - integrity sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw== - dependencies: - chalk "^4.0.0" - jest-diff "^27.5.1" - jest-get-type "^27.5.1" - pretty-format "^27.5.1" - -jest-matcher-utils@^29.1.2: - version "29.1.2" - resolved "http://localhost:4873/jest-matcher-utils/-/jest-matcher-utils-29.1.2.tgz#e68c4bcc0266e70aa1a5c13fb7b8cd4695e318a1" - integrity sha512-MV5XrD3qYSW2zZSHRRceFzqJ39B2z11Qv0KPyZYxnzDHFeYZGJlgGi0SW+IXSJfOewgJp/Km/7lpcFT+cgZypw== - dependencies: - chalk "^4.0.0" - jest-diff "^29.1.2" - jest-get-type "^29.0.0" - pretty-format "^29.1.2" - -jest-message-util@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-message-util/-/jest-message-util-27.5.1.tgz#bdda72806da10d9ed6425e12afff38cd1458b6cf" - integrity sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^27.5.1" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" - graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^27.5.1" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-message-util@^28.1.3: - version "28.1.3" - resolved "http://localhost:4873/jest-message-util/-/jest-message-util-28.1.3.tgz#232def7f2e333f1eecc90649b5b94b0055e7c43d" - integrity sha512-PFdn9Iewbt575zKPf1286Ht9EPoJmYT7P0kY+RibeYZ2XtOr53pDLEFoTWXbd1h4JiGiWpTBC84fc8xMXQMb7g== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^28.1.3" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" 
- graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^28.1.3" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-message-util@^29.1.2: - version "29.1.2" - resolved "http://localhost:4873/jest-message-util/-/jest-message-util-29.1.2.tgz#c21a33c25f9dc1ebfcd0f921d89438847a09a501" - integrity sha512-9oJ2Os+Qh6IlxLpmvshVbGUiSkZVc2FK+uGOm6tghafnB2RyjKAxMZhtxThRMxfX1J1SOMhTn9oK3/MutRWQJQ== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^29.1.2" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" - graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^29.1.2" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-mock@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6" - integrity sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og== - dependencies: - "@jest/types" "^27.5.1" - "@types/node" "*" - -jest-pnp-resolver@^1.2.2: - version "1.2.2" - resolved "http://localhost:4873/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" - integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== - -jest-regex-util@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-regex-util/-/jest-regex-util-27.5.1.tgz#4da143f7e9fd1e542d4aa69617b38e4a78365b95" - integrity sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg== - -jest-regex-util@^28.0.0: - version "28.0.2" - resolved "http://localhost:4873/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" - integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== - -jest-resolve-dependencies@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz#d811ecc8305e731cc86dd79741ee98fed06f1da8" - integrity 
sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg== - dependencies: - "@jest/types" "^27.5.1" - jest-regex-util "^27.5.1" - jest-snapshot "^27.5.1" - -jest-resolve@^27.4.2, jest-resolve@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-resolve/-/jest-resolve-27.5.1.tgz#a2f1c5a0796ec18fe9eb1536ac3814c23617b384" - integrity sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw== - dependencies: - "@jest/types" "^27.5.1" - chalk "^4.0.0" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-pnp-resolver "^1.2.2" - jest-util "^27.5.1" - jest-validate "^27.5.1" - resolve "^1.20.0" - resolve.exports "^1.1.0" - slash "^3.0.0" - -jest-runner@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-runner/-/jest-runner-27.5.1.tgz#071b27c1fa30d90540805c5645a0ec167c7b62e5" - integrity sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ== - dependencies: - "@jest/console" "^27.5.1" - "@jest/environment" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - emittery "^0.8.1" - graceful-fs "^4.2.9" - jest-docblock "^27.5.1" - jest-environment-jsdom "^27.5.1" - jest-environment-node "^27.5.1" - jest-haste-map "^27.5.1" - jest-leak-detector "^27.5.1" - jest-message-util "^27.5.1" - jest-resolve "^27.5.1" - jest-runtime "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - source-map-support "^0.5.6" - throat "^6.0.1" - -jest-runtime@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-runtime/-/jest-runtime-27.5.1.tgz#4896003d7a334f7e8e4a53ba93fb9bcd3db0a1af" - integrity sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/globals" "^27.5.1" - "@jest/source-map" "^27.5.1" - "@jest/test-result" 
"^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - chalk "^4.0.0" - cjs-module-lexer "^1.0.0" - collect-v8-coverage "^1.0.0" - execa "^5.0.0" - glob "^7.1.3" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-message-util "^27.5.1" - jest-mock "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - slash "^3.0.0" - strip-bom "^4.0.0" - -jest-serializer@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-serializer/-/jest-serializer-27.5.1.tgz#81438410a30ea66fd57ff730835123dea1fb1f64" - integrity sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w== - dependencies: - "@types/node" "*" - graceful-fs "^4.2.9" - -jest-snapshot@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-snapshot/-/jest-snapshot-27.5.1.tgz#b668d50d23d38054a51b42c4039cab59ae6eb6a1" - integrity sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA== - dependencies: - "@babel/core" "^7.7.2" - "@babel/generator" "^7.7.2" - "@babel/plugin-syntax-typescript" "^7.7.2" - "@babel/traverse" "^7.7.2" - "@babel/types" "^7.0.0" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/babel__traverse" "^7.0.4" - "@types/prettier" "^2.1.5" - babel-preset-current-node-syntax "^1.0.0" - chalk "^4.0.0" - expect "^27.5.1" - graceful-fs "^4.2.9" - jest-diff "^27.5.1" - jest-get-type "^27.5.1" - jest-haste-map "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-util "^27.5.1" - natural-compare "^1.4.0" - pretty-format "^27.5.1" - semver "^7.3.2" - -jest-util@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-util/-/jest-util-27.5.1.tgz#3ba9771e8e31a0b85da48fe0b0891fb86c01c2f9" - integrity sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw== - dependencies: - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - ci-info 
"^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-util@^28.1.3: - version "28.1.3" - resolved "http://localhost:4873/jest-util/-/jest-util-28.1.3.tgz#f4f932aa0074f0679943220ff9cbba7e497028b0" - integrity sha512-XdqfpHwpcSRko/C35uLYFM2emRAltIIKZiJ9eAmhjsj0CqZMa0p1ib0R5fWIqGhn1a103DebTbpqIaP1qCQ6tQ== - dependencies: - "@jest/types" "^28.1.3" - "@types/node" "*" - chalk "^4.0.0" - ci-info "^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-util@^29.1.2: - version "29.1.2" - resolved "http://localhost:4873/jest-util/-/jest-util-29.1.2.tgz#ac5798e93cb6a6703084e194cfa0898d66126df1" - integrity sha512-vPCk9F353i0Ymx3WQq3+a4lZ07NXu9Ca8wya6o4Fe4/aO1e1awMMprZ3woPFpKwghEOW+UXgd15vVotuNN9ONQ== - dependencies: - "@jest/types" "^29.1.2" - "@types/node" "*" - chalk "^4.0.0" - ci-info "^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-validate@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-validate/-/jest-validate-27.5.1.tgz#9197d54dc0bdb52260b8db40b46ae668e04df067" - integrity sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ== - dependencies: - "@jest/types" "^27.5.1" - camelcase "^6.2.0" - chalk "^4.0.0" - jest-get-type "^27.5.1" - leven "^3.1.0" - pretty-format "^27.5.1" - -jest-watch-typeahead@^1.0.0: - version "1.1.0" - resolved "http://localhost:4873/jest-watch-typeahead/-/jest-watch-typeahead-1.1.0.tgz#b4a6826dfb9c9420da2f7bc900de59dad11266a9" - integrity sha512-Va5nLSJTN7YFtC2jd+7wsoe1pNe5K4ShLux/E5iHEwlB9AxaxmggY7to9KUqKojhaJw3aXqt5WAb4jGPOolpEw== - dependencies: - ansi-escapes "^4.3.1" - chalk "^4.0.0" - jest-regex-util "^28.0.0" - jest-watcher "^28.0.0" - slash "^4.0.0" - string-length "^5.0.1" - strip-ansi "^7.0.1" - -jest-watcher@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-watcher/-/jest-watcher-27.5.1.tgz#71bd85fb9bde3a2c2ec4dc353437971c43c642a2" - integrity 
sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw== - dependencies: - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - jest-util "^27.5.1" - string-length "^4.0.1" - -jest-watcher@^28.0.0: - version "28.1.3" - resolved "http://localhost:4873/jest-watcher/-/jest-watcher-28.1.3.tgz#c6023a59ba2255e3b4c57179fc94164b3e73abd4" - integrity sha512-t4qcqj9hze+jviFPUN3YAtAEeFnr/azITXQEMARf5cMwKY2SMBRnCQTXLixTl20OR6mLh9KLMrgVJgJISym+1g== - dependencies: - "@jest/test-result" "^28.1.3" - "@jest/types" "^28.1.3" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - emittery "^0.10.2" - jest-util "^28.1.3" - string-length "^4.0.1" - -jest-worker@^26.2.1: - version "26.6.2" - resolved "http://localhost:4873/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" - integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^7.0.0" - -jest-worker@^27.0.2, jest-worker@^27.4.5, jest-worker@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" - integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^8.0.0" - -jest-worker@^28.0.2: - version "28.1.3" - resolved "http://localhost:4873/jest-worker/-/jest-worker-28.1.3.tgz#7e3c4ce3fa23d1bb6accb169e7f396f98ed4bb98" - integrity sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^8.0.0" - -jest@^27.4.3: - version "27.5.1" - resolved "http://localhost:4873/jest/-/jest-27.5.1.tgz#dadf33ba70a779be7a6fc33015843b51494f63fc" - integrity 
sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ== - dependencies: - "@jest/core" "^27.5.1" - import-local "^3.0.2" - jest-cli "^27.5.1" - -js-sdsl@^4.1.4: - version "4.1.5" - resolved "http://localhost:4873/js-sdsl/-/js-sdsl-4.1.5.tgz#1ff1645e6b4d1b028cd3f862db88c9d887f26e2a" - integrity sha512-08bOAKweV2NUC1wqTtf3qZlnpOX/R2DU9ikpjOHs0H+ibQv3zpncVQg6um4uYtRtrwIX8M4Nh3ytK4HGlYAq7Q== - -"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -js-yaml@^3.13.1: - version "3.14.1" - resolved "http://localhost:4873/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" - integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -js-yaml@^4.1.0: - version "4.1.0" - resolved "http://localhost:4873/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" - integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== - dependencies: - argparse "^2.0.1" - -jsdom@^16.6.0: - version "16.7.0" - resolved "http://localhost:4873/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" - integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== - dependencies: - abab "^2.0.5" - acorn "^8.2.4" - acorn-globals "^6.0.0" - cssom "^0.4.4" - cssstyle "^2.3.0" - data-urls "^2.0.0" - decimal.js "^10.2.1" - domexception "^2.0.1" - escodegen "^2.0.0" - form-data "^3.0.0" - html-encoding-sniffer "^2.0.1" - http-proxy-agent "^4.0.1" - https-proxy-agent "^5.0.0" - is-potential-custom-element-name "^1.0.1" - nwsapi "^2.2.0" - parse5 "6.0.1" - saxes "^5.0.1" - symbol-tree "^3.2.4" - tough-cookie 
"^4.0.0" - w3c-hr-time "^1.0.2" - w3c-xmlserializer "^2.0.0" - webidl-conversions "^6.1.0" - whatwg-encoding "^1.0.5" - whatwg-mimetype "^2.3.0" - whatwg-url "^8.5.0" - ws "^7.4.6" - xml-name-validator "^3.0.0" - -jsesc@^2.5.1: - version "2.5.2" - resolved "http://localhost:4873/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" - integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== - -jsesc@~0.5.0: - version "0.5.0" - resolved "http://localhost:4873/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" - integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== - -json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: - version "2.3.1" - resolved "http://localhost:4873/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" - integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== - -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved "http://localhost:4873/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json-schema-traverse@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" - integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== - -json-schema@^0.4.0: - version "0.4.0" - resolved "http://localhost:4873/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" - integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== - -json-stable-stringify-without-jsonify@^1.0.1: - version "1.0.1" - resolved 
"http://localhost:4873/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" - integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== - -json5@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" - integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== - dependencies: - minimist "^1.2.0" - -json5@^2.1.2, json5@^2.2.0, json5@^2.2.1: - version "2.2.1" - resolved "http://localhost:4873/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" - integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== - -jsonfile@^6.0.1: - version "6.1.0" - resolved "http://localhost:4873/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" - integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== - dependencies: - universalify "^2.0.0" - optionalDependencies: - graceful-fs "^4.1.6" - -jsonpointer@^5.0.0: - version "5.0.1" - resolved "http://localhost:4873/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" - integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== - -"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.3.2: - version "3.3.3" - resolved "http://localhost:4873/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz#76b3e6e6cece5c69d49a5792c3d01bd1a0cdc7ea" - integrity sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw== - dependencies: - array-includes "^3.1.5" - object.assign "^4.1.3" - -kind-of@^6.0.2: - version "6.0.3" - resolved "http://localhost:4873/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" - integrity 
sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== - -kleur@^3.0.3: - version "3.0.3" - resolved "http://localhost:4873/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" - integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== - -klona@^2.0.4, klona@^2.0.5: - version "2.0.5" - resolved "http://localhost:4873/klona/-/klona-2.0.5.tgz#d166574d90076395d9963aa7a928fabb8d76afbc" - integrity sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ== - -language-subtag-registry@~0.3.2: - version "0.3.22" - resolved "http://localhost:4873/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz#2e1500861b2e457eba7e7ae86877cbd08fa1fd1d" - integrity sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w== - -language-tags@^1.0.5: - version "1.0.5" - resolved "http://localhost:4873/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a" - integrity sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ== - dependencies: - language-subtag-registry "~0.3.2" - -leven@^3.1.0: - version "3.1.0" - resolved "http://localhost:4873/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" - integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== - -levn@^0.4.1: - version "0.4.1" - resolved "http://localhost:4873/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" - integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== - dependencies: - prelude-ls "^1.2.1" - type-check "~0.4.0" - -levn@~0.3.0: - version "0.3.0" - resolved "http://localhost:4873/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" - integrity sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA== 
- dependencies: - prelude-ls "~1.1.2" - type-check "~0.3.2" - -lilconfig@^2.0.3, lilconfig@^2.0.5, lilconfig@^2.0.6: - version "2.0.6" - resolved "http://localhost:4873/lilconfig/-/lilconfig-2.0.6.tgz#32a384558bd58af3d4c6e077dd1ad1d397bc69d4" - integrity sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg== - -lines-and-columns@^1.1.6: - version "1.2.4" - resolved "http://localhost:4873/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" - integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== - -loader-runner@^4.2.0: - version "4.3.0" - resolved "http://localhost:4873/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" - integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== - -loader-utils@^2.0.0: - version "2.0.2" - resolved "http://localhost:4873/loader-utils/-/loader-utils-2.0.2.tgz#d6e3b4fb81870721ae4e0868ab11dd638368c129" - integrity sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A== - dependencies: - big.js "^5.2.2" - emojis-list "^3.0.0" - json5 "^2.1.2" - -loader-utils@^3.2.0: - version "3.2.0" - resolved "http://localhost:4873/loader-utils/-/loader-utils-3.2.0.tgz#bcecc51a7898bee7473d4bc6b845b23af8304d4f" - integrity sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ== - -locate-path@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" - integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== - dependencies: - p-locate "^3.0.0" - path-exists "^3.0.0" - -locate-path@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" - integrity 
sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== - dependencies: - p-locate "^4.1.0" - -locate-path@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" - integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== - dependencies: - p-locate "^5.0.0" - -lodash.debounce@^4.0.8: - version "4.0.8" - resolved "http://localhost:4873/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" - integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== - -lodash.memoize@^4.1.2: - version "4.1.2" - resolved "http://localhost:4873/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" - integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== - -lodash.merge@^4.6.2: - version "4.6.2" - resolved "http://localhost:4873/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" - integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== - -lodash.sortby@^4.7.0: - version "4.7.0" - resolved "http://localhost:4873/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" - integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== - -lodash.uniq@^4.5.0: - version "4.5.0" - resolved "http://localhost:4873/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" - integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== - -lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0: - version "4.17.21" - resolved "http://localhost:4873/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" - integrity 
sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== - -loose-envify@^1.1.0, loose-envify@^1.4.0: - version "1.4.0" - resolved "http://localhost:4873/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" - integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== - dependencies: - js-tokens "^3.0.0 || ^4.0.0" - -lower-case@^2.0.2: - version "2.0.2" - resolved "http://localhost:4873/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" - integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== - dependencies: - tslib "^2.0.3" - -lru-cache@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" - integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== - dependencies: - yallist "^4.0.0" - -lz-string@^1.4.4: - version "1.4.4" - resolved "http://localhost:4873/lz-string/-/lz-string-1.4.4.tgz#c0d8eaf36059f705796e1e344811cf4c498d3a26" - integrity sha512-0ckx7ZHRPqb0oUm8zNr+90mtf9DQB60H1wMCjBtfi62Kl3a7JbHob6gA2bC+xRvZoOL+1hzUK8jeuEIQE8svEQ== - -magic-string@^0.25.0, magic-string@^0.25.7: - version "0.25.9" - resolved "http://localhost:4873/magic-string/-/magic-string-0.25.9.tgz#de7f9faf91ef8a1c91d02c2e5314c8277dbcdd1c" - integrity sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ== - dependencies: - sourcemap-codec "^1.4.8" - -make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: - version "3.1.0" - resolved "http://localhost:4873/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" - integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== - dependencies: - semver "^6.0.0" - -makeerror@1.0.12: - version "1.0.12" - resolved 
"http://localhost:4873/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" - integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== - dependencies: - tmpl "1.0.5" - -mdn-data@2.0.14: - version "2.0.14" - resolved "http://localhost:4873/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" - integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== - -mdn-data@2.0.4: - version "2.0.4" - resolved "http://localhost:4873/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b" - integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA== - -media-typer@0.3.0: - version "0.3.0" - resolved "http://localhost:4873/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" - integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== - -memfs@^3.1.2, memfs@^3.4.3: - version "3.4.7" - resolved "http://localhost:4873/memfs/-/memfs-3.4.7.tgz#e5252ad2242a724f938cb937e3c4f7ceb1f70e5a" - integrity sha512-ygaiUSNalBX85388uskeCyhSAoOSgzBbtVCr9jA2RROssFL9Q19/ZXFqS+2Th2sr1ewNIWgFdLzLC3Yl1Zv+lw== - dependencies: - fs-monkey "^1.0.3" - -merge-descriptors@1.0.1: - version "1.0.1" - resolved "http://localhost:4873/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== - -merge-stream@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" - integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== - -merge2@^1.3.0, merge2@^1.4.1: - version "1.4.1" - resolved "http://localhost:4873/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" - 
integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== - -methods@~1.1.2: - version "1.1.2" - resolved "http://localhost:4873/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" - integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== - -micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: - version "4.0.5" - resolved "http://localhost:4873/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== - dependencies: - braces "^3.0.2" - picomatch "^2.3.1" - -mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": - version "1.52.0" - resolved "http://localhost:4873/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== - -mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34: - version "2.1.35" - resolved "http://localhost:4873/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" - integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== - dependencies: - mime-db "1.52.0" - -mime@1.6.0: - version "1.6.0" - resolved "http://localhost:4873/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" - integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== - -mimic-fn@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" - integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== - -min-indent@^1.0.0: - version "1.0.1" - resolved 
"http://localhost:4873/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" - integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== - -mini-css-extract-plugin@^2.4.5: - version "2.6.1" - resolved "http://localhost:4873/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.1.tgz#9a1251d15f2035c342d99a468ab9da7a0451b71e" - integrity sha512-wd+SD57/K6DiV7jIR34P+s3uckTRuQvx0tKPcvjFlrEylk6P4mQ2KSWk1hblj1Kxaqok7LogKOieygXqBczNlg== - dependencies: - schema-utils "^4.0.0" - -minimalistic-assert@^1.0.0: - version "1.0.1" - resolved "http://localhost:4873/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" - integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== - -minimatch@3.0.4: - version "3.0.4" - resolved "http://localhost:4873/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== - dependencies: - brace-expansion "^1.1.7" - -minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: - version "3.1.2" - resolved "http://localhost:4873/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== - dependencies: - brace-expansion "^1.1.7" - -minimatch@^5.0.1: - version "5.1.0" - resolved "http://localhost:4873/minimatch/-/minimatch-5.1.0.tgz#1717b464f4971b144f6aabe8f2d0b8e4511e09c7" - integrity sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg== - dependencies: - brace-expansion "^2.0.1" - -minimist@^1.2.0, minimist@^1.2.6: - version "1.2.6" - resolved "http://localhost:4873/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" - integrity 
sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== - -mkdirp@~0.5.1: - version "0.5.6" - resolved "http://localhost:4873/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" - integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== - dependencies: - minimist "^1.2.6" - -ms@2.0.0: - version "2.0.0" - resolved "http://localhost:4873/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== - -ms@2.1.2: - version "2.1.2" - resolved "http://localhost:4873/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -ms@2.1.3, ms@^2.1.1: - version "2.1.3" - resolved "http://localhost:4873/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" - integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== - -multicast-dns@^7.2.5: - version "7.2.5" - resolved "http://localhost:4873/multicast-dns/-/multicast-dns-7.2.5.tgz#77eb46057f4d7adbd16d9290fa7299f6fa64cced" - integrity sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg== - dependencies: - dns-packet "^5.2.2" - thunky "^1.0.2" - -nanoid@^3.3.4: - version "3.3.4" - resolved "http://localhost:4873/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" - integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== - -natural-compare@^1.4.0: - version "1.4.0" - resolved "http://localhost:4873/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" - integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== - -negotiator@0.6.3: - version "0.6.3" - resolved 
"http://localhost:4873/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" - integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== - -neo-async@^2.6.2: - version "2.6.2" - resolved "http://localhost:4873/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" - integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== - -no-case@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" - integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== - dependencies: - lower-case "^2.0.2" - tslib "^2.0.3" - -node-forge@^1: - version "1.3.1" - resolved "http://localhost:4873/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" - integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== - -node-int64@^0.4.0: - version "0.4.0" - resolved "http://localhost:4873/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" - integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== - -node-releases@^2.0.6: - version "2.0.6" - resolved "http://localhost:4873/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" - integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== - -normalize-path@^3.0.0, normalize-path@~3.0.0: - version "3.0.0" - resolved "http://localhost:4873/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" - integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== - -normalize-range@^0.1.2: - version "0.1.2" - resolved 
"http://localhost:4873/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" - integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== - -normalize-url@^6.0.1: - version "6.1.0" - resolved "http://localhost:4873/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" - integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== - -npm-run-path@^4.0.1: - version "4.0.1" - resolved "http://localhost:4873/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" - integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== - dependencies: - path-key "^3.0.0" - -nth-check@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" - integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== - dependencies: - boolbase "~1.0.0" - -nth-check@^2.0.1: - version "2.1.1" - resolved "http://localhost:4873/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" - integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== - dependencies: - boolbase "^1.0.0" - -nwsapi@^2.2.0: - version "2.2.2" - resolved "http://localhost:4873/nwsapi/-/nwsapi-2.2.2.tgz#e5418863e7905df67d51ec95938d67bf801f0bb0" - integrity sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw== - -object-assign@^4.1.1: - version "4.1.1" - resolved "http://localhost:4873/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== - -object-hash@^3.0.0: - version "3.0.0" - resolved 
"http://localhost:4873/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" - integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== - -object-inspect@^1.12.2, object-inspect@^1.9.0: - version "1.12.2" - resolved "http://localhost:4873/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" - integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== - -object-keys@^1.1.1: - version "1.1.1" - resolved "http://localhost:4873/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" - integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== - -object.assign@^4.1.0, object.assign@^4.1.3, object.assign@^4.1.4: - version "4.1.4" - resolved "http://localhost:4873/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" - integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.4" - has-symbols "^1.0.3" - object-keys "^1.1.1" - -object.entries@^1.1.5: - version "1.1.5" - resolved "http://localhost:4873/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861" - integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - -object.fromentries@^2.0.5: - version "2.0.5" - resolved "http://localhost:4873/object.fromentries/-/object.fromentries-2.0.5.tgz#7b37b205109c21e741e605727fe8b0ad5fa08251" - integrity sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - -object.getownpropertydescriptors@^2.1.0: - version "2.1.4" - resolved 
"http://localhost:4873/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.4.tgz#7965e6437a57278b587383831a9b829455a4bc37" - integrity sha512-sccv3L/pMModT6dJAYF3fzGMVcb38ysQ0tEE6ixv2yXJDtEIPph268OlAdJj5/qZMZDq2g/jqvwppt36uS/uQQ== - dependencies: - array.prototype.reduce "^1.0.4" - call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.20.1" - -object.hasown@^1.1.1: - version "1.1.1" - resolved "http://localhost:4873/object.hasown/-/object.hasown-1.1.1.tgz#ad1eecc60d03f49460600430d97f23882cf592a3" - integrity sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A== - dependencies: - define-properties "^1.1.4" - es-abstract "^1.19.5" - -object.values@^1.1.0, object.values@^1.1.5: - version "1.1.5" - resolved "http://localhost:4873/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" - integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - -obuf@^1.0.0, obuf@^1.1.2: - version "1.1.2" - resolved "http://localhost:4873/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" - integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== - -on-finished@2.4.1: - version "2.4.1" - resolved "http://localhost:4873/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" - integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== - dependencies: - ee-first "1.1.1" - -on-headers@~1.0.2: - version "1.0.2" - resolved "http://localhost:4873/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" - integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== - -once@^1.3.0: - version "1.4.0" - resolved 
"http://localhost:4873/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== - dependencies: - wrappy "1" - -onetime@^5.1.2: - version "5.1.2" - resolved "http://localhost:4873/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" - integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== - dependencies: - mimic-fn "^2.1.0" - -open@^8.0.9, open@^8.4.0: - version "8.4.0" - resolved "http://localhost:4873/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8" - integrity sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q== - dependencies: - define-lazy-prop "^2.0.0" - is-docker "^2.1.1" - is-wsl "^2.2.0" - -optionator@^0.8.1: - version "0.8.3" - resolved "http://localhost:4873/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" - integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== - dependencies: - deep-is "~0.1.3" - fast-levenshtein "~2.0.6" - levn "~0.3.0" - prelude-ls "~1.1.2" - type-check "~0.3.2" - word-wrap "~1.2.3" - -optionator@^0.9.1: - version "0.9.1" - resolved "http://localhost:4873/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" - integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== - dependencies: - deep-is "^0.1.3" - fast-levenshtein "^2.0.6" - levn "^0.4.1" - prelude-ls "^1.2.1" - type-check "^0.4.0" - word-wrap "^1.2.3" - -p-limit@^2.0.0, p-limit@^2.2.0: - version "2.3.0" - resolved "http://localhost:4873/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" - integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== - dependencies: - p-try "^2.0.0" - -p-limit@^3.0.2: - version "3.1.0" - resolved 
"http://localhost:4873/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" - integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== - dependencies: - yocto-queue "^0.1.0" - -p-locate@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" - integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== - dependencies: - p-limit "^2.0.0" - -p-locate@^4.1.0: - version "4.1.0" - resolved "http://localhost:4873/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" - integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== - dependencies: - p-limit "^2.2.0" - -p-locate@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" - integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== - dependencies: - p-limit "^3.0.2" - -p-retry@^4.5.0: - version "4.6.2" - resolved "http://localhost:4873/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" - integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== - dependencies: - "@types/retry" "0.12.0" - retry "^0.13.1" - -p-try@^2.0.0: - version "2.2.0" - resolved "http://localhost:4873/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" - integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== - -param-case@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" - integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== - dependencies: - dot-case "^3.0.4" - tslib "^2.0.3" - -parent-module@^1.0.0: - version 
"1.0.1" - resolved "http://localhost:4873/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" - integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== - dependencies: - callsites "^3.0.0" - -parse-json@^5.0.0, parse-json@^5.2.0: - version "5.2.0" - resolved "http://localhost:4873/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" - integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== - dependencies: - "@babel/code-frame" "^7.0.0" - error-ex "^1.3.1" - json-parse-even-better-errors "^2.3.0" - lines-and-columns "^1.1.6" - -parse5@6.0.1: - version "6.0.1" - resolved "http://localhost:4873/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" - integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== - -parseurl@~1.3.2, parseurl@~1.3.3: - version "1.3.3" - resolved "http://localhost:4873/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" - integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== - -pascal-case@^3.1.2: - version "3.1.2" - resolved "http://localhost:4873/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" - integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== - dependencies: - no-case "^3.0.4" - tslib "^2.0.3" - -path-exists@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" - integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ== - -path-exists@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" - integrity 
sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "http://localhost:4873/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== - -path-key@^3.0.0, path-key@^3.1.0: - version "3.1.1" - resolved "http://localhost:4873/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" - integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== - -path-parse@^1.0.7: - version "1.0.7" - resolved "http://localhost:4873/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" - integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== - -path-to-regexp@0.1.7: - version "0.1.7" - resolved "http://localhost:4873/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== - -path-type@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" - integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== - -performance-now@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" - integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== - -picocolors@^0.2.1: - version "0.2.1" - resolved "http://localhost:4873/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" - integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== - -picocolors@^1.0.0: - version "1.0.0" - 
resolved "http://localhost:4873/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" - integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== - -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: - version "2.3.1" - resolved "http://localhost:4873/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" - integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== - -pify@^2.3.0: - version "2.3.0" - resolved "http://localhost:4873/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== - -pirates@^4.0.4: - version "4.0.5" - resolved "http://localhost:4873/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" - integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== - -pkg-dir@^4.1.0, pkg-dir@^4.2.0: - version "4.2.0" - resolved "http://localhost:4873/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" - integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== - dependencies: - find-up "^4.0.0" - -pkg-up@^3.1.0: - version "3.1.0" - resolved "http://localhost:4873/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" - integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== - dependencies: - find-up "^3.0.0" - -postcss-attribute-case-insensitive@^5.0.2: - version "5.0.2" - resolved "http://localhost:4873/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-5.0.2.tgz#03d761b24afc04c09e757e92ff53716ae8ea2741" - integrity sha512-XIidXV8fDr0kKt28vqki84fRK8VW8eTuIa4PChv2MqKuT6C9UjmSKzen6KaWhWEoYvwxFCa7n/tC1SZ3tyq4SQ== - dependencies: - postcss-selector-parser "^6.0.10" - 
-postcss-browser-comments@^4: - version "4.0.0" - resolved "http://localhost:4873/postcss-browser-comments/-/postcss-browser-comments-4.0.0.tgz#bcfc86134df5807f5d3c0eefa191d42136b5e72a" - integrity sha512-X9X9/WN3KIvY9+hNERUqX9gncsgBA25XaeR+jshHz2j8+sYyHktHw1JdKuMjeLpGktXidqDhA7b/qm1mrBDmgg== - -postcss-calc@^8.2.3: - version "8.2.4" - resolved "http://localhost:4873/postcss-calc/-/postcss-calc-8.2.4.tgz#77b9c29bfcbe8a07ff6693dc87050828889739a5" - integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== - dependencies: - postcss-selector-parser "^6.0.9" - postcss-value-parser "^4.2.0" - -postcss-clamp@^4.1.0: - version "4.1.0" - resolved "http://localhost:4873/postcss-clamp/-/postcss-clamp-4.1.0.tgz#7263e95abadd8c2ba1bd911b0b5a5c9c93e02363" - integrity sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-color-functional-notation@^4.2.4: - version "4.2.4" - resolved "http://localhost:4873/postcss-color-functional-notation/-/postcss-color-functional-notation-4.2.4.tgz#21a909e8d7454d3612d1659e471ce4696f28caec" - integrity sha512-2yrTAUZUab9s6CpxkxC4rVgFEVaR6/2Pipvi6qcgvnYiVqZcbDHEoBDhrXzyb7Efh2CCfHQNtcqWcIruDTIUeg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-color-hex-alpha@^8.0.4: - version "8.0.4" - resolved "http://localhost:4873/postcss-color-hex-alpha/-/postcss-color-hex-alpha-8.0.4.tgz#c66e2980f2fbc1a63f5b079663340ce8b55f25a5" - integrity sha512-nLo2DCRC9eE4w2JmuKgVA3fGL3d01kGq752pVALF68qpGLmx2Qrk91QTKkdUqqp45T1K1XV8IhQpcu1hoAQflQ== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-color-rebeccapurple@^7.1.1: - version "7.1.1" - resolved "http://localhost:4873/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-7.1.1.tgz#63fdab91d878ebc4dd4b7c02619a0c3d6a56ced0" - integrity sha512-pGxkuVEInwLHgkNxUc4sdg4g3py7zUeCQ9sMfwyHAT+Ezk8a4OaaVZ8lIY5+oNqA/BXXgLyXv0+5wHP68R79hg== - 
dependencies: - postcss-value-parser "^4.2.0" - -postcss-colormin@^5.3.0: - version "5.3.0" - resolved "http://localhost:4873/postcss-colormin/-/postcss-colormin-5.3.0.tgz#3cee9e5ca62b2c27e84fce63affc0cfb5901956a" - integrity sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg== - dependencies: - browserslist "^4.16.6" - caniuse-api "^3.0.0" - colord "^2.9.1" - postcss-value-parser "^4.2.0" - -postcss-convert-values@^5.1.2: - version "5.1.2" - resolved "http://localhost:4873/postcss-convert-values/-/postcss-convert-values-5.1.2.tgz#31586df4e184c2e8890e8b34a0b9355313f503ab" - integrity sha512-c6Hzc4GAv95B7suy4udszX9Zy4ETyMCgFPUDtWjdFTKH1SE9eFY/jEpHSwTH1QPuwxHpWslhckUQWbNRM4ho5g== - dependencies: - browserslist "^4.20.3" - postcss-value-parser "^4.2.0" - -postcss-custom-media@^8.0.2: - version "8.0.2" - resolved "http://localhost:4873/postcss-custom-media/-/postcss-custom-media-8.0.2.tgz#c8f9637edf45fef761b014c024cee013f80529ea" - integrity sha512-7yi25vDAoHAkbhAzX9dHx2yc6ntS4jQvejrNcC+csQJAXjj15e7VcWfMgLqBNAbOvqi5uIa9huOVwdHbf+sKqg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-custom-properties@^12.1.9: - version "12.1.9" - resolved "http://localhost:4873/postcss-custom-properties/-/postcss-custom-properties-12.1.9.tgz#0883429a7ef99f1ba239d1fea29ce84906daa8bd" - integrity sha512-/E7PRvK8DAVljBbeWrcEQJPG72jaImxF3vvCNFwv9cC8CzigVoNIpeyfnJzphnN3Fd8/auBf5wvkw6W9MfmTyg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-custom-selectors@^6.0.3: - version "6.0.3" - resolved "http://localhost:4873/postcss-custom-selectors/-/postcss-custom-selectors-6.0.3.tgz#1ab4684d65f30fed175520f82d223db0337239d9" - integrity sha512-fgVkmyiWDwmD3JbpCmB45SvvlCD6z9CG6Ie6Iere22W5aHea6oWa7EM2bpnv2Fj3I94L3VbtvX9KqwSi5aFzSg== - dependencies: - postcss-selector-parser "^6.0.4" - -postcss-dir-pseudo-class@^6.0.5: - version "6.0.5" - resolved 
"http://localhost:4873/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-6.0.5.tgz#2bf31de5de76added44e0a25ecf60ae9f7c7c26c" - integrity sha512-eqn4m70P031PF7ZQIvSgy9RSJ5uI2171O/OO/zcRNYpJbvaeKFUlar1aJ7rmgiQtbm0FSPsRewjpdS0Oew7MPA== - dependencies: - postcss-selector-parser "^6.0.10" - -postcss-discard-comments@^5.1.2: - version "5.1.2" - resolved "http://localhost:4873/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz#8df5e81d2925af2780075840c1526f0660e53696" - integrity sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ== - -postcss-discard-duplicates@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz#9eb4fe8456706a4eebd6d3b7b777d07bad03e848" - integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== - -postcss-discard-empty@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz#e57762343ff7f503fe53fca553d18d7f0c369c6c" - integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== - -postcss-discard-overridden@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz#7e8c5b53325747e9d90131bb88635282fb4a276e" - integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== - -postcss-double-position-gradients@^3.1.2: - version "3.1.2" - resolved "http://localhost:4873/postcss-double-position-gradients/-/postcss-double-position-gradients-3.1.2.tgz#b96318fdb477be95997e86edd29c6e3557a49b91" - integrity sha512-GX+FuE/uBR6eskOK+4vkXgT6pDkexLokPaz/AbJna9s5Kzp/yl488pKPjhy0obB475ovfT1Wv8ho7U/cHNaRgQ== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -postcss-env-function@^4.0.6: - version "4.0.6" - resolved 
"http://localhost:4873/postcss-env-function/-/postcss-env-function-4.0.6.tgz#7b2d24c812f540ed6eda4c81f6090416722a8e7a" - integrity sha512-kpA6FsLra+NqcFnL81TnsU+Z7orGtDTxcOhl6pwXeEq1yFPpRMkCDpHhrz8CFQDr/Wfm0jLiNQ1OsGGPjlqPwA== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-flexbugs-fixes@^5.0.2: - version "5.0.2" - resolved "http://localhost:4873/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-5.0.2.tgz#2028e145313074fc9abe276cb7ca14e5401eb49d" - integrity sha512-18f9voByak7bTktR2QgDveglpn9DTbBWPUzSOe9g0N4WR/2eSt6Vrcbf0hmspvMI6YWGywz6B9f7jzpFNJJgnQ== - -postcss-focus-visible@^6.0.4: - version "6.0.4" - resolved "http://localhost:4873/postcss-focus-visible/-/postcss-focus-visible-6.0.4.tgz#50c9ea9afa0ee657fb75635fabad25e18d76bf9e" - integrity sha512-QcKuUU/dgNsstIK6HELFRT5Y3lbrMLEOwG+A4s5cA+fx3A3y/JTq3X9LaOj3OC3ALH0XqyrgQIgey/MIZ8Wczw== - dependencies: - postcss-selector-parser "^6.0.9" - -postcss-focus-within@^5.0.4: - version "5.0.4" - resolved "http://localhost:4873/postcss-focus-within/-/postcss-focus-within-5.0.4.tgz#5b1d2ec603195f3344b716c0b75f61e44e8d2e20" - integrity sha512-vvjDN++C0mu8jz4af5d52CB184ogg/sSxAFS+oUJQq2SuCe7T5U2iIsVJtsCp2d6R4j0jr5+q3rPkBVZkXD9fQ== - dependencies: - postcss-selector-parser "^6.0.9" - -postcss-font-variant@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz#efd59b4b7ea8bb06127f2d031bfbb7f24d32fa66" - integrity sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA== - -postcss-gap-properties@^3.0.5: - version "3.0.5" - resolved "http://localhost:4873/postcss-gap-properties/-/postcss-gap-properties-3.0.5.tgz#f7e3cddcf73ee19e94ccf7cb77773f9560aa2fff" - integrity sha512-IuE6gKSdoUNcvkGIqdtjtcMtZIFyXZhmFd5RUlg97iVEvp1BZKV5ngsAjCjrVy+14uhGBQl9tzmi1Qwq4kqVOg== - -postcss-image-set-function@^4.0.7: - version "4.0.7" - resolved 
"http://localhost:4873/postcss-image-set-function/-/postcss-image-set-function-4.0.7.tgz#08353bd756f1cbfb3b6e93182c7829879114481f" - integrity sha512-9T2r9rsvYzm5ndsBE8WgtrMlIT7VbtTfE7b3BQnudUqnBcBo7L758oc+o+pdj/dUV0l5wjwSdjeOH2DZtfv8qw== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-import@^14.1.0: - version "14.1.0" - resolved "http://localhost:4873/postcss-import/-/postcss-import-14.1.0.tgz#a7333ffe32f0b8795303ee9e40215dac922781f0" - integrity sha512-flwI+Vgm4SElObFVPpTIT7SU7R3qk2L7PyduMcokiaVKuWv9d/U+Gm/QAd8NDLuykTWTkcrjOeD2Pp1rMeBTGw== - dependencies: - postcss-value-parser "^4.0.0" - read-cache "^1.0.0" - resolve "^1.1.7" - -postcss-initial@^4.0.1: - version "4.0.1" - resolved "http://localhost:4873/postcss-initial/-/postcss-initial-4.0.1.tgz#529f735f72c5724a0fb30527df6fb7ac54d7de42" - integrity sha512-0ueD7rPqX8Pn1xJIjay0AZeIuDoF+V+VvMt/uOnn+4ezUKhZM/NokDeP6DwMNyIoYByuN/94IQnt5FEkaN59xQ== - -postcss-js@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/postcss-js/-/postcss-js-4.0.0.tgz#31db79889531b80dc7bc9b0ad283e418dce0ac00" - integrity sha512-77QESFBwgX4irogGVPgQ5s07vLvFqWr228qZY+w6lW599cRlK/HmnlivnnVUxkjHnCu4J16PDMHcH+e+2HbvTQ== - dependencies: - camelcase-css "^2.0.1" - -postcss-lab-function@^4.2.1: - version "4.2.1" - resolved "http://localhost:4873/postcss-lab-function/-/postcss-lab-function-4.2.1.tgz#6fe4c015102ff7cd27d1bd5385582f67ebdbdc98" - integrity sha512-xuXll4isR03CrQsmxyz92LJB2xX9n+pZJ5jE9JgcnmsCammLyKdlzrBin+25dy6wIjfhJpKBAN80gsTlCgRk2w== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -postcss-load-config@^3.1.4: - version "3.1.4" - resolved "http://localhost:4873/postcss-load-config/-/postcss-load-config-3.1.4.tgz#1ab2571faf84bb078877e1d07905eabe9ebda855" - integrity sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg== - dependencies: - lilconfig "^2.0.5" - yaml "^1.10.2" - -postcss-loader@^6.2.1: - version 
"6.2.1" - resolved "http://localhost:4873/postcss-loader/-/postcss-loader-6.2.1.tgz#0895f7346b1702103d30fdc66e4d494a93c008ef" - integrity sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q== - dependencies: - cosmiconfig "^7.0.0" - klona "^2.0.5" - semver "^7.3.5" - -postcss-logical@^5.0.4: - version "5.0.4" - resolved "http://localhost:4873/postcss-logical/-/postcss-logical-5.0.4.tgz#ec75b1ee54421acc04d5921576b7d8db6b0e6f73" - integrity sha512-RHXxplCeLh9VjinvMrZONq7im4wjWGlRJAqmAVLXyZaXwfDWP73/oq4NdIp+OZwhQUMj0zjqDfM5Fj7qby+B4g== - -postcss-media-minmax@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/postcss-media-minmax/-/postcss-media-minmax-5.0.0.tgz#7140bddec173e2d6d657edbd8554a55794e2a5b5" - integrity sha512-yDUvFf9QdFZTuCUg0g0uNSHVlJ5X1lSzDZjPSFaiCWvjgsvu8vEVxtahPrLMinIDEEGnx6cBe6iqdx5YWz08wQ== - -postcss-merge-longhand@^5.1.6: - version "5.1.6" - resolved "http://localhost:4873/postcss-merge-longhand/-/postcss-merge-longhand-5.1.6.tgz#f378a8a7e55766b7b644f48e5d8c789ed7ed51ce" - integrity sha512-6C/UGF/3T5OE2CEbOuX7iNO63dnvqhGZeUnKkDeifebY0XqkkvrctYSZurpNE902LDf2yKwwPFgotnfSoPhQiw== - dependencies: - postcss-value-parser "^4.2.0" - stylehacks "^5.1.0" - -postcss-merge-rules@^5.1.2: - version "5.1.2" - resolved "http://localhost:4873/postcss-merge-rules/-/postcss-merge-rules-5.1.2.tgz#7049a14d4211045412116d79b751def4484473a5" - integrity sha512-zKMUlnw+zYCWoPN6yhPjtcEdlJaMUZ0WyVcxTAmw3lkkN/NDMRkOkiuctQEoWAOvH7twaxUUdvBWl0d4+hifRQ== - dependencies: - browserslist "^4.16.6" - caniuse-api "^3.0.0" - cssnano-utils "^3.1.0" - postcss-selector-parser "^6.0.5" - -postcss-minify-font-values@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz#f1df0014a726083d260d3bd85d7385fb89d1f01b" - integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== - dependencies: - postcss-value-parser "^4.2.0" - 
-postcss-minify-gradients@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz#f1fe1b4f498134a5068240c2f25d46fcd236ba2c" - integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== - dependencies: - colord "^2.9.1" - cssnano-utils "^3.1.0" - postcss-value-parser "^4.2.0" - -postcss-minify-params@^5.1.3: - version "5.1.3" - resolved "http://localhost:4873/postcss-minify-params/-/postcss-minify-params-5.1.3.tgz#ac41a6465be2db735099bbd1798d85079a6dc1f9" - integrity sha512-bkzpWcjykkqIujNL+EVEPOlLYi/eZ050oImVtHU7b4lFS82jPnsCb44gvC6pxaNt38Els3jWYDHTjHKf0koTgg== - dependencies: - browserslist "^4.16.6" - cssnano-utils "^3.1.0" - postcss-value-parser "^4.2.0" - -postcss-minify-selectors@^5.2.1: - version "5.2.1" - resolved "http://localhost:4873/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz#d4e7e6b46147b8117ea9325a915a801d5fe656c6" - integrity sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg== - dependencies: - postcss-selector-parser "^6.0.5" - -postcss-modules-extract-imports@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" - integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== - -postcss-modules-local-by-default@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz#ebbb54fae1598eecfdf691a02b3ff3b390a5a51c" - integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== - dependencies: - icss-utils "^5.0.0" - postcss-selector-parser "^6.0.2" - postcss-value-parser "^4.1.0" - -postcss-modules-scope@^3.0.0: - version "3.0.0" - resolved 
"http://localhost:4873/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" - integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== - dependencies: - postcss-selector-parser "^6.0.4" - -postcss-modules-values@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" - integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== - dependencies: - icss-utils "^5.0.0" - -postcss-nested@5.0.6: - version "5.0.6" - resolved "http://localhost:4873/postcss-nested/-/postcss-nested-5.0.6.tgz#466343f7fc8d3d46af3e7dba3fcd47d052a945bc" - integrity sha512-rKqm2Fk0KbA8Vt3AdGN0FB9OBOMDVajMG6ZCf/GoHgdxUJ4sBFp0A/uMIRm+MJUdo33YXEtjqIz8u7DAp8B7DA== - dependencies: - postcss-selector-parser "^6.0.6" - -postcss-nesting@^10.2.0: - version "10.2.0" - resolved "http://localhost:4873/postcss-nesting/-/postcss-nesting-10.2.0.tgz#0b12ce0db8edfd2d8ae0aaf86427370b898890be" - integrity sha512-EwMkYchxiDiKUhlJGzWsD9b2zvq/r2SSubcRrgP+jujMXFzqvANLt16lJANC+5uZ6hjI7lpRmI6O8JIl+8l1KA== - dependencies: - "@csstools/selector-specificity" "^2.0.0" - postcss-selector-parser "^6.0.10" - -postcss-normalize-charset@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz#9302de0b29094b52c259e9b2cf8dc0879879f0ed" - integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== - -postcss-normalize-display-values@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz#72abbae58081960e9edd7200fcf21ab8325c3da8" - integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== - dependencies: - postcss-value-parser "^4.2.0" - 
-postcss-normalize-positions@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz#ef97279d894087b59325b45c47f1e863daefbb92" - integrity sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-repeat-style@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz#e9eb96805204f4766df66fd09ed2e13545420fb2" - integrity sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-string@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz#411961169e07308c82c1f8c55f3e8a337757e228" - integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-timing-functions@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz#d5614410f8f0b2388e9f240aa6011ba6f52dafbb" - integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-unicode@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz#3d23aede35e160089a285e27bf715de11dc9db75" - integrity sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ== - dependencies: - browserslist "^4.16.6" - postcss-value-parser "^4.2.0" - -postcss-normalize-url@^5.1.0: - version "5.1.0" - resolved 
"http://localhost:4873/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz#ed9d88ca82e21abef99f743457d3729a042adcdc" - integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== - dependencies: - normalize-url "^6.0.1" - postcss-value-parser "^4.2.0" - -postcss-normalize-whitespace@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz#08a1a0d1ffa17a7cc6efe1e6c9da969cc4493cfa" - integrity sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize@^10.0.1: - version "10.0.1" - resolved "http://localhost:4873/postcss-normalize/-/postcss-normalize-10.0.1.tgz#464692676b52792a06b06880a176279216540dd7" - integrity sha512-+5w18/rDev5mqERcG3W5GZNMJa1eoYYNGo8gB7tEwaos0ajk3ZXAI4mHGcNT47NE+ZnZD1pEpUOFLvltIwmeJA== - dependencies: - "@csstools/normalize.css" "*" - postcss-browser-comments "^4" - sanitize.css "*" - -postcss-opacity-percentage@^1.1.2: - version "1.1.2" - resolved "http://localhost:4873/postcss-opacity-percentage/-/postcss-opacity-percentage-1.1.2.tgz#bd698bb3670a0a27f6d657cc16744b3ebf3b1145" - integrity sha512-lyUfF7miG+yewZ8EAk9XUBIlrHyUE6fijnesuz+Mj5zrIHIEw6KcIZSOk/elVMqzLvREmXB83Zi/5QpNRYd47w== - -postcss-ordered-values@^5.1.3: - version "5.1.3" - resolved "http://localhost:4873/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz#b6fd2bd10f937b23d86bc829c69e7732ce76ea38" - integrity sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ== - dependencies: - cssnano-utils "^3.1.0" - postcss-value-parser "^4.2.0" - -postcss-overflow-shorthand@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/postcss-overflow-shorthand/-/postcss-overflow-shorthand-3.0.4.tgz#7ed6486fec44b76f0eab15aa4866cda5d55d893e" - integrity 
sha512-otYl/ylHK8Y9bcBnPLo3foYFLL6a6Ak+3EQBPOTR7luMYCOsiVTUk1iLvNf6tVPNGXcoL9Hoz37kpfriRIFb4A== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-page-break@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/postcss-page-break/-/postcss-page-break-3.0.4.tgz#7fbf741c233621622b68d435babfb70dd8c1ee5f" - integrity sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ== - -postcss-place@^7.0.5: - version "7.0.5" - resolved "http://localhost:4873/postcss-place/-/postcss-place-7.0.5.tgz#95dbf85fd9656a3a6e60e832b5809914236986c4" - integrity sha512-wR8igaZROA6Z4pv0d+bvVrvGY4GVHihBCBQieXFY3kuSuMyOmEnnfFzHl/tQuqHZkfkIVBEbDvYcFfHmpSet9g== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-preset-env@^7.0.1: - version "7.8.2" - resolved "http://localhost:4873/postcss-preset-env/-/postcss-preset-env-7.8.2.tgz#4c834d5cbd2e29df2abf59118947c456922b79ba" - integrity sha512-rSMUEaOCnovKnwc5LvBDHUDzpGP+nrUeWZGWt9M72fBvckCi45JmnJigUr4QG4zZeOHmOCNCZnd2LKDvP++ZuQ== - dependencies: - "@csstools/postcss-cascade-layers" "^1.1.0" - "@csstools/postcss-color-function" "^1.1.1" - "@csstools/postcss-font-format-keywords" "^1.0.1" - "@csstools/postcss-hwb-function" "^1.0.2" - "@csstools/postcss-ic-unit" "^1.0.1" - "@csstools/postcss-is-pseudo-class" "^2.0.7" - "@csstools/postcss-nested-calc" "^1.0.0" - "@csstools/postcss-normalize-display-values" "^1.0.1" - "@csstools/postcss-oklab-function" "^1.1.1" - "@csstools/postcss-progressive-custom-properties" "^1.3.0" - "@csstools/postcss-stepped-value-functions" "^1.0.1" - "@csstools/postcss-text-decoration-shorthand" "^1.0.0" - "@csstools/postcss-trigonometric-functions" "^1.0.2" - "@csstools/postcss-unset-value" "^1.0.2" - autoprefixer "^10.4.11" - browserslist "^4.21.3" - css-blank-pseudo "^3.0.3" - css-has-pseudo "^3.0.4" - css-prefers-color-scheme "^6.0.3" - cssdb "^7.0.1" - postcss-attribute-case-insensitive "^5.0.2" - postcss-clamp "^4.1.0" - postcss-color-functional-notation 
"^4.2.4" - postcss-color-hex-alpha "^8.0.4" - postcss-color-rebeccapurple "^7.1.1" - postcss-custom-media "^8.0.2" - postcss-custom-properties "^12.1.9" - postcss-custom-selectors "^6.0.3" - postcss-dir-pseudo-class "^6.0.5" - postcss-double-position-gradients "^3.1.2" - postcss-env-function "^4.0.6" - postcss-focus-visible "^6.0.4" - postcss-focus-within "^5.0.4" - postcss-font-variant "^5.0.0" - postcss-gap-properties "^3.0.5" - postcss-image-set-function "^4.0.7" - postcss-initial "^4.0.1" - postcss-lab-function "^4.2.1" - postcss-logical "^5.0.4" - postcss-media-minmax "^5.0.0" - postcss-nesting "^10.2.0" - postcss-opacity-percentage "^1.1.2" - postcss-overflow-shorthand "^3.0.4" - postcss-page-break "^3.0.4" - postcss-place "^7.0.5" - postcss-pseudo-class-any-link "^7.1.6" - postcss-replace-overflow-wrap "^4.0.0" - postcss-selector-not "^6.0.1" - postcss-value-parser "^4.2.0" - -postcss-pseudo-class-any-link@^7.1.6: - version "7.1.6" - resolved "http://localhost:4873/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-7.1.6.tgz#2693b221902da772c278def85a4d9a64b6e617ab" - integrity sha512-9sCtZkO6f/5ML9WcTLcIyV1yz9D1rf0tWc+ulKcvV30s0iZKS/ONyETvoWsr6vnrmW+X+KmuK3gV/w5EWnT37w== - dependencies: - postcss-selector-parser "^6.0.10" - -postcss-reduce-initial@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz#fc31659ea6e85c492fb2a7b545370c215822c5d6" - integrity sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw== - dependencies: - browserslist "^4.16.6" - caniuse-api "^3.0.0" - -postcss-reduce-transforms@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz#333b70e7758b802f3dd0ddfe98bb1ccfef96b6e9" - integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== - dependencies: - postcss-value-parser "^4.2.0" - 
-postcss-replace-overflow-wrap@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz#d2df6bed10b477bf9c52fab28c568b4b29ca4319" - integrity sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw== - -postcss-selector-not@^6.0.1: - version "6.0.1" - resolved "http://localhost:4873/postcss-selector-not/-/postcss-selector-not-6.0.1.tgz#8f0a709bf7d4b45222793fc34409be407537556d" - integrity sha512-1i9affjAe9xu/y9uqWH+tD4r6/hDaXJruk8xn2x1vzxC2U3J3LKO3zJW4CyxlNhA56pADJ/djpEwpH1RClI2rQ== - dependencies: - postcss-selector-parser "^6.0.10" - -postcss-selector-parser@^6.0.10, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.6, postcss-selector-parser@^6.0.9: - version "6.0.10" - resolved "http://localhost:4873/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d" - integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== - dependencies: - cssesc "^3.0.0" - util-deprecate "^1.0.2" - -postcss-svgo@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-svgo/-/postcss-svgo-5.1.0.tgz#0a317400ced789f233a28826e77523f15857d80d" - integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== - dependencies: - postcss-value-parser "^4.2.0" - svgo "^2.7.0" - -postcss-unique-selectors@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz#a9f273d1eacd09e9aa6088f4b0507b18b1b541b6" - integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== - dependencies: - postcss-selector-parser "^6.0.5" - -postcss-value-parser@^4.0.0, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: - version "4.2.0" - resolved 
"http://localhost:4873/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" - integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== - -postcss@^7.0.35: - version "7.0.39" - resolved "http://localhost:4873/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" - integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== - dependencies: - picocolors "^0.2.1" - source-map "^0.6.1" - -postcss@^8.3.5, postcss@^8.4.14, postcss@^8.4.4, postcss@^8.4.7: - version "8.4.17" - resolved "http://localhost:4873/postcss/-/postcss-8.4.17.tgz#f87863ec7cd353f81f7ab2dec5d67d861bbb1be5" - integrity sha512-UNxNOLQydcOFi41yHNMcKRZ39NeXlr8AxGuZJsdub8vIb12fHzcq37DTU/QtbI6WLxNg2gF9Z+8qtRwTj1UI1Q== - dependencies: - nanoid "^3.3.4" - picocolors "^1.0.0" - source-map-js "^1.0.2" - -prelude-ls@^1.2.1: - version "1.2.1" - resolved "http://localhost:4873/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" - integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== - -prelude-ls@~1.1.2: - version "1.1.2" - resolved "http://localhost:4873/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" - integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== - -pretty-bytes@^5.3.0, pretty-bytes@^5.4.1: - version "5.6.0" - resolved "http://localhost:4873/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" - integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== - -pretty-error@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" - integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== - 
dependencies: - lodash "^4.17.20" - renderkid "^3.0.0" - -pretty-format@^27.0.2, pretty-format@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e" - integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ== - dependencies: - ansi-regex "^5.0.1" - ansi-styles "^5.0.0" - react-is "^17.0.1" - -pretty-format@^28.1.3: - version "28.1.3" - resolved "http://localhost:4873/pretty-format/-/pretty-format-28.1.3.tgz#c9fba8cedf99ce50963a11b27d982a9ae90970d5" - integrity sha512-8gFb/To0OmxHR9+ZTb14Df2vNxdGCX8g1xWGUTqUw5TiZvcQf5sHKObd5UcPyLLyowNwDAMTF3XWOG1B6mxl1Q== - dependencies: - "@jest/schemas" "^28.1.3" - ansi-regex "^5.0.1" - ansi-styles "^5.0.0" - react-is "^18.0.0" - -pretty-format@^29.0.0, pretty-format@^29.1.2: - version "29.1.2" - resolved "http://localhost:4873/pretty-format/-/pretty-format-29.1.2.tgz#b1f6b75be7d699be1a051f5da36e8ae9e76a8e6a" - integrity sha512-CGJ6VVGXVRP2o2Dorl4mAwwvDWT25luIsYhkyVQW32E4nL+TgW939J7LlKT/npq5Cpq6j3s+sy+13yk7xYpBmg== - dependencies: - "@jest/schemas" "^29.0.0" - ansi-styles "^5.0.0" - react-is "^18.0.0" - -process-nextick-args@~2.0.0: - version "2.0.1" - resolved "http://localhost:4873/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" - integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== - -promise@^8.1.0: - version "8.2.0" - resolved "http://localhost:4873/promise/-/promise-8.2.0.tgz#a1f6280ab67457fbfc8aad2b198c9497e9e5c806" - integrity sha512-+CMAlLHqwRYwBMXKCP+o8ns7DN+xHDUiI+0nArsiJ9y+kJVPLFxEaSw6Ha9s9H0tftxg2Yzl25wqj9G7m5wLZg== - dependencies: - asap "~2.0.6" - -prompts@^2.0.1, prompts@^2.4.2: - version "2.4.2" - resolved "http://localhost:4873/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" - integrity 
sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== - dependencies: - kleur "^3.0.3" - sisteransi "^1.0.5" - -prop-types@^15.8.1: - version "15.8.1" - resolved "http://localhost:4873/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" - integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== - dependencies: - loose-envify "^1.4.0" - object-assign "^4.1.1" - react-is "^16.13.1" - -proxy-addr@~2.0.7: - version "2.0.7" - resolved "http://localhost:4873/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" - integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== - dependencies: - forwarded "0.2.0" - ipaddr.js "1.9.1" - -psl@^1.1.33: - version "1.9.0" - resolved "http://localhost:4873/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" - integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== - -punycode@^2.1.0, punycode@^2.1.1: - version "2.1.1" - resolved "http://localhost:4873/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" - integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== - -q@^1.1.2: - version "1.5.1" - resolved "http://localhost:4873/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" - integrity sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw== - -qs@6.10.3: - version "6.10.3" - resolved "http://localhost:4873/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" - integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== - dependencies: - side-channel "^1.0.4" - -querystringify@^2.1.1: - version "2.2.0" - resolved "http://localhost:4873/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" - 
integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== - -queue-microtask@^1.2.2: - version "1.2.3" - resolved "http://localhost:4873/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" - integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== - -quick-lru@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" - integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== - -raf@^3.4.1: - version "3.4.1" - resolved "http://localhost:4873/raf/-/raf-3.4.1.tgz#0742e99a4a6552f445d73e3ee0328af0ff1ede39" - integrity sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA== - dependencies: - performance-now "^2.1.0" - -randombytes@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" - integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== - dependencies: - safe-buffer "^5.1.0" - -range-parser@^1.2.1, range-parser@~1.2.1: - version "1.2.1" - resolved "http://localhost:4873/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" - integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== - -raw-body@2.5.1: - version "2.5.1" - resolved "http://localhost:4873/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" - integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== - dependencies: - bytes "3.1.2" - http-errors "2.0.0" - iconv-lite "0.4.24" - unpipe "1.0.0" - -react-app-polyfill@^3.0.0: - version "3.0.0" - resolved 
"http://localhost:4873/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" - integrity sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w== - dependencies: - core-js "^3.19.2" - object-assign "^4.1.1" - promise "^8.1.0" - raf "^3.4.1" - regenerator-runtime "^0.13.9" - whatwg-fetch "^3.6.2" - -react-dev-utils@^12.0.1: - version "12.0.1" - resolved "http://localhost:4873/react-dev-utils/-/react-dev-utils-12.0.1.tgz#ba92edb4a1f379bd46ccd6bcd4e7bc398df33e73" - integrity sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ== - dependencies: - "@babel/code-frame" "^7.16.0" - address "^1.1.2" - browserslist "^4.18.1" - chalk "^4.1.2" - cross-spawn "^7.0.3" - detect-port-alt "^1.1.6" - escape-string-regexp "^4.0.0" - filesize "^8.0.6" - find-up "^5.0.0" - fork-ts-checker-webpack-plugin "^6.5.0" - global-modules "^2.0.0" - globby "^11.0.4" - gzip-size "^6.0.0" - immer "^9.0.7" - is-root "^2.1.0" - loader-utils "^3.2.0" - open "^8.4.0" - pkg-up "^3.1.0" - prompts "^2.4.2" - react-error-overlay "^6.0.11" - recursive-readdir "^2.2.2" - shell-quote "^1.7.3" - strip-ansi "^6.0.1" - text-table "^0.2.0" - -react-dom@^18.2.0: - version "18.2.0" - resolved "http://localhost:4873/react-dom/-/react-dom-18.2.0.tgz#22aaf38708db2674ed9ada224ca4aa708d821e3d" - integrity sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g== - dependencies: - loose-envify "^1.1.0" - scheduler "^0.23.0" - -react-error-overlay@^6.0.11: - version "6.0.11" - resolved "http://localhost:4873/react-error-overlay/-/react-error-overlay-6.0.11.tgz#92835de5841c5cf08ba00ddd2d677b6d17ff9adb" - integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg== - -react-is@^16.13.1: - version "16.13.1" - resolved "http://localhost:4873/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" - integrity 
sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== - -react-is@^17.0.1: - version "17.0.2" - resolved "http://localhost:4873/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" - integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== - -react-is@^18.0.0: - version "18.2.0" - resolved "http://localhost:4873/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" - integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== - -react-refresh@^0.11.0: - version "0.11.0" - resolved "http://localhost:4873/react-refresh/-/react-refresh-0.11.0.tgz#77198b944733f0f1f1a90e791de4541f9f074046" - integrity sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A== - -react-scripts@5.0.1: - version "5.0.1" - resolved "http://localhost:4873/react-scripts/-/react-scripts-5.0.1.tgz#6285dbd65a8ba6e49ca8d651ce30645a6d980003" - integrity sha512-8VAmEm/ZAwQzJ+GOMLbBsTdDKOpuZh7RPs0UymvBR2vRk4iZWCskjbFnxqjrzoIvlNNRZ3QJFx6/qDSi6zSnaQ== - dependencies: - "@babel/core" "^7.16.0" - "@pmmmwh/react-refresh-webpack-plugin" "^0.5.3" - "@svgr/webpack" "^5.5.0" - babel-jest "^27.4.2" - babel-loader "^8.2.3" - babel-plugin-named-asset-import "^0.3.8" - babel-preset-react-app "^10.0.1" - bfj "^7.0.2" - browserslist "^4.18.1" - camelcase "^6.2.1" - case-sensitive-paths-webpack-plugin "^2.4.0" - css-loader "^6.5.1" - css-minimizer-webpack-plugin "^3.2.0" - dotenv "^10.0.0" - dotenv-expand "^5.1.0" - eslint "^8.3.0" - eslint-config-react-app "^7.0.1" - eslint-webpack-plugin "^3.1.1" - file-loader "^6.2.0" - fs-extra "^10.0.0" - html-webpack-plugin "^5.5.0" - identity-obj-proxy "^3.0.0" - jest "^27.4.3" - jest-resolve "^27.4.2" - jest-watch-typeahead "^1.0.0" - mini-css-extract-plugin "^2.4.5" - postcss "^8.4.4" - postcss-flexbugs-fixes "^5.0.2" - postcss-loader "^6.2.1" - postcss-normalize 
"^10.0.1" - postcss-preset-env "^7.0.1" - prompts "^2.4.2" - react-app-polyfill "^3.0.0" - react-dev-utils "^12.0.1" - react-refresh "^0.11.0" - resolve "^1.20.0" - resolve-url-loader "^4.0.0" - sass-loader "^12.3.0" - semver "^7.3.5" - source-map-loader "^3.0.0" - style-loader "^3.3.1" - tailwindcss "^3.0.2" - terser-webpack-plugin "^5.2.5" - webpack "^5.64.4" - webpack-dev-server "^4.6.0" - webpack-manifest-plugin "^4.0.2" - workbox-webpack-plugin "^6.4.1" - optionalDependencies: - fsevents "^2.3.2" - -react@^18.2.0: - version "18.2.0" - resolved "http://localhost:4873/react/-/react-18.2.0.tgz#555bd98592883255fa00de14f1151a917b5d77d5" - integrity sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ== - dependencies: - loose-envify "^1.1.0" - -read-cache@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774" - integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== - dependencies: - pify "^2.3.0" - -readable-stream@^2.0.1: - version "2.3.7" - resolved "http://localhost:4873/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" - integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readable-stream@^3.0.6: - version "3.6.0" - resolved "http://localhost:4873/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" - integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== - dependencies: - inherits "^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" - -readdirp@~3.6.0: - version "3.6.0" - resolved 
"http://localhost:4873/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" - integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== - dependencies: - picomatch "^2.2.1" - -recursive-readdir@^2.2.2: - version "2.2.2" - resolved "http://localhost:4873/recursive-readdir/-/recursive-readdir-2.2.2.tgz#9946fb3274e1628de6e36b2f6714953b4845094f" - integrity sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg== - dependencies: - minimatch "3.0.4" - -redent@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" - integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== - dependencies: - indent-string "^4.0.0" - strip-indent "^3.0.0" - -regenerate-unicode-properties@^10.1.0: - version "10.1.0" - resolved "http://localhost:4873/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz#7c3192cab6dd24e21cb4461e5ddd7dd24fa8374c" - integrity sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ== - dependencies: - regenerate "^1.4.2" - -regenerate@^1.4.2: - version "1.4.2" - resolved "http://localhost:4873/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" - integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== - -regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.9: - version "0.13.9" - resolved "http://localhost:4873/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" - integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== - -regenerator-transform@^0.15.0: - version "0.15.0" - resolved "http://localhost:4873/regenerator-transform/-/regenerator-transform-0.15.0.tgz#cbd9ead5d77fae1a48d957cf889ad0586adb6537" - integrity 
sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg== - dependencies: - "@babel/runtime" "^7.8.4" - -regex-parser@^2.2.11: - version "2.2.11" - resolved "http://localhost:4873/regex-parser/-/regex-parser-2.2.11.tgz#3b37ec9049e19479806e878cabe7c1ca83ccfe58" - integrity sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q== - -regexp.prototype.flags@^1.4.1, regexp.prototype.flags@^1.4.3: - version "1.4.3" - resolved "http://localhost:4873/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" - integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - functions-have-names "^1.2.2" - -regexpp@^3.2.0: - version "3.2.0" - resolved "http://localhost:4873/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" - integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== - -regexpu-core@^5.1.0: - version "5.2.1" - resolved "http://localhost:4873/regexpu-core/-/regexpu-core-5.2.1.tgz#a69c26f324c1e962e9ffd0b88b055caba8089139" - integrity sha512-HrnlNtpvqP1Xkb28tMhBUO2EbyUHdQlsnlAhzWcwHy8WJR53UWr7/MAvqrsQKMbV4qdpv03oTMG8iIhfsPFktQ== - dependencies: - regenerate "^1.4.2" - regenerate-unicode-properties "^10.1.0" - regjsgen "^0.7.1" - regjsparser "^0.9.1" - unicode-match-property-ecmascript "^2.0.0" - unicode-match-property-value-ecmascript "^2.0.0" - -regjsgen@^0.7.1: - version "0.7.1" - resolved "http://localhost:4873/regjsgen/-/regjsgen-0.7.1.tgz#ee5ef30e18d3f09b7c369b76e7c2373ed25546f6" - integrity sha512-RAt+8H2ZEzHeYWxZ3H2z6tF18zyyOnlcdaafLrm21Bguj7uZy6ULibiAFdXEtKQY4Sy7wDTwDiOazasMLc4KPA== - -regjsparser@^0.9.1: - version "0.9.1" - resolved "http://localhost:4873/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" - integrity 
sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== - dependencies: - jsesc "~0.5.0" - -relateurl@^0.2.7: - version "0.2.7" - resolved "http://localhost:4873/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" - integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog== - -renderkid@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" - integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== - dependencies: - css-select "^4.1.3" - dom-converter "^0.2.0" - htmlparser2 "^6.1.0" - lodash "^4.17.21" - strip-ansi "^6.0.1" - -require-directory@^2.1.1: - version "2.1.1" - resolved "http://localhost:4873/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== - -require-from-string@^2.0.2: - version "2.0.2" - resolved "http://localhost:4873/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" - integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== - -requires-port@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" - integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== - -resolve-cwd@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" - integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== - dependencies: - resolve-from "^5.0.0" - -resolve-from@^4.0.0: - version "4.0.0" - resolved 
"http://localhost:4873/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" - integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== - -resolve-from@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" - integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== - -resolve-url-loader@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz#d50d4ddc746bb10468443167acf800dcd6c3ad57" - integrity sha512-05VEMczVREcbtT7Bz+C+96eUO5HDNvdthIiMB34t7FcF8ehcu4wC0sSgPUubs3XW2Q3CNLJk/BJrCU9wVRymiA== - dependencies: - adjust-sourcemap-loader "^4.0.0" - convert-source-map "^1.7.0" - loader-utils "^2.0.0" - postcss "^7.0.35" - source-map "0.6.1" - -resolve.exports@^1.1.0: - version "1.1.0" - resolved "http://localhost:4873/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" - integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== - -resolve@^1.1.7, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.0, resolve@^1.22.1: - version "1.22.1" - resolved "http://localhost:4873/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" - integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== - dependencies: - is-core-module "^2.9.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - -resolve@^2.0.0-next.3: - version "2.0.0-next.4" - resolved "http://localhost:4873/resolve/-/resolve-2.0.0-next.4.tgz#3d37a113d6429f496ec4752d2a2e58efb1fd4660" - integrity sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ== - dependencies: - is-core-module "^2.9.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - 
-retry@^0.13.1: - version "0.13.1" - resolved "http://localhost:4873/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" - integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== - -reusify@^1.0.4: - version "1.0.4" - resolved "http://localhost:4873/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" - integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== - -rimraf@^3.0.0, rimraf@^3.0.2: - version "3.0.2" - resolved "http://localhost:4873/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" - integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== - dependencies: - glob "^7.1.3" - -rollup-plugin-terser@^7.0.0: - version "7.0.2" - resolved "http://localhost:4873/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz#e8fbba4869981b2dc35ae7e8a502d5c6c04d324d" - integrity sha512-w3iIaU4OxcF52UUXiZNsNeuXIMDvFrr+ZXK6bFZ0Q60qyVfq4uLptoS4bbq3paG3x216eQllFZX7zt6TIImguQ== - dependencies: - "@babel/code-frame" "^7.10.4" - jest-worker "^26.2.1" - serialize-javascript "^4.0.0" - terser "^5.0.0" - -rollup@^2.43.1: - version "2.79.1" - resolved "http://localhost:4873/rollup/-/rollup-2.79.1.tgz#bedee8faef7c9f93a2647ac0108748f497f081c7" - integrity sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw== - optionalDependencies: - fsevents "~2.3.2" - -run-parallel@^1.1.9: - version "1.2.0" - resolved "http://localhost:4873/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" - integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== - dependencies: - queue-microtask "^1.2.2" - -safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.2" - resolved "http://localhost:4873/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity 
sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== - -safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: - version "5.2.1" - resolved "http://localhost:4873/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - -safe-regex-test@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" - integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.3" - is-regex "^1.1.4" - -"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0": - version "2.1.2" - resolved "http://localhost:4873/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" - integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== - -sanitize.css@*: - version "13.0.0" - resolved "http://localhost:4873/sanitize.css/-/sanitize.css-13.0.0.tgz#2675553974b27964c75562ade3bd85d79879f173" - integrity sha512-ZRwKbh/eQ6w9vmTjkuG0Ioi3HBwPFce0O+v//ve+aOq1oeCy7jMV2qzzAlpsNuqpqCBjjriM1lbtZbF/Q8jVyA== - -sass-loader@^12.3.0: - version "12.6.0" - resolved "http://localhost:4873/sass-loader/-/sass-loader-12.6.0.tgz#5148362c8e2cdd4b950f3c63ac5d16dbfed37bcb" - integrity sha512-oLTaH0YCtX4cfnJZxKSLAyglED0naiYfNG1iXfU5w1LNZ+ukoA5DtyDIN5zmKVZwYNJP4KRc5Y3hkWga+7tYfA== - dependencies: - klona "^2.0.4" - neo-async "^2.6.2" - -sax@~1.2.4: - version "1.2.4" - resolved "http://localhost:4873/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" - integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== - -saxes@^5.0.1: - version "5.0.1" - resolved 
"http://localhost:4873/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" - integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== - dependencies: - xmlchars "^2.2.0" - -scheduler@^0.23.0: - version "0.23.0" - resolved "http://localhost:4873/scheduler/-/scheduler-0.23.0.tgz#ba8041afc3d30eb206a487b6b384002e4e61fdfe" - integrity sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw== - dependencies: - loose-envify "^1.1.0" - -schema-utils@2.7.0: - version "2.7.0" - resolved "http://localhost:4873/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7" - integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== - dependencies: - "@types/json-schema" "^7.0.4" - ajv "^6.12.2" - ajv-keywords "^3.4.1" - -schema-utils@^2.6.5: - version "2.7.1" - resolved "http://localhost:4873/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" - integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== - dependencies: - "@types/json-schema" "^7.0.5" - ajv "^6.12.4" - ajv-keywords "^3.5.2" - -schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: - version "3.1.1" - resolved "http://localhost:4873/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" - integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== - dependencies: - "@types/json-schema" "^7.0.8" - ajv "^6.12.5" - ajv-keywords "^3.5.2" - -schema-utils@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" - integrity sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg== - dependencies: - "@types/json-schema" "^7.0.9" - ajv "^8.8.0" - ajv-formats "^2.1.1" - ajv-keywords 
"^5.0.0" - -select-hose@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" - integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg== - -selfsigned@^2.1.1: - version "2.1.1" - resolved "http://localhost:4873/selfsigned/-/selfsigned-2.1.1.tgz#18a7613d714c0cd3385c48af0075abf3f266af61" - integrity sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ== - dependencies: - node-forge "^1" - -semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: - version "6.3.0" - resolved "http://localhost:4873/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" - integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== - -semver@^7.3.2, semver@^7.3.5, semver@^7.3.7: - version "7.3.8" - resolved "http://localhost:4873/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" - integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== - dependencies: - lru-cache "^6.0.0" - -send@0.18.0: - version "0.18.0" - resolved "http://localhost:4873/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" - integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== - dependencies: - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - fresh "0.5.2" - http-errors "2.0.0" - mime "1.6.0" - ms "2.1.3" - on-finished "2.4.1" - range-parser "~1.2.1" - statuses "2.0.1" - -serialize-javascript@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" - integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== - dependencies: - randombytes "^2.1.0" - 
-serialize-javascript@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" - integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== - dependencies: - randombytes "^2.1.0" - -serve-index@^1.9.1: - version "1.9.1" - resolved "http://localhost:4873/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" - integrity sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw== - dependencies: - accepts "~1.3.4" - batch "0.6.1" - debug "2.6.9" - escape-html "~1.0.3" - http-errors "~1.6.2" - mime-types "~2.1.17" - parseurl "~1.3.2" - -serve-static@1.15.0: - version "1.15.0" - resolved "http://localhost:4873/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" - integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== - dependencies: - encodeurl "~1.0.2" - escape-html "~1.0.3" - parseurl "~1.3.3" - send "0.18.0" - -setprototypeof@1.1.0: - version "1.1.0" - resolved "http://localhost:4873/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" - integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== - -setprototypeof@1.2.0: - version "1.2.0" - resolved "http://localhost:4873/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" - integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== - -shallow-clone@^3.0.0: - version "3.0.1" - resolved "http://localhost:4873/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" - integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== - dependencies: - kind-of "^6.0.2" - -shebang-command@^2.0.0: - version "2.0.0" - resolved 
"http://localhost:4873/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" - integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== - dependencies: - shebang-regex "^3.0.0" - -shebang-regex@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" - integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== - -shell-quote@^1.7.3: - version "1.7.3" - resolved "http://localhost:4873/shell-quote/-/shell-quote-1.7.3.tgz#aa40edac170445b9a431e17bb62c0b881b9c4123" - integrity sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw== - -side-channel@^1.0.4: - version "1.0.4" - resolved "http://localhost:4873/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" - integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== - dependencies: - call-bind "^1.0.0" - get-intrinsic "^1.0.2" - object-inspect "^1.9.0" - -signal-exit@^3.0.2, signal-exit@^3.0.3: - version "3.0.7" - resolved "http://localhost:4873/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" - integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== - -sisteransi@^1.0.5: - version "1.0.5" - resolved "http://localhost:4873/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" - integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== - -slash@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" - integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== - -slash@^4.0.0: - version "4.0.0" - resolved 
"http://localhost:4873/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" - integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== - -sockjs@^0.3.24: - version "0.3.24" - resolved "http://localhost:4873/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce" - integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== - dependencies: - faye-websocket "^0.11.3" - uuid "^8.3.2" - websocket-driver "^0.7.4" - -source-list-map@^2.0.0, source-list-map@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" - integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== - -source-map-js@^1.0.1, source-map-js@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" - integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== - -source-map-loader@^3.0.0: - version "3.0.1" - resolved "http://localhost:4873/source-map-loader/-/source-map-loader-3.0.1.tgz#9ae5edc7c2d42570934be4c95d1ccc6352eba52d" - integrity sha512-Vp1UsfyPvgujKQzi4pyDiTOnE3E4H+yHvkVRN3c/9PJmQS4CQJExvcDvaX/D+RV+xQben9HJ56jMJS3CgUeWyA== - dependencies: - abab "^2.0.5" - iconv-lite "^0.6.3" - source-map-js "^1.0.1" - -source-map-support@^0.5.6, source-map-support@~0.5.20: - version "0.5.21" - resolved "http://localhost:4873/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" - integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: - version "0.6.1" - resolved 
"http://localhost:4873/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -source-map@^0.7.3: - version "0.7.4" - resolved "http://localhost:4873/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" - integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== - -source-map@^0.8.0-beta.0: - version "0.8.0-beta.0" - resolved "http://localhost:4873/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" - integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== - dependencies: - whatwg-url "^7.0.0" - -sourcemap-codec@^1.4.8: - version "1.4.8" - resolved "http://localhost:4873/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" - integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== - -spdy-transport@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" - integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== - dependencies: - debug "^4.1.0" - detect-node "^2.0.4" - hpack.js "^2.1.6" - obuf "^1.1.2" - readable-stream "^3.0.6" - wbuf "^1.7.3" - -spdy@^4.0.2: - version "4.0.2" - resolved "http://localhost:4873/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b" - integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== - dependencies: - debug "^4.1.0" - handle-thing "^2.0.0" - http-deceiver "^1.2.7" - select-hose "^2.0.0" - spdy-transport "^3.0.0" - -sprintf-js@~1.0.2: - version "1.0.3" - resolved "http://localhost:4873/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity 
sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== - -stable@^0.1.8: - version "0.1.8" - resolved "http://localhost:4873/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" - integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== - -stack-utils@^2.0.3: - version "2.0.5" - resolved "http://localhost:4873/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" - integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== - dependencies: - escape-string-regexp "^2.0.0" - -stackframe@^1.3.4: - version "1.3.4" - resolved "http://localhost:4873/stackframe/-/stackframe-1.3.4.tgz#b881a004c8c149a5e8efef37d51b16e412943310" - integrity sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw== - -statuses@2.0.1: - version "2.0.1" - resolved "http://localhost:4873/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" - integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== - -"statuses@>= 1.4.0 < 2": - version "1.5.0" - resolved "http://localhost:4873/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" - integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== - -string-length@^4.0.1: - version "4.0.2" - resolved "http://localhost:4873/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" - integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== - dependencies: - char-regex "^1.0.2" - strip-ansi "^6.0.0" - -string-length@^5.0.1: - version "5.0.1" - resolved "http://localhost:4873/string-length/-/string-length-5.0.1.tgz#3d647f497b6e8e8d41e422f7e0b23bc536c8381e" - integrity 
sha512-9Ep08KAMUn0OadnVaBuRdE2l615CQ508kr0XMadjClfYpdCyvrbFp6Taebo8yyxokQ4viUd/xPPUA4FGgUa0ow== - dependencies: - char-regex "^2.0.0" - strip-ansi "^7.0.1" - -string-natural-compare@^3.0.1: - version "3.0.1" - resolved "http://localhost:4873/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" - integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== - -string-width@^4.1.0, string-width@^4.2.0: - version "4.2.3" - resolved "http://localhost:4873/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - -string.prototype.matchall@^4.0.6, string.prototype.matchall@^4.0.7: - version "4.0.7" - resolved "http://localhost:4873/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz#8e6ecb0d8a1fb1fda470d81acecb2dba057a481d" - integrity sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - get-intrinsic "^1.1.1" - has-symbols "^1.0.3" - internal-slot "^1.0.3" - regexp.prototype.flags "^1.4.1" - side-channel "^1.0.4" - -string.prototype.trimend@^1.0.5: - version "1.0.5" - resolved "http://localhost:4873/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0" - integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.19.5" - -string.prototype.trimstart@^1.0.5: - version "1.0.5" - resolved "http://localhost:4873/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef" - integrity 
sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.19.5" - -string_decoder@^1.1.1: - version "1.3.0" - resolved "http://localhost:4873/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" - integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== - dependencies: - safe-buffer "~5.2.0" - -string_decoder@~1.1.1: - version "1.1.1" - resolved "http://localhost:4873/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - -stringify-object@^3.3.0: - version "3.3.0" - resolved "http://localhost:4873/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629" - integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== - dependencies: - get-own-enumerable-property-symbols "^3.0.0" - is-obj "^1.0.1" - is-regexp "^1.0.0" - -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "http://localhost:4873/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - -strip-ansi@^7.0.1: - version "7.0.1" - resolved "http://localhost:4873/strip-ansi/-/strip-ansi-7.0.1.tgz#61740a08ce36b61e50e65653f07060d000975fb2" - integrity sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw== - dependencies: - ansi-regex "^6.0.1" - -strip-bom@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" - integrity 
sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== - -strip-bom@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" - integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== - -strip-comments@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/strip-comments/-/strip-comments-2.0.1.tgz#4ad11c3fbcac177a67a40ac224ca339ca1c1ba9b" - integrity sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw== - -strip-final-newline@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" - integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== - -strip-indent@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" - integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== - dependencies: - min-indent "^1.0.0" - -strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: - version "3.1.1" - resolved "http://localhost:4873/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" - integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== - -style-loader@^3.3.1: - version "3.3.1" - resolved "http://localhost:4873/style-loader/-/style-loader-3.3.1.tgz#057dfa6b3d4d7c7064462830f9113ed417d38575" - integrity sha512-GPcQ+LDJbrcxHORTRes6Jy2sfvK2kS6hpSfI/fXhPt+spVzxF6LJ1dHLN9zIGmVaaP044YKaIatFaufENRiDoQ== - -stylehacks@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/stylehacks/-/stylehacks-5.1.0.tgz#a40066490ca0caca04e96c6b02153ddc39913520" - integrity 
sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q== - dependencies: - browserslist "^4.16.6" - postcss-selector-parser "^6.0.4" - -supports-color@^5.3.0: - version "5.5.0" - resolved "http://localhost:4873/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - -supports-color@^7.0.0, supports-color@^7.1.0: - version "7.2.0" - resolved "http://localhost:4873/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" - integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== - dependencies: - has-flag "^4.0.0" - -supports-color@^8.0.0: - version "8.1.1" - resolved "http://localhost:4873/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" - integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== - dependencies: - has-flag "^4.0.0" - -supports-hyperlinks@^2.0.0: - version "2.3.0" - resolved "http://localhost:4873/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz#3943544347c1ff90b15effb03fc14ae45ec10624" - integrity sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA== - dependencies: - has-flag "^4.0.0" - supports-color "^7.0.0" - -supports-preserve-symlinks-flag@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" - integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== - -svg-parser@^2.0.2: - version "2.0.4" - resolved "http://localhost:4873/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" - integrity 
sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== - -svgo@^1.2.2: - version "1.3.2" - resolved "http://localhost:4873/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167" - integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== - dependencies: - chalk "^2.4.1" - coa "^2.0.2" - css-select "^2.0.0" - css-select-base-adapter "^0.1.1" - css-tree "1.0.0-alpha.37" - csso "^4.0.2" - js-yaml "^3.13.1" - mkdirp "~0.5.1" - object.values "^1.1.0" - sax "~1.2.4" - stable "^0.1.8" - unquote "~1.1.1" - util.promisify "~1.0.0" - -svgo@^2.7.0: - version "2.8.0" - resolved "http://localhost:4873/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" - integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== - dependencies: - "@trysound/sax" "0.2.0" - commander "^7.2.0" - css-select "^4.1.3" - css-tree "^1.1.3" - csso "^4.2.0" - picocolors "^1.0.0" - stable "^0.1.8" - -symbol-tree@^3.2.4: - version "3.2.4" - resolved "http://localhost:4873/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" - integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== - -tailwindcss@^3.0.2: - version "3.1.8" - resolved "http://localhost:4873/tailwindcss/-/tailwindcss-3.1.8.tgz#4f8520550d67a835d32f2f4021580f9fddb7b741" - integrity sha512-YSneUCZSFDYMwk+TGq8qYFdCA3yfBRdBlS7txSq0LUmzyeqRe3a8fBQzbz9M3WS/iFT4BNf/nmw9mEzrnSaC0g== - dependencies: - arg "^5.0.2" - chokidar "^3.5.3" - color-name "^1.1.4" - detective "^5.2.1" - didyoumean "^1.2.2" - dlv "^1.1.3" - fast-glob "^3.2.11" - glob-parent "^6.0.2" - is-glob "^4.0.3" - lilconfig "^2.0.6" - normalize-path "^3.0.0" - object-hash "^3.0.0" - picocolors "^1.0.0" - postcss "^8.4.14" - postcss-import "^14.1.0" - postcss-js "^4.0.0" - postcss-load-config "^3.1.4" - postcss-nested "5.0.6" - postcss-selector-parser "^6.0.10" - 
postcss-value-parser "^4.2.0" - quick-lru "^5.1.1" - resolve "^1.22.1" - -tapable@^1.0.0: - version "1.1.3" - resolved "http://localhost:4873/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" - integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== - -tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: - version "2.2.1" - resolved "http://localhost:4873/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" - integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== - -temp-dir@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e" - integrity sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg== - -tempy@^0.6.0: - version "0.6.0" - resolved "http://localhost:4873/tempy/-/tempy-0.6.0.tgz#65e2c35abc06f1124a97f387b08303442bde59f3" - integrity sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw== - dependencies: - is-stream "^2.0.0" - temp-dir "^2.0.0" - type-fest "^0.16.0" - unique-string "^2.0.0" - -terminal-link@^2.0.0: - version "2.1.1" - resolved "http://localhost:4873/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" - integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== - dependencies: - ansi-escapes "^4.2.1" - supports-hyperlinks "^2.0.0" - -terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: - version "5.3.6" - resolved "http://localhost:4873/terser-webpack-plugin/-/terser-webpack-plugin-5.3.6.tgz#5590aec31aa3c6f771ce1b1acca60639eab3195c" - integrity sha512-kfLFk+PoLUQIbLmB1+PZDMRSZS99Mp+/MHqDNmMA6tOItzRt+Npe3E+fsMs5mfcM0wCtrrdU387UnV+vnSffXQ== - dependencies: - "@jridgewell/trace-mapping" "^0.3.14" - jest-worker "^27.4.5" - schema-utils "^3.1.1" - serialize-javascript "^6.0.0" - 
terser "^5.14.1" - -terser@^5.0.0, terser@^5.10.0, terser@^5.14.1: - version "5.15.1" - resolved "http://localhost:4873/terser/-/terser-5.15.1.tgz#8561af6e0fd6d839669c73b92bdd5777d870ed6c" - integrity sha512-K1faMUvpm/FBxjBXud0LWVAGxmvoPbZbfTCYbSgaaYQaIXI3/TdI7a7ZGA73Zrou6Q8Zmz3oeUTsp/dj+ag2Xw== - dependencies: - "@jridgewell/source-map" "^0.3.2" - acorn "^8.5.0" - commander "^2.20.0" - source-map-support "~0.5.20" - -test-exclude@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" - integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== - dependencies: - "@istanbuljs/schema" "^0.1.2" - glob "^7.1.4" - minimatch "^3.0.4" - -text-table@^0.2.0: - version "0.2.0" - resolved "http://localhost:4873/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" - integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== - -throat@^6.0.1: - version "6.0.1" - resolved "http://localhost:4873/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375" - integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w== - -thunky@^1.0.2: - version "1.1.0" - resolved "http://localhost:4873/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" - integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== - -tmpl@1.0.5: - version "1.0.5" - resolved "http://localhost:4873/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" - integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== - -to-fast-properties@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" - integrity 
sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== - -to-regex-range@^5.0.1: - version "5.0.1" - resolved "http://localhost:4873/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" - integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== - dependencies: - is-number "^7.0.0" - -toidentifier@1.0.1: - version "1.0.1" - resolved "http://localhost:4873/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" - integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== - -tough-cookie@^4.0.0: - version "4.1.2" - resolved "http://localhost:4873/tough-cookie/-/tough-cookie-4.1.2.tgz#e53e84b85f24e0b65dd526f46628db6c85f6b874" - integrity sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ== - dependencies: - psl "^1.1.33" - punycode "^2.1.1" - universalify "^0.2.0" - url-parse "^1.5.3" - -tr46@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" - integrity sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA== - dependencies: - punycode "^2.1.0" - -tr46@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" - integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== - dependencies: - punycode "^2.1.1" - -tryer@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8" - integrity sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== - -tsconfig-paths@^3.14.1: - version "3.14.1" - resolved "http://localhost:4873/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz#ba0734599e8ea36c862798e920bcf163277b137a" - integrity 
sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ== - dependencies: - "@types/json5" "^0.0.29" - json5 "^1.0.1" - minimist "^1.2.6" - strip-bom "^3.0.0" - -tslib@^1.8.1: - version "1.14.1" - resolved "http://localhost:4873/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" - integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== - -tslib@^2.0.3: - version "2.4.0" - resolved "http://localhost:4873/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" - integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== - -tsutils@^3.21.0: - version "3.21.0" - resolved "http://localhost:4873/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" - integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== - dependencies: - tslib "^1.8.1" - -type-check@^0.4.0, type-check@~0.4.0: - version "0.4.0" - resolved "http://localhost:4873/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" - integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== - dependencies: - prelude-ls "^1.2.1" - -type-check@~0.3.2: - version "0.3.2" - resolved "http://localhost:4873/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" - integrity sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg== - dependencies: - prelude-ls "~1.1.2" - -type-detect@4.0.8: - version "4.0.8" - resolved "http://localhost:4873/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" - integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== - -type-fest@^0.16.0: - version "0.16.0" - resolved "http://localhost:4873/type-fest/-/type-fest-0.16.0.tgz#3240b891a78b0deae910dbeb86553e552a148860" - 
integrity sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg== - -type-fest@^0.20.2: - version "0.20.2" - resolved "http://localhost:4873/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" - integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== - -type-fest@^0.21.3: - version "0.21.3" - resolved "http://localhost:4873/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" - integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== - -type-is@~1.6.18: - version "1.6.18" - resolved "http://localhost:4873/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" - integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== - dependencies: - media-typer "0.3.0" - mime-types "~2.1.24" - -typedarray-to-buffer@^3.1.5: - version "3.1.5" - resolved "http://localhost:4873/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" - integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== - dependencies: - is-typedarray "^1.0.0" - -unbox-primitive@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" - integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== - dependencies: - call-bind "^1.0.2" - has-bigints "^1.0.2" - has-symbols "^1.0.3" - which-boxed-primitive "^1.0.2" - -unicode-canonical-property-names-ecmascript@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" - integrity 
sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== - -unicode-match-property-ecmascript@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" - integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== - dependencies: - unicode-canonical-property-names-ecmascript "^2.0.0" - unicode-property-aliases-ecmascript "^2.0.0" - -unicode-match-property-value-ecmascript@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz#1a01aa57247c14c568b89775a54938788189a714" - integrity sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw== - -unicode-property-aliases-ecmascript@^2.0.0: - version "2.1.0" - resolved "http://localhost:4873/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd" - integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== - -unique-string@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" - integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== - dependencies: - crypto-random-string "^2.0.0" - -universalify@^0.2.0: - version "0.2.0" - resolved "http://localhost:4873/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" - integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== - -universalify@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" - integrity 
sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== - -unpipe@1.0.0, unpipe@~1.0.0: - version "1.0.0" - resolved "http://localhost:4873/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== - -unquote@~1.1.1: - version "1.1.1" - resolved "http://localhost:4873/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544" - integrity sha512-vRCqFv6UhXpWxZPyGDh/F3ZpNv8/qo7w6iufLpQg9aKnQ71qM4B5KiI7Mia9COcjEhrO9LueHpMYjYzsWH3OIg== - -upath@^1.2.0: - version "1.2.0" - resolved "http://localhost:4873/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" - integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== - -update-browserslist-db@^1.0.9: - version "1.0.10" - resolved "http://localhost:4873/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz#0f54b876545726f17d00cd9a2561e6dade943ff3" - integrity sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ== - dependencies: - escalade "^3.1.1" - picocolors "^1.0.0" - -uri-js@^4.2.2: - version "4.4.1" - resolved "http://localhost:4873/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" - integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== - dependencies: - punycode "^2.1.0" - -url-parse@^1.5.3: - version "1.5.10" - resolved "http://localhost:4873/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" - integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== - dependencies: - querystringify "^2.1.1" - requires-port "^1.0.0" - -util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: - version "1.0.2" - resolved 
"http://localhost:4873/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== - -util.promisify@~1.0.0: - version "1.0.1" - resolved "http://localhost:4873/util.promisify/-/util.promisify-1.0.1.tgz#6baf7774b80eeb0f7520d8b81d07982a59abbaee" - integrity sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA== - dependencies: - define-properties "^1.1.3" - es-abstract "^1.17.2" - has-symbols "^1.0.1" - object.getownpropertydescriptors "^2.1.0" - -utila@~0.4: - version "0.4.0" - resolved "http://localhost:4873/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" - integrity sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA== - -utils-merge@1.0.1: - version "1.0.1" - resolved "http://localhost:4873/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== - -uuid@^8.3, uuid@^8.3.2: - version "8.3.2" - resolved "http://localhost:4873/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" - integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== - -v8-to-istanbul@^8.1.0: - version "8.1.1" - resolved "http://localhost:4873/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz#77b752fd3975e31bbcef938f85e9bd1c7a8d60ed" - integrity sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w== - dependencies: - "@types/istanbul-lib-coverage" "^2.0.1" - convert-source-map "^1.6.0" - source-map "^0.7.3" - -vary@~1.1.2: - version "1.1.2" - resolved "http://localhost:4873/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" - integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== - 
-w3c-hr-time@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" - integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== - dependencies: - browser-process-hrtime "^1.0.0" - -w3c-xmlserializer@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" - integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== - dependencies: - xml-name-validator "^3.0.0" - -walker@^1.0.7: - version "1.0.8" - resolved "http://localhost:4873/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" - integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== - dependencies: - makeerror "1.0.12" - -watchpack@^2.4.0: - version "2.4.0" - resolved "http://localhost:4873/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" - integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== - dependencies: - glob-to-regexp "^0.4.1" - graceful-fs "^4.1.2" - -wbuf@^1.1.0, wbuf@^1.7.3: - version "1.7.3" - resolved "http://localhost:4873/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" - integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== - dependencies: - minimalistic-assert "^1.0.0" - -web-vitals@^2.1.4: - version "2.1.4" - resolved "http://localhost:4873/web-vitals/-/web-vitals-2.1.4.tgz#76563175a475a5e835264d373704f9dde718290c" - integrity sha512-sVWcwhU5mX6crfI5Vd2dC4qchyTqxV8URinzt25XqVh+bHEPGH4C3NPrNionCP7Obx59wrYEbNlw4Z8sjALzZg== - -webidl-conversions@^4.0.2: - version "4.0.2" - resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" - integrity 
sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== - -webidl-conversions@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" - integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== - -webidl-conversions@^6.1.0: - version "6.1.0" - resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" - integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== - -webpack-dev-middleware@^5.3.1: - version "5.3.3" - resolved "http://localhost:4873/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz#efae67c2793908e7311f1d9b06f2a08dcc97e51f" - integrity sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA== - dependencies: - colorette "^2.0.10" - memfs "^3.4.3" - mime-types "^2.1.31" - range-parser "^1.2.1" - schema-utils "^4.0.0" - -webpack-dev-server@^4.6.0: - version "4.11.1" - resolved "http://localhost:4873/webpack-dev-server/-/webpack-dev-server-4.11.1.tgz#ae07f0d71ca0438cf88446f09029b92ce81380b5" - integrity sha512-lILVz9tAUy1zGFwieuaQtYiadImb5M3d+H+L1zDYalYoDl0cksAB1UNyuE5MMWJrG6zR1tXkCP2fitl7yoUJiw== - dependencies: - "@types/bonjour" "^3.5.9" - "@types/connect-history-api-fallback" "^1.3.5" - "@types/express" "^4.17.13" - "@types/serve-index" "^1.9.1" - "@types/serve-static" "^1.13.10" - "@types/sockjs" "^0.3.33" - "@types/ws" "^8.5.1" - ansi-html-community "^0.0.8" - bonjour-service "^1.0.11" - chokidar "^3.5.3" - colorette "^2.0.10" - compression "^1.7.4" - connect-history-api-fallback "^2.0.0" - default-gateway "^6.0.3" - express "^4.17.3" - graceful-fs "^4.2.6" - html-entities "^2.3.2" - http-proxy-middleware "^2.0.3" - ipaddr.js "^2.0.1" - open "^8.0.9" - p-retry "^4.5.0" - rimraf "^3.0.2" - schema-utils "^4.0.0" - 
selfsigned "^2.1.1" - serve-index "^1.9.1" - sockjs "^0.3.24" - spdy "^4.0.2" - webpack-dev-middleware "^5.3.1" - ws "^8.4.2" - -webpack-manifest-plugin@^4.0.2: - version "4.1.1" - resolved "http://localhost:4873/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz#10f8dbf4714ff93a215d5a45bcc416d80506f94f" - integrity sha512-YXUAwxtfKIJIKkhg03MKuiFAD72PlrqCiwdwO4VEXdRO5V0ORCNwaOwAZawPZalCbmH9kBDmXnNeQOw+BIEiow== - dependencies: - tapable "^2.0.0" - webpack-sources "^2.2.0" - -webpack-merge@^5.8.0: - version "5.8.0" - resolved "http://localhost:4873/webpack-merge/-/webpack-merge-5.8.0.tgz#2b39dbf22af87776ad744c390223731d30a68f61" - integrity sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q== - dependencies: - clone-deep "^4.0.1" - wildcard "^2.0.0" - -webpack-sources@^1.4.3: - version "1.4.3" - resolved "http://localhost:4873/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" - integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== - dependencies: - source-list-map "^2.0.0" - source-map "~0.6.1" - -webpack-sources@^2.2.0: - version "2.3.1" - resolved "http://localhost:4873/webpack-sources/-/webpack-sources-2.3.1.tgz#570de0af163949fe272233c2cefe1b56f74511fd" - integrity sha512-y9EI9AO42JjEcrTJFOYmVywVZdKVUfOvDUPsJea5GIr1JOEGFVqwlY2K098fFoIjOkDzHn2AjRvM8dsBZu+gCA== - dependencies: - source-list-map "^2.0.1" - source-map "^0.6.1" - -webpack-sources@^3.2.3: - version "3.2.3" - resolved "http://localhost:4873/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" - integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== - -webpack@^5.64.4: - version "5.74.0" - resolved "http://localhost:4873/webpack/-/webpack-5.74.0.tgz#02a5dac19a17e0bb47093f2be67c695102a55980" - integrity 
sha512-A2InDwnhhGN4LYctJj6M1JEaGL7Luj6LOmyBHjcI8529cm5p6VXiTIW2sn6ffvEAKmveLzvu4jrihwXtPojlAA== - dependencies: - "@types/eslint-scope" "^3.7.3" - "@types/estree" "^0.0.51" - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/wasm-edit" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - acorn "^8.7.1" - acorn-import-assertions "^1.7.6" - browserslist "^4.14.5" - chrome-trace-event "^1.0.2" - enhanced-resolve "^5.10.0" - es-module-lexer "^0.9.0" - eslint-scope "5.1.1" - events "^3.2.0" - glob-to-regexp "^0.4.1" - graceful-fs "^4.2.9" - json-parse-even-better-errors "^2.3.1" - loader-runner "^4.2.0" - mime-types "^2.1.27" - neo-async "^2.6.2" - schema-utils "^3.1.0" - tapable "^2.1.1" - terser-webpack-plugin "^5.1.3" - watchpack "^2.4.0" - webpack-sources "^3.2.3" - -websocket-driver@>=0.5.1, websocket-driver@^0.7.4: - version "0.7.4" - resolved "http://localhost:4873/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760" - integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== - dependencies: - http-parser-js ">=0.5.1" - safe-buffer ">=5.1.0" - websocket-extensions ">=0.1.1" - -websocket-extensions@>=0.1.1: - version "0.1.4" - resolved "http://localhost:4873/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" - integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== - -whatwg-encoding@^1.0.5: - version "1.0.5" - resolved "http://localhost:4873/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" - integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== - dependencies: - iconv-lite "0.4.24" - -whatwg-fetch@^3.6.2: - version "3.6.2" - resolved "http://localhost:4873/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" - integrity 
sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== - -whatwg-mimetype@^2.3.0: - version "2.3.0" - resolved "http://localhost:4873/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" - integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== - -whatwg-url@^7.0.0: - version "7.1.0" - resolved "http://localhost:4873/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" - integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== - dependencies: - lodash.sortby "^4.7.0" - tr46 "^1.0.1" - webidl-conversions "^4.0.2" - -whatwg-url@^8.0.0, whatwg-url@^8.5.0: - version "8.7.0" - resolved "http://localhost:4873/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" - integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== - dependencies: - lodash "^4.7.0" - tr46 "^2.1.0" - webidl-conversions "^6.1.0" - -which-boxed-primitive@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" - integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== - dependencies: - is-bigint "^1.0.1" - is-boolean-object "^1.1.0" - is-number-object "^1.0.4" - is-string "^1.0.5" - is-symbol "^1.0.3" - -which@^1.3.1: - version "1.3.1" - resolved "http://localhost:4873/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" - integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== - dependencies: - isexe "^2.0.0" - -which@^2.0.1: - version "2.0.2" - resolved "http://localhost:4873/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" - integrity 
sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== - dependencies: - isexe "^2.0.0" - -wildcard@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/wildcard/-/wildcard-2.0.0.tgz#a77d20e5200c6faaac979e4b3aadc7b3dd7f8fec" - integrity sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw== - -word-wrap@^1.2.3, word-wrap@~1.2.3: - version "1.2.3" - resolved "http://localhost:4873/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" - integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== - -workbox-background-sync@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-background-sync/-/workbox-background-sync-6.5.4.tgz#3141afba3cc8aa2ae14c24d0f6811374ba8ff6a9" - integrity sha512-0r4INQZMyPky/lj4Ou98qxcThrETucOde+7mRGJl13MPJugQNKeZQOdIJe/1AchOP23cTqHcN/YVpD6r8E6I8g== - dependencies: - idb "^7.0.1" - workbox-core "6.5.4" - -workbox-broadcast-update@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-broadcast-update/-/workbox-broadcast-update-6.5.4.tgz#8441cff5417cd41f384ba7633ca960a7ffe40f66" - integrity sha512-I/lBERoH1u3zyBosnpPEtcAVe5lwykx9Yg1k6f8/BGEPGaMMgZrwVrqL1uA9QZ1NGGFoyE6t9i7lBjOlDhFEEw== - dependencies: - workbox-core "6.5.4" - -workbox-build@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-build/-/workbox-build-6.5.4.tgz#7d06d31eb28a878817e1c991c05c5b93409f0389" - integrity sha512-kgRevLXEYvUW9WS4XoziYqZ8Q9j/2ziJYEtTrjdz5/L/cTUa2XfyMP2i7c3p34lgqJ03+mTiz13SdFef2POwbA== - dependencies: - "@apideck/better-ajv-errors" "^0.3.1" - "@babel/core" "^7.11.1" - "@babel/preset-env" "^7.11.0" - "@babel/runtime" "^7.11.2" - "@rollup/plugin-babel" "^5.2.0" - "@rollup/plugin-node-resolve" "^11.2.1" - "@rollup/plugin-replace" "^2.4.1" - "@surma/rollup-plugin-off-main-thread" "^2.2.3" - ajv "^8.6.0" - common-tags "^1.8.0" - fast-json-stable-stringify "^2.1.0" - fs-extra 
"^9.0.1" - glob "^7.1.6" - lodash "^4.17.20" - pretty-bytes "^5.3.0" - rollup "^2.43.1" - rollup-plugin-terser "^7.0.0" - source-map "^0.8.0-beta.0" - stringify-object "^3.3.0" - strip-comments "^2.0.1" - tempy "^0.6.0" - upath "^1.2.0" - workbox-background-sync "6.5.4" - workbox-broadcast-update "6.5.4" - workbox-cacheable-response "6.5.4" - workbox-core "6.5.4" - workbox-expiration "6.5.4" - workbox-google-analytics "6.5.4" - workbox-navigation-preload "6.5.4" - workbox-precaching "6.5.4" - workbox-range-requests "6.5.4" - workbox-recipes "6.5.4" - workbox-routing "6.5.4" - workbox-strategies "6.5.4" - workbox-streams "6.5.4" - workbox-sw "6.5.4" - workbox-window "6.5.4" - -workbox-cacheable-response@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-cacheable-response/-/workbox-cacheable-response-6.5.4.tgz#a5c6ec0c6e2b6f037379198d4ef07d098f7cf137" - integrity sha512-DCR9uD0Fqj8oB2TSWQEm1hbFs/85hXXoayVwFKLVuIuxwJaihBsLsp4y7J9bvZbqtPJ1KlCkmYVGQKrBU4KAug== - dependencies: - workbox-core "6.5.4" - -workbox-core@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-core/-/workbox-core-6.5.4.tgz#df48bf44cd58bb1d1726c49b883fb1dffa24c9ba" - integrity sha512-OXYb+m9wZm8GrORlV2vBbE5EC1FKu71GGp0H4rjmxmF4/HLbMCoTFws87M3dFwgpmg0v00K++PImpNQ6J5NQ6Q== - -workbox-expiration@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-expiration/-/workbox-expiration-6.5.4.tgz#501056f81e87e1d296c76570bb483ce5e29b4539" - integrity sha512-jUP5qPOpH1nXtjGGh1fRBa1wJL2QlIb5mGpct3NzepjGG2uFFBn4iiEBiI9GUmfAFR2ApuRhDydjcRmYXddiEQ== - dependencies: - idb "^7.0.1" - workbox-core "6.5.4" - -workbox-google-analytics@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-google-analytics/-/workbox-google-analytics-6.5.4.tgz#c74327f80dfa4c1954cbba93cd7ea640fe7ece7d" - integrity sha512-8AU1WuaXsD49249Wq0B2zn4a/vvFfHkpcFfqAFHNHwln3jK9QUYmzdkKXGIZl9wyKNP+RRX30vcgcyWMcZ9VAg== - dependencies: - workbox-background-sync "6.5.4" - workbox-core "6.5.4" - 
workbox-routing "6.5.4" - workbox-strategies "6.5.4" - -workbox-navigation-preload@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-navigation-preload/-/workbox-navigation-preload-6.5.4.tgz#ede56dd5f6fc9e860a7e45b2c1a8f87c1c793212" - integrity sha512-IIwf80eO3cr8h6XSQJF+Hxj26rg2RPFVUmJLUlM0+A2GzB4HFbQyKkrgD5y2d84g2IbJzP4B4j5dPBRzamHrng== - dependencies: - workbox-core "6.5.4" - -workbox-precaching@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-precaching/-/workbox-precaching-6.5.4.tgz#740e3561df92c6726ab5f7471e6aac89582cab72" - integrity sha512-hSMezMsW6btKnxHB4bFy2Qfwey/8SYdGWvVIKFaUm8vJ4E53JAY+U2JwLTRD8wbLWoP6OVUdFlXsTdKu9yoLTg== - dependencies: - workbox-core "6.5.4" - workbox-routing "6.5.4" - workbox-strategies "6.5.4" - -workbox-range-requests@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-range-requests/-/workbox-range-requests-6.5.4.tgz#86b3d482e090433dab38d36ae031b2bb0bd74399" - integrity sha512-Je2qR1NXCFC8xVJ/Lux6saH6IrQGhMpDrPXWZWWS8n/RD+WZfKa6dSZwU+/QksfEadJEr/NfY+aP/CXFFK5JFg== - dependencies: - workbox-core "6.5.4" - -workbox-recipes@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-recipes/-/workbox-recipes-6.5.4.tgz#cca809ee63b98b158b2702dcfb741b5cc3e24acb" - integrity sha512-QZNO8Ez708NNwzLNEXTG4QYSKQ1ochzEtRLGaq+mr2PyoEIC1xFW7MrWxrONUxBFOByksds9Z4//lKAX8tHyUA== - dependencies: - workbox-cacheable-response "6.5.4" - workbox-core "6.5.4" - workbox-expiration "6.5.4" - workbox-precaching "6.5.4" - workbox-routing "6.5.4" - workbox-strategies "6.5.4" - -workbox-routing@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-routing/-/workbox-routing-6.5.4.tgz#6a7fbbd23f4ac801038d9a0298bc907ee26fe3da" - integrity sha512-apQswLsbrrOsBUWtr9Lf80F+P1sHnQdYodRo32SjiByYi36IDyL2r7BH1lJtFX8fwNHDa1QOVY74WKLLS6o5Pg== - dependencies: - workbox-core "6.5.4" - -workbox-strategies@6.5.4: - version "6.5.4" - resolved 
"http://localhost:4873/workbox-strategies/-/workbox-strategies-6.5.4.tgz#4edda035b3c010fc7f6152918370699334cd204d" - integrity sha512-DEtsxhx0LIYWkJBTQolRxG4EI0setTJkqR4m7r4YpBdxtWJH1Mbg01Cj8ZjNOO8etqfA3IZaOPHUxCs8cBsKLw== - dependencies: - workbox-core "6.5.4" - -workbox-streams@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-streams/-/workbox-streams-6.5.4.tgz#1cb3c168a6101df7b5269d0353c19e36668d7d69" - integrity sha512-FXKVh87d2RFXkliAIheBojBELIPnWbQdyDvsH3t74Cwhg0fDheL1T8BqSM86hZvC0ZESLsznSYWw+Va+KVbUzg== - dependencies: - workbox-core "6.5.4" - workbox-routing "6.5.4" - -workbox-sw@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-sw/-/workbox-sw-6.5.4.tgz#d93e9c67924dd153a61367a4656ff4d2ae2ed736" - integrity sha512-vo2RQo7DILVRoH5LjGqw3nphavEjK4Qk+FenXeUsknKn14eCNedHOXWbmnvP4ipKhlE35pvJ4yl4YYf6YsJArA== - -workbox-webpack-plugin@^6.4.1: - version "6.5.4" - resolved "http://localhost:4873/workbox-webpack-plugin/-/workbox-webpack-plugin-6.5.4.tgz#baf2d3f4b8f435f3469887cf4fba2b7fac3d0fd7" - integrity sha512-LmWm/zoaahe0EGmMTrSLUi+BjyR3cdGEfU3fS6PN1zKFYbqAKuQ+Oy/27e4VSXsyIwAw8+QDfk1XHNGtZu9nQg== - dependencies: - fast-json-stable-stringify "^2.1.0" - pretty-bytes "^5.4.1" - upath "^1.2.0" - webpack-sources "^1.4.3" - workbox-build "6.5.4" - -workbox-window@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-window/-/workbox-window-6.5.4.tgz#d991bc0a94dff3c2dbb6b84558cff155ca878e91" - integrity sha512-HnLZJDwYBE+hpG25AQBO8RUWBJRaCsI9ksQJEp3aCOFCaG5kqaToAYXFRAHxzRluM2cQbGzdQF5rjKPWPA1fug== - dependencies: - "@types/trusted-types" "^2.0.2" - workbox-core "6.5.4" - -wrap-ansi@^7.0.0: - version "7.0.0" - resolved "http://localhost:4873/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrappy@1: - 
version "1.0.2" - resolved "http://localhost:4873/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== - -write-file-atomic@^3.0.0: - version "3.0.3" - resolved "http://localhost:4873/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" - integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== - dependencies: - imurmurhash "^0.1.4" - is-typedarray "^1.0.0" - signal-exit "^3.0.2" - typedarray-to-buffer "^3.1.5" - -ws@^7.4.6: - version "7.5.9" - resolved "http://localhost:4873/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" - integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== - -ws@^8.4.2: - version "8.9.0" - resolved "http://localhost:4873/ws/-/ws-8.9.0.tgz#2a994bb67144be1b53fe2d23c53c028adeb7f45e" - integrity sha512-Ja7nszREasGaYUYCI2k4lCKIRTt+y7XuqVoHR44YpI49TtryyqbqvDMn5eqfW7e6HzTukDRIsXqzVHScqRcafg== - -xml-name-validator@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" - integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== - -xmlchars@^2.2.0: - version "2.2.0" - resolved "http://localhost:4873/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" - integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== - -xtend@^4.0.2: - version "4.0.2" - resolved "http://localhost:4873/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" - integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== - -y18n@^5.0.5: - version "5.0.8" - resolved 
"http://localhost:4873/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" - integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== - -yallist@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - -yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: - version "1.10.2" - resolved "http://localhost:4873/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" - integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== - -yargs-parser@^20.2.2: - version "20.2.9" - resolved "http://localhost:4873/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" - integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== - -yargs@^16.2.0: - version "16.2.0" - resolved "http://localhost:4873/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" - integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== - dependencies: - cliui "^7.0.2" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.0" - y18n "^5.0.5" - yargs-parser "^20.2.2" - -yocto-queue@^0.1.0: - version "0.1.0" - resolved "http://localhost:4873/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" - integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== diff --git a/wrappers/javascript/examples/vite/package.json b/wrappers/javascript/examples/vite/package.json index 79ec2037..a5f0ce2f 100644 --- a/wrappers/javascript/examples/vite/package.json +++ b/wrappers/javascript/examples/vite/package.json @@ -9,7 +9,7 @@ "preview": "vite preview" }, "dependencies": { - "@automerge/automerge": 
"2.0.0-alpha.4" + "@automerge/automerge": "2.0.0-alpha.5" }, "devDependencies": { "typescript": "^4.6.4", diff --git a/wrappers/javascript/examples/webpack/package.json b/wrappers/javascript/examples/webpack/package.json index 5f0680b2..55e4ba60 100644 --- a/wrappers/javascript/examples/webpack/package.json +++ b/wrappers/javascript/examples/webpack/package.json @@ -10,7 +10,7 @@ }, "author": "", "dependencies": { - "@automerge/automerge": "2.0.0-alpha.4" + "@automerge/automerge": "2.0.0-alpha.5" }, "devDependencies": { "serve": "^13.0.2", diff --git a/wrappers/javascript/package.json b/wrappers/javascript/package.json index 95f58680..e830b100 100644 --- a/wrappers/javascript/package.json +++ b/wrappers/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.0-alpha.4", + "version": "2.0.0-alpha.5", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", @@ -57,7 +57,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.9", + "@automerge/automerge-wasm": "0.1.10", "uuid": "^8.3" } } From dd5edafa9dcef366b1093286ab547e5063a5198a Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sat, 15 Oct 2022 17:16:14 -0500 Subject: [PATCH 601/730] make freeze work recursively --- crates/automerge-wasm/index.d.ts | 4 +-- crates/automerge-wasm/package.json | 2 +- crates/automerge-wasm/src/interop.rs | 44 ++++++++++++++++++-------- crates/automerge-wasm/src/lib.rs | 13 +++++--- wrappers/javascript/package.json | 4 +-- wrappers/javascript/src/index.ts | 26 ++++++--------- wrappers/javascript/test/basic_test.ts | 44 ++++++++++++++++++++++++++ 7 files changed, 97 insertions(+), 40 deletions(-) diff --git a/crates/automerge-wasm/index.d.ts b/crates/automerge-wasm/index.d.ts index 8dbff739..4339f2b8 100644 --- a/crates/automerge-wasm/index.d.ts +++ 
b/crates/automerge-wasm/index.d.ts @@ -164,7 +164,7 @@ export class Automerge { keys(obj: ObjID, heads?: Heads): string[]; text(obj: ObjID, heads?: Heads): string; length(obj: ObjID, heads?: Heads): number; - materialize(obj?: ObjID, heads?: Heads): MaterializeValue; + materialize(obj?: ObjID, heads?: Heads, metadata?: unknown, freeze?: bool): MaterializeValue; // transactions commit(message?: string, time?: number): Hash; @@ -206,7 +206,7 @@ export class Automerge { dump(): void; // experimental api can go here - applyPatches(obj: Doc, meta?: unknown, callback?: (values: Value[]) => undefined): Doc; + applyPatches(obj: Doc, meta?: unknown, callback?: (patch: Patch, before: Doc, after: Doc) => void): Doc; } export interface JsSyncState { diff --git a/crates/automerge-wasm/package.json b/crates/automerge-wasm/package.json index 6a64278a..88225bad 100644 --- a/crates/automerge-wasm/package.json +++ b/crates/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.10", + "version": "0.1.11", "license": "MIT", "files": [ "README.md", diff --git a/crates/automerge-wasm/src/interop.rs b/crates/automerge-wasm/src/interop.rs index f8d961ec..c2b8c6b7 100644 --- a/crates/automerge-wasm/src/interop.rs +++ b/crates/automerge-wasm/src/interop.rs @@ -370,20 +370,23 @@ impl Automerge { datatype: Datatype, heads: Option<&Vec>, meta: &JsValue, + freeze: bool, ) -> Result { let result = if datatype.is_sequence() { self.wrap_object( - self.export_list(obj, heads, meta)?, + self.export_list(obj, heads, meta, freeze)?, datatype, &obj.to_string().into(), meta, + freeze, )? } else { self.wrap_object( - self.export_map(obj, heads, meta)?, + self.export_map(obj, heads, meta, freeze)?, datatype, &obj.to_string().into(), meta, + freeze, )? 
}; Ok(result.into()) @@ -394,6 +397,7 @@ impl Automerge { obj: &ObjId, heads: Option<&Vec>, meta: &JsValue, + freeze: bool, ) -> Result { let keys = self.doc.keys(obj); let map = Object::new(); @@ -405,7 +409,7 @@ impl Automerge { }; if let Ok(Some((val, id))) = val_and_id { let subval = match val { - Value::Object(o) => self.export_object(&id, o.into(), heads, meta)?, + Value::Object(o) => self.export_object(&id, o.into(), heads, meta, freeze)?, Value::Scalar(_) => self.export_value(alloc(&val))?, }; Reflect::set(&map, &k.into(), &subval)?; @@ -420,6 +424,7 @@ impl Automerge { obj: &ObjId, heads: Option<&Vec>, meta: &JsValue, + freeze: bool, ) -> Result { let len = self.doc.length(obj); let array = Array::new(); @@ -431,7 +436,7 @@ impl Automerge { }; if let Ok(Some((val, id))) = val_and_id { let subval = match val { - Value::Object(o) => self.export_object(&id, o.into(), heads, meta)?, + Value::Object(o) => self.export_object(&id, o.into(), heads, meta, freeze)?, Value::Scalar(_) => self.export_value(alloc(&val))?, }; array.push(&subval); @@ -504,9 +509,10 @@ impl Automerge { (datatype, raw_value): (Datatype, JsValue), id: &ObjId, meta: &JsValue, + freeze: bool, ) -> Result { if let Ok(obj) = raw_value.clone().dyn_into::() { - let result = self.wrap_object(obj, datatype, &id.to_string().into(), meta)?; + let result = self.wrap_object(obj, datatype, &id.to_string().into(), meta, freeze)?; Ok(result.into()) } else { self.export_value((datatype, raw_value)) @@ -519,6 +525,7 @@ impl Automerge { datatype: Datatype, id: &JsValue, meta: &JsValue, + freeze: bool, ) -> Result { let value = if let Some(function) = self.external_types.get(&datatype) { let wrapped_value = function.call1(&JsValue::undefined(), &value)?; @@ -538,6 +545,9 @@ impl Automerge { } set_hidden_value(&value, &Symbol::for_(DATATYPE_SYMBOL), datatype)?; set_hidden_value(&value, &Symbol::for_(META_SYMBOL), meta)?; + if freeze { + Object::freeze(&value); + } Ok(value) } @@ -546,16 +556,19 @@ impl 
Automerge { array: &Object, patch: &Patch, meta: &JsValue, + freeze: bool, ) -> Result { let result = Array::from(array); // shallow copy match patch { Patch::PutSeq { index, value, .. } => { - let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; + let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta, freeze)?; Reflect::set(&result, &(*index as f64).into(), &sub_val)?; Ok(result.into()) } - Patch::DeleteSeq { index, .. } => self.sub_splice(result, *index, 1, &[], meta), - Patch::Insert { index, values, .. } => self.sub_splice(result, *index, 0, values, meta), + Patch::DeleteSeq { index, .. } => self.sub_splice(result, *index, 1, &[], meta, freeze), + Patch::Insert { index, values, .. } => { + self.sub_splice(result, *index, 0, values, meta, freeze) + } Patch::Increment { prop, value, .. } => { if let Prop::Seq(index) = prop { let index = (*index as f64).into(); @@ -583,11 +596,12 @@ impl Automerge { map: &Object, patch: &Patch, meta: &JsValue, + freeze: bool, ) -> Result { let result = Object::assign(&Object::new(), map); // shallow copy match patch { Patch::PutMap { key, value, .. 
} => { - let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; + let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta, freeze)?; Reflect::set(&result, &key.into(), &sub_val)?; Ok(result) } @@ -624,12 +638,13 @@ impl Automerge { patch: &Patch, depth: usize, meta: &JsValue, + freeze: bool, ) -> Result { let (inner, datatype, id) = self.unwrap_object(&obj)?; let prop = patch.path().get(depth).map(|p| prop_to_js(&p.1)); let result = if let Some(prop) = prop { if let Ok(sub_obj) = Reflect::get(&inner, &prop)?.dyn_into::() { - let new_value = self.apply_patch(sub_obj, patch, depth + 1, meta)?; + let new_value = self.apply_patch(sub_obj, patch, depth + 1, meta, freeze)?; let result = shallow_copy(&inner); Reflect::set(&result, &prop, &new_value)?; Ok(result) @@ -639,12 +654,12 @@ impl Automerge { return Ok(obj); } } else if Array::is_array(&inner) { - self.apply_patch_to_array(&inner, patch, meta) + self.apply_patch_to_array(&inner, patch, meta, freeze) } else { - self.apply_patch_to_map(&inner, patch, meta) + self.apply_patch_to_map(&inner, patch, meta, freeze) }?; - self.wrap_object(result, datatype, &id, meta) + self.wrap_object(result, datatype, &id, meta, freeze) } fn sub_splice( @@ -654,10 +669,11 @@ impl Automerge { num_del: usize, values: &[(Value<'_>, ObjId)], meta: &JsValue, + freeze: bool, ) -> Result { let args: Array = values .iter() - .map(|v| self.maybe_wrap_object(alloc(&v.0), &v.1, meta)) + .map(|v| self.maybe_wrap_object(alloc(&v.0), &v.1, meta, freeze)) .collect::>()?; args.unshift(&(num_del as u32).into()); args.unshift(&(index as u32).into()); diff --git a/crates/automerge-wasm/src/lib.rs b/crates/automerge-wasm/src/lib.rs index 827432ce..fdb721fa 100644 --- a/crates/automerge-wasm/src/lib.rs +++ b/crates/automerge-wasm/src/lib.rs @@ -464,22 +464,23 @@ impl Automerge { let mut object = object.dyn_into::()?; let patches = self.doc.observer().take_patches(); let callback = callback.dyn_into::().ok(); + let freeze = 
Object::is_frozen(&object); // even if there are no patches we may need to update the meta object // which requires that we update the object too if patches.is_empty() && !meta.is_undefined() { let (obj, datatype, id) = self.unwrap_object(&object)?; object = Object::assign(&Object::new(), &obj); - object = self.wrap_object(object, datatype, &id, &meta)?; + object = self.wrap_object(object, datatype, &id, &meta, freeze)?; } for p in patches { if let Some(c) = &callback { let before = object.clone(); - object = self.apply_patch(object, &p, 0, &meta)?; + object = self.apply_patch(object, &p, 0, &meta, freeze)?; c.call3(&JsValue::undefined(), &p.try_into()?, &before, &object)?; } else { - object = self.apply_patch(object, &p, 0, &meta)?; + object = self.apply_patch(object, &p, 0, &meta, freeze)?; } } @@ -637,7 +638,7 @@ impl Automerge { #[wasm_bindgen(js_name = toJS)] pub fn to_js(&self, meta: JsValue) -> Result { - self.export_object(&ROOT, Datatype::Map, None, &meta) + self.export_object(&ROOT, Datatype::Map, None, &meta, false) } pub fn materialize( @@ -645,15 +646,17 @@ impl Automerge { obj: JsValue, heads: Option, meta: JsValue, + freeze: JsValue, ) -> Result { let obj = self.import(obj).unwrap_or(ROOT); let heads = get_heads(heads); + let freeze = freeze.as_bool().unwrap_or(false); let obj_type = self .doc .object_type(&obj) .ok_or_else(|| to_js_err(format!("invalid obj {}", obj)))?; let _patches = self.doc.observer().take_patches(); // throw away patches - self.export_object(&obj, obj_type.into(), heads.as_ref(), &meta) + self.export_object(&obj, obj_type.into(), heads.as_ref(), &meta, freeze) } fn import(&self, id: JsValue) -> Result { diff --git a/wrappers/javascript/package.json b/wrappers/javascript/package.json index e830b100..7d850682 100644 --- a/wrappers/javascript/package.json +++ b/wrappers/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.0-alpha.5", + "version": "2.0.0-alpha.6", "description": "Javascript 
implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", @@ -57,7 +57,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.10", + "@automerge/automerge-wasm": "0.1.11", "uuid": "^8.3" } } diff --git a/wrappers/javascript/src/index.ts b/wrappers/javascript/src/index.ts index 0c9041e5..4f73657a 100644 --- a/wrappers/javascript/src/index.ts +++ b/wrappers/javascript/src/index.ts @@ -102,21 +102,16 @@ export function init(_opts?: ActorId | InitOptions) : Doc{ let patchCallback = opts.patchCallback const handle = ApiHandler.create(opts.actor) handle.enablePatches(true) - //@ts-ignore handle.registerDatatype("counter", (n) => new Counter(n)) - //@ts-ignore handle.registerDatatype("text", (n) => new Text(n)) - //@ts-ignore - const doc = handle.materialize("/", undefined, { handle, heads: undefined, freeze, patchCallback }) - //@ts-ignore + const doc = handle.materialize("/", undefined, { handle, heads: undefined, freeze, patchCallback }, freeze) as Doc return doc } export function clone(doc: Doc) : Doc { const state = _state(doc) const handle = state.heads ? state.handle.forkAt(state.heads) : state.handle.fork() - //@ts-ignore - const clonedDoc : any = handle.materialize("/", undefined, { ... state, handle }) + const clonedDoc : any = handle.materialize("/", undefined, { ... state, handle }, state.freeze) return clonedDoc } @@ -142,10 +137,13 @@ export function change(doc: Doc, options: string | ChangeOptions | Chan } } +export function isAutomerge(doc: unknown): boolean { + return getObjectId(doc) === "_root" +} + function progressDocument(doc: Doc, heads: Heads, callback?: PatchCallback): Doc { let state = _state(doc) let nextState = { ... 
state, heads: undefined }; - // @ts-ignore let nextDoc = state.handle.applyPatches(doc, nextState, callback) state.heads = heads if (nextState.freeze) { Object.freeze(nextDoc) } @@ -215,15 +213,13 @@ export function emptyChange(doc: Doc, options: ChangeOptions) { export function load(data: Uint8Array, _opts?: ActorId | InitOptions) : Doc { const opts = importOpts(_opts) const actor = opts.actor + const freeze = !!opts.freeze const patchCallback = opts.patchCallback const handle = ApiHandler.load(data, actor) handle.enablePatches(true) - //@ts-ignore handle.registerDatatype("counter", (n) => new Counter(n)) - //@ts-ignore handle.registerDatatype("text", (n) => new Text(n)) - //@ts-ignore - const doc : any = handle.materialize("/", undefined, { handle, heads: undefined, patchCallback }) + const doc : any = handle.materialize("/", undefined, { handle, freeze, heads: undefined, patchCallback }, freeze) return doc } @@ -445,11 +441,9 @@ export function dump(doc: Doc) { state.handle.dump() } -// FIXME - return T? 
-export function toJS(doc: Doc) : MaterializeValue { +export function toJS(doc: Doc) : T { const state = _state(doc) - // @ts-ignore - return state.handle.materialize("_root", state.heads, state) + return state.handle.materialize("_root", state.heads, undefined, false) as T } diff --git a/wrappers/javascript/test/basic_test.ts b/wrappers/javascript/test/basic_test.ts index 18a6818b..e17fc45e 100644 --- a/wrappers/javascript/test/basic_test.ts +++ b/wrappers/javascript/test/basic_test.ts @@ -21,6 +21,50 @@ describe('Automerge', () => { assert.deepEqual(doc2, { hello: "world", big: "little", zip: "zop", app: "dap" }) }) + it('can detect an automerge doc with isAutomerge()', () => { + let doc1 = Automerge.from({ sub: { object: true } }) + assert(Automerge.isAutomerge(doc1)) + assert(!Automerge.isAutomerge(doc1.sub)) + assert(!Automerge.isAutomerge("String")) + assert(!Automerge.isAutomerge({ sub: { object: true }})) + assert(!Automerge.isAutomerge(undefined)) + }) + + it('it should recursively freeze the document if requested', () => { + let doc1 = Automerge.init({ freeze: true } ) + let doc2 = Automerge.init() + + assert(Object.isFrozen(doc1)) + assert(!Object.isFrozen(doc2)) + + // will also freeze sub objects + doc1 = Automerge.change(doc1, (doc) => doc.book = { title: "how to win friends" }) + doc2 = Automerge.merge(doc2,doc1) + assert(Object.isFrozen(doc1)) + assert(Object.isFrozen(doc1.book)) + assert(!Object.isFrozen(doc2)) + assert(!Object.isFrozen(doc2.book)) + + // works on from + let doc3 = Automerge.from({ sub: { obj: "inner" } }, { freeze: true }) + assert(Object.isFrozen(doc3)) + assert(Object.isFrozen(doc3.sub)) + + // works on load + let doc4 = Automerge.load(Automerge.save(doc3), { freeze: true }) + assert(Object.isFrozen(doc4)) + assert(Object.isFrozen(doc4.sub)) + + // follows clone + let doc5 = Automerge.clone(doc4) + assert(Object.isFrozen(doc5)) + assert(Object.isFrozen(doc5.sub)) + + // toJS does not freeze + let exported = Automerge.toJS(doc5) 
+ assert(!Object.isFrozen(exported)) + }) + it('handle basic sets over many changes', () => { let doc1 = Automerge.init() let timestamp = new Date(); From 5ce3a556a9b6827db0f2b5effab323686a19c1cb Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sat, 15 Oct 2022 19:57:34 -0500 Subject: [PATCH 602/730] weak_refs --- .github/workflows/ci.yaml | 12 ++-- README.md | 3 +- crates/automerge-wasm/.gitignore | 4 +- crates/automerge-wasm/README.md | 98 +++++++++++----------------- crates/automerge-wasm/package.json | 29 ++++---- crates/automerge-wasm/src/lib.rs | 12 ++-- crates/automerge-wasm/test/readme.ts | 36 ++-------- crates/automerge-wasm/test/test.ts | 65 ------------------ wrappers/javascript/src/index.ts | 10 ++- 9 files changed, 83 insertions(+), 186 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4fc75fef..0140bd6b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -73,8 +73,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Install wasm-pack - run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh + - name: Install wasm-bindgen-cli + run: cargo install wasm-bindgen-cli wasm-opt + - name: Install wasm32 target + run: rustup target add wasm32-unknown-unknown - name: run tests run: ./scripts/ci/wasm_tests @@ -82,8 +84,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Install wasm-pack - run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh + - name: Install wasm-bindgen-cli + run: cargo install wasm-bindgen-cli wasm-opt + - name: Install wasm32 target + run: rustup target add wasm32-unknown-unknown - name: run tests run: ./scripts/ci/js_tests diff --git a/README.md b/README.md index fcfe4da7..e369ec39 100644 --- a/README.md +++ b/README.md @@ -75,7 +75,8 @@ implementation via FFI in other languages in `./wrappers`. 
Because this is To build this codebase you will need: - `rust` -- `wasm-pack` +- `wasm-bindgen-cli` +- `wasm-opt` - `node` - `yarn` - `cmake` diff --git a/crates/automerge-wasm/.gitignore b/crates/automerge-wasm/.gitignore index a5ef445c..ab957e1c 100644 --- a/crates/automerge-wasm/.gitignore +++ b/crates/automerge-wasm/.gitignore @@ -1,5 +1,5 @@ /node_modules -/dev -/target +/bundler +/nodejs Cargo.lock yarn.lock diff --git a/crates/automerge-wasm/README.md b/crates/automerge-wasm/README.md index 2fb6a2f0..992aaa8f 100644 --- a/crates/automerge-wasm/README.md +++ b/crates/automerge-wasm/README.md @@ -18,34 +18,6 @@ An Object id uniquely identifies a Map, List or Text object within a document. Heads refers to a set of hashes that uniquely identifies a point in time in a document's history. Heads are useful for comparing documents state or retrieving past states from the document. -### Using the Library and Creating a Document - -This is a rust/wasm package and will work in a node or web environment. Node is able to load wasm synchronously but a web environment is not. The 'init' export of the package is a function that returns a promise that resolves once the wasm is loaded. - -This creates a document in node. The memory allocated is handled by wasm and isn't managed by the javascript garbage collector and thus needs to be manually freed. - -```javascript - import { create } from "automerge-wasm" - - let doc = create() - - doc.free() -``` - -While this will work in both node and in a web context - -```javascript - import { init, create } from "automerge-wasm" - - init().then(_ => { - let doc = create() - doc.free() - }) - -``` - -The examples below will assume a node context for brevity. - ### Automerge Scalar Types Automerge has many scalar types. Methods like `put()` and `insert()` take an optional data type parameter. Normally the type can be inferred but in some cases, such as telling the difference between int, uint and a counter, it cannot. 
@@ -53,7 +25,7 @@ Automerge has many scalar types. Methods like `put()` and `insert()` take an op These are puts without a data type ```javascript - import { create } from "automerge-wasm" + import { create } from "@automerge/automerge-wasm" let doc = create() doc.put("/", "prop1", 100) // int @@ -63,7 +35,6 @@ These are puts without a data type doc.put("/", "prop5", new Uint8Array([1,2,3])) doc.put("/", "prop6", true) doc.put("/", "prop7", null) - doc.free() ``` Put's with a data type and examples of all the supported data types. @@ -71,7 +42,7 @@ Put's with a data type and examples of all the supported data types. While int vs uint vs f64 matters little in javascript, Automerge is a cross platform library where these distinctions matter. ```javascript - import { create } from "automerge-wasm" + import { create } from "@automerge/automerge-wasm" let doc = create() doc.put("/", "prop1", 100, "int") @@ -84,7 +55,6 @@ While int vs uint vs f64 matters little in javascript, Automerge is a cross plat doc.put("/", "prop8", new Uint8Array([1,2,3]), "bytes") doc.put("/", "prop9", true, "boolean") doc.put("/", "prop10", null, "null") - doc.free() ``` ### Automerge Object Types @@ -92,7 +62,7 @@ While int vs uint vs f64 matters little in javascript, Automerge is a cross plat Automerge WASM supports 3 object types. Maps, lists, and text. Maps are key value stores where the values can be any scalar type or any object type. Lists are numerically indexed sets of data that can hold any scalar or any object type. ```javascript - import { create } from "automerge-wasm" + import { create } from "@automerge/automerge-wasm" let doc = create() @@ -111,14 +81,12 @@ Automerge WASM supports 3 object types. Maps, lists, and text. Maps are key va // text is initialized with a string let notes = doc.putObject("/", "notes", "Hello world!") - - doc.free() ``` You can access objects by passing the object id as the first parameter for a call. 
```javascript - import { create } from "automerge-wasm" + import { create } from "@automerge/automerge-wasm" let doc = create() @@ -142,8 +110,6 @@ You can access objects by passing the object id as the first parameter for a cal // use a path instead doc.put("/config", "align", "right") - - doc.free() ``` Using the id directly is always faster (as it prevents the path to id conversion internally) so it is preferred for performance critical code. @@ -165,7 +131,6 @@ Maps are key/value stores. The root object is always a map. The keys are alway doc.keys(mymap) // returns ["bytes","foo","sub"] doc.materialize("_root") // returns { mymap: { bytes: new Uint8Array([1,2,3]), foo: "bar", sub: {}}} - doc.free() ``` ### Lists @@ -185,7 +150,6 @@ Lists are index addressable sets of values. These values can be any scalar or o doc.materialize(items) // returns [ "bat", [1,2], { hello : "world" }, true, "bag", "brick"] doc.length(items) // returns 6 - doc.free() ``` ### Text @@ -204,7 +168,6 @@ Text is a specialized list type intended for modifying a text document. The pri doc.text(notes) // returns "Hello \ufffceveryone" doc.getWithType(notes, 6) // returns ["map", obj] doc.get(obj, "hi") // returns "there" - doc.free() ``` ### Tables @@ -234,7 +197,6 @@ When querying maps use the `get()` method with the object in question and the pr doc1.get("_root","key3") // returns "doc2val" doc1.getAll("_root","key3") // returns [[ "str", "doc1val"], ["str", "doc2val"]] - doc1.free(); doc2.free() ``` ### Counters @@ -256,8 +218,6 @@ Counters are 64 bit ints that support the increment operation. 
Frequently diffe doc1.merge(doc2) doc1.materialize("_root") // returns { number: 10, total: 33 } - - doc1.free(); doc2.free() ``` ### Transactions @@ -285,8 +245,6 @@ Generally speaking you don't need to think about transactions when using Automer doc.get("_root", "key") // returns "val2" doc.pendingOps() // returns 0 - - doc.free() ``` ### Viewing Old Versions of the Document @@ -308,8 +266,6 @@ All query functions can take an optional argument of `heads` which allow you to doc.get("_root","key",heads2) // returns "val2" doc.get("_root","key",heads1) // returns "val1" doc.get("_root","key",[]) // returns undefined - - doc.free() ``` This works for `get()`, `getAll()`, `keys()`, `length()`, `text()`, and `materialize()` @@ -335,8 +291,6 @@ The `merge()` command applies all changes in the argument doc into the calling d doc1.materialize("_root") // returns { key1: "val1", key2: "val2", key3: "val3" } doc2.materialize("_root") // returns { key1: "val1", key3: "val3" } - - doc1.free(); doc2.free() ``` Note that calling `a.merge(a)` will produce an unrecoverable error from the wasm-bindgen layer which (as of this writing) there is no workaround for. @@ -350,7 +304,7 @@ If you wish to incrementally update a saved Automerge doc you can call `saveIncr The `load()` function takes a `Uint8Array()` of bytes produced in this way and constitutes a new document. The `loadIncremental()` method is available if you wish to consume the result of a `saveIncremental()` with an already instanciated document. 
```javascript - import { create, load } from "automerge-wasm" + import { create, load } from "@automerge/automerge-wasm" let doc1 = create() @@ -382,14 +336,12 @@ The `load()` function takes a `Uint8Array()` of bytes produced in this way and c doc2.materialize("_root") // returns { key1: "value1", key2: "value2" } doc3.materialize("_root") // returns { key1: "value1", key2: "value2" } doc4.materialize("_root") // returns { key1: "value1", key2: "value2" } - - doc1.free(); doc2.free(); doc3.free(); doc4.free() ``` One interesting feature of automerge binary saves is that they can be concatenated together in any order and can still be loaded into a coherent merged document. ```javascript -import { load } from "automerge-wasm" +import { load } from "@automerge/automerge-wasm" import * as fs from "fs" let file1 = fs.readFileSync("automerge_save_1"); @@ -409,7 +361,7 @@ When syncing a document the `generateSyncMessage()` and `receiveSyncMessage()` m A very simple sync implementation might look like this. ```javascript - import { encodeSyncState, decodeSyncState, initSyncState } from "automerge-wasm" + import { encodeSyncState, decodeSyncState, initSyncState } from "@automerge/automerge-wasm" let states = {} @@ -457,7 +409,7 @@ Actors are ids that need to be unique to each process writing to a document. Th Methods that create new documents will generate random actors automatically - if you wish to supply your own it is always taken as an optional argument. This is true for the following functions. 
```javascript - import { create, load } from "automerge-wasm" + import { create, load } from "@automerge/automerge-wasm" let doc1 = create() // random actorid let doc2 = create("aabbccdd") @@ -467,8 +419,6 @@ Methods that create new documents will generate random actors automatically - if let doc6 = load(doc4.save(), "00aabb11") let actor = doc1.getActor() - - doc1.free(); doc2.free(); doc3.free(); doc4.free(); doc5.free(); doc6.free() ``` ### Glossary: Object Id's @@ -491,7 +441,35 @@ Object Ids uniquely identify an object within a document. They are represented doc.put(o1v2, "x", "y") // modifying the new "o1" object assert.deepEqual(doc.materialize("_root"), { "o1": { x: "y" }, "o2": {} }) - - doc.free() ``` +### Appendix: Building + + The following steps should allow you to build the package + + ``` + $ rustup target add wasm32-unknown-unknown + $ cargo install wasm-bindgen-cli + $ cargo install wasm-opt + $ yarn + $ yarn release + $ yarn pack + ``` + +### Appendix: WASM and Memory Allocation + +Allocated memory in rust will be freed automatically on platforms that support `FinalizationRegistry`. + +This is currently supported in [all major browsers and nodejs](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry). + +On unsupported platforms you can free memory explicitly. 
+ +```javascript + import { create, initSyncState } from "@automerge/automerge-wasm" + + let doc = create() + let sync = initSyncState() + + doc.free() + sync.free() +``` diff --git a/crates/automerge-wasm/package.json b/crates/automerge-wasm/package.json index 6a64278a..7363bcde 100644 --- a/crates/automerge-wasm/package.json +++ b/crates/automerge-wasm/package.json @@ -15,23 +15,26 @@ "LICENSE", "package.json", "index.d.ts", - "nodejs/bindgen.js", - "nodejs/bindgen_bg.wasm", - "bundler/bindgen.js", - "bundler/bindgen_bg.js", - "bundler/bindgen_bg.wasm" + "nodejs/automerge_wasm.js", + "nodejs/automerge_wasm_bg.wasm", + "bundler/automerge_wasm.js", + "bundler/automerge_wasm_bg.js", + "bundler/automerge_wasm_bg.wasm" ], "private": false, "types": "index.d.ts", - "module": "./bundler/bindgen.js", - "main": "./nodejs/bindgen.js", + "module": "./bundler/automerge_wasm.js", + "main": "./nodejs/automerge_wasm.js", "scripts": { "lint": "eslint test/*.ts index.d.ts", - "debug": "cross-env PROFILE=dev yarn buildall", - "build": "cross-env PROFILE=dev FEATURES='' yarn buildall", - "release": "cross-env PROFILE=release yarn buildall", + "debug": "cross-env PROFILE=dev TARGET_DIR=debug yarn buildall", + "build": "cross-env PROFILE=dev TARGET_DIR=debug FEATURES='' yarn buildall", + "release": "cross-env PROFILE=release TARGET_DIR=release yarn buildall", "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=bundler yarn target", - "target": "rimraf ./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET -- $FEATURES", + "target": "rimraf ./$TARGET && yarn compile && yarn bindgen && yarn opt", + "compile": "cargo build --target wasm32-unknown-unknown --profile $PROFILE", + "bindgen": "wasm-bindgen --no-typescript --weak-refs --target $TARGET --out-dir $TARGET ../../target/wasm32-unknown-unknown/$TARGET_DIR/automerge_wasm.wasm", + "opt": "wasm-opt -Oz $TARGET/automerge_wasm_bg.wasm -o $TARGET/automerge_wasm_bg.wasm", "test": "ts-mocha 
-p tsconfig.json --type-check --bail --full-trace test/*.ts" }, "devDependencies": { @@ -52,7 +55,7 @@ "typescript": "^4.6.4" }, "exports": { - "browser": "./bundler/bindgen.js", - "require": "./nodejs/bindgen.js" + "browser": "./bundler/automerge_wasm.js", + "require": "./nodejs/automerge_wasm.js" } } diff --git a/crates/automerge-wasm/src/lib.rs b/crates/automerge-wasm/src/lib.rs index 827432ce..64e87ad5 100644 --- a/crates/automerge-wasm/src/lib.rs +++ b/crates/automerge-wasm/src/lib.rs @@ -121,8 +121,6 @@ impl Automerge { Ok(automerge) } - pub fn free(self) {} - #[wasm_bindgen(js_name = pendingOps)] pub fn pending_ops(&self) -> JsValue { (self.doc.pending_ops() as u32).into() @@ -826,8 +824,8 @@ pub fn import_sync_state(state: JsValue) -> Result { // this is needed to be compatible with the automerge-js api #[wasm_bindgen(js_name = exportSyncState)] -pub fn export_sync_state(state: SyncState) -> JsValue { - JS::from(state.0).into() +pub fn export_sync_state(state: &SyncState) -> JsValue { + JS::from(state.0.clone()).into() } #[wasm_bindgen(js_name = encodeSyncMessage)] @@ -865,9 +863,9 @@ pub fn decode_sync_message(msg: Uint8Array) -> Result { } #[wasm_bindgen(js_name = encodeSyncState)] -pub fn encode_sync_state(state: SyncState) -> Result { - let state = state.0; - Ok(Uint8Array::from(state.encode().as_slice())) +pub fn encode_sync_state(state: &SyncState) -> Result { + //let state = state.0.clone(); + Ok(Uint8Array::from(state.0.encode().as_slice())) } #[wasm_bindgen(js_name = decodeSyncState)] diff --git a/crates/automerge-wasm/test/readme.ts b/crates/automerge-wasm/test/readme.ts index e6e77731..5fbac867 100644 --- a/crates/automerge-wasm/test/readme.ts +++ b/crates/automerge-wasm/test/readme.ts @@ -1,18 +1,15 @@ /* eslint-disable @typescript-eslint/no-unused-vars */ import { describe, it } from 'mocha'; import * as assert from 'assert' -import { create, load } from '..' +import { create, load, initSyncState } from '..' 
describe('Automerge', () => { describe('Readme Examples', () => { - it('Using the Library and Creating a Document (1)', () => { + it('Using the Library and Creating a Document', () => { const doc = create() + const sync = initSyncState() doc.free() - }) - it('Using the Library and Creating a Document (2)', (done) => { - const doc = create() - doc.free() - done() + sync.free() }) it('Automerge Scalar Types (1)', () => { const doc = create() @@ -33,8 +30,6 @@ describe('Automerge', () => { prop6: true, prop7: null }) - - doc.free() }) it('Automerge Scalar Types (2)', () => { const doc = create() @@ -48,7 +43,6 @@ describe('Automerge', () => { doc.put("/", "prop8", new Uint8Array([1,2,3]), "bytes") doc.put("/", "prop9", true, "boolean") doc.put("/", "prop10", null, "null") - doc.free() }) it('Automerge Object Types (1)', () => { const doc = create() @@ -68,8 +62,6 @@ describe('Automerge', () => { // text is initialized with a string const notes = doc.putObject("/", "notes", "Hello world!") - - doc.free() }) it('Automerge Object Types (2)', () => { const doc = create() @@ -91,8 +83,6 @@ describe('Automerge', () => { assert.deepEqual(doc.materialize("/"), { config: { align: "right", archived: false, cycles: [ 10, 19, 21 ] } }) - - doc.free() }) it('Maps (1)', () => { const doc = create() @@ -107,8 +97,6 @@ describe('Automerge', () => { assert.deepEqual(doc.keys(mymap),["bytes","foo","sub"]) assert.deepEqual(doc.materialize("_root"), { mymap: { bytes: new Uint8Array([1,2,3]), foo: "bar", sub: {} }}) - - doc.free() }) it('Lists (1)', () => { const doc = create() @@ -123,8 +111,6 @@ describe('Automerge', () => { assert.deepEqual(doc.materialize(items),[ "bat", [ 1 ,2 ], { hello : "world" }, true, "bag", "brick" ]) assert.deepEqual(doc.length(items),6) - - doc.free() }) it('Text (1)', () => { const doc = create("aaaaaa") @@ -138,8 +124,6 @@ describe('Automerge', () => { assert.deepEqual(doc.text(notes), "Hello \ufffceveryone") assert.deepEqual(doc.get(notes, 6), obj) 
assert.deepEqual(doc.get(obj, "hi"), "there") - - doc.free() }) it('Querying Data (1)', () => { const doc1 = create("aabbcc") @@ -160,8 +144,6 @@ describe('Automerge', () => { assert.deepEqual(doc1.get("_root","key3"), "doc2val") assert.deepEqual(doc1.getAll("_root","key3"),[[ "str", "doc1val", "3@aabbcc"], ["str", "doc2val", "3@ffaaff"]]) - - doc1.free(); doc2.free() }) it('Counters (1)', () => { const doc1 = create("aaaaaa") @@ -178,8 +160,6 @@ describe('Automerge', () => { doc1.merge(doc2) assert.deepEqual(doc1.materialize("_root"), { number: 10, total: 33 }) - - doc1.free(); doc2.free() }) it('Transactions (1)', () => { const doc = create() @@ -202,8 +182,6 @@ describe('Automerge', () => { assert.deepEqual(doc.get("_root", "key"),"val2") assert.deepEqual(doc.pendingOps(),0) - - doc.free() }) it('Viewing Old Versions of the Document (1)', () => { const doc = create() @@ -220,8 +198,6 @@ describe('Automerge', () => { assert.deepEqual(doc.get("_root","key",heads2), "val2") assert.deepEqual(doc.get("_root","key",heads1), "val1") assert.deepEqual(doc.get("_root","key",[]), undefined) - - doc.free() }) it('Forking And Merging (1)', () => { const doc1 = create() @@ -236,8 +212,6 @@ describe('Automerge', () => { assert.deepEqual(doc1.materialize("_root"), { key1: "val1", key2: "val2", key3: "val3" }) assert.deepEqual(doc2.materialize("_root"), { key1: "val1", key3: "val3" }) - - doc1.free(); doc2.free() }) it('Saving And Loading (1)', () => { const doc1 = create() @@ -270,8 +244,6 @@ describe('Automerge', () => { assert.deepEqual(doc2.materialize("_root"), { key1: "value1", key2: "value2" }) assert.deepEqual(doc3.materialize("_root"), { key1: "value1", key2: "value2" }) assert.deepEqual(doc4.materialize("_root"), { key1: "value1", key2: "value2" }) - - doc1.free(); doc2.free(); doc3.free(); doc4.free() }) //it.skip('Syncing (1)', () => { }) }) diff --git a/crates/automerge-wasm/test/test.ts b/crates/automerge-wasm/test/test.ts index 43feaf2d..7bcde9cb 100644 --- 
a/crates/automerge-wasm/test/test.ts +++ b/crates/automerge-wasm/test/test.ts @@ -31,14 +31,12 @@ describe('Automerge', () => { it('should create, clone and free', () => { const doc1 = create() const doc2 = doc1.clone() - doc1.free() doc2.free() }) it('should be able to start and commit', () => { const doc = create() doc.commit() - doc.free() }) it('getting a nonexistent prop does not throw an error', () => { @@ -46,7 +44,6 @@ describe('Automerge', () => { const root = "_root" const result = doc.getWithType(root, "hello") assert.deepEqual(result, undefined) - doc.free() }) it('should be able to set and get a simple value', () => { @@ -105,8 +102,6 @@ describe('Automerge', () => { result = doc.getWithType(root, "null") assert.deepEqual(result, ["null", null]); - - doc.free() }) it('should be able to use bytes', () => { @@ -117,7 +112,6 @@ describe('Automerge', () => { assert.deepEqual(value1, ["bytes", new Uint8Array([10, 11, 12])]); const value2 = doc.getWithType("_root", "data2") assert.deepEqual(value2, ["bytes", new Uint8Array([13, 14, 15])]); - doc.free() }) it('should be able to make subobjects', () => { @@ -134,7 +128,6 @@ describe('Automerge', () => { result = doc.getWithType(submap, "number") assert.deepEqual(result, ["uint", 6]) - doc.free() }) it('should be able to make lists', () => { @@ -157,7 +150,6 @@ describe('Automerge', () => { assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b v2"]) assert.deepEqual(doc.length(sublist), 4) - doc.free() }) it('lists have insert, set, splice, and push ops', () => { @@ -180,8 +172,6 @@ describe('Automerge', () => { assert.deepEqual(doc.materialize(sublist), ["z", "d", "e", "f", "c", new Date(3)]) assert.deepEqual(doc.length(sublist), 6) assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c"] }) - - doc.free() }) it('should be able delete non-existent props', () => { @@ -200,7 +190,6 @@ describe('Automerge', () => { assert.deepEqual(doc.keys("_root"), ["bip"]) assert.deepEqual(doc.keys("_root", 
[hash1]), ["bip", "foo"]) assert.deepEqual(doc.keys("_root", [hash2]), ["bip"]) - doc.free() }) it('should be able to del', () => { @@ -211,7 +200,6 @@ describe('Automerge', () => { assert.deepEqual(doc.getWithType(root, "xxx"), ["str", "xxx"]) doc.delete(root, "xxx"); assert.deepEqual(doc.getWithType(root, "xxx"), undefined) - doc.free() }) it('should be able to use counters', () => { @@ -224,7 +212,6 @@ describe('Automerge', () => { assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 20]) doc.increment(root, "counter", -5); assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 15]) - doc.free() }) it('should be able to splice text', () => { @@ -241,7 +228,6 @@ describe('Automerge', () => { assert.deepEqual(doc.getWithType(text, 10), ["str", "d"]) assert.deepEqual(doc.getWithType(text, 11), ["str", "!"]) assert.deepEqual(doc.getWithType(text, 12), ["str", "?"]) - doc.free() }) it('should be able to insert objects into text', () => { @@ -283,10 +269,6 @@ describe('Automerge', () => { assert.deepEqual(docA.keys("_root"), docB.keys("_root")); assert.deepEqual(docA.save(), docB.save()); assert.deepEqual(docA.save(), docC.save()); - doc.free() - docA.free() - docB.free() - docC.free() }) it('should be able to splice text', () => { @@ -302,7 +284,6 @@ describe('Automerge', () => { assert.strictEqual(doc.length(text, [hash1]), 11) assert.strictEqual(doc.text(text, [hash2]), "hello big bad world") assert.strictEqual(doc.length(text, [hash2]), 19) - doc.free() }) it('local inc increments all visible counters in a map', () => { @@ -332,10 +313,6 @@ describe('Automerge', () => { const save1 = doc1.save() const doc4 = load(save1) assert.deepEqual(doc4.save(), save1); - doc1.free() - doc2.free() - doc3.free() - doc4.free() }) it('local inc increments all visible counters in a sequence', () => { @@ -366,10 +343,6 @@ describe('Automerge', () => { const save = doc1.save() const doc4 = load(save) assert.deepEqual(doc4.save(), save); - doc1.free() - doc2.free() 
- doc3.free() - doc4.free() }) it('paths can be used instead of objids', () => { @@ -411,7 +384,6 @@ describe('Automerge', () => { assert.deepEqual(doc.materialize(l2), { zip: ["a", "b"] }) assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]]) assert.deepEqual(doc.materialize(l4), new String("hello world")) - doc.free() }) it('only returns an object id when objects are created', () => { @@ -434,7 +406,6 @@ describe('Automerge', () => { assert.deepEqual(r7, "7@aaaa"); assert.deepEqual(r8, null); //assert.deepEqual(r9,["12@aaaa","13@aaaa"]); - doc.free() }) it('objects without properties are preserved', () => { @@ -452,8 +423,6 @@ describe('Automerge', () => { assert.deepEqual(doc2.getWithType("_root", "c"), ["map", c]) assert.deepEqual(doc2.keys(c), ["d"]) assert.deepEqual(doc2.getWithType(c, "d"), ["str", "dd"]) - doc1.free() - doc2.free() }) it('should allow you to forkAt a heads', () => { @@ -505,8 +474,6 @@ describe('Automerge', () => { assert.deepEqual(doc2.popPatches(), [ { action: 'put', path: ['hello'], value: 'world', conflict: false } ]) - doc1.free() - doc2.free() }) it('should include nested object creation', () => { @@ -519,8 +486,6 @@ describe('Automerge', () => { { action: 'put', path: [ 'birds', 'friday' ], value: {}, conflict: false }, { action: 'put', path: [ 'birds', 'friday', 'robins' ], value: 3, conflict: false}, ]) - doc1.free() - doc2.free() }) it('should delete map keys', () => { @@ -534,8 +499,6 @@ describe('Automerge', () => { { action: 'put', path: [ 'favouriteBird' ], value: 'Robin', conflict: false }, { action: 'del', path: [ 'favouriteBird' ] } ]) - doc1.free() - doc2.free() }) it('should include list element insertion', () => { @@ -547,8 +510,6 @@ describe('Automerge', () => { { action: 'put', path: [ 'birds' ], value: [], conflict: false }, { action: 'splice', path: [ 'birds', 0 ], values: ['Goldfinch', 'Chaffinch'] }, ]) - doc1.free() - doc2.free() }) it('should insert nested maps into a list', () 
=> { @@ -563,8 +524,6 @@ describe('Automerge', () => { { action: 'put', path: [ 'birds', 0, 'species' ], value: 'Goldfinch', conflict: false }, { action: 'put', path: [ 'birds', 0, 'count', ], value: 3, conflict: false } ]) - doc1.free() - doc2.free() }) it('should calculate list indexes based on visible elements', () => { @@ -581,8 +540,6 @@ describe('Automerge', () => { { action: 'del', path: ['birds', 0] }, { action: 'splice', path: ['birds', 1], values: ['Greenfinch'] } ]) - doc1.free() - doc2.free() }) it('should handle concurrent insertions at the head of a list', () => { @@ -610,7 +567,6 @@ describe('Automerge', () => { assert.deepEqual(doc4.popPatches(), [ { action: 'splice', path: ['values',0], values:['a','b','c','d'] }, ]) - doc1.free(); doc2.free(); doc3.free(); doc4.free() }) it('should handle concurrent insertions beyond the head', () => { @@ -638,7 +594,6 @@ describe('Automerge', () => { assert.deepEqual(doc4.popPatches(), [ { action: 'splice', path: ['values', 2], values: ['c','d','e','f'] }, ]) - doc1.free(); doc2.free(); doc3.free(); doc4.free() }) it('should handle conflicts on root object keys', () => { @@ -662,7 +617,6 @@ describe('Automerge', () => { { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false }, { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, ]) - doc1.free(); doc2.free(); doc3.free(); doc4.free() }) it('should handle three-way conflicts', () => { @@ -701,7 +655,6 @@ describe('Automerge', () => { { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true } ]) - doc1.free(); doc2.free(); doc3.free() }) it('should allow a conflict to be resolved', () => { @@ -720,7 +673,6 @@ describe('Automerge', () => { { action: 'put', path: ['bird'], value: 'Chaffinch', conflict: true }, { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false } ]) - doc1.free(); doc2.free(); doc3.free() }) it('should handle a concurrent 
map key overwrite and delete', () => { @@ -744,7 +696,6 @@ describe('Automerge', () => { assert.deepEqual(doc2.popPatches(), [ { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false } ]) - doc1.free(); doc2.free() }) it('should handle a conflict on a list element', () => { @@ -773,7 +724,6 @@ describe('Automerge', () => { { action: 'put', path: ['birds',0], value: 'Redwing', conflict: false }, { action: 'put', path: ['birds',0], value: 'Redwing', conflict: true } ]) - doc1.free(); doc2.free(); doc3.free(); doc4.free() }) it('should handle a concurrent list element overwrite and delete', () => { @@ -808,7 +758,6 @@ describe('Automerge', () => { { action: 'put', path: ['birds',0], value: 'Ring-necked parakeet', conflict: false }, { action: 'put', path: ['birds',2], value: 'Redwing', conflict: true } ]) - doc1.free(); doc2.free(); doc3.free(); doc4.free() }) it('should handle deletion of a conflict value', () => { @@ -832,7 +781,6 @@ describe('Automerge', () => { assert.deepEqual(doc3.popPatches(), [ { action: 'put', path: ['bird'], value: 'Robin', conflict: false } ]) - doc1.free(); doc2.free(); doc3.free() }) it('should handle conflicting nested objects', () => { @@ -854,7 +802,6 @@ describe('Automerge', () => { { action: 'put', path: ['birds'], value: {}, conflict: true }, { action: 'splice', path: ['birds',0], values: ['Parakeet'] } ]) - doc1.free(); doc2.free() }) it('should support date objects', () => { @@ -866,7 +813,6 @@ describe('Automerge', () => { assert.deepEqual(doc2.popPatches(), [ { action: 'put', path: ['createdAt'], value: now, conflict: false } ]) - doc1.free(); doc2.free() }) it('should capture local put ops', () => { @@ -885,7 +831,6 @@ describe('Automerge', () => { { action: 'put', path: ['map'], value: {}, conflict: false }, { action: 'put', path: ['list'], value: [], conflict: false }, ]) - doc1.free() }) it('should capture local insert ops', () => { @@ -906,7 +851,6 @@ describe('Automerge', () => { { action: 'splice', path: 
['list', 2], values: [{}] }, { action: 'splice', path: ['list', 2], values: [[]] }, ]) - doc1.free() }) it('should capture local push ops', () => { @@ -921,7 +865,6 @@ describe('Automerge', () => { { action: 'put', path: ['list'], value: [], conflict: false }, { action: 'splice', path: ['list',0], values: [1,{},[]] }, ]) - doc1.free() }) it('should capture local splice ops', () => { @@ -937,7 +880,6 @@ describe('Automerge', () => { { action: 'del', path: ['list',1] }, { action: 'del', path: ['list',1] }, ]) - doc1.free() }) it('should capture local increment ops', () => { @@ -950,7 +892,6 @@ describe('Automerge', () => { { action: 'put', path: ['counter'], value: 2, conflict: false }, { action: 'inc', path: ['counter'], value: 4 }, ]) - doc1.free() }) @@ -967,7 +908,6 @@ describe('Automerge', () => { { action: 'del', path: ['key1'], }, { action: 'del', path: ['key2'], }, ]) - doc1.free() }) it('should support counters in a map', () => { @@ -982,7 +922,6 @@ describe('Automerge', () => { { action: 'put', path: ['starlings'], value: 2, conflict: false }, { action: 'inc', path: ['starlings'], value: 1 } ]) - doc1.free(); doc2.free() }) it('should support counters in a list', () => { @@ -1003,7 +942,6 @@ describe('Automerge', () => { { action: 'inc', path: ['list',0], value: 2 }, { action: 'inc', path: ['list',0], value: -5 }, ]) - doc1.free(); doc2.free() }) it('should delete a counter from a map') // TODO @@ -1554,7 +1492,6 @@ describe('Automerge', () => { const n2up = n2.clone('89abcdef'); n2up.put("_root", "x", `${i} @ n2`); n2up.commit("", 0) if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { - n1.free(); n2.free() n1 = n1up; n2 = n2up; break } } @@ -1603,7 +1540,6 @@ describe('Automerge', () => { n1hash2 = n1us2.getHeads()[0]; n2hash2 = n2us2.getHeads()[0] if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) { - n1.free(); n2.free() n1 = n1us2; n2 = n2us2; break } } @@ -1696,7 +1632,6 @@ describe('Automerge', () => { n1hash3 = 
n1us3.getHeads()[0]; n2hash3 = n2us3.getHeads()[0] if (new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2)) { - n1.free(); n2.free(); n1 = n1us3; n2 = n2us3; break } } diff --git a/wrappers/javascript/src/index.ts b/wrappers/javascript/src/index.ts index 0c9041e5..f2ebea2c 100644 --- a/wrappers/javascript/src/index.ts +++ b/wrappers/javascript/src/index.ts @@ -379,11 +379,17 @@ export function equals(val1: unknown, val2: unknown) : boolean { } export function encodeSyncState(state: SyncState) : Uint8Array { - return ApiHandler.encodeSyncState(ApiHandler.importSyncState(state)) + const sync = ApiHandler.importSyncState(state) + const result = ApiHandler.encodeSyncState(sync) + sync.free() + return result } export function decodeSyncState(state: Uint8Array) : SyncState { - return ApiHandler.exportSyncState(ApiHandler.decodeSyncState(state)) + let sync = ApiHandler.decodeSyncState(state) + let result = ApiHandler.exportSyncState(sync) + sync.free() + return result } export function generateSyncMessage(doc: Doc, inState: SyncState) : [ SyncState, SyncMessage | null ] { From dd3c6d13039489f197ae72440b949f73dab2e9d5 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 13 Oct 2022 22:00:16 +0100 Subject: [PATCH 603/730] Move rust workspace into ./rust After some discussion with PVH I realise that the repo structure in the last reorg was very rust-centric. 
In an attempt to put each language on a level footing move the rust code and project files into ./rust --- crates/.gitignore | 1 - rust/.gitignore | 6 ++++++ Cargo.toml => rust/Cargo.toml | 10 +++++----- {crates => rust}/automerge-c/.gitignore | 0 {crates => rust}/automerge-c/CMakeLists.txt | 0 {crates => rust}/automerge-c/Cargo.toml | 0 {crates => rust}/automerge-c/README.md | 0 {crates => rust}/automerge-c/build.rs | 0 {crates => rust}/automerge-c/cbindgen.toml | 0 .../automerge-c/cmake/automerge-c-config.cmake.in | 0 {crates => rust}/automerge-c/cmake/config.h.in | 0 .../automerge-c/cmake/file_regex_replace.cmake | 0 {crates => rust}/automerge-c/cmake/file_touch.cmake | 0 .../automerge-c/examples/CMakeLists.txt | 0 {crates => rust}/automerge-c/examples/README.md | 0 {crates => rust}/automerge-c/examples/quickstart.c | 0 {crates => rust}/automerge-c/img/brandmark.png | Bin {crates => rust}/automerge-c/src/CMakeLists.txt | 0 {crates => rust}/automerge-c/src/actor_id.rs | 0 {crates => rust}/automerge-c/src/byte_span.rs | 0 {crates => rust}/automerge-c/src/change.rs | 0 {crates => rust}/automerge-c/src/change_hashes.rs | 0 {crates => rust}/automerge-c/src/changes.rs | 0 {crates => rust}/automerge-c/src/doc.rs | 0 {crates => rust}/automerge-c/src/doc/list.rs | 0 {crates => rust}/automerge-c/src/doc/list/item.rs | 0 {crates => rust}/automerge-c/src/doc/list/items.rs | 0 {crates => rust}/automerge-c/src/doc/map.rs | 0 {crates => rust}/automerge-c/src/doc/map/item.rs | 0 {crates => rust}/automerge-c/src/doc/map/items.rs | 0 {crates => rust}/automerge-c/src/doc/utils.rs | 0 {crates => rust}/automerge-c/src/lib.rs | 0 {crates => rust}/automerge-c/src/obj.rs | 0 {crates => rust}/automerge-c/src/obj/item.rs | 0 {crates => rust}/automerge-c/src/obj/items.rs | 0 {crates => rust}/automerge-c/src/result.rs | 0 {crates => rust}/automerge-c/src/result_stack.rs | 0 {crates => rust}/automerge-c/src/strs.rs | 0 {crates => rust}/automerge-c/src/sync.rs | 0 {crates => 
rust}/automerge-c/src/sync/have.rs | 0 {crates => rust}/automerge-c/src/sync/haves.rs | 0 {crates => rust}/automerge-c/src/sync/message.rs | 0 {crates => rust}/automerge-c/src/sync/state.rs | 0 {crates => rust}/automerge-c/test/CMakeLists.txt | 0 {crates => rust}/automerge-c/test/actor_id_tests.c | 0 {crates => rust}/automerge-c/test/doc_tests.c | 0 {crates => rust}/automerge-c/test/group_state.c | 0 {crates => rust}/automerge-c/test/group_state.h | 0 {crates => rust}/automerge-c/test/list_tests.c | 0 {crates => rust}/automerge-c/test/macro_utils.c | 0 {crates => rust}/automerge-c/test/macro_utils.h | 0 {crates => rust}/automerge-c/test/main.c | 0 {crates => rust}/automerge-c/test/map_tests.c | 0 .../automerge-c/test/ported_wasm/basic_tests.c | 0 .../automerge-c/test/ported_wasm/suite.c | 0 .../automerge-c/test/ported_wasm/sync_tests.c | 0 {crates => rust}/automerge-c/test/stack_utils.c | 0 {crates => rust}/automerge-c/test/stack_utils.h | 0 {crates => rust}/automerge-c/test/str_utils.c | 0 {crates => rust}/automerge-c/test/str_utils.h | 0 {crates => rust}/automerge-cli/.gitignore | 0 {crates => rust}/automerge-cli/Cargo.lock | 0 {crates => rust}/automerge-cli/Cargo.toml | 0 {crates => rust}/automerge-cli/IDEAS.md | 0 {crates => rust}/automerge-cli/src/change.rs | 0 {crates => rust}/automerge-cli/src/examine.rs | 0 {crates => rust}/automerge-cli/src/export.rs | 0 {crates => rust}/automerge-cli/src/import.rs | 0 {crates => rust}/automerge-cli/src/main.rs | 0 {crates => rust}/automerge-cli/src/merge.rs | 0 {crates => rust}/automerge-cli/tests/integration.rs | 0 {crates => rust}/automerge-wasm/.eslintignore | 0 {crates => rust}/automerge-wasm/.eslintrc.cjs | 0 {crates => rust}/automerge-wasm/.gitignore | 0 {crates => rust}/automerge-wasm/Cargo.toml | 0 {crates => rust}/automerge-wasm/LICENSE | 0 {crates => rust}/automerge-wasm/README.md | 0 .../automerge-wasm/examples/cra/.gitignore | 0 .../automerge-wasm/examples/cra/README.md | 0 
.../automerge-wasm/examples/cra/package.json | 0 .../automerge-wasm/examples/cra/public/favicon.ico | Bin .../automerge-wasm/examples/cra/public/index.html | 0 .../automerge-wasm/examples/cra/public/logo192.png | Bin .../automerge-wasm/examples/cra/public/logo512.png | Bin .../examples/cra/public/manifest.json | 0 .../automerge-wasm/examples/cra/public/robots.txt | 0 .../automerge-wasm/examples/cra/src/App.css | 0 .../automerge-wasm/examples/cra/src/App.test.tsx | 0 .../automerge-wasm/examples/cra/src/App.tsx | 0 .../automerge-wasm/examples/cra/src/index.css | 0 .../automerge-wasm/examples/cra/src/index.tsx | 0 .../automerge-wasm/examples/cra/src/logo.svg | 0 .../examples/cra/src/react-app-env.d.ts | 0 .../examples/cra/src/reportWebVitals.ts | 0 .../automerge-wasm/examples/cra/src/setupTests.ts | 0 .../automerge-wasm/examples/cra/tsconfig.json | 0 .../automerge-wasm/examples/webpack/.gitignore | 0 .../automerge-wasm/examples/webpack/package.json | 0 .../examples/webpack/public/index.html | 0 .../automerge-wasm/examples/webpack/src/index.js | 0 .../examples/webpack/webpack.config.js | 0 {crates => rust}/automerge-wasm/index.d.ts | 0 {crates => rust}/automerge-wasm/package.json | 0 {crates => rust}/automerge-wasm/src/interop.rs | 0 {crates => rust}/automerge-wasm/src/lib.rs | 0 {crates => rust}/automerge-wasm/src/observer.rs | 0 {crates => rust}/automerge-wasm/src/sync.rs | 0 {crates => rust}/automerge-wasm/src/value.rs | 0 {crates => rust}/automerge-wasm/test/apply.ts | 0 .../automerge-wasm/test/helpers/columnar.js | 0 .../automerge-wasm/test/helpers/common.js | 0 .../automerge-wasm/test/helpers/encoding.js | 0 .../automerge-wasm/test/helpers/sync.js | 0 {crates => rust}/automerge-wasm/test/readme.ts | 0 {crates => rust}/automerge-wasm/test/test.ts | 0 {crates => rust}/automerge-wasm/tsconfig.json | 0 {crates => rust}/automerge/.gitignore | 0 {crates => rust}/automerge/Cargo.toml | 0 {crates => rust}/automerge/benches/map.rs | 0 {crates => 
rust}/automerge/benches/range.rs | 0 {crates => rust}/automerge/benches/sync.rs | 0 {crates => rust}/automerge/examples/README.md | 0 {crates => rust}/automerge/examples/quickstart.rs | 0 {crates => rust}/automerge/examples/watch.rs | 0 {crates => rust}/automerge/src/autocommit.rs | 0 {crates => rust}/automerge/src/automerge.rs | 0 {crates => rust}/automerge/src/automerge/tests.rs | 0 {crates => rust}/automerge/src/autoserde.rs | 0 {crates => rust}/automerge/src/change.rs | 0 {crates => rust}/automerge/src/clock.rs | 0 {crates => rust}/automerge/src/clocks.rs | 0 {crates => rust}/automerge/src/columnar.rs | 0 .../automerge/src/columnar/column_range.rs | 0 .../automerge/src/columnar/column_range/boolean.rs | 0 .../automerge/src/columnar/column_range/delta.rs | 0 .../automerge/src/columnar/column_range/deps.rs | 0 .../automerge/src/columnar/column_range/generic.rs | 0 .../src/columnar/column_range/generic/group.rs | 0 .../src/columnar/column_range/generic/simple.rs | 0 .../automerge/src/columnar/column_range/key.rs | 0 .../automerge/src/columnar/column_range/obj_id.rs | 0 .../automerge/src/columnar/column_range/opid.rs | 0 .../src/columnar/column_range/opid_list.rs | 0 .../automerge/src/columnar/column_range/raw.rs | 0 .../automerge/src/columnar/column_range/rle.rs | 0 .../automerge/src/columnar/column_range/value.rs | 0 {crates => rust}/automerge/src/columnar/encoding.rs | 0 .../automerge/src/columnar/encoding/boolean.rs | 0 .../automerge/src/columnar/encoding/col_error.rs | 0 .../src/columnar/encoding/column_decoder.rs | 0 .../src/columnar/encoding/decodable_impls.rs | 0 .../automerge/src/columnar/encoding/delta.rs | 0 .../src/columnar/encoding/encodable_impls.rs | 0 .../automerge/src/columnar/encoding/leb128.rs | 0 .../automerge/src/columnar/encoding/properties.rs | 0 .../automerge/src/columnar/encoding/raw.rs | 0 .../automerge/src/columnar/encoding/rle.rs | 0 .../automerge/src/columnar/splice_error.rs | 0 {crates => rust}/automerge/src/convert.rs | 0 {crates => 
rust}/automerge/src/decoding.rs | 0 {crates => rust}/automerge/src/error.rs | 0 {crates => rust}/automerge/src/exid.rs | 0 {crates => rust}/automerge/src/indexed_cache.rs | 0 {crates => rust}/automerge/src/keys.rs | 0 {crates => rust}/automerge/src/keys_at.rs | 0 {crates => rust}/automerge/src/legacy/mod.rs | 0 .../automerge/src/legacy/serde_impls/actor_id.rs | 0 .../automerge/src/legacy/serde_impls/change_hash.rs | 0 .../automerge/src/legacy/serde_impls/element_id.rs | 0 .../automerge/src/legacy/serde_impls/mod.rs | 0 .../automerge/src/legacy/serde_impls/object_id.rs | 0 .../automerge/src/legacy/serde_impls/op.rs | 0 .../automerge/src/legacy/serde_impls/op_type.rs | 0 .../automerge/src/legacy/serde_impls/opid.rs | 0 .../src/legacy/serde_impls/scalar_value.rs | 0 .../src/legacy/utility_impls/element_id.rs | 0 .../automerge/src/legacy/utility_impls/key.rs | 0 .../automerge/src/legacy/utility_impls/mod.rs | 0 .../automerge/src/legacy/utility_impls/object_id.rs | 0 .../automerge/src/legacy/utility_impls/opid.rs | 0 {crates => rust}/automerge/src/lib.rs | 0 {crates => rust}/automerge/src/list_range.rs | 0 {crates => rust}/automerge/src/list_range_at.rs | 0 {crates => rust}/automerge/src/map_range.rs | 0 {crates => rust}/automerge/src/map_range_at.rs | 0 {crates => rust}/automerge/src/op_observer.rs | 0 {crates => rust}/automerge/src/op_set.rs | 0 {crates => rust}/automerge/src/op_set/load.rs | 0 {crates => rust}/automerge/src/op_tree.rs | 0 {crates => rust}/automerge/src/op_tree/iter.rs | 0 {crates => rust}/automerge/src/parents.rs | 0 {crates => rust}/automerge/src/query.rs | 0 {crates => rust}/automerge/src/query/elem_id_pos.rs | 0 {crates => rust}/automerge/src/query/insert.rs | 0 {crates => rust}/automerge/src/query/keys.rs | 0 {crates => rust}/automerge/src/query/keys_at.rs | 0 {crates => rust}/automerge/src/query/len.rs | 0 {crates => rust}/automerge/src/query/len_at.rs | 0 {crates => rust}/automerge/src/query/list_range.rs | 0 
.../automerge/src/query/list_range_at.rs | 0 {crates => rust}/automerge/src/query/list_vals.rs | 0 .../automerge/src/query/list_vals_at.rs | 0 {crates => rust}/automerge/src/query/map_range.rs | 0 .../automerge/src/query/map_range_at.rs | 0 {crates => rust}/automerge/src/query/nth.rs | 0 {crates => rust}/automerge/src/query/nth_at.rs | 0 {crates => rust}/automerge/src/query/opid.rs | 0 {crates => rust}/automerge/src/query/prop.rs | 0 {crates => rust}/automerge/src/query/prop_at.rs | 0 {crates => rust}/automerge/src/query/seek_op.rs | 0 .../automerge/src/query/seek_op_with_patch.rs | 0 {crates => rust}/automerge/src/sequence_tree.rs | 0 {crates => rust}/automerge/src/storage.rs | 0 {crates => rust}/automerge/src/storage/change.rs | 0 .../automerge/src/storage/change/change_actors.rs | 0 .../src/storage/change/change_op_columns.rs | 0 .../automerge/src/storage/change/compressed.rs | 0 .../src/storage/change/op_with_change_actors.rs | 0 {crates => rust}/automerge/src/storage/chunk.rs | 0 {crates => rust}/automerge/src/storage/columns.rs | 0 .../automerge/src/storage/columns/column.rs | 0 .../automerge/src/storage/columns/column_builder.rs | 0 .../src/storage/columns/column_specification.rs | 0 .../automerge/src/storage/columns/raw_column.rs | 0 {crates => rust}/automerge/src/storage/convert.rs | 0 .../automerge/src/storage/convert/op_as_changeop.rs | 0 .../automerge/src/storage/convert/op_as_docop.rs | 0 {crates => rust}/automerge/src/storage/document.rs | 0 .../automerge/src/storage/document/compression.rs | 0 .../src/storage/document/doc_change_columns.rs | 0 .../src/storage/document/doc_op_columns.rs | 0 {crates => rust}/automerge/src/storage/load.rs | 0 .../automerge/src/storage/load/change_collector.rs | 0 .../src/storage/load/reconstruct_document.rs | 0 {crates => rust}/automerge/src/storage/parse.rs | 0 .../automerge/src/storage/parse/leb128.rs | 0 {crates => rust}/automerge/src/storage/save.rs | 0 .../automerge/src/storage/save/document.rs | 0 {crates => 
rust}/automerge/src/sync.rs | 0 {crates => rust}/automerge/src/sync/bloom.rs | 0 {crates => rust}/automerge/src/sync/state.rs | 0 {crates => rust}/automerge/src/transaction.rs | 0 .../automerge/src/transaction/commit.rs | 0 {crates => rust}/automerge/src/transaction/inner.rs | 0 .../automerge/src/transaction/manual_transaction.rs | 0 .../automerge/src/transaction/observation.rs | 0 .../automerge/src/transaction/result.rs | 0 .../automerge/src/transaction/transactable.rs | 0 {crates => rust}/automerge/src/types.rs | 0 {crates => rust}/automerge/src/types/opids.rs | 0 {crates => rust}/automerge/src/value.rs | 0 {crates => rust}/automerge/src/values.rs | 0 {crates => rust}/automerge/src/visualisation.rs | 0 {crates => rust}/automerge/tests/helpers/mod.rs | 0 {crates => rust}/automerge/tests/test.rs | 0 deny.toml => rust/deny.toml | 0 {crates => rust}/edit-trace/.gitignore | 0 {crates => rust}/edit-trace/Cargo.toml | 0 {crates => rust}/edit-trace/Makefile | 0 {crates => rust}/edit-trace/README.md | 0 {crates => rust}/edit-trace/automerge-1.0.js | 0 {crates => rust}/edit-trace/automerge-js.js | 0 {crates => rust}/edit-trace/automerge-rs.js | 0 {crates => rust}/edit-trace/automerge-wasm.js | 0 {crates => rust}/edit-trace/baseline.js | 0 {crates => rust}/edit-trace/benches/main.rs | 0 {crates => rust}/edit-trace/editing-trace.js | 0 {crates => rust}/edit-trace/edits.json | 0 {crates => rust}/edit-trace/package.json | 0 {crates => rust}/edit-trace/src/main.rs | 0 scripts/ci/advisory | 1 + scripts/ci/build-test | 1 + scripts/ci/cmake-build | 2 +- scripts/ci/cmake-docs | 2 +- scripts/ci/fmt | 1 + scripts/ci/js_tests | 2 +- scripts/ci/lint | 1 + scripts/ci/rust-docs | 1 + scripts/ci/wasm_tests | 2 +- wrappers/javascript/e2e/index.ts | 2 +- 280 files changed, 21 insertions(+), 11 deletions(-) delete mode 100644 crates/.gitignore create mode 100644 rust/.gitignore rename Cargo.toml => rust/Cargo.toml (60%) rename {crates => rust}/automerge-c/.gitignore (100%) rename {crates => 
rust}/automerge-c/CMakeLists.txt (100%) rename {crates => rust}/automerge-c/Cargo.toml (100%) rename {crates => rust}/automerge-c/README.md (100%) rename {crates => rust}/automerge-c/build.rs (100%) rename {crates => rust}/automerge-c/cbindgen.toml (100%) rename {crates => rust}/automerge-c/cmake/automerge-c-config.cmake.in (100%) rename {crates => rust}/automerge-c/cmake/config.h.in (100%) rename {crates => rust}/automerge-c/cmake/file_regex_replace.cmake (100%) rename {crates => rust}/automerge-c/cmake/file_touch.cmake (100%) rename {crates => rust}/automerge-c/examples/CMakeLists.txt (100%) rename {crates => rust}/automerge-c/examples/README.md (100%) rename {crates => rust}/automerge-c/examples/quickstart.c (100%) rename {crates => rust}/automerge-c/img/brandmark.png (100%) rename {crates => rust}/automerge-c/src/CMakeLists.txt (100%) rename {crates => rust}/automerge-c/src/actor_id.rs (100%) rename {crates => rust}/automerge-c/src/byte_span.rs (100%) rename {crates => rust}/automerge-c/src/change.rs (100%) rename {crates => rust}/automerge-c/src/change_hashes.rs (100%) rename {crates => rust}/automerge-c/src/changes.rs (100%) rename {crates => rust}/automerge-c/src/doc.rs (100%) rename {crates => rust}/automerge-c/src/doc/list.rs (100%) rename {crates => rust}/automerge-c/src/doc/list/item.rs (100%) rename {crates => rust}/automerge-c/src/doc/list/items.rs (100%) rename {crates => rust}/automerge-c/src/doc/map.rs (100%) rename {crates => rust}/automerge-c/src/doc/map/item.rs (100%) rename {crates => rust}/automerge-c/src/doc/map/items.rs (100%) rename {crates => rust}/automerge-c/src/doc/utils.rs (100%) rename {crates => rust}/automerge-c/src/lib.rs (100%) rename {crates => rust}/automerge-c/src/obj.rs (100%) rename {crates => rust}/automerge-c/src/obj/item.rs (100%) rename {crates => rust}/automerge-c/src/obj/items.rs (100%) rename {crates => rust}/automerge-c/src/result.rs (100%) rename {crates => rust}/automerge-c/src/result_stack.rs (100%) rename {crates 
=> rust}/automerge-c/src/strs.rs (100%) rename {crates => rust}/automerge-c/src/sync.rs (100%) rename {crates => rust}/automerge-c/src/sync/have.rs (100%) rename {crates => rust}/automerge-c/src/sync/haves.rs (100%) rename {crates => rust}/automerge-c/src/sync/message.rs (100%) rename {crates => rust}/automerge-c/src/sync/state.rs (100%) rename {crates => rust}/automerge-c/test/CMakeLists.txt (100%) rename {crates => rust}/automerge-c/test/actor_id_tests.c (100%) rename {crates => rust}/automerge-c/test/doc_tests.c (100%) rename {crates => rust}/automerge-c/test/group_state.c (100%) rename {crates => rust}/automerge-c/test/group_state.h (100%) rename {crates => rust}/automerge-c/test/list_tests.c (100%) rename {crates => rust}/automerge-c/test/macro_utils.c (100%) rename {crates => rust}/automerge-c/test/macro_utils.h (100%) rename {crates => rust}/automerge-c/test/main.c (100%) rename {crates => rust}/automerge-c/test/map_tests.c (100%) rename {crates => rust}/automerge-c/test/ported_wasm/basic_tests.c (100%) rename {crates => rust}/automerge-c/test/ported_wasm/suite.c (100%) rename {crates => rust}/automerge-c/test/ported_wasm/sync_tests.c (100%) rename {crates => rust}/automerge-c/test/stack_utils.c (100%) rename {crates => rust}/automerge-c/test/stack_utils.h (100%) rename {crates => rust}/automerge-c/test/str_utils.c (100%) rename {crates => rust}/automerge-c/test/str_utils.h (100%) rename {crates => rust}/automerge-cli/.gitignore (100%) rename {crates => rust}/automerge-cli/Cargo.lock (100%) rename {crates => rust}/automerge-cli/Cargo.toml (100%) rename {crates => rust}/automerge-cli/IDEAS.md (100%) rename {crates => rust}/automerge-cli/src/change.rs (100%) rename {crates => rust}/automerge-cli/src/examine.rs (100%) rename {crates => rust}/automerge-cli/src/export.rs (100%) rename {crates => rust}/automerge-cli/src/import.rs (100%) rename {crates => rust}/automerge-cli/src/main.rs (100%) rename {crates => rust}/automerge-cli/src/merge.rs (100%) rename {crates 
=> rust}/automerge-cli/tests/integration.rs (100%) rename {crates => rust}/automerge-wasm/.eslintignore (100%) rename {crates => rust}/automerge-wasm/.eslintrc.cjs (100%) rename {crates => rust}/automerge-wasm/.gitignore (100%) rename {crates => rust}/automerge-wasm/Cargo.toml (100%) rename {crates => rust}/automerge-wasm/LICENSE (100%) rename {crates => rust}/automerge-wasm/README.md (100%) rename {crates => rust}/automerge-wasm/examples/cra/.gitignore (100%) rename {crates => rust}/automerge-wasm/examples/cra/README.md (100%) rename {crates => rust}/automerge-wasm/examples/cra/package.json (100%) rename {crates => rust}/automerge-wasm/examples/cra/public/favicon.ico (100%) rename {crates => rust}/automerge-wasm/examples/cra/public/index.html (100%) rename {crates => rust}/automerge-wasm/examples/cra/public/logo192.png (100%) rename {crates => rust}/automerge-wasm/examples/cra/public/logo512.png (100%) rename {crates => rust}/automerge-wasm/examples/cra/public/manifest.json (100%) rename {crates => rust}/automerge-wasm/examples/cra/public/robots.txt (100%) rename {crates => rust}/automerge-wasm/examples/cra/src/App.css (100%) rename {crates => rust}/automerge-wasm/examples/cra/src/App.test.tsx (100%) rename {crates => rust}/automerge-wasm/examples/cra/src/App.tsx (100%) rename {crates => rust}/automerge-wasm/examples/cra/src/index.css (100%) rename {crates => rust}/automerge-wasm/examples/cra/src/index.tsx (100%) rename {crates => rust}/automerge-wasm/examples/cra/src/logo.svg (100%) rename {crates => rust}/automerge-wasm/examples/cra/src/react-app-env.d.ts (100%) rename {crates => rust}/automerge-wasm/examples/cra/src/reportWebVitals.ts (100%) rename {crates => rust}/automerge-wasm/examples/cra/src/setupTests.ts (100%) rename {crates => rust}/automerge-wasm/examples/cra/tsconfig.json (100%) rename {crates => rust}/automerge-wasm/examples/webpack/.gitignore (100%) rename {crates => rust}/automerge-wasm/examples/webpack/package.json (100%) rename {crates => 
rust}/automerge-wasm/examples/webpack/public/index.html (100%) rename {crates => rust}/automerge-wasm/examples/webpack/src/index.js (100%) rename {crates => rust}/automerge-wasm/examples/webpack/webpack.config.js (100%) rename {crates => rust}/automerge-wasm/index.d.ts (100%) rename {crates => rust}/automerge-wasm/package.json (100%) rename {crates => rust}/automerge-wasm/src/interop.rs (100%) rename {crates => rust}/automerge-wasm/src/lib.rs (100%) rename {crates => rust}/automerge-wasm/src/observer.rs (100%) rename {crates => rust}/automerge-wasm/src/sync.rs (100%) rename {crates => rust}/automerge-wasm/src/value.rs (100%) rename {crates => rust}/automerge-wasm/test/apply.ts (100%) rename {crates => rust}/automerge-wasm/test/helpers/columnar.js (100%) rename {crates => rust}/automerge-wasm/test/helpers/common.js (100%) rename {crates => rust}/automerge-wasm/test/helpers/encoding.js (100%) rename {crates => rust}/automerge-wasm/test/helpers/sync.js (100%) rename {crates => rust}/automerge-wasm/test/readme.ts (100%) rename {crates => rust}/automerge-wasm/test/test.ts (100%) rename {crates => rust}/automerge-wasm/tsconfig.json (100%) rename {crates => rust}/automerge/.gitignore (100%) rename {crates => rust}/automerge/Cargo.toml (100%) rename {crates => rust}/automerge/benches/map.rs (100%) rename {crates => rust}/automerge/benches/range.rs (100%) rename {crates => rust}/automerge/benches/sync.rs (100%) rename {crates => rust}/automerge/examples/README.md (100%) rename {crates => rust}/automerge/examples/quickstart.rs (100%) rename {crates => rust}/automerge/examples/watch.rs (100%) rename {crates => rust}/automerge/src/autocommit.rs (100%) rename {crates => rust}/automerge/src/automerge.rs (100%) rename {crates => rust}/automerge/src/automerge/tests.rs (100%) rename {crates => rust}/automerge/src/autoserde.rs (100%) rename {crates => rust}/automerge/src/change.rs (100%) rename {crates => rust}/automerge/src/clock.rs (100%) rename {crates => 
rust}/automerge/src/clocks.rs (100%) rename {crates => rust}/automerge/src/columnar.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range/boolean.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range/delta.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range/deps.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range/generic.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range/generic/group.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range/generic/simple.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range/key.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range/obj_id.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range/opid.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range/opid_list.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range/raw.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range/rle.rs (100%) rename {crates => rust}/automerge/src/columnar/column_range/value.rs (100%) rename {crates => rust}/automerge/src/columnar/encoding.rs (100%) rename {crates => rust}/automerge/src/columnar/encoding/boolean.rs (100%) rename {crates => rust}/automerge/src/columnar/encoding/col_error.rs (100%) rename {crates => rust}/automerge/src/columnar/encoding/column_decoder.rs (100%) rename {crates => rust}/automerge/src/columnar/encoding/decodable_impls.rs (100%) rename {crates => rust}/automerge/src/columnar/encoding/delta.rs (100%) rename {crates => rust}/automerge/src/columnar/encoding/encodable_impls.rs (100%) rename {crates => rust}/automerge/src/columnar/encoding/leb128.rs (100%) rename {crates => rust}/automerge/src/columnar/encoding/properties.rs (100%) rename {crates => rust}/automerge/src/columnar/encoding/raw.rs (100%) rename {crates => rust}/automerge/src/columnar/encoding/rle.rs (100%) rename 
{crates => rust}/automerge/src/columnar/splice_error.rs (100%) rename {crates => rust}/automerge/src/convert.rs (100%) rename {crates => rust}/automerge/src/decoding.rs (100%) rename {crates => rust}/automerge/src/error.rs (100%) rename {crates => rust}/automerge/src/exid.rs (100%) rename {crates => rust}/automerge/src/indexed_cache.rs (100%) rename {crates => rust}/automerge/src/keys.rs (100%) rename {crates => rust}/automerge/src/keys_at.rs (100%) rename {crates => rust}/automerge/src/legacy/mod.rs (100%) rename {crates => rust}/automerge/src/legacy/serde_impls/actor_id.rs (100%) rename {crates => rust}/automerge/src/legacy/serde_impls/change_hash.rs (100%) rename {crates => rust}/automerge/src/legacy/serde_impls/element_id.rs (100%) rename {crates => rust}/automerge/src/legacy/serde_impls/mod.rs (100%) rename {crates => rust}/automerge/src/legacy/serde_impls/object_id.rs (100%) rename {crates => rust}/automerge/src/legacy/serde_impls/op.rs (100%) rename {crates => rust}/automerge/src/legacy/serde_impls/op_type.rs (100%) rename {crates => rust}/automerge/src/legacy/serde_impls/opid.rs (100%) rename {crates => rust}/automerge/src/legacy/serde_impls/scalar_value.rs (100%) rename {crates => rust}/automerge/src/legacy/utility_impls/element_id.rs (100%) rename {crates => rust}/automerge/src/legacy/utility_impls/key.rs (100%) rename {crates => rust}/automerge/src/legacy/utility_impls/mod.rs (100%) rename {crates => rust}/automerge/src/legacy/utility_impls/object_id.rs (100%) rename {crates => rust}/automerge/src/legacy/utility_impls/opid.rs (100%) rename {crates => rust}/automerge/src/lib.rs (100%) rename {crates => rust}/automerge/src/list_range.rs (100%) rename {crates => rust}/automerge/src/list_range_at.rs (100%) rename {crates => rust}/automerge/src/map_range.rs (100%) rename {crates => rust}/automerge/src/map_range_at.rs (100%) rename {crates => rust}/automerge/src/op_observer.rs (100%) rename {crates => rust}/automerge/src/op_set.rs (100%) rename {crates => 
rust}/automerge/src/op_set/load.rs (100%) rename {crates => rust}/automerge/src/op_tree.rs (100%) rename {crates => rust}/automerge/src/op_tree/iter.rs (100%) rename {crates => rust}/automerge/src/parents.rs (100%) rename {crates => rust}/automerge/src/query.rs (100%) rename {crates => rust}/automerge/src/query/elem_id_pos.rs (100%) rename {crates => rust}/automerge/src/query/insert.rs (100%) rename {crates => rust}/automerge/src/query/keys.rs (100%) rename {crates => rust}/automerge/src/query/keys_at.rs (100%) rename {crates => rust}/automerge/src/query/len.rs (100%) rename {crates => rust}/automerge/src/query/len_at.rs (100%) rename {crates => rust}/automerge/src/query/list_range.rs (100%) rename {crates => rust}/automerge/src/query/list_range_at.rs (100%) rename {crates => rust}/automerge/src/query/list_vals.rs (100%) rename {crates => rust}/automerge/src/query/list_vals_at.rs (100%) rename {crates => rust}/automerge/src/query/map_range.rs (100%) rename {crates => rust}/automerge/src/query/map_range_at.rs (100%) rename {crates => rust}/automerge/src/query/nth.rs (100%) rename {crates => rust}/automerge/src/query/nth_at.rs (100%) rename {crates => rust}/automerge/src/query/opid.rs (100%) rename {crates => rust}/automerge/src/query/prop.rs (100%) rename {crates => rust}/automerge/src/query/prop_at.rs (100%) rename {crates => rust}/automerge/src/query/seek_op.rs (100%) rename {crates => rust}/automerge/src/query/seek_op_with_patch.rs (100%) rename {crates => rust}/automerge/src/sequence_tree.rs (100%) rename {crates => rust}/automerge/src/storage.rs (100%) rename {crates => rust}/automerge/src/storage/change.rs (100%) rename {crates => rust}/automerge/src/storage/change/change_actors.rs (100%) rename {crates => rust}/automerge/src/storage/change/change_op_columns.rs (100%) rename {crates => rust}/automerge/src/storage/change/compressed.rs (100%) rename {crates => rust}/automerge/src/storage/change/op_with_change_actors.rs (100%) rename {crates => 
rust}/automerge/src/storage/chunk.rs (100%) rename {crates => rust}/automerge/src/storage/columns.rs (100%) rename {crates => rust}/automerge/src/storage/columns/column.rs (100%) rename {crates => rust}/automerge/src/storage/columns/column_builder.rs (100%) rename {crates => rust}/automerge/src/storage/columns/column_specification.rs (100%) rename {crates => rust}/automerge/src/storage/columns/raw_column.rs (100%) rename {crates => rust}/automerge/src/storage/convert.rs (100%) rename {crates => rust}/automerge/src/storage/convert/op_as_changeop.rs (100%) rename {crates => rust}/automerge/src/storage/convert/op_as_docop.rs (100%) rename {crates => rust}/automerge/src/storage/document.rs (100%) rename {crates => rust}/automerge/src/storage/document/compression.rs (100%) rename {crates => rust}/automerge/src/storage/document/doc_change_columns.rs (100%) rename {crates => rust}/automerge/src/storage/document/doc_op_columns.rs (100%) rename {crates => rust}/automerge/src/storage/load.rs (100%) rename {crates => rust}/automerge/src/storage/load/change_collector.rs (100%) rename {crates => rust}/automerge/src/storage/load/reconstruct_document.rs (100%) rename {crates => rust}/automerge/src/storage/parse.rs (100%) rename {crates => rust}/automerge/src/storage/parse/leb128.rs (100%) rename {crates => rust}/automerge/src/storage/save.rs (100%) rename {crates => rust}/automerge/src/storage/save/document.rs (100%) rename {crates => rust}/automerge/src/sync.rs (100%) rename {crates => rust}/automerge/src/sync/bloom.rs (100%) rename {crates => rust}/automerge/src/sync/state.rs (100%) rename {crates => rust}/automerge/src/transaction.rs (100%) rename {crates => rust}/automerge/src/transaction/commit.rs (100%) rename {crates => rust}/automerge/src/transaction/inner.rs (100%) rename {crates => rust}/automerge/src/transaction/manual_transaction.rs (100%) rename {crates => rust}/automerge/src/transaction/observation.rs (100%) rename {crates => 
rust}/automerge/src/transaction/result.rs (100%) rename {crates => rust}/automerge/src/transaction/transactable.rs (100%) rename {crates => rust}/automerge/src/types.rs (100%) rename {crates => rust}/automerge/src/types/opids.rs (100%) rename {crates => rust}/automerge/src/value.rs (100%) rename {crates => rust}/automerge/src/values.rs (100%) rename {crates => rust}/automerge/src/visualisation.rs (100%) rename {crates => rust}/automerge/tests/helpers/mod.rs (100%) rename {crates => rust}/automerge/tests/test.rs (100%) rename deny.toml => rust/deny.toml (100%) rename {crates => rust}/edit-trace/.gitignore (100%) rename {crates => rust}/edit-trace/Cargo.toml (100%) rename {crates => rust}/edit-trace/Makefile (100%) rename {crates => rust}/edit-trace/README.md (100%) rename {crates => rust}/edit-trace/automerge-1.0.js (100%) rename {crates => rust}/edit-trace/automerge-js.js (100%) rename {crates => rust}/edit-trace/automerge-rs.js (100%) rename {crates => rust}/edit-trace/automerge-wasm.js (100%) rename {crates => rust}/edit-trace/baseline.js (100%) rename {crates => rust}/edit-trace/benches/main.rs (100%) rename {crates => rust}/edit-trace/editing-trace.js (100%) rename {crates => rust}/edit-trace/edits.json (100%) rename {crates => rust}/edit-trace/package.json (100%) rename {crates => rust}/edit-trace/src/main.rs (100%) diff --git a/crates/.gitignore b/crates/.gitignore deleted file mode 100644 index 3b12275f..00000000 --- a/crates/.gitignore +++ /dev/null @@ -1 +0,0 @@ -automerge/proptest-regressions/ diff --git a/rust/.gitignore b/rust/.gitignore new file mode 100644 index 00000000..f859e0a3 --- /dev/null +++ b/rust/.gitignore @@ -0,0 +1,6 @@ +/target +/.direnv +perf.* +/Cargo.lock +build/ +.vim/* diff --git a/Cargo.toml b/rust/Cargo.toml similarity index 60% rename from Cargo.toml rename to rust/Cargo.toml index f03c451c..fbd416fc 100644 --- a/Cargo.toml +++ b/rust/Cargo.toml @@ -1,10 +1,10 @@ [workspace] members = [ - "crates/automerge", - 
"crates/automerge-c", - "crates/automerge-cli", - "crates/automerge-wasm", - "crates/edit-trace", + "automerge", + "automerge-c", + "automerge-cli", + "automerge-wasm", + "edit-trace", ] resolver = "2" diff --git a/crates/automerge-c/.gitignore b/rust/automerge-c/.gitignore similarity index 100% rename from crates/automerge-c/.gitignore rename to rust/automerge-c/.gitignore diff --git a/crates/automerge-c/CMakeLists.txt b/rust/automerge-c/CMakeLists.txt similarity index 100% rename from crates/automerge-c/CMakeLists.txt rename to rust/automerge-c/CMakeLists.txt diff --git a/crates/automerge-c/Cargo.toml b/rust/automerge-c/Cargo.toml similarity index 100% rename from crates/automerge-c/Cargo.toml rename to rust/automerge-c/Cargo.toml diff --git a/crates/automerge-c/README.md b/rust/automerge-c/README.md similarity index 100% rename from crates/automerge-c/README.md rename to rust/automerge-c/README.md diff --git a/crates/automerge-c/build.rs b/rust/automerge-c/build.rs similarity index 100% rename from crates/automerge-c/build.rs rename to rust/automerge-c/build.rs diff --git a/crates/automerge-c/cbindgen.toml b/rust/automerge-c/cbindgen.toml similarity index 100% rename from crates/automerge-c/cbindgen.toml rename to rust/automerge-c/cbindgen.toml diff --git a/crates/automerge-c/cmake/automerge-c-config.cmake.in b/rust/automerge-c/cmake/automerge-c-config.cmake.in similarity index 100% rename from crates/automerge-c/cmake/automerge-c-config.cmake.in rename to rust/automerge-c/cmake/automerge-c-config.cmake.in diff --git a/crates/automerge-c/cmake/config.h.in b/rust/automerge-c/cmake/config.h.in similarity index 100% rename from crates/automerge-c/cmake/config.h.in rename to rust/automerge-c/cmake/config.h.in diff --git a/crates/automerge-c/cmake/file_regex_replace.cmake b/rust/automerge-c/cmake/file_regex_replace.cmake similarity index 100% rename from crates/automerge-c/cmake/file_regex_replace.cmake rename to rust/automerge-c/cmake/file_regex_replace.cmake diff 
--git a/crates/automerge-c/cmake/file_touch.cmake b/rust/automerge-c/cmake/file_touch.cmake similarity index 100% rename from crates/automerge-c/cmake/file_touch.cmake rename to rust/automerge-c/cmake/file_touch.cmake diff --git a/crates/automerge-c/examples/CMakeLists.txt b/rust/automerge-c/examples/CMakeLists.txt similarity index 100% rename from crates/automerge-c/examples/CMakeLists.txt rename to rust/automerge-c/examples/CMakeLists.txt diff --git a/crates/automerge-c/examples/README.md b/rust/automerge-c/examples/README.md similarity index 100% rename from crates/automerge-c/examples/README.md rename to rust/automerge-c/examples/README.md diff --git a/crates/automerge-c/examples/quickstart.c b/rust/automerge-c/examples/quickstart.c similarity index 100% rename from crates/automerge-c/examples/quickstart.c rename to rust/automerge-c/examples/quickstart.c diff --git a/crates/automerge-c/img/brandmark.png b/rust/automerge-c/img/brandmark.png similarity index 100% rename from crates/automerge-c/img/brandmark.png rename to rust/automerge-c/img/brandmark.png diff --git a/crates/automerge-c/src/CMakeLists.txt b/rust/automerge-c/src/CMakeLists.txt similarity index 100% rename from crates/automerge-c/src/CMakeLists.txt rename to rust/automerge-c/src/CMakeLists.txt diff --git a/crates/automerge-c/src/actor_id.rs b/rust/automerge-c/src/actor_id.rs similarity index 100% rename from crates/automerge-c/src/actor_id.rs rename to rust/automerge-c/src/actor_id.rs diff --git a/crates/automerge-c/src/byte_span.rs b/rust/automerge-c/src/byte_span.rs similarity index 100% rename from crates/automerge-c/src/byte_span.rs rename to rust/automerge-c/src/byte_span.rs diff --git a/crates/automerge-c/src/change.rs b/rust/automerge-c/src/change.rs similarity index 100% rename from crates/automerge-c/src/change.rs rename to rust/automerge-c/src/change.rs diff --git a/crates/automerge-c/src/change_hashes.rs b/rust/automerge-c/src/change_hashes.rs similarity index 100% rename from 
crates/automerge-c/src/change_hashes.rs rename to rust/automerge-c/src/change_hashes.rs diff --git a/crates/automerge-c/src/changes.rs b/rust/automerge-c/src/changes.rs similarity index 100% rename from crates/automerge-c/src/changes.rs rename to rust/automerge-c/src/changes.rs diff --git a/crates/automerge-c/src/doc.rs b/rust/automerge-c/src/doc.rs similarity index 100% rename from crates/automerge-c/src/doc.rs rename to rust/automerge-c/src/doc.rs diff --git a/crates/automerge-c/src/doc/list.rs b/rust/automerge-c/src/doc/list.rs similarity index 100% rename from crates/automerge-c/src/doc/list.rs rename to rust/automerge-c/src/doc/list.rs diff --git a/crates/automerge-c/src/doc/list/item.rs b/rust/automerge-c/src/doc/list/item.rs similarity index 100% rename from crates/automerge-c/src/doc/list/item.rs rename to rust/automerge-c/src/doc/list/item.rs diff --git a/crates/automerge-c/src/doc/list/items.rs b/rust/automerge-c/src/doc/list/items.rs similarity index 100% rename from crates/automerge-c/src/doc/list/items.rs rename to rust/automerge-c/src/doc/list/items.rs diff --git a/crates/automerge-c/src/doc/map.rs b/rust/automerge-c/src/doc/map.rs similarity index 100% rename from crates/automerge-c/src/doc/map.rs rename to rust/automerge-c/src/doc/map.rs diff --git a/crates/automerge-c/src/doc/map/item.rs b/rust/automerge-c/src/doc/map/item.rs similarity index 100% rename from crates/automerge-c/src/doc/map/item.rs rename to rust/automerge-c/src/doc/map/item.rs diff --git a/crates/automerge-c/src/doc/map/items.rs b/rust/automerge-c/src/doc/map/items.rs similarity index 100% rename from crates/automerge-c/src/doc/map/items.rs rename to rust/automerge-c/src/doc/map/items.rs diff --git a/crates/automerge-c/src/doc/utils.rs b/rust/automerge-c/src/doc/utils.rs similarity index 100% rename from crates/automerge-c/src/doc/utils.rs rename to rust/automerge-c/src/doc/utils.rs diff --git a/crates/automerge-c/src/lib.rs b/rust/automerge-c/src/lib.rs similarity index 100% 
rename from crates/automerge-c/src/lib.rs rename to rust/automerge-c/src/lib.rs diff --git a/crates/automerge-c/src/obj.rs b/rust/automerge-c/src/obj.rs similarity index 100% rename from crates/automerge-c/src/obj.rs rename to rust/automerge-c/src/obj.rs diff --git a/crates/automerge-c/src/obj/item.rs b/rust/automerge-c/src/obj/item.rs similarity index 100% rename from crates/automerge-c/src/obj/item.rs rename to rust/automerge-c/src/obj/item.rs diff --git a/crates/automerge-c/src/obj/items.rs b/rust/automerge-c/src/obj/items.rs similarity index 100% rename from crates/automerge-c/src/obj/items.rs rename to rust/automerge-c/src/obj/items.rs diff --git a/crates/automerge-c/src/result.rs b/rust/automerge-c/src/result.rs similarity index 100% rename from crates/automerge-c/src/result.rs rename to rust/automerge-c/src/result.rs diff --git a/crates/automerge-c/src/result_stack.rs b/rust/automerge-c/src/result_stack.rs similarity index 100% rename from crates/automerge-c/src/result_stack.rs rename to rust/automerge-c/src/result_stack.rs diff --git a/crates/automerge-c/src/strs.rs b/rust/automerge-c/src/strs.rs similarity index 100% rename from crates/automerge-c/src/strs.rs rename to rust/automerge-c/src/strs.rs diff --git a/crates/automerge-c/src/sync.rs b/rust/automerge-c/src/sync.rs similarity index 100% rename from crates/automerge-c/src/sync.rs rename to rust/automerge-c/src/sync.rs diff --git a/crates/automerge-c/src/sync/have.rs b/rust/automerge-c/src/sync/have.rs similarity index 100% rename from crates/automerge-c/src/sync/have.rs rename to rust/automerge-c/src/sync/have.rs diff --git a/crates/automerge-c/src/sync/haves.rs b/rust/automerge-c/src/sync/haves.rs similarity index 100% rename from crates/automerge-c/src/sync/haves.rs rename to rust/automerge-c/src/sync/haves.rs diff --git a/crates/automerge-c/src/sync/message.rs b/rust/automerge-c/src/sync/message.rs similarity index 100% rename from crates/automerge-c/src/sync/message.rs rename to 
rust/automerge-c/src/sync/message.rs diff --git a/crates/automerge-c/src/sync/state.rs b/rust/automerge-c/src/sync/state.rs similarity index 100% rename from crates/automerge-c/src/sync/state.rs rename to rust/automerge-c/src/sync/state.rs diff --git a/crates/automerge-c/test/CMakeLists.txt b/rust/automerge-c/test/CMakeLists.txt similarity index 100% rename from crates/automerge-c/test/CMakeLists.txt rename to rust/automerge-c/test/CMakeLists.txt diff --git a/crates/automerge-c/test/actor_id_tests.c b/rust/automerge-c/test/actor_id_tests.c similarity index 100% rename from crates/automerge-c/test/actor_id_tests.c rename to rust/automerge-c/test/actor_id_tests.c diff --git a/crates/automerge-c/test/doc_tests.c b/rust/automerge-c/test/doc_tests.c similarity index 100% rename from crates/automerge-c/test/doc_tests.c rename to rust/automerge-c/test/doc_tests.c diff --git a/crates/automerge-c/test/group_state.c b/rust/automerge-c/test/group_state.c similarity index 100% rename from crates/automerge-c/test/group_state.c rename to rust/automerge-c/test/group_state.c diff --git a/crates/automerge-c/test/group_state.h b/rust/automerge-c/test/group_state.h similarity index 100% rename from crates/automerge-c/test/group_state.h rename to rust/automerge-c/test/group_state.h diff --git a/crates/automerge-c/test/list_tests.c b/rust/automerge-c/test/list_tests.c similarity index 100% rename from crates/automerge-c/test/list_tests.c rename to rust/automerge-c/test/list_tests.c diff --git a/crates/automerge-c/test/macro_utils.c b/rust/automerge-c/test/macro_utils.c similarity index 100% rename from crates/automerge-c/test/macro_utils.c rename to rust/automerge-c/test/macro_utils.c diff --git a/crates/automerge-c/test/macro_utils.h b/rust/automerge-c/test/macro_utils.h similarity index 100% rename from crates/automerge-c/test/macro_utils.h rename to rust/automerge-c/test/macro_utils.h diff --git a/crates/automerge-c/test/main.c b/rust/automerge-c/test/main.c similarity index 100% 
rename from crates/automerge-c/test/main.c rename to rust/automerge-c/test/main.c diff --git a/crates/automerge-c/test/map_tests.c b/rust/automerge-c/test/map_tests.c similarity index 100% rename from crates/automerge-c/test/map_tests.c rename to rust/automerge-c/test/map_tests.c diff --git a/crates/automerge-c/test/ported_wasm/basic_tests.c b/rust/automerge-c/test/ported_wasm/basic_tests.c similarity index 100% rename from crates/automerge-c/test/ported_wasm/basic_tests.c rename to rust/automerge-c/test/ported_wasm/basic_tests.c diff --git a/crates/automerge-c/test/ported_wasm/suite.c b/rust/automerge-c/test/ported_wasm/suite.c similarity index 100% rename from crates/automerge-c/test/ported_wasm/suite.c rename to rust/automerge-c/test/ported_wasm/suite.c diff --git a/crates/automerge-c/test/ported_wasm/sync_tests.c b/rust/automerge-c/test/ported_wasm/sync_tests.c similarity index 100% rename from crates/automerge-c/test/ported_wasm/sync_tests.c rename to rust/automerge-c/test/ported_wasm/sync_tests.c diff --git a/crates/automerge-c/test/stack_utils.c b/rust/automerge-c/test/stack_utils.c similarity index 100% rename from crates/automerge-c/test/stack_utils.c rename to rust/automerge-c/test/stack_utils.c diff --git a/crates/automerge-c/test/stack_utils.h b/rust/automerge-c/test/stack_utils.h similarity index 100% rename from crates/automerge-c/test/stack_utils.h rename to rust/automerge-c/test/stack_utils.h diff --git a/crates/automerge-c/test/str_utils.c b/rust/automerge-c/test/str_utils.c similarity index 100% rename from crates/automerge-c/test/str_utils.c rename to rust/automerge-c/test/str_utils.c diff --git a/crates/automerge-c/test/str_utils.h b/rust/automerge-c/test/str_utils.h similarity index 100% rename from crates/automerge-c/test/str_utils.h rename to rust/automerge-c/test/str_utils.h diff --git a/crates/automerge-cli/.gitignore b/rust/automerge-cli/.gitignore similarity index 100% rename from crates/automerge-cli/.gitignore rename to 
rust/automerge-cli/.gitignore diff --git a/crates/automerge-cli/Cargo.lock b/rust/automerge-cli/Cargo.lock similarity index 100% rename from crates/automerge-cli/Cargo.lock rename to rust/automerge-cli/Cargo.lock diff --git a/crates/automerge-cli/Cargo.toml b/rust/automerge-cli/Cargo.toml similarity index 100% rename from crates/automerge-cli/Cargo.toml rename to rust/automerge-cli/Cargo.toml diff --git a/crates/automerge-cli/IDEAS.md b/rust/automerge-cli/IDEAS.md similarity index 100% rename from crates/automerge-cli/IDEAS.md rename to rust/automerge-cli/IDEAS.md diff --git a/crates/automerge-cli/src/change.rs b/rust/automerge-cli/src/change.rs similarity index 100% rename from crates/automerge-cli/src/change.rs rename to rust/automerge-cli/src/change.rs diff --git a/crates/automerge-cli/src/examine.rs b/rust/automerge-cli/src/examine.rs similarity index 100% rename from crates/automerge-cli/src/examine.rs rename to rust/automerge-cli/src/examine.rs diff --git a/crates/automerge-cli/src/export.rs b/rust/automerge-cli/src/export.rs similarity index 100% rename from crates/automerge-cli/src/export.rs rename to rust/automerge-cli/src/export.rs diff --git a/crates/automerge-cli/src/import.rs b/rust/automerge-cli/src/import.rs similarity index 100% rename from crates/automerge-cli/src/import.rs rename to rust/automerge-cli/src/import.rs diff --git a/crates/automerge-cli/src/main.rs b/rust/automerge-cli/src/main.rs similarity index 100% rename from crates/automerge-cli/src/main.rs rename to rust/automerge-cli/src/main.rs diff --git a/crates/automerge-cli/src/merge.rs b/rust/automerge-cli/src/merge.rs similarity index 100% rename from crates/automerge-cli/src/merge.rs rename to rust/automerge-cli/src/merge.rs diff --git a/crates/automerge-cli/tests/integration.rs b/rust/automerge-cli/tests/integration.rs similarity index 100% rename from crates/automerge-cli/tests/integration.rs rename to rust/automerge-cli/tests/integration.rs diff --git 
a/crates/automerge-wasm/.eslintignore b/rust/automerge-wasm/.eslintignore similarity index 100% rename from crates/automerge-wasm/.eslintignore rename to rust/automerge-wasm/.eslintignore diff --git a/crates/automerge-wasm/.eslintrc.cjs b/rust/automerge-wasm/.eslintrc.cjs similarity index 100% rename from crates/automerge-wasm/.eslintrc.cjs rename to rust/automerge-wasm/.eslintrc.cjs diff --git a/crates/automerge-wasm/.gitignore b/rust/automerge-wasm/.gitignore similarity index 100% rename from crates/automerge-wasm/.gitignore rename to rust/automerge-wasm/.gitignore diff --git a/crates/automerge-wasm/Cargo.toml b/rust/automerge-wasm/Cargo.toml similarity index 100% rename from crates/automerge-wasm/Cargo.toml rename to rust/automerge-wasm/Cargo.toml diff --git a/crates/automerge-wasm/LICENSE b/rust/automerge-wasm/LICENSE similarity index 100% rename from crates/automerge-wasm/LICENSE rename to rust/automerge-wasm/LICENSE diff --git a/crates/automerge-wasm/README.md b/rust/automerge-wasm/README.md similarity index 100% rename from crates/automerge-wasm/README.md rename to rust/automerge-wasm/README.md diff --git a/crates/automerge-wasm/examples/cra/.gitignore b/rust/automerge-wasm/examples/cra/.gitignore similarity index 100% rename from crates/automerge-wasm/examples/cra/.gitignore rename to rust/automerge-wasm/examples/cra/.gitignore diff --git a/crates/automerge-wasm/examples/cra/README.md b/rust/automerge-wasm/examples/cra/README.md similarity index 100% rename from crates/automerge-wasm/examples/cra/README.md rename to rust/automerge-wasm/examples/cra/README.md diff --git a/crates/automerge-wasm/examples/cra/package.json b/rust/automerge-wasm/examples/cra/package.json similarity index 100% rename from crates/automerge-wasm/examples/cra/package.json rename to rust/automerge-wasm/examples/cra/package.json diff --git a/crates/automerge-wasm/examples/cra/public/favicon.ico b/rust/automerge-wasm/examples/cra/public/favicon.ico similarity index 100% rename from 
crates/automerge-wasm/examples/cra/public/favicon.ico rename to rust/automerge-wasm/examples/cra/public/favicon.ico diff --git a/crates/automerge-wasm/examples/cra/public/index.html b/rust/automerge-wasm/examples/cra/public/index.html similarity index 100% rename from crates/automerge-wasm/examples/cra/public/index.html rename to rust/automerge-wasm/examples/cra/public/index.html diff --git a/crates/automerge-wasm/examples/cra/public/logo192.png b/rust/automerge-wasm/examples/cra/public/logo192.png similarity index 100% rename from crates/automerge-wasm/examples/cra/public/logo192.png rename to rust/automerge-wasm/examples/cra/public/logo192.png diff --git a/crates/automerge-wasm/examples/cra/public/logo512.png b/rust/automerge-wasm/examples/cra/public/logo512.png similarity index 100% rename from crates/automerge-wasm/examples/cra/public/logo512.png rename to rust/automerge-wasm/examples/cra/public/logo512.png diff --git a/crates/automerge-wasm/examples/cra/public/manifest.json b/rust/automerge-wasm/examples/cra/public/manifest.json similarity index 100% rename from crates/automerge-wasm/examples/cra/public/manifest.json rename to rust/automerge-wasm/examples/cra/public/manifest.json diff --git a/crates/automerge-wasm/examples/cra/public/robots.txt b/rust/automerge-wasm/examples/cra/public/robots.txt similarity index 100% rename from crates/automerge-wasm/examples/cra/public/robots.txt rename to rust/automerge-wasm/examples/cra/public/robots.txt diff --git a/crates/automerge-wasm/examples/cra/src/App.css b/rust/automerge-wasm/examples/cra/src/App.css similarity index 100% rename from crates/automerge-wasm/examples/cra/src/App.css rename to rust/automerge-wasm/examples/cra/src/App.css diff --git a/crates/automerge-wasm/examples/cra/src/App.test.tsx b/rust/automerge-wasm/examples/cra/src/App.test.tsx similarity index 100% rename from crates/automerge-wasm/examples/cra/src/App.test.tsx rename to rust/automerge-wasm/examples/cra/src/App.test.tsx diff --git 
a/crates/automerge-wasm/examples/cra/src/App.tsx b/rust/automerge-wasm/examples/cra/src/App.tsx similarity index 100% rename from crates/automerge-wasm/examples/cra/src/App.tsx rename to rust/automerge-wasm/examples/cra/src/App.tsx diff --git a/crates/automerge-wasm/examples/cra/src/index.css b/rust/automerge-wasm/examples/cra/src/index.css similarity index 100% rename from crates/automerge-wasm/examples/cra/src/index.css rename to rust/automerge-wasm/examples/cra/src/index.css diff --git a/crates/automerge-wasm/examples/cra/src/index.tsx b/rust/automerge-wasm/examples/cra/src/index.tsx similarity index 100% rename from crates/automerge-wasm/examples/cra/src/index.tsx rename to rust/automerge-wasm/examples/cra/src/index.tsx diff --git a/crates/automerge-wasm/examples/cra/src/logo.svg b/rust/automerge-wasm/examples/cra/src/logo.svg similarity index 100% rename from crates/automerge-wasm/examples/cra/src/logo.svg rename to rust/automerge-wasm/examples/cra/src/logo.svg diff --git a/crates/automerge-wasm/examples/cra/src/react-app-env.d.ts b/rust/automerge-wasm/examples/cra/src/react-app-env.d.ts similarity index 100% rename from crates/automerge-wasm/examples/cra/src/react-app-env.d.ts rename to rust/automerge-wasm/examples/cra/src/react-app-env.d.ts diff --git a/crates/automerge-wasm/examples/cra/src/reportWebVitals.ts b/rust/automerge-wasm/examples/cra/src/reportWebVitals.ts similarity index 100% rename from crates/automerge-wasm/examples/cra/src/reportWebVitals.ts rename to rust/automerge-wasm/examples/cra/src/reportWebVitals.ts diff --git a/crates/automerge-wasm/examples/cra/src/setupTests.ts b/rust/automerge-wasm/examples/cra/src/setupTests.ts similarity index 100% rename from crates/automerge-wasm/examples/cra/src/setupTests.ts rename to rust/automerge-wasm/examples/cra/src/setupTests.ts diff --git a/crates/automerge-wasm/examples/cra/tsconfig.json b/rust/automerge-wasm/examples/cra/tsconfig.json similarity index 100% rename from 
crates/automerge-wasm/examples/cra/tsconfig.json rename to rust/automerge-wasm/examples/cra/tsconfig.json diff --git a/crates/automerge-wasm/examples/webpack/.gitignore b/rust/automerge-wasm/examples/webpack/.gitignore similarity index 100% rename from crates/automerge-wasm/examples/webpack/.gitignore rename to rust/automerge-wasm/examples/webpack/.gitignore diff --git a/crates/automerge-wasm/examples/webpack/package.json b/rust/automerge-wasm/examples/webpack/package.json similarity index 100% rename from crates/automerge-wasm/examples/webpack/package.json rename to rust/automerge-wasm/examples/webpack/package.json diff --git a/crates/automerge-wasm/examples/webpack/public/index.html b/rust/automerge-wasm/examples/webpack/public/index.html similarity index 100% rename from crates/automerge-wasm/examples/webpack/public/index.html rename to rust/automerge-wasm/examples/webpack/public/index.html diff --git a/crates/automerge-wasm/examples/webpack/src/index.js b/rust/automerge-wasm/examples/webpack/src/index.js similarity index 100% rename from crates/automerge-wasm/examples/webpack/src/index.js rename to rust/automerge-wasm/examples/webpack/src/index.js diff --git a/crates/automerge-wasm/examples/webpack/webpack.config.js b/rust/automerge-wasm/examples/webpack/webpack.config.js similarity index 100% rename from crates/automerge-wasm/examples/webpack/webpack.config.js rename to rust/automerge-wasm/examples/webpack/webpack.config.js diff --git a/crates/automerge-wasm/index.d.ts b/rust/automerge-wasm/index.d.ts similarity index 100% rename from crates/automerge-wasm/index.d.ts rename to rust/automerge-wasm/index.d.ts diff --git a/crates/automerge-wasm/package.json b/rust/automerge-wasm/package.json similarity index 100% rename from crates/automerge-wasm/package.json rename to rust/automerge-wasm/package.json diff --git a/crates/automerge-wasm/src/interop.rs b/rust/automerge-wasm/src/interop.rs similarity index 100% rename from crates/automerge-wasm/src/interop.rs rename 
to rust/automerge-wasm/src/interop.rs diff --git a/crates/automerge-wasm/src/lib.rs b/rust/automerge-wasm/src/lib.rs similarity index 100% rename from crates/automerge-wasm/src/lib.rs rename to rust/automerge-wasm/src/lib.rs diff --git a/crates/automerge-wasm/src/observer.rs b/rust/automerge-wasm/src/observer.rs similarity index 100% rename from crates/automerge-wasm/src/observer.rs rename to rust/automerge-wasm/src/observer.rs diff --git a/crates/automerge-wasm/src/sync.rs b/rust/automerge-wasm/src/sync.rs similarity index 100% rename from crates/automerge-wasm/src/sync.rs rename to rust/automerge-wasm/src/sync.rs diff --git a/crates/automerge-wasm/src/value.rs b/rust/automerge-wasm/src/value.rs similarity index 100% rename from crates/automerge-wasm/src/value.rs rename to rust/automerge-wasm/src/value.rs diff --git a/crates/automerge-wasm/test/apply.ts b/rust/automerge-wasm/test/apply.ts similarity index 100% rename from crates/automerge-wasm/test/apply.ts rename to rust/automerge-wasm/test/apply.ts diff --git a/crates/automerge-wasm/test/helpers/columnar.js b/rust/automerge-wasm/test/helpers/columnar.js similarity index 100% rename from crates/automerge-wasm/test/helpers/columnar.js rename to rust/automerge-wasm/test/helpers/columnar.js diff --git a/crates/automerge-wasm/test/helpers/common.js b/rust/automerge-wasm/test/helpers/common.js similarity index 100% rename from crates/automerge-wasm/test/helpers/common.js rename to rust/automerge-wasm/test/helpers/common.js diff --git a/crates/automerge-wasm/test/helpers/encoding.js b/rust/automerge-wasm/test/helpers/encoding.js similarity index 100% rename from crates/automerge-wasm/test/helpers/encoding.js rename to rust/automerge-wasm/test/helpers/encoding.js diff --git a/crates/automerge-wasm/test/helpers/sync.js b/rust/automerge-wasm/test/helpers/sync.js similarity index 100% rename from crates/automerge-wasm/test/helpers/sync.js rename to rust/automerge-wasm/test/helpers/sync.js diff --git 
a/crates/automerge-wasm/test/readme.ts b/rust/automerge-wasm/test/readme.ts similarity index 100% rename from crates/automerge-wasm/test/readme.ts rename to rust/automerge-wasm/test/readme.ts diff --git a/crates/automerge-wasm/test/test.ts b/rust/automerge-wasm/test/test.ts similarity index 100% rename from crates/automerge-wasm/test/test.ts rename to rust/automerge-wasm/test/test.ts diff --git a/crates/automerge-wasm/tsconfig.json b/rust/automerge-wasm/tsconfig.json similarity index 100% rename from crates/automerge-wasm/tsconfig.json rename to rust/automerge-wasm/tsconfig.json diff --git a/crates/automerge/.gitignore b/rust/automerge/.gitignore similarity index 100% rename from crates/automerge/.gitignore rename to rust/automerge/.gitignore diff --git a/crates/automerge/Cargo.toml b/rust/automerge/Cargo.toml similarity index 100% rename from crates/automerge/Cargo.toml rename to rust/automerge/Cargo.toml diff --git a/crates/automerge/benches/map.rs b/rust/automerge/benches/map.rs similarity index 100% rename from crates/automerge/benches/map.rs rename to rust/automerge/benches/map.rs diff --git a/crates/automerge/benches/range.rs b/rust/automerge/benches/range.rs similarity index 100% rename from crates/automerge/benches/range.rs rename to rust/automerge/benches/range.rs diff --git a/crates/automerge/benches/sync.rs b/rust/automerge/benches/sync.rs similarity index 100% rename from crates/automerge/benches/sync.rs rename to rust/automerge/benches/sync.rs diff --git a/crates/automerge/examples/README.md b/rust/automerge/examples/README.md similarity index 100% rename from crates/automerge/examples/README.md rename to rust/automerge/examples/README.md diff --git a/crates/automerge/examples/quickstart.rs b/rust/automerge/examples/quickstart.rs similarity index 100% rename from crates/automerge/examples/quickstart.rs rename to rust/automerge/examples/quickstart.rs diff --git a/crates/automerge/examples/watch.rs b/rust/automerge/examples/watch.rs similarity index 100% 
rename from crates/automerge/examples/watch.rs rename to rust/automerge/examples/watch.rs diff --git a/crates/automerge/src/autocommit.rs b/rust/automerge/src/autocommit.rs similarity index 100% rename from crates/automerge/src/autocommit.rs rename to rust/automerge/src/autocommit.rs diff --git a/crates/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs similarity index 100% rename from crates/automerge/src/automerge.rs rename to rust/automerge/src/automerge.rs diff --git a/crates/automerge/src/automerge/tests.rs b/rust/automerge/src/automerge/tests.rs similarity index 100% rename from crates/automerge/src/automerge/tests.rs rename to rust/automerge/src/automerge/tests.rs diff --git a/crates/automerge/src/autoserde.rs b/rust/automerge/src/autoserde.rs similarity index 100% rename from crates/automerge/src/autoserde.rs rename to rust/automerge/src/autoserde.rs diff --git a/crates/automerge/src/change.rs b/rust/automerge/src/change.rs similarity index 100% rename from crates/automerge/src/change.rs rename to rust/automerge/src/change.rs diff --git a/crates/automerge/src/clock.rs b/rust/automerge/src/clock.rs similarity index 100% rename from crates/automerge/src/clock.rs rename to rust/automerge/src/clock.rs diff --git a/crates/automerge/src/clocks.rs b/rust/automerge/src/clocks.rs similarity index 100% rename from crates/automerge/src/clocks.rs rename to rust/automerge/src/clocks.rs diff --git a/crates/automerge/src/columnar.rs b/rust/automerge/src/columnar.rs similarity index 100% rename from crates/automerge/src/columnar.rs rename to rust/automerge/src/columnar.rs diff --git a/crates/automerge/src/columnar/column_range.rs b/rust/automerge/src/columnar/column_range.rs similarity index 100% rename from crates/automerge/src/columnar/column_range.rs rename to rust/automerge/src/columnar/column_range.rs diff --git a/crates/automerge/src/columnar/column_range/boolean.rs b/rust/automerge/src/columnar/column_range/boolean.rs similarity index 100% rename from 
crates/automerge/src/columnar/column_range/boolean.rs rename to rust/automerge/src/columnar/column_range/boolean.rs diff --git a/crates/automerge/src/columnar/column_range/delta.rs b/rust/automerge/src/columnar/column_range/delta.rs similarity index 100% rename from crates/automerge/src/columnar/column_range/delta.rs rename to rust/automerge/src/columnar/column_range/delta.rs diff --git a/crates/automerge/src/columnar/column_range/deps.rs b/rust/automerge/src/columnar/column_range/deps.rs similarity index 100% rename from crates/automerge/src/columnar/column_range/deps.rs rename to rust/automerge/src/columnar/column_range/deps.rs diff --git a/crates/automerge/src/columnar/column_range/generic.rs b/rust/automerge/src/columnar/column_range/generic.rs similarity index 100% rename from crates/automerge/src/columnar/column_range/generic.rs rename to rust/automerge/src/columnar/column_range/generic.rs diff --git a/crates/automerge/src/columnar/column_range/generic/group.rs b/rust/automerge/src/columnar/column_range/generic/group.rs similarity index 100% rename from crates/automerge/src/columnar/column_range/generic/group.rs rename to rust/automerge/src/columnar/column_range/generic/group.rs diff --git a/crates/automerge/src/columnar/column_range/generic/simple.rs b/rust/automerge/src/columnar/column_range/generic/simple.rs similarity index 100% rename from crates/automerge/src/columnar/column_range/generic/simple.rs rename to rust/automerge/src/columnar/column_range/generic/simple.rs diff --git a/crates/automerge/src/columnar/column_range/key.rs b/rust/automerge/src/columnar/column_range/key.rs similarity index 100% rename from crates/automerge/src/columnar/column_range/key.rs rename to rust/automerge/src/columnar/column_range/key.rs diff --git a/crates/automerge/src/columnar/column_range/obj_id.rs b/rust/automerge/src/columnar/column_range/obj_id.rs similarity index 100% rename from crates/automerge/src/columnar/column_range/obj_id.rs rename to 
rust/automerge/src/columnar/column_range/obj_id.rs diff --git a/crates/automerge/src/columnar/column_range/opid.rs b/rust/automerge/src/columnar/column_range/opid.rs similarity index 100% rename from crates/automerge/src/columnar/column_range/opid.rs rename to rust/automerge/src/columnar/column_range/opid.rs diff --git a/crates/automerge/src/columnar/column_range/opid_list.rs b/rust/automerge/src/columnar/column_range/opid_list.rs similarity index 100% rename from crates/automerge/src/columnar/column_range/opid_list.rs rename to rust/automerge/src/columnar/column_range/opid_list.rs diff --git a/crates/automerge/src/columnar/column_range/raw.rs b/rust/automerge/src/columnar/column_range/raw.rs similarity index 100% rename from crates/automerge/src/columnar/column_range/raw.rs rename to rust/automerge/src/columnar/column_range/raw.rs diff --git a/crates/automerge/src/columnar/column_range/rle.rs b/rust/automerge/src/columnar/column_range/rle.rs similarity index 100% rename from crates/automerge/src/columnar/column_range/rle.rs rename to rust/automerge/src/columnar/column_range/rle.rs diff --git a/crates/automerge/src/columnar/column_range/value.rs b/rust/automerge/src/columnar/column_range/value.rs similarity index 100% rename from crates/automerge/src/columnar/column_range/value.rs rename to rust/automerge/src/columnar/column_range/value.rs diff --git a/crates/automerge/src/columnar/encoding.rs b/rust/automerge/src/columnar/encoding.rs similarity index 100% rename from crates/automerge/src/columnar/encoding.rs rename to rust/automerge/src/columnar/encoding.rs diff --git a/crates/automerge/src/columnar/encoding/boolean.rs b/rust/automerge/src/columnar/encoding/boolean.rs similarity index 100% rename from crates/automerge/src/columnar/encoding/boolean.rs rename to rust/automerge/src/columnar/encoding/boolean.rs diff --git a/crates/automerge/src/columnar/encoding/col_error.rs b/rust/automerge/src/columnar/encoding/col_error.rs similarity index 100% rename from 
crates/automerge/src/columnar/encoding/col_error.rs rename to rust/automerge/src/columnar/encoding/col_error.rs diff --git a/crates/automerge/src/columnar/encoding/column_decoder.rs b/rust/automerge/src/columnar/encoding/column_decoder.rs similarity index 100% rename from crates/automerge/src/columnar/encoding/column_decoder.rs rename to rust/automerge/src/columnar/encoding/column_decoder.rs diff --git a/crates/automerge/src/columnar/encoding/decodable_impls.rs b/rust/automerge/src/columnar/encoding/decodable_impls.rs similarity index 100% rename from crates/automerge/src/columnar/encoding/decodable_impls.rs rename to rust/automerge/src/columnar/encoding/decodable_impls.rs diff --git a/crates/automerge/src/columnar/encoding/delta.rs b/rust/automerge/src/columnar/encoding/delta.rs similarity index 100% rename from crates/automerge/src/columnar/encoding/delta.rs rename to rust/automerge/src/columnar/encoding/delta.rs diff --git a/crates/automerge/src/columnar/encoding/encodable_impls.rs b/rust/automerge/src/columnar/encoding/encodable_impls.rs similarity index 100% rename from crates/automerge/src/columnar/encoding/encodable_impls.rs rename to rust/automerge/src/columnar/encoding/encodable_impls.rs diff --git a/crates/automerge/src/columnar/encoding/leb128.rs b/rust/automerge/src/columnar/encoding/leb128.rs similarity index 100% rename from crates/automerge/src/columnar/encoding/leb128.rs rename to rust/automerge/src/columnar/encoding/leb128.rs diff --git a/crates/automerge/src/columnar/encoding/properties.rs b/rust/automerge/src/columnar/encoding/properties.rs similarity index 100% rename from crates/automerge/src/columnar/encoding/properties.rs rename to rust/automerge/src/columnar/encoding/properties.rs diff --git a/crates/automerge/src/columnar/encoding/raw.rs b/rust/automerge/src/columnar/encoding/raw.rs similarity index 100% rename from crates/automerge/src/columnar/encoding/raw.rs rename to rust/automerge/src/columnar/encoding/raw.rs diff --git 
a/crates/automerge/src/columnar/encoding/rle.rs b/rust/automerge/src/columnar/encoding/rle.rs similarity index 100% rename from crates/automerge/src/columnar/encoding/rle.rs rename to rust/automerge/src/columnar/encoding/rle.rs diff --git a/crates/automerge/src/columnar/splice_error.rs b/rust/automerge/src/columnar/splice_error.rs similarity index 100% rename from crates/automerge/src/columnar/splice_error.rs rename to rust/automerge/src/columnar/splice_error.rs diff --git a/crates/automerge/src/convert.rs b/rust/automerge/src/convert.rs similarity index 100% rename from crates/automerge/src/convert.rs rename to rust/automerge/src/convert.rs diff --git a/crates/automerge/src/decoding.rs b/rust/automerge/src/decoding.rs similarity index 100% rename from crates/automerge/src/decoding.rs rename to rust/automerge/src/decoding.rs diff --git a/crates/automerge/src/error.rs b/rust/automerge/src/error.rs similarity index 100% rename from crates/automerge/src/error.rs rename to rust/automerge/src/error.rs diff --git a/crates/automerge/src/exid.rs b/rust/automerge/src/exid.rs similarity index 100% rename from crates/automerge/src/exid.rs rename to rust/automerge/src/exid.rs diff --git a/crates/automerge/src/indexed_cache.rs b/rust/automerge/src/indexed_cache.rs similarity index 100% rename from crates/automerge/src/indexed_cache.rs rename to rust/automerge/src/indexed_cache.rs diff --git a/crates/automerge/src/keys.rs b/rust/automerge/src/keys.rs similarity index 100% rename from crates/automerge/src/keys.rs rename to rust/automerge/src/keys.rs diff --git a/crates/automerge/src/keys_at.rs b/rust/automerge/src/keys_at.rs similarity index 100% rename from crates/automerge/src/keys_at.rs rename to rust/automerge/src/keys_at.rs diff --git a/crates/automerge/src/legacy/mod.rs b/rust/automerge/src/legacy/mod.rs similarity index 100% rename from crates/automerge/src/legacy/mod.rs rename to rust/automerge/src/legacy/mod.rs diff --git 
a/crates/automerge/src/legacy/serde_impls/actor_id.rs b/rust/automerge/src/legacy/serde_impls/actor_id.rs similarity index 100% rename from crates/automerge/src/legacy/serde_impls/actor_id.rs rename to rust/automerge/src/legacy/serde_impls/actor_id.rs diff --git a/crates/automerge/src/legacy/serde_impls/change_hash.rs b/rust/automerge/src/legacy/serde_impls/change_hash.rs similarity index 100% rename from crates/automerge/src/legacy/serde_impls/change_hash.rs rename to rust/automerge/src/legacy/serde_impls/change_hash.rs diff --git a/crates/automerge/src/legacy/serde_impls/element_id.rs b/rust/automerge/src/legacy/serde_impls/element_id.rs similarity index 100% rename from crates/automerge/src/legacy/serde_impls/element_id.rs rename to rust/automerge/src/legacy/serde_impls/element_id.rs diff --git a/crates/automerge/src/legacy/serde_impls/mod.rs b/rust/automerge/src/legacy/serde_impls/mod.rs similarity index 100% rename from crates/automerge/src/legacy/serde_impls/mod.rs rename to rust/automerge/src/legacy/serde_impls/mod.rs diff --git a/crates/automerge/src/legacy/serde_impls/object_id.rs b/rust/automerge/src/legacy/serde_impls/object_id.rs similarity index 100% rename from crates/automerge/src/legacy/serde_impls/object_id.rs rename to rust/automerge/src/legacy/serde_impls/object_id.rs diff --git a/crates/automerge/src/legacy/serde_impls/op.rs b/rust/automerge/src/legacy/serde_impls/op.rs similarity index 100% rename from crates/automerge/src/legacy/serde_impls/op.rs rename to rust/automerge/src/legacy/serde_impls/op.rs diff --git a/crates/automerge/src/legacy/serde_impls/op_type.rs b/rust/automerge/src/legacy/serde_impls/op_type.rs similarity index 100% rename from crates/automerge/src/legacy/serde_impls/op_type.rs rename to rust/automerge/src/legacy/serde_impls/op_type.rs diff --git a/crates/automerge/src/legacy/serde_impls/opid.rs b/rust/automerge/src/legacy/serde_impls/opid.rs similarity index 100% rename from crates/automerge/src/legacy/serde_impls/opid.rs 
rename to rust/automerge/src/legacy/serde_impls/opid.rs diff --git a/crates/automerge/src/legacy/serde_impls/scalar_value.rs b/rust/automerge/src/legacy/serde_impls/scalar_value.rs similarity index 100% rename from crates/automerge/src/legacy/serde_impls/scalar_value.rs rename to rust/automerge/src/legacy/serde_impls/scalar_value.rs diff --git a/crates/automerge/src/legacy/utility_impls/element_id.rs b/rust/automerge/src/legacy/utility_impls/element_id.rs similarity index 100% rename from crates/automerge/src/legacy/utility_impls/element_id.rs rename to rust/automerge/src/legacy/utility_impls/element_id.rs diff --git a/crates/automerge/src/legacy/utility_impls/key.rs b/rust/automerge/src/legacy/utility_impls/key.rs similarity index 100% rename from crates/automerge/src/legacy/utility_impls/key.rs rename to rust/automerge/src/legacy/utility_impls/key.rs diff --git a/crates/automerge/src/legacy/utility_impls/mod.rs b/rust/automerge/src/legacy/utility_impls/mod.rs similarity index 100% rename from crates/automerge/src/legacy/utility_impls/mod.rs rename to rust/automerge/src/legacy/utility_impls/mod.rs diff --git a/crates/automerge/src/legacy/utility_impls/object_id.rs b/rust/automerge/src/legacy/utility_impls/object_id.rs similarity index 100% rename from crates/automerge/src/legacy/utility_impls/object_id.rs rename to rust/automerge/src/legacy/utility_impls/object_id.rs diff --git a/crates/automerge/src/legacy/utility_impls/opid.rs b/rust/automerge/src/legacy/utility_impls/opid.rs similarity index 100% rename from crates/automerge/src/legacy/utility_impls/opid.rs rename to rust/automerge/src/legacy/utility_impls/opid.rs diff --git a/crates/automerge/src/lib.rs b/rust/automerge/src/lib.rs similarity index 100% rename from crates/automerge/src/lib.rs rename to rust/automerge/src/lib.rs diff --git a/crates/automerge/src/list_range.rs b/rust/automerge/src/list_range.rs similarity index 100% rename from crates/automerge/src/list_range.rs rename to 
rust/automerge/src/list_range.rs diff --git a/crates/automerge/src/list_range_at.rs b/rust/automerge/src/list_range_at.rs similarity index 100% rename from crates/automerge/src/list_range_at.rs rename to rust/automerge/src/list_range_at.rs diff --git a/crates/automerge/src/map_range.rs b/rust/automerge/src/map_range.rs similarity index 100% rename from crates/automerge/src/map_range.rs rename to rust/automerge/src/map_range.rs diff --git a/crates/automerge/src/map_range_at.rs b/rust/automerge/src/map_range_at.rs similarity index 100% rename from crates/automerge/src/map_range_at.rs rename to rust/automerge/src/map_range_at.rs diff --git a/crates/automerge/src/op_observer.rs b/rust/automerge/src/op_observer.rs similarity index 100% rename from crates/automerge/src/op_observer.rs rename to rust/automerge/src/op_observer.rs diff --git a/crates/automerge/src/op_set.rs b/rust/automerge/src/op_set.rs similarity index 100% rename from crates/automerge/src/op_set.rs rename to rust/automerge/src/op_set.rs diff --git a/crates/automerge/src/op_set/load.rs b/rust/automerge/src/op_set/load.rs similarity index 100% rename from crates/automerge/src/op_set/load.rs rename to rust/automerge/src/op_set/load.rs diff --git a/crates/automerge/src/op_tree.rs b/rust/automerge/src/op_tree.rs similarity index 100% rename from crates/automerge/src/op_tree.rs rename to rust/automerge/src/op_tree.rs diff --git a/crates/automerge/src/op_tree/iter.rs b/rust/automerge/src/op_tree/iter.rs similarity index 100% rename from crates/automerge/src/op_tree/iter.rs rename to rust/automerge/src/op_tree/iter.rs diff --git a/crates/automerge/src/parents.rs b/rust/automerge/src/parents.rs similarity index 100% rename from crates/automerge/src/parents.rs rename to rust/automerge/src/parents.rs diff --git a/crates/automerge/src/query.rs b/rust/automerge/src/query.rs similarity index 100% rename from crates/automerge/src/query.rs rename to rust/automerge/src/query.rs diff --git 
a/crates/automerge/src/query/elem_id_pos.rs b/rust/automerge/src/query/elem_id_pos.rs similarity index 100% rename from crates/automerge/src/query/elem_id_pos.rs rename to rust/automerge/src/query/elem_id_pos.rs diff --git a/crates/automerge/src/query/insert.rs b/rust/automerge/src/query/insert.rs similarity index 100% rename from crates/automerge/src/query/insert.rs rename to rust/automerge/src/query/insert.rs diff --git a/crates/automerge/src/query/keys.rs b/rust/automerge/src/query/keys.rs similarity index 100% rename from crates/automerge/src/query/keys.rs rename to rust/automerge/src/query/keys.rs diff --git a/crates/automerge/src/query/keys_at.rs b/rust/automerge/src/query/keys_at.rs similarity index 100% rename from crates/automerge/src/query/keys_at.rs rename to rust/automerge/src/query/keys_at.rs diff --git a/crates/automerge/src/query/len.rs b/rust/automerge/src/query/len.rs similarity index 100% rename from crates/automerge/src/query/len.rs rename to rust/automerge/src/query/len.rs diff --git a/crates/automerge/src/query/len_at.rs b/rust/automerge/src/query/len_at.rs similarity index 100% rename from crates/automerge/src/query/len_at.rs rename to rust/automerge/src/query/len_at.rs diff --git a/crates/automerge/src/query/list_range.rs b/rust/automerge/src/query/list_range.rs similarity index 100% rename from crates/automerge/src/query/list_range.rs rename to rust/automerge/src/query/list_range.rs diff --git a/crates/automerge/src/query/list_range_at.rs b/rust/automerge/src/query/list_range_at.rs similarity index 100% rename from crates/automerge/src/query/list_range_at.rs rename to rust/automerge/src/query/list_range_at.rs diff --git a/crates/automerge/src/query/list_vals.rs b/rust/automerge/src/query/list_vals.rs similarity index 100% rename from crates/automerge/src/query/list_vals.rs rename to rust/automerge/src/query/list_vals.rs diff --git a/crates/automerge/src/query/list_vals_at.rs b/rust/automerge/src/query/list_vals_at.rs similarity index 100% 
rename from crates/automerge/src/query/list_vals_at.rs rename to rust/automerge/src/query/list_vals_at.rs diff --git a/crates/automerge/src/query/map_range.rs b/rust/automerge/src/query/map_range.rs similarity index 100% rename from crates/automerge/src/query/map_range.rs rename to rust/automerge/src/query/map_range.rs diff --git a/crates/automerge/src/query/map_range_at.rs b/rust/automerge/src/query/map_range_at.rs similarity index 100% rename from crates/automerge/src/query/map_range_at.rs rename to rust/automerge/src/query/map_range_at.rs diff --git a/crates/automerge/src/query/nth.rs b/rust/automerge/src/query/nth.rs similarity index 100% rename from crates/automerge/src/query/nth.rs rename to rust/automerge/src/query/nth.rs diff --git a/crates/automerge/src/query/nth_at.rs b/rust/automerge/src/query/nth_at.rs similarity index 100% rename from crates/automerge/src/query/nth_at.rs rename to rust/automerge/src/query/nth_at.rs diff --git a/crates/automerge/src/query/opid.rs b/rust/automerge/src/query/opid.rs similarity index 100% rename from crates/automerge/src/query/opid.rs rename to rust/automerge/src/query/opid.rs diff --git a/crates/automerge/src/query/prop.rs b/rust/automerge/src/query/prop.rs similarity index 100% rename from crates/automerge/src/query/prop.rs rename to rust/automerge/src/query/prop.rs diff --git a/crates/automerge/src/query/prop_at.rs b/rust/automerge/src/query/prop_at.rs similarity index 100% rename from crates/automerge/src/query/prop_at.rs rename to rust/automerge/src/query/prop_at.rs diff --git a/crates/automerge/src/query/seek_op.rs b/rust/automerge/src/query/seek_op.rs similarity index 100% rename from crates/automerge/src/query/seek_op.rs rename to rust/automerge/src/query/seek_op.rs diff --git a/crates/automerge/src/query/seek_op_with_patch.rs b/rust/automerge/src/query/seek_op_with_patch.rs similarity index 100% rename from crates/automerge/src/query/seek_op_with_patch.rs rename to rust/automerge/src/query/seek_op_with_patch.rs 
diff --git a/crates/automerge/src/sequence_tree.rs b/rust/automerge/src/sequence_tree.rs similarity index 100% rename from crates/automerge/src/sequence_tree.rs rename to rust/automerge/src/sequence_tree.rs diff --git a/crates/automerge/src/storage.rs b/rust/automerge/src/storage.rs similarity index 100% rename from crates/automerge/src/storage.rs rename to rust/automerge/src/storage.rs diff --git a/crates/automerge/src/storage/change.rs b/rust/automerge/src/storage/change.rs similarity index 100% rename from crates/automerge/src/storage/change.rs rename to rust/automerge/src/storage/change.rs diff --git a/crates/automerge/src/storage/change/change_actors.rs b/rust/automerge/src/storage/change/change_actors.rs similarity index 100% rename from crates/automerge/src/storage/change/change_actors.rs rename to rust/automerge/src/storage/change/change_actors.rs diff --git a/crates/automerge/src/storage/change/change_op_columns.rs b/rust/automerge/src/storage/change/change_op_columns.rs similarity index 100% rename from crates/automerge/src/storage/change/change_op_columns.rs rename to rust/automerge/src/storage/change/change_op_columns.rs diff --git a/crates/automerge/src/storage/change/compressed.rs b/rust/automerge/src/storage/change/compressed.rs similarity index 100% rename from crates/automerge/src/storage/change/compressed.rs rename to rust/automerge/src/storage/change/compressed.rs diff --git a/crates/automerge/src/storage/change/op_with_change_actors.rs b/rust/automerge/src/storage/change/op_with_change_actors.rs similarity index 100% rename from crates/automerge/src/storage/change/op_with_change_actors.rs rename to rust/automerge/src/storage/change/op_with_change_actors.rs diff --git a/crates/automerge/src/storage/chunk.rs b/rust/automerge/src/storage/chunk.rs similarity index 100% rename from crates/automerge/src/storage/chunk.rs rename to rust/automerge/src/storage/chunk.rs diff --git a/crates/automerge/src/storage/columns.rs 
b/rust/automerge/src/storage/columns.rs similarity index 100% rename from crates/automerge/src/storage/columns.rs rename to rust/automerge/src/storage/columns.rs diff --git a/crates/automerge/src/storage/columns/column.rs b/rust/automerge/src/storage/columns/column.rs similarity index 100% rename from crates/automerge/src/storage/columns/column.rs rename to rust/automerge/src/storage/columns/column.rs diff --git a/crates/automerge/src/storage/columns/column_builder.rs b/rust/automerge/src/storage/columns/column_builder.rs similarity index 100% rename from crates/automerge/src/storage/columns/column_builder.rs rename to rust/automerge/src/storage/columns/column_builder.rs diff --git a/crates/automerge/src/storage/columns/column_specification.rs b/rust/automerge/src/storage/columns/column_specification.rs similarity index 100% rename from crates/automerge/src/storage/columns/column_specification.rs rename to rust/automerge/src/storage/columns/column_specification.rs diff --git a/crates/automerge/src/storage/columns/raw_column.rs b/rust/automerge/src/storage/columns/raw_column.rs similarity index 100% rename from crates/automerge/src/storage/columns/raw_column.rs rename to rust/automerge/src/storage/columns/raw_column.rs diff --git a/crates/automerge/src/storage/convert.rs b/rust/automerge/src/storage/convert.rs similarity index 100% rename from crates/automerge/src/storage/convert.rs rename to rust/automerge/src/storage/convert.rs diff --git a/crates/automerge/src/storage/convert/op_as_changeop.rs b/rust/automerge/src/storage/convert/op_as_changeop.rs similarity index 100% rename from crates/automerge/src/storage/convert/op_as_changeop.rs rename to rust/automerge/src/storage/convert/op_as_changeop.rs diff --git a/crates/automerge/src/storage/convert/op_as_docop.rs b/rust/automerge/src/storage/convert/op_as_docop.rs similarity index 100% rename from crates/automerge/src/storage/convert/op_as_docop.rs rename to rust/automerge/src/storage/convert/op_as_docop.rs diff 
--git a/crates/automerge/src/storage/document.rs b/rust/automerge/src/storage/document.rs similarity index 100% rename from crates/automerge/src/storage/document.rs rename to rust/automerge/src/storage/document.rs diff --git a/crates/automerge/src/storage/document/compression.rs b/rust/automerge/src/storage/document/compression.rs similarity index 100% rename from crates/automerge/src/storage/document/compression.rs rename to rust/automerge/src/storage/document/compression.rs diff --git a/crates/automerge/src/storage/document/doc_change_columns.rs b/rust/automerge/src/storage/document/doc_change_columns.rs similarity index 100% rename from crates/automerge/src/storage/document/doc_change_columns.rs rename to rust/automerge/src/storage/document/doc_change_columns.rs diff --git a/crates/automerge/src/storage/document/doc_op_columns.rs b/rust/automerge/src/storage/document/doc_op_columns.rs similarity index 100% rename from crates/automerge/src/storage/document/doc_op_columns.rs rename to rust/automerge/src/storage/document/doc_op_columns.rs diff --git a/crates/automerge/src/storage/load.rs b/rust/automerge/src/storage/load.rs similarity index 100% rename from crates/automerge/src/storage/load.rs rename to rust/automerge/src/storage/load.rs diff --git a/crates/automerge/src/storage/load/change_collector.rs b/rust/automerge/src/storage/load/change_collector.rs similarity index 100% rename from crates/automerge/src/storage/load/change_collector.rs rename to rust/automerge/src/storage/load/change_collector.rs diff --git a/crates/automerge/src/storage/load/reconstruct_document.rs b/rust/automerge/src/storage/load/reconstruct_document.rs similarity index 100% rename from crates/automerge/src/storage/load/reconstruct_document.rs rename to rust/automerge/src/storage/load/reconstruct_document.rs diff --git a/crates/automerge/src/storage/parse.rs b/rust/automerge/src/storage/parse.rs similarity index 100% rename from crates/automerge/src/storage/parse.rs rename to 
rust/automerge/src/storage/parse.rs diff --git a/crates/automerge/src/storage/parse/leb128.rs b/rust/automerge/src/storage/parse/leb128.rs similarity index 100% rename from crates/automerge/src/storage/parse/leb128.rs rename to rust/automerge/src/storage/parse/leb128.rs diff --git a/crates/automerge/src/storage/save.rs b/rust/automerge/src/storage/save.rs similarity index 100% rename from crates/automerge/src/storage/save.rs rename to rust/automerge/src/storage/save.rs diff --git a/crates/automerge/src/storage/save/document.rs b/rust/automerge/src/storage/save/document.rs similarity index 100% rename from crates/automerge/src/storage/save/document.rs rename to rust/automerge/src/storage/save/document.rs diff --git a/crates/automerge/src/sync.rs b/rust/automerge/src/sync.rs similarity index 100% rename from crates/automerge/src/sync.rs rename to rust/automerge/src/sync.rs diff --git a/crates/automerge/src/sync/bloom.rs b/rust/automerge/src/sync/bloom.rs similarity index 100% rename from crates/automerge/src/sync/bloom.rs rename to rust/automerge/src/sync/bloom.rs diff --git a/crates/automerge/src/sync/state.rs b/rust/automerge/src/sync/state.rs similarity index 100% rename from crates/automerge/src/sync/state.rs rename to rust/automerge/src/sync/state.rs diff --git a/crates/automerge/src/transaction.rs b/rust/automerge/src/transaction.rs similarity index 100% rename from crates/automerge/src/transaction.rs rename to rust/automerge/src/transaction.rs diff --git a/crates/automerge/src/transaction/commit.rs b/rust/automerge/src/transaction/commit.rs similarity index 100% rename from crates/automerge/src/transaction/commit.rs rename to rust/automerge/src/transaction/commit.rs diff --git a/crates/automerge/src/transaction/inner.rs b/rust/automerge/src/transaction/inner.rs similarity index 100% rename from crates/automerge/src/transaction/inner.rs rename to rust/automerge/src/transaction/inner.rs diff --git a/crates/automerge/src/transaction/manual_transaction.rs 
b/rust/automerge/src/transaction/manual_transaction.rs similarity index 100% rename from crates/automerge/src/transaction/manual_transaction.rs rename to rust/automerge/src/transaction/manual_transaction.rs diff --git a/crates/automerge/src/transaction/observation.rs b/rust/automerge/src/transaction/observation.rs similarity index 100% rename from crates/automerge/src/transaction/observation.rs rename to rust/automerge/src/transaction/observation.rs diff --git a/crates/automerge/src/transaction/result.rs b/rust/automerge/src/transaction/result.rs similarity index 100% rename from crates/automerge/src/transaction/result.rs rename to rust/automerge/src/transaction/result.rs diff --git a/crates/automerge/src/transaction/transactable.rs b/rust/automerge/src/transaction/transactable.rs similarity index 100% rename from crates/automerge/src/transaction/transactable.rs rename to rust/automerge/src/transaction/transactable.rs diff --git a/crates/automerge/src/types.rs b/rust/automerge/src/types.rs similarity index 100% rename from crates/automerge/src/types.rs rename to rust/automerge/src/types.rs diff --git a/crates/automerge/src/types/opids.rs b/rust/automerge/src/types/opids.rs similarity index 100% rename from crates/automerge/src/types/opids.rs rename to rust/automerge/src/types/opids.rs diff --git a/crates/automerge/src/value.rs b/rust/automerge/src/value.rs similarity index 100% rename from crates/automerge/src/value.rs rename to rust/automerge/src/value.rs diff --git a/crates/automerge/src/values.rs b/rust/automerge/src/values.rs similarity index 100% rename from crates/automerge/src/values.rs rename to rust/automerge/src/values.rs diff --git a/crates/automerge/src/visualisation.rs b/rust/automerge/src/visualisation.rs similarity index 100% rename from crates/automerge/src/visualisation.rs rename to rust/automerge/src/visualisation.rs diff --git a/crates/automerge/tests/helpers/mod.rs b/rust/automerge/tests/helpers/mod.rs similarity index 100% rename from 
crates/automerge/tests/helpers/mod.rs rename to rust/automerge/tests/helpers/mod.rs diff --git a/crates/automerge/tests/test.rs b/rust/automerge/tests/test.rs similarity index 100% rename from crates/automerge/tests/test.rs rename to rust/automerge/tests/test.rs diff --git a/deny.toml b/rust/deny.toml similarity index 100% rename from deny.toml rename to rust/deny.toml diff --git a/crates/edit-trace/.gitignore b/rust/edit-trace/.gitignore similarity index 100% rename from crates/edit-trace/.gitignore rename to rust/edit-trace/.gitignore diff --git a/crates/edit-trace/Cargo.toml b/rust/edit-trace/Cargo.toml similarity index 100% rename from crates/edit-trace/Cargo.toml rename to rust/edit-trace/Cargo.toml diff --git a/crates/edit-trace/Makefile b/rust/edit-trace/Makefile similarity index 100% rename from crates/edit-trace/Makefile rename to rust/edit-trace/Makefile diff --git a/crates/edit-trace/README.md b/rust/edit-trace/README.md similarity index 100% rename from crates/edit-trace/README.md rename to rust/edit-trace/README.md diff --git a/crates/edit-trace/automerge-1.0.js b/rust/edit-trace/automerge-1.0.js similarity index 100% rename from crates/edit-trace/automerge-1.0.js rename to rust/edit-trace/automerge-1.0.js diff --git a/crates/edit-trace/automerge-js.js b/rust/edit-trace/automerge-js.js similarity index 100% rename from crates/edit-trace/automerge-js.js rename to rust/edit-trace/automerge-js.js diff --git a/crates/edit-trace/automerge-rs.js b/rust/edit-trace/automerge-rs.js similarity index 100% rename from crates/edit-trace/automerge-rs.js rename to rust/edit-trace/automerge-rs.js diff --git a/crates/edit-trace/automerge-wasm.js b/rust/edit-trace/automerge-wasm.js similarity index 100% rename from crates/edit-trace/automerge-wasm.js rename to rust/edit-trace/automerge-wasm.js diff --git a/crates/edit-trace/baseline.js b/rust/edit-trace/baseline.js similarity index 100% rename from crates/edit-trace/baseline.js rename to rust/edit-trace/baseline.js diff 
--git a/crates/edit-trace/benches/main.rs b/rust/edit-trace/benches/main.rs similarity index 100% rename from crates/edit-trace/benches/main.rs rename to rust/edit-trace/benches/main.rs diff --git a/crates/edit-trace/editing-trace.js b/rust/edit-trace/editing-trace.js similarity index 100% rename from crates/edit-trace/editing-trace.js rename to rust/edit-trace/editing-trace.js diff --git a/crates/edit-trace/edits.json b/rust/edit-trace/edits.json similarity index 100% rename from crates/edit-trace/edits.json rename to rust/edit-trace/edits.json diff --git a/crates/edit-trace/package.json b/rust/edit-trace/package.json similarity index 100% rename from crates/edit-trace/package.json rename to rust/edit-trace/package.json diff --git a/crates/edit-trace/src/main.rs b/rust/edit-trace/src/main.rs similarity index 100% rename from crates/edit-trace/src/main.rs rename to rust/edit-trace/src/main.rs diff --git a/scripts/ci/advisory b/scripts/ci/advisory index 07e8c72e..6da4a578 100755 --- a/scripts/ci/advisory +++ b/scripts/ci/advisory @@ -1,6 +1,7 @@ #!/usr/bin/env bash set -eoux pipefail +cd rust cargo deny --version cargo deny check advisories cargo deny check licenses diff --git a/scripts/ci/build-test b/scripts/ci/build-test index dbd89f5d..de592f7e 100755 --- a/scripts/ci/build-test +++ b/scripts/ci/build-test @@ -1,6 +1,7 @@ #!/usr/bin/env bash set -eoux pipefail +cd rust cargo build --workspace --all-features RUST_LOG=error cargo test --workspace --all-features diff --git a/scripts/ci/cmake-build b/scripts/ci/cmake-build index 1234993c..e36513a2 100755 --- a/scripts/ci/cmake-build +++ b/scripts/ci/cmake-build @@ -11,7 +11,7 @@ if [ "${LIB_TYPE,,}" == "shared" ]; then else SHARED_TOGGLE="OFF" fi -C_PROJECT=$THIS_SCRIPT/../../crates/automerge-c; +C_PROJECT=$THIS_SCRIPT/../../rust/automerge-c; mkdir -p $C_PROJECT/build; cd $C_PROJECT/build; cmake --log-level=ERROR -B . -S .. 
-DCMAKE_BUILD_TYPE=$BUILD_TYPE -DBUILD_SHARED_LIBS=$SHARED_TOGGLE; diff --git a/scripts/ci/cmake-docs b/scripts/ci/cmake-docs index 25ec7e10..f1dc1929 100755 --- a/scripts/ci/cmake-docs +++ b/scripts/ci/cmake-docs @@ -3,7 +3,7 @@ set -eoux pipefail mkdir -p crates/automerge-c/build -cd crates/automerge-c/build +cd rust/automerge-c/build cmake -B . -S .. -DBUILD_TESTING=OFF cmake --build . --target automerge_docs diff --git a/scripts/ci/fmt b/scripts/ci/fmt index d3d7e28c..27235f92 100755 --- a/scripts/ci/fmt +++ b/scripts/ci/fmt @@ -1,4 +1,5 @@ #!/usr/bin/env bash set -eoux pipefail +cd rust cargo fmt -- --check diff --git a/scripts/ci/js_tests b/scripts/ci/js_tests index ef169d0c..7455502a 100755 --- a/scripts/ci/js_tests +++ b/scripts/ci/js_tests @@ -1,7 +1,7 @@ set -e THIS_SCRIPT=$(dirname "$0"); -WASM_PROJECT=$THIS_SCRIPT/../../crates/automerge-wasm; +WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; JS_PROJECT=$THIS_SCRIPT/../../wrappers/javascript; E2E_PROJECT=$THIS_SCRIPT/../../wrappers/javascript/e2e; diff --git a/scripts/ci/lint b/scripts/ci/lint index 163b245d..15a0228d 100755 --- a/scripts/ci/lint +++ b/scripts/ci/lint @@ -1,6 +1,7 @@ #!/usr/bin/env bash set -eoux pipefail +cd rust # Force clippy to consider all local sources # https://github.com/rust-lang/rust-clippy/issues/4612 find . 
-name "*.rs" -not -path "./target/*" -exec touch "{}" + diff --git a/scripts/ci/rust-docs b/scripts/ci/rust-docs index 647880ce..bbbc4fe1 100755 --- a/scripts/ci/rust-docs +++ b/scripts/ci/rust-docs @@ -1,5 +1,6 @@ #!/usr/bin/env bash set -eoux pipefail +cd rust RUSTDOCFLAGS="-D rustdoc::broken-intra-doc-links -D warnings" \ cargo doc --no-deps --workspace --document-private-items diff --git a/scripts/ci/wasm_tests b/scripts/ci/wasm_tests index 51f4c4ab..2f273d99 100755 --- a/scripts/ci/wasm_tests +++ b/scripts/ci/wasm_tests @@ -1,5 +1,5 @@ THIS_SCRIPT=$(dirname "$0"); -WASM_PROJECT=$THIS_SCRIPT/../../crates/automerge-wasm; +WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; yarn --cwd $WASM_PROJECT install; yarn --cwd $WASM_PROJECT build; diff --git a/wrappers/javascript/e2e/index.ts b/wrappers/javascript/e2e/index.ts index 641ec2bd..3a81b509 100644 --- a/wrappers/javascript/e2e/index.ts +++ b/wrappers/javascript/e2e/index.ts @@ -9,7 +9,7 @@ import fetch from "node-fetch" const VERDACCIO_DB_PATH = path.normalize(`${__dirname}/verdacciodb`) const VERDACCIO_CONFIG_PATH = path.normalize(`${__dirname}/verdaccio.yaml`) -const AUTOMERGE_WASM_PATH = path.normalize(`${__dirname}/../../../crates/automerge-wasm`) +const AUTOMERGE_WASM_PATH = path.normalize(`${__dirname}/../../../rust/automerge-wasm`) const AUTOMERGE_JS_PATH = path.normalize(`${__dirname}/..`) const EXAMPLES_DIR = path.normalize(path.join(__dirname, "../", "examples")) From 8e131922e7b65794ec63312e5727974203cb055c Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 13 Oct 2022 22:05:50 +0100 Subject: [PATCH 604/730] Move wrappers/javascript -> javascript Continuing our theme of treating all languages equally, move wrappers/javascript to javascrpit. Automerge libraries for new languages should be built at this top level if possible. 
--- .../javascript => javascript}/.eslintignore | 0 .../javascript => javascript}/.eslintrc.cjs | 0 .../javascript => javascript}/.gitignore | 0 {wrappers/javascript => javascript}/LICENSE | 0 {wrappers/javascript => javascript}/README.md | 0 .../javascript => javascript}/config/cjs.json | 0 .../javascript => javascript}/config/mjs.json | 0 .../javascript => javascript}/e2e/.gitignore | 0 .../javascript => javascript}/e2e/README.md | 0 .../javascript => javascript}/e2e/index.ts | 2 +- .../e2e/package.json | 0 .../e2e/tsconfig.json | 0 .../e2e/verdaccio.yaml | 0 .../javascript => javascript}/e2e/yarn.lock | 0 .../examples/create-react-app/.gitignore | 0 .../examples/create-react-app/README.md | 0 .../examples/create-react-app/craco.config.js | 0 .../examples/create-react-app/package.json | 0 .../create-react-app/public/favicon.ico | Bin .../create-react-app/public/index.html | 0 .../create-react-app/public/logo192.png | Bin .../create-react-app/public/logo512.png | Bin .../create-react-app/public/manifest.json | 0 .../create-react-app/public/robots.txt | 0 .../examples/create-react-app/src/App.css | 0 .../examples/create-react-app/src/App.js | 0 .../examples/create-react-app/src/App.test.js | 0 .../examples/create-react-app/src/index.css | 0 .../examples/create-react-app/src/index.js | 0 .../examples/create-react-app/src/logo.svg | 0 .../create-react-app/src/reportWebVitals.js | 0 .../create-react-app/src/setupTests.js | 0 .../examples/create-react-app/yarn.lock | 9120 +++++++++++++++++ .../examples/vite/.gitignore | 0 .../examples/vite/README.md | 0 .../examples/vite/index.html | 0 .../examples/vite/main.ts | 0 .../examples/vite/package.json | 0 .../examples/vite/public/vite.svg | 0 .../examples/vite/src/counter.ts | 0 .../examples/vite/src/main.ts | 0 .../examples/vite/src/style.css | 0 .../examples/vite/src/typescript.svg | 0 .../examples/vite/src/vite-env.d.ts | 0 .../examples/vite/tsconfig.json | 0 .../examples/vite/vite.config.js | 0 
.../examples/webpack/.gitignore | 0 .../examples/webpack/README.md | 0 .../examples/webpack/package.json | 0 .../examples/webpack/public/index.html | 0 .../examples/webpack/src/index.js | 0 .../examples/webpack/webpack.config.js | 0 .../javascript => javascript}/package.json | 0 .../src/constants.ts | 0 .../javascript => javascript}/src/counter.ts | 0 .../javascript => javascript}/src/index.ts | 0 .../src/low_level.ts | 0 .../javascript => javascript}/src/numbers.ts | 0 .../javascript => javascript}/src/proxies.ts | 0 .../javascript => javascript}/src/text.ts | 0 .../javascript => javascript}/src/types.ts | 0 .../javascript => javascript}/src/uuid.ts | 0 .../test/basic_test.ts | 0 .../test/columnar_test.ts | 0 .../test/extra_api_tests.ts | 0 .../javascript => javascript}/test/helpers.ts | 0 .../test/legacy/columnar.js | 0 .../test/legacy/common.js | 0 .../test/legacy/encoding.js | 0 .../test/legacy/sync.js | 0 .../test/legacy_tests.ts | 0 .../test/sync_test.ts | 0 .../test/text_test.ts | 0 .../test/uuid_test.ts | 0 .../javascript => javascript}/tsconfig.json | 0 .../javascript => javascript}/tslint.json | 0 scripts/ci/js_tests | 6 +- 77 files changed, 9124 insertions(+), 4 deletions(-) rename {wrappers/javascript => javascript}/.eslintignore (100%) rename {wrappers/javascript => javascript}/.eslintrc.cjs (100%) rename {wrappers/javascript => javascript}/.gitignore (100%) rename {wrappers/javascript => javascript}/LICENSE (100%) rename {wrappers/javascript => javascript}/README.md (100%) rename {wrappers/javascript => javascript}/config/cjs.json (100%) rename {wrappers/javascript => javascript}/config/mjs.json (100%) rename {wrappers/javascript => javascript}/e2e/.gitignore (100%) rename {wrappers/javascript => javascript}/e2e/README.md (100%) rename {wrappers/javascript => javascript}/e2e/index.ts (99%) rename {wrappers/javascript => javascript}/e2e/package.json (100%) rename {wrappers/javascript => javascript}/e2e/tsconfig.json (100%) rename {wrappers/javascript 
=> javascript}/e2e/verdaccio.yaml (100%) rename {wrappers/javascript => javascript}/e2e/yarn.lock (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/.gitignore (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/README.md (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/craco.config.js (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/package.json (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/public/favicon.ico (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/public/index.html (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/public/logo192.png (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/public/logo512.png (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/public/manifest.json (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/public/robots.txt (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/src/App.css (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/src/App.js (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/src/App.test.js (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/src/index.css (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/src/index.js (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/src/logo.svg (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/src/reportWebVitals.js (100%) rename {wrappers/javascript => javascript}/examples/create-react-app/src/setupTests.js (100%) create mode 100644 javascript/examples/create-react-app/yarn.lock rename {wrappers/javascript => javascript}/examples/vite/.gitignore (100%) rename {wrappers/javascript => javascript}/examples/vite/README.md (100%) rename {wrappers/javascript 
=> javascript}/examples/vite/index.html (100%) rename {wrappers/javascript => javascript}/examples/vite/main.ts (100%) rename {wrappers/javascript => javascript}/examples/vite/package.json (100%) rename {wrappers/javascript => javascript}/examples/vite/public/vite.svg (100%) rename {wrappers/javascript => javascript}/examples/vite/src/counter.ts (100%) rename {wrappers/javascript => javascript}/examples/vite/src/main.ts (100%) rename {wrappers/javascript => javascript}/examples/vite/src/style.css (100%) rename {wrappers/javascript => javascript}/examples/vite/src/typescript.svg (100%) rename {wrappers/javascript => javascript}/examples/vite/src/vite-env.d.ts (100%) rename {wrappers/javascript => javascript}/examples/vite/tsconfig.json (100%) rename {wrappers/javascript => javascript}/examples/vite/vite.config.js (100%) rename {wrappers/javascript => javascript}/examples/webpack/.gitignore (100%) rename {wrappers/javascript => javascript}/examples/webpack/README.md (100%) rename {wrappers/javascript => javascript}/examples/webpack/package.json (100%) rename {wrappers/javascript => javascript}/examples/webpack/public/index.html (100%) rename {wrappers/javascript => javascript}/examples/webpack/src/index.js (100%) rename {wrappers/javascript => javascript}/examples/webpack/webpack.config.js (100%) rename {wrappers/javascript => javascript}/package.json (100%) rename {wrappers/javascript => javascript}/src/constants.ts (100%) rename {wrappers/javascript => javascript}/src/counter.ts (100%) rename {wrappers/javascript => javascript}/src/index.ts (100%) rename {wrappers/javascript => javascript}/src/low_level.ts (100%) rename {wrappers/javascript => javascript}/src/numbers.ts (100%) rename {wrappers/javascript => javascript}/src/proxies.ts (100%) rename {wrappers/javascript => javascript}/src/text.ts (100%) rename {wrappers/javascript => javascript}/src/types.ts (100%) rename {wrappers/javascript => javascript}/src/uuid.ts (100%) rename {wrappers/javascript => 
javascript}/test/basic_test.ts (100%) rename {wrappers/javascript => javascript}/test/columnar_test.ts (100%) rename {wrappers/javascript => javascript}/test/extra_api_tests.ts (100%) rename {wrappers/javascript => javascript}/test/helpers.ts (100%) rename {wrappers/javascript => javascript}/test/legacy/columnar.js (100%) rename {wrappers/javascript => javascript}/test/legacy/common.js (100%) rename {wrappers/javascript => javascript}/test/legacy/encoding.js (100%) rename {wrappers/javascript => javascript}/test/legacy/sync.js (100%) rename {wrappers/javascript => javascript}/test/legacy_tests.ts (100%) rename {wrappers/javascript => javascript}/test/sync_test.ts (100%) rename {wrappers/javascript => javascript}/test/text_test.ts (100%) rename {wrappers/javascript => javascript}/test/uuid_test.ts (100%) rename {wrappers/javascript => javascript}/tsconfig.json (100%) rename {wrappers/javascript => javascript}/tslint.json (100%) diff --git a/wrappers/javascript/.eslintignore b/javascript/.eslintignore similarity index 100% rename from wrappers/javascript/.eslintignore rename to javascript/.eslintignore diff --git a/wrappers/javascript/.eslintrc.cjs b/javascript/.eslintrc.cjs similarity index 100% rename from wrappers/javascript/.eslintrc.cjs rename to javascript/.eslintrc.cjs diff --git a/wrappers/javascript/.gitignore b/javascript/.gitignore similarity index 100% rename from wrappers/javascript/.gitignore rename to javascript/.gitignore diff --git a/wrappers/javascript/LICENSE b/javascript/LICENSE similarity index 100% rename from wrappers/javascript/LICENSE rename to javascript/LICENSE diff --git a/wrappers/javascript/README.md b/javascript/README.md similarity index 100% rename from wrappers/javascript/README.md rename to javascript/README.md diff --git a/wrappers/javascript/config/cjs.json b/javascript/config/cjs.json similarity index 100% rename from wrappers/javascript/config/cjs.json rename to javascript/config/cjs.json diff --git 
a/wrappers/javascript/config/mjs.json b/javascript/config/mjs.json similarity index 100% rename from wrappers/javascript/config/mjs.json rename to javascript/config/mjs.json diff --git a/wrappers/javascript/e2e/.gitignore b/javascript/e2e/.gitignore similarity index 100% rename from wrappers/javascript/e2e/.gitignore rename to javascript/e2e/.gitignore diff --git a/wrappers/javascript/e2e/README.md b/javascript/e2e/README.md similarity index 100% rename from wrappers/javascript/e2e/README.md rename to javascript/e2e/README.md diff --git a/wrappers/javascript/e2e/index.ts b/javascript/e2e/index.ts similarity index 99% rename from wrappers/javascript/e2e/index.ts rename to javascript/e2e/index.ts index 3a81b509..828c0635 100644 --- a/wrappers/javascript/e2e/index.ts +++ b/javascript/e2e/index.ts @@ -9,7 +9,7 @@ import fetch from "node-fetch" const VERDACCIO_DB_PATH = path.normalize(`${__dirname}/verdacciodb`) const VERDACCIO_CONFIG_PATH = path.normalize(`${__dirname}/verdaccio.yaml`) -const AUTOMERGE_WASM_PATH = path.normalize(`${__dirname}/../../../rust/automerge-wasm`) +const AUTOMERGE_WASM_PATH = path.normalize(`${__dirname}/../../rust/automerge-wasm`) const AUTOMERGE_JS_PATH = path.normalize(`${__dirname}/..`) const EXAMPLES_DIR = path.normalize(path.join(__dirname, "../", "examples")) diff --git a/wrappers/javascript/e2e/package.json b/javascript/e2e/package.json similarity index 100% rename from wrappers/javascript/e2e/package.json rename to javascript/e2e/package.json diff --git a/wrappers/javascript/e2e/tsconfig.json b/javascript/e2e/tsconfig.json similarity index 100% rename from wrappers/javascript/e2e/tsconfig.json rename to javascript/e2e/tsconfig.json diff --git a/wrappers/javascript/e2e/verdaccio.yaml b/javascript/e2e/verdaccio.yaml similarity index 100% rename from wrappers/javascript/e2e/verdaccio.yaml rename to javascript/e2e/verdaccio.yaml diff --git a/wrappers/javascript/e2e/yarn.lock b/javascript/e2e/yarn.lock similarity index 100% rename from 
wrappers/javascript/e2e/yarn.lock rename to javascript/e2e/yarn.lock diff --git a/wrappers/javascript/examples/create-react-app/.gitignore b/javascript/examples/create-react-app/.gitignore similarity index 100% rename from wrappers/javascript/examples/create-react-app/.gitignore rename to javascript/examples/create-react-app/.gitignore diff --git a/wrappers/javascript/examples/create-react-app/README.md b/javascript/examples/create-react-app/README.md similarity index 100% rename from wrappers/javascript/examples/create-react-app/README.md rename to javascript/examples/create-react-app/README.md diff --git a/wrappers/javascript/examples/create-react-app/craco.config.js b/javascript/examples/create-react-app/craco.config.js similarity index 100% rename from wrappers/javascript/examples/create-react-app/craco.config.js rename to javascript/examples/create-react-app/craco.config.js diff --git a/wrappers/javascript/examples/create-react-app/package.json b/javascript/examples/create-react-app/package.json similarity index 100% rename from wrappers/javascript/examples/create-react-app/package.json rename to javascript/examples/create-react-app/package.json diff --git a/wrappers/javascript/examples/create-react-app/public/favicon.ico b/javascript/examples/create-react-app/public/favicon.ico similarity index 100% rename from wrappers/javascript/examples/create-react-app/public/favicon.ico rename to javascript/examples/create-react-app/public/favicon.ico diff --git a/wrappers/javascript/examples/create-react-app/public/index.html b/javascript/examples/create-react-app/public/index.html similarity index 100% rename from wrappers/javascript/examples/create-react-app/public/index.html rename to javascript/examples/create-react-app/public/index.html diff --git a/wrappers/javascript/examples/create-react-app/public/logo192.png b/javascript/examples/create-react-app/public/logo192.png similarity index 100% rename from 
wrappers/javascript/examples/create-react-app/public/logo192.png rename to javascript/examples/create-react-app/public/logo192.png diff --git a/wrappers/javascript/examples/create-react-app/public/logo512.png b/javascript/examples/create-react-app/public/logo512.png similarity index 100% rename from wrappers/javascript/examples/create-react-app/public/logo512.png rename to javascript/examples/create-react-app/public/logo512.png diff --git a/wrappers/javascript/examples/create-react-app/public/manifest.json b/javascript/examples/create-react-app/public/manifest.json similarity index 100% rename from wrappers/javascript/examples/create-react-app/public/manifest.json rename to javascript/examples/create-react-app/public/manifest.json diff --git a/wrappers/javascript/examples/create-react-app/public/robots.txt b/javascript/examples/create-react-app/public/robots.txt similarity index 100% rename from wrappers/javascript/examples/create-react-app/public/robots.txt rename to javascript/examples/create-react-app/public/robots.txt diff --git a/wrappers/javascript/examples/create-react-app/src/App.css b/javascript/examples/create-react-app/src/App.css similarity index 100% rename from wrappers/javascript/examples/create-react-app/src/App.css rename to javascript/examples/create-react-app/src/App.css diff --git a/wrappers/javascript/examples/create-react-app/src/App.js b/javascript/examples/create-react-app/src/App.js similarity index 100% rename from wrappers/javascript/examples/create-react-app/src/App.js rename to javascript/examples/create-react-app/src/App.js diff --git a/wrappers/javascript/examples/create-react-app/src/App.test.js b/javascript/examples/create-react-app/src/App.test.js similarity index 100% rename from wrappers/javascript/examples/create-react-app/src/App.test.js rename to javascript/examples/create-react-app/src/App.test.js diff --git a/wrappers/javascript/examples/create-react-app/src/index.css b/javascript/examples/create-react-app/src/index.css 
similarity index 100% rename from wrappers/javascript/examples/create-react-app/src/index.css rename to javascript/examples/create-react-app/src/index.css diff --git a/wrappers/javascript/examples/create-react-app/src/index.js b/javascript/examples/create-react-app/src/index.js similarity index 100% rename from wrappers/javascript/examples/create-react-app/src/index.js rename to javascript/examples/create-react-app/src/index.js diff --git a/wrappers/javascript/examples/create-react-app/src/logo.svg b/javascript/examples/create-react-app/src/logo.svg similarity index 100% rename from wrappers/javascript/examples/create-react-app/src/logo.svg rename to javascript/examples/create-react-app/src/logo.svg diff --git a/wrappers/javascript/examples/create-react-app/src/reportWebVitals.js b/javascript/examples/create-react-app/src/reportWebVitals.js similarity index 100% rename from wrappers/javascript/examples/create-react-app/src/reportWebVitals.js rename to javascript/examples/create-react-app/src/reportWebVitals.js diff --git a/wrappers/javascript/examples/create-react-app/src/setupTests.js b/javascript/examples/create-react-app/src/setupTests.js similarity index 100% rename from wrappers/javascript/examples/create-react-app/src/setupTests.js rename to javascript/examples/create-react-app/src/setupTests.js diff --git a/javascript/examples/create-react-app/yarn.lock b/javascript/examples/create-react-app/yarn.lock new file mode 100644 index 00000000..90a1592b --- /dev/null +++ b/javascript/examples/create-react-app/yarn.lock @@ -0,0 +1,9120 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@adobe/css-tools@^4.0.1": + version "4.0.1" + resolved "http://localhost:4873/@adobe%2fcss-tools/-/css-tools-4.0.1.tgz#b38b444ad3aa5fedbb15f2f746dcd934226a12dd" + integrity sha512-+u76oB43nOHrF4DDWRLWDCtci7f3QJoEBigemIdIeTi1ODqjx6Tad9NCVnPRwewWlKkVab5PlK8DCtPTyX7S8g== + +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "http://localhost:4873/@ampproject%2fremapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@apideck/better-ajv-errors@^0.3.1": + version "0.3.6" + resolved "http://localhost:4873/@apideck%2fbetter-ajv-errors/-/better-ajv-errors-0.3.6.tgz#957d4c28e886a64a8141f7522783be65733ff097" + integrity sha512-P+ZygBLZtkp0qqOAJJVX4oX/sFo5JR3eBWwwuqHHhK0GIgQOKWrAfiAaWX0aArHkRWHMuggFEgAZNxVPwPZYaA== + dependencies: + json-schema "^0.4.0" + jsonpointer "^5.0.0" + leven "^3.1.0" + +"@automerge/automerge-wasm@0.1.9": + version "0.1.9" + resolved "http://localhost:4873/@automerge%2fautomerge-wasm/-/automerge-wasm-0.1.9.tgz#b2def5e8b643f1802bc696843b7755dc444dc2eb" + integrity sha512-S+sjJUJ3aPn2F37vKYAzKxz8CDgbHpOOGVjKSgkLjkAqe1pQ+wp4BpiELXafX73w8DVIrGx1zzru4w3t+Eo8gw== + +"@automerge/automerge@2.0.0-alpha.4": + version "2.0.0-alpha.4" + resolved "http://localhost:4873/@automerge%2fautomerge/-/automerge-2.0.0-alpha.4.tgz#df406f5364960a4d21040044da55ebd47406ea3a" + integrity sha512-PVRD1dmLy0U4GttyMvlWr99wyr6xvskJbOkxJDHnp+W2VAFfcqa4QKouaFbJ4W3iIsYX8DfQJ+uhRxa6UnvkHg== + dependencies: + "@automerge/automerge-wasm" "0.1.9" + uuid "^8.3" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.8.3": + version "7.18.6" + resolved 
"http://localhost:4873/@babel%2fcode-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" + integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== + dependencies: + "@babel/highlight" "^7.18.6" + +"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.18.8", "@babel/compat-data@^7.19.3": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fcompat-data/-/compat-data-7.19.3.tgz#707b939793f867f5a73b2666e6d9a3396eb03151" + integrity sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw== + +"@babel/core@^7.1.0", "@babel/core@^7.11.1", "@babel/core@^7.12.3", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fcore/-/core-7.19.3.tgz#2519f62a51458f43b682d61583c3810e7dcee64c" + integrity sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.3" + "@babel/helper-compilation-targets" "^7.19.3" + "@babel/helper-module-transforms" "^7.19.0" + "@babel/helpers" "^7.19.0" + "@babel/parser" "^7.19.3" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.3" + "@babel/types" "^7.19.3" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/eslint-parser@^7.16.3": + version "7.19.1" + resolved "http://localhost:4873/@babel%2feslint-parser/-/eslint-parser-7.19.1.tgz#4f68f6b0825489e00a24b41b6a1ae35414ecd2f4" + integrity sha512-AqNf2QWt1rtu2/1rLswy6CDP7H9Oh3mMhk177Y67Rg8d7RD9WfOLLv8CGn6tisFvS2htm86yIe1yLF6I1UDaGQ== + dependencies: + "@nicolo-ribaudo/eslint-scope-5-internals" "5.1.1-v1" + eslint-visitor-keys "^2.1.0" + semver "^6.3.0" + +"@babel/generator@^7.19.3", "@babel/generator@^7.7.2": + version "7.19.3" + resolved 
"http://localhost:4873/@babel%2fgenerator/-/generator-7.19.3.tgz#d7f4d1300485b4547cb6f94b27d10d237b42bf59" + integrity sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ== + dependencies: + "@babel/types" "^7.19.3" + "@jridgewell/gen-mapping" "^0.3.2" + jsesc "^2.5.1" + +"@babel/helper-annotate-as-pure@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb" + integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-builder-binary-assignment-operator-visitor@^7.18.6": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fhelper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz#acd4edfd7a566d1d51ea975dff38fd52906981bb" + integrity sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw== + dependencies: + "@babel/helper-explode-assignable-expression" "^7.18.6" + "@babel/types" "^7.18.9" + +"@babel/helper-compilation-targets@^7.17.7", "@babel/helper-compilation-targets@^7.18.9", "@babel/helper-compilation-targets@^7.19.0", "@babel/helper-compilation-targets@^7.19.3": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fhelper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz#a10a04588125675d7c7ae299af86fa1b2ee038ca" + integrity sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg== + dependencies: + "@babel/compat-data" "^7.19.3" + "@babel/helper-validator-option" "^7.18.6" + browserslist "^4.21.3" + semver "^6.3.0" + +"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.19.0": + version "7.19.0" + resolved 
"http://localhost:4873/@babel%2fhelper-create-class-features-plugin/-/helper-create-class-features-plugin-7.19.0.tgz#bfd6904620df4e46470bae4850d66be1054c404b" + integrity sha512-NRz8DwF4jT3UfrmUoZjd0Uph9HQnP30t7Ash+weACcyNkiYTywpIjDBgReJMKgr+n86sn2nPVVmJ28Dm053Kqw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-member-expression-to-functions" "^7.18.9" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-replace-supers" "^7.18.9" + "@babel/helper-split-export-declaration" "^7.18.6" + +"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.19.0.tgz#7976aca61c0984202baca73d84e2337a5424a41b" + integrity sha512-htnV+mHX32DF81amCDrwIDr8nrp1PTm+3wfBN9/v8QJOLEioOCOG7qNyq0nHeFiWbT3Eb7gsPwEmV64UCQ1jzw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + regexpu-core "^5.1.0" + +"@babel/helper-define-polyfill-provider@^0.3.3": + version "0.3.3" + resolved "http://localhost:4873/@babel%2fhelper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.3.tgz#8612e55be5d51f0cd1f36b4a5a83924e89884b7a" + integrity sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww== + dependencies: + "@babel/helper-compilation-targets" "^7.17.7" + "@babel/helper-plugin-utils" "^7.16.7" + debug "^4.1.1" + lodash.debounce "^4.0.8" + resolve "^1.14.2" + semver "^6.1.2" + +"@babel/helper-environment-visitor@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fhelper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" + integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== + 
+"@babel/helper-explode-assignable-expression@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz#41f8228ef0a6f1a036b8dfdfec7ce94f9a6bc096" + integrity sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-function-name@^7.18.9", "@babel/helper-function-name@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" + integrity sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== + dependencies: + "@babel/template" "^7.18.10" + "@babel/types" "^7.19.0" + +"@babel/helper-hoist-variables@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" + integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-member-expression-to-functions@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fhelper-member-expression-to-functions/-/helper-member-expression-to-functions-7.18.9.tgz#1531661e8375af843ad37ac692c132841e2fd815" + integrity sha512-RxifAh2ZoVU67PyKIO4AMi1wTenGfMR/O/ae0CCRqwgBAt5v7xjdtRw7UoSbsreKrQn5t7r89eruK/9JjYHuDg== + dependencies: + "@babel/types" "^7.18.9" + +"@babel/helper-module-imports@^7.10.4", "@babel/helper-module-imports@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" + integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== + dependencies: + "@babel/types" "^7.18.6" + 
+"@babel/helper-module-transforms@^7.18.6", "@babel/helper-module-transforms@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-module-transforms/-/helper-module-transforms-7.19.0.tgz#309b230f04e22c58c6a2c0c0c7e50b216d350c30" + integrity sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-simple-access" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-validator-identifier" "^7.18.6" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/helper-optimise-call-expression@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz#9369aa943ee7da47edab2cb4e838acf09d290ffe" + integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.19.0", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-plugin-utils/-/helper-plugin-utils-7.19.0.tgz#4796bb14961521f0f8715990bee2fb6e51ce21bf" + integrity sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw== + +"@babel/helper-remap-async-to-generator@^7.18.6", "@babel/helper-remap-async-to-generator@^7.18.9": + version "7.18.9" + resolved 
"http://localhost:4873/@babel%2fhelper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz#997458a0e3357080e54e1d79ec347f8a8cd28519" + integrity sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-wrap-function" "^7.18.9" + "@babel/types" "^7.18.9" + +"@babel/helper-replace-supers@^7.18.6", "@babel/helper-replace-supers@^7.18.9", "@babel/helper-replace-supers@^7.19.1": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fhelper-replace-supers/-/helper-replace-supers-7.19.1.tgz#e1592a9b4b368aa6bdb8784a711e0bcbf0612b78" + integrity sha512-T7ahH7wV0Hfs46SFh5Jz3s0B6+o8g3c+7TMxu7xKfmHikg7EAZ3I2Qk9LFhjxXq8sL7UkP5JflezNwoZa8WvWw== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-member-expression-to-functions" "^7.18.9" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/traverse" "^7.19.1" + "@babel/types" "^7.19.0" + +"@babel/helper-simple-access@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-simple-access/-/helper-simple-access-7.18.6.tgz#d6d8f51f4ac2978068df934b569f08f29788c7ea" + integrity sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-skip-transparent-expression-wrappers@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fhelper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.18.9.tgz#778d87b3a758d90b471e7b9918f34a9a02eb5818" + integrity sha512-imytd2gHi3cJPsybLRbmFrF7u5BIEuI2cNheyKi3/iOBC63kNn3q8Crn2xVuESli0aM4KYsyEqKyS7lFL8YVtw== + dependencies: + "@babel/types" "^7.18.9" + +"@babel/helper-split-export-declaration@^7.18.6": + version "7.18.6" + resolved 
"http://localhost:4873/@babel%2fhelper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" + integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-string-parser@^7.18.10": + version "7.18.10" + resolved "http://localhost:4873/@babel%2fhelper-string-parser/-/helper-string-parser-7.18.10.tgz#181f22d28ebe1b3857fa575f5c290b1aaf659b56" + integrity sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw== + +"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fhelper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" + integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== + +"@babel/helper-validator-option@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" + integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== + +"@babel/helper-wrap-function@^7.18.9": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-wrap-function/-/helper-wrap-function-7.19.0.tgz#89f18335cff1152373222f76a4b37799636ae8b1" + integrity sha512-txX8aN8CZyYGTwcLhlk87KRqncAzhh5TpQamZUa0/u3an36NtDpUP6bQgBCBcLeBs09R/OwQu3OjK0k/HwfNDg== + dependencies: + "@babel/helper-function-name" "^7.19.0" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/helpers@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelpers/-/helpers-7.19.0.tgz#f30534657faf246ae96551d88dd31e9d1fa1fc18" + integrity 
sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg== + dependencies: + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/highlight@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhighlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" + integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== + dependencies: + "@babel/helper-validator-identifier" "^7.18.6" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.18.10", "@babel/parser@^7.19.3": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fparser/-/parser-7.19.3.tgz#8dd36d17c53ff347f9e55c328710321b49479a9a" + integrity sha512-pJ9xOlNWHiy9+FuFP09DEAFbAn4JskgRsVcc169w2xRBC3FRGuQEwjeIMMND9L2zc0iEhO/tGv4Zq+km+hxNpQ== + +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz#da5b8f9a580acdfbe53494dba45ea389fb09a4d2" + integrity sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.18.9.tgz#a11af19aa373d68d561f08e0a57242350ed0ec50" + integrity sha512-AHrP9jadvH7qlOj6PINbgSuphjQUAK7AOT7DPjBo9EHoLhQTnnK5u45e1Hd4DbSQEO9nqPWtQ89r+XEOWFScKg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + 
"@babel/plugin-proposal-optional-chaining" "^7.18.9" + +"@babel/plugin-proposal-async-generator-functions@^7.19.1": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fplugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.19.1.tgz#34f6f5174b688529342288cd264f80c9ea9fb4a7" + integrity sha512-0yu8vNATgLy4ivqMNBIwb1HebCelqN7YX8SL3FDXORv/RqT0zEEWUCH4GH44JsSrvCu6GqnAdR5EBFAPeNBB4Q== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-remap-async-to-generator" "^7.18.9" + "@babel/plugin-syntax-async-generators" "^7.8.4" + +"@babel/plugin-proposal-class-properties@^7.16.0", "@babel/plugin-proposal-class-properties@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" + integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-class-static-block@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.6.tgz#8aa81d403ab72d3962fc06c26e222dacfc9b9020" + integrity sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + +"@babel/plugin-proposal-decorators@^7.16.4": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fplugin-proposal-decorators/-/plugin-proposal-decorators-7.19.3.tgz#c1977e4902a18cdf9051bf7bf08d97db2fd8b110" + integrity sha512-MbgXtNXqo7RTKYIXVchVJGPvaVufQH3pxvQyfbGvNw1DObIhph+PesYXJTcd8J4DdWibvf6Z2eanOyItX8WnJg== + 
dependencies: + "@babel/helper-create-class-features-plugin" "^7.19.0" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-replace-supers" "^7.19.1" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/plugin-syntax-decorators" "^7.19.0" + +"@babel/plugin-proposal-dynamic-import@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz#72bcf8d408799f547d759298c3c27c7e7faa4d94" + integrity sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + +"@babel/plugin-proposal-export-namespace-from@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz#5f7313ab348cdb19d590145f9247540e94761203" + integrity sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + +"@babel/plugin-proposal-json-strings@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz#7e8788c1811c393aff762817e7dbf1ebd0c05f0b" + integrity sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + +"@babel/plugin-proposal-logical-assignment-operators@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.18.9.tgz#8148cbb350483bf6220af06fa6db3690e14b2e23" + integrity sha512-128YbMpjCrP35IOExw2Fq+x55LMP42DzhOhX2aNNIdI9avSWl2PI0yuBWarr3RYpZBSPtabfadkH2yeRiMD61Q== + 
dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + +"@babel/plugin-proposal-nullish-coalescing-operator@^7.16.0", "@babel/plugin-proposal-nullish-coalescing-operator@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1" + integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + +"@babel/plugin-proposal-numeric-separator@^7.16.0", "@babel/plugin-proposal-numeric-separator@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75" + integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + +"@babel/plugin-proposal-object-rest-spread@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.18.9.tgz#f9434f6beb2c8cae9dfcf97d2a5941bbbf9ad4e7" + integrity sha512-kDDHQ5rflIeY5xl69CEqGEZ0KY369ehsCIEbTGb4siHG5BE9sga/T0r0OUwyZNLMmZE79E1kbsqAjwFCW4ds6Q== + dependencies: + "@babel/compat-data" "^7.18.8" + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.18.8" + +"@babel/plugin-proposal-optional-catch-binding@^7.18.6": + version "7.18.6" + resolved 
"http://localhost:4873/@babel%2fplugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz#f9400d0e6a3ea93ba9ef70b09e72dd6da638a2cb" + integrity sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + +"@babel/plugin-proposal-optional-chaining@^7.16.0", "@babel/plugin-proposal-optional-chaining@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.18.9.tgz#e8e8fe0723f2563960e4bf5e9690933691915993" + integrity sha512-v5nwt4IqBXihxGsW2QmCWMDS3B3bzGIk/EQVZz2ei7f3NJl8NzAJVvUmpDW5q1CRNY+Beb/k58UAH1Km1N411w== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + +"@babel/plugin-proposal-private-methods@^7.16.0", "@babel/plugin-proposal-private-methods@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz#5209de7d213457548a98436fa2882f52f4be6bea" + integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-private-property-in-object@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.18.6.tgz#a64137b232f0aca3733a67eb1a144c192389c503" + integrity sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" 
"^7.18.6" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + +"@babel/plugin-proposal-unicode-property-regex@^7.18.6", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz#af613d2cd5e643643b65cded64207b15c85cb78e" + integrity sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "http://localhost:4873/@babel%2fplugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-bigint@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3": + version "7.12.13" + resolved "http://localhost:4873/@babel%2fplugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-class-static-block@^7.14.5": + version "7.14.5" + resolved 
"http://localhost:4873/@babel%2fplugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" + integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-decorators@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-syntax-decorators/-/plugin-syntax-decorators-7.19.0.tgz#5f13d1d8fce96951bea01a10424463c9a5b3a599" + integrity sha512-xaBZUEDntt4faL1yN8oIFlhfXeQAWJW7CLKYsHTUqriCUbj8xOra8bfxxKGi/UwExPFBuPdH4XfHc9rGQhrVkQ== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-syntax-dynamic-import@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" + integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-export-namespace-from@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" + integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.3" + +"@babel/plugin-syntax-flow@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-syntax-flow/-/plugin-syntax-flow-7.18.6.tgz#774d825256f2379d06139be0c723c4dd444f3ca1" + integrity sha512-LUbR+KNTBWCUAqRG9ex5Gnzu2IOkt8jRJbHHXFT9q+L9zm7M/QQbEqXyw1n1pohYvOyWC8CjeyjrSaIwiYjK7A== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-import-assertions@^7.18.6": + version "7.18.6" + resolved 
"http://localhost:4873/@babel%2fplugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.18.6.tgz#cd6190500a4fa2fe31990a963ffab4b63e4505e4" + integrity sha512-/DU3RXad9+bZwrgWJQKbr39gYbJpLJHezqEzRzi/BHRlJ9zsQb4CK2CA/5apllXNomwA1qHwzvHl+AdEmC5krQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-import-meta@^7.8.3": + version "7.10.4" + resolved "http://localhost:4873/@babel%2fplugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz#a8feef63b010150abd97f1649ec296e849943ca0" + integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": + version "7.10.4" + resolved "http://localhost:4873/@babel%2fplugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version 
"7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3": + version "7.10.4" + resolved "http://localhost:4873/@babel%2fplugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" 
"^7.8.0" + +"@babel/plugin-syntax-private-property-in-object@^7.14.5": + version "7.14.5" + resolved "http://localhost:4873/@babel%2fplugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" + integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-top-level-await@^7.14.5", "@babel/plugin-syntax-top-level-await@^7.8.3": + version "7.14.5" + resolved "http://localhost:4873/@babel%2fplugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-typescript@^7.18.6", "@babel/plugin-syntax-typescript@^7.7.2": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz#1c09cd25795c7c2b8a4ba9ae49394576d4133285" + integrity sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-arrow-functions@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.18.6.tgz#19063fcf8771ec7b31d742339dac62433d0611fe" + integrity sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-async-to-generator@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.18.6.tgz#ccda3d1ab9d5ced5265fdb13f1882d5476c71615" + integrity 
sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-remap-async-to-generator" "^7.18.6" + +"@babel/plugin-transform-block-scoped-functions@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz#9187bf4ba302635b9d70d986ad70f038726216a8" + integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-block-scoping@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-block-scoping/-/plugin-transform-block-scoping-7.18.9.tgz#f9b7e018ac3f373c81452d6ada8bd5a18928926d" + integrity sha512-5sDIJRV1KtQVEbt/EIBwGy4T01uYIo4KRB3VUqzkhrAIOGx7AoctL9+Ux88btY0zXdDyPJ9mW+bg+v+XEkGmtw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-classes@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-transform-classes/-/plugin-transform-classes-7.19.0.tgz#0e61ec257fba409c41372175e7c1e606dc79bb20" + integrity sha512-YfeEE9kCjqTS9IitkgfJuxjcEtLUHMqa8yUJ6zdz8vR7hKuo6mOy2C05P0F1tdMmDCeuyidKnlrw/iTppHcr2A== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-compilation-targets" "^7.19.0" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-replace-supers" "^7.18.9" + "@babel/helper-split-export-declaration" "^7.18.6" + globals "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.18.9": + version "7.18.9" + resolved 
"http://localhost:4873/@babel%2fplugin-transform-computed-properties/-/plugin-transform-computed-properties-7.18.9.tgz#2357a8224d402dad623caf6259b611e56aec746e" + integrity sha512-+i0ZU1bCDymKakLxn5srGHrsAPRELC2WIbzwjLhHW9SIE1cPYkLCL0NlnXMZaM1vhfgA2+M7hySk42VBvrkBRw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-destructuring@^7.18.13": + version "7.18.13" + resolved "http://localhost:4873/@babel%2fplugin-transform-destructuring/-/plugin-transform-destructuring-7.18.13.tgz#9e03bc4a94475d62b7f4114938e6c5c33372cbf5" + integrity sha512-TodpQ29XekIsex2A+YJPj5ax2plkGa8YYY6mFjCohk/IG9IY42Rtuj1FuDeemfg2ipxIFLzPeA83SIBnlhSIow== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-dotall-regex@^7.18.6", "@babel/plugin-transform-dotall-regex@^7.4.4": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz#b286b3e7aae6c7b861e45bed0a2fafd6b1a4fef8" + integrity sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-duplicate-keys@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz#687f15ee3cdad6d85191eb2a372c4528eaa0ae0e" + integrity sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-exponentiation-operator@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz#421c705f4521888c65e91fdd1af951bfefd4dacd" + integrity sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw== + 
dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-flow-strip-types@^7.16.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.19.0.tgz#e9e8606633287488216028719638cbbb2f2dde8f" + integrity sha512-sgeMlNaQVbCSpgLSKP4ZZKfsJVnFnNQlUSk6gPYzR/q7tzCgQF2t8RBKAP6cKJeZdveei7Q7Jm527xepI8lNLg== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/plugin-syntax-flow" "^7.18.6" + +"@babel/plugin-transform-for-of@^7.18.8": + version "7.18.8" + resolved "http://localhost:4873/@babel%2fplugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz#6ef8a50b244eb6a0bdbad0c7c61877e4e30097c1" + integrity sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-function-name@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz#cc354f8234e62968946c61a46d6365440fc764e0" + integrity sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ== + dependencies: + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-function-name" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-literals@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz#72796fdbef80e56fba3c6a699d54f0de557444bc" + integrity sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-member-expression-literals@^7.18.6": + version "7.18.6" + resolved 
"http://localhost:4873/@babel%2fplugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz#ac9fdc1a118620ac49b7e7a5d2dc177a1bfee88e" + integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-modules-amd@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-modules-amd/-/plugin-transform-modules-amd-7.18.6.tgz#8c91f8c5115d2202f277549848874027d7172d21" + integrity sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-commonjs@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.18.6.tgz#afd243afba166cca69892e24a8fd8c9f2ca87883" + integrity sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-simple-access" "^7.18.6" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-systemjs@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.19.0.tgz#5f20b471284430f02d9c5059d9b9a16d4b085a1f" + integrity sha512-x9aiR0WXAWmOWsqcsnrzGR+ieaTMVyGyffPVA7F8cXAGt/UxefYv6uSHZLkAFChN5M5Iy1+wjE+xJuPt22H39A== + dependencies: + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-module-transforms" "^7.19.0" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-validator-identifier" "^7.18.6" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-umd@^7.18.6": + 
version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz#81d3832d6034b75b54e62821ba58f28ed0aab4b9" + integrity sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-named-capturing-groups-regex@^7.19.1": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fplugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.19.1.tgz#ec7455bab6cd8fb05c525a94876f435a48128888" + integrity sha512-oWk9l9WItWBQYS4FgXD4Uyy5kq898lvkXpXQxoJEY1RnvPk4R/Dvu2ebXU9q8lP+rlMwUQTFf2Ok6d78ODa0kw== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.19.0" + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-transform-new-target@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz#d128f376ae200477f37c4ddfcc722a8a1b3246a8" + integrity sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-object-super@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz#fb3c6ccdd15939b6ff7939944b51971ddc35912c" + integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-replace-supers" "^7.18.6" + +"@babel/plugin-transform-parameters@^7.18.8": + version "7.18.8" + resolved "http://localhost:4873/@babel%2fplugin-transform-parameters/-/plugin-transform-parameters-7.18.8.tgz#ee9f1a0ce6d78af58d0956a9378ea3427cccb48a" + integrity 
sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-property-literals@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz#e22498903a483448e94e032e9bbb9c5ccbfc93a3" + integrity sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-constant-elements@^7.12.1": + version "7.18.12" + resolved "http://localhost:4873/@babel%2fplugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.18.12.tgz#edf3bec47eb98f14e84fa0af137fcc6aad8e0443" + integrity sha512-Q99U9/ttiu+LMnRU8psd23HhvwXmKWDQIpocm0JKaICcZHnw+mdQbHm6xnSy7dOl8I5PELakYtNBubNQlBXbZw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-react-display-name@^7.16.0", "@babel/plugin-transform-react-display-name@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz#8b1125f919ef36ebdfff061d664e266c666b9415" + integrity sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-jsx-development@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz#dbe5c972811e49c7405b630e4d0d2e1380c0ddc5" + integrity sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA== + dependencies: + "@babel/plugin-transform-react-jsx" "^7.18.6" + +"@babel/plugin-transform-react-jsx@^7.18.6": + version "7.19.0" + resolved 
"http://localhost:4873/@babel%2fplugin-transform-react-jsx/-/plugin-transform-react-jsx-7.19.0.tgz#b3cbb7c3a00b92ec8ae1027910e331ba5c500eb9" + integrity sha512-UVEvX3tXie3Szm3emi1+G63jyw1w5IcMY0FSKM+CRnKRI5Mr1YbCNgsSTwoTwKphQEG9P+QqmuRFneJPZuHNhg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/plugin-syntax-jsx" "^7.18.6" + "@babel/types" "^7.19.0" + +"@babel/plugin-transform-react-pure-annotations@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz#561af267f19f3e5d59291f9950fd7b9663d0d844" + integrity sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-regenerator@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-regenerator/-/plugin-transform-regenerator-7.18.6.tgz#585c66cb84d4b4bf72519a34cfce761b8676ca73" + integrity sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + regenerator-transform "^0.15.0" + +"@babel/plugin-transform-reserved-words@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz#b1abd8ebf8edaa5f7fe6bbb8d2133d23b6a6f76a" + integrity sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-runtime@^7.16.4": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fplugin-transform-runtime/-/plugin-transform-runtime-7.19.1.tgz#a3df2d7312eea624c7889a2dcd37fd1dfd25b2c6" + integrity 
sha512-2nJjTUFIzBMP/f/miLxEK9vxwW/KUXsdvN4sR//TmuDhe6yU2h57WmIOE12Gng3MDP/xpjUV/ToZRdcf8Yj4fA== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.19.0" + babel-plugin-polyfill-corejs2 "^0.3.3" + babel-plugin-polyfill-corejs3 "^0.6.0" + babel-plugin-polyfill-regenerator "^0.4.1" + semver "^6.3.0" + +"@babel/plugin-transform-shorthand-properties@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz#6d6df7983d67b195289be24909e3f12a8f664dc9" + integrity sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-spread@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-transform-spread/-/plugin-transform-spread-7.19.0.tgz#dd60b4620c2fec806d60cfaae364ec2188d593b6" + integrity sha512-RsuMk7j6n+r752EtzyScnWkQyuJdli6LdO5Klv8Yx0OfPVTcQkIUfS8clx5e9yHXzlnhOZF3CbQ8C2uP5j074w== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + +"@babel/plugin-transform-sticky-regex@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz#c6706eb2b1524028e317720339583ad0f444adcc" + integrity sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-template-literals@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz#04ec6f10acdaa81846689d63fae117dd9c243a5e" + integrity sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" 
+ +"@babel/plugin-transform-typeof-symbol@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz#c8cea68263e45addcd6afc9091429f80925762c0" + integrity sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-typescript@^7.18.6": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fplugin-transform-typescript/-/plugin-transform-typescript-7.19.3.tgz#4f1db1e0fe278b42ddbc19ec2f6cd2f8262e35d6" + integrity sha512-z6fnuK9ve9u/0X0rRvI9MY0xg+DOUaABDYOe+/SQTxtlptaBB/V9JIUxJn6xp3lMBeb9qe8xSFmHU35oZDXD+w== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.19.0" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/plugin-syntax-typescript" "^7.18.6" + +"@babel/plugin-transform-unicode-escapes@^7.18.10": + version "7.18.10" + resolved "http://localhost:4873/@babel%2fplugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz#1ecfb0eda83d09bbcb77c09970c2dd55832aa246" + integrity sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-unicode-regex@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz#194317225d8c201bbae103364ffe9e2cea36cdca" + integrity sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/preset-env@^7.11.0", "@babel/preset-env@^7.12.1", "@babel/preset-env@^7.16.4": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fpreset-env/-/preset-env-7.19.3.tgz#52cd19abaecb3f176a4ff9cc5e15b7bf06bec754" + integrity 
sha512-ziye1OTc9dGFOAXSWKUqQblYHNlBOaDl8wzqf2iKXJAltYiR3hKHUKmkt+S9PppW7RQpq4fFCrwwpIDj/f5P4w== + dependencies: + "@babel/compat-data" "^7.19.3" + "@babel/helper-compilation-targets" "^7.19.3" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-async-generator-functions" "^7.19.1" + "@babel/plugin-proposal-class-properties" "^7.18.6" + "@babel/plugin-proposal-class-static-block" "^7.18.6" + "@babel/plugin-proposal-dynamic-import" "^7.18.6" + "@babel/plugin-proposal-export-namespace-from" "^7.18.9" + "@babel/plugin-proposal-json-strings" "^7.18.6" + "@babel/plugin-proposal-logical-assignment-operators" "^7.18.9" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6" + "@babel/plugin-proposal-numeric-separator" "^7.18.6" + "@babel/plugin-proposal-object-rest-spread" "^7.18.9" + "@babel/plugin-proposal-optional-catch-binding" "^7.18.6" + "@babel/plugin-proposal-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-private-methods" "^7.18.6" + "@babel/plugin-proposal-private-property-in-object" "^7.18.6" + "@babel/plugin-proposal-unicode-property-regex" "^7.18.6" + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-class-properties" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-import-assertions" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + 
"@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" + "@babel/plugin-transform-arrow-functions" "^7.18.6" + "@babel/plugin-transform-async-to-generator" "^7.18.6" + "@babel/plugin-transform-block-scoped-functions" "^7.18.6" + "@babel/plugin-transform-block-scoping" "^7.18.9" + "@babel/plugin-transform-classes" "^7.19.0" + "@babel/plugin-transform-computed-properties" "^7.18.9" + "@babel/plugin-transform-destructuring" "^7.18.13" + "@babel/plugin-transform-dotall-regex" "^7.18.6" + "@babel/plugin-transform-duplicate-keys" "^7.18.9" + "@babel/plugin-transform-exponentiation-operator" "^7.18.6" + "@babel/plugin-transform-for-of" "^7.18.8" + "@babel/plugin-transform-function-name" "^7.18.9" + "@babel/plugin-transform-literals" "^7.18.9" + "@babel/plugin-transform-member-expression-literals" "^7.18.6" + "@babel/plugin-transform-modules-amd" "^7.18.6" + "@babel/plugin-transform-modules-commonjs" "^7.18.6" + "@babel/plugin-transform-modules-systemjs" "^7.19.0" + "@babel/plugin-transform-modules-umd" "^7.18.6" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.19.1" + "@babel/plugin-transform-new-target" "^7.18.6" + "@babel/plugin-transform-object-super" "^7.18.6" + "@babel/plugin-transform-parameters" "^7.18.8" + "@babel/plugin-transform-property-literals" "^7.18.6" + "@babel/plugin-transform-regenerator" "^7.18.6" + "@babel/plugin-transform-reserved-words" "^7.18.6" + "@babel/plugin-transform-shorthand-properties" "^7.18.6" + "@babel/plugin-transform-spread" "^7.19.0" + "@babel/plugin-transform-sticky-regex" "^7.18.6" + "@babel/plugin-transform-template-literals" "^7.18.9" + "@babel/plugin-transform-typeof-symbol" "^7.18.9" + "@babel/plugin-transform-unicode-escapes" "^7.18.10" + "@babel/plugin-transform-unicode-regex" "^7.18.6" + "@babel/preset-modules" "^0.1.5" + "@babel/types" "^7.19.3" + babel-plugin-polyfill-corejs2 "^0.3.3" + 
babel-plugin-polyfill-corejs3 "^0.6.0" + babel-plugin-polyfill-regenerator "^0.4.1" + core-js-compat "^3.25.1" + semver "^6.3.0" + +"@babel/preset-modules@^0.1.5": + version "0.1.5" + resolved "http://localhost:4873/@babel%2fpreset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9" + integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" + "@babel/plugin-transform-dotall-regex" "^7.4.4" + "@babel/types" "^7.4.4" + esutils "^2.0.2" + +"@babel/preset-react@^7.12.5", "@babel/preset-react@^7.16.0": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fpreset-react/-/preset-react-7.18.6.tgz#979f76d6277048dc19094c217b507f3ad517dd2d" + integrity sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-transform-react-display-name" "^7.18.6" + "@babel/plugin-transform-react-jsx" "^7.18.6" + "@babel/plugin-transform-react-jsx-development" "^7.18.6" + "@babel/plugin-transform-react-pure-annotations" "^7.18.6" + +"@babel/preset-typescript@^7.16.0": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fpreset-typescript/-/preset-typescript-7.18.6.tgz#ce64be3e63eddc44240c6358daefac17b3186399" + integrity sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-transform-typescript" "^7.18.6" + +"@babel/runtime-corejs3@^7.10.2": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fruntime-corejs3/-/runtime-corejs3-7.19.1.tgz#f0cbbe7edda7c4109cd253bb1dee99aba4594ad9" + integrity 
sha512-j2vJGnkopRzH+ykJ8h68wrHnEUmtK//E723jjixiAl/PPf6FhqY/vYRcMVlNydRKQjQsTsYEjpx+DZMIvnGk/g== + dependencies: + core-js-pure "^3.25.1" + regenerator-runtime "^0.13.4" + +"@babel/runtime@^7.10.2", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.5", "@babel/runtime@^7.16.3", "@babel/runtime@^7.18.9", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fruntime/-/runtime-7.19.0.tgz#22b11c037b094d27a8a2504ea4dcff00f50e2259" + integrity sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA== + dependencies: + regenerator-runtime "^0.13.4" + +"@babel/template@^7.18.10", "@babel/template@^7.3.3": + version "7.18.10" + resolved "http://localhost:4873/@babel%2ftemplate/-/template-7.18.10.tgz#6f9134835970d1dbf0835c0d100c9f38de0c5e71" + integrity sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/parser" "^7.18.10" + "@babel/types" "^7.18.10" + +"@babel/traverse@^7.19.0", "@babel/traverse@^7.19.1", "@babel/traverse@^7.19.3", "@babel/traverse@^7.7.2": + version "7.19.3" + resolved "http://localhost:4873/@babel%2ftraverse/-/traverse-7.19.3.tgz#3a3c5348d4988ba60884e8494b0592b2f15a04b4" + integrity sha512-qh5yf6149zhq2sgIXmwjnsvmnNQC2iw70UFjp4olxucKrWd/dvlUsBI88VSLUsnMNF7/vnOiA+nk1+yLoCqROQ== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.3" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.19.3" + "@babel/types" "^7.19.3" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.0.0", "@babel/types@^7.12.6", "@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.18.9", "@babel/types@^7.19.0", "@babel/types@^7.19.3", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": 
+ version "7.19.3" + resolved "http://localhost:4873/@babel%2ftypes/-/types-7.19.3.tgz#fc420e6bbe54880bce6779ffaf315f5e43ec9624" + integrity sha512-hGCaQzIY22DJlDh9CH7NOxgKkFjBk0Cw9xDO1Xmh2151ti7wiGfQ3LauXzL4HP1fmFlTX6XjpRETTpUcv7wQLw== + dependencies: + "@babel/helper-string-parser" "^7.18.10" + "@babel/helper-validator-identifier" "^7.19.1" + to-fast-properties "^2.0.0" + +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "http://localhost:4873/@bcoe%2fv8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + +"@craco/craco@^7.0.0-alpha.8": + version "7.0.0-alpha.8" + resolved "http://localhost:4873/@craco%2fcraco/-/craco-7.0.0-alpha.8.tgz#40f19f44198ff2341b40654c8c6b4f54c2217972" + integrity sha512-IN3/ldPaktGflPu342cg7n8LYa2c3x9H2XzngUkDzTjro25ig1GyVcUdnG1U0X6wrRTF9K1AxZ5su9jLbdyFUw== + dependencies: + autoprefixer "^10.4.12" + cosmiconfig "^7.0.1" + cosmiconfig-typescript-loader "^4.1.1" + cross-spawn "^7.0.3" + lodash "^4.17.21" + semver "^7.3.7" + webpack-merge "^5.8.0" + +"@csstools/normalize.css@*": + version "12.0.0" + resolved "http://localhost:4873/@csstools%2fnormalize.css/-/normalize.css-12.0.0.tgz#a9583a75c3f150667771f30b60d9f059473e62c4" + integrity sha512-M0qqxAcwCsIVfpFQSlGN5XjXWu8l5JDZN+fPt1LeW5SZexQTgnaEvgXAY+CeygRw0EeppWHi12JxESWiWrB0Sg== + +"@csstools/postcss-cascade-layers@^1.1.0": + version "1.1.1" + resolved "http://localhost:4873/@csstools%2fpostcss-cascade-layers/-/postcss-cascade-layers-1.1.1.tgz#8a997edf97d34071dd2e37ea6022447dd9e795ad" + integrity sha512-+KdYrpKC5TgomQr2DlZF4lDEpHcoxnj5IGddYYfBWJAKfj1JtuHUIqMa+E1pJJ+z3kvDViWMqyqPlG4Ja7amQA== + dependencies: + "@csstools/selector-specificity" "^2.0.2" + postcss-selector-parser "^6.0.10" + +"@csstools/postcss-color-function@^1.1.1": + version "1.1.1" + resolved 
"http://localhost:4873/@csstools%2fpostcss-color-function/-/postcss-color-function-1.1.1.tgz#2bd36ab34f82d0497cfacdc9b18d34b5e6f64b6b" + integrity sha512-Bc0f62WmHdtRDjf5f3e2STwRAl89N2CLb+9iAwzrv4L2hncrbDwnQD9PCq0gtAt7pOI2leIV08HIBUd4jxD8cw== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +"@csstools/postcss-font-format-keywords@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@csstools%2fpostcss-font-format-keywords/-/postcss-font-format-keywords-1.0.1.tgz#677b34e9e88ae997a67283311657973150e8b16a" + integrity sha512-ZgrlzuUAjXIOc2JueK0X5sZDjCtgimVp/O5CEqTcs5ShWBa6smhWYbS0x5cVc/+rycTDbjjzoP0KTDnUneZGOg== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-hwb-function@^1.0.2": + version "1.0.2" + resolved "http://localhost:4873/@csstools%2fpostcss-hwb-function/-/postcss-hwb-function-1.0.2.tgz#ab54a9fce0ac102c754854769962f2422ae8aa8b" + integrity sha512-YHdEru4o3Rsbjmu6vHy4UKOXZD+Rn2zmkAmLRfPet6+Jz4Ojw8cbWxe1n42VaXQhD3CQUXXTooIy8OkVbUcL+w== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-ic-unit@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@csstools%2fpostcss-ic-unit/-/postcss-ic-unit-1.0.1.tgz#28237d812a124d1a16a5acc5c3832b040b303e58" + integrity sha512-Ot1rcwRAaRHNKC9tAqoqNZhjdYBzKk1POgWfhN4uCOE47ebGcLRqXjKkApVDpjifL6u2/55ekkpnFcp+s/OZUw== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +"@csstools/postcss-is-pseudo-class@^2.0.7": + version "2.0.7" + resolved "http://localhost:4873/@csstools%2fpostcss-is-pseudo-class/-/postcss-is-pseudo-class-2.0.7.tgz#846ae6c0d5a1eaa878fce352c544f9c295509cd1" + integrity sha512-7JPeVVZHd+jxYdULl87lvjgvWldYu+Bc62s9vD/ED6/QTGjy0jy0US/f6BG53sVMTBJ1lzKZFpYmofBN9eaRiA== + dependencies: + "@csstools/selector-specificity" "^2.0.0" + postcss-selector-parser "^6.0.10" + +"@csstools/postcss-nested-calc@^1.0.0": + version "1.0.0" + resolved 
"http://localhost:4873/@csstools%2fpostcss-nested-calc/-/postcss-nested-calc-1.0.0.tgz#d7e9d1d0d3d15cf5ac891b16028af2a1044d0c26" + integrity sha512-JCsQsw1wjYwv1bJmgjKSoZNvf7R6+wuHDAbi5f/7MbFhl2d/+v+TvBTU4BJH3G1X1H87dHl0mh6TfYogbT/dJQ== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-normalize-display-values@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@csstools%2fpostcss-normalize-display-values/-/postcss-normalize-display-values-1.0.1.tgz#15da54a36e867b3ac5163ee12c1d7f82d4d612c3" + integrity sha512-jcOanIbv55OFKQ3sYeFD/T0Ti7AMXc9nM1hZWu8m/2722gOTxFg7xYu4RDLJLeZmPUVQlGzo4jhzvTUq3x4ZUw== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-oklab-function@^1.1.1": + version "1.1.1" + resolved "http://localhost:4873/@csstools%2fpostcss-oklab-function/-/postcss-oklab-function-1.1.1.tgz#88cee0fbc8d6df27079ebd2fa016ee261eecf844" + integrity sha512-nJpJgsdA3dA9y5pgyb/UfEzE7W5Ka7u0CX0/HIMVBNWzWemdcTH3XwANECU6anWv/ao4vVNLTMxhiPNZsTK6iA== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +"@csstools/postcss-progressive-custom-properties@^1.1.0", "@csstools/postcss-progressive-custom-properties@^1.3.0": + version "1.3.0" + resolved "http://localhost:4873/@csstools%2fpostcss-progressive-custom-properties/-/postcss-progressive-custom-properties-1.3.0.tgz#542292558384361776b45c85226b9a3a34f276fa" + integrity sha512-ASA9W1aIy5ygskZYuWams4BzafD12ULvSypmaLJT2jvQ8G0M3I8PRQhC0h7mG0Z3LI05+agZjqSR9+K9yaQQjA== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-stepped-value-functions@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@csstools%2fpostcss-stepped-value-functions/-/postcss-stepped-value-functions-1.0.1.tgz#f8772c3681cc2befed695e2b0b1d68e22f08c4f4" + integrity sha512-dz0LNoo3ijpTOQqEJLY8nyaapl6umbmDcgj4AD0lgVQ572b2eqA1iGZYTTWhrcrHztWDDRAX2DGYyw2VBjvCvQ== + dependencies: + postcss-value-parser "^4.2.0" + 
+"@csstools/postcss-text-decoration-shorthand@^1.0.0": + version "1.0.0" + resolved "http://localhost:4873/@csstools%2fpostcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-1.0.0.tgz#ea96cfbc87d921eca914d3ad29340d9bcc4c953f" + integrity sha512-c1XwKJ2eMIWrzQenN0XbcfzckOLLJiczqy+YvfGmzoVXd7pT9FfObiSEfzs84bpE/VqfpEuAZ9tCRbZkZxxbdw== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-trigonometric-functions@^1.0.2": + version "1.0.2" + resolved "http://localhost:4873/@csstools%2fpostcss-trigonometric-functions/-/postcss-trigonometric-functions-1.0.2.tgz#94d3e4774c36d35dcdc88ce091336cb770d32756" + integrity sha512-woKaLO///4bb+zZC2s80l+7cm07M7268MsyG3M0ActXXEFi6SuhvriQYcb58iiKGbjwwIU7n45iRLEHypB47Og== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-unset-value@^1.0.2": + version "1.0.2" + resolved "http://localhost:4873/@csstools%2fpostcss-unset-value/-/postcss-unset-value-1.0.2.tgz#c99bb70e2cdc7312948d1eb41df2412330b81f77" + integrity sha512-c8J4roPBILnelAsdLr4XOAR/GsTm0GJi4XpcfvoWk3U6KiTCqiFYc63KhRMQQX35jYMp4Ao8Ij9+IZRgMfJp1g== + +"@csstools/selector-specificity@^2.0.0", "@csstools/selector-specificity@^2.0.2": + version "2.0.2" + resolved "http://localhost:4873/@csstools%2fselector-specificity/-/selector-specificity-2.0.2.tgz#1bfafe4b7ed0f3e4105837e056e0a89b108ebe36" + integrity sha512-IkpVW/ehM1hWKln4fCA3NzJU8KwD+kIOvPZA4cqxoJHtE21CCzjyp+Kxbu0i5I4tBNOlXPL9mjwnWlL0VEG4Fg== + +"@eslint/eslintrc@^1.3.2": + version "1.3.2" + resolved "http://localhost:4873/@eslint%2feslintrc/-/eslintrc-1.3.2.tgz#58b69582f3b7271d8fa67fe5251767a5b38ea356" + integrity sha512-AXYd23w1S/bv3fTs3Lz0vjiYemS08jWkI3hYyS9I1ry+0f+Yjs1wm+sU0BS8qDOPrBIkp4qHYC16I8uVtpLajQ== + dependencies: + ajv "^6.12.4" + debug "^4.3.2" + espree "^9.4.0" + globals "^13.15.0" + ignore "^5.2.0" + import-fresh "^3.2.1" + js-yaml "^4.1.0" + minimatch "^3.1.2" + strip-json-comments "^3.1.1" + +"@humanwhocodes/config-array@^0.10.5": + version "0.10.7" + resolved 
"http://localhost:4873/@humanwhocodes%2fconfig-array/-/config-array-0.10.7.tgz#6d53769fd0c222767e6452e8ebda825c22e9f0dc" + integrity sha512-MDl6D6sBsaV452/QSdX+4CXIjZhIcI0PELsxUjk4U828yd58vk3bTIvk/6w5FY+4hIy9sLW0sfrV7K7Kc++j/w== + dependencies: + "@humanwhocodes/object-schema" "^1.2.1" + debug "^4.1.1" + minimatch "^3.0.4" + +"@humanwhocodes/gitignore-to-minimatch@^1.0.2": + version "1.0.2" + resolved "http://localhost:4873/@humanwhocodes%2fgitignore-to-minimatch/-/gitignore-to-minimatch-1.0.2.tgz#316b0a63b91c10e53f242efb4ace5c3b34e8728d" + integrity sha512-rSqmMJDdLFUsyxR6FMtD00nfQKKLFb1kv+qBbOVKqErvloEIJLo5bDTJTQNTYgeyp78JsA7u/NPi5jT1GR/MuA== + +"@humanwhocodes/module-importer@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@humanwhocodes%2fmodule-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== + +"@humanwhocodes/object-schema@^1.2.1": + version "1.2.1" + resolved "http://localhost:4873/@humanwhocodes%2fobject-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "http://localhost:4873/@istanbuljs%2fload-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "http://localhost:4873/@istanbuljs%2fschema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + +"@jest/console@^27.5.1": + 
version "27.5.1" + resolved "http://localhost:4873/@jest%2fconsole/-/console-27.5.1.tgz#260fe7239602fe5130a94f1aa386eff54b014bba" + integrity sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg== + dependencies: + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^27.5.1" + jest-util "^27.5.1" + slash "^3.0.0" + +"@jest/console@^28.1.3": + version "28.1.3" + resolved "http://localhost:4873/@jest%2fconsole/-/console-28.1.3.tgz#2030606ec03a18c31803b8a36382762e447655df" + integrity sha512-QPAkP5EwKdK/bxIr6C1I4Vs0rm2nHiANzj/Z5X2JQkrZo6IqvC4ldZ9K95tF0HdidhA8Bo6egxSzUFPYKcEXLw== + dependencies: + "@jest/types" "^28.1.3" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^28.1.3" + jest-util "^28.1.3" + slash "^3.0.0" + +"@jest/core@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2fcore/-/core-27.5.1.tgz#267ac5f704e09dc52de2922cbf3af9edcd64b626" + integrity sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ== + dependencies: + "@jest/console" "^27.5.1" + "@jest/reporters" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.8.1" + exit "^0.1.2" + graceful-fs "^4.2.9" + jest-changed-files "^27.5.1" + jest-config "^27.5.1" + jest-haste-map "^27.5.1" + jest-message-util "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-resolve-dependencies "^27.5.1" + jest-runner "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + jest-watcher "^27.5.1" + micromatch "^4.0.4" + rimraf "^3.0.0" + slash "^3.0.0" + strip-ansi "^6.0.0" + +"@jest/environment@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2fenvironment/-/environment-27.5.1.tgz#d7425820511fe7158abbecc010140c3fd3be9c74" + integrity 
sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA== + dependencies: + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + jest-mock "^27.5.1" + +"@jest/expect-utils@^29.1.2": + version "29.1.2" + resolved "http://localhost:4873/@jest%2fexpect-utils/-/expect-utils-29.1.2.tgz#66dbb514d38f7d21456bc774419c9ae5cca3f88d" + integrity sha512-4a48bhKfGj/KAH39u0ppzNTABXQ8QPccWAFUFobWBaEMSMp+sB31Z2fK/l47c4a/Mu1po2ffmfAIPxXbVTXdtg== + dependencies: + jest-get-type "^29.0.0" + +"@jest/fake-timers@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ffake-timers/-/fake-timers-27.5.1.tgz#76979745ce0579c8a94a4678af7a748eda8ada74" + integrity sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ== + dependencies: + "@jest/types" "^27.5.1" + "@sinonjs/fake-timers" "^8.0.1" + "@types/node" "*" + jest-message-util "^27.5.1" + jest-mock "^27.5.1" + jest-util "^27.5.1" + +"@jest/globals@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2fglobals/-/globals-27.5.1.tgz#7ac06ce57ab966566c7963431cef458434601b2b" + integrity sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/types" "^27.5.1" + expect "^27.5.1" + +"@jest/reporters@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2freporters/-/reporters-27.5.1.tgz#ceda7be96170b03c923c37987b64015812ffec04" + integrity sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@jest/console" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.2" + graceful-fs "^4.2.9" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-instrument "^5.1.0" + istanbul-lib-report 
"^3.0.0" + istanbul-lib-source-maps "^4.0.0" + istanbul-reports "^3.1.3" + jest-haste-map "^27.5.1" + jest-resolve "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" + slash "^3.0.0" + source-map "^0.6.0" + string-length "^4.0.1" + terminal-link "^2.0.0" + v8-to-istanbul "^8.1.0" + +"@jest/schemas@^28.1.3": + version "28.1.3" + resolved "http://localhost:4873/@jest%2fschemas/-/schemas-28.1.3.tgz#ad8b86a66f11f33619e3d7e1dcddd7f2d40ff905" + integrity sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg== + dependencies: + "@sinclair/typebox" "^0.24.1" + +"@jest/schemas@^29.0.0": + version "29.0.0" + resolved "http://localhost:4873/@jest%2fschemas/-/schemas-29.0.0.tgz#5f47f5994dd4ef067fb7b4188ceac45f77fe952a" + integrity sha512-3Ab5HgYIIAnS0HjqJHQYZS+zXc4tUmTmBH3z83ajI6afXp8X3ZtdLX+nXx+I7LNkJD7uN9LAVhgnjDgZa2z0kA== + dependencies: + "@sinclair/typebox" "^0.24.1" + +"@jest/source-map@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2fsource-map/-/source-map-27.5.1.tgz#6608391e465add4205eae073b55e7f279e04e8cf" + integrity sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg== + dependencies: + callsites "^3.0.0" + graceful-fs "^4.2.9" + source-map "^0.6.0" + +"@jest/test-result@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ftest-result/-/test-result-27.5.1.tgz#56a6585fa80f7cdab72b8c5fc2e871d03832f5bb" + integrity sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag== + dependencies: + "@jest/console" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-result@^28.1.3": + version "28.1.3" + resolved "http://localhost:4873/@jest%2ftest-result/-/test-result-28.1.3.tgz#5eae945fd9f4b8fcfce74d239e6f725b6bf076c5" + integrity sha512-kZAkxnSE+FqE8YjW8gNuoVkkC9I7S1qmenl8sGcDOLropASP+BkcGKwhXoyqQuGOGeYY0y/ixjrd/iERpEXHNg== + dependencies: + 
"@jest/console" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-sequencer@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ftest-sequencer/-/test-sequencer-27.5.1.tgz#4057e0e9cea4439e544c6353c6affe58d095745b" + integrity sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ== + dependencies: + "@jest/test-result" "^27.5.1" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-runtime "^27.5.1" + +"@jest/transform@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ftransform/-/transform-27.5.1.tgz#6c3501dcc00c4c08915f292a600ece5ecfe1f409" + integrity sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw== + dependencies: + "@babel/core" "^7.1.0" + "@jest/types" "^27.5.1" + babel-plugin-istanbul "^6.1.1" + chalk "^4.0.0" + convert-source-map "^1.4.0" + fast-json-stable-stringify "^2.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-regex-util "^27.5.1" + jest-util "^27.5.1" + micromatch "^4.0.4" + pirates "^4.0.4" + slash "^3.0.0" + source-map "^0.6.1" + write-file-atomic "^3.0.0" + +"@jest/types@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ftypes/-/types-27.5.1.tgz#3c79ec4a8ba61c170bf937bcf9e98a9df175ec80" + integrity sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^16.0.0" + chalk "^4.0.0" + +"@jest/types@^28.1.3": + version "28.1.3" + resolved "http://localhost:4873/@jest%2ftypes/-/types-28.1.3.tgz#b05de80996ff12512bc5ceb1d208285a7d11748b" + integrity sha512-RyjiyMUZrKz/c+zlMFO1pm70DcIlST8AeWTkoUdZevew44wcNZQHsEVOiCVtgVnlFFD82FPaXycys58cf2muVQ== + dependencies: + "@jest/schemas" "^28.1.3" + "@types/istanbul-lib-coverage" "^2.0.0" + 
"@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + +"@jest/types@^29.1.2": + version "29.1.2" + resolved "http://localhost:4873/@jest%2ftypes/-/types-29.1.2.tgz#7442d32b16bcd7592d9614173078b8c334ec730a" + integrity sha512-DcXGtoTykQB5jiwCmVr8H4vdg2OJhQex3qPkG+ISyDO7xQXbt/4R6dowcRyPemRnkH7JoHvZuxPBdlq+9JxFCg== + dependencies: + "@jest/schemas" "^29.0.0" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "http://localhost:4873/@jridgewell%2fgen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": + version "0.3.2" + resolved "http://localhost:4873/@jridgewell%2fgen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@^3.0.3": + version "3.1.0" + resolved "http://localhost:4873/@jridgewell%2fresolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "http://localhost:4873/@jridgewell%2fset-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + 
+"@jridgewell/source-map@^0.3.2": + version "0.3.2" + resolved "http://localhost:4873/@jridgewell%2fsource-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" + integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.14" + resolved "http://localhost:4873/@jridgewell%2fsourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/trace-mapping@^0.3.14", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.15" + resolved "http://localhost:4873/@jridgewell%2ftrace-mapping/-/trace-mapping-0.3.15.tgz#aba35c48a38d3fd84b37e66c9c0423f9744f9774" + integrity sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@leichtgewicht/ip-codec@^2.0.1": + version "2.0.4" + resolved "http://localhost:4873/@leichtgewicht%2fip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" + integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== + +"@nicolo-ribaudo/eslint-scope-5-internals@5.1.1-v1": + version "5.1.1-v1" + resolved "http://localhost:4873/@nicolo-ribaudo%2feslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz#dbf733a965ca47b1973177dc0bb6c889edcfb129" + integrity sha512-54/JRvkLIzzDWshCWfuhadfrfZVPiElY8Fcgmg1HroEly/EDSszzhBAsarCux+D/kOslTRquNzuyGSmUSTTHGg== + dependencies: + eslint-scope "5.1.1" + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "http://localhost:4873/@nodelib%2ffs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity 
sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "http://localhost:4873/@nodelib%2ffs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "http://localhost:4873/@nodelib%2ffs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@pmmmwh/react-refresh-webpack-plugin@^0.5.3": + version "0.5.7" + resolved "http://localhost:4873/@pmmmwh%2freact-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.7.tgz#58f8217ba70069cc6a73f5d7e05e85b458c150e2" + integrity sha512-bcKCAzF0DV2IIROp9ZHkRJa6O4jy7NlnHdWL3GmcUxYWNjLXkK5kfELELwEfSP5hXPfVL/qOGMAROuMQb9GG8Q== + dependencies: + ansi-html-community "^0.0.8" + common-path-prefix "^3.0.0" + core-js-pure "^3.8.1" + error-stack-parser "^2.0.6" + find-up "^5.0.0" + html-entities "^2.1.0" + loader-utils "^2.0.0" + schema-utils "^3.0.0" + source-map "^0.7.3" + +"@rollup/plugin-babel@^5.2.0": + version "5.3.1" + resolved "http://localhost:4873/@rollup%2fplugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" + integrity sha512-WFfdLWU/xVWKeRQnKmIAQULUI7Il0gZnBIH/ZFO069wYIfPu+8zrfp/KMW0atmELoRDq8FbiP3VCss9MhCut7Q== + dependencies: + "@babel/helper-module-imports" "^7.10.4" + "@rollup/pluginutils" "^3.1.0" + +"@rollup/plugin-node-resolve@^11.2.1": + version "11.2.1" + resolved "http://localhost:4873/@rollup%2fplugin-node-resolve/-/plugin-node-resolve-11.2.1.tgz#82aa59397a29cd4e13248b106e6a4a1880362a60" + integrity 
sha512-yc2n43jcqVyGE2sqV5/YCmocy9ArjVAP/BeXyTtADTBBX6V0e5UMqwO8CdQ0kzjb6zu5P1qMzsScCMRvE9OlVg== + dependencies: + "@rollup/pluginutils" "^3.1.0" + "@types/resolve" "1.17.1" + builtin-modules "^3.1.0" + deepmerge "^4.2.2" + is-module "^1.0.0" + resolve "^1.19.0" + +"@rollup/plugin-replace@^2.4.1": + version "2.4.2" + resolved "http://localhost:4873/@rollup%2fplugin-replace/-/plugin-replace-2.4.2.tgz#a2d539314fbc77c244858faa523012825068510a" + integrity sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg== + dependencies: + "@rollup/pluginutils" "^3.1.0" + magic-string "^0.25.7" + +"@rollup/pluginutils@^3.1.0": + version "3.1.0" + resolved "http://localhost:4873/@rollup%2fpluginutils/-/pluginutils-3.1.0.tgz#706b4524ee6dc8b103b3c995533e5ad680c02b9b" + integrity sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg== + dependencies: + "@types/estree" "0.0.39" + estree-walker "^1.0.1" + picomatch "^2.2.2" + +"@rushstack/eslint-patch@^1.1.0": + version "1.2.0" + resolved "http://localhost:4873/@rushstack%2feslint-patch/-/eslint-patch-1.2.0.tgz#8be36a1f66f3265389e90b5f9c9962146758f728" + integrity sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg== + +"@sinclair/typebox@^0.24.1": + version "0.24.44" + resolved "http://localhost:4873/@sinclair%2ftypebox/-/typebox-0.24.44.tgz#0a0aa3bf4a155a678418527342a3ee84bd8caa5c" + integrity sha512-ka0W0KN5i6LfrSocduwliMMpqVgohtPFidKdMEOUjoOFCHcOOYkKsPRxfs5f15oPNHTm6ERAm0GV/+/LTKeiWg== + +"@sinonjs/commons@^1.7.0": + version "1.8.3" + resolved "http://localhost:4873/@sinonjs%2fcommons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" + integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== + dependencies: + type-detect "4.0.8" + +"@sinonjs/fake-timers@^8.0.1": + version "8.1.0" + resolved 
"http://localhost:4873/@sinonjs%2ffake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7" + integrity sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg== + dependencies: + "@sinonjs/commons" "^1.7.0" + +"@surma/rollup-plugin-off-main-thread@^2.2.3": + version "2.2.3" + resolved "http://localhost:4873/@surma%2frollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz#ee34985952ca21558ab0d952f00298ad2190c053" + integrity sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ== + dependencies: + ejs "^3.1.6" + json5 "^2.2.0" + magic-string "^0.25.0" + string.prototype.matchall "^4.0.6" + +"@svgr/babel-plugin-add-jsx-attribute@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz#81ef61947bb268eb9d50523446f9c638fb355906" + integrity sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg== + +"@svgr/babel-plugin-remove-jsx-attribute@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz#6b2c770c95c874654fd5e1d5ef475b78a0a962ef" + integrity sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg== + +"@svgr/babel-plugin-remove-jsx-empty-expression@^5.0.1": + version "5.0.1" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz#25621a8915ed7ad70da6cea3d0a6dbc2ea933efd" + integrity sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA== + +"@svgr/babel-plugin-replace-jsx-attribute-value@^5.0.1": + version "5.0.1" + resolved 
"http://localhost:4873/@svgr%2fbabel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz#0b221fc57f9fcd10e91fe219e2cd0dd03145a897" + integrity sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ== + +"@svgr/babel-plugin-svg-dynamic-title@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz#139b546dd0c3186b6e5db4fefc26cb0baea729d7" + integrity sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg== + +"@svgr/babel-plugin-svg-em-dimensions@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz#6543f69526632a133ce5cabab965deeaea2234a0" + integrity sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw== + +"@svgr/babel-plugin-transform-react-native-svg@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz#00bf9a7a73f1cad3948cdab1f8dfb774750f8c80" + integrity sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q== + +"@svgr/babel-plugin-transform-svg-component@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz#583a5e2a193e214da2f3afeb0b9e8d3250126b4a" + integrity sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ== + +"@svgr/babel-preset@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fbabel-preset/-/babel-preset-5.5.0.tgz#8af54f3e0a8add7b1e2b0fcd5a882c55393df327" + integrity sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig== + dependencies: + "@svgr/babel-plugin-add-jsx-attribute" 
"^5.4.0" + "@svgr/babel-plugin-remove-jsx-attribute" "^5.4.0" + "@svgr/babel-plugin-remove-jsx-empty-expression" "^5.0.1" + "@svgr/babel-plugin-replace-jsx-attribute-value" "^5.0.1" + "@svgr/babel-plugin-svg-dynamic-title" "^5.4.0" + "@svgr/babel-plugin-svg-em-dimensions" "^5.4.0" + "@svgr/babel-plugin-transform-react-native-svg" "^5.4.0" + "@svgr/babel-plugin-transform-svg-component" "^5.5.0" + +"@svgr/core@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fcore/-/core-5.5.0.tgz#82e826b8715d71083120fe8f2492ec7d7874a579" + integrity sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ== + dependencies: + "@svgr/plugin-jsx" "^5.5.0" + camelcase "^6.2.0" + cosmiconfig "^7.0.0" + +"@svgr/hast-util-to-babel-ast@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fhast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz#5ee52a9c2533f73e63f8f22b779f93cd432a5461" + integrity sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ== + dependencies: + "@babel/types" "^7.12.6" + +"@svgr/plugin-jsx@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fplugin-jsx/-/plugin-jsx-5.5.0.tgz#1aa8cd798a1db7173ac043466d7b52236b369000" + integrity sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA== + dependencies: + "@babel/core" "^7.12.3" + "@svgr/babel-preset" "^5.5.0" + "@svgr/hast-util-to-babel-ast" "^5.5.0" + svg-parser "^2.0.2" + +"@svgr/plugin-svgo@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fplugin-svgo/-/plugin-svgo-5.5.0.tgz#02da55d85320549324e201c7b2e53bf431fcc246" + integrity sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ== + dependencies: + cosmiconfig "^7.0.0" + deepmerge "^4.2.2" + svgo "^1.2.2" + +"@svgr/webpack@^5.5.0": + version "5.5.0" + resolved 
"http://localhost:4873/@svgr%2fwebpack/-/webpack-5.5.0.tgz#aae858ee579f5fa8ce6c3166ef56c6a1b381b640" + integrity sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g== + dependencies: + "@babel/core" "^7.12.3" + "@babel/plugin-transform-react-constant-elements" "^7.12.1" + "@babel/preset-env" "^7.12.1" + "@babel/preset-react" "^7.12.5" + "@svgr/core" "^5.5.0" + "@svgr/plugin-jsx" "^5.5.0" + "@svgr/plugin-svgo" "^5.5.0" + loader-utils "^2.0.0" + +"@testing-library/dom@^8.5.0": + version "8.18.1" + resolved "http://localhost:4873/@testing-library%2fdom/-/dom-8.18.1.tgz#80f91be02bc171fe5a3a7003f88207be31ac2cf3" + integrity sha512-oEvsm2B/WtcHKE+IcEeeCqNU/ltFGaVyGbpcm4g/2ytuT49jrlH9x5qRKL/H3A6yfM4YAbSbC0ceT5+9CEXnLg== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/runtime" "^7.12.5" + "@types/aria-query" "^4.2.0" + aria-query "^5.0.0" + chalk "^4.1.0" + dom-accessibility-api "^0.5.9" + lz-string "^1.4.4" + pretty-format "^27.0.2" + +"@testing-library/jest-dom@^5.16.5": + version "5.16.5" + resolved "http://localhost:4873/@testing-library%2fjest-dom/-/jest-dom-5.16.5.tgz#3912846af19a29b2dbf32a6ae9c31ef52580074e" + integrity sha512-N5ixQ2qKpi5OLYfwQmUb/5mSV9LneAcaUfp32pn4yCnpb8r/Yz0pXFPck21dIicKmi+ta5WRAknkZCfA8refMA== + dependencies: + "@adobe/css-tools" "^4.0.1" + "@babel/runtime" "^7.9.2" + "@types/testing-library__jest-dom" "^5.9.1" + aria-query "^5.0.0" + chalk "^3.0.0" + css.escape "^1.5.1" + dom-accessibility-api "^0.5.6" + lodash "^4.17.15" + redent "^3.0.0" + +"@testing-library/react@^13.4.0": + version "13.4.0" + resolved "http://localhost:4873/@testing-library%2freact/-/react-13.4.0.tgz#6a31e3bf5951615593ad984e96b9e5e2d9380966" + integrity sha512-sXOGON+WNTh3MLE9rve97ftaZukN3oNf2KjDy7YTx6hcTO2uuLHuCGynMDhFwGw/jYf4OJ2Qk0i4i79qMNNkyw== + dependencies: + "@babel/runtime" "^7.12.5" + "@testing-library/dom" "^8.5.0" + "@types/react-dom" "^18.0.0" + +"@testing-library/user-event@^13.5.0": + version "13.5.0" + 
resolved "http://localhost:4873/@testing-library%2fuser-event/-/user-event-13.5.0.tgz#69d77007f1e124d55314a2b73fd204b333b13295" + integrity sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg== + dependencies: + "@babel/runtime" "^7.12.5" + +"@tootallnate/once@1": + version "1.1.2" + resolved "http://localhost:4873/@tootallnate%2fonce/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" + integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== + +"@trysound/sax@0.2.0": + version "0.2.0" + resolved "http://localhost:4873/@trysound%2fsax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" + integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== + +"@types/aria-query@^4.2.0": + version "4.2.2" + resolved "http://localhost:4873/@types%2faria-query/-/aria-query-4.2.2.tgz#ed4e0ad92306a704f9fb132a0cfcf77486dbe2bc" + integrity sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig== + +"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.14": + version "7.1.19" + resolved "http://localhost:4873/@types%2fbabel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460" + integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + "@types/babel__generator" "*" + "@types/babel__template" "*" + "@types/babel__traverse" "*" + +"@types/babel__generator@*": + version "7.6.4" + resolved "http://localhost:4873/@types%2fbabel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" + integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== + dependencies: + "@babel/types" "^7.0.0" + +"@types/babel__template@*": + version "7.4.1" + resolved 
"http://localhost:4873/@types%2fbabel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" + integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + +"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6": + version "7.18.2" + resolved "http://localhost:4873/@types%2fbabel__traverse/-/babel__traverse-7.18.2.tgz#235bf339d17185bdec25e024ca19cce257cc7309" + integrity sha512-FcFaxOr2V5KZCviw1TnutEMVUVsGt4D2hP1TAfXZAMKuHYW3xQhe3jTxNPWutgCJ3/X1c5yX8ZoGVEItxKbwBg== + dependencies: + "@babel/types" "^7.3.0" + +"@types/body-parser@*": + version "1.19.2" + resolved "http://localhost:4873/@types%2fbody-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" + integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== + dependencies: + "@types/connect" "*" + "@types/node" "*" + +"@types/bonjour@^3.5.9": + version "3.5.10" + resolved "http://localhost:4873/@types%2fbonjour/-/bonjour-3.5.10.tgz#0f6aadfe00ea414edc86f5d106357cda9701e275" + integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== + dependencies: + "@types/node" "*" + +"@types/connect-history-api-fallback@^1.3.5": + version "1.3.5" + resolved "http://localhost:4873/@types%2fconnect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz#d1f7a8a09d0ed5a57aee5ae9c18ab9b803205dae" + integrity sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw== + dependencies: + "@types/express-serve-static-core" "*" + "@types/node" "*" + +"@types/connect@*": + version "3.4.35" + resolved "http://localhost:4873/@types%2fconnect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1" + integrity 
sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== + dependencies: + "@types/node" "*" + +"@types/eslint-scope@^3.7.3": + version "3.7.4" + resolved "http://localhost:4873/@types%2feslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" + integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== + dependencies: + "@types/eslint" "*" + "@types/estree" "*" + +"@types/eslint@*", "@types/eslint@^7.29.0 || ^8.4.1": + version "8.4.6" + resolved "http://localhost:4873/@types%2feslint/-/eslint-8.4.6.tgz#7976f054c1bccfcf514bff0564c0c41df5c08207" + integrity sha512-/fqTbjxyFUaYNO7VcW5g+4npmqVACz1bB7RTHYuLj+PRjw9hrCwrUXVQFpChUS0JsyEFvMZ7U/PfmvWgxJhI9g== + dependencies: + "@types/estree" "*" + "@types/json-schema" "*" + +"@types/estree@*": + version "1.0.0" + resolved "http://localhost:4873/@types%2festree/-/estree-1.0.0.tgz#5fb2e536c1ae9bf35366eed879e827fa59ca41c2" + integrity sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ== + +"@types/estree@0.0.39": + version "0.0.39" + resolved "http://localhost:4873/@types%2festree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" + integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== + +"@types/estree@^0.0.51": + version "0.0.51" + resolved "http://localhost:4873/@types%2festree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" + integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== + +"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": + version "4.17.31" + resolved "http://localhost:4873/@types%2fexpress-serve-static-core/-/express-serve-static-core-4.17.31.tgz#a1139efeab4e7323834bb0226e62ac019f474b2f" + integrity sha512-DxMhY+NAsTwMMFHBTtJFNp5qiHKJ7TeqOo23zVEM9alT1Ml27Q3xcTH0xwxn7Q0BbMcVEJOs/7aQtUWupUQN3Q== + 
dependencies: + "@types/node" "*" + "@types/qs" "*" + "@types/range-parser" "*" + +"@types/express@*", "@types/express@^4.17.13": + version "4.17.14" + resolved "http://localhost:4873/@types%2fexpress/-/express-4.17.14.tgz#143ea0557249bc1b3b54f15db4c81c3d4eb3569c" + integrity sha512-TEbt+vaPFQ+xpxFLFssxUDXj5cWCxZJjIcB7Yg0k0GMHGtgtQgpvx/MUQUeAkNbA9AAGrwkAsoeItdTgS7FMyg== + dependencies: + "@types/body-parser" "*" + "@types/express-serve-static-core" "^4.17.18" + "@types/qs" "*" + "@types/serve-static" "*" + +"@types/graceful-fs@^4.1.2": + version "4.1.5" + resolved "http://localhost:4873/@types%2fgraceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" + integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== + dependencies: + "@types/node" "*" + +"@types/html-minifier-terser@^6.0.0": + version "6.1.0" + resolved "http://localhost:4873/@types%2fhtml-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" + integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== + +"@types/http-proxy@^1.17.8": + version "1.17.9" + resolved "http://localhost:4873/@types%2fhttp-proxy/-/http-proxy-1.17.9.tgz#7f0e7931343761efde1e2bf48c40f02f3f75705a" + integrity sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw== + dependencies: + "@types/node" "*" + +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": + version "2.0.4" + resolved "http://localhost:4873/@types%2fistanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved 
"http://localhost:4873/@types%2fistanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.1" + resolved "http://localhost:4873/@types%2fistanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/jest@*": + version "29.1.2" + resolved "http://localhost:4873/@types%2fjest/-/jest-29.1.2.tgz#7ad8077043ab5f6c108c8111bcc1d224e5600a87" + integrity sha512-y+nlX0h87U0R+wsGn6EBuoRWYyv3KFtwRNP3QWp9+k2tJ2/bqcGS3UxD7jgT+tiwJWWq3UsyV4Y+T6rsMT4XMg== + dependencies: + expect "^29.0.0" + pretty-format "^29.0.0" + +"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": + version "7.0.11" + resolved "http://localhost:4873/@types%2fjson-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" + integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== + +"@types/json5@^0.0.29": + version "0.0.29" + resolved "http://localhost:4873/@types%2fjson5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== + +"@types/mime@*": + version "3.0.1" + resolved "http://localhost:4873/@types%2fmime/-/mime-3.0.1.tgz#5f8f2bca0a5863cb69bc0b0acd88c96cb1d4ae10" + integrity sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA== + +"@types/node@*": + version "18.8.3" + resolved 
"http://localhost:4873/@types%2fnode/-/node-18.8.3.tgz#ce750ab4017effa51aed6a7230651778d54e327c" + integrity sha512-0os9vz6BpGwxGe9LOhgP/ncvYN5Tx1fNcd2TM3rD/aCGBkysb+ZWpXEocG24h6ZzOi13+VB8HndAQFezsSOw1w== + +"@types/parse-json@^4.0.0": + version "4.0.0" + resolved "http://localhost:4873/@types%2fparse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" + integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + +"@types/prettier@^2.1.5": + version "2.7.1" + resolved "http://localhost:4873/@types%2fprettier/-/prettier-2.7.1.tgz#dfd20e2dc35f027cdd6c1908e80a5ddc7499670e" + integrity sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow== + +"@types/prop-types@*": + version "15.7.5" + resolved "http://localhost:4873/@types%2fprop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" + integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== + +"@types/q@^1.5.1": + version "1.5.5" + resolved "http://localhost:4873/@types%2fq/-/q-1.5.5.tgz#75a2a8e7d8ab4b230414505d92335d1dcb53a6df" + integrity sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ== + +"@types/qs@*": + version "6.9.7" + resolved "http://localhost:4873/@types%2fqs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" + integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== + +"@types/range-parser@*": + version "1.2.4" + resolved "http://localhost:4873/@types%2frange-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" + integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== + +"@types/react-dom@^18.0.0": + version "18.0.6" + resolved "http://localhost:4873/@types%2freact-dom/-/react-dom-18.0.6.tgz#36652900024842b74607a17786b6662dd1e103a1" + integrity 
sha512-/5OFZgfIPSwy+YuIBP/FgJnQnsxhZhjjrnxudMddeblOouIodEQ75X14Rr4wGSG/bknL+Omy9iWlLo1u/9GzAA== + dependencies: + "@types/react" "*" + +"@types/react@*": + version "18.0.21" + resolved "http://localhost:4873/@types%2freact/-/react-18.0.21.tgz#b8209e9626bb00a34c76f55482697edd2b43cc67" + integrity sha512-7QUCOxvFgnD5Jk8ZKlUAhVcRj7GuJRjnjjiY/IUBWKgOlnvDvTMLD4RTF7NPyVmbRhNrbomZiOepg7M/2Kj1mA== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/resolve@1.17.1": + version "1.17.1" + resolved "http://localhost:4873/@types%2fresolve/-/resolve-1.17.1.tgz#3afd6ad8967c77e4376c598a82ddd58f46ec45d6" + integrity sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw== + dependencies: + "@types/node" "*" + +"@types/retry@0.12.0": + version "0.12.0" + resolved "http://localhost:4873/@types%2fretry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" + integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== + +"@types/scheduler@*": + version "0.16.2" + resolved "http://localhost:4873/@types%2fscheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" + integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + +"@types/serve-index@^1.9.1": + version "1.9.1" + resolved "http://localhost:4873/@types%2fserve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278" + integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== + dependencies: + "@types/express" "*" + +"@types/serve-static@*", "@types/serve-static@^1.13.10": + version "1.15.0" + resolved "http://localhost:4873/@types%2fserve-static/-/serve-static-1.15.0.tgz#c7930ff61afb334e121a9da780aac0d9b8f34155" + integrity sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg== + dependencies: + "@types/mime" "*" + "@types/node" 
"*" + +"@types/sockjs@^0.3.33": + version "0.3.33" + resolved "http://localhost:4873/@types%2fsockjs/-/sockjs-0.3.33.tgz#570d3a0b99ac995360e3136fd6045113b1bd236f" + integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== + dependencies: + "@types/node" "*" + +"@types/stack-utils@^2.0.0": + version "2.0.1" + resolved "http://localhost:4873/@types%2fstack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" + integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== + +"@types/testing-library__jest-dom@^5.9.1": + version "5.14.5" + resolved "http://localhost:4873/@types%2ftesting-library__jest-dom/-/testing-library__jest-dom-5.14.5.tgz#d113709c90b3c75fdb127ec338dad7d5f86c974f" + integrity sha512-SBwbxYoyPIvxHbeHxTZX2Pe/74F/tX2/D3mMvzabdeJ25bBojfW0TyB8BHrbq/9zaaKICJZjLP+8r6AeZMFCuQ== + dependencies: + "@types/jest" "*" + +"@types/trusted-types@^2.0.2": + version "2.0.2" + resolved "http://localhost:4873/@types%2ftrusted-types/-/trusted-types-2.0.2.tgz#fc25ad9943bcac11cceb8168db4f275e0e72e756" + integrity sha512-F5DIZ36YVLE+PN+Zwws4kJogq47hNgX3Nx6WyDJ3kcplxyke3XIzB8uK5n/Lpm1HBsbGzd6nmGehL8cPekP+Tg== + +"@types/ws@^8.5.1": + version "8.5.3" + resolved "http://localhost:4873/@types%2fws/-/ws-8.5.3.tgz#7d25a1ffbecd3c4f2d35068d0b283c037003274d" + integrity sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w== + dependencies: + "@types/node" "*" + +"@types/yargs-parser@*": + version "21.0.0" + resolved "http://localhost:4873/@types%2fyargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" + integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + +"@types/yargs@^16.0.0": + version "16.0.4" + resolved "http://localhost:4873/@types%2fyargs/-/yargs-16.0.4.tgz#26aad98dd2c2a38e421086ea9ad42b9e51642977" + integrity 
sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw== + dependencies: + "@types/yargs-parser" "*" + +"@types/yargs@^17.0.8": + version "17.0.13" + resolved "http://localhost:4873/@types%2fyargs/-/yargs-17.0.13.tgz#34cced675ca1b1d51fcf4d34c3c6f0fa142a5c76" + integrity sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg== + dependencies: + "@types/yargs-parser" "*" + +"@typescript-eslint/eslint-plugin@^5.5.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2feslint-plugin/-/eslint-plugin-5.39.0.tgz#778b2d9e7f293502c7feeea6c74dca8eb3e67511" + integrity sha512-xVfKOkBm5iWMNGKQ2fwX5GVgBuHmZBO1tCRwXmY5oAIsPscfwm2UADDuNB8ZVYCtpQvJK4xpjrK7jEhcJ0zY9A== + dependencies: + "@typescript-eslint/scope-manager" "5.39.0" + "@typescript-eslint/type-utils" "5.39.0" + "@typescript-eslint/utils" "5.39.0" + debug "^4.3.4" + ignore "^5.2.0" + regexpp "^3.2.0" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/experimental-utils@^5.0.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2fexperimental-utils/-/experimental-utils-5.39.0.tgz#9263bb72b57449cc2f07ffb7fd4e12d0160b7f5e" + integrity sha512-n5N9kG/oGu2xXhHzsWzn94s6CWoiUj59FPU2dF2IQZxPftw+q6Jm5sV2vj5qTgAElRooHhrgtl2gxBQDCPt6WA== + dependencies: + "@typescript-eslint/utils" "5.39.0" + +"@typescript-eslint/parser@^5.5.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2fparser/-/parser-5.39.0.tgz#93fa0bc980a3a501e081824f6097f7ca30aaa22b" + integrity sha512-PhxLjrZnHShe431sBAGHaNe6BDdxAASDySgsBCGxcBecVCi8NQWxQZMcizNA4g0pN51bBAn/FUfkWG3SDVcGlA== + dependencies: + "@typescript-eslint/scope-manager" "5.39.0" + "@typescript-eslint/types" "5.39.0" + "@typescript-eslint/typescript-estree" "5.39.0" + debug "^4.3.4" + +"@typescript-eslint/scope-manager@5.39.0": + version "5.39.0" + resolved 
"http://localhost:4873/@typescript-eslint%2fscope-manager/-/scope-manager-5.39.0.tgz#873e1465afa3d6c78d8ed2da68aed266a08008d0" + integrity sha512-/I13vAqmG3dyqMVSZPjsbuNQlYS082Y7OMkwhCfLXYsmlI0ca4nkL7wJ/4gjX70LD4P8Hnw1JywUVVAwepURBw== + dependencies: + "@typescript-eslint/types" "5.39.0" + "@typescript-eslint/visitor-keys" "5.39.0" + +"@typescript-eslint/type-utils@5.39.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2ftype-utils/-/type-utils-5.39.0.tgz#0a8c00f95dce4335832ad2dc6bc431c14e32a0a6" + integrity sha512-KJHJkOothljQWzR3t/GunL0TPKY+fGJtnpl+pX+sJ0YiKTz3q2Zr87SGTmFqsCMFrLt5E0+o+S6eQY0FAXj9uA== + dependencies: + "@typescript-eslint/typescript-estree" "5.39.0" + "@typescript-eslint/utils" "5.39.0" + debug "^4.3.4" + tsutils "^3.21.0" + +"@typescript-eslint/types@5.39.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2ftypes/-/types-5.39.0.tgz#f4e9f207ebb4579fd854b25c0bf64433bb5ed78d" + integrity sha512-gQMZrnfEBFXK38hYqt8Lkwt8f4U6yq+2H5VDSgP/qiTzC8Nw8JO3OuSUOQ2qW37S/dlwdkHDntkZM6SQhKyPhw== + +"@typescript-eslint/typescript-estree@5.39.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2ftypescript-estree/-/typescript-estree-5.39.0.tgz#c0316aa04a1a1f4f7f9498e3c13ef1d3dc4cf88b" + integrity sha512-qLFQP0f398sdnogJoLtd43pUgB18Q50QSA+BTE5h3sUxySzbWDpTSdgt4UyxNSozY/oDK2ta6HVAzvGgq8JYnA== + dependencies: + "@typescript-eslint/types" "5.39.0" + "@typescript-eslint/visitor-keys" "5.39.0" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/utils@5.39.0", "@typescript-eslint/utils@^5.13.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2futils/-/utils-5.39.0.tgz#b7063cca1dcf08d1d21b0d91db491161ad0be110" + integrity sha512-+DnY5jkpOpgj+EBtYPyHRjXampJfC0yUZZzfzLuUWVZvCuKqSdJVC8UhdWipIw7VKNTfwfAPiOWzYkAwuIhiAg== + dependencies: + "@types/json-schema" "^7.0.9" + "@typescript-eslint/scope-manager" "5.39.0" + 
"@typescript-eslint/types" "5.39.0" + "@typescript-eslint/typescript-estree" "5.39.0" + eslint-scope "^5.1.1" + eslint-utils "^3.0.0" + +"@typescript-eslint/visitor-keys@5.39.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2fvisitor-keys/-/visitor-keys-5.39.0.tgz#8f41f7d241b47257b081ddba5d3ce80deaae61e2" + integrity sha512-yyE3RPwOG+XJBLrhvsxAidUgybJVQ/hG8BhiJo0k8JSAYfk/CshVcxf0HwP4Jt7WZZ6vLmxdo1p6EyN3tzFTkg== + dependencies: + "@typescript-eslint/types" "5.39.0" + eslint-visitor-keys "^3.3.0" + +"@webassemblyjs/ast@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" + integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== + dependencies: + "@webassemblyjs/helper-numbers" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + +"@webassemblyjs/floating-point-hex-parser@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2ffloating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" + integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== + +"@webassemblyjs/helper-api-error@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fhelper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" + integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== + +"@webassemblyjs/helper-buffer@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fhelper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" + integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== + +"@webassemblyjs/helper-numbers@1.11.1": + version "1.11.1" + resolved 
"http://localhost:4873/@webassemblyjs%2fhelper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" + integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== + dependencies: + "@webassemblyjs/floating-point-hex-parser" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@xtuc/long" "4.2.2" + +"@webassemblyjs/helper-wasm-bytecode@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fhelper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" + integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== + +"@webassemblyjs/helper-wasm-section@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fhelper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" + integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + +"@webassemblyjs/ieee754@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" + integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== + dependencies: + "@xtuc/ieee754" "^1.2.0" + +"@webassemblyjs/leb128@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fleb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" + integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== + dependencies: + "@xtuc/long" "4.2.2" + +"@webassemblyjs/utf8@1.11.1": + version "1.11.1" + resolved 
"http://localhost:4873/@webassemblyjs%2futf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" + integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== + +"@webassemblyjs/wasm-edit@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" + integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-wasm-section" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-opt" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + "@webassemblyjs/wast-printer" "1.11.1" + +"@webassemblyjs/wasm-gen@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" + integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wasm-opt@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" + integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + +"@webassemblyjs/wasm-parser@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" + integrity 
sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wast-printer@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" + integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@xtuc/long" "4.2.2" + +"@xtuc/ieee754@^1.2.0": + version "1.2.0" + resolved "http://localhost:4873/@xtuc%2fieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" + integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== + +"@xtuc/long@4.2.2": + version "4.2.2" + resolved "http://localhost:4873/@xtuc%2flong/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" + integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== + +abab@^2.0.3, abab@^2.0.5: + version "2.0.6" + resolved "http://localhost:4873/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" + integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== + +accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: + version "1.3.8" + resolved "http://localhost:4873/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== + dependencies: + mime-types "~2.1.34" + negotiator "0.6.3" + +acorn-globals@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" + integrity 
sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== + dependencies: + acorn "^7.1.1" + acorn-walk "^7.1.1" + +acorn-import-assertions@^1.7.6: + version "1.8.0" + resolved "http://localhost:4873/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" + integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== + +acorn-jsx@^5.3.2: + version "5.3.2" + resolved "http://localhost:4873/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn-node@^1.8.2: + version "1.8.2" + resolved "http://localhost:4873/acorn-node/-/acorn-node-1.8.2.tgz#114c95d64539e53dede23de8b9d96df7c7ae2af8" + integrity sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A== + dependencies: + acorn "^7.0.0" + acorn-walk "^7.0.0" + xtend "^4.0.2" + +acorn-walk@^7.0.0, acorn-walk@^7.1.1: + version "7.2.0" + resolved "http://localhost:4873/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" + integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== + +acorn@^7.0.0, acorn@^7.1.1: + version "7.4.1" + resolved "http://localhost:4873/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +acorn@^8.2.4, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.0: + version "8.8.0" + resolved "http://localhost:4873/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" + integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== + +address@^1.0.1, address@^1.1.2: + version "1.2.1" + resolved 
"http://localhost:4873/address/-/address-1.2.1.tgz#25bb61095b7522d65b357baa11bc05492d4c8acd" + integrity sha512-B+6bi5D34+fDYENiH5qOlA0cV2rAGKuWZ9LeyUUehbXy8e0VS9e498yO0Jeeh+iM+6KbfudHTFjXw2MmJD4QRA== + +adjust-sourcemap-loader@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz#fc4a0fd080f7d10471f30a7320f25560ade28c99" + integrity sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A== + dependencies: + loader-utils "^2.0.0" + regex-parser "^2.2.11" + +agent-base@6: + version "6.0.2" + resolved "http://localhost:4873/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== + dependencies: + debug "4" + +ajv-formats@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" + integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== + dependencies: + ajv "^8.0.0" + +ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: + version "3.5.2" + resolved "http://localhost:4873/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" + integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== + +ajv-keywords@^5.0.0: + version "5.1.0" + resolved "http://localhost:4873/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" + integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== + dependencies: + fast-deep-equal "^3.1.3" + +ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: + version "6.12.6" + resolved "http://localhost:4873/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity 
sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ajv@^8.0.0, ajv@^8.6.0, ajv@^8.8.0: + version "8.11.0" + resolved "http://localhost:4873/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f" + integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + +ansi-escapes@^4.2.1, ansi-escapes@^4.3.1: + version "4.3.2" + resolved "http://localhost:4873/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + +ansi-html-community@^0.0.8: + version "0.0.8" + resolved "http://localhost:4873/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" + integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "http://localhost:4873/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-regex@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" + integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "http://localhost:4873/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== 
+ dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "http://localhost:4873/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^5.0.0: + version "5.2.0" + resolved "http://localhost:4873/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + +anymatch@^3.0.3, anymatch@~3.1.2: + version "3.1.2" + resolved "http://localhost:4873/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" + integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +arg@^5.0.2: + version "5.0.2" + resolved "http://localhost:4873/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" + integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== + +argparse@^1.0.7: + version "1.0.10" + resolved "http://localhost:4873/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +argparse@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +aria-query@^4.2.2: + version "4.2.2" + resolved "http://localhost:4873/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b" + integrity sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA== + 
dependencies: + "@babel/runtime" "^7.10.2" + "@babel/runtime-corejs3" "^7.10.2" + +aria-query@^5.0.0: + version "5.0.2" + resolved "http://localhost:4873/aria-query/-/aria-query-5.0.2.tgz#0b8a744295271861e1d933f8feca13f9b70cfdc1" + integrity sha512-eigU3vhqSO+Z8BKDnVLN/ompjhf3pYzecKXz8+whRy+9gZu8n1TCGfwzQUUPnqdHl9ax1Hr9031orZ+UOEYr7Q== + +array-flatten@1.1.1: + version "1.1.1" + resolved "http://localhost:4873/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== + +array-flatten@^2.1.2: + version "2.1.2" + resolved "http://localhost:4873/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" + integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== + +array-includes@^3.1.4, array-includes@^3.1.5: + version "3.1.5" + resolved "http://localhost:4873/array-includes/-/array-includes-3.1.5.tgz#2c320010db8d31031fd2a5f6b3bbd4b1aad31bdb" + integrity sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + get-intrinsic "^1.1.1" + is-string "^1.0.7" + +array-union@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array.prototype.flat@^1.2.5: + version "1.3.0" + resolved "http://localhost:4873/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b" + integrity sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + 
+array.prototype.flatmap@^1.3.0: + version "1.3.0" + resolved "http://localhost:4873/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz#a7e8ed4225f4788a70cd910abcf0791e76a5534f" + integrity sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + +array.prototype.reduce@^1.0.4: + version "1.0.4" + resolved "http://localhost:4873/array.prototype.reduce/-/array.prototype.reduce-1.0.4.tgz#8167e80089f78bff70a99e20bd4201d4663b0a6f" + integrity sha512-WnM+AjG/DvLRLo4DDl+r+SvCzYtD2Jd9oeBYMcEaI7t3fFrHY9M53/wdLcTvmZNQ70IU6Htj0emFkZ5TS+lrdw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-array-method-boxes-properly "^1.0.0" + is-string "^1.0.7" + +asap@~2.0.6: + version "2.0.6" + resolved "http://localhost:4873/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== + +ast-types-flow@^0.0.7: + version "0.0.7" + resolved "http://localhost:4873/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" + integrity sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag== + +async@^3.2.3: + version "3.2.4" + resolved "http://localhost:4873/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" + integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== + +asynckit@^0.4.0: + version "0.4.0" + resolved "http://localhost:4873/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + +at-least-node@^1.0.0: + version "1.0.0" + resolved 
"http://localhost:4873/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" + integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== + +autoprefixer@^10.4.11, autoprefixer@^10.4.12: + version "10.4.12" + resolved "http://localhost:4873/autoprefixer/-/autoprefixer-10.4.12.tgz#183f30bf0b0722af54ee5ef257f7d4320bb33129" + integrity sha512-WrCGV9/b97Pa+jtwf5UGaRjgQIg7OK3D06GnoYoZNcG1Xb8Gt3EfuKjlhh9i/VtT16g6PYjZ69jdJ2g8FxSC4Q== + dependencies: + browserslist "^4.21.4" + caniuse-lite "^1.0.30001407" + fraction.js "^4.2.0" + normalize-range "^0.1.2" + picocolors "^1.0.0" + postcss-value-parser "^4.2.0" + +axe-core@^4.4.3: + version "4.4.3" + resolved "http://localhost:4873/axe-core/-/axe-core-4.4.3.tgz#11c74d23d5013c0fa5d183796729bc3482bd2f6f" + integrity sha512-32+ub6kkdhhWick/UjvEwRchgoetXqTK14INLqbGm5U2TzBkBNF3nQtLYm8ovxSkQWArjEQvftCKryjZaATu3w== + +axobject-query@^2.2.0: + version "2.2.0" + resolved "http://localhost:4873/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be" + integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA== + +babel-jest@^27.4.2, babel-jest@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/babel-jest/-/babel-jest-27.5.1.tgz#a1bf8d61928edfefd21da27eb86a695bfd691444" + integrity sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg== + dependencies: + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/babel__core" "^7.1.14" + babel-plugin-istanbul "^6.1.1" + babel-preset-jest "^27.5.1" + chalk "^4.0.0" + graceful-fs "^4.2.9" + slash "^3.0.0" + +babel-loader@^8.2.3: + version "8.2.5" + resolved "http://localhost:4873/babel-loader/-/babel-loader-8.2.5.tgz#d45f585e654d5a5d90f5350a779d7647c5ed512e" + integrity sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ== + dependencies: + 
find-cache-dir "^3.3.1" + loader-utils "^2.0.0" + make-dir "^3.1.0" + schema-utils "^2.6.5" + +babel-plugin-dynamic-import-node@^2.3.3: + version "2.3.3" + resolved "http://localhost:4873/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3" + integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ== + dependencies: + object.assign "^4.1.0" + +babel-plugin-istanbul@^6.1.1: + version "6.1.1" + resolved "http://localhost:4873/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^5.0.4" + test-exclude "^6.0.0" + +babel-plugin-jest-hoist@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz#9be98ecf28c331eb9f5df9c72d6f89deb8181c2e" + integrity sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ== + dependencies: + "@babel/template" "^7.3.3" + "@babel/types" "^7.3.3" + "@types/babel__core" "^7.0.0" + "@types/babel__traverse" "^7.0.6" + +babel-plugin-macros@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz#9ef6dc74deb934b4db344dc973ee851d148c50c1" + integrity sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg== + dependencies: + "@babel/runtime" "^7.12.5" + cosmiconfig "^7.0.0" + resolve "^1.19.0" + +babel-plugin-named-asset-import@^0.3.8: + version "0.3.8" + resolved "http://localhost:4873/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.8.tgz#6b7fa43c59229685368683c28bc9734f24524cc2" + integrity 
sha512-WXiAc++qo7XcJ1ZnTYGtLxmBCVbddAml3CEXgWaBzNzLNoxtQ8AiGEFDMOhot9XjTCQbvP5E77Fj9Gk924f00Q== + +babel-plugin-polyfill-corejs2@^0.3.3: + version "0.3.3" + resolved "http://localhost:4873/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.3.tgz#5d1bd3836d0a19e1b84bbf2d9640ccb6f951c122" + integrity sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q== + dependencies: + "@babel/compat-data" "^7.17.7" + "@babel/helper-define-polyfill-provider" "^0.3.3" + semver "^6.1.1" + +babel-plugin-polyfill-corejs3@^0.6.0: + version "0.6.0" + resolved "http://localhost:4873/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.6.0.tgz#56ad88237137eade485a71b52f72dbed57c6230a" + integrity sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.3" + core-js-compat "^3.25.1" + +babel-plugin-polyfill-regenerator@^0.4.1: + version "0.4.1" + resolved "http://localhost:4873/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.1.tgz#390f91c38d90473592ed43351e801a9d3e0fd747" + integrity sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.3" + +babel-plugin-transform-react-remove-prop-types@^0.4.24: + version "0.4.24" + resolved "http://localhost:4873/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a" + integrity sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA== + +babel-preset-current-node-syntax@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" + integrity 
sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== + dependencies: + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-bigint" "^7.8.3" + "@babel/plugin-syntax-class-properties" "^7.8.3" + "@babel/plugin-syntax-import-meta" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.8.3" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-top-level-await" "^7.8.3" + +babel-preset-jest@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz#91f10f58034cb7989cb4f962b69fa6eef6a6bc81" + integrity sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag== + dependencies: + babel-plugin-jest-hoist "^27.5.1" + babel-preset-current-node-syntax "^1.0.0" + +babel-preset-react-app@^10.0.1: + version "10.0.1" + resolved "http://localhost:4873/babel-preset-react-app/-/babel-preset-react-app-10.0.1.tgz#ed6005a20a24f2c88521809fa9aea99903751584" + integrity sha512-b0D9IZ1WhhCWkrTXyFuIIgqGzSkRIH5D5AmB0bXbzYAB1OBAwHcUeyWW2LorutLWF5btNo/N7r/cIdmvvKJlYg== + dependencies: + "@babel/core" "^7.16.0" + "@babel/plugin-proposal-class-properties" "^7.16.0" + "@babel/plugin-proposal-decorators" "^7.16.4" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.0" + "@babel/plugin-proposal-numeric-separator" "^7.16.0" + "@babel/plugin-proposal-optional-chaining" "^7.16.0" + "@babel/plugin-proposal-private-methods" "^7.16.0" + "@babel/plugin-transform-flow-strip-types" "^7.16.0" + "@babel/plugin-transform-react-display-name" "^7.16.0" + "@babel/plugin-transform-runtime" "^7.16.4" + "@babel/preset-env" "^7.16.4" + "@babel/preset-react" 
"^7.16.0" + "@babel/preset-typescript" "^7.16.0" + "@babel/runtime" "^7.16.3" + babel-plugin-macros "^3.1.0" + babel-plugin-transform-react-remove-prop-types "^0.4.24" + +balanced-match@^1.0.0: + version "1.0.2" + resolved "http://localhost:4873/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +batch@0.6.1: + version "0.6.1" + resolved "http://localhost:4873/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" + integrity sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw== + +bfj@^7.0.2: + version "7.0.2" + resolved "http://localhost:4873/bfj/-/bfj-7.0.2.tgz#1988ce76f3add9ac2913fd8ba47aad9e651bfbb2" + integrity sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw== + dependencies: + bluebird "^3.5.5" + check-types "^11.1.1" + hoopy "^0.1.4" + tryer "^1.0.1" + +big.js@^5.2.2: + version "5.2.2" + resolved "http://localhost:4873/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" + integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== + +binary-extensions@^2.0.0: + version "2.2.0" + resolved "http://localhost:4873/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" + integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== + +bluebird@^3.5.5: + version "3.7.2" + resolved "http://localhost:4873/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" + integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== + +body-parser@1.20.0: + version "1.20.0" + resolved "http://localhost:4873/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" + integrity 
sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== + dependencies: + bytes "3.1.2" + content-type "~1.0.4" + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" + iconv-lite "0.4.24" + on-finished "2.4.1" + qs "6.10.3" + raw-body "2.5.1" + type-is "~1.6.18" + unpipe "1.0.0" + +bonjour-service@^1.0.11: + version "1.0.14" + resolved "http://localhost:4873/bonjour-service/-/bonjour-service-1.0.14.tgz#c346f5bc84e87802d08f8d5a60b93f758e514ee7" + integrity sha512-HIMbgLnk1Vqvs6B4Wq5ep7mxvj9sGz5d1JJyDNSGNIdA/w2MCz6GTjWTdjqOJV1bEPj+6IkxDvWNFKEBxNt4kQ== + dependencies: + array-flatten "^2.1.2" + dns-equal "^1.0.0" + fast-deep-equal "^3.1.3" + multicast-dns "^7.2.5" + +boolbase@^1.0.0, boolbase@~1.0.0: + version "1.0.0" + resolved "http://localhost:4873/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" + integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "http://localhost:4873/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +braces@^3.0.2, braces@~3.0.2: + version "3.0.2" + resolved "http://localhost:4873/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +browser-process-hrtime@^1.0.0: + version "1.0.0" 
+ resolved "http://localhost:4873/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" + integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== + +browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.18.1, browserslist@^4.20.3, browserslist@^4.21.3, browserslist@^4.21.4: + version "4.21.4" + resolved "http://localhost:4873/browserslist/-/browserslist-4.21.4.tgz#e7496bbc67b9e39dd0f98565feccdcb0d4ff6987" + integrity sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw== + dependencies: + caniuse-lite "^1.0.30001400" + electron-to-chromium "^1.4.251" + node-releases "^2.0.6" + update-browserslist-db "^1.0.9" + +bser@2.1.1: + version "2.1.1" + resolved "http://localhost:4873/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + +buffer-from@^1.0.0: + version "1.1.2" + resolved "http://localhost:4873/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +builtin-modules@^3.1.0: + version "3.3.0" + resolved "http://localhost:4873/builtin-modules/-/builtin-modules-3.3.0.tgz#cae62812b89801e9656336e46223e030386be7b6" + integrity sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw== + +bytes@3.0.0: + version "3.0.0" + resolved "http://localhost:4873/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" + integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== + +bytes@3.1.2: + version "3.1.2" + resolved "http://localhost:4873/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity 
sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +callsites@^3.0.0: + version "3.1.0" + resolved "http://localhost:4873/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camel-case@^4.1.2: + version "4.1.2" + resolved "http://localhost:4873/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" + integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== + dependencies: + pascal-case "^3.1.2" + tslib "^2.0.3" + +camelcase-css@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" + integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== + +camelcase@^5.3.1: + version "5.3.1" + resolved "http://localhost:4873/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +camelcase@^6.2.0, camelcase@^6.2.1: + version "6.3.0" + resolved "http://localhost:4873/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + +caniuse-api@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" + integrity 
sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== + dependencies: + browserslist "^4.0.0" + caniuse-lite "^1.0.0" + lodash.memoize "^4.1.2" + lodash.uniq "^4.5.0" + +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001400, caniuse-lite@^1.0.30001407: + version "1.0.30001416" + resolved "http://localhost:4873/caniuse-lite/-/caniuse-lite-1.0.30001416.tgz#29692af8a6a11412f2d3cf9a59d588fcdd21ce4c" + integrity sha512-06wzzdAkCPZO+Qm4e/eNghZBDfVNDsCgw33T27OwBH9unE9S478OYw//Q2L7Npf/zBzs7rjZOszIFQkwQKAEqA== + +case-sensitive-paths-webpack-plugin@^2.4.0: + version "2.4.0" + resolved "http://localhost:4873/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" + integrity sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw== + +chalk@^2.0.0, chalk@^2.4.1: + version "2.4.2" + resolved "http://localhost:4873/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" + integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: + version "4.1.2" + resolved "http://localhost:4873/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +char-regex@^1.0.2: + version "1.0.2" + resolved 
"http://localhost:4873/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + +char-regex@^2.0.0: + version "2.0.1" + resolved "http://localhost:4873/char-regex/-/char-regex-2.0.1.tgz#6dafdb25f9d3349914079f010ba8d0e6ff9cd01e" + integrity sha512-oSvEeo6ZUD7NepqAat3RqoucZ5SeqLJgOvVIwkafu6IP3V0pO38s/ypdVUmDDK6qIIHNlYHJAKX9E7R7HoKElw== + +check-types@^11.1.1: + version "11.1.2" + resolved "http://localhost:4873/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f" + integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ== + +chokidar@^3.4.2, chokidar@^3.5.3: + version "3.5.3" + resolved "http://localhost:4873/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +chrome-trace-event@^1.0.2: + version "1.0.3" + resolved "http://localhost:4873/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" + integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== + +ci-info@^3.2.0: + version "3.4.0" + resolved "http://localhost:4873/ci-info/-/ci-info-3.4.0.tgz#b28484fd436cbc267900364f096c9dc185efb251" + integrity sha512-t5QdPT5jq3o262DOQ8zA6E1tlH2upmUc4Hlvrbx1pGYJuiiHl7O7rvVNI+l8HTVhd/q3Qc9vqimkNk5yiXsAug== + +cjs-module-lexer@^1.0.0: + version "1.2.2" + resolved "http://localhost:4873/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" + integrity 
sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== + +clean-css@^5.2.2: + version "5.3.1" + resolved "http://localhost:4873/clean-css/-/clean-css-5.3.1.tgz#d0610b0b90d125196a2894d35366f734e5d7aa32" + integrity sha512-lCr8OHhiWCTw4v8POJovCoh4T7I9U11yVsPjMWWnnMmp9ZowCxyad1Pathle/9HjaDp+fdQKjO9fQydE6RHTZg== + dependencies: + source-map "~0.6.0" + +cliui@^7.0.2: + version "7.0.4" + resolved "http://localhost:4873/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" + +clone-deep@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" + integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== + dependencies: + is-plain-object "^2.0.4" + kind-of "^6.0.2" + shallow-clone "^3.0.0" + +co@^4.6.0: + version "4.6.0" + resolved "http://localhost:4873/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== + +coa@^2.0.2: + version "2.0.2" + resolved "http://localhost:4873/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3" + integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA== + dependencies: + "@types/q" "^1.5.1" + chalk "^2.4.1" + q "^1.1.2" + +collect-v8-coverage@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" + integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== + +color-convert@^1.9.0: + version "1.9.3" + resolved 
"http://localhost:4873/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "http://localhost:4873/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + +color-name@^1.1.4, color-name@~1.1.4: + version "1.1.4" + resolved "http://localhost:4873/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +colord@^2.9.1: + version "2.9.3" + resolved "http://localhost:4873/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" + integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== + +colorette@^2.0.10: + version "2.0.19" + resolved "http://localhost:4873/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798" + integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ== + +combined-stream@^1.0.8: + version "1.0.8" + resolved "http://localhost:4873/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +commander@^2.20.0: + version "2.20.3" + resolved 
"http://localhost:4873/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + +commander@^7.2.0: + version "7.2.0" + resolved "http://localhost:4873/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" + integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== + +commander@^8.3.0: + version "8.3.0" + resolved "http://localhost:4873/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" + integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== + +common-path-prefix@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" + integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== + +common-tags@^1.8.0: + version "1.8.2" + resolved "http://localhost:4873/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" + integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== + +commondir@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== + +compressible@~2.0.16: + version "2.0.18" + resolved "http://localhost:4873/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" + integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== + dependencies: + mime-db ">= 1.43.0 < 2" + +compression@^1.7.4: + version "1.7.4" + resolved "http://localhost:4873/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" + integrity 
sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== + dependencies: + accepts "~1.3.5" + bytes "3.0.0" + compressible "~2.0.16" + debug "2.6.9" + on-headers "~1.0.2" + safe-buffer "5.1.2" + vary "~1.1.2" + +concat-map@0.0.1: + version "0.0.1" + resolved "http://localhost:4873/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +confusing-browser-globals@^1.0.11: + version "1.0.11" + resolved "http://localhost:4873/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81" + integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA== + +connect-history-api-fallback@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" + integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== + +content-disposition@0.5.4: + version "0.5.4" + resolved "http://localhost:4873/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" + +content-type@~1.0.4: + version "1.0.4" + resolved "http://localhost:4873/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + +convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: + version "1.8.0" + resolved "http://localhost:4873/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity 
sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + +cookie-signature@1.0.6: + version "1.0.6" + resolved "http://localhost:4873/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== + +cookie@0.5.0: + version "0.5.0" + resolved "http://localhost:4873/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== + +core-js-compat@^3.25.1: + version "3.25.5" + resolved "http://localhost:4873/core-js-compat/-/core-js-compat-3.25.5.tgz#0016e8158c904f7b059486639e6e82116eafa7d9" + integrity sha512-ovcyhs2DEBUIE0MGEKHP4olCUW/XYte3Vroyxuh38rD1wAO4dHohsovUC4eAOuzFxE6b+RXvBU3UZ9o0YhUTkA== + dependencies: + browserslist "^4.21.4" + +core-js-pure@^3.25.1, core-js-pure@^3.8.1: + version "3.25.5" + resolved "http://localhost:4873/core-js-pure/-/core-js-pure-3.25.5.tgz#79716ba54240c6aa9ceba6eee08cf79471ba184d" + integrity sha512-oml3M22pHM+igfWHDfdLVq2ShWmjM2V4L+dQEBs0DWVIqEm9WHCwGAlZ6BmyBQGy5sFrJmcx+856D9lVKyGWYg== + +core-js@^3.19.2: + version "3.25.5" + resolved "http://localhost:4873/core-js/-/core-js-3.25.5.tgz#e86f651a2ca8a0237a5f064c2fe56cef89646e27" + integrity sha512-nbm6eZSjm+ZuBQxCUPQKQCoUEfFOXjUZ8dTTyikyKaWrTYmAVbykQfwsKE5dBK88u3QCkCrzsx/PPlKfhsvgpw== + +core-util-is@~1.0.0: + version "1.0.3" + resolved "http://localhost:4873/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== + +cosmiconfig-typescript-loader@^4.1.1: + version "4.1.1" + resolved "http://localhost:4873/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-4.1.1.tgz#38dd3578344038dae40fdf09792bc2e9df529f78" + integrity 
sha512-9DHpa379Gp0o0Zefii35fcmuuin6q92FnLDffzdZ0l9tVd3nEobG3O+MZ06+kuBvFTSVScvNb/oHA13Nd4iipg== + +cosmiconfig@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" + integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.1.0" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.7.2" + +cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/cosmiconfig/-/cosmiconfig-7.0.1.tgz#714d756522cace867867ccb4474c5d01bbae5d6d" + integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.2.1" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.10.0" + +craco-wasm@0.0.1: + version "0.0.1" + resolved "http://localhost:4873/craco-wasm/-/craco-wasm-0.0.1.tgz#a7edbf7ff64e7569909b15684c00de13209985c6" + integrity sha512-0vwZLtkQocS7UlPg9IF4TsG/6gKXcd9O0ISomjRoBMvR2XvtZN4yxvU8/WlY0Vf42PtOcWvhSx9i4oVNxLVE6w== + +cross-spawn@^7.0.2, cross-spawn@^7.0.3: + version "7.0.3" + resolved "http://localhost:4873/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +crypto-random-string@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" + integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== + +css-blank-pseudo@^3.0.3: + version "3.0.3" + resolved "http://localhost:4873/css-blank-pseudo/-/css-blank-pseudo-3.0.3.tgz#36523b01c12a25d812df343a32c322d2a2324561" + integrity 
sha512-VS90XWtsHGqoM0t4KpH053c4ehxZ2E6HtGI7x68YFV0pTo/QmkV/YFA+NnlvK8guxZVNWGQhVNJGC39Q8XF4OQ== + dependencies: + postcss-selector-parser "^6.0.9" + +css-declaration-sorter@^6.3.0: + version "6.3.1" + resolved "http://localhost:4873/css-declaration-sorter/-/css-declaration-sorter-6.3.1.tgz#be5e1d71b7a992433fb1c542c7a1b835e45682ec" + integrity sha512-fBffmak0bPAnyqc/HO8C3n2sHrp9wcqQz6ES9koRF2/mLOVAx9zIQ3Y7R29sYCteTPqMCwns4WYQoCX91Xl3+w== + +css-has-pseudo@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/css-has-pseudo/-/css-has-pseudo-3.0.4.tgz#57f6be91ca242d5c9020ee3e51bbb5b89fc7af73" + integrity sha512-Vse0xpR1K9MNlp2j5w1pgWIJtm1a8qS0JwS9goFYcImjlHEmywP9VUF05aGBXzGpDJF86QXk4L0ypBmwPhGArw== + dependencies: + postcss-selector-parser "^6.0.9" + +css-loader@^6.5.1: + version "6.7.1" + resolved "http://localhost:4873/css-loader/-/css-loader-6.7.1.tgz#e98106f154f6e1baf3fc3bc455cb9981c1d5fd2e" + integrity sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw== + dependencies: + icss-utils "^5.1.0" + postcss "^8.4.7" + postcss-modules-extract-imports "^3.0.0" + postcss-modules-local-by-default "^4.0.0" + postcss-modules-scope "^3.0.0" + postcss-modules-values "^4.0.0" + postcss-value-parser "^4.2.0" + semver "^7.3.5" + +css-minimizer-webpack-plugin@^3.2.0: + version "3.4.1" + resolved "http://localhost:4873/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz#ab78f781ced9181992fe7b6e4f3422e76429878f" + integrity sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q== + dependencies: + cssnano "^5.0.6" + jest-worker "^27.0.2" + postcss "^8.3.5" + schema-utils "^4.0.0" + serialize-javascript "^6.0.0" + source-map "^0.6.1" + +css-prefers-color-scheme@^6.0.3: + version "6.0.3" + resolved "http://localhost:4873/css-prefers-color-scheme/-/css-prefers-color-scheme-6.0.3.tgz#ca8a22e5992c10a5b9d315155e7caee625903349" + integrity 
sha512-4BqMbZksRkJQx2zAjrokiGMd07RqOa2IxIrrN10lyBe9xhn9DEvjUK79J6jkeiv9D9hQFXKb6g1jwU62jziJZA== + +css-select-base-adapter@^0.1.1: + version "0.1.1" + resolved "http://localhost:4873/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7" + integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== + +css-select@^2.0.0: + version "2.1.0" + resolved "http://localhost:4873/css-select/-/css-select-2.1.0.tgz#6a34653356635934a81baca68d0255432105dbef" + integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== + dependencies: + boolbase "^1.0.0" + css-what "^3.2.1" + domutils "^1.7.0" + nth-check "^1.0.2" + +css-select@^4.1.3: + version "4.3.0" + resolved "http://localhost:4873/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" + integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== + dependencies: + boolbase "^1.0.0" + css-what "^6.0.1" + domhandler "^4.3.1" + domutils "^2.8.0" + nth-check "^2.0.1" + +css-tree@1.0.0-alpha.37: + version "1.0.0-alpha.37" + resolved "http://localhost:4873/css-tree/-/css-tree-1.0.0-alpha.37.tgz#98bebd62c4c1d9f960ec340cf9f7522e30709a22" + integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== + dependencies: + mdn-data "2.0.4" + source-map "^0.6.1" + +css-tree@^1.1.2, css-tree@^1.1.3: + version "1.1.3" + resolved "http://localhost:4873/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" + integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== + dependencies: + mdn-data "2.0.14" + source-map "^0.6.1" + +css-what@^3.2.1: + version "3.4.2" + resolved "http://localhost:4873/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4" + integrity 
sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== + +css-what@^6.0.1: + version "6.1.0" + resolved "http://localhost:4873/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" + integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== + +css.escape@^1.5.1: + version "1.5.1" + resolved "http://localhost:4873/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb" + integrity sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg== + +cssdb@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/cssdb/-/cssdb-7.0.1.tgz#3810a0c67ae06362982dfe965dbedf57a0f26617" + integrity sha512-pT3nzyGM78poCKLAEy2zWIVX2hikq6dIrjuZzLV98MumBg+xMTNYfHx7paUlfiRTgg91O/vR889CIf+qiv79Rw== + +cssesc@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + +cssnano-preset-default@^5.2.12: + version "5.2.12" + resolved "http://localhost:4873/cssnano-preset-default/-/cssnano-preset-default-5.2.12.tgz#ebe6596ec7030e62c3eb2b3c09f533c0644a9a97" + integrity sha512-OyCBTZi+PXgylz9HAA5kHyoYhfGcYdwFmyaJzWnzxuGRtnMw/kR6ilW9XzlzlRAtB6PLT/r+prYgkef7hngFew== + dependencies: + css-declaration-sorter "^6.3.0" + cssnano-utils "^3.1.0" + postcss-calc "^8.2.3" + postcss-colormin "^5.3.0" + postcss-convert-values "^5.1.2" + postcss-discard-comments "^5.1.2" + postcss-discard-duplicates "^5.1.0" + postcss-discard-empty "^5.1.1" + postcss-discard-overridden "^5.1.0" + postcss-merge-longhand "^5.1.6" + postcss-merge-rules "^5.1.2" + postcss-minify-font-values "^5.1.0" + postcss-minify-gradients "^5.1.1" + postcss-minify-params "^5.1.3" + postcss-minify-selectors "^5.2.1" + postcss-normalize-charset "^5.1.0" + postcss-normalize-display-values "^5.1.0" + 
postcss-normalize-positions "^5.1.1" + postcss-normalize-repeat-style "^5.1.1" + postcss-normalize-string "^5.1.0" + postcss-normalize-timing-functions "^5.1.0" + postcss-normalize-unicode "^5.1.0" + postcss-normalize-url "^5.1.0" + postcss-normalize-whitespace "^5.1.1" + postcss-ordered-values "^5.1.3" + postcss-reduce-initial "^5.1.0" + postcss-reduce-transforms "^5.1.0" + postcss-svgo "^5.1.0" + postcss-unique-selectors "^5.1.1" + +cssnano-utils@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/cssnano-utils/-/cssnano-utils-3.1.0.tgz#95684d08c91511edfc70d2636338ca37ef3a6861" + integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== + +cssnano@^5.0.6: + version "5.1.13" + resolved "http://localhost:4873/cssnano/-/cssnano-5.1.13.tgz#83d0926e72955332dc4802a7070296e6258efc0a" + integrity sha512-S2SL2ekdEz6w6a2epXn4CmMKU4K3KpcyXLKfAYc9UQQqJRkD/2eLUG0vJ3Db/9OvO5GuAdgXw3pFbR6abqghDQ== + dependencies: + cssnano-preset-default "^5.2.12" + lilconfig "^2.0.3" + yaml "^1.10.2" + +csso@^4.0.2, csso@^4.2.0: + version "4.2.0" + resolved "http://localhost:4873/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529" + integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== + dependencies: + css-tree "^1.1.2" + +cssom@^0.4.4: + version "0.4.4" + resolved "http://localhost:4873/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" + integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== + +cssom@~0.3.6: + version "0.3.8" + resolved "http://localhost:4873/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" + integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== + +cssstyle@^2.3.0: + version "2.3.0" + resolved "http://localhost:4873/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" + integrity 
sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== + dependencies: + cssom "~0.3.6" + +csstype@^3.0.2: + version "3.1.1" + resolved "http://localhost:4873/csstype/-/csstype-3.1.1.tgz#841b532c45c758ee546a11d5bd7b7b473c8c30b9" + integrity sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw== + +damerau-levenshtein@^1.0.8: + version "1.0.8" + resolved "http://localhost:4873/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" + integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== + +data-urls@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" + integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== + dependencies: + abab "^2.0.3" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.0.0" + +debug@2.6.9, debug@^2.6.0, debug@^2.6.9: + version "2.6.9" + resolved "http://localhost:4873/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: + version "4.3.4" + resolved "http://localhost:4873/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +debug@^3.2.7: + version "3.2.7" + resolved "http://localhost:4873/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +decimal.js@^10.2.1: + version "10.4.1" + resolved 
"http://localhost:4873/decimal.js/-/decimal.js-10.4.1.tgz#be75eeac4a2281aace80c1a8753587c27ef053e7" + integrity sha512-F29o+vci4DodHYT9UrR5IEbfBw9pE5eSapIJdTqXK5+6hq+t8VRxwQyKlW2i+KDKFkkJQRvFyI/QXD83h8LyQw== + +dedent@^0.7.0: + version "0.7.0" + resolved "http://localhost:4873/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== + +deep-is@^0.1.3, deep-is@~0.1.3: + version "0.1.4" + resolved "http://localhost:4873/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +deepmerge@^4.2.2: + version "4.2.2" + resolved "http://localhost:4873/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +default-gateway@^6.0.3: + version "6.0.3" + resolved "http://localhost:4873/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" + integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== + dependencies: + execa "^5.0.0" + +define-lazy-prop@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" + integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== + +define-properties@^1.1.3, define-properties@^1.1.4: + version "1.1.4" + resolved "http://localhost:4873/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" + integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== + dependencies: + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +defined@^1.0.0: + version "1.0.0" + resolved 
"http://localhost:4873/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" + integrity sha512-Y2caI5+ZwS5c3RiNDJ6u53VhQHv+hHKwhkI1iHvceKUHw9Df6EK2zRLfjejRgMuCuxK7PfSWIMwWecceVvThjQ== + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "http://localhost:4873/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + +depd@2.0.0: + version "2.0.0" + resolved "http://localhost:4873/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + +depd@~1.1.2: + version "1.1.2" + resolved "http://localhost:4873/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" + integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== + +destroy@1.2.0: + version "1.2.0" + resolved "http://localhost:4873/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== + +detect-newline@^3.0.0: + version "3.1.0" + resolved "http://localhost:4873/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + +detect-node@^2.0.4: + version "2.1.0" + resolved "http://localhost:4873/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" + integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== + +detect-port-alt@^1.1.6: + version "1.1.6" + resolved "http://localhost:4873/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275" + integrity 
sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== + dependencies: + address "^1.0.1" + debug "^2.6.0" + +detective@^5.2.1: + version "5.2.1" + resolved "http://localhost:4873/detective/-/detective-5.2.1.tgz#6af01eeda11015acb0e73f933242b70f24f91034" + integrity sha512-v9XE1zRnz1wRtgurGu0Bs8uHKFSTdteYZNbIPFVhUZ39L/S79ppMpdmVOZAnoz1jfEFodc48n6MX483Xo3t1yw== + dependencies: + acorn-node "^1.8.2" + defined "^1.0.0" + minimist "^1.2.6" + +didyoumean@^1.2.2: + version "1.2.2" + resolved "http://localhost:4873/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" + integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== + +diff-sequences@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/diff-sequences/-/diff-sequences-27.5.1.tgz#eaecc0d327fd68c8d9672a1e64ab8dccb2ef5327" + integrity sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ== + +diff-sequences@^29.0.0: + version "29.0.0" + resolved "http://localhost:4873/diff-sequences/-/diff-sequences-29.0.0.tgz#bae49972ef3933556bcb0800b72e8579d19d9e4f" + integrity sha512-7Qe/zd1wxSDL4D/X/FPjOMB+ZMDt71W94KYaq05I2l0oQqgXgs7s4ftYYmV38gBSrPz2vcygxfs1xn0FT+rKNA== + +dir-glob@^3.0.1: + version "3.0.1" + resolved "http://localhost:4873/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +dlv@^1.1.3: + version "1.1.3" + resolved "http://localhost:4873/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" + integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== + +dns-equal@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" + integrity 
sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg== + +dns-packet@^5.2.2: + version "5.4.0" + resolved "http://localhost:4873/dns-packet/-/dns-packet-5.4.0.tgz#1f88477cf9f27e78a213fb6d118ae38e759a879b" + integrity sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g== + dependencies: + "@leichtgewicht/ip-codec" "^2.0.1" + +doctrine@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +dom-accessibility-api@^0.5.6, dom-accessibility-api@^0.5.9: + version "0.5.14" + resolved "http://localhost:4873/dom-accessibility-api/-/dom-accessibility-api-0.5.14.tgz#56082f71b1dc7aac69d83c4285eef39c15d93f56" + integrity sha512-NMt+m9zFMPZe0JcY9gN224Qvk6qLIdqex29clBvc/y75ZBX9YA9wNK3frsYvu2DI1xcCIwxwnX+TlsJ2DSOADg== + +dom-converter@^0.2.0: + version "0.2.0" + resolved "http://localhost:4873/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" + integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== + dependencies: + utila "~0.4" + +dom-serializer@0: + version "0.2.2" + resolved "http://localhost:4873/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" + integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== + dependencies: + domelementtype "^2.0.1" + entities "^2.0.0" + +dom-serializer@^1.0.1: + version "1.4.1" + resolved 
"http://localhost:4873/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" + integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.2.0" + entities "^2.0.0" + +domelementtype@1: + version "1.3.1" + resolved "http://localhost:4873/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" + integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== + +domelementtype@^2.0.1, domelementtype@^2.2.0: + version "2.3.0" + resolved "http://localhost:4873/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" + integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== + +domexception@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" + integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== + dependencies: + webidl-conversions "^5.0.0" + +domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: + version "4.3.1" + resolved "http://localhost:4873/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" + integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== + dependencies: + domelementtype "^2.2.0" + +domutils@^1.7.0: + version "1.7.0" + resolved "http://localhost:4873/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" + integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== + dependencies: + dom-serializer "0" + domelementtype "1" + +domutils@^2.5.2, domutils@^2.8.0: + version "2.8.0" + resolved "http://localhost:4873/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" + integrity 
sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== + dependencies: + dom-serializer "^1.0.1" + domelementtype "^2.2.0" + domhandler "^4.2.0" + +dot-case@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" + integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +dotenv-expand@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" + integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== + +dotenv@^10.0.0: + version "10.0.0" + resolved "http://localhost:4873/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" + integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== + +duplexer@^0.1.2: + version "0.1.2" + resolved "http://localhost:4873/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" + integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== + +ee-first@1.1.1: + version "1.1.1" + resolved "http://localhost:4873/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== + +ejs@^3.1.6: + version "3.1.8" + resolved "http://localhost:4873/ejs/-/ejs-3.1.8.tgz#758d32910c78047585c7ef1f92f9ee041c1c190b" + integrity sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ== + dependencies: + jake "^10.8.5" + +electron-to-chromium@^1.4.251: + version "1.4.274" + resolved "http://localhost:4873/electron-to-chromium/-/electron-to-chromium-1.4.274.tgz#74369ac6f020c3cea7c77ec040ddf159fe226233" + integrity 
sha512-Fgn7JZQzq85I81FpKUNxVLAzoghy8JZJ4NIue+YfUYBbu1AkpgzFvNwzF/ZNZH9ElkmJD0TSWu1F2gTpw/zZlg== + +emittery@^0.10.2: + version "0.10.2" + resolved "http://localhost:4873/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" + integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== + +emittery@^0.8.1: + version "0.8.1" + resolved "http://localhost:4873/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" + integrity sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "http://localhost:4873/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +emoji-regex@^9.2.2: + version "9.2.2" + resolved "http://localhost:4873/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + +emojis-list@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" + integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== + +encodeurl@~1.0.2: + version "1.0.2" + resolved "http://localhost:4873/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== + +enhanced-resolve@^5.10.0: + version "5.10.0" + resolved "http://localhost:4873/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz#0dc579c3bb2a1032e357ac45b8f3a6f3ad4fb1e6" + integrity sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + +entities@^2.0.0: + 
version "2.2.0" + resolved "http://localhost:4873/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" + integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== + +error-ex@^1.3.1: + version "1.3.2" + resolved "http://localhost:4873/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +error-stack-parser@^2.0.6: + version "2.1.4" + resolved "http://localhost:4873/error-stack-parser/-/error-stack-parser-2.1.4.tgz#229cb01cdbfa84440bfa91876285b94680188286" + integrity sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ== + dependencies: + stackframe "^1.3.4" + +es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5, es-abstract@^1.20.1: + version "1.20.4" + resolved "http://localhost:4873/es-abstract/-/es-abstract-1.20.4.tgz#1d103f9f8d78d4cf0713edcd6d0ed1a46eed5861" + integrity sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + function.prototype.name "^1.1.5" + get-intrinsic "^1.1.3" + get-symbol-description "^1.0.0" + has "^1.0.3" + has-property-descriptors "^1.0.0" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + is-callable "^1.2.7" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-weakref "^1.0.2" + object-inspect "^1.12.2" + object-keys "^1.1.1" + object.assign "^4.1.4" + regexp.prototype.flags "^1.4.3" + safe-regex-test "^1.0.0" + string.prototype.trimend "^1.0.5" + string.prototype.trimstart "^1.0.5" + unbox-primitive "^1.0.2" + +es-array-method-boxes-properly@^1.0.0: + version "1.0.0" + resolved 
"http://localhost:4873/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz#873f3e84418de4ee19c5be752990b2e44718d09e" + integrity sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA== + +es-module-lexer@^0.9.0: + version "0.9.3" + resolved "http://localhost:4873/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" + integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== + +es-shim-unscopables@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== + dependencies: + has "^1.0.3" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "http://localhost:4873/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escalade@^3.1.1: + version "3.1.1" + resolved "http://localhost:4873/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-html@~1.0.3: + version "1.0.3" + resolved "http://localhost:4873/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== + 
+escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +escodegen@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" + integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== + dependencies: + esprima "^4.0.1" + estraverse "^5.2.0" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.6.1" + +eslint-config-react-app@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/eslint-config-react-app/-/eslint-config-react-app-7.0.1.tgz#73ba3929978001c5c86274c017ea57eb5fa644b4" + integrity sha512-K6rNzvkIeHaTd8m/QEh1Zko0KI7BACWkkneSs6s9cKZC/J27X3eZR6Upt1jkmZ/4FK+XUOPPxMEN7+lbUXfSlA== + dependencies: + "@babel/core" "^7.16.0" + "@babel/eslint-parser" "^7.16.3" + "@rushstack/eslint-patch" "^1.1.0" + "@typescript-eslint/eslint-plugin" "^5.5.0" + "@typescript-eslint/parser" "^5.5.0" + babel-preset-react-app "^10.0.1" + confusing-browser-globals "^1.0.11" + eslint-plugin-flowtype "^8.0.3" + eslint-plugin-import "^2.25.3" + eslint-plugin-jest "^25.3.0" + eslint-plugin-jsx-a11y "^6.5.1" + eslint-plugin-react "^7.27.1" + eslint-plugin-react-hooks "^4.3.0" + eslint-plugin-testing-library "^5.0.1" + +eslint-import-resolver-node@^0.3.6: + version "0.3.6" + resolved "http://localhost:4873/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" + integrity 
sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== + dependencies: + debug "^3.2.7" + resolve "^1.20.0" + +eslint-module-utils@^2.7.3: + version "2.7.4" + resolved "http://localhost:4873/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz#4f3e41116aaf13a20792261e61d3a2e7e0583974" + integrity sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA== + dependencies: + debug "^3.2.7" + +eslint-plugin-flowtype@^8.0.3: + version "8.0.3" + resolved "http://localhost:4873/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz#e1557e37118f24734aa3122e7536a038d34a4912" + integrity sha512-dX8l6qUL6O+fYPtpNRideCFSpmWOUVx5QcaGLVqe/vlDiBSe4vYljDWDETwnyFzpl7By/WVIu6rcrniCgH9BqQ== + dependencies: + lodash "^4.17.21" + string-natural-compare "^3.0.1" + +eslint-plugin-import@^2.25.3: + version "2.26.0" + resolved "http://localhost:4873/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz#f812dc47be4f2b72b478a021605a59fc6fe8b88b" + integrity sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA== + dependencies: + array-includes "^3.1.4" + array.prototype.flat "^1.2.5" + debug "^2.6.9" + doctrine "^2.1.0" + eslint-import-resolver-node "^0.3.6" + eslint-module-utils "^2.7.3" + has "^1.0.3" + is-core-module "^2.8.1" + is-glob "^4.0.3" + minimatch "^3.1.2" + object.values "^1.1.5" + resolve "^1.22.0" + tsconfig-paths "^3.14.1" + +eslint-plugin-jest@^25.3.0: + version "25.7.0" + resolved "http://localhost:4873/eslint-plugin-jest/-/eslint-plugin-jest-25.7.0.tgz#ff4ac97520b53a96187bad9c9814e7d00de09a6a" + integrity sha512-PWLUEXeeF7C9QGKqvdSbzLOiLTx+bno7/HC9eefePfEb257QFHg7ye3dh80AZVkaa/RQsBB1Q/ORQvg2X7F0NQ== + dependencies: + "@typescript-eslint/experimental-utils" "^5.0.0" + +eslint-plugin-jsx-a11y@^6.5.1: + version "6.6.1" + resolved "http://localhost:4873/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.6.1.tgz#93736fc91b83fdc38cc8d115deedfc3091aef1ff" + 
integrity sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q== + dependencies: + "@babel/runtime" "^7.18.9" + aria-query "^4.2.2" + array-includes "^3.1.5" + ast-types-flow "^0.0.7" + axe-core "^4.4.3" + axobject-query "^2.2.0" + damerau-levenshtein "^1.0.8" + emoji-regex "^9.2.2" + has "^1.0.3" + jsx-ast-utils "^3.3.2" + language-tags "^1.0.5" + minimatch "^3.1.2" + semver "^6.3.0" + +eslint-plugin-react-hooks@^4.3.0: + version "4.6.0" + resolved "http://localhost:4873/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" + integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== + +eslint-plugin-react@^7.27.1: + version "7.31.8" + resolved "http://localhost:4873/eslint-plugin-react/-/eslint-plugin-react-7.31.8.tgz#3a4f80c10be1bcbc8197be9e8b641b2a3ef219bf" + integrity sha512-5lBTZmgQmARLLSYiwI71tiGVTLUuqXantZM6vlSY39OaDSV0M7+32K5DnLkmFrwTe+Ksz0ffuLUC91RUviVZfw== + dependencies: + array-includes "^3.1.5" + array.prototype.flatmap "^1.3.0" + doctrine "^2.1.0" + estraverse "^5.3.0" + jsx-ast-utils "^2.4.1 || ^3.0.0" + minimatch "^3.1.2" + object.entries "^1.1.5" + object.fromentries "^2.0.5" + object.hasown "^1.1.1" + object.values "^1.1.5" + prop-types "^15.8.1" + resolve "^2.0.0-next.3" + semver "^6.3.0" + string.prototype.matchall "^4.0.7" + +eslint-plugin-testing-library@^5.0.1: + version "5.7.2" + resolved "http://localhost:4873/eslint-plugin-testing-library/-/eslint-plugin-testing-library-5.7.2.tgz#c1b2112a40aab61f93e10859e8b2d81e54f0ce84" + integrity sha512-0ZmHeR/DUUgEzW8rwUBRWxuqntipDtpvxK0hymdHnLlABryJkzd+CAHr+XnISaVsTisZ5MLHp6nQF+8COHLLTA== + dependencies: + "@typescript-eslint/utils" "^5.13.0" + +eslint-scope@5.1.1, eslint-scope@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity 
sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-scope@^7.1.1: + version "7.1.1" + resolved "http://localhost:4873/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" + integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== + dependencies: + esrecurse "^4.3.0" + estraverse "^5.2.0" + +eslint-utils@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" + integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== + dependencies: + eslint-visitor-keys "^2.0.0" + +eslint-visitor-keys@^2.0.0, eslint-visitor-keys@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint-visitor-keys@^3.3.0: + version "3.3.0" + resolved "http://localhost:4873/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" + integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== + +eslint-webpack-plugin@^3.1.1: + version "3.2.0" + resolved "http://localhost:4873/eslint-webpack-plugin/-/eslint-webpack-plugin-3.2.0.tgz#1978cdb9edc461e4b0195a20da950cf57988347c" + integrity sha512-avrKcGncpPbPSUHX6B3stNGzkKFto3eL+DKM4+VyMrVnhPc3vRczVlCq3uhuFOdRvDHTVXuzwk1ZKUrqDQHQ9w== + dependencies: + "@types/eslint" "^7.29.0 || ^8.4.1" + jest-worker "^28.0.2" + micromatch "^4.0.5" + normalize-path "^3.0.0" + schema-utils "^4.0.0" + +eslint@^8.3.0: + version "8.24.0" + resolved "http://localhost:4873/eslint/-/eslint-8.24.0.tgz#489516c927a5da11b3979dbfb2679394523383c8" + integrity 
sha512-dWFaPhGhTAiPcCgm3f6LI2MBWbogMnTJzFBbhXVRQDJPkr9pGZvVjlVfXd+vyDcWPA2Ic9L2AXPIQM0+vk/cSQ== + dependencies: + "@eslint/eslintrc" "^1.3.2" + "@humanwhocodes/config-array" "^0.10.5" + "@humanwhocodes/gitignore-to-minimatch" "^1.0.2" + "@humanwhocodes/module-importer" "^1.0.1" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.3.2" + doctrine "^3.0.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.1.1" + eslint-utils "^3.0.0" + eslint-visitor-keys "^3.3.0" + espree "^9.4.0" + esquery "^1.4.0" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + find-up "^5.0.0" + glob-parent "^6.0.1" + globals "^13.15.0" + globby "^11.1.0" + grapheme-splitter "^1.0.4" + ignore "^5.2.0" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-sdsl "^4.1.4" + js-yaml "^4.1.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.1.2" + natural-compare "^1.4.0" + optionator "^0.9.1" + regexpp "^3.2.0" + strip-ansi "^6.0.1" + strip-json-comments "^3.1.0" + text-table "^0.2.0" + +espree@^9.4.0: + version "9.4.0" + resolved "http://localhost:4873/espree/-/espree-9.4.0.tgz#cd4bc3d6e9336c433265fc0aa016fc1aaf182f8a" + integrity sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw== + dependencies: + acorn "^8.8.0" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.3.0" + +esprima@^4.0.0, esprima@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.4.0: + version "1.4.0" + resolved "http://localhost:4873/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version 
"4.3.0" + resolved "http://localhost:4873/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "http://localhost:4873/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: + version "5.3.0" + resolved "http://localhost:4873/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +estree-walker@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/estree-walker/-/estree-walker-1.0.1.tgz#31bc5d612c96b704106b477e6dd5d8aa138cb700" + integrity sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg== + +esutils@^2.0.2: + version "2.0.3" + resolved "http://localhost:4873/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +etag@~1.8.1: + version "1.8.1" + resolved "http://localhost:4873/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== + +eventemitter3@^4.0.0: + version "4.0.7" + resolved "http://localhost:4873/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" + integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== + +events@^3.2.0: + version "3.3.0" + resolved "http://localhost:4873/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + integrity 
sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + +execa@^5.0.0: + version "5.1.1" + resolved "http://localhost:4873/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "http://localhost:4873/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== + +expect@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/expect/-/expect-27.5.1.tgz#83ce59f1e5bdf5f9d2b94b61d2050db48f3fef74" + integrity sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw== + dependencies: + "@jest/types" "^27.5.1" + jest-get-type "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + +expect@^29.0.0: + version "29.1.2" + resolved "http://localhost:4873/expect/-/expect-29.1.2.tgz#82f8f28d7d408c7c68da3a386a490ee683e1eced" + integrity sha512-AuAGn1uxva5YBbBlXb+2JPxJRuemZsmlGcapPXWNSBNsQtAULfjioREGBWuI0EOvYUKjDnrCy8PW5Zlr1md5mw== + dependencies: + "@jest/expect-utils" "^29.1.2" + jest-get-type "^29.0.0" + jest-matcher-utils "^29.1.2" + jest-message-util "^29.1.2" + jest-util "^29.1.2" + +express@^4.17.3: + version "4.18.1" + resolved "http://localhost:4873/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" + integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.20.0" + content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.5.0" + 
cookie-signature "1.0.6" + debug "2.6.9" + depd "2.0.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.2.0" + fresh "0.5.2" + http-errors "2.0.0" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "2.4.1" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.7" + qs "6.10.3" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.18.0" + serve-static "1.15.0" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "http://localhost:4873/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.2.11, fast-glob@^3.2.9: + version "3.2.12" + resolved "http://localhost:4873/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: + version "2.0.6" + resolved "http://localhost:4873/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== + +fastq@^1.6.0: + version "1.13.0" + resolved 
"http://localhost:4873/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + dependencies: + reusify "^1.0.4" + +faye-websocket@^0.11.3: + version "0.11.4" + resolved "http://localhost:4873/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" + integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== + dependencies: + websocket-driver ">=0.5.1" + +fb-watchman@^2.0.0: + version "2.0.2" + resolved "http://localhost:4873/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" + integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== + dependencies: + bser "2.1.1" + +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +file-loader@^6.2.0: + version "6.2.0" + resolved "http://localhost:4873/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d" + integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== + dependencies: + loader-utils "^2.0.0" + schema-utils "^3.0.0" + +filelist@^1.0.1: + version "1.0.4" + resolved "http://localhost:4873/filelist/-/filelist-1.0.4.tgz#f78978a1e944775ff9e62e744424f215e58352b5" + integrity sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q== + dependencies: + minimatch "^5.0.1" + +filesize@^8.0.6: + version "8.0.7" + resolved "http://localhost:4873/filesize/-/filesize-8.0.7.tgz#695e70d80f4e47012c132d57a059e80c6b580bd8" + integrity 
sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ== + +fill-range@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +finalhandler@1.2.0: + version "1.2.0" + resolved "http://localhost:4873/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" + integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== + dependencies: + debug "2.6.9" + encodeurl "~1.0.2" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + +find-cache-dir@^3.3.1: + version "3.3.2" + resolved "http://localhost:4873/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" + integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== + dependencies: + commondir "^1.0.1" + make-dir "^3.0.2" + pkg-dir "^4.1.0" + +find-up@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" + integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + dependencies: + locate-path "^3.0.0" + +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved "http://localhost:4873/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +find-up@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity 
sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + dependencies: + locate-path "^6.0.0" + path-exists "^4.0.0" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^3.1.0: + version "3.2.7" + resolved "http://localhost:4873/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" + integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== + +follow-redirects@^1.0.0: + version "1.15.2" + resolved "http://localhost:4873/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== + +fork-ts-checker-webpack-plugin@^6.5.0: + version "6.5.2" + resolved "http://localhost:4873/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz#4f67183f2f9eb8ba7df7177ce3cf3e75cdafb340" + integrity sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA== + dependencies: + "@babel/code-frame" "^7.8.3" + "@types/json-schema" "^7.0.5" + chalk "^4.1.0" + chokidar "^3.4.2" + cosmiconfig "^6.0.0" + deepmerge "^4.2.2" + fs-extra "^9.0.0" + glob "^7.1.6" + memfs "^3.1.2" + minimatch "^3.0.4" + schema-utils "2.7.0" + semver "^7.3.2" + tapable "^1.0.0" + +form-data@^3.0.0: + version "3.0.1" + resolved "http://localhost:4873/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +forwarded@0.2.0: + version "0.2.0" + resolved 
"http://localhost:4873/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + +fraction.js@^4.2.0: + version "4.2.0" + resolved "http://localhost:4873/fraction.js/-/fraction.js-4.2.0.tgz#448e5109a313a3527f5a3ab2119ec4cf0e0e2950" + integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== + +fresh@0.5.2: + version "0.5.2" + resolved "http://localhost:4873/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== + +fs-extra@^10.0.0: + version "10.1.0" + resolved "http://localhost:4873/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-extra@^9.0.0, fs-extra@^9.0.1: + version "9.1.0" + resolved "http://localhost:4873/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" + integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== + dependencies: + at-least-node "^1.0.0" + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-monkey@^1.0.3: + version "1.0.3" + resolved "http://localhost:4873/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3" + integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +fsevents@^2.3.2, fsevents@~2.3.2: + version "2.3.2" + 
resolved "http://localhost:4873/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "http://localhost:4873/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +function.prototype.name@^1.1.5: + version "1.1.5" + resolved "http://localhost:4873/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" + integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.0" + functions-have-names "^1.2.2" + +functions-have-names@^1.2.2: + version "1.2.3" + resolved "http://localhost:4873/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "http://localhost:4873/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-caller-file@^2.0.5: + version "2.0.5" + resolved "http://localhost:4873/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: + version "1.1.3" + resolved "http://localhost:4873/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" + integrity 
sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.3" + +get-own-enumerable-property-symbols@^3.0.0: + version "3.0.2" + resolved "http://localhost:4873/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664" + integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== + +get-package-type@^0.1.0: + version "0.1.0" + resolved "http://localhost:4873/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-stream@^6.0.0: + version "6.0.1" + resolved "http://localhost:4873/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +get-symbol-description@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved "http://localhost:4873/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-parent@^6.0.1, glob-parent@^6.0.2: + version "6.0.2" + resolved "http://localhost:4873/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + 
dependencies: + is-glob "^4.0.3" + +glob-to-regexp@^0.4.1: + version "0.4.1" + resolved "http://localhost:4873/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" + integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== + +glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: + version "7.2.3" + resolved "http://localhost:4873/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + +global-modules@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" + integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== + dependencies: + global-prefix "^3.0.0" + +global-prefix@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" + integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== + dependencies: + ini "^1.3.5" + kind-of "^6.0.2" + which "^1.3.1" + +globals@^11.1.0: + version "11.12.0" + resolved "http://localhost:4873/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^13.15.0: + version "13.17.0" + resolved "http://localhost:4873/globals/-/globals-13.17.0.tgz#902eb1e680a41da93945adbdcb5a9f361ba69bd4" + integrity sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw== + dependencies: + type-fest "^0.20.2" + +globby@^11.0.4, globby@^11.1.0: + version "11.1.0" + resolved 
"http://localhost:4873/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: + version "4.2.10" + resolved "http://localhost:4873/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +grapheme-splitter@^1.0.4: + version "1.0.4" + resolved "http://localhost:4873/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" + integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== + +gzip-size@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" + integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== + dependencies: + duplexer "^0.1.2" + +handle-thing@^2.0.0: + version "2.0.1" + resolved "http://localhost:4873/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" + integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== + +harmony-reflect@^1.4.6: + version "1.6.2" + resolved "http://localhost:4873/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" + integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== + +has-bigints@^1.0.1, has-bigints@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + integrity 
sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== + +has-flag@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== + +has-flag@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-property-descriptors@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== + dependencies: + get-intrinsic "^1.1.1" + +has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: + version "1.0.3" + resolved "http://localhost:4873/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has@^1.0.3: + version "1.0.3" + resolved "http://localhost:4873/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +he@^1.2.0: + version "1.2.0" + resolved "http://localhost:4873/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" + integrity 
sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== + +hoopy@^0.1.4: + version "0.1.4" + resolved "http://localhost:4873/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d" + integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== + +hpack.js@^2.1.6: + version "2.1.6" + resolved "http://localhost:4873/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" + integrity sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ== + dependencies: + inherits "^2.0.1" + obuf "^1.0.0" + readable-stream "^2.0.1" + wbuf "^1.1.0" + +html-encoding-sniffer@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" + integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== + dependencies: + whatwg-encoding "^1.0.5" + +html-entities@^2.1.0, html-entities@^2.3.2: + version "2.3.3" + resolved "http://localhost:4873/html-entities/-/html-entities-2.3.3.tgz#117d7626bece327fc8baace8868fa6f5ef856e46" + integrity sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA== + +html-escaper@^2.0.0: + version "2.0.2" + resolved "http://localhost:4873/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +html-minifier-terser@^6.0.2: + version "6.1.0" + resolved "http://localhost:4873/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab" + integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== + dependencies: + camel-case "^4.1.2" + clean-css "^5.2.2" + commander "^8.3.0" + he "^1.2.0" + param-case "^3.0.4" + relateurl "^0.2.7" + 
terser "^5.10.0" + +html-webpack-plugin@^5.5.0: + version "5.5.0" + resolved "http://localhost:4873/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz#c3911936f57681c1f9f4d8b68c158cd9dfe52f50" + integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw== + dependencies: + "@types/html-minifier-terser" "^6.0.0" + html-minifier-terser "^6.0.2" + lodash "^4.17.21" + pretty-error "^4.0.0" + tapable "^2.0.0" + +htmlparser2@^6.1.0: + version "6.1.0" + resolved "http://localhost:4873/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" + integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.0.0" + domutils "^2.5.2" + entities "^2.0.0" + +http-deceiver@^1.2.7: + version "1.2.7" + resolved "http://localhost:4873/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" + integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== + +http-errors@2.0.0: + version "2.0.0" + resolved "http://localhost:4873/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + dependencies: + depd "2.0.0" + inherits "2.0.4" + setprototypeof "1.2.0" + statuses "2.0.1" + toidentifier "1.0.1" + +http-errors@~1.6.2: + version "1.6.3" + resolved "http://localhost:4873/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" + integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.0" + statuses ">= 1.4.0 < 2" + +http-parser-js@>=0.5.1: + version "0.5.8" + resolved "http://localhost:4873/http-parser-js/-/http-parser-js-0.5.8.tgz#af23090d9ac4e24573de6f6aecc9d84a48bf20e3" + 
integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q== + +http-proxy-agent@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" + integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== + dependencies: + "@tootallnate/once" "1" + agent-base "6" + debug "4" + +http-proxy-middleware@^2.0.3: + version "2.0.6" + resolved "http://localhost:4873/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f" + integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== + dependencies: + "@types/http-proxy" "^1.17.8" + http-proxy "^1.18.1" + is-glob "^4.0.1" + is-plain-obj "^3.0.0" + micromatch "^4.0.2" + +http-proxy@^1.18.1: + version "1.18.1" + resolved "http://localhost:4873/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" + integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== + dependencies: + eventemitter3 "^4.0.0" + follow-redirects "^1.0.0" + requires-port "^1.0.0" + +https-proxy-agent@^5.0.0: + version "5.0.1" + resolved "http://localhost:4873/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" + integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== + dependencies: + agent-base "6" + debug "4" + +human-signals@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +iconv-lite@0.4.24: + version "0.4.24" + resolved "http://localhost:4873/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity 
sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +iconv-lite@^0.6.3: + version "0.6.3" + resolved "http://localhost:4873/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" + integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== + dependencies: + safer-buffer ">= 2.1.2 < 3.0.0" + +icss-utils@^5.0.0, icss-utils@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" + integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== + +idb@^7.0.1: + version "7.1.0" + resolved "http://localhost:4873/idb/-/idb-7.1.0.tgz#2cc886be57738419e57f9aab58f647e5e2160270" + integrity sha512-Wsk07aAxDsntgYJY4h0knZJuTxM73eQ4reRAO+Z1liOh8eMCJ/MoDS8fCui1vGT9mnjtl1sOu3I2i/W1swPYZg== + +identity-obj-proxy@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz#94d2bda96084453ef36fbc5aaec37e0f79f1fc14" + integrity sha512-00n6YnVHKrinT9t0d9+5yZC6UBNJANpYEQvL2LlX6Ab9lnmxzIRcEmTPuyGScvl1+jKuCICX1Z0Ab1pPKKdikA== + dependencies: + harmony-reflect "^1.4.6" + +ignore@^5.2.0: + version "5.2.0" + resolved "http://localhost:4873/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" + integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== + +immer@^9.0.7: + version "9.0.15" + resolved "http://localhost:4873/immer/-/immer-9.0.15.tgz#0b9169e5b1d22137aba7d43f8a81a495dd1b62dc" + integrity sha512-2eB/sswms9AEUSkOm4SbV5Y7Vmt/bKRwByd52jfLkW4OLYeaTP3EEiJ9agqU0O/tq6Dk62Zfj+TJSqfm1rLVGQ== + +import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "http://localhost:4873/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity 
sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-local@^3.0.2: + version "3.1.0" + resolved "http://localhost:4873/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" + integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== + dependencies: + pkg-dir "^4.2.0" + resolve-cwd "^3.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "http://localhost:4873/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== + +indent-string@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + +inflight@^1.0.4: + version "1.0.6" + resolved "http://localhost:4873/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3: + version "2.0.4" + resolved "http://localhost:4873/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inherits@2.0.3: + version "2.0.3" + resolved "http://localhost:4873/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== + +ini@^1.3.5: + version "1.3.8" + resolved 
"http://localhost:4873/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + +internal-slot@^1.0.3: + version "1.0.3" + resolved "http://localhost:4873/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" + integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + dependencies: + get-intrinsic "^1.1.0" + has "^1.0.3" + side-channel "^1.0.4" + +ipaddr.js@1.9.1: + version "1.9.1" + resolved "http://localhost:4873/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +ipaddr.js@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0" + integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "http://localhost:4873/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== + +is-bigint@^1.0.1: + version "1.0.4" + resolved "http://localhost:4873/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "http://localhost:4873/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-boolean-object@^1.1.0: + version "1.1.2" + resolved 
"http://localhost:4873/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-callable@^1.1.4, is-callable@^1.2.7: + version "1.2.7" + resolved "http://localhost:4873/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== + +is-core-module@^2.8.1, is-core-module@^2.9.0: + version "2.10.0" + resolved "http://localhost:4873/is-core-module/-/is-core-module-2.10.0.tgz#9012ede0a91c69587e647514e1d5277019e728ed" + integrity sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1: + version "1.0.5" + resolved "http://localhost:4873/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-docker@^2.0.0, is-docker@^2.1.1: + version "2.2.1" + resolved "http://localhost:4873/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" + integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== + +is-extglob@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity 
sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-generator-fn@^2.0.0: + version "2.1.0" + resolved "http://localhost:4873/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: + version "4.0.3" + resolved "http://localhost:4873/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-module@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" + integrity sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g== + +is-negative-zero@^2.0.2: + version "2.0.2" + resolved "http://localhost:4873/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== + +is-number-object@^1.0.4: + version "1.0.7" + resolved "http://localhost:4873/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^7.0.0: + version "7.0.0" + resolved "http://localhost:4873/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-obj@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" + integrity 
sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg== + +is-plain-obj@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" + integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== + +is-plain-object@^2.0.4: + version "2.0.4" + resolved "http://localhost:4873/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== + dependencies: + isobject "^3.0.1" + +is-potential-custom-element-name@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" + integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== + +is-regex@^1.1.4: + version "1.1.4" + resolved "http://localhost:4873/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-regexp@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069" + integrity sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA== + +is-root@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c" + integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== + +is-shared-array-buffer@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" 
+ integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== + dependencies: + call-bind "^1.0.2" + +is-stream@^2.0.0: + version "2.0.1" + resolved "http://localhost:4873/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved "http://localhost:4873/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "http://localhost:4873/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-typedarray@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== + +is-weakref@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== + dependencies: + call-bind "^1.0.2" + +is-wsl@^2.2.0: + version "2.2.0" + resolved "http://localhost:4873/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" + integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== + dependencies: + is-docker "^2.0.0" + +isarray@~1.0.0: + version "1.0.0" + resolved 
"http://localhost:4873/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== + +isexe@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +isobject@^3.0.1: + version "3.0.1" + resolved "http://localhost:4873/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== + +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + version "3.2.0" + resolved "http://localhost:4873/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== + +istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: + version "5.2.1" + resolved "http://localhost:4873/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" + integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.1" + resolved 
"http://localhost:4873/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.1.3: + version "3.1.5" + resolved "http://localhost:4873/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" + integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + +jake@^10.8.5: + version "10.8.5" + resolved "http://localhost:4873/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46" + integrity sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw== + dependencies: + async "^3.2.3" + chalk "^4.0.2" + filelist "^1.0.1" + minimatch "^3.0.4" + +jest-changed-files@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-changed-files/-/jest-changed-files-27.5.1.tgz#a348aed00ec9bf671cc58a66fcbe7c3dfd6a68f5" + integrity sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw== + dependencies: + "@jest/types" "^27.5.1" + execa "^5.0.0" + throat "^6.0.1" + +jest-circus@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-circus/-/jest-circus-27.5.1.tgz#37a5a4459b7bf4406e53d637b49d22c65d125ecc" + integrity sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + dedent "^0.7.0" + expect "^27.5.1" + is-generator-fn "^2.0.0" + jest-each "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util 
"^27.5.1" + pretty-format "^27.5.1" + slash "^3.0.0" + stack-utils "^2.0.3" + throat "^6.0.1" + +jest-cli@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-cli/-/jest-cli-27.5.1.tgz#278794a6e6458ea8029547e6c6cbf673bd30b145" + integrity sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw== + dependencies: + "@jest/core" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + import-local "^3.0.2" + jest-config "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + prompts "^2.0.1" + yargs "^16.2.0" + +jest-config@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-config/-/jest-config-27.5.1.tgz#5c387de33dca3f99ad6357ddeccd91bf3a0e4a41" + integrity sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA== + dependencies: + "@babel/core" "^7.8.0" + "@jest/test-sequencer" "^27.5.1" + "@jest/types" "^27.5.1" + babel-jest "^27.5.1" + chalk "^4.0.0" + ci-info "^3.2.0" + deepmerge "^4.2.2" + glob "^7.1.1" + graceful-fs "^4.2.9" + jest-circus "^27.5.1" + jest-environment-jsdom "^27.5.1" + jest-environment-node "^27.5.1" + jest-get-type "^27.5.1" + jest-jasmine2 "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-runner "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + micromatch "^4.0.4" + parse-json "^5.2.0" + pretty-format "^27.5.1" + slash "^3.0.0" + strip-json-comments "^3.1.1" + +jest-diff@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-diff/-/jest-diff-27.5.1.tgz#a07f5011ac9e6643cf8a95a462b7b1ecf6680def" + integrity sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw== + dependencies: + chalk "^4.0.0" + diff-sequences "^27.5.1" + jest-get-type "^27.5.1" + pretty-format "^27.5.1" + +jest-diff@^29.1.2: + version "29.1.2" + resolved 
"http://localhost:4873/jest-diff/-/jest-diff-29.1.2.tgz#bb7aaf5353227d6f4f96c5e7e8713ce576a607dc" + integrity sha512-4GQts0aUopVvecIT4IwD/7xsBaMhKTYoM4/njE/aVw9wpw+pIUVp8Vab/KnSzSilr84GnLBkaP3JLDnQYCKqVQ== + dependencies: + chalk "^4.0.0" + diff-sequences "^29.0.0" + jest-get-type "^29.0.0" + pretty-format "^29.1.2" + +jest-docblock@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-docblock/-/jest-docblock-27.5.1.tgz#14092f364a42c6108d42c33c8cf30e058e25f6c0" + integrity sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ== + dependencies: + detect-newline "^3.0.0" + +jest-each@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-each/-/jest-each-27.5.1.tgz#5bc87016f45ed9507fed6e4702a5b468a5b2c44e" + integrity sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ== + dependencies: + "@jest/types" "^27.5.1" + chalk "^4.0.0" + jest-get-type "^27.5.1" + jest-util "^27.5.1" + pretty-format "^27.5.1" + +jest-environment-jsdom@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz#ea9ccd1fc610209655a77898f86b2b559516a546" + integrity sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + jest-mock "^27.5.1" + jest-util "^27.5.1" + jsdom "^16.6.0" + +jest-environment-node@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-environment-node/-/jest-environment-node-27.5.1.tgz#dedc2cfe52fab6b8f5714b4808aefa85357a365e" + integrity sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + jest-mock "^27.5.1" + jest-util "^27.5.1" + +jest-get-type@^27.5.1: + 
version "27.5.1" + resolved "http://localhost:4873/jest-get-type/-/jest-get-type-27.5.1.tgz#3cd613c507b0f7ace013df407a1c1cd578bcb4f1" + integrity sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw== + +jest-get-type@^29.0.0: + version "29.0.0" + resolved "http://localhost:4873/jest-get-type/-/jest-get-type-29.0.0.tgz#843f6c50a1b778f7325df1129a0fd7aa713aef80" + integrity sha512-83X19z/HuLKYXYHskZlBAShO7UfLFXu/vWajw9ZNJASN32li8yHMaVGAQqxFW1RCFOkB7cubaL6FaJVQqqJLSw== + +jest-haste-map@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-haste-map/-/jest-haste-map-27.5.1.tgz#9fd8bd7e7b4fa502d9c6164c5640512b4e811e7f" + integrity sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng== + dependencies: + "@jest/types" "^27.5.1" + "@types/graceful-fs" "^4.1.2" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.9" + jest-regex-util "^27.5.1" + jest-serializer "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" + micromatch "^4.0.4" + walker "^1.0.7" + optionalDependencies: + fsevents "^2.3.2" + +jest-jasmine2@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz#a037b0034ef49a9f3d71c4375a796f3b230d1ac4" + integrity sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/source-map" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + expect "^27.5.1" + is-generator-fn "^2.0.0" + jest-each "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + pretty-format "^27.5.1" + throat "^6.0.1" + +jest-leak-detector@^27.5.1: + version "27.5.1" + resolved 
"http://localhost:4873/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz#6ec9d54c3579dd6e3e66d70e3498adf80fde3fb8" + integrity sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ== + dependencies: + jest-get-type "^27.5.1" + pretty-format "^27.5.1" + +jest-matcher-utils@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz#9c0cdbda8245bc22d2331729d1091308b40cf8ab" + integrity sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw== + dependencies: + chalk "^4.0.0" + jest-diff "^27.5.1" + jest-get-type "^27.5.1" + pretty-format "^27.5.1" + +jest-matcher-utils@^29.1.2: + version "29.1.2" + resolved "http://localhost:4873/jest-matcher-utils/-/jest-matcher-utils-29.1.2.tgz#e68c4bcc0266e70aa1a5c13fb7b8cd4695e318a1" + integrity sha512-MV5XrD3qYSW2zZSHRRceFzqJ39B2z11Qv0KPyZYxnzDHFeYZGJlgGi0SW+IXSJfOewgJp/Km/7lpcFT+cgZypw== + dependencies: + chalk "^4.0.0" + jest-diff "^29.1.2" + jest-get-type "^29.0.0" + pretty-format "^29.1.2" + +jest-message-util@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-message-util/-/jest-message-util-27.5.1.tgz#bdda72806da10d9ed6425e12afff38cd1458b6cf" + integrity sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^27.5.1" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^27.5.1" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-message-util@^28.1.3: + version "28.1.3" + resolved "http://localhost:4873/jest-message-util/-/jest-message-util-28.1.3.tgz#232def7f2e333f1eecc90649b5b94b0055e7c43d" + integrity sha512-PFdn9Iewbt575zKPf1286Ht9EPoJmYT7P0kY+RibeYZ2XtOr53pDLEFoTWXbd1h4JiGiWpTBC84fc8xMXQMb7g== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^28.1.3" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" 
+ graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^28.1.3" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-message-util@^29.1.2: + version "29.1.2" + resolved "http://localhost:4873/jest-message-util/-/jest-message-util-29.1.2.tgz#c21a33c25f9dc1ebfcd0f921d89438847a09a501" + integrity sha512-9oJ2Os+Qh6IlxLpmvshVbGUiSkZVc2FK+uGOm6tghafnB2RyjKAxMZhtxThRMxfX1J1SOMhTn9oK3/MutRWQJQ== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^29.1.2" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^29.1.2" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-mock@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6" + integrity sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og== + dependencies: + "@jest/types" "^27.5.1" + "@types/node" "*" + +jest-pnp-resolver@^1.2.2: + version "1.2.2" + resolved "http://localhost:4873/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" + integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== + +jest-regex-util@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-regex-util/-/jest-regex-util-27.5.1.tgz#4da143f7e9fd1e542d4aa69617b38e4a78365b95" + integrity sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg== + +jest-regex-util@^28.0.0: + version "28.0.2" + resolved "http://localhost:4873/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" + integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== + +jest-resolve-dependencies@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz#d811ecc8305e731cc86dd79741ee98fed06f1da8" + integrity 
sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg== + dependencies: + "@jest/types" "^27.5.1" + jest-regex-util "^27.5.1" + jest-snapshot "^27.5.1" + +jest-resolve@^27.4.2, jest-resolve@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-resolve/-/jest-resolve-27.5.1.tgz#a2f1c5a0796ec18fe9eb1536ac3814c23617b384" + integrity sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw== + dependencies: + "@jest/types" "^27.5.1" + chalk "^4.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-pnp-resolver "^1.2.2" + jest-util "^27.5.1" + jest-validate "^27.5.1" + resolve "^1.20.0" + resolve.exports "^1.1.0" + slash "^3.0.0" + +jest-runner@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-runner/-/jest-runner-27.5.1.tgz#071b27c1fa30d90540805c5645a0ec167c7b62e5" + integrity sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ== + dependencies: + "@jest/console" "^27.5.1" + "@jest/environment" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + emittery "^0.8.1" + graceful-fs "^4.2.9" + jest-docblock "^27.5.1" + jest-environment-jsdom "^27.5.1" + jest-environment-node "^27.5.1" + jest-haste-map "^27.5.1" + jest-leak-detector "^27.5.1" + jest-message-util "^27.5.1" + jest-resolve "^27.5.1" + jest-runtime "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" + source-map-support "^0.5.6" + throat "^6.0.1" + +jest-runtime@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-runtime/-/jest-runtime-27.5.1.tgz#4896003d7a334f7e8e4a53ba93fb9bcd3db0a1af" + integrity sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/globals" "^27.5.1" + "@jest/source-map" "^27.5.1" + "@jest/test-result" 
"^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + chalk "^4.0.0" + cjs-module-lexer "^1.0.0" + collect-v8-coverage "^1.0.0" + execa "^5.0.0" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-message-util "^27.5.1" + jest-mock "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + slash "^3.0.0" + strip-bom "^4.0.0" + +jest-serializer@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-serializer/-/jest-serializer-27.5.1.tgz#81438410a30ea66fd57ff730835123dea1fb1f64" + integrity sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w== + dependencies: + "@types/node" "*" + graceful-fs "^4.2.9" + +jest-snapshot@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-snapshot/-/jest-snapshot-27.5.1.tgz#b668d50d23d38054a51b42c4039cab59ae6eb6a1" + integrity sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA== + dependencies: + "@babel/core" "^7.7.2" + "@babel/generator" "^7.7.2" + "@babel/plugin-syntax-typescript" "^7.7.2" + "@babel/traverse" "^7.7.2" + "@babel/types" "^7.0.0" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/babel__traverse" "^7.0.4" + "@types/prettier" "^2.1.5" + babel-preset-current-node-syntax "^1.0.0" + chalk "^4.0.0" + expect "^27.5.1" + graceful-fs "^4.2.9" + jest-diff "^27.5.1" + jest-get-type "^27.5.1" + jest-haste-map "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-util "^27.5.1" + natural-compare "^1.4.0" + pretty-format "^27.5.1" + semver "^7.3.2" + +jest-util@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-util/-/jest-util-27.5.1.tgz#3ba9771e8e31a0b85da48fe0b0891fb86c01c2f9" + integrity sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw== + dependencies: + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + ci-info 
"^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-util@^28.1.3: + version "28.1.3" + resolved "http://localhost:4873/jest-util/-/jest-util-28.1.3.tgz#f4f932aa0074f0679943220ff9cbba7e497028b0" + integrity sha512-XdqfpHwpcSRko/C35uLYFM2emRAltIIKZiJ9eAmhjsj0CqZMa0p1ib0R5fWIqGhn1a103DebTbpqIaP1qCQ6tQ== + dependencies: + "@jest/types" "^28.1.3" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-util@^29.1.2: + version "29.1.2" + resolved "http://localhost:4873/jest-util/-/jest-util-29.1.2.tgz#ac5798e93cb6a6703084e194cfa0898d66126df1" + integrity sha512-vPCk9F353i0Ymx3WQq3+a4lZ07NXu9Ca8wya6o4Fe4/aO1e1awMMprZ3woPFpKwghEOW+UXgd15vVotuNN9ONQ== + dependencies: + "@jest/types" "^29.1.2" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-validate@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-validate/-/jest-validate-27.5.1.tgz#9197d54dc0bdb52260b8db40b46ae668e04df067" + integrity sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ== + dependencies: + "@jest/types" "^27.5.1" + camelcase "^6.2.0" + chalk "^4.0.0" + jest-get-type "^27.5.1" + leven "^3.1.0" + pretty-format "^27.5.1" + +jest-watch-typeahead@^1.0.0: + version "1.1.0" + resolved "http://localhost:4873/jest-watch-typeahead/-/jest-watch-typeahead-1.1.0.tgz#b4a6826dfb9c9420da2f7bc900de59dad11266a9" + integrity sha512-Va5nLSJTN7YFtC2jd+7wsoe1pNe5K4ShLux/E5iHEwlB9AxaxmggY7to9KUqKojhaJw3aXqt5WAb4jGPOolpEw== + dependencies: + ansi-escapes "^4.3.1" + chalk "^4.0.0" + jest-regex-util "^28.0.0" + jest-watcher "^28.0.0" + slash "^4.0.0" + string-length "^5.0.1" + strip-ansi "^7.0.1" + +jest-watcher@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-watcher/-/jest-watcher-27.5.1.tgz#71bd85fb9bde3a2c2ec4dc353437971c43c642a2" + integrity 
sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw== + dependencies: + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + jest-util "^27.5.1" + string-length "^4.0.1" + +jest-watcher@^28.0.0: + version "28.1.3" + resolved "http://localhost:4873/jest-watcher/-/jest-watcher-28.1.3.tgz#c6023a59ba2255e3b4c57179fc94164b3e73abd4" + integrity sha512-t4qcqj9hze+jviFPUN3YAtAEeFnr/azITXQEMARf5cMwKY2SMBRnCQTXLixTl20OR6mLh9KLMrgVJgJISym+1g== + dependencies: + "@jest/test-result" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.10.2" + jest-util "^28.1.3" + string-length "^4.0.1" + +jest-worker@^26.2.1: + version "26.6.2" + resolved "http://localhost:4873/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" + integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^7.0.0" + +jest-worker@^27.0.2, jest-worker@^27.4.5, jest-worker@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" + integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest-worker@^28.0.2: + version "28.1.3" + resolved "http://localhost:4873/jest-worker/-/jest-worker-28.1.3.tgz#7e3c4ce3fa23d1bb6accb169e7f396f98ed4bb98" + integrity sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest@^27.4.3: + version "27.5.1" + resolved "http://localhost:4873/jest/-/jest-27.5.1.tgz#dadf33ba70a779be7a6fc33015843b51494f63fc" + integrity 
sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ== + dependencies: + "@jest/core" "^27.5.1" + import-local "^3.0.2" + jest-cli "^27.5.1" + +js-sdsl@^4.1.4: + version "4.1.5" + resolved "http://localhost:4873/js-sdsl/-/js-sdsl-4.1.5.tgz#1ff1645e6b4d1b028cd3f862db88c9d887f26e2a" + integrity sha512-08bOAKweV2NUC1wqTtf3qZlnpOX/R2DU9ikpjOHs0H+ibQv3zpncVQg6um4uYtRtrwIX8M4Nh3ytK4HGlYAq7Q== + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "http://localhost:4873/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@^4.1.0: + version "4.1.0" + resolved "http://localhost:4873/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +jsdom@^16.6.0: + version "16.7.0" + resolved "http://localhost:4873/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" + integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== + dependencies: + abab "^2.0.5" + acorn "^8.2.4" + acorn-globals "^6.0.0" + cssom "^0.4.4" + cssstyle "^2.3.0" + data-urls "^2.0.0" + decimal.js "^10.2.1" + domexception "^2.0.1" + escodegen "^2.0.0" + form-data "^3.0.0" + html-encoding-sniffer "^2.0.1" + http-proxy-agent "^4.0.1" + https-proxy-agent "^5.0.0" + is-potential-custom-element-name "^1.0.1" + nwsapi "^2.2.0" + parse5 "6.0.1" + saxes "^5.0.1" + symbol-tree "^3.2.4" + tough-cookie 
"^4.0.0" + w3c-hr-time "^1.0.2" + w3c-xmlserializer "^2.0.0" + webidl-conversions "^6.1.0" + whatwg-encoding "^1.0.5" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.5.0" + ws "^7.4.6" + xml-name-validator "^3.0.0" + +jsesc@^2.5.1: + version "2.5.2" + resolved "http://localhost:4873/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +jsesc@~0.5.0: + version "0.5.0" + resolved "http://localhost:4873/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== + +json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: + version "2.3.1" + resolved "http://localhost:4873/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "http://localhost:4873/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + +json-schema@^0.4.0: + version "0.4.0" + resolved "http://localhost:4873/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved 
"http://localhost:4873/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== + +json5@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" + integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + dependencies: + minimist "^1.2.0" + +json5@^2.1.2, json5@^2.2.0, json5@^2.2.1: + version "2.2.1" + resolved "http://localhost:4873/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== + +jsonfile@^6.0.1: + version "6.1.0" + resolved "http://localhost:4873/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +jsonpointer@^5.0.0: + version "5.0.1" + resolved "http://localhost:4873/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" + integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== + +"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.3.2: + version "3.3.3" + resolved "http://localhost:4873/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz#76b3e6e6cece5c69d49a5792c3d01bd1a0cdc7ea" + integrity sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw== + dependencies: + array-includes "^3.1.5" + object.assign "^4.1.3" + +kind-of@^6.0.2: + version "6.0.3" + resolved "http://localhost:4873/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity 
sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +kleur@^3.0.3: + version "3.0.3" + resolved "http://localhost:4873/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +klona@^2.0.4, klona@^2.0.5: + version "2.0.5" + resolved "http://localhost:4873/klona/-/klona-2.0.5.tgz#d166574d90076395d9963aa7a928fabb8d76afbc" + integrity sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ== + +language-subtag-registry@~0.3.2: + version "0.3.22" + resolved "http://localhost:4873/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz#2e1500861b2e457eba7e7ae86877cbd08fa1fd1d" + integrity sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w== + +language-tags@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a" + integrity sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ== + dependencies: + language-subtag-registry "~0.3.2" + +leven@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +levn@^0.4.1: + version "0.4.1" + resolved "http://localhost:4873/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +levn@~0.3.0: + version "0.3.0" + resolved "http://localhost:4873/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA== 
+ dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +lilconfig@^2.0.3, lilconfig@^2.0.5, lilconfig@^2.0.6: + version "2.0.6" + resolved "http://localhost:4873/lilconfig/-/lilconfig-2.0.6.tgz#32a384558bd58af3d4c6e077dd1ad1d397bc69d4" + integrity sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg== + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "http://localhost:4873/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +loader-runner@^4.2.0: + version "4.3.0" + resolved "http://localhost:4873/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" + integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== + +loader-utils@^2.0.0: + version "2.0.2" + resolved "http://localhost:4873/loader-utils/-/loader-utils-2.0.2.tgz#d6e3b4fb81870721ae4e0868ab11dd638368c129" + integrity sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^2.1.2" + +loader-utils@^3.2.0: + version "3.2.0" + resolved "http://localhost:4873/loader-utils/-/loader-utils-3.2.0.tgz#bcecc51a7898bee7473d4bc6b845b23af8304d4f" + integrity sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ== + +locate-path@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" + integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== + dependencies: + p-locate "^3.0.0" + path-exists "^3.0.0" + +locate-path@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity 
sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +locate-path@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + dependencies: + p-locate "^5.0.0" + +lodash.debounce@^4.0.8: + version "4.0.8" + resolved "http://localhost:4873/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" + integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== + +lodash.memoize@^4.1.2: + version "4.1.2" + resolved "http://localhost:4873/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "http://localhost:4873/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lodash.sortby@^4.7.0: + version "4.7.0" + resolved "http://localhost:4873/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== + +lodash.uniq@^4.5.0: + version "4.5.0" + resolved "http://localhost:4873/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" + integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== + +lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0: + version "4.17.21" + resolved "http://localhost:4873/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity 
sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +loose-envify@^1.1.0, loose-envify@^1.4.0: + version "1.4.0" + resolved "http://localhost:4873/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +lower-case@^2.0.2: + version "2.0.2" + resolved "http://localhost:4873/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" + integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== + dependencies: + tslib "^2.0.3" + +lru-cache@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +lz-string@^1.4.4: + version "1.4.4" + resolved "http://localhost:4873/lz-string/-/lz-string-1.4.4.tgz#c0d8eaf36059f705796e1e344811cf4c498d3a26" + integrity sha512-0ckx7ZHRPqb0oUm8zNr+90mtf9DQB60H1wMCjBtfi62Kl3a7JbHob6gA2bC+xRvZoOL+1hzUK8jeuEIQE8svEQ== + +magic-string@^0.25.0, magic-string@^0.25.7: + version "0.25.9" + resolved "http://localhost:4873/magic-string/-/magic-string-0.25.9.tgz#de7f9faf91ef8a1c91d02c2e5314c8277dbcdd1c" + integrity sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ== + dependencies: + sourcemap-codec "^1.4.8" + +make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +makeerror@1.0.12: + version "1.0.12" + resolved 
"http://localhost:4873/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + +mdn-data@2.0.14: + version "2.0.14" + resolved "http://localhost:4873/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" + integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== + +mdn-data@2.0.4: + version "2.0.4" + resolved "http://localhost:4873/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b" + integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA== + +media-typer@0.3.0: + version "0.3.0" + resolved "http://localhost:4873/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== + +memfs@^3.1.2, memfs@^3.4.3: + version "3.4.7" + resolved "http://localhost:4873/memfs/-/memfs-3.4.7.tgz#e5252ad2242a724f938cb937e3c4f7ceb1f70e5a" + integrity sha512-ygaiUSNalBX85388uskeCyhSAoOSgzBbtVCr9jA2RROssFL9Q19/ZXFqS+2Th2sr1ewNIWgFdLzLC3Yl1Zv+lw== + dependencies: + fs-monkey "^1.0.3" + +merge-descriptors@1.0.1: + version "1.0.1" + resolved "http://localhost:4873/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== + +merge-stream@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "http://localhost:4873/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + 
integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +methods@~1.1.2: + version "1.1.2" + resolved "http://localhost:4873/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== + +micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: + version "4.0.5" + resolved "http://localhost:4873/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": + version "1.52.0" + resolved "http://localhost:4873/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved "http://localhost:4873/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime@1.6.0: + version "1.6.0" + resolved "http://localhost:4873/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +min-indent@^1.0.0: + version "1.0.1" + resolved 
"http://localhost:4873/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" + integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== + +mini-css-extract-plugin@^2.4.5: + version "2.6.1" + resolved "http://localhost:4873/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.1.tgz#9a1251d15f2035c342d99a468ab9da7a0451b71e" + integrity sha512-wd+SD57/K6DiV7jIR34P+s3uckTRuQvx0tKPcvjFlrEylk6P4mQ2KSWk1hblj1Kxaqok7LogKOieygXqBczNlg== + dependencies: + schema-utils "^4.0.0" + +minimalistic-assert@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimatch@3.0.4: + version "3.0.4" + resolved "http://localhost:4873/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: + version "3.1.2" + resolved "http://localhost:4873/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^5.0.1: + version "5.1.0" + resolved "http://localhost:4873/minimatch/-/minimatch-5.1.0.tgz#1717b464f4971b144f6aabe8f2d0b8e4511e09c7" + integrity sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg== + dependencies: + brace-expansion "^2.0.1" + +minimist@^1.2.0, minimist@^1.2.6: + version "1.2.6" + resolved "http://localhost:4873/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity 
sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + +mkdirp@~0.5.1: + version "0.5.6" + resolved "http://localhost:4873/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== + dependencies: + minimist "^1.2.6" + +ms@2.0.0: + version "2.0.0" + resolved "http://localhost:4873/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.2: + version "2.1.2" + resolved "http://localhost:4873/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@2.1.3, ms@^2.1.1: + version "2.1.3" + resolved "http://localhost:4873/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +multicast-dns@^7.2.5: + version "7.2.5" + resolved "http://localhost:4873/multicast-dns/-/multicast-dns-7.2.5.tgz#77eb46057f4d7adbd16d9290fa7299f6fa64cced" + integrity sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg== + dependencies: + dns-packet "^5.2.2" + thunky "^1.0.2" + +nanoid@^3.3.4: + version "3.3.4" + resolved "http://localhost:4873/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" + integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "http://localhost:4873/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== + +negotiator@0.6.3: + version "0.6.3" + resolved 
"http://localhost:4873/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + +neo-async@^2.6.2: + version "2.6.2" + resolved "http://localhost:4873/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + +no-case@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" + integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== + dependencies: + lower-case "^2.0.2" + tslib "^2.0.3" + +node-forge@^1: + version "1.3.1" + resolved "http://localhost:4873/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" + integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== + +node-int64@^0.4.0: + version "0.4.0" + resolved "http://localhost:4873/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== + +node-releases@^2.0.6: + version "2.0.6" + resolved "http://localhost:4873/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" + integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "http://localhost:4873/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +normalize-range@^0.1.2: + version "0.1.2" + resolved 
"http://localhost:4873/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" + integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== + +normalize-url@^6.0.1: + version "6.1.0" + resolved "http://localhost:4873/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== + +npm-run-path@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +nth-check@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" + integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== + dependencies: + boolbase "~1.0.0" + +nth-check@^2.0.1: + version "2.1.1" + resolved "http://localhost:4873/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" + integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== + dependencies: + boolbase "^1.0.0" + +nwsapi@^2.2.0: + version "2.2.2" + resolved "http://localhost:4873/nwsapi/-/nwsapi-2.2.2.tgz#e5418863e7905df67d51ec95938d67bf801f0bb0" + integrity sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw== + +object-assign@^4.1.1: + version "4.1.1" + resolved "http://localhost:4873/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-hash@^3.0.0: + version "3.0.0" + resolved 
"http://localhost:4873/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" + integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== + +object-inspect@^1.12.2, object-inspect@^1.9.0: + version "1.12.2" + resolved "http://localhost:4873/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" + integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== + +object-keys@^1.1.1: + version "1.1.1" + resolved "http://localhost:4873/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.0, object.assign@^4.1.3, object.assign@^4.1.4: + version "4.1.4" + resolved "http://localhost:4873/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" + integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + has-symbols "^1.0.3" + object-keys "^1.1.1" + +object.entries@^1.1.5: + version "1.1.5" + resolved "http://localhost:4873/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861" + integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.fromentries@^2.0.5: + version "2.0.5" + resolved "http://localhost:4873/object.fromentries/-/object.fromentries-2.0.5.tgz#7b37b205109c21e741e605727fe8b0ad5fa08251" + integrity sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.getownpropertydescriptors@^2.1.0: + version "2.1.4" + resolved 
"http://localhost:4873/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.4.tgz#7965e6437a57278b587383831a9b829455a4bc37" + integrity sha512-sccv3L/pMModT6dJAYF3fzGMVcb38ysQ0tEE6ixv2yXJDtEIPph268OlAdJj5/qZMZDq2g/jqvwppt36uS/uQQ== + dependencies: + array.prototype.reduce "^1.0.4" + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.1" + +object.hasown@^1.1.1: + version "1.1.1" + resolved "http://localhost:4873/object.hasown/-/object.hasown-1.1.1.tgz#ad1eecc60d03f49460600430d97f23882cf592a3" + integrity sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A== + dependencies: + define-properties "^1.1.4" + es-abstract "^1.19.5" + +object.values@^1.1.0, object.values@^1.1.5: + version "1.1.5" + resolved "http://localhost:4873/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" + integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +obuf@^1.0.0, obuf@^1.1.2: + version "1.1.2" + resolved "http://localhost:4873/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" + integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== + +on-finished@2.4.1: + version "2.4.1" + resolved "http://localhost:4873/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== + dependencies: + ee-first "1.1.1" + +on-headers@~1.0.2: + version "1.0.2" + resolved "http://localhost:4873/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" + integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== + +once@^1.3.0: + version "1.4.0" + resolved 
"http://localhost:4873/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +onetime@^5.1.2: + version "5.1.2" + resolved "http://localhost:4873/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +open@^8.0.9, open@^8.4.0: + version "8.4.0" + resolved "http://localhost:4873/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8" + integrity sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q== + dependencies: + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" + +optionator@^0.8.1: + version "0.8.3" + resolved "http://localhost:4873/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" + integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.6" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + word-wrap "~1.2.3" + +optionator@^0.9.1: + version "0.9.1" + resolved "http://localhost:4873/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +p-limit@^2.0.0, p-limit@^2.2.0: + version "2.3.0" + resolved "http://localhost:4873/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-limit@^3.0.2: + version "3.1.0" + resolved 
"http://localhost:4873/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + +p-locate@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" + integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== + dependencies: + p-limit "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "http://localhost:4873/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-locate@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + +p-retry@^4.5.0: + version "4.6.2" + resolved "http://localhost:4873/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" + integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== + dependencies: + "@types/retry" "0.12.0" + retry "^0.13.1" + +p-try@^2.0.0: + version "2.2.0" + resolved "http://localhost:4873/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +param-case@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" + integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== + dependencies: + dot-case "^3.0.4" + tslib "^2.0.3" + +parent-module@^1.0.0: + version 
"1.0.1" + resolved "http://localhost:4873/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-json@^5.0.0, parse-json@^5.2.0: + version "5.2.0" + resolved "http://localhost:4873/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + "@babel/code-frame" "^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +parse5@6.0.1: + version "6.0.1" + resolved "http://localhost:4873/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" + integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== + +parseurl@~1.3.2, parseurl@~1.3.3: + version "1.3.3" + resolved "http://localhost:4873/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +pascal-case@^3.1.2: + version "3.1.2" + resolved "http://localhost:4873/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" + integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +path-exists@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ== + +path-exists@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity 
sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "http://localhost:4873/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved "http://localhost:4873/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-to-regexp@0.1.7: + version "0.1.7" + resolved "http://localhost:4873/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== + +path-type@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +performance-now@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== + +picocolors@^0.2.1: + version "0.2.1" + resolved "http://localhost:4873/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" + integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== + +picocolors@^1.0.0: + version "1.0.0" + 
resolved "http://localhost:4873/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: + version "2.3.1" + resolved "http://localhost:4873/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +pify@^2.3.0: + version "2.3.0" + resolved "http://localhost:4873/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== + +pirates@^4.0.4: + version "4.0.5" + resolved "http://localhost:4873/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" + integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + +pkg-dir@^4.1.0, pkg-dir@^4.2.0: + version "4.2.0" + resolved "http://localhost:4873/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +pkg-up@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" + integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== + dependencies: + find-up "^3.0.0" + +postcss-attribute-case-insensitive@^5.0.2: + version "5.0.2" + resolved "http://localhost:4873/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-5.0.2.tgz#03d761b24afc04c09e757e92ff53716ae8ea2741" + integrity sha512-XIidXV8fDr0kKt28vqki84fRK8VW8eTuIa4PChv2MqKuT6C9UjmSKzen6KaWhWEoYvwxFCa7n/tC1SZ3tyq4SQ== + dependencies: + postcss-selector-parser "^6.0.10" + 
+postcss-browser-comments@^4: + version "4.0.0" + resolved "http://localhost:4873/postcss-browser-comments/-/postcss-browser-comments-4.0.0.tgz#bcfc86134df5807f5d3c0eefa191d42136b5e72a" + integrity sha512-X9X9/WN3KIvY9+hNERUqX9gncsgBA25XaeR+jshHz2j8+sYyHktHw1JdKuMjeLpGktXidqDhA7b/qm1mrBDmgg== + +postcss-calc@^8.2.3: + version "8.2.4" + resolved "http://localhost:4873/postcss-calc/-/postcss-calc-8.2.4.tgz#77b9c29bfcbe8a07ff6693dc87050828889739a5" + integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== + dependencies: + postcss-selector-parser "^6.0.9" + postcss-value-parser "^4.2.0" + +postcss-clamp@^4.1.0: + version "4.1.0" + resolved "http://localhost:4873/postcss-clamp/-/postcss-clamp-4.1.0.tgz#7263e95abadd8c2ba1bd911b0b5a5c9c93e02363" + integrity sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-functional-notation@^4.2.4: + version "4.2.4" + resolved "http://localhost:4873/postcss-color-functional-notation/-/postcss-color-functional-notation-4.2.4.tgz#21a909e8d7454d3612d1659e471ce4696f28caec" + integrity sha512-2yrTAUZUab9s6CpxkxC4rVgFEVaR6/2Pipvi6qcgvnYiVqZcbDHEoBDhrXzyb7Efh2CCfHQNtcqWcIruDTIUeg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-hex-alpha@^8.0.4: + version "8.0.4" + resolved "http://localhost:4873/postcss-color-hex-alpha/-/postcss-color-hex-alpha-8.0.4.tgz#c66e2980f2fbc1a63f5b079663340ce8b55f25a5" + integrity sha512-nLo2DCRC9eE4w2JmuKgVA3fGL3d01kGq752pVALF68qpGLmx2Qrk91QTKkdUqqp45T1K1XV8IhQpcu1hoAQflQ== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-rebeccapurple@^7.1.1: + version "7.1.1" + resolved "http://localhost:4873/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-7.1.1.tgz#63fdab91d878ebc4dd4b7c02619a0c3d6a56ced0" + integrity sha512-pGxkuVEInwLHgkNxUc4sdg4g3py7zUeCQ9sMfwyHAT+Ezk8a4OaaVZ8lIY5+oNqA/BXXgLyXv0+5wHP68R79hg== + 
dependencies: + postcss-value-parser "^4.2.0" + +postcss-colormin@^5.3.0: + version "5.3.0" + resolved "http://localhost:4873/postcss-colormin/-/postcss-colormin-5.3.0.tgz#3cee9e5ca62b2c27e84fce63affc0cfb5901956a" + integrity sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + colord "^2.9.1" + postcss-value-parser "^4.2.0" + +postcss-convert-values@^5.1.2: + version "5.1.2" + resolved "http://localhost:4873/postcss-convert-values/-/postcss-convert-values-5.1.2.tgz#31586df4e184c2e8890e8b34a0b9355313f503ab" + integrity sha512-c6Hzc4GAv95B7suy4udszX9Zy4ETyMCgFPUDtWjdFTKH1SE9eFY/jEpHSwTH1QPuwxHpWslhckUQWbNRM4ho5g== + dependencies: + browserslist "^4.20.3" + postcss-value-parser "^4.2.0" + +postcss-custom-media@^8.0.2: + version "8.0.2" + resolved "http://localhost:4873/postcss-custom-media/-/postcss-custom-media-8.0.2.tgz#c8f9637edf45fef761b014c024cee013f80529ea" + integrity sha512-7yi25vDAoHAkbhAzX9dHx2yc6ntS4jQvejrNcC+csQJAXjj15e7VcWfMgLqBNAbOvqi5uIa9huOVwdHbf+sKqg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-custom-properties@^12.1.9: + version "12.1.9" + resolved "http://localhost:4873/postcss-custom-properties/-/postcss-custom-properties-12.1.9.tgz#0883429a7ef99f1ba239d1fea29ce84906daa8bd" + integrity sha512-/E7PRvK8DAVljBbeWrcEQJPG72jaImxF3vvCNFwv9cC8CzigVoNIpeyfnJzphnN3Fd8/auBf5wvkw6W9MfmTyg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-custom-selectors@^6.0.3: + version "6.0.3" + resolved "http://localhost:4873/postcss-custom-selectors/-/postcss-custom-selectors-6.0.3.tgz#1ab4684d65f30fed175520f82d223db0337239d9" + integrity sha512-fgVkmyiWDwmD3JbpCmB45SvvlCD6z9CG6Ie6Iere22W5aHea6oWa7EM2bpnv2Fj3I94L3VbtvX9KqwSi5aFzSg== + dependencies: + postcss-selector-parser "^6.0.4" + +postcss-dir-pseudo-class@^6.0.5: + version "6.0.5" + resolved 
"http://localhost:4873/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-6.0.5.tgz#2bf31de5de76added44e0a25ecf60ae9f7c7c26c" + integrity sha512-eqn4m70P031PF7ZQIvSgy9RSJ5uI2171O/OO/zcRNYpJbvaeKFUlar1aJ7rmgiQtbm0FSPsRewjpdS0Oew7MPA== + dependencies: + postcss-selector-parser "^6.0.10" + +postcss-discard-comments@^5.1.2: + version "5.1.2" + resolved "http://localhost:4873/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz#8df5e81d2925af2780075840c1526f0660e53696" + integrity sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ== + +postcss-discard-duplicates@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz#9eb4fe8456706a4eebd6d3b7b777d07bad03e848" + integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== + +postcss-discard-empty@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz#e57762343ff7f503fe53fca553d18d7f0c369c6c" + integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== + +postcss-discard-overridden@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz#7e8c5b53325747e9d90131bb88635282fb4a276e" + integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== + +postcss-double-position-gradients@^3.1.2: + version "3.1.2" + resolved "http://localhost:4873/postcss-double-position-gradients/-/postcss-double-position-gradients-3.1.2.tgz#b96318fdb477be95997e86edd29c6e3557a49b91" + integrity sha512-GX+FuE/uBR6eskOK+4vkXgT6pDkexLokPaz/AbJna9s5Kzp/yl488pKPjhy0obB475ovfT1Wv8ho7U/cHNaRgQ== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +postcss-env-function@^4.0.6: + version "4.0.6" + resolved 
"http://localhost:4873/postcss-env-function/-/postcss-env-function-4.0.6.tgz#7b2d24c812f540ed6eda4c81f6090416722a8e7a" + integrity sha512-kpA6FsLra+NqcFnL81TnsU+Z7orGtDTxcOhl6pwXeEq1yFPpRMkCDpHhrz8CFQDr/Wfm0jLiNQ1OsGGPjlqPwA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-flexbugs-fixes@^5.0.2: + version "5.0.2" + resolved "http://localhost:4873/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-5.0.2.tgz#2028e145313074fc9abe276cb7ca14e5401eb49d" + integrity sha512-18f9voByak7bTktR2QgDveglpn9DTbBWPUzSOe9g0N4WR/2eSt6Vrcbf0hmspvMI6YWGywz6B9f7jzpFNJJgnQ== + +postcss-focus-visible@^6.0.4: + version "6.0.4" + resolved "http://localhost:4873/postcss-focus-visible/-/postcss-focus-visible-6.0.4.tgz#50c9ea9afa0ee657fb75635fabad25e18d76bf9e" + integrity sha512-QcKuUU/dgNsstIK6HELFRT5Y3lbrMLEOwG+A4s5cA+fx3A3y/JTq3X9LaOj3OC3ALH0XqyrgQIgey/MIZ8Wczw== + dependencies: + postcss-selector-parser "^6.0.9" + +postcss-focus-within@^5.0.4: + version "5.0.4" + resolved "http://localhost:4873/postcss-focus-within/-/postcss-focus-within-5.0.4.tgz#5b1d2ec603195f3344b716c0b75f61e44e8d2e20" + integrity sha512-vvjDN++C0mu8jz4af5d52CB184ogg/sSxAFS+oUJQq2SuCe7T5U2iIsVJtsCp2d6R4j0jr5+q3rPkBVZkXD9fQ== + dependencies: + postcss-selector-parser "^6.0.9" + +postcss-font-variant@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz#efd59b4b7ea8bb06127f2d031bfbb7f24d32fa66" + integrity sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA== + +postcss-gap-properties@^3.0.5: + version "3.0.5" + resolved "http://localhost:4873/postcss-gap-properties/-/postcss-gap-properties-3.0.5.tgz#f7e3cddcf73ee19e94ccf7cb77773f9560aa2fff" + integrity sha512-IuE6gKSdoUNcvkGIqdtjtcMtZIFyXZhmFd5RUlg97iVEvp1BZKV5ngsAjCjrVy+14uhGBQl9tzmi1Qwq4kqVOg== + +postcss-image-set-function@^4.0.7: + version "4.0.7" + resolved 
"http://localhost:4873/postcss-image-set-function/-/postcss-image-set-function-4.0.7.tgz#08353bd756f1cbfb3b6e93182c7829879114481f" + integrity sha512-9T2r9rsvYzm5ndsBE8WgtrMlIT7VbtTfE7b3BQnudUqnBcBo7L758oc+o+pdj/dUV0l5wjwSdjeOH2DZtfv8qw== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-import@^14.1.0: + version "14.1.0" + resolved "http://localhost:4873/postcss-import/-/postcss-import-14.1.0.tgz#a7333ffe32f0b8795303ee9e40215dac922781f0" + integrity sha512-flwI+Vgm4SElObFVPpTIT7SU7R3qk2L7PyduMcokiaVKuWv9d/U+Gm/QAd8NDLuykTWTkcrjOeD2Pp1rMeBTGw== + dependencies: + postcss-value-parser "^4.0.0" + read-cache "^1.0.0" + resolve "^1.1.7" + +postcss-initial@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/postcss-initial/-/postcss-initial-4.0.1.tgz#529f735f72c5724a0fb30527df6fb7ac54d7de42" + integrity sha512-0ueD7rPqX8Pn1xJIjay0AZeIuDoF+V+VvMt/uOnn+4ezUKhZM/NokDeP6DwMNyIoYByuN/94IQnt5FEkaN59xQ== + +postcss-js@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/postcss-js/-/postcss-js-4.0.0.tgz#31db79889531b80dc7bc9b0ad283e418dce0ac00" + integrity sha512-77QESFBwgX4irogGVPgQ5s07vLvFqWr228qZY+w6lW599cRlK/HmnlivnnVUxkjHnCu4J16PDMHcH+e+2HbvTQ== + dependencies: + camelcase-css "^2.0.1" + +postcss-lab-function@^4.2.1: + version "4.2.1" + resolved "http://localhost:4873/postcss-lab-function/-/postcss-lab-function-4.2.1.tgz#6fe4c015102ff7cd27d1bd5385582f67ebdbdc98" + integrity sha512-xuXll4isR03CrQsmxyz92LJB2xX9n+pZJ5jE9JgcnmsCammLyKdlzrBin+25dy6wIjfhJpKBAN80gsTlCgRk2w== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +postcss-load-config@^3.1.4: + version "3.1.4" + resolved "http://localhost:4873/postcss-load-config/-/postcss-load-config-3.1.4.tgz#1ab2571faf84bb078877e1d07905eabe9ebda855" + integrity sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg== + dependencies: + lilconfig "^2.0.5" + yaml "^1.10.2" + +postcss-loader@^6.2.1: + version 
"6.2.1" + resolved "http://localhost:4873/postcss-loader/-/postcss-loader-6.2.1.tgz#0895f7346b1702103d30fdc66e4d494a93c008ef" + integrity sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q== + dependencies: + cosmiconfig "^7.0.0" + klona "^2.0.5" + semver "^7.3.5" + +postcss-logical@^5.0.4: + version "5.0.4" + resolved "http://localhost:4873/postcss-logical/-/postcss-logical-5.0.4.tgz#ec75b1ee54421acc04d5921576b7d8db6b0e6f73" + integrity sha512-RHXxplCeLh9VjinvMrZONq7im4wjWGlRJAqmAVLXyZaXwfDWP73/oq4NdIp+OZwhQUMj0zjqDfM5Fj7qby+B4g== + +postcss-media-minmax@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/postcss-media-minmax/-/postcss-media-minmax-5.0.0.tgz#7140bddec173e2d6d657edbd8554a55794e2a5b5" + integrity sha512-yDUvFf9QdFZTuCUg0g0uNSHVlJ5X1lSzDZjPSFaiCWvjgsvu8vEVxtahPrLMinIDEEGnx6cBe6iqdx5YWz08wQ== + +postcss-merge-longhand@^5.1.6: + version "5.1.6" + resolved "http://localhost:4873/postcss-merge-longhand/-/postcss-merge-longhand-5.1.6.tgz#f378a8a7e55766b7b644f48e5d8c789ed7ed51ce" + integrity sha512-6C/UGF/3T5OE2CEbOuX7iNO63dnvqhGZeUnKkDeifebY0XqkkvrctYSZurpNE902LDf2yKwwPFgotnfSoPhQiw== + dependencies: + postcss-value-parser "^4.2.0" + stylehacks "^5.1.0" + +postcss-merge-rules@^5.1.2: + version "5.1.2" + resolved "http://localhost:4873/postcss-merge-rules/-/postcss-merge-rules-5.1.2.tgz#7049a14d4211045412116d79b751def4484473a5" + integrity sha512-zKMUlnw+zYCWoPN6yhPjtcEdlJaMUZ0WyVcxTAmw3lkkN/NDMRkOkiuctQEoWAOvH7twaxUUdvBWl0d4+hifRQ== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + cssnano-utils "^3.1.0" + postcss-selector-parser "^6.0.5" + +postcss-minify-font-values@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz#f1df0014a726083d260d3bd85d7385fb89d1f01b" + integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== + dependencies: + postcss-value-parser "^4.2.0" + 
+postcss-minify-gradients@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz#f1fe1b4f498134a5068240c2f25d46fcd236ba2c" + integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== + dependencies: + colord "^2.9.1" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-minify-params@^5.1.3: + version "5.1.3" + resolved "http://localhost:4873/postcss-minify-params/-/postcss-minify-params-5.1.3.tgz#ac41a6465be2db735099bbd1798d85079a6dc1f9" + integrity sha512-bkzpWcjykkqIujNL+EVEPOlLYi/eZ050oImVtHU7b4lFS82jPnsCb44gvC6pxaNt38Els3jWYDHTjHKf0koTgg== + dependencies: + browserslist "^4.16.6" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-minify-selectors@^5.2.1: + version "5.2.1" + resolved "http://localhost:4873/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz#d4e7e6b46147b8117ea9325a915a801d5fe656c6" + integrity sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-modules-extract-imports@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" + integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== + +postcss-modules-local-by-default@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz#ebbb54fae1598eecfdf691a02b3ff3b390a5a51c" + integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== + dependencies: + icss-utils "^5.0.0" + postcss-selector-parser "^6.0.2" + postcss-value-parser "^4.1.0" + +postcss-modules-scope@^3.0.0: + version "3.0.0" + resolved 
"http://localhost:4873/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" + integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== + dependencies: + postcss-selector-parser "^6.0.4" + +postcss-modules-values@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" + integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== + dependencies: + icss-utils "^5.0.0" + +postcss-nested@5.0.6: + version "5.0.6" + resolved "http://localhost:4873/postcss-nested/-/postcss-nested-5.0.6.tgz#466343f7fc8d3d46af3e7dba3fcd47d052a945bc" + integrity sha512-rKqm2Fk0KbA8Vt3AdGN0FB9OBOMDVajMG6ZCf/GoHgdxUJ4sBFp0A/uMIRm+MJUdo33YXEtjqIz8u7DAp8B7DA== + dependencies: + postcss-selector-parser "^6.0.6" + +postcss-nesting@^10.2.0: + version "10.2.0" + resolved "http://localhost:4873/postcss-nesting/-/postcss-nesting-10.2.0.tgz#0b12ce0db8edfd2d8ae0aaf86427370b898890be" + integrity sha512-EwMkYchxiDiKUhlJGzWsD9b2zvq/r2SSubcRrgP+jujMXFzqvANLt16lJANC+5uZ6hjI7lpRmI6O8JIl+8l1KA== + dependencies: + "@csstools/selector-specificity" "^2.0.0" + postcss-selector-parser "^6.0.10" + +postcss-normalize-charset@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz#9302de0b29094b52c259e9b2cf8dc0879879f0ed" + integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== + +postcss-normalize-display-values@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz#72abbae58081960e9edd7200fcf21ab8325c3da8" + integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== + dependencies: + postcss-value-parser "^4.2.0" + 
+postcss-normalize-positions@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz#ef97279d894087b59325b45c47f1e863daefbb92" + integrity sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-repeat-style@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz#e9eb96805204f4766df66fd09ed2e13545420fb2" + integrity sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-string@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz#411961169e07308c82c1f8c55f3e8a337757e228" + integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-timing-functions@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz#d5614410f8f0b2388e9f240aa6011ba6f52dafbb" + integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-unicode@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz#3d23aede35e160089a285e27bf715de11dc9db75" + integrity sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ== + dependencies: + browserslist "^4.16.6" + postcss-value-parser "^4.2.0" + +postcss-normalize-url@^5.1.0: + version "5.1.0" + resolved 
"http://localhost:4873/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz#ed9d88ca82e21abef99f743457d3729a042adcdc" + integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== + dependencies: + normalize-url "^6.0.1" + postcss-value-parser "^4.2.0" + +postcss-normalize-whitespace@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz#08a1a0d1ffa17a7cc6efe1e6c9da969cc4493cfa" + integrity sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize@^10.0.1: + version "10.0.1" + resolved "http://localhost:4873/postcss-normalize/-/postcss-normalize-10.0.1.tgz#464692676b52792a06b06880a176279216540dd7" + integrity sha512-+5w18/rDev5mqERcG3W5GZNMJa1eoYYNGo8gB7tEwaos0ajk3ZXAI4mHGcNT47NE+ZnZD1pEpUOFLvltIwmeJA== + dependencies: + "@csstools/normalize.css" "*" + postcss-browser-comments "^4" + sanitize.css "*" + +postcss-opacity-percentage@^1.1.2: + version "1.1.2" + resolved "http://localhost:4873/postcss-opacity-percentage/-/postcss-opacity-percentage-1.1.2.tgz#bd698bb3670a0a27f6d657cc16744b3ebf3b1145" + integrity sha512-lyUfF7miG+yewZ8EAk9XUBIlrHyUE6fijnesuz+Mj5zrIHIEw6KcIZSOk/elVMqzLvREmXB83Zi/5QpNRYd47w== + +postcss-ordered-values@^5.1.3: + version "5.1.3" + resolved "http://localhost:4873/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz#b6fd2bd10f937b23d86bc829c69e7732ce76ea38" + integrity sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ== + dependencies: + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-overflow-shorthand@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/postcss-overflow-shorthand/-/postcss-overflow-shorthand-3.0.4.tgz#7ed6486fec44b76f0eab15aa4866cda5d55d893e" + integrity 
sha512-otYl/ylHK8Y9bcBnPLo3foYFLL6a6Ak+3EQBPOTR7luMYCOsiVTUk1iLvNf6tVPNGXcoL9Hoz37kpfriRIFb4A== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-page-break@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/postcss-page-break/-/postcss-page-break-3.0.4.tgz#7fbf741c233621622b68d435babfb70dd8c1ee5f" + integrity sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ== + +postcss-place@^7.0.5: + version "7.0.5" + resolved "http://localhost:4873/postcss-place/-/postcss-place-7.0.5.tgz#95dbf85fd9656a3a6e60e832b5809914236986c4" + integrity sha512-wR8igaZROA6Z4pv0d+bvVrvGY4GVHihBCBQieXFY3kuSuMyOmEnnfFzHl/tQuqHZkfkIVBEbDvYcFfHmpSet9g== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-preset-env@^7.0.1: + version "7.8.2" + resolved "http://localhost:4873/postcss-preset-env/-/postcss-preset-env-7.8.2.tgz#4c834d5cbd2e29df2abf59118947c456922b79ba" + integrity sha512-rSMUEaOCnovKnwc5LvBDHUDzpGP+nrUeWZGWt9M72fBvckCi45JmnJigUr4QG4zZeOHmOCNCZnd2LKDvP++ZuQ== + dependencies: + "@csstools/postcss-cascade-layers" "^1.1.0" + "@csstools/postcss-color-function" "^1.1.1" + "@csstools/postcss-font-format-keywords" "^1.0.1" + "@csstools/postcss-hwb-function" "^1.0.2" + "@csstools/postcss-ic-unit" "^1.0.1" + "@csstools/postcss-is-pseudo-class" "^2.0.7" + "@csstools/postcss-nested-calc" "^1.0.0" + "@csstools/postcss-normalize-display-values" "^1.0.1" + "@csstools/postcss-oklab-function" "^1.1.1" + "@csstools/postcss-progressive-custom-properties" "^1.3.0" + "@csstools/postcss-stepped-value-functions" "^1.0.1" + "@csstools/postcss-text-decoration-shorthand" "^1.0.0" + "@csstools/postcss-trigonometric-functions" "^1.0.2" + "@csstools/postcss-unset-value" "^1.0.2" + autoprefixer "^10.4.11" + browserslist "^4.21.3" + css-blank-pseudo "^3.0.3" + css-has-pseudo "^3.0.4" + css-prefers-color-scheme "^6.0.3" + cssdb "^7.0.1" + postcss-attribute-case-insensitive "^5.0.2" + postcss-clamp "^4.1.0" + postcss-color-functional-notation 
"^4.2.4" + postcss-color-hex-alpha "^8.0.4" + postcss-color-rebeccapurple "^7.1.1" + postcss-custom-media "^8.0.2" + postcss-custom-properties "^12.1.9" + postcss-custom-selectors "^6.0.3" + postcss-dir-pseudo-class "^6.0.5" + postcss-double-position-gradients "^3.1.2" + postcss-env-function "^4.0.6" + postcss-focus-visible "^6.0.4" + postcss-focus-within "^5.0.4" + postcss-font-variant "^5.0.0" + postcss-gap-properties "^3.0.5" + postcss-image-set-function "^4.0.7" + postcss-initial "^4.0.1" + postcss-lab-function "^4.2.1" + postcss-logical "^5.0.4" + postcss-media-minmax "^5.0.0" + postcss-nesting "^10.2.0" + postcss-opacity-percentage "^1.1.2" + postcss-overflow-shorthand "^3.0.4" + postcss-page-break "^3.0.4" + postcss-place "^7.0.5" + postcss-pseudo-class-any-link "^7.1.6" + postcss-replace-overflow-wrap "^4.0.0" + postcss-selector-not "^6.0.1" + postcss-value-parser "^4.2.0" + +postcss-pseudo-class-any-link@^7.1.6: + version "7.1.6" + resolved "http://localhost:4873/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-7.1.6.tgz#2693b221902da772c278def85a4d9a64b6e617ab" + integrity sha512-9sCtZkO6f/5ML9WcTLcIyV1yz9D1rf0tWc+ulKcvV30s0iZKS/ONyETvoWsr6vnrmW+X+KmuK3gV/w5EWnT37w== + dependencies: + postcss-selector-parser "^6.0.10" + +postcss-reduce-initial@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz#fc31659ea6e85c492fb2a7b545370c215822c5d6" + integrity sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + +postcss-reduce-transforms@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz#333b70e7758b802f3dd0ddfe98bb1ccfef96b6e9" + integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== + dependencies: + postcss-value-parser "^4.2.0" + 
+postcss-replace-overflow-wrap@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz#d2df6bed10b477bf9c52fab28c568b4b29ca4319" + integrity sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw== + +postcss-selector-not@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/postcss-selector-not/-/postcss-selector-not-6.0.1.tgz#8f0a709bf7d4b45222793fc34409be407537556d" + integrity sha512-1i9affjAe9xu/y9uqWH+tD4r6/hDaXJruk8xn2x1vzxC2U3J3LKO3zJW4CyxlNhA56pADJ/djpEwpH1RClI2rQ== + dependencies: + postcss-selector-parser "^6.0.10" + +postcss-selector-parser@^6.0.10, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.6, postcss-selector-parser@^6.0.9: + version "6.0.10" + resolved "http://localhost:4873/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d" + integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-svgo@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-svgo/-/postcss-svgo-5.1.0.tgz#0a317400ced789f233a28826e77523f15857d80d" + integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== + dependencies: + postcss-value-parser "^4.2.0" + svgo "^2.7.0" + +postcss-unique-selectors@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz#a9f273d1eacd09e9aa6088f4b0507b18b1b541b6" + integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-value-parser@^4.0.0, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: + version "4.2.0" + resolved 
"http://localhost:4873/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" + integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== + +postcss@^7.0.35: + version "7.0.39" + resolved "http://localhost:4873/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" + integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== + dependencies: + picocolors "^0.2.1" + source-map "^0.6.1" + +postcss@^8.3.5, postcss@^8.4.14, postcss@^8.4.4, postcss@^8.4.7: + version "8.4.17" + resolved "http://localhost:4873/postcss/-/postcss-8.4.17.tgz#f87863ec7cd353f81f7ab2dec5d67d861bbb1be5" + integrity sha512-UNxNOLQydcOFi41yHNMcKRZ39NeXlr8AxGuZJsdub8vIb12fHzcq37DTU/QtbI6WLxNg2gF9Z+8qtRwTj1UI1Q== + dependencies: + nanoid "^3.3.4" + picocolors "^1.0.0" + source-map-js "^1.0.2" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "http://localhost:4873/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "http://localhost:4873/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== + +pretty-bytes@^5.3.0, pretty-bytes@^5.4.1: + version "5.6.0" + resolved "http://localhost:4873/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" + integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== + +pretty-error@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" + integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== + 
dependencies: + lodash "^4.17.20" + renderkid "^3.0.0" + +pretty-format@^27.0.2, pretty-format@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e" + integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ== + dependencies: + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^17.0.1" + +pretty-format@^28.1.3: + version "28.1.3" + resolved "http://localhost:4873/pretty-format/-/pretty-format-28.1.3.tgz#c9fba8cedf99ce50963a11b27d982a9ae90970d5" + integrity sha512-8gFb/To0OmxHR9+ZTb14Df2vNxdGCX8g1xWGUTqUw5TiZvcQf5sHKObd5UcPyLLyowNwDAMTF3XWOG1B6mxl1Q== + dependencies: + "@jest/schemas" "^28.1.3" + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^18.0.0" + +pretty-format@^29.0.0, pretty-format@^29.1.2: + version "29.1.2" + resolved "http://localhost:4873/pretty-format/-/pretty-format-29.1.2.tgz#b1f6b75be7d699be1a051f5da36e8ae9e76a8e6a" + integrity sha512-CGJ6VVGXVRP2o2Dorl4mAwwvDWT25luIsYhkyVQW32E4nL+TgW939J7LlKT/npq5Cpq6j3s+sy+13yk7xYpBmg== + dependencies: + "@jest/schemas" "^29.0.0" + ansi-styles "^5.0.0" + react-is "^18.0.0" + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "http://localhost:4873/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +promise@^8.1.0: + version "8.2.0" + resolved "http://localhost:4873/promise/-/promise-8.2.0.tgz#a1f6280ab67457fbfc8aad2b198c9497e9e5c806" + integrity sha512-+CMAlLHqwRYwBMXKCP+o8ns7DN+xHDUiI+0nArsiJ9y+kJVPLFxEaSw6Ha9s9H0tftxg2Yzl25wqj9G7m5wLZg== + dependencies: + asap "~2.0.6" + +prompts@^2.0.1, prompts@^2.4.2: + version "2.4.2" + resolved "http://localhost:4873/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity 
sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +prop-types@^15.8.1: + version "15.8.1" + resolved "http://localhost:4873/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" + integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.13.1" + +proxy-addr@~2.0.7: + version "2.0.7" + resolved "http://localhost:4873/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + +psl@^1.1.33: + version "1.9.0" + resolved "http://localhost:4873/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== + +punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +q@^1.1.2: + version "1.5.1" + resolved "http://localhost:4873/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" + integrity sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw== + +qs@6.10.3: + version "6.10.3" + resolved "http://localhost:4873/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" + integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== + dependencies: + side-channel "^1.0.4" + +querystringify@^2.1.1: + version "2.2.0" + resolved "http://localhost:4873/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" + 
integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "http://localhost:4873/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +quick-lru@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" + integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== + +raf@^3.4.1: + version "3.4.1" + resolved "http://localhost:4873/raf/-/raf-3.4.1.tgz#0742e99a4a6552f445d73e3ee0328af0ff1ede39" + integrity sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA== + dependencies: + performance-now "^2.1.0" + +randombytes@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + dependencies: + safe-buffer "^5.1.0" + +range-parser@^1.2.1, range-parser@~1.2.1: + version "1.2.1" + resolved "http://localhost:4873/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.5.1: + version "2.5.1" + resolved "http://localhost:4873/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" + integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.4.24" + unpipe "1.0.0" + +react-app-polyfill@^3.0.0: + version "3.0.0" + resolved 
"http://localhost:4873/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" + integrity sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w== + dependencies: + core-js "^3.19.2" + object-assign "^4.1.1" + promise "^8.1.0" + raf "^3.4.1" + regenerator-runtime "^0.13.9" + whatwg-fetch "^3.6.2" + +react-dev-utils@^12.0.1: + version "12.0.1" + resolved "http://localhost:4873/react-dev-utils/-/react-dev-utils-12.0.1.tgz#ba92edb4a1f379bd46ccd6bcd4e7bc398df33e73" + integrity sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ== + dependencies: + "@babel/code-frame" "^7.16.0" + address "^1.1.2" + browserslist "^4.18.1" + chalk "^4.1.2" + cross-spawn "^7.0.3" + detect-port-alt "^1.1.6" + escape-string-regexp "^4.0.0" + filesize "^8.0.6" + find-up "^5.0.0" + fork-ts-checker-webpack-plugin "^6.5.0" + global-modules "^2.0.0" + globby "^11.0.4" + gzip-size "^6.0.0" + immer "^9.0.7" + is-root "^2.1.0" + loader-utils "^3.2.0" + open "^8.4.0" + pkg-up "^3.1.0" + prompts "^2.4.2" + react-error-overlay "^6.0.11" + recursive-readdir "^2.2.2" + shell-quote "^1.7.3" + strip-ansi "^6.0.1" + text-table "^0.2.0" + +react-dom@^18.2.0: + version "18.2.0" + resolved "http://localhost:4873/react-dom/-/react-dom-18.2.0.tgz#22aaf38708db2674ed9ada224ca4aa708d821e3d" + integrity sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g== + dependencies: + loose-envify "^1.1.0" + scheduler "^0.23.0" + +react-error-overlay@^6.0.11: + version "6.0.11" + resolved "http://localhost:4873/react-error-overlay/-/react-error-overlay-6.0.11.tgz#92835de5841c5cf08ba00ddd2d677b6d17ff9adb" + integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg== + +react-is@^16.13.1: + version "16.13.1" + resolved "http://localhost:4873/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity 
sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-is@^17.0.1: + version "17.0.2" + resolved "http://localhost:4873/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" + integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== + +react-is@^18.0.0: + version "18.2.0" + resolved "http://localhost:4873/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" + integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== + +react-refresh@^0.11.0: + version "0.11.0" + resolved "http://localhost:4873/react-refresh/-/react-refresh-0.11.0.tgz#77198b944733f0f1f1a90e791de4541f9f074046" + integrity sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A== + +react-scripts@5.0.1: + version "5.0.1" + resolved "http://localhost:4873/react-scripts/-/react-scripts-5.0.1.tgz#6285dbd65a8ba6e49ca8d651ce30645a6d980003" + integrity sha512-8VAmEm/ZAwQzJ+GOMLbBsTdDKOpuZh7RPs0UymvBR2vRk4iZWCskjbFnxqjrzoIvlNNRZ3QJFx6/qDSi6zSnaQ== + dependencies: + "@babel/core" "^7.16.0" + "@pmmmwh/react-refresh-webpack-plugin" "^0.5.3" + "@svgr/webpack" "^5.5.0" + babel-jest "^27.4.2" + babel-loader "^8.2.3" + babel-plugin-named-asset-import "^0.3.8" + babel-preset-react-app "^10.0.1" + bfj "^7.0.2" + browserslist "^4.18.1" + camelcase "^6.2.1" + case-sensitive-paths-webpack-plugin "^2.4.0" + css-loader "^6.5.1" + css-minimizer-webpack-plugin "^3.2.0" + dotenv "^10.0.0" + dotenv-expand "^5.1.0" + eslint "^8.3.0" + eslint-config-react-app "^7.0.1" + eslint-webpack-plugin "^3.1.1" + file-loader "^6.2.0" + fs-extra "^10.0.0" + html-webpack-plugin "^5.5.0" + identity-obj-proxy "^3.0.0" + jest "^27.4.3" + jest-resolve "^27.4.2" + jest-watch-typeahead "^1.0.0" + mini-css-extract-plugin "^2.4.5" + postcss "^8.4.4" + postcss-flexbugs-fixes "^5.0.2" + postcss-loader "^6.2.1" + postcss-normalize 
"^10.0.1" + postcss-preset-env "^7.0.1" + prompts "^2.4.2" + react-app-polyfill "^3.0.0" + react-dev-utils "^12.0.1" + react-refresh "^0.11.0" + resolve "^1.20.0" + resolve-url-loader "^4.0.0" + sass-loader "^12.3.0" + semver "^7.3.5" + source-map-loader "^3.0.0" + style-loader "^3.3.1" + tailwindcss "^3.0.2" + terser-webpack-plugin "^5.2.5" + webpack "^5.64.4" + webpack-dev-server "^4.6.0" + webpack-manifest-plugin "^4.0.2" + workbox-webpack-plugin "^6.4.1" + optionalDependencies: + fsevents "^2.3.2" + +react@^18.2.0: + version "18.2.0" + resolved "http://localhost:4873/react/-/react-18.2.0.tgz#555bd98592883255fa00de14f1151a917b5d77d5" + integrity sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ== + dependencies: + loose-envify "^1.1.0" + +read-cache@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774" + integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== + dependencies: + pify "^2.3.0" + +readable-stream@^2.0.1: + version "2.3.7" + resolved "http://localhost:4873/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^3.0.6: + version "3.6.0" + resolved "http://localhost:4873/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readdirp@~3.6.0: + version "3.6.0" + resolved 
"http://localhost:4873/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +recursive-readdir@^2.2.2: + version "2.2.2" + resolved "http://localhost:4873/recursive-readdir/-/recursive-readdir-2.2.2.tgz#9946fb3274e1628de6e36b2f6714953b4845094f" + integrity sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg== + dependencies: + minimatch "3.0.4" + +redent@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" + integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== + dependencies: + indent-string "^4.0.0" + strip-indent "^3.0.0" + +regenerate-unicode-properties@^10.1.0: + version "10.1.0" + resolved "http://localhost:4873/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz#7c3192cab6dd24e21cb4461e5ddd7dd24fa8374c" + integrity sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ== + dependencies: + regenerate "^1.4.2" + +regenerate@^1.4.2: + version "1.4.2" + resolved "http://localhost:4873/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" + integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== + +regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.9: + version "0.13.9" + resolved "http://localhost:4873/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" + integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + +regenerator-transform@^0.15.0: + version "0.15.0" + resolved "http://localhost:4873/regenerator-transform/-/regenerator-transform-0.15.0.tgz#cbd9ead5d77fae1a48d957cf889ad0586adb6537" + integrity 
sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg== + dependencies: + "@babel/runtime" "^7.8.4" + +regex-parser@^2.2.11: + version "2.2.11" + resolved "http://localhost:4873/regex-parser/-/regex-parser-2.2.11.tgz#3b37ec9049e19479806e878cabe7c1ca83ccfe58" + integrity sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q== + +regexp.prototype.flags@^1.4.1, regexp.prototype.flags@^1.4.3: + version "1.4.3" + resolved "http://localhost:4873/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" + integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + functions-have-names "^1.2.2" + +regexpp@^3.2.0: + version "3.2.0" + resolved "http://localhost:4873/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + +regexpu-core@^5.1.0: + version "5.2.1" + resolved "http://localhost:4873/regexpu-core/-/regexpu-core-5.2.1.tgz#a69c26f324c1e962e9ffd0b88b055caba8089139" + integrity sha512-HrnlNtpvqP1Xkb28tMhBUO2EbyUHdQlsnlAhzWcwHy8WJR53UWr7/MAvqrsQKMbV4qdpv03oTMG8iIhfsPFktQ== + dependencies: + regenerate "^1.4.2" + regenerate-unicode-properties "^10.1.0" + regjsgen "^0.7.1" + regjsparser "^0.9.1" + unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.0.0" + +regjsgen@^0.7.1: + version "0.7.1" + resolved "http://localhost:4873/regjsgen/-/regjsgen-0.7.1.tgz#ee5ef30e18d3f09b7c369b76e7c2373ed25546f6" + integrity sha512-RAt+8H2ZEzHeYWxZ3H2z6tF18zyyOnlcdaafLrm21Bguj7uZy6ULibiAFdXEtKQY4Sy7wDTwDiOazasMLc4KPA== + +regjsparser@^0.9.1: + version "0.9.1" + resolved "http://localhost:4873/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" + integrity 
sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== + dependencies: + jsesc "~0.5.0" + +relateurl@^0.2.7: + version "0.2.7" + resolved "http://localhost:4873/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" + integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog== + +renderkid@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" + integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== + dependencies: + css-select "^4.1.3" + dom-converter "^0.2.0" + htmlparser2 "^6.1.0" + lodash "^4.17.21" + strip-ansi "^6.0.1" + +require-directory@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== + +require-from-string@^2.0.2: + version "2.0.2" + resolved "http://localhost:4873/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +requires-port@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== + +resolve-cwd@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + dependencies: + resolve-from "^5.0.0" + +resolve-from@^4.0.0: + version "4.0.0" + resolved 
"http://localhost:4873/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-from@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +resolve-url-loader@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz#d50d4ddc746bb10468443167acf800dcd6c3ad57" + integrity sha512-05VEMczVREcbtT7Bz+C+96eUO5HDNvdthIiMB34t7FcF8ehcu4wC0sSgPUubs3XW2Q3CNLJk/BJrCU9wVRymiA== + dependencies: + adjust-sourcemap-loader "^4.0.0" + convert-source-map "^1.7.0" + loader-utils "^2.0.0" + postcss "^7.0.35" + source-map "0.6.1" + +resolve.exports@^1.1.0: + version "1.1.0" + resolved "http://localhost:4873/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" + integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== + +resolve@^1.1.7, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.0, resolve@^1.22.1: + version "1.22.1" + resolved "http://localhost:4873/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +resolve@^2.0.0-next.3: + version "2.0.0-next.4" + resolved "http://localhost:4873/resolve/-/resolve-2.0.0-next.4.tgz#3d37a113d6429f496ec4752d2a2e58efb1fd4660" + integrity sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + 
+retry@^0.13.1: + version "0.13.1" + resolved "http://localhost:4873/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" + integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== + +reusify@^1.0.4: + version "1.0.4" + resolved "http://localhost:4873/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@^3.0.0, rimraf@^3.0.2: + version "3.0.2" + resolved "http://localhost:4873/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +rollup-plugin-terser@^7.0.0: + version "7.0.2" + resolved "http://localhost:4873/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz#e8fbba4869981b2dc35ae7e8a502d5c6c04d324d" + integrity sha512-w3iIaU4OxcF52UUXiZNsNeuXIMDvFrr+ZXK6bFZ0Q60qyVfq4uLptoS4bbq3paG3x216eQllFZX7zt6TIImguQ== + dependencies: + "@babel/code-frame" "^7.10.4" + jest-worker "^26.2.1" + serialize-javascript "^4.0.0" + terser "^5.0.0" + +rollup@^2.43.1: + version "2.79.1" + resolved "http://localhost:4873/rollup/-/rollup-2.79.1.tgz#bedee8faef7c9f93a2647ac0108748f497f081c7" + integrity sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw== + optionalDependencies: + fsevents "~2.3.2" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "http://localhost:4873/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "http://localhost:4873/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity 
sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: + version "5.2.1" + resolved "http://localhost:4873/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-regex-test@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" + integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.3" + is-regex "^1.1.4" + +"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0": + version "2.1.2" + resolved "http://localhost:4873/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sanitize.css@*: + version "13.0.0" + resolved "http://localhost:4873/sanitize.css/-/sanitize.css-13.0.0.tgz#2675553974b27964c75562ade3bd85d79879f173" + integrity sha512-ZRwKbh/eQ6w9vmTjkuG0Ioi3HBwPFce0O+v//ve+aOq1oeCy7jMV2qzzAlpsNuqpqCBjjriM1lbtZbF/Q8jVyA== + +sass-loader@^12.3.0: + version "12.6.0" + resolved "http://localhost:4873/sass-loader/-/sass-loader-12.6.0.tgz#5148362c8e2cdd4b950f3c63ac5d16dbfed37bcb" + integrity sha512-oLTaH0YCtX4cfnJZxKSLAyglED0naiYfNG1iXfU5w1LNZ+ukoA5DtyDIN5zmKVZwYNJP4KRc5Y3hkWga+7tYfA== + dependencies: + klona "^2.0.4" + neo-async "^2.6.2" + +sax@~1.2.4: + version "1.2.4" + resolved "http://localhost:4873/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +saxes@^5.0.1: + version "5.0.1" + resolved 
"http://localhost:4873/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" + integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== + dependencies: + xmlchars "^2.2.0" + +scheduler@^0.23.0: + version "0.23.0" + resolved "http://localhost:4873/scheduler/-/scheduler-0.23.0.tgz#ba8041afc3d30eb206a487b6b384002e4e61fdfe" + integrity sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw== + dependencies: + loose-envify "^1.1.0" + +schema-utils@2.7.0: + version "2.7.0" + resolved "http://localhost:4873/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7" + integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== + dependencies: + "@types/json-schema" "^7.0.4" + ajv "^6.12.2" + ajv-keywords "^3.4.1" + +schema-utils@^2.6.5: + version "2.7.1" + resolved "http://localhost:4873/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" + integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== + dependencies: + "@types/json-schema" "^7.0.5" + ajv "^6.12.4" + ajv-keywords "^3.5.2" + +schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: + version "3.1.1" + resolved "http://localhost:4873/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" + integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + +schema-utils@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" + integrity sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg== + dependencies: + "@types/json-schema" "^7.0.9" + ajv "^8.8.0" + ajv-formats "^2.1.1" + ajv-keywords 
"^5.0.0" + +select-hose@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" + integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg== + +selfsigned@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/selfsigned/-/selfsigned-2.1.1.tgz#18a7613d714c0cd3385c48af0075abf3f266af61" + integrity sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ== + dependencies: + node-forge "^1" + +semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: + version "6.3.0" + resolved "http://localhost:4873/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +semver@^7.3.2, semver@^7.3.5, semver@^7.3.7: + version "7.3.8" + resolved "http://localhost:4873/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" + integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== + dependencies: + lru-cache "^6.0.0" + +send@0.18.0: + version "0.18.0" + resolved "http://localhost:4873/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" + integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== + dependencies: + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "2.0.0" + mime "1.6.0" + ms "2.1.3" + on-finished "2.4.1" + range-parser "~1.2.1" + statuses "2.0.1" + +serialize-javascript@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" + integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== + dependencies: + randombytes "^2.1.0" + 
+serialize-javascript@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" + integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== + dependencies: + randombytes "^2.1.0" + +serve-index@^1.9.1: + version "1.9.1" + resolved "http://localhost:4873/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" + integrity sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw== + dependencies: + accepts "~1.3.4" + batch "0.6.1" + debug "2.6.9" + escape-html "~1.0.3" + http-errors "~1.6.2" + mime-types "~2.1.17" + parseurl "~1.3.2" + +serve-static@1.15.0: + version "1.15.0" + resolved "http://localhost:4873/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" + integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.18.0" + +setprototypeof@1.1.0: + version "1.1.0" + resolved "http://localhost:4873/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" + integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== + +setprototypeof@1.2.0: + version "1.2.0" + resolved "http://localhost:4873/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +shallow-clone@^3.0.0: + version "3.0.1" + resolved "http://localhost:4873/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" + integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== + dependencies: + kind-of "^6.0.2" + +shebang-command@^2.0.0: + version "2.0.0" + resolved 
"http://localhost:4873/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +shell-quote@^1.7.3: + version "1.7.3" + resolved "http://localhost:4873/shell-quote/-/shell-quote-1.7.3.tgz#aa40edac170445b9a431e17bb62c0b881b9c4123" + integrity sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw== + +side-channel@^1.0.4: + version "1.0.4" + resolved "http://localhost:4873/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +signal-exit@^3.0.2, signal-exit@^3.0.3: + version "3.0.7" + resolved "http://localhost:4873/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + +sisteransi@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +slash@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +slash@^4.0.0: + version "4.0.0" + resolved 
"http://localhost:4873/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" + integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== + +sockjs@^0.3.24: + version "0.3.24" + resolved "http://localhost:4873/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce" + integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== + dependencies: + faye-websocket "^0.11.3" + uuid "^8.3.2" + websocket-driver "^0.7.4" + +source-list-map@^2.0.0, source-list-map@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" + integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== + +source-map-js@^1.0.1, source-map-js@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + +source-map-loader@^3.0.0: + version "3.0.1" + resolved "http://localhost:4873/source-map-loader/-/source-map-loader-3.0.1.tgz#9ae5edc7c2d42570934be4c95d1ccc6352eba52d" + integrity sha512-Vp1UsfyPvgujKQzi4pyDiTOnE3E4H+yHvkVRN3c/9PJmQS4CQJExvcDvaX/D+RV+xQben9HJ56jMJS3CgUeWyA== + dependencies: + abab "^2.0.5" + iconv-lite "^0.6.3" + source-map-js "^1.0.1" + +source-map-support@^0.5.6, source-map-support@~0.5.20: + version "0.5.21" + resolved "http://localhost:4873/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: + version "0.6.1" + resolved 
"http://localhost:4873/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +source-map@^0.7.3: + version "0.7.4" + resolved "http://localhost:4873/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" + integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== + +source-map@^0.8.0-beta.0: + version "0.8.0-beta.0" + resolved "http://localhost:4873/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" + integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== + dependencies: + whatwg-url "^7.0.0" + +sourcemap-codec@^1.4.8: + version "1.4.8" + resolved "http://localhost:4873/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" + integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== + +spdy-transport@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" + integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== + dependencies: + debug "^4.1.0" + detect-node "^2.0.4" + hpack.js "^2.1.6" + obuf "^1.1.2" + readable-stream "^3.0.6" + wbuf "^1.7.3" + +spdy@^4.0.2: + version "4.0.2" + resolved "http://localhost:4873/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b" + integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== + dependencies: + debug "^4.1.0" + handle-thing "^2.0.0" + http-deceiver "^1.2.7" + select-hose "^2.0.0" + spdy-transport "^3.0.0" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "http://localhost:4873/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity 
sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== + +stable@^0.1.8: + version "0.1.8" + resolved "http://localhost:4873/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" + integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== + +stack-utils@^2.0.3: + version "2.0.5" + resolved "http://localhost:4873/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" + integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== + dependencies: + escape-string-regexp "^2.0.0" + +stackframe@^1.3.4: + version "1.3.4" + resolved "http://localhost:4873/stackframe/-/stackframe-1.3.4.tgz#b881a004c8c149a5e8efef37d51b16e412943310" + integrity sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw== + +statuses@2.0.1: + version "2.0.1" + resolved "http://localhost:4873/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +"statuses@>= 1.4.0 < 2": + version "1.5.0" + resolved "http://localhost:4873/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" + integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== + +string-length@^4.0.1: + version "4.0.2" + resolved "http://localhost:4873/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== + dependencies: + char-regex "^1.0.2" + strip-ansi "^6.0.0" + +string-length@^5.0.1: + version "5.0.1" + resolved "http://localhost:4873/string-length/-/string-length-5.0.1.tgz#3d647f497b6e8e8d41e422f7e0b23bc536c8381e" + integrity 
sha512-9Ep08KAMUn0OadnVaBuRdE2l615CQ508kr0XMadjClfYpdCyvrbFp6Taebo8yyxokQ4viUd/xPPUA4FGgUa0ow== + dependencies: + char-regex "^2.0.0" + strip-ansi "^7.0.1" + +string-natural-compare@^3.0.1: + version "3.0.1" + resolved "http://localhost:4873/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" + integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== + +string-width@^4.1.0, string-width@^4.2.0: + version "4.2.3" + resolved "http://localhost:4873/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string.prototype.matchall@^4.0.6, string.prototype.matchall@^4.0.7: + version "4.0.7" + resolved "http://localhost:4873/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz#8e6ecb0d8a1fb1fda470d81acecb2dba057a481d" + integrity sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + get-intrinsic "^1.1.1" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + regexp.prototype.flags "^1.4.1" + side-channel "^1.0.4" + +string.prototype.trimend@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0" + integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string.prototype.trimstart@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef" + integrity 
sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "http://localhost:4873/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "http://localhost:4873/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +stringify-object@^3.3.0: + version "3.3.0" + resolved "http://localhost:4873/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629" + integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== + dependencies: + get-own-enumerable-property-symbols "^3.0.0" + is-obj "^1.0.1" + is-regexp "^1.0.0" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/strip-ansi/-/strip-ansi-7.0.1.tgz#61740a08ce36b61e50e65653f07060d000975fb2" + integrity sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw== + dependencies: + ansi-regex "^6.0.1" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity 
sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== + +strip-bom@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-comments@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/strip-comments/-/strip-comments-2.0.1.tgz#4ad11c3fbcac177a67a40ac224ca339ca1c1ba9b" + integrity sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw== + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-indent@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" + integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== + dependencies: + min-indent "^1.0.0" + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "http://localhost:4873/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +style-loader@^3.3.1: + version "3.3.1" + resolved "http://localhost:4873/style-loader/-/style-loader-3.3.1.tgz#057dfa6b3d4d7c7064462830f9113ed417d38575" + integrity sha512-GPcQ+LDJbrcxHORTRes6Jy2sfvK2kS6hpSfI/fXhPt+spVzxF6LJ1dHLN9zIGmVaaP044YKaIatFaufENRiDoQ== + +stylehacks@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/stylehacks/-/stylehacks-5.1.0.tgz#a40066490ca0caca04e96c6b02153ddc39913520" + integrity 
sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q== + dependencies: + browserslist "^4.16.6" + postcss-selector-parser "^6.0.4" + +supports-color@^5.3.0: + version "5.5.0" + resolved "http://localhost:4873/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.0.0, supports-color@^7.1.0: + version "7.2.0" + resolved "http://localhost:4873/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-color@^8.0.0: + version "8.1.1" + resolved "http://localhost:4873/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + +supports-hyperlinks@^2.0.0: + version "2.3.0" + resolved "http://localhost:4873/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz#3943544347c1ff90b15effb03fc14ae45ec10624" + integrity sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA== + dependencies: + has-flag "^4.0.0" + supports-color "^7.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +svg-parser@^2.0.2: + version "2.0.4" + resolved "http://localhost:4873/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" + integrity 
sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== + +svgo@^1.2.2: + version "1.3.2" + resolved "http://localhost:4873/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167" + integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== + dependencies: + chalk "^2.4.1" + coa "^2.0.2" + css-select "^2.0.0" + css-select-base-adapter "^0.1.1" + css-tree "1.0.0-alpha.37" + csso "^4.0.2" + js-yaml "^3.13.1" + mkdirp "~0.5.1" + object.values "^1.1.0" + sax "~1.2.4" + stable "^0.1.8" + unquote "~1.1.1" + util.promisify "~1.0.0" + +svgo@^2.7.0: + version "2.8.0" + resolved "http://localhost:4873/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" + integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== + dependencies: + "@trysound/sax" "0.2.0" + commander "^7.2.0" + css-select "^4.1.3" + css-tree "^1.1.3" + csso "^4.2.0" + picocolors "^1.0.0" + stable "^0.1.8" + +symbol-tree@^3.2.4: + version "3.2.4" + resolved "http://localhost:4873/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + +tailwindcss@^3.0.2: + version "3.1.8" + resolved "http://localhost:4873/tailwindcss/-/tailwindcss-3.1.8.tgz#4f8520550d67a835d32f2f4021580f9fddb7b741" + integrity sha512-YSneUCZSFDYMwk+TGq8qYFdCA3yfBRdBlS7txSq0LUmzyeqRe3a8fBQzbz9M3WS/iFT4BNf/nmw9mEzrnSaC0g== + dependencies: + arg "^5.0.2" + chokidar "^3.5.3" + color-name "^1.1.4" + detective "^5.2.1" + didyoumean "^1.2.2" + dlv "^1.1.3" + fast-glob "^3.2.11" + glob-parent "^6.0.2" + is-glob "^4.0.3" + lilconfig "^2.0.6" + normalize-path "^3.0.0" + object-hash "^3.0.0" + picocolors "^1.0.0" + postcss "^8.4.14" + postcss-import "^14.1.0" + postcss-js "^4.0.0" + postcss-load-config "^3.1.4" + postcss-nested "5.0.6" + postcss-selector-parser "^6.0.10" + 
postcss-value-parser "^4.2.0" + quick-lru "^5.1.1" + resolve "^1.22.1" + +tapable@^1.0.0: + version "1.1.3" + resolved "http://localhost:4873/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" + integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== + +tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: + version "2.2.1" + resolved "http://localhost:4873/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== + +temp-dir@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e" + integrity sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg== + +tempy@^0.6.0: + version "0.6.0" + resolved "http://localhost:4873/tempy/-/tempy-0.6.0.tgz#65e2c35abc06f1124a97f387b08303442bde59f3" + integrity sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw== + dependencies: + is-stream "^2.0.0" + temp-dir "^2.0.0" + type-fest "^0.16.0" + unique-string "^2.0.0" + +terminal-link@^2.0.0: + version "2.1.1" + resolved "http://localhost:4873/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" + integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== + dependencies: + ansi-escapes "^4.2.1" + supports-hyperlinks "^2.0.0" + +terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: + version "5.3.6" + resolved "http://localhost:4873/terser-webpack-plugin/-/terser-webpack-plugin-5.3.6.tgz#5590aec31aa3c6f771ce1b1acca60639eab3195c" + integrity sha512-kfLFk+PoLUQIbLmB1+PZDMRSZS99Mp+/MHqDNmMA6tOItzRt+Npe3E+fsMs5mfcM0wCtrrdU387UnV+vnSffXQ== + dependencies: + "@jridgewell/trace-mapping" "^0.3.14" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.0" + 
terser "^5.14.1" + +terser@^5.0.0, terser@^5.10.0, terser@^5.14.1: + version "5.15.1" + resolved "http://localhost:4873/terser/-/terser-5.15.1.tgz#8561af6e0fd6d839669c73b92bdd5777d870ed6c" + integrity sha512-K1faMUvpm/FBxjBXud0LWVAGxmvoPbZbfTCYbSgaaYQaIXI3/TdI7a7ZGA73Zrou6Q8Zmz3oeUTsp/dj+ag2Xw== + dependencies: + "@jridgewell/source-map" "^0.3.2" + acorn "^8.5.0" + commander "^2.20.0" + source-map-support "~0.5.20" + +test-exclude@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + +text-table@^0.2.0: + version "0.2.0" + resolved "http://localhost:4873/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + +throat@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375" + integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w== + +thunky@^1.0.2: + version "1.1.0" + resolved "http://localhost:4873/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" + integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== + +tmpl@1.0.5: + version "1.0.5" + resolved "http://localhost:4873/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity 
sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "http://localhost:4873/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +toidentifier@1.0.1: + version "1.0.1" + resolved "http://localhost:4873/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + +tough-cookie@^4.0.0: + version "4.1.2" + resolved "http://localhost:4873/tough-cookie/-/tough-cookie-4.1.2.tgz#e53e84b85f24e0b65dd526f46628db6c85f6b874" + integrity sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ== + dependencies: + psl "^1.1.33" + punycode "^2.1.1" + universalify "^0.2.0" + url-parse "^1.5.3" + +tr46@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + integrity sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA== + dependencies: + punycode "^2.1.0" + +tr46@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" + integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== + dependencies: + punycode "^2.1.1" + +tryer@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8" + integrity sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== + +tsconfig-paths@^3.14.1: + version "3.14.1" + resolved "http://localhost:4873/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz#ba0734599e8ea36c862798e920bcf163277b137a" + integrity 
sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.1" + minimist "^1.2.6" + strip-bom "^3.0.0" + +tslib@^1.8.1: + version "1.14.1" + resolved "http://localhost:4873/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@^2.0.3: + version "2.4.0" + resolved "http://localhost:4873/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" + integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== + +tsutils@^3.21.0: + version "3.21.0" + resolved "http://localhost:4873/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + dependencies: + tslib "^1.8.1" + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "http://localhost:4873/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-check@~0.3.2: + version "0.3.2" + resolved "http://localhost:4873/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg== + dependencies: + prelude-ls "~1.1.2" + +type-detect@4.0.8: + version "4.0.8" + resolved "http://localhost:4873/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +type-fest@^0.16.0: + version "0.16.0" + resolved "http://localhost:4873/type-fest/-/type-fest-0.16.0.tgz#3240b891a78b0deae910dbeb86553e552a148860" + 
integrity sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg== + +type-fest@^0.20.2: + version "0.20.2" + resolved "http://localhost:4873/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +type-fest@^0.21.3: + version "0.21.3" + resolved "http://localhost:4873/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + +type-is@~1.6.18: + version "1.6.18" + resolved "http://localhost:4873/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +typedarray-to-buffer@^3.1.5: + version "3.1.5" + resolved "http://localhost:4873/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" + integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== + dependencies: + is-typedarray "^1.0.0" + +unbox-primitive@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== + dependencies: + call-bind "^1.0.2" + has-bigints "^1.0.2" + has-symbols "^1.0.3" + which-boxed-primitive "^1.0.2" + +unicode-canonical-property-names-ecmascript@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" + integrity 
sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== + +unicode-match-property-ecmascript@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" + integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== + dependencies: + unicode-canonical-property-names-ecmascript "^2.0.0" + unicode-property-aliases-ecmascript "^2.0.0" + +unicode-match-property-value-ecmascript@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz#1a01aa57247c14c568b89775a54938788189a714" + integrity sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw== + +unicode-property-aliases-ecmascript@^2.0.0: + version "2.1.0" + resolved "http://localhost:4873/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd" + integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== + +unique-string@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" + integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== + dependencies: + crypto-random-string "^2.0.0" + +universalify@^0.2.0: + version "0.2.0" + resolved "http://localhost:4873/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" + integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== + +universalify@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + integrity 
sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "http://localhost:4873/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== + +unquote@~1.1.1: + version "1.1.1" + resolved "http://localhost:4873/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544" + integrity sha512-vRCqFv6UhXpWxZPyGDh/F3ZpNv8/qo7w6iufLpQg9aKnQ71qM4B5KiI7Mia9COcjEhrO9LueHpMYjYzsWH3OIg== + +upath@^1.2.0: + version "1.2.0" + resolved "http://localhost:4873/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" + integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== + +update-browserslist-db@^1.0.9: + version "1.0.10" + resolved "http://localhost:4873/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz#0f54b876545726f17d00cd9a2561e6dade943ff3" + integrity sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ== + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" + +uri-js@^4.2.2: + version "4.4.1" + resolved "http://localhost:4873/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +url-parse@^1.5.3: + version "1.5.10" + resolved "http://localhost:4873/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" + integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== + dependencies: + querystringify "^2.1.1" + requires-port "^1.0.0" + +util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: + version "1.0.2" + resolved 
"http://localhost:4873/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +util.promisify@~1.0.0: + version "1.0.1" + resolved "http://localhost:4873/util.promisify/-/util.promisify-1.0.1.tgz#6baf7774b80eeb0f7520d8b81d07982a59abbaee" + integrity sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.2" + has-symbols "^1.0.1" + object.getownpropertydescriptors "^2.1.0" + +utila@~0.4: + version "0.4.0" + resolved "http://localhost:4873/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" + integrity sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA== + +utils-merge@1.0.1: + version "1.0.1" + resolved "http://localhost:4873/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== + +uuid@^8.3, uuid@^8.3.2: + version "8.3.2" + resolved "http://localhost:4873/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" + integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== + +v8-to-istanbul@^8.1.0: + version "8.1.1" + resolved "http://localhost:4873/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz#77b752fd3975e31bbcef938f85e9bd1c7a8d60ed" + integrity sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^1.6.0" + source-map "^0.7.3" + +vary@~1.1.2: + version "1.1.2" + resolved "http://localhost:4873/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== + 
+w3c-hr-time@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" + integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== + dependencies: + browser-process-hrtime "^1.0.0" + +w3c-xmlserializer@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" + integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== + dependencies: + xml-name-validator "^3.0.0" + +walker@^1.0.7: + version "1.0.8" + resolved "http://localhost:4873/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + +watchpack@^2.4.0: + version "2.4.0" + resolved "http://localhost:4873/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" + integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== + dependencies: + glob-to-regexp "^0.4.1" + graceful-fs "^4.1.2" + +wbuf@^1.1.0, wbuf@^1.7.3: + version "1.7.3" + resolved "http://localhost:4873/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" + integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== + dependencies: + minimalistic-assert "^1.0.0" + +web-vitals@^2.1.4: + version "2.1.4" + resolved "http://localhost:4873/web-vitals/-/web-vitals-2.1.4.tgz#76563175a475a5e835264d373704f9dde718290c" + integrity sha512-sVWcwhU5mX6crfI5Vd2dC4qchyTqxV8URinzt25XqVh+bHEPGH4C3NPrNionCP7Obx59wrYEbNlw4Z8sjALzZg== + +webidl-conversions@^4.0.2: + version "4.0.2" + resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + integrity 
sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== + +webidl-conversions@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" + integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== + +webidl-conversions@^6.1.0: + version "6.1.0" + resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" + integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== + +webpack-dev-middleware@^5.3.1: + version "5.3.3" + resolved "http://localhost:4873/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz#efae67c2793908e7311f1d9b06f2a08dcc97e51f" + integrity sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA== + dependencies: + colorette "^2.0.10" + memfs "^3.4.3" + mime-types "^2.1.31" + range-parser "^1.2.1" + schema-utils "^4.0.0" + +webpack-dev-server@^4.6.0: + version "4.11.1" + resolved "http://localhost:4873/webpack-dev-server/-/webpack-dev-server-4.11.1.tgz#ae07f0d71ca0438cf88446f09029b92ce81380b5" + integrity sha512-lILVz9tAUy1zGFwieuaQtYiadImb5M3d+H+L1zDYalYoDl0cksAB1UNyuE5MMWJrG6zR1tXkCP2fitl7yoUJiw== + dependencies: + "@types/bonjour" "^3.5.9" + "@types/connect-history-api-fallback" "^1.3.5" + "@types/express" "^4.17.13" + "@types/serve-index" "^1.9.1" + "@types/serve-static" "^1.13.10" + "@types/sockjs" "^0.3.33" + "@types/ws" "^8.5.1" + ansi-html-community "^0.0.8" + bonjour-service "^1.0.11" + chokidar "^3.5.3" + colorette "^2.0.10" + compression "^1.7.4" + connect-history-api-fallback "^2.0.0" + default-gateway "^6.0.3" + express "^4.17.3" + graceful-fs "^4.2.6" + html-entities "^2.3.2" + http-proxy-middleware "^2.0.3" + ipaddr.js "^2.0.1" + open "^8.0.9" + p-retry "^4.5.0" + rimraf "^3.0.2" + schema-utils "^4.0.0" + 
selfsigned "^2.1.1" + serve-index "^1.9.1" + sockjs "^0.3.24" + spdy "^4.0.2" + webpack-dev-middleware "^5.3.1" + ws "^8.4.2" + +webpack-manifest-plugin@^4.0.2: + version "4.1.1" + resolved "http://localhost:4873/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz#10f8dbf4714ff93a215d5a45bcc416d80506f94f" + integrity sha512-YXUAwxtfKIJIKkhg03MKuiFAD72PlrqCiwdwO4VEXdRO5V0ORCNwaOwAZawPZalCbmH9kBDmXnNeQOw+BIEiow== + dependencies: + tapable "^2.0.0" + webpack-sources "^2.2.0" + +webpack-merge@^5.8.0: + version "5.8.0" + resolved "http://localhost:4873/webpack-merge/-/webpack-merge-5.8.0.tgz#2b39dbf22af87776ad744c390223731d30a68f61" + integrity sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q== + dependencies: + clone-deep "^4.0.1" + wildcard "^2.0.0" + +webpack-sources@^1.4.3: + version "1.4.3" + resolved "http://localhost:4873/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" + integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== + dependencies: + source-list-map "^2.0.0" + source-map "~0.6.1" + +webpack-sources@^2.2.0: + version "2.3.1" + resolved "http://localhost:4873/webpack-sources/-/webpack-sources-2.3.1.tgz#570de0af163949fe272233c2cefe1b56f74511fd" + integrity sha512-y9EI9AO42JjEcrTJFOYmVywVZdKVUfOvDUPsJea5GIr1JOEGFVqwlY2K098fFoIjOkDzHn2AjRvM8dsBZu+gCA== + dependencies: + source-list-map "^2.0.1" + source-map "^0.6.1" + +webpack-sources@^3.2.3: + version "3.2.3" + resolved "http://localhost:4873/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" + integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== + +webpack@^5.64.4: + version "5.74.0" + resolved "http://localhost:4873/webpack/-/webpack-5.74.0.tgz#02a5dac19a17e0bb47093f2be67c695102a55980" + integrity 
sha512-A2InDwnhhGN4LYctJj6M1JEaGL7Luj6LOmyBHjcI8529cm5p6VXiTIW2sn6ffvEAKmveLzvu4jrihwXtPojlAA== + dependencies: + "@types/eslint-scope" "^3.7.3" + "@types/estree" "^0.0.51" + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/wasm-edit" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + acorn "^8.7.1" + acorn-import-assertions "^1.7.6" + browserslist "^4.14.5" + chrome-trace-event "^1.0.2" + enhanced-resolve "^5.10.0" + es-module-lexer "^0.9.0" + eslint-scope "5.1.1" + events "^3.2.0" + glob-to-regexp "^0.4.1" + graceful-fs "^4.2.9" + json-parse-even-better-errors "^2.3.1" + loader-runner "^4.2.0" + mime-types "^2.1.27" + neo-async "^2.6.2" + schema-utils "^3.1.0" + tapable "^2.1.1" + terser-webpack-plugin "^5.1.3" + watchpack "^2.4.0" + webpack-sources "^3.2.3" + +websocket-driver@>=0.5.1, websocket-driver@^0.7.4: + version "0.7.4" + resolved "http://localhost:4873/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760" + integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== + dependencies: + http-parser-js ">=0.5.1" + safe-buffer ">=5.1.0" + websocket-extensions ">=0.1.1" + +websocket-extensions@>=0.1.1: + version "0.1.4" + resolved "http://localhost:4873/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" + integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== + +whatwg-encoding@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" + integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== + dependencies: + iconv-lite "0.4.24" + +whatwg-fetch@^3.6.2: + version "3.6.2" + resolved "http://localhost:4873/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" + integrity 
sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== + +whatwg-mimetype@^2.3.0: + version "2.3.0" + resolved "http://localhost:4873/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" + integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== + +whatwg-url@^7.0.0: + version "7.1.0" + resolved "http://localhost:4873/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" + integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== + dependencies: + lodash.sortby "^4.7.0" + tr46 "^1.0.1" + webidl-conversions "^4.0.2" + +whatwg-url@^8.0.0, whatwg-url@^8.5.0: + version "8.7.0" + resolved "http://localhost:4873/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" + integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== + dependencies: + lodash "^4.7.0" + tr46 "^2.1.0" + webidl-conversions "^6.1.0" + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which@^1.3.1: + version "1.3.1" + resolved "http://localhost:4873/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +which@^2.0.1: + version "2.0.2" + resolved "http://localhost:4873/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity 
sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +wildcard@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/wildcard/-/wildcard-2.0.0.tgz#a77d20e5200c6faaac979e4b3aadc7b3dd7f8fec" + integrity sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw== + +word-wrap@^1.2.3, word-wrap@~1.2.3: + version "1.2.3" + resolved "http://localhost:4873/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +workbox-background-sync@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-background-sync/-/workbox-background-sync-6.5.4.tgz#3141afba3cc8aa2ae14c24d0f6811374ba8ff6a9" + integrity sha512-0r4INQZMyPky/lj4Ou98qxcThrETucOde+7mRGJl13MPJugQNKeZQOdIJe/1AchOP23cTqHcN/YVpD6r8E6I8g== + dependencies: + idb "^7.0.1" + workbox-core "6.5.4" + +workbox-broadcast-update@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-broadcast-update/-/workbox-broadcast-update-6.5.4.tgz#8441cff5417cd41f384ba7633ca960a7ffe40f66" + integrity sha512-I/lBERoH1u3zyBosnpPEtcAVe5lwykx9Yg1k6f8/BGEPGaMMgZrwVrqL1uA9QZ1NGGFoyE6t9i7lBjOlDhFEEw== + dependencies: + workbox-core "6.5.4" + +workbox-build@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-build/-/workbox-build-6.5.4.tgz#7d06d31eb28a878817e1c991c05c5b93409f0389" + integrity sha512-kgRevLXEYvUW9WS4XoziYqZ8Q9j/2ziJYEtTrjdz5/L/cTUa2XfyMP2i7c3p34lgqJ03+mTiz13SdFef2POwbA== + dependencies: + "@apideck/better-ajv-errors" "^0.3.1" + "@babel/core" "^7.11.1" + "@babel/preset-env" "^7.11.0" + "@babel/runtime" "^7.11.2" + "@rollup/plugin-babel" "^5.2.0" + "@rollup/plugin-node-resolve" "^11.2.1" + "@rollup/plugin-replace" "^2.4.1" + "@surma/rollup-plugin-off-main-thread" "^2.2.3" + ajv "^8.6.0" + common-tags "^1.8.0" + fast-json-stable-stringify "^2.1.0" + fs-extra 
"^9.0.1" + glob "^7.1.6" + lodash "^4.17.20" + pretty-bytes "^5.3.0" + rollup "^2.43.1" + rollup-plugin-terser "^7.0.0" + source-map "^0.8.0-beta.0" + stringify-object "^3.3.0" + strip-comments "^2.0.1" + tempy "^0.6.0" + upath "^1.2.0" + workbox-background-sync "6.5.4" + workbox-broadcast-update "6.5.4" + workbox-cacheable-response "6.5.4" + workbox-core "6.5.4" + workbox-expiration "6.5.4" + workbox-google-analytics "6.5.4" + workbox-navigation-preload "6.5.4" + workbox-precaching "6.5.4" + workbox-range-requests "6.5.4" + workbox-recipes "6.5.4" + workbox-routing "6.5.4" + workbox-strategies "6.5.4" + workbox-streams "6.5.4" + workbox-sw "6.5.4" + workbox-window "6.5.4" + +workbox-cacheable-response@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-cacheable-response/-/workbox-cacheable-response-6.5.4.tgz#a5c6ec0c6e2b6f037379198d4ef07d098f7cf137" + integrity sha512-DCR9uD0Fqj8oB2TSWQEm1hbFs/85hXXoayVwFKLVuIuxwJaihBsLsp4y7J9bvZbqtPJ1KlCkmYVGQKrBU4KAug== + dependencies: + workbox-core "6.5.4" + +workbox-core@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-core/-/workbox-core-6.5.4.tgz#df48bf44cd58bb1d1726c49b883fb1dffa24c9ba" + integrity sha512-OXYb+m9wZm8GrORlV2vBbE5EC1FKu71GGp0H4rjmxmF4/HLbMCoTFws87M3dFwgpmg0v00K++PImpNQ6J5NQ6Q== + +workbox-expiration@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-expiration/-/workbox-expiration-6.5.4.tgz#501056f81e87e1d296c76570bb483ce5e29b4539" + integrity sha512-jUP5qPOpH1nXtjGGh1fRBa1wJL2QlIb5mGpct3NzepjGG2uFFBn4iiEBiI9GUmfAFR2ApuRhDydjcRmYXddiEQ== + dependencies: + idb "^7.0.1" + workbox-core "6.5.4" + +workbox-google-analytics@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-google-analytics/-/workbox-google-analytics-6.5.4.tgz#c74327f80dfa4c1954cbba93cd7ea640fe7ece7d" + integrity sha512-8AU1WuaXsD49249Wq0B2zn4a/vvFfHkpcFfqAFHNHwln3jK9QUYmzdkKXGIZl9wyKNP+RRX30vcgcyWMcZ9VAg== + dependencies: + workbox-background-sync "6.5.4" + workbox-core "6.5.4" + 
workbox-routing "6.5.4" + workbox-strategies "6.5.4" + +workbox-navigation-preload@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-navigation-preload/-/workbox-navigation-preload-6.5.4.tgz#ede56dd5f6fc9e860a7e45b2c1a8f87c1c793212" + integrity sha512-IIwf80eO3cr8h6XSQJF+Hxj26rg2RPFVUmJLUlM0+A2GzB4HFbQyKkrgD5y2d84g2IbJzP4B4j5dPBRzamHrng== + dependencies: + workbox-core "6.5.4" + +workbox-precaching@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-precaching/-/workbox-precaching-6.5.4.tgz#740e3561df92c6726ab5f7471e6aac89582cab72" + integrity sha512-hSMezMsW6btKnxHB4bFy2Qfwey/8SYdGWvVIKFaUm8vJ4E53JAY+U2JwLTRD8wbLWoP6OVUdFlXsTdKu9yoLTg== + dependencies: + workbox-core "6.5.4" + workbox-routing "6.5.4" + workbox-strategies "6.5.4" + +workbox-range-requests@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-range-requests/-/workbox-range-requests-6.5.4.tgz#86b3d482e090433dab38d36ae031b2bb0bd74399" + integrity sha512-Je2qR1NXCFC8xVJ/Lux6saH6IrQGhMpDrPXWZWWS8n/RD+WZfKa6dSZwU+/QksfEadJEr/NfY+aP/CXFFK5JFg== + dependencies: + workbox-core "6.5.4" + +workbox-recipes@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-recipes/-/workbox-recipes-6.5.4.tgz#cca809ee63b98b158b2702dcfb741b5cc3e24acb" + integrity sha512-QZNO8Ez708NNwzLNEXTG4QYSKQ1ochzEtRLGaq+mr2PyoEIC1xFW7MrWxrONUxBFOByksds9Z4//lKAX8tHyUA== + dependencies: + workbox-cacheable-response "6.5.4" + workbox-core "6.5.4" + workbox-expiration "6.5.4" + workbox-precaching "6.5.4" + workbox-routing "6.5.4" + workbox-strategies "6.5.4" + +workbox-routing@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-routing/-/workbox-routing-6.5.4.tgz#6a7fbbd23f4ac801038d9a0298bc907ee26fe3da" + integrity sha512-apQswLsbrrOsBUWtr9Lf80F+P1sHnQdYodRo32SjiByYi36IDyL2r7BH1lJtFX8fwNHDa1QOVY74WKLLS6o5Pg== + dependencies: + workbox-core "6.5.4" + +workbox-strategies@6.5.4: + version "6.5.4" + resolved 
"http://localhost:4873/workbox-strategies/-/workbox-strategies-6.5.4.tgz#4edda035b3c010fc7f6152918370699334cd204d" + integrity sha512-DEtsxhx0LIYWkJBTQolRxG4EI0setTJkqR4m7r4YpBdxtWJH1Mbg01Cj8ZjNOO8etqfA3IZaOPHUxCs8cBsKLw== + dependencies: + workbox-core "6.5.4" + +workbox-streams@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-streams/-/workbox-streams-6.5.4.tgz#1cb3c168a6101df7b5269d0353c19e36668d7d69" + integrity sha512-FXKVh87d2RFXkliAIheBojBELIPnWbQdyDvsH3t74Cwhg0fDheL1T8BqSM86hZvC0ZESLsznSYWw+Va+KVbUzg== + dependencies: + workbox-core "6.5.4" + workbox-routing "6.5.4" + +workbox-sw@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-sw/-/workbox-sw-6.5.4.tgz#d93e9c67924dd153a61367a4656ff4d2ae2ed736" + integrity sha512-vo2RQo7DILVRoH5LjGqw3nphavEjK4Qk+FenXeUsknKn14eCNedHOXWbmnvP4ipKhlE35pvJ4yl4YYf6YsJArA== + +workbox-webpack-plugin@^6.4.1: + version "6.5.4" + resolved "http://localhost:4873/workbox-webpack-plugin/-/workbox-webpack-plugin-6.5.4.tgz#baf2d3f4b8f435f3469887cf4fba2b7fac3d0fd7" + integrity sha512-LmWm/zoaahe0EGmMTrSLUi+BjyR3cdGEfU3fS6PN1zKFYbqAKuQ+Oy/27e4VSXsyIwAw8+QDfk1XHNGtZu9nQg== + dependencies: + fast-json-stable-stringify "^2.1.0" + pretty-bytes "^5.4.1" + upath "^1.2.0" + webpack-sources "^1.4.3" + workbox-build "6.5.4" + +workbox-window@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-window/-/workbox-window-6.5.4.tgz#d991bc0a94dff3c2dbb6b84558cff155ca878e91" + integrity sha512-HnLZJDwYBE+hpG25AQBO8RUWBJRaCsI9ksQJEp3aCOFCaG5kqaToAYXFRAHxzRluM2cQbGzdQF5rjKPWPA1fug== + dependencies: + "@types/trusted-types" "^2.0.2" + workbox-core "6.5.4" + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "http://localhost:4873/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + 
version "1.0.2" + resolved "http://localhost:4873/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +write-file-atomic@^3.0.0: + version "3.0.3" + resolved "http://localhost:4873/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" + integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== + dependencies: + imurmurhash "^0.1.4" + is-typedarray "^1.0.0" + signal-exit "^3.0.2" + typedarray-to-buffer "^3.1.5" + +ws@^7.4.6: + version "7.5.9" + resolved "http://localhost:4873/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" + integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== + +ws@^8.4.2: + version "8.9.0" + resolved "http://localhost:4873/ws/-/ws-8.9.0.tgz#2a994bb67144be1b53fe2d23c53c028adeb7f45e" + integrity sha512-Ja7nszREasGaYUYCI2k4lCKIRTt+y7XuqVoHR44YpI49TtryyqbqvDMn5eqfW7e6HzTukDRIsXqzVHScqRcafg== + +xml-name-validator@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" + integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== + +xmlchars@^2.2.0: + version "2.2.0" + resolved "http://localhost:4873/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" + integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== + +xtend@^4.0.2: + version "4.0.2" + resolved "http://localhost:4873/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== + +y18n@^5.0.5: + version "5.0.8" + resolved 
"http://localhost:4873/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + +yallist@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: + version "1.10.2" + resolved "http://localhost:4873/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + +yargs-parser@^20.2.2: + version "20.2.9" + resolved "http://localhost:4873/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== + +yargs@^16.2.0: + version "16.2.0" + resolved "http://localhost:4873/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.0" + y18n "^5.0.5" + yargs-parser "^20.2.2" + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "http://localhost:4873/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== diff --git a/wrappers/javascript/examples/vite/.gitignore b/javascript/examples/vite/.gitignore similarity index 100% rename from wrappers/javascript/examples/vite/.gitignore rename to javascript/examples/vite/.gitignore diff --git a/wrappers/javascript/examples/vite/README.md b/javascript/examples/vite/README.md similarity index 100% 
rename from wrappers/javascript/examples/vite/README.md rename to javascript/examples/vite/README.md diff --git a/wrappers/javascript/examples/vite/index.html b/javascript/examples/vite/index.html similarity index 100% rename from wrappers/javascript/examples/vite/index.html rename to javascript/examples/vite/index.html diff --git a/wrappers/javascript/examples/vite/main.ts b/javascript/examples/vite/main.ts similarity index 100% rename from wrappers/javascript/examples/vite/main.ts rename to javascript/examples/vite/main.ts diff --git a/wrappers/javascript/examples/vite/package.json b/javascript/examples/vite/package.json similarity index 100% rename from wrappers/javascript/examples/vite/package.json rename to javascript/examples/vite/package.json diff --git a/wrappers/javascript/examples/vite/public/vite.svg b/javascript/examples/vite/public/vite.svg similarity index 100% rename from wrappers/javascript/examples/vite/public/vite.svg rename to javascript/examples/vite/public/vite.svg diff --git a/wrappers/javascript/examples/vite/src/counter.ts b/javascript/examples/vite/src/counter.ts similarity index 100% rename from wrappers/javascript/examples/vite/src/counter.ts rename to javascript/examples/vite/src/counter.ts diff --git a/wrappers/javascript/examples/vite/src/main.ts b/javascript/examples/vite/src/main.ts similarity index 100% rename from wrappers/javascript/examples/vite/src/main.ts rename to javascript/examples/vite/src/main.ts diff --git a/wrappers/javascript/examples/vite/src/style.css b/javascript/examples/vite/src/style.css similarity index 100% rename from wrappers/javascript/examples/vite/src/style.css rename to javascript/examples/vite/src/style.css diff --git a/wrappers/javascript/examples/vite/src/typescript.svg b/javascript/examples/vite/src/typescript.svg similarity index 100% rename from wrappers/javascript/examples/vite/src/typescript.svg rename to javascript/examples/vite/src/typescript.svg diff --git 
a/wrappers/javascript/examples/vite/src/vite-env.d.ts b/javascript/examples/vite/src/vite-env.d.ts similarity index 100% rename from wrappers/javascript/examples/vite/src/vite-env.d.ts rename to javascript/examples/vite/src/vite-env.d.ts diff --git a/wrappers/javascript/examples/vite/tsconfig.json b/javascript/examples/vite/tsconfig.json similarity index 100% rename from wrappers/javascript/examples/vite/tsconfig.json rename to javascript/examples/vite/tsconfig.json diff --git a/wrappers/javascript/examples/vite/vite.config.js b/javascript/examples/vite/vite.config.js similarity index 100% rename from wrappers/javascript/examples/vite/vite.config.js rename to javascript/examples/vite/vite.config.js diff --git a/wrappers/javascript/examples/webpack/.gitignore b/javascript/examples/webpack/.gitignore similarity index 100% rename from wrappers/javascript/examples/webpack/.gitignore rename to javascript/examples/webpack/.gitignore diff --git a/wrappers/javascript/examples/webpack/README.md b/javascript/examples/webpack/README.md similarity index 100% rename from wrappers/javascript/examples/webpack/README.md rename to javascript/examples/webpack/README.md diff --git a/wrappers/javascript/examples/webpack/package.json b/javascript/examples/webpack/package.json similarity index 100% rename from wrappers/javascript/examples/webpack/package.json rename to javascript/examples/webpack/package.json diff --git a/wrappers/javascript/examples/webpack/public/index.html b/javascript/examples/webpack/public/index.html similarity index 100% rename from wrappers/javascript/examples/webpack/public/index.html rename to javascript/examples/webpack/public/index.html diff --git a/wrappers/javascript/examples/webpack/src/index.js b/javascript/examples/webpack/src/index.js similarity index 100% rename from wrappers/javascript/examples/webpack/src/index.js rename to javascript/examples/webpack/src/index.js diff --git a/wrappers/javascript/examples/webpack/webpack.config.js 
b/javascript/examples/webpack/webpack.config.js similarity index 100% rename from wrappers/javascript/examples/webpack/webpack.config.js rename to javascript/examples/webpack/webpack.config.js diff --git a/wrappers/javascript/package.json b/javascript/package.json similarity index 100% rename from wrappers/javascript/package.json rename to javascript/package.json diff --git a/wrappers/javascript/src/constants.ts b/javascript/src/constants.ts similarity index 100% rename from wrappers/javascript/src/constants.ts rename to javascript/src/constants.ts diff --git a/wrappers/javascript/src/counter.ts b/javascript/src/counter.ts similarity index 100% rename from wrappers/javascript/src/counter.ts rename to javascript/src/counter.ts diff --git a/wrappers/javascript/src/index.ts b/javascript/src/index.ts similarity index 100% rename from wrappers/javascript/src/index.ts rename to javascript/src/index.ts diff --git a/wrappers/javascript/src/low_level.ts b/javascript/src/low_level.ts similarity index 100% rename from wrappers/javascript/src/low_level.ts rename to javascript/src/low_level.ts diff --git a/wrappers/javascript/src/numbers.ts b/javascript/src/numbers.ts similarity index 100% rename from wrappers/javascript/src/numbers.ts rename to javascript/src/numbers.ts diff --git a/wrappers/javascript/src/proxies.ts b/javascript/src/proxies.ts similarity index 100% rename from wrappers/javascript/src/proxies.ts rename to javascript/src/proxies.ts diff --git a/wrappers/javascript/src/text.ts b/javascript/src/text.ts similarity index 100% rename from wrappers/javascript/src/text.ts rename to javascript/src/text.ts diff --git a/wrappers/javascript/src/types.ts b/javascript/src/types.ts similarity index 100% rename from wrappers/javascript/src/types.ts rename to javascript/src/types.ts diff --git a/wrappers/javascript/src/uuid.ts b/javascript/src/uuid.ts similarity index 100% rename from wrappers/javascript/src/uuid.ts rename to javascript/src/uuid.ts diff --git 
a/wrappers/javascript/test/basic_test.ts b/javascript/test/basic_test.ts similarity index 100% rename from wrappers/javascript/test/basic_test.ts rename to javascript/test/basic_test.ts diff --git a/wrappers/javascript/test/columnar_test.ts b/javascript/test/columnar_test.ts similarity index 100% rename from wrappers/javascript/test/columnar_test.ts rename to javascript/test/columnar_test.ts diff --git a/wrappers/javascript/test/extra_api_tests.ts b/javascript/test/extra_api_tests.ts similarity index 100% rename from wrappers/javascript/test/extra_api_tests.ts rename to javascript/test/extra_api_tests.ts diff --git a/wrappers/javascript/test/helpers.ts b/javascript/test/helpers.ts similarity index 100% rename from wrappers/javascript/test/helpers.ts rename to javascript/test/helpers.ts diff --git a/wrappers/javascript/test/legacy/columnar.js b/javascript/test/legacy/columnar.js similarity index 100% rename from wrappers/javascript/test/legacy/columnar.js rename to javascript/test/legacy/columnar.js diff --git a/wrappers/javascript/test/legacy/common.js b/javascript/test/legacy/common.js similarity index 100% rename from wrappers/javascript/test/legacy/common.js rename to javascript/test/legacy/common.js diff --git a/wrappers/javascript/test/legacy/encoding.js b/javascript/test/legacy/encoding.js similarity index 100% rename from wrappers/javascript/test/legacy/encoding.js rename to javascript/test/legacy/encoding.js diff --git a/wrappers/javascript/test/legacy/sync.js b/javascript/test/legacy/sync.js similarity index 100% rename from wrappers/javascript/test/legacy/sync.js rename to javascript/test/legacy/sync.js diff --git a/wrappers/javascript/test/legacy_tests.ts b/javascript/test/legacy_tests.ts similarity index 100% rename from wrappers/javascript/test/legacy_tests.ts rename to javascript/test/legacy_tests.ts diff --git a/wrappers/javascript/test/sync_test.ts b/javascript/test/sync_test.ts similarity index 100% rename from wrappers/javascript/test/sync_test.ts 
rename to javascript/test/sync_test.ts diff --git a/wrappers/javascript/test/text_test.ts b/javascript/test/text_test.ts similarity index 100% rename from wrappers/javascript/test/text_test.ts rename to javascript/test/text_test.ts diff --git a/wrappers/javascript/test/uuid_test.ts b/javascript/test/uuid_test.ts similarity index 100% rename from wrappers/javascript/test/uuid_test.ts rename to javascript/test/uuid_test.ts diff --git a/wrappers/javascript/tsconfig.json b/javascript/tsconfig.json similarity index 100% rename from wrappers/javascript/tsconfig.json rename to javascript/tsconfig.json diff --git a/wrappers/javascript/tslint.json b/javascript/tslint.json similarity index 100% rename from wrappers/javascript/tslint.json rename to javascript/tslint.json diff --git a/scripts/ci/js_tests b/scripts/ci/js_tests index 7455502a..b05edd1c 100755 --- a/scripts/ci/js_tests +++ b/scripts/ci/js_tests @@ -2,12 +2,12 @@ set -e THIS_SCRIPT=$(dirname "$0"); WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; -JS_PROJECT=$THIS_SCRIPT/../../wrappers/javascript; -E2E_PROJECT=$THIS_SCRIPT/../../wrappers/javascript/e2e; +JS_PROJECT=$THIS_SCRIPT/../../javascript; +E2E_PROJECT=$THIS_SCRIPT/../../javascript/e2e; yarn --cwd $E2E_PROJECT install; # This will build the automerge-wasm project, publish it to a local NPM -# repository, then run `yarn build` in the `wrappers/javascript` directory with +# repository, then run `yarn build` in the `javascript` directory with # the local registry yarn --cwd $E2E_PROJECT e2e buildjs; yarn --cwd $JS_PROJECT test From 96f15c6e003809a915f108c83ffe80e319514ff5 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 13 Oct 2022 22:08:40 +0100 Subject: [PATCH 605/730] Update main README to reflect new repo layout --- README.md | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index e369ec39..449da11d 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ computing problems. 
Automerge aims to be PostgreSQL for your local-first app. If you're looking for documentation on the JavaScript implementation take a look at https://automerge.org/docs/hello/. There are other implementations in both Rust and C, but they are earlier and don't have documentation yet. You can find -them in `crates/automerge` and `crates/automerge-c` if you are comfortable +them in `rust/automerge` and `rust/automerge-c` if you are comfortable reading the code and tests to figure out how to use them. If you're familiar with CRDTs and interested in the design of Automerge in @@ -57,19 +57,15 @@ to figure out how to use it. ## Repository Organisation -* `./crates` - the crates which make up the rust implementation and also the - Rust components of platform specific wrappers (e.g. `automerge-wasm` for the - WASM API or `automerge-c` for the C FFI bindings) -* `./wrappers` - code for specific languages which wraps the FFI interface in a - more idiomatic API (e.g. `wrappers/javascript`) +* `./rust` - the Rust implementation and also the Rust components of + platform specific wrappers (e.g. `automerge-wasm` for the WASM API or + `automerge-c` for the C FFI bindings) +* `./javascript` - The javascript library which uses `automerge-wasm` + internally but presents a more idiomatic javascript interface * `./scripts` - scripts which are useful to maintenance of the repository. This includes the scripts which are run in CI. * `./img` - static assets for use in `.md` files -This repository contains the primary implementation of automerge - which is -written in rust in `./crates` - as well as wrappers which expose the Rust -implementation via FFI in other languages in `./wrappers`. 
Because this is - ## Building To build this codebase you will need: From e189ec9ca8e3107a97307285b9653e60c3813f0f Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 13 Oct 2022 23:01:50 +0100 Subject: [PATCH 606/730] Add some READMEs to the javascript directory --- javascript/HACKING.md | 40 ++++++++++++++++ javascript/README.md | 106 +++++++++++++++++++++++++++++++++++++++--- 2 files changed, 139 insertions(+), 7 deletions(-) create mode 100644 javascript/HACKING.md diff --git a/javascript/HACKING.md b/javascript/HACKING.md new file mode 100644 index 00000000..c3203775 --- /dev/null +++ b/javascript/HACKING.md @@ -0,0 +1,40 @@ +## Architecture + +The `@automerge/automerge` package is a set of +[`Proxy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy) +objects which provide an idiomatic javascript interface built on top of the +lower level `@automerge/automerge-wasm` package (which is in turn built from the +Rust codebase and can be found in `~/automerge-wasm`). I.e. the responsibility +of this codebase is + +- To map from the javascript data model to the underlying `set`, `make`, + `insert`, and `delete` operations of Automerge. +- To expose a more convenient interface to functions in `automerge-wasm` which + generate messages to send over the network or compressed file formats to store + on disk + +## Building and testing + +Much of the functionality of this package depends on the +`@automerge/automerge-wasm` package and frequently you will be working on both +of them at the same time. It would be frustrating to have to push +`automerge-wasm` to NPM every time you want to test a change but I (Alex) also +don't trust `yarn link` to do the right thing here. Therefore, the `./e2e` +folder contains a little yarn package which spins up a local NPM registry. See +`./e2e/README` for details. 
In brief though: + +To build `automerge-wasm` and install it in the local `node_modules` + +```bash +cd e2e && yarn install && yarn run e2e buildjs +``` + +Now that you've done this you can run the tests + +```bash +yarn test +``` + +If you make changes to the `automerge-wasm` package you will need to re-run +`yarn e2e buildjs` + diff --git a/javascript/README.md b/javascript/README.md index 4981e7be..ffd2b38e 100644 --- a/javascript/README.md +++ b/javascript/README.md @@ -3,16 +3,108 @@ Automerge is a library of data structures for building collaborative applications, this package is the javascript implementation. -Please see [automerge.org](http://automerge.org/) for documentation. +Detailed documentation is available at [automerge.org](http://automerge.org/) +but see the following for a short getting started guide. -## Setup +## Quickstart -This package is a wrapper around a core library which is written in rust and -compiled to WASM. In `node` this should be transparent to you, but in the -browser you will need a bundler to include the WASM blob as part of your module -hierarchy. There are examples of doing this with common bundlers in `./examples`. +First, install the library. + +``` +yarn add @automerge/automerge +``` + +If you're writing a `node` application, you can skip straight to [Make some +data](#make-some-data). If you're in a browser you need a bundler. + +### Bundler setup + + +`@automerge/automerge` is a wrapper around a core library which is written in +rust, compiled to WebAssembly and distributed as a separate package called +`@automerge/automerge-wasm`. Browsers don't currently support WebAssembly +modules taking part in ESM module imports, so you must use a bundler to import +`@automerge/automerge` in the browser. There are a lot of bundlers out there, we +have examples for common bundlers in the `examples` folder. Here is a short +example using Webpack 5. 
+ +Assuming a standard setup of a new webpack project, you'll need to enable the +`asyncWebAssembly` experiment. In a typical webpack project that means adding +something like this to `webpack.config.js` + +```javascript +module.exports = { + ... + experiments: { asyncWebAssembly: true }, + performance: { // we don't want the wasm blob to generate warnings + hints: false, + maxEntrypointSize: 512000, + maxAssetSize: 512000 + } +}; +``` + +### Make some data + +Automerge allows separate threads of execution to make changes to some data +and always be able to merge their changes later. + +```javascript +import * as automerge from "@automerge/automerge" +import * as assert from "assert" + +let doc1 = automerge.from({ + tasks: [ + {description: "feed fish", done: false}, + {description: "water plants", done: false}, + ] +}) + +// Create a new thread of execution +let doc2 = automerge.clone(doc1) + +// Now we concurrently make changes to doc1 and doc2 + +// Complete a task in doc2 +doc2 = automerge.change(doc2, d => { + d.tasks[0].done = true +}) + +// Add a task in doc1 +doc1 = automerge.change(doc1, d => { + d.tasks.push({ + description: "water fish", + done: false + }) +}) + +// Merge changes from both docs +doc1 = automerge.merge(doc1, doc2) +doc2 = automerge.merge(doc2, doc1) + +// Both docs are merged and identical +assert.deepEqual(doc1, { + tasks: [ + {description: "feed fish", done: true}, + {description: "water plants", done: false}, + {description: "water fish", done: false}, + ] +}) + +assert.deepEqual(doc2, { + tasks: [ + {description: "feed fish", done: true}, + {description: "water plants", done: false}, + {description: "water fish", done: false}, + ] +}) +``` + +## Development + +See [HACKING.md](./HACKING.md) ## Meta -Copyright 2017–2021, the Automerge contributors. Released under the terms of the +Copyright 2017–present, the Automerge contributors. Released under the terms of the MIT license (see `LICENSE`). 
From 24dcf8270a4ea64b064d9493fa7ee07ab984f027 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sun, 16 Oct 2022 23:28:40 +0100 Subject: [PATCH 607/730] Add typedoc comments to the entire public JS API --- javascript/.gitignore | 1 + javascript/package.json | 1 + javascript/src/index.ts | 1030 ++++++++++++++++++++++------------ javascript/src/types.ts | 3 +- javascript/typedoc-readme.md | 165 ++++++ 5 files changed, 855 insertions(+), 345 deletions(-) create mode 100644 javascript/typedoc-readme.md diff --git a/javascript/.gitignore b/javascript/.gitignore index 05065cf0..bf2aad08 100644 --- a/javascript/.gitignore +++ b/javascript/.gitignore @@ -1,3 +1,4 @@ /node_modules /yarn.lock dist +docs/ diff --git a/javascript/package.json b/javascript/package.json index e830b100..7cdf9eac 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -54,6 +54,7 @@ "pako": "^2.0.4", "ts-mocha": "^10.0.0", "ts-node": "^10.9.1", + "typedoc": "^0.23.16", "typescript": "^4.6.4" }, "dependencies": { diff --git a/javascript/src/index.ts b/javascript/src/index.ts index f2ebea2c..2e2ad436 100644 --- a/javascript/src/index.ts +++ b/javascript/src/index.ts @@ -1,466 +1,808 @@ -export { uuid } from './uuid' +/** @hidden **/ +export {/** @hidden */ uuid} from './uuid' -import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" -import { STATE, HEADS, TRACE, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" +import {rootProxy, listProxy, textProxy, mapProxy} from "./proxies" +import {STATE, HEADS, TRACE, OBJECT_ID, READ_ONLY, FROZEN} from "./constants" -import { AutomergeValue, Text, Counter } from "./types" -export { AutomergeValue, Text, Counter, Int, Uint, Float64 } from "./types" +import {AutomergeValue, Text, Counter} from "./types" +export {AutomergeValue, Text, Counter, Int, Uint, Float64, ScalarValue} from "./types" -import { type API, type Patch } from "@automerge/automerge-wasm"; -import { ApiHandler, UseApi } from "./low_level" +import {type API, type Patch} 
from "@automerge/automerge-wasm"; +export { type Patch, PutPatch, DelPatch, SplicePatch, IncPatch, SyncMessage, } from "@automerge/automerge-wasm" +import {ApiHandler, UseApi} from "./low_level" -import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "@automerge/automerge-wasm" -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "@automerge/automerge-wasm" +import {Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue} from "@automerge/automerge-wasm" +import {JsSyncState as SyncState, SyncMessage, DecodedSyncMessage} from "@automerge/automerge-wasm" -export type ChangeOptions = { message?: string, time?: number, patchCallback?: PatchCallback } -export type ApplyOptions = { patchCallback?: PatchCallback } - -export type Doc = { readonly [P in keyof T]: T[P] } - -export type ChangeFn = (doc: T) => void - -export type PatchCallback = (patch: Patch, before: Doc, after: Doc) => void - -export interface State { - change: DecodedChange - snapshot: T +/** Options passed to {@link change}, and {@link emptyChange} + * @typeParam T - The type of value contained in the document + */ +export type ChangeOptions = { + /** A message which describes the changes */ + message?: string, + /** The unix timestamp of the change (purely advisory, not used in conflict resolution) */ + time?: number, + /** A callback which will be called to notify the caller of any changes to the document */ + patchCallback?: PatchCallback } +/** Options passed to {@link loadIncremental}, {@link applyChanges}, and {@link receiveSyncMessage} + * @typeParam T - The type of value contained in the document + */ +export type ApplyOptions = {patchCallback?: PatchCallback} + +/** + * An automerge document. + * @typeParam T - The type of the value contained in this document + * + * Note that this provides read only access to the fields of the value. 
To + * modify the value use {@link change} + */ +export type Doc = {readonly [P in keyof T]: T[P]} + +/** + * Function which is called by {@link change} when making changes to a `Doc` + * @typeParam T - The type of value contained in the document + * + * This function may mutate `doc` + */ +export type ChangeFn = (doc: T) => void + +/** + * Callback which is called by various methods in this library to notify the + * user of what changes have been made. + * @param patch - A description of the changes made + * @param before - The document before the change was made + * @param after - The document after the change was made + */ +export type PatchCallback = (patch: Patch, before: Doc, after: Doc) => void + +/** @hidden **/ +export interface State { + change: DecodedChange + snapshot: T +} + +/** @hidden **/ export function use(api: API) { - UseApi(api) + UseApi(api) } import * as wasm from "@automerge/automerge-wasm" use(wasm) +/** + * Options to be passed to {@link init} or {@link load} + * @typeParam T - The type of the value the document contains + */ export type InitOptions = { + /** The actor ID to use for this document, a random one will be generated if `null` is passed */ actor?: ActorId, freeze?: boolean, + /** A callback which will be called with the initial patch once the document has finished loading */ patchCallback?: PatchCallback, }; interface InternalState { - handle: Automerge, - heads: Heads | undefined, - freeze: boolean, - patchCallback?: PatchCallback + handle: Automerge, + heads: Heads | undefined, + freeze: boolean, + patchCallback?: PatchCallback } -export function getBackend(doc: Doc) : Automerge { - return _state(doc).handle +/** @hidden */ +export function getBackend(doc: Doc): Automerge { + return _state(doc).handle } -function _state(doc: Doc, checkroot = true) : InternalState { - const state = Reflect.get(doc,STATE) - if (state === undefined || (checkroot && _obj(doc) !== "_root")) { - throw new RangeError("must be the document root") - } 
- return state +function _state(doc: Doc, checkroot = true): InternalState { + const state = Reflect.get(doc, STATE) + if (state === undefined || (checkroot && _obj(doc) !== "_root")) { + throw new RangeError("must be the document root") + } + return state } -function _frozen(doc: Doc) : boolean { - return Reflect.get(doc,FROZEN) === true +function _frozen(doc: Doc): boolean { + return Reflect.get(doc, FROZEN) === true } -function _trace(doc: Doc) : string | undefined { - return Reflect.get(doc,TRACE) +function _trace(doc: Doc): string | undefined { + return Reflect.get(doc, TRACE) } function _set_heads(doc: Doc, heads: Heads) { - _state(doc).heads = heads + _state(doc).heads = heads } function _clear_heads(doc: Doc) { - Reflect.set(doc,HEADS,undefined) - Reflect.set(doc,TRACE,undefined) + Reflect.set(doc, HEADS, undefined) + Reflect.set(doc, TRACE, undefined) } -function _obj(doc: Doc) : ObjID | null{ - if (!(typeof doc === 'object') || doc === null) { - return null - } - return Reflect.get(doc,OBJECT_ID) +function _obj(doc: Doc): ObjID | null { + if (!(typeof doc === 'object') || doc === null) { + return null + } + return Reflect.get(doc, OBJECT_ID) } -function _readonly(doc: Doc) : boolean { - return Reflect.get(doc,READ_ONLY) !== false +function _readonly(doc: Doc): boolean { + return Reflect.get(doc, READ_ONLY) !== false } -function importOpts(_actor?: ActorId | InitOptions) : InitOptions { - if (typeof _actor === 'object') { - return _actor - } else { - return { actor: _actor } - } +function importOpts(_actor?: ActorId | InitOptions): InitOptions { + if (typeof _actor === 'object') { + return _actor + } else { + return {actor: _actor} + } } -export function init(_opts?: ActorId | InitOptions) : Doc{ - let opts = importOpts(_opts) - let freeze = !!opts.freeze - let patchCallback = opts.patchCallback - const handle = ApiHandler.create(opts.actor) - handle.enablePatches(true) - //@ts-ignore - handle.registerDatatype("counter", (n) => new Counter(n)) - 
//@ts-ignore - handle.registerDatatype("text", (n) => new Text(n)) - //@ts-ignore - const doc = handle.materialize("/", undefined, { handle, heads: undefined, freeze, patchCallback }) - //@ts-ignore - return doc +/** + * Create a new automerge document + * + * @typeParam T - The type of value contained in the document. This will be the + * type that is passed to the change closure in {@link change} + * @param _opts - Either an actorId or an {@link InitOptions} (which may + * contain an actorId). If this is null the document will be initialised with a + * random actor ID + */ +export function init(_opts?: ActorId | InitOptions): Doc { + let opts = importOpts(_opts) + let freeze = !!opts.freeze + let patchCallback = opts.patchCallback + const handle = ApiHandler.create(opts.actor) + handle.enablePatches(true) + //@ts-ignore + handle.registerDatatype("counter", (n) => new Counter(n)) + //@ts-ignore + handle.registerDatatype("text", (n) => new Text(n)) + //@ts-ignore + const doc = handle.materialize("/", undefined, {handle, heads: undefined, freeze, patchCallback}) + //@ts-ignore + return doc } -export function clone(doc: Doc) : Doc { - const state = _state(doc) - const handle = state.heads ? state.handle.forkAt(state.heads) : state.handle.fork() - //@ts-ignore - const clonedDoc : any = handle.materialize("/", undefined, { ... state, handle }) +/** + * Make a copy of an automerge document. + */ +export function clone(doc: Doc): Doc { + const state = _state(doc) + const handle = state.heads ? state.handle.forkAt(state.heads) : state.handle.fork() + //@ts-ignore + const clonedDoc: any = handle.materialize("/", undefined, {...state, handle}) - return clonedDoc + return clonedDoc } +/** Explicity free the memory backing a document. 
Note that this is note + * necessary in environments which support + * [`FinalizationRegistry`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry) + */ export function free(doc: Doc) { - return _state(doc).handle.free() + return _state(doc).handle.free() } +/** + * Create an automerge document from a POJO + * + * @param initialState - The initial state which will be copied into the document + * @typeParam T - The type of the value passed to `from` _and_ the type the resulting document will contain + * @typeParam actor - The actor ID of the resulting document, if this is null a random actor ID will be used + * + * @example + * ``` + * const doc = automerge.from({ + * tasks: [ + * {description: "feed dogs", done: false} + * ] + * }) + * ``` + */ export function from>(initialState: T | Doc, actor?: ActorId): Doc { return change(init(actor), (d) => Object.assign(d, initialState)) } +/** + * Update the contents of an automerge document + * @typeParam T - The type of the value contained in the document + * @param doc - The document to update + * @param options - Either a message, an {@link ChangeOptions}, or a {@link ChangeFn} + * @param callback - A `ChangeFn` to be used if `options` was a `string` + * + * Note that if the second argument is a function it will be used as the `ChangeFn` regardless of what the third argument is. 
+ * + * @example A simple change + * ``` + * let doc1 = automerge.init() + * doc1 = automerge.change(doc1, d => { + * d.key = "value" + * }) + * assert.equal(doc1.key, "value") + * ``` + * + * @example A change with a message + * + * ``` + * doc1 = automerge.change(doc1, "add another value", d => { + * d.key2 = "value2" + * }) + * ``` + * + * @example A change with a message and a timestamp + * + * ``` + * doc1 = automerge.change(doc1, {message: "add another value", timestamp: 1640995200}, d => { + * d.key2 = "value2" + * }) + * ``` + * + * @example responding to a patch callback + * ``` + * let patchedPath + * let patchCallback = patch => { + * patchedPath = patch.path + * } + * doc1 = automerge.change(doc1, {message, "add another value", timestamp: 1640995200, patchCallback}, d => { + * d.key2 = "value2" + * }) + * assert.equal(patchedPath, ["key2"]) + * ``` + */ export function change(doc: Doc, options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc { - if (typeof options === 'function') { - return _change(doc, {}, options) - } else if (typeof callback === 'function') { - if (typeof options === "string") { - options = { message: options } + if (typeof options === 'function') { + return _change(doc, {}, options) + } else if (typeof callback === 'function') { + if (typeof options === "string") { + options = {message: options} + } + return _change(doc, options, callback) + } else { + throw RangeError("Invalid args for change") } - return _change(doc, options, callback) - } else { - throw RangeError("Invalid args for change") - } } function progressDocument(doc: Doc, heads: Heads, callback?: PatchCallback): Doc { - let state = _state(doc) - let nextState = { ... 
state, heads: undefined }; - // @ts-ignore - let nextDoc = state.handle.applyPatches(doc, nextState, callback) - state.heads = heads - if (nextState.freeze) { Object.freeze(nextDoc) } - return nextDoc + let state = _state(doc) + let nextState = {...state, heads: undefined}; + // @ts-ignore + let nextDoc = state.handle.applyPatches(doc, nextState, callback) + state.heads = heads + if (nextState.freeze) {Object.freeze(nextDoc)} + return nextDoc } function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): Doc { - if (typeof callback !== "function") { - throw new RangeError("invalid change function"); - } - - const state = _state(doc) - - if (doc === undefined || state === undefined) { - throw new RangeError("must be the document root"); - } - if (state.heads) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (_readonly(doc) === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - try { - state.heads = heads - const root : T = rootProxy(state.handle); - callback(root) - if (state.handle.pendingOps() === 0) { - state.heads = undefined - return doc - } else { - state.handle.commit(options.message, options.time) - return progressDocument(doc, heads, options.patchCallback || state.patchCallback); + if (typeof callback !== "function") { + throw new RangeError("invalid change function"); } - } catch (e) { - //console.log("ERROR: ",e) - state.heads = undefined - state.handle.rollback() - throw e - } -} -export function emptyChange(doc: Doc, options: ChangeOptions) { - if (options === undefined) { - options = {} - } - if (typeof options === "string") { - options = { message: options } - } + const state = _state(doc) - const state = _state(doc) - - if (state.heads) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (_readonly(doc) === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - - const 
heads = state.handle.getHeads() - state.handle.commit(options.message, options.time) - return progressDocument(doc, heads) -} - -export function load(data: Uint8Array, _opts?: ActorId | InitOptions) : Doc { - const opts = importOpts(_opts) - const actor = opts.actor - const patchCallback = opts.patchCallback - const handle = ApiHandler.load(data, actor) - handle.enablePatches(true) - //@ts-ignore - handle.registerDatatype("counter", (n) => new Counter(n)) - //@ts-ignore - handle.registerDatatype("text", (n) => new Text(n)) - //@ts-ignore - const doc : any = handle.materialize("/", undefined, { handle, heads: undefined, patchCallback }) - return doc -} - -export function loadIncremental(doc: Doc, data: Uint8Array, opts?: ApplyOptions) : Doc { - if (!opts) { opts = {} } - const state = _state(doc) - if (state.heads) { - throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); - } - if (_readonly(doc) === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - state.handle.loadIncremental(data) - return progressDocument(doc, heads, opts.patchCallback || state.patchCallback) -} - -export function save(doc: Doc) : Uint8Array { - return _state(doc).handle.save() -} - -export function merge(local: Doc, remote: Doc) : Doc { - const localState = _state(local) - - if (localState.heads) { - throw new RangeError("Attempting to change an out of date document - set at: " + _trace(local)); - } - const heads = localState.handle.getHeads() - const remoteState = _state(remote) - const changes = localState.handle.getChangesAdded(remoteState.handle) - localState.handle.applyChanges(changes) - return progressDocument(local, heads, localState.patchCallback) -} - -export function getActorId(doc: Doc) : ActorId { - const state = _state(doc) - return state.handle.getActorId() -} - -type Conflicts = { [key: string]: AutomergeValue } - -function conflictAt(context : Automerge, objectId: 
ObjID, prop: Prop) : Conflicts | undefined { - const values = context.getAll(objectId, prop) - if (values.length <= 1) { - return - } - const result : Conflicts = {} - for (const fullVal of values) { - switch (fullVal[0]) { - case "map": - result[fullVal[1]] = mapProxy(context, fullVal[1], [ prop ], true) - break; - case "list": - result[fullVal[1]] = listProxy(context, fullVal[1], [ prop ], true) - break; - case "text": - result[fullVal[1]] = textProxy(context, fullVal[1], [ prop ], true) - break; - //case "table": - //case "cursor": - case "str": - case "uint": - case "int": - case "f64": - case "boolean": - case "bytes": - case "null": - result[fullVal[2]] = fullVal[1] - break; - case "counter": - result[fullVal[2]] = new Counter(fullVal[1]) - break; - case "timestamp": - result[fullVal[2]] = new Date(fullVal[1]) - break; - default: - throw RangeError(`datatype ${fullVal[0]} unimplemented`) + if (doc === undefined || state === undefined) { + throw new RangeError("must be the document root"); + } + if (state.heads) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + try { + state.heads = heads + const root: T = rootProxy(state.handle); + callback(root) + if (state.handle.pendingOps() === 0) { + state.heads = undefined + return doc + } else { + state.handle.commit(options.message, options.time) + return progressDocument(doc, heads, options.patchCallback || state.patchCallback); } - } - return result + } catch (e) { + //console.log("ERROR: ",e) + state.heads = undefined + state.handle.rollback() + throw e + } } -export function getConflicts(doc: Doc, prop: Prop) : Conflicts | undefined { - const state = _state(doc, false) - const objectId = _obj(doc) - if (objectId != null) { - return conflictAt(state.handle, objectId, prop) - } else { - return undefined - } +/** + * Make a change to a 
document which does not modify the document + * + * @param doc - The doc to add the empty change to + * @param options - Either a message or a {@link ChangeOptions} for the new change + * + * Why would you want to do this? One reason might be that you have merged + * changes from some other peers and you want to generate a change which + * depends on those merged changes so that you can sign the new change with all + * of the merged changes as part of the new change. + */ +export function emptyChange(doc: Doc, options: string | ChangeOptions) { + if (options === undefined) { + options = {} + } + if (typeof options === "string") { + options = {message: options} + } + + const state = _state(doc) + + if (state.heads) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + + const heads = state.handle.getHeads() + state.handle.commit(options.message, options.time) + return progressDocument(doc, heads) } -export function getLastLocalChange(doc: Doc) : Change | undefined { - const state = _state(doc) - return state.handle.getLastLocalChange() || undefined +/** + * Load an automerge document from a compressed document produce by {@link save} + * + * @typeParam T - The type of the value which is contained in the document. + * Note that no validation is done to make sure this type is in + * fact the type of the contained value so be a bit careful + * @param data - The compressed document + * @param _opts - Either an actor ID or some {@link InitOptions}, if the actor + * ID is null a random actor ID will be created + * + * Note that `load` will throw an error if passed incomplete content (for + * example if you are receiving content over the network and don't know if you + * have the complete document yet). If you need to handle incomplete content use + * {@link init} followed by {@link loadIncremental}. 
+ */ +export function load(data: Uint8Array, _opts?: ActorId | InitOptions): Doc { + const opts = importOpts(_opts) + const actor = opts.actor + const patchCallback = opts.patchCallback + const handle = ApiHandler.load(data, actor) + handle.enablePatches(true) + //@ts-ignore + handle.registerDatatype("counter", (n) => new Counter(n)) + //@ts-ignore + handle.registerDatatype("text", (n) => new Text(n)) + //@ts-ignore + const doc: any = handle.materialize("/", undefined, {handle, heads: undefined, patchCallback}) + return doc } -export function getObjectId(doc: any) : ObjID | null{ - return _obj(doc) +/** + * Load changes produced by {@link saveIncremental}, or partial changes + * + * @typeParam T - The type of the value which is contained in the document. + * Note that no validation is done to make sure this type is in + * fact the type of the contained value so be a bit careful + * @param data - The compressedchanges + * @param opts - an {@link ApplyOptions} + * + * This function is useful when staying up to date with a connected peer. + * Perhaps the other end sent you a full compresed document which you loaded + * with {@link load} and they're sending you the result of + * {@link getLastLocalChange} every time they make a change. + * + * Note that this function will succesfully load the results of {@link save} as + * well as {@link getLastLocalChange} or any other incremental change. 
+ */ +export function loadIncremental(doc: Doc, data: Uint8Array, opts?: ApplyOptions): Doc { + if (!opts) {opts = {}} + const state = _state(doc) + if (state.heads) { + throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); + } + if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + state.handle.loadIncremental(data) + return progressDocument(doc, heads, opts.patchCallback || state.patchCallback) } -export function getChanges(oldState: Doc, newState: Doc) : Change[] { - const o = _state(oldState) - const n = _state(newState) - return n.handle.getChanges(getHeads(oldState)) +/** + * Export the contents of a document to a compressed format + * + * @param doc - The doc to save + * + * The returned bytes can be passed to {@link load} or {@link loadIncremental} + */ +export function save(doc: Doc): Uint8Array { + return _state(doc).handle.save() } -export function getAllChanges(doc: Doc) : Change[] { - const state = _state(doc) - return state.handle.getChanges([]) +/** + * Merge `local` into `remote` + * @typeParam T - The type of values contained in each document + * @param local - The document to merge changes into + * @param remote - The document to merge changes from + * + * @returns - The merged document + * + * Often when you are merging documents you will also need to clone them. Both + * arguments to `merge` are frozen after the call so you can no longer call + * mutating methods (such as {@link change}) on them. The symtom of this will be + * an error which says "Attempting to change an out of date document". To + * overcome this call {@link clone} on the argument before passing it to {@link + * merge}. 
+ */ +export function merge(local: Doc, remote: Doc): Doc { + const localState = _state(local) + + if (localState.heads) { + throw new RangeError("Attempting to change an out of date document - set at: " + _trace(local)); + } + const heads = localState.handle.getHeads() + const remoteState = _state(remote) + const changes = localState.handle.getChangesAdded(remoteState.handle) + localState.handle.applyChanges(changes) + return progressDocument(local, heads, localState.patchCallback) } -export function applyChanges(doc: Doc, changes: Change[], opts?: ApplyOptions) : [Doc] { - const state = _state(doc) - if (!opts) { opts = {} } - if (state.heads) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (_readonly(doc) === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads(); - state.handle.applyChanges(changes) - state.heads = heads; - return [progressDocument(doc, heads, opts.patchCallback || state.patchCallback )] +/** + * Get the actor ID associated with the document + */ +export function getActorId(doc: Doc): ActorId { + const state = _state(doc) + return state.handle.getActorId() } -export function getHistory(doc: Doc) : State[] { - const history = getAllChanges(doc) - return history.map((change, index) => ({ - get change () { - return decodeChange(change) - }, - get snapshot () { - const [state] = applyChanges(init(), history.slice(0, index + 1)) - return state - } +/** + * The type of conflicts for particular key or index + * + * Maps and sequences in automerge can contain conflicting values for a + * particular key or index. In this case {@link getConflicts} can be used to + * obtain a `Conflicts` representing the multiple values present for the property + * + * A `Conflicts` is a map from a unique (per property or index) key to one of + * the possible conflicting values for the given property. 
+ */ +type Conflicts = {[key: string]: AutomergeValue} + +function conflictAt(context: Automerge, objectId: ObjID, prop: Prop): Conflicts | undefined { + const values = context.getAll(objectId, prop) + if (values.length <= 1) { + return + } + const result: Conflicts = {} + for (const fullVal of values) { + switch (fullVal[0]) { + case "map": + result[fullVal[1]] = mapProxy(context, fullVal[1], [prop], true) + break; + case "list": + result[fullVal[1]] = listProxy(context, fullVal[1], [prop], true) + break; + case "text": + result[fullVal[1]] = textProxy(context, fullVal[1], [prop], true) + break; + //case "table": + //case "cursor": + case "str": + case "uint": + case "int": + case "f64": + case "boolean": + case "bytes": + case "null": + result[fullVal[2]] = fullVal[1] + break; + case "counter": + result[fullVal[2]] = new Counter(fullVal[1]) + break; + case "timestamp": + result[fullVal[2]] = new Date(fullVal[1]) + break; + default: + throw RangeError(`datatype ${fullVal[0]} unimplemented`) + } + } + return result +} + +/** + * Get the conflicts associated with a property + * + * The values of properties in a map in automerge can be conflicted if there + * are concurrent "put" operations to the same key. Automerge chooses one value + * arbitrarily (but deterministically, any two nodes who have the same set of + * changes will choose the same value) from the set of conflicting values to + * present as the value of the key. + * + * Sometimes you may want to examine these conflicts, in this case you can use + * {@link getConflicts} to get the conflicts for the key. 
+ * + * @example + * ``` + * import * as automerge from "@automerge/automerge" + * + * type Profile = { + * pets: Array<{name: string, type: string}> + * } + * + * let doc1 = automerge.init("aaaa") + * doc1 = automerge.change(doc1, d => { + * d.pets = [{name: "Lassie", type: "dog"}] + * }) + * let doc2 = automerge.init("bbbb") + * doc2 = automerge.merge(doc2, automerge.clone(doc1)) + * + * doc2 = automerge.change(doc2, d => { + * d.pets[0].name = "Beethoven" + * }) + * + * doc1 = automerge.change(doc1, d => { + * d.pets[0].name = "Babe" + * }) + * + * const doc3 = automerge.merge(doc1, doc2) + * + * // Note that here we pass `doc3.pets`, not `doc3` + * let conflicts = automerge.getConflicts(doc3.pets[0], "name") + * + * // The two conflicting values are the keys of the conflicts object + * assert.deepEqual(Object.values(conflicts), ["Babe", Beethoven"]) + * ``` + */ +export function getConflicts(doc: Doc, prop: Prop): Conflicts | undefined { + const state = _state(doc, false) + const objectId = _obj(doc) + if (objectId != null) { + return conflictAt(state.handle, objectId, prop) + } else { + return undefined + } +} + +/** + * Get the binary representation of the last change which was made to this doc + * + * This is most useful when staying in sync with other peers, every time you + * make a change locally via {@link change} you immediately call {@link + * getLastLocalChange} and send the result over the network to other peers. + */ +export function getLastLocalChange(doc: Doc): Change | undefined { + const state = _state(doc) + return state.handle.getLastLocalChange() || undefined +} + +/** + * Return the object ID of an arbitrary javascript value + * + * This is useful to determine if something is actually an automerge document, + * if `doc` is not an automerge document this will return null. + */ +export function getObjectId(doc: any): ObjID | null { + return _obj(doc) +} + +/** + * Get the changes which are in `newState` but not in `oldState`. 
The returned + * changes can be loaded in `oldState` via {@link applyChanges}. + * + * Note that this will crash if there are changes in `oldState` which are not in `newState`. + */ +export function getChanges(oldState: Doc, newState: Doc): Change[] { + const o = _state(oldState) + const n = _state(newState) + return n.handle.getChanges(getHeads(oldState)) +} + +/** + * Get all the changes in a document + * + * This is different to {@link save} because the output is an array of changes + * which can be individually applied via {@link applyChanges}` + * + */ +export function getAllChanges(doc: Doc): Change[] { + const state = _state(doc) + return state.handle.getChanges([]) +} + +/** + * Apply changes received from another document + * + * `doc` will be updated to reflect the `changes`. If there are changes which + * we do not have dependencies for yet those will be stored in the document and + * applied when the depended on changes arrive. + * + * You can use the {@link ApplyOptions} to pass a patchcallback which will be + * informed of any changes which occur as a result of applying the changes + * + */ +export function applyChanges(doc: Doc, changes: Change[], opts?: ApplyOptions): [Doc] { + const state = _state(doc) + if (!opts) {opts = {}} + if (state.heads) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads(); + state.handle.applyChanges(changes) + state.heads = heads; + return [progressDocument(doc, heads, opts.patchCallback || state.patchCallback)] +} + +/** @hidden */ +export function getHistory(doc: Doc): State[] { + const history = getAllChanges(doc) + return history.map((change, index) => ({ + get change() { + return decodeChange(change) + }, + get snapshot() { + const [state] = applyChanges(init(), history.slice(0, index + 1)) + return state + } }) - ) + ) } +/** @hidden */ // 
FIXME : no tests // FIXME can we just use deep equals now? -export function equals(val1: unknown, val2: unknown) : boolean { - if (!isObject(val1) || !isObject(val2)) return val1 === val2 - const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort() - if (keys1.length !== keys2.length) return false - for (let i = 0; i < keys1.length; i++) { - if (keys1[i] !== keys2[i]) return false - if (!equals(val1[keys1[i]], val2[keys2[i]])) return false - } - return true +export function equals(val1: unknown, val2: unknown): boolean { + if (!isObject(val1) || !isObject(val2)) return val1 === val2 + const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort() + if (keys1.length !== keys2.length) return false + for (let i = 0; i < keys1.length; i++) { + if (keys1[i] !== keys2[i]) return false + if (!equals(val1[keys1[i]], val2[keys2[i]])) return false + } + return true } -export function encodeSyncState(state: SyncState) : Uint8Array { - const sync = ApiHandler.importSyncState(state) - const result = ApiHandler.encodeSyncState(sync) - sync.free() - return result +/** + * encode a {@link SyncState} into binary to send over the network + * + * @group sync + * */ +export function encodeSyncState(state: SyncState): Uint8Array { + const sync = ApiHandler.importSyncState(state) + const result = ApiHandler.encodeSyncState(sync) + sync.free() + return result } -export function decodeSyncState(state: Uint8Array) : SyncState { - let sync = ApiHandler.decodeSyncState(state) - let result = ApiHandler.exportSyncState(sync) - sync.free() - return result +/** + * Decode some binary data into a {@link SyncState} + * + * @group sync + */ +export function decodeSyncState(state: Uint8Array): SyncState { + let sync = ApiHandler.decodeSyncState(state) + let result = ApiHandler.exportSyncState(sync) + sync.free() + return result } -export function generateSyncMessage(doc: Doc, inState: SyncState) : [ SyncState, SyncMessage | null ] { - const state = _state(doc) - const 
syncState = ApiHandler.importSyncState(inState) - const message = state.handle.generateSyncMessage(syncState) - const outState = ApiHandler.exportSyncState(syncState) - return [ outState, message ] +/** + * Generate a sync message to send to the peer represented by `inState` + * @param doc - The doc to generate messages about + * @param inState - The {@link SyncState} representing the peer we are talking to + * + * @group sync + * + * @returns An array of `[newSyncState, syncMessage | null]` where + * `newSyncState` should replace `inState` and `syncMessage` should be sent to + * the peer if it is not null. If `syncMessage` is null then we are up to date. + */ +export function generateSyncMessage(doc: Doc, inState: SyncState): [SyncState, SyncMessage | null] { + const state = _state(doc) + const syncState = ApiHandler.importSyncState(inState) + const message = state.handle.generateSyncMessage(syncState) + const outState = ApiHandler.exportSyncState(syncState) + return [outState, message] } -export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage, opts?: ApplyOptions) : [ Doc, SyncState, null ] { - const syncState = ApiHandler.importSyncState(inState) - if (!opts) { opts = {} } - const state = _state(doc) - if (state.heads) { - throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); - } - if (_readonly(doc) === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - state.handle.receiveSyncMessage(syncState, message) - const outSyncState = ApiHandler.exportSyncState(syncState) - return [progressDocument(doc, heads, opts.patchCallback || state.patchCallback), outSyncState, null]; +/** + * Update a document and our sync state on receiving a sync message + * + * @group sync + * + * @param doc - The doc the sync message is about + * @param inState - The {@link SyncState} for the peer we are communicating with + * @param message - The 
message which was received + * @param opts - Any {@link ApplyOption}s, used for passing a + * {@link PatchCallback} which will be informed of any changes + * in `doc` which occur because of the received sync message. + * + * @returns An array of `[newDoc, newSyncState, syncMessage | null]` where + * `newDoc` is the updated state of `doc`, `newSyncState` should replace + * `inState` and `syncMessage` should be sent to the peer if it is not null. If + * `syncMessage` is null then we are up to date. + */ +export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage, opts?: ApplyOptions): [Doc, SyncState, null] { + const syncState = ApiHandler.importSyncState(inState) + if (!opts) {opts = {}} + const state = _state(doc) + if (state.heads) { + throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); + } + if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + state.handle.receiveSyncMessage(syncState, message) + const outSyncState = ApiHandler.exportSyncState(syncState) + return [progressDocument(doc, heads, opts.patchCallback || state.patchCallback), outSyncState, null]; } -export function initSyncState() : SyncState { - return ApiHandler.exportSyncState(ApiHandler.initSyncState()) +/** + * Create a new, blank {@link SyncState} + * + * When communicating with a peer for the first time use this to generate a new + * {@link SyncState} for them + * + * @group sync + */ +export function initSyncState(): SyncState { + return ApiHandler.exportSyncState(ApiHandler.initSyncState()) } -export function encodeChange(change: DecodedChange) : Change { - return ApiHandler.encodeChange(change) +/** @hidden */ +export function encodeChange(change: DecodedChange): Change { + return ApiHandler.encodeChange(change) } -export function decodeChange(data: Change) : DecodedChange { - return ApiHandler.decodeChange(data) +/** @hidden */ 
+export function decodeChange(data: Change): DecodedChange { + return ApiHandler.decodeChange(data) } -export function encodeSyncMessage(message: DecodedSyncMessage) : SyncMessage { - return ApiHandler.encodeSyncMessage(message) +/** @hidden */ +export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { + return ApiHandler.encodeSyncMessage(message) } -export function decodeSyncMessage(message: SyncMessage) : DecodedSyncMessage { - return ApiHandler.decodeSyncMessage(message) +/** @hidden */ +export function decodeSyncMessage(message: SyncMessage): DecodedSyncMessage { + return ApiHandler.decodeSyncMessage(message) } -export function getMissingDeps(doc: Doc, heads: Heads) : Heads { - const state = _state(doc) - return state.handle.getMissingDeps(heads) +/** + * Get any changes in `doc` which are not dependencies of `heads` + */ +export function getMissingDeps(doc: Doc, heads: Heads): Heads { + const state = _state(doc) + return state.handle.getMissingDeps(heads) } -export function getHeads(doc: Doc) : Heads { - const state = _state(doc) - return state.heads || state.handle.getHeads() +/** + * Get the hashes of the heads of this document + */ +export function getHeads(doc: Doc): Heads { + const state = _state(doc) + return state.heads || state.handle.getHeads() } +/** @hidden */ export function dump(doc: Doc) { - const state = _state(doc) - state.handle.dump() + const state = _state(doc) + state.handle.dump() } +/** @hidden */ // FIXME - return T? 
-export function toJS(doc: Doc) : MaterializeValue { - const state = _state(doc) - // @ts-ignore - return state.handle.materialize("_root", state.heads, state) +export function toJS(doc: Doc): MaterializeValue { + const state = _state(doc) + // @ts-ignore + return state.handle.materialize("_root", state.heads, state) } -function isObject(obj: unknown) : obj is Record { - return typeof obj === 'object' && obj !== null +function isObject(obj: unknown): obj is Record { + return typeof obj === 'object' && obj !== null } -export type { API, SyncState, ActorId, Conflicts, Prop, Change, ObjID, DecodedChange, DecodedSyncMessage, Heads, MaterializeValue } +export type {API, SyncState, ActorId, Conflicts, Prop, Change, ObjID, DecodedChange, DecodedSyncMessage, Heads, MaterializeValue} diff --git a/javascript/src/types.ts b/javascript/src/types.ts index e75a3854..764d328c 100644 --- a/javascript/src/types.ts +++ b/javascript/src/types.ts @@ -1,11 +1,12 @@ +import { Text } from "./text" export { Text } from "./text" export { Counter } from "./counter" export { Int, Uint, Float64 } from "./numbers" import { Counter } from "./counter" -export type AutomergeValue = ScalarValue | { [key: string]: AutomergeValue } | Array +export type AutomergeValue = ScalarValue | { [key: string]: AutomergeValue } | Array | Text export type MapValue = { [key: string]: AutomergeValue } export type ListValue = Array export type TextValue = Array diff --git a/javascript/typedoc-readme.md b/javascript/typedoc-readme.md new file mode 100644 index 00000000..ba802912 --- /dev/null +++ b/javascript/typedoc-readme.md @@ -0,0 +1,165 @@ +# Automerge + +This library provides the core automerge data structure and sync algorithms. +Other libraries can be built on top of this one which provide IO and +persistence. + +An automerge document can be thought of as an immutable POJO (plain old javascript +object) which `automerge` tracks the history of, allowing it to be merged with +any other automerge document. 
+ +## Creating and modifying a document + +You can create a document with {@link init} or {@link from} and then make +changes to it with {@link change}, you can merge two documents with {@link +merge}. + +```javascript +import * as automerge from "@automerge/automerge" + +type DocType = {ideas: Array} + +let doc1 = automerge.init() +doc1 = automerge.change(doc1, d => { + d.ideas = [new automerge.Text("an immutable document")] +}) + +let doc2 = automerge.init() +doc2 = automerge.merge(doc2, automerge.clone(doc1)) +doc2 = automerge.change(doc2, d => { + d.ideas.push(new automerge.Text("which records its history")) +}) + +// Note the `automerge.clone` call, see the "cloning" section of this readme for +// more detail +doc1 = automerge.merge(doc1, automerge.clone(doc2)) +doc1 = automerge.change(doc1, d => { + d.ideas[0].deleteAt(13, 8) + d.ideas[0].insertAt(13, "object") +}) + +let doc3 = automerge.merge(doc1, doc2) +// doc3 is now {ideas: ["an immutable object", "which records its history"]} +``` + +## Applying changes from another document + +You can get a representation of the result of the last {@link change} you made +to a document with {@link getLastLocalChange} and you can apply that change to +another document using {@link applyChanges}. + +If you need to get just the changes which are in one document but not in another +you can use {@link getHeads} to get the heads of the document without the +changes and then {@link getMissingDeps}, passing the result of {@link getHeads} +on the document with the changes. + +## Saving and loading documents + +You can {@link save} a document to generate a compressed binary representation of +the document which can be loaded with {@link load}. 
If you have a document which +you have recently made changes to you can generate recent changes with {@link +saveIncremental}, this will generate all the changes since you last called +`saveIncremental`, the changes generated can be applied to another document with +{@link loadIncremental}. + +## Syncing + +The sync protocol is stateful. This means that we start by creating a {@link +SyncState} for each peer we are communicating with using {@link initSyncState}. +Then we generate a message to send to the peer by calling {@link +generateSyncMessage}. When we receive a message from the peer we call {@link +receiveSyncMessage}. Here's a simple example of a loop which just keeps two +peers in sync. + + +```javascript +let sync1 = automerge.initSyncState() +let msg: Uint8Array | null +[sync1, msg] = automerge.generateSyncMessage(doc1, sync1) + +while (true) { + if (msg != null) { + network.send(msg) + } + let resp: Uint8Array = network.receive() + [doc1, sync1, _ignore] = automerge.receiveSyncMessage(doc1, sync1, resp) + [sync1, msg] = automerge.generateSyncMessage(doc1, sync1) +} +``` + + +## Conflicts + +The only time conflicts occur in automerge documents is in concurrent +assignments to the same key in an object. In this case automerge +deterministically chooses an arbitrary value to present to the application but +you can examine the conflicts using {@link getConflicts}. 
+ +``` +import * as automerge from "@automerge/automerge" + +type Profile = { + pets: Array<{name: string, type: string}> +} + +let doc1 = automerge.init("aaaa") +doc1 = automerge.change(doc1, d => { + d.pets = [{name: "Lassie", type: "dog"}] +}) +let doc2 = automerge.init("bbbb") +doc2 = automerge.merge(doc2, automerge.clone(doc1)) + +doc2 = automerge.change(doc2, d => { + d.pets[0].name = "Beethoven" +}) + +doc1 = automerge.change(doc1, d => { + d.pets[0].name = "Babe" +}) + +const doc3 = automerge.merge(doc1, doc2) + +// Note that here we pass `doc3.pets`, not `doc3` +let conflicts = automerge.getConflicts(doc3.pets[0], "name") + +// The two conflicting values are the keys of the conflicts object +assert.deepEqual(Object.values(conflicts), ["Babe", "Beethoven"]) +``` + +## Actor IDs + +By default automerge will generate a random actor ID for you, but most methods +for creating a document allow you to set the actor ID. You can get the actor ID +associated with the document by calling {@link getActorId}. Actor IDs must not +be used in concurrent threads of execution - all changes by a given actor ID +are expected to be sequential. + + +## Listening to patches + +Sometimes you want to respond to changes made to an automerge document. In this +case you can use the {@link PatchCallback} type to receive notifications when +changes have been made. + +## Cloning + +Currently you cannot make mutating changes (i.e. call {@link change}) to a +document which you have two pointers to. For example, in this code: + +```javascript +let doc1 = automerge.init() +let doc2 = automerge.change(doc1, d => d.key = "value") +``` + +`doc1` and `doc2` are both pointers to the same state. 
Any attempt to call +mutating methods on `doc1` will now result in an error like + + Attempting to change an out of date document + +If you encounter this you need to clone the original document, the above sample +would work as: + +```javascript +let doc1 = automerge.init() +let doc2 = automerge.change(automerge.clone(doc1), d => d.key = "value") +``` From 1c6da6f9a391b53a4fe392f0f7d69bd44f586626 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 17 Oct 2022 01:09:13 +0100 Subject: [PATCH 608/730] Add JS worker config to Vite app example Vite apps which use SharedWorker of WebWorker require additional configuration to get WebAssembly imports to work effectively, add these to the example. --- javascript/examples/vite/README.md | 7 +++++++ javascript/examples/vite/vite.config.js | 7 +++++++ 2 files changed, 14 insertions(+) diff --git a/javascript/examples/vite/README.md b/javascript/examples/vite/README.md index a54195c7..efe44479 100644 --- a/javascript/examples/vite/README.md +++ b/javascript/examples/vite/README.md @@ -21,6 +21,13 @@ import topLevelAwait from "vite-plugin-top-level-await" export default defineConfig({ plugins: [topLevelAwait(), wasm()], + + // This is only necessary if you are using `SharedWorker` or `WebWorker`, as + // documented in https://vitejs.dev/guide/features.html#import-with-constructors + worker: { + format: "es", + plugins: [topLevelAwait(), wasm()] + }, optimizeDeps: { // This is necessary because otherwise `vite dev` includes two separate diff --git a/javascript/examples/vite/vite.config.js b/javascript/examples/vite/vite.config.js index 2076b3ff..9716d674 100644 --- a/javascript/examples/vite/vite.config.js +++ b/javascript/examples/vite/vite.config.js @@ -5,6 +5,13 @@ import topLevelAwait from "vite-plugin-top-level-await" export default defineConfig({ plugins: [topLevelAwait(), wasm()], + // This is only necessary if you are using `SharedWorker` or `WebWorker`, as + // documented in 
https://vitejs.dev/guide/features.html#import-with-constructors + worker: { + format: "es", + plugins: [topLevelAwait(), wasm()] + }, + optimizeDeps: { // This is necessary because otherwise `vite dev` includes two separate // versions of the JS wrapper. This causes problems because the JS From c602e9e7ed1f7b739105194147d67c0b5d667e86 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 17 Oct 2022 16:20:25 -0500 Subject: [PATCH 609/730] update build to match directory restructuring --- .gitignore | 1 - rust/automerge-wasm/package.json | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index f859e0a3..baad0a63 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,3 @@ -/target /.direnv perf.* /Cargo.lock diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 7363bcde..bc1538a9 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -33,7 +33,7 @@ "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=bundler yarn target", "target": "rimraf ./$TARGET && yarn compile && yarn bindgen && yarn opt", "compile": "cargo build --target wasm32-unknown-unknown --profile $PROFILE", - "bindgen": "wasm-bindgen --no-typescript --weak-refs --target $TARGET --out-dir $TARGET ../../target/wasm32-unknown-unknown/$TARGET_DIR/automerge_wasm.wasm", + "bindgen": "wasm-bindgen --no-typescript --weak-refs --target $TARGET --out-dir $TARGET ../target/wasm32-unknown-unknown/$TARGET_DIR/automerge_wasm.wasm", "opt": "wasm-opt -Oz $TARGET/automerge_wasm_bg.wasm -o $TARGET/automerge_wasm_bg.wasm", "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, From 38205fbcc2e917183f6ff1f593c2d14f4b2e097c Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 17 Oct 2022 17:04:30 -0500 Subject: [PATCH 610/730] enableFreeze() instead of implicit freeze --- javascript/src/index.ts | 26 ++++++++--------- javascript/test/basic_test.ts | 5 +++- 
rust/automerge-wasm/index.d.ts | 6 ++-- rust/automerge-wasm/src/interop.rs | 43 ++++++++++------------------- rust/automerge-wasm/src/lib.rs | 41 +++++++++++++++++---------- rust/automerge-wasm/src/observer.rs | 4 ++- 6 files changed, 65 insertions(+), 60 deletions(-) diff --git a/javascript/src/index.ts b/javascript/src/index.ts index 2e2ad436..3dcf2cc4 100644 --- a/javascript/src/index.ts +++ b/javascript/src/index.ts @@ -156,13 +156,10 @@ export function init(_opts?: ActorId | InitOptions): Doc { let patchCallback = opts.patchCallback const handle = ApiHandler.create(opts.actor) handle.enablePatches(true) - //@ts-ignore + handle.enableFreeze(!!opts.freeze) handle.registerDatatype("counter", (n) => new Counter(n)) - //@ts-ignore handle.registerDatatype("text", (n) => new Text(n)) - //@ts-ignore - const doc = handle.materialize("/", undefined, {handle, heads: undefined, freeze, patchCallback}) - //@ts-ignore + const doc = handle.materialize("/", undefined, {handle, heads: undefined, freeze, patchCallback}) as Doc return doc } @@ -172,7 +169,6 @@ export function init(_opts?: ActorId | InitOptions): Doc { export function clone(doc: Doc): Doc { const state = _state(doc) const handle = state.heads ? 
state.handle.forkAt(state.heads) : state.handle.fork() - //@ts-ignore const clonedDoc: any = handle.materialize("/", undefined, {...state, handle}) return clonedDoc @@ -367,12 +363,10 @@ export function load(data: Uint8Array, _opts?: ActorId | InitOptions): Doc const patchCallback = opts.patchCallback const handle = ApiHandler.load(data, actor) handle.enablePatches(true) - //@ts-ignore + handle.enableFreeze(!!opts.freeze) handle.registerDatatype("counter", (n) => new Counter(n)) - //@ts-ignore handle.registerDatatype("text", (n) => new Text(n)) - //@ts-ignore - const doc: any = handle.materialize("/", undefined, {handle, heads: undefined, patchCallback}) + const doc: any = handle.materialize("/", undefined, {handle, heads: undefined, patchCallback}) as Doc return doc } @@ -793,13 +787,17 @@ export function dump(doc: Doc) { } /** @hidden */ -// FIXME - return T? -export function toJS(doc: Doc): MaterializeValue { +export function toJS(doc: Doc): T { const state = _state(doc) - // @ts-ignore - return state.handle.materialize("_root", state.heads, state) + const enabled = state.handle.enableFreeze(false) + const result = state.handle.materialize() + state.handle.enableFreeze(enabled) + return result as T } +export function isAutomerge(doc: unknown): boolean { + return getObjectId(doc) === "_root" && !!Reflect.get(doc as Object, STATE) +} function isObject(obj: unknown): obj is Record { return typeof obj === 'object' && obj !== null diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts index e17fc45e..130fc6ec 100644 --- a/javascript/test/basic_test.ts +++ b/javascript/test/basic_test.ts @@ -22,12 +22,15 @@ describe('Automerge', () => { }) it('can detect an automerge doc with isAutomerge()', () => { - let doc1 = Automerge.from({ sub: { object: true } }) + const doc1 = Automerge.from({ sub: { object: true } }) assert(Automerge.isAutomerge(doc1)) assert(!Automerge.isAutomerge(doc1.sub)) assert(!Automerge.isAutomerge("String")) 
assert(!Automerge.isAutomerge({ sub: { object: true }})) assert(!Automerge.isAutomerge(undefined)) + const jsObj = Automerge.toJS(doc1) + assert(!Automerge.isAutomerge(jsObj)) + assert.deepEqual(jsObj, doc1) }) it('it should recursively freeze the document if requested', () => { diff --git a/rust/automerge-wasm/index.d.ts b/rust/automerge-wasm/index.d.ts index 4339f2b8..e6dbd6c8 100644 --- a/rust/automerge-wasm/index.d.ts +++ b/rust/automerge-wasm/index.d.ts @@ -164,7 +164,8 @@ export class Automerge { keys(obj: ObjID, heads?: Heads): string[]; text(obj: ObjID, heads?: Heads): string; length(obj: ObjID, heads?: Heads): number; - materialize(obj?: ObjID, heads?: Heads, metadata?: unknown, freeze?: bool): MaterializeValue; + materialize(obj?: ObjID, heads?: Heads, metadata?: unknown): MaterializeValue; + toJS(): MaterializeValue; // transactions commit(message?: string, time?: number): Hash; @@ -174,7 +175,8 @@ export class Automerge { rollback(): number; // patches - enablePatches(enable: boolean): void; + enablePatches(enable: boolean): boolean; + enableFreeze(enable: boolean): boolean; registerDatatype(datatype: string, callback: Function): void; popPatches(): Patch[]; diff --git a/rust/automerge-wasm/src/interop.rs b/rust/automerge-wasm/src/interop.rs index c2b8c6b7..ed76f3a7 100644 --- a/rust/automerge-wasm/src/interop.rs +++ b/rust/automerge-wasm/src/interop.rs @@ -370,23 +370,20 @@ impl Automerge { datatype: Datatype, heads: Option<&Vec>, meta: &JsValue, - freeze: bool, ) -> Result { let result = if datatype.is_sequence() { self.wrap_object( - self.export_list(obj, heads, meta, freeze)?, + self.export_list(obj, heads, meta)?, datatype, &obj.to_string().into(), meta, - freeze, )? } else { self.wrap_object( - self.export_map(obj, heads, meta, freeze)?, + self.export_map(obj, heads, meta)?, datatype, &obj.to_string().into(), meta, - freeze, )? 
}; Ok(result.into()) @@ -397,7 +394,6 @@ impl Automerge { obj: &ObjId, heads: Option<&Vec>, meta: &JsValue, - freeze: bool, ) -> Result { let keys = self.doc.keys(obj); let map = Object::new(); @@ -409,7 +405,7 @@ impl Automerge { }; if let Ok(Some((val, id))) = val_and_id { let subval = match val { - Value::Object(o) => self.export_object(&id, o.into(), heads, meta, freeze)?, + Value::Object(o) => self.export_object(&id, o.into(), heads, meta)?, Value::Scalar(_) => self.export_value(alloc(&val))?, }; Reflect::set(&map, &k.into(), &subval)?; @@ -424,7 +420,6 @@ impl Automerge { obj: &ObjId, heads: Option<&Vec>, meta: &JsValue, - freeze: bool, ) -> Result { let len = self.doc.length(obj); let array = Array::new(); @@ -436,7 +431,7 @@ impl Automerge { }; if let Ok(Some((val, id))) = val_and_id { let subval = match val { - Value::Object(o) => self.export_object(&id, o.into(), heads, meta, freeze)?, + Value::Object(o) => self.export_object(&id, o.into(), heads, meta)?, Value::Scalar(_) => self.export_value(alloc(&val))?, }; array.push(&subval); @@ -509,10 +504,9 @@ impl Automerge { (datatype, raw_value): (Datatype, JsValue), id: &ObjId, meta: &JsValue, - freeze: bool, ) -> Result { if let Ok(obj) = raw_value.clone().dyn_into::() { - let result = self.wrap_object(obj, datatype, &id.to_string().into(), meta, freeze)?; + let result = self.wrap_object(obj, datatype, &id.to_string().into(), meta)?; Ok(result.into()) } else { self.export_value((datatype, raw_value)) @@ -525,7 +519,6 @@ impl Automerge { datatype: Datatype, id: &JsValue, meta: &JsValue, - freeze: bool, ) -> Result { let value = if let Some(function) = self.external_types.get(&datatype) { let wrapped_value = function.call1(&JsValue::undefined(), &value)?; @@ -545,7 +538,7 @@ impl Automerge { } set_hidden_value(&value, &Symbol::for_(DATATYPE_SYMBOL), datatype)?; set_hidden_value(&value, &Symbol::for_(META_SYMBOL), meta)?; - if freeze { + if self.freeze { Object::freeze(&value); } Ok(value) @@ -556,19 +549,16 @@ 
impl Automerge { array: &Object, patch: &Patch, meta: &JsValue, - freeze: bool, ) -> Result { let result = Array::from(array); // shallow copy match patch { Patch::PutSeq { index, value, .. } => { - let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta, freeze)?; + let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; Reflect::set(&result, &(*index as f64).into(), &sub_val)?; Ok(result.into()) } - Patch::DeleteSeq { index, .. } => self.sub_splice(result, *index, 1, &[], meta, freeze), - Patch::Insert { index, values, .. } => { - self.sub_splice(result, *index, 0, values, meta, freeze) - } + Patch::DeleteSeq { index, .. } => self.sub_splice(result, *index, 1, &[], meta), + Patch::Insert { index, values, .. } => self.sub_splice(result, *index, 0, values, meta), Patch::Increment { prop, value, .. } => { if let Prop::Seq(index) = prop { let index = (*index as f64).into(); @@ -596,12 +586,11 @@ impl Automerge { map: &Object, patch: &Patch, meta: &JsValue, - freeze: bool, ) -> Result { let result = Object::assign(&Object::new(), map); // shallow copy match patch { Patch::PutMap { key, value, .. 
} => { - let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta, freeze)?; + let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; Reflect::set(&result, &key.into(), &sub_val)?; Ok(result) } @@ -638,13 +627,12 @@ impl Automerge { patch: &Patch, depth: usize, meta: &JsValue, - freeze: bool, ) -> Result { let (inner, datatype, id) = self.unwrap_object(&obj)?; let prop = patch.path().get(depth).map(|p| prop_to_js(&p.1)); let result = if let Some(prop) = prop { if let Ok(sub_obj) = Reflect::get(&inner, &prop)?.dyn_into::() { - let new_value = self.apply_patch(sub_obj, patch, depth + 1, meta, freeze)?; + let new_value = self.apply_patch(sub_obj, patch, depth + 1, meta)?; let result = shallow_copy(&inner); Reflect::set(&result, &prop, &new_value)?; Ok(result) @@ -654,12 +642,12 @@ impl Automerge { return Ok(obj); } } else if Array::is_array(&inner) { - self.apply_patch_to_array(&inner, patch, meta, freeze) + self.apply_patch_to_array(&inner, patch, meta) } else { - self.apply_patch_to_map(&inner, patch, meta, freeze) + self.apply_patch_to_map(&inner, patch, meta) }?; - self.wrap_object(result, datatype, &id, meta, freeze) + self.wrap_object(result, datatype, &id, meta) } fn sub_splice( @@ -669,11 +657,10 @@ impl Automerge { num_del: usize, values: &[(Value<'_>, ObjId)], meta: &JsValue, - freeze: bool, ) -> Result { let args: Array = values .iter() - .map(|v| self.maybe_wrap_object(alloc(&v.0), &v.1, meta, freeze)) + .map(|v| self.maybe_wrap_object(alloc(&v.0), &v.1, meta)) .collect::>()?; args.unshift(&(num_del as u32).into()); args.unshift(&(index as u32).into()); diff --git a/rust/automerge-wasm/src/lib.rs b/rust/automerge-wasm/src/lib.rs index 6d65349b..c08486a8 100644 --- a/rust/automerge-wasm/src/lib.rs +++ b/rust/automerge-wasm/src/lib.rs @@ -65,6 +65,7 @@ static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[derive(Debug)] pub struct Automerge { doc: AutoCommit, + freeze: bool, external_types: HashMap, } @@ -78,6 +79,7 @@ 
impl Automerge { } Ok(Automerge { doc, + freeze: false, external_types: HashMap::default(), }) } @@ -86,6 +88,7 @@ impl Automerge { pub fn clone(&mut self, actor: Option) -> Result { let mut automerge = Automerge { doc: self.doc.clone(), + freeze: self.freeze, external_types: self.external_types.clone(), }; if let Some(s) = actor { @@ -98,6 +101,7 @@ impl Automerge { pub fn fork(&mut self, actor: Option) -> Result { let mut automerge = Automerge { doc: self.doc.fork(), + freeze: self.freeze, external_types: self.external_types.clone(), }; if let Some(s) = actor { @@ -112,6 +116,7 @@ impl Automerge { let deps: Vec<_> = JS(heads).try_into()?; let mut automerge = Automerge { doc: self.doc.fork_at(&deps)?, + freeze: self.freeze, external_types: self.external_types.clone(), }; if let Some(s) = actor { @@ -428,13 +433,23 @@ impl Automerge { Ok(result) } - #[wasm_bindgen(js_name = enablePatches)] - pub fn enable_patches(&mut self, enable: JsValue) -> Result<(), JsValue> { + #[wasm_bindgen(js_name = enableFreeze)] + pub fn enable_freeze(&mut self, enable: JsValue) -> Result { let enable = enable .as_bool() - .ok_or_else(|| to_js_err("must pass a bool to enable_patches"))?; - self.doc.observer().enable(enable); - Ok(()) + .ok_or_else(|| to_js_err("must pass a bool to enableFreeze"))?; + let old_freeze = self.freeze; + self.freeze = enable; + Ok(old_freeze.into()) + } + + #[wasm_bindgen(js_name = enablePatches)] + pub fn enable_patches(&mut self, enable: JsValue) -> Result { + let enable = enable + .as_bool() + .ok_or_else(|| to_js_err("must pass a bool to enablePatches"))?; + let old_enabled = self.doc.observer().enable(enable); + Ok(old_enabled.into()) } #[wasm_bindgen(js_name = registerDatatype)] @@ -462,23 +477,22 @@ impl Automerge { let mut object = object.dyn_into::()?; let patches = self.doc.observer().take_patches(); let callback = callback.dyn_into::().ok(); - let freeze = Object::is_frozen(&object); // even if there are no patches we may need to update the meta 
object // which requires that we update the object too if patches.is_empty() && !meta.is_undefined() { let (obj, datatype, id) = self.unwrap_object(&object)?; object = Object::assign(&Object::new(), &obj); - object = self.wrap_object(object, datatype, &id, &meta, freeze)?; + object = self.wrap_object(object, datatype, &id, &meta)?; } for p in patches { if let Some(c) = &callback { let before = object.clone(); - object = self.apply_patch(object, &p, 0, &meta, freeze)?; + object = self.apply_patch(object, &p, 0, &meta)?; c.call3(&JsValue::undefined(), &p.try_into()?, &before, &object)?; } else { - object = self.apply_patch(object, &p, 0, &meta, freeze)?; + object = self.apply_patch(object, &p, 0, &meta)?; } } @@ -635,8 +649,8 @@ impl Automerge { } #[wasm_bindgen(js_name = toJS)] - pub fn to_js(&self, meta: JsValue) -> Result { - self.export_object(&ROOT, Datatype::Map, None, &meta, false) + pub fn to_js(&mut self, meta: JsValue) -> Result { + self.export_object(&ROOT, Datatype::Map, None, &meta) } pub fn materialize( @@ -644,17 +658,15 @@ impl Automerge { obj: JsValue, heads: Option, meta: JsValue, - freeze: JsValue, ) -> Result { let obj = self.import(obj).unwrap_or(ROOT); let heads = get_heads(heads); - let freeze = freeze.as_bool().unwrap_or(false); let obj_type = self .doc .object_type(&obj) .ok_or_else(|| to_js_err(format!("invalid obj {}", obj)))?; let _patches = self.doc.observer().take_patches(); // throw away patches - self.export_object(&obj, obj_type.into(), heads.as_ref(), &meta, freeze) + self.export_object(&obj, obj_type.into(), heads.as_ref(), &meta) } fn import(&self, id: JsValue) -> Result { @@ -791,6 +803,7 @@ pub fn load(data: Uint8Array, actor: Option) -> Result Vec { std::mem::take(&mut self.patches) } - pub(crate) fn enable(&mut self, enable: bool) { + pub(crate) fn enable(&mut self, enable: bool) -> bool { if self.enabled && !enable { self.patches.truncate(0) } + let old_enabled = self.enabled; self.enabled = enable; + old_enabled } } From 
e8309495cef64a51b42ca658d0f5c621015cf1bd Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 18 Oct 2022 11:28:56 +0100 Subject: [PATCH 611/730] Update `cargo deny` to point at `rust` subdirectory --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0140bd6b..2e699f0e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -67,6 +67,7 @@ jobs: - uses: actions/checkout@v2 - uses: EmbarkStudios/cargo-deny-action@v1 with: + arguments: '--manifest-path ./rust/Cargo.toml' command: check ${{ matrix.checks }} wasm_tests: From 6bb611e4b3d0279da0a6f3eef85b3ed92c87efae Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 18 Oct 2022 11:49:46 +0100 Subject: [PATCH 612/730] Update CI to rust 1.64.0 --- .github/workflows/ci.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2e699f0e..e3e5a141 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -14,7 +14,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.60.0 + toolchain: 1.64.0 default: true components: rustfmt - uses: Swatinem/rust-cache@v1 @@ -28,7 +28,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.60.0 + toolchain: 1.64.0 default: true components: clippy - uses: Swatinem/rust-cache@v1 @@ -42,7 +42,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.60.0 + toolchain: 1.64.0 default: true - uses: Swatinem/rust-cache@v1 - name: Build rust docs @@ -99,7 +99,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.60.0 + toolchain: 1.64.0 default: true - uses: Swatinem/rust-cache@v1 - name: Install CMocka @@ -138,7 +138,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.60.0 + toolchain: 1.64.0 default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test 
@@ -151,7 +151,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.60.0 + toolchain: 1.64.0 default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test From 20adff00710f335c3a7841ddf7639f268ee9e76b Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 18 Oct 2022 11:56:37 +0100 Subject: [PATCH 613/730] Fix cmake CI The cmake CI seemed to reference a few nonexistent targets for docs and tests. Remove the doc generation step and point the test CI script at the generated test program. --- .github/workflows/ci.yaml | 3 --- scripts/ci/cmake-build | 2 +- scripts/ci/cmake-docs | 10 ---------- scripts/ci/run | 1 - 4 files changed, 1 insertion(+), 15 deletions(-) delete mode 100755 scripts/ci/cmake-docs diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e3e5a141..edc5680b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -51,9 +51,6 @@ jobs: - name: Install doxygen run: sudo apt-get install -y doxygen shell: bash - - name: Build C docs - run: ./scripts/ci/cmake-docs - shell: bash cargo-deny: runs-on: ubuntu-latest diff --git a/scripts/ci/cmake-build b/scripts/ci/cmake-build index e36513a2..6fba5418 100755 --- a/scripts/ci/cmake-build +++ b/scripts/ci/cmake-build @@ -15,4 +15,4 @@ C_PROJECT=$THIS_SCRIPT/../../rust/automerge-c; mkdir -p $C_PROJECT/build; cd $C_PROJECT/build; cmake --log-level=ERROR -B . -S .. -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DBUILD_SHARED_LIBS=$SHARED_TOGGLE; -cmake --build . --target test_automerge; +./test/test_automerge diff --git a/scripts/ci/cmake-docs b/scripts/ci/cmake-docs deleted file mode 100755 index f1dc1929..00000000 --- a/scripts/ci/cmake-docs +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env bash - -set -eoux pipefail - -mkdir -p crates/automerge-c/build -cd rust/automerge-c/build -cmake -B . -S .. -DBUILD_TESTING=OFF -cmake --build . 
--target automerge_docs - -echo "Try opening crates/automerge-c/build/src/html/index.html" diff --git a/scripts/ci/run b/scripts/ci/run index 423b995c..926e60d7 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -9,4 +9,3 @@ set -eou pipefail ./scripts/ci/wasm_tests ./scripts/ci/js_tests ./scripts/ci/cmake-build Release static -./scripts/ci/cmake-docs From ac6eeb8711fcb74393ebe2a8e0a59482d8d9a43b Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 18 Oct 2022 12:46:22 +0100 Subject: [PATCH 614/730] Another attempt at fixing cmake build CI --- scripts/ci/cmake-build | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ci/cmake-build b/scripts/ci/cmake-build index 6fba5418..e36513a2 100755 --- a/scripts/ci/cmake-build +++ b/scripts/ci/cmake-build @@ -15,4 +15,4 @@ C_PROJECT=$THIS_SCRIPT/../../rust/automerge-c; mkdir -p $C_PROJECT/build; cd $C_PROJECT/build; cmake --log-level=ERROR -B . -S .. -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DBUILD_SHARED_LIBS=$SHARED_TOGGLE; -./test/test_automerge +cmake --build . --target test_automerge; From a4a3dd9ed37ec50ea2972123d47e9b31ee7991aa Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 18 Oct 2022 13:08:08 +0100 Subject: [PATCH 615/730] Fix docs CI --- .github/workflows/docs.yaml | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 1f682628..b501d526 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -30,28 +30,16 @@ jobs: uses: actions-rs/cargo@v1 with: command: clean - args: --doc + args: --manifest-path ./rust/Cargo.toml --doc - name: Build Rust docs uses: actions-rs/cargo@v1 with: command: doc - args: --workspace --all-features --no-deps + args: --manifest-path ./rust/Cargo.toml --workspace --all-features --no-deps - name: Move Rust docs - run: mkdir -p docs && mv target/doc/* docs/. 
- shell: bash - - - name: Install doxygen - run: sudo apt-get install -y doxygen - shell: bash - - - name: Build C docs - run: ./scripts/ci/cmake-docs - shell: bash - - - name: Move C docs - run: mkdir -p docs/automerge-c && mv automerge-c/build/src/html/* docs/automerge-c/. + run: mkdir -p docs && mv rust/target/doc/* docs/. shell: bash - name: Configure root page From 59289f67b19a81b340478dc02d01769adfd73772 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 17 Oct 2022 18:33:38 -0500 Subject: [PATCH 616/730] consolidate inserts and deletes more aggressivly into a single splice --- .../examples/create-react-app/package.json | 2 +- javascript/examples/vite/package.json | 2 +- javascript/examples/webpack/package.json | 2 +- javascript/package.json | 4 +- rust/automerge-wasm/package.json | 2 +- rust/automerge-wasm/src/observer.rs | 53 ++++++++++++------- rust/automerge-wasm/test/test.ts | 16 ++---- 7 files changed, 45 insertions(+), 36 deletions(-) diff --git a/javascript/examples/create-react-app/package.json b/javascript/examples/create-react-app/package.json index 297404bb..273d277b 100644 --- a/javascript/examples/create-react-app/package.json +++ b/javascript/examples/create-react-app/package.json @@ -8,7 +8,7 @@ "@testing-library/jest-dom": "^5.16.5", "@testing-library/react": "^13.4.0", "@testing-library/user-event": "^13.5.0", - "@automerge/automerge": "2.0.0-alpha.5", + "@automerge/automerge": "2.0.0-alpha.7", "react": "^18.2.0", "react-dom": "^18.2.0", "react-scripts": "5.0.1", diff --git a/javascript/examples/vite/package.json b/javascript/examples/vite/package.json index a5f0ce2f..d9a13681 100644 --- a/javascript/examples/vite/package.json +++ b/javascript/examples/vite/package.json @@ -9,7 +9,7 @@ "preview": "vite preview" }, "dependencies": { - "@automerge/automerge": "2.0.0-alpha.5" + "@automerge/automerge": "2.0.0-alpha.7" }, "devDependencies": { "typescript": "^4.6.4", diff --git a/javascript/examples/webpack/package.json 
b/javascript/examples/webpack/package.json index 55e4ba60..2b63e7cc 100644 --- a/javascript/examples/webpack/package.json +++ b/javascript/examples/webpack/package.json @@ -10,7 +10,7 @@ }, "author": "", "dependencies": { - "@automerge/automerge": "2.0.0-alpha.5" + "@automerge/automerge": "2.0.0-alpha.7" }, "devDependencies": { "serve": "^13.0.2", diff --git a/javascript/package.json b/javascript/package.json index d176a27b..dd6eeaec 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.0-alpha.6", + "version": "2.0.0-alpha.7", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", @@ -58,7 +58,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.11", + "@automerge/automerge-wasm": "0.1.12", "uuid": "^8.3" } } diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 3c7eb902..ff55f8c2 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.11", + "version": "0.1.12", "license": "MIT", "files": [ "README.md", diff --git a/rust/automerge-wasm/src/observer.rs b/rust/automerge-wasm/src/observer.rs index 3639b0a3..ab59abf4 100644 --- a/rust/automerge-wasm/src/observer.rs +++ b/rust/automerge-wasm/src/observer.rs @@ -83,8 +83,10 @@ impl OpObserver for Observer { .. 
}) = self.patches.last_mut() { - if tail_obj == &obj && *tail_index + values.len() == index { - values.push(value); + let range = *tail_index..=*tail_index + values.len(); + //if tail_obj == &obj && *tail_index + values.len() == index { + if tail_obj == &obj && range.contains(&index) { + values.insert(index - *tail_index, value); return; } } @@ -99,6 +101,37 @@ impl OpObserver for Observer { } } + fn delete(&mut self, mut parents: Parents<'_>, obj: ObjId, prop: Prop) { + if self.enabled { + if let Some(Patch::Insert { + obj: tail_obj, + index: tail_index, + values, + .. + }) = self.patches.last_mut() + { + if let Prop::Seq(index) = prop { + let range = *tail_index..*tail_index + values.len(); + if tail_obj == &obj && range.contains(&index) { + values.remove(index - *tail_index); + return; + } + } + } + let path = parents.path(); + let patch = match prop { + Prop::Map(key) => Patch::DeleteMap { path, obj, key }, + Prop::Seq(index) => Patch::DeleteSeq { + path, + obj, + index, + length: 1, + }, + }; + self.patches.push(patch) + } + } + fn put( &mut self, mut parents: Parents<'_>, @@ -149,22 +182,6 @@ impl OpObserver for Observer { } } - fn delete(&mut self, mut parents: Parents<'_>, obj: ObjId, prop: Prop) { - if self.enabled { - let path = parents.path(); - let patch = match prop { - Prop::Map(key) => Patch::DeleteMap { path, obj, key }, - Prop::Seq(index) => Patch::DeleteSeq { - path, - obj, - index, - length: 1, - }, - }; - self.patches.push(patch) - } - } - fn merge(&mut self, other: &Self) { self.patches.extend_from_slice(other.patches.as_slice()) } diff --git a/rust/automerge-wasm/test/test.ts b/rust/automerge-wasm/test/test.ts index 7bcde9cb..0f6ce354 100644 --- a/rust/automerge-wasm/test/test.ts +++ b/rust/automerge-wasm/test/test.ts @@ -561,8 +561,7 @@ describe('Automerge', () => { assert.deepEqual([0, 1, 2, 3].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) assert.deepEqual([0, 1, 2, 3].map(i => (doc4.getWithType('1@aaaa', i) || 
[])[1]), ['a', 'b', 'c', 'd']) assert.deepEqual(doc3.popPatches(), [ - { action: 'splice', path: ['values', 0], values:['c','d'] }, - { action: 'splice', path: ['values', 0], values:['a','b'] }, + { action: 'splice', path: ['values', 0], values:['a','b','c','d'] }, ]) assert.deepEqual(doc4.popPatches(), [ { action: 'splice', path: ['values',0], values:['a','b','c','d'] }, @@ -588,8 +587,7 @@ describe('Automerge', () => { assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) assert.deepEqual(doc3.popPatches(), [ - { action: 'splice', path: ['values', 2], values: ['e','f'] }, - { action: 'splice', path: ['values', 2], values: ['c','d'] }, + { action: 'splice', path: ['values', 2], values: ['c','d','e','f'] }, ]) assert.deepEqual(doc4.popPatches(), [ { action: 'splice', path: ['values', 2], values: ['c','d','e','f'] }, @@ -845,11 +843,7 @@ describe('Automerge', () => { assert.deepEqual(doc1.popPatches(), [ { action: 'put', path: ['list'], value: [], conflict: false }, - { action: 'splice', path: ['list', 0], values: [1] }, - { action: 'splice', path: ['list', 0], values: [2] }, - { action: 'splice', path: ['list', 2], values: [3] }, - { action: 'splice', path: ['list', 2], values: [{}] }, - { action: 'splice', path: ['list', 2], values: [[]] }, + { action: 'splice', path: ['list', 0], values: [2,1,[],{},3] }, ]) }) @@ -876,9 +870,7 @@ describe('Automerge', () => { assert.deepEqual(doc1.popPatches(), [ { action: 'put', path: ['list'], value: [], conflict: false }, - { action: 'splice', path: ['list',0], values: [1,2,3,4] }, - { action: 'del', path: ['list',1] }, - { action: 'del', path: ['list',1] }, + { action: 'splice', path: ['list',0], values: [1,4] }, ]) }) From 3482e06b159c1243f3169ff8674190411795b705 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 18 Oct 2022 19:43:46 +0100 
Subject: [PATCH 617/730] javascript 2.0.0-beta1 --- javascript/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/javascript/package.json b/javascript/package.json index dd6eeaec..885634d4 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.0-alpha.7", + "version": "2.0.0-beta.1", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", From d7d2916acb17d23d02ae249763aa0cf2f293d880 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 21 Oct 2022 15:15:30 -0500 Subject: [PATCH 618/730] tiny change that might remove a bloom filter false positive error --- javascript/package.json | 4 ++-- rust/automerge-wasm/package.json | 2 +- rust/automerge/src/sync.rs | 6 ++---- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/javascript/package.json b/javascript/package.json index 885634d4..7f86fd54 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.0-beta.1", + "version": "2.0.0-beta.2", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", @@ -58,7 +58,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.12", + "@automerge/automerge-wasm": "0.1.14", "uuid": "^8.3" } } diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index ff55f8c2..46bda334 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": 
"https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.12", + "version": "0.1.14", "license": "MIT", "files": [ "README.md", diff --git a/rust/automerge/src/sync.rs b/rust/automerge/src/sync.rs index ae49cfc9..71fd0719 100644 --- a/rust/automerge/src/sync.rs +++ b/rust/automerge/src/sync.rs @@ -235,10 +235,8 @@ impl Automerge { let mut changes_to_send = Vec::new(); for hash in need { - hashes_to_send.insert(*hash); - if !change_hashes.contains(hash) { - let change = self.get_change_by_hash(hash); - if let Some(change) = change { + if !hashes_to_send.contains(hash) { + if let Some(change) = self.get_change_by_hash(hash) { changes_to_send.push(change); } } From 37052127474082b35f66260c51e863e84e09022d Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 19 Oct 2022 13:02:38 -0500 Subject: [PATCH 619/730] js: Add Automerge.clone(_, heads) and Automerge.view Sometimes you need a cheap copy of a document at a given set of heads just so you can see what has changed. Cloning the document to do this is quite expensive when you don't need a writable copy. Add automerge.view to allow a cheap read only copy of a document at a given set of heads and add an additional heads argument to clone for when you do want a writable copy. 
--- javascript/src/index.ts | 56 +++++++++++++++++++++++++------- javascript/test/basic_test.ts | 16 +++++++++ javascript/test/legacy_tests.ts | 4 +-- javascript/typedoc-readme.md | 55 +++++++++++++++++++++++++++++++ rust/automerge-wasm/index.d.ts | 9 +++-- rust/automerge-wasm/src/lib.rs | 23 ++++--------- rust/automerge-wasm/test/test.ts | 6 ++-- 7 files changed, 132 insertions(+), 37 deletions(-) diff --git a/javascript/src/index.ts b/javascript/src/index.ts index 3dcf2cc4..c8214e62 100644 --- a/javascript/src/index.ts +++ b/javascript/src/index.ts @@ -98,6 +98,9 @@ export function getBackend(doc: Doc): Automerge { } function _state(doc: Doc, checkroot = true): InternalState { + if (typeof doc !== 'object') { + throw new RangeError("must be the document root") + } const state = Reflect.get(doc, STATE) if (state === undefined || (checkroot && _obj(doc) !== "_root")) { throw new RangeError("must be the document root") @@ -164,14 +167,47 @@ export function init(_opts?: ActorId | InitOptions): Doc { } /** - * Make a copy of an automerge document. + * Make an immutable view of an automerge document as at `heads` + * + * @remarks + * The document returned from this function cannot be passed to {@link change}. + * This is because it shares the same underlying memory as `doc`, but it is + * consequently a very cheap copy. + * + * Note that this function will throw an error if any of the hashes in `heads` + * are not in the document. + * + * @typeParam T - The type of the value contained in the document + * @param doc - The document to create a view of + * @param heads - The hashes of the heads to create a view at */ -export function clone(doc: Doc): Doc { +export function view(doc: Doc, heads: Heads): Doc { const state = _state(doc) - const handle = state.heads ? 
state.handle.forkAt(state.heads) : state.handle.fork() - const clonedDoc: any = handle.materialize("/", undefined, {...state, handle}) + const handle = state.handle + return state.handle.materialize("/", heads, { ...state, handle, heads }) as any +} - return clonedDoc +/** + * Make a full writable copy of an automerge document + * + * @remarks + * Unlike {@link view} this function makes a full copy of the memory backing + * the document and can thus be passed to {@link change}. It also generates a + * new actor ID so that changes made in the new document do not create duplicate + * sequence numbers with respect to the old document. If you need control over + * the actor ID which is generated you can pass the actor ID as the second + * argument + * + * @typeParam T - The type of the value contained in the document + * @param doc - The document to clone + * @param _opts - Either an actor ID to use for the new doc or an {@link InitOptions} + */ +export function clone(doc: Doc, _opts?: ActorId | InitOptions): Doc { + const state = _state(doc) + const heads = state.heads + const opts = importOpts(_opts) + const handle = state.handle.fork(opts.actor, heads) + return handle.applyPatches(doc, { ... state, heads, handle }) } /** Explicity free the memory backing a document. 
Note that this is note @@ -264,10 +300,8 @@ export function change(doc: Doc, options: string | ChangeOptions | Chan function progressDocument(doc: Doc, heads: Heads, callback?: PatchCallback): Doc { let state = _state(doc) let nextState = {...state, heads: undefined}; - // @ts-ignore let nextDoc = state.handle.applyPatches(doc, nextState, callback) state.heads = heads - if (nextState.freeze) {Object.freeze(nextDoc)} return nextDoc } @@ -284,7 +318,7 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn(doc: Doc, options: string | ChangeOptions) const state = _state(doc) if (state.heads) { - throw new RangeError("Attempting to use an outdated Automerge document") + throw new RangeError("Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy.") } if (_readonly(doc) === false) { throw new RangeError("Calls to Automerge.change cannot be nested") @@ -616,7 +650,7 @@ export function applyChanges(doc: Doc, changes: Change[], opts?: ApplyOpti const state = _state(doc) if (!opts) {opts = {}} if (state.heads) { - throw new RangeError("Attempting to use an outdated Automerge document") + throw new RangeError("Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy.") } if (_readonly(doc) === false) { throw new RangeError("Calls to Automerge.change cannot be nested") @@ -721,7 +755,7 @@ export function receiveSyncMessage(doc: Doc, inState: SyncState, message: if (!opts) {opts = {}} const state = _state(doc) if (state.heads) { - throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); + throw new RangeError("Attempting to change an outdated document. 
Use Automerge.clone() if you wish to make a writable copy.") } if (_readonly(doc) === false) { throw new RangeError("Calls to Automerge.change cannot be nested") diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts index 130fc6ec..637d9029 100644 --- a/javascript/test/basic_test.ts +++ b/javascript/test/basic_test.ts @@ -7,6 +7,22 @@ describe('Automerge', () => { it('should init clone and free', () => { let doc1 = Automerge.init() let doc2 = Automerge.clone(doc1); + + // this is only needed if weakrefs are not supported + Automerge.free(doc1) + Automerge.free(doc2) + }) + + it('should be able to make a view with specifc heads', () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, (d) => d.value = 1) + let heads2 = Automerge.getHeads(doc2) + let doc3 = Automerge.change(doc2, (d) => d.value = 2) + let doc2_v2 = Automerge.view(doc3, heads2) + assert.deepEqual(doc2, doc2_v2) + let doc2_v2_clone = Automerge.clone(doc2, "aabbcc") + assert.deepEqual(doc2, doc2_v2_clone) + assert.equal(Automerge.getActorId(doc2_v2_clone), "aabbcc") }) it('handle basic set and read on root object', () => { diff --git a/javascript/test/legacy_tests.ts b/javascript/test/legacy_tests.ts index ea814016..0d152a2d 100644 --- a/javascript/test/legacy_tests.ts +++ b/javascript/test/legacy_tests.ts @@ -231,14 +231,14 @@ describe('Automerge', () => { s2 = Automerge.change(s1, doc2 => doc2.two = 2) doc1.one = 1 }) - }, /Attempting to use an outdated Automerge document/) + }, /Attempting to change an outdated document/) }) it('should not allow the same base document to be used for multiple changes', () => { assert.throws(() => { Automerge.change(s1, doc => doc.one = 1) Automerge.change(s1, doc => doc.two = 2) - }, /Attempting to use an outdated Automerge document/) + }, /Attempting to change an outdated document/) }) it('should allow a document to be cloned', () => { diff --git a/javascript/typedoc-readme.md b/javascript/typedoc-readme.md index 
ba802912..05025ac1 100644 --- a/javascript/typedoc-readme.md +++ b/javascript/typedoc-readme.md @@ -62,6 +62,61 @@ saveIncremental}, this will generate all the changes since you last called `saveIncremental`, the changes generated can be applied to another document with {@link loadIncremental}. +## Viewing different versions of a document + +Occasionally you may wish to explicitly step to a different point in a document +history. One common reason to do this is if you need to obtain a set of changes +which take the document from one state to another in order to send those changes +to another peer (or to save them somewhere). You can use {@link view} to do this. + +```javascript +import * as automerge from "@automerge/automerge" +import * as assert from "assert" + +let doc = automerge.from({ + "key1": "value1" +}) + +// Make a clone of the document at this point, maybe this is actually on another +// peer. +let doc2 = automerge.clone(doc) + +let heads = automerge.getHeads(doc) + +doc = automerge.change(doc, d => { + d.key2 = "value2" +}) + +doc = automerge.change(doc, d => { + d.key3 = "value3" +}) + +// At this point we've generated two separate changes, now we want to send +// just those changes to someone else + +// view is a cheap reference based copy of a document at a given set of heads +let before = automerge.view(doc, heads) + +// This view doesn't show the last two changes in the document state +assert.deepEqual(before, { + key1: "value1" +}) + +// Get the changes to send to doc2 +let changes = automerge.getChanges(before, doc) + +// Apply the changes at doc2 +doc2 = automerge.applyChanges(doc2, changes)[0] +assert.deepEqual(doc2, { + key1: "value1", + key2: "value2", + key3: "value3" +}) +``` + +If you have a {@link view} of a document which you want to make changes to you +can {@link clone} the viewed document. + ## Syncing The sync protocol is stateful. 
This means that we start by creating a {@link diff --git a/rust/automerge-wasm/index.d.ts b/rust/automerge-wasm/index.d.ts index e6dbd6c8..67d03b84 100644 --- a/rust/automerge-wasm/index.d.ts +++ b/rust/automerge-wasm/index.d.ts @@ -199,12 +199,11 @@ export class Automerge { getMissingDeps(heads?: Heads): Heads; // memory management - free(): void; - clone(actor?: string): Automerge; - fork(actor?: string): Automerge; - forkAt(heads: Heads, actor?: string): Automerge; + free(): void; // only needed if weak-refs are unsupported + clone(actor?: string): Automerge; // TODO - remove, this is dangerous + fork(actor?: string, heads?: Heads): Automerge; - // dump internal state to console.log + // dump internal state to console.log - for debugging dump(): void; // experimental api can go here diff --git a/rust/automerge-wasm/src/lib.rs b/rust/automerge-wasm/src/lib.rs index c08486a8..d8f0072f 100644 --- a/rust/automerge-wasm/src/lib.rs +++ b/rust/automerge-wasm/src/lib.rs @@ -98,24 +98,15 @@ impl Automerge { Ok(automerge) } - pub fn fork(&mut self, actor: Option) -> Result { - let mut automerge = Automerge { - doc: self.doc.fork(), - freeze: self.freeze, - external_types: self.external_types.clone(), + pub fn fork(&mut self, actor: Option, heads: JsValue) -> Result { + let heads: Result, _> = JS(heads).try_into(); + let doc = if let Ok(heads) = heads { + self.doc.fork_at(&heads)? 
+ } else { + self.doc.fork() }; - if let Some(s) = actor { - let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); - automerge.doc.set_actor(actor); - } - Ok(automerge) - } - - #[wasm_bindgen(js_name = forkAt)] - pub fn fork_at(&mut self, heads: JsValue, actor: Option) -> Result { - let deps: Vec<_> = JS(heads).try_into()?; let mut automerge = Automerge { - doc: self.doc.fork_at(&deps)?, + doc, freeze: self.freeze, external_types: self.external_types.clone(), }; diff --git a/rust/automerge-wasm/test/test.ts b/rust/automerge-wasm/test/test.ts index 0f6ce354..8e8acd69 100644 --- a/rust/automerge-wasm/test/test.ts +++ b/rust/automerge-wasm/test/test.ts @@ -425,7 +425,7 @@ describe('Automerge', () => { assert.deepEqual(doc2.getWithType(c, "d"), ["str", "dd"]) }) - it('should allow you to forkAt a heads', () => { + it('should allow you to fork at a heads', () => { const A = create("aaaaaa") A.put("/", "key1", "val1"); A.put("/", "key2", "val2"); @@ -436,8 +436,8 @@ describe('Automerge', () => { A.merge(B) const heads2 = A.getHeads(); A.put("/", "key5", "val5"); - assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/", heads1)) - assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/", heads2)) + assert.deepEqual(A.fork(undefined, heads1).materialize("/"), A.materialize("/", heads1)) + assert.deepEqual(A.fork(undefined, heads2).materialize("/"), A.materialize("/", heads2)) }) it('should handle merging text conflicts then saving & loading', () => { From 5adb6952e91869468ef3d7e74c2541f4e0bf51bb Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 26 Oct 2022 13:59:53 +0100 Subject: [PATCH 620/730] @automerge/automerge@2.0.0-beta.2 and @automerge/automerge-wasm@0.1.15 --- javascript/package.json | 5 +++-- rust/automerge-wasm/package.json | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/javascript/package.json b/javascript/package.json index 7f86fd54..26d5b2ac 100644 --- a/javascript/package.json 
+++ b/javascript/package.json @@ -40,7 +40,8 @@ "scripts": { "lint": "eslint src", "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc --emitDeclarationOnly", - "test": "ts-mocha test/*.ts" + "test": "ts-mocha test/*.ts", + "watch-docs": "typedoc src/index.ts --watch --readme typedoc-readme.md" }, "devDependencies": { "@types/expect": "^24.3.0", @@ -58,7 +59,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.14", + "@automerge/automerge-wasm": "0.1.15", "uuid": "^8.3" } } diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 46bda334..93b28e06 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.14", + "version": "0.1.15", "license": "MIT", "files": [ "README.md", From 20d543d28d3e144cbcaaf623cb45e397fd2a88d0 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 26 Oct 2022 14:14:01 +0100 Subject: [PATCH 621/730] @automerge/automerge@2.0.0-beta.3 --- javascript/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/javascript/package.json b/javascript/package.json index 26d5b2ac..3cf1d3ce 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.0-beta.2", + "version": "2.0.0-beta.3", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", From 61aaa52718d2dcb3b4e77bbc5175f00bfb067385 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 27 Oct 2022 14:54:43 +0100 Subject: [PATCH 622/730] Allow changing a cloned document The logic for `clone` which 
was updated to support cloning a viewed document inadverantly left the heads of the cloned document state in place, which meant that cloned documents could not be `change`d. Set state.heads to undefined when cloning to allow changing them. --- javascript/src/index.ts | 6 +++++- javascript/test/basic_test.ts | 11 +++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/javascript/src/index.ts b/javascript/src/index.ts index c8214e62..9b0f468e 100644 --- a/javascript/src/index.ts +++ b/javascript/src/index.ts @@ -207,7 +207,11 @@ export function clone(doc: Doc, _opts?: ActorId | InitOptions): Doc const heads = state.heads const opts = importOpts(_opts) const handle = state.handle.fork(opts.actor, heads) - return handle.applyPatches(doc, { ... state, heads, handle }) + + // `change` uses the presence of state.heads to determine if we are in a view + // set it to undefined to indicate that this is a full fat document + const {heads: oldHeads, ...stateSansHeads} = state + return handle.applyPatches(doc, { ... stateSansHeads, handle }) } /** Explicity free the memory backing a document. 
Note that this is note diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts index 637d9029..1c2e9589 100644 --- a/javascript/test/basic_test.ts +++ b/javascript/test/basic_test.ts @@ -25,6 +25,17 @@ describe('Automerge', () => { assert.equal(Automerge.getActorId(doc2_v2_clone), "aabbcc") }) + it("should allow you to change a clone of a view", () => { + let doc1 = Automerge.init() + doc1 = Automerge.change(doc1, d => d.key = "value") + let heads = Automerge.getHeads(doc1) + doc1 = Automerge.change(doc1, d => d.key = "value2") + let fork = Automerge.clone(Automerge.view(doc1, heads)) + assert.deepEqual(fork, {key: "value"}) + fork = Automerge.change(fork, d => d.key = "value3") + assert.deepEqual(fork, {key: "value3"}) + }) + it('handle basic set and read on root object', () => { let doc1 = Automerge.init() let doc2 = Automerge.change(doc1, (d) => { From bba4fe2c36a165f0513d0c2751b35a5cb1acbda5 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Fri, 28 Oct 2022 11:31:51 +0100 Subject: [PATCH 623/730] @automerge/automerge@2.0.0-beta.4 --- javascript/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/javascript/package.json b/javascript/package.json index 3cf1d3ce..3e7ba734 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.0-beta.3", + "version": "2.0.0-beta.4", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", From 6bbed76f0fd57b2849cc6ab955aee7d9c61672bc Mon Sep 17 00:00:00 2001 From: tosti007 Date: Tue, 1 Nov 2022 09:42:08 +0100 Subject: [PATCH 624/730] Update uuid dependency to v1.2.1 --- rust/automerge-wasm/Cargo.toml | 2 +- rust/automerge/Cargo.toml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rust/automerge-wasm/Cargo.toml 
b/rust/automerge-wasm/Cargo.toml index eea88dd3..02232ab8 100644 --- a/rust/automerge-wasm/Cargo.toml +++ b/rust/automerge-wasm/Cargo.toml @@ -28,7 +28,7 @@ serde = "^1.0" serde_json = "^1.0" rand = { version = "^0.8.4" } getrandom = { version = "^0.2.2", features=["js"] } -uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] } +uuid = { version = "^1.2.1", features=["v4", "js", "serde"] } serde-wasm-bindgen = "0.4.3" serde_bytes = "0.11.5" hex = "^0.4.3" diff --git a/rust/automerge/Cargo.toml b/rust/automerge/Cargo.toml index 959ce37b..c2e82bc3 100644 --- a/rust/automerge/Cargo.toml +++ b/rust/automerge/Cargo.toml @@ -11,7 +11,7 @@ readme = "../README.md" [features] optree-visualisation = ["dot", "rand"] -wasm = ["js-sys", "wasm-bindgen", "web-sys", "uuid/wasm-bindgen"] +wasm = ["js-sys", "wasm-bindgen", "web-sys", "uuid/js"] [dependencies] hex = "^0.4.3" @@ -20,7 +20,7 @@ sha2 = "^0.10.0" thiserror = "^1.0.16" itertools = "^0.10.3" flate2 = "^1.0.22" -uuid = { version = "^0.8.2", features=["v4", "serde"] } +uuid = { version = "^1.2.1", features=["v4", "serde"] } smol_str = { version = "^0.1.21", features=["serde"] } tracing = { version = "^0.1.29" } fxhash = "^0.2.1" From 91f313bb83846ac2e97a46ae1535c7d40c4d5515 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 4 Nov 2022 12:40:09 -0500 Subject: [PATCH 625/730] revert compiler flags to max opt --- rust/Cargo.toml | 4 ++-- rust/automerge-wasm/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rust/Cargo.toml b/rust/Cargo.toml index fbd416fc..6f050447 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -11,11 +11,11 @@ resolver = "2" [profile.release] debug = true lto = true -opt-level = 'z' +opt-level = 3 [profile.bench] debug = true [profile.release.package.automerge-wasm] debug = false -opt-level = 'z' +opt-level = 3 diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 93b28e06..feb00079 100644 --- 
a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -34,7 +34,7 @@ "target": "rimraf ./$TARGET && yarn compile && yarn bindgen && yarn opt", "compile": "cargo build --target wasm32-unknown-unknown --profile $PROFILE", "bindgen": "wasm-bindgen --no-typescript --weak-refs --target $TARGET --out-dir $TARGET ../target/wasm32-unknown-unknown/$TARGET_DIR/automerge_wasm.wasm", - "opt": "wasm-opt -Oz $TARGET/automerge_wasm_bg.wasm -o $TARGET/automerge_wasm_bg.wasm", + "opt": "wasm-opt -O4 $TARGET/automerge_wasm_bg.wasm -o $TARGET/automerge_wasm_bg.wasm", "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, "devDependencies": { From b53584bec0eaa11ff5859edff34af247ea8fa179 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sat, 5 Nov 2022 22:48:43 +0000 Subject: [PATCH 626/730] Ritual obeisance before the altar of clippy --- rust/automerge-wasm/src/interop.rs | 4 ++-- rust/automerge-wasm/src/lib.rs | 8 +++---- rust/automerge/src/automerge/tests.rs | 22 +++++++++---------- .../src/columnar/column_range/rle.rs | 4 ++-- rust/automerge/src/columnar/encoding/delta.rs | 2 +- .../src/legacy/serde_impls/change_hash.rs | 2 +- rust/automerge/src/storage/change.rs | 2 +- rust/automerge/src/types.rs | 4 ++-- 8 files changed, 24 insertions(+), 24 deletions(-) diff --git a/rust/automerge-wasm/src/interop.rs b/rust/automerge-wasm/src/interop.rs index ed76f3a7..6625fc34 100644 --- a/rust/automerge-wasm/src/interop.rs +++ b/rust/automerge-wasm/src/interop.rs @@ -234,7 +234,7 @@ impl From<&[ChangeHash]> for AR { fn from(value: &[ChangeHash]) -> Self { AR(value .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .map(|h| JsValue::from_str(&hex::encode(h.0))) .collect()) } } @@ -257,7 +257,7 @@ impl From<&[am::sync::Have]> for AR { let last_sync: Array = have .last_sync .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .map(|h| JsValue::from_str(&hex::encode(h.0))) .collect(); // FIXME - the clone and the unwrap here shouldnt 
be needed - look at into_bytes() let bloom = Uint8Array::from(have.bloom.to_bytes().as_slice()); diff --git a/rust/automerge-wasm/src/lib.rs b/rust/automerge-wasm/src/lib.rs index d8f0072f..b4452202 100644 --- a/rust/automerge-wasm/src/lib.rs +++ b/rust/automerge-wasm/src/lib.rs @@ -131,14 +131,14 @@ impl Automerge { commit_opts.set_time(time as i64); } let hash = self.doc.commit_with(commit_opts); - JsValue::from_str(&hex::encode(&hash.0)) + JsValue::from_str(&hex::encode(hash.0)) } pub fn merge(&mut self, other: &mut Automerge) -> Result { let heads = self.doc.merge(&mut other.doc)?; let heads: Array = heads .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .map(|h| JsValue::from_str(&hex::encode(h.0))) .collect(); Ok(heads) } @@ -581,7 +581,7 @@ impl Automerge { let heads = self.doc.get_heads(); let heads: Array = heads .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .map(|h| JsValue::from_str(&hex::encode(h.0))) .collect(); heads } @@ -611,7 +611,7 @@ impl Automerge { let deps = self.doc.get_missing_deps(&heads); let deps: Array = deps .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .map(|h| JsValue::from_str(&hex::encode(h.0))) .collect(); Ok(deps) } diff --git a/rust/automerge/src/automerge/tests.rs b/rust/automerge/src/automerge/tests.rs index b35aaabf..516363ab 100644 --- a/rust/automerge/src/automerge/tests.rs +++ b/rust/automerge/src/automerge/tests.rs @@ -192,14 +192,14 @@ fn test_props_vals_at() -> Result<(), AutomergeError> { assert!(doc.keys_at(ROOT, &heads1).collect_vec() == vec!["prop1".to_owned()]); assert_eq!(doc.length_at(ROOT, &heads1), 1); assert!(doc.get_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); - assert!(doc.get_at(ROOT, "prop2", &heads1)? == None); - assert!(doc.get_at(ROOT, "prop3", &heads1)? 
== None); + assert!(doc.get_at(ROOT, "prop2", &heads1)?.is_none()); + assert!(doc.get_at(ROOT, "prop3", &heads1)?.is_none()); assert!(doc.keys_at(ROOT, &heads2).collect_vec() == vec!["prop1".to_owned()]); assert_eq!(doc.length_at(ROOT, &heads2), 1); assert!(doc.get_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); - assert!(doc.get_at(ROOT, "prop2", &heads2)? == None); - assert!(doc.get_at(ROOT, "prop3", &heads2)? == None); + assert!(doc.get_at(ROOT, "prop2", &heads2)?.is_none()); + assert!(doc.get_at(ROOT, "prop3", &heads2)?.is_none()); assert!( doc.keys_at(ROOT, &heads3).collect_vec() == vec!["prop1".to_owned(), "prop2".to_owned()] @@ -207,28 +207,28 @@ fn test_props_vals_at() -> Result<(), AutomergeError> { assert_eq!(doc.length_at(ROOT, &heads3), 2); assert!(doc.get_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); assert!(doc.get_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); - assert!(doc.get_at(ROOT, "prop3", &heads3)? == None); + assert!(doc.get_at(ROOT, "prop3", &heads3)?.is_none()); assert!(doc.keys_at(ROOT, &heads4).collect_vec() == vec!["prop2".to_owned()]); assert_eq!(doc.length_at(ROOT, &heads4), 1); - assert!(doc.get_at(ROOT, "prop1", &heads4)? == None); + assert!(doc.get_at(ROOT, "prop1", &heads4)?.is_none()); assert!(doc.get_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); - assert!(doc.get_at(ROOT, "prop3", &heads4)? == None); + assert!(doc.get_at(ROOT, "prop3", &heads4)?.is_none()); assert!( doc.keys_at(ROOT, &heads5).collect_vec() == vec!["prop2".to_owned(), "prop3".to_owned()] ); assert_eq!(doc.length_at(ROOT, &heads5), 2); assert_eq!(doc.length(ROOT), 2); - assert!(doc.get_at(ROOT, "prop1", &heads5)? 
== None); + assert!(doc.get_at(ROOT, "prop1", &heads5)?.is_none()); assert!(doc.get_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); assert!(doc.get_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); assert_eq!(doc.keys_at(ROOT, &[]).count(), 0); assert_eq!(doc.length_at(ROOT, &[]), 0); - assert!(doc.get_at(ROOT, "prop1", &[])? == None); - assert!(doc.get_at(ROOT, "prop2", &[])? == None); - assert!(doc.get_at(ROOT, "prop3", &[])? == None); + assert!(doc.get_at(ROOT, "prop1", &[])?.is_none()); + assert!(doc.get_at(ROOT, "prop2", &[])?.is_none()); + assert!(doc.get_at(ROOT, "prop3", &[])?.is_none()); Ok(()) } diff --git a/rust/automerge/src/columnar/column_range/rle.rs b/rust/automerge/src/columnar/column_range/rle.rs index 63c0b123..c500a7f4 100644 --- a/rust/automerge/src/columnar/column_range/rle.rs +++ b/rust/automerge/src/columnar/column_range/rle.rs @@ -147,7 +147,7 @@ mod tests { let mut buf = Vec::with_capacity(vals.len() * 3); let mut encoder: RleEncoder<_, u64> = RleEncoder::new(&mut buf); for val in vals { - encoder.append_value(&val) + encoder.append_value(val) } let (_, total_slice_len) = encoder.finish(); let mut decoder: RleDecoder<'_, u64> = @@ -167,7 +167,7 @@ mod tests { for val in vals.iter().take(4) { encoder.append_value(val) } - encoder.append_value(&5); + encoder.append_value(5); for val in vals.iter().skip(4) { encoder.append_value(val); } diff --git a/rust/automerge/src/columnar/encoding/delta.rs b/rust/automerge/src/columnar/encoding/delta.rs index 049bb6fb..6234875b 100644 --- a/rust/automerge/src/columnar/encoding/delta.rs +++ b/rust/automerge/src/columnar/encoding/delta.rs @@ -22,7 +22,7 @@ impl DeltaEncoder { pub(crate) fn append_value(&mut self, value: i64) { self.rle - .append_value(&(value.saturating_sub(self.absolute_value))); + .append_value(value.saturating_sub(self.absolute_value)); self.absolute_value = value; } diff --git a/rust/automerge/src/legacy/serde_impls/change_hash.rs 
b/rust/automerge/src/legacy/serde_impls/change_hash.rs index 4d637909..04b876af 100644 --- a/rust/automerge/src/legacy/serde_impls/change_hash.rs +++ b/rust/automerge/src/legacy/serde_impls/change_hash.rs @@ -9,7 +9,7 @@ impl Serialize for ChangeHash { where S: Serializer, { - hex::encode(&self.0).serialize(serializer) + hex::encode(self.0).serialize(serializer) } } diff --git a/rust/automerge/src/storage/change.rs b/rust/automerge/src/storage/change.rs index 633d96ac..ff3cc9ab 100644 --- a/rust/automerge/src/storage/change.rs +++ b/rust/automerge/src/storage/change.rs @@ -467,7 +467,7 @@ impl ChangeBuilder, Set, Set, Set> { ); leb128::write::unsigned(&mut data, other_actors.len() as u64).unwrap(); for actor in other_actors.iter() { - length_prefixed_bytes(&actor, &mut data); + length_prefixed_bytes(actor, &mut data); } cols.raw_columns().write(&mut data); let ops_data_start = data.len(); diff --git a/rust/automerge/src/types.rs b/rust/automerge/src/types.rs index 22ca1364..95b5505e 100644 --- a/rust/automerge/src/types.rs +++ b/rust/automerge/src/types.rs @@ -610,14 +610,14 @@ impl AsRef<[u8]> for ChangeHash { impl fmt::Debug for ChangeHash { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("ChangeHash") - .field(&hex::encode(&self.0)) + .field(&hex::encode(self.0)) .finish() } } impl fmt::Display for ChangeHash { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", hex::encode(&self.0)) + write!(f, "{}", hex::encode(self.0)) } } From bcab3b6e4784ecf14db7625e3b065680cac921b4 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sat, 5 Nov 2022 22:37:44 +0000 Subject: [PATCH 627/730] Move automerge/tests::helpers to crate automerge-test The assert_doc and assert_obj macros in automerge/tests::helpers are useful for writing tests for any application working with automerge documents. 
Typically however, you only want these utilities in tests so rather than packaging them in the main `automerge` crate move them to a new crate (in the spirit of `tokio_test`) --- rust/Cargo.toml | 1 + rust/automerge-test/Cargo.toml | 18 ++++++ rust/automerge-test/README.md | 3 + .../mod.rs => automerge-test/src/lib.rs} | 64 +++++++++++-------- rust/automerge/Cargo.toml | 2 +- rust/automerge/tests/test.rs | 7 +- 6 files changed, 62 insertions(+), 33 deletions(-) create mode 100644 rust/automerge-test/Cargo.toml create mode 100644 rust/automerge-test/README.md rename rust/{automerge/tests/helpers/mod.rs => automerge-test/src/lib.rs} (90%) diff --git a/rust/Cargo.toml b/rust/Cargo.toml index 6f050447..938100cf 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -3,6 +3,7 @@ members = [ "automerge", "automerge-c", "automerge-cli", + "automerge-test", "automerge-wasm", "edit-trace", ] diff --git a/rust/automerge-test/Cargo.toml b/rust/automerge-test/Cargo.toml new file mode 100644 index 00000000..0defda79 --- /dev/null +++ b/rust/automerge-test/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "automerge-test" +version = "0.1.0" +edition = "2021" +license = "MIT" +repository = "https://github.com/automerge/automerge-rs" +rust-version = "1.57.0" +description = "Utilities for testing automerge libraries" +readme = "../README.md" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +automerge = { path = "../automerge"} +smol_str = { version = "^0.1.21", features=["serde"] } +serde = { version = "^1.0", features=["derive"] } +decorum = "0.3.1" +serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-features=true } diff --git a/rust/automerge-test/README.md b/rust/automerge-test/README.md new file mode 100644 index 00000000..2cadabbb --- /dev/null +++ b/rust/automerge-test/README.md @@ -0,0 +1,3 @@ +# `automerge-test` + +Utilities for making assertions about automerge documents diff --git 
a/rust/automerge/tests/helpers/mod.rs b/rust/automerge-test/src/lib.rs similarity index 90% rename from rust/automerge/tests/helpers/mod.rs rename to rust/automerge-test/src/lib.rs index 38706d37..5a7f59ef 100644 --- a/rust/automerge/tests/helpers/mod.rs +++ b/rust/automerge-test/src/lib.rs @@ -40,17 +40,19 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// ## Constructing documents /// /// ```rust -/// let mut doc = automerge::Automerge::new(); -/// let todos = doc.set(automerge::ROOT, "todos", automerge::Value::map()).unwrap().unwrap(); -/// let todo = doc.insert(todos, 0, automerge::Value::map()).unwrap(); -/// let title = doc.set(todo, "title", "water plants").unwrap().unwrap(); +/// # use automerge::transaction::Transactable; +/// # use automerge_test::{assert_doc, map, list}; +/// let mut doc = automerge::AutoCommit::new(); +/// let todos = doc.put_object(automerge::ROOT, "todos", automerge::ObjType::List).unwrap(); +/// let todo = doc.insert_object(todos, 0, automerge::ObjType::Map).unwrap(); +/// let title = doc.put(todo, "title", "water plants").unwrap(); /// /// assert_doc!( -/// &doc, +/// &doc.document(), /// map!{ /// "todos" => { /// list![ -/// { map!{ title = "water plants" } } +/// { map!{ "title" => { "water plants" } } } /// ] /// } /// } @@ -63,13 +65,16 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// conflicting values we must capture all of these. 
/// /// ```rust -/// let mut doc1 = automerge::Automerge::new(); -/// let mut doc2 = automerge::Automerge::new(); -/// let op1 = doc1.set(automerge::ROOT, "field", "one").unwrap().unwrap(); -/// let op2 = doc2.set(automerge::ROOT, "field", "two").unwrap().unwrap(); +/// # use automerge_test::{assert_doc, map}; +/// # use automerge::transaction::Transactable; +/// +/// let mut doc1 = automerge::AutoCommit::new(); +/// let mut doc2 = automerge::AutoCommit::new(); +/// doc1.put(automerge::ROOT, "field", "one").unwrap(); +/// doc2.put(automerge::ROOT, "field", "two").unwrap(); /// doc1.merge(&mut doc2); /// assert_doc!( -/// &doc1, +/// &doc1.document(), /// map!{ /// "field" => { /// "one", @@ -81,16 +86,11 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { #[macro_export] macro_rules! assert_doc { ($doc: expr, $expected: expr) => {{ - use $crate::helpers::realize; + use $crate::realize; let realized = realize($doc); let expected_obj = $expected.into(); if realized != expected_obj { - let serde_right = serde_json::to_string_pretty(&realized).unwrap(); - let serde_left = serde_json::to_string_pretty(&expected_obj).unwrap(); - panic!( - "documents didn't match\n expected\n{}\n got\n{}", - &serde_left, &serde_right - ); + $crate::pretty_panic(expected_obj, realized) } }}; } @@ -100,16 +100,11 @@ macro_rules! assert_doc { #[macro_export] macro_rules! assert_obj { ($doc: expr, $obj_id: expr, $prop: expr, $expected: expr) => {{ - use $crate::helpers::realize_prop; + use $crate::realize_prop; let realized = realize_prop($doc, $obj_id, $prop); let expected_obj = $expected.into(); if realized != expected_obj { - let serde_right = serde_json::to_string_pretty(&realized).unwrap(); - let serde_left = serde_json::to_string_pretty(&expected_obj).unwrap(); - panic!( - "documents didn't match\n expected\n{}\n got\n{}", - &serde_left, &serde_right - ); + $crate::pretty_panic(expected_obj, realized) } }}; } @@ -118,12 +113,13 @@ macro_rules! 
assert_obj { /// the keys of the map, the inner set is the set of values for that key: /// /// ``` +/// # use automerge_test::map; /// map!{ /// "key" => { /// "value1", /// "value2", /// } -/// } +/// }; /// ``` /// /// The map above would represent a map with a conflict on the "key" property. The values can be @@ -134,6 +130,7 @@ macro_rules! map { (@inner { $($value:expr),* }) => { { use std::collections::BTreeSet; + use $crate::RealizedObject; let mut inner: BTreeSet = BTreeSet::new(); $( let _ = inner.insert($value.into()); @@ -145,6 +142,7 @@ macro_rules! map { ($($key:expr => $inner:tt),*) => { { use std::collections::{BTreeMap, BTreeSet}; + use $crate::RealizedObject; let mut _map: BTreeMap> = ::std::collections::BTreeMap::new(); $( let inner = map!(@inner $inner); @@ -158,12 +156,13 @@ macro_rules! map { /// Construct `RealizedObject::Sequence`. This macro represents a sequence of values /// /// ``` +/// # use automerge_test::{list, RealizedObject}; /// list![ /// { /// "value1", /// "value2", /// } -/// ] +/// ]; /// ``` /// /// The list above would represent a list with a conflict on the 0 index. The values can be @@ -178,6 +177,7 @@ macro_rules! 
list { (@inner { $($value:expr),* }) => { { use std::collections::BTreeSet; + use $crate::RealizedObject; let mut inner: BTreeSet = BTreeSet::new(); $( let _ = inner.insert($value.into()); @@ -473,7 +473,15 @@ impl From> for RealizedObject { } /// Pretty print the contents of a document -#[allow(dead_code)] pub fn pretty_print(doc: &automerge::Automerge) { println!("{}", serde_json::to_string_pretty(&realize(doc)).unwrap()) } + +pub fn pretty_panic(expected_obj: RealizedObject, realized: RealizedObject) { + let serde_right = serde_json::to_string_pretty(&realized).unwrap(); + let serde_left = serde_json::to_string_pretty(&expected_obj).unwrap(); + panic!( + "documents didn't match\n expected\n{}\n got\n{}", + &serde_left, &serde_right + ); +} diff --git a/rust/automerge/Cargo.toml b/rust/automerge/Cargo.toml index c2e82bc3..cc74e708 100644 --- a/rust/automerge/Cargo.toml +++ b/rust/automerge/Cargo.toml @@ -43,10 +43,10 @@ pretty_assertions = "1.0.0" proptest = { version = "^1.0.0", default-features = false, features = ["std"] } serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-features=true } maplit = { version = "^1.0" } -decorum = "0.3.1" criterion = "0.3.5" test-log = { version = "0.2.10", features=["trace"], default-features = false} tracing-subscriber = {version = "0.3.9", features = ["fmt", "env-filter"] } +automerge-test = { path = "../automerge-test" } [[bench]] name = "range" diff --git a/rust/automerge/tests/test.rs b/rust/automerge/tests/test.rs index eb172213..896c623a 100644 --- a/rust/automerge/tests/test.rs +++ b/rust/automerge/tests/test.rs @@ -7,11 +7,10 @@ use automerge::{ // set up logging for all the tests use test_log::test; -mod helpers; #[allow(unused_imports)] -use helpers::{ - mk_counter, new_doc, new_doc_with_actor, pretty_print, realize, realize_obj, sorted_actors, - RealizedObject, +use automerge_test::{ + assert_doc, assert_obj, list, map, mk_counter, new_doc, new_doc_with_actor, pretty_print, + realize, 
realize_obj, sorted_actors, RealizedObject, }; use pretty_assertions::assert_eq; From 05093071ce8359ba3c2f7a71269eec5dba24c8de Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 7 Nov 2022 12:08:12 +0000 Subject: [PATCH 628/730] rust/automerge-test: add From for RealizedObject --- rust/automerge-test/src/lib.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/rust/automerge-test/src/lib.rs b/rust/automerge-test/src/lib.rs index 5a7f59ef..b2af72e1 100644 --- a/rust/automerge-test/src/lib.rs +++ b/rust/automerge-test/src/lib.rs @@ -458,6 +458,12 @@ impl From<&str> for RealizedObject { } } +impl From for RealizedObject { + fn from(f: f64) -> Self { + RealizedObject::Value(OrdScalarValue::F64(f.into())) + } +} + impl From> for RealizedObject { fn from(vals: Vec) -> Self { RealizedObject::Sequence( From a7656b999be266f60d5f73b1f05a3c7126a004a7 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 7 Nov 2022 15:10:53 -0800 Subject: [PATCH 629/730] Add AMobjObjType() (#454) automerge-c: Add AmobjObjType() --- rust/automerge-c/src/doc.rs | 37 +++++++++++++--- rust/automerge-c/src/doc/list.rs | 8 ++-- rust/automerge-c/src/doc/map.rs | 8 ++-- rust/automerge-c/src/obj.rs | 28 +++++++++--- rust/automerge-c/src/result.rs | 4 +- rust/automerge-c/test/list_tests.c | 44 ++++++++++++++----- rust/automerge-c/test/macro_utils.c | 1 + rust/automerge-c/test/map_tests.c | 38 +++++++++++----- .../test/ported_wasm/basic_tests.c | 2 +- 9 files changed, 128 insertions(+), 42 deletions(-) diff --git a/rust/automerge-c/src/doc.rs b/rust/automerge-c/src/doc.rs index beaf7347..4a5038a5 100644 --- a/rust/automerge-c/src/doc.rs +++ b/rust/automerge-c/src/doc.rs @@ -5,7 +5,7 @@ use std::os::raw::c_char; use crate::actor_id::AMactorId; use crate::change_hashes::AMchangeHashes; -use crate::obj::AMobjId; +use crate::obj::{AMobjId, AMobjType}; use crate::result::{to_result, AMresult, AMvalue}; use crate::sync::{to_sync_message, AMsyncMessage, AMsyncState}; @@ -143,11 +143,11 @@ pub unsafe 
extern "C" fn AMcreate(actor_id: *const AMactorId) -> *mut AMresult { /// \memberof AMdoc /// \brief Commits the current operations on a document with an optional -/// message and/or time override as seconds since the epoch. +/// message and/or *nix timestamp (milliseconds). /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] message A UTF-8 string or `NULL`. -/// \param[in] time A pointer to a `time_t` value or `NULL`. +/// \param[in] timestamp A pointer to a 64-bit integer or `NULL`. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// with one element. /// \pre \p doc `!= NULL`. @@ -160,15 +160,15 @@ pub unsafe extern "C" fn AMcreate(actor_id: *const AMactorId) -> *mut AMresult { pub unsafe extern "C" fn AMcommit( doc: *mut AMdoc, message: *const c_char, - time: *const libc::time_t, + timestamp: *const i64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let mut options = CommitOptions::default(); if !message.is_null() { options.set_message(to_str(message)); } - if let Some(time) = time.as_ref() { - options.set_time(*time); + if let Some(timestamp) = timestamp.as_ref() { + options.set_time(*timestamp); } to_result(doc.commit_with(options)) } @@ -546,6 +546,31 @@ pub unsafe extern "C" fn AMobjSize( } } +/// \memberof AMdoc +/// \brief Gets the type of an object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \return An `AMobjType`. +/// \pre \p doc `!= NULL`. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMobjObjType(doc: *const AMdoc, obj_id: *const AMobjId) -> AMobjType { + if let Some(doc) = doc.as_ref() { + let obj_id = to_obj_id!(obj_id); + match doc.object_type(obj_id) { + None => AMobjType::Void, + Some(obj_type) => obj_type.into(), + } + } else { + AMobjType::Void + } +} + /// \memberof AMdoc /// \brief Gets the current or historical values of an object within its entire /// range. diff --git a/rust/automerge-c/src/doc/list.rs b/rust/automerge-c/src/doc/list.rs index c8b160cb..d5ad34ed 100644 --- a/rust/automerge-c/src/doc/list.rs +++ b/rust/automerge-c/src/doc/list.rs @@ -4,7 +4,7 @@ use std::os::raw::c_char; use crate::change_hashes::AMchangeHashes; use crate::doc::{to_doc, to_doc_mut, to_obj_id, to_str, AMdoc}; -use crate::obj::{AMobjId, AMobjType}; +use crate::obj::{to_obj_type, AMobjId, AMobjType}; use crate::result::{to_result, AMresult}; pub mod item; @@ -418,6 +418,7 @@ pub unsafe extern "C" fn AMlistPutNull( /// `AMobjId` struct. /// \pre \p doc `!= NULL`. /// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. +/// \pre \p obj_type != `AM_OBJ_TYPE_VOID`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -435,7 +436,7 @@ pub unsafe extern "C" fn AMlistPutObject( let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - let object = obj_type.into(); + let object = to_obj_type!(obj_type); to_result(if insert { doc.insert_object(obj_id, index, object) } else { @@ -486,7 +487,8 @@ pub unsafe extern "C" fn AMlistPutStr( } /// \memberof AMdoc -/// \brief Puts a Lamport timestamp as the value at an index in a list object. 
+/// \brief Puts a *nix timestamp (milliseconds) as the value at an index in a +/// list object. /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. diff --git a/rust/automerge-c/src/doc/map.rs b/rust/automerge-c/src/doc/map.rs index 4b2b6cc2..2ba00c15 100644 --- a/rust/automerge-c/src/doc/map.rs +++ b/rust/automerge-c/src/doc/map.rs @@ -5,7 +5,7 @@ use std::os::raw::c_char; use crate::change_hashes::AMchangeHashes; use crate::doc::utils::to_str; use crate::doc::{to_doc, to_doc_mut, to_obj_id, AMdoc}; -use crate::obj::{AMobjId, AMobjType}; +use crate::obj::{to_obj_type, AMobjId, AMobjType}; use crate::result::{to_result, AMresult}; pub mod item; @@ -268,6 +268,7 @@ pub unsafe extern "C" fn AMmapPutNull( /// `AMobjId` struct. /// \pre \p doc `!= NULL`. /// \pre \p key `!= NULL`. +/// \pre \p obj_type != `AM_OBJ_TYPE_VOID`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal @@ -283,7 +284,7 @@ pub unsafe extern "C" fn AMmapPutObject( obj_type: AMobjType, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.put_object(to_obj_id!(obj_id), to_str(key), obj_type.into())) + to_result(doc.put_object(to_obj_id!(obj_id), to_str(key), to_obj_type!(obj_type))) } /// \memberof AMdoc @@ -373,7 +374,8 @@ pub unsafe extern "C" fn AMmapPutStr( } /// \memberof AMdoc -/// \brief Puts a Lamport timestamp as the value of a key in a map object. +/// \brief Puts a *nix timestamp (milliseconds) as the value of a key in a map +/// object. /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. diff --git a/rust/automerge-c/src/obj.rs b/rust/automerge-c/src/obj.rs index a674660e..00069b9c 100644 --- a/rust/automerge-c/src/obj.rs +++ b/rust/automerge-c/src/obj.rs @@ -7,6 +7,19 @@ use crate::actor_id::AMactorId; pub mod item; pub mod items; +macro_rules! 
to_obj_type { + ($am_obj_type:expr) => {{ + match $am_obj_type { + AMobjType::Map => am::ObjType::Map, + AMobjType::List => am::ObjType::List, + AMobjType::Text => am::ObjType::Text, + AMobjType::Void => return AMresult::err("Invalid AMobjType value").into(), + } + }}; +} + +pub(crate) use to_obj_type; + /// \struct AMobjId /// \installed_headerfile /// \brief An object's unique identifier. @@ -142,20 +155,23 @@ pub unsafe extern "C" fn AMobjIdIndex(obj_id: *const AMobjId) -> usize { /// \brief The type of an object value. #[repr(u8)] pub enum AMobjType { + /// A void. + /// \note This tag is unalphabetized to evaluate as false. + Void = 0, /// A list. - List = 1, + List, /// A key-value map. Map, /// A list of Unicode graphemes. Text, } -impl From for am::ObjType { - fn from(o: AMobjType) -> Self { +impl From for AMobjType { + fn from(o: am::ObjType) -> Self { match o { - AMobjType::Map => am::ObjType::Map, - AMobjType::List => am::ObjType::List, - AMobjType::Text => am::ObjType::Text, + am::ObjType::Map | am::ObjType::Table => AMobjType::Map, + am::ObjType::List => AMobjType::List, + am::ObjType::Text => AMobjType::Text, } } } diff --git a/rust/automerge-c/src/result.rs b/rust/automerge-c/src/result.rs index 67b14b1d..65f7f98f 100644 --- a/rust/automerge-c/src/result.rs +++ b/rust/automerge-c/src/result.rs @@ -85,7 +85,7 @@ use crate::sync::{AMsyncMessage, AMsyncState}; /// The variant discriminator. /// /// \var AMvalue::timestamp -/// A Lamport timestamp. +/// A *nix timestamp (milliseconds). /// /// \var AMvalue::uint /// A 64-bit unsigned integer. @@ -133,7 +133,7 @@ pub enum AMvalue<'a> { SyncMessage(&'a AMsyncMessage), /// A synchronization state variant. SyncState(&'a mut AMsyncState), - /// A Lamport timestamp variant. + /// A *nix timestamp (milliseconds) variant. Timestamp(i64), /// A 64-bit unsigned integer variant. 
Uint(u64), diff --git a/rust/automerge-c/test/list_tests.c b/rust/automerge-c/test/list_tests.c index db1dc086..6a472679 100644 --- a/rust/automerge-c/test/list_tests.c +++ b/rust/automerge-c/test/list_tests.c @@ -95,17 +95,33 @@ static void test_AMlistPutNull_ ## mode(void **state) { \ #define static_void_test_AMlistPutObject(label, mode) \ static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ GroupState* group_state = *state; \ - AMobjId const* const obj_id = AMpush( \ - &group_state->stack, \ - AMlistPutObject(group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - AMobjType_tag(#label)), \ - AM_VALUE_OBJ_ID, \ - cmocka_cb).obj_id; \ - assert_non_null(obj_id); \ - assert_int_equal(AMobjSize(group_state->doc, obj_id, NULL), 0); \ + AMobjType const obj_type = AMobjType_tag(#label); \ + if (obj_type != AM_OBJ_TYPE_VOID) { \ + AMobjId const* const obj_id = AMpush( \ + &group_state->stack, \ + AMlistPutObject(group_state->doc, \ + AM_ROOT, \ + 0, \ + !strcmp(#mode, "insert"), \ + obj_type), \ + AM_VALUE_OBJ_ID, \ + cmocka_cb).obj_id; \ + assert_non_null(obj_id); \ + assert_int_equal(AMobjObjType(group_state->doc, obj_id), obj_type); \ + assert_int_equal(AMobjSize(group_state->doc, obj_id, NULL), 0); \ + } \ + else { \ + AMpush(&group_state->stack, \ + AMlistPutObject(group_state->doc, \ + AM_ROOT, \ + 0, \ + !strcmp(#mode, "insert"), \ + obj_type), \ + AM_VALUE_VOID, \ + NULL); \ + assert_int_not_equal(AMresultStatus(group_state->stack->result), \ + AM_STATUS_OK); \ + } \ AMfree(AMpop(&group_state->stack)); \ } @@ -165,6 +181,10 @@ static_void_test_AMlistPutObject(Text, insert) static_void_test_AMlistPutObject(Text, update) +static_void_test_AMlistPutObject(Void, insert) + +static_void_test_AMlistPutObject(Void, update) + static_void_test_AMlistPutStr(insert, "Hello, world!") static_void_test_AMlistPutStr(update, "Hello, world!") @@ -365,6 +385,8 @@ int run_list_tests(void) { cmocka_unit_test(test_AMlistPutObject(Map, update)), 
cmocka_unit_test(test_AMlistPutObject(Text, insert)), cmocka_unit_test(test_AMlistPutObject(Text, update)), + cmocka_unit_test(test_AMlistPutObject(Void, insert)), + cmocka_unit_test(test_AMlistPutObject(Void, update)), cmocka_unit_test(test_AMlistPutStr(insert)), cmocka_unit_test(test_AMlistPutStr(update)), cmocka_unit_test(test_AMlistPut(Timestamp, insert)), diff --git a/rust/automerge-c/test/macro_utils.c b/rust/automerge-c/test/macro_utils.c index 35c55b85..6d7578b6 100644 --- a/rust/automerge-c/test/macro_utils.c +++ b/rust/automerge-c/test/macro_utils.c @@ -20,5 +20,6 @@ AMobjType AMobjType_tag(char const* obj_type_label) { if (!strcmp(obj_type_label, "List")) return AM_OBJ_TYPE_LIST; else if (!strcmp(obj_type_label, "Map")) return AM_OBJ_TYPE_MAP; else if (!strcmp(obj_type_label, "Text")) return AM_OBJ_TYPE_TEXT; + else if (!strcmp(obj_type_label, "Void")) return AM_OBJ_TYPE_VOID; else return 0; } diff --git a/rust/automerge-c/test/map_tests.c b/rust/automerge-c/test/map_tests.c index 85f4ea93..b370fd8b 100644 --- a/rust/automerge-c/test/map_tests.c +++ b/rust/automerge-c/test/map_tests.c @@ -86,16 +86,31 @@ static void test_AMmapPutNull(void **state) { #define static_void_test_AMmapPutObject(label) \ static void test_AMmapPutObject_ ## label(void **state) { \ GroupState* group_state = *state; \ - AMobjId const* const obj_id = AMpush( \ - &group_state->stack, \ - AMmapPutObject(group_state->doc, \ - AM_ROOT, \ - #label, \ - AMobjType_tag(#label)), \ - AM_VALUE_OBJ_ID, \ - cmocka_cb).obj_id; \ - assert_non_null(obj_id); \ - assert_int_equal(AMobjSize(group_state->doc, obj_id, NULL), 0); \ + AMobjType const obj_type = AMobjType_tag(#label); \ + if (obj_type != AM_OBJ_TYPE_VOID) { \ + AMobjId const* const obj_id = AMpush( \ + &group_state->stack, \ + AMmapPutObject(group_state->doc, \ + AM_ROOT, \ + #label, \ + obj_type), \ + AM_VALUE_OBJ_ID, \ + cmocka_cb).obj_id; \ + assert_non_null(obj_id); \ + assert_int_equal(AMobjObjType(group_state->doc, obj_id), 
obj_type); \ + assert_int_equal(AMobjSize(group_state->doc, obj_id, NULL), 0); \ + } \ + else { \ + AMpush(&group_state->stack, \ + AMmapPutObject(group_state->doc, \ + AM_ROOT, \ + #label, \ + obj_type), \ + AM_VALUE_VOID, \ + NULL); \ + assert_int_not_equal(AMresultStatus(group_state->stack->result), \ + AM_STATUS_OK); \ + } \ AMfree(AMpop(&group_state->stack)); \ } @@ -126,6 +141,8 @@ static_void_test_AMmapPutObject(Map) static_void_test_AMmapPutObject(Text) +static_void_test_AMmapPutObject(Void) + static_void_test_AMmapPut(Timestamp, timestamp, INT64_MAX) static_void_test_AMmapPut(Uint, uint, UINT64_MAX) @@ -1149,6 +1166,7 @@ int run_map_tests(void) { cmocka_unit_test(test_AMmapPutObject(List)), cmocka_unit_test(test_AMmapPutObject(Map)), cmocka_unit_test(test_AMmapPutObject(Text)), + cmocka_unit_test(test_AMmapPutObject(Void)), cmocka_unit_test(test_AMmapPutStr), cmocka_unit_test(test_AMmapPut(Timestamp)), cmocka_unit_test(test_AMmapPut(Uint)), diff --git a/rust/automerge-c/test/ported_wasm/basic_tests.c b/rust/automerge-c/test/ported_wasm/basic_tests.c index 147b140d..2353c3b7 100644 --- a/rust/automerge-c/test/ported_wasm/basic_tests.c +++ b/rust/automerge-c/test/ported_wasm/basic_tests.c @@ -711,7 +711,7 @@ static void test_should_be_able_to_insert_objects_into_text(void** state) { assert_string_equal(AMpush(&stack, AMtext(doc, text, NULL), AM_VALUE_STR, - cmocka_cb).str, "Hello \ufffcworld"); + cmocka_cb).str, u8"Hello \ufffcworld"); /* assert.deepEqual(doc.getWithType(text, 6), ["map", obj]); */ assert_true(AMobjIdEqual(AMpush(&stack, AMlistGet(doc, text, 6, NULL), From 92c044eadb8c1605f7e11fe9bd31aec45a41487a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Nov 2022 13:35:34 +0000 Subject: [PATCH 630/730] Bump loader-utils in /javascript/examples/create-react-app Bumps [loader-utils](https://github.com/webpack/loader-utils) from 2.0.2 to 2.0.4. 
- [Release notes](https://github.com/webpack/loader-utils/releases) - [Changelog](https://github.com/webpack/loader-utils/blob/v2.0.4/CHANGELOG.md) - [Commits](https://github.com/webpack/loader-utils/compare/v2.0.2...v2.0.4) --- updated-dependencies: - dependency-name: loader-utils dependency-type: indirect ... Signed-off-by: dependabot[bot] --- .../examples/create-react-app/yarn.lock | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/javascript/examples/create-react-app/yarn.lock b/javascript/examples/create-react-app/yarn.lock index 90a1592b..d6e5d93f 100644 --- a/javascript/examples/create-react-app/yarn.lock +++ b/javascript/examples/create-react-app/yarn.lock @@ -24,17 +24,17 @@ jsonpointer "^5.0.0" leven "^3.1.0" -"@automerge/automerge-wasm@0.1.9": - version "0.1.9" - resolved "http://localhost:4873/@automerge%2fautomerge-wasm/-/automerge-wasm-0.1.9.tgz#b2def5e8b643f1802bc696843b7755dc444dc2eb" - integrity sha512-S+sjJUJ3aPn2F37vKYAzKxz8CDgbHpOOGVjKSgkLjkAqe1pQ+wp4BpiELXafX73w8DVIrGx1zzru4w3t+Eo8gw== +"@automerge/automerge-wasm@0.1.12": + version "0.1.12" + resolved "https://registry.yarnpkg.com/@automerge/automerge-wasm/-/automerge-wasm-0.1.12.tgz#8ce25255d95d4ed6fb387de6858f7b7b7e2ed4a9" + integrity sha512-/xjX1217QYJ+QaoT6iHQw4hGNUIoc3xc65c9eCnfX5v9J9BkTOl05p2Cnr51O2rPc/M6TqZLmlvpvNVdcH9JpA== -"@automerge/automerge@2.0.0-alpha.4": - version "2.0.0-alpha.4" - resolved "http://localhost:4873/@automerge%2fautomerge/-/automerge-2.0.0-alpha.4.tgz#df406f5364960a4d21040044da55ebd47406ea3a" - integrity sha512-PVRD1dmLy0U4GttyMvlWr99wyr6xvskJbOkxJDHnp+W2VAFfcqa4QKouaFbJ4W3iIsYX8DfQJ+uhRxa6UnvkHg== +"@automerge/automerge@2.0.0-alpha.7": + version "2.0.0-alpha.7" + resolved "https://registry.yarnpkg.com/@automerge/automerge/-/automerge-2.0.0-alpha.7.tgz#2ee220d51bcd796074a18af74eeabb5f177e1f36" + integrity sha512-Wd2/GNeqtBybUtXclEE7bWBmmEkhv3q2ITQmLh18V0VvMPbqMBpcOKYzQFnKCyiPyRe5XcYeQAyGyunhE5V0ug== dependencies: - 
"@automerge/automerge-wasm" "0.1.9" + "@automerge/automerge-wasm" "0.1.12" uuid "^8.3" "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.8.3": @@ -2827,7 +2827,7 @@ bfj@^7.0.2: big.js@^5.2.2: version "5.2.2" - resolved "http://localhost:4873/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" + resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== binary-extensions@^2.0.0: @@ -3817,7 +3817,7 @@ emoji-regex@^9.2.2: emojis-list@^3.0.0: version "3.0.0" - resolved "http://localhost:4873/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" + resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== encodeurl@~1.0.2: @@ -5942,9 +5942,9 @@ loader-runner@^4.2.0: integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== loader-utils@^2.0.0: - version "2.0.2" - resolved "http://localhost:4873/loader-utils/-/loader-utils-2.0.2.tgz#d6e3b4fb81870721ae4e0868ab11dd638368c129" - integrity sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A== + version "2.0.4" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.4.tgz#8b5cb38b5c34a9a018ee1fc0e6a066d1dfcc528c" + integrity sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw== dependencies: big.js "^5.2.2" emojis-list "^3.0.0" From e713c35d219b61d3350e527b474e693141961857 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 21 Nov 2022 18:26:28 +0000 Subject: [PATCH 631/730] Fix some typescript errors --- 
javascript/src/index.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/javascript/src/index.ts b/javascript/src/index.ts index 9b0f468e..67a27e00 100644 --- a/javascript/src/index.ts +++ b/javascript/src/index.ts @@ -101,8 +101,8 @@ function _state(doc: Doc, checkroot = true): InternalState { if (typeof doc !== 'object') { throw new RangeError("must be the document root") } - const state = Reflect.get(doc, STATE) - if (state === undefined || (checkroot && _obj(doc) !== "_root")) { + const state = Reflect.get(doc, STATE) as InternalState + if (state === undefined || state == null || (checkroot && _obj(doc) !== "_root")) { throw new RangeError("must be the document root") } return state @@ -113,7 +113,7 @@ function _frozen(doc: Doc): boolean { } function _trace(doc: Doc): string | undefined { - return Reflect.get(doc, TRACE) + return Reflect.get(doc, TRACE) as string } function _set_heads(doc: Doc, heads: Heads) { @@ -129,7 +129,7 @@ function _obj(doc: Doc): ObjID | null { if (!(typeof doc === 'object') || doc === null) { return null } - return Reflect.get(doc, OBJECT_ID) + return Reflect.get(doc, OBJECT_ID) as ObjID } function _readonly(doc: Doc): boolean { From 03b3da203dc8ea441324ef54315cb2d6de509095 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 22 Nov 2022 00:02:13 +0000 Subject: [PATCH 632/730] @automerge/automerge-wasm 0.1.16 --- javascript/package.json | 2 +- rust/automerge-wasm/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/javascript/package.json b/javascript/package.json index 3e7ba734..25d9bb50 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -59,7 +59,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.15", + "@automerge/automerge-wasm": "0.1.16", "uuid": "^8.3" } } diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index feb00079..192589ba 100644 --- a/rust/automerge-wasm/package.json +++ 
b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.15", + "version": "0.1.16", "license": "MIT", "files": [ "README.md", From ca25ed0ca09504a72d4c0605746908846f242e1e Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Thu, 3 Nov 2022 12:10:29 -0500 Subject: [PATCH 633/730] automerge-wasm: Use a SequenceTree in the OpObserver Generating patches to text objects (a la the edit-trace benchmark) was very slow due to appending to the back of a Vec. Use the SequenceTree (effectively a B-tree) instead so as to speed up sequence patch generation. --- javascript/package.json | 2 +- rust/automerge-wasm/package.json | 2 +- rust/automerge-wasm/src/interop.rs | 8 +- rust/automerge-wasm/src/observer.rs | 8 +- rust/automerge/src/lib.rs | 2 + rust/automerge/src/sequence_tree.rs | 195 ++++++++++++++-------------- 6 files changed, 107 insertions(+), 110 deletions(-) diff --git a/javascript/package.json b/javascript/package.json index 25d9bb50..3d0db133 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -59,7 +59,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.16", + "@automerge/automerge-wasm": "0.1.17", "uuid": "^8.3" } } diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 192589ba..908bf01d 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.16", + "version": "0.1.17", "license": "MIT", "files": [ "README.md", diff --git a/rust/automerge-wasm/src/interop.rs b/rust/automerge-wasm/src/interop.rs index 
6625fc34..923bc25f 100644 --- a/rust/automerge-wasm/src/interop.rs +++ b/rust/automerge-wasm/src/interop.rs @@ -557,7 +557,7 @@ impl Automerge { Reflect::set(&result, &(*index as f64).into(), &sub_val)?; Ok(result.into()) } - Patch::DeleteSeq { index, .. } => self.sub_splice(result, *index, 1, &[], meta), + Patch::DeleteSeq { index, .. } => self.sub_splice(result, *index, 1, vec![], meta), Patch::Insert { index, values, .. } => self.sub_splice(result, *index, 0, values, meta), Patch::Increment { prop, value, .. } => { if let Prop::Seq(index) = prop { @@ -650,16 +650,16 @@ impl Automerge { self.wrap_object(result, datatype, &id, meta) } - fn sub_splice( + fn sub_splice<'a, I: IntoIterator, ObjId)>>( &self, o: Array, index: usize, num_del: usize, - values: &[(Value<'_>, ObjId)], + values: I, meta: &JsValue, ) -> Result { let args: Array = values - .iter() + .into_iter() .map(|v| self.maybe_wrap_object(alloc(&v.0), &v.1, meta)) .collect::>()?; args.unshift(&(num_del as u32).into()); diff --git a/rust/automerge-wasm/src/observer.rs b/rust/automerge-wasm/src/observer.rs index ab59abf4..2d979041 100644 --- a/rust/automerge-wasm/src/observer.rs +++ b/rust/automerge-wasm/src/observer.rs @@ -1,7 +1,7 @@ #![allow(dead_code)] use crate::interop::{alloc, js_set}; -use automerge::{ObjId, OpObserver, Parents, Prop, Value}; +use automerge::{ObjId, OpObserver, Parents, Prop, SequenceTree, Value}; use js_sys::{Array, Object}; use wasm_bindgen::prelude::*; @@ -45,7 +45,7 @@ pub(crate) enum Patch { obj: ObjId, path: Vec<(ObjId, Prop)>, index: usize, - values: Vec<(Value<'static>, ObjId)>, + values: SequenceTree<(Value<'static>, ObjId)>, }, Increment { obj: ObjId, @@ -91,11 +91,13 @@ impl OpObserver for Observer { } } let path = parents.path(); + let mut values = SequenceTree::new(); + values.push(value); let patch = Patch::Insert { path, obj, index, - values: vec![value], + values, }; self.patches.push(patch); } diff --git a/rust/automerge/src/lib.rs b/rust/automerge/src/lib.rs index 
df33e096..15cee2a7 100644 --- a/rust/automerge/src/lib.rs +++ b/rust/automerge/src/lib.rs @@ -77,6 +77,7 @@ mod op_set; mod op_tree; mod parents; mod query; +mod sequence_tree; mod storage; pub mod sync; pub mod transaction; @@ -105,6 +106,7 @@ pub use op_observer::OpObserver; pub use op_observer::Patch; pub use op_observer::VecOpObserver; pub use parents::Parents; +pub use sequence_tree::SequenceTree; pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; pub use value::{ScalarValue, Value}; pub use values::Values; diff --git a/rust/automerge/src/sequence_tree.rs b/rust/automerge/src/sequence_tree.rs index ba5c7ff6..f95ceab3 100644 --- a/rust/automerge/src/sequence_tree.rs +++ b/rust/automerge/src/sequence_tree.rs @@ -4,21 +4,22 @@ use std::{ mem, }; -pub type SequenceTree = SequenceTreeInternal; +pub(crate) const B: usize = 16; +pub type SequenceTree = SequenceTreeInternal; #[derive(Clone, Debug)] pub struct SequenceTreeInternal { - root_node: Option>, + root_node: Option>, } #[derive(Clone, Debug, PartialEq)] struct SequenceTreeNode { elements: Vec, - children: Vec>, + children: Vec>, length: usize, } -impl SequenceTreeInternal +impl SequenceTreeInternal where T: Clone + Debug, { @@ -38,7 +39,7 @@ where } /// Create an iterator through the sequence. 
- pub fn iter(&self) -> Iter<'_, T, B> { + pub fn iter(&self) -> Iter<'_, T> { Iter { inner: self, index: 0, @@ -145,7 +146,7 @@ where } } -impl SequenceTreeNode +impl SequenceTreeNode where T: Clone + Debug, { @@ -157,7 +158,7 @@ where } } - pub fn len(&self) -> usize { + pub(crate) fn len(&self) -> usize { self.length } @@ -380,7 +381,7 @@ where l } - pub fn remove(&mut self, index: usize) -> T { + pub(crate) fn remove(&mut self, index: usize) -> T { let original_len = self.len(); if self.is_leaf() { let v = self.remove_from_leaf(index); @@ -423,7 +424,7 @@ where } } - fn merge(&mut self, middle: T, successor_sibling: SequenceTreeNode) { + fn merge(&mut self, middle: T, successor_sibling: SequenceTreeNode) { self.elements.push(middle); self.elements.extend(successor_sibling.elements); self.children.extend(successor_sibling.children); @@ -431,7 +432,7 @@ where assert!(self.is_full()); } - pub fn set(&mut self, index: usize, element: T) -> T { + pub(crate) fn set(&mut self, index: usize, element: T) -> T { if self.is_leaf() { let old_element = self.elements.get_mut(index).unwrap(); mem::replace(old_element, element) @@ -455,7 +456,7 @@ where } } - pub fn get(&self, index: usize) -> Option<&T> { + pub(crate) fn get(&self, index: usize) -> Option<&T> { if self.is_leaf() { return self.elements.get(index); } else { @@ -475,7 +476,7 @@ where None } - pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { + pub(crate) fn get_mut(&mut self, index: usize) -> Option<&mut T> { if self.is_leaf() { return self.elements.get_mut(index); } else { @@ -496,7 +497,7 @@ where } } -impl Default for SequenceTreeInternal +impl Default for SequenceTreeInternal where T: Clone + Debug, { @@ -505,7 +506,7 @@ where } } -impl PartialEq for SequenceTreeInternal +impl PartialEq for SequenceTreeInternal where T: Clone + Debug + PartialEq, { @@ -514,13 +515,13 @@ where } } -impl<'a, T> IntoIterator for &'a SequenceTreeInternal +impl<'a, T> IntoIterator for &'a SequenceTreeInternal where T: 
Clone + Debug, { type Item = &'a T; - type IntoIter = Iter<'a, T, B>; + type IntoIter = Iter<'a, T>; fn into_iter(self) -> Self::IntoIter { Iter { @@ -530,12 +531,13 @@ where } } +#[derive(Debug)] pub struct Iter<'a, T> { - inner: &'a SequenceTreeInternal, + inner: &'a SequenceTreeInternal, index: usize, } -impl<'a, T> Iterator for Iter<'a, T, B> +impl<'a, T> Iterator for Iter<'a, T> where T: Clone + Debug, { @@ -554,37 +556,35 @@ where #[cfg(test)] mod tests { - use crate::ActorId; + use proptest::prelude::*; use super::*; #[test] fn push_back() { let mut t = SequenceTree::new(); - let actor = ActorId::random(); - t.push(actor.op_id_at(1)); - t.push(actor.op_id_at(2)); - t.push(actor.op_id_at(3)); - t.push(actor.op_id_at(4)); - t.push(actor.op_id_at(5)); - t.push(actor.op_id_at(6)); - t.push(actor.op_id_at(8)); - t.push(actor.op_id_at(100)); + t.push(1); + t.push(2); + t.push(3); + t.push(4); + t.push(5); + t.push(6); + t.push(8); + t.push(100); } #[test] fn insert() { let mut t = SequenceTree::new(); - let actor = ActorId::random(); - t.insert(0, actor.op_id_at(1)); - t.insert(1, actor.op_id_at(1)); - t.insert(0, actor.op_id_at(1)); - t.insert(0, actor.op_id_at(1)); - t.insert(0, actor.op_id_at(1)); - t.insert(3, actor.op_id_at(1)); - t.insert(4, actor.op_id_at(1)); + t.insert(0, 1); + t.insert(1, 1); + t.insert(0, 1); + t.insert(0, 1); + t.insert(0, 1); + t.insert(3, 1); + t.insert(4, 1); } #[test] @@ -609,79 +609,72 @@ mod tests { } } - /* - fn arb_indices() -> impl Strategy> { - proptest::collection::vec(any::(), 0..1000).prop_map(|v| { - let mut len = 0; - v.into_iter() - .map(|i| { - len += 1; - i % len - }) - .collect::>() - }) - } - */ + fn arb_indices() -> impl Strategy> { + proptest::collection::vec(any::(), 0..1000).prop_map(|v| { + let mut len = 0; + v.into_iter() + .map(|i| { + len += 1; + i % len + }) + .collect::>() + }) + } - // use proptest::prelude::*; + proptest! { - /* - proptest! 
{ + #[test] + fn proptest_insert(indices in arb_indices()) { + let mut t = SequenceTreeInternal::::new(); + let mut v = Vec::new(); - #[test] - fn proptest_insert(indices in arb_indices()) { - let mut t = SequenceTreeInternal::::new(); - let actor = ActorId::random(); - let mut v = Vec::new(); - - for i in indices{ - if i <= v.len() { - t.insert(i % 3, i); - v.insert(i % 3, i); - } else { - return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) - } - - assert_eq!(v, t.iter().copied().collect::>()) + for i in indices{ + if i <= v.len() { + t.insert(i % 3, i); + v.insert(i % 3, i); + } else { + return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) } + + assert_eq!(v, t.iter().copied().collect::>()) + } + } + + } + + proptest! { + + // This is a really slow test due to all the copying of the Vecs (i.e. not due to the + // sequencetree) so we only do a few runs + #![proptest_config(ProptestConfig::with_cases(20))] + #[test] + fn proptest_remove(inserts in arb_indices(), removes in arb_indices()) { + let mut t = SequenceTreeInternal::::new(); + let mut v = Vec::new(); + + for i in inserts { + if i <= v.len() { + t.insert(i , i); + v.insert(i , i); + } else { + return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) + } + + assert_eq!(v, t.iter().copied().collect::>()) } - } - */ - - /* - proptest! 
{ - - #[test] - fn proptest_remove(inserts in arb_indices(), removes in arb_indices()) { - let mut t = SequenceTreeInternal::::new(); - let actor = ActorId::random(); - let mut v = Vec::new(); - - for i in inserts { - if i <= v.len() { - t.insert(i , i); - v.insert(i , i); - } else { - return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) - } - - assert_eq!(v, t.iter().copied().collect::>()) + for i in removes { + if i < v.len() { + let tr = t.remove(i); + let vr = v.remove(i); + assert_eq!(tr, vr); + } else { + return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) } - for i in removes { - if i < v.len() { - let tr = t.remove(i); - let vr = v.remove(i); - assert_eq!(tr, vr); - } else { - return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) - } - - assert_eq!(v, t.iter().copied().collect::>()) - } + assert_eq!(v, t.iter().copied().collect::>()) } - } - */ + + } } From bbf729e1d6a7302726f0cb3da8522d152a997043 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 21 Nov 2022 18:11:41 +0000 Subject: [PATCH 634/730] @automerge/automerge 2.0.0 --- javascript/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/javascript/package.json b/javascript/package.json index 3d0db133..30eddba3 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.0-beta.4", + "version": "2.0.0", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", From 22d60987f69cf4fec5566af3036c7d23a5b74e75 Mon Sep 17 00:00:00 2001 From: alexjg Date: Tue, 22 Nov 2022 18:29:06 +0000 Subject: [PATCH 635/730] Dont send duplicate sync messages (#460) The API of Automerge::generate_sync_message requires that the user keep track of in flight messages themselves 
if they want to avoid sending duplicate messages. To avoid this add a flag to `automerge::sync::State` to track if there are any in flight messages and return `None` from `generate_sync_message` if there are. --- rust/automerge-wasm/src/interop.rs | 6 + rust/automerge/src/sync.rs | 282 ++++++++++++++++++++++++++++- rust/automerge/src/sync/state.rs | 10 + 3 files changed, 294 insertions(+), 4 deletions(-) diff --git a/rust/automerge-wasm/src/interop.rs b/rust/automerge-wasm/src/interop.rs index 923bc25f..84b827b7 100644 --- a/rust/automerge-wasm/src/interop.rs +++ b/rust/automerge-wasm/src/interop.rs @@ -51,6 +51,7 @@ impl From for JS { Reflect::set(&result, &"theirNeed".into(), &their_need.0).unwrap(); Reflect::set(&result, &"theirHave".into(), &their_have).unwrap(); Reflect::set(&result, &"sentHashes".into(), &sent_hashes.0).unwrap(); + Reflect::set(&result, &"inFlight".into(), &state.in_flight.into()).unwrap(); JS(result) } } @@ -178,6 +179,10 @@ impl TryFrom for am::sync::State { let their_need = js_get(&value, "theirNeed")?.into(); let their_have = js_get(&value, "theirHave")?.try_into()?; let sent_hashes = js_get(&value, "sentHashes")?.try_into()?; + let in_flight = js_get(&value, "inFlight")? 
+ .0 + .as_bool() + .ok_or_else(|| JsValue::from_str("SyncState.inFLight must be a boolean"))?; Ok(am::sync::State { shared_heads, last_sent_heads, @@ -185,6 +190,7 @@ impl TryFrom for am::sync::State { their_need, their_have, sent_hashes, + in_flight, }) } } diff --git a/rust/automerge/src/sync.rs b/rust/automerge/src/sync.rs index 71fd0719..6a206fdf 100644 --- a/rust/automerge/src/sync.rs +++ b/rust/automerge/src/sync.rs @@ -69,10 +69,6 @@ impl Automerge { false }; - if heads_unchanged && heads_equal && changes_to_send.is_empty() { - return None; - } - // deduplicate the changes to send with those we have already sent and clone it now let changes_to_send = changes_to_send .into_iter() @@ -85,6 +81,15 @@ impl Automerge { }) .collect::>(); + if heads_unchanged { + if heads_equal && changes_to_send.is_empty() { + return None; + } + if sync_state.in_flight { + return None; + } + } + sync_state.last_sent_heads = our_heads.clone(); sync_state .sent_hashes @@ -97,6 +102,7 @@ impl Automerge { changes: changes_to_send, }; + sync_state.in_flight = true; Some(sync_message) } @@ -140,12 +146,17 @@ impl Automerge { sync_state.last_sent_heads = message_heads.clone(); } + if sync_state.sent_hashes.is_empty() { + sync_state.in_flight = false; + } + let known_heads = message_heads .iter() .filter(|head| self.get_change_by_hash(head).is_some()) .collect::>(); if known_heads.len() == message_heads.len() { sync_state.shared_heads = message_heads.clone(); + sync_state.in_flight = false; // If the remote peer has lost all its data, reset our state to perform a full resync if message_heads.is_empty() { sync_state.last_sent_heads = Default::default(); @@ -462,7 +473,9 @@ mod tests { use super::*; use crate::change::gen::gen_change; use crate::storage::parse::Input; + use crate::transaction::Transactable; use crate::types::gen::gen_hash; + use crate::ActorId; use proptest::prelude::*; prop_compose! 
{ @@ -525,4 +538,265 @@ mod tests { assert_eq!(msg, decoded); } } + + #[test] + fn generate_sync_message_twice_does_nothing() { + let mut doc = crate::AutoCommit::new(); + doc.put(crate::ROOT, "key", "value").unwrap(); + let mut sync_state = State::new(); + + assert!(doc.generate_sync_message(&mut sync_state).is_some()); + assert!(doc.generate_sync_message(&mut sync_state).is_none()); + } + + #[test] + fn should_not_reply_if_we_have_no_data() { + let mut doc1 = crate::AutoCommit::new(); + let mut doc2 = crate::AutoCommit::new(); + let mut s1 = State::new(); + let mut s2 = State::new(); + let m1 = doc1 + .generate_sync_message(&mut s1) + .expect("message was none"); + + doc2.receive_sync_message(&mut s2, m1).unwrap(); + let m2 = doc2.generate_sync_message(&mut s2); + assert!(m2.is_none()); + } + + #[test] + fn should_allow_simultaneous_messages_during_synchronisation() { + // create & synchronize two nodes + let mut doc1 = crate::AutoCommit::new().with_actor(ActorId::try_from("abc123").unwrap()); + let mut doc2 = crate::AutoCommit::new().with_actor(ActorId::try_from("def456").unwrap()); + let mut s1 = State::new(); + let mut s2 = State::new(); + + for i in 0..5 { + doc1.put(&crate::ROOT, "x", i).unwrap(); + doc1.commit(); + doc2.put(&crate::ROOT, "y", i).unwrap(); + doc2.commit(); + } + + let head1 = doc1.get_heads()[0]; + let head2 = doc2.get_heads()[0]; + + //// both sides report what they have but have no shared peer state + let msg1to2 = doc1 + .generate_sync_message(&mut s1) + .expect("initial sync from 1 to 2 was None"); + let msg2to1 = doc2 + .generate_sync_message(&mut s2) + .expect("initial sync message from 2 to 1 was None"); + assert_eq!(msg1to2.changes.len(), 0); + assert_eq!(msg1to2.have[0].last_sync.len(), 0); + assert_eq!(msg2to1.changes.len(), 0); + assert_eq!(msg2to1.have[0].last_sync.len(), 0); + + //// doc1 and doc2 receive that message and update sync state + doc1.receive_sync_message(&mut s1, msg2to1).unwrap(); + doc2.receive_sync_message(&mut 
s2, msg1to2).unwrap(); + + //// now both reply with their local changes the other lacks + //// (standard warning that 1% of the time this will result in a "need" message) + let msg1to2 = doc1 + .generate_sync_message(&mut s1) + .expect("first reply from 1 to 2 was None"); + assert_eq!(msg1to2.changes.len(), 5); + + let msg2to1 = doc2 + .generate_sync_message(&mut s2) + .expect("first reply from 2 to 1 was None"); + assert_eq!(msg2to1.changes.len(), 5); + + //// both should now apply the changes + doc1.receive_sync_message(&mut s1, msg2to1).unwrap(); + assert_eq!(doc1.get_missing_deps(&[]), Vec::new()); + + doc2.receive_sync_message(&mut s2, msg1to2).unwrap(); + assert_eq!(doc2.get_missing_deps(&[]), Vec::new()); + + //// The response acknowledges the changes received and sends no further changes + let msg1to2 = doc1 + .generate_sync_message(&mut s1) + .expect("second reply from 1 to 2 was None"); + assert_eq!(msg1to2.changes.len(), 0); + let msg2to1 = doc2 + .generate_sync_message(&mut s2) + .expect("second reply from 2 to 1 was None"); + assert_eq!(msg2to1.changes.len(), 0); + + //// After receiving acknowledgements, their shared heads should be equal + doc1.receive_sync_message(&mut s1, msg2to1).unwrap(); + doc2.receive_sync_message(&mut s2, msg1to2).unwrap(); + + assert_eq!(s1.shared_heads, s2.shared_heads); + + //// We're in sync, no more messages required + assert!(doc1.generate_sync_message(&mut s1).is_none()); + assert!(doc2.generate_sync_message(&mut s2).is_none()); + + //// If we make one more change and start another sync then its lastSync should be updated + doc1.put(crate::ROOT, "x", 5).unwrap(); + doc1.commit(); + let msg1to2 = doc1 + .generate_sync_message(&mut s1) + .expect("third reply from 1 to 2 was None"); + let mut expected_heads = vec![head1, head2]; + expected_heads.sort(); + let mut actual_heads = msg1to2.have[0].last_sync.clone(); + actual_heads.sort(); + assert_eq!(actual_heads, expected_heads); + } + + #[test] + fn 
should_handle_false_positive_head() { + // Scenario: ,-- n1 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- n2 + // where n2 is a false positive in the Bloom filter containing {n1}. + // lastSync is c9. + + let mut doc1 = crate::AutoCommit::new().with_actor(ActorId::try_from("abc123").unwrap()); + let mut doc2 = crate::AutoCommit::new().with_actor(ActorId::try_from("def456").unwrap()); + let mut s1 = State::new(); + let mut s2 = State::new(); + + for i in 0..10 { + doc1.put(crate::ROOT, "x", i).unwrap(); + doc1.commit(); + } + + sync(&mut doc1, &mut doc2, &mut s1, &mut s2); + + // search for false positive; see comment above + let mut i = 0; + let (mut doc1, mut doc2) = loop { + let mut doc1copy = doc1 + .clone() + .with_actor(ActorId::try_from("01234567").unwrap()); + let val1 = format!("{} @ n1", i); + doc1copy.put(crate::ROOT, "x", val1).unwrap(); + doc1copy.commit(); + + let mut doc2copy = doc1 + .clone() + .with_actor(ActorId::try_from("89abcdef").unwrap()); + let val2 = format!("{} @ n2", i); + doc2copy.put(crate::ROOT, "x", val2).unwrap(); + doc2copy.commit(); + + let n1_bloom = BloomFilter::from_hashes(doc1copy.get_heads().into_iter()); + if n1_bloom.contains_hash(&doc2copy.get_heads()[0]) { + break (doc1copy, doc2copy); + } + i += 1; + }; + + let mut all_heads = doc1.get_heads(); + all_heads.extend(doc2.get_heads()); + all_heads.sort(); + + // reset sync states + let (_, mut s1) = State::parse(Input::new(s1.encode().as_slice())).unwrap(); + let (_, mut s2) = State::parse(Input::new(s2.encode().as_slice())).unwrap(); + sync(&mut doc1, &mut doc2, &mut s1, &mut s2); + assert_eq!(doc1.get_heads(), all_heads); + assert_eq!(doc2.get_heads(), all_heads); + } + + #[test] + fn should_handle_chains_of_false_positives() { + //// Scenario: ,-- c5 + //// c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ + //// `-- n2c1 <-- n2c2 <-- n2c3 + //// where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. + //// lastSync is c4. 
+ let mut doc1 = crate::AutoCommit::new().with_actor(ActorId::try_from("abc123").unwrap()); + let mut doc2 = crate::AutoCommit::new().with_actor(ActorId::try_from("def456").unwrap()); + let mut s1 = State::new(); + let mut s2 = State::new(); + + for i in 0..10 { + doc1.put(crate::ROOT, "x", i).unwrap(); + doc1.commit(); + } + + sync(&mut doc1, &mut doc2, &mut s1, &mut s2); + + doc1.put(crate::ROOT, "x", 5).unwrap(); + doc1.commit(); + let bloom = BloomFilter::from_hashes(doc1.get_heads().into_iter()); + + // search for false positive; see comment above + let mut i = 0; + let mut doc2 = loop { + let mut doc = doc2 + .fork() + .with_actor(ActorId::try_from("89abcdef").unwrap()); + doc.put(crate::ROOT, "x", format!("{} at 89abdef", i)) + .unwrap(); + doc.commit(); + if bloom.contains_hash(&doc.get_heads()[0]) { + break doc; + } + i += 1; + }; + + // find another false positive building on the first + i = 0; + let mut doc2 = loop { + let mut doc = doc2 + .fork() + .with_actor(ActorId::try_from("89abcdef").unwrap()); + doc.put(crate::ROOT, "x", format!("{} again", i)).unwrap(); + doc.commit(); + if bloom.contains_hash(&doc.get_heads()[0]) { + break doc; + } + i += 1; + }; + + doc2.put(crate::ROOT, "x", "final @ 89abcdef").unwrap(); + + let mut all_heads = doc1.get_heads(); + all_heads.extend(doc2.get_heads()); + all_heads.sort(); + + let (_, mut s1) = State::parse(Input::new(s1.encode().as_slice())).unwrap(); + let (_, mut s2) = State::parse(Input::new(s2.encode().as_slice())).unwrap(); + sync(&mut doc1, &mut doc2, &mut s1, &mut s2); + assert_eq!(doc1.get_heads(), all_heads); + assert_eq!(doc2.get_heads(), all_heads); + } + + fn sync( + a: &mut crate::AutoCommit, + b: &mut crate::AutoCommit, + a_sync_state: &mut State, + b_sync_state: &mut State, + ) { + //function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { + const MAX_ITER: usize = 10; + let mut iterations = 0; + + loop { + let a_to_b = 
a.generate_sync_message(a_sync_state); + let b_to_a = b.generate_sync_message(b_sync_state); + if a_to_b.is_none() && b_to_a.is_none() { + break; + } + if iterations > MAX_ITER { + panic!("failed to sync in {} iterations", MAX_ITER); + } + if let Some(msg) = a_to_b { + b.receive_sync_message(b_sync_state, msg).unwrap() + } + if let Some(msg) = b_to_a { + a.receive_sync_message(a_sync_state, msg).unwrap() + } + iterations += 1; + } + } } diff --git a/rust/automerge/src/sync/state.rs b/rust/automerge/src/sync/state.rs index ad7e2c2c..00775196 100644 --- a/rust/automerge/src/sync/state.rs +++ b/rust/automerge/src/sync/state.rs @@ -31,6 +31,15 @@ pub struct State { pub their_need: Option>, pub their_have: Option>, pub sent_hashes: BTreeSet, + + /// `generate_sync_message` should return `None` if there are no new changes to send. In + /// particular, if there are changes in flight which the other end has not yet acknowledged we + /// do not wish to generate duplicate sync messages. This field tracks whether the changes we + /// expect to send to the peer based on this sync state have been sent or not. If + /// `in_flight` is `false` then `generate_sync_message` will return a new message (provided + /// there are in fact changes to send). If it is `true` then we don't. This flag is cleared + /// in `receive_sync_message`. + pub in_flight: bool, } /// A summary of the changes that the sender of the message already has. 
@@ -84,6 +93,7 @@ impl State { their_need: None, their_have: Some(Vec::new()), sent_hashes: BTreeSet::new(), + in_flight: false, }, )) } From 01350c2b3fadc8560544801c5c1eea8a9fdb4703 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 22 Nov 2022 19:37:01 +0000 Subject: [PATCH 636/730] automerge-wasm@0.1.18 and automerge@2.0.1-alpha.1 --- javascript/package.json | 4 ++-- rust/automerge-wasm/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/javascript/package.json b/javascript/package.json index 30eddba3..b68674c9 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.0", + "version": "2.0.1-alpha.1", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", @@ -59,7 +59,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.17", + "@automerge/automerge-wasm": "0.1.18", "uuid": "^8.3" } } diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 908bf01d..9a98ad32 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.17", + "version": "0.1.18", "license": "MIT", "files": [ "README.md", From 484a5bac4f0ea93231ceff786b90de9c63cd9e60 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sun, 27 Nov 2022 16:39:02 +0000 Subject: [PATCH 637/730] rust: Add Transactable::base_heads Sometimes it is necessary to query the heads of a document at the time a transaction started without having a mutable reference to the transactable. Add `Transactable::base_heads` to do this. 
--- rust/automerge/src/autocommit.rs | 4 ++++ rust/automerge/src/transaction/manual_transaction.rs | 4 ++++ rust/automerge/src/transaction/transactable.rs | 3 +++ 3 files changed, 11 insertions(+) diff --git a/rust/automerge/src/autocommit.rs b/rust/automerge/src/autocommit.rs index a1c598d9..f49871aa 100644 --- a/rust/automerge/src/autocommit.rs +++ b/rust/automerge/src/autocommit.rs @@ -521,4 +521,8 @@ impl Transactable for AutoCommitWithObs { fn parents>(&self, obj: O) -> Result, AutomergeError> { self.doc.parents(obj) } + + fn base_heads(&self) -> Vec { + self.doc.get_heads() + } } diff --git a/rust/automerge/src/transaction/manual_transaction.rs b/rust/automerge/src/transaction/manual_transaction.rs index ae23e36c..c5977020 100644 --- a/rust/automerge/src/transaction/manual_transaction.rs +++ b/rust/automerge/src/transaction/manual_transaction.rs @@ -282,6 +282,10 @@ impl<'a, Obs: observation::Observation> Transactable for Transaction<'a, Obs> { fn parents>(&self, obj: O) -> Result, AutomergeError> { self.doc.parents(obj) } + + fn base_heads(&self) -> Vec { + self.doc.get_heads() + } } // If a transaction is not commited or rolled back manually then it can leave the document in an diff --git a/rust/automerge/src/transaction/transactable.rs b/rust/automerge/src/transaction/transactable.rs index 0c7f6c45..bf4e2fe5 100644 --- a/rust/automerge/src/transaction/transactable.rs +++ b/rust/automerge/src/transaction/transactable.rs @@ -197,4 +197,7 @@ pub trait Transactable { path.reverse(); Ok(path) } + + /// The heads this transaction will be based on + fn base_heads(&self) -> Vec; } From ed108ba6fc7823bbec6a4701e11931ba8f3126db Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sun, 27 Nov 2022 16:43:04 +0000 Subject: [PATCH 638/730] rust:automerge:0.2.0 --- rust/automerge/Cargo.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/rust/automerge/Cargo.toml b/rust/automerge/Cargo.toml index cc74e708..8872dcdc 100644 --- a/rust/automerge/Cargo.toml +++ 
b/rust/automerge/Cargo.toml @@ -1,13 +1,12 @@ [package] name = "automerge" -version = "0.1.0" +version = "0.2.0" edition = "2021" license = "MIT" repository = "https://github.com/automerge/automerge-rs" documentation = "https://automerge.org/automerge-rs/automerge/" rust-version = "1.57.0" description = "A JSON-like data structure (a CRDT) that can be modified concurrently by different users, and merged again automatically" -readme = "../README.md" [features] optree-visualisation = ["dot", "rand"] From d26cb0c0cb9bd2ccf8c5a1981fec760ad211871b Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sun, 27 Nov 2022 16:54:00 +0000 Subject: [PATCH 639/730] rust:automerge-test:0.1.0 --- rust/automerge-test/Cargo.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/rust/automerge-test/Cargo.toml b/rust/automerge-test/Cargo.toml index 0defda79..4fba0379 100644 --- a/rust/automerge-test/Cargo.toml +++ b/rust/automerge-test/Cargo.toml @@ -6,12 +6,11 @@ license = "MIT" repository = "https://github.com/automerge/automerge-rs" rust-version = "1.57.0" description = "Utilities for testing automerge libraries" -readme = "../README.md" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -automerge = { path = "../automerge"} +automerge = { version = "^0.2", path = "../automerge" } smol_str = { version = "^0.1.21", features=["serde"] } serde = { version = "^1.0", features=["derive"] } decorum = "0.3.1" From a324b02005ae465a4d05a5bf9cc858c8911c0a30 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Fri, 18 Nov 2022 09:06:01 -0800 Subject: [PATCH 640/730] Added `automerge::AutomergeError::InvalidActorId`. Added `automerge::AutomergeError::InvalidCharacter`. Alphabetized the `automerge::AutomergeError` variants. 
--- rust/automerge/src/error.rs | 48 ++++++++++++++++++++----------------- 1 file changed, 26 insertions(+), 22 deletions(-) diff --git a/rust/automerge/src/error.rs b/rust/automerge/src/error.rs index 406b5d2b..7bedff2e 100644 --- a/rust/automerge/src/error.rs +++ b/rust/automerge/src/error.rs @@ -6,41 +6,45 @@ use thiserror::Error; #[derive(Error, Debug)] pub enum AutomergeError { - #[error("id was not an object id")] - NotAnObject, - #[error("invalid obj id format `{0}`")] - InvalidObjIdFormat(String), - #[error("invalid obj id `{0}`")] - InvalidObjId(String), - #[error("key must not be an empty string")] - EmptyStringKey, - #[error("invalid seq {0}")] - InvalidSeq(u64), - #[error("index {0} is out of bounds")] - InvalidIndex(usize), + #[error(transparent)] + Clocks(#[from] crate::clocks::MissingDep), + #[error("failed to load compressed data: {0}")] + Deflate(#[source] std::io::Error), #[error("duplicate seq {0} found for actor {1}")] DuplicateSeqNumber(u64, ActorId), + #[error("key must not be an empty string")] + EmptyStringKey, + #[error("general failure")] + Fail, + #[error("invalid actor ID `{0}`")] + InvalidActorId(String), + #[error("invalid UTF-8 character at {0}")] + InvalidCharacter(usize), #[error("invalid hash {0}")] InvalidHash(ChangeHash), - #[error("hash {0} does not correspond to a change in this document")] - MissingHash(ChangeHash), - #[error("increment operations must be against a counter value")] - MissingCounter, + #[error("invalid seq {0}")] + InvalidIndex(usize), + #[error("invalid obj id `{0}`")] + InvalidObjId(String), + #[error("invalid obj id format `{0}`")] + InvalidObjIdFormat(String), + #[error("invalid seq {0}")] + InvalidSeq(u64), #[error("invalid type of value, expected `{expected}` but received `{unexpected}`")] InvalidValueType { expected: String, unexpected: String, }, - #[error("general failure")] - Fail, #[error(transparent)] Load(#[from] LoadError), - #[error("failed to load compressed data: {0}")] - Deflate(#[source] 
std::io::Error), + #[error("increment operations must be against a counter value")] + MissingCounter, + #[error("hash {0} does not correspond to a change in this document")] + MissingHash(ChangeHash), #[error("compressed chunk was not a change")] NonChangeCompressed, - #[error(transparent)] - Clocks(#[from] crate::clocks::MissingDep), + #[error("id was not an object id")] + NotAnObject, } #[cfg(feature = "wasm")] From 3e2e697504436d6495a8f214c4e0b7998bf8c76f Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Fri, 18 Nov 2022 09:28:32 -0800 Subject: [PATCH 641/730] Replaced C string (`*const libc::c_char`) values with UTF-8 string view (`AMbyteSpan`) values except with the `AMresult::Error` variant. Added `AMstr()` for creating an `AMbyteSpan` from a C string. --- rust/automerge-c/src/actor_id.rs | 61 +- rust/automerge-c/src/byte_span.rs | 65 +- rust/automerge-c/src/change.rs | 51 +- rust/automerge-c/src/doc.rs | 45 +- rust/automerge-c/src/doc/list.rs | 33 +- rust/automerge-c/src/doc/list/item.rs | 11 +- rust/automerge-c/src/doc/map.rs | 180 ++-- rust/automerge-c/src/doc/map/item.rs | 25 +- rust/automerge-c/src/obj/item.rs | 13 +- rust/automerge-c/src/result.rs | 76 +- rust/automerge-c/src/strs.rs | 73 +- rust/automerge-c/test/actor_id_tests.c | 35 +- rust/automerge-c/test/doc_tests.c | 131 ++- rust/automerge-c/test/list_tests.c | 135 ++- rust/automerge-c/test/map_tests.c | 562 +++++++---- .../test/ported_wasm/basic_tests.c | 893 ++++++++++-------- .../automerge-c/test/ported_wasm/sync_tests.c | 186 ++-- 17 files changed, 1563 insertions(+), 1012 deletions(-) diff --git a/rust/automerge-c/src/actor_id.rs b/rust/automerge-c/src/actor_id.rs index e5f75856..6467ddea 100644 --- a/rust/automerge-c/src/actor_id.rs +++ b/rust/automerge-c/src/actor_id.rs @@ -1,38 +1,48 @@ use automerge as am; use std::cell::RefCell; use std::cmp::Ordering; -use std::ffi::{CStr, CString}; -use std::os::raw::c_char; use std::str::FromStr; use crate::byte_span::AMbyteSpan; use 
crate::result::{to_result, AMresult}; +macro_rules! to_actor_id { + ($handle:expr) => {{ + let handle = $handle.as_ref(); + match handle { + Some(b) => b, + None => return AMresult::err("Invalid AMactorId pointer").into(), + } + }}; +} + +pub(crate) use to_actor_id; + /// \struct AMactorId /// \installed_headerfile /// \brief An actor's unique identifier. #[derive(Eq, PartialEq)] pub struct AMactorId { body: *const am::ActorId, - c_str: RefCell>, + hex_str: RefCell>>, } impl AMactorId { pub fn new(actor_id: &am::ActorId) -> Self { Self { body: actor_id, - c_str: Default::default(), + hex_str: Default::default(), } } - pub fn as_c_str(&self) -> *const c_char { - let mut c_str = self.c_str.borrow_mut(); - match c_str.as_mut() { + pub fn as_hex_str(&self) -> AMbyteSpan { + let mut hex_str = self.hex_str.borrow_mut(); + match hex_str.as_mut() { None => { - let hex_str = unsafe { (*self.body).to_hex_string() }; - c_str.insert(CString::new(hex_str).unwrap()).as_ptr() + let hex_string = unsafe { (*self.body).to_hex_string() }; + hex_str.insert(hex_string.into_boxed_str()).as_bytes().into() } - Some(hex_str) => hex_str.as_ptr(), + Some(hex_str) => hex_str.as_bytes().into() } } } @@ -57,7 +67,7 @@ impl AsRef for AMactorId { pub unsafe extern "C" fn AMactorIdBytes(actor_id: *const AMactorId) -> AMbyteSpan { match actor_id.as_ref() { Some(actor_id) => actor_id.as_ref().into(), - None => AMbyteSpan::default(), + None => Default::default(), } } @@ -118,6 +128,7 @@ pub unsafe extern "C" fn AMactorIdInit() -> *mut AMresult { /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// src must be a byte array of size `>= count` #[no_mangle] @@ -132,19 +143,27 @@ pub unsafe extern "C" fn AMactorIdInitBytes(src: *const u8, count: usize) -> *mu /// \brief Allocates a new actor identifier and initializes it from a /// hexadecimal string. /// -/// \param[in] hex_str A UTF-8 string. 
+/// \param[in] hex_str A UTF-8 string view as an `AMbyteSpan` struct. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMactorId` struct. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety -/// hex_str must be a null-terminated array of `c_char` +/// hex_str must be a valid pointer to an AMbyteSpan #[no_mangle] -pub unsafe extern "C" fn AMactorIdInitStr(hex_str: *const c_char) -> *mut AMresult { - to_result(am::ActorId::from_str( - CStr::from_ptr(hex_str).to_str().unwrap(), - )) +pub unsafe extern "C" fn AMactorIdInitStr(hex_str: AMbyteSpan) -> *mut AMresult { + use am::AutomergeError::InvalidActorId; + // use am::AutomergeError::InvalidCharacter; + + to_result(match (&hex_str).try_into() { + Ok(s) => match am::ActorId::from_str(s) { + Ok(actor_id) => Ok(actor_id), + Err(_) => Err(InvalidActorId(String::from(s))) + }, + Err(e) => Err(e), + }) } /// \memberof AMactorId @@ -152,15 +171,15 @@ pub unsafe extern "C" fn AMactorIdInitStr(hex_str: *const c_char) -> *mut AMresu /// /// \param[in] actor_id A pointer to an `AMactorId` struct. /// \pre \p actor_id `!= NULL`. -/// \return A UTF-8 string. +/// \return A UTF-8 string view as an `AMbyteSpan` struct. 
/// \internal /// /// # Safety /// actor_id must be a valid pointer to an AMactorId #[no_mangle] -pub unsafe extern "C" fn AMactorIdStr(actor_id: *const AMactorId) -> *const c_char { +pub unsafe extern "C" fn AMactorIdStr(actor_id: *const AMactorId) -> AMbyteSpan { match actor_id.as_ref() { - Some(actor_id) => actor_id.as_c_str(), - None => std::ptr::null::(), + Some(actor_id) => actor_id.as_hex_str(), + None => Default::default(), } } diff --git a/rust/automerge-c/src/byte_span.rs b/rust/automerge-c/src/byte_span.rs index a8e55065..3fcefba8 100644 --- a/rust/automerge-c/src/byte_span.rs +++ b/rust/automerge-c/src/byte_span.rs @@ -1,10 +1,24 @@ use automerge as am; +use libc::strlen; +use std::convert::TryFrom; +use std::os::raw::c_char; + +macro_rules! to_str { + ($span:expr) => {{ + let result: Result<&str, am::AutomergeError> = (&$span).try_into(); + match result { + Ok(s) => s, + Err(e) => return AMresult::err(&e.to_string()).into(), + } + }}; +} + +pub(crate) use to_str; /// \struct AMbyteSpan /// \installed_headerfile /// \brief A view onto a contiguous sequence of bytes. #[repr(C)] -#[derive(Eq, PartialEq)] pub struct AMbyteSpan { /// A pointer to an array of bytes. /// \attention NEVER CALL `free()` ON \p src! 
@@ -16,6 +30,12 @@ pub struct AMbyteSpan { pub count: usize, } +impl AMbyteSpan { + pub fn is_null(&self) -> bool { + self.src.is_null() + } +} + impl Default for AMbyteSpan { fn default() -> Self { Self { @@ -25,6 +45,22 @@ impl Default for AMbyteSpan { } } +impl PartialEq for AMbyteSpan { + fn eq(&self, other: &Self) -> bool { + if self.count != other.count { + return false; + } + else if self.src == other.src { + return true; + } + let slice = unsafe { std::slice::from_raw_parts(self.src, self.count) }; + let other_slice = unsafe { std::slice::from_raw_parts(other.src, other.count) }; + slice == other_slice + } +} + +impl Eq for AMbyteSpan {} + impl From<&am::ActorId> for AMbyteSpan { fn from(actor: &am::ActorId) -> Self { let slice = actor.to_bytes(); @@ -45,6 +81,19 @@ impl From<&mut am::ActorId> for AMbyteSpan { } } +impl From<*const c_char> for AMbyteSpan { + fn from(cs: *const c_char) -> Self { + if !cs.is_null() { + Self { + src: cs as *const u8, + count: unsafe { strlen(cs) }, + } + } else { + Self::default() + } + } +} + impl From<&am::ChangeHash> for AMbyteSpan { fn from(change_hash: &am::ChangeHash) -> Self { Self { @@ -62,3 +111,17 @@ impl From<&[u8]> for AMbyteSpan { } } } + +impl TryFrom<&AMbyteSpan> for &str { + type Error = am::AutomergeError; + + fn try_from(span: &AMbyteSpan) -> Result { + use am::AutomergeError::InvalidCharacter; + + let slice = unsafe { std::slice::from_raw_parts(span.src, span.count) }; + match std::str::from_utf8(slice) { + Ok(str_) => Ok(str_), + Err(e) => Err(InvalidCharacter(e.valid_up_to())), + } + } +} diff --git a/rust/automerge-c/src/change.rs b/rust/automerge-c/src/change.rs index afee98ed..10326fe7 100644 --- a/rust/automerge-c/src/change.rs +++ b/rust/automerge-c/src/change.rs @@ -1,7 +1,5 @@ use automerge as am; use std::cell::RefCell; -use std::ffi::CString; -use std::os::raw::c_char; use crate::byte_span::AMbyteSpan; use crate::change_hashes::AMchangeHashes; @@ -23,43 +21,31 @@ macro_rules! 
to_change { #[derive(Eq, PartialEq)] pub struct AMchange { body: *mut am::Change, - c_msg: RefCell>, - c_changehash: RefCell>, + changehash: RefCell>, } impl AMchange { pub fn new(change: &mut am::Change) -> Self { Self { body: change, - c_msg: Default::default(), - c_changehash: Default::default(), + changehash: Default::default(), } } - pub fn message(&self) -> *const c_char { - let mut c_msg = self.c_msg.borrow_mut(); - match c_msg.as_mut() { - None => { - if let Some(message) = unsafe { (*self.body).message() } { - return c_msg - .insert(CString::new(message.as_bytes()).unwrap()) - .as_ptr(); - } - } - Some(message) => { - return message.as_ptr(); - } + pub fn message(&self) -> AMbyteSpan { + if let Some(message) = unsafe { (*self.body).message() } { + return message.as_str().as_bytes().into() } - std::ptr::null() + Default::default() } pub fn hash(&self) -> AMbyteSpan { - let mut c_changehash = self.c_changehash.borrow_mut(); - if let Some(c_changehash) = c_changehash.as_ref() { - c_changehash.into() + let mut changehash = self.changehash.borrow_mut(); + if let Some(changehash) = changehash.as_ref() { + changehash.into() } else { let hash = unsafe { (*self.body).hash() }; - let ptr = c_changehash.insert(hash); + let ptr = changehash.insert(hash); AMbyteSpan { src: ptr.0.as_ptr(), count: hash.as_ref().len(), @@ -90,6 +76,7 @@ impl AsRef for AMchange { /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// change must be a valid pointer to an AMchange #[no_mangle] @@ -130,7 +117,7 @@ pub unsafe extern "C" fn AMchangeCompress(change: *mut AMchange) { pub unsafe extern "C" fn AMchangeDeps(change: *const AMchange) -> AMchangeHashes { match change.as_ref() { Some(change) => AMchangeHashes::new(change.as_ref().deps()), - None => AMchangeHashes::default(), + None => Default::default(), } } @@ -149,7 +136,7 @@ pub unsafe extern "C" fn AMchangeExtraBytes(change: *const AMchange) -> AMbyteSp if let Some(change) = change.as_ref() { change.as_ref().extra_bytes().into() } else { - AMbyteSpan::default() + Default::default() } } @@ -164,6 +151,7 @@ pub unsafe extern "C" fn AMchangeExtraBytes(change: *const AMchange) -> AMbyteSp /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// src must be a byte array of size `>= count` #[no_mangle] @@ -187,7 +175,7 @@ pub unsafe extern "C" fn AMchangeFromBytes(src: *const u8, count: usize) -> *mut pub unsafe extern "C" fn AMchangeHash(change: *const AMchange) -> AMbyteSpan { match change.as_ref() { Some(change) => change.hash(), - None => AMbyteSpan::default(), + None => Default::default(), } } @@ -233,18 +221,18 @@ pub unsafe extern "C" fn AMchangeMaxOp(change: *const AMchange) -> u64 { /// \brief Gets the message of a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \return A UTF-8 string or `NULL`. +/// \return A UTF-8 string view as an `AMbyteSpan` struct. /// \pre \p change `!= NULL`. 
/// \internal /// /// # Safety /// change must be a valid pointer to an AMchange #[no_mangle] -pub unsafe extern "C" fn AMchangeMessage(change: *const AMchange) -> *const c_char { +pub unsafe extern "C" fn AMchangeMessage(change: *const AMchange) -> AMbyteSpan { if let Some(change) = change.as_ref() { return change.message(); }; - std::ptr::null() + Default::default() } /// \memberof AMchange @@ -338,7 +326,7 @@ pub unsafe extern "C" fn AMchangeRawBytes(change: *const AMchange) -> AMbyteSpan if let Some(change) = change.as_ref() { change.as_ref().raw_bytes().into() } else { - AMbyteSpan::default() + Default::default() } } @@ -354,6 +342,7 @@ pub unsafe extern "C" fn AMchangeRawBytes(change: *const AMchange) -> AMbyteSpan /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// src must be a byte array of size `>= count` #[no_mangle] diff --git a/rust/automerge-c/src/doc.rs b/rust/automerge-c/src/doc.rs index 4a5038a5..e9b6457c 100644 --- a/rust/automerge-c/src/doc.rs +++ b/rust/automerge-c/src/doc.rs @@ -1,11 +1,11 @@ use automerge as am; use automerge::transaction::{CommitOptions, Transactable}; use std::ops::{Deref, DerefMut}; -use std::os::raw::c_char; -use crate::actor_id::AMactorId; +use crate::actor_id::{to_actor_id, AMactorId}; +use crate::byte_span::{to_str, AMbyteSpan}; use crate::change_hashes::AMchangeHashes; -use crate::obj::{AMobjId, AMobjType}; +use crate::obj::{to_obj_id, AMobjId, AMobjType}; use crate::result::{to_result, AMresult, AMvalue}; use crate::sync::{to_sync_message, AMsyncMessage, AMsyncState}; @@ -14,8 +14,7 @@ pub mod map; pub mod utils; use crate::changes::AMchanges; -use crate::doc::utils::to_str; -use crate::doc::utils::{to_actor_id, to_doc, to_doc_mut, to_obj_id}; +use crate::doc::utils::{to_doc, to_doc_mut}; macro_rules! 
to_changes { ($handle:expr) => {{ @@ -89,6 +88,7 @@ impl DerefMut for AMdoc { /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// changes must be a valid pointer to an AMchanges. @@ -113,6 +113,7 @@ pub unsafe extern "C" fn AMapplyChanges( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc #[no_mangle] @@ -130,6 +131,7 @@ pub unsafe extern "C" fn AMclone(doc: *const AMdoc) -> *mut AMresult { /// `AMdoc` struct. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. +/// \internal /// /// # Safety /// actor_id must be a valid pointer to an AMactorId or std::ptr::null() @@ -146,7 +148,7 @@ pub unsafe extern "C" fn AMcreate(actor_id: *const AMactorId) -> *mut AMresult { /// message and/or *nix timestamp (milliseconds). /// /// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] message A UTF-8 string or `NULL`. +/// \param[in] message A UTF-8 string view as an `AMbyteSpan` struct. /// \param[in] timestamp A pointer to a 64-bit integer or `NULL`. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` /// with one element. @@ -154,18 +156,19 @@ pub unsafe extern "C" fn AMcreate(actor_id: *const AMactorId) -> *mut AMresult { /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc #[no_mangle] pub unsafe extern "C" fn AMcommit( doc: *mut AMdoc, - message: *const c_char, + message: AMbyteSpan, timestamp: *const i64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let mut options = CommitOptions::default(); if !message.is_null() { - options.set_message(to_str(message)); + options.set_message(to_str!(message)); } if let Some(timestamp) = timestamp.as_ref() { options.set_time(*timestamp); @@ -207,6 +210,7 @@ pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() @@ -232,6 +236,7 @@ pub unsafe extern "C" fn AMfork(doc: *mut AMdoc, heads: *const AMchangeHashes) - /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// sync_state must be a valid pointer to an AMsyncState @@ -279,6 +284,7 @@ pub unsafe extern "C" fn AMgetActorId(doc: *const AMdoc) -> *mut AMresult { /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// src must be a byte array of size `>= automerge::types::HASH_SIZE` @@ -306,6 +312,7 @@ pub unsafe extern "C" fn AMgetChangeByHash( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc #[no_mangle] @@ -334,6 +341,7 @@ pub unsafe extern "C" fn AMgetChanges( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc1 must be a valid pointer to an AMdoc /// doc2 must be a valid pointer to an AMdoc @@ -354,6 +362,7 @@ pub unsafe extern "C" fn AMgetChangesAdded(doc1: *mut AMdoc, doc2: *mut AMdoc) - /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc #[no_mangle] @@ -376,6 +385,7 @@ pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() @@ -403,6 +413,7 @@ pub unsafe extern "C" fn AMgetMissingDeps( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc #[no_mangle] @@ -423,6 +434,7 @@ pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -454,6 +466,7 @@ pub unsafe extern "C" fn AMkeys( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// src must be a byte array of size `>= count` #[no_mangle] @@ -477,6 +490,7 @@ pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMresult { /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// src must be a byte array of size `>= count` @@ -505,6 +519,7 @@ pub unsafe extern "C" fn AMloadIncremental( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// dest must be a valid pointer to an AMdoc /// src must be a valid pointer to an AMdoc @@ -584,6 +599,7 @@ pub unsafe extern "C" fn AMobjObjType(doc: *const AMdoc, obj_id: *const AMobjId) /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -681,6 +697,7 @@ pub unsafe extern "C" fn AMrollback(doc: *mut AMdoc) -> usize { /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc #[no_mangle] @@ -700,6 +717,7 @@ pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc #[no_mangle] @@ -719,6 +737,7 @@ pub unsafe extern "C" fn AMsaveIncremental(doc: *mut AMdoc) -> *mut AMresult { /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// actor_id must be a valid pointer to an AMactorId @@ -754,6 +773,7 @@ pub unsafe extern "C" fn AMsetActorId( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -799,7 +819,7 @@ pub unsafe extern "C" fn AMsplice( /// `SIZE_MAX` to indicate one past its end. /// \param[in] del The number of characters to delete or `SIZE_MAX` to indicate /// all of them. -/// \param[in] text A UTF-8 string. +/// \param[in] text A UTF-8 string view as an `AMbyteSpan` struct. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc `!= NULL`. /// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id`)` or \p pos `== SIZE_MAX`. @@ -807,24 +827,24 @@ pub unsafe extern "C" fn AMsplice( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// text must be a null-terminated array of `c_char` or NULL. #[no_mangle] pub unsafe extern "C" fn AMspliceText( doc: *mut AMdoc, obj_id: *const AMobjId, pos: usize, del: usize, - text: *const c_char, + text: AMbyteSpan, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let len = doc.length(obj_id); let pos = to_index!(pos, len, "pos"); let del = to_index!(del, len, "del"); - to_result(doc.splice_text(obj_id, pos, del, &to_str(text))) + to_result(doc.splice_text(obj_id, pos, del, to_str!(text))) } /// \memberof AMdoc @@ -839,6 +859,7 @@ pub unsafe extern "C" fn AMspliceText( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() diff --git a/rust/automerge-c/src/doc/list.rs b/rust/automerge-c/src/doc/list.rs index d5ad34ed..82c62952 100644 --- a/rust/automerge-c/src/doc/list.rs +++ b/rust/automerge-c/src/doc/list.rs @@ -1,9 +1,9 @@ use automerge as am; use automerge::transaction::Transactable; -use std::os::raw::c_char; +use crate::byte_span::{to_str, AMbyteSpan}; use crate::change_hashes::AMchangeHashes; -use crate::doc::{to_doc, to_doc_mut, to_obj_id, to_str, AMdoc}; +use crate::doc::{to_doc, to_doc_mut, to_obj_id, AMdoc}; use crate::obj::{to_obj_type, AMobjId, AMobjType}; use crate::result::{to_result, AMresult}; @@ -44,6 +44,7 @@ macro_rules! to_range { /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -74,6 +75,7 @@ pub unsafe extern "C" fn AMlistDelete( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -88,10 +90,10 @@ pub unsafe extern "C" fn AMlistGet( let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); let (index, _) = adjust!(index, false, doc.length(obj_id)); - match heads.as_ref() { - None => to_result(doc.get(obj_id, index)), - Some(heads) => to_result(doc.get_at(obj_id, index, heads.as_ref())), - } + to_result(match heads.as_ref() { + None => doc.get(obj_id, index), + Some(heads) => doc.get_at(obj_id, index, heads.as_ref()), + }) } /// \memberof AMdoc @@ -110,6 +112,7 @@ pub unsafe extern "C" fn AMlistGet( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -145,6 +148,7 @@ pub unsafe extern "C" fn AMlistGetAll( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -179,6 +183,7 @@ pub unsafe extern "C" fn AMlistIncrement( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -222,6 +227,7 @@ pub unsafe extern "C" fn AMlistPutBool( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -265,6 +271,7 @@ pub unsafe extern "C" fn AMlistPutBytes( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -305,6 +312,7 @@ pub unsafe extern "C" fn AMlistPutCounter( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -344,6 +352,7 @@ pub unsafe extern "C" fn AMlistPutF64( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -382,6 +391,7 @@ pub unsafe extern "C" fn AMlistPutInt( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -422,6 +432,7 @@ pub unsafe extern "C" fn AMlistPutNull( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -455,7 +466,7 @@ pub unsafe extern "C" fn AMlistPutObject( /// `== true`. /// \param[in] insert A flag to insert \p value before \p index instead of /// writing \p value over \p index. -/// \param[in] value A UTF-8 string. 
+/// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc `!= NULL`. /// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. @@ -463,6 +474,7 @@ pub unsafe extern "C" fn AMlistPutObject( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -473,12 +485,12 @@ pub unsafe extern "C" fn AMlistPutStr( obj_id: *const AMobjId, index: usize, insert: bool, - value: *const c_char, + value: AMbyteSpan, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - let value = to_str(value); + let value = to_str!(value); to_result(if insert { doc.insert(obj_id, index, value) } else { @@ -505,6 +517,7 @@ pub unsafe extern "C" fn AMlistPutStr( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -545,6 +558,7 @@ pub unsafe extern "C" fn AMlistPutTimestamp( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -584,6 +598,7 @@ pub unsafe extern "C" fn AMlistPutUint( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() diff --git a/rust/automerge-c/src/doc/list/item.rs b/rust/automerge-c/src/doc/list/item.rs index fcd6281d..0d7b2d98 100644 --- a/rust/automerge-c/src/doc/list/item.rs +++ b/rust/automerge-c/src/doc/list/item.rs @@ -1,6 +1,4 @@ use automerge as am; -use std::cell::RefCell; -use std::ffi::CString; use crate::obj::AMobjId; use crate::result::AMvalue; @@ -8,14 +6,13 @@ use crate::result::AMvalue; /// \struct AMlistItem /// \installed_headerfile /// \brief An item in a list object. -#[repr(C)] pub struct AMlistItem { /// The index of an item in a list object. index: usize, /// The object identifier of an item in a list object. obj_id: AMobjId, /// The value of an item in a list object. - value: (am::Value<'static>, RefCell>), + value: am::Value<'static>, } impl AMlistItem { @@ -23,14 +20,14 @@ impl AMlistItem { Self { index, obj_id: AMobjId::new(obj_id), - value: (value, Default::default()), + value: value, } } } impl PartialEq for AMlistItem { fn eq(&self, other: &Self) -> bool { - self.index == other.index && self.obj_id == other.obj_id && self.value.0 == other.value.0 + self.index == other.index && self.obj_id == other.obj_id && self.value == other.value } } @@ -93,7 +90,7 @@ pub unsafe extern "C" fn AMlistItemObjId(list_item: *const AMlistItem) -> *const #[no_mangle] pub unsafe extern "C" fn AMlistItemValue<'a>(list_item: *const AMlistItem) -> AMvalue<'a> { if let Some(list_item) = list_item.as_ref() { - (&list_item.value.0, &list_item.value.1).into() + (&list_item.value).into() } else { AMvalue::Void } diff --git a/rust/automerge-c/src/doc/map.rs b/rust/automerge-c/src/doc/map.rs index 2ba00c15..fbd6c1cd 100644 --- a/rust/automerge-c/src/doc/map.rs +++ b/rust/automerge-c/src/doc/map.rs @@ -1,9 +1,8 @@ use automerge as am; use automerge::transaction::Transactable; -use std::os::raw::c_char; +use crate::byte_span::{to_str, 
AMbyteSpan}; use crate::change_hashes::AMchangeHashes; -use crate::doc::utils::to_str; use crate::doc::{to_doc, to_doc_mut, to_obj_id, AMdoc}; use crate::obj::{to_obj_type, AMobjId, AMobjType}; use crate::result::{to_result, AMresult}; @@ -16,25 +15,27 @@ pub mod items; /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc `!= NULL`. /// \pre \p key `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapDelete( doc: *mut AMdoc, obj_id: *const AMobjId, - key: *const c_char, + key: AMbyteSpan, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.delete(to_obj_id!(obj_id), to_str(key))) + let key = to_str!(key); + to_result(doc.delete(to_obj_id!(obj_id), key)) } /// \memberof AMdoc @@ -42,8 +43,8 @@ pub unsafe extern "C" fn AMmapDelete( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by -/// \p obj_id. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. /// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical /// value or `NULL` for the current value. /// \return A pointer to an `AMresult` struct that doesn't contain a void. 
@@ -52,23 +53,24 @@ pub unsafe extern "C" fn AMmapDelete( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used /// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMmapGet( doc: *const AMdoc, obj_id: *const AMobjId, - key: *const c_char, + key: AMbyteSpan, heads: *const AMchangeHashes, ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); + let key = to_str!(key); match heads.as_ref() { - None => to_result(doc.get(obj_id, to_str(key))), - Some(heads) => to_result(doc.get_at(obj_id, to_str(key), heads.as_ref())), + None => to_result(doc.get(obj_id, key)), + Some(heads) => to_result(doc.get_at(obj_id, key, heads.as_ref())), } } @@ -78,8 +80,8 @@ pub unsafe extern "C" fn AMmapGet( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by -/// \p obj_id. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. /// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical /// last value or `NULL` for the current last value. /// \return A pointer to an `AMresult` struct containing an `AMobjItems` struct. @@ -88,23 +90,24 @@ pub unsafe extern "C" fn AMmapGet( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used /// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMmapGetAll( doc: *const AMdoc, obj_id: *const AMobjId, - key: *const c_char, + key: AMbyteSpan, heads: *const AMchangeHashes, ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); + let key = to_str!(key); match heads.as_ref() { - None => to_result(doc.get_all(obj_id, to_str(key))), - Some(heads) => to_result(doc.get_all_at(obj_id, to_str(key), heads.as_ref())), + None => to_result(doc.get_all(obj_id, key)), + Some(heads) => to_result(doc.get_all_at(obj_id, key, heads.as_ref())), } } @@ -113,7 +116,8 @@ pub unsafe extern "C" fn AMmapGetAll( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc `!= NULL`. @@ -121,19 +125,20 @@ pub unsafe extern "C" fn AMmapGetAll( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapIncrement( doc: *mut AMdoc, obj_id: *const AMobjId, - key: *const c_char, + key: AMbyteSpan, value: i64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.increment(to_obj_id!(obj_id), to_str(key), value)) + let key = to_str!(key); + to_result(doc.increment(to_obj_id!(obj_id), key, value)) } /// \memberof AMdoc @@ -141,7 +146,8 @@ pub unsafe extern "C" fn AMmapIncrement( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A boolean. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc `!= NULL`. @@ -149,19 +155,20 @@ pub unsafe extern "C" fn AMmapIncrement( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutBool( doc: *mut AMdoc, obj_id: *const AMobjId, - key: *const c_char, + key: AMbyteSpan, value: bool, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) + let key = to_str!(key); + to_result(doc.put(to_obj_id!(obj_id), key, value)) } /// \memberof AMdoc @@ -169,7 +176,8 @@ pub unsafe extern "C" fn AMmapPutBool( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. 
/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. /// \param[in] src A pointer to an array of bytes. /// \param[in] count The number of bytes to copy from \p src. /// \return A pointer to an `AMresult` struct containing a void. @@ -180,23 +188,24 @@ pub unsafe extern "C" fn AMmapPutBool( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used /// src must be a byte array of size `>= count` #[no_mangle] pub unsafe extern "C" fn AMmapPutBytes( doc: *mut AMdoc, obj_id: *const AMobjId, - key: *const c_char, + key: AMbyteSpan, src: *const u8, count: usize, ) -> *mut AMresult { let doc = to_doc_mut!(doc); + let key = to_str!(key); let mut vec = Vec::new(); vec.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), vec)) + to_result(doc.put(to_obj_id!(obj_id), key, vec)) } /// \memberof AMdoc @@ -204,7 +213,8 @@ pub unsafe extern "C" fn AMmapPutBytes( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc `!= NULL`. 
@@ -212,21 +222,22 @@ pub unsafe extern "C" fn AMmapPutBytes( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutCounter( doc: *mut AMdoc, obj_id: *const AMobjId, - key: *const c_char, + key: AMbyteSpan, value: i64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); + let key = to_str!(key); to_result(doc.put( to_obj_id!(obj_id), - to_str(key), + key, am::ScalarValue::Counter(value.into()), )) } @@ -236,25 +247,27 @@ pub unsafe extern "C" fn AMmapPutCounter( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc `!= NULL`. /// \pre \p key `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutNull( doc: *mut AMdoc, obj_id: *const AMobjId, - key: *const c_char, + key: AMbyteSpan, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), ())) + let key = to_str!(key); + to_result(doc.put(to_obj_id!(obj_id), key, ())) } /// \memberof AMdoc @@ -262,7 +275,8 @@ pub unsafe extern "C" fn AMmapPutNull( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. 
/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. /// \param[in] obj_type An `AMobjIdType` enum tag. /// \return A pointer to an `AMresult` struct containing a pointer to an /// `AMobjId` struct. @@ -272,19 +286,20 @@ pub unsafe extern "C" fn AMmapPutNull( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutObject( doc: *mut AMdoc, obj_id: *const AMobjId, - key: *const c_char, + key: AMbyteSpan, obj_type: AMobjType, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.put_object(to_obj_id!(obj_id), to_str(key), to_obj_type!(obj_type))) + let key = to_str!(key); + to_result(doc.put_object(to_obj_id!(obj_id), key, to_obj_type!(obj_type))) } /// \memberof AMdoc @@ -292,7 +307,8 @@ pub unsafe extern "C" fn AMmapPutObject( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A 64-bit float. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc `!= NULL`. @@ -300,19 +316,20 @@ pub unsafe extern "C" fn AMmapPutObject( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutF64( doc: *mut AMdoc, obj_id: *const AMobjId, - key: *const c_char, + key: AMbyteSpan, value: f64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) + let key = to_str!(key); + to_result(doc.put(to_obj_id!(obj_id), key, value)) } /// \memberof AMdoc @@ -320,7 +337,8 @@ pub unsafe extern "C" fn AMmapPutF64( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc `!= NULL`. @@ -328,19 +346,20 @@ pub unsafe extern "C" fn AMmapPutF64( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutInt( doc: *mut AMdoc, obj_id: *const AMobjId, - key: *const c_char, + key: AMbyteSpan, value: i64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) + let key = to_str!(key); + to_result(doc.put(to_obj_id!(obj_id), key, value)) } /// \memberof AMdoc @@ -348,29 +367,28 @@ pub unsafe extern "C" fn AMmapPutInt( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. 
/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A UTF-8 string. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. +/// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc `!= NULL`. /// \pre \p key `!= NULL`. -/// \pre \p value `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -/// value must be a null-terminated array of `c_char` #[no_mangle] pub unsafe extern "C" fn AMmapPutStr( doc: *mut AMdoc, obj_id: *const AMobjId, - key: *const c_char, - value: *const c_char, + key: AMbyteSpan, + value: AMbyteSpan, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), to_str(value))) + to_result(doc.put(to_obj_id!(obj_id), to_str!(key), to_str!(value))) } /// \memberof AMdoc @@ -379,7 +397,8 @@ pub unsafe extern "C" fn AMmapPutStr( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A 64-bit signed integer. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc `!= NULL`. @@ -387,21 +406,22 @@ pub unsafe extern "C" fn AMmapPutStr( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutTimestamp( doc: *mut AMdoc, obj_id: *const AMobjId, - key: *const c_char, + key: AMbyteSpan, value: i64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); + let key = to_str!(key); to_result(doc.put( to_obj_id!(obj_id), - to_str(key), + key, am::ScalarValue::Timestamp(value), )) } @@ -411,7 +431,8 @@ pub unsafe extern "C" fn AMmapPutTimestamp( /// /// \param[in,out] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A 64-bit unsigned integer. /// \return A pointer to an `AMresult` struct containing a void. /// \pre \p doc `!= NULL`. @@ -419,19 +440,20 @@ pub unsafe extern "C" fn AMmapPutTimestamp( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used #[no_mangle] pub unsafe extern "C" fn AMmapPutUint( doc: *mut AMdoc, obj_id: *const AMobjId, - key: *const c_char, + key: AMbyteSpan, value: u64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) + let key = to_str!(key); + to_result(doc.put(to_obj_id!(obj_id), key, value)) } /// \memberof AMdoc @@ -440,19 +462,19 @@ pub unsafe extern "C" fn AMmapPutUint( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. 
-/// \param[in] begin The first key in a subrange or `NULL` to indicate the +/// \param[in] begin The first key in a subrange or `AMstr(NULL)` to indicate the /// absolute first key. -/// \param[in] end The key one past the last key in a subrange or `NULL` to +/// \param[in] end The key one past the last key in a subrange or `AMstr(NULL)` to /// indicate one past the absolute last key. /// \param[in] heads A pointer to an `AMchangeHashes` struct for historical /// keys and values or `NULL` for current keys and values. /// \return A pointer to an `AMresult` struct containing an `AMmapItems` /// struct. /// \pre \p doc `!= NULL`. -/// \pre `strcmp(`\p begin, \p end`) != 1` if \p begin `!= NULL` and \p end `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() @@ -461,15 +483,15 @@ pub unsafe extern "C" fn AMmapPutUint( pub unsafe extern "C" fn AMmapRange( doc: *const AMdoc, obj_id: *const AMobjId, - begin: *const c_char, - end: *const c_char, + begin: AMbyteSpan, + end: AMbyteSpan, heads: *const AMchangeHashes, ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); - match (begin.as_ref(), end.as_ref()) { - (Some(_), Some(_)) => { - let (begin, end) = (to_str(begin), to_str(end)); + match (begin.is_null(), end.is_null()) { + (false, false) => { + let (begin, end) = (to_str!(begin).to_string(), to_str!(end).to_string()); if begin > end { return AMresult::err(&format!("Invalid range [{}-{})", begin, end)).into(); }; @@ -480,23 +502,23 @@ pub unsafe extern "C" fn AMmapRange( to_result(doc.map_range(obj_id, bounds)) } } - (Some(_), None) => { - let bounds = to_str(begin)..; + (false, true) => { + let bounds = to_str!(begin).to_string()..; if let Some(heads) = heads.as_ref() { to_result(doc.map_range_at(obj_id, bounds, 
heads.as_ref())) } else { to_result(doc.map_range(obj_id, bounds)) } } - (None, Some(_)) => { - let bounds = ..to_str(end); + (true, false) => { + let bounds = ..to_str!(end).to_string(); if let Some(heads) = heads.as_ref() { to_result(doc.map_range_at(obj_id, bounds, heads.as_ref())) } else { to_result(doc.map_range(obj_id, bounds)) } } - (None, None) => { + (true, true) => { let bounds = ..; if let Some(heads) = heads.as_ref() { to_result(doc.map_range_at(obj_id, bounds, heads.as_ref())) diff --git a/rust/automerge-c/src/doc/map/item.rs b/rust/automerge-c/src/doc/map/item.rs index 0d10f3c3..b206f23e 100644 --- a/rust/automerge-c/src/doc/map/item.rs +++ b/rust/automerge-c/src/doc/map/item.rs @@ -1,37 +1,34 @@ use automerge as am; -use std::cell::RefCell; -use std::ffi::CString; -use std::os::raw::c_char; +use crate::byte_span::AMbyteSpan; use crate::obj::AMobjId; use crate::result::AMvalue; /// \struct AMmapItem /// \installed_headerfile /// \brief An item in a map object. -#[repr(C)] pub struct AMmapItem { /// The key of an item in a map object. - key: CString, + key: String, /// The object identifier of an item in a map object. obj_id: AMobjId, /// The value of an item in a map object. - value: (am::Value<'static>, RefCell>), + value: am::Value<'static>, } impl AMmapItem { pub fn new(key: &'static str, value: am::Value<'static>, obj_id: am::ObjId) -> Self { Self { - key: CString::new(key).unwrap(), + key: key.to_string(), obj_id: AMobjId::new(obj_id), - value: (value, Default::default()), + value: value, } } } impl PartialEq for AMmapItem { fn eq(&self, other: &Self) -> bool { - self.key == other.key && self.obj_id == other.obj_id && self.value.0 == other.value.0 + self.key == other.key && self.obj_id == other.obj_id && self.value == other.value } } @@ -47,18 +44,18 @@ impl From<&AMmapItem> for (String, am::Value<'static>, am::ObjId) { /// \brief Gets the key of an item in a map object. /// /// \param[in] map_item A pointer to an `AMmapItem` struct. 
-/// \return A 64-bit unsigned integer. +/// \return An `AMbyteSpan` view of a UTF-8 string. /// \pre \p map_item `!= NULL`. /// \internal /// /// # Safety /// map_item must be a valid pointer to an AMmapItem #[no_mangle] -pub unsafe extern "C" fn AMmapItemKey(map_item: *const AMmapItem) -> *const c_char { +pub unsafe extern "C" fn AMmapItemKey(map_item: *const AMmapItem) -> AMbyteSpan { if let Some(map_item) = map_item.as_ref() { - map_item.key.as_ptr() + map_item.key.as_bytes().into() } else { - std::ptr::null() + Default::default() } } @@ -94,7 +91,7 @@ pub unsafe extern "C" fn AMmapItemObjId(map_item: *const AMmapItem) -> *const AM #[no_mangle] pub unsafe extern "C" fn AMmapItemValue<'a>(map_item: *const AMmapItem) -> AMvalue<'a> { if let Some(map_item) = map_item.as_ref() { - (&map_item.value.0, &map_item.value.1).into() + (&map_item.value).into() } else { AMvalue::Void } diff --git a/rust/automerge-c/src/obj/item.rs b/rust/automerge-c/src/obj/item.rs index 84bc0fd1..acac0893 100644 --- a/rust/automerge-c/src/obj/item.rs +++ b/rust/automerge-c/src/obj/item.rs @@ -1,6 +1,4 @@ use automerge as am; -use std::cell::RefCell; -use std::ffi::CString; use crate::obj::AMobjId; use crate::result::AMvalue; @@ -8,32 +6,31 @@ use crate::result::AMvalue; /// \struct AMobjItem /// \installed_headerfile /// \brief An item in an object. -#[repr(C)] pub struct AMobjItem { /// The object identifier of an item in an object. obj_id: AMobjId, /// The value of an item in an object. 
- value: (am::Value<'static>, RefCell>), + value: am::Value<'static>, } impl AMobjItem { pub fn new(value: am::Value<'static>, obj_id: am::ObjId) -> Self { Self { obj_id: AMobjId::new(obj_id), - value: (value, Default::default()), + value: value, } } } impl PartialEq for AMobjItem { fn eq(&self, other: &Self) -> bool { - self.obj_id == other.obj_id && self.value.0 == other.value.0 + self.obj_id == other.obj_id && self.value == other.value } } impl From<&AMobjItem> for (am::Value<'static>, am::ObjId) { fn from(obj_item: &AMobjItem) -> Self { - (obj_item.value.0.clone(), obj_item.obj_id.as_ref().clone()) + (obj_item.value.clone(), obj_item.obj_id.as_ref().clone()) } } @@ -69,7 +66,7 @@ pub unsafe extern "C" fn AMobjItemObjId(obj_item: *const AMobjItem) -> *const AM #[no_mangle] pub unsafe extern "C" fn AMobjItemValue<'a>(obj_item: *const AMobjItem) -> AMvalue<'a> { if let Some(obj_item) = obj_item.as_ref() { - (&obj_item.value.0, &obj_item.value.1).into() + (&obj_item.value).into() } else { AMvalue::Void } diff --git a/rust/automerge-c/src/result.rs b/rust/automerge-c/src/result.rs index 65f7f98f..29fb2f36 100644 --- a/rust/automerge-c/src/result.rs +++ b/rust/automerge-c/src/result.rs @@ -1,8 +1,7 @@ use automerge as am; -use libc::strcmp; + use smol_str::SmolStr; use std::any::type_name; -use std::cell::RefCell; use std::collections::BTreeMap; use std::ffi::CString; use std::ops::{Range, RangeFrom, RangeFull, RangeTo}; @@ -15,7 +14,6 @@ use crate::change_hashes::AMchangeHashes; use crate::changes::AMchanges; use crate::doc::list::{item::AMlistItem, items::AMlistItems}; use crate::doc::map::{item::AMmapItem, items::AMmapItems}; -use crate::doc::utils::to_str; use crate::doc::AMdoc; use crate::obj::item::AMobjItem; use crate::obj::items::AMobjItems; @@ -70,7 +68,7 @@ use crate::sync::{AMsyncMessage, AMsyncState}; /// A sequence of object items as an `AMobjItems` struct. /// /// \var AMvalue::str -/// A UTF-8 string. +/// A UTF-8 string view as an `AMbyteSpan` struct. 
/// /// \var AMvalue::strs /// A sequence of UTF-8 strings as an `AMstrs` struct. @@ -125,9 +123,9 @@ pub enum AMvalue<'a> { ObjId(&'a AMobjId), /// An object items variant. ObjItems(AMobjItems), - /// A UTF-8 string variant. - Str(*const libc::c_char), - /// A UTF-8 strings variant. + /// A UTF-8 string view variant. + Str(AMbyteSpan), + /// A UTF-8 string views variant. Strs(AMstrs), /// A synchronization message variant. SyncMessage(&'a AMsyncMessage), @@ -159,7 +157,7 @@ impl<'a> PartialEq for AMvalue<'a> { (MapItems(lhs), MapItems(rhs)) => lhs == rhs, (ObjId(lhs), ObjId(rhs)) => *lhs == *rhs, (ObjItems(lhs), ObjItems(rhs)) => lhs == rhs, - (Str(lhs), Str(rhs)) => unsafe { strcmp(*lhs, *rhs) == 0 }, + (Str(lhs), Str(rhs)) => lhs == rhs, (Strs(lhs), Strs(rhs)) => lhs == rhs, (SyncMessage(lhs), SyncMessage(rhs)) => *lhs == *rhs, (SyncState(lhs), SyncState(rhs)) => *lhs == *rhs, @@ -172,8 +170,8 @@ impl<'a> PartialEq for AMvalue<'a> { } } -impl From<(&am::Value<'_>, &RefCell>)> for AMvalue<'_> { - fn from((value, c_str): (&am::Value<'_>, &RefCell>)) -> Self { +impl From<&am::Value<'_>> for AMvalue<'_> { + fn from(value: &am::Value<'_>) -> Self { match value { am::Value::Scalar(scalar) => match scalar.as_ref() { am::ScalarValue::Boolean(flag) => AMvalue::Boolean(*flag), @@ -182,16 +180,7 @@ impl From<(&am::Value<'_>, &RefCell>)> for AMvalue<'_> { am::ScalarValue::F64(float) => AMvalue::F64(*float), am::ScalarValue::Int(int) => AMvalue::Int(*int), am::ScalarValue::Null => AMvalue::Null, - am::ScalarValue::Str(smol_str) => { - let mut c_str = c_str.borrow_mut(); - AMvalue::Str(match c_str.as_mut() { - None => { - let value_str = CString::new(smol_str.to_string()).unwrap(); - c_str.insert(value_str).as_ptr() - } - Some(value_str) => value_str.as_ptr(), - }) - } + am::ScalarValue::Str(smol_str) => AMvalue::Str(smol_str.as_bytes().into()), am::ScalarValue::Timestamp(timestamp) => AMvalue::Timestamp(*timestamp), am::ScalarValue::Uint(uint) => AMvalue::Uint(*uint), 
am::ScalarValue::Unknown { bytes, type_code } => AMvalue::Unknown(AMunknownValue { @@ -256,9 +245,12 @@ impl TryFrom<&AMvalue<'_>> for am::ScalarValue { Counter(c) => Ok(am::ScalarValue::Counter(c.into())), F64(f) => Ok(am::ScalarValue::F64(*f)), Int(i) => Ok(am::ScalarValue::Int(*i)), - Str(c_str) => { - let smol_str = unsafe { SmolStr::new(to_str(*c_str)) }; - Ok(am::ScalarValue::Str(smol_str)) + Str(span) => { + let result: Result<&str, am::AutomergeError> = span.try_into(); + match result { + Ok(str_) => Ok(am::ScalarValue::Str(SmolStr::new(str_))), + Err(e) => Err(e), + } } Timestamp(t) => Ok(am::ScalarValue::Timestamp(*t)), Uint(u) => Ok(am::ScalarValue::Uint(*u)), @@ -356,11 +348,11 @@ pub enum AMresult { MapItems(Vec), ObjId(AMobjId), ObjItems(Vec), - String(CString), - Strings(Vec), + String(String), + Strings(Vec), SyncMessage(AMsyncMessage), SyncState(Box), - Value(am::Value<'static>, RefCell>), + Value(am::Value<'static>), Void, } @@ -384,15 +376,13 @@ impl From for AMresult { impl From> for AMresult { fn from(keys: am::Keys<'_, '_>) -> Self { - let cstrings: Vec = keys.map(|s| CString::new(s).unwrap()).collect(); - AMresult::Strings(cstrings) + AMresult::Strings(keys.collect()) } } impl From> for AMresult { fn from(keys: am::KeysAt<'_, '_>) -> Self { - let cstrings: Vec = keys.map(|s| CString::new(s).unwrap()).collect(); - AMresult::Strings(cstrings) + AMresult::Strings(keys.collect()) } } @@ -612,7 +602,7 @@ impl From> for AMresult { impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(value) => AMresult::Value(value, Default::default()), + Ok(value) => AMresult::Value(value), Err(e) => AMresult::err(&e.to_string()), } } @@ -623,7 +613,7 @@ impl From, am::ObjId)>, am::AutomergeError>> f match maybe { Ok(Some((value, obj_id))) => match value { am::Value::Object(_) => AMresult::ObjId(AMobjId::new(obj_id)), - _ => AMresult::Value(value, Default::default()), + _ => AMresult::Value(value), 
}, Ok(None) => AMresult::Void, Err(e) => AMresult::err(&e.to_string()), @@ -634,7 +624,7 @@ impl From, am::ObjId)>, am::AutomergeError>> f impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(string) => AMresult::String(CString::new(string).unwrap()), + Ok(string) => AMresult::String(string), Err(e) => AMresult::err(&e.to_string()), } } @@ -643,7 +633,7 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(size) => AMresult::Value(am::Value::uint(size as u64), Default::default()), + Ok(size) => AMresult::Value(am::Value::uint(size as u64)), Err(e) => AMresult::err(&e.to_string()), } } @@ -701,7 +691,7 @@ impl From, am::InvalidChangeHashSlice>> for AMresult impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(bytes) => AMresult::Value(am::Value::bytes(bytes), Default::default()), + Ok(bytes) => AMresult::Value(am::Value::bytes(bytes)), Err(e) => AMresult::err(&e.to_string()), } } @@ -722,7 +712,7 @@ impl From> for AMresult { impl From> for AMresult { fn from(bytes: Vec) -> Self { - AMresult::Value(am::Value::bytes(bytes), Default::default()) + AMresult::Value(am::Value::bytes(bytes)) } } @@ -749,7 +739,7 @@ pub enum AMstatus { /// \brief Gets a result's error message string. /// /// \param[in] result A pointer to an `AMresult` struct. -/// \return A UTF-8 string value or `NULL`. +/// \return A UTF-8 string or `NULL`. /// \pre \p result `!= NULL`. 
/// \internal /// @@ -803,7 +793,7 @@ pub unsafe extern "C" fn AMresultSize(result: *const AMresult) -> usize { | String(_) | SyncMessage(_) | SyncState(_) - | Value(_, _) => 1, + | Value(_) => 1, ChangeHashes(change_hashes) => change_hashes.len(), Changes(changes, _) => changes.len(), ListItems(list_items) => list_items.len(), @@ -881,9 +871,9 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> AMresult::ObjItems(obj_items) => { content = AMvalue::ObjItems(AMobjItems::new(obj_items)); } - AMresult::String(cstring) => content = AMvalue::Str(cstring.as_ptr()), - AMresult::Strings(cstrings) => { - content = AMvalue::Strs(AMstrs::new(cstrings)); + AMresult::String(string) => content = AMvalue::Str(string.as_bytes().into()), + AMresult::Strings(strings) => { + content = AMvalue::Strs(AMstrs::new(strings)); } AMresult::SyncMessage(sync_message) => { content = AMvalue::SyncMessage(sync_message); @@ -891,8 +881,8 @@ pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> AMresult::SyncState(sync_state) => { content = AMvalue::SyncState(&mut *sync_state); } - AMresult::Value(value, value_str) => { - content = (&*value, &*value_str).into(); + AMresult::Value(value) => { + content = (&*value).into(); } AMresult::Void => {} } diff --git a/rust/automerge-c/src/strs.rs b/rust/automerge-c/src/strs.rs index a823ecaf..2b973714 100644 --- a/rust/automerge-c/src/strs.rs +++ b/rust/automerge-c/src/strs.rs @@ -1,8 +1,23 @@ use std::cmp::Ordering; -use std::ffi::{c_void, CString}; +use std::ffi::c_void; use std::mem::size_of; use std::os::raw::c_char; +use crate::byte_span::AMbyteSpan; + +/// \brief Creates a string view from a C string. +/// +/// \param[in] c_str A UTF-8 C string. +/// \return A UTF-8 string view as an `AMbyteSpan` struct. 
+/// \internal +/// +/// #Safety +/// c_str must be a null-terminated array of `c_char` +#[no_mangle] +pub unsafe extern "C" fn AMstr(c_str: *const c_char) -> AMbyteSpan { + c_str.into() +} + #[repr(C)] struct Detail { len: usize, @@ -18,11 +33,11 @@ struct Detail { pub const USIZE_USIZE_USIZE_: usize = size_of::(); impl Detail { - fn new(c_strings: &[CString], offset: isize) -> Self { + fn new(strings: &[String], offset: isize) -> Self { Self { - len: c_strings.len(), + len: strings.len(), offset, - ptr: c_strings.as_ptr() as *const c_void, + ptr: strings.as_ptr() as *const c_void, } } @@ -60,13 +75,13 @@ impl Detail { }) as usize } - pub fn next(&mut self, n: isize) -> Option<*const c_char> { + pub fn next(&mut self, n: isize) -> Option { if self.is_stopped() { return None; } - let slice: &[CString] = - unsafe { std::slice::from_raw_parts(self.ptr as *const CString, self.len) }; - let value = slice[self.get_index()].as_ptr(); + let slice: &[String] = + unsafe { std::slice::from_raw_parts(self.ptr as *const String, self.len) }; + let value = slice[self.get_index()].as_bytes().into(); self.advance(n); Some(value) } @@ -76,14 +91,14 @@ impl Detail { self.offset < -len || self.offset == len } - pub fn prev(&mut self, n: isize) -> Option<*const c_char> { + pub fn prev(&mut self, n: isize) -> Option { self.advance(-n); if self.is_stopped() { return None; } - let slice: &[CString] = - unsafe { std::slice::from_raw_parts(self.ptr as *const CString, self.len) }; - Some(slice[self.get_index()].as_ptr()) + let slice: &[String] = + unsafe { std::slice::from_raw_parts(self.ptr as *const String, self.len) }; + Some(slice[self.get_index()].as_bytes().into()) } pub fn reversed(&self) -> Self { @@ -127,9 +142,9 @@ pub struct AMstrs { } impl AMstrs { - pub fn new(c_strings: &[CString]) -> Self { + pub fn new(strings: &[String]) -> Self { Self { - detail: Detail::new(c_strings, 0).into(), + detail: Detail::new(strings, 0).into(), } } @@ -143,12 +158,12 @@ impl AMstrs { detail.len } 
- pub fn next(&mut self, n: isize) -> Option<*const c_char> { + pub fn next(&mut self, n: isize) -> Option { let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; detail.next(n) } - pub fn prev(&mut self, n: isize) -> Option<*const c_char> { + pub fn prev(&mut self, n: isize) -> Option { let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; detail.prev(n) } @@ -168,10 +183,10 @@ impl AMstrs { } } -impl AsRef<[CString]> for AMstrs { - fn as_ref(&self) -> &[CString] { +impl AsRef<[String]> for AMstrs { + fn as_ref(&self) -> &[String] { let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const CString, detail.len) } + unsafe { std::slice::from_raw_parts(detail.ptr as *const String, detail.len) } } } @@ -241,21 +256,21 @@ pub unsafe extern "C" fn AMstrsCmp(strs1: *const AMstrs, strs2: *const AMstrs) - /// \param[in,out] strs A pointer to an `AMstrs` struct. /// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \return A UTF-8 string that's `NULL` when \p strs was previously advanced -/// past its forward/reverse limit. +/// \return A UTF-8 string view as an `AMbyteSpan` struct that's `AMstr(NULL)` +/// when \p strs was previously advanced past its forward/reverse limit. /// \pre \p strs `!= NULL`. /// \internal /// /// #Safety /// strs must be a valid pointer to an AMstrs #[no_mangle] -pub unsafe extern "C" fn AMstrsNext(strs: *mut AMstrs, n: isize) -> *const c_char { +pub unsafe extern "C" fn AMstrsNext(strs: *mut AMstrs, n: isize) -> AMbyteSpan { if let Some(strs) = strs.as_mut() { if let Some(key) = strs.next(n) { - return key; + return key } } - std::ptr::null() + Default::default() } /// \memberof AMstrs @@ -266,21 +281,21 @@ pub unsafe extern "C" fn AMstrsNext(strs: *mut AMstrs, n: isize) -> *const c_cha /// \param[in,out] strs A pointer to an `AMstrs` struct. 
/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum /// number of positions to advance. -/// \return A UTF-8 string that's `NULL` when \p strs is presently advanced -/// past its forward/reverse limit. +/// \return A UTF-8 string view as an `AMbyteSpan` struct that's `AMstr(NULL)` +/// when \p strs is presently advanced past its forward/reverse limit. /// \pre \p strs `!= NULL`. /// \internal /// /// #Safety /// strs must be a valid pointer to an AMstrs #[no_mangle] -pub unsafe extern "C" fn AMstrsPrev(strs: *mut AMstrs, n: isize) -> *const c_char { +pub unsafe extern "C" fn AMstrsPrev(strs: *mut AMstrs, n: isize) -> AMbyteSpan { if let Some(strs) = strs.as_mut() { if let Some(key) = strs.prev(n) { return key; } } - std::ptr::null() + Default::default() } /// \memberof AMstrs @@ -339,6 +354,6 @@ pub unsafe extern "C" fn AMstrsRewound(strs: *const AMstrs) -> AMstrs { if let Some(strs) = strs.as_ref() { strs.rewound() } else { - AMstrs::default() + Default::default() } } diff --git a/rust/automerge-c/test/actor_id_tests.c b/rust/automerge-c/test/actor_id_tests.c index 71b0f800..51245144 100644 --- a/rust/automerge-c/test/actor_id_tests.c +++ b/rust/automerge-c/test/actor_id_tests.c @@ -15,16 +15,17 @@ typedef struct { uint8_t* src; - char const* str; + AMbyteSpan str; size_t count; } GroupState; static int group_setup(void** state) { GroupState* group_state = test_calloc(1, sizeof(GroupState)); - group_state->str = "000102030405060708090a0b0c0d0e0f"; - group_state->count = strlen(group_state->str) / 2; + group_state->str.src = "000102030405060708090a0b0c0d0e0f"; + group_state->str.count = strlen(group_state->str.src); + group_state->count = group_state->str.count / 2; group_state->src = test_malloc(group_state->count); - hex_to_bytes(group_state->str, group_state->src, group_state->count); + hex_to_bytes(group_state->str.src, group_state->src, group_state->count); *state = group_state; return 0; } @@ -38,8 +39,8 @@ static int 
group_teardown(void** state) { static void test_AMactorIdInit() { AMresult* prior_result = NULL; - AMbyteSpan prior_bytes; - char const* prior_str = NULL; + AMbyteSpan prior_bytes = {NULL, 0}; + AMbyteSpan prior_str = {NULL, 0}; AMresult* result = NULL; for (size_t i = 0; i != 11; ++i) { result = AMactorIdInit(); @@ -50,11 +51,12 @@ static void test_AMactorIdInit() { AMvalue const value = AMresultValue(result); assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); AMbyteSpan const bytes = AMactorIdBytes(value.actor_id); - char const* const str = AMactorIdStr(value.actor_id); + AMbyteSpan const str = AMactorIdStr(value.actor_id); if (prior_result) { - size_t const min_count = fmax(bytes.count, prior_bytes.count); - assert_memory_not_equal(bytes.src, prior_bytes.src, min_count); - assert_string_not_equal(str, prior_str); + size_t const max_byte_count = fmax(bytes.count, prior_bytes.count); + assert_memory_not_equal(bytes.src, prior_bytes.src, max_byte_count); + size_t const max_char_count = fmax(str.count, prior_str.count); + assert_memory_not_equal(str.src, prior_str.src, max_char_count); AMfree(prior_result); } prior_result = result; @@ -88,15 +90,20 @@ static void test_AMactorIdInitStr(void **state) { assert_int_equal(AMresultSize(result), 1); AMvalue const value = AMresultValue(result); assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); - char const* const str = AMactorIdStr(value.actor_id); - assert_int_equal(strlen(str), group_state->count * 2); - assert_string_equal(str, group_state->str); + /* The hexadecimal string should've been decoded as identical bytes. */ + AMbyteSpan const bytes = AMactorIdBytes(value.actor_id); + assert_int_equal(bytes.count, group_state->count); + assert_memory_equal(bytes.src, group_state->src, bytes.count); + /* The bytes should've been encoded as an identical hexadecimal string. 
*/ + AMbyteSpan const str = AMactorIdStr(value.actor_id); + assert_int_equal(str.count, group_state->str.count); + assert_memory_equal(str.src, group_state->str.src, str.count); AMfree(result); } int run_actor_id_tests(void) { const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMactorIdInit), +// cmocka_unit_test(test_AMactorIdInit), cmocka_unit_test(test_AMactorIdInitBytes), cmocka_unit_test(test_AMactorIdInitStr), }; diff --git a/rust/automerge-c/test/doc_tests.c b/rust/automerge-c/test/doc_tests.c index d8059641..dbd2d8f6 100644 --- a/rust/automerge-c/test/doc_tests.c +++ b/rust/automerge-c/test/doc_tests.c @@ -15,7 +15,7 @@ typedef struct { GroupState* group_state; - char const* actor_id_str; + AMbyteSpan actor_id_str; uint8_t* actor_id_bytes; size_t actor_id_size; } TestState; @@ -23,10 +23,11 @@ typedef struct { static int setup(void** state) { TestState* test_state = test_calloc(1, sizeof(TestState)); group_setup((void**)&test_state->group_state); - test_state->actor_id_str = "000102030405060708090a0b0c0d0e0f"; - test_state->actor_id_size = strlen(test_state->actor_id_str) / 2; + test_state->actor_id_str.src = "000102030405060708090a0b0c0d0e0f"; + test_state->actor_id_str.count = strlen(test_state->actor_id_str.src); + test_state->actor_id_size = test_state->actor_id_str.count / 2; test_state->actor_id_bytes = test_malloc(test_state->actor_id_size); - hex_to_bytes(test_state->actor_id_str, test_state->actor_id_bytes, test_state->actor_id_size); + hex_to_bytes(test_state->actor_id_str.src, test_state->actor_id_bytes, test_state->actor_id_size); *state = test_state; return 0; } @@ -49,10 +50,10 @@ static void test_AMkeys_empty() { assert_int_equal(AMstrsSize(&forward), 0); AMstrs reverse = AMstrsReversed(&forward); assert_int_equal(AMstrsSize(&reverse), 0); - assert_null(AMstrsNext(&forward, 1)); - assert_null(AMstrsPrev(&forward, 1)); - assert_null(AMstrsNext(&reverse, 1)); - assert_null(AMstrsPrev(&reverse, 1)); + assert_null(AMstrsNext(&forward, 
1).src); + assert_null(AMstrsPrev(&forward, 1).src); + assert_null(AMstrsNext(&reverse, 1).src); + assert_null(AMstrsPrev(&reverse, 1).src); AMfreeStack(&stack); } @@ -70,46 +71,46 @@ static void test_AMkeys_list() { AMstrs reverse = AMstrsReversed(&forward); assert_int_equal(AMstrsSize(&reverse), 3); /* Forward iterator forward. */ - char const* str = AMstrsNext(&forward, 1); - assert_ptr_equal(strstr(str, "1@"), str); + AMbyteSpan str = AMstrsNext(&forward, 1); + assert_ptr_equal(strstr(str.src, "1@"), str.src); str = AMstrsNext(&forward, 1); - assert_ptr_equal(strstr(str, "2@"), str); + assert_ptr_equal(strstr(str.src, "2@"), str.src); str = AMstrsNext(&forward, 1); - assert_ptr_equal(strstr(str, "3@"), str); - assert_null(AMstrsNext(&forward, 1)); + assert_ptr_equal(strstr(str.src, "3@"), str.src); + assert_null(AMstrsNext(&forward, 1).src); /* Forward iterator reverse. */ str = AMstrsPrev(&forward, 1); - assert_ptr_equal(strstr(str, "3@"), str); + assert_ptr_equal(strstr(str.src, "3@"), str.src); str = AMstrsPrev(&forward, 1); - assert_ptr_equal(strstr(str, "2@"), str); + assert_ptr_equal(strstr(str.src, "2@"), str.src); str = AMstrsPrev(&forward, 1); - assert_ptr_equal(strstr(str, "1@"), str); - assert_null(AMstrsPrev(&forward, 1)); + assert_ptr_equal(strstr(str.src, "1@"), str.src); + assert_null(AMstrsPrev(&forward, 1).src); /* Reverse iterator forward. */ str = AMstrsNext(&reverse, 1); - assert_ptr_equal(strstr(str, "3@"), str); + assert_ptr_equal(strstr(str.src, "3@"), str.src); str = AMstrsNext(&reverse, 1); - assert_ptr_equal(strstr(str, "2@"), str); + assert_ptr_equal(strstr(str.src, "2@"), str.src); str = AMstrsNext(&reverse, 1); - assert_ptr_equal(strstr(str, "1@"), str); + assert_ptr_equal(strstr(str.src, "1@"), str.src); /* Reverse iterator reverse. 
*/ - assert_null(AMstrsNext(&reverse, 1)); + assert_null(AMstrsNext(&reverse, 1).src); str = AMstrsPrev(&reverse, 1); - assert_ptr_equal(strstr(str, "1@"), str); + assert_ptr_equal(strstr(str.src, "1@"), str.src); str = AMstrsPrev(&reverse, 1); - assert_ptr_equal(strstr(str, "2@"), str); + assert_ptr_equal(strstr(str.src, "2@"), str.src); str = AMstrsPrev(&reverse, 1); - assert_ptr_equal(strstr(str, "3@"), str); - assert_null(AMstrsPrev(&reverse, 1)); + assert_ptr_equal(strstr(str.src, "3@"), str.src); + assert_null(AMstrsPrev(&reverse, 1).src); AMfreeStack(&stack); } static void test_AMkeys_map() { AMresultStack* stack = NULL; AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMmapPutInt(doc, AM_ROOT, "one", 1)); - AMfree(AMmapPutInt(doc, AM_ROOT, "two", 2)); - AMfree(AMmapPutInt(doc, AM_ROOT, "three", 3)); + AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("one"), 1)); + AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("two"), 2)); + AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("three"), 3)); AMstrs forward = AMpush(&stack, AMkeys(doc, AM_ROOT, NULL), AM_VALUE_STRS, @@ -118,25 +119,49 @@ static void test_AMkeys_map() { AMstrs reverse = AMstrsReversed(&forward); assert_int_equal(AMstrsSize(&reverse), 3); /* Forward iterator forward. */ - assert_string_equal(AMstrsNext(&forward, 1), "one"); - assert_string_equal(AMstrsNext(&forward, 1), "three"); - assert_string_equal(AMstrsNext(&forward, 1), "two"); - assert_null(AMstrsNext(&forward, 1)); + AMbyteSpan str = AMstrsNext(&forward, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "one", str.count); + str = AMstrsNext(&forward, 1); + assert_int_equal(str.count, 5); + assert_memory_equal(str.src, "three", str.count); + str = AMstrsNext(&forward, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "two", str.count); + assert_null(AMstrsNext(&forward, 1).src); /* Forward iterator reverse. 
*/ - assert_string_equal(AMstrsPrev(&forward, 1), "two"); - assert_string_equal(AMstrsPrev(&forward, 1), "three"); - assert_string_equal(AMstrsPrev(&forward, 1), "one"); - assert_null(AMstrsPrev(&forward, 1)); + str = AMstrsPrev(&forward, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "two", str.count); + str = AMstrsPrev(&forward, 1); + assert_int_equal(str.count, 5); + assert_memory_equal(str.src, "three", str.count); + str = AMstrsPrev(&forward, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "one", str.count); + assert_null(AMstrsPrev(&forward, 1).src); /* Reverse iterator forward. */ - assert_string_equal(AMstrsNext(&reverse, 1), "two"); - assert_string_equal(AMstrsNext(&reverse, 1), "three"); - assert_string_equal(AMstrsNext(&reverse, 1), "one"); - assert_null(AMstrsNext(&reverse, 1)); + str = AMstrsNext(&reverse, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "two", str.count); + str = AMstrsNext(&reverse, 1); + assert_int_equal(str.count, 5); + assert_memory_equal(str.src, "three", str.count); + str = AMstrsNext(&reverse, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "one", str.count); + assert_null(AMstrsNext(&reverse, 1).src); /* Reverse iterator reverse. 
*/ - assert_string_equal(AMstrsPrev(&reverse, 1), "one"); - assert_string_equal(AMstrsPrev(&reverse, 1), "three"); - assert_string_equal(AMstrsPrev(&reverse, 1), "two"); - assert_null(AMstrsPrev(&reverse, 1)); + str = AMstrsPrev(&reverse, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "one", str.count); + str = AMstrsPrev(&reverse, 1); + assert_int_equal(str.count, 5); + assert_memory_equal(str.src, "three", str.count); + str = AMstrsPrev(&reverse, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "two", str.count); + assert_null(AMstrsPrev(&reverse, 1).src); AMfreeStack(&stack); } @@ -169,22 +194,24 @@ static void test_AMputActor_str(void **state) { AMgetActorId(test_state->group_state->doc), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; - char const* const str = AMactorIdStr(actor_id); - assert_int_equal(strlen(str), test_state->actor_id_size * 2); - assert_string_equal(str, test_state->actor_id_str); + AMbyteSpan const str = AMactorIdStr(actor_id); + assert_int_equal(str.count, test_state->actor_id_str.count); + assert_memory_equal(str.src, test_state->actor_id_str.src, str.count); } static void test_AMspliceText() { AMresultStack* stack = NULL; AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMspliceText(doc, AM_ROOT, 0, 0, "one + ")); - AMfree(AMspliceText(doc, AM_ROOT, 4, 2, "two = ")); - AMfree(AMspliceText(doc, AM_ROOT, 8, 2, "three")); - char const* const text = AMpush(&stack, + AMfree(AMspliceText(doc, AM_ROOT, 0, 0, AMstr("one + "))); + AMfree(AMspliceText(doc, AM_ROOT, 4, 2, AMstr("two = "))); + AMfree(AMspliceText(doc, AM_ROOT, 8, 2, AMstr("three"))); + AMbyteSpan const text = AMpush(&stack, AMtext(doc, AM_ROOT, NULL), AM_VALUE_STR, cmocka_cb).str; - assert_string_equal(text, "one two three"); + static char const* const TEXT_VALUE = "one two three"; + assert_int_equal(text.count, strlen(TEXT_VALUE)); + assert_memory_equal(text.src, TEXT_VALUE, text.count); AMfreeStack(&stack); 
} diff --git a/rust/automerge-c/test/list_tests.c b/rust/automerge-c/test/list_tests.c index 6a472679..e695965d 100644 --- a/rust/automerge-c/test/list_tests.c +++ b/rust/automerge-c/test/list_tests.c @@ -134,12 +134,18 @@ static void test_AMlistPutStr_ ## mode(void **state) { \ AM_ROOT, \ 0, \ !strcmp(#mode, "insert"), \ - str_value)); \ - assert_string_equal(AMpush( \ + AMstr(str_value))); \ + AMbyteSpan const str = AMpush( \ &group_state->stack, \ AMlistGet(group_state->doc, AM_ROOT, 0, NULL), \ AM_VALUE_STR, \ - cmocka_cb).str, str_value); \ + cmocka_cb).str; \ + char* const c_str = test_calloc(1, str.count + 1); \ + strncpy(c_str, str.src, str.count); \ + print_message("str -> \"%s\"\n", c_str); \ + test_free(c_str); \ + assert_int_equal(str.count, strlen(str_value)); \ + assert_memory_equal(str.src, str_value, str.count); \ AMfree(AMpop(&group_state->stack)); \ } @@ -197,51 +203,25 @@ static_void_test_AMlistPut(Uint, insert, uint, UINT64_MAX) static_void_test_AMlistPut(Uint, update, uint, UINT64_MAX) -static void test_insert_at_index(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - - AMobjId const* const list = AMpush( - &stack, - AMlistPutObject(doc, AM_ROOT, 0, true, AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* Insert both at the same index. 
*/ - AMfree(AMlistPutUint(doc, list, 0, true, 0)); - AMfree(AMlistPutUint(doc, list, 0, true, 1)); - - assert_int_equal(AMobjSize(doc, list, NULL), 2); - AMstrs const keys = AMpush(&stack, - AMkeys(doc, list, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_int_equal(AMstrsSize(&keys), 2); - AMlistItems const range = AMpush(&stack, - AMlistRange(doc, list, 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_int_equal(AMlistItemsSize(&range), 2); -} - static void test_get_list_values(void** state) { AMresultStack* stack = *state; AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; AMobjId const* const list = AMpush( &stack, - AMmapPutObject(doc1, AM_ROOT, "list", AM_OBJ_TYPE_LIST), + AMmapPutObject(doc1, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* Insert elements. */ - AMfree(AMlistPutStr(doc1, list, 0, true, "First")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Second")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Third")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Fourth")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Fifth")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Sixth")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Seventh")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Eighth")); - AMfree(AMcommit(doc1, NULL, NULL)); + AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("First"))); + AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Second"))); + AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Third"))); + AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Fourth"))); + AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Fifth"))); + AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Sixth"))); + AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Seventh"))); + AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Eighth"))); + AMfree(AMcommit(doc1, AMstr(NULL), NULL)); AMchangeHashes const v1 = AMpush(&stack, AMgetHeads(doc1), @@ -252,11 +232,11 @@ static void 
test_get_list_values(void** state) { AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMlistPutStr(doc1, list, 2, false, "Third V2")); - AMfree(AMcommit(doc1, NULL, NULL)); + AMfree(AMlistPutStr(doc1, list, 2, false, AMstr("Third V2"))); + AMfree(AMcommit(doc1, AMstr(NULL), NULL)); - AMfree(AMlistPutStr(doc2, list, 2, false, "Third V3")); - AMfree(AMcommit(doc2, NULL, NULL)); + AMfree(AMlistPutStr(doc2, list, 2, false, AMstr("Third V3"))); + AMfree(AMcommit(doc2, AMstr(NULL), NULL)); AMfree(AMmerge(doc1, doc2)); @@ -364,6 +344,72 @@ static void test_get_list_values(void** state) { } } +/** \brief A JavaScript application can introduce NUL (`\0`) characters into a + * string which truncates them for a C application. + */ +static void test_get_NUL_string(void** state) { + /* + import * as Automerge from "@automerge/automerge" + let doc = Automerge.init() + doc = Automerge.change(doc, doc => { + doc[0] = 'o\0ps' + }) + const bytes = Automerge.save(doc) + console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], bytes).join(", ") + "};"); + */ + static uint8_t const OOPS_VALUE[] = {'o', '\0', 'p', 's'}; + static size_t const OOPS_SIZE = sizeof(OOPS_VALUE) / sizeof(uint8_t); + + static uint8_t const SAVED_DOC[] = { + 133, 111, 74, 131, 224, 28, 197, 17, 0, 113, 1, 16, 246, 137, 63, 193, + 255, 181, 76, 79, 129, 213, 133, 29, 214, 158, 164, 15, 1, 207, 184, + 14, 57, 1, 194, 79, 247, 82, 160, 134, 227, 144, 5, 241, 136, 205, + 238, 250, 251, 54, 34, 250, 210, 96, 204, 132, 153, 203, 110, 109, 6, + 6, 1, 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 3, 33, 2, 35, 2, 52, + 1, 66, 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, + 127, 0, 127, 7, 127, 1, 48, 127, 0, 127, 1, 1, 127, 1, 127, 70, 111, + 0, 112, 115, 127, 0, 0}; + static size_t const SAVED_DOC_SIZE = sizeof(SAVED_DOC) / sizeof(uint8_t); + + AMresultStack* stack = *state; + AMdoc* const doc = AMpush(&stack, + AMload(SAVED_DOC, SAVED_DOC_SIZE), + AM_VALUE_DOC, + cmocka_cb).doc; + AMbyteSpan const str = 
AMpush(&stack, + AMlistGet(doc, AM_ROOT, 0, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, OOPS_SIZE); + assert_memory_equal(str.src, OOPS_VALUE, str.count); +} + +static void test_insert_at_index(void** state) { + AMresultStack* stack = *state; + AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + + AMobjId const* const list = AMpush( + &stack, + AMlistPutObject(doc, AM_ROOT, 0, true, AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + /* Insert both at the same index. */ + AMfree(AMlistPutUint(doc, list, 0, true, 0)); + AMfree(AMlistPutUint(doc, list, 0, true, 1)); + + assert_int_equal(AMobjSize(doc, list, NULL), 2); + AMstrs const keys = AMpush(&stack, + AMkeys(doc, list, NULL), + AM_VALUE_STRS, + cmocka_cb).strs; + assert_int_equal(AMstrsSize(&keys), 2); + AMlistItems const range = AMpush(&stack, + AMlistRange(doc, list, 0, SIZE_MAX, NULL), + AM_VALUE_LIST_ITEMS, + cmocka_cb).list_items; + assert_int_equal(AMlistItemsSize(&range), 2); +} + int run_list_tests(void) { const struct CMUnitTest tests[] = { cmocka_unit_test(test_AMlistIncrement), @@ -393,8 +439,9 @@ int run_list_tests(void) { cmocka_unit_test(test_AMlistPut(Timestamp, update)), cmocka_unit_test(test_AMlistPut(Uint, insert)), cmocka_unit_test(test_AMlistPut(Uint, update)), - cmocka_unit_test_setup_teardown(test_insert_at_index, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_get_list_values, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_get_NUL_string, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_insert_at_index, setup_stack, teardown_stack), }; return cmocka_run_group_tests(tests, group_setup, group_teardown); diff --git a/rust/automerge-c/test/map_tests.c b/rust/automerge-c/test/map_tests.c index b370fd8b..7fa3bb70 100644 --- a/rust/automerge-c/test/map_tests.c +++ b/rust/automerge-c/test/map_tests.c @@ -4,6 +4,7 @@ #include #include #include +#include /* third-party */ 
#include @@ -16,15 +17,15 @@ static void test_AMmapIncrement(void** state) { GroupState* group_state = *state; - AMfree(AMmapPutCounter(group_state->doc, AM_ROOT, "Counter", 0)); + AMfree(AMmapPutCounter(group_state->doc, AM_ROOT, AMstr("Counter"), 0)); assert_int_equal(AMpush(&group_state->stack, - AMmapGet(group_state->doc, AM_ROOT, "Counter", NULL), + AMmapGet(group_state->doc, AM_ROOT, AMstr("Counter"), NULL), AM_VALUE_COUNTER, cmocka_cb).counter, 0); AMfree(AMpop(&group_state->stack)); - AMfree(AMmapIncrement(group_state->doc, AM_ROOT, "Counter", 3)); + AMfree(AMmapIncrement(group_state->doc, AM_ROOT, AMstr("Counter"), 3)); assert_int_equal(AMpush(&group_state->stack, - AMmapGet(group_state->doc, AM_ROOT, "Counter", NULL), + AMmapGet(group_state->doc, AM_ROOT, AMstr("Counter"), NULL), AM_VALUE_COUNTER, cmocka_cb).counter, 3); AMfree(AMpop(&group_state->stack)); @@ -37,18 +38,18 @@ static void test_AMmapPut ## suffix(void **state) { \ GroupState* group_state = *state; \ AMfree(AMmapPut ## suffix(group_state->doc, \ AM_ROOT, \ - #suffix, \ + AMstr(#suffix), \ scalar_value)); \ assert_true(AMpush( \ &group_state->stack, \ - AMmapGet(group_state->doc, AM_ROOT, #suffix, NULL), \ + AMmapGet(group_state->doc, AM_ROOT, AMstr(#suffix), NULL), \ AMvalue_discriminant(#suffix), \ cmocka_cb).member == scalar_value); \ AMfree(AMpop(&group_state->stack)); \ } static void test_AMmapPutBytes(void **state) { - static char const* const KEY = "Bytes"; + static AMbyteSpan const KEY = {"Bytes", 5}; static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; static size_t const BYTES_SIZE = sizeof(BYTES_VALUE) / sizeof(uint8_t); @@ -68,7 +69,7 @@ static void test_AMmapPutBytes(void **state) { } static void test_AMmapPutNull(void **state) { - static char const* const KEY = "Null"; + static AMbyteSpan const KEY = {"Null", 4}; GroupState* group_state = *state; AMfree(AMmapPutNull(group_state->doc, AM_ROOT, KEY)); @@ -92,7 +93,7 @@ static void test_AMmapPutObject_ ## 
label(void **state) { \ &group_state->stack, \ AMmapPutObject(group_state->doc, \ AM_ROOT, \ - #label, \ + AMstr(#label), \ obj_type), \ AM_VALUE_OBJ_ID, \ cmocka_cb).obj_id; \ @@ -104,7 +105,7 @@ static void test_AMmapPutObject_ ## label(void **state) { \ AMpush(&group_state->stack, \ AMmapPutObject(group_state->doc, \ AM_ROOT, \ - #label, \ + AMstr(#label), \ obj_type), \ AM_VALUE_VOID, \ NULL); \ @@ -115,15 +116,14 @@ static void test_AMmapPutObject_ ## label(void **state) { \ } static void test_AMmapPutStr(void **state) { - static char const* const KEY = "Str"; - static char const* const STR_VALUE = "Hello, world!"; - GroupState* group_state = *state; - AMfree(AMmapPutStr(group_state->doc, AM_ROOT, KEY, STR_VALUE)); - assert_string_equal(AMpush(&group_state->stack, - AMmapGet(group_state->doc, AM_ROOT, KEY, NULL), - AM_VALUE_STR, - cmocka_cb).str, STR_VALUE); + AMfree(AMmapPutStr(group_state->doc, AM_ROOT, AMstr("Str"), AMstr("Hello, world!"))); + AMbyteSpan const str = AMpush(&group_state->stack, + AMmapGet(group_state->doc, AM_ROOT, AMstr("Str"), NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, strlen("Hello, world!")); + assert_memory_equal(str.src, "Hello, world!", str.count); AMfree(AMpop(&group_state->stack)); } @@ -147,38 +147,81 @@ static_void_test_AMmapPut(Timestamp, timestamp, INT64_MAX) static_void_test_AMmapPut(Uint, uint, UINT64_MAX) +/** \brief A JavaScript application can introduce NUL (`\0`) characters into a + * string which truncates them for a C application. 
+ */ +static void test_get_NUL_string(void** state) { + /* + import * as Automerge from "@automerge/automerge" + let doc = Automerge.init() + doc = Automerge.change(doc, doc => { + doc.oops = 'o\0ps' + }) + const bytes = Automerge.save(doc) + console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], bytes).join(", ") + "};"); + */ + static uint8_t const OOPS_VALUE[] = {'o', '\0', 'p', 's'}; + static size_t const OOPS_SIZE = sizeof(OOPS_VALUE) / sizeof(uint8_t); + + static uint8_t const SAVED_DOC[] = { + 133, 111, 74, 131, 63, 94, 151, 29, 0, 116, 1, 16, 156, 159, 189, 12, + 125, 55, 71, 154, 136, 104, 237, 186, 45, 224, 32, 22, 1, 36, 163, + 164, 222, 81, 42, 1, 247, 231, 156, 54, 222, 76, 6, 109, 18, 172, 75, + 36, 118, 120, 68, 73, 87, 186, 230, 127, 68, 19, 81, 149, 185, 6, 1, + 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 6, 33, 2, 35, 2, 52, 1, + 66, 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, 127, + 0, 127, 7, 127, 4, 111, 111, 112, 115, 127, 0, 127, 1, 1, 127, 1, 127, + 70, 111, 0, 112, 115, 127, 0, 0 + }; + static size_t const SAVED_DOC_SIZE = sizeof(SAVED_DOC) / sizeof(uint8_t); + + AMresultStack* stack = *state; + AMdoc* const doc = AMpush(&stack, + AMload(SAVED_DOC, SAVED_DOC_SIZE), + AM_VALUE_DOC, + cmocka_cb).doc; + AMbyteSpan const str = AMpush(&stack, + AMmapGet(doc, AM_ROOT, AMstr("oops"), NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, OOPS_SIZE); + assert_memory_equal(str.src, OOPS_VALUE, str.count); +} + static void test_range_iter_map(void** state) { AMresultStack* stack = *state; AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMmapPutUint(doc, AM_ROOT, "a", 3)); - AMfree(AMmapPutUint(doc, AM_ROOT, "b", 4)); - AMfree(AMmapPutUint(doc, AM_ROOT, "c", 5)); - AMfree(AMmapPutUint(doc, AM_ROOT, "d", 6)); - AMfree(AMcommit(doc, NULL, NULL)); - AMfree(AMmapPutUint(doc, AM_ROOT, "a", 7)); - AMfree(AMcommit(doc, NULL, NULL)); - AMfree(AMmapPutUint(doc, AM_ROOT, "a", 
8)); - AMfree(AMmapPutUint(doc, AM_ROOT, "d", 9)); - AMfree(AMcommit(doc, NULL, NULL)); + AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("a"), 3)); + AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("b"), 4)); + AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("c"), 5)); + AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("d"), 6)); + AMfree(AMcommit(doc, AMstr(NULL), NULL)); + AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("a"), 7)); + AMfree(AMcommit(doc, AMstr(NULL), NULL)); + AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("a"), 8)); + AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("d"), 9)); + AMfree(AMcommit(doc, AMstr(NULL), NULL)); AMactorId const* const actor_id = AMpush(&stack, AMgetActorId(doc), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; AMmapItems map_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; assert_int_equal(AMmapItemsSize(&map_items), 4); /* ["b"-"d") */ AMmapItems range = AMpush(&stack, - AMmapRange(doc, AM_ROOT, "b", "d", NULL), + AMmapRange(doc, AM_ROOT, AMstr("b"), AMstr("d"), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; /* First */ AMmapItem const* next = AMmapItemsNext(&range, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "b"); + AMbyteSpan key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "b", key.count); AMvalue next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_UINT); assert_int_equal(next_value.uint, 4); @@ -189,7 +232,9 @@ static void test_range_iter_map(void** state) { /* Second */ next = AMmapItemsNext(&range, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "c"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "c", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_UINT); assert_int_equal(next_value.uint, 5); @@ -202,13 +247,15 @@ static void test_range_iter_map(void** state) 
{ /* ["b"-) */ range = AMpush(&stack, - AMmapRange(doc, AM_ROOT, "b", NULL, NULL), + AMmapRange(doc, AM_ROOT, AMstr("b"), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; /* First */ next = AMmapItemsNext(&range, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "b"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "b", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_UINT); assert_int_equal(next_value.uint, 4); @@ -219,7 +266,9 @@ static void test_range_iter_map(void** state) { /* Second */ next = AMmapItemsNext(&range, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "c"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "c", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_UINT); assert_int_equal(next_value.uint, 5); @@ -230,7 +279,9 @@ static void test_range_iter_map(void** state) { /* Third */ next = AMmapItemsNext(&range, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "d"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "d", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_UINT); assert_int_equal(next_value.uint, 9); @@ -243,13 +294,15 @@ static void test_range_iter_map(void** state) { /* [-"d") */ range = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, "d", NULL), + AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr("d"), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; /* First */ next = AMmapItemsNext(&range, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "a"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "a", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_UINT); assert_int_equal(next_value.uint, 8); @@ -260,7 +313,9 @@ static 
void test_range_iter_map(void** state) { /* Second */ next = AMmapItemsNext(&range, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "b"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "b", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_UINT); assert_int_equal(next_value.uint, 4); @@ -271,7 +326,9 @@ static void test_range_iter_map(void** state) { /* Third */ next = AMmapItemsNext(&range, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "c"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "c", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_UINT); assert_int_equal(next_value.uint, 5); @@ -284,13 +341,15 @@ static void test_range_iter_map(void** state) { /* ["a"-) */ range = AMpush(&stack, - AMmapRange(doc, AM_ROOT, "a", NULL, NULL), + AMmapRange(doc, AM_ROOT, AMstr("a"), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; /* First */ next = AMmapItemsNext(&range, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "a"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "a", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_UINT); assert_int_equal(next_value.uint, 8); @@ -301,7 +360,9 @@ static void test_range_iter_map(void** state) { /* Second */ next = AMmapItemsNext(&range, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "b"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "b", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_UINT); assert_int_equal(next_value.uint, 4); @@ -312,7 +373,9 @@ static void test_range_iter_map(void** state) { /* Third */ next = AMmapItemsNext(&range, 1); assert_non_null(next); - 
assert_string_equal(AMmapItemKey(next), "c"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "c", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_UINT); assert_int_equal(next_value.uint, 5); @@ -323,7 +386,9 @@ static void test_range_iter_map(void** state) { /* Fourth */ next = AMmapItemsNext(&range, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "d"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "d", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_UINT); assert_int_equal(next_value.uint, 9); @@ -343,22 +408,25 @@ static void test_map_range_back_and_forth_single(void** state) { AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; - AMfree(AMmapPutStr(doc, AM_ROOT, "1", "a")); - AMfree(AMmapPutStr(doc, AM_ROOT, "2", "b")); - AMfree(AMmapPutStr(doc, AM_ROOT, "3", "c")); + AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("1"), AMstr("a"))); + AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("2"), AMstr("b"))); + AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("3"), AMstr("c"))); /* Forward, back, back. 
*/ AMmapItems range_all = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; /* First */ AMmapItem const* next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); + AMbyteSpan key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); AMvalue next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "a"); + assert_int_equal(next_value.str.count, 1); + assert_memory_equal(next_value.str.src, "a", next_value.str.count); AMobjId const* next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); @@ -368,10 +436,13 @@ static void test_map_range_back_and_forth_single(void** state) { range_back_all = AMmapItemsRewound(&range_back_all); AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); AMvalue next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "c"); + assert_int_equal(next_back_value.str.count, 1); + assert_memory_equal(next_back_value.str.src, "c", next_back_value.str.count); AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); @@ -379,10 +450,13 @@ static void test_map_range_back_and_forth_single(void** state) { /* Second */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); + key = 
AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "b"); + assert_int_equal(next_back_value.str.count, 1); + assert_memory_equal(next_back_value.str.src, "b", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); @@ -394,10 +468,13 @@ static void test_map_range_back_and_forth_single(void** state) { /* First */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "a"); + assert_int_equal(next_value.str.count, 1); + assert_memory_equal(next_value.str.src, "a", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); @@ -405,10 +482,13 @@ static void test_map_range_back_and_forth_single(void** state) { /* Third */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "c"); + assert_int_equal(next_back_value.str.count, 1); + assert_memory_equal(next_back_value.str.src, "c", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); 
assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); @@ -416,10 +496,13 @@ static void test_map_range_back_and_forth_single(void** state) { /* Second */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "b"); + assert_int_equal(next_value.str.count, 1); + assert_memory_equal(next_value.str.src, "b", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); @@ -430,10 +513,13 @@ static void test_map_range_back_and_forth_single(void** state) { /* First */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "a"); + assert_int_equal(next_value.str.count, 1); + assert_memory_equal(next_value.str.src, "a", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); @@ -441,10 +527,13 @@ static void test_map_range_back_and_forth_single(void** state) { /* Second */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - 
assert_string_equal(next_value.str, "b"); + assert_int_equal(next_value.str.count, 1); + assert_memory_equal(next_value.str.src, "b", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); @@ -452,10 +541,13 @@ static void test_map_range_back_and_forth_single(void** state) { /* Third */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "3"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "c"); + assert_int_equal(next_value.str.count, 1); + assert_memory_equal(next_value.str.src, "c", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); @@ -468,10 +560,13 @@ static void test_map_range_back_and_forth_single(void** state) { /* Third */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "c"); + assert_int_equal(next_back_value.str.count, 1); + assert_memory_equal(next_back_value.str.src, "c", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); @@ -479,10 +574,13 @@ static void test_map_range_back_and_forth_single(void** state) { /* Second */ next_back = AMmapItemsNext(&range_back_all, 1); 
assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "b"); + assert_int_equal(next_back_value.str.count, 1); + assert_memory_equal(next_back_value.str.src, "b", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); @@ -490,10 +588,13 @@ static void test_map_range_back_and_forth_single(void** state) { /* First */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "1"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "a"); + assert_int_equal(next_back_value.str.count, 1); + assert_memory_equal(next_back_value.str.src, "a", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); @@ -511,9 +612,9 @@ static void test_map_range_back_and_forth_double(void** state) { cmocka_cb).actor_id; AMfree(AMsetActorId(doc1, actor_id1)); - AMfree(AMmapPutStr(doc1, AM_ROOT, "1", "a")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "2", "b")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "3", "c")); + AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("1"), AMstr("a"))); + AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("2"), AMstr("b"))); + AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("3"), AMstr("c"))); /* The second actor should win all conflicts here. 
*/ AMdoc* const doc2 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; @@ -522,24 +623,27 @@ static void test_map_range_back_and_forth_double(void** state) { AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; AMfree(AMsetActorId(doc2, actor_id2)); - AMfree(AMmapPutStr(doc2, AM_ROOT, "1", "aa")); - AMfree(AMmapPutStr(doc2, AM_ROOT, "2", "bb")); - AMfree(AMmapPutStr(doc2, AM_ROOT, "3", "cc")); + AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("1"), AMstr("aa"))); + AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("2"), AMstr("bb"))); + AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("3"), AMstr("cc"))); AMfree(AMmerge(doc1, doc2)); /* Forward, back, back. */ AMmapItems range_all = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, NULL, NULL, NULL), + AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; /* First */ AMmapItem const* next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); + AMbyteSpan key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); AMvalue next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "aa"); + assert_int_equal(next_value.str.count, 2); + assert_memory_equal(next_value.str.src, "aa", next_value.str.count); AMobjId const* next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); @@ -549,10 +653,13 @@ static void test_map_range_back_and_forth_double(void** state) { range_back_all = AMmapItemsRewound(&range_back_all); AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); AMvalue next_back_value = AMmapItemValue(next_back); 
assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "cc"); + assert_int_equal(next_back_value.str.count, 2); + assert_memory_equal(next_back_value.str.src, "cc", next_back_value.str.count); AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); @@ -560,10 +667,13 @@ static void test_map_range_back_and_forth_double(void** state) { /* Second */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "bb"); + assert_int_equal(next_back_value.str.count, 2); + assert_memory_equal(next_back_value.str.src, "bb", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); @@ -575,10 +685,13 @@ static void test_map_range_back_and_forth_double(void** state) { /* First */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "aa"); + assert_int_equal(next_value.str.count, 2); + assert_memory_equal(next_value.str.src, "aa", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); @@ -586,10 +699,13 @@ static void 
test_map_range_back_and_forth_double(void** state) { /* Third */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "cc"); + assert_int_equal(next_back_value.str.count, 2); + assert_memory_equal(next_back_value.str.src, "cc", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); @@ -597,10 +713,13 @@ static void test_map_range_back_and_forth_double(void** state) { /* Second */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "bb"); + assert_int_equal(next_value.str.count, 2); + assert_memory_equal(next_value.str.src, "bb", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); @@ -611,10 +730,13 @@ static void test_map_range_back_and_forth_double(void** state) { /* First */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "aa"); + assert_int_equal(next_value.str.count, 2); + 
assert_memory_equal(next_value.str.src, "aa", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); @@ -622,10 +744,13 @@ static void test_map_range_back_and_forth_double(void** state) { /* Second */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "bb"); + assert_int_equal(next_value.str.count, 2); + assert_memory_equal(next_value.str.src, "bb", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); @@ -633,10 +758,13 @@ static void test_map_range_back_and_forth_double(void** state) { /* Third */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "3"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "cc"); + assert_int_equal(next_value.str.count, 2); + assert_memory_equal(next_value.str.src, "cc", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); @@ -649,10 +777,13 @@ static void test_map_range_back_and_forth_double(void** state) { /* Third */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + 
assert_memory_equal(key.src, "3", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "cc"); + assert_int_equal(next_back_value.str.count, 2); + assert_memory_equal(next_back_value.str.src, "cc", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); @@ -660,10 +791,13 @@ static void test_map_range_back_and_forth_double(void** state) { /* Second */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "bb"); + assert_int_equal(next_back_value.str.count, 2); + assert_memory_equal(next_back_value.str.src, "bb", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); @@ -671,10 +805,13 @@ static void test_map_range_back_and_forth_double(void** state) { /* First */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "1"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "aa"); + assert_int_equal(next_back_value.str.count, 2); + assert_memory_equal(next_back_value.str.src, "aa", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); 
assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); @@ -691,9 +828,9 @@ static void test_map_range_at_back_and_forth_single(void** state) { AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; - AMfree(AMmapPutStr(doc, AM_ROOT, "1", "a")); - AMfree(AMmapPutStr(doc, AM_ROOT, "2", "b")); - AMfree(AMmapPutStr(doc, AM_ROOT, "3", "c")); + AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("1"), AMstr("a"))); + AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("2"), AMstr("b"))); + AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("3"), AMstr("c"))); AMchangeHashes const heads = AMpush(&stack, AMgetHeads(doc), @@ -702,16 +839,19 @@ static void test_map_range_at_back_and_forth_single(void** state) { /* Forward, back, back. */ AMmapItems range_all = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, &heads), + AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; /* First */ AMmapItem const* next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); + AMbyteSpan key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); AMvalue next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "a"); + assert_int_equal(next_value.str.count, 1); + assert_memory_equal(next_value.str.src, "a", next_value.str.count); AMobjId const* next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); @@ -721,10 +861,13 @@ static void test_map_range_at_back_and_forth_single(void** state) { range_back_all = AMmapItemsRewound(&range_back_all); AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + 
assert_memory_equal(key.src, "3", key.count); AMvalue next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "c"); + assert_int_equal(next_back_value.str.count, 1); + assert_memory_equal(next_back_value.str.src, "c", next_back_value.str.count); AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); @@ -732,10 +875,13 @@ static void test_map_range_at_back_and_forth_single(void** state) { /* Second */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "b"); + assert_int_equal(next_back_value.str.count, 1); + assert_memory_equal(next_back_value.str.src, "b", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); @@ -747,10 +893,13 @@ static void test_map_range_at_back_and_forth_single(void** state) { /* First */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "a"); + assert_int_equal(next_value.str.count, 1); + assert_memory_equal(next_value.str.src, "a", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); 
assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); @@ -758,10 +907,13 @@ static void test_map_range_at_back_and_forth_single(void** state) { /* Third */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "c"); + assert_int_equal(next_back_value.str.count, 1); + assert_memory_equal(next_back_value.str.src, "c", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); @@ -769,10 +921,13 @@ static void test_map_range_at_back_and_forth_single(void** state) { /* Second */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "b"); + assert_int_equal(next_value.str.count, 1); + assert_memory_equal(next_value.str.src, "b", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); @@ -783,10 +938,13 @@ static void test_map_range_at_back_and_forth_single(void** state) { /* First */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, 
AM_VALUE_STR); - assert_string_equal(next_value.str, "a"); + assert_int_equal(next_value.str.count, 1); + assert_memory_equal(next_value.str.src, "a", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); @@ -794,10 +952,13 @@ static void test_map_range_at_back_and_forth_single(void** state) { /* Second */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "b"); + assert_int_equal(next_value.str.count, 1); + assert_memory_equal(next_value.str.src, "b", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); @@ -805,10 +966,13 @@ static void test_map_range_at_back_and_forth_single(void** state) { /* Third */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "3"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "c"); + assert_int_equal(next_value.str.count, 1); + assert_memory_equal(next_value.str.src, "c", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); @@ -821,10 +985,13 @@ static void test_map_range_at_back_and_forth_single(void** state) { /* Third */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - 
assert_string_equal(AMmapItemKey(next_back), "3"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "c"); + assert_int_equal(next_back_value.str.count, 1); + assert_memory_equal(next_back_value.str.src, "c", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); @@ -832,10 +999,13 @@ static void test_map_range_at_back_and_forth_single(void** state) { /* Second */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "b"); + assert_int_equal(next_back_value.str.count, 1); + assert_memory_equal(next_back_value.str.src, "b", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); @@ -843,10 +1013,13 @@ static void test_map_range_at_back_and_forth_single(void** state) { /* First */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "1"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "a"); + assert_int_equal(next_back_value.str.count, 1); + 
assert_memory_equal(next_back_value.str.src, "a", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); @@ -864,9 +1037,9 @@ static void test_map_range_at_back_and_forth_double(void** state) { cmocka_cb).actor_id; AMfree(AMsetActorId(doc1, actor_id1)); - AMfree(AMmapPutStr(doc1, AM_ROOT, "1", "a")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "2", "b")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "3", "c")); + AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("1"), AMstr("a"))); + AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("2"), AMstr("b"))); + AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("3"), AMstr("c"))); /* The second actor should win all conflicts here. */ AMdoc* const doc2 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; @@ -875,9 +1048,9 @@ static void test_map_range_at_back_and_forth_double(void** state) { AM_VALUE_ACTOR_ID, cmocka_cb).actor_id; AMfree(AMsetActorId(doc2, actor_id2)); - AMfree(AMmapPutStr(doc2, AM_ROOT, "1", "aa")); - AMfree(AMmapPutStr(doc2, AM_ROOT, "2", "bb")); - AMfree(AMmapPutStr(doc2, AM_ROOT, "3", "cc")); + AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("1"), AMstr("aa"))); + AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("2"), AMstr("bb"))); + AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("3"), AMstr("cc"))); AMfree(AMmerge(doc1, doc2)); AMchangeHashes const heads = AMpush(&stack, @@ -887,16 +1060,19 @@ static void test_map_range_at_back_and_forth_double(void** state) { /* Forward, back, back. 
*/ AMmapItems range_all = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, NULL, NULL, &heads), + AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; /* First */ AMmapItem const* next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); + AMbyteSpan key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); AMvalue next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "aa"); + assert_int_equal(next_value.str.count, 2); + assert_memory_equal(next_value.str.src, "aa", next_value.str.count); AMobjId const* next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); @@ -906,10 +1082,13 @@ static void test_map_range_at_back_and_forth_double(void** state) { range_back_all = AMmapItemsRewound(&range_back_all); AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); AMvalue next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "cc"); + assert_int_equal(next_back_value.str.count, 2); + assert_memory_equal(next_back_value.str.src, "cc", next_back_value.str.count); AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); @@ -917,10 +1096,13 @@ static void test_map_range_at_back_and_forth_double(void** state) { /* Second */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), 
"2"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "bb"); + assert_int_equal(next_back_value.str.count, 2); + assert_memory_equal(next_back_value.str.src, "bb", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); @@ -932,10 +1114,13 @@ static void test_map_range_at_back_and_forth_double(void** state) { /* First */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "aa"); + assert_int_equal(next_value.str.count, 2); + assert_memory_equal(next_value.str.src, "aa", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); @@ -943,10 +1128,13 @@ static void test_map_range_at_back_and_forth_double(void** state) { /* Third */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "cc"); + assert_int_equal(next_back_value.str.count, 2); + assert_memory_equal(next_back_value.str.src, "cc", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); 
assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); @@ -954,10 +1142,13 @@ static void test_map_range_at_back_and_forth_double(void** state) { /* Second */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "bb"); + assert_int_equal(next_value.str.count, 2); + assert_memory_equal(next_value.str.src, "bb", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); @@ -968,10 +1159,13 @@ static void test_map_range_at_back_and_forth_double(void** state) { /* First */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "aa"); + assert_int_equal(next_value.str.count, 2); + assert_memory_equal(next_value.str.src, "aa", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); @@ -979,10 +1173,13 @@ static void test_map_range_at_back_and_forth_double(void** state) { /* Second */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - 
assert_string_equal(next_value.str, "bb"); + assert_int_equal(next_value.str.count, 2); + assert_memory_equal(next_value.str.src, "bb", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); @@ -990,10 +1187,13 @@ static void test_map_range_at_back_and_forth_double(void** state) { /* Third */ next = AMmapItemsNext(&range_all, 1); assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "3"); + key = AMmapItemKey(next); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); next_value = AMmapItemValue(next); assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "cc"); + assert_int_equal(next_value.str.count, 2); + assert_memory_equal(next_value.str.src, "cc", next_value.str.count); next_obj_id = AMmapItemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); @@ -1006,10 +1206,13 @@ static void test_map_range_at_back_and_forth_double(void** state) { /* Third */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "cc"); + assert_int_equal(next_back_value.str.count, 2); + assert_memory_equal(next_back_value.str.src, "cc", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); @@ -1017,10 +1220,13 @@ static void test_map_range_at_back_and_forth_double(void** state) { /* Second */ next_back = 
AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "bb"); + assert_int_equal(next_back_value.str.count, 2); + assert_memory_equal(next_back_value.str.src, "bb", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); @@ -1028,10 +1234,13 @@ static void test_map_range_at_back_and_forth_double(void** state) { /* First */ next_back = AMmapItemsNext(&range_back_all, 1); assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "1"); + key = AMmapItemKey(next_back); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); next_back_value = AMmapItemValue(next_back); assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "aa"); + assert_int_equal(next_back_value.str.count, 2); + assert_memory_equal(next_back_value.str.src, "aa", next_back_value.str.count); next_back_obj_id = AMmapItemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); @@ -1043,11 +1252,11 @@ static void test_map_range_at_back_and_forth_double(void** state) { static void test_get_range_values(void** state) { AMresultStack* stack = *state; AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMmapPutStr(doc1, AM_ROOT, "aa", "aaa")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "bb", "bbb")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "cc", "ccc")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "dd", "ddd")); - AMfree(AMcommit(doc1, NULL, NULL)); + 
AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("aa"), AMstr("aaa"))); + AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("bb"), AMstr("bbb"))); + AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("cc"), AMstr("ccc"))); + AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("dd"), AMstr("ddd"))); + AMfree(AMcommit(doc1, AMstr(NULL), NULL)); AMchangeHashes const v1 = AMpush(&stack, AMgetHeads(doc1), @@ -1055,16 +1264,16 @@ static void test_get_range_values(void** state) { cmocka_cb).change_hashes; AMdoc* const doc2 = AMpush(&stack, AMfork(doc1, NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMmapPutStr(doc1, AM_ROOT, "cc", "ccc V2")); - AMfree(AMcommit(doc1, NULL, NULL)); + AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("cc"), AMstr("ccc V2"))); + AMfree(AMcommit(doc1, AMstr(NULL), NULL)); - AMfree(AMmapPutStr(doc2, AM_ROOT, "cc", "ccc V3")); - AMfree(AMcommit(doc2, NULL, NULL)); + AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("cc"), AMstr("ccc V3"))); + AMfree(AMcommit(doc2, AMstr(NULL), NULL)); AMfree(AMmerge(doc1, doc2)); AMmapItems range = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, "b", "d", NULL), + AMmapRange(doc1, AM_ROOT, AMstr("b"), AMstr("d"), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; AMmapItems range_back = AMmapItemsReversed(&range); @@ -1092,7 +1301,7 @@ static void test_get_range_values(void** state) { } range = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, "b", "d", &v1), + AMmapRange(doc1, AM_ROOT, AMstr("b"), AMstr("d"), &v1), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; range_back = AMmapItemsReversed(&range); @@ -1119,7 +1328,7 @@ static void test_get_range_values(void** state) { } range = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, NULL, NULL, NULL), + AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; AMobjItems values = AMpush(&stack, @@ -1137,7 +1346,7 @@ static void test_get_range_values(void** state) { } range = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, NULL, NULL, &v1), + AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), &v1), 
AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; values = AMpush(&stack, @@ -1170,6 +1379,7 @@ int run_map_tests(void) { cmocka_unit_test(test_AMmapPutStr), cmocka_unit_test(test_AMmapPut(Timestamp)), cmocka_unit_test(test_AMmapPut(Uint)), + cmocka_unit_test_setup_teardown(test_get_NUL_string, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_range_iter_map, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_single, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_double, setup_stack, teardown_stack), diff --git a/rust/automerge-c/test/ported_wasm/basic_tests.c b/rust/automerge-c/test/ported_wasm/basic_tests.c index 2353c3b7..e233aa41 100644 --- a/rust/automerge-c/test/ported_wasm/basic_tests.c +++ b/rust/automerge-c/test/ported_wasm/basic_tests.c @@ -37,7 +37,7 @@ static void test_start_and_commit(void** state) { /* const doc = create() */ AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* doc.commit() */ - AMpush(&stack, AMcommit(doc, NULL, NULL), AM_VALUE_CHANGE_HASHES, cmocka_cb); + AMpush(&stack, AMcommit(doc, AMstr(NULL), NULL), AM_VALUE_CHANGE_HASHES, cmocka_cb); } /** @@ -51,7 +51,7 @@ static void test_getting_a_nonexistent_prop_does_not_throw_an_error(void** state /* const result = doc.getWithType(root, "hello") */ /* assert.deepEqual(result, undefined) */ AMpush(&stack, - AMmapGet(doc, AM_ROOT, "hello", NULL), + AMmapGet(doc, AM_ROOT, AMstr("hello"), NULL), AM_VALUE_VOID, cmocka_cb); } @@ -64,7 +64,7 @@ static void test_should_be_able_to_set_and_get_a_simple_value(void** state) { /* const doc: Automerge = create("aabbcc") */ AMdoc* const doc = AMpush(&stack, AMcreate(AMpush(&stack, - AMactorIdInitStr("aabbcc"), + AMactorIdInitStr(AMstr("aabbcc")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, @@ -73,41 +73,43 @@ static void test_should_be_able_to_set_and_get_a_simple_value(void** state) { /* let result */ /* */ /* 
doc.put(root, "hello", "world") */ - AMfree(AMmapPutStr(doc, AM_ROOT, "hello", "world")); + AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("hello"), AMstr("world"))); /* doc.put(root, "number1", 5, "uint") */ - AMfree(AMmapPutUint(doc, AM_ROOT, "number1", 5)); + AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("number1"), 5)); /* doc.put(root, "number2", 5) */ - AMfree(AMmapPutInt(doc, AM_ROOT, "number2", 5)); + AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("number2"), 5)); /* doc.put(root, "number3", 5.5) */ - AMfree(AMmapPutF64(doc, AM_ROOT, "number3", 5.5)); + AMfree(AMmapPutF64(doc, AM_ROOT, AMstr("number3"), 5.5)); /* doc.put(root, "number4", 5.5, "f64") */ - AMfree(AMmapPutF64(doc, AM_ROOT, "number4", 5.5)); + AMfree(AMmapPutF64(doc, AM_ROOT, AMstr("number4"), 5.5)); /* doc.put(root, "number5", 5.5, "int") */ - AMfree(AMmapPutInt(doc, AM_ROOT, "number5", 5.5)); + AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("number5"), 5.5)); /* doc.put(root, "bool", true) */ - AMfree(AMmapPutBool(doc, AM_ROOT, "bool", true)); + AMfree(AMmapPutBool(doc, AM_ROOT, AMstr("bool"), true)); /* doc.put(root, "time1", 1000, "timestamp") */ - AMfree(AMmapPutTimestamp(doc, AM_ROOT, "time1", 1000)); + AMfree(AMmapPutTimestamp(doc, AM_ROOT, AMstr("time1"), 1000)); /* doc.put(root, "time2", new Date(1001)) */ - AMfree(AMmapPutTimestamp(doc, AM_ROOT, "time2", 1001)); + AMfree(AMmapPutTimestamp(doc, AM_ROOT, AMstr("time2"), 1001)); /* doc.putObject(root, "list", []); */ - AMfree(AMmapPutObject(doc, AM_ROOT, "list", AM_OBJ_TYPE_LIST)); + AMfree(AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST)); /* doc.put(root, "null", null) */ - AMfree(AMmapPutNull(doc, AM_ROOT, "null")); + AMfree(AMmapPutNull(doc, AM_ROOT, AMstr("null"))); /* */ /* result = doc.getWithType(root, "hello") */ /* assert.deepEqual(result, ["str", "world"]) */ /* assert.deepEqual(doc.get("/", "hello"), "world") */ - assert_string_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "hello", NULL), - AM_VALUE_STR, - cmocka_cb).str, "world"); + 
AMbyteSpan str = AMpush(&stack, + AMmapGet(doc, AM_ROOT, AMstr("hello"), NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, strlen("world")); + assert_memory_equal(str.src, "world", str.count); /* assert.deepEqual(doc.get("/", "hello"), "world") */ /* */ /* result = doc.getWithType(root, "number1") */ /* assert.deepEqual(result, ["uint", 5]) */ assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "number1", NULL), + AMmapGet(doc, AM_ROOT, AMstr("number1"), NULL), AM_VALUE_UINT, cmocka_cb).uint, 5); /* assert.deepEqual(doc.get("/", "number1"), 5) */ @@ -115,75 +117,77 @@ static void test_should_be_able_to_set_and_get_a_simple_value(void** state) { /* result = doc.getWithType(root, "number2") */ /* assert.deepEqual(result, ["int", 5]) */ assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "number2", NULL), + AMmapGet(doc, AM_ROOT, AMstr("number2"), NULL), AM_VALUE_INT, cmocka_cb).int_, 5); /* */ /* result = doc.getWithType(root, "number3") */ /* assert.deepEqual(result, ["f64", 5.5]) */ assert_float_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "number3", NULL), + AMmapGet(doc, AM_ROOT, AMstr("number3"), NULL), AM_VALUE_F64, cmocka_cb).f64, 5.5, DBL_EPSILON); /* */ /* result = doc.getWithType(root, "number4") */ /* assert.deepEqual(result, ["f64", 5.5]) */ assert_float_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "number4", NULL), + AMmapGet(doc, AM_ROOT, AMstr("number4"), NULL), AM_VALUE_F64, cmocka_cb).f64, 5.5, DBL_EPSILON); /* */ /* result = doc.getWithType(root, "number5") */ /* assert.deepEqual(result, ["int", 5]) */ assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "number5", NULL), + AMmapGet(doc, AM_ROOT, AMstr("number5"), NULL), AM_VALUE_INT, cmocka_cb).int_, 5); /* */ /* result = doc.getWithType(root, "bool") */ /* assert.deepEqual(result, ["boolean", true]) */ assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "bool", NULL), + AMmapGet(doc, AM_ROOT, AMstr("bool"), NULL), AM_VALUE_BOOLEAN, cmocka_cb).boolean, 
true); /* */ /* doc.put(root, "bool", false, "boolean") */ - AMfree(AMmapPutBool(doc, AM_ROOT, "bool", false)); + AMfree(AMmapPutBool(doc, AM_ROOT, AMstr("bool"), false)); /* */ /* result = doc.getWithType(root, "bool") */ /* assert.deepEqual(result, ["boolean", false]) */ assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "bool", NULL), + AMmapGet(doc, AM_ROOT, AMstr("bool"), NULL), AM_VALUE_BOOLEAN, cmocka_cb).boolean, false); /* */ /* result = doc.getWithType(root, "time1") */ /* assert.deepEqual(result, ["timestamp", new Date(1000)]) */ assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "time1", NULL), + AMmapGet(doc, AM_ROOT, AMstr("time1"), NULL), AM_VALUE_TIMESTAMP, cmocka_cb).timestamp, 1000); /* */ /* result = doc.getWithType(root, "time2") */ /* assert.deepEqual(result, ["timestamp", new Date(1001)]) */ assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "time2", NULL), + AMmapGet(doc, AM_ROOT, AMstr("time2"), NULL), AM_VALUE_TIMESTAMP, cmocka_cb).timestamp, 1001); /* */ /* result = doc.getWithType(root, "list") */ /* assert.deepEqual(result, ["list", "10@aabbcc"]); */ AMobjId const* const list = AMpush(&stack, - AMmapGet(doc, AM_ROOT, "list", NULL), + AMmapGet(doc, AM_ROOT, AMstr("list"), NULL), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; assert_int_equal(AMobjIdCounter(list), 10); - assert_string_equal(AMactorIdStr(AMobjIdActorId(list)), "aabbcc"); + str = AMactorIdStr(AMobjIdActorId(list)); + assert_int_equal(str.count, strlen("aabbcc")); + assert_memory_equal(str.src, "aabbcc", str.count); /* */ /* result = doc.getWithType(root, "null") */ /* assert.deepEqual(result, ["null", null]); */ AMpush(&stack, - AMmapGet(doc, AM_ROOT, "null", NULL), + AMmapGet(doc, AM_ROOT, AMstr("null"), NULL), AM_VALUE_NULL, cmocka_cb); } @@ -197,13 +201,13 @@ static void test_should_be_able_to_use_bytes(void** state) { AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* doc.put("_root", "data1", new Uint8Array([10, 11, 12])); */ 
static uint8_t const DATA1[] = {10, 11, 12}; - AMfree(AMmapPutBytes(doc, AM_ROOT, "data1", DATA1, sizeof(DATA1))); + AMfree(AMmapPutBytes(doc, AM_ROOT, AMstr("data1"), DATA1, sizeof(DATA1))); /* doc.put("_root", "data2", new Uint8Array([13, 14, 15]), "bytes"); */ static uint8_t const DATA2[] = {13, 14, 15}; - AMfree(AMmapPutBytes(doc, AM_ROOT, "data2", DATA2, sizeof(DATA2))); + AMfree(AMmapPutBytes(doc, AM_ROOT, AMstr("data2"), DATA2, sizeof(DATA2))); /* const value1 = doc.getWithType("_root", "data1") */ AMbyteSpan const value1 = AMpush(&stack, - AMmapGet(doc, AM_ROOT, "data1", NULL), + AMmapGet(doc, AM_ROOT, AMstr("data1"), NULL), AM_VALUE_BYTES, cmocka_cb).bytes; /* assert.deepEqual(value1, ["bytes", new Uint8Array([10, 11, 12])]); */ @@ -211,7 +215,7 @@ static void test_should_be_able_to_use_bytes(void** state) { assert_memory_equal(value1.src, DATA1, sizeof(DATA1)); /* const value2 = doc.getWithType("_root", "data2") */ AMbyteSpan const value2 = AMpush(&stack, - AMmapGet(doc, AM_ROOT, "data2", NULL), + AMmapGet(doc, AM_ROOT, AMstr("data2"), NULL), AM_VALUE_BYTES, cmocka_cb).bytes; /* assert.deepEqual(value2, ["bytes", new Uint8Array([13, 14, 15])]); */ @@ -232,18 +236,18 @@ static void test_should_be_able_to_make_subobjects(void** state) { /* const submap = doc.putObject(root, "submap", {}) */ AMobjId const* const submap = AMpush( &stack, - AMmapPutObject(doc, AM_ROOT, "submap", AM_OBJ_TYPE_MAP), + AMmapPutObject(doc, AM_ROOT, AMstr("submap"), AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* doc.put(submap, "number", 6, "uint") */ - AMfree(AMmapPutUint(doc, submap, "number", 6)); + AMfree(AMmapPutUint(doc, submap, AMstr("number"), 6)); /* assert.strictEqual(doc.pendingOps(), 2) */ assert_int_equal(AMpendingOps(doc), 2); /* */ /* result = doc.getWithType(root, "submap") */ /* assert.deepEqual(result, ["map", submap]) */ assert_true(AMobjIdEqual(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "submap", NULL), + AMmapGet(doc, AM_ROOT, AMstr("submap"), NULL), 
AM_VALUE_OBJ_ID, cmocka_cb).obj_id, submap)); @@ -251,7 +255,7 @@ static void test_should_be_able_to_make_subobjects(void** state) { /* result = doc.getWithType(submap, "number") */ /* assert.deepEqual(result, ["uint", 6]) */ assert_int_equal(AMpush(&stack, - AMmapGet(doc, submap, "number", NULL), + AMmapGet(doc, submap, AMstr("number"), NULL), AM_VALUE_UINT, cmocka_cb).uint, 6); @@ -269,49 +273,59 @@ static void test_should_be_able_to_make_lists(void** state) { /* const sublist = doc.putObject(root, "numbers", []) */ AMobjId const* const sublist = AMpush( &stack, - AMmapPutObject(doc, AM_ROOT, "numbers", AM_OBJ_TYPE_LIST), + AMmapPutObject(doc, AM_ROOT, AMstr("numbers"), AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* doc.insert(sublist, 0, "a"); */ - AMfree(AMlistPutStr(doc, sublist, 0, true, "a")); + AMfree(AMlistPutStr(doc, sublist, 0, true, AMstr("a"))); /* doc.insert(sublist, 1, "b"); */ - AMfree(AMlistPutStr(doc, sublist, 1, true, "b")); + AMfree(AMlistPutStr(doc, sublist, 1, true, AMstr("b"))); /* doc.insert(sublist, 2, "c"); */ - AMfree(AMlistPutStr(doc, sublist, 2, true, "c")); + AMfree(AMlistPutStr(doc, sublist, 2, true, AMstr("c"))); /* doc.insert(sublist, 0, "z"); */ - AMfree(AMlistPutStr(doc, sublist, 0, true, "z")); + AMfree(AMlistPutStr(doc, sublist, 0, true, AMstr("z"))); /* */ /* assert.deepEqual(doc.getWithType(sublist, 0), ["str", "z"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, sublist, 0, NULL), - AM_VALUE_STR, - cmocka_cb).str, "z"); + AMbyteSpan str = AMpush(&stack, + AMlistGet(doc, sublist, 0, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "z", str.count); /* assert.deepEqual(doc.getWithType(sublist, 1), ["str", "a"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, sublist, 1, NULL), - AM_VALUE_STR, - cmocka_cb).str, "a"); + str = AMpush(&stack, + AMlistGet(doc, sublist, 1, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, 1); + 
assert_memory_equal(str.src, "a", str.count); /* assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, sublist, 2, NULL), - AM_VALUE_STR, - cmocka_cb).str, "b"); + str = AMpush(&stack, + AMlistGet(doc, sublist, 2, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); /* assert.deepEqual(doc.getWithType(sublist, 3), ["str", "c"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, sublist, 3, NULL), - AM_VALUE_STR, - cmocka_cb).str, "c"); + str = AMpush(&stack, + AMlistGet(doc, sublist, 3, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); /* assert.deepEqual(doc.length(sublist), 4) */ assert_int_equal(AMobjSize(doc, sublist, NULL), 4); /* */ /* doc.put(sublist, 2, "b v2"); */ - AMfree(AMlistPutStr(doc, sublist, 2, false, "b v2")); + AMfree(AMlistPutStr(doc, sublist, 2, false, AMstr("b v2"))); /* */ /* assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b v2"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, sublist, 2, NULL), - AM_VALUE_STR, - cmocka_cb).str, "b v2"); + str = AMpush(&stack, + AMlistGet(doc, sublist, 2, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "b v2", str.count); /* assert.deepEqual(doc.length(sublist), 4) */ assert_int_equal(AMobjSize(doc, sublist, NULL), 4); } @@ -328,34 +342,38 @@ static void test_lists_have_insert_set_splice_and_push_ops(void** state) { /* const sublist = doc.putObject(root, "letters", []) */ AMobjId const* const sublist = AMpush( &stack, - AMmapPutObject(doc, AM_ROOT, "letters", AM_OBJ_TYPE_LIST), + AMmapPutObject(doc, AM_ROOT, AMstr("letters"), AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* doc.insert(sublist, 0, "a"); */ - AMfree(AMlistPutStr(doc, sublist, 0, true, "a")); + AMfree(AMlistPutStr(doc, sublist, 0, true, AMstr("a"))); /* 
doc.insert(sublist, 0, "b"); */ - AMfree(AMlistPutStr(doc, sublist, 0, true, "b")); + AMfree(AMlistPutStr(doc, sublist, 0, true, AMstr("b"))); /* assert.deepEqual(doc.materialize(), { letters: ["b", "a"] }) */ AMmapItems doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; AMmapItem const* doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "letters"); + AMbyteSpan key = AMmapItemKey(doc_item); + assert_int_equal(key.count, strlen("letters")); + assert_memory_equal(key.src, "letters", key.count); { AMlistItems list_items = AMpush( &stack, AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), AM_VALUE_LIST_ITEMS, cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "b"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "a"); + AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); assert_null(AMlistItemsNext(&list_items, 1)); } /* doc.push(sublist, "c"); */ - AMfree(AMlistPutStr(doc, sublist, SIZE_MAX, true, "c")); + AMfree(AMlistPutStr(doc, sublist, SIZE_MAX, true, AMstr("c"))); /* const heads = doc.getHeads() */ AMchangeHashes const heads = AMpush(&stack, AMgetHeads(doc), @@ -363,107 +381,131 @@ static void test_lists_have_insert_set_splice_and_push_ops(void** state) { cmocka_cb).change_hashes; /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c"] }) */ doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; doc_item = AMmapItemsNext(&doc_items, 1); - 
assert_string_equal(AMmapItemKey(doc_item), "letters"); + key = AMmapItemKey(doc_item); + assert_int_equal(key.count, strlen("letters")); + assert_memory_equal(key.src, "letters", key.count); { AMlistItems list_items = AMpush( &stack, AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), AM_VALUE_LIST_ITEMS, cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "b"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "a"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "c"); + AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); assert_null(AMlistItemsNext(&list_items, 1)); } /* doc.push(sublist, 3, "timestamp"); */ AMfree(AMlistPutTimestamp(doc, sublist, SIZE_MAX, true, 3)); /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3)] })*/ doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "letters"); + key = AMmapItemKey(doc_item); + assert_int_equal(key.count, strlen("letters")); + assert_memory_equal(key.src, "letters", key.count); { AMlistItems list_items = AMpush( &stack, AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), AM_VALUE_LIST_ITEMS, cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "b"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "a"); 
- assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "c"); + AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).timestamp, 3); assert_null(AMlistItemsNext(&list_items, 1)); } /* doc.splice(sublist, 1, 1, ["d", "e", "f"]); */ - static AMvalue const DATA[] = {{.str_tag = AM_VALUE_STR, .str = "d"}, - {.str_tag = AM_VALUE_STR, .str = "e"}, - {.str_tag = AM_VALUE_STR, .str = "f"}}; + static AMvalue const DATA[] = {{.str_tag = AM_VALUE_STR, .str = {.src = "d", .count = 1}}, + {.str_tag = AM_VALUE_STR, .str = {.src = "e", .count = 1}}, + {.str_tag = AM_VALUE_STR, .str = {.src = "f", .count = 1}}}; AMfree(AMsplice(doc, sublist, 1, 1, DATA, sizeof(DATA)/sizeof(AMvalue))); /* assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", new Date(3)] })*/ doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "letters"); + key = AMmapItemKey(doc_item); + assert_int_equal(key.count, strlen("letters")); + assert_memory_equal(key.src, "letters", key.count); { AMlistItems list_items = AMpush( &stack, AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), AM_VALUE_LIST_ITEMS, cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "b"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "d"); - 
assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "e"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "f"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "c"); + AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "d", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "e", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "f", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).timestamp, 3); assert_null(AMlistItemsNext(&list_items, 1)); } /* doc.put(sublist, 0, "z"); */ - AMfree(AMlistPutStr(doc, sublist, 0, false, "z")); + AMfree(AMlistPutStr(doc, sublist, 0, false, AMstr("z"))); /* assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", new Date(3)] })*/ doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "letters"); + key = AMmapItemKey(doc_item); + assert_int_equal(key.count, strlen("letters")); + assert_memory_equal(key.src, "letters", key.count); { AMlistItems list_items = AMpush( &stack, AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), AM_VALUE_LIST_ITEMS, cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "z"); - 
assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "d"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "e"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "f"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "c"); + AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "z", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "d", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "e", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "f", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).timestamp, 3); assert_null(AMlistItemsNext(&list_items, 1)); @@ -474,16 +516,21 @@ static void test_lists_have_insert_set_splice_and_push_ops(void** state) { AMlistRange(doc, sublist, 0, SIZE_MAX, NULL), AM_VALUE_LIST_ITEMS, cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, - "z"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, - "d"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, - "e"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, - "f"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, - "c"); + AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "z", str.count); + str = 
AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "d", str.count); + str = AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "e", str.count); + str = AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "f", str.count); + str = AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); assert_int_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).timestamp, 3); assert_null(AMlistItemsNext(&sublist_items, 1)); @@ -491,23 +538,28 @@ static void test_lists_have_insert_set_splice_and_push_ops(void** state) { assert_int_equal(AMobjSize(doc, sublist, NULL), 6); /* assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c"] })*/ doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, &heads), + AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "letters"); + key = AMmapItemKey(doc_item); + assert_int_equal(key.count, strlen("letters")); + assert_memory_equal(key.src, "letters", key.count); { AMlistItems list_items = AMpush( &stack, AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, &heads), AM_VALUE_LIST_ITEMS, cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "b"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "a"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "c"); + AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + 
assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); assert_null(AMlistItemsNext(&list_items, 1)); } } @@ -521,12 +573,12 @@ static void test_should_be_able_to_delete_non_existent_props(void** state) { AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* */ /* doc.put("_root", "foo", "bar") */ - AMfree(AMmapPutStr(doc, AM_ROOT, "foo", "bar")); + AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("foo"), AMstr("bar"))); /* doc.put("_root", "bip", "bap") */ - AMfree(AMmapPutStr(doc, AM_ROOT, "bip", "bap")); + AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("bip"), AMstr("bap"))); /* const hash1 = doc.commit() */ AMchangeHashes const hash1 = AMpush(&stack, - AMcommit(doc, NULL, NULL), + AMcommit(doc, AMstr(NULL), NULL), AM_VALUE_CHANGE_HASHES, cmocka_cb).change_hashes; /* */ @@ -535,16 +587,20 @@ static void test_should_be_able_to_delete_non_existent_props(void** state) { AMkeys(doc, AM_ROOT, NULL), AM_VALUE_STRS, cmocka_cb).strs; - assert_string_equal(AMstrsNext(&keys, 1), "bip"); - assert_string_equal(AMstrsNext(&keys, 1), "foo"); + AMbyteSpan str = AMstrsNext(&keys, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "bip", str.count); + str = AMstrsNext(&keys, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "foo", str.count); /* */ /* doc.delete("_root", "foo") */ - AMfree(AMmapDelete(doc, AM_ROOT, "foo")); + AMfree(AMmapDelete(doc, AM_ROOT, AMstr("foo"))); /* doc.delete("_root", "baz") */ - AMfree(AMmapDelete(doc, AM_ROOT, "baz")); + AMfree(AMmapDelete(doc, AM_ROOT, AMstr("baz"))); /* const hash2 = doc.commit() */ AMchangeHashes const hash2 = AMpush(&stack, - AMcommit(doc, NULL, NULL), + AMcommit(doc, AMstr(NULL), NULL), AM_VALUE_CHANGE_HASHES, cmocka_cb).change_hashes; /* */ @@ -553,20 +609,28 @@ static void 
test_should_be_able_to_delete_non_existent_props(void** state) { AMkeys(doc, AM_ROOT, NULL), AM_VALUE_STRS, cmocka_cb).strs; - assert_string_equal(AMstrsNext(&keys, 1), "bip"); + str = AMstrsNext(&keys, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "bip", str.count); /* assert.deepEqual(doc.keys("_root", [hash1]), ["bip", "foo"]) */ keys = AMpush(&stack, AMkeys(doc, AM_ROOT, &hash1), AM_VALUE_STRS, cmocka_cb).strs; - assert_string_equal(AMstrsNext(&keys, 1), "bip"); - assert_string_equal(AMstrsNext(&keys, 1), "foo"); + str = AMstrsNext(&keys, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "bip", str.count); + str = AMstrsNext(&keys, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "foo", str.count); /* assert.deepEqual(doc.keys("_root", [hash2]), ["bip"]) */ keys = AMpush(&stack, AMkeys(doc, AM_ROOT, &hash2), AM_VALUE_STRS, cmocka_cb).strs; - assert_string_equal(AMstrsNext(&keys, 1), "bip"); + str = AMstrsNext(&keys, 1); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "bip", str.count); } /** @@ -579,17 +643,19 @@ static void test_should_be_able_to_del(void **state) { /* const root = "_root" */ /* */ /* doc.put(root, "xxx", "xxx"); */ - AMfree(AMmapPutStr(doc, AM_ROOT, "xxx", "xxx")); + AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("xxx"), AMstr("xxx"))); /* assert.deepEqual(doc.getWithType(root, "xxx"), ["str", "xxx"]) */ - assert_string_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "xxx", NULL), - AM_VALUE_STR, - cmocka_cb).str, "xxx"); + AMbyteSpan const str = AMpush(&stack, + AMmapGet(doc, AM_ROOT, AMstr("xxx"), NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "xxx", str.count); /* doc.delete(root, "xxx"); */ - AMfree(AMmapDelete(doc, AM_ROOT, "xxx")); + AMfree(AMmapDelete(doc, AM_ROOT, AMstr("xxx"))); /* assert.deepEqual(doc.getWithType(root, "xxx"), undefined) */ AMpush(&stack, - AMmapGet(doc, AM_ROOT, "xxx", NULL), + 
AMmapGet(doc, AM_ROOT, AMstr("xxx"), NULL), AM_VALUE_VOID, cmocka_cb); } @@ -604,24 +670,24 @@ static void test_should_be_able_to_use_counters(void** state) { /* const root = "_root" */ /* */ /* doc.put(root, "counter", 10, "counter"); */ - AMfree(AMmapPutCounter(doc, AM_ROOT, "counter", 10)); + AMfree(AMmapPutCounter(doc, AM_ROOT, AMstr("counter"), 10)); /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 10]) */ assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "counter", NULL), + AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), AM_VALUE_COUNTER, cmocka_cb).counter, 10); /* doc.increment(root, "counter", 10); */ - AMfree(AMmapIncrement(doc, AM_ROOT, "counter", 10)); + AMfree(AMmapIncrement(doc, AM_ROOT, AMstr("counter"), 10)); /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 20]) */ assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "counter", NULL), + AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), AM_VALUE_COUNTER, cmocka_cb).counter, 20); /* doc.increment(root, "counter", -5); */ - AMfree(AMmapIncrement(doc, AM_ROOT, "counter", -5)); + AMfree(AMmapIncrement(doc, AM_ROOT, AMstr("counter"), -5)); /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 15]) */ assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "counter", NULL), + AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), AM_VALUE_COUNTER, cmocka_cb).counter, 15); } @@ -638,52 +704,64 @@ static void test_should_be_able_to_splice_text(void** state) { /* const text = doc.putObject(root, "text", ""); */ AMobjId const* const text = AMpush( &stack, - AMmapPutObject(doc, AM_ROOT, "text", AM_OBJ_TYPE_TEXT), + AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* doc.splice(text, 0, 0, "hello ") */ - AMfree(AMspliceText(doc, text, 0, 0, "hello ")); + AMfree(AMspliceText(doc, text, 0, 0, AMstr("hello "))); /* doc.splice(text, 6, 0, ["w", "o", "r", "l", "d"]) */ - static AMvalue const WORLD[] = {{.str_tag = 
AM_VALUE_STR, .str = "w"}, - {.str_tag = AM_VALUE_STR, .str = "o"}, - {.str_tag = AM_VALUE_STR, .str = "r"}, - {.str_tag = AM_VALUE_STR, .str = "l"}, - {.str_tag = AM_VALUE_STR, .str = "d"}}; + static AMvalue const WORLD[] = {{.str_tag = AM_VALUE_STR, .str = {.src = "w", .count = 1}}, + {.str_tag = AM_VALUE_STR, .str = {.src = "o", .count = 1}}, + {.str_tag = AM_VALUE_STR, .str = {.src = "r", .count = 1}}, + {.str_tag = AM_VALUE_STR, .str = {.src = "l", .count = 1}}, + {.str_tag = AM_VALUE_STR, .str = {.src = "d", .count = 1}}}; AMfree(AMsplice(doc, text, 6, 0, WORLD, sizeof(WORLD)/sizeof(AMvalue))); /* doc.splice(text, 11, 0, ["!", "?"]) */ - static AMvalue const INTERROBANG[] = {{.str_tag = AM_VALUE_STR, .str = "!"}, - {.str_tag = AM_VALUE_STR, .str = "?"}}; + static AMvalue const INTERROBANG[] = {{.str_tag = AM_VALUE_STR, .str = {.src = "!", .count = 1}}, + {.str_tag = AM_VALUE_STR, .str = {.src = "?", .count = 1}}}; AMfree(AMsplice(doc, text, 11, 0, INTERROBANG, sizeof(INTERROBANG)/sizeof(AMvalue))); /* assert.deepEqual(doc.getWithType(text, 0), ["str", "h"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, text, 0, NULL), - AM_VALUE_STR, - cmocka_cb).str, "h"); + AMbyteSpan str = AMpush(&stack, + AMlistGet(doc, text, 0, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "h", str.count); /* assert.deepEqual(doc.getWithType(text, 1), ["str", "e"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, text, 1, NULL), - AM_VALUE_STR, - cmocka_cb).str, "e"); + str = AMpush(&stack, + AMlistGet(doc, text, 1, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "e", str.count); /* assert.deepEqual(doc.getWithType(text, 9), ["str", "l"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, text, 9, NULL), - AM_VALUE_STR, - cmocka_cb).str, "l"); + str = AMpush(&stack, + AMlistGet(doc, text, 9, NULL), + AM_VALUE_STR, + cmocka_cb).str; + 
assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "l", str.count); /* assert.deepEqual(doc.getWithType(text, 10), ["str", "d"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, text, 10, NULL), - AM_VALUE_STR, - cmocka_cb).str, "d"); + str = AMpush(&stack, + AMlistGet(doc, text, 10, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "d", str.count); /* assert.deepEqual(doc.getWithType(text, 11), ["str", "!"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, text, 11, NULL), - AM_VALUE_STR, - cmocka_cb).str, "!"); + str = AMpush(&stack, + AMlistGet(doc, text, 11, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "!", str.count); /* assert.deepEqual(doc.getWithType(text, 12), ["str", "?"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, text, 12, NULL), - AM_VALUE_STR, - cmocka_cb).str, "?"); + str = AMpush(&stack, + AMlistGet(doc, text, 12, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "?", str.count); } /** @@ -696,36 +774,40 @@ static void test_should_be_able_to_insert_objects_into_text(void** state) { /* const text = doc.putObject("/", "text", "Hello world"); */ AMobjId const* const text = AMpush( &stack, - AMmapPutObject(doc, AM_ROOT, "text", AM_OBJ_TYPE_TEXT), + AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; - AMfree(AMspliceText(doc, text, 0, 0, "Hello world")); + AMfree(AMspliceText(doc, text, 0, 0, AMstr("Hello world"))); /* const obj = doc.insertObject(text, 6, { hello: "world" }); */ AMobjId const* const obj = AMpush( &stack, AMlistPutObject(doc, text, 6, true, AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; - AMfree(AMmapPutStr(doc, obj, "hello", "world")); + AMfree(AMmapPutStr(doc, obj, AMstr("hello"), AMstr("world"))); /* assert.deepEqual(doc.text(text), "Hello \ufffcworld"); */ - 
assert_string_equal(AMpush(&stack, - AMtext(doc, text, NULL), - AM_VALUE_STR, - cmocka_cb).str, u8"Hello \ufffcworld"); + AMbyteSpan str = AMpush(&stack, + AMtext(doc, text, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, strlen(u8"Hello \ufffcworld")); + assert_memory_equal(str.src, u8"Hello \ufffcworld", str.count); /* assert.deepEqual(doc.getWithType(text, 6), ["map", obj]); */ assert_true(AMobjIdEqual(AMpush(&stack, AMlistGet(doc, text, 6, NULL), AM_VALUE_OBJ_ID, cmocka_cb).obj_id, obj)); /* assert.deepEqual(doc.getWithType(obj, "hello"), ["str", "world"]); */ - assert_string_equal(AMpush(&stack, - AMmapGet(doc, obj, "hello", NULL), - AM_VALUE_STR, - cmocka_cb).str, "world"); + str = AMpush(&stack, + AMmapGet(doc, obj, AMstr("hello"), NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, strlen("world")); + assert_memory_equal(str.src, "world", str.count); } /** - * \brief should be able save all or incrementally + * \brief should be able to save all or incrementally */ static void test_should_be_able_to_save_all_or_incrementally(void** state) { AMresultStack* stack = *state; @@ -733,7 +815,7 @@ static void test_should_be_able_to_save_all_or_incrementally(void** state) { AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* */ /* doc.put("_root", "foo", 1) */ - AMfree(AMmapPutInt(doc, AM_ROOT, "foo", 1)); + AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("foo"), 1)); /* */ /* const save1 = doc.save() */ AMbyteSpan const save1 = AMpush(&stack, @@ -742,7 +824,7 @@ static void test_should_be_able_to_save_all_or_incrementally(void** state) { cmocka_cb).bytes; /* */ /* doc.put("_root", "bar", 2) */ - AMfree(AMmapPutInt(doc, AM_ROOT, "bar", 2)); + AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("bar"), 2)); /* */ /* const saveMidway = doc.clone().save(); */ AMbyteSpan const saveMidway = AMpush(&stack, @@ -761,7 +843,7 @@ static void test_should_be_able_to_save_all_or_incrementally(void** state) { cmocka_cb).bytes; /* 
*/ /* doc.put("_root", "baz", 3); */ - AMfree(AMmapPutInt(doc, AM_ROOT, "baz", 3)); + AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("baz"), 3)); /* */ /* const save3 = doc.saveIncremental(); */ AMbyteSpan const save3 = AMpush(&stack, @@ -843,42 +925,48 @@ static void test_should_be_able_to_splice_text_2(void** state) { /* const text = doc.putObject("_root", "text", ""); */ AMobjId const* const text = AMpush( &stack, - AMmapPutObject(doc, AM_ROOT, "text", AM_OBJ_TYPE_TEXT), + AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* doc.splice(text, 0, 0, "hello world"); */ - AMfree(AMspliceText(doc, text, 0, 0, "hello world")); + AMfree(AMspliceText(doc, text, 0, 0, AMstr("hello world"))); /* const hash1 = doc.commit(); */ AMchangeHashes const hash1 = AMpush(&stack, - AMcommit(doc, NULL, NULL), + AMcommit(doc, AMstr(NULL), NULL), AM_VALUE_CHANGE_HASHES, cmocka_cb).change_hashes; /* doc.splice(text, 6, 0, "big bad "); */ - AMfree(AMspliceText(doc, text, 6, 0, "big bad ")); + AMfree(AMspliceText(doc, text, 6, 0, AMstr("big bad "))); /* const hash2 = doc.commit(); */ AMchangeHashes const hash2 = AMpush(&stack, - AMcommit(doc, NULL, NULL), + AMcommit(doc, AMstr(NULL), NULL), AM_VALUE_CHANGE_HASHES, cmocka_cb).change_hashes; /* assert.strictEqual(doc.text(text), "hello big bad world") */ - assert_string_equal(AMpush(&stack, - AMtext(doc, text, NULL), - AM_VALUE_STR, - cmocka_cb).str, "hello big bad world"); + AMbyteSpan str = AMpush(&stack, + AMtext(doc, text, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, strlen("hello big bad world")); + assert_memory_equal(str.src, "hello big bad world", str.count); /* assert.strictEqual(doc.length(text), 19) */ assert_int_equal(AMobjSize(doc, text, NULL), 19); /* assert.strictEqual(doc.text(text, [hash1]), "hello world") */ - assert_string_equal(AMpush(&stack, - AMtext(doc, text, &hash1), - AM_VALUE_STR, - cmocka_cb).str, "hello world"); + str = AMpush(&stack, + 
AMtext(doc, text, &hash1), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, strlen("hello world")); + assert_memory_equal(str.src, "hello world", str.count); /* assert.strictEqual(doc.length(text, [hash1]), 11) */ assert_int_equal(AMobjSize(doc, text, &hash1), 11); /* assert.strictEqual(doc.text(text, [hash2]), "hello big bad world") */ - assert_string_equal(AMpush(&stack, - AMtext(doc, text, &hash2), - AM_VALUE_STR, - cmocka_cb).str, "hello big bad world"); + str = AMpush(&stack, + AMtext(doc, text, &hash2), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, strlen("hello big bad world")); + assert_memory_equal(str.src, "hello big bad world", str.count); /* assert.strictEqual(doc.length(text, [hash2]), 19) */ assert_int_equal(AMobjSize(doc, text, &hash2), 19); } @@ -891,13 +979,13 @@ static void test_local_inc_increments_all_visible_counters_in_a_map(void** state /* const doc1 = create("aaaa") */ AMdoc* const doc1 = AMpush(&stack, AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaa"), + AMactorIdInitStr(AMstr("aaaa")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, cmocka_cb).doc; /* doc1.put("_root", "hello", "world") */ - AMfree(AMmapPutStr(doc1, AM_ROOT, "hello", "world")); + AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("hello"), AMstr("world"))); /* const doc2 = load(doc1.save(), "bbbb"); */ AMbyteSpan const save = AMpush(&stack, AMsave(doc1), @@ -908,7 +996,7 @@ static void test_local_inc_increments_all_visible_counters_in_a_map(void** state AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMsetActorId(doc2, AMpush(&stack, - AMactorIdInitStr("bbbb"), + AMactorIdInitStr(AMstr("bbbb")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); /* const doc3 = load(doc1.save(), "cccc"); */ @@ -917,7 +1005,7 @@ static void test_local_inc_increments_all_visible_counters_in_a_map(void** state AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMsetActorId(doc3, AMpush(&stack, - AMactorIdInitStr("cccc"), + AMactorIdInitStr(AMstr("cccc")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); 
/* let heads = doc1.getHeads() */ @@ -926,11 +1014,11 @@ static void test_local_inc_increments_all_visible_counters_in_a_map(void** state AM_VALUE_CHANGE_HASHES, cmocka_cb).change_hashes; /* doc1.put("_root", "cnt", 20) */ - AMfree(AMmapPutInt(doc1, AM_ROOT, "cnt", 20)); + AMfree(AMmapPutInt(doc1, AM_ROOT, AMstr("cnt"), 20)); /* doc2.put("_root", "cnt", 0, "counter") */ - AMfree(AMmapPutCounter(doc2, AM_ROOT, "cnt", 0)); + AMfree(AMmapPutCounter(doc2, AM_ROOT, AMstr("cnt"), 0)); /* doc3.put("_root", "cnt", 10, "counter") */ - AMfree(AMmapPutCounter(doc3, AM_ROOT, "cnt", 10)); + AMfree(AMmapPutCounter(doc3, AM_ROOT, AMstr("cnt"), 10)); /* doc1.applyChanges(doc2.getChanges(heads)) */ AMchanges const changes2 = AMpush(&stack, AMgetChanges(doc2, &heads1), @@ -945,7 +1033,7 @@ static void test_local_inc_increments_all_visible_counters_in_a_map(void** state AMfree(AMapplyChanges(doc1, &changes3)); /* let result = doc1.getAll("_root", "cnt") */ AMobjItems result = AMpush(&stack, - AMmapGetAll(doc1, AM_ROOT, "cnt", NULL), + AMmapGetAll(doc1, AM_ROOT, AMstr("cnt"), NULL), AM_VALUE_OBJ_ITEMS, cmocka_cb).obj_items; /* assert.deepEqual(result, [ @@ -956,23 +1044,26 @@ static void test_local_inc_increments_all_visible_counters_in_a_map(void** state AMobjItem const* result_item = AMobjItemsNext(&result, 1); assert_int_equal(AMobjItemValue(result_item).int_, 20); assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "aaaa"); + AMbyteSpan str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "aaaa", str.count); result_item = AMobjItemsNext(&result, 1); assert_int_equal(AMobjItemValue(result_item).counter, 0); assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "bbbb"); + str = 
AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "bbbb", str.count); result_item = AMobjItemsNext(&result, 1); assert_int_equal(AMobjItemValue(result_item).counter, 10); assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "cccc"); + str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "cccc", str.count); /* doc1.increment("_root", "cnt", 5) */ - AMfree(AMmapIncrement(doc1, AM_ROOT, "cnt", 5)); + AMfree(AMmapIncrement(doc1, AM_ROOT, AMstr("cnt"), 5)); /* result = doc1.getAll("_root", "cnt") */ result = AMpush(&stack, - AMmapGetAll(doc1, AM_ROOT, "cnt", NULL), + AMmapGetAll(doc1, AM_ROOT, AMstr("cnt"), NULL), AM_VALUE_OBJ_ITEMS, cmocka_cb).obj_items; /* assert.deepEqual(result, [ @@ -982,13 +1073,15 @@ static void test_local_inc_increments_all_visible_counters_in_a_map(void** state result_item = AMobjItemsNext(&result, 1); assert_int_equal(AMobjItemValue(result_item).counter, 5); assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "bbbb"); + str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "bbbb", str.count); result_item = AMobjItemsNext(&result, 1); assert_int_equal(AMobjItemValue(result_item).counter, 15); assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "cccc"); + str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "cccc", str.count); /* */ /* const save1 = doc1.save() */ AMbyteSpan const save1 = AMpush(&stack, @@ -1017,7 +1110,7 @@ static void 
test_local_inc_increments_all_visible_counters_in_a_sequence(void** /* const doc1 = create("aaaa") */ AMdoc* const doc1 = AMpush(&stack, AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaa"), + AMactorIdInitStr(AMstr("aaaa")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, @@ -1025,11 +1118,11 @@ static void test_local_inc_increments_all_visible_counters_in_a_sequence(void** /* const seq = doc1.putObject("_root", "seq", []) */ AMobjId const* const seq = AMpush( &stack, - AMmapPutObject(doc1, AM_ROOT, "seq", AM_OBJ_TYPE_LIST), + AMmapPutObject(doc1, AM_ROOT, AMstr("seq"), AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* doc1.insert(seq, 0, "hello") */ - AMfree(AMlistPutStr(doc1, seq, 0, true, "hello")); + AMfree(AMlistPutStr(doc1, seq, 0, true, AMstr("hello"))); /* const doc2 = load(doc1.save(), "bbbb"); */ AMbyteSpan const save1 = AMpush(&stack, AMsave(doc1), @@ -1040,7 +1133,7 @@ static void test_local_inc_increments_all_visible_counters_in_a_sequence(void** AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMsetActorId(doc2, AMpush(&stack, - AMactorIdInitStr("bbbb"), + AMactorIdInitStr(AMstr("bbbb")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); /* const doc3 = load(doc1.save(), "cccc"); */ @@ -1049,7 +1142,7 @@ static void test_local_inc_increments_all_visible_counters_in_a_sequence(void** AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMsetActorId(doc3, AMpush(&stack, - AMactorIdInitStr("cccc"), + AMactorIdInitStr(AMstr("cccc")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); /* let heads = doc1.getHeads() */ @@ -1088,18 +1181,20 @@ static void test_local_inc_increments_all_visible_counters_in_a_sequence(void** AMobjItem const* result_item = AMobjItemsNext(&result, 1); assert_int_equal(AMobjItemValue(result_item).int_, 20); assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "aaaa"); + AMbyteSpan str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + 
assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "aaaa", str.count); result_item = AMobjItemsNext(&result, 1); assert_int_equal(AMobjItemValue(result_item).counter, 0); assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "bbbb"); + str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + assert_memory_equal(str.src, "bbbb", str.count); result_item = AMobjItemsNext(&result, 1); assert_int_equal(AMobjItemValue(result_item).counter, 10); assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "cccc"); + str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "cccc", str.count); /* doc1.increment(seq, 0, 5) */ AMfree(AMlistIncrement(doc1, seq, 0, 5)); /* result = doc1.getAll(seq, 0) */ @@ -1114,13 +1209,14 @@ static void test_local_inc_increments_all_visible_counters_in_a_sequence(void** result_item = AMobjItemsNext(&result, 1); assert_int_equal(AMobjItemValue(result_item).counter, 5); assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "bbbb"); + str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "bbbb", str.count); result_item = AMobjItemsNext(&result, 1); assert_int_equal(AMobjItemValue(result_item).counter, 15); assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "cccc"); + str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + assert_memory_equal(str.src, "cccc", str.count); /* */ /* const save = doc1.save() */ AMbyteSpan const save = AMpush(&stack, @@ -1154,7 +1250,7 @@ static void 
test_should_be_able_to_fetch_changes_by_hash(void** state) { /* const doc1 = create("aaaa") */ AMdoc* const doc1 = AMpush(&stack, AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaa"), + AMactorIdInitStr(AMstr("aaaa")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, @@ -1162,15 +1258,15 @@ static void test_should_be_able_to_fetch_changes_by_hash(void** state) { /* const doc2 = create("bbbb") */ AMdoc* const doc2 = AMpush(&stack, AMcreate(AMpush(&stack, - AMactorIdInitStr("bbbb"), + AMactorIdInitStr(AMstr("bbbb")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, cmocka_cb).doc; /* doc1.put("/", "a", "b") */ - AMfree(AMmapPutStr(doc1, AM_ROOT, "a", "b")); + AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("a"), AMstr("b"))); /* doc2.put("/", "b", "c") */ - AMfree(AMmapPutStr(doc2, AM_ROOT, "b", "c")); + AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("b"), AMstr("c"))); /* const head1 = doc1.getHeads() */ AMchangeHashes head1 = AMpush(&stack, AMgetHeads(doc1), @@ -1210,7 +1306,7 @@ static void test_recursive_sets_are_possible(void** state) { /* const doc = create("aaaa") */ AMdoc* const doc = AMpush(&stack, AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaa"), + AMactorIdInitStr(AMstr("aaaa")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, @@ -1218,7 +1314,7 @@ static void test_recursive_sets_are_possible(void** state) { /* const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]])*/ AMobjId const* const l1 = AMpush( &stack, - AMmapPutObject(doc, AM_ROOT, "list", AM_OBJ_TYPE_LIST), + AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; { @@ -1227,7 +1323,7 @@ static void test_recursive_sets_are_possible(void** state) { AMlistPutObject(doc, l1, 0, true, AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; - AMfree(AMmapPutStr(doc, map, "foo", "bar")); + AMfree(AMmapPutStr(doc, map, AMstr("foo"), AMstr("bar"))); AMobjId const* const list = AMpush( &stack, AMlistPutObject(doc, l1, SIZE_MAX, true, 
AM_OBJ_TYPE_LIST), @@ -1246,28 +1342,28 @@ static void test_recursive_sets_are_possible(void** state) { { AMobjId const* const list = AMpush( &stack, - AMmapPutObject(doc, l2, "zip", AM_OBJ_TYPE_LIST), + AMmapPutObject(doc, l2, AMstr("zip"), AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; - AMfree(AMlistPutStr(doc, list, SIZE_MAX, true, "a")); - AMfree(AMlistPutStr(doc, list, SIZE_MAX, true, "b")); + AMfree(AMlistPutStr(doc, list, SIZE_MAX, true, AMstr("a"))); + AMfree(AMlistPutStr(doc, list, SIZE_MAX, true, AMstr("b"))); } /* const l3 = doc.putObject("_root", "info1", "hello world") // 'text' object*/ AMobjId const* const l3 = AMpush( &stack, - AMmapPutObject(doc, AM_ROOT, "info1", AM_OBJ_TYPE_TEXT), + AMmapPutObject(doc, AM_ROOT, AMstr("info1"), AM_OBJ_TYPE_TEXT), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; - AMfree(AMspliceText(doc, l3, 0, 0, "hello world")); + AMfree(AMspliceText(doc, l3, 0, 0, AMstr("hello world"))); /* doc.put("_root", "info2", "hello world") // 'str' */ - AMfree(AMmapPutStr(doc, AM_ROOT, "info2", "hello world")); + AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("info2"), AMstr("hello world"))); /* const l4 = doc.putObject("_root", "info3", "hello world") */ AMobjId const* const l4 = AMpush( &stack, - AMmapPutObject(doc, AM_ROOT, "info3", AM_OBJ_TYPE_TEXT), + AMmapPutObject(doc, AM_ROOT, AMstr("info3"), AM_OBJ_TYPE_TEXT), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; - AMfree(AMspliceText(doc, l4, 0, 0, "hello world")); + AMfree(AMspliceText(doc, l4, 0, 0, AMstr("hello world"))); /* assert.deepEqual(doc.materialize(), { "list": [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]], "info1": "hello world", @@ -1275,26 +1371,40 @@ static void test_recursive_sets_are_possible(void** state) { "info3": "hello world", }) */ AMmapItems doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), + AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; AMmapItem const* doc_item = AMmapItemsNext(&doc_items, 1); - 
assert_string_equal(AMmapItemKey(doc_item), "info1"); - assert_string_equal(AMpush(&stack, - AMtext(doc, AMmapItemObjId(doc_item), NULL), - AM_VALUE_STR, - cmocka_cb).str, "hello world"); + AMbyteSpan key = AMmapItemKey(doc_item); + assert_int_equal(key.count, strlen("info1")); + assert_memory_equal(key.src, "info1", key.count); + AMbyteSpan str = AMpush(&stack, + AMtext(doc, AMmapItemObjId(doc_item), NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, strlen("hello world")); + assert_memory_equal(str.src, "hello world", str.count); doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "info2"); - assert_string_equal(AMmapItemValue(doc_item).str, "hello world"); + key = AMmapItemKey(doc_item); + assert_int_equal(key.count, strlen("info2")); + assert_memory_equal(key.src, "info2", key.count); + str = AMmapItemValue(doc_item).str; + assert_int_equal(str.count, strlen("hello world")); + assert_memory_equal(str.src, "hello world", str.count); doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "info3"); - assert_string_equal(AMpush(&stack, - AMtext(doc, AMmapItemObjId(doc_item), NULL), - AM_VALUE_STR, - cmocka_cb).str, "hello world"); + key = AMmapItemKey(doc_item); + assert_int_equal(key.count, strlen("info3")); + assert_memory_equal(key.src, "info3", key.count); + str = AMpush(&stack, + AMtext(doc, AMmapItemObjId(doc_item), NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, strlen("hello world")); + assert_memory_equal(str.src, "hello world", str.count); doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "list"); + key = AMmapItemKey(doc_item); + assert_int_equal(key.count, strlen("list")); + assert_memory_equal(key.src, "list", key.count); { AMlistItems list_items = AMpush( &stack, @@ -1305,35 +1415,41 @@ static void test_recursive_sets_are_possible(void** state) { { AMmapItems map_items = AMpush( &stack, - AMmapRange(doc, 
AMlistItemObjId(list_item), NULL, NULL, NULL), + AMmapRange(doc, AMlistItemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - assert_string_equal(AMmapItemKey(map_item), "zip"); + AMbyteSpan const key = AMmapItemKey(map_item); + assert_int_equal(key.count, strlen("zip")); + assert_memory_equal(key.src, "zip", key.count); { AMlistItems list_items = AMpush( &stack, AMlistRange(doc, AMmapItemObjId(map_item), 0, SIZE_MAX, NULL), AM_VALUE_LIST_ITEMS, cmocka_cb).list_items; - assert_string_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).str, - "a"); - assert_string_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).str, - "b"); + AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); } } list_item = AMlistItemsNext(&list_items, 1); { AMmapItems map_items = AMpush( &stack, - AMmapRange(doc, AMlistItemObjId(list_item), NULL, NULL, NULL), + AMmapRange(doc, AMlistItemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - assert_string_equal(AMmapItemKey(map_item), "foo"); - assert_string_equal(AMmapItemValue(map_item).str, "bar"); + AMbyteSpan const key = AMmapItemKey(map_item); + assert_int_equal(key.count, strlen("foo")); + assert_memory_equal(key.src, "foo", key.count); + AMbyteSpan const str = AMmapItemValue(map_item).str; + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "bar", str.count); } list_item = AMlistItemsNext(&list_items, 1); { @@ -1356,23 +1472,25 @@ static void test_recursive_sets_are_possible(void** state) { /* assert.deepEqual(doc.materialize(l2), { zip: ["a", "b"] }) */ AMmapItems 
map_items = AMpush( &stack, - AMmapRange(doc, l2, NULL, NULL, NULL), + AMmapRange(doc, l2, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - assert_string_equal(AMmapItemKey(map_item), "zip"); + key = AMmapItemKey(map_item); + assert_int_equal(key.count, strlen("zip")); + assert_memory_equal(key.src, "zip", key.count); { AMlistItems list_items = AMpush( &stack, AMlistRange(doc, AMmapItemObjId(map_item), 0, SIZE_MAX, NULL), AM_VALUE_LIST_ITEMS, cmocka_cb).list_items; - assert_string_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).str, - "a"); - assert_string_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).str, - "b"); + AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); } /* assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]])*/ AMlistItems list_items = AMpush( @@ -1384,33 +1502,41 @@ static void test_recursive_sets_are_possible(void** state) { { AMmapItems map_items = AMpush( &stack, - AMmapRange(doc, AMlistItemObjId(list_item), NULL, NULL, NULL), + AMmapRange(doc, AMlistItemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - assert_string_equal(AMmapItemKey(map_item), "zip"); + AMbyteSpan const key = AMmapItemKey(map_item); + assert_int_equal(key.count, strlen("zip")); + assert_memory_equal(key.src, "zip", key.count); { AMlistItems list_items = AMpush( &stack, AMlistRange(doc, AMmapItemObjId(map_item), 0, SIZE_MAX, NULL), AM_VALUE_LIST_ITEMS, cmocka_cb).list_items; - assert_string_equal( - AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, "a"); - 
assert_string_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).str, "b"); + AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); } } list_item = AMlistItemsNext(&list_items, 1); { AMmapItems map_items = AMpush( &stack, - AMmapRange(doc, AMlistItemObjId(list_item), NULL, NULL, NULL), + AMmapRange(doc, AMlistItemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - assert_string_equal(AMmapItemKey(map_item), "foo"); - assert_string_equal(AMmapItemValue(map_item).str, "bar"); + AMbyteSpan const key = AMmapItemKey(map_item); + assert_int_equal(key.count, strlen("foo")); + assert_memory_equal(key.src, "foo", key.count); + AMbyteSpan const str = AMmapItemValue(map_item).str; + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "bar", str.count); } list_item = AMlistItemsNext(&list_items, 1); { @@ -1427,10 +1553,9 @@ static void test_recursive_sets_are_possible(void** state) { 3); } /* assert.deepEqual(doc.materialize(l4), "hello world") */ - assert_string_equal(AMpush(&stack, - AMtext(doc, l4, NULL), - AM_VALUE_STR, - cmocka_cb).str, "hello world"); + str = AMpush(&stack, AMtext(doc, l4, NULL), AM_VALUE_STR, cmocka_cb).str; + assert_int_equal(str.count, strlen("hello world")); + assert_memory_equal(str.src, "hello world", str.count); } /** @@ -1441,7 +1566,7 @@ static void test_only_returns_an_object_id_when_objects_are_created(void** state /* const doc = create("aaaa") */ AMdoc* const doc = AMpush(&stack, AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaa"), + AMactorIdInitStr(AMstr("aaaa")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, @@ -1449,31 +1574,31 @@ static void 
test_only_returns_an_object_id_when_objects_are_created(void** state /* const r1 = doc.put("_root", "foo", "bar") assert.deepEqual(r1, null); */ AMpush(&stack, - AMmapPutStr(doc, AM_ROOT, "foo", "bar"), + AMmapPutStr(doc, AM_ROOT, AMstr("foo"), AMstr("bar")), AM_VALUE_VOID, cmocka_cb); /* const r2 = doc.putObject("_root", "list", []) */ AMobjId const* const r2 = AMpush( &stack, - AMmapPutObject(doc, AM_ROOT, "list", AM_OBJ_TYPE_LIST), + AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* const r3 = doc.put("_root", "counter", 10, "counter") assert.deepEqual(r3, null); */ AMpush(&stack, - AMmapPutCounter(doc, AM_ROOT, "counter", 10), + AMmapPutCounter(doc, AM_ROOT, AMstr("counter"), 10), AM_VALUE_VOID, cmocka_cb); /* const r4 = doc.increment("_root", "counter", 1) assert.deepEqual(r4, null); */ AMpush(&stack, - AMmapIncrement(doc, AM_ROOT, "counter", 1), + AMmapIncrement(doc, AM_ROOT, AMstr("counter"), 1), AM_VALUE_VOID, cmocka_cb); /* const r5 = doc.delete("_root", "counter") assert.deepEqual(r5, null); */ AMpush(&stack, - AMmapDelete(doc, AM_ROOT, "counter"), + AMmapDelete(doc, AM_ROOT, AMstr("counter")), AM_VALUE_VOID, cmocka_cb); /* const r6 = doc.insert(r2, 0, 10); @@ -1489,19 +1614,22 @@ static void test_only_returns_an_object_id_when_objects_are_created(void** state AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* const r8 = doc.splice(r2, 1, 0, ["a", "b", "c"]); */ - AMvalue const STRS[] = {{.str_tag = AM_VALUE_STR, .str = "a", - .str_tag = AM_VALUE_STR, .str = "b", - .str_tag = AM_VALUE_STR, .str = "c"}}; + AMvalue const STRS[] = {{.str_tag = AM_VALUE_STR, .str = {.src = "a", .count = 1}}, + {.str_tag = AM_VALUE_STR, .str = {.src = "b", .count = 1}}, + {.str_tag = AM_VALUE_STR, .str = {.src = "c", .count = 1}}}; AMpush(&stack, AMsplice(doc, r2, 1, 0, STRS, sizeof(STRS)/sizeof(AMvalue)), AM_VALUE_VOID, cmocka_cb); /* assert.deepEqual(r2, "2@aaaa"); */ assert_int_equal(AMobjIdCounter(r2), 2); - 
assert_string_equal(AMactorIdStr(AMobjIdActorId(r2)), "aaaa"); + AMbyteSpan str = AMactorIdStr(AMobjIdActorId(r2)); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "aaaa", str.count); /* assert.deepEqual(r7, "7@aaaa"); */ assert_int_equal(AMobjIdCounter(r7), 7); - assert_string_equal(AMactorIdStr(AMobjIdActorId(r7)), "aaaa"); + str = AMactorIdStr(AMobjIdActorId(r7)); + assert_memory_equal(str.src, "aaaa", str.count); } /** @@ -1512,7 +1640,7 @@ static void test_objects_without_properties_are_preserved(void** state) { /* const doc1 = create("aaaa") */ AMdoc* const doc1 = AMpush(&stack, AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaa"), + AMactorIdInitStr(AMstr("aaaa")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, @@ -1520,23 +1648,23 @@ static void test_objects_without_properties_are_preserved(void** state) { /* const a = doc1.putObject("_root", "a", {}); */ AMobjId const* const a = AMpush( &stack, - AMmapPutObject(doc1, AM_ROOT, "a", AM_OBJ_TYPE_MAP), + AMmapPutObject(doc1, AM_ROOT, AMstr("a"), AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* const b = doc1.putObject("_root", "b", {}); */ AMobjId const* const b = AMpush( &stack, - AMmapPutObject(doc1, AM_ROOT, "b", AM_OBJ_TYPE_MAP), + AMmapPutObject(doc1, AM_ROOT, AMstr("b"), AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* const c = doc1.putObject("_root", "c", {}); */ AMobjId const* const c = AMpush( &stack, - AMmapPutObject(doc1, AM_ROOT, "c", AM_OBJ_TYPE_MAP), + AMmapPutObject(doc1, AM_ROOT, AMstr("c"), AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* const d = doc1.put(c, "d", "dd"); */ - AMfree(AMmapPutStr(doc1, c, "d", "dd")); + AMfree(AMmapPutStr(doc1, c, AMstr("d"), AMstr("dd"))); /* const saved = doc1.save(); */ AMbyteSpan const saved = AMpush(&stack, AMsave(doc1), @@ -1549,7 +1677,7 @@ static void test_objects_without_properties_are_preserved(void** state) { cmocka_cb).doc; /* assert.deepEqual(doc2.getWithType("_root", "a"), ["map", a]) */ AMmapItems 
doc_items = AMpush(&stack, - AMmapRange(doc2, AM_ROOT, NULL, NULL, NULL), + AMmapRange(doc2, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; assert_true(AMobjIdEqual(AMmapItemObjId(AMmapItemsNext(&doc_items, 1)), a)); @@ -1568,13 +1696,17 @@ static void test_objects_without_properties_are_preserved(void** state) { assert_true(AMobjIdEqual(AMmapItemObjId(AMmapItemsNext(&doc_items, 1)), c)); /* assert.deepEqual(doc2.keys(c), ["d"]) */ keys = AMpush(&stack, AMkeys(doc1, c, NULL), AM_VALUE_STRS, cmocka_cb).strs; - assert_string_equal(AMstrsNext(&keys, 1), "d"); + AMbyteSpan str = AMstrsNext(&keys, 1); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "d", str.count); /* assert.deepEqual(doc2.getWithType(c, "d"), ["str", "dd"]) */ AMobjItems obj_items = AMpush(&stack, AMobjValues(doc1, c, NULL), AM_VALUE_OBJ_ITEMS, cmocka_cb).obj_items; - assert_string_equal(AMobjItemValue(AMobjItemsNext(&obj_items, 1)).str, "dd"); + str = AMobjItemValue(AMobjItemsNext(&obj_items, 1)).str; + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "dd", str.count); } /** @@ -1585,15 +1717,15 @@ static void test_should_allow_you_to_forkAt_a_heads(void** state) { /* const A = create("aaaaaa") */ AMdoc* const A = AMpush(&stack, AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaaaa"), + AMactorIdInitStr(AMstr("aaaaaa")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, cmocka_cb).doc; /* A.put("/", "key1", "val1"); */ - AMfree(AMmapPutStr(A, AM_ROOT, "key1", "val1")); + AMfree(AMmapPutStr(A, AM_ROOT, AMstr("key1"), AMstr("val1"))); /* A.put("/", "key2", "val2"); */ - AMfree(AMmapPutStr(A, AM_ROOT, "key2", "val2")); + AMfree(AMmapPutStr(A, AM_ROOT, AMstr("key2"), AMstr("val2"))); /* const heads1 = A.getHeads(); */ AMchangeHashes const heads1 = AMpush(&stack, AMgetHeads(A), @@ -1602,13 +1734,13 @@ static void test_should_allow_you_to_forkAt_a_heads(void** state) { /* const B = A.fork("bbbbbb") */ AMdoc* const B = AMpush(&stack, 
AMfork(A, NULL), AM_VALUE_DOC, cmocka_cb).doc; AMfree(AMsetActorId(B, AMpush(&stack, - AMactorIdInitStr("bbbbbb"), + AMactorIdInitStr(AMstr("bbbbbb")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); /* A.put("/", "key3", "val3"); */ - AMfree(AMmapPutStr(A, AM_ROOT, "key3", "val3")); + AMfree(AMmapPutStr(A, AM_ROOT, AMstr("key3"), AMstr("val3"))); /* B.put("/", "key4", "val4"); */ - AMfree(AMmapPutStr(B, AM_ROOT, "key4", "val4")); + AMfree(AMmapPutStr(B, AM_ROOT, AMstr("key4"), AMstr("val4"))); /* A.merge(B) */ AMfree(AMmerge(A, B)); /* const heads2 = A.getHeads(); */ @@ -1617,17 +1749,17 @@ static void test_should_allow_you_to_forkAt_a_heads(void** state) { AM_VALUE_CHANGE_HASHES, cmocka_cb).change_hashes; /* A.put("/", "key5", "val5"); */ - AMfree(AMmapPutStr(A, AM_ROOT, "key5", "val5")); + AMfree(AMmapPutStr(A, AM_ROOT, AMstr("key5"), AMstr("val5"))); /* assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/", heads1))*/ AMmapItems AforkAt1_items = AMpush( &stack, AMmapRange( AMpush(&stack, AMfork(A, &heads1), AM_VALUE_DOC, cmocka_cb).doc, - AM_ROOT, NULL, NULL, NULL), + AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; AMmapItems A1_items = AMpush(&stack, - AMmapRange(A, AM_ROOT, NULL, NULL, &heads1), + AMmapRange(A, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads1), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; assert_true(AMmapItemsEqual(&AforkAt1_items, &A1_items)); @@ -1636,11 +1768,11 @@ static void test_should_allow_you_to_forkAt_a_heads(void** state) { &stack, AMmapRange( AMpush(&stack, AMfork(A, &heads2), AM_VALUE_DOC, cmocka_cb).doc, - AM_ROOT, NULL, NULL, NULL), + AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; AMmapItems A2_items = AMpush(&stack, - AMmapRange(A, AM_ROOT, NULL, NULL, &heads2), + AMmapRange(A, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads2), AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; assert_true(AMmapItemsEqual(&AforkAt2_items, &A2_items)); @@ -1654,7 +1786,7 @@ static void 
test_should_handle_merging_text_conflicts_then_saving_and_loading(vo /* const A = create("aabbcc") */ AMdoc* const A = AMpush(&stack, AMcreate(AMpush(&stack, - AMactorIdInitStr("aabbcc"), + AMactorIdInitStr(AMstr("aabbcc")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, @@ -1662,38 +1794,40 @@ static void test_should_handle_merging_text_conflicts_then_saving_and_loading(vo /* const At = A.putObject('_root', 'text', "") */ AMobjId const* const At = AMpush( &stack, - AMmapPutObject(A, AM_ROOT, "text", AM_OBJ_TYPE_TEXT), + AMmapPutObject(A, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* A.splice(At, 0, 0, 'hello') */ - AMfree(AMspliceText(A, At, 0, 0, "hello")); + AMfree(AMspliceText(A, At, 0, 0, AMstr("hello"))); /* */ /* const B = A.fork() */ AMdoc* const B = AMpush(&stack, AMfork(A, NULL), AM_VALUE_DOC, cmocka_cb).doc; /* */ /* assert.deepEqual(B.getWithType("_root", "text"), ["text", At]) */ - assert_string_equal(AMpush(&stack, - AMtext(B, - AMpush(&stack, - AMmapGet(B, AM_ROOT, "text", NULL), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id, - NULL), - AM_VALUE_STR, - cmocka_cb).str, - AMpush(&stack, - AMtext(A, At, NULL), - AM_VALUE_STR, - cmocka_cb).str); + AMbyteSpan str = AMpush(&stack, + AMtext(B, + AMpush(&stack, + AMmapGet(B, AM_ROOT, AMstr("text"), NULL), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id, + NULL), + AM_VALUE_STR, + cmocka_cb).str; + AMbyteSpan const str2 = AMpush(&stack, + AMtext(A, At, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_equal(str.count, str2.count); + assert_memory_equal(str.src, str2.src, str.count); /* */ /* B.splice(At, 4, 1) */ - AMfree(AMspliceText(B, At, 4, 1, NULL)); + AMfree(AMspliceText(B, At, 4, 1, AMstr(NULL))); /* B.splice(At, 4, 0, '!') */ - AMfree(AMspliceText(B, At, 4, 0, "!")); + AMfree(AMspliceText(B, At, 4, 0, AMstr("!"))); /* B.splice(At, 5, 0, ' ') */ - AMfree(AMspliceText(B, At, 5, 0, " ")); + AMfree(AMspliceText(B, At, 5, 0, AMstr(" "))); /* B.splice(At, 6, 0, 'world') */ - 
AMfree(AMspliceText(B, At, 6, 0, "world")); + AMfree(AMspliceText(B, At, 6, 0, AMstr("world"))); /* */ /* A.merge(B) */ AMfree(AMmerge(A, B)); @@ -1712,16 +1846,17 @@ static void test_should_handle_merging_text_conflicts_then_saving_and_loading(vo /* */ /* assert.deepEqual(C.getWithType('_root', 'text'), ['text', '1@aabbcc'])*/ AMobjId const* const C_text = AMpush(&stack, - AMmapGet(C, AM_ROOT, "text", NULL), + AMmapGet(C, AM_ROOT, AMstr("text"), NULL), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; assert_int_equal(AMobjIdCounter(C_text), 1); - assert_string_equal(AMactorIdStr(AMobjIdActorId(C_text)), "aabbcc"); + str = AMactorIdStr(AMobjIdActorId(C_text)); + assert_int_equal(str.count, strlen("aabbcc")); + assert_memory_equal(str.src, "aabbcc", str.count); /* assert.deepEqual(C.text(At), 'hell! world') */ - assert_string_equal(AMpush(&stack, - AMtext(C, At, NULL), - AM_VALUE_STR, - cmocka_cb).str, "hell! world"); + str = AMpush(&stack, AMtext(C, At, NULL), AM_VALUE_STR, cmocka_cb).str; + assert_int_equal(str.count, strlen("hell! world")); + assert_memory_equal(str.src, "hell! 
world", str.count); } int run_ported_wasm_basic_tests(void) { diff --git a/rust/automerge-c/test/ported_wasm/sync_tests.c b/rust/automerge-c/test/ported_wasm/sync_tests.c index ec5f84a4..f2d76db4 100644 --- a/rust/automerge-c/test/ported_wasm/sync_tests.c +++ b/rust/automerge-c/test/ported_wasm/sync_tests.c @@ -23,14 +23,14 @@ static int setup(void** state) { TestState* test_state = test_calloc(1, sizeof(TestState)); test_state->n1 = AMpush(&test_state->stack, AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), + AMactorIdInitStr(AMstr("01234567")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, cmocka_cb).doc; test_state->n2 = AMpush(&test_state->stack, AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), + AMactorIdInitStr(AMstr("89abcdef")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, @@ -166,18 +166,18 @@ static void test_repos_with_equal_heads_do_not_need_a_reply_message(void **state AMobjId const* const list = AMpush(&test_state->stack, AMmapPutObject(test_state->n1, AM_ROOT, - "n", + AMstr("n"), AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.insert(list, i, i) */ AMfree(AMlistPutUint(test_state->n1, AM_ROOT, i, true, i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* n2.applyChanges(n1.getChanges([])) */ @@ -229,17 +229,17 @@ static void test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing(vo /* const list = n1.putObject("_root", "n", []) */ AMobjId const* const list = AMpush( &test_state->stack, - AMmapPutObject(test_state->n1, AM_ROOT, "n", AM_OBJ_TYPE_LIST), + AMmapPutObject(test_state->n1, AM_ROOT, AMstr("n"), AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* n1.commit("", 0) */ - 
AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.insert(list, i, i) */ AMfree(AMlistPutUint(test_state->n1, AM_ROOT, i, true, i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -262,17 +262,17 @@ static void test_should_sync_peers_where_one_has_commits_the_other_does_not(void /* const list = n1.putObject("_root", "n", []) */ AMobjId const* const list = AMpush( &test_state->stack, - AMmapPutObject(test_state->n1, AM_ROOT, "n", AM_OBJ_TYPE_LIST), + AMmapPutObject(test_state->n1, AM_ROOT, AMstr("n"), AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.insert(list, i, i) */ AMfree(AMlistPutUint(test_state->n1, AM_ROOT, i, true, i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -296,9 +296,9 @@ static void test_should_work_with_prior_sync_state(void **state) { /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -308,9 +308,9 @@ static void test_should_work_with_prior_sync_state(void **state) { /* for (let i = 5; i < 10; i++) { */ for (size_t i = 5; i != 10; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* 
n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -331,11 +331,11 @@ static void test_should_not_generate_messages_once_synced(void **state) { const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr("abc123"), + AMactorIdInitStr(AMstr("abc123")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr("def456"), + AMactorIdInitStr(AMstr("def456")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); /* */ @@ -343,17 +343,17 @@ static void test_should_not_generate_messages_once_synced(void **state) { for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { // n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); // n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { /* n2.put("_root", "y", i) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "y", i)); + AMfree(AMmapPutUint(test_state->n2, AM_ROOT, AMstr("y"), i)); /* n2.commit("", 0) */ - AMfree(AMcommit(test_state->n2, "", &TIME_0)); + AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -430,28 +430,28 @@ static void test_should_allow_simultaneous_messages_during_synchronization(void const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr("abc123"), + AMactorIdInitStr(AMstr("abc123")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id)); AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr("def456"), + AMactorIdInitStr(AMstr("def456")), AM_VALUE_ACTOR_ID, 
cmocka_cb).actor_id)); /* */ /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { /* n2.put("_root", "y", i) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "y", i)); + AMfree(AMmapPutUint(test_state->n2, AM_ROOT, AMstr("y"), i)); /* n2.commit("", 0) */ - AMfree(AMcommit(test_state->n2, "", &TIME_0)); + AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); /* { */ } /* const head1 = n1.getHeads()[0], head2 = n2.getHeads()[0] */ @@ -541,11 +541,11 @@ static void test_should_allow_simultaneous_messages_during_synchronization(void /* //assert.notDeepStrictEqual(patch1, null) assert.deepStrictEqual(n1.materialize(), { x: 4, y: 4 }) */ assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n1, AM_ROOT, "x", NULL), + AMmapGet(test_state->n1, AM_ROOT, AMstr("x"), NULL), AM_VALUE_UINT, cmocka_cb).uint, 4); assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n1, AM_ROOT, "y", NULL), + AMmapGet(test_state->n1, AM_ROOT, AMstr("y"), NULL), AM_VALUE_UINT, cmocka_cb).uint, 4); /* */ @@ -560,11 +560,11 @@ static void test_should_allow_simultaneous_messages_during_synchronization(void /* //assert.notDeepStrictEqual(patch2, null) assert.deepStrictEqual(n2.materialize(), { x: 4, y: 4 }) */ assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n2, AM_ROOT, "x", NULL), + AMmapGet(test_state->n2, AM_ROOT, AMstr("x"), NULL), AM_VALUE_UINT, cmocka_cb).uint, 4); assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n2, AM_ROOT, "y", NULL), + AMmapGet(test_state->n2, AM_ROOT, AMstr("y"), NULL), AM_VALUE_UINT, cmocka_cb).uint, 4); /* */ @@ -630,7 +630,7 @@ static void 
test_should_allow_simultaneous_messages_during_synchronization(void /* If we make one more change and start another sync then its lastSync * should be updated */ /* n1.put("_root", "x", 5) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 5)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 5)); /* msg1to2 = n1.generateSyncMessage(s1) if (msg1to2 === null) { throw new RangeError("message should not be null") }*/ msg1to2 = AMpush(&test_state->stack, @@ -662,20 +662,20 @@ static void test_should_assume_sent_changes_were_received_until_we_hear_otherwis AMobjId const* items = AMpush(&test_state->stack, AMmapPutObject(test_state->n1, AM_ROOT, - "items", + AMstr("items"), AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* */ /* sync(n1, n2, s1, s2) */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); /* */ /* n1.push(items, "x") */ - AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, "x")); + AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("x"))); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* message = n1.generateSyncMessage(s1) if (message === null) { throw new RangeError("message should not be null") }*/ AMsyncMessage const* message = AMpush(&test_state->stack, @@ -688,9 +688,9 @@ static void test_should_assume_sent_changes_were_received_until_we_hear_otherwis assert_int_equal(AMchangesSize(&message_changes), 1); /* */ /* n1.push(items, "y") */ - AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, "y")); + AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("y"))); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* message = n1.generateSyncMessage(s1) if (message === null) { throw new 
RangeError("message should not be null") }*/ message = AMpush(&test_state->stack, @@ -702,9 +702,9 @@ static void test_should_assume_sent_changes_were_received_until_we_hear_otherwis assert_int_equal(AMchangesSize(&message_changes), 1); /* */ /* n1.push(items, "z") */ - AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, "z")); + AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("z"))); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* */ /* message = n1.generateSyncMessage(s1) if (message === null) { throw new RangeError("message should not be null") }*/ @@ -729,9 +729,9 @@ static void test_should_work_regardless_of_who_initiates_the_exchange(void **sta /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -742,9 +742,9 @@ static void test_should_work_regardless_of_who_initiates_the_exchange(void **sta /* for (let i = 5; i < 10; i++) { */ for (size_t i = 5; i != 10; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -773,9 +773,9 @@ static void test_should_work_without_prior_sync_state(void **state) { /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + 
AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -785,18 +785,18 @@ static void test_should_work_without_prior_sync_state(void **state) { /* for (let i = 10; i < 15; i++) { */ for (size_t i = 10; i != 15; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ /* for (let i = 15; i < 18; i++) { */ for (size_t i = 15; i != 18; ++i) { /* n2.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), i)); /* n2.commit("", 0) */ - AMfree(AMcommit(test_state->n2, "", &TIME_0)); + AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -836,9 +836,9 @@ static void test_should_work_with_prior_sync_state_2(void **state) { /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -848,17 +848,17 @@ static void test_should_work_with_prior_sync_state_2(void **state) { /* for (let i = 10; i < 15; i++) { */ for (size_t i = 10; i != 15; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* for (let i = 15; i < 18; i++) { */ for (size_t i = 15; i != 18; ++i) { /* n2.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "x", i)); + 
AMfree(AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), i)); /* n2.commit("", 0) */ - AMfree(AMcommit(test_state->n2, "", &TIME_0)); + AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -911,9 +911,9 @@ static void test_should_ensure_non_empty_state_after_sync(void **state) { /* for (let i = 0; i < 3; i++) { */ for (size_t i = 0; i != 3; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -951,9 +951,9 @@ static void test_should_resync_after_one_node_crashed_with_data_loss(void **stat /* for (let i = 0; i < 3; i++) { */ for (size_t i = 0; i != 3; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -981,9 +981,9 @@ static void test_should_resync_after_one_node_crashed_with_data_loss(void **stat /* for (let i = 3; i < 6; i++) { */ for (size_t i = 3; i != 6; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -1009,9 +1009,9 @@ static void test_should_resync_after_one_node_crashed_with_data_loss(void **stat /* for (let i = 6; i < 9; i++) { */ for (size_t i = 6; i != 9; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + 
AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -1049,12 +1049,12 @@ static void test_should_resync_after_one_node_crashed_with_data_loss(void **stat assert_false(AMequal(test_state->n1, r)); /* assert.deepStrictEqual(n1.materialize(), { x: 8 }) */ assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n1, AM_ROOT, "x", NULL), + AMmapGet(test_state->n1, AM_ROOT, AMstr("x"), NULL), AM_VALUE_UINT, cmocka_cb).uint, 8); /* assert.deepStrictEqual(r.materialize(), { x: 2 }) */ assert_int_equal(AMpush(&test_state->stack, - AMmapGet(r, AM_ROOT, "x", NULL), + AMmapGet(r, AM_ROOT, AMstr("x"), NULL), AM_VALUE_UINT, cmocka_cb).uint, 2); /* sync(n1, r, s1, rSyncState) */ @@ -1085,9 +1085,9 @@ static void test_should_resync_after_one_node_experiences_data_loss_without_disc /* for (let i = 0; i < 3; i++) { */ for (size_t i = 0; i != 3; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } /* */ @@ -1110,7 +1110,7 @@ static void test_should_resync_after_one_node_experiences_data_loss_without_disc /* const n2AfterDataLoss = create('89abcdef') */ AMdoc* n2_after_data_loss = AMpush(&test_state->stack, AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), + AMactorIdInitStr(AMstr("89abcdef")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, @@ -1147,7 +1147,7 @@ static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void * TestState* test_state = *state; AMdoc* n3 = AMpush(&test_state->stack, AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr("fedcba98"), + AMactorIdInitStr(AMstr("fedcba98")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, @@ -1167,8 +1167,8 @@ static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void * /* Change 1 is 
known to all three nodes */ /* //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) */ /* n1.put("_root", "x", 1); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 1)); - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 1)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* */ /* sync(n1, n2, s12, s21) */ sync(test_state->n1, test_state->n2, s12, s21); @@ -1177,22 +1177,22 @@ static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void * /* */ /* Change 2 is known to n1 and n2 */ /* n1.put("_root", "x", 2); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 2)); - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 2)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* */ /* sync(n1, n2, s12, s21) */ sync(test_state->n1, test_state->n2, s12, s21); /* */ /* Each of the three nodes makes one change (changes 3, 4, 5) */ /* n1.put("_root", "x", 3); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 3)); - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 3)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* n2.put("_root", "x", 4); n2.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "x", 4)); - AMfree(AMcommit(test_state->n2, "", &TIME_0)); + AMfree(AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), 4)); + AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); /* n3.put("_root", "x", 5); n3.commit("", 0) */ - AMfree(AMmapPutUint(n3, AM_ROOT, "x", 5)); - AMfree(AMcommit(n3, "", &TIME_0)); + AMfree(AMmapPutUint(n3, AM_ROOT, AMstr("x"), 5)); + AMfree(AMcommit(n3, AMstr(""), &TIME_0)); /* */ /* Apply n3's latest change to n2. 
*/ /* let change = n3.getLastLocalChange() @@ -1231,14 +1231,14 @@ static void test_should_handle_histories_with_lots_of_branching_and_merging(void TestState* test_state = *state; AMdoc* n3 = AMpush(&test_state->stack, AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr("fedcba98"), + AMactorIdInitStr(AMstr("fedcba98")), AM_VALUE_ACTOR_ID, cmocka_cb).actor_id), AM_VALUE_DOC, cmocka_cb).doc; /* n1.put("_root", "x", 0); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 0)); - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 0)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* let change1 = n1.getLastLocalChange() if (change1 === null) throw new RangeError("no local change") */ AMchanges change1 = AMpush(&test_state->stack, @@ -1256,8 +1256,8 @@ static void test_should_handle_histories_with_lots_of_branching_and_merging(void /* n3.applyChanges([change2]) */ AMfree(AMapplyChanges(n3, &change2)); /* n3.put("_root", "x", 1); n3.commit("", 0) */ - AMfree(AMmapPutUint(n3, AM_ROOT, "x", 1)); - AMfree(AMcommit(n3, "", &TIME_0)); + AMfree(AMmapPutUint(n3, AM_ROOT, AMstr("x"), 1)); + AMfree(AMcommit(n3, AMstr(""), &TIME_0)); /* */ /* - n1c1 <------ n1c2 <------ n1c3 <-- etc. 
<-- n1c20 <------ n1c21 * / \/ \/ \/ @@ -1269,11 +1269,11 @@ static void test_should_handle_histories_with_lots_of_branching_and_merging(void /* for (let i = 1; i < 20; i++) { */ for (size_t i = 1; i != 20; ++i) { /* n1.put("_root", "n1", i); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "n1", i)); - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("n1"), i)); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* n2.put("_root", "n2", i); n2.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "n2", i)); - AMfree(AMcommit(test_state->n2, "", &TIME_0)); + AMfree(AMmapPutUint(test_state->n2, AM_ROOT, AMstr("n2"), i)); + AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); /* const change1 = n1.getLastLocalChange() if (change1 === null) throw new RangeError("no local change") */ AMchanges change1 = AMpush(&test_state->stack, @@ -1307,11 +1307,11 @@ static void test_should_handle_histories_with_lots_of_branching_and_merging(void /* n2.applyChanges([change3]) */ AMfree(AMapplyChanges(test_state->n2, &change3)); /* n1.put("_root", "n1", "final"); n1.commit("", 0) */ - AMfree(AMmapPutStr(test_state->n1, AM_ROOT, "n1", "final")); - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMfree(AMmapPutStr(test_state->n1, AM_ROOT, AMstr("n1"), AMstr("final"))); + AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* n2.put("_root", "n2", "final"); n2.commit("", 0) */ - AMfree(AMmapPutStr(test_state->n2, AM_ROOT, "n2", "final")); - AMfree(AMcommit(test_state->n2, "", &TIME_0)); + AMfree(AMmapPutStr(test_state->n2, AM_ROOT, AMstr("n2"), AMstr("final"))); + AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); /* */ /* sync(n1, n2, s1, s2) */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); From 3dd954d5b77e6839b3752302a5800ed33afe1757 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Fri, 18 Nov 2022 09:31:04 -0800 Subject: [PATCH 642/730] Moved the `to_obj_id` macro 
in with `AMobjId`. --- rust/automerge-c/src/doc/utils.rs | 34 ------------------------------- rust/automerge-c/src/obj.rs | 11 ++++++++++ 2 files changed, 11 insertions(+), 34 deletions(-) diff --git a/rust/automerge-c/src/doc/utils.rs b/rust/automerge-c/src/doc/utils.rs index b3a975e5..d98a9a8b 100644 --- a/rust/automerge-c/src/doc/utils.rs +++ b/rust/automerge-c/src/doc/utils.rs @@ -1,18 +1,3 @@ -use std::ffi::CStr; -use std::os::raw::c_char; - -macro_rules! to_actor_id { - ($handle:expr) => {{ - let handle = $handle.as_ref(); - match handle { - Some(b) => b, - None => return AMresult::err("Invalid AMactorId pointer").into(), - } - }}; -} - -pub(crate) use to_actor_id; - macro_rules! to_doc { ($handle:expr) => {{ let handle = $handle.as_ref(); @@ -36,22 +21,3 @@ macro_rules! to_doc_mut { } pub(crate) use to_doc_mut; - -macro_rules! to_obj_id { - ($handle:expr) => {{ - match $handle.as_ref() { - Some(obj_id) => obj_id, - None => &automerge::ROOT, - } - }}; -} - -pub(crate) use to_obj_id; - -pub(crate) unsafe fn to_str(c: *const c_char) -> String { - if !c.is_null() { - CStr::from_ptr(c).to_string_lossy().to_string() - } else { - String::default() - } -} diff --git a/rust/automerge-c/src/obj.rs b/rust/automerge-c/src/obj.rs index 00069b9c..46ff617b 100644 --- a/rust/automerge-c/src/obj.rs +++ b/rust/automerge-c/src/obj.rs @@ -7,6 +7,17 @@ use crate::actor_id::AMactorId; pub mod item; pub mod items; +macro_rules! to_obj_id { + ($handle:expr) => {{ + match $handle.as_ref() { + Some(obj_id) => obj_id, + None => &automerge::ROOT, + } + }}; +} + +pub(crate) use to_obj_id; + macro_rules! to_obj_type { ($am_obj_type:expr) => {{ match $am_obj_type { From b60c310f5c2e110fdd4fb36a877e5444f1aa75a2 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Fri, 18 Nov 2022 09:34:21 -0800 Subject: [PATCH 643/730] Changed `Default::default()` calls to be through the trait. 
--- rust/automerge-c/src/change_hashes.rs | 9 +++++---- rust/automerge-c/src/changes.rs | 5 +++-- rust/automerge-c/src/doc/list/items.rs | 4 ++-- rust/automerge-c/src/doc/map/items.rs | 4 ++-- rust/automerge-c/src/obj/items.rs | 4 ++-- rust/automerge-c/src/sync/have.rs | 2 +- rust/automerge-c/src/sync/haves.rs | 4 ++-- rust/automerge-c/src/sync/message.rs | 10 ++++++---- rust/automerge-c/src/sync/state.rs | 12 +++++++----- 9 files changed, 30 insertions(+), 24 deletions(-) diff --git a/rust/automerge-c/src/change_hashes.rs b/rust/automerge-c/src/change_hashes.rs index 87ae6c7f..029612e9 100644 --- a/rust/automerge-c/src/change_hashes.rs +++ b/rust/automerge-c/src/change_hashes.rs @@ -254,6 +254,7 @@ pub unsafe extern "C" fn AMchangeHashesCmp( /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// src must be an AMbyteSpan array of size `>= count` #[no_mangle] @@ -302,7 +303,7 @@ pub unsafe extern "C" fn AMchangeHashesNext( return change_hash.into(); } } - AMbyteSpan::default() + Default::default() } /// \memberof AMchangeHashes @@ -331,7 +332,7 @@ pub unsafe extern "C" fn AMchangeHashesPrev( return change_hash.into(); } } - AMbyteSpan::default() + Default::default() } /// \memberof AMchangeHashes @@ -372,7 +373,7 @@ pub unsafe extern "C" fn AMchangeHashesReversed( if let Some(change_hashes) = change_hashes.as_ref() { change_hashes.reversed() } else { - AMchangeHashes::default() + Default::default() } } @@ -394,6 +395,6 @@ pub unsafe extern "C" fn AMchangeHashesRewound( if let Some(change_hashes) = change_hashes.as_ref() { change_hashes.rewound() } else { - AMchangeHashes::default() + Default::default() } } diff --git a/rust/automerge-c/src/changes.rs b/rust/automerge-c/src/changes.rs index e359cfb6..1bff35c8 100644 --- a/rust/automerge-c/src/changes.rs +++ b/rust/automerge-c/src/changes.rs @@ -268,6 +268,7 @@ pub unsafe extern "C" fn AMchangesEqual( /// \warning The 
returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// src must be an AMbyteSpan array of size `>= count` #[no_mangle] @@ -373,7 +374,7 @@ pub unsafe extern "C" fn AMchangesReversed(changes: *const AMchanges) -> AMchang if let Some(changes) = changes.as_ref() { changes.reversed() } else { - AMchanges::default() + Default::default() } } @@ -393,6 +394,6 @@ pub unsafe extern "C" fn AMchangesRewound(changes: *const AMchanges) -> AMchange if let Some(changes) = changes.as_ref() { changes.rewound() } else { - AMchanges::default() + Default::default() } } diff --git a/rust/automerge-c/src/doc/list/items.rs b/rust/automerge-c/src/doc/list/items.rs index aa676c4a..5b4a11fd 100644 --- a/rust/automerge-c/src/doc/list/items.rs +++ b/rust/automerge-c/src/doc/list/items.rs @@ -323,7 +323,7 @@ pub unsafe extern "C" fn AMlistItemsReversed(list_items: *const AMlistItems) -> if let Some(list_items) = list_items.as_ref() { list_items.reversed() } else { - AMlistItems::default() + Default::default() } } @@ -343,6 +343,6 @@ pub unsafe extern "C" fn AMlistItemsRewound(list_items: *const AMlistItems) -> A if let Some(list_items) = list_items.as_ref() { list_items.rewound() } else { - AMlistItems::default() + Default::default() } } diff --git a/rust/automerge-c/src/doc/map/items.rs b/rust/automerge-c/src/doc/map/items.rs index b1f046b1..cd305971 100644 --- a/rust/automerge-c/src/doc/map/items.rs +++ b/rust/automerge-c/src/doc/map/items.rs @@ -316,7 +316,7 @@ pub unsafe extern "C" fn AMmapItemsReversed(map_items: *const AMmapItems) -> AMm if let Some(map_items) = map_items.as_ref() { map_items.reversed() } else { - AMmapItems::default() + Default::default() } } @@ -335,6 +335,6 @@ pub unsafe extern "C" fn AMmapItemsRewound(map_items: *const AMmapItems) -> AMma if let Some(map_items) = map_items.as_ref() { map_items.rewound() } else { - AMmapItems::default() + Default::default() } } diff --git 
a/rust/automerge-c/src/obj/items.rs b/rust/automerge-c/src/obj/items.rs index fbb1d641..d6b847cf 100644 --- a/rust/automerge-c/src/obj/items.rs +++ b/rust/automerge-c/src/obj/items.rs @@ -316,7 +316,7 @@ pub unsafe extern "C" fn AMobjItemsReversed(obj_items: *const AMobjItems) -> AMo if let Some(obj_items) = obj_items.as_ref() { obj_items.reversed() } else { - AMobjItems::default() + Default::default() } } @@ -336,6 +336,6 @@ pub unsafe extern "C" fn AMobjItemsRewound(obj_items: *const AMobjItems) -> AMob if let Some(obj_items) = obj_items.as_ref() { obj_items.rewound() } else { - AMobjItems::default() + Default::default() } } diff --git a/rust/automerge-c/src/sync/have.rs b/rust/automerge-c/src/sync/have.rs index f7ff4cb0..312151e7 100644 --- a/rust/automerge-c/src/sync/have.rs +++ b/rust/automerge-c/src/sync/have.rs @@ -36,6 +36,6 @@ pub unsafe extern "C" fn AMsyncHaveLastSync(sync_have: *const AMsyncHave) -> AMc if let Some(sync_have) = sync_have.as_ref() { AMchangeHashes::new(&sync_have.as_ref().last_sync) } else { - AMchangeHashes::default() + Default::default() } } diff --git a/rust/automerge-c/src/sync/haves.rs b/rust/automerge-c/src/sync/haves.rs index d359a4dc..c74b8e96 100644 --- a/rust/automerge-c/src/sync/haves.rs +++ b/rust/automerge-c/src/sync/haves.rs @@ -353,7 +353,7 @@ pub unsafe extern "C" fn AMsyncHavesReversed(sync_haves: *const AMsyncHaves) -> if let Some(sync_haves) = sync_haves.as_ref() { sync_haves.reversed() } else { - AMsyncHaves::default() + Default::default() } } @@ -373,6 +373,6 @@ pub unsafe extern "C" fn AMsyncHavesRewound(sync_haves: *const AMsyncHaves) -> A if let Some(sync_haves) = sync_haves.as_ref() { sync_haves.rewound() } else { - AMsyncHaves::default() + Default::default() } } diff --git a/rust/automerge-c/src/sync/message.rs b/rust/automerge-c/src/sync/message.rs index 7e398f8c..46a6d29a 100644 --- a/rust/automerge-c/src/sync/message.rs +++ b/rust/automerge-c/src/sync/message.rs @@ -65,7 +65,7 @@ pub unsafe extern "C" fn 
AMsyncMessageChanges(sync_message: *const AMsyncMessage &mut sync_message.changes_storage.borrow_mut(), ) } else { - AMchanges::default() + Default::default() } } @@ -81,6 +81,7 @@ pub unsafe extern "C" fn AMsyncMessageChanges(sync_message: *const AMsyncMessage /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// src must be a byte array of size `>= count` #[no_mangle] @@ -100,6 +101,7 @@ pub unsafe extern "C" fn AMsyncMessageDecode(src: *const u8, count: usize) -> *m /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] @@ -126,7 +128,7 @@ pub unsafe extern "C" fn AMsyncMessageHaves(sync_message: *const AMsyncMessage) &mut sync_message.haves_storage.borrow_mut(), ) } else { - AMsyncHaves::default() + Default::default() } } @@ -145,7 +147,7 @@ pub unsafe extern "C" fn AMsyncMessageHeads(sync_message: *const AMsyncMessage) if let Some(sync_message) = sync_message.as_ref() { AMchangeHashes::new(&sync_message.as_ref().heads) } else { - AMchangeHashes::default() + Default::default() } } @@ -165,6 +167,6 @@ pub unsafe extern "C" fn AMsyncMessageNeeds(sync_message: *const AMsyncMessage) if let Some(sync_message) = sync_message.as_ref() { AMchangeHashes::new(&sync_message.as_ref().need) } else { - AMchangeHashes::default() + Default::default() } } diff --git a/rust/automerge-c/src/sync/state.rs b/rust/automerge-c/src/sync/state.rs index 54fd5fe4..1c1d316f 100644 --- a/rust/automerge-c/src/sync/state.rs +++ b/rust/automerge-c/src/sync/state.rs @@ -67,6 +67,7 @@ impl From for *mut AMsyncState { /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. 
/// \internal +/// /// # Safety /// src must be a byte array of size `>= count` #[no_mangle] @@ -86,6 +87,7 @@ pub unsafe extern "C" fn AMsyncStateDecode(src: *const u8, count: usize) -> *mut /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. /// \internal +/// /// # Safety /// sync_state must be a valid pointer to an AMsyncState #[no_mangle] @@ -146,7 +148,7 @@ pub unsafe extern "C" fn AMsyncStateSharedHeads(sync_state: *const AMsyncState) if let Some(sync_state) = sync_state.as_ref() { AMchangeHashes::new(&sync_state.as_ref().shared_heads) } else { - AMchangeHashes::default() + Default::default() } } @@ -167,7 +169,7 @@ pub unsafe extern "C" fn AMsyncStateLastSentHeads( if let Some(sync_state) = sync_state.as_ref() { AMchangeHashes::new(&sync_state.as_ref().last_sent_heads) } else { - AMchangeHashes::default() + Default::default() } } @@ -197,7 +199,7 @@ pub unsafe extern "C" fn AMsyncStateTheirHaves( }; }; *has_value = false; - AMsyncHaves::default() + Default::default() } /// \memberof AMsyncState @@ -227,7 +229,7 @@ pub unsafe extern "C" fn AMsyncStateTheirHeads( } }; *has_value = false; - AMchangeHashes::default() + Default::default() } /// \memberof AMsyncState @@ -257,5 +259,5 @@ pub unsafe extern "C" fn AMsyncStateTheirNeeds( } }; *has_value = false; - AMchangeHashes::default() + Default::default() } From 7c9f9271368edd38f291240f436384ad6ea218fc Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Fri, 18 Nov 2022 09:50:52 -0800 Subject: [PATCH 644/730] Fixed code formatting violations. 
--- rust/automerge-c/src/actor_id.rs | 12 +++++++----- rust/automerge-c/src/byte_span.rs | 3 +-- rust/automerge-c/src/change.rs | 2 +- rust/automerge-c/src/doc/map.rs | 6 +----- rust/automerge-c/src/strs.rs | 2 +- rust/automerge/src/error.rs | 2 +- 6 files changed, 12 insertions(+), 15 deletions(-) diff --git a/rust/automerge-c/src/actor_id.rs b/rust/automerge-c/src/actor_id.rs index 6467ddea..bc86d5ef 100644 --- a/rust/automerge-c/src/actor_id.rs +++ b/rust/automerge-c/src/actor_id.rs @@ -40,9 +40,12 @@ impl AMactorId { match hex_str.as_mut() { None => { let hex_string = unsafe { (*self.body).to_hex_string() }; - hex_str.insert(hex_string.into_boxed_str()).as_bytes().into() + hex_str + .insert(hex_string.into_boxed_str()) + .as_bytes() + .into() } - Some(hex_str) => hex_str.as_bytes().into() + Some(hex_str) => hex_str.as_bytes().into(), } } } @@ -154,13 +157,12 @@ pub unsafe extern "C" fn AMactorIdInitBytes(src: *const u8, count: usize) -> *mu /// hex_str must be a valid pointer to an AMbyteSpan #[no_mangle] pub unsafe extern "C" fn AMactorIdInitStr(hex_str: AMbyteSpan) -> *mut AMresult { - use am::AutomergeError::InvalidActorId; - // use am::AutomergeError::InvalidCharacter; + use am::AutomergeError::InvalidActorId; to_result(match (&hex_str).try_into() { Ok(s) => match am::ActorId::from_str(s) { Ok(actor_id) => Ok(actor_id), - Err(_) => Err(InvalidActorId(String::from(s))) + Err(_) => Err(InvalidActorId(String::from(s))), }, Err(e) => Err(e), }) diff --git a/rust/automerge-c/src/byte_span.rs b/rust/automerge-c/src/byte_span.rs index 3fcefba8..a846cf58 100644 --- a/rust/automerge-c/src/byte_span.rs +++ b/rust/automerge-c/src/byte_span.rs @@ -49,8 +49,7 @@ impl PartialEq for AMbyteSpan { fn eq(&self, other: &Self) -> bool { if self.count != other.count { return false; - } - else if self.src == other.src { + } else if self.src == other.src { return true; } let slice = unsafe { std::slice::from_raw_parts(self.src, self.count) }; diff --git 
a/rust/automerge-c/src/change.rs b/rust/automerge-c/src/change.rs index 10326fe7..d64a2635 100644 --- a/rust/automerge-c/src/change.rs +++ b/rust/automerge-c/src/change.rs @@ -34,7 +34,7 @@ impl AMchange { pub fn message(&self) -> AMbyteSpan { if let Some(message) = unsafe { (*self.body).message() } { - return message.as_str().as_bytes().into() + return message.as_str().as_bytes().into(); } Default::default() } diff --git a/rust/automerge-c/src/doc/map.rs b/rust/automerge-c/src/doc/map.rs index fbd6c1cd..dbf4d61f 100644 --- a/rust/automerge-c/src/doc/map.rs +++ b/rust/automerge-c/src/doc/map.rs @@ -419,11 +419,7 @@ pub unsafe extern "C" fn AMmapPutTimestamp( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let key = to_str!(key); - to_result(doc.put( - to_obj_id!(obj_id), - key, - am::ScalarValue::Timestamp(value), - )) + to_result(doc.put(to_obj_id!(obj_id), key, am::ScalarValue::Timestamp(value))) } /// \memberof AMdoc diff --git a/rust/automerge-c/src/strs.rs b/rust/automerge-c/src/strs.rs index 2b973714..a36861b7 100644 --- a/rust/automerge-c/src/strs.rs +++ b/rust/automerge-c/src/strs.rs @@ -267,7 +267,7 @@ pub unsafe extern "C" fn AMstrsCmp(strs1: *const AMstrs, strs2: *const AMstrs) - pub unsafe extern "C" fn AMstrsNext(strs: *mut AMstrs, n: isize) -> AMbyteSpan { if let Some(strs) = strs.as_mut() { if let Some(key) = strs.next(n) { - return key + return key; } } Default::default() diff --git a/rust/automerge/src/error.rs b/rust/automerge/src/error.rs index 7bedff2e..010f33c6 100644 --- a/rust/automerge/src/error.rs +++ b/rust/automerge/src/error.rs @@ -39,7 +39,7 @@ pub enum AutomergeError { Load(#[from] LoadError), #[error("increment operations must be against a counter value")] MissingCounter, - #[error("hash {0} does not correspond to a change in this document")] + #[error("hash {0} does not correspond to a change in this document")] MissingHash(ChangeHash), #[error("compressed chunk was not a change")] NonChangeCompressed, From 
625f48f33a7fec53fe0131be3b52e21ecbd779ad Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Fri, 18 Nov 2022 10:12:23 -0800 Subject: [PATCH 645/730] Fixed clippy violations. --- rust/automerge-c/src/doc/list/item.rs | 2 +- rust/automerge-c/src/doc/map/item.rs | 2 +- rust/automerge-c/src/obj/item.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rust/automerge-c/src/doc/list/item.rs b/rust/automerge-c/src/doc/list/item.rs index 0d7b2d98..7a3869f3 100644 --- a/rust/automerge-c/src/doc/list/item.rs +++ b/rust/automerge-c/src/doc/list/item.rs @@ -20,7 +20,7 @@ impl AMlistItem { Self { index, obj_id: AMobjId::new(obj_id), - value: value, + value, } } } diff --git a/rust/automerge-c/src/doc/map/item.rs b/rust/automerge-c/src/doc/map/item.rs index b206f23e..7914fdc4 100644 --- a/rust/automerge-c/src/doc/map/item.rs +++ b/rust/automerge-c/src/doc/map/item.rs @@ -21,7 +21,7 @@ impl AMmapItem { Self { key: key.to_string(), obj_id: AMobjId::new(obj_id), - value: value, + value, } } } diff --git a/rust/automerge-c/src/obj/item.rs b/rust/automerge-c/src/obj/item.rs index acac0893..a2e99d06 100644 --- a/rust/automerge-c/src/obj/item.rs +++ b/rust/automerge-c/src/obj/item.rs @@ -17,7 +17,7 @@ impl AMobjItem { pub fn new(value: am::Value<'static>, obj_id: am::ObjId) -> Self { Self { obj_id: AMobjId::new(obj_id), - value: value, + value, } } } From edbb33522dadb96993d85315e2f6ad0f1d4ff1d5 Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Fri, 18 Nov 2022 23:53:44 -0800 Subject: [PATCH 646/730] Replaced the C string (`*const libc::c_char`) value of the `AMresult::Error` variant with a UTF-8 string view (`AMbyteSpan`). 
--- rust/automerge-c/src/result.rs | 14 +++--- rust/automerge-c/test/actor_id_tests.c | 9 ++-- rust/automerge-c/test/cmocka_utils.h | 22 +++++++++ rust/automerge-c/test/list_tests.c | 3 +- rust/automerge-c/test/map_tests.c | 3 +- .../test/ported_wasm/basic_tests.c | 24 +++++----- .../automerge-c/test/ported_wasm/sync_tests.c | 48 +++++++++---------- rust/automerge-c/test/stack_utils.c | 3 +- 8 files changed, 75 insertions(+), 51 deletions(-) create mode 100644 rust/automerge-c/test/cmocka_utils.h diff --git a/rust/automerge-c/src/result.rs b/rust/automerge-c/src/result.rs index 29fb2f36..d7d6bce8 100644 --- a/rust/automerge-c/src/result.rs +++ b/rust/automerge-c/src/result.rs @@ -3,9 +3,7 @@ use automerge as am; use smol_str::SmolStr; use std::any::type_name; use std::collections::BTreeMap; -use std::ffi::CString; use std::ops::{Range, RangeFrom, RangeFull, RangeTo}; -use std::os::raw::c_char; use crate::actor_id::AMactorId; use crate::byte_span::AMbyteSpan; @@ -343,7 +341,7 @@ pub enum AMresult { ChangeHashes(Vec), Changes(Vec, Option>), Doc(Box), - Error(CString), + Error(String), ListItems(Vec), MapItems(Vec), ObjId(AMobjId), @@ -358,7 +356,7 @@ pub enum AMresult { impl AMresult { pub(crate) fn err(s: &str) -> Self { - AMresult::Error(CString::new(s).unwrap()) + AMresult::Error(s.to_string()) } } @@ -739,17 +737,17 @@ pub enum AMstatus { /// \brief Gets a result's error message string. /// /// \param[in] result A pointer to an `AMresult` struct. -/// \return A UTF-8 string or `NULL`. +/// \return A UTF-8 string view as an `AMbyteSpan` struct. /// \pre \p result `!= NULL`. 
/// \internal /// /// # Safety /// result must be a valid pointer to an AMresult #[no_mangle] -pub unsafe extern "C" fn AMerrorMessage(result: *const AMresult) -> *const c_char { +pub unsafe extern "C" fn AMerrorMessage(result: *const AMresult) -> AMbyteSpan { match result.as_ref() { - Some(AMresult::Error(s)) => s.as_ptr(), - _ => std::ptr::null::(), + Some(AMresult::Error(s)) => s.as_bytes().into(), + _ => Default::default(), } } diff --git a/rust/automerge-c/test/actor_id_tests.c b/rust/automerge-c/test/actor_id_tests.c index 51245144..c98f2554 100644 --- a/rust/automerge-c/test/actor_id_tests.c +++ b/rust/automerge-c/test/actor_id_tests.c @@ -11,6 +11,7 @@ /* local */ #include +#include "cmocka_utils.h" #include "str_utils.h" typedef struct { @@ -45,7 +46,7 @@ static void test_AMactorIdInit() { for (size_t i = 0; i != 11; ++i) { result = AMactorIdInit(); if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(result)); + fail_msg_view("%s", AMerrorMessage(result)); } assert_int_equal(AMresultSize(result), 1); AMvalue const value = AMresultValue(result); @@ -70,7 +71,7 @@ static void test_AMactorIdInitBytes(void **state) { GroupState* group_state = *state; AMresult* const result = AMactorIdInitBytes(group_state->src, group_state->count); if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(result)); + fail_msg_view("%s", AMerrorMessage(result)); } assert_int_equal(AMresultSize(result), 1); AMvalue const value = AMresultValue(result); @@ -85,7 +86,7 @@ static void test_AMactorIdInitStr(void **state) { GroupState* group_state = *state; AMresult* const result = AMactorIdInitStr(group_state->str); if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(result)); + fail_msg_view("%s", AMerrorMessage(result)); } assert_int_equal(AMresultSize(result), 1); AMvalue const value = AMresultValue(result); @@ -103,7 +104,7 @@ static void test_AMactorIdInitStr(void **state) { int run_actor_id_tests(void) { 
const struct CMUnitTest tests[] = { -// cmocka_unit_test(test_AMactorIdInit), + cmocka_unit_test(test_AMactorIdInit), cmocka_unit_test(test_AMactorIdInitBytes), cmocka_unit_test(test_AMactorIdInitStr), }; diff --git a/rust/automerge-c/test/cmocka_utils.h b/rust/automerge-c/test/cmocka_utils.h new file mode 100644 index 00000000..1b488362 --- /dev/null +++ b/rust/automerge-c/test/cmocka_utils.h @@ -0,0 +1,22 @@ +#ifndef CMOCKA_UTILS_H +#define CMOCKA_UTILS_H + +#include + +/* third-party */ +#include + +/** + * \brief Forces the test to fail immediately and quit, printing the reason. + * + * \param[in] view A string view as an `AMbyteSpan` struct. + */ +#define fail_msg_view(msg, view) do { \ + char* const c_str = test_calloc(1, view.count + 1); \ + strncpy(c_str, view.src, view.count); \ + print_error(msg, c_str); \ + test_free(c_str); \ + fail(); \ +} while (0) + +#endif /* CMOCKA_UTILS_H */ diff --git a/rust/automerge-c/test/list_tests.c b/rust/automerge-c/test/list_tests.c index e695965d..1bf16ddb 100644 --- a/rust/automerge-c/test/list_tests.c +++ b/rust/automerge-c/test/list_tests.c @@ -11,6 +11,7 @@ /* local */ #include +#include "cmocka_utils.h" #include "group_state.h" #include "macro_utils.h" #include "stack_utils.h" @@ -83,7 +84,7 @@ static void test_AMlistPutNull_ ## mode(void **state) { \ !strcmp(#mode, "insert"))); \ AMresult* const result = AMlistGet(group_state->doc, AM_ROOT, 0, NULL); \ if (AMresultStatus(result) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(result)); \ + fail_msg_view("%s", AMerrorMessage(result)); \ } \ assert_int_equal(AMresultSize(result), 1); \ assert_int_equal(AMresultValue(result).tag, AM_VALUE_NULL); \ diff --git a/rust/automerge-c/test/map_tests.c b/rust/automerge-c/test/map_tests.c index 7fa3bb70..c894ebb5 100644 --- a/rust/automerge-c/test/map_tests.c +++ b/rust/automerge-c/test/map_tests.c @@ -11,6 +11,7 @@ /* local */ #include +#include "cmocka_utils.h" #include "group_state.h" #include "macro_utils.h" #include 
"stack_utils.h" @@ -75,7 +76,7 @@ static void test_AMmapPutNull(void **state) { AMfree(AMmapPutNull(group_state->doc, AM_ROOT, KEY)); AMresult* const result = AMmapGet(group_state->doc, AM_ROOT, KEY, NULL); if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(result)); + fail_msg_view("%s", AMerrorMessage(result)); } assert_int_equal(AMresultSize(result), 1); assert_int_equal(AMresultValue(result).tag, AM_VALUE_NULL); diff --git a/rust/automerge-c/test/ported_wasm/basic_tests.c b/rust/automerge-c/test/ported_wasm/basic_tests.c index e233aa41..aafa32d8 100644 --- a/rust/automerge-c/test/ported_wasm/basic_tests.c +++ b/rust/automerge-c/test/ported_wasm/basic_tests.c @@ -407,7 +407,7 @@ static void test_lists_have_insert_set_splice_and_push_ops(void** state) { } /* doc.push(sublist, 3, "timestamp"); */ AMfree(AMlistPutTimestamp(doc, sublist, SIZE_MAX, true, 3)); - /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3)] })*/ + /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3)] } */ doc_items = AMpush(&stack, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, @@ -440,7 +440,7 @@ static void test_lists_have_insert_set_splice_and_push_ops(void** state) { {.str_tag = AM_VALUE_STR, .str = {.src = "e", .count = 1}}, {.str_tag = AM_VALUE_STR, .str = {.src = "f", .count = 1}}}; AMfree(AMsplice(doc, sublist, 1, 1, DATA, sizeof(DATA)/sizeof(AMvalue))); - /* assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", new Date(3)] })*/ + /* assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", new Date(3)] } */ doc_items = AMpush(&stack, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, @@ -476,7 +476,7 @@ static void test_lists_have_insert_set_splice_and_push_ops(void** state) { } /* doc.put(sublist, 0, "z"); */ AMfree(AMlistPutStr(doc, sublist, 0, false, AMstr("z"))); - /* assert.deepEqual(doc.materialize(), { letters: 
["z", "d", "e", "f", "c", new Date(3)] })*/ + /* assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", new Date(3)] } */ doc_items = AMpush(&stack, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), AM_VALUE_MAP_ITEMS, @@ -510,7 +510,7 @@ static void test_lists_have_insert_set_splice_and_push_ops(void** state) { 3); assert_null(AMlistItemsNext(&list_items, 1)); } - /* assert.deepEqual(doc.materialize(sublist), ["z", "d", "e", "f", "c", new Date(3)])*/ + /* assert.deepEqual(doc.materialize(sublist), ["z", "d", "e", "f", "c", new Date(3)] */ AMlistItems sublist_items = AMpush( &stack, AMlistRange(doc, sublist, 0, SIZE_MAX, NULL), @@ -536,7 +536,7 @@ static void test_lists_have_insert_set_splice_and_push_ops(void** state) { assert_null(AMlistItemsNext(&sublist_items, 1)); /* assert.deepEqual(doc.length(sublist), 6) */ assert_int_equal(AMobjSize(doc, sublist, NULL), 6); - /* assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c"] })*/ + /* assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c"] } */ doc_items = AMpush(&stack, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), AM_VALUE_MAP_ITEMS, @@ -1278,7 +1278,7 @@ static void test_should_be_able_to_fetch_changes_by_hash(void** state) { AM_VALUE_CHANGE_HASHES, cmocka_cb).change_hashes; /* const change1 = doc1.getChangeByHash(head1[0]) - if (change1 === null) { throw new RangeError("change1 should not be null") }*/ + if (change1 === null) { throw new RangeError("change1 should not be null") */ AMbyteSpan const change_hash1 = AMchangeHashesNext(&head1, 1); AMchanges change1 = AMpush( &stack, @@ -1311,7 +1311,7 @@ static void test_recursive_sets_are_possible(void** state) { cmocka_cb).actor_id), AM_VALUE_DOC, cmocka_cb).doc; - /* const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]])*/ + /* const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]] */ AMobjId const* const l1 = AMpush( &stack, AMmapPutObject(doc, AM_ROOT, 
AMstr("list"), AM_OBJ_TYPE_LIST), @@ -1348,7 +1348,7 @@ static void test_recursive_sets_are_possible(void** state) { AMfree(AMlistPutStr(doc, list, SIZE_MAX, true, AMstr("a"))); AMfree(AMlistPutStr(doc, list, SIZE_MAX, true, AMstr("b"))); } - /* const l3 = doc.putObject("_root", "info1", "hello world") // 'text' object*/ + /* const l3 = doc.putObject("_root", "info1", "hello world") // 'text' object */ AMobjId const* const l3 = AMpush( &stack, AMmapPutObject(doc, AM_ROOT, AMstr("info1"), AM_OBJ_TYPE_TEXT), @@ -1492,7 +1492,7 @@ static void test_recursive_sets_are_possible(void** state) { assert_int_equal(str.count, 1); assert_memory_equal(str.src, "b", str.count); } - /* assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]])*/ + /* assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]] */ AMlistItems list_items = AMpush( &stack, AMlistRange(doc, l1, 0, SIZE_MAX, NULL), @@ -1750,7 +1750,7 @@ static void test_should_allow_you_to_forkAt_a_heads(void** state) { cmocka_cb).change_hashes; /* A.put("/", "key5", "val5"); */ AMfree(AMmapPutStr(A, AM_ROOT, AMstr("key5"), AMstr("val5"))); - /* assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/", heads1))*/ + /* assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/", heads1) */ AMmapItems AforkAt1_items = AMpush( &stack, AMmapRange( @@ -1763,7 +1763,7 @@ static void test_should_allow_you_to_forkAt_a_heads(void** state) { AM_VALUE_MAP_ITEMS, cmocka_cb).map_items; assert_true(AMmapItemsEqual(&AforkAt1_items, &A1_items)); - /* assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/", heads2))*/ + /* assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/", heads2) */ AMmapItems AforkAt2_items = AMpush( &stack, AMmapRange( @@ -1844,7 +1844,7 @@ static void test_should_handle_merging_text_conflicts_then_saving_and_loading(vo AM_VALUE_DOC, cmocka_cb).doc; /* */ - /* assert.deepEqual(C.getWithType('_root', 
'text'), ['text', '1@aabbcc'])*/ + /* assert.deepEqual(C.getWithType('_root', 'text'), ['text', '1@aabbcc'] */ AMobjId const* const C_text = AMpush(&stack, AMmapGet(C, AM_ROOT, AMstr("text"), NULL), AM_VALUE_OBJ_ID, diff --git a/rust/automerge-c/test/ported_wasm/sync_tests.c b/rust/automerge-c/test/ported_wasm/sync_tests.c index f2d76db4..a1ddbf3c 100644 --- a/rust/automerge-c/test/ported_wasm/sync_tests.c +++ b/rust/automerge-c/test/ported_wasm/sync_tests.c @@ -136,7 +136,7 @@ static void test_should_not_reply_if_we_have_no_data_as_well(void **state) { const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; /* const m1 = n1.generateSyncMessage(s1) - if (m1 === null) { throw new RangeError("message should not be null") }*/ + if (m1 === null) { throw new RangeError("message should not be null") */ AMsyncMessage const* const m1 = AMpush(&test_state->stack, AMgenerateSyncMessage( test_state->n1, @@ -191,7 +191,7 @@ static void test_repos_with_equal_heads_do_not_need_a_reply_message(void **state /* */ /* generate a naive sync message */ /* const m1 = n1.generateSyncMessage(s1) - if (m1 === null) { throw new RangeError("message should not be null") }*/ + if (m1 === null) { throw new RangeError("message should not be null") */ AMsyncMessage const* m1 = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n1, test_state->s1), @@ -342,9 +342,9 @@ static void test_should_not_generate_messages_once_synced(void **state) { /* let message, patch for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { - // n1.put("_root", "x", i) */ + /* n1.put("_root", "x", i) */ AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); - // n1.commit("", 0) */ + /* n1.commit("", 0) */ AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* { */ } @@ -359,7 +359,7 @@ static void test_should_not_generate_messages_once_synced(void **state) { /* */ /* n1 reports what it has */ /* message = n1.generateSyncMessage(s1) - if (message === null) { 
throw new RangeError("message should not be null") }*/ + if (message === null) { throw new RangeError("message should not be null") */ AMsyncMessage const* message = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n1, test_state->s1), @@ -370,7 +370,7 @@ static void test_should_not_generate_messages_once_synced(void **state) { /* n2.receiveSyncMessage(s2, message) */ AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, message)); /* message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be null") }*/ + if (message === null) { throw new RangeError("message should not be null") */ message = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n2, test_state->s2), AM_VALUE_SYNC_MESSAGE, @@ -383,7 +383,7 @@ static void test_should_not_generate_messages_once_synced(void **state) { /* n1.receiveSyncMessage(s1, message) */ AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, message)); /* message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be null") }*/ + if (message === null) { throw new RangeError("message should not be null") */ message = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n1, test_state->s1), AM_VALUE_SYNC_MESSAGE, @@ -395,7 +395,7 @@ static void test_should_not_generate_messages_once_synced(void **state) { /* n2.receiveSyncMessage(s2, message) */ AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, message)); /* message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be null") }*/ + if (message === null) { throw new RangeError("message should not be null") */ message = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n2, test_state->s2), AM_VALUE_SYNC_MESSAGE, @@ -469,14 +469,14 @@ static void test_should_allow_simultaneous_messages_during_synchronization(void /* both sides report what they have but have no shared peer state */ /* let 
msg1to2, msg2to1 msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") }*/ + if (msg1to2 === null) { throw new RangeError("message should not be null") */ AMsyncMessage const* msg1to2 = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n1, test_state->s1), AM_VALUE_SYNC_MESSAGE, cmocka_cb).sync_message; /* msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be null") }*/ + if (msg2to1 === null) { throw new RangeError("message should not be null") */ AMsyncMessage const* msg2to1 = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n2, test_state->s2), @@ -485,7 +485,7 @@ static void test_should_allow_simultaneous_messages_during_synchronization(void /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) */ AMchanges msg1to2_changes = AMsyncMessageChanges(msg1to2); assert_int_equal(AMchangesSize(&msg1to2_changes), 0); - /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0)*/ + /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0 */ AMsyncHaves msg1to2_haves = AMsyncMessageHaves(msg1to2); AMsyncHave const* msg1to2_have = AMsyncHavesNext(&msg1to2_haves, 1); AMchangeHashes msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); @@ -493,13 +493,13 @@ static void test_should_allow_simultaneous_messages_during_synchronization(void /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) */ AMchanges msg2to1_changes = AMsyncMessageChanges(msg2to1); assert_int_equal(AMchangesSize(&msg2to1_changes), 0); - /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0)*/ + /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0 */ AMsyncHaves msg2to1_haves = AMsyncMessageHaves(msg2to1); AMsyncHave const* msg2to1_have = AMsyncHavesNext(&msg2to1_haves, 1); AMchangeHashes msg2to1_last_sync = AMsyncHaveLastSync(msg2to1_have); 
assert_int_equal(AMchangeHashesSize(&msg2to1_last_sync), 0); /* */ - /* n1 and n2 receive that message and update sync state but make no patch*/ + /* n1 and n2 receive that message and update sync state but make no patc */ /* n1.receiveSyncMessage(s1, msg2to1) */ AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1)); /* n2.receiveSyncMessage(s2, msg1to2) */ @@ -509,7 +509,7 @@ static void test_should_allow_simultaneous_messages_during_synchronization(void * (standard warning that 1% of the time this will result in a "needs" * message) */ /* msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") }*/ + if (msg1to2 === null) { throw new RangeError("message should not be null") */ msg1to2 = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n1, test_state->s1), AM_VALUE_SYNC_MESSAGE, @@ -518,7 +518,7 @@ static void test_should_allow_simultaneous_messages_during_synchronization(void msg1to2_changes = AMsyncMessageChanges(msg1to2); assert_int_equal(AMchangesSize(&msg1to2_changes), 5); /* msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be null") }*/ + if (msg2to1 === null) { throw new RangeError("message should not be null") */ msg2to1 = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n2, test_state->s2), AM_VALUE_SYNC_MESSAGE, @@ -571,7 +571,7 @@ static void test_should_allow_simultaneous_messages_during_synchronization(void /* The response acknowledges the changes received and sends no further * changes */ /* msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") }*/ + if (msg1to2 === null) { throw new RangeError("message should not be null") */ msg1to2 = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n1, test_state->s1), AM_VALUE_SYNC_MESSAGE, @@ -580,7 +580,7 @@ static void test_should_allow_simultaneous_messages_during_synchronization(void 
msg1to2_changes = AMsyncMessageChanges(msg1to2); assert_int_equal(AMchangesSize(&msg1to2_changes), 0); /* msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be null") }*/ + if (msg2to1 === null) { throw new RangeError("message should not be null") */ msg2to1 = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n2, test_state->s2), AM_VALUE_SYNC_MESSAGE, @@ -632,12 +632,12 @@ static void test_should_allow_simultaneous_messages_during_synchronization(void /* n1.put("_root", "x", 5) */ AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 5)); /* msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") }*/ + if (msg1to2 === null) { throw new RangeError("message should not be null") */ msg1to2 = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n1, test_state->s1), AM_VALUE_SYNC_MESSAGE, cmocka_cb).sync_message; - /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()*/ + /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort( */ msg1to2_haves = AMsyncMessageHaves(msg1to2); msg1to2_have = AMsyncHavesNext(&msg1to2_haves, 1); msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); @@ -677,7 +677,7 @@ static void test_should_assume_sent_changes_were_received_until_we_hear_otherwis /* n1.commit("", 0) */ AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") }*/ + if (message === null) { throw new RangeError("message should not be null") */ AMsyncMessage const* message = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n1, test_state->s1), @@ -692,7 +692,7 @@ static void test_should_assume_sent_changes_were_received_until_we_hear_otherwis /* n1.commit("", 0) */ AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* message = 
n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") }*/ + if (message === null) { throw new RangeError("message should not be null") */ message = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n1, test_state->s1), AM_VALUE_SYNC_MESSAGE, @@ -707,7 +707,7 @@ static void test_should_assume_sent_changes_were_received_until_we_hear_otherwis AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); /* */ /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") }*/ + if (message === null) { throw new RangeError("message should not be null") */ message = AMpush(&test_state->stack, AMgenerateSyncMessage(test_state->n1, test_state->s1), AM_VALUE_SYNC_MESSAGE, @@ -1143,7 +1143,7 @@ static void test_should_resync_after_one_node_experiences_data_loss_without_disc * \brief should handle changes concurrent to the last sync heads */ static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void **state) { - /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98')*/ + /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98' */ TestState* test_state = *state; AMdoc* n3 = AMpush(&test_state->stack, AMcreate(AMpush(&test_state->stack, @@ -1152,7 +1152,7 @@ static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void * cmocka_cb).actor_id), AM_VALUE_DOC, cmocka_cb).doc; - /* const s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState()*/ + /* const s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState( */ AMsyncState* s12 = test_state->s1; AMsyncState* s21 = test_state->s2; AMsyncState* s23 = AMpush(&test_state->stack, diff --git a/rust/automerge-c/test/stack_utils.c b/rust/automerge-c/test/stack_utils.c index 8eb8b72d..f65ea2e5 100644 --- a/rust/automerge-c/test/stack_utils.c +++ 
b/rust/automerge-c/test/stack_utils.c @@ -6,6 +6,7 @@ #include /* local */ +#include "cmocka_utils.h" #include "stack_utils.h" void cmocka_cb(AMresultStack** stack, uint8_t discriminant) { @@ -13,7 +14,7 @@ void cmocka_cb(AMresultStack** stack, uint8_t discriminant) { assert_non_null(*stack); assert_non_null((*stack)->result); if (AMresultStatus((*stack)->result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage((*stack)->result)); + fail_msg_view("%s", AMerrorMessage((*stack)->result)); } assert_int_equal(AMresultValue((*stack)->result).tag, discriminant); } From fb0c69cc524a7b563bc0aa150a2e11d97a640bbe Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Fri, 18 Nov 2022 23:56:58 -0800 Subject: [PATCH 647/730] Updated the quickstart example to work with `AMbyteSpan` values instead of `*const libc::c_char` values. --- rust/automerge-c/examples/quickstart.c | 30 ++++++++++++++++---------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/rust/automerge-c/examples/quickstart.c b/rust/automerge-c/examples/quickstart.c index 0c94a1a2..bc418511 100644 --- a/rust/automerge-c/examples/quickstart.c +++ b/rust/automerge-c/examples/quickstart.c @@ -13,22 +13,22 @@ int main(int argc, char** argv) { AMresultStack* stack = NULL; AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, abort_cb).doc; AMobjId const* const cards = AMpush(&stack, - AMmapPutObject(doc1, AM_ROOT, "cards", AM_OBJ_TYPE_LIST), + AMmapPutObject(doc1, AM_ROOT, AMstr("cards"), AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, abort_cb).obj_id; AMobjId const* const card1 = AMpush(&stack, AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID, abort_cb).obj_id; - AMfree(AMmapPutStr(doc1, card1, "title", "Rewrite everything in Clojure")); - AMfree(AMmapPutBool(doc1, card1, "done", false)); + AMfree(AMmapPutStr(doc1, card1, AMstr("title"), AMstr("Rewrite everything in Clojure"))); + AMfree(AMmapPutBool(doc1, card1, AMstr("done"), false)); AMobjId const* const card2 = 
AMpush(&stack, AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), AM_VALUE_OBJ_ID, abort_cb).obj_id; - AMfree(AMmapPutStr(doc1, card2, "title", "Rewrite everything in Haskell")); - AMfree(AMmapPutBool(doc1, card2, "done", false)); - AMfree(AMcommit(doc1, "Add card", NULL)); + AMfree(AMmapPutStr(doc1, card2, AMstr("title"), AMstr("Rewrite everything in Haskell"))); + AMfree(AMmapPutBool(doc1, card2, AMstr("done"), false)); + AMfree(AMcommit(doc1, AMstr("Add card"), NULL)); AMdoc* doc2 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, abort_cb).doc; AMfree(AMmerge(doc2, doc1)); @@ -36,11 +36,11 @@ int main(int argc, char** argv) { AMbyteSpan const binary = AMpush(&stack, AMsave(doc1), AM_VALUE_BYTES, abort_cb).bytes; doc2 = AMpush(&stack, AMload(binary.src, binary.count), AM_VALUE_DOC, abort_cb).doc; - AMfree(AMmapPutBool(doc1, card1, "done", true)); - AMfree(AMcommit(doc1, "Mark card as done", NULL)); + AMfree(AMmapPutBool(doc1, card1, AMstr("done"), true)); + AMfree(AMcommit(doc1, AMstr("Mark card as done"), NULL)); AMfree(AMlistDelete(doc2, cards, 0)); - AMfree(AMcommit(doc2, "Delete card", NULL)); + AMfree(AMcommit(doc2, AMstr("Delete card"), NULL)); AMfree(AMmerge(doc1, doc2)); @@ -52,7 +52,11 @@ int main(int argc, char** argv) { AMchangeHashesInit(&change_hash, 1), AM_VALUE_CHANGE_HASHES, abort_cb).change_hashes; - printf("%s %ld\n", AMchangeMessage(change), AMobjSize(doc1, cards, &heads)); + AMbyteSpan const msg = AMchangeMessage(change); + char* const c_msg = calloc(1, msg.count + 1); + strncpy(c_msg, msg.src, msg.count); + printf("%s %ld\n", c_msg, AMobjSize(doc1, cards, &heads)); + free(c_msg); } AMfreeStack(&stack); } @@ -95,7 +99,11 @@ static void abort_cb(AMresultStack** stack, uint8_t discriminant) { default: sprintf(buffer, "Unknown `AMstatus` tag %d", status); } if (buffer[0]) { - fprintf(stderr, "%s; %s.", buffer, AMerrorMessage((*stack)->result)); + AMbyteSpan const msg = AMerrorMessage((*stack)->result); + char* const c_msg = calloc(1, 
msg.count + 1); + strncpy(c_msg, msg.src, msg.count); + fprintf(stderr, "%s; %s.", buffer, c_msg); + free(c_msg); AMfreeStack(stack); exit(EXIT_FAILURE); return; From f8428896bdc757a6bccd287037bae47df82de72f Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 19 Nov 2022 14:13:51 -0800 Subject: [PATCH 648/730] Added a test case for a map key containing NUL ('\0') based on #455. --- rust/automerge-c/test/list_tests.c | 17 +++++---- rust/automerge-c/test/map_tests.c | 60 ++++++++++++++++++++++++++---- 2 files changed, 61 insertions(+), 16 deletions(-) diff --git a/rust/automerge-c/test/list_tests.c b/rust/automerge-c/test/list_tests.c index 1bf16ddb..25a24329 100644 --- a/rust/automerge-c/test/list_tests.c +++ b/rust/automerge-c/test/list_tests.c @@ -346,16 +346,16 @@ static void test_get_list_values(void** state) { } /** \brief A JavaScript application can introduce NUL (`\0`) characters into a - * string which truncates them for a C application. + * list object's string value which will truncate it in a C application. 
*/ -static void test_get_NUL_string(void** state) { +static void test_get_NUL_string_value(void** state) { /* - import * as Automerge from "@automerge/automerge" - let doc = Automerge.init() + import * as Automerge from "@automerge/automerge"; + let doc = Automerge.init(); doc = Automerge.change(doc, doc => { - doc[0] = 'o\0ps' - }) - const bytes = Automerge.save(doc) + doc[0] = 'o\0ps'; + }); + const bytes = Automerge.save(doc); console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], bytes).join(", ") + "};"); */ static uint8_t const OOPS_VALUE[] = {'o', '\0', 'p', 's'}; @@ -381,6 +381,7 @@ static void test_get_NUL_string(void** state) { AMlistGet(doc, AM_ROOT, 0, NULL), AM_VALUE_STR, cmocka_cb).str; + assert_int_not_equal(str.count, strlen(OOPS_VALUE)); assert_int_equal(str.count, OOPS_SIZE); assert_memory_equal(str.src, OOPS_VALUE, str.count); } @@ -441,7 +442,7 @@ int run_list_tests(void) { cmocka_unit_test(test_AMlistPut(Uint, insert)), cmocka_unit_test(test_AMlistPut(Uint, update)), cmocka_unit_test_setup_teardown(test_get_list_values, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_get_NUL_string, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_get_NUL_string_value, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_insert_at_index, setup_stack, teardown_stack), }; diff --git a/rust/automerge-c/test/map_tests.c b/rust/automerge-c/test/map_tests.c index c894ebb5..51a536ce 100644 --- a/rust/automerge-c/test/map_tests.c +++ b/rust/automerge-c/test/map_tests.c @@ -149,16 +149,58 @@ static_void_test_AMmapPut(Timestamp, timestamp, INT64_MAX) static_void_test_AMmapPut(Uint, uint, UINT64_MAX) /** \brief A JavaScript application can introduce NUL (`\0`) characters into a - * string which truncates them for a C application. + * map object's key which will truncate it in a C application. 
*/ -static void test_get_NUL_string(void** state) { +static void test_get_NUL_key(void** state) { /* - import * as Automerge from "@automerge/automerge" - let doc = Automerge.init() + import * as Automerge from "@automerge/automerge"; + let doc = Automerge.init(); doc = Automerge.change(doc, doc => { - doc.oops = 'o\0ps' - }) - const bytes = Automerge.save(doc) + doc['o\0ps'] = 'oops'; + }); + const bytes = Automerge.save(doc); + console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], bytes).join(", ") + "};"); + */ + static uint8_t const OOPS_SRC[] = {'o', '\0', 'p', 's'}; + static AMbyteSpan const OOPS_KEY = {.src = OOPS_SRC, .count = sizeof(OOPS_SRC) / sizeof(uint8_t)}; + + static uint8_t const SAVED_DOC[] = { + 133, 111, 74, 131, 233, 150, 60, 244, 0, 116, 1, 16, 223, 253, 146, + 193, 58, 122, 66, 134, 151, 225, 210, 51, 58, 86, 247, 8, 1, 49, 118, + 234, 228, 42, 116, 171, 13, 164, 99, 244, 27, 19, 150, 44, 201, 136, + 222, 219, 90, 246, 226, 123, 77, 120, 157, 155, 55, 182, 2, 178, 64, 6, + 1, 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 6, 33, 2, 35, 2, 52, 1, + 66, 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, 127, 0, + 127, 7, 127, 4, 111, 0, 112, 115, 127, 0, 127, 1, 1, 127, 1, 127, 70, + 111, 111, 112, 115, 127, 0, 0 + }; + static size_t const SAVED_DOC_SIZE = sizeof(SAVED_DOC) / sizeof(uint8_t); + + AMresultStack* stack = *state; + AMdoc* const doc = AMpush(&stack, + AMload(SAVED_DOC, SAVED_DOC_SIZE), + AM_VALUE_DOC, + cmocka_cb).doc; + AMbyteSpan const str = AMpush(&stack, + AMmapGet(doc, AM_ROOT, OOPS_KEY, NULL), + AM_VALUE_STR, + cmocka_cb).str; + assert_int_not_equal(OOPS_KEY.count, strlen(OOPS_KEY.src)); + assert_int_equal(str.count, strlen("oops")); + assert_memory_equal(str.src, "oops", str.count); +} + +/** \brief A JavaScript application can introduce NUL (`\0`) characters into a + * map object's string value which will truncate it in a C application. 
+ */ +static void test_get_NUL_string_value(void** state) { + /* + import * as Automerge from "@automerge/automerge"; + let doc = Automerge.init(); + doc = Automerge.change(doc, doc => { + doc.oops = 'o\0ps'; + }); + const bytes = Automerge.save(doc); console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], bytes).join(", ") + "};"); */ static uint8_t const OOPS_VALUE[] = {'o', '\0', 'p', 's'}; @@ -185,6 +227,7 @@ static void test_get_NUL_string(void** state) { AMmapGet(doc, AM_ROOT, AMstr("oops"), NULL), AM_VALUE_STR, cmocka_cb).str; + assert_int_not_equal(str.count, strlen(OOPS_VALUE)); assert_int_equal(str.count, OOPS_SIZE); assert_memory_equal(str.src, OOPS_VALUE, str.count); } @@ -1380,7 +1423,8 @@ int run_map_tests(void) { cmocka_unit_test(test_AMmapPutStr), cmocka_unit_test(test_AMmapPut(Timestamp)), cmocka_unit_test(test_AMmapPut(Uint)), - cmocka_unit_test_setup_teardown(test_get_NUL_string, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_get_NUL_key, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_get_NUL_string_value, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_range_iter_map, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_single, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_double, setup_stack, teardown_stack), From d3885a3443de02eceff5e71439e53ca2a00e175c Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Mon, 28 Nov 2022 00:08:33 -0800 Subject: [PATCH 649/730] Hard-coded automerge-c's initial independent version number to "0.0.1" for @alexjg. 
--- rust/automerge-c/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust/automerge-c/CMakeLists.txt b/rust/automerge-c/CMakeLists.txt index e5a7b1ca..1b68669a 100644 --- a/rust/automerge-c/CMakeLists.txt +++ b/rust/automerge-c/CMakeLists.txt @@ -51,7 +51,7 @@ foreach(TOML_LINE IN ITEMS ${TOML_LINES}) endif() endforeach() -project(${CARGO_PKG_NAME} VERSION ${CARGO_PKG_VERSION} LANGUAGES C DESCRIPTION "C bindings for the Automerge Rust backend.") +project(${CARGO_PKG_NAME} VERSION 0.0.1 LANGUAGES C DESCRIPTION "C bindings for the Automerge Rust backend.") include(CTest) From aaddb3c9ea84960924f6c68ca54f0af33a0b31dd Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 28 Nov 2022 15:43:27 -0600 Subject: [PATCH 650/730] fix error message --- rust/automerge/src/error.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rust/automerge/src/error.rs b/rust/automerge/src/error.rs index 010f33c6..4e25cfd1 100644 --- a/rust/automerge/src/error.rs +++ b/rust/automerge/src/error.rs @@ -22,13 +22,13 @@ pub enum AutomergeError { InvalidCharacter(usize), #[error("invalid hash {0}")] InvalidHash(ChangeHash), - #[error("invalid seq {0}")] + #[error("index {0} is out of bounds")] InvalidIndex(usize), #[error("invalid obj id `{0}`")] InvalidObjId(String), #[error("invalid obj id format `{0}`")] InvalidObjIdFormat(String), - #[error("invalid seq {0}")] + #[error("seq {0} is out of bounds")] InvalidSeq(u64), #[error("invalid type of value, expected `{expected}` but received `{unexpected}`")] InvalidValueType { From e0b2bc995ae426c8f1b5c2433c14252815cdc6f5 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Wed, 30 Nov 2022 12:57:59 +0000 Subject: [PATCH 651/730] Update nix flake and add formatter and dead code check (#466) * Add formatter for flake * Update flake inputs * Remove unused vars in flake * Add deadnix check and fixup devshells naming --- flake.lock | 30 +++++++-------- flake.nix | 107 
+++++++++++++++++++++++++++-------------------------- 2 files changed, 70 insertions(+), 67 deletions(-) diff --git a/flake.lock b/flake.lock index b2070c2d..a052776b 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "flake-utils": { "locked": { - "lastModified": 1642700792, - "narHash": "sha256-XqHrk7hFb+zBvRg6Ghl+AZDq03ov6OshJLiSWOoX5es=", + "lastModified": 1667395993, + "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=", "owner": "numtide", "repo": "flake-utils", - "rev": "846b2ae0fc4cc943637d3d1def4454213e203cba", + "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f", "type": "github" }, "original": { @@ -17,11 +17,11 @@ }, "flake-utils_2": { "locked": { - "lastModified": 1637014545, - "narHash": "sha256-26IZAc5yzlD9FlDT54io1oqG/bBoyka+FJk5guaX4x4=", + "lastModified": 1659877975, + "narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=", "owner": "numtide", "repo": "flake-utils", - "rev": "bba5dcc8e0b20ab664967ad83d24d64cb64ec4f4", + "rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0", "type": "github" }, "original": { @@ -32,11 +32,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1643805626, - "narHash": "sha256-AXLDVMG+UaAGsGSpOtQHPIKB+IZ0KSd9WS77aanGzgc=", + "lastModified": 1669542132, + "narHash": "sha256-DRlg++NJAwPh8io3ExBJdNW7Djs3plVI5jgYQ+iXAZQ=", "owner": "nixos", "repo": "nixpkgs", - "rev": "554d2d8aa25b6e583575459c297ec23750adb6cb", + "rev": "a115bb9bd56831941be3776c8a94005867f316a7", "type": "github" }, "original": { @@ -48,11 +48,11 @@ }, "nixpkgs_2": { "locked": { - "lastModified": 1637453606, - "narHash": "sha256-Gy6cwUswft9xqsjWxFYEnx/63/qzaFUwatcbV5GF/GQ=", + "lastModified": 1665296151, + "narHash": "sha256-uOB0oxqxN9K7XGF1hcnY+PQnlQJ+3bP2vCn/+Ru/bbc=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "8afc4e543663ca0a6a4f496262cd05233737e732", + "rev": "14ccaaedd95a488dd7ae142757884d8e125b3363", "type": "github" }, "original": { @@ -75,11 +75,11 @@ "nixpkgs": "nixpkgs_2" }, "locked": { - "lastModified": 
1643941258, - "narHash": "sha256-uHyEuICSu8qQp6adPTqV33ajiwoF0sCh+Iazaz5r7fo=", + "lastModified": 1669775522, + "narHash": "sha256-6xxGArBqssX38DdHpDoPcPvB/e79uXyQBwpBcaO/BwY=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "674156c4c2f46dd6a6846466cb8f9fee84c211ca", + "rev": "3158e47f6b85a288d12948aeb9a048e0ed4434d6", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 938225b7..4f9ba1fe 100644 --- a/flake.nix +++ b/flake.nix @@ -3,63 +3,66 @@ inputs = { nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable"; - flake-utils = { - url = "github:numtide/flake-utils"; - inputs.nixpkgs.follows = "nixpkgs"; - }; + flake-utils.url = "github:numtide/flake-utils"; rust-overlay.url = "github:oxalica/rust-overlay"; }; - outputs = { self, nixpkgs, flake-utils, rust-overlay }: + outputs = { + self, + nixpkgs, + flake-utils, + rust-overlay, + }: flake-utils.lib.eachDefaultSystem - (system: - let - pkgs = import nixpkgs { - overlays = [ rust-overlay.overlay ]; - inherit system; - }; - lib = pkgs.lib; - rust = pkgs.rust-bin.stable.latest.default; - cargoNix = pkgs.callPackage ./Cargo.nix { - inherit pkgs; - release = true; - }; - debugCargoNix = pkgs.callPackage ./Cargo.nix { - inherit pkgs; - release = false; - }; - in - { - devShell = pkgs.mkShell { - buildInputs = with pkgs; - [ - (rust.override { - extensions = [ "rust-src" ]; - targets = [ "wasm32-unknown-unknown" ]; - }) - cargo-edit - cargo-watch - cargo-criterion - cargo-fuzz - cargo-flamegraph - cargo-deny - crate2nix - wasm-pack - pkgconfig - openssl - gnuplot + (system: let + pkgs = import nixpkgs { + overlays = [rust-overlay.overlays.default]; + inherit system; + }; + rust = pkgs.rust-bin.stable.latest.default; + in { + formatter = pkgs.alejandra; - nodejs - yarn + packages = { + deadnix = pkgs.runCommand "deadnix" {} '' + ${pkgs.deadnix}/bin/deadnix --fail ${./.} + mkdir $out + ''; + }; - # c deps - cmake - cmocka - doxygen + checks = { + inherit (self.packages.${system}) deadnix; + }; - 
rnix-lsp - nixpkgs-fmt - ]; - }; - }); + devShells.default = pkgs.mkShell { + buildInputs = with pkgs; [ + (rust.override { + extensions = ["rust-src"]; + targets = ["wasm32-unknown-unknown"]; + }) + cargo-edit + cargo-watch + cargo-criterion + cargo-fuzz + cargo-flamegraph + cargo-deny + crate2nix + wasm-pack + pkgconfig + openssl + gnuplot + + nodejs + yarn + + # c deps + cmake + cmocka + doxygen + + rnix-lsp + nixpkgs-fmt + ]; + }; + }); } From 149f870102e6386163a1ebb8f549263b7cbd03d1 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 30 Nov 2022 16:38:09 +0000 Subject: [PATCH 652/730] rust: Remove `Default` constraint from `OpObserver` --- rust/automerge/src/autocommit.rs | 2 +- rust/automerge/src/op_observer.rs | 12 ++++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/rust/automerge/src/autocommit.rs b/rust/automerge/src/autocommit.rs index f49871aa..c70a70be 100644 --- a/rust/automerge/src/autocommit.rs +++ b/rust/automerge/src/autocommit.rs @@ -31,7 +31,7 @@ impl AutoCommitWithObs { } } -impl Default for AutoCommitWithObs> { +impl Default for AutoCommitWithObs> { fn default() -> Self { let op_observer = O::default(); AutoCommitWithObs { diff --git a/rust/automerge/src/op_observer.rs b/rust/automerge/src/op_observer.rs index db3fdf92..82e89277 100644 --- a/rust/automerge/src/op_observer.rs +++ b/rust/automerge/src/op_observer.rs @@ -4,7 +4,7 @@ use crate::Prop; use crate::Value; /// An observer of operations applied to the document. -pub trait OpObserver: Default + Clone { +pub trait OpObserver { /// A new value has been inserted into the given object. /// /// - `parents`: A parents iterator that can be used to collect path information @@ -64,9 +64,7 @@ pub trait OpObserver: Default + Clone { /// Called by AutoCommit when creating a new transaction. 
Observer branch /// will be merged on `commit()` or thrown away on `rollback()` /// - fn branch(&self) -> Self { - Self::default() - } + fn branch(&self) -> Self; /// Merge observed information from a transaction. /// @@ -108,6 +106,8 @@ impl OpObserver for () { fn delete(&mut self, _parents: Parents<'_>, _objid: ExId, _prop: Prop) {} fn merge(&mut self, _other: &Self) {} + + fn branch(&self) -> Self {} } /// Capture operations into a [`Vec`] and store them as patches. @@ -183,6 +183,10 @@ impl OpObserver for VecOpObserver { fn merge(&mut self, other: &Self) { self.patches.extend_from_slice(other.patches.as_slice()) } + + fn branch(&self) -> Self { + Self::default() + } } /// A notification to the application that something has changed in a document. From ea5688e418e3c359abbf9712aa0219c582c48271 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 30 Nov 2022 16:38:35 +0000 Subject: [PATCH 653/730] rust: Make fields of `Transaction` and `TransactionInner` private It's tricky to modify these structs with the fields public as every change requires scanning the codebase for references to make sure you're not breaking any invariants. Make the fields private to ease development. 
--- rust/automerge/src/autocommit.rs | 4 +- rust/automerge/src/automerge.rs | 30 +++++--------- rust/automerge/src/transaction.rs | 2 +- rust/automerge/src/transaction/inner.rs | 41 +++++++++++++++++-- .../src/transaction/manual_transaction.rs | 18 ++++++-- 5 files changed, 66 insertions(+), 29 deletions(-) diff --git a/rust/automerge/src/autocommit.rs b/rust/automerge/src/autocommit.rs index c70a70be..f5621d32 100644 --- a/rust/automerge/src/autocommit.rs +++ b/rust/automerge/src/autocommit.rs @@ -127,7 +127,9 @@ impl AutoCommitWithObs { fn ensure_transaction_open(&mut self) { if self.transaction.is_none() { - self.transaction = Some((self.observation.branch(), self.doc.transaction_inner())); + let args = self.doc.transaction_args(); + let inner = TransactionInner::new(args); + self.transaction = Some((self.observation.branch(), inner)) } } diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs index 81b0c173..1953f47c 100644 --- a/rust/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ -14,7 +14,7 @@ use crate::op_set::OpSet; use crate::parents::Parents; use crate::storage::{self, load, CompressConfig}; use crate::transaction::{ - self, CommitOptions, Failure, Observed, Success, Transaction, TransactionInner, UnObserved, + self, CommitOptions, Failure, Observed, Success, Transaction, TransactionArgs, UnObserved, }; use crate::types::{ ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, @@ -114,25 +114,19 @@ impl Automerge { /// Start a transaction. 
pub fn transaction(&mut self) -> Transaction<'_, UnObserved> { - Transaction { - inner: Some(self.transaction_inner()), - doc: self, - observation: Some(UnObserved), - } + let args = self.transaction_args(); + Transaction::new(self, args, UnObserved) } pub fn transaction_with_observer( &mut self, op_observer: Obs, ) -> Transaction<'_, Observed> { - Transaction { - inner: Some(self.transaction_inner()), - doc: self, - observation: Some(Observed::new(op_observer)), - } + let args = self.transaction_args(); + Transaction::new(self, args, Observed::new(op_observer)) } - pub(crate) fn transaction_inner(&mut self) -> TransactionInner { + pub(crate) fn transaction_args(&mut self) -> TransactionArgs { let actor = self.get_actor_index(); let seq = self.states.get(&actor).map_or(0, |v| v.len()) as u64 + 1; let mut deps = self.get_heads(); @@ -142,15 +136,13 @@ impl Automerge { deps.push(last_hash); } } + // SAFETY: this unwrap is safe as we always add 1 + let start_op = NonZeroU64::new(self.max_op + 1).unwrap(); - TransactionInner { - actor, + TransactionArgs { + actor_index: actor, seq, - // SAFETY: this unwrap is safe as we always add 1 - start_op: NonZeroU64::new(self.max_op + 1).unwrap(), - time: 0, - message: None, - operations: vec![], + start_op, deps, } } diff --git a/rust/automerge/src/transaction.rs b/rust/automerge/src/transaction.rs index 4a91d5b5..b513bc63 100644 --- a/rust/automerge/src/transaction.rs +++ b/rust/automerge/src/transaction.rs @@ -7,7 +7,7 @@ mod transactable; pub use self::commit::CommitOptions; pub use self::transactable::Transactable; -pub(crate) use inner::TransactionInner; +pub(crate) use inner::{TransactionArgs, TransactionInner}; pub use manual_transaction::Transaction; pub use observation::{Observation, Observed, UnObserved}; pub use result::Failure; diff --git a/rust/automerge/src/transaction/inner.rs b/rust/automerge/src/transaction/inner.rs index fb199f07..8a71cb27 100644 --- a/rust/automerge/src/transaction/inner.rs +++ 
b/rust/automerge/src/transaction/inner.rs @@ -10,16 +10,49 @@ use crate::{AutomergeError, ObjType, OpType, ScalarValue}; #[derive(Debug, Clone)] pub(crate) struct TransactionInner { - pub(crate) actor: usize, + actor: usize, + seq: u64, + start_op: NonZeroU64, + time: i64, + message: Option, + deps: Vec, + operations: Vec<(ObjId, Prop, Op)>, +} + +/// Arguments required to create a new transaction +pub(crate) struct TransactionArgs { + /// The index of the actor ID this transaction will create ops for in the + /// [`OpSetMetadata::actors`] + pub(crate) actor_index: usize, + /// The sequence number of the change this transaction will create pub(crate) seq: u64, + /// The start op of the change this transaction will create pub(crate) start_op: NonZeroU64, - pub(crate) time: i64, - pub(crate) message: Option, + /// The dependencies of the change this transaction will create pub(crate) deps: Vec, - pub(crate) operations: Vec<(ObjId, Prop, Op)>, } impl TransactionInner { + pub(crate) fn new( + TransactionArgs { + actor_index: actor, + seq, + start_op, + deps, + }: TransactionArgs, + ) -> Self { + TransactionInner { + actor, + seq, + // SAFETY: this unwrap is safe as we always add 1 + start_op, + time: 0, + message: None, + operations: vec![], + deps, + } + } + pub(crate) fn pending_ops(&self) -> usize { self.operations.len() } diff --git a/rust/automerge/src/transaction/manual_transaction.rs b/rust/automerge/src/transaction/manual_transaction.rs index c5977020..171800b6 100644 --- a/rust/automerge/src/transaction/manual_transaction.rs +++ b/rust/automerge/src/transaction/manual_transaction.rs @@ -5,7 +5,7 @@ use crate::{Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ScalarValu use crate::{AutomergeError, Keys}; use crate::{ListRange, ListRangeAt, MapRange, MapRangeAt}; -use super::{observation, CommitOptions, Transactable, TransactionInner}; +use super::{observation, CommitOptions, Transactable, TransactionArgs, TransactionInner}; /// A transaction on a 
document. /// Transactions group operations into a single change so that no other operations can happen @@ -23,10 +23,20 @@ use super::{observation, CommitOptions, Transactable, TransactionInner}; pub struct Transaction<'a, Obs: observation::Observation> { // this is an option so that we can take it during commit and rollback to prevent it being // rolled back during drop. - pub(crate) inner: Option, + inner: Option, // As with `inner` this is an `Option` so we can `take` it during `commit` - pub(crate) observation: Option, - pub(crate) doc: &'a mut Automerge, + observation: Option, + doc: &'a mut Automerge, +} + +impl<'a, Obs: observation::Observation> Transaction<'a, Obs> { + pub(crate) fn new(doc: &'a mut Automerge, args: TransactionArgs, obs: Obs) -> Self { + Self { + inner: Some(TransactionInner::new(args)), + doc, + observation: Some(obs), + } + } } impl<'a, Obs: OpObserver> Transaction<'a, observation::Observed> { From de16adbcc588e757405cba49fe75984de8a052f3 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 30 Nov 2022 18:04:49 +0000 Subject: [PATCH 654/730] Explicity create empty changes Transactions with no ops in them are generally undesirable. They take up space in the change log but do nothing else. They are not useless though, it may occasionally be necessary to create an empty change in order to list all the current heads of the document as dependents of the empty change. The current API makes no distinction between empty changes and non-empty changes. If the user calls `Transaction::commit` a change is created regardless of whether there are ops to commit. To provide a more useful API modify `commit` so that if there is a no-op transaction then no changes are created, but provide explicit methods to create an empty change via `Transaction::empty_change`, `Automerge::empty_change` and `Autocommit::empty_change`. Also make these APIs available in Javascript and C. 
--- javascript/src/index.ts | 9 ++-- javascript/test/basic_test.ts | 16 +++++++ rust/automerge-c/src/doc.rs | 46 ++++++++++++++++++- rust/automerge-c/src/result.rs | 9 ++++ .../test/ported_wasm/basic_tests.c | 2 +- rust/automerge-wasm/index.d.ts | 3 +- rust/automerge-wasm/src/lib.rs | 13 +++++- rust/automerge-wasm/test/test.ts | 5 ++ rust/automerge/src/autocommit.rs | 24 +++++++++- rust/automerge/src/automerge.rs | 9 ++++ rust/automerge/src/automerge/tests.rs | 4 +- rust/automerge/src/transaction/inner.rs | 25 +++++++++- .../src/transaction/manual_transaction.rs | 10 ++++ rust/automerge/src/transaction/observation.rs | 10 ++-- rust/automerge/src/transaction/result.rs | 4 +- 15 files changed, 170 insertions(+), 19 deletions(-) diff --git a/javascript/src/index.ts b/javascript/src/index.ts index 67a27e00..8dece76b 100644 --- a/javascript/src/index.ts +++ b/javascript/src/index.ts @@ -301,7 +301,10 @@ export function change(doc: Doc, options: string | ChangeOptions | Chan } } -function progressDocument(doc: Doc, heads: Heads, callback?: PatchCallback): Doc { +function progressDocument(doc: Doc, heads: Heads | null, callback?: PatchCallback): Doc { + if (heads == null) { + return doc + } let state = _state(doc) let nextState = {...state, heads: undefined}; let nextDoc = state.handle.applyPatches(doc, nextState, callback) @@ -358,7 +361,7 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn(doc: Doc, options: string | ChangeOptions) { +export function emptyChange(doc: Doc, options: string | ChangeOptions | void) { if (options === undefined) { options = {} } @@ -376,7 +379,7 @@ export function emptyChange(doc: Doc, options: string | ChangeOptions) } const heads = state.handle.getHeads() - state.handle.commit(options.message, options.time) + state.handle.emptyChange(options.message, options.time) return progressDocument(doc, heads) } diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts index 1c2e9589..9245f161 100644 --- 
a/javascript/test/basic_test.ts +++ b/javascript/test/basic_test.ts @@ -246,6 +246,22 @@ describe('Automerge', () => { }) }) + describe('emptyChange', () => { + it('should generate a hash', () => { + let doc = Automerge.init() + doc = Automerge.change(doc, d => { + d.key = "value" + }) + let _ = Automerge.save(doc) + let headsBefore = Automerge.getHeads(doc) + headsBefore.sort() + doc = Automerge.emptyChange(doc, "empty change") + let headsAfter = Automerge.getHeads(doc) + headsAfter.sort() + assert.notDeepEqual(headsBefore, headsAfter) + }) + }) + describe('proxy lists', () => { it('behave like arrays', () => { let doc = Automerge.from({ diff --git a/rust/automerge-c/src/doc.rs b/rust/automerge-c/src/doc.rs index e9b6457c..2854a0e5 100644 --- a/rust/automerge-c/src/doc.rs +++ b/rust/automerge-c/src/doc.rs @@ -151,7 +151,8 @@ pub unsafe extern "C" fn AMcreate(actor_id: *const AMactorId) -> *mut AMresult { /// \param[in] message A UTF-8 string view as an `AMbyteSpan` struct. /// \param[in] timestamp A pointer to a 64-bit integer or `NULL`. /// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` -/// with one element. +/// with one element if there were operations to commit, or void if +/// there were no operations to commit. /// \pre \p doc `!= NULL`. /// \warning The returned `AMresult` struct must be deallocated with `AMfree()` /// in order to prevent a memory leak. @@ -176,6 +177,49 @@ pub unsafe extern "C" fn AMcommit( to_result(doc.commit_with(options)) } +/// \memberof AMdoc +/// \brief Creates an empty change with an optional message and/or *nix +/// timestamp (milliseconds). +/// +/// This is useful if you wish to create a "merge commit" which has as its +/// dependents the current heads of the document but you don't have any +/// operations to add to the document. 
+/// +/// \note If there are outstanding uncommitted changes to the document +/// then two changes will be created: one for creating the outstanding changes +/// and one for the empty change. The empty change will always be the +/// latest change in the document after this call and the returned hash will be +/// the hash of that empty change. +/// +/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] message A UTF-8 string view as an `AMbyteSpan` struct. +/// \param[in] timestamp A pointer to a 64-bit integer or `NULL`. +/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` +/// with one element. +/// \pre \p doc `!= NULL`. +/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` +/// in order to prevent a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMemptyChange( + doc: *mut AMdoc, + message: AMbyteSpan, + timestamp: *const i64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let mut options = CommitOptions::default(); + if !message.is_null() { + options.set_message(to_str!(message)); + } + if let Some(timestamp) = timestamp.as_ref() { + options.set_time(*timestamp); + } + to_result(doc.empty_change(options)) +} + /// \memberof AMdoc /// \brief Tests the equality of two documents after closing their respective /// transactions. 
diff --git a/rust/automerge-c/src/result.rs b/rust/automerge-c/src/result.rs index d7d6bce8..599ada96 100644 --- a/rust/automerge-c/src/result.rs +++ b/rust/automerge-c/src/result.rs @@ -372,6 +372,15 @@ impl From for AMresult { } } +impl From> for AMresult { + fn from(c: Option) -> Self { + match c { + Some(c) => c.into(), + None => AMresult::Void, + } + } +} + impl From> for AMresult { fn from(keys: am::Keys<'_, '_>) -> Self { AMresult::Strings(keys.collect()) diff --git a/rust/automerge-c/test/ported_wasm/basic_tests.c b/rust/automerge-c/test/ported_wasm/basic_tests.c index aafa32d8..ea8f1b85 100644 --- a/rust/automerge-c/test/ported_wasm/basic_tests.c +++ b/rust/automerge-c/test/ported_wasm/basic_tests.c @@ -37,7 +37,7 @@ static void test_start_and_commit(void** state) { /* const doc = create() */ AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* doc.commit() */ - AMpush(&stack, AMcommit(doc, AMstr(NULL), NULL), AM_VALUE_CHANGE_HASHES, cmocka_cb); + AMpush(&stack, AMemptyChange(doc, AMstr(NULL), NULL), AM_VALUE_CHANGE_HASHES, cmocka_cb); } /** diff --git a/rust/automerge-wasm/index.d.ts b/rust/automerge-wasm/index.d.ts index 67d03b84..90b7854a 100644 --- a/rust/automerge-wasm/index.d.ts +++ b/rust/automerge-wasm/index.d.ts @@ -168,7 +168,8 @@ export class Automerge { toJS(): MaterializeValue; // transactions - commit(message?: string, time?: number): Hash; + commit(message?: string, time?: number): Hash | null; + emptyChange(message?: string, time?: number): Hash; merge(other: Automerge): Heads; getActorId(): Actor; pendingOps(): number; diff --git a/rust/automerge-wasm/src/lib.rs b/rust/automerge-wasm/src/lib.rs index b4452202..d03f7226 100644 --- a/rust/automerge-wasm/src/lib.rs +++ b/rust/automerge-wasm/src/lib.rs @@ -131,7 +131,10 @@ impl Automerge { commit_opts.set_time(time as i64); } let hash = self.doc.commit_with(commit_opts); - JsValue::from_str(&hex::encode(hash.0)) + match hash { + Some(h) => 
JsValue::from_str(&hex::encode(h.0)), + None => JsValue::NULL, + } } pub fn merge(&mut self, other: &mut Automerge) -> Result { @@ -774,6 +777,14 @@ impl Automerge { } } } + + #[wasm_bindgen(js_name = emptyChange)] + pub fn empty_change(&mut self, message: Option, time: Option) -> JsValue { + let time = time.map(|f| f as i64); + let options = CommitOptions { message, time }; + let hash = self.doc.empty_change(options); + JsValue::from_str(&hex::encode(hash)) + } } #[wasm_bindgen(js_name = create)] diff --git a/rust/automerge-wasm/test/test.ts b/rust/automerge-wasm/test/test.ts index 8e8acd69..3e6abf69 100644 --- a/rust/automerge-wasm/test/test.ts +++ b/rust/automerge-wasm/test/test.ts @@ -188,7 +188,9 @@ describe('Automerge', () => { const hash2 = doc.commit() assert.deepEqual(doc.keys("_root"), ["bip"]) + assert.ok(hash1) assert.deepEqual(doc.keys("_root", [hash1]), ["bip", "foo"]) + assert.ok(hash2) assert.deepEqual(doc.keys("_root", [hash2]), ["bip"]) }) @@ -280,9 +282,12 @@ describe('Automerge', () => { const hash2 = doc.commit(); assert.strictEqual(doc.text(text), "hello big bad world") assert.strictEqual(doc.length(text), 19) + assert.ok(hash1) assert.strictEqual(doc.text(text, [hash1]), "hello world") assert.strictEqual(doc.length(text, [hash1]), 11) + assert.ok(hash2) assert.strictEqual(doc.text(text, [hash2]), "hello big bad world") + assert.ok(hash2) assert.strictEqual(doc.length(text, [hash2]), 19) }) diff --git a/rust/automerge/src/autocommit.rs b/rust/automerge/src/autocommit.rs index f5621d32..fbfc217d 100644 --- a/rust/automerge/src/autocommit.rs +++ b/rust/automerge/src/autocommit.rs @@ -268,12 +268,17 @@ impl AutoCommitWithObs { self.doc.get_heads() } - pub fn commit(&mut self) -> ChangeHash { + /// Commit any uncommitted changes + /// + /// Returns `None` if there were no operations to commit + pub fn commit(&mut self) -> Option { self.commit_with(CommitOptions::default()) } /// Commit the current operations with some options. 
/// + /// Returns `None` if there were no operations to commit + /// /// ``` /// # use automerge::transaction::CommitOptions; /// # use automerge::transaction::Transactable; @@ -287,7 +292,7 @@ impl AutoCommitWithObs { /// i64; /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(&mut self, options: CommitOptions) -> ChangeHash { + pub fn commit_with(&mut self, options: CommitOptions) -> Option { // ensure that even no changes triggers a change self.ensure_transaction_open(); let (current, tx) = self.transaction.take().unwrap(); @@ -301,6 +306,21 @@ impl AutoCommitWithObs { .map(|(_, tx)| tx.rollback(&mut self.doc)) .unwrap_or(0) } + + /// Generate an empty change + /// + /// The main reason to do this is if you wish to create a "merge commit" which has all the + /// current heads of the documents as dependencies but you have no new operations to create. + /// + /// Because this structure is an "autocommit" there may actually be outstanding operations to + /// submit. If this is the case this function will create two changes, one with the outstanding + /// operations and a new one with no operations. The returned `ChangeHash` will always be the + /// hash of the empty change. + pub fn empty_change(&mut self, options: CommitOptions) -> ChangeHash { + self.ensure_transaction_closed(); + let args = self.doc.transaction_args(); + TransactionInner::empty(&mut self.doc, args, options.message, options.time) + } } impl Transactable for AutoCommitWithObs { diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs index 1953f47c..dfca44cc 100644 --- a/rust/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ -256,6 +256,15 @@ impl Automerge { } } + /// Generate an empty change + /// + /// The main reason to do this is if you want to create a "merge commit", which is a change + /// that has all the current heads of the document as dependencies. 
+ pub fn empty_commit(&mut self, opts: CommitOptions) -> ChangeHash { + let args = self.transaction_args(); + Transaction::empty(self, args, opts) + } + /// Fork this document at the current point for use by a different actor. pub fn fork(&self) -> Self { let mut f = self.clone(); diff --git a/rust/automerge/src/automerge/tests.rs b/rust/automerge/src/automerge/tests.rs index 516363ab..d35b2997 100644 --- a/rust/automerge/src/automerge/tests.rs +++ b/rust/automerge/src/automerge/tests.rs @@ -1080,8 +1080,8 @@ fn delete_nothing_in_map_is_noop() { // deleting a missing key in a map should just be a noop assert!(tx.delete(ROOT, "a",).is_ok()); tx.commit(); - let last_change = doc.get_last_local_change().unwrap(); - assert_eq!(last_change.len(), 0); + let last_change = doc.get_last_local_change(); + assert!(last_change.is_none()); let bytes = doc.save(); assert!(Automerge::load(&bytes,).is_ok()); diff --git a/rust/automerge/src/transaction/inner.rs b/rust/automerge/src/transaction/inner.rs index 8a71cb27..6f0e8b07 100644 --- a/rust/automerge/src/transaction/inner.rs +++ b/rust/automerge/src/transaction/inner.rs @@ -44,7 +44,6 @@ impl TransactionInner { TransactionInner { actor, seq, - // SAFETY: this unwrap is safe as we always add 1 start_op, time: 0, message: None, @@ -53,14 +52,38 @@ impl TransactionInner { } } + /// Create an empty change + pub(crate) fn empty( + doc: &mut Automerge, + args: TransactionArgs, + message: Option, + time: Option, + ) -> ChangeHash { + Self::new(args).commit_impl(doc, message, time) + } + pub(crate) fn pending_ops(&self) -> usize { self.operations.len() } /// Commit the operations performed in this transaction, returning the hashes corresponding to /// the new heads. 
+ /// + /// Returns `None` if there were no operations to commit #[tracing::instrument(skip(self, doc))] pub(crate) fn commit( + self, + doc: &mut Automerge, + message: Option, + time: Option, + ) -> Option { + if self.pending_ops() == 0 { + return None; + } + Some(self.commit_impl(doc, message, time)) + } + + pub(crate) fn commit_impl( mut self, doc: &mut Automerge, message: Option, diff --git a/rust/automerge/src/transaction/manual_transaction.rs b/rust/automerge/src/transaction/manual_transaction.rs index 171800b6..cf3123df 100644 --- a/rust/automerge/src/transaction/manual_transaction.rs +++ b/rust/automerge/src/transaction/manual_transaction.rs @@ -39,6 +39,16 @@ impl<'a, Obs: observation::Observation> Transaction<'a, Obs> { } } +impl<'a> Transaction<'a, observation::UnObserved> { + pub(crate) fn empty( + doc: &'a mut Automerge, + args: TransactionArgs, + opts: CommitOptions, + ) -> ChangeHash { + TransactionInner::empty(doc, args, opts.message, opts.time) + } +} + impl<'a, Obs: OpObserver> Transaction<'a, observation::Observed> { pub fn observer(&mut self) -> &mut Obs { self.observation.as_mut().unwrap().observer() diff --git a/rust/automerge/src/transaction/observation.rs b/rust/automerge/src/transaction/observation.rs index fb380cd8..974004cf 100644 --- a/rust/automerge/src/transaction/observation.rs +++ b/rust/automerge/src/transaction/observation.rs @@ -13,7 +13,7 @@ pub trait Observation: private::Sealed { type CommitResult; fn observer(&mut self) -> Option<&mut Self::Obs>; - fn make_result(self, hash: ChangeHash) -> Self::CommitResult; + fn make_result(self, hash: Option) -> Self::CommitResult; fn branch(&self) -> Self; fn merge(&mut self, other: &Self); } @@ -33,12 +33,12 @@ impl Observed { impl Observation for Observed { type Obs = Obs; - type CommitResult = (Obs, ChangeHash); + type CommitResult = (Obs, Option); fn observer(&mut self) -> Option<&mut Self::Obs> { Some(&mut self.0) } - fn make_result(self, hash: ChangeHash) -> Self::CommitResult { + fn 
make_result(self, hash: Option) -> Self::CommitResult { (self.0, hash) } @@ -61,12 +61,12 @@ impl UnObserved { impl Observation for UnObserved { type Obs = (); - type CommitResult = ChangeHash; + type CommitResult = Option; fn observer(&mut self) -> Option<&mut Self::Obs> { None } - fn make_result(self, hash: ChangeHash) -> Self::CommitResult { + fn make_result(self, hash: Option) -> Self::CommitResult { hash } diff --git a/rust/automerge/src/transaction/result.rs b/rust/automerge/src/transaction/result.rs index 8943b7a2..5327ff44 100644 --- a/rust/automerge/src/transaction/result.rs +++ b/rust/automerge/src/transaction/result.rs @@ -5,8 +5,8 @@ use crate::ChangeHash; pub struct Success { /// The result of the transaction. pub result: O, - /// The hash of the change, also the head of the document. - pub hash: ChangeHash, + /// The hash of the change, will be `None` if the transaction did not create any operations + pub hash: Option, pub op_observer: Obs, } From 2826f4f08c91dc5e2096c20071d1dd8b7dcffbcf Mon Sep 17 00:00:00 2001 From: Alex Currie-Clark <1306728+acurrieclark@users.noreply.github.com> Date: Fri, 2 Dec 2022 14:42:13 +0000 Subject: [PATCH 655/730] automerge-wasm: Add deno as a target --- .github/workflows/ci.yaml | 14 +++++++++++++- rust/automerge-wasm/.gitignore | 1 + rust/automerge-wasm/deno-tests/deno.ts | 8 ++++++++ rust/automerge-wasm/package.json | 4 +++- scripts/ci/deno_tests | 6 ++++++ scripts/ci/run | 1 + 6 files changed, 32 insertions(+), 2 deletions(-) create mode 100644 rust/automerge-wasm/deno-tests/deno.ts create mode 100755 scripts/ci/deno_tests diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index edc5680b..0550619e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -77,7 +77,19 @@ jobs: run: rustup target add wasm32-unknown-unknown - name: run tests run: ./scripts/ci/wasm_tests - + deno_tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: denoland/setup-deno@v1 + with: 
+ deno-version: v1.x + - name: Install wasm-bindgen-cli + run: cargo install wasm-bindgen-cli wasm-opt + - name: Install wasm32 target + run: rustup target add wasm32-unknown-unknown + - name: run tests + run: ./scripts/ci/deno_tests js_tests: runs-on: ubuntu-latest steps: diff --git a/rust/automerge-wasm/.gitignore b/rust/automerge-wasm/.gitignore index ab957e1c..77c11e08 100644 --- a/rust/automerge-wasm/.gitignore +++ b/rust/automerge-wasm/.gitignore @@ -1,5 +1,6 @@ /node_modules /bundler /nodejs +/deno Cargo.lock yarn.lock diff --git a/rust/automerge-wasm/deno-tests/deno.ts b/rust/automerge-wasm/deno-tests/deno.ts new file mode 100644 index 00000000..1b4c2e07 --- /dev/null +++ b/rust/automerge-wasm/deno-tests/deno.ts @@ -0,0 +1,8 @@ +// @deno-types="../index.d.ts" +import { create } from '../deno/automerge_wasm.js' + +Deno.test("It should create, clone and free", () => { + const doc1 = create() + const doc2 = doc1.clone() + doc2.free() +}); diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 9a98ad32..1caa5a00 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -17,6 +17,8 @@ "index.d.ts", "nodejs/automerge_wasm.js", "nodejs/automerge_wasm_bg.wasm", + "deno/automerge_wasm.js", + "deno/automerge_wasm_bg.wasm", "bundler/automerge_wasm.js", "bundler/automerge_wasm_bg.js", "bundler/automerge_wasm_bg.wasm" @@ -30,7 +32,7 @@ "debug": "cross-env PROFILE=dev TARGET_DIR=debug yarn buildall", "build": "cross-env PROFILE=dev TARGET_DIR=debug FEATURES='' yarn buildall", "release": "cross-env PROFILE=release TARGET_DIR=release yarn buildall", - "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=bundler yarn target", + "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=bundler yarn target && cross-env TARGET=deno yarn target", "target": "rimraf ./$TARGET && yarn compile && yarn bindgen && yarn opt", "compile": "cargo build --target wasm32-unknown-unknown --profile 
$PROFILE", "bindgen": "wasm-bindgen --no-typescript --weak-refs --target $TARGET --out-dir $TARGET ../target/wasm32-unknown-unknown/$TARGET_DIR/automerge_wasm.wasm", diff --git a/scripts/ci/deno_tests b/scripts/ci/deno_tests new file mode 100755 index 00000000..bc655468 --- /dev/null +++ b/scripts/ci/deno_tests @@ -0,0 +1,6 @@ +THIS_SCRIPT=$(dirname "$0"); +WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; + +yarn --cwd $WASM_PROJECT install; +yarn --cwd $WASM_PROJECT build; +deno test $WASM_PROJECT/deno-tests/deno.ts --allow-read diff --git a/scripts/ci/run b/scripts/ci/run index 926e60d7..db3f1aaf 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -7,5 +7,6 @@ set -eou pipefail ./scripts/ci/rust-docs ./scripts/ci/advisory ./scripts/ci/wasm_tests +./scripts/ci/deno_tests ./scripts/ci/js_tests ./scripts/ci/cmake-build Release static From 0ab6a770d82785464043505a548a6f41cb593e0b Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 1 Dec 2022 23:22:12 +0000 Subject: [PATCH 656/730] wasm: improve error messages The error messages produced by various conversions in `automerge-wasm` were quite uninformative - often consisting of just returning the offending value with no description of the problem. The logic of these error messages was often hard to trace due to the use of `JsValue` to represent both error conditions and valid values - evidenced by most of the public functions of `automerge-wasm` having return types of `Result`. Change these return types to mention specific errors, thus enlisting the compilers help in ensuring that specific error messages are emitted. 
--- rust/automerge-wasm/Cargo.toml | 1 + rust/automerge-wasm/src/interop.rs | 623 +++++++++++++++++++------ rust/automerge-wasm/src/lib.rs | 686 ++++++++++++++++++++-------- rust/automerge-wasm/src/observer.rs | 4 +- rust/automerge-wasm/src/sync.rs | 22 +- rust/automerge-wasm/src/value.rs | 28 +- rust/automerge/src/lib.rs | 2 +- rust/automerge/src/sync.rs | 2 +- 8 files changed, 1035 insertions(+), 333 deletions(-) diff --git a/rust/automerge-wasm/Cargo.toml b/rust/automerge-wasm/Cargo.toml index 02232ab8..3d2fafe4 100644 --- a/rust/automerge-wasm/Cargo.toml +++ b/rust/automerge-wasm/Cargo.toml @@ -34,6 +34,7 @@ serde_bytes = "0.11.5" hex = "^0.4.3" regex = "^1.5" itertools = "^0.10.3" +thiserror = "^1.0.16" [dependencies.wasm-bindgen] version = "^0.2.83" diff --git a/rust/automerge-wasm/src/interop.rs b/rust/automerge-wasm/src/interop.rs index 84b827b7..24b34cd2 100644 --- a/rust/automerge-wasm/src/interop.rs +++ b/rust/automerge-wasm/src/interop.rs @@ -101,88 +101,146 @@ impl From>> for JS { } impl TryFrom for HashSet { - type Error = JsValue; + type Error = error::BadChangeHashSet; fn try_from(value: JS) -> Result { - let mut result = HashSet::new(); - for key in Reflect::own_keys(&value.0)?.iter() { - if let Some(true) = Reflect::get(&value.0, &key)?.as_bool() { - result.insert(serde_wasm_bindgen::from_value(key).map_err(to_js_err)?); - } - } - Ok(result) + let result = HashSet::new(); + fold_hash_set(result, &value.0, |mut set, hash| { + set.insert(hash); + set + }) } } impl TryFrom for BTreeSet { - type Error = JsValue; + type Error = error::BadChangeHashSet; fn try_from(value: JS) -> Result { - let mut result = BTreeSet::new(); - for key in Reflect::own_keys(&value.0)?.iter() { - if let Some(true) = Reflect::get(&value.0, &key)?.as_bool() { - result.insert(serde_wasm_bindgen::from_value(key).map_err(to_js_err)?); - } + let result = BTreeSet::new(); + fold_hash_set(result, &value.0, |mut set, hash| { + set.insert(hash); + set + }) + } +} + +fn 
fold_hash_set(init: O, val: &JsValue, f: F) -> Result +where + F: Fn(O, ChangeHash) -> O, +{ + let mut result = init; + for key in Reflect::own_keys(val) + .map_err(|_| error::BadChangeHashSet::ListProp)? + .iter() + { + if let Some(true) = js_get(val, &key)?.0.as_bool() { + let hash = ChangeHash::try_from(JS(key.clone())) + .map_err(|e| error::BadChangeHashSet::BadHash(key, e))?; + result = f(result, hash); + } + } + Ok(result) +} + +impl TryFrom for ChangeHash { + type Error = error::BadChangeHash; + + fn try_from(value: JS) -> Result { + if let Some(s) = value.0.as_string() { + Ok(s.parse()?) + } else { + Err(error::BadChangeHash::NotString) + } + } +} + +impl TryFrom for Option> { + type Error = error::BadChangeHashes; + + fn try_from(value: JS) -> Result { + if value.0.is_null() { + Ok(None) + } else { + Vec::::try_from(value).map(Some) } - Ok(result) } } impl TryFrom for Vec { - type Error = JsValue; + type Error = error::BadChangeHashes; fn try_from(value: JS) -> Result { - let value = value.0.dyn_into::()?; - let value: Result, _> = - value.iter().map(serde_wasm_bindgen::from_value).collect(); - let value = value.map_err(to_js_err)?; + let value = value + .0 + .dyn_into::() + .map_err(|_| error::BadChangeHashes::NotArray)?; + let value = value + .iter() + .enumerate() + .map(|(i, v)| { + ChangeHash::try_from(JS(v)).map_err(|e| error::BadChangeHashes::BadElem(i, e)) + }) + .collect::, _>>()?; Ok(value) } } -impl From for Option> { - fn from(value: JS) -> Self { - let value = value.0.dyn_into::().ok()?; - let value: Result, _> = - value.iter().map(serde_wasm_bindgen::from_value).collect(); - let value = value.ok()?; - Some(value) - } -} - impl TryFrom for Vec { - type Error = JsValue; + type Error = error::BadJSChanges; fn try_from(value: JS) -> Result { - let value = value.0.dyn_into::()?; - let changes: Result, _> = value.iter().map(|j| j.dyn_into()).collect(); - let changes = changes?; - let changes = changes.iter().try_fold(Vec::new(), |mut acc, arr| { - 
match automerge::Change::try_from(arr.to_vec().as_slice()) { - Ok(c) => acc.push(c), - Err(e) => return Err(to_js_err(e)), - } - Ok(acc) - })?; + let value = value + .0 + .dyn_into::() + .map_err(|_| error::BadJSChanges::ChangesNotArray)?; + let changes = value + .iter() + .enumerate() + .map(|(i, j)| { + j.dyn_into().map_err::(|_| { + error::BadJSChanges::ElemNotUint8Array(i) + }) + }) + .collect::, _>>()?; + let changes = changes + .iter() + .enumerate() + .map(|(i, arr)| { + automerge::Change::try_from(arr.to_vec().as_slice()) + .map_err(|e| error::BadJSChanges::BadChange(i, e)) + }) + .collect::, _>>()?; Ok(changes) } } impl TryFrom for am::sync::State { - type Error = JsValue; + type Error = error::BadSyncState; fn try_from(value: JS) -> Result { let value = value.0; - let shared_heads = js_get(&value, "sharedHeads")?.try_into()?; - let last_sent_heads = js_get(&value, "lastSentHeads")?.try_into()?; - let their_heads = js_get(&value, "theirHeads")?.into(); - let their_need = js_get(&value, "theirNeed")?.into(); - let their_have = js_get(&value, "theirHave")?.try_into()?; - let sent_hashes = js_get(&value, "sentHashes")?.try_into()?; + let shared_heads = js_get(&value, "sharedHeads")? + .try_into() + .map_err(error::BadSyncState::BadSharedHeads)?; + let last_sent_heads = js_get(&value, "lastSentHeads")? + .try_into() + .map_err(error::BadSyncState::BadLastSentHeads)?; + let their_heads = js_get(&value, "theirHeads")? + .try_into() + .map_err(error::BadSyncState::BadTheirHeads)?; + let their_need = js_get(&value, "theirNeed")? + .try_into() + .map_err(error::BadSyncState::BadTheirNeed)?; + let their_have = js_get(&value, "theirHave")? + .try_into() + .map_err(error::BadSyncState::BadTheirHave)?; + let sent_hashes = js_get(&value, "sentHashes")? + .try_into() + .map_err(error::BadSyncState::BadSentHashes)?; let in_flight = js_get(&value, "inFlight")? 
.0 .as_bool() - .ok_or_else(|| JsValue::from_str("SyncState.inFLight must be a boolean"))?; + .ok_or(error::BadSyncState::InFlightNotBoolean)?; Ok(am::sync::State { shared_heads, last_sent_heads, @@ -195,8 +253,22 @@ impl TryFrom for am::sync::State { } } +impl TryFrom for am::sync::Have { + type Error = error::BadHave; + + fn try_from(value: JS) -> Result { + let last_sync = js_get(&value.0, "lastSync")? + .try_into() + .map_err(error::BadHave::BadLastSync)?; + let bloom = js_get(&value.0, "bloom")? + .try_into() + .map_err(error::BadHave::BadBloom)?; + Ok(am::sync::Have { last_sync, bloom }) + } +} + impl TryFrom for Option> { - type Error = JsValue; + type Error = error::BadHaves; fn try_from(value: JS) -> Result { if value.0.is_null() { @@ -208,34 +280,57 @@ impl TryFrom for Option> { } impl TryFrom for Vec { - type Error = JsValue; + type Error = error::BadHaves; fn try_from(value: JS) -> Result { - let value = value.0.dyn_into::()?; - let have: Result, JsValue> = value + let value = value + .0 + .dyn_into::() + .map_err(|_| error::BadHaves::NotArray)?; + let have = value .iter() - .map(|s| { - let last_sync = js_get(&s, "lastSync")?.try_into()?; - let bloom = js_get(&s, "bloom")?.try_into()?; - Ok(am::sync::Have { last_sync, bloom }) - }) - .collect(); - let have = have?; + .enumerate() + .map(|(i, s)| JS(s).try_into().map_err(|e| error::BadHaves::BadElem(i, e))) + .collect::, _>>()?; Ok(have) } } impl TryFrom for am::sync::BloomFilter { - type Error = JsValue; + type Error = error::BadBloom; fn try_from(value: JS) -> Result { - let value: Uint8Array = value.0.dyn_into()?; + let value: Uint8Array = value + .0 + .dyn_into() + .map_err(|_| error::BadBloom::NotU8Array)?; let value = value.to_vec(); - let value = value.as_slice().try_into().map_err(to_js_err)?; + let value = value.as_slice().try_into()?; Ok(value) } } +impl TryFrom for am::sync::Message { + type Error = error::BadSyncMessage; + + fn try_from(value: JS) -> Result { + let heads = js_get(&value.0, 
"heads")? + .try_into() + .map_err(error::BadSyncMessage::BadHeads)?; + let need = js_get(&value.0, "need")? + .try_into() + .map_err(error::BadSyncMessage::BadNeed)?; + let changes = js_get(&value.0, "changes")?.try_into()?; + let have = js_get(&value.0, "have")?.try_into()?; + Ok(am::sync::Message { + heads, + need, + have, + changes, + }) + } +} + impl From<&[ChangeHash]> for AR { fn from(value: &[ChangeHash]) -> Self { AR(value @@ -281,21 +376,47 @@ pub(crate) fn to_js_err(err: T) -> JsValue { js_sys::Error::new(&std::format!("{}", err)).into() } -pub(crate) fn js_get>(obj: J, prop: &str) -> Result { - Ok(JS(Reflect::get(&obj.into(), &prop.into())?)) +pub(crate) fn js_get, S: std::fmt::Debug + Into>( + obj: J, + prop: S, +) -> Result { + let prop = prop.into(); + Ok(JS(Reflect::get(&obj.into(), &prop).map_err(|e| { + error::GetProp { + property: format!("{:?}", prop), + error: e, + } + })?)) } -pub(crate) fn js_set>(obj: &JsValue, prop: &str, val: V) -> Result { - Reflect::set(obj, &prop.into(), &val.into()) +pub(crate) fn js_set, S: std::fmt::Debug + Into>( + obj: &JsValue, + prop: S, + val: V, +) -> Result { + let prop = prop.into(); + Reflect::set(obj, &prop, &val.into()).map_err(|e| error::SetProp { + property: prop, + error: e, + }) } -pub(crate) fn to_prop(p: JsValue) -> Result { +pub(crate) fn js_get_symbol>(obj: J, prop: &Symbol) -> Result { + Ok(JS(Reflect::get(&obj.into(), &prop.into()).map_err( + |e| error::GetProp { + property: format!("{}", prop.to_string()), + error: e, + }, + )?)) +} + +pub(crate) fn to_prop(p: JsValue) -> Result { if let Some(s) = p.as_string() { Ok(Prop::Map(s)) } else if let Some(n) = p.as_f64() { Ok(Prop::Seq(n as usize)) } else { - Err(to_js_err("prop must me a string or number")) + Err(super::error::InvalidProp) } } @@ -362,11 +483,19 @@ pub(crate) fn to_objtype( } } -pub(crate) fn get_heads(heads: Option) -> Option> { - let heads = heads?; - let heads: Result, _> = - 
heads.iter().map(serde_wasm_bindgen::from_value).collect(); - heads.ok() +pub(crate) fn get_heads( + heads: Option, +) -> Result>, error::BadChangeHashes> { + heads + .map(|h| { + h.iter() + .enumerate() + .map(|(i, v)| { + ChangeHash::try_from(JS(v)).map_err(|e| error::BadChangeHashes::BadElem(i, e)) + }) + .collect() + }) + .transpose() } impl Automerge { @@ -376,7 +505,7 @@ impl Automerge { datatype: Datatype, heads: Option<&Vec>, meta: &JsValue, - ) -> Result { + ) -> Result { let result = if datatype.is_sequence() { self.wrap_object( self.export_list(obj, heads, meta)?, @@ -400,7 +529,7 @@ impl Automerge { obj: &ObjId, heads: Option<&Vec>, meta: &JsValue, - ) -> Result { + ) -> Result { let keys = self.doc.keys(obj); let map = Object::new(); for k in keys { @@ -414,7 +543,7 @@ impl Automerge { Value::Object(o) => self.export_object(&id, o.into(), heads, meta)?, Value::Scalar(_) => self.export_value(alloc(&val))?, }; - Reflect::set(&map, &k.into(), &subval)?; + js_set(&map, &k, &subval)?; }; } @@ -426,7 +555,7 @@ impl Automerge { obj: &ObjId, heads: Option<&Vec>, meta: &JsValue, - ) -> Result { + ) -> Result { let len = self.doc.length(obj); let array = Array::new(); for i in 0..len { @@ -450,9 +579,11 @@ impl Automerge { pub(crate) fn export_value( &self, (datatype, raw_value): (Datatype, JsValue), - ) -> Result { + ) -> Result { if let Some(function) = self.external_types.get(&datatype) { - let wrapped_value = function.call1(&JsValue::undefined(), &raw_value)?; + let wrapped_value = function + .call1(&JsValue::undefined(), &raw_value) + .map_err(|e| error::Export::CallDataHandler(datatype.to_string(), e))?; if let Ok(o) = wrapped_value.dyn_into::() { let key = Symbol::for_(RAW_DATA_SYMBOL); set_hidden_value(&o, &key, &raw_value)?; @@ -460,10 +591,7 @@ impl Automerge { set_hidden_value(&o, &key, datatype)?; Ok(o.into()) } else { - Err(to_js_err(format!( - "data handler for type {} did not return a valid object", - datatype - ))) + 
Err(error::Export::InvalidDataHandler(datatype.to_string())) } } else { Ok(raw_value) @@ -473,12 +601,14 @@ impl Automerge { pub(crate) fn unwrap_object( &self, ext_val: &Object, - ) -> Result<(Object, Datatype, JsValue), JsValue> { - let inner = Reflect::get(ext_val, &Symbol::for_(RAW_DATA_SYMBOL))?; + ) -> Result<(Object, Datatype, JsValue), error::Export> { + let inner = js_get_symbol(ext_val, &Symbol::for_(RAW_DATA_SYMBOL))?.0; - let datatype = Reflect::get(ext_val, &Symbol::for_(DATATYPE_SYMBOL))?.try_into(); + let datatype = js_get_symbol(ext_val, &Symbol::for_(DATATYPE_SYMBOL))? + .0 + .try_into(); - let mut id = Reflect::get(ext_val, &Symbol::for_(RAW_OBJECT_SYMBOL))?; + let mut id = js_get_symbol(ext_val, &Symbol::for_(RAW_OBJECT_SYMBOL))?.0; if id.is_undefined() { id = "_root".into(); } @@ -496,8 +626,8 @@ impl Automerge { Ok((inner, datatype, id)) } - pub(crate) fn unwrap_scalar(&self, ext_val: JsValue) -> Result { - let inner = Reflect::get(&ext_val, &Symbol::for_(RAW_DATA_SYMBOL))?; + pub(crate) fn unwrap_scalar(&self, ext_val: JsValue) -> Result { + let inner = js_get_symbol(&ext_val, &Symbol::for_(RAW_DATA_SYMBOL))?.0; if !inner.is_undefined() { Ok(inner) } else { @@ -510,7 +640,7 @@ impl Automerge { (datatype, raw_value): (Datatype, JsValue), id: &ObjId, meta: &JsValue, - ) -> Result { + ) -> Result { if let Ok(obj) = raw_value.clone().dyn_into::() { let result = self.wrap_object(obj, datatype, &id.to_string().into(), meta)?; Ok(result.into()) @@ -525,15 +655,14 @@ impl Automerge { datatype: Datatype, id: &JsValue, meta: &JsValue, - ) -> Result { + ) -> Result { let value = if let Some(function) = self.external_types.get(&datatype) { - let wrapped_value = function.call1(&JsValue::undefined(), &value)?; - let wrapped_object = wrapped_value.dyn_into::().map_err(|_| { - to_js_err(format!( - "data handler for type {} did not return a valid object", - datatype - )) - })?; + let wrapped_value = function + .call1(&JsValue::undefined(), &value) + 
.map_err(|e| error::Export::CallDataHandler(datatype.to_string(), e))?; + let wrapped_object = wrapped_value + .dyn_into::() + .map_err(|_| error::Export::InvalidDataHandler(datatype.to_string()))?; set_hidden_value(&wrapped_object, &Symbol::for_(RAW_DATA_SYMBOL), value)?; wrapped_object } else { @@ -555,35 +684,39 @@ impl Automerge { array: &Object, patch: &Patch, meta: &JsValue, - ) -> Result { + ) -> Result { let result = Array::from(array); // shallow copy match patch { Patch::PutSeq { index, value, .. } => { let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; - Reflect::set(&result, &(*index as f64).into(), &sub_val)?; + js_set(&result, *index as f64, &sub_val)?; Ok(result.into()) } - Patch::DeleteSeq { index, .. } => self.sub_splice(result, *index, 1, vec![], meta), - Patch::Insert { index, values, .. } => self.sub_splice(result, *index, 0, values, meta), + Patch::DeleteSeq { index, .. } => { + Ok(self.sub_splice(result, *index, 1, vec![], meta)?) + } + Patch::Insert { index, values, .. } => { + Ok(self.sub_splice(result, *index, 0, values, meta)?) + } Patch::Increment { prop, value, .. } => { if let Prop::Seq(index) = prop { - let index = (*index as f64).into(); - let old_val = Reflect::get(&result, &index)?; + let index = *index as f64; + let old_val = js_get(&result, index)?.0; let old_val = self.unwrap_scalar(old_val)?; if let Some(old) = old_val.as_f64() { let new_value: Value<'_> = am::ScalarValue::counter(old as i64 + *value).into(); - Reflect::set(&result, &index, &self.export_value(alloc(&new_value))?)?; + js_set(&result, index, &self.export_value(alloc(&new_value))?)?; Ok(result.into()) } else { - Err(to_js_err("cant increment a non number value")) + Err(error::ApplyPatch::IncrementNonNumeric) } } else { - Err(to_js_err("cant increment a key on a seq")) + Err(error::ApplyPatch::IncrementKeyInSeq) } } - Patch::DeleteMap { .. } => Err(to_js_err("cannot delete from a seq")), - Patch::PutMap { .. 
} => Err(to_js_err("cannot set key in seq")), + Patch::DeleteMap { .. } => Err(error::ApplyPatch::DeleteKeyFromSeq), + Patch::PutMap { .. } => Err(error::ApplyPatch::PutKeyInSeq), } } @@ -592,38 +725,42 @@ impl Automerge { map: &Object, patch: &Patch, meta: &JsValue, - ) -> Result { + ) -> Result { let result = Object::assign(&Object::new(), map); // shallow copy match patch { Patch::PutMap { key, value, .. } => { let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; - Reflect::set(&result, &key.into(), &sub_val)?; + js_set(&result, key, &sub_val)?; Ok(result) } Patch::DeleteMap { key, .. } => { - Reflect::delete_property(&result, &key.into())?; + Reflect::delete_property(&result, &key.into()).map_err(|e| { + error::Export::Delete { + prop: key.to_string(), + err: e, + } + })?; Ok(result) } Patch::Increment { prop, value, .. } => { if let Prop::Map(key) = prop { - let key = key.into(); - let old_val = Reflect::get(&result, &key)?; + let old_val = js_get(&result, key)?.0; let old_val = self.unwrap_scalar(old_val)?; if let Some(old) = old_val.as_f64() { let new_value: Value<'_> = am::ScalarValue::counter(old as i64 + *value).into(); - Reflect::set(&result, &key, &self.export_value(alloc(&new_value))?)?; + js_set(&result, key, &self.export_value(alloc(&new_value))?)?; Ok(result) } else { - Err(to_js_err("cant increment a non number value")) + Err(error::ApplyPatch::IncrementNonNumeric) } } else { - Err(to_js_err("cant increment an index on a map")) + Err(error::ApplyPatch::IncrementIndexInMap) } } - Patch::Insert { .. } => Err(to_js_err("cannot insert into map")), - Patch::DeleteSeq { .. } => Err(to_js_err("cannot splice a map")), - Patch::PutSeq { .. } => Err(to_js_err("cannot array index a map")), + Patch::Insert { .. } => Err(error::ApplyPatch::InsertInMap), + Patch::DeleteSeq { .. } => Err(error::ApplyPatch::SpliceInMap), + Patch::PutSeq { .. 
} => Err(error::ApplyPatch::PutIdxInMap), } } @@ -633,14 +770,14 @@ impl Automerge { patch: &Patch, depth: usize, meta: &JsValue, - ) -> Result { + ) -> Result { let (inner, datatype, id) = self.unwrap_object(&obj)?; let prop = patch.path().get(depth).map(|p| prop_to_js(&p.1)); let result = if let Some(prop) = prop { - if let Ok(sub_obj) = Reflect::get(&inner, &prop)?.dyn_into::() { + if let Ok(sub_obj) = js_get(&inner, &prop)?.0.dyn_into::() { let new_value = self.apply_patch(sub_obj, patch, depth + 1, meta)?; let result = shallow_copy(&inner); - Reflect::set(&result, &prop, &new_value)?; + js_set(&result, &prop, &new_value)?; Ok(result) } else { // if a patch is trying to access a deleted object make no change @@ -654,6 +791,7 @@ impl Automerge { }?; self.wrap_object(result, datatype, &id, meta) + .map_err(|e| e.into()) } fn sub_splice<'a, I: IntoIterator, ObjId)>>( @@ -663,15 +801,18 @@ impl Automerge { num_del: usize, values: I, meta: &JsValue, - ) -> Result { + ) -> Result { let args: Array = values .into_iter() .map(|v| self.maybe_wrap_object(alloc(&v.0), &v.1, meta)) .collect::>()?; args.unshift(&(num_del as u32).into()); args.unshift(&(index as u32).into()); - let method = Reflect::get(&o, &"splice".into())?.dyn_into::()?; - Reflect::apply(&method, &o, &args)?; + let method = js_get(&o, "splice")? 
+ .0 + .dyn_into::() + .map_err(error::Export::GetSplice)?; + Reflect::apply(&method, &o, &args).map_err(error::Export::CallSplice)?; Ok(o.into()) } } @@ -705,12 +846,17 @@ pub(crate) fn alloc(value: &Value<'_>) -> (Datatype, JsValue) { } } -fn set_hidden_value>(o: &Object, key: &Symbol, value: V) -> Result<(), JsValue> { +fn set_hidden_value>( + o: &Object, + key: &Symbol, + value: V, +) -> Result<(), error::Export> { let definition = Object::new(); - js_set(&definition, "value", &value.into())?; - js_set(&definition, "writable", false)?; - js_set(&definition, "enumerable", false)?; - js_set(&definition, "configurable", false)?; + js_set(&definition, "value", &value.into()).map_err(|_| error::Export::SetHidden("value"))?; + js_set(&definition, "writable", false).map_err(|_| error::Export::SetHidden("writable"))?; + js_set(&definition, "enumerable", false).map_err(|_| error::Export::SetHidden("enumerable"))?; + js_set(&definition, "configurable", false) + .map_err(|_| error::Export::SetHidden("configurable"))?; Object::define_property(o, &key.into(), &definition); Ok(()) } @@ -729,3 +875,216 @@ fn prop_to_js(prop: &Prop) -> JsValue { Prop::Seq(index) => (*index as f64).into(), } } + +pub(crate) mod error { + use automerge::LoadChangeError; + use wasm_bindgen::JsValue; + + #[derive(Debug, thiserror::Error)] + pub enum BadJSChanges { + #[error("the changes were not an array of Uint8Array")] + ChangesNotArray, + #[error("change {0} was not a Uint8Array")] + ElemNotUint8Array(usize), + #[error("error loading change {0}: {1}")] + BadChange(usize, LoadChangeError), + } + + #[derive(Debug, thiserror::Error)] + pub enum BadChangeHashes { + #[error("the change hashes were not an array of strings")] + NotArray, + #[error("could not decode hash {0}: {1}")] + BadElem(usize, BadChangeHash), + } + + impl From for JsValue { + fn from(e: BadChangeHashes) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum BadChangeHashSet { + 
#[error("not an object")] + NotObject, + #[error(transparent)] + GetProp(#[from] GetProp), + #[error("unable to getOwnProperties")] + ListProp, + #[error("unable to parse hash from {0:?}: {1}")] + BadHash(wasm_bindgen::JsValue, BadChangeHash), + } + + #[derive(Debug, thiserror::Error)] + pub enum BadChangeHash { + #[error("change hash was not a string")] + NotString, + #[error(transparent)] + Parse(#[from] automerge::ParseChangeHashError), + } + + impl From for JsValue { + fn from(e: BadChangeHash) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum BadSyncState { + #[error(transparent)] + GetProp(#[from] GetProp), + #[error("bad sharedHeads: {0}")] + BadSharedHeads(BadChangeHashes), + #[error("bad lastSentHeads: {0}")] + BadLastSentHeads(BadChangeHashes), + #[error("bad theirHeads: {0}")] + BadTheirHeads(BadChangeHashes), + #[error("bad theirNeed: {0}")] + BadTheirNeed(BadChangeHashes), + #[error("bad theirHave: {0}")] + BadTheirHave(BadHaves), + #[error("bad sentHashes: {0}")] + BadSentHashes(BadChangeHashSet), + #[error("inFlight not a boolean")] + InFlightNotBoolean, + } + + impl From for JsValue { + fn from(e: BadSyncState) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + #[error("unable to get property {property}: {error:?}")] + pub struct GetProp { + pub(super) property: String, + pub(super) error: wasm_bindgen::JsValue, + } + + impl From for JsValue { + fn from(e: GetProp) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + #[error("error setting property {property:?} on JS value: {error:?}")] + pub struct SetProp { + pub(super) property: JsValue, + pub(super) error: JsValue, + } + + impl From for JsValue { + fn from(e: SetProp) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum BadHave { + #[error("bad lastSync: {0}")] + BadLastSync(BadChangeHashes), + #[error("bad bloom: {0}")] + 
BadBloom(BadBloom), + #[error(transparent)] + GetHaveProp(#[from] GetProp), + } + + #[derive(Debug, thiserror::Error)] + pub enum BadHaves { + #[error("value was not an array")] + NotArray, + #[error("error loading have at index {0}: {1}")] + BadElem(usize, BadHave), + } + + #[derive(Debug, thiserror::Error)] + pub enum BadBloom { + #[error("the value was not a Uint8Array")] + NotU8Array, + #[error("unable to decode: {0}")] + Decode(#[from] automerge::sync::DecodeBloomError), + } + + #[derive(Debug, thiserror::Error)] + pub enum Export { + #[error(transparent)] + Set(#[from] SetProp), + #[error("unable to delete prop {prop}: {err:?}")] + Delete { prop: String, err: JsValue }, + #[error("unable to set hidden property {0}")] + SetHidden(&'static str), + #[error("data handler for type {0} did not return a valid object")] + InvalidDataHandler(String), + #[error("error calling data handler for type {0}: {1:?}")] + CallDataHandler(String, JsValue), + #[error(transparent)] + GetProp(#[from] GetProp), + #[error(transparent)] + InvalidDatatype(#[from] crate::value::InvalidDatatype), + #[error("unable to get the splice function: {0:?}")] + GetSplice(JsValue), + #[error("error calling splice: {0:?}")] + CallSplice(JsValue), + } + + impl From for JsValue { + fn from(e: Export) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum ApplyPatch { + #[error(transparent)] + Export(#[from] Export), + #[error("cannot delete from a seq")] + DeleteKeyFromSeq, + #[error("cannot put key in seq")] + PutKeyInSeq, + #[error("cannot increment a non-numeric value")] + IncrementNonNumeric, + #[error("cannot increment a key in a seq")] + IncrementKeyInSeq, + #[error("cannot increment index in a map")] + IncrementIndexInMap, + #[error("cannot insert into a map")] + InsertInMap, + #[error("cannot splice into a map")] + SpliceInMap, + #[error("cannot put a seq index in a map")] + PutIdxInMap, + #[error(transparent)] + GetProp(#[from] GetProp), + 
#[error(transparent)] + SetProp(#[from] SetProp), + } + + impl From for JsValue { + fn from(e: ApplyPatch) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum BadSyncMessage { + #[error(transparent)] + GetProp(#[from] GetProp), + #[error("unable to read haves: {0}")] + BadHaves(#[from] BadHaves), + #[error("could not read changes: {0}")] + BadJSChanges(#[from] BadJSChanges), + #[error("could not read heads: {0}")] + BadHeads(BadChangeHashes), + #[error("could not read need: {0}")] + BadNeed(BadChangeHashes), + } + + impl From for JsValue { + fn from(e: BadSyncMessage) -> Self { + JsValue::from(e.to_string()) + } + } +} diff --git a/rust/automerge-wasm/src/lib.rs b/rust/automerge-wasm/src/lib.rs index d03f7226..22cdb685 100644 --- a/rust/automerge-wasm/src/lib.rs +++ b/rust/automerge-wasm/src/lib.rs @@ -44,7 +44,7 @@ mod value; use observer::Observer; -use interop::{alloc, get_heads, js_get, js_set, to_js_err, to_objtype, to_prop, AR, JS}; +use interop::{alloc, get_heads, js_set, to_js_err, to_objtype, to_prop, AR, JS}; use sync::SyncState; use value::Datatype; @@ -71,10 +71,10 @@ pub struct Automerge { #[wasm_bindgen] impl Automerge { - pub fn new(actor: Option) -> Result { + pub fn new(actor: Option) -> Result { let mut doc = AutoCommit::default(); if let Some(a) = actor { - let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec()); + let a = automerge::ActorId::from(hex::decode(a)?.to_vec()); doc.set_actor(a); } Ok(Automerge { @@ -85,20 +85,24 @@ impl Automerge { } #[allow(clippy::should_implement_trait)] - pub fn clone(&mut self, actor: Option) -> Result { + pub fn clone(&mut self, actor: Option) -> Result { let mut automerge = Automerge { doc: self.doc.clone(), freeze: self.freeze, external_types: self.external_types.clone(), }; if let Some(s) = actor { - let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); + let actor = 
automerge::ActorId::from(hex::decode(s)?.to_vec()); automerge.doc.set_actor(actor); } Ok(automerge) } - pub fn fork(&mut self, actor: Option, heads: JsValue) -> Result { + pub fn fork( + &mut self, + actor: Option, + heads: JsValue, + ) -> Result { let heads: Result, _> = JS(heads).try_into(); let doc = if let Ok(heads) = heads { self.doc.fork_at(&heads)? @@ -111,7 +115,8 @@ impl Automerge { external_types: self.external_types.clone(), }; if let Some(s) = actor { - let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); + let actor = + automerge::ActorId::from(hex::decode(s).map_err(error::BadActorId::from)?.to_vec()); automerge.doc.set_actor(actor); } Ok(automerge) @@ -137,7 +142,7 @@ impl Automerge { } } - pub fn merge(&mut self, other: &mut Automerge) -> Result { + pub fn merge(&mut self, other: &mut Automerge) -> Result { let heads = self.doc.merge(&mut other.doc)?; let heads: Array = heads .iter() @@ -150,9 +155,9 @@ impl Automerge { self.doc.rollback() as f64 } - pub fn keys(&self, obj: JsValue, heads: Option) -> Result { - let obj = self.import(obj)?; - let result = if let Some(heads) = get_heads(heads) { + pub fn keys(&self, obj: JsValue, heads: Option) -> Result { + let (obj, _) = self.import(obj)?; + let result = if let Some(heads) = get_heads(heads)? { self.doc .keys_at(&obj, &heads) .map(|s| JsValue::from_str(&s)) @@ -163,9 +168,9 @@ impl Automerge { Ok(result) } - pub fn text(&self, obj: JsValue, heads: Option) -> Result { - let obj = self.import(obj)?; - if let Some(heads) = get_heads(heads) { + pub fn text(&self, obj: JsValue, heads: Option) -> Result { + let (obj, _) = self.import(obj)?; + if let Some(heads) = get_heads(heads)? { Ok(self.doc.text_at(&obj, &heads)?) } else { Ok(self.doc.text(&obj)?) 
@@ -178,46 +183,57 @@ impl Automerge { start: f64, delete_count: f64, text: JsValue, - ) -> Result<(), JsValue> { - let obj = self.import(obj)?; + ) -> Result<(), error::Splice> { + let (obj, obj_type) = self.import(obj)?; let start = start as usize; let delete_count = delete_count as usize; - let mut vals = vec![]; if let Some(t) = text.as_string() { - self.doc.splice_text(&obj, start, delete_count, &t)?; - } else { - if let Ok(array) = text.dyn_into::() { - for i in array.iter() { - let value = self - .import_scalar(&i, &None) - .ok_or_else(|| to_js_err("expected scalar"))?; - vals.push(value); - } + if obj_type == ObjType::Text { + self.doc.splice_text(&obj, start, delete_count, &t)?; + return Ok(()); } - self.doc - .splice(&obj, start, delete_count, vals.into_iter())?; } - Ok(()) + let mut vals = vec![]; + if let Ok(array) = text.dyn_into::() { + for (index, i) in array.iter().enumerate() { + let value = self + .import_scalar(&i, &None) + .ok_or(error::Splice::ValueNotPrimitive(index))?; + vals.push(value); + } + } + Ok(self + .doc + .splice(&obj, start, delete_count, vals.into_iter())?) 
} - pub fn push(&mut self, obj: JsValue, value: JsValue, datatype: JsValue) -> Result<(), JsValue> { - let obj = self.import(obj)?; + pub fn push( + &mut self, + obj: JsValue, + value: JsValue, + datatype: JsValue, + ) -> Result<(), error::Insert> { + let (obj, _) = self.import(obj)?; let value = self .import_scalar(&value, &datatype.as_string()) - .ok_or_else(|| to_js_err("invalid scalar value"))?; + .ok_or(error::Insert::ValueNotPrimitive)?; let index = self.doc.length(&obj); self.doc.insert(&obj, index, value)?; Ok(()) } #[wasm_bindgen(js_name = pushObject)] - pub fn push_object(&mut self, obj: JsValue, value: JsValue) -> Result, JsValue> { - let obj = self.import(obj)?; + pub fn push_object( + &mut self, + obj: JsValue, + value: JsValue, + ) -> Result, error::InsertObject> { + let (obj, _) = self.import(obj)?; let (value, subvals) = - to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; + to_objtype(&value, &None).ok_or(error::InsertObject::ValueNotObject)?; let index = self.doc.length(&obj); let opid = self.doc.insert_object(&obj, index, value)?; - self.subset(&opid, subvals)?; + self.subset::(&opid, subvals)?; Ok(opid.to_string().into()) } @@ -227,12 +243,12 @@ impl Automerge { index: f64, value: JsValue, datatype: JsValue, - ) -> Result<(), JsValue> { - let obj = self.import(obj)?; + ) -> Result<(), error::Insert> { + let (obj, _) = self.import(obj)?; let index = index as f64; let value = self .import_scalar(&value, &datatype.as_string()) - .ok_or_else(|| to_js_err("expected scalar value"))?; + .ok_or(error::Insert::ValueNotPrimitive)?; self.doc.insert(&obj, index as usize, value)?; Ok(()) } @@ -243,13 +259,13 @@ impl Automerge { obj: JsValue, index: f64, value: JsValue, - ) -> Result, JsValue> { - let obj = self.import(obj)?; + ) -> Result, error::InsertObject> { + let (obj, _) = self.import(obj)?; let index = index as f64; let (value, subvals) = - to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; + 
to_objtype(&value, &None).ok_or(error::InsertObject::ValueNotObject)?; let opid = self.doc.insert_object(&obj, index as usize, value)?; - self.subset(&opid, subvals)?; + self.subset::(&opid, subvals)?; Ok(opid.to_string().into()) } @@ -259,12 +275,12 @@ impl Automerge { prop: JsValue, value: JsValue, datatype: JsValue, - ) -> Result<(), JsValue> { - let obj = self.import(obj)?; + ) -> Result<(), error::Insert> { + let (obj, _) = self.import(obj)?; let prop = self.import_prop(prop)?; let value = self .import_scalar(&value, &datatype.as_string()) - .ok_or_else(|| to_js_err("expected scalar value"))?; + .ok_or(error::Insert::ValueNotPrimitive)?; self.doc.put(&obj, prop, value)?; Ok(()) } @@ -275,17 +291,20 @@ impl Automerge { obj: JsValue, prop: JsValue, value: JsValue, - ) -> Result { - let obj = self.import(obj)?; + ) -> Result { + let (obj, _) = self.import(obj)?; let prop = self.import_prop(prop)?; let (value, subvals) = - to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; + to_objtype(&value, &None).ok_or(error::InsertObject::ValueNotObject)?; let opid = self.doc.put_object(&obj, prop, value)?; - self.subset(&opid, subvals)?; + self.subset::(&opid, subvals)?; Ok(opid.to_string().into()) } - fn subset(&mut self, obj: &am::ObjId, vals: Vec<(am::Prop, JsValue)>) -> Result<(), JsValue> { + fn subset(&mut self, obj: &am::ObjId, vals: Vec<(am::Prop, JsValue)>) -> Result<(), E> + where + E: From + From + From, + { for (p, v) in vals { let (value, subvals) = self.import_value(&v, None)?; //let opid = self.0.set(id, p, value)?; @@ -306,7 +325,7 @@ impl Automerge { } }; if let Some(opid) = opid { - self.subset(&opid, subvals)?; + self.subset::(&opid, subvals)?; } } Ok(()) @@ -317,12 +336,10 @@ impl Automerge { obj: JsValue, prop: JsValue, value: JsValue, - ) -> Result<(), JsValue> { - let obj = self.import(obj)?; + ) -> Result<(), error::Increment> { + let (obj, _) = self.import(obj)?; let prop = self.import_prop(prop)?; - let value: f64 = value - 
.as_f64() - .ok_or_else(|| to_js_err("increment needs a numeric value"))?; + let value: f64 = value.as_f64().ok_or(error::Increment::ValueNotNumeric)?; self.doc.increment(&obj, prop, value as i64)?; Ok(()) } @@ -333,10 +350,10 @@ impl Automerge { obj: JsValue, prop: JsValue, heads: Option, - ) -> Result { - let obj = self.import(obj)?; + ) -> Result { + let (obj, _) = self.import(obj)?; let prop = to_prop(prop); - let heads = get_heads(heads); + let heads = get_heads(heads)?; if let Ok(prop) = prop { let value = if let Some(h) = heads { self.doc.get_at(&obj, prop, &h)? @@ -362,10 +379,10 @@ impl Automerge { obj: JsValue, prop: JsValue, heads: Option, - ) -> Result { - let obj = self.import(obj)?; + ) -> Result { + let (obj, _) = self.import(obj)?; let prop = to_prop(prop); - let heads = get_heads(heads); + let heads = get_heads(heads)?; if let Ok(prop) = prop { let value = if let Some(h) = heads { self.doc.get_at(&obj, prop, &h)? @@ -402,17 +419,16 @@ impl Automerge { obj: JsValue, arg: JsValue, heads: Option, - ) -> Result { - let obj = self.import(obj)?; + ) -> Result { + let (obj, _) = self.import(obj)?; let result = Array::new(); let prop = to_prop(arg); if let Ok(prop) = prop { - let values = if let Some(heads) = get_heads(heads) { + let values = if let Some(heads) = get_heads(heads)? 
{ self.doc.get_all_at(&obj, prop, &heads) } else { self.doc.get_all(&obj, prop) - } - .map_err(to_js_err)?; + }?; for (value, id) in values { let sub = Array::new(); let (datatype, js_value) = alloc(&value); @@ -451,7 +467,7 @@ impl Automerge { &mut self, datatype: JsValue, function: JsValue, - ) -> Result<(), JsValue> { + ) -> Result<(), value::InvalidDatatype> { let datatype = Datatype::try_from(datatype)?; if let Ok(function) = function.dyn_into::() { self.external_types.insert(datatype, function); @@ -467,8 +483,10 @@ impl Automerge { object: JsValue, meta: JsValue, callback: JsValue, - ) -> Result { - let mut object = object.dyn_into::()?; + ) -> Result { + let mut object = object + .dyn_into::() + .map_err(|_| error::ApplyPatch::NotObjectd)?; let patches = self.doc.observer().take_patches(); let callback = callback.dyn_into::().ok(); @@ -484,7 +502,8 @@ impl Automerge { if let Some(c) = &callback { let before = object.clone(); object = self.apply_patch(object, &p, 0, &meta)?; - c.call3(&JsValue::undefined(), &p.try_into()?, &before, &object)?; + c.call3(&JsValue::undefined(), &p.try_into()?, &before, &object) + .map_err(error::ApplyPatch::PatchCallback)?; } else { object = self.apply_patch(object, &p, 0, &meta)?; } @@ -494,7 +513,7 @@ impl Automerge { } #[wasm_bindgen(js_name = popPatches)] - pub fn pop_patches(&mut self) -> Result { + pub fn pop_patches(&mut self) -> Result { // transactions send out observer updates as they occur, not waiting for them to be // committed. // If we pop the patches then we won't be able to revert them. @@ -507,19 +526,19 @@ impl Automerge { Ok(result) } - pub fn length(&self, obj: JsValue, heads: Option) -> Result { - let obj = self.import(obj)?; - if let Some(heads) = get_heads(heads) { + pub fn length(&self, obj: JsValue, heads: Option) -> Result { + let (obj, _) = self.import(obj)?; + if let Some(heads) = get_heads(heads)? 
{ Ok(self.doc.length_at(&obj, &heads) as f64) } else { Ok(self.doc.length(&obj) as f64) } } - pub fn delete(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> { - let obj = self.import(obj)?; + pub fn delete(&mut self, obj: JsValue, prop: JsValue) -> Result<(), error::Get> { + let (obj, _) = self.import(obj)?; let prop = to_prop(prop)?; - self.doc.delete(&obj, prop).map_err(to_js_err)?; + self.doc.delete(&obj, prop)?; Ok(()) } @@ -534,21 +553,21 @@ impl Automerge { } #[wasm_bindgen(js_name = loadIncremental)] - pub fn load_incremental(&mut self, data: Uint8Array) -> Result { + pub fn load_incremental(&mut self, data: Uint8Array) -> Result { let data = data.to_vec(); - let len = self.doc.load_incremental(&data).map_err(to_js_err)?; + let len = self.doc.load_incremental(&data)?; Ok(len as f64) } #[wasm_bindgen(js_name = applyChanges)] - pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> { + pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), error::ApplyChangesError> { let changes: Vec<_> = JS(changes).try_into()?; - self.doc.apply_changes(changes).map_err(to_js_err)?; + self.doc.apply_changes(changes)?; Ok(()) } #[wasm_bindgen(js_name = getChanges)] - pub fn get_changes(&mut self, have_deps: JsValue) -> Result { + pub fn get_changes(&mut self, have_deps: JsValue) -> Result { let deps: Vec<_> = JS(have_deps).try_into()?; let changes = self.doc.get_changes(&deps)?; let changes: Array = changes @@ -559,8 +578,11 @@ impl Automerge { } #[wasm_bindgen(js_name = getChangeByHash)] - pub fn get_change_by_hash(&mut self, hash: JsValue) -> Result { - let hash = serde_wasm_bindgen::from_value(hash).map_err(to_js_err)?; + pub fn get_change_by_hash( + &mut self, + hash: JsValue, + ) -> Result { + let hash = JS(hash).try_into()?; let change = self.doc.get_change_by_hash(&hash); if let Some(c) = change { Ok(Uint8Array::from(c.raw_bytes()).into()) @@ -570,13 +592,13 @@ impl Automerge { } #[wasm_bindgen(js_name = getChangesAdded)] - pub 
fn get_changes_added(&mut self, other: &mut Automerge) -> Result { + pub fn get_changes_added(&mut self, other: &mut Automerge) -> Array { let changes = self.doc.get_changes_added(&mut other.doc); let changes: Array = changes .iter() .map(|c| Uint8Array::from(c.raw_bytes())) .collect(); - Ok(changes) + changes } #[wasm_bindgen(js_name = getHeads)] @@ -596,11 +618,11 @@ impl Automerge { } #[wasm_bindgen(js_name = getLastLocalChange)] - pub fn get_last_local_change(&mut self) -> Result { + pub fn get_last_local_change(&mut self) -> JsValue { if let Some(change) = self.doc.get_last_local_change() { - Ok(Uint8Array::from(change.raw_bytes()).into()) + Uint8Array::from(change.raw_bytes()).into() } else { - Ok(JsValue::null()) + JsValue::null() } } @@ -609,8 +631,8 @@ impl Automerge { } #[wasm_bindgen(js_name = getMissingDeps)] - pub fn get_missing_deps(&mut self, heads: Option) -> Result { - let heads = get_heads(heads).unwrap_or_default(); + pub fn get_missing_deps(&mut self, heads: Option) -> Result { + let heads = get_heads(heads)?.unwrap_or_default(); let deps = self.doc.get_missing_deps(&heads); let deps: Array = deps .iter() @@ -624,26 +646,24 @@ impl Automerge { &mut self, state: &mut SyncState, message: Uint8Array, - ) -> Result<(), JsValue> { + ) -> Result<(), error::ReceiveSyncMessage> { let message = message.to_vec(); - let message = am::sync::Message::decode(message.as_slice()).map_err(to_js_err)?; - self.doc - .receive_sync_message(&mut state.0, message) - .map_err(to_js_err)?; + let message = am::sync::Message::decode(message.as_slice())?; + self.doc.receive_sync_message(&mut state.0, message)?; Ok(()) } #[wasm_bindgen(js_name = generateSyncMessage)] - pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result { + pub fn generate_sync_message(&mut self, state: &mut SyncState) -> JsValue { if let Some(message) = self.doc.generate_sync_message(&mut state.0) { - Ok(Uint8Array::from(message.encode().as_slice()).into()) + 
Uint8Array::from(message.encode().as_slice()).into() } else { - Ok(JsValue::null()) + JsValue::null() } } #[wasm_bindgen(js_name = toJS)] - pub fn to_js(&mut self, meta: JsValue) -> Result { + pub fn to_js(&mut self, meta: JsValue) -> Result { self.export_object(&ROOT, Datatype::Map, None, &meta) } @@ -652,65 +672,79 @@ impl Automerge { obj: JsValue, heads: Option, meta: JsValue, - ) -> Result { - let obj = self.import(obj).unwrap_or(ROOT); - let heads = get_heads(heads); - let obj_type = self - .doc - .object_type(&obj) - .ok_or_else(|| to_js_err(format!("invalid obj {}", obj)))?; + ) -> Result { + let (obj, obj_type) = self.import(obj).unwrap_or((ROOT, ObjType::Map)); + let heads = get_heads(heads)?; let _patches = self.doc.observer().take_patches(); // throw away patches - self.export_object(&obj, obj_type.into(), heads.as_ref(), &meta) + Ok(self.export_object(&obj, obj_type.into(), heads.as_ref(), &meta)?) } - fn import(&self, id: JsValue) -> Result { + fn import(&self, id: JsValue) -> Result<(ObjId, ObjType), error::ImportObj> { if let Some(s) = id.as_string() { - if let Some(post) = s.strip_prefix('/') { - let mut obj = ROOT; - let mut is_map = true; - let parts = post.split('/'); - for prop in parts { - if prop.is_empty() { - break; - } - let val = if is_map { - self.doc.get(obj, prop)? - } else { - self.doc.get(obj, am::Prop::Seq(prop.parse().unwrap()))? 
- }; - match val { - Some((am::Value::Object(ObjType::Map), id)) => { - is_map = true; - obj = id; - } - Some((am::Value::Object(ObjType::Table), id)) => { - is_map = true; - obj = id; - } - Some((am::Value::Object(_), id)) => { - is_map = false; - obj = id; - } - None => return Err(to_js_err(format!("invalid path '{}'", s))), - _ => return Err(to_js_err(format!("path '{}' is not an object", s))), - }; - } - Ok(obj) + if let Some(components) = s.strip_prefix('/').map(|post| post.split('/')) { + self.import_path(components) + .map_err(|e| error::ImportObj::InvalidPath(s.to_string(), e)) } else { - Ok(self.doc.import(&s)?) + let id = self.doc.import(&s).map_err(error::ImportObj::BadImport)?; + // SAFETY: we just looked this up + let obj_type = self.doc.object_type(&id).unwrap(); + Ok((id, obj_type)) } } else { - Err(to_js_err("invalid objid")) + Err(error::ImportObj::NotString) } } - fn import_prop(&self, prop: JsValue) -> Result { + fn import_path<'a, I: Iterator>( + &self, + components: I, + ) -> Result<(ObjId, ObjType), error::ImportPath> { + let mut obj = ROOT; + let mut obj_type = ObjType::Map; + for (i, prop) in components.enumerate() { + if prop.is_empty() { + break; + } + let is_map = matches!(obj_type, ObjType::Map | ObjType::Table); + let val = if is_map { + self.doc.get(obj, prop)? + } else { + let idx = prop + .parse() + .map_err(|_| error::ImportPath::IndexNotInteger(i, prop.to_string()))?; + self.doc.get(obj, am::Prop::Seq(idx))? 
+ }; + match val { + Some((am::Value::Object(ObjType::Map), id)) => { + obj_type = ObjType::Map; + obj = id; + } + Some((am::Value::Object(ObjType::Table), id)) => { + obj_type = ObjType::Table; + obj = id; + } + Some((am::Value::Object(ObjType::List), id)) => { + obj_type = ObjType::List; + obj = id; + } + Some((am::Value::Object(ObjType::Text), id)) => { + obj_type = ObjType::Text; + obj = id; + } + None => return Err(error::ImportPath::NonExistentObject(i, prop.to_string())), + _ => return Err(error::ImportPath::NotAnObject), + }; + } + Ok((obj, obj_type)) + } + + fn import_prop(&self, prop: JsValue) -> Result { if let Some(s) = prop.as_string() { Ok(s.into()) } else if let Some(n) = prop.as_f64() { Ok((n as usize).into()) } else { - Err(to_js_err(format!("invalid prop {:?}", prop))) + Err(error::InvalidProp) } } @@ -764,7 +798,7 @@ impl Automerge { &self, value: &JsValue, datatype: Option, - ) -> Result<(Value<'static>, Vec<(Prop, JsValue)>), JsValue> { + ) -> Result<(Value<'static>, Vec<(Prop, JsValue)>), error::InvalidValue> { match self.import_scalar(value, &datatype) { Some(val) => Ok((val.into(), vec![])), None => { @@ -772,7 +806,7 @@ impl Automerge { Ok((o.into(), subvals)) } else { web_sys::console::log_2(&"Invalid value".into(), value); - Err(to_js_err("invalid value")) + Err(error::InvalidValue) } } } @@ -788,19 +822,19 @@ impl Automerge { } #[wasm_bindgen(js_name = create)] -pub fn init(actor: Option) -> Result { +pub fn init(actor: Option) -> Result { console_error_panic_hook::set_once(); Automerge::new(actor) } #[wasm_bindgen(js_name = load)] -pub fn load(data: Uint8Array, actor: Option) -> Result { +pub fn load(data: Uint8Array, actor: Option) -> Result { let data = data.to_vec(); - let mut doc = am::AutoCommitWithObs::::load(&data) - .map_err(to_js_err)? 
- .with_observer(Observer::default()); + let mut doc = + am::AutoCommitWithObs::::load(&data)?.with_observer(Observer::default()); if let Some(s) = actor { - let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); + let actor = + automerge::ActorId::from(hex::decode(s).map_err(error::BadActorId::from)?.to_vec()); doc.set_actor(actor); } Ok(Automerge { @@ -811,22 +845,22 @@ pub fn load(data: Uint8Array, actor: Option) -> Result Result { +pub fn encode_change(change: JsValue) -> Result { // Alex: Technically we should be using serde_wasm_bindgen::from_value instead of into_serde. // Unfortunately serde_wasm_bindgen::from_value fails for some inscrutable reason, so instead // we use into_serde (sorry to future me). #[allow(deprecated)] - let change: am::ExpandedChange = change.into_serde().map_err(to_js_err)?; + let change: am::ExpandedChange = change.into_serde()?; let change: Change = change.into(); Ok(Uint8Array::from(change.raw_bytes())) } #[wasm_bindgen(js_name = decodeChange)] -pub fn decode_change(change: Uint8Array) -> Result { - let change = Change::from_bytes(change.to_vec()).map_err(to_js_err)?; +pub fn decode_change(change: Uint8Array) -> Result { + let change = Change::from_bytes(change.to_vec())?; let change: am::ExpandedChange = change.decode(); let serializer = serde_wasm_bindgen::Serializer::json_compatible(); - change.serialize(&serializer).map_err(to_js_err) + Ok(change.serialize(&serializer)?) 
} #[wasm_bindgen(js_name = initSyncState)] @@ -836,7 +870,7 @@ pub fn init_sync_state() -> SyncState { // this is needed to be compatible with the automerge-js api #[wasm_bindgen(js_name = importSyncState)] -pub fn import_sync_state(state: JsValue) -> Result { +pub fn import_sync_state(state: JsValue) -> Result { Ok(SyncState(JS(state).try_into()?)) } @@ -847,46 +881,328 @@ pub fn export_sync_state(state: &SyncState) -> JsValue { } #[wasm_bindgen(js_name = encodeSyncMessage)] -pub fn encode_sync_message(message: JsValue) -> Result { - let heads = js_get(&message, "heads")?.try_into()?; - let need = js_get(&message, "need")?.try_into()?; - let changes = js_get(&message, "changes")?.try_into()?; - let have = js_get(&message, "have")?.try_into()?; - Ok(Uint8Array::from( - am::sync::Message { - heads, - need, - have, - changes, - } - .encode() - .as_slice(), - )) +pub fn encode_sync_message(message: JsValue) -> Result { + let message: am::sync::Message = JS(message).try_into()?; + Ok(Uint8Array::from(message.encode().as_slice())) } #[wasm_bindgen(js_name = decodeSyncMessage)] -pub fn decode_sync_message(msg: Uint8Array) -> Result { +pub fn decode_sync_message(msg: Uint8Array) -> Result { let data = msg.to_vec(); - let msg = am::sync::Message::decode(&data).map_err(to_js_err)?; + let msg = am::sync::Message::decode(&data)?; let heads = AR::from(msg.heads.as_slice()); let need = AR::from(msg.need.as_slice()); let changes = AR::from(msg.changes.as_slice()); let have = AR::from(msg.have.as_slice()); let obj = Object::new().into(); - js_set(&obj, "heads", heads)?; - js_set(&obj, "need", need)?; - js_set(&obj, "have", have)?; - js_set(&obj, "changes", changes)?; + // SAFETY: we just created this object + js_set(&obj, "heads", heads).unwrap(); + js_set(&obj, "need", need).unwrap(); + js_set(&obj, "have", have).unwrap(); + js_set(&obj, "changes", changes).unwrap(); Ok(obj) } #[wasm_bindgen(js_name = encodeSyncState)] -pub fn encode_sync_state(state: &SyncState) -> Result { - 
//let state = state.0.clone(); - Ok(Uint8Array::from(state.0.encode().as_slice())) +pub fn encode_sync_state(state: &SyncState) -> Uint8Array { + Uint8Array::from(state.0.encode().as_slice()) } #[wasm_bindgen(js_name = decodeSyncState)] -pub fn decode_sync_state(data: Uint8Array) -> Result { +pub fn decode_sync_state(data: Uint8Array) -> Result { SyncState::decode(data) } + +pub mod error { + use automerge::AutomergeError; + use wasm_bindgen::JsValue; + + use crate::interop::{ + self, + error::{BadChangeHashes, BadJSChanges}, + }; + + #[derive(Debug, thiserror::Error)] + #[error("could not parse Actor ID as a hex string: {0}")] + pub struct BadActorId(#[from] hex::FromHexError); + + impl From for JsValue { + fn from(s: BadActorId) -> Self { + JsValue::from(s.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum ApplyChangesError { + #[error(transparent)] + DecodeChanges(#[from] BadJSChanges), + #[error("error applying changes: {0}")] + Apply(#[from] AutomergeError), + } + + impl From for JsValue { + fn from(e: ApplyChangesError) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Fork { + #[error(transparent)] + BadActor(#[from] BadActorId), + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error(transparent)] + BadChangeHashes(#[from] BadChangeHashes), + } + + impl From for JsValue { + fn from(f: Fork) -> Self { + JsValue::from(f.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + #[error(transparent)] + pub struct Merge(#[from] AutomergeError); + + impl From for JsValue { + fn from(e: Merge) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum ImportPath { + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error("path component {0} ({1}) should be an integer to index a sequence")] + IndexNotInteger(usize, String), + #[error("path component {0} ({1}) referenced a nonexistent object")] + NonExistentObject(usize, 
String), + #[error("path did not refer to an object")] + NotAnObject, + } + + #[derive(Debug, thiserror::Error)] + pub enum ImportObj { + #[error("obj id was not a string")] + NotString, + #[error("invalid path {0}: {1}")] + InvalidPath(String, ImportPath), + #[error("unable to import object id: {0}")] + BadImport(AutomergeError), + } + + impl From for JsValue { + fn from(e: ImportObj) -> Self { + JsValue::from(format!("invalid object ID: {}", e)) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Get { + #[error("invalid object ID: {0}")] + ImportObj(#[from] ImportObj), + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error("bad heads: {0}")] + BadHeads(#[from] interop::error::BadChangeHashes), + #[error(transparent)] + InvalidProp(#[from] InvalidProp), + } + + impl From for JsValue { + fn from(e: Get) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Splice { + #[error("invalid object ID: {0}")] + ImportObj(#[from] ImportObj), + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error("value at {0} in values to insert was not a primitive")] + ValueNotPrimitive(usize), + } + + impl From for JsValue { + fn from(e: Splice) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Insert { + #[error("invalid object id: {0}")] + ImportObj(#[from] ImportObj), + #[error("the value to insert was not a primitive")] + ValueNotPrimitive, + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error(transparent)] + InvalidProp(#[from] InvalidProp), + #[error(transparent)] + InvalidValue(#[from] InvalidValue), + } + + impl From for JsValue { + fn from(e: Insert) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum InsertObject { + #[error("invalid object id: {0}")] + ImportObj(#[from] ImportObj), + #[error("the value to insert must be an object")] + ValueNotObject, + #[error(transparent)] + 
Automerge(#[from] AutomergeError), + #[error(transparent)] + InvalidProp(#[from] InvalidProp), + #[error(transparent)] + InvalidValue(#[from] InvalidValue), + } + + impl From for JsValue { + fn from(e: InsertObject) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + #[error("given property was not a string or integer")] + pub struct InvalidProp; + + #[derive(Debug, thiserror::Error)] + #[error("given property was not a string or integer")] + pub struct InvalidValue; + + #[derive(Debug, thiserror::Error)] + pub enum Increment { + #[error("invalid object id: {0}")] + ImportObj(#[from] ImportObj), + #[error(transparent)] + InvalidProp(#[from] InvalidProp), + #[error("value was not numeric")] + ValueNotNumeric, + #[error(transparent)] + Automerge(#[from] AutomergeError), + } + + impl From for JsValue { + fn from(e: Increment) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum BadSyncMessage { + #[error("could not decode sync message: {0}")] + ReadMessage(#[from] automerge::sync::ReadMessageError), + } + + impl From for JsValue { + fn from(e: BadSyncMessage) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum ApplyPatch { + #[error(transparent)] + Interop(#[from] interop::error::ApplyPatch), + #[error(transparent)] + Export(#[from] interop::error::Export), + #[error("patch was not an object")] + NotObjectd, + #[error("error calling patch callback: {0:?}")] + PatchCallback(JsValue), + } + + impl From for JsValue { + fn from(e: ApplyPatch) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + #[error("unable to build patches: {0}")] + pub struct PopPatches(#[from] interop::error::Export); + + impl From for JsValue { + fn from(e: PopPatches) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Materialize { + #[error(transparent)] + Export(#[from] 
interop::error::Export), + #[error("bad heads: {0}")] + Heads(#[from] interop::error::BadChangeHashes), + } + + impl From for JsValue { + fn from(e: Materialize) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum ReceiveSyncMessage { + #[error(transparent)] + Decode(#[from] automerge::sync::ReadMessageError), + #[error(transparent)] + Automerge(#[from] AutomergeError), + } + + impl From for JsValue { + fn from(e: ReceiveSyncMessage) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Load { + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error(transparent)] + BadActor(#[from] BadActorId), + } + + impl From for JsValue { + fn from(e: Load) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + #[error("Unable to read JS change: {0}")] + pub struct EncodeChange(#[from] serde_json::Error); + + impl From for JsValue { + fn from(e: EncodeChange) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum DecodeChange { + #[error(transparent)] + Load(#[from] automerge::LoadChangeError), + #[error(transparent)] + Serialize(#[from] serde_wasm_bindgen::Error), + } + + impl From for JsValue { + fn from(e: DecodeChange) -> Self { + JsValue::from(e.to_string()) + } + } +} diff --git a/rust/automerge-wasm/src/observer.rs b/rust/automerge-wasm/src/observer.rs index 2d979041..67a757b6 100644 --- a/rust/automerge-wasm/src/observer.rs +++ b/rust/automerge-wasm/src/observer.rs @@ -1,6 +1,6 @@ #![allow(dead_code)] -use crate::interop::{alloc, js_set}; +use crate::interop::{self, alloc, js_set}; use automerge::{ObjId, OpObserver, Parents, Prop, SequenceTree, Value}; use js_sys::{Array, Object}; use wasm_bindgen::prelude::*; @@ -237,7 +237,7 @@ impl Patch { } impl TryFrom for JsValue { - type Error = JsValue; + type Error = interop::error::Export; fn try_from(p: Patch) -> Result { let result = 
Object::new(); diff --git a/rust/automerge-wasm/src/sync.rs b/rust/automerge-wasm/src/sync.rs index 94f65041..c4fd4a86 100644 --- a/rust/automerge-wasm/src/sync.rs +++ b/rust/automerge-wasm/src/sync.rs @@ -5,7 +5,7 @@ use std::collections::{BTreeSet, HashMap}; use std::convert::TryInto; use wasm_bindgen::prelude::*; -use crate::interop::{to_js_err, AR, JS}; +use crate::interop::{self, to_js_err, AR, JS}; #[wasm_bindgen] #[derive(Debug)] @@ -24,7 +24,10 @@ impl SyncState { } #[wasm_bindgen(setter, js_name = lastSentHeads)] - pub fn set_last_sent_heads(&mut self, heads: JsValue) -> Result<(), JsValue> { + pub fn set_last_sent_heads( + &mut self, + heads: JsValue, + ) -> Result<(), interop::error::BadChangeHashes> { let heads: Vec = JS(heads).try_into()?; self.0.last_sent_heads = heads; Ok(()) @@ -44,10 +47,19 @@ impl SyncState { SyncState(self.0.clone()) } - pub(crate) fn decode(data: Uint8Array) -> Result { + pub(crate) fn decode(data: Uint8Array) -> Result { let data = data.to_vec(); - let s = am::sync::State::decode(&data); - let s = s.map_err(to_js_err)?; + let s = am::sync::State::decode(&data)?; Ok(SyncState(s)) } } + +#[derive(Debug, thiserror::Error)] +#[error(transparent)] +pub struct DecodeSyncStateErr(#[from] automerge::sync::DecodeStateError); + +impl From for JsValue { + fn from(e: DecodeSyncStateErr) -> Self { + JsValue::from(e.to_string()) + } +} diff --git a/rust/automerge-wasm/src/value.rs b/rust/automerge-wasm/src/value.rs index be554d5c..b803ea43 100644 --- a/rust/automerge-wasm/src/value.rs +++ b/rust/automerge-wasm/src/value.rs @@ -1,4 +1,3 @@ -use crate::to_js_err; use automerge::{ObjType, ScalarValue, Value}; use wasm_bindgen::prelude::*; @@ -113,12 +112,10 @@ impl From for String { } impl TryFrom for Datatype { - type Error = JsValue; + type Error = InvalidDatatype; fn try_from(datatype: JsValue) -> Result { - let datatype = datatype - .as_string() - .ok_or_else(|| to_js_err("datatype is not a string"))?; + let datatype = 
datatype.as_string().ok_or(InvalidDatatype::NotString)?; match datatype.as_str() { "map" => Ok(Datatype::Map), "table" => Ok(Datatype::Table), @@ -135,9 +132,10 @@ impl TryFrom for Datatype { "null" => Ok(Datatype::Null), d => { if d.starts_with("unknown") { - todo!() // handle "unknown{}", + // TODO: handle "unknown{}", + Err(InvalidDatatype::UnknownNotImplemented) } else { - Err(to_js_err(format!("unknown datatype {}", d))) + Err(InvalidDatatype::Unknown(d.to_string())) } } } @@ -149,3 +147,19 @@ impl From for JsValue { String::from(d).into() } } + +#[derive(Debug, thiserror::Error)] +pub enum InvalidDatatype { + #[error("unknown datatype")] + Unknown(String), + #[error("datatype is not a string")] + NotString, + #[error("cannot handle unknown datatype")] + UnknownNotImplemented, +} + +impl From for JsValue { + fn from(e: InvalidDatatype) -> Self { + JsValue::from(e.to_string()) + } +} diff --git a/rust/automerge/src/lib.rs b/rust/automerge/src/lib.rs index 15cee2a7..ed29d226 100644 --- a/rust/automerge/src/lib.rs +++ b/rust/automerge/src/lib.rs @@ -107,7 +107,7 @@ pub use op_observer::Patch; pub use op_observer::VecOpObserver; pub use parents::Parents; pub use sequence_tree::SequenceTree; -pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; +pub use types::{ActorId, ChangeHash, ObjType, OpType, ParseChangeHashError, Prop}; pub use value::{ScalarValue, Value}; pub use values::Values; diff --git a/rust/automerge/src/sync.rs b/rust/automerge/src/sync.rs index 6a206fdf..1545f954 100644 --- a/rust/automerge/src/sync.rs +++ b/rust/automerge/src/sync.rs @@ -10,7 +10,7 @@ use crate::{ mod bloom; mod state; -pub use bloom::BloomFilter; +pub use bloom::{BloomFilter, DecodeError as DecodeBloomError}; pub use state::DecodeError as DecodeStateError; pub use state::{Have, State}; From becc30187701b7b571c51b8b9cb04d0bde145bc8 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Fri, 2 Dec 2022 15:10:24 +0000 Subject: [PATCH 657/730] automerge-wasm@0.1.19 & 
automerge-js@2.0.1-alpha.2 --- javascript/package.json | 4 ++-- rust/automerge-wasm/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/javascript/package.json b/javascript/package.json index b68674c9..0dae9684 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.1-alpha.1", + "version": "2.0.1-alpha.2", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", @@ -59,7 +59,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.18", + "@automerge/automerge-wasm": "0.1.19", "uuid": "^8.3" } } diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 1caa5a00..45e7950e 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.18", + "version": "0.1.19", "license": "MIT", "files": [ "README.md", From c3932e626709072f4fe0d3eb44773804fcfedf2e Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 9 Dec 2022 06:46:23 -0700 Subject: [PATCH 658/730] Improve docs for building automerge-c on a mac (#465) * More detailed instructions in README I struggled to get the project to build for a while when first getting started, so have added some instructions; and also some usage instructions for automerge-c that show more clearly what is happening without `AMpush()` --- README.md | 67 +++++++++++-- rust/automerge-c/README.md | 201 ++++++++++++++++++++++++------------- scripts/ci/cmake-build | 2 +- 3 files changed, 189 insertions(+), 81 deletions(-) diff --git a/README.md 
b/README.md index 449da11d..b2037c13 100644 --- a/README.md +++ b/README.md @@ -40,11 +40,11 @@ in that time. In general we try and respect semver. -### JavaScript +### JavaScript An alpha release of the javascript package is currently available as `@automerge/automerge@2.0.0-alpha.n` where `n` is an integer. We are gathering -feedback on the API and looking to release a `2.0.0` in the next few weeks. +feedback on the API and looking to release a `2.0.0` in the next few weeks. ### Rust @@ -54,32 +54,79 @@ not well documented. We will be returning to this over the next few months but for now you will need to be comfortable reading the tests and asking questions to figure out how to use it. - ## Repository Organisation -* `./rust` - the rust rust implementation and also the Rust components of +- `./rust` - the rust rust implementation and also the Rust components of platform specific wrappers (e.g. `automerge-wasm` for the WASM API or `automerge-c` for the C FFI bindings) -* `./javascript` - The javascript library which uses `automerge-wasm` +- `./javascript` - The javascript library which uses `automerge-wasm` internally but presents a more idiomatic javascript interface -* `./scripts` - scripts which are useful to maintenance of the repository. +- `./scripts` - scripts which are useful to maintenance of the repository. This includes the scripts which are run in CI. -* `./img` - static assets for use in `.md` files +- `./img` - static assets for use in `.md` files ## Building To build this codebase you will need: - `rust` -- `wasm-bindgen-cli` -- `wasm-opt` - `node` - `yarn` - `cmake` +- `cmocka` + +You will also need to install the following with `cargo install` + +- `wasm-bindgen-cli` +- `wasm-opt` +- `cargo-deny` + +And ensure you have added the `wasm32-unknown-unknown` target for rust cross-compilation. 
The various subprojects (the rust code, the wrapper projects) have their own build instructions, but to run the tests that will be run in CI you can run -`./scripts/ci/run`. +`./scripts/ci/run`. + +### For macOS + +These instructions worked to build locally on macOS 13.1 (arm64) as of +Nov 29th 2022. + +```bash +# clone the repo +git clone https://github.com/automerge/automerge-rs +cd automerge-rs + +# install rustup +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh + +# install homebrew +/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" + +# install cmake, node, cmocka +brew install cmake node cmocka + +# install yarn +npm install --global yarn + +# install rust dependencies +cargo install wasm-bindgen-cli wasm-opt cargo-deny + +# add wasm target in addition to current architecture +rustup target add wasm32-unknown-unknown + +# Run ci script +./scripts/ci/run +``` + +If your build fails to find `cmocka.h` you may need to teach it about homebrew's +installation location: + +``` +export CPATH=/opt/homebrew/include +export LIBRARY_PATH=/opt/homebrew/lib +./scripts/ci/run +``` ## Contributing diff --git a/rust/automerge-c/README.md b/rust/automerge-c/README.md index 1b0e618d..a9f097e2 100644 --- a/rust/automerge-c/README.md +++ b/rust/automerge-c/README.md @@ -1,97 +1,158 @@ +automerge-c exposes an API to C that can either be used directly or as a basis +for other language bindings that have good support for calling into C functions. -## Methods we need to support +# Building -### Basic management +See the main README for instructions on getting your environment set up, then +you can use `./scripts/ci/cmake-build Release static` to build automerge-c. - 1. `AMcreate()` - 1. `AMclone(doc)` - 1. `AMfree(doc)` - 1. `AMconfig(doc, key, val)` // set actor - 1. 
`actor = get_actor(doc)` +It will output two files: -### Transactions +- ./build/Cargo/target/include/automerge-c/automerge.h +- ./build/Cargo/target/release/libautomerge.a - 1. `AMpendingOps(doc)` - 1. `AMcommit(doc, message, time)` - 1. `AMrollback(doc)` +To use these in your application you must arrange for your C compiler to find +these files, either by moving them to the right location on your computer, or +by configuring the compiler to reference these directories. -### Write +- `export LDFLAGS=-L./build/Cargo/target/release -lautomerge` +- `export CFLAGS=-I./build/Cargo/target/include` - 1. `AMset{Map|List}(doc, obj, prop, value)` - 1. `AMinsert(doc, obj, index, value)` - 1. `AMpush(doc, obj, value)` - 1. `AMdel{Map|List}(doc, obj, prop)` - 1. `AMinc{Map|List}(doc, obj, prop, value)` - 1. `AMspliceText(doc, obj, start, num_del, text)` +If you'd like to cross compile the library for different platforms you can do so +using [cross](https://github.com/cross-rs/cross). For example: -### Read (the heads argument is optional and can be on an `at` variant) +- `cross build --manifest-path rust/automerge-c/Cargo.toml -r --target aarch64-unknown-linux-gnu` - 1. `AMkeys(doc, obj, heads)` - 1. `AMlength(doc, obj, heads)` - 1. `AMlistRange(doc, obj, heads)` - 1. `AMmapRange(doc, obj, heads)` - 1. `AMvalues(doc, obj, heads)` - 1. `AMtext(doc, obj, heads)` +This will output a shared library in the directory `rust/target/aarch64-unknown-linux-gnu/release/`. -### Sync +You can replace `aarch64-unknown-linux-gnu` with any [cross supported targets](https://github.com/cross-rs/cross#supported-targets). The targets below are known to work, though other targets are expected to work too: - 1. `AMgenerateSyncMessage(doc, state)` - 1. `AMreceiveSyncMessage(doc, state, message)` - 1. 
`AMinitSyncState()` +- `x86_64-apple-darwin` +- `aarch64-apple-darwin` +- `x86_64-unknown-linux-gnu` +- `aarch64-unknown-linux-gnu` -### Save / Load +As a caveat, the header file is currently 32/64-bit dependant. You can re-use it +for all 64-bit architectures, but you must generate a specific header for 32-bit +targets. - 1. `AMload(data)` - 1. `AMloadIncremental(doc, data)` - 1. `AMsave(doc)` - 1. `AMsaveIncremental(doc)` +# Usage -### Low Level Access +For full reference, read through `automerge.h`, or to get started quickly look +at the +[examples](https://github.com/automerge/automerge-rs/tree/main/rust/automerge-c/examples). - 1. `AMapplyChanges(doc, changes)` - 1. `AMgetChanges(doc, deps)` - 1. `AMgetChangesAdded(doc1, doc2)` - 1. `AMgetHeads(doc)` - 1. `AMgetLastLocalChange(doc)` - 1. `AMgetMissingDeps(doc, heads)` +Almost all operations in automerge-c act on an AMdoc struct which you can get +from `AMcreate()` or `AMload()`. Operations on a given doc are not thread safe +so you must use a mutex or similar to avoid calling more than one function with +the same AMdoc pointer concurrently. -### Encode/Decode +As with all functions that either allocate memory, or could fail if given +invalid input, `AMcreate()` returns an `AMresult`. The `AMresult` contains the +returned doc (or error message), and must be freed with `AMfree()` after you are +done to avoid leaking memory. - 1. `AMencodeChange(change)` - 1. `AMdecodeChange(change)` - 1. `AMencodeSyncMessage(change)` - 1. `AMdecodeSyncMessage(change)` - 1. `AMencodeSyncState(change)` - 1. `AMdecodeSyncState(change)` +``` +#include +#include -## Open Question - Memory management +int main(int argc, char** argv) { + AMresult *docResult = AMcreate(NULL); -Most of these calls return one or more items of arbitrary length. Doing memory management in C is tricky. This is my proposed solution... 
+ if (AMresultStatus(docResult) != AM_STATUS_OK) { + printf("failed to create doc: %s", AMerrorMessage(docResult).src); + goto cleanup; + } -### + AMdoc *doc = AMresultValue(docResult).doc; - ``` - // returns 1 or zero opids - n = automerge_set(doc, "_root", "hello", datatype, value); - if (n) { - automerge_pop(doc, &obj, len); - } + // useful code goes here! - // returns n values - n = automerge_values(doc, "_root", "hello"); - for (i = 0; i +#include + +int main(int argc, char** argv) { + // ...previous example... + AMdoc *doc = AMresultValue(docResult).doc; + + AMresult *putResult = AMmapPutStr(doc, AM_ROOT, AMstr("key"), AMstr("value")); + if (AMresultStatus(putResult) != AM_STATUS_OK) { + printf("failed to put: %s", AMerrorMessage(putResult).src); + goto cleanup; + } + + AMresult *getResult = AMmapGet(doc, AM_ROOT, AMstr("key"), NULL); + if (AMresultStatus(getResult) != AM_STATUS_OK) { + printf("failed to get: %s", AMerrorMessage(getResult).src); + goto cleanup; + } + + AMvalue got = AMresultValue(getResult); + if (got.tag != AM_VALUE_STR) { + printf("expected to read a string!"); + goto cleanup; + } + + printf("Got %zu-character string `%s`", got.str.count, got.str.src); + +cleanup: + AMfree(getResult); + AMfree(putResult); + AMfree(docResult); +} +``` + +Functions that do not return an `AMresult` (for example `AMmapItemValue()`) do +not allocate memory, but continue to reference memory that was previously +allocated. It's thus important to keep the original `AMresult` alive (in this +case the one returned by `AMmapRange()`) until after you are done with the return +values of these functions. + +Beyond that, good luck! diff --git a/scripts/ci/cmake-build b/scripts/ci/cmake-build index e36513a2..3924dc4a 100755 --- a/scripts/ci/cmake-build +++ b/scripts/ci/cmake-build @@ -6,7 +6,7 @@ THIS_SCRIPT=$(dirname "$0"); # "RelWithDebInfo" but custom ones can also be defined so we pass it verbatim. 
BUILD_TYPE=$1; LIB_TYPE=$2; -if [ "${LIB_TYPE,,}" == "shared" ]; then +if [ "$(echo "${LIB_TYPE}" | tr '[:upper:]' '[:lower:]')" == "shared" ]; then SHARED_TOGGLE="ON" else SHARED_TOGGLE="OFF" From b05c9e83a431e887b6efe0f8e5d6113c6b1ace78 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 9 Dec 2022 09:11:23 -0700 Subject: [PATCH 659/730] Use AMbyteSpan for AM{list,map}PutBytes (#464) * Use AMbyteSpan for byte values Before this change there was an inconsistency between AMmapPutString (which took an AMbyteSpan) and AMmapPutBytes (which took a pointer + length). Either is fine, but we should do the same in both places. I chose this path to make it clear that the value passed in was an automerge value, and to be symmetric with AMvalue.bytes when you do an AMmapGet(). I did not update other APIs (like load) that take a pointer + length, as that is idiomatic usage for C, and these functions are not operating on byte values stored in automerge. --- rust/automerge-c/src/byte_span.rs | 15 +++++++++++++++ rust/automerge-c/src/doc/list.rs | 5 ++--- rust/automerge-c/src/doc/map.rs | 5 ++--- rust/automerge-c/test/list_tests.c | 3 +-- rust/automerge-c/test/map_tests.c | 3 +-- rust/automerge-c/test/ported_wasm/basic_tests.c | 4 ++-- 6 files changed, 23 insertions(+), 12 deletions(-) diff --git a/rust/automerge-c/src/byte_span.rs b/rust/automerge-c/src/byte_span.rs index a846cf58..fd4c3ca0 100644 --- a/rust/automerge-c/src/byte_span.rs +++ b/rust/automerge-c/src/byte_span.rs @@ -124,3 +124,18 @@ impl TryFrom<&AMbyteSpan> for &str { } } } + +/// \brief Creates an AMbyteSpan from a pointer + length +/// +/// \param[in] src A pointer to a span of bytes +/// \param[in] count The number of bytes in the span +/// \return An `AMbyteSpan` struct +/// \internal +/// +/// #Safety +/// AMbytes does not retain the underlying storage, so you must discard the +/// return value before freeing the bytes. 
+#[no_mangle] +pub unsafe extern "C" fn AMbytes(src: *const u8, count: usize) -> AMbyteSpan { + AMbyteSpan { src, count } +} diff --git a/rust/automerge-c/src/doc/list.rs b/rust/automerge-c/src/doc/list.rs index 82c62952..48f26c21 100644 --- a/rust/automerge-c/src/doc/list.rs +++ b/rust/automerge-c/src/doc/list.rs @@ -238,14 +238,13 @@ pub unsafe extern "C" fn AMlistPutBytes( obj_id: *const AMobjId, index: usize, insert: bool, - src: *const u8, - count: usize, + val: AMbyteSpan, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let (index, insert) = adjust!(index, insert, doc.length(obj_id)); let mut value = Vec::new(); - value.extend_from_slice(std::slice::from_raw_parts(src, count)); + value.extend_from_slice(std::slice::from_raw_parts(val.src, val.count)); to_result(if insert { doc.insert(obj_id, index, value) } else { diff --git a/rust/automerge-c/src/doc/map.rs b/rust/automerge-c/src/doc/map.rs index dbf4d61f..a5801323 100644 --- a/rust/automerge-c/src/doc/map.rs +++ b/rust/automerge-c/src/doc/map.rs @@ -198,13 +198,12 @@ pub unsafe extern "C" fn AMmapPutBytes( doc: *mut AMdoc, obj_id: *const AMobjId, key: AMbyteSpan, - src: *const u8, - count: usize, + val: AMbyteSpan, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let key = to_str!(key); let mut vec = Vec::new(); - vec.extend_from_slice(std::slice::from_raw_parts(src, count)); + vec.extend_from_slice(std::slice::from_raw_parts(val.src, val.count)); to_result(doc.put(to_obj_id!(obj_id), key, vec)) } diff --git a/rust/automerge-c/test/list_tests.c b/rust/automerge-c/test/list_tests.c index 25a24329..b742cbe4 100644 --- a/rust/automerge-c/test/list_tests.c +++ b/rust/automerge-c/test/list_tests.c @@ -61,8 +61,7 @@ static void test_AMlistPutBytes_ ## mode(void **state) { \ AM_ROOT, \ 0, \ !strcmp(#mode, "insert"), \ - bytes_value, \ - BYTES_SIZE)); \ + AMbytes(bytes_value, BYTES_SIZE))); \ AMbyteSpan const bytes = AMpush( \ &group_state->stack, \ AMlistGet(group_state->doc, 
AM_ROOT, 0, NULL), \ diff --git a/rust/automerge-c/test/map_tests.c b/rust/automerge-c/test/map_tests.c index 51a536ce..194da2e8 100644 --- a/rust/automerge-c/test/map_tests.c +++ b/rust/automerge-c/test/map_tests.c @@ -58,8 +58,7 @@ static void test_AMmapPutBytes(void **state) { AMfree(AMmapPutBytes(group_state->doc, AM_ROOT, KEY, - BYTES_VALUE, - BYTES_SIZE)); + AMbytes(BYTES_VALUE, BYTES_SIZE))); AMbyteSpan const bytes = AMpush(&group_state->stack, AMmapGet(group_state->doc, AM_ROOT, KEY, NULL), AM_VALUE_BYTES, diff --git a/rust/automerge-c/test/ported_wasm/basic_tests.c b/rust/automerge-c/test/ported_wasm/basic_tests.c index ea8f1b85..303160cf 100644 --- a/rust/automerge-c/test/ported_wasm/basic_tests.c +++ b/rust/automerge-c/test/ported_wasm/basic_tests.c @@ -201,10 +201,10 @@ static void test_should_be_able_to_use_bytes(void** state) { AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; /* doc.put("_root", "data1", new Uint8Array([10, 11, 12])); */ static uint8_t const DATA1[] = {10, 11, 12}; - AMfree(AMmapPutBytes(doc, AM_ROOT, AMstr("data1"), DATA1, sizeof(DATA1))); + AMfree(AMmapPutBytes(doc, AM_ROOT, AMstr("data1"), AMbytes(DATA1, sizeof(DATA1)))); /* doc.put("_root", "data2", new Uint8Array([13, 14, 15]), "bytes"); */ static uint8_t const DATA2[] = {13, 14, 15}; - AMfree(AMmapPutBytes(doc, AM_ROOT, AMstr("data2"), DATA2, sizeof(DATA2))); + AMfree(AMmapPutBytes(doc, AM_ROOT, AMstr("data2"), AMbytes(DATA2, sizeof(DATA2)))); /* const value1 = doc.getWithType("_root", "data1") */ AMbyteSpan const value1 = AMpush(&stack, AMmapGet(doc, AM_ROOT, AMstr("data1"), NULL), From 2db9e78f2a635f8c1d7c006d1206616256dc0801 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 9 Dec 2022 15:48:07 -0800 Subject: [PATCH 660/730] Text v2. 
JS Api now uses text by default (#462) --- .gitignore | 1 + javascript/src/constants.ts | 21 +- javascript/src/index.ts | 69 +-- javascript/src/proxies.ts | 95 +--- javascript/src/text.ts | 199 -------- javascript/src/types.ts | 5 +- javascript/test/basic_test.ts | 27 +- javascript/test/legacy_tests.ts | 83 +-- javascript/test/sync_test.ts | 1 + javascript/test/text_test.ts | 479 ++---------------- rust/automerge-c/.gitignore | 7 + rust/automerge-c/src/doc.rs | 4 +- rust/automerge-c/test/doc_tests.c | 57 ++- rust/automerge-c/test/list_tests.c | 228 +++++---- .../test/ported_wasm/basic_tests.c | 55 +- rust/automerge-wasm/README.md | 8 +- rust/automerge-wasm/src/interop.rs | 452 ++++++++++++++--- rust/automerge-wasm/src/lib.rs | 302 ++++------- rust/automerge-wasm/src/observer.rs | 305 +++++++---- rust/automerge-wasm/src/value.rs | 4 - rust/automerge-wasm/test/apply.ts | 17 +- rust/automerge-wasm/test/readme.ts | 6 - rust/automerge-wasm/test/test.ts | 291 ++++++++--- rust/automerge/examples/watch.rs | 17 + rust/automerge/src/autocommit.rs | 30 +- rust/automerge/src/automerge.rs | 269 +++++++--- rust/automerge/src/automerge/tests.rs | 69 ++- rust/automerge/src/error.rs | 10 +- rust/automerge/src/lib.rs | 4 +- rust/automerge/src/op_observer.rs | 236 ++++++--- rust/automerge/src/op_set.rs | 147 ++---- rust/automerge/src/op_set/load.rs | 7 +- rust/automerge/src/op_tree.rs | 49 +- rust/automerge/src/parents.rs | 45 +- rust/automerge/src/query.rs | 104 ++-- rust/automerge/src/query/elem_id_pos.rs | 11 +- rust/automerge/src/query/insert.rs | 42 +- rust/automerge/src/query/len.rs | 8 +- rust/automerge/src/query/len_at.rs | 8 +- rust/automerge/src/query/nth.rs | 46 +- rust/automerge/src/query/nth_at.rs | 10 +- rust/automerge/src/query/opid.rs | 11 +- rust/automerge/src/query/opid_vis.rs | 62 +++ rust/automerge/src/query/prop.rs | 4 +- rust/automerge/src/query/seek_op.rs | 4 +- .../automerge/src/query/seek_op_with_patch.rs | 17 +- rust/automerge/src/transaction/inner.rs | 289 
+++++++++-- .../src/transaction/manual_transaction.rs | 12 +- .../automerge/src/transaction/transactable.rs | 11 +- rust/automerge/src/types.rs | 59 +++ rust/automerge/tests/test.rs | 38 +- rust/edit-trace/.gitignore | 1 + rust/edit-trace/automerge-js.js | 12 +- rust/edit-trace/automerge-rs.js | 31 -- rust/edit-trace/automerge-wasm.js | 7 + 55 files changed, 2438 insertions(+), 1948 deletions(-) delete mode 100644 javascript/src/text.ts create mode 100644 rust/automerge/src/query/opid_vis.rs delete mode 100644 rust/edit-trace/automerge-rs.js diff --git a/.gitignore b/.gitignore index baad0a63..f77865d0 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ perf.* /Cargo.lock build/ .vim/* +/target diff --git a/javascript/src/constants.ts b/javascript/src/constants.ts index d9f78af2..e9517a60 100644 --- a/javascript/src/constants.ts +++ b/javascript/src/constants.ts @@ -1,13 +1,9 @@ // Properties of the document root object -//const OPTIONS = Symbol('_options') // object containing options passed to init() -//const CACHE = Symbol('_cache') // map from objectId to immutable object -//export const STATE = Symbol.for('_am_state') // object containing metadata about current state (e.g. sequence numbers) -export const STATE = Symbol.for('_am_meta') // object containing metadata about current state (e.g. sequence numbers) -export const HEADS = Symbol.for('_am_heads') // object containing metadata about current state (e.g. sequence numbers) -export const TRACE = Symbol.for('_am_trace') // object containing metadata about current state (e.g. sequence numbers) -export const OBJECT_ID = Symbol.for('_am_objectId') // object containing metadata about current state (e.g. sequence numbers) -export const READ_ONLY = Symbol.for('_am_readOnly') // object containing metadata about current state (e.g. sequence numbers) -export const FROZEN = Symbol.for('_am_frozen') // object containing metadata about current state (e.g. 
sequence numbers) + +export const STATE = Symbol.for('_am_meta') // symbol used to hide application metadata on automerge objects +export const TRACE = Symbol.for('_am_trace') // used for debugging +export const OBJECT_ID = Symbol.for('_am_objectId') // synbol used to hide the object id on automerge objects +export const IS_PROXY = Symbol.for('_am_isProxy') // symbol used to test if the document is a proxy object export const UINT = Symbol.for('_am_uint') export const INT = Symbol.for('_am_int') @@ -15,10 +11,3 @@ export const F64 = Symbol.for('_am_f64') export const COUNTER = Symbol.for('_am_counter') export const TEXT = Symbol.for('_am_text') -// Properties of all Automerge objects -//const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string) -//const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts -//const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback -//const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element - - diff --git a/javascript/src/index.ts b/javascript/src/index.ts index 8dece76b..50306b4c 100644 --- a/javascript/src/index.ts +++ b/javascript/src/index.ts @@ -2,11 +2,11 @@ /** @hidden **/ export {/** @hidden */ uuid} from './uuid' -import {rootProxy, listProxy, textProxy, mapProxy} from "./proxies" -import {STATE, HEADS, TRACE, OBJECT_ID, READ_ONLY, FROZEN} from "./constants" +import {rootProxy, listProxy, mapProxy} from "./proxies" +import {STATE, TRACE, IS_PROXY, OBJECT_ID } from "./constants" -import {AutomergeValue, Text, Counter} from "./types" -export {AutomergeValue, Text, Counter, Int, Uint, Float64, ScalarValue} from "./types" +import {AutomergeValue, Counter} from "./types" +export {AutomergeValue, Counter, Int, Uint, Float64, ScalarValue} from "./types" import {type API, type Patch} from "@automerge/automerge-wasm"; export { type Patch, PutPatch, DelPatch, SplicePatch, IncPatch, SyncMessage, } from 
"@automerge/automerge-wasm" @@ -108,23 +108,10 @@ function _state(doc: Doc, checkroot = true): InternalState { return state } -function _frozen(doc: Doc): boolean { - return Reflect.get(doc, FROZEN) === true -} - function _trace(doc: Doc): string | undefined { return Reflect.get(doc, TRACE) as string } -function _set_heads(doc: Doc, heads: Heads) { - _state(doc).heads = heads -} - -function _clear_heads(doc: Doc) { - Reflect.set(doc, HEADS, undefined) - Reflect.set(doc, TRACE, undefined) -} - function _obj(doc: Doc): ObjID | null { if (!(typeof doc === 'object') || doc === null) { return null @@ -132,8 +119,8 @@ function _obj(doc: Doc): ObjID | null { return Reflect.get(doc, OBJECT_ID) as ObjID } -function _readonly(doc: Doc): boolean { - return Reflect.get(doc, READ_ONLY) !== false +function _is_proxy(doc: Doc): boolean { + return !!Reflect.get(doc, IS_PROXY) } function importOpts(_actor?: ActorId | InitOptions): InitOptions { @@ -161,7 +148,6 @@ export function init(_opts?: ActorId | InitOptions): Doc { handle.enablePatches(true) handle.enableFreeze(!!opts.freeze) handle.registerDatatype("counter", (n) => new Counter(n)) - handle.registerDatatype("text", (n) => new Text(n)) const doc = handle.materialize("/", undefined, {handle, heads: undefined, freeze, patchCallback}) as Doc return doc } @@ -327,7 +313,7 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn(doc: Doc, options: string | ChangeOptions | if (state.heads) { throw new RangeError("Attempting to change an outdated document. 
Use Automerge.clone() if you wish to make a writable copy.") } - if (_readonly(doc) === false) { + if (_is_proxy(doc)) { throw new RangeError("Calls to Automerge.change cannot be nested") } @@ -406,7 +392,6 @@ export function load(data: Uint8Array, _opts?: ActorId | InitOptions): Doc handle.enablePatches(true) handle.enableFreeze(!!opts.freeze) handle.registerDatatype("counter", (n) => new Counter(n)) - handle.registerDatatype("text", (n) => new Text(n)) const doc: any = handle.materialize("/", undefined, {handle, heads: undefined, patchCallback}) as Doc return doc } @@ -434,7 +419,7 @@ export function loadIncremental(doc: Doc, data: Uint8Array, opts?: ApplyOp if (state.heads) { throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); } - if (_readonly(doc) === false) { + if (_is_proxy(doc)) { throw new RangeError("Calls to Automerge.change cannot be nested") } const heads = state.handle.getHeads() @@ -516,7 +501,7 @@ function conflictAt(context: Automerge, objectId: ObjID, prop: Prop): Conflicts result[fullVal[1]] = listProxy(context, fullVal[1], [prop], true) break; case "text": - result[fullVal[1]] = textProxy(context, fullVal[1], [prop], true) + result[fullVal[1]] = context.text(fullVal[1]) break; //case "table": //case "cursor": @@ -614,8 +599,17 @@ export function getLastLocalChange(doc: Doc): Change | undefined { * This is useful to determine if something is actually an automerge document, * if `doc` is not an automerge document this will return null. 
*/ -export function getObjectId(doc: any): ObjID | null { - return _obj(doc) +export function getObjectId(doc: any, prop?: Prop): ObjID | null { + if (prop) { + const state = _state(doc, false) + const objectId = _obj(doc) + if (!state || !objectId) { + return null + } + return state.handle.get(objectId, prop) as ObjID + } else { + return _obj(doc) + } } /** @@ -659,7 +653,7 @@ export function applyChanges(doc: Doc, changes: Change[], opts?: ApplyOpti if (state.heads) { throw new RangeError("Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy.") } - if (_readonly(doc) === false) { + if (_is_proxy(doc)) { throw new RangeError("Calls to Automerge.change cannot be nested") } const heads = state.handle.getHeads(); @@ -764,7 +758,7 @@ export function receiveSyncMessage(doc: Doc, inState: SyncState, message: if (state.heads) { throw new RangeError("Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy.") } - if (_readonly(doc) === false) { + if (_is_proxy(doc)) { throw new RangeError("Calls to Automerge.change cannot be nested") } const heads = state.handle.getHeads() @@ -813,6 +807,23 @@ export function getMissingDeps(doc: Doc, heads: Heads): Heads { return state.handle.getMissingDeps(heads) } +export function splice(doc: Doc, prop: Prop, index: number, del: number, newText?: string) { + if (!_is_proxy(doc)) { + throw new RangeError("object cannot be modified outside of a change block") + } + const state = _state(doc, false) + const objectId = _obj(doc) + if (!objectId) { + throw new RangeError("invalid object for splice") + } + const value = `${objectId}/${prop}` + try { + return state.handle.splice(value, index, del, newText) + } catch (e) { + throw new RangeError(`Cannot splice: ${e}`) + } +} + /** * Get the hashes of the heads of this document */ diff --git a/javascript/src/proxies.ts b/javascript/src/proxies.ts index cfbe4540..6c0035de 100644 --- 
a/javascript/src/proxies.ts +++ b/javascript/src/proxies.ts @@ -1,15 +1,13 @@ import { Automerge, Heads, ObjID } from "@automerge/automerge-wasm" import { Prop } from "@automerge/automerge-wasm" -import { AutomergeValue, ScalarValue, MapValue, ListValue, TextValue } from "./types" +import { AutomergeValue, ScalarValue, MapValue, ListValue } from "./types" import { Counter, getWriteableCounter } from "./counter" -import { Text } from "./text" -import { STATE, HEADS, TRACE, FROZEN, OBJECT_ID, READ_ONLY, COUNTER, INT, UINT, F64, TEXT } from "./constants" +import { STATE, TRACE, IS_PROXY, OBJECT_ID, COUNTER, INT, UINT, F64, TEXT } from "./constants" function parseListIndex(key) { if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) if (typeof key !== 'number') { - // throw new TypeError('A list index must be a number, but you passed ' + JSON.stringify(key)) return key } if (key < 0 || isNaN(key) || key === Infinity || key === -Infinity) { @@ -30,9 +28,7 @@ function valueAt(target, prop: Prop) : AutomergeValue | undefined { case undefined: return; case "map": return mapProxy(context, val, [ ... path, prop ], readonly, heads); case "list": return listProxy(context, val, [ ... path, prop ], readonly, heads); - case "text": return textProxy(context, val, [ ... 
path, prop ], readonly, heads); - //case "table": - //case "cursor": + case "text": return context.text(val, heads); case "str": return val; case "uint": return val; case "int": return val; @@ -66,8 +62,6 @@ function import_value(value) { return [ value.value, "f64" ] } else if (value[COUNTER]) { return [ value.value, "counter" ] - } else if (value[TEXT]) { - return [ value, "text" ] } else if (value instanceof Date) { return [ value.getTime(), "timestamp" ] } else if (value instanceof Uint8Array) { @@ -92,7 +86,7 @@ function import_value(value) { } break; case 'string': - return [ value ] + return [ value, "text" ] break; default: throw new RangeError(`Unsupported type of value: ${typeof value}`) @@ -104,11 +98,9 @@ const MapHandler = { const { context, objectId, readonly, frozen, heads, cache } = target if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (key === OBJECT_ID) return objectId - if (key === READ_ONLY) return readonly - if (key === FROZEN) return frozen - if (key === HEADS) return heads + if (key === IS_PROXY) return true if (key === TRACE) return target.trace - if (key === STATE) return context; + if (key === STATE) return { handle: context }; if (!cache[key]) { cache[key] = valueAt(target, key) } @@ -121,14 +113,6 @@ const MapHandler = { if (val && val[OBJECT_ID]) { throw new RangeError('Cannot create a reference to an existing document object') } - if (key === FROZEN) { - target.frozen = val - return true - } - if (key === HEADS) { - target.heads = val - return true - } if (key === TRACE) { target.trace = val return true @@ -150,11 +134,7 @@ const MapHandler = { break } case "text": { - const text = context.putObject(objectId, key, "", "text") - const proxyText = textProxy(context, text, [ ... 
path, key ], readonly ); - for (let i = 0; i < value.length; i++) { - proxyText[i] = value.get(i) - } + context.putObject(objectId, key, value, "text") break } case "map": { @@ -212,11 +192,9 @@ const ListHandler = { if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } } if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (index === OBJECT_ID) return objectId - if (index === READ_ONLY) return readonly - if (index === FROZEN) return frozen - if (index === HEADS) return heads + if (index === IS_PROXY) return true if (index === TRACE) return target.trace - if (index === STATE) return context; + if (index === STATE) return { handle: context }; if (index === 'length') return context.length(objectId, heads); if (typeof index === 'number') { return valueAt(target, index) @@ -231,14 +209,6 @@ const ListHandler = { if (val && val[OBJECT_ID]) { throw new RangeError('Cannot create a reference to an existing document object') } - if (index === FROZEN) { - target.frozen = val - return true - } - if (index === HEADS) { - target.heads = val - return true - } if (index === TRACE) { target.trace = val return true @@ -268,12 +238,10 @@ const ListHandler = { case "text": { let text if (index >= context.length(objectId)) { - text = context.insertObject(objectId, index, "", "text") + text = context.insertObject(objectId, index, value, "text") } else { - text = context.putObject(objectId, index, "", "text") + text = context.putObject(objectId, index, value, "text") } - const proxyText = textProxy(context, text, [ ... 
path, index ], readonly); - proxyText.splice(0,0,...value) break; } case "map": { @@ -342,31 +310,6 @@ const ListHandler = { } } -const TextHandler = Object.assign({}, ListHandler, { - get (target, index) { - // FIXME this is a one line change from ListHandler.get() - const {context, objectId, readonly, frozen, heads } = target - index = parseListIndex(index) - if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } - if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } } - if (index === OBJECT_ID) return objectId - if (index === READ_ONLY) return readonly - if (index === FROZEN) return frozen - if (index === HEADS) return heads - if (index === TRACE) return target.trace - if (index === STATE) return context; - if (index === 'length') return context.length(objectId, heads); - if (typeof index === 'number') { - return valueAt(target, index) - } else { - return textMethods(target)[index] || listMethods(target)[index] - } - }, - getPrototypeOf(/*target*/) { - return Object.getPrototypeOf(new Text()) - }, -}) - export function mapProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : MapValue { return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler) } @@ -377,12 +320,6 @@ export function listProxy(context: Automerge, objectId: ObjID, path?: Prop[], re return new Proxy(target, ListHandler) } -export function textProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : TextValue { - const target = [] - Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) - return new Proxy(target, TextHandler) -} - export function rootProxy(context: Automerge, readonly?: boolean) : T { /* eslint-disable-next-line */ return mapProxy(context, "_root", [], !!readonly) @@ -406,7 +343,11 @@ function listMethods(target) { start = parseListIndex(start || 0) 
end = parseListIndex(end || length) for (let i = start; i < Math.min(end, length); i++) { - context.put(objectId, i, value, datatype) + if (datatype === "text" || datatype === "list" || datatype === "map") { + context.putObject(objectId, i, value, datatype) + } else { + context.put(objectId, i, value, datatype) + } } return this }, @@ -482,9 +423,7 @@ function listMethods(target) { break; } case "text": { - const text = context.insertObject(objectId, index, "", "text") - const proxyText = textProxy(context, text, [ ... path, index ], readonly); - proxyText.splice(0,0,...value) + context.insertObject(objectId, index, value) break; } case "map": { diff --git a/javascript/src/text.ts b/javascript/src/text.ts deleted file mode 100644 index a6c51940..00000000 --- a/javascript/src/text.ts +++ /dev/null @@ -1,199 +0,0 @@ -import { Value } from "@automerge/automerge-wasm" -import { TEXT, STATE } from "./constants" - -export class Text { - elems: Value[] - str: string | undefined - spans: Value[] | undefined - - constructor (text?: string | string[] | Value[]) { - if (typeof text === 'string') { - this.elems = [...text] - } else if (Array.isArray(text)) { - this.elems = text - } else if (text === undefined) { - this.elems = [] - } else { - throw new TypeError(`Unsupported initial value for Text: ${text}`) - } - Reflect.defineProperty(this, TEXT, { value: true }) - } - - get length () : number { - return this.elems.length - } - - get (index: number) : Value | undefined { - return this.elems[index] - } - - /** - * Iterates over the text elements character by character, including any - * inline objects. - */ - [Symbol.iterator] () { - const elems = this.elems - let index = -1 - return { - next () { - index += 1 - if (index < elems.length) { - return {done: false, value: elems[index]} - } else { - return {done: true} - } - } - } - } - - /** - * Returns the content of the Text object as a simple string, ignoring any - * non-character elements. 
- */ - toString() : string { - if (!this.str) { - // Concatting to a string is faster than creating an array and then - // .join()ing for small (<100KB) arrays. - // https://jsperf.com/join-vs-loop-w-type-test - this.str = '' - for (const elem of this.elems) { - if (typeof elem === 'string') this.str += elem - else this.str += '\uFFFC' - } - } - return this.str - } - - /** - * Returns the content of the Text object as a sequence of strings, - * interleaved with non-character elements. - * - * For example, the value ['a', 'b', {x: 3}, 'c', 'd'] has spans: - * => ['ab', {x: 3}, 'cd'] - */ - toSpans() : Value[] { - if (!this.spans) { - this.spans = [] - let chars = '' - for (const elem of this.elems) { - if (typeof elem === 'string') { - chars += elem - } else { - if (chars.length > 0) { - this.spans.push(chars) - chars = '' - } - this.spans.push(elem) - } - } - if (chars.length > 0) { - this.spans.push(chars) - } - } - return this.spans - } - - /** - * Returns the content of the Text object as a simple string, so that the - * JSON serialization of an Automerge document represents text nicely. - */ - toJSON() : string { - return this.toString() - } - - /** - * Updates the list item at position `index` to a new value `value`. - */ - set (index: number, value: Value) { - if (this[STATE]) { - throw new RangeError("object cannot be modified outside of a change block") - } - this.elems[index] = value - } - - /** - * Inserts new list items `values` starting at position `index`. - */ - insertAt(index: number, ...values: Value[]) { - if (this[STATE]) { - throw new RangeError("object cannot be modified outside of a change block") - } - this.elems.splice(index, 0, ... values) - } - - /** - * Deletes `numDelete` list items starting at position `index`. - * if `numDelete` is not given, one item is deleted. 
- */ - deleteAt(index: number, numDelete = 1) { - if (this[STATE]) { - throw new RangeError("object cannot be modified outside of a change block") - } - this.elems.splice(index, numDelete) - } - - map(callback: (e: Value) => T) { - this.elems.map(callback) - } - - lastIndexOf(searchElement: Value, fromIndex?: number) { - this.elems.lastIndexOf(searchElement, fromIndex) - } - - concat(other: Text) : Text { - return new Text(this.elems.concat(other.elems)) - } - - every(test: (Value) => boolean) : boolean { - return this.elems.every(test) - } - - filter(test: (Value) => boolean) : Text { - return new Text(this.elems.filter(test)) - } - - find(test: (Value) => boolean) : Value | undefined { - return this.elems.find(test) - } - - findIndex(test: (Value) => boolean) : number | undefined { - return this.elems.findIndex(test) - } - - forEach(f: (Value) => undefined) { - this.elems.forEach(f) - } - - includes(elem: Value) : boolean { - return this.elems.includes(elem) - } - - indexOf(elem: Value) { - return this.elems.indexOf(elem) - } - - join(sep?: string) : string{ - return this.elems.join(sep) - } - - reduce(f: (previousValue: Value, currentValue: Value, currentIndex: number, array: Value[]) => Value) { - this.elems.reduce(f) - } - - reduceRight(f: (previousValue: Value, currentValue: Value, currentIndex: number, array: Value[]) => Value) { - this.elems.reduceRight(f) - } - - slice(start?: number, end?: number) { - new Text(this.elems.slice(start,end)) - } - - some(test: (Value) => boolean) : boolean { - return this.elems.some(test) - } - - toLocaleString() { - this.toString() - } -} - diff --git a/javascript/src/types.ts b/javascript/src/types.ts index 764d328c..add3f492 100644 --- a/javascript/src/types.ts +++ b/javascript/src/types.ts @@ -1,13 +1,10 @@ -import { Text } from "./text" -export { Text } from "./text" export { Counter } from "./counter" export { Int, Uint, Float64 } from "./numbers" import { Counter } from "./counter" -export type AutomergeValue = 
ScalarValue | { [key: string]: AutomergeValue } | Array | Text +export type AutomergeValue = ScalarValue | { [key: string]: AutomergeValue } | Array export type MapValue = { [key: string]: AutomergeValue } export type ListValue = Array -export type TextValue = Array export type ScalarValue = string | number | null | boolean | Date | Counter | Uint8Array diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts index 9245f161..437af233 100644 --- a/javascript/test/basic_test.ts +++ b/javascript/test/basic_test.ts @@ -1,6 +1,7 @@ import * as assert from 'assert' import {Counter} from 'automerge' import * as Automerge from '../src' +import * as WASM from "@automerge/automerge-wasm" describe('Automerge', () => { describe('basics', () => { @@ -43,7 +44,7 @@ describe('Automerge', () => { d.big = "little" d.zip = "zop" d.app = "dap" - assert.deepEqual(d, { hello: "world", big: "little", zip: "zop", app: "dap" }) + assert.deepEqual(d, { hello: "world", big: "little", zip: "zop", app: "dap" }) }) assert.deepEqual(doc2, { hello: "world", big: "little", zip: "zop", app: "dap" }) }) @@ -198,10 +199,9 @@ describe('Automerge', () => { }) it('handle text', () => { let doc1 = Automerge.init() - let tmp = new Automerge.Text("hello") let doc2 = Automerge.change(doc1, (d) => { - d.list = new Automerge.Text("hello") - d.list.insertAt(2,"Z") + d.list = "hello" + Automerge.splice(d, "list", 2, 0, "Z") }) let changes = Automerge.getChanges(doc1, doc2) let docB1 = Automerge.init() @@ -209,6 +209,15 @@ describe('Automerge', () => { assert.deepEqual(docB2, doc2); }) + it('handle non-text strings', () => { + let doc1 = WASM.create(); + doc1.put("_root", "text", "hello world"); + let doc2 = Automerge.load(doc1.save()) + assert.throws(() => { + Automerge.change(doc2, (d) => { Automerge.splice(d, "text", 1, 0, "Z") }) + }, /Cannot splice/) + }) + it('have many list methods', () => { let doc1 = Automerge.from({ list: [1,2,3] }) assert.deepEqual(doc1, { list: [1,2,3] }); @@ 
-240,9 +249,9 @@ describe('Automerge', () => { }) it('lists and text have indexof', () => { - let doc = Automerge.from({ list: [0,1,2,3,4,5,6], text: new Automerge.Text("hello world") }) - console.log(doc.list.indexOf(5)) - console.log(doc.text.indexOf("world")) + let doc = Automerge.from({ list: [0,1,2,3,4,5,6], text: "hello world" }) + assert.deepEqual(doc.list.indexOf(5), 5) + assert.deepEqual(doc.text.indexOf("world"), 6) }) }) @@ -329,7 +338,7 @@ describe('Automerge', () => { "date": new Date(), "counter": new Automerge.Counter(), "bytes": new Uint8Array(10), - "text": new Automerge.Text(), + "text": "", "list": [], "map": {} }) @@ -348,7 +357,7 @@ describe('Automerge', () => { }) it("should return non-null for map, list, text, and objects", () => { - assert.notEqual(Automerge.getObjectId(s1.text), null) + assert.equal(Automerge.getObjectId(s1.text), null) assert.notEqual(Automerge.getObjectId(s1.list), null) assert.notEqual(Automerge.getObjectId(s1.map), null) }) diff --git a/javascript/test/legacy_tests.ts b/javascript/test/legacy_tests.ts index 0d152a2d..2320f909 100644 --- a/javascript/test/legacy_tests.ts +++ b/javascript/test/legacy_tests.ts @@ -4,7 +4,7 @@ import { assertEqualsOneOf } from './helpers' import { decodeChange } from './legacy/columnar' const UUID_PATTERN = /^[0-9a-f]{32}$/ -const OPID_PATTERN = /^[0-9]+@[0-9a-f]{32}$/ +const OPID_PATTERN = /^[0-9]+@([0-9a-f][0-9a-f])*$/ // CORE FEATURES // @@ -75,7 +75,7 @@ describe('Automerge', () => { describe('sequential use', () => { let s1, s2 beforeEach(() => { - s1 = Automerge.init() + s1 = Automerge.init("aabbcc") }) it('should not mutate objects', () => { @@ -93,7 +93,11 @@ describe('Automerge', () => { assert.deepStrictEqual(change, { actor: change.actor, deps: [], seq: 1, startOp: 1, hash: change.hash, message: '', time: change.time, - ops: [{obj: '_root', key: 'foo', action: 'set', insert: false, value: 'bar', pred: []}] + ops: [ + {obj: '_root', key: 'foo', action: 'makeText', insert: false, 
pred: []}, + {action: 'set', elemId: '_head', insert: true, obj: '1@aabbcc', pred: [], value: 'b' }, + {action: 'set', elemId: '2@aabbcc', insert: true, obj: '1@aabbcc', pred: [], value: 'a' }, + {action: 'set', elemId: '3@aabbcc', insert: true, obj: '1@aabbcc', pred: [], value: 'r' }] }) }) @@ -287,11 +291,12 @@ describe('Automerge', () => { }, doc => { doc.birds = ['Goldfinch'] }) - assert.strictEqual(callbacks.length, 2) - assert.deepStrictEqual(callbacks[0].patch, { action: "put", path: ["birds"], value: [], conflict: false}) - assert.deepStrictEqual(callbacks[1].patch, { action: "splice", path: ["birds",0], values: ["Goldfinch"] }) + assert.strictEqual(callbacks.length, 1) + assert.deepStrictEqual(callbacks[0].patch[0], { action: "put", path: ["birds"], value: [] }) + assert.deepStrictEqual(callbacks[0].patch[1], { action: "insert", path: ["birds",0], values: [""] }) + assert.deepStrictEqual(callbacks[0].patch[2], { action: "splice", path: ["birds",0, 0], value: "Goldfinch" }) assert.strictEqual(callbacks[0].before, s1) - assert.strictEqual(callbacks[1].after, s2) + assert.strictEqual(callbacks[0].after, s2) }) it('should call a patchCallback set up on document initialisation', () => { @@ -302,8 +307,11 @@ describe('Automerge', () => { const s2 = Automerge.change(s1, doc => doc.bird = 'Goldfinch') const actor = Automerge.getActorId(s1) assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patch, { - action: "put", path: ["bird"], value: "Goldfinch", conflict: false + assert.deepStrictEqual(callbacks[0].patch[0], { + action: "put", path: ["bird"], value: "" + }) + assert.deepStrictEqual(callbacks[0].patch[1], { + action: "splice", path: ["bird", 0], value: "Goldfinch" }) assert.strictEqual(callbacks[0].before, s1) assert.strictEqual(callbacks[0].after, s2) @@ -868,20 +876,20 @@ describe('Automerge', () => { s1 = Automerge.change(s1, doc => doc.birds = ['finch']) s2 = Automerge.merge(s2, s1) s1 = Automerge.change(s1, doc => doc.birds[0] = 
'greenfinch') - s2 = Automerge.change(s2, doc => doc.birds[0] = 'goldfinch') + s2 = Automerge.change(s2, doc => doc.birds[0] = 'goldfinch_') s3 = Automerge.merge(s1, s2) if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { assert.deepStrictEqual(s3.birds, ['greenfinch']) } else { - assert.deepStrictEqual(s3.birds, ['goldfinch']) + assert.deepStrictEqual(s3.birds, ['goldfinch_']) } assert.deepStrictEqual(Automerge.getConflicts(s3.birds, 0), { - [`3@${Automerge.getActorId(s1)}`]: 'greenfinch', - [`3@${Automerge.getActorId(s2)}`]: 'goldfinch' + [`8@${Automerge.getActorId(s1)}`]: 'greenfinch', + [`8@${Automerge.getActorId(s2)}`]: 'goldfinch_' }) }) - it.skip('should handle assignment conflicts of different types', () => { + it('should handle assignment conflicts of different types', () => { s1 = Automerge.change(s1, doc => doc.field = 'string') s2 = Automerge.change(s2, doc => doc.field = ['list']) s3 = Automerge.change(s3, doc => doc.field = {thing: 'map'}) @@ -906,8 +914,7 @@ describe('Automerge', () => { }) }) - // FIXME - difficult bug here - patches arrive for conflicted subobject - it.skip('should handle changes within a conflicting list element', () => { + it('should handle changes within a conflicting list element', () => { s1 = Automerge.change(s1, doc => doc.list = ['hello']) s2 = Automerge.merge(s2, s1) s1 = Automerge.change(s1, doc => doc.list[0] = {map1: true}) @@ -921,8 +928,8 @@ describe('Automerge', () => { assert.deepStrictEqual(s3.list, [{map2: true, key: 2}]) } assert.deepStrictEqual(Automerge.getConflicts(s3.list, 0), { - [`3@${Automerge.getActorId(s1)}`]: {map1: true, key: 1}, - [`3@${Automerge.getActorId(s2)}`]: {map2: true, key: 2} + [`8@${Automerge.getActorId(s1)}`]: {map1: true, key: 1}, + [`8@${Automerge.getActorId(s2)}`]: {map2: true, key: 2} }) }) @@ -1154,7 +1161,8 @@ describe('Automerge', () => { hash: changes12[0].hash, actor: '01234567', seq: 1, startOp: 1, time: changes12[0].time, message: '', deps: [], ops: [ {obj: '_root', 
action: 'makeList', key: 'list', insert: false, pred: []}, - {obj: listId, action: 'set', elemId: '_head', insert: true, value: 'a', pred: []} + {obj: listId, action: 'makeText', elemId: '_head', insert: true, pred: []}, + {obj: "2@01234567", action: 'set', elemId: '_head', insert: true, value: 'a', pred: []} ] }]) const s3 = Automerge.change(s2, doc => doc.list.deleteAt(0)) @@ -1163,9 +1171,10 @@ describe('Automerge', () => { const changes45 = Automerge.getAllChanges(s5).map(decodeChange) assert.deepStrictEqual(s5, {list: ['b']}) assert.deepStrictEqual(changes45[2], { - hash: changes45[2].hash, actor: '01234567', seq: 3, startOp: 4, + hash: changes45[2].hash, actor: '01234567', seq: 3, startOp: 5, time: changes45[2].time, message: '', deps: [changes45[1].hash], ops: [ - {obj: listId, action: 'set', elemId: '_head', insert: true, value: 'b', pred: []} + {obj: listId, action: 'makeText', elemId: '_head', insert: true, pred: []}, + {obj: "5@01234567", action: 'set', elemId: '_head', insert: true, value: 'b', pred: []} ] }) }) @@ -1305,8 +1314,8 @@ describe('Automerge', () => { // TEXT it('should handle updates to a text object', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('ab')) - let s2 = Automerge.change(s1, doc => doc.text.set(0, 'A')) + let s1 = Automerge.change(Automerge.init(), doc => doc.text = 'ab') + let s2 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 1, "A")) let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2)) assert.deepStrictEqual([...s3.text], ['A', 'b']) }) @@ -1352,11 +1361,12 @@ describe('Automerge', () => { callbacks.push({patch, before, after}) } }) - assert.strictEqual(callbacks.length, 2) - assert.deepStrictEqual(callbacks[0].patch, { action: 'put', path: ["birds"], value: [], conflict: false }) - assert.deepStrictEqual(callbacks[1].patch, { action: 'splice', path: ["birds",0], values: ["Goldfinch"] }) + assert.strictEqual(callbacks.length, 1) + 
assert.deepStrictEqual(callbacks[0].patch[0], { action: 'put', path: ["birds"], value: [] }) + assert.deepStrictEqual(callbacks[0].patch[1], { action: 'insert', path: ["birds",0], values: [""] }) + assert.deepStrictEqual(callbacks[0].patch[2], { action: 'splice', path: ["birds",0,0], value: "Goldfinch" }) assert.strictEqual(callbacks[0].before, before) - assert.strictEqual(callbacks[1].after, after) + assert.strictEqual(callbacks[0].after, after) }) it('should merge multiple applied changes into one patch', () => { @@ -1364,23 +1374,24 @@ describe('Automerge', () => { const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch')) const patches = [], actor = Automerge.getActorId(s2) Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2), - {patchCallback: p => patches.push(p)}) + {patchCallback: p => patches.push(... p)}) assert.deepStrictEqual(patches, [ - { action: 'put', conflict: false, path: [ 'birds' ], value: [] }, - { action: "splice", path: [ "birds", 0 ], values: [ "Goldfinch", "Chaffinch" ] } + { action: 'put', path: [ 'birds' ], value: [] }, + { action: "insert", path: [ "birds", 0 ], values: [ "" ] }, + { action: "splice", path: [ "birds", 0, 0 ], value: "Goldfinch" }, + { action: "insert", path: [ "birds", 1 ], values: [ "" ] }, + { action: "splice", path: [ "birds", 1, 0 ], value: "Chaffinch" } ]) }) it('should call a patchCallback registered on doc initialisation', () => { const s1 = Automerge.change(Automerge.init(), doc => doc.bird = 'Goldfinch') const patches = [], actor = Automerge.getActorId(s1) - const before = Automerge.init({patchCallback: p => patches.push(p)}) + const before = Automerge.init({patchCallback: p => patches.push(... 
p)}) Automerge.applyChanges(before, Automerge.getAllChanges(s1)) - assert.deepStrictEqual(patches, [{ - action: "put", - conflict: false, - path: [ "bird" ], - value: "Goldfinch" } + assert.deepStrictEqual(patches, [ + { action: "put", path: [ "bird" ], value: "" }, + { action: "splice", path: [ "bird", 0 ], value: "Goldfinch" } ]) }) }) diff --git a/javascript/test/sync_test.ts b/javascript/test/sync_test.ts index 65482c67..56b4bd87 100644 --- a/javascript/test/sync_test.ts +++ b/javascript/test/sync_test.ts @@ -527,6 +527,7 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n2), [n1hash2, n2hash2].sort()) }) + // FIXME - this has a periodic failure it('should sync two nodes with connection reset', () => { s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) diff --git a/javascript/test/text_test.ts b/javascript/test/text_test.ts index 2ca37c19..59890470 100644 --- a/javascript/test/text_test.ts +++ b/javascript/test/text_test.ts @@ -197,502 +197,101 @@ function applyDeltaDocToAutomergeText(delta, doc) { describe('Automerge.Text', () => { let s1, s2 beforeEach(() => { - s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text()) + s1 = Automerge.change(Automerge.init(), doc => doc.text = "") s2 = Automerge.merge(Automerge.init(), s1) }) it('should support insertion', () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a')) + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "a")) assert.strictEqual(s1.text.length, 1) - assert.strictEqual(s1.text.get(0), 'a') - assert.strictEqual(s1.text.toString(), 'a') + assert.strictEqual(s1.text[0], 'a') + assert.strictEqual(s1.text, 'a') //assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`) }) it('should support deletion', () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c')) - s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 1)) + s1 = Automerge.change(s1, doc => 
Automerge.splice(doc, "text", 0, 0, "abc")) + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 1, 1)) assert.strictEqual(s1.text.length, 2) - assert.strictEqual(s1.text.get(0), 'a') - assert.strictEqual(s1.text.get(1), 'c') - assert.strictEqual(s1.text.toString(), 'ac') + assert.strictEqual(s1.text[0], 'a') + assert.strictEqual(s1.text[1], 'c') + assert.strictEqual(s1.text, 'ac') }) it("should support implicit and explicit deletion", () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c")) - s1 = Automerge.change(s1, doc => doc.text.deleteAt(1)) - s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 0)) + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "abc")) + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 1, 1)) + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 1, 0)) assert.strictEqual(s1.text.length, 2) - assert.strictEqual(s1.text.get(0), "a") - assert.strictEqual(s1.text.get(1), "c") - assert.strictEqual(s1.text.toString(), "ac") + assert.strictEqual(s1.text[0], "a") + assert.strictEqual(s1.text[1], "c") + assert.strictEqual(s1.text, "ac") }) it('should handle concurrent insertion', () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c')) - s2 = Automerge.change(s2, doc => doc.text.insertAt(0, 'x', 'y', 'z')) + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "abc")) + s2 = Automerge.change(s2, doc => Automerge.splice(doc, "text", 0, 0, "xyz")) s1 = Automerge.merge(s1, s2) assert.strictEqual(s1.text.length, 6) - assertEqualsOneOf(s1.text.toString(), 'abcxyz', 'xyzabc') - assertEqualsOneOf(s1.text.join(''), 'abcxyz', 'xyzabc') + assertEqualsOneOf(s1.text, 'abcxyz', 'xyzabc') }) it('should handle text and other ops in the same change', () => { s1 = Automerge.change(s1, doc => { doc.foo = 'bar' - doc.text.insertAt(0, 'a') + Automerge.splice(doc, "text", 0, 0, 'a') }) assert.strictEqual(s1.foo, 'bar') - 
assert.strictEqual(s1.text.toString(), 'a') - assert.strictEqual(s1.text.join(''), 'a') + assert.strictEqual(s1.text, 'a') + assert.strictEqual(s1.text, 'a') }) it('should serialize to JSON as a simple string', () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', '"', 'b')) + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, 'a"b')) assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}') }) - it('should allow modification before an object is assigned to a document', () => { - s1 = Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text() - text.insertAt(0, 'a', 'b', 'c', 'd') - text.deleteAt(2) - doc.text = text - assert.strictEqual(doc.text.toString(), 'abd') - assert.strictEqual(doc.text.join(''), 'abd') - }) - assert.strictEqual(s1.text.toString(), 'abd') - assert.strictEqual(s1.text.join(''), 'abd') - }) - it('should allow modification after an object is assigned to a document', () => { s1 = Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text() - doc.text = text - doc.text.insertAt(0, 'a', 'b', 'c', 'd') - doc.text.deleteAt(2) - assert.strictEqual(doc.text.toString(), 'abd') - assert.strictEqual(doc.text.join(''), 'abd') + doc.text = "" + Automerge.splice(doc ,"text", 0, 0, 'abcd') + Automerge.splice(doc ,"text", 2, 1) + assert.strictEqual(doc.text, 'abd') }) - assert.strictEqual(s1.text.join(''), 'abd') + assert.strictEqual(s1.text, 'abd') }) it('should not allow modification outside of a change callback', () => { - assert.throws(() => s1.text.insertAt(0, 'a'), /object cannot be modified outside of a change block/) + assert.throws(() => Automerge.splice(s1 ,"text", 0, 0, 'a'), /object cannot be modified outside of a change block/) }) describe('with initial value', () => { - it('should accept a string as initial value', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('init')) - assert.strictEqual(s1.text.length, 4) - 
assert.strictEqual(s1.text.get(0), 'i') - assert.strictEqual(s1.text.get(1), 'n') - assert.strictEqual(s1.text.get(2), 'i') - assert.strictEqual(s1.text.get(3), 't') - assert.strictEqual(s1.text.toString(), 'init') - }) - - it('should accept an array as initial value', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text(['i', 'n', 'i', 't'])) - assert.strictEqual(s1.text.length, 4) - assert.strictEqual(s1.text.get(0), 'i') - assert.strictEqual(s1.text.get(1), 'n') - assert.strictEqual(s1.text.get(2), 'i') - assert.strictEqual(s1.text.get(3), 't') - assert.strictEqual(s1.text.toString(), 'init') - }) it('should initialize text in Automerge.from()', () => { - let s1 = Automerge.from({text: new Automerge.Text('init')}) + let s1 = Automerge.from({text: 'init'}) assert.strictEqual(s1.text.length, 4) - assert.strictEqual(s1.text.get(0), 'i') - assert.strictEqual(s1.text.get(1), 'n') - assert.strictEqual(s1.text.get(2), 'i') - assert.strictEqual(s1.text.get(3), 't') - assert.strictEqual(s1.text.toString(), 'init') + assert.strictEqual(s1.text[0], 'i') + assert.strictEqual(s1.text[1], 'n') + assert.strictEqual(s1.text[2], 'i') + assert.strictEqual(s1.text[3], 't') + assert.strictEqual(s1.text, 'init') }) it('should encode the initial value as a change', () => { - const s1 = Automerge.from({text: new Automerge.Text('init')}) + const s1 = Automerge.from({text: 'init'}) const changes = Automerge.getAllChanges(s1) assert.strictEqual(changes.length, 1) const [s2] = Automerge.applyChanges(Automerge.init(), changes) - assert.strictEqual(s2.text instanceof Automerge.Text, true) - assert.strictEqual(s2.text.toString(), 'init') - assert.strictEqual(s2.text.join(''), 'init') + assert.strictEqual(s2.text, 'init') + assert.strictEqual(s2.text, 'init') }) - it('should allow immediate access to the value', () => { - Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text('init') - assert.strictEqual(text.length, 4) - 
assert.strictEqual(text.get(0), 'i') - assert.strictEqual(text.toString(), 'init') - doc.text = text - assert.strictEqual(doc.text.length, 4) - assert.strictEqual(doc.text.get(0), 'i') - assert.strictEqual(doc.text.toString(), 'init') - }) - }) - - it('should allow pre-assignment modification of the initial value', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text('init') - text.deleteAt(3) - assert.strictEqual(text.join(''), 'ini') - doc.text = text - assert.strictEqual(doc.text.join(''), 'ini') - assert.strictEqual(doc.text.toString(), 'ini') - }) - assert.strictEqual(s1.text.toString(), 'ini') - assert.strictEqual(s1.text.join(''), 'ini') - }) - - it('should allow post-assignment modification of the initial value', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text('init') - doc.text = text - doc.text.deleteAt(0) - doc.text.insertAt(0, 'I') - assert.strictEqual(doc.text.join(''), 'Init') - assert.strictEqual(doc.text.toString(), 'Init') - }) - assert.strictEqual(s1.text.join(''), 'Init') - assert.strictEqual(s1.text.toString(), 'Init') - }) - }) - - describe('non-textual control characters', () => { - let s1 - beforeEach(() => { - s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text() - doc.text.insertAt(0, 'a') - doc.text.insertAt(1, { attribute: 'bold' }) - }) - }) - - it('should allow fetching non-textual characters', () => { - assert.deepEqual(s1.text.get(1), { attribute: 'bold' }) - //assert.strictEqual(s1.text.getElemId(1), `3@${Automerge.getActorId(s1)}`) - }) - - it('should include control characters in string length', () => { - assert.strictEqual(s1.text.length, 2) - assert.strictEqual(s1.text.get(0), 'a') - }) - - it('should replace control characters from toString()', () => { - assert.strictEqual(s1.text.toString(), 'a\uFFFC') - }) - - it('should allow control characters to be updated', () => { - const s2 = Automerge.change(s1, doc 
=> doc.text.get(1).attribute = 'italic') - const s3 = Automerge.load(Automerge.save(s2)) - assert.strictEqual(s1.text.get(1).attribute, 'bold') - assert.strictEqual(s2.text.get(1).attribute, 'italic') - assert.strictEqual(s3.text.get(1).attribute, 'italic') - }) - - describe('spans interface to Text', () => { - it('should return a simple string as a single span', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('hello world') - }) - assert.deepEqual(s1.text.toSpans(), ['hello world']) - }) - it('should return an empty string as an empty array', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text() - }) - assert.deepEqual(s1.text.toSpans(), []) - }) - it('should split a span at a control character', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('hello world') - doc.text.insertAt(5, { attributes: { bold: true } }) - }) - assert.deepEqual(s1.text.toSpans(), - ['hello', { attributes: { bold: true } }, ' world']) - }) - it('should allow consecutive control characters', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('hello world') - doc.text.insertAt(5, { attributes: { bold: true } }) - doc.text.insertAt(6, { attributes: { italic: true } }) - }) - assert.deepEqual(s1.text.toSpans(), - ['hello', - { attributes: { bold: true } }, - { attributes: { italic: true } }, - ' world' - ]) - }) - it('should allow non-consecutive control characters', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('hello world') - doc.text.insertAt(5, { attributes: { bold: true } }) - doc.text.insertAt(12, { attributes: { italic: true } }) - }) - assert.deepEqual(s1.text.toSpans(), - ['hello', - { attributes: { bold: true } }, - ' world', - { attributes: { italic: true } } - ]) - }) - - it('should be convertable into a Quill delta', () => { - let s1 = 
Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Gandalf the Grey') - doc.text.insertAt(0, { attributes: { bold: true } }) - doc.text.insertAt(7 + 1, { attributes: { bold: null } }) - doc.text.insertAt(12 + 2, { attributes: { color: '#cccccc' } }) - }) - - let deltaDoc = automergeTextToDeltaDoc(s1.text) - - // From https://quilljs.com/docs/delta/ - let expectedDoc = [ - { insert: 'Gandalf', attributes: { bold: true } }, - { insert: ' the ' }, - { insert: 'Grey', attributes: { color: '#cccccc' } } - ] - - assert.deepEqual(deltaDoc, expectedDoc) - }) - - it('should support embeds', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('') - doc.text.insertAt(0, { attributes: { link: 'https://quilljs.com' } }) - doc.text.insertAt(1, { - image: 'https://quilljs.com/assets/images/icon.png' - }) - doc.text.insertAt(2, { attributes: { link: null } }) - }) - - let deltaDoc = automergeTextToDeltaDoc(s1.text) - - // From https://quilljs.com/docs/delta/ - let expectedDoc = [{ - // An image link - insert: { - image: 'https://quilljs.com/assets/images/icon.png' - }, - attributes: { - link: 'https://quilljs.com' - } - }] - - assert.deepEqual(deltaDoc, expectedDoc) - }) - - it('should handle concurrent overlapping spans', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Gandalf the Grey') - }) - - let s2 = Automerge.merge(Automerge.init(), s1) - - let s3 = Automerge.change(s1, doc => { - doc.text.insertAt(8, { attributes: { bold: true } }) - doc.text.insertAt(16 + 1, { attributes: { bold: null } }) - }) - - let s4 = Automerge.change(s2, doc => { - doc.text.insertAt(0, { attributes: { bold: true } }) - doc.text.insertAt(11 + 1, { attributes: { bold: null } }) - }) - - let merged = Automerge.merge(s3, s4) - - let deltaDoc = automergeTextToDeltaDoc(merged.text) - - // From https://quilljs.com/docs/delta/ - let expectedDoc = [ - { insert: 'Gandalf the Grey', attributes: 
{ bold: true } }, - ] - - assert.deepEqual(deltaDoc, expectedDoc) - }) - - it('should handle debolding spans', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Gandalf the Grey') - }) - - let s2 = Automerge.merge(Automerge.init(), s1) - - let s3 = Automerge.change(s1, doc => { - doc.text.insertAt(0, { attributes: { bold: true } }) - doc.text.insertAt(16 + 1, { attributes: { bold: null } }) - }) - - let s4 = Automerge.change(s2, doc => { - doc.text.insertAt(8, { attributes: { bold: null } }) - doc.text.insertAt(11 + 1, { attributes: { bold: true } }) - }) - - - let merged = Automerge.merge(s3, s4) - - let deltaDoc = automergeTextToDeltaDoc(merged.text) - - // From https://quilljs.com/docs/delta/ - let expectedDoc = [ - { insert: 'Gandalf ', attributes: { bold: true } }, - { insert: 'the' }, - { insert: ' Grey', attributes: { bold: true } }, - ] - - assert.deepEqual(deltaDoc, expectedDoc) - }) - - // xxx: how would this work for colors? - it('should handle destyling across destyled spans', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Gandalf the Grey') - }) - - let s2 = Automerge.merge(Automerge.init(), s1) - - let s3 = Automerge.change(s1, doc => { - doc.text.insertAt(0, { attributes: { bold: true } }) - doc.text.insertAt(16 + 1, { attributes: { bold: null } }) - }) - - let s4 = Automerge.change(s2, doc => { - doc.text.insertAt(8, { attributes: { bold: null } }) - doc.text.insertAt(11 + 1, { attributes: { bold: true } }) - }) - - let merged = Automerge.merge(s3, s4) - - let final = Automerge.change(merged, doc => { - doc.text.insertAt(3 + 1, { attributes: { bold: null } }) - doc.text.insertAt(doc.text.length, { attributes: { bold: true } }) - }) - - let deltaDoc = automergeTextToDeltaDoc(final.text) - - // From https://quilljs.com/docs/delta/ - let expectedDoc = [ - { insert: 'Gan', attributes: { bold: true } }, - { insert: 'dalf the Grey' }, - ] - - 
assert.deepEqual(deltaDoc, expectedDoc) - }) - - it('should apply an insert', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Hello world') - }) - - const delta = [ - { retain: 6 }, - { insert: 'reader' }, - { delete: 5 } - ] - - let s2 = Automerge.change(s1, doc => { - applyDeltaDocToAutomergeText(delta, doc) - }) - - //assert.strictEqual(s2.text.join(''), 'Hello reader') - assert.strictEqual(s2.text.toString(), 'Hello reader') - }) - - it('should apply an insert with control characters', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Hello world') - }) - - const delta = [ - { retain: 6 }, - { insert: 'reader', attributes: { bold: true } }, - { delete: 5 }, - { insert: '!' } - ] - - let s2 = Automerge.change(s1, doc => { - applyDeltaDocToAutomergeText(delta, doc) - }) - - assert.strictEqual(s2.text.toString(), 'Hello \uFFFCreader\uFFFC!') - assert.deepEqual(s2.text.toSpans(), [ - "Hello ", - { attributes: { bold: true } }, - "reader", - { attributes: { bold: null } }, - "!" - ]) - }) - - it('should account for control characters in retain/delete lengths', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Hello world') - doc.text.insertAt(4, { attributes: { color: '#ccc' } }) - doc.text.insertAt(10, { attributes: { color: '#f00' } }) - }) - - const delta = [ - { retain: 6 }, - { insert: 'reader', attributes: { bold: true } }, - { delete: 5 }, - { insert: '!' } - ] - - let s2 = Automerge.change(s1, doc => { - applyDeltaDocToAutomergeText(delta, doc) - }) - - assert.strictEqual(s2.text.toString(), 'Hell\uFFFCo \uFFFCreader\uFFFC\uFFFC!') - assert.deepEqual(s2.text.toSpans(), [ - "Hell", - { attributes: { color: '#ccc'} }, - "o ", - { attributes: { bold: true } }, - "reader", - { attributes: { bold: null } }, - { attributes: { color: '#f00'} }, - "!" 
- ]) - }) - - it('should support embeds', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('') - }) - - let deltaDoc = [{ - // An image link - insert: { - image: 'https://quilljs.com/assets/images/icon.png' - }, - attributes: { - link: 'https://quilljs.com' - } - }] - - let s2 = Automerge.change(s1, doc => { - applyDeltaDocToAutomergeText(deltaDoc, doc) - }) - - assert.deepEqual(s2.text.toSpans(), [ - { attributes: { link: 'https://quilljs.com' } }, - { image: 'https://quilljs.com/assets/images/icon.png'}, - { attributes: { link: null } }, - ]) - }) - }) }) it('should support unicode when creating text', () => { s1 = Automerge.from({ - text: new Automerge.Text('🐦') + text: '🐦' }) - assert.strictEqual(s1.text.get(0), '🐦') + assert.strictEqual(s1.text, '🐦') }) }) diff --git a/rust/automerge-c/.gitignore b/rust/automerge-c/.gitignore index cb544af0..f04de582 100644 --- a/rust/automerge-c/.gitignore +++ b/rust/automerge-c/.gitignore @@ -1,3 +1,10 @@ automerge automerge.h automerge.o +*.cmake +CMakeFiles +Makefile +DartConfiguration.tcl +config.h +CMakeCache.txt +Cargo diff --git a/rust/automerge-c/src/doc.rs b/rust/automerge-c/src/doc.rs index 2854a0e5..58625798 100644 --- a/rust/automerge-c/src/doc.rs +++ b/rust/automerge-c/src/doc.rs @@ -622,8 +622,8 @@ pub unsafe extern "C" fn AMobjObjType(doc: *const AMdoc, obj_id: *const AMobjId) if let Some(doc) = doc.as_ref() { let obj_id = to_obj_id!(obj_id); match doc.object_type(obj_id) { - None => AMobjType::Void, - Some(obj_type) => obj_type.into(), + Err(_) => AMobjType::Void, + Ok(obj_type) => obj_type.into(), } } else { AMobjType::Void diff --git a/rust/automerge-c/test/doc_tests.c b/rust/automerge-c/test/doc_tests.c index dbd2d8f6..217a4862 100644 --- a/rust/automerge-c/test/doc_tests.c +++ b/rust/automerge-c/test/doc_tests.c @@ -60,11 +60,16 @@ static void test_AMkeys_empty() { static void test_AMkeys_list() { AMresultStack* stack = NULL; AMdoc* const doc = AMpush(&stack, 
AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMlistPutInt(doc, AM_ROOT, 0, true, 1)); - AMfree(AMlistPutInt(doc, AM_ROOT, 1, true, 2)); - AMfree(AMlistPutInt(doc, AM_ROOT, 2, true, 3)); + AMobjId const* const list = AMpush( + &stack, + AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + AMfree(AMlistPutInt(doc, list, 0, true, 0)); + AMfree(AMlistPutInt(doc, list, 1, true, 0)); + AMfree(AMlistPutInt(doc, list, 2, true, 0)); AMstrs forward = AMpush(&stack, - AMkeys(doc, AM_ROOT, NULL), + AMkeys(doc, list, NULL), AM_VALUE_STRS, cmocka_cb).strs; assert_int_equal(AMstrsSize(&forward), 3); @@ -72,35 +77,35 @@ static void test_AMkeys_list() { assert_int_equal(AMstrsSize(&reverse), 3); /* Forward iterator forward. */ AMbyteSpan str = AMstrsNext(&forward, 1); - assert_ptr_equal(strstr(str.src, "1@"), str.src); - str = AMstrsNext(&forward, 1); assert_ptr_equal(strstr(str.src, "2@"), str.src); str = AMstrsNext(&forward, 1); assert_ptr_equal(strstr(str.src, "3@"), str.src); + str = AMstrsNext(&forward, 1); + assert_ptr_equal(strstr(str.src, "4@"), str.src); assert_null(AMstrsNext(&forward, 1).src); - /* Forward iterator reverse. */ + // /* Forward iterator reverse. */ + str = AMstrsPrev(&forward, 1); + assert_ptr_equal(strstr(str.src, "4@"), str.src); str = AMstrsPrev(&forward, 1); assert_ptr_equal(strstr(str.src, "3@"), str.src); str = AMstrsPrev(&forward, 1); assert_ptr_equal(strstr(str.src, "2@"), str.src); - str = AMstrsPrev(&forward, 1); - assert_ptr_equal(strstr(str.src, "1@"), str.src); assert_null(AMstrsPrev(&forward, 1).src); /* Reverse iterator forward. 
*/ str = AMstrsNext(&reverse, 1); + assert_ptr_equal(strstr(str.src, "4@"), str.src); + str = AMstrsNext(&reverse, 1); assert_ptr_equal(strstr(str.src, "3@"), str.src); str = AMstrsNext(&reverse, 1); assert_ptr_equal(strstr(str.src, "2@"), str.src); - str = AMstrsNext(&reverse, 1); - assert_ptr_equal(strstr(str.src, "1@"), str.src); - /* Reverse iterator reverse. */ assert_null(AMstrsNext(&reverse, 1).src); - str = AMstrsPrev(&reverse, 1); - assert_ptr_equal(strstr(str.src, "1@"), str.src); + /* Reverse iterator reverse. */ str = AMstrsPrev(&reverse, 1); assert_ptr_equal(strstr(str.src, "2@"), str.src); str = AMstrsPrev(&reverse, 1); assert_ptr_equal(strstr(str.src, "3@"), str.src); + str = AMstrsPrev(&reverse, 1); + assert_ptr_equal(strstr(str.src, "4@"), str.src); assert_null(AMstrsPrev(&reverse, 1).src); AMfreeStack(&stack); } @@ -202,16 +207,20 @@ static void test_AMputActor_str(void **state) { static void test_AMspliceText() { AMresultStack* stack = NULL; AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMspliceText(doc, AM_ROOT, 0, 0, AMstr("one + "))); - AMfree(AMspliceText(doc, AM_ROOT, 4, 2, AMstr("two = "))); - AMfree(AMspliceText(doc, AM_ROOT, 8, 2, AMstr("three"))); - AMbyteSpan const text = AMpush(&stack, - AMtext(doc, AM_ROOT, NULL), - AM_VALUE_STR, - cmocka_cb).str; - static char const* const TEXT_VALUE = "one two three"; - assert_int_equal(text.count, strlen(TEXT_VALUE)); - assert_memory_equal(text.src, TEXT_VALUE, text.count); + AMobjId const* const text = AMpush(&stack, + AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + AMfree(AMspliceText(doc, text, 0, 0, AMstr("one + "))); + AMfree(AMspliceText(doc, text, 4, 2, AMstr("two = "))); + AMfree(AMspliceText(doc, text, 8, 2, AMstr("three"))); + AMbyteSpan const str = AMpush(&stack, + AMtext(doc, text, NULL), + AM_VALUE_STR, + cmocka_cb).str; + static char const* const STR_VALUE = "one two three"; + 
assert_int_equal(str.count, strlen(STR_VALUE)); + assert_memory_equal(str.src, STR_VALUE, str.count); AMfreeStack(&stack); } diff --git a/rust/automerge-c/test/list_tests.c b/rust/automerge-c/test/list_tests.c index b742cbe4..f9bbb340 100644 --- a/rust/automerge-c/test/list_tests.c +++ b/rust/automerge-c/test/list_tests.c @@ -18,15 +18,20 @@ static void test_AMlistIncrement(void** state) { GroupState* group_state = *state; - AMfree(AMlistPutCounter(group_state->doc, AM_ROOT, 0, true, 0)); + AMobjId const* const list = AMpush( + &group_state->stack, + AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), + AM_VALUE_OBJ_ID, + cmocka_cb).obj_id; + AMfree(AMlistPutCounter(group_state->doc, list, 0, true, 0)); assert_int_equal(AMpush(&group_state->stack, - AMlistGet(group_state->doc, AM_ROOT, 0, NULL), + AMlistGet(group_state->doc, list, 0, NULL), AM_VALUE_COUNTER, cmocka_cb).counter, 0); AMfree(AMpop(&group_state->stack)); - AMfree(AMlistIncrement(group_state->doc, AM_ROOT, 0, 3)); + AMfree(AMlistIncrement(group_state->doc, list, 0, 3)); assert_int_equal(AMpush(&group_state->stack, - AMlistGet(group_state->doc, AM_ROOT, 0, NULL), + AMlistGet(group_state->doc, list, 0, NULL), AM_VALUE_COUNTER, cmocka_cb).counter, 3); AMfree(AMpop(&group_state->stack)); @@ -34,119 +39,140 @@ static void test_AMlistIncrement(void** state) { #define test_AMlistPut(suffix, mode) test_AMlistPut ## suffix ## _ ## mode -#define static_void_test_AMlistPut(suffix, mode, member, scalar_value) \ -static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMfree(AMlistPut ## suffix(group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - scalar_value)); \ - assert_true(AMpush( \ - &group_state->stack, \ - AMlistGet(group_state->doc, AM_ROOT, 0, NULL), \ - AMvalue_discriminant(#suffix), \ - cmocka_cb).member == scalar_value); \ - AMfree(AMpop(&group_state->stack)); \ +#define static_void_test_AMlistPut(suffix, 
mode, member, scalar_value) \ +static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ + GroupState* group_state = *state; \ + AMobjId const* const list = AMpush( \ + &group_state->stack, \ + AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\ + AM_VALUE_OBJ_ID, \ + cmocka_cb).obj_id; \ + AMfree(AMlistPut ## suffix(group_state->doc, \ + list, \ + 0, \ + !strcmp(#mode, "insert"), \ + scalar_value)); \ + assert_true(AMpush( \ + &group_state->stack, \ + AMlistGet(group_state->doc, list, 0, NULL), \ + AMvalue_discriminant(#suffix), \ + cmocka_cb).member == scalar_value); \ + AMfree(AMpop(&group_state->stack)); \ } #define test_AMlistPutBytes(mode) test_AMlistPutBytes ## _ ## mode -#define static_void_test_AMlistPutBytes(mode, bytes_value) \ -static void test_AMlistPutBytes_ ## mode(void **state) { \ - static size_t const BYTES_SIZE = sizeof(bytes_value) / sizeof(uint8_t); \ - \ - GroupState* group_state = *state; \ - AMfree(AMlistPutBytes(group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - AMbytes(bytes_value, BYTES_SIZE))); \ - AMbyteSpan const bytes = AMpush( \ - &group_state->stack, \ - AMlistGet(group_state->doc, AM_ROOT, 0, NULL), \ - AM_VALUE_BYTES, \ - cmocka_cb).bytes; \ - assert_int_equal(bytes.count, BYTES_SIZE); \ - assert_memory_equal(bytes.src, bytes_value, BYTES_SIZE); \ - AMfree(AMpop(&group_state->stack)); \ +#define static_void_test_AMlistPutBytes(mode, bytes_value) \ +static void test_AMlistPutBytes_ ## mode(void **state) { \ + static size_t const BYTES_SIZE = sizeof(bytes_value) / sizeof(uint8_t); \ + \ + GroupState* group_state = *state; \ + AMobjId const* const list = AMpush( \ + &group_state->stack, \ + AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\ + AM_VALUE_OBJ_ID, \ + cmocka_cb).obj_id; \ + AMfree(AMlistPutBytes(group_state->doc, \ + list, \ + 0, \ + !strcmp(#mode, "insert"), \ + AMbytes(bytes_value, BYTES_SIZE))); \ + AMbyteSpan const bytes = AMpush( \ + 
&group_state->stack, \ + AMlistGet(group_state->doc, list, 0, NULL), \ + AM_VALUE_BYTES, \ + cmocka_cb).bytes; \ + assert_int_equal(bytes.count, BYTES_SIZE); \ + assert_memory_equal(bytes.src, bytes_value, BYTES_SIZE); \ + AMfree(AMpop(&group_state->stack)); \ } #define test_AMlistPutNull(mode) test_AMlistPutNull_ ## mode -#define static_void_test_AMlistPutNull(mode) \ -static void test_AMlistPutNull_ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMfree(AMlistPutNull(group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"))); \ - AMresult* const result = AMlistGet(group_state->doc, AM_ROOT, 0, NULL); \ - if (AMresultStatus(result) != AM_STATUS_OK) { \ +#define static_void_test_AMlistPutNull(mode) \ +static void test_AMlistPutNull_ ## mode(void **state) { \ + GroupState* group_state = *state; \ + AMobjId const* const list = AMpush( \ + &group_state->stack, \ + AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\ + AM_VALUE_OBJ_ID, \ + cmocka_cb).obj_id; \ + AMfree(AMlistPutNull(group_state->doc, \ + list, \ + 0, \ + !strcmp(#mode, "insert"))); \ + AMresult* const result = AMlistGet(group_state->doc, list, 0, NULL); \ + if (AMresultStatus(result) != AM_STATUS_OK) { \ fail_msg_view("%s", AMerrorMessage(result)); \ - } \ - assert_int_equal(AMresultSize(result), 1); \ - assert_int_equal(AMresultValue(result).tag, AM_VALUE_NULL); \ - AMfree(result); \ + } \ + assert_int_equal(AMresultSize(result), 1); \ + assert_int_equal(AMresultValue(result).tag, AM_VALUE_NULL); \ + AMfree(result); \ } #define test_AMlistPutObject(label, mode) test_AMlistPutObject_ ## label ## _ ## mode -#define static_void_test_AMlistPutObject(label, mode) \ -static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMobjType const obj_type = AMobjType_tag(#label); \ - if (obj_type != AM_OBJ_TYPE_VOID) { \ - AMobjId const* const obj_id = AMpush( \ - &group_state->stack, \ - 
AMlistPutObject(group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - obj_type), \ - AM_VALUE_OBJ_ID, \ - cmocka_cb).obj_id; \ - assert_non_null(obj_id); \ - assert_int_equal(AMobjObjType(group_state->doc, obj_id), obj_type); \ - assert_int_equal(AMobjSize(group_state->doc, obj_id, NULL), 0); \ - } \ - else { \ - AMpush(&group_state->stack, \ - AMlistPutObject(group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - obj_type), \ - AM_VALUE_VOID, \ - NULL); \ - assert_int_not_equal(AMresultStatus(group_state->stack->result), \ - AM_STATUS_OK); \ - } \ - AMfree(AMpop(&group_state->stack)); \ +#define static_void_test_AMlistPutObject(label, mode) \ +static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ + GroupState* group_state = *state; \ + AMobjId const* const list = AMpush( \ + &group_state->stack, \ + AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\ + AM_VALUE_OBJ_ID, \ + cmocka_cb).obj_id; \ + AMobjType const obj_type = AMobjType_tag(#label); \ + if (obj_type != AM_OBJ_TYPE_VOID) { \ + AMobjId const* const obj_id = AMpush( \ + &group_state->stack, \ + AMlistPutObject(group_state->doc, \ + list, \ + 0, \ + !strcmp(#mode, "insert"), \ + obj_type), \ + AM_VALUE_OBJ_ID, \ + cmocka_cb).obj_id; \ + assert_non_null(obj_id); \ + assert_int_equal(AMobjObjType(group_state->doc, obj_id), obj_type); \ + assert_int_equal(AMobjSize(group_state->doc, obj_id, NULL), 0); \ + } \ + else { \ + AMpush(&group_state->stack, \ + AMlistPutObject(group_state->doc, \ + list, \ + 0, \ + !strcmp(#mode, "insert"), \ + obj_type), \ + AM_VALUE_VOID, \ + NULL); \ + assert_int_not_equal(AMresultStatus(group_state->stack->result), \ + AM_STATUS_OK); \ + } \ + AMfree(AMpop(&group_state->stack)); \ } #define test_AMlistPutStr(mode) test_AMlistPutStr ## _ ## mode -#define static_void_test_AMlistPutStr(mode, str_value) \ -static void test_AMlistPutStr_ ## mode(void **state) { \ - GroupState* group_state = *state; \ - 
AMfree(AMlistPutStr(group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - AMstr(str_value))); \ - AMbyteSpan const str = AMpush( \ - &group_state->stack, \ - AMlistGet(group_state->doc, AM_ROOT, 0, NULL), \ - AM_VALUE_STR, \ - cmocka_cb).str; \ - char* const c_str = test_calloc(1, str.count + 1); \ - strncpy(c_str, str.src, str.count); \ - print_message("str -> \"%s\"\n", c_str); \ - test_free(c_str); \ - assert_int_equal(str.count, strlen(str_value)); \ - assert_memory_equal(str.src, str_value, str.count); \ - AMfree(AMpop(&group_state->stack)); \ +#define static_void_test_AMlistPutStr(mode, str_value) \ +static void test_AMlistPutStr_ ## mode(void **state) { \ + GroupState* group_state = *state; \ + AMobjId const* const list = AMpush( \ + &group_state->stack, \ + AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\ + AM_VALUE_OBJ_ID, \ + cmocka_cb).obj_id; \ + AMfree(AMlistPutStr(group_state->doc, \ + list, \ + 0, \ + !strcmp(#mode, "insert"), \ + AMstr(str_value))); \ + AMbyteSpan const str = AMpush( \ + &group_state->stack, \ + AMlistGet(group_state->doc, list, 0, NULL), \ + AM_VALUE_STR, \ + cmocka_cb).str; \ + assert_int_equal(str.count, strlen(str_value)); \ + assert_memory_equal(str.src, str_value, str.count); \ + AMfree(AMpop(&group_state->stack)); \ } static_void_test_AMlistPut(Bool, insert, boolean, true) @@ -391,7 +417,7 @@ static void test_insert_at_index(void** state) { AMobjId const* const list = AMpush( &stack, - AMlistPutObject(doc, AM_ROOT, 0, true, AM_OBJ_TYPE_LIST), + AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), AM_VALUE_OBJ_ID, cmocka_cb).obj_id; /* Insert both at the same index. 
*/ diff --git a/rust/automerge-c/test/ported_wasm/basic_tests.c b/rust/automerge-c/test/ported_wasm/basic_tests.c index 303160cf..4b275300 100644 --- a/rust/automerge-c/test/ported_wasm/basic_tests.c +++ b/rust/automerge-c/test/ported_wasm/basic_tests.c @@ -709,17 +709,10 @@ static void test_should_be_able_to_splice_text(void** state) { cmocka_cb).obj_id; /* doc.splice(text, 0, 0, "hello ") */ AMfree(AMspliceText(doc, text, 0, 0, AMstr("hello "))); - /* doc.splice(text, 6, 0, ["w", "o", "r", "l", "d"]) */ - static AMvalue const WORLD[] = {{.str_tag = AM_VALUE_STR, .str = {.src = "w", .count = 1}}, - {.str_tag = AM_VALUE_STR, .str = {.src = "o", .count = 1}}, - {.str_tag = AM_VALUE_STR, .str = {.src = "r", .count = 1}}, - {.str_tag = AM_VALUE_STR, .str = {.src = "l", .count = 1}}, - {.str_tag = AM_VALUE_STR, .str = {.src = "d", .count = 1}}}; - AMfree(AMsplice(doc, text, 6, 0, WORLD, sizeof(WORLD)/sizeof(AMvalue))); - /* doc.splice(text, 11, 0, ["!", "?"]) */ - static AMvalue const INTERROBANG[] = {{.str_tag = AM_VALUE_STR, .str = {.src = "!", .count = 1}}, - {.str_tag = AM_VALUE_STR, .str = {.src = "?", .count = 1}}}; - AMfree(AMsplice(doc, text, 11, 0, INTERROBANG, sizeof(INTERROBANG)/sizeof(AMvalue))); + /* doc.splice(text, 6, 0, "world") */ + AMfree(AMspliceText(doc, text, 6, 0, AMstr("world"))); + /* doc.splice(text, 11, 0, "!?") */ + AMfree(AMspliceText(doc, text, 11, 0, AMstr("!?"))); /* assert.deepEqual(doc.getWithType(text, 0), ["str", "h"]) */ AMbyteSpan str = AMpush(&stack, AMlistGet(doc, text, 0, NULL), @@ -765,9 +758,9 @@ static void test_should_be_able_to_splice_text(void** state) { } /** - * \brief should be able to insert objects into text + * \brief should NOT be able to insert objects into text */ -static void test_should_be_able_to_insert_objects_into_text(void** state) { +static void test_should_be_unable_to_insert_objects_into_text(void** state) { AMresultStack* stack = *state; /* const doc = create() */ AMdoc* const doc = AMpush(&stack, 
AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; @@ -778,32 +771,14 @@ static void test_should_be_able_to_insert_objects_into_text(void** state) { AM_VALUE_OBJ_ID, cmocka_cb).obj_id; AMfree(AMspliceText(doc, text, 0, 0, AMstr("Hello world"))); - /* const obj = doc.insertObject(text, 6, { hello: "world" }); */ - AMobjId const* const obj = AMpush( - &stack, - AMlistPutObject(doc, text, 6, true, AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMmapPutStr(doc, obj, AMstr("hello"), AMstr("world"))); - /* assert.deepEqual(doc.text(text), "Hello \ufffcworld"); */ - AMbyteSpan str = AMpush(&stack, - AMtext(doc, text, NULL), - AM_VALUE_STR, - cmocka_cb).str; - assert_int_equal(str.count, strlen(u8"Hello \ufffcworld")); - assert_memory_equal(str.src, u8"Hello \ufffcworld", str.count); - /* assert.deepEqual(doc.getWithType(text, 6), ["map", obj]); */ - assert_true(AMobjIdEqual(AMpush(&stack, - AMlistGet(doc, text, 6, NULL), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id, obj)); - /* assert.deepEqual(doc.getWithType(obj, "hello"), ["str", "world"]); */ - str = AMpush(&stack, - AMmapGet(doc, obj, AMstr("hello"), NULL), - AM_VALUE_STR, - cmocka_cb).str; - assert_int_equal(str.count, strlen("world")); - assert_memory_equal(str.src, "world", str.count); + /* assert.throws(() => { + doc.insertObject(text, 6, { hello: "world" }); + }) */ + AMpush(&stack, + AMlistPutObject(doc, text, 6, true, AM_OBJ_TYPE_MAP), + AM_VALUE_VOID, + NULL); + assert_int_not_equal(AMresultStatus(stack->result), AM_STATUS_OK); } /** @@ -1873,7 +1848,7 @@ int run_ported_wasm_basic_tests(void) { cmocka_unit_test_setup_teardown(test_should_be_able_to_del, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_should_be_able_to_use_counters, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_insert_objects_into_text, setup_stack, teardown_stack), + 
cmocka_unit_test_setup_teardown(test_should_be_unable_to_insert_objects_into_text, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_should_be_able_to_save_all_or_incrementally, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text_2, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_map, setup_stack, teardown_stack), diff --git a/rust/automerge-wasm/README.md b/rust/automerge-wasm/README.md index 992aaa8f..20256313 100644 --- a/rust/automerge-wasm/README.md +++ b/rust/automerge-wasm/README.md @@ -154,7 +154,7 @@ Lists are index addressable sets of values. These values can be any scalar or o ### Text -Text is a specialized list type intended for modifying a text document. The primary way to interact with a text document is via the `splice()` method. Spliced strings will be indexable by character (important to note for platforms that index by graphmeme cluster). Non text can be inserted into a text document and will be represented with the unicode object replacement character. +Text is a specialized list type intended for modifying a text document. The primary way to interact with a text document is via the `splice()` method. Spliced strings will be indexable by character (important to note for platforms that index by graphmeme cluster). ```javascript let doc = create("aaaaaa") @@ -162,12 +162,6 @@ Text is a specialized list type intended for modifying a text document. 
The pri doc.splice(notes, 6, 5, "everyone") doc.text(notes) // returns "Hello everyone" - - let obj = doc.insertObject(notes, 6, { hi: "there" }) - - doc.text(notes) // returns "Hello \ufffceveryone" - doc.getWithType(notes, 6) // returns ["map", obj] - doc.get(obj, "hi") // returns "there" ``` ### Tables diff --git a/rust/automerge-wasm/src/interop.rs b/rust/automerge-wasm/src/interop.rs index 24b34cd2..20b42bf1 100644 --- a/rust/automerge-wasm/src/interop.rs +++ b/rust/automerge-wasm/src/interop.rs @@ -1,9 +1,11 @@ +use crate::error::InsertObject; use crate::value::Datatype; use crate::Automerge; use automerge as am; use automerge::transaction::Transactable; +use automerge::ROOT; use automerge::{Change, ChangeHash, ObjType, Prop}; -use js_sys::{Array, Function, Object, Reflect, Symbol, Uint8Array}; +use js_sys::{Array, Function, JsString, Object, Reflect, Symbol, Uint8Array}; use std::collections::{BTreeSet, HashSet}; use std::fmt::Display; use wasm_bindgen::prelude::*; @@ -410,49 +412,82 @@ pub(crate) fn js_get_symbol>(obj: J, prop: &Symbol) -> Result Result { +pub(crate) fn to_prop(p: JsValue) -> Result { if let Some(s) = p.as_string() { Ok(Prop::Map(s)) } else if let Some(n) = p.as_f64() { Ok(Prop::Seq(n as usize)) } else { - Err(super::error::InvalidProp) + Err(error::InvalidProp) } } -pub(crate) fn to_objtype( +pub(crate) enum JsObjType { + Text(String), + Map(Vec<(Prop, JsValue)>), + List(Vec<(Prop, JsValue)>), +} + +impl JsObjType { + pub(crate) fn objtype(&self) -> ObjType { + match self { + Self::Text(_) => ObjType::Text, + Self::Map(_) => ObjType::Map, + Self::List(_) => ObjType::List, + } + } + + pub(crate) fn text(&self) -> Option<&str> { + match self { + Self::Text(s) => Some(s.as_ref()), + Self::Map(_) => None, + Self::List(_) => None, + } + } + + pub(crate) fn subvals(&self) -> &[(Prop, JsValue)] { + match self { + Self::Text(_) => &[], + Self::Map(sub) => sub.as_slice(), + Self::List(sub) => sub.as_slice(), + } + } +} + +pub(crate) fn import_obj( 
value: &JsValue, datatype: &Option, -) -> Option<(ObjType, Vec<(Prop, JsValue)>)> { +) -> Result { match datatype.as_deref() { Some("map") => { - let map = value.clone().dyn_into::().ok()?; + let map = value + .clone() + .dyn_into::() + .map_err(|_| InsertObject::ValueNotObject)?; let map = js_sys::Object::keys(&map) .iter() .zip(js_sys::Object::values(&map).iter()) .map(|(key, val)| (key.as_string().unwrap().into(), val)) .collect(); - Some((ObjType::Map, map)) + Ok(JsObjType::Map(map)) } Some("list") => { - let list = value.clone().dyn_into::().ok()?; + let list = value + .clone() + .dyn_into::() + .map_err(|_| InsertObject::ValueNotObject)?; let list = list .iter() .enumerate() .map(|(i, e)| (i.into(), e)) .collect(); - Some((ObjType::List, list)) + Ok(JsObjType::List(list)) } Some("text") => { - let text = value.as_string()?; - let text = text - .chars() - .enumerate() - .map(|(i, ch)| (i.into(), ch.to_string().into())) - .collect(); - Some((ObjType::Text, text)) + let text = value.as_string().ok_or(InsertObject::ValueNotObject)?; + Ok(JsObjType::Text(text)) } - Some(_) => None, + Some(_) => Err(InsertObject::ValueNotObject), None => { if let Ok(list) = value.clone().dyn_into::() { let list = list @@ -460,24 +495,18 @@ pub(crate) fn to_objtype( .enumerate() .map(|(i, e)| (i.into(), e)) .collect(); - Some((ObjType::List, list)) + Ok(JsObjType::List(list)) } else if let Ok(map) = value.clone().dyn_into::() { - // FIXME unwrap let map = js_sys::Object::keys(&map) .iter() .zip(js_sys::Object::values(&map).iter()) .map(|(key, val)| (key.as_string().unwrap().into(), val)) .collect(); - Some((ObjType::Map, map)) - } else if let Some(text) = value.as_string() { - let text = text - .chars() - .enumerate() - .map(|(i, ch)| (i.into(), ch.to_string().into())) - .collect(); - Some((ObjType::Text, text)) + Ok(JsObjType::Map(map)) + } else if let Some(s) = value.as_string() { + Ok(JsObjType::Text(s)) } else { - None + Err(InsertObject::ValueNotObject) } } } @@ -506,22 +535,22 
@@ impl Automerge { heads: Option<&Vec>, meta: &JsValue, ) -> Result { - let result = if datatype.is_sequence() { - self.wrap_object( - self.export_list(obj, heads, meta)?, - datatype, - &obj.to_string().into(), - meta, - )? - } else { - self.wrap_object( - self.export_map(obj, heads, meta)?, - datatype, - &obj.to_string().into(), - meta, - )? + let result = match datatype { + Datatype::Text => { + if let Some(heads) = heads { + self.doc.text_at(obj, heads)?.into() + } else { + self.doc.text(obj)?.into() + } + } + Datatype::List => self + .wrap_object(self.export_list(obj, heads, meta)?, datatype, obj, meta)? + .into(), + _ => self + .wrap_object(self.export_map(obj, heads, meta)?, datatype, obj, meta)? + .into(), }; - Ok(result.into()) + Ok(result) } pub(crate) fn export_map( @@ -601,17 +630,19 @@ impl Automerge { pub(crate) fn unwrap_object( &self, ext_val: &Object, - ) -> Result<(Object, Datatype, JsValue), error::Export> { + ) -> Result<(Object, Datatype, ObjId), error::Export> { let inner = js_get_symbol(ext_val, &Symbol::for_(RAW_DATA_SYMBOL))?.0; let datatype = js_get_symbol(ext_val, &Symbol::for_(DATATYPE_SYMBOL))? 
.0 .try_into(); - let mut id = js_get_symbol(ext_val, &Symbol::for_(RAW_OBJECT_SYMBOL))?.0; - if id.is_undefined() { - id = "_root".into(); - } + let id_val = js_get_symbol(ext_val, &Symbol::for_(RAW_OBJECT_SYMBOL))?.0; + let id = if id_val.is_undefined() { + am::ROOT + } else { + self.doc.import(&id_val.as_string().unwrap_or_default())?.0 + }; let inner = inner .dyn_into::() @@ -642,7 +673,7 @@ impl Automerge { meta: &JsValue, ) -> Result { if let Ok(obj) = raw_value.clone().dyn_into::() { - let result = self.wrap_object(obj, datatype, &id.to_string().into(), meta)?; + let result = self.wrap_object(obj, datatype, id, meta)?; Ok(result.into()) } else { self.export_value((datatype, raw_value)) @@ -653,7 +684,7 @@ impl Automerge { &self, value: Object, datatype: Datatype, - id: &JsValue, + id: &ObjId, meta: &JsValue, ) -> Result { let value = if let Some(function) = self.external_types.get(&datatype) { @@ -668,8 +699,12 @@ impl Automerge { } else { value }; - if matches!(datatype, Datatype::Map | Datatype::List | Datatype::Text) { - set_hidden_value(&value, &Symbol::for_(RAW_OBJECT_SYMBOL), id)?; + if matches!(datatype, Datatype::Map | Datatype::List) { + set_hidden_value( + &value, + &Symbol::for_(RAW_OBJECT_SYMBOL), + &JsValue::from(&id.to_string()), + )?; } set_hidden_value(&value, &Symbol::for_(DATATYPE_SYMBOL), datatype)?; set_hidden_value(&value, &Symbol::for_(META_SYMBOL), meta)?; @@ -684,16 +719,27 @@ impl Automerge { array: &Object, patch: &Patch, meta: &JsValue, + exposed: &mut HashSet, ) -> Result { let result = Array::from(array); // shallow copy match patch { - Patch::PutSeq { index, value, .. } => { - let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; - js_set(&result, *index as f64, &sub_val)?; + Patch::PutSeq { + index, + value, + expose, + .. 
+ } => { + if *expose && value.0.is_object() { + exposed.insert(value.1.clone()); + js_set(&result, *index as f64, &JsValue::null())?; + } else { + let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; + js_set(&result, *index as f64, &sub_val)?; + } Ok(result.into()) } - Patch::DeleteSeq { index, .. } => { - Ok(self.sub_splice(result, *index, 1, vec![], meta)?) + Patch::DeleteSeq { index, length, .. } => { + Ok(self.sub_splice(result, *index, *length, vec![], meta)?) } Patch::Insert { index, values, .. } => { Ok(self.sub_splice(result, *index, 0, values, meta)?) @@ -717,6 +763,8 @@ impl Automerge { } Patch::DeleteMap { .. } => Err(error::ApplyPatch::DeleteKeyFromSeq), Patch::PutMap { .. } => Err(error::ApplyPatch::PutKeyInSeq), + //Patch::SpliceText { .. } => Err(to_js_err("cannot splice text in seq")), + Patch::SpliceText { .. } => Err(error::ApplyPatch::SpliceTextInSeq), } } @@ -725,12 +773,20 @@ impl Automerge { map: &Object, patch: &Patch, meta: &JsValue, + exposed: &mut HashSet, ) -> Result { let result = Object::assign(&Object::new(), map); // shallow copy match patch { - Patch::PutMap { key, value, .. } => { - let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; - js_set(&result, key, &sub_val)?; + Patch::PutMap { + key, value, expose, .. + } => { + if *expose && value.0.is_object() { + exposed.insert(value.1.clone()); + js_set(&result, key, &JsValue::null())?; + } else { + let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; + js_set(&result, key, &sub_val)?; + } Ok(result) } Patch::DeleteMap { key, .. } => { @@ -760,6 +816,8 @@ impl Automerge { } Patch::Insert { .. } => Err(error::ApplyPatch::InsertInMap), Patch::DeleteSeq { .. } => Err(error::ApplyPatch::SpliceInMap), + //Patch::SpliceText { .. } => Err(to_js_err("cannot Splice into map")), + Patch::SpliceText { .. } => Err(error::ApplyPatch::SpliceTextInMap), Patch::PutSeq { .. 
} => Err(error::ApplyPatch::PutIdxInMap), } } @@ -770,12 +828,24 @@ impl Automerge { patch: &Patch, depth: usize, meta: &JsValue, + exposed: &mut HashSet, ) -> Result { let (inner, datatype, id) = self.unwrap_object(&obj)?; let prop = patch.path().get(depth).map(|p| prop_to_js(&p.1)); let result = if let Some(prop) = prop { - if let Ok(sub_obj) = js_get(&inner, &prop)?.0.dyn_into::() { - let new_value = self.apply_patch(sub_obj, patch, depth + 1, meta)?; + let subval = js_get(&inner, &prop)?.0; + if subval.is_string() && patch.path().len() - 1 == depth { + if let Ok(s) = subval.dyn_into::() { + let new_value = self.apply_patch_to_text(&s, patch)?; + let result = shallow_copy(&inner); + js_set(&result, &prop, &new_value)?; + Ok(result) + } else { + // bad patch - short circuit + Ok(obj) + } + } else if let Ok(sub_obj) = js_get(&inner, &prop)?.0.dyn_into::() { + let new_value = self.apply_patch(sub_obj, patch, depth + 1, meta, exposed)?; let result = shallow_copy(&inner); js_set(&result, &prop, &new_value)?; Ok(result) @@ -785,15 +855,49 @@ impl Automerge { return Ok(obj); } } else if Array::is_array(&inner) { - self.apply_patch_to_array(&inner, patch, meta) + if &id == patch.obj() { + self.apply_patch_to_array(&inner, patch, meta, exposed) + } else { + Ok(Array::from(&inner).into()) + } + } else if &id == patch.obj() { + self.apply_patch_to_map(&inner, patch, meta, exposed) } else { - self.apply_patch_to_map(&inner, patch, meta) + Ok(Object::assign(&Object::new(), &inner)) }?; self.wrap_object(result, datatype, &id, meta) .map_err(|e| e.into()) } + fn apply_patch_to_text( + &self, + string: &JsString, + patch: &Patch, + ) -> Result { + match patch { + Patch::DeleteSeq { index, length, .. } => { + let index = *index as u32; + let before = string.slice(0, index); + let after = string.slice(index + *length as u32, string.length()); + let result = before.concat(&after); + Ok(result.into()) + } + Patch::SpliceText { index, value, .. 
} => { + let index = *index as u32; + let length = string.length(); + let before = string.slice(0, index); + let after = string.slice(index, length); + let bytes: Vec = value.iter().cloned().collect(); + let result = before + .concat(&String::from_utf16_lossy(bytes.as_slice()).into()) + .concat(&after); + Ok(result.into()) + } + _ => Ok(string.into()), + } + } + fn sub_splice<'a, I: IntoIterator, ObjId)>>( &self, o: Array, @@ -815,6 +919,178 @@ impl Automerge { Reflect::apply(&method, &o, &args).map_err(error::Export::CallSplice)?; Ok(o.into()) } + + pub(crate) fn import(&self, id: JsValue) -> Result<(ObjId, am::ObjType), error::ImportObj> { + if let Some(s) = id.as_string() { + // valid formats are + // 123@aabbcc + // 123@aabccc/prop1/prop2/prop3 + // /prop1/prop2/prop3 + let mut components = s.split('/'); + let obj = components.next(); + let (id, obj_type) = if obj == Some("") { + (ROOT, am::ObjType::Map) + } else { + self.doc + .import(obj.unwrap_or_default()) + .map_err(error::ImportObj::BadImport)? + }; + self.import_path(id, obj_type, components) + .map_err(|e| error::ImportObj::InvalidPath(s.to_string(), e)) + } else { + Err(error::ImportObj::NotString) + } + } + + fn import_path<'a, I: Iterator>( + &self, + mut obj: ObjId, + mut obj_type: am::ObjType, + components: I, + ) -> Result<(ObjId, am::ObjType), error::ImportPath> { + for (i, prop) in components.enumerate() { + if prop.is_empty() { + break; + } + let is_map = matches!(obj_type, am::ObjType::Map | am::ObjType::Table); + let val = if is_map { + self.doc.get(obj, prop)? + } else { + let idx = prop + .parse() + .map_err(|_| error::ImportPath::IndexNotInteger(i, prop.to_string()))?; + self.doc.get(obj, am::Prop::Seq(idx))? 
+ }; + match val { + Some((am::Value::Object(am::ObjType::Map), id)) => { + obj_type = am::ObjType::Map; + obj = id; + } + Some((am::Value::Object(am::ObjType::Table), id)) => { + obj_type = am::ObjType::Table; + obj = id; + } + Some((am::Value::Object(am::ObjType::List), id)) => { + obj_type = am::ObjType::List; + obj = id; + } + Some((am::Value::Object(am::ObjType::Text), id)) => { + obj_type = am::ObjType::Text; + obj = id; + } + None => return Err(error::ImportPath::NonExistentObject(i, prop.to_string())), + _ => return Err(error::ImportPath::NotAnObject), + }; + } + Ok((obj, obj_type)) + } + + pub(crate) fn import_prop(&self, prop: JsValue) -> Result { + if let Some(s) = prop.as_string() { + Ok(s.into()) + } else if let Some(n) = prop.as_f64() { + Ok((n as usize).into()) + } else { + Err(error::InvalidProp) + } + } + + pub(crate) fn import_scalar( + &self, + value: &JsValue, + datatype: &Option, + ) -> Option { + match datatype.as_deref() { + Some("boolean") => value.as_bool().map(am::ScalarValue::Boolean), + Some("int") => value.as_f64().map(|v| am::ScalarValue::Int(v as i64)), + Some("uint") => value.as_f64().map(|v| am::ScalarValue::Uint(v as u64)), + Some("str") => value.as_string().map(|v| am::ScalarValue::Str(v.into())), + Some("f64") => value.as_f64().map(am::ScalarValue::F64), + Some("bytes") => Some(am::ScalarValue::Bytes( + value.clone().dyn_into::().unwrap().to_vec(), + )), + Some("counter") => value.as_f64().map(|v| am::ScalarValue::counter(v as i64)), + Some("timestamp") => { + if let Some(v) = value.as_f64() { + Some(am::ScalarValue::Timestamp(v as i64)) + } else if let Ok(d) = value.clone().dyn_into::() { + Some(am::ScalarValue::Timestamp(d.get_time() as i64)) + } else { + None + } + } + Some("null") => Some(am::ScalarValue::Null), + Some(_) => None, + None => { + if value.is_null() { + Some(am::ScalarValue::Null) + } else if let Some(b) = value.as_bool() { + Some(am::ScalarValue::Boolean(b)) + } else if let Some(s) = value.as_string() { + 
Some(am::ScalarValue::Str(s.into())) + } else if let Some(n) = value.as_f64() { + if (n.round() - n).abs() < f64::EPSILON { + Some(am::ScalarValue::Int(n as i64)) + } else { + Some(am::ScalarValue::F64(n)) + } + } else if let Ok(d) = value.clone().dyn_into::() { + Some(am::ScalarValue::Timestamp(d.get_time() as i64)) + } else if let Ok(o) = &value.clone().dyn_into::() { + Some(am::ScalarValue::Bytes(o.to_vec())) + } else { + None + } + } + } + } + + pub(crate) fn import_value( + &self, + value: &JsValue, + datatype: Option, + ) -> Result<(Value<'static>, Vec<(Prop, JsValue)>), error::InvalidValue> { + match self.import_scalar(value, &datatype) { + Some(val) => Ok((val.into(), vec![])), + None => { + if let Ok(js_obj) = import_obj(value, &datatype) { + Ok((js_obj.objtype().into(), js_obj.subvals().to_vec())) + } else { + web_sys::console::log_2(&"Invalid value".into(), value); + Err(error::InvalidValue) + } + } + } + } + + pub(crate) fn finalize_exposed( + &self, + object: &JsValue, + exposed: HashSet, + meta: &JsValue, + ) -> Result<(), error::ApplyPatch> { + for obj in exposed { + let mut pointer = object.clone(); + if let Ok(obj_type) = self.doc.object_type(&obj) { + // only valid obj's should make it to this point ... + let path: Vec<_> = self + .doc + .path_to_object(&obj)? 
+ .iter() + .map(|p| prop_to_js(&p.1)) + .collect(); + let value = self.export_object(&obj, obj_type.into(), None, meta)?; + for (i, prop) in path.iter().enumerate() { + if i + 1 < path.len() { + pointer = js_get(&pointer, prop)?.0; + } else { + js_set(&pointer, prop, &value)?; + } + } + } + } + Ok(()) + } } pub(crate) fn alloc(value: &Value<'_>) -> (Datatype, JsValue) { @@ -823,7 +1099,7 @@ pub(crate) fn alloc(value: &Value<'_>) -> (Datatype, JsValue) { ObjType::Map => (Datatype::Map, Object::new().into()), ObjType::Table => (Datatype::Table, Object::new().into()), ObjType::List => (Datatype::List, Array::new().into()), - ObjType::Text => (Datatype::Text, Array::new().into()), + ObjType::Text => (Datatype::Text, "".into()), }, am::Value::Scalar(s) => match s.as_ref() { am::ScalarValue::Bytes(v) => (Datatype::Bytes, Uint8Array::from(v.as_slice()).into()), @@ -877,7 +1153,7 @@ fn prop_to_js(prop: &Prop) -> JsValue { } pub(crate) mod error { - use automerge::LoadChangeError; + use automerge::{AutomergeError, LoadChangeError}; use wasm_bindgen::JsValue; #[derive(Debug, thiserror::Error)] @@ -1028,6 +1304,8 @@ pub(crate) mod error { GetSplice(JsValue), #[error("error calling splice: {0:?}")] CallSplice(JsValue), + #[error(transparent)] + Automerge(#[from] AutomergeError), } impl From for JsValue { @@ -1054,12 +1332,18 @@ pub(crate) mod error { InsertInMap, #[error("cannot splice into a map")] SpliceInMap, + #[error("cannot splice text into a seq")] + SpliceTextInSeq, + #[error("cannot splice text into a map")] + SpliceTextInMap, #[error("cannot put a seq index in a map")] PutIdxInMap, #[error(transparent)] GetProp(#[from] GetProp), #[error(transparent)] SetProp(#[from] SetProp), + #[error(transparent)] + Automerge(#[from] AutomergeError), } impl From for JsValue { @@ -1087,4 +1371,40 @@ pub(crate) mod error { JsValue::from(e.to_string()) } } + + #[derive(Debug, thiserror::Error)] + pub enum ImportObj { + #[error("obj id was not a string")] + NotString, + 
#[error("invalid path {0}: {1}")] + InvalidPath(String, ImportPath), + #[error("unable to import object id: {0}")] + BadImport(AutomergeError), + } + + impl From for JsValue { + fn from(e: ImportObj) -> Self { + JsValue::from(format!("invalid object ID: {}", e)) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum ImportPath { + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error("path component {0} ({1}) should be an integer to index a sequence")] + IndexNotInteger(usize, String), + #[error("path component {0} ({1}) referenced a nonexistent object")] + NonExistentObject(usize, String), + #[error("path did not refer to an object")] + NotAnObject, + } + + #[derive(Debug, thiserror::Error)] + #[error("given property was not a string or integer")] + pub struct InvalidProp; + + #[derive(Debug, thiserror::Error)] + #[error("given property was not a string or integer")] + pub struct InvalidValue; } diff --git a/rust/automerge-wasm/src/lib.rs b/rust/automerge-wasm/src/lib.rs index 22cdb685..ce57f66f 100644 --- a/rust/automerge-wasm/src/lib.rs +++ b/rust/automerge-wasm/src/lib.rs @@ -29,10 +29,11 @@ use am::transaction::CommitOptions; use am::transaction::{Observed, Transactable, UnObserved}; use automerge as am; -use automerge::{Change, ObjId, ObjType, Prop, Value, ROOT}; +use automerge::{Change, ObjId, Prop, TextEncoding, Value, ROOT}; use js_sys::{Array, Function, Object, Uint8Array}; use serde::ser::Serialize; use std::collections::HashMap; +use std::collections::HashSet; use std::convert::TryInto; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; @@ -44,7 +45,7 @@ mod value; use observer::Observer; -use interop::{alloc, get_heads, js_set, to_js_err, to_objtype, to_prop, AR, JS}; +use interop::{alloc, get_heads, import_obj, js_set, to_js_err, to_prop, AR, JS}; use sync::SyncState; use value::Datatype; @@ -72,7 +73,7 @@ pub struct Automerge { #[wasm_bindgen] impl Automerge { pub fn new(actor: Option) -> Result { - let mut doc = 
AutoCommit::default(); + let mut doc = AutoCommit::default().with_encoding(TextEncoding::Utf16); if let Some(a) = actor { let a = automerge::ActorId::from(hex::decode(a)?.to_vec()); doc.set_actor(a); @@ -188,7 +189,7 @@ impl Automerge { let start = start as usize; let delete_count = delete_count as usize; if let Some(t) = text.as_string() { - if obj_type == ObjType::Text { + if obj_type == am::ObjType::Text { self.doc.splice_text(&obj, start, delete_count, &t)?; return Ok(()); } @@ -202,9 +203,22 @@ impl Automerge { vals.push(value); } } - Ok(self - .doc - .splice(&obj, start, delete_count, vals.into_iter())?) + if !vals.is_empty() { + self.doc.splice(&obj, start, delete_count, vals)?; + } else { + // no vals given but we still need to call the text vs splice + // bc utf16 + match obj_type { + am::ObjType::List => { + self.doc.splice(&obj, start, delete_count, vals)?; + } + am::ObjType::Text => { + self.doc.splice_text(&obj, start, delete_count, "")?; + } + _ => {} + } + } + Ok(()) } pub fn push( @@ -229,11 +243,16 @@ impl Automerge { value: JsValue, ) -> Result, error::InsertObject> { let (obj, _) = self.import(obj)?; - let (value, subvals) = - to_objtype(&value, &None).ok_or(error::InsertObject::ValueNotObject)?; + let imported_obj = import_obj(&value, &None)?; let index = self.doc.length(&obj); - let opid = self.doc.insert_object(&obj, index, value)?; - self.subset::(&opid, subvals)?; + let opid = self + .doc + .insert_object(&obj, index, imported_obj.objtype())?; + if let Some(s) = imported_obj.text() { + self.doc.splice_text(&opid, 0, 0, s)?; + } else { + self.subset::(&opid, imported_obj.subvals())?; + } Ok(opid.to_string().into()) } @@ -262,10 +281,15 @@ impl Automerge { ) -> Result, error::InsertObject> { let (obj, _) = self.import(obj)?; let index = index as f64; - let (value, subvals) = - to_objtype(&value, &None).ok_or(error::InsertObject::ValueNotObject)?; - let opid = self.doc.insert_object(&obj, index as usize, value)?; - self.subset::(&opid, 
subvals)?; + let imported_obj = import_obj(&value, &None)?; + let opid = self + .doc + .insert_object(&obj, index as usize, imported_obj.objtype())?; + if let Some(s) = imported_obj.text() { + self.doc.splice_text(&opid, 0, 0, s)?; + } else { + self.subset::(&opid, imported_obj.subvals())?; + } Ok(opid.to_string().into()) } @@ -294,19 +318,24 @@ impl Automerge { ) -> Result { let (obj, _) = self.import(obj)?; let prop = self.import_prop(prop)?; - let (value, subvals) = - to_objtype(&value, &None).ok_or(error::InsertObject::ValueNotObject)?; - let opid = self.doc.put_object(&obj, prop, value)?; - self.subset::(&opid, subvals)?; + let imported_obj = import_obj(&value, &None)?; + let opid = self.doc.put_object(&obj, prop, imported_obj.objtype())?; + if let Some(s) = imported_obj.text() { + self.doc.splice_text(&opid, 0, 0, s)?; + } else { + self.subset::(&opid, imported_obj.subvals())?; + } Ok(opid.to_string().into()) } - fn subset(&mut self, obj: &am::ObjId, vals: Vec<(am::Prop, JsValue)>) -> Result<(), E> + fn subset(&mut self, obj: &am::ObjId, vals: &[(am::Prop, JsValue)]) -> Result<(), E> where - E: From + From + From, + E: From + + From + + From, { for (p, v) in vals { - let (value, subvals) = self.import_value(&v, None)?; + let (value, subvals) = self.import_value(v, None)?; //let opid = self.0.set(id, p, value)?; let opid = match (p, value) { (Prop::Map(s), Value::Object(objtype)) => { @@ -317,15 +346,15 @@ impl Automerge { None } (Prop::Seq(i), Value::Object(objtype)) => { - Some(self.doc.insert_object(obj, i, objtype)?) + Some(self.doc.insert_object(obj, *i, objtype)?) 
} (Prop::Seq(i), Value::Scalar(scalar)) => { - self.doc.insert(obj, i, scalar.into_owned())?; + self.doc.insert(obj, *i, scalar.into_owned())?; None } }; if let Some(opid) = opid { - self.subset::(&opid, subvals)?; + self.subset::(&opid, &subvals)?; } } Ok(()) @@ -498,17 +527,27 @@ impl Automerge { object = self.wrap_object(object, datatype, &id, &meta)?; } - for p in patches { - if let Some(c) = &callback { - let before = object.clone(); - object = self.apply_patch(object, &p, 0, &meta)?; - c.call3(&JsValue::undefined(), &p.try_into()?, &before, &object) + let mut exposed = HashSet::default(); + + let before = object.clone(); + + for p in &patches { + object = self.apply_patch(object, p, 0, &meta, &mut exposed)?; + } + + if let Some(c) = &callback { + if !patches.is_empty() { + let patches: Array = patches + .into_iter() + .map(JsValue::try_from) + .collect::>()?; + c.call3(&JsValue::undefined(), &patches.into(), &before, &object) .map_err(error::ApplyPatch::PatchCallback)?; - } else { - object = self.apply_patch(object, &p, 0, &meta)?; } } + self.finalize_exposed(&object, exposed, &meta)?; + Ok(object.into()) } @@ -673,145 +712,11 @@ impl Automerge { heads: Option, meta: JsValue, ) -> Result { - let (obj, obj_type) = self.import(obj).unwrap_or((ROOT, ObjType::Map)); + let (obj, obj_type) = self.import(obj).unwrap_or((ROOT, am::ObjType::Map)); let heads = get_heads(heads)?; let _patches = self.doc.observer().take_patches(); // throw away patches Ok(self.export_object(&obj, obj_type.into(), heads.as_ref(), &meta)?) 
} - - fn import(&self, id: JsValue) -> Result<(ObjId, ObjType), error::ImportObj> { - if let Some(s) = id.as_string() { - if let Some(components) = s.strip_prefix('/').map(|post| post.split('/')) { - self.import_path(components) - .map_err(|e| error::ImportObj::InvalidPath(s.to_string(), e)) - } else { - let id = self.doc.import(&s).map_err(error::ImportObj::BadImport)?; - // SAFETY: we just looked this up - let obj_type = self.doc.object_type(&id).unwrap(); - Ok((id, obj_type)) - } - } else { - Err(error::ImportObj::NotString) - } - } - - fn import_path<'a, I: Iterator>( - &self, - components: I, - ) -> Result<(ObjId, ObjType), error::ImportPath> { - let mut obj = ROOT; - let mut obj_type = ObjType::Map; - for (i, prop) in components.enumerate() { - if prop.is_empty() { - break; - } - let is_map = matches!(obj_type, ObjType::Map | ObjType::Table); - let val = if is_map { - self.doc.get(obj, prop)? - } else { - let idx = prop - .parse() - .map_err(|_| error::ImportPath::IndexNotInteger(i, prop.to_string()))?; - self.doc.get(obj, am::Prop::Seq(idx))? 
- }; - match val { - Some((am::Value::Object(ObjType::Map), id)) => { - obj_type = ObjType::Map; - obj = id; - } - Some((am::Value::Object(ObjType::Table), id)) => { - obj_type = ObjType::Table; - obj = id; - } - Some((am::Value::Object(ObjType::List), id)) => { - obj_type = ObjType::List; - obj = id; - } - Some((am::Value::Object(ObjType::Text), id)) => { - obj_type = ObjType::Text; - obj = id; - } - None => return Err(error::ImportPath::NonExistentObject(i, prop.to_string())), - _ => return Err(error::ImportPath::NotAnObject), - }; - } - Ok((obj, obj_type)) - } - - fn import_prop(&self, prop: JsValue) -> Result { - if let Some(s) = prop.as_string() { - Ok(s.into()) - } else if let Some(n) = prop.as_f64() { - Ok((n as usize).into()) - } else { - Err(error::InvalidProp) - } - } - - fn import_scalar(&self, value: &JsValue, datatype: &Option) -> Option { - match datatype.as_deref() { - Some("boolean") => value.as_bool().map(am::ScalarValue::Boolean), - Some("int") => value.as_f64().map(|v| am::ScalarValue::Int(v as i64)), - Some("uint") => value.as_f64().map(|v| am::ScalarValue::Uint(v as u64)), - Some("str") => value.as_string().map(|v| am::ScalarValue::Str(v.into())), - Some("f64") => value.as_f64().map(am::ScalarValue::F64), - Some("bytes") => Some(am::ScalarValue::Bytes( - value.clone().dyn_into::().unwrap().to_vec(), - )), - Some("counter") => value.as_f64().map(|v| am::ScalarValue::counter(v as i64)), - Some("timestamp") => { - if let Some(v) = value.as_f64() { - Some(am::ScalarValue::Timestamp(v as i64)) - } else if let Ok(d) = value.clone().dyn_into::() { - Some(am::ScalarValue::Timestamp(d.get_time() as i64)) - } else { - None - } - } - Some("null") => Some(am::ScalarValue::Null), - Some(_) => None, - None => { - if value.is_null() { - Some(am::ScalarValue::Null) - } else if let Some(b) = value.as_bool() { - Some(am::ScalarValue::Boolean(b)) - } else if let Some(s) = value.as_string() { - Some(am::ScalarValue::Str(s.into())) - } else if let Some(n) = 
value.as_f64() { - if (n.round() - n).abs() < f64::EPSILON { - Some(am::ScalarValue::Int(n as i64)) - } else { - Some(am::ScalarValue::F64(n)) - } - } else if let Ok(d) = value.clone().dyn_into::() { - Some(am::ScalarValue::Timestamp(d.get_time() as i64)) - } else if let Ok(o) = &value.clone().dyn_into::() { - Some(am::ScalarValue::Bytes(o.to_vec())) - } else { - None - } - } - } - } - - fn import_value( - &self, - value: &JsValue, - datatype: Option, - ) -> Result<(Value<'static>, Vec<(Prop, JsValue)>), error::InvalidValue> { - match self.import_scalar(value, &datatype) { - Some(val) => Ok((val.into(), vec![])), - None => { - if let Some((o, subvals)) = to_objtype(value, &datatype) { - Ok((o.into(), subvals)) - } else { - web_sys::console::log_2(&"Invalid value".into(), value); - Err(error::InvalidValue) - } - } - } - } - #[wasm_bindgen(js_name = emptyChange)] pub fn empty_change(&mut self, message: Option, time: Option) -> JsValue { let time = time.map(|f| f as i64); @@ -830,8 +735,9 @@ pub fn init(actor: Option) -> Result { #[wasm_bindgen(js_name = load)] pub fn load(data: Uint8Array, actor: Option) -> Result { let data = data.to_vec(); - let mut doc = - am::AutoCommitWithObs::::load(&data)?.with_observer(Observer::default()); + let mut doc = am::AutoCommitWithObs::::load(&data)? 
+ .with_observer(Observer::default()) + .with_encoding(TextEncoding::Utf16); if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(error::BadActorId::from)?.to_vec()); @@ -972,44 +878,16 @@ pub mod error { } } - #[derive(Debug, thiserror::Error)] - pub enum ImportPath { - #[error(transparent)] - Automerge(#[from] AutomergeError), - #[error("path component {0} ({1}) should be an integer to index a sequence")] - IndexNotInteger(usize, String), - #[error("path component {0} ({1}) referenced a nonexistent object")] - NonExistentObject(usize, String), - #[error("path did not refer to an object")] - NotAnObject, - } - - #[derive(Debug, thiserror::Error)] - pub enum ImportObj { - #[error("obj id was not a string")] - NotString, - #[error("invalid path {0}: {1}")] - InvalidPath(String, ImportPath), - #[error("unable to import object id: {0}")] - BadImport(AutomergeError), - } - - impl From for JsValue { - fn from(e: ImportObj) -> Self { - JsValue::from(format!("invalid object ID: {}", e)) - } - } - #[derive(Debug, thiserror::Error)] pub enum Get { #[error("invalid object ID: {0}")] - ImportObj(#[from] ImportObj), + ImportObj(#[from] interop::error::ImportObj), #[error(transparent)] Automerge(#[from] AutomergeError), #[error("bad heads: {0}")] BadHeads(#[from] interop::error::BadChangeHashes), #[error(transparent)] - InvalidProp(#[from] InvalidProp), + InvalidProp(#[from] interop::error::InvalidProp), } impl From for JsValue { @@ -1021,7 +899,7 @@ pub mod error { #[derive(Debug, thiserror::Error)] pub enum Splice { #[error("invalid object ID: {0}")] - ImportObj(#[from] ImportObj), + ImportObj(#[from] interop::error::ImportObj), #[error(transparent)] Automerge(#[from] AutomergeError), #[error("value at {0} in values to insert was not a primitive")] @@ -1037,15 +915,15 @@ pub mod error { #[derive(Debug, thiserror::Error)] pub enum Insert { #[error("invalid object id: {0}")] - ImportObj(#[from] ImportObj), + ImportObj(#[from] 
interop::error::ImportObj), #[error("the value to insert was not a primitive")] ValueNotPrimitive, #[error(transparent)] Automerge(#[from] AutomergeError), #[error(transparent)] - InvalidProp(#[from] InvalidProp), + InvalidProp(#[from] interop::error::InvalidProp), #[error(transparent)] - InvalidValue(#[from] InvalidValue), + InvalidValue(#[from] interop::error::InvalidValue), } impl From for JsValue { @@ -1057,15 +935,15 @@ pub mod error { #[derive(Debug, thiserror::Error)] pub enum InsertObject { #[error("invalid object id: {0}")] - ImportObj(#[from] ImportObj), + ImportObj(#[from] interop::error::ImportObj), #[error("the value to insert must be an object")] ValueNotObject, #[error(transparent)] Automerge(#[from] AutomergeError), #[error(transparent)] - InvalidProp(#[from] InvalidProp), + InvalidProp(#[from] interop::error::InvalidProp), #[error(transparent)] - InvalidValue(#[from] InvalidValue), + InvalidValue(#[from] interop::error::InvalidValue), } impl From for JsValue { @@ -1074,20 +952,12 @@ pub mod error { } } - #[derive(Debug, thiserror::Error)] - #[error("given property was not a string or integer")] - pub struct InvalidProp; - - #[derive(Debug, thiserror::Error)] - #[error("given property was not a string or integer")] - pub struct InvalidValue; - #[derive(Debug, thiserror::Error)] pub enum Increment { #[error("invalid object id: {0}")] - ImportObj(#[from] ImportObj), + ImportObj(#[from] interop::error::ImportObj), #[error(transparent)] - InvalidProp(#[from] InvalidProp), + InvalidProp(#[from] interop::error::InvalidProp), #[error("value was not numeric")] ValueNotNumeric, #[error(transparent)] diff --git a/rust/automerge-wasm/src/observer.rs b/rust/automerge-wasm/src/observer.rs index 67a757b6..f723ca6e 100644 --- a/rust/automerge-wasm/src/observer.rs +++ b/rust/automerge-wasm/src/observer.rs @@ -1,7 +1,7 @@ #![allow(dead_code)] use crate::interop::{self, alloc, js_set}; -use automerge::{ObjId, OpObserver, Parents, Prop, SequenceTree, Value}; +use 
automerge::{Automerge, ObjId, OpObserver, Prop, SequenceTree, Value}; use js_sys::{Array, Object}; use wasm_bindgen::prelude::*; @@ -23,6 +23,16 @@ impl Observer { self.enabled = enable; old_enabled } + + fn get_path(&mut self, doc: &Automerge, obj: &ObjId) -> Option> { + match doc.parents(obj) { + Ok(mut parents) => parents.visible_path(), + Err(e) => { + automerge::log!("error generating patch : {:?}", e); + None + } + } + } } #[derive(Debug, Clone)] @@ -32,14 +42,14 @@ pub(crate) enum Patch { path: Vec<(ObjId, Prop)>, key: String, value: (Value<'static>, ObjId), - conflict: bool, + expose: bool, }, PutSeq { obj: ObjId, path: Vec<(ObjId, Prop)>, index: usize, value: (Value<'static>, ObjId), - conflict: bool, + expose: bool, }, Insert { obj: ObjId, @@ -47,6 +57,12 @@ pub(crate) enum Patch { index: usize, values: SequenceTree<(Value<'static>, ObjId)>, }, + SpliceText { + obj: ObjId, + path: Vec<(ObjId, Prop)>, + index: usize, + value: SequenceTree, + }, Increment { obj: ObjId, path: Vec<(ObjId, Prop)>, @@ -69,7 +85,7 @@ pub(crate) enum Patch { impl OpObserver for Observer { fn insert( &mut self, - mut parents: Parents<'_>, + doc: &Automerge, obj: ObjId, index: usize, tagged_value: (Value<'_>, ObjId), @@ -84,103 +100,211 @@ impl OpObserver for Observer { }) = self.patches.last_mut() { let range = *tail_index..=*tail_index + values.len(); - //if tail_obj == &obj && *tail_index + values.len() == index { if tail_obj == &obj && range.contains(&index) { values.insert(index - *tail_index, value); return; } } - let path = parents.path(); - let mut values = SequenceTree::new(); - values.push(value); - let patch = Patch::Insert { - path, - obj, - index, - values, - }; - self.patches.push(patch); - } - } - - fn delete(&mut self, mut parents: Parents<'_>, obj: ObjId, prop: Prop) { - if self.enabled { - if let Some(Patch::Insert { - obj: tail_obj, - index: tail_index, - values, - .. 
- }) = self.patches.last_mut() - { - if let Prop::Seq(index) = prop { - let range = *tail_index..*tail_index + values.len(); - if tail_obj == &obj && range.contains(&index) { - values.remove(index - *tail_index); - return; - } - } - } - let path = parents.path(); - let patch = match prop { - Prop::Map(key) => Patch::DeleteMap { path, obj, key }, - Prop::Seq(index) => Patch::DeleteSeq { + if let Some(path) = self.get_path(doc, &obj) { + let mut values = SequenceTree::new(); + values.push(value); + let patch = Patch::Insert { path, obj, index, - length: 1, - }, - }; - self.patches.push(patch) + values, + }; + self.patches.push(patch); + } + } + } + + fn splice_text(&mut self, doc: &Automerge, obj: ObjId, index: usize, value: &str) { + if self.enabled { + if let Some(Patch::SpliceText { + obj: tail_obj, + index: tail_index, + value: prev_value, + .. + }) = self.patches.last_mut() + { + let range = *tail_index..=*tail_index + prev_value.len(); + if tail_obj == &obj && range.contains(&index) { + let i = index - *tail_index; + for (n, ch) in value.encode_utf16().enumerate() { + prev_value.insert(i + n, ch) + } + return; + } + } + if let Some(path) = self.get_path(doc, &obj) { + let mut v = SequenceTree::new(); + for ch in value.encode_utf16() { + v.push(ch) + } + let patch = Patch::SpliceText { + path, + obj, + index, + value: v, + }; + self.patches.push(patch); + } + } + } + + fn delete_seq(&mut self, doc: &Automerge, obj: ObjId, index: usize, length: usize) { + if self.enabled { + match self.patches.last_mut() { + Some(Patch::SpliceText { + obj: tail_obj, + index: tail_index, + value, + .. + }) => { + let range = *tail_index..*tail_index + value.len(); + if tail_obj == &obj + && range.contains(&index) + && range.contains(&(index + length - 1)) + { + for _ in 0..length { + value.remove(index - *tail_index); + } + return; + } + } + Some(Patch::Insert { + obj: tail_obj, + index: tail_index, + values, + .. 
+ }) => { + let range = *tail_index..*tail_index + values.len(); + if tail_obj == &obj + && range.contains(&index) + && range.contains(&(index + length - 1)) + { + for _ in 0..length { + values.remove(index - *tail_index); + } + return; + } + } + Some(Patch::DeleteSeq { + obj: tail_obj, + index: tail_index, + length: tail_length, + .. + }) => { + if tail_obj == &obj && index == *tail_index { + *tail_length += length; + return; + } + } + _ => {} + } + if let Some(path) = self.get_path(doc, &obj) { + let patch = Patch::DeleteSeq { + path, + obj, + index, + length, + }; + self.patches.push(patch) + } + } + } + + fn delete_map(&mut self, doc: &Automerge, obj: ObjId, key: &str) { + if self.enabled { + if let Some(path) = self.get_path(doc, &obj) { + let patch = Patch::DeleteMap { + path, + obj, + key: key.to_owned(), + }; + self.patches.push(patch) + } } } fn put( &mut self, - mut parents: Parents<'_>, + doc: &Automerge, obj: ObjId, prop: Prop, tagged_value: (Value<'_>, ObjId), - conflict: bool, + _conflict: bool, ) { if self.enabled { - let path = parents.path(); - let value = (tagged_value.0.to_owned(), tagged_value.1); - let patch = match prop { - Prop::Map(key) => Patch::PutMap { - path, - obj, - key, - value, - conflict, - }, - Prop::Seq(index) => Patch::PutSeq { - path, - obj, - index, - value, - conflict, - }, - }; - self.patches.push(patch); + let expose = false; + if let Some(path) = self.get_path(doc, &obj) { + let value = (tagged_value.0.to_owned(), tagged_value.1); + let patch = match prop { + Prop::Map(key) => Patch::PutMap { + path, + obj, + key, + value, + expose, + }, + Prop::Seq(index) => Patch::PutSeq { + path, + obj, + index, + value, + expose, + }, + }; + self.patches.push(patch); + } } } - fn increment( + fn expose( &mut self, - mut parents: Parents<'_>, + doc: &Automerge, obj: ObjId, prop: Prop, - tagged_value: (i64, ObjId), + tagged_value: (Value<'_>, ObjId), + _conflict: bool, ) { if self.enabled { - let path = parents.path(); - let value = 
tagged_value.0; - self.patches.push(Patch::Increment { - path, - obj, - prop, - value, - }) + let expose = true; + if let Some(path) = self.get_path(doc, &obj) { + let value = (tagged_value.0.to_owned(), tagged_value.1); + let patch = match prop { + Prop::Map(key) => Patch::PutMap { + path, + obj, + key, + value, + expose, + }, + Prop::Seq(index) => Patch::PutSeq { + path, + obj, + index, + value, + expose, + }, + }; + self.patches.push(patch); + } + } + } + + fn increment(&mut self, doc: &Automerge, obj: ObjId, prop: Prop, tagged_value: (i64, ObjId)) { + if self.enabled { + if let Some(path) = self.get_path(doc, &obj) { + let value = tagged_value.0; + self.patches.push(Patch::Increment { + path, + obj, + prop, + value, + }) + } } } @@ -219,6 +343,7 @@ impl Patch { Self::PutSeq { path, .. } => path.as_slice(), Self::Increment { path, .. } => path.as_slice(), Self::Insert { path, .. } => path.as_slice(), + Self::SpliceText { path, .. } => path.as_slice(), Self::DeleteMap { path, .. } => path.as_slice(), Self::DeleteSeq { path, .. } => path.as_slice(), } @@ -230,6 +355,7 @@ impl Patch { Self::PutSeq { obj, .. } => obj, Self::Increment { obj, .. } => obj, Self::Insert { obj, .. } => obj, + Self::SpliceText { obj, .. } => obj, Self::DeleteMap { obj, .. } => obj, Self::DeleteSeq { obj, .. } => obj, } @@ -243,11 +369,7 @@ impl TryFrom for JsValue { let result = Object::new(); match p { Patch::PutMap { - path, - key, - value, - conflict, - .. + path, key, value, .. } => { js_set(&result, "action", "put")?; js_set( @@ -256,15 +378,10 @@ impl TryFrom for JsValue { export_path(path.as_slice(), &Prop::Map(key)), )?; js_set(&result, "value", alloc(&value.0).1)?; - js_set(&result, "conflict", &JsValue::from_bool(conflict))?; Ok(result.into()) } Patch::PutSeq { - path, - index, - value, - conflict, - .. + path, index, value, .. 
} => { js_set(&result, "action", "put")?; js_set( @@ -273,7 +390,6 @@ impl TryFrom for JsValue { export_path(path.as_slice(), &Prop::Seq(index)), )?; js_set(&result, "value", alloc(&value.0).1)?; - js_set(&result, "conflict", &JsValue::from_bool(conflict))?; Ok(result.into()) } Patch::Insert { @@ -282,7 +398,7 @@ impl TryFrom for JsValue { values, .. } => { - js_set(&result, "action", "splice")?; + js_set(&result, "action", "insert")?; js_set( &result, "path", @@ -295,6 +411,19 @@ impl TryFrom for JsValue { )?; Ok(result.into()) } + Patch::SpliceText { + path, index, value, .. + } => { + js_set(&result, "action", "splice")?; + js_set( + &result, + "path", + export_path(path.as_slice(), &Prop::Seq(index)), + )?; + let bytes: Vec = value.iter().cloned().collect(); + js_set(&result, "value", String::from_utf16_lossy(bytes.as_slice()))?; + Ok(result.into()) + } Patch::Increment { path, prop, value, .. } => { diff --git a/rust/automerge-wasm/src/value.rs b/rust/automerge-wasm/src/value.rs index b803ea43..643e2881 100644 --- a/rust/automerge-wasm/src/value.rs +++ b/rust/automerge-wasm/src/value.rs @@ -20,10 +20,6 @@ pub(crate) enum Datatype { } impl Datatype { - pub(crate) fn is_sequence(&self) -> bool { - matches!(self, Self::List | Self::Text) - } - pub(crate) fn is_scalar(&self) -> bool { !matches!(self, Self::Map | Self::Table | Self::List | Self::Text) } diff --git a/rust/automerge-wasm/test/apply.ts b/rust/automerge-wasm/test/apply.ts index c89a9ef8..c96ad75c 100644 --- a/rust/automerge-wasm/test/apply.ts +++ b/rust/automerge-wasm/test/apply.ts @@ -104,8 +104,8 @@ describe('Automerge', () => { doc1.putObject("/", "list", "abc"); const patches = doc1.popPatches() assert.deepEqual( patches, [ - { action: 'put', conflict: false, path: [ 'list' ], value: [] }, - { action: 'splice', path: [ 'list', 0 ], values: [ 'a', 'b', 'c' ] }]) + { action: 'put', path: [ 'list' ], value: "" }, + { action: 'splice', path: [ 'list', 0 ], value: 'abc' }]) }) it('it should allow 
registering type wrappers', () => { @@ -140,29 +140,26 @@ describe('Automerge', () => { let mat = doc1.materialize("/") - assert.deepEqual( mat, { notes: "hello world".split("") } ) + assert.deepEqual( mat, { notes: "hello world" } ) const doc2 = create() let apply : any = doc2.materialize("/") doc2.enablePatches(true) - doc2.registerDatatype("text", (n: Value[]) => new String(n.join(""))) apply = doc2.applyPatches(apply) doc2.merge(doc1); apply = doc2.applyPatches(apply) assert.deepEqual(_obj(apply), "_root") - assert.deepEqual(_obj(apply['notes']), "1@aaaa") - assert.deepEqual( apply, { notes: new String("hello world") } ) + assert.deepEqual( apply, { notes: "hello world" } ) doc2.splice("/notes", 6, 5, "everyone"); apply = doc2.applyPatches(apply) - assert.deepEqual( apply, { notes: new String("hello everyone") } ) + assert.deepEqual( apply, { notes: "hello everyone" } ) mat = doc2.materialize("/") assert.deepEqual(_obj(mat), "_root") // @ts-ignore - assert.deepEqual(_obj(mat.notes), "1@aaaa") - assert.deepEqual( mat, { notes: new String("hello everyone") } ) + assert.deepEqual( mat, { notes: "hello everyone" } ) }) it('should set the OBJECT_ID property on lists, maps, and text objects and not on scalars', () => { @@ -189,8 +186,8 @@ describe('Automerge', () => { assert.equal(_obj(applied.bytes), null) assert.equal(_obj(applied.counter), null) assert.equal(_obj(applied.date), null) + assert.equal(_obj(applied.text), null) - assert.notEqual(_obj(applied.text), null) assert.notEqual(_obj(applied.list), null) assert.notEqual(_obj(applied.map), null) }) diff --git a/rust/automerge-wasm/test/readme.ts b/rust/automerge-wasm/test/readme.ts index 5fbac867..18c55055 100644 --- a/rust/automerge-wasm/test/readme.ts +++ b/rust/automerge-wasm/test/readme.ts @@ -118,12 +118,6 @@ describe('Automerge', () => { doc.splice(notes, 6, 5, "everyone") assert.deepEqual(doc.text(notes), "Hello everyone") - - const obj = doc.insertObject(notes, 6, { hi: "there" }) - - 
assert.deepEqual(doc.text(notes), "Hello \ufffceveryone") - assert.deepEqual(doc.get(notes, 6), obj) - assert.deepEqual(doc.get(obj, "hi"), "there") }) it('Querying Data (1)', () => { const doc1 = create("aabbcc") diff --git a/rust/automerge-wasm/test/test.ts b/rust/automerge-wasm/test/test.ts index 3e6abf69..64690b90 100644 --- a/rust/automerge-wasm/test/test.ts +++ b/rust/automerge-wasm/test/test.ts @@ -2,7 +2,7 @@ import { describe, it } from 'mocha'; import assert from 'assert' // @ts-ignore import { BloomFilter } from './helpers/sync' -import { create, load, SyncState, Automerge, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' +import { create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' import { Value, DecodedSyncMessage, Hash } from '..'; function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { @@ -222,8 +222,8 @@ describe('Automerge', () => { const text = doc.putObject(root, "text", ""); doc.splice(text, 0, 0, "hello ") - doc.splice(text, 6, 0, ["w", "o", "r", "l", "d"]) - doc.splice(text, 11, 0, ["!", "?"]) + doc.splice(text, 6, 0, "world") + doc.splice(text, 11, 0, "!?") assert.deepEqual(doc.getWithType(text, 0), ["str", "h"]) assert.deepEqual(doc.getWithType(text, 1), ["str", "e"]) assert.deepEqual(doc.getWithType(text, 9), ["str", "l"]) @@ -232,13 +232,12 @@ describe('Automerge', () => { assert.deepEqual(doc.getWithType(text, 12), ["str", "?"]) }) - it('should be able to insert objects into text', () => { + it('should NOT be able to insert objects into text', () => { const doc = create() const text = doc.putObject("/", "text", "Hello world"); - const obj = doc.insertObject(text, 6, { hello: "world" }); - assert.deepEqual(doc.text(text), "Hello \ufffcworld"); - assert.deepEqual(doc.getWithType(text, 6), ["map", obj]); - 
assert.deepEqual(doc.getWithType(obj, "hello"), ["str", "world"]); + assert.throws(() => { + doc.insertObject(text, 6, { hello: "world" }); + }) }) it('should be able save all or incrementally', () => { @@ -374,7 +373,6 @@ describe('Automerge', () => { it('recursive sets are possible', () => { const doc = create("aaaa") - doc.registerDatatype("text", (n: Value[]) => new String(n.join(""))) const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]]) const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) doc.putObject("_root", "info1", "hello world") // 'text' object @@ -382,13 +380,13 @@ describe('Automerge', () => { const l4 = doc.putObject("_root", "info3", "hello world") assert.deepEqual(doc.materialize(), { "list": [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]], - "info1": new String("hello world"), + "info1": "hello world", "info2": "hello world", - "info3": new String("hello world"), + "info3": "hello world", }) assert.deepEqual(doc.materialize(l2), { zip: ["a", "b"] }) assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]]) - assert.deepEqual(doc.materialize(l4), new String("hello world")) + assert.deepEqual(doc.materialize(l4), "hello world") }) it('only returns an object id when objects are created', () => { @@ -477,7 +475,7 @@ describe('Automerge', () => { doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['hello'], value: 'world', conflict: false } + { action: 'put', path: ['hello'], value: 'world' } ]) }) @@ -487,9 +485,9 @@ describe('Automerge', () => { doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: [ 'birds' ], value: {}, conflict: false }, - { action: 'put', path: [ 'birds', 'friday' ], value: {}, conflict: false }, - { action: 'put', path: [ 'birds', 'friday', 'robins' ], value: 3, conflict: false}, + { action: 'put', path: [ 'birds' ], 
value: {} }, + { action: 'put', path: [ 'birds', 'friday' ], value: {} }, + { action: 'put', path: [ 'birds', 'friday', 'robins' ], value: 3}, ]) }) @@ -501,7 +499,7 @@ describe('Automerge', () => { doc1.delete('_root', 'favouriteBird') doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: [ 'favouriteBird' ], value: 'Robin', conflict: false }, + { action: 'put', path: [ 'favouriteBird' ], value: 'Robin' }, { action: 'del', path: [ 'favouriteBird' ] } ]) }) @@ -512,8 +510,8 @@ describe('Automerge', () => { doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: [ 'birds' ], value: [], conflict: false }, - { action: 'splice', path: [ 'birds', 0 ], values: ['Goldfinch', 'Chaffinch'] }, + { action: 'put', path: [ 'birds' ], value: [] }, + { action: 'insert', path: [ 'birds', 0 ], values: ['Goldfinch', 'Chaffinch'] }, ]) }) @@ -525,9 +523,9 @@ describe('Automerge', () => { doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'splice', path: [ 'birds', 0 ], values: [{}] }, - { action: 'put', path: [ 'birds', 0, 'species' ], value: 'Goldfinch', conflict: false }, - { action: 'put', path: [ 'birds', 0, 'count', ], value: 3, conflict: false } + { action: 'insert', path: [ 'birds', 0 ], values: [{}] }, + { action: 'put', path: [ 'birds', 0, 'species' ], value: 'Goldfinch' }, + { action: 'put', path: [ 'birds', 0, 'count', ], value: 3 } ]) }) @@ -543,7 +541,7 @@ describe('Automerge', () => { assert.deepEqual(doc1.getWithType('1@aaaa', 1), ['str', 'Greenfinch']) assert.deepEqual(doc2.popPatches(), [ { action: 'del', path: ['birds', 0] }, - { action: 'splice', path: ['birds', 1], values: ['Greenfinch'] } + { action: 'insert', path: ['birds', 1], values: ['Greenfinch'] } ]) }) @@ -566,10 +564,10 @@ describe('Automerge', () => { assert.deepEqual([0, 1, 2, 3].map(i => 
(doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) assert.deepEqual([0, 1, 2, 3].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) assert.deepEqual(doc3.popPatches(), [ - { action: 'splice', path: ['values', 0], values:['a','b','c','d'] }, + { action: 'insert', path: ['values', 0], values:['a','b','c','d'] }, ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'splice', path: ['values',0], values:['a','b','c','d'] }, + { action: 'insert', path: ['values',0], values:['a','b','c','d'] }, ]) }) @@ -592,10 +590,10 @@ describe('Automerge', () => { assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) assert.deepEqual(doc3.popPatches(), [ - { action: 'splice', path: ['values', 2], values: ['c','d','e','f'] }, + { action: 'insert', path: ['values', 2], values: ['c','d','e','f'] }, ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'splice', path: ['values', 2], values: ['c','d','e','f'] }, + { action: 'insert', path: ['values', 2], values: ['c','d','e','f'] }, ]) }) @@ -613,12 +611,11 @@ describe('Automerge', () => { assert.deepEqual(doc4.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc4.getAll('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Greenfinch', conflict: false }, - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, + { action: 'put', path: ['bird'], value: 'Greenfinch' }, + { action: 'put', path: ['bird'], value: 'Goldfinch' }, ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false }, - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, + { action: 'put', path: ['bird'], value: 'Goldfinch' }, ]) }) @@ 
-647,17 +644,13 @@ describe('Automerge', () => { ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] ]) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Chaffinch', conflict: true }, - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true } + { action: 'put', path: ['bird'], value: 'Chaffinch' }, + { action: 'put', path: ['bird'], value: 'Goldfinch' } ]) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true } - ]) - assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true } + { action: 'put', path: ['bird'], value: 'Goldfinch' }, ]) + assert.deepEqual(doc3.popPatches(), [ ]) }) it('should allow a conflict to be resolved', () => { @@ -672,9 +665,9 @@ describe('Automerge', () => { doc3.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Greenfinch', conflict: false }, - { action: 'put', path: ['bird'], value: 'Chaffinch', conflict: true }, - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false } + { action: 'put', path: ['bird'], value: 'Greenfinch' }, + { action: 'put', path: ['bird'], value: 'Chaffinch' }, + { action: 'put', path: ['bird'], value: 'Goldfinch' } ]) }) @@ -694,10 +687,10 @@ describe('Automerge', () => { assert.deepEqual(doc2.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc2.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false } + { action: 'put', path: ['bird'], value: 'Goldfinch' } ]) 
assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false } + { action: 'put', path: ['bird'], value: 'Goldfinch' } ]) }) @@ -720,12 +713,11 @@ describe('Automerge', () => { assert.deepEqual(doc4.getWithType('1@aaaa', 0), ['str', 'Redwing']) assert.deepEqual(doc4.getAll('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['birds',0], value: 'Song Thrush', conflict: false }, - { action: 'put', path: ['birds',0], value: 'Redwing', conflict: true } + { action: 'put', path: ['birds',0], value: 'Song Thrush' }, + { action: 'put', path: ['birds',0], value: 'Redwing' } ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'put', path: ['birds',0], value: 'Redwing', conflict: false }, - { action: 'put', path: ['birds',0], value: 'Redwing', conflict: true } + { action: 'put', path: ['birds',0], value: 'Redwing' }, ]) }) @@ -751,15 +743,14 @@ describe('Automerge', () => { assert.deepEqual(doc4.getAll('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']]) assert.deepEqual(doc3.popPatches(), [ { action: 'del', path: ['birds',0], }, - { action: 'put', path: ['birds',1], value: 'Song Thrush', conflict: false }, - { action: 'splice', path: ['birds',0], values: ['Ring-necked parakeet'] }, - { action: 'put', path: ['birds',2], value: 'Redwing', conflict: true } + { action: 'put', path: ['birds',1], value: 'Song Thrush' }, + { action: 'insert', path: ['birds',0], values: ['Ring-necked parakeet'] }, + { action: 'put', path: ['birds',2], value: 'Redwing' } ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'put', path: ['birds',0], value: 'Ring-necked parakeet', conflict: false }, - { action: 'put', path: ['birds',2], value: 'Redwing', conflict: false }, - { action: 'put', path: ['birds',0], value: 'Ring-necked parakeet', conflict: false }, - { action: 'put', path: ['birds',2], value: 'Redwing', conflict: true } + { 
action: 'put', path: ['birds',0], value: 'Ring-necked parakeet' }, + { action: 'put', path: ['birds',2], value: 'Redwing' }, + { action: 'put', path: ['birds',0], value: 'Ring-necked parakeet' }, ]) }) @@ -775,14 +766,14 @@ describe('Automerge', () => { doc3.loadIncremental(change2) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa'], ['str', 'Wren', '1@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Robin', conflict: false }, - { action: 'put', path: ['bird'], value: 'Wren', conflict: true } + { action: 'put', path: ['bird'], value: 'Robin' }, + { action: 'put', path: ['bird'], value: 'Wren' } ]) doc3.loadIncremental(change3) assert.deepEqual(doc3.getWithType('_root', 'bird'), ['str', 'Robin']) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Robin', conflict: false } + { action: 'put', path: ['bird'], value: 'Robin' } ]) }) @@ -797,14 +788,11 @@ describe('Automerge', () => { doc2.loadIncremental(change1) assert.deepEqual(doc1.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['birds'], value: {}, conflict: true }, - { action: 'put', path: ['birds', 'Sparrowhawk'], value: 1, conflict: false } + { action: 'put', path: ['birds'], value: {} }, + { action: 'put', path: ['birds', 'Sparrowhawk'], value: 1 } ]) assert.deepEqual(doc2.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) - assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['birds'], value: {}, conflict: true }, - { action: 'splice', path: ['birds',0], values: ['Parakeet'] } - ]) + assert.deepEqual(doc2.popPatches(), []) }) it('should support date objects', () => { @@ -814,7 +802,7 @@ describe('Automerge', () => { doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.getWithType('_root', 'createdAt'), ['timestamp', 
now]) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['createdAt'], value: now, conflict: false } + { action: 'put', path: ['createdAt'], value: now } ]) }) @@ -828,11 +816,11 @@ describe('Automerge', () => { doc1.putObject('_root', 'list', []) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['key1'], value: 1, conflict: false }, - { action: 'put', path: ['key1'], value: 2, conflict: false }, - { action: 'put', path: ['key2'], value: 3, conflict: false }, - { action: 'put', path: ['map'], value: {}, conflict: false }, - { action: 'put', path: ['list'], value: [], conflict: false }, + { action: 'put', path: ['key1'], value: 1 }, + { action: 'put', path: ['key1'], value: 2 }, + { action: 'put', path: ['key2'], value: 3 }, + { action: 'put', path: ['map'], value: {} }, + { action: 'put', path: ['list'], value: [] }, ]) }) @@ -847,8 +835,8 @@ describe('Automerge', () => { doc1.insertObject(list, 2, []) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['list'], value: [], conflict: false }, - { action: 'splice', path: ['list', 0], values: [2,1,[],{},3] }, + { action: 'put', path: ['list'], value: [] }, + { action: 'insert', path: ['list', 0], values: [2,1,[],{},3] }, ]) }) @@ -861,8 +849,8 @@ describe('Automerge', () => { doc1.pushObject(list, []) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['list'], value: [], conflict: false }, - { action: 'splice', path: ['list',0], values: [1,{},[]] }, + { action: 'put', path: ['list'], value: [] }, + { action: 'insert', path: ['list',0], values: [1,{},[]] }, ]) }) @@ -874,8 +862,8 @@ describe('Automerge', () => { doc1.splice(list, 1, 2) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['list'], value: [], conflict: false }, - { action: 'splice', path: ['list',0], values: [1,4] }, + { action: 'put', path: ['list'], value: [] }, + { action: 'insert', path: ['list',0], values: [1,4] }, ]) }) @@ -886,7 +874,7 @@ describe('Automerge', () => { 
doc1.increment('_root', 'counter', 4) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['counter'], value: 2, conflict: false }, + { action: 'put', path: ['counter'], value: 2 }, { action: 'inc', path: ['counter'], value: 4 }, ]) }) @@ -900,8 +888,8 @@ describe('Automerge', () => { doc1.delete('_root', 'key1') doc1.delete('_root', 'key2') assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['key1'], value: 1, conflict: false }, - { action: 'put', path: ['key2'], value: 2, conflict: false }, + { action: 'put', path: ['key1'], value: 1 }, + { action: 'put', path: ['key2'], value: 2 }, { action: 'del', path: ['key1'], }, { action: 'del', path: ['key2'], }, ]) @@ -916,7 +904,7 @@ describe('Automerge', () => { doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.getWithType('_root', 'starlings'), ['counter', 3]) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['starlings'], value: 2, conflict: false }, + { action: 'put', path: ['starlings'], value: 2 }, { action: 'inc', path: ['starlings'], value: 1 } ]) }) @@ -934,8 +922,8 @@ describe('Automerge', () => { doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['list'], value: [], conflict: false }, - { action: 'splice', path: ['list',0], values: [1] }, + { action: 'put', path: ['list'], value: [] }, + { action: 'insert', path: ['list',0], values: [1] }, { action: 'inc', path: ['list',0], value: 2 }, { action: 'inc', path: ['list',0], value: -5 }, ]) @@ -1940,5 +1928,144 @@ describe('Automerge', () => { assert.deepStrictEqual(s1.sharedHeads, [c2, c8].sort()) }) }) + + it('can handle overlappying splices', () => { + const doc = create() + doc.enablePatches(true) + let mat : any = doc.materialize("/") + doc.putObject("/", "text", "abcdefghij") + doc.splice("/text", 2, 2, "00") + doc.splice("/text", 3, 5, "11") + mat = doc.applyPatches(mat) + assert.deepEqual(mat.text, "ab011ij") + }) + + it('can handle utf16 text', 
() => { + const doc = create() + doc.enablePatches(true) + let mat : any = doc.materialize("/") + + doc.putObject("/", "width1", "AAAAAA") + doc.putObject("/", "width2", "🐻🐻🐻🐻🐻🐻") + doc.putObject("/", "mixed", "A🐻A🐻A🐻") + + assert.deepEqual(doc.length("/width1"), 6); + assert.deepEqual(doc.length("/width2"), 12); + assert.deepEqual(doc.length("/mixed"), 9); + + let heads1 = doc.getHeads(); + + mat = doc.applyPatches(mat) + + const remote = load(doc.save()) + remote.enablePatches(true) + let r_mat : any = remote.materialize("/") + + assert.deepEqual(mat, { width1: "AAAAAA", width2: "🐻🐻🐻🐻🐻🐻", mixed: "A🐻A🐻A🐻" }) + assert.deepEqual(mat.width1.slice(2,4), "AA") + assert.deepEqual(mat.width2.slice(2,4), "🐻") + assert.deepEqual(mat.mixed.slice(1,4), "🐻A") + + assert.deepEqual(r_mat, { width1: "AAAAAA", width2: "🐻🐻🐻🐻🐻🐻", mixed: "A🐻A🐻A🐻" }) + assert.deepEqual(r_mat.width1.slice(2,4), "AA") + assert.deepEqual(r_mat.width2.slice(2,4), "🐻") + assert.deepEqual(r_mat.mixed.slice(1,4), "🐻A") + + doc.splice("/width1", 2, 2, "🐻") + doc.splice("/width2", 2, 2, "A🐻A") + doc.splice("/mixed", 3, 3, "X") + + mat = doc.applyPatches(mat) + remote.loadIncremental(doc.saveIncremental()); + r_mat = remote.applyPatches(r_mat) + + assert.deepEqual(mat.width1, "AA🐻AA") + assert.deepEqual(mat.width2, "🐻A🐻A🐻🐻🐻🐻") + assert.deepEqual(mat.mixed, "A🐻XA🐻") + + assert.deepEqual(r_mat.width1, "AA🐻AA") + assert.deepEqual(r_mat.width2, "🐻A🐻A🐻🐻🐻🐻") + assert.deepEqual(r_mat.mixed, "A🐻XA🐻") + assert.deepEqual(remote.length("/width1"), 6); + assert.deepEqual(remote.length("/width2"), 14); + assert.deepEqual(remote.length("/mixed"), 7); + + // when indexing in the middle of a multibyte char it indexes at the char before + doc.splice("/width2", 4, 1, "X") + mat = doc.applyPatches(mat) + remote.loadIncremental(doc.saveIncremental()); + r_mat = remote.applyPatches(r_mat) + + assert.deepEqual(mat.width2, "🐻AXA🐻🐻🐻🐻") + + assert.deepEqual(doc.length("/width1", heads1), 6); + assert.deepEqual(doc.length("/width2", 
heads1), 12); + assert.deepEqual(doc.length("/mixed", heads1), 9); + + assert.deepEqual(doc.get("/mixed", 0), 'A'); + assert.deepEqual(doc.get("/mixed", 1), '🐻'); + assert.deepEqual(doc.get("/mixed", 2), '🐻'); + assert.deepEqual(doc.get("/mixed", 3), 'X'); + assert.deepEqual(doc.get("/mixed", 1, heads1), '🐻'); + assert.deepEqual(doc.get("/mixed", 2, heads1), '🐻'); + assert.deepEqual(doc.get("/mixed", 3, heads1), 'A'); + assert.deepEqual(doc.get("/mixed", 4, heads1), '🐻'); + }) + + it('can handle non-characters embedded in text', () => { + let change : any = { + ops: [ + { action: 'makeText', obj: '_root', key: 'bad_text', pred: [] }, + { action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'A', pred: [] }, + { action: 'set', obj: '1@aaaa', elemId: '2@aaaa', insert: true, value: 'BBBBB', pred: [] }, + { action: 'makeMap', obj: '1@aaaa', elemId: '3@aaaa', insert: true, pred: [] }, + { action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'C', pred: [] } + ], + actor: 'aaaa', + seq: 1, + startOp: 1, + time: 0, + message: null, + deps: [] + } + const doc = load(encodeChange(change)); + doc.enablePatches(true) + let mat : any = doc.materialize("/") + + // multi - char strings appear as a span of strings + // non strings appear as an object replacement unicode char + assert.deepEqual(mat.bad_text, 'ABBBBBC') + assert.deepEqual(doc.text("/bad_text"), 'ABBBBBC') + assert.deepEqual(doc.materialize("/bad_text"), 'ABBBBBC') + + // deleting in the middle of a multi-byte character will delete the whole thing + let doc1 = doc.fork() + doc1.splice("/bad_text", 3, 3, "X"); + assert.deepEqual(doc1.text("/bad_text"), 'AXC') + + // deleting in the middle of a multi-byte character will delete the whole thing + // and characters past its end + let doc2 = doc.fork() + doc2.splice("/bad_text", 3, 4, "X"); + assert.deepEqual(doc2.text("/bad_text"), 'AXC') + + let doc3 = doc.fork() + doc3.splice("/bad_text", 3, 5, "X"); + 
assert.deepEqual(doc3.text("/bad_text"), 'AX') + + // inserting in the middle of a mutli-bytes span inserts after + let doc4 = doc.fork() + doc4.splice("/bad_text", 3, 0, "X"); + assert.deepEqual(doc4.text("/bad_text"), 'ABBBBBXC') + + // deleting into the middle of a multi-byte span deletes the whole thing + let doc5 = doc.fork() + doc5.splice("/bad_text", 0, 2, "X"); + assert.deepEqual(doc5.text("/bad_text"), 'XC') + + // you can access elements in the text by text index + assert.deepEqual(doc5.getAll("/bad_text", 1), [['map', '4@aaaa' ]]) + assert.deepEqual(doc5.getAll("/bad_text", 2, doc.getHeads()), [['str', 'BBBBB', '3@aaaa' ]]) + }) }) }) diff --git a/rust/automerge/examples/watch.rs b/rust/automerge/examples/watch.rs index 66a9f4f9..1618d6c4 100644 --- a/rust/automerge/examples/watch.rs +++ b/rust/automerge/examples/watch.rs @@ -66,6 +66,17 @@ fn get_changes(doc: &Automerge, patches: Vec) { doc.path_to_object(&obj) ) } + Patch::Splice { + obj, index, value, .. + } => { + println!( + "splice '{:?}' at {:?} in obj {:?}, object path {:?}", + value, + index, + obj, + doc.path_to_object(&obj) + ) + } Patch::Increment { obj, prop, value, .. } => { @@ -83,6 +94,12 @@ fn get_changes(doc: &Automerge, patches: Vec) { obj, doc.path_to_object(&obj) ), + Patch::Expose { obj, prop, .. } => println!( + "expose {:?} in obj {:?}, object path {:?}", + prop, + obj, + doc.path_to_object(&obj) + ), } } } diff --git a/rust/automerge/src/autocommit.rs b/rust/automerge/src/autocommit.rs index fbfc217d..2258fa2e 100644 --- a/rust/automerge/src/autocommit.rs +++ b/rust/automerge/src/autocommit.rs @@ -8,7 +8,7 @@ use crate::{ }; use crate::{ transaction::{Observation, Observed, TransactionInner, UnObserved}, - ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, Value, Values, + ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, TextEncoding, Value, Values, }; /// An automerge document that automatically manages transactions. 
@@ -125,6 +125,11 @@ impl AutoCommitWithObs { self.doc.get_actor() } + pub fn with_encoding(mut self, encoding: TextEncoding) -> Self { + self.doc.text_encoding = encoding; + self + } + fn ensure_transaction_open(&mut self) { if self.transaction.is_none() { let args = self.doc.transaction_args(); @@ -221,7 +226,7 @@ impl AutoCommitWithObs { self.doc.get_changes_added(&other.doc) } - pub fn import(&self, s: &str) -> Result { + pub fn import(&self, s: &str) -> Result<(ExId, ObjType), AutomergeError> { self.doc.import(s) } @@ -389,7 +394,7 @@ impl Transactable for AutoCommitWithObs { self.doc.length_at(obj, heads) } - fn object_type>(&self, obj: O) -> Option { + fn object_type>(&self, obj: O) -> Result { self.doc.object_type(obj) } @@ -491,6 +496,25 @@ impl Transactable for AutoCommitWithObs { ) } + fn splice_text>( + &mut self, + obj: O, + pos: usize, + del: usize, + text: &str, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.splice_text( + &mut self.doc, + current.observer(), + obj.as_ref(), + pos, + del, + text, + ) + } + fn text>(&self, obj: O) -> Result { self.doc.text(obj) } diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs index dfca44cc..7a5340e6 100644 --- a/rust/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ -17,8 +17,8 @@ use crate::transaction::{ self, CommitOptions, Failure, Observed, Success, Transaction, TransactionArgs, UnObserved, }; use crate::types::{ - ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, - ScalarValue, Value, + ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ListEncoding, ObjId, Op, OpId, + OpType, ScalarValue, TextEncoding, Value, }; use crate::{ query, AutomergeError, Change, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, @@ -58,6 +58,7 @@ pub struct Automerge { pub(crate) actor: Actor, /// The maximum operation counter this document 
has seen. pub(crate) max_op: u64, + pub(crate) text_encoding: TextEncoding, } impl Automerge { @@ -74,9 +75,15 @@ impl Automerge { saved: Default::default(), actor: Actor::Unused(ActorId::random()), max_op: 0, + text_encoding: Default::default(), } } + pub fn with_encoding(mut self, encoding: TextEncoding) -> Self { + self.text_encoding = encoding; + self + } + /// Set the actor id for this document. pub fn with_actor(mut self, actor: ActorId) -> Self { self.actor = Actor::Unused(actor); @@ -314,7 +321,7 @@ impl Automerge { /// This function may in future be changed to allow getting the parents from the id of a scalar /// value. pub fn parents>(&self, obj: O) -> Result, AutomergeError> { - let obj_id = self.exid_to_obj(obj.as_ref())?; + let (obj_id, _) = self.exid_to_obj(obj.as_ref())?; Ok(self.ops.parents(obj_id)) } @@ -322,9 +329,7 @@ impl Automerge { &self, obj: O, ) -> Result, AutomergeError> { - let mut path = self.parents(obj.as_ref().clone())?.collect::>(); - path.reverse(); - Ok(path) + Ok(self.parents(obj.as_ref().clone())?.path()) } /// Get the keys of the object `obj`. @@ -332,7 +337,7 @@ impl Automerge { /// For a map this returns the keys of the map. /// For a list this returns the element ids (opids) encoded as strings. pub fn keys>(&self, obj: O) -> Keys<'_, '_> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { let iter_keys = self.ops.keys(obj); Keys::new(self, iter_keys) } else { @@ -342,7 +347,7 @@ impl Automerge { /// Historical version of [`keys`](Self::keys). 
pub fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { if let Ok(clock) = self.clock_at(heads) { return KeysAt::new(self, self.ops.keys_at(obj, clock)); } @@ -356,7 +361,7 @@ impl Automerge { obj: O, range: R, ) -> MapRange<'_, R> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { MapRange::new(self, self.ops.map_range(obj, range)) } else { MapRange::new(self, None) @@ -370,7 +375,7 @@ impl Automerge { range: R, heads: &[ChangeHash], ) -> MapRangeAt<'_, R> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { if let Ok(clock) = self.clock_at(heads) { let iter_range = self.ops.map_range_at(obj, range, clock); return MapRangeAt::new(self, iter_range); @@ -385,7 +390,7 @@ impl Automerge { obj: O, range: R, ) -> ListRange<'_, R> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { ListRange::new(self, self.ops.list_range(obj, range)) } else { ListRange::new(self, None) @@ -399,7 +404,7 @@ impl Automerge { range: R, heads: &[ChangeHash], ) -> ListRangeAt<'_, R> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { if let Ok(clock) = self.clock_at(heads) { let iter_range = self.ops.list_range_at(obj, range, clock); return ListRangeAt::new(self, iter_range); @@ -409,11 +414,11 @@ impl Automerge { } pub fn values>(&self, obj: O) -> Values<'_> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - match self.ops.object_type(&obj) { - Some(t) if t.is_sequence() => Values::new(self, self.ops.list_range(obj, ..)), - Some(_) => Values::new(self, self.ops.map_range(obj, ..)), - None => Values::empty(self), + if let Ok((obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { + if obj_type.is_sequence() { + Values::new(self, 
self.ops.list_range(obj, ..)) + } else { + Values::new(self, self.ops.map_range(obj, ..)) } } else { Values::empty(self) @@ -421,18 +426,17 @@ impl Automerge { } pub fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + if let Ok((obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { if let Ok(clock) = self.clock_at(heads) { - return match self.ops.object_type(&obj) { - Some(ObjType::Map) | Some(ObjType::Table) => { + return match obj_type { + ObjType::Map | ObjType::Table => { let iter_range = self.ops.map_range_at(obj, .., clock); Values::new(self, iter_range) } - Some(ObjType::List) | Some(ObjType::Text) => { + ObjType::List | ObjType::Text => { let iter_range = self.ops.list_range_at(obj, .., clock); Values::new(self, iter_range) } - None => Values::empty(self), }; } } @@ -441,13 +445,12 @@ impl Automerge { /// Get the length of the given object. pub fn length>(&self, obj: O) -> usize { - if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { - match self.ops.object_type(&inner_obj) { - Some(ObjType::Map) | Some(ObjType::Table) => self.keys(obj).count(), - Some(ObjType::List) | Some(ObjType::Text) => { - self.ops.search(&inner_obj, query::Len::new()).len - } - None => 0, + if let Ok((inner_obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { + if obj_type == ObjType::Map || obj_type == ObjType::Table { + self.keys(obj).count() + } else { + let encoding = ListEncoding::new(obj_type, self.text_encoding); + self.ops.search(&inner_obj, query::Len::new(encoding)).len } } else { 0 @@ -456,14 +459,15 @@ impl Automerge { /// Historical version of [`length`](Self::length). 
pub fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { - if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { + if let Ok((inner_obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { if let Ok(clock) = self.clock_at(heads) { - return match self.ops.object_type(&inner_obj) { - Some(ObjType::Map) | Some(ObjType::Table) => self.keys_at(obj, heads).count(), - Some(ObjType::List) | Some(ObjType::Text) => { - self.ops.search(&inner_obj, query::LenAt::new(clock)).len - } - None => 0, + return if obj_type == ObjType::Map || obj_type == ObjType::Table { + self.keys_at(obj, heads).count() + } else { + let encoding = ListEncoding::new(obj_type, self.text_encoding); + self.ops + .search(&inner_obj, query::LenAt::new(clock, encoding)) + .len }; } } @@ -471,14 +475,14 @@ impl Automerge { } /// Get the type of this object, if it is an object. - pub fn object_type>(&self, obj: O) -> Option { - let obj = self.exid_to_obj(obj.as_ref()).ok()?; - self.ops.object_type(&obj) + pub fn object_type>(&self, obj: O) -> Result { + let (_, obj_type) = self.exid_to_obj(obj.as_ref())?; + Ok(obj_type) } - pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result { + pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result<(ObjId, ObjType), AutomergeError> { match id { - ExId::Root => Ok(ObjId::root()), + ExId::Root => Ok((ObjId::root(), ObjType::Map)), ExId::Id(ctr, actor, idx) => { // do a direct get here b/c this could be foriegn and not be within the array // bounds @@ -494,8 +498,8 @@ impl Automerge { .ok_or(AutomergeError::Fail)?; ObjId(OpId(*ctr, idx)) }; - if self.ops.object_type(&obj).is_some() { - Ok(obj) + if let Some(obj_type) = self.ops.object_type(&obj) { + Ok((obj, obj_type)) } else { Err(AutomergeError::NotAnObject) } @@ -509,15 +513,11 @@ impl Automerge { /// Get the string represented by the given text object. 
pub fn text>(&self, obj: O) -> Result { - let obj = self.exid_to_obj(obj.as_ref())?; + let obj = self.exid_to_obj(obj.as_ref())?.0; let query = self.ops.search(&obj, query::ListVals::new()); let mut buffer = String::new(); for q in &query.ops { - if let OpType::Put(ScalarValue::Str(s)) = &q.action { - buffer.push_str(s); - } else { - buffer.push('\u{fffc}'); - } + buffer.push_str(q.to_str()); } Ok(buffer) } @@ -528,7 +528,7 @@ impl Automerge { obj: O, heads: &[ChangeHash], ) -> Result { - let obj = self.exid_to_obj(obj.as_ref())?; + let obj = self.exid_to_obj(obj.as_ref())?.0; let clock = self.clock_at(heads)?; let query = self.ops.search(&obj, query::ListValsAt::new(clock)); let mut buffer = String::new(); @@ -576,7 +576,7 @@ impl Automerge { obj: O, prop: P, ) -> Result, ExId)>, AutomergeError> { - let obj = self.exid_to_obj(obj.as_ref())?; + let obj = self.exid_to_obj(obj.as_ref())?.0; let mut result = match prop.into() { Prop::Map(p) => { let prop = self.ops.m.props.lookup(&p); @@ -591,13 +591,18 @@ impl Automerge { vec![] } } - Prop::Seq(n) => self - .ops - .search(&obj, query::Nth::new(n)) - .ops - .into_iter() - .map(|o| (o.value(), self.id_to_exid(o.id))) - .collect(), + Prop::Seq(n) => { + let obj_type = self.ops.object_type(&obj); + let encoding = obj_type + .map(|o| ListEncoding::new(o, self.text_encoding)) + .unwrap_or_default(); + self.ops + .search(&obj, query::Nth::new(n, encoding)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect() + } }; result.sort_by(|a, b| b.1.cmp(&a.1)); Ok(result) @@ -611,7 +616,7 @@ impl Automerge { heads: &[ChangeHash], ) -> Result, ExId)>, AutomergeError> { let prop = prop.into(); - let obj = self.exid_to_obj(obj.as_ref())?; + let obj = self.exid_to_obj(obj.as_ref())?.0; let clock = self.clock_at(heads)?; let result = match prop { Prop::Map(p) => { @@ -627,13 +632,18 @@ impl Automerge { vec![] } } - Prop::Seq(n) => self - .ops - .search(&obj, query::NthAt::new(n, clock)) - .ops - .into_iter() 
- .map(|o| (o.clone_value(), self.id_to_exid(o.id))) - .collect(), + Prop::Seq(n) => { + let obj_type = self.ops.object_type(&obj); + let encoding = obj_type + .map(|o| ListEncoding::new(o, self.text_encoding)) + .unwrap_or_default(); + self.ops + .search(&obj, query::NthAt::new(n, clock, encoding)) + .ops + .into_iter() + .map(|o| (o.clone_value(), self.id_to_exid(o.id))) + .collect() + } }; Ok(result) } @@ -696,6 +706,7 @@ impl Automerge { saved: Default::default(), actor: Actor::Unused(ActorId::random()), max_op, + text_encoding: Default::default(), } } storage::Chunk::Change(stored_change) => { @@ -806,11 +817,11 @@ impl Automerge { self.update_history(change, ops.len()); if let Some(observer) = observer { for (obj, op) in ops { - self.ops.insert_op_with_observer(&obj, op, *observer); + self.insert_op_with_observer(&obj, op, *observer); } } else { for (obj, op) in ops { - self.ops.insert_op(&obj, op); + self.insert_op(&obj, op); } } } @@ -1160,9 +1171,9 @@ impl Automerge { self.deps.insert(change.hash()); } - pub fn import(&self, s: &str) -> Result { + pub fn import(&self, s: &str) -> Result<(ExId, ObjType), AutomergeError> { if s == "_root" { - Ok(ExId::Root) + Ok((ExId::Root, ObjType::Map)) } else { let n = s .find('@') @@ -1177,11 +1188,11 @@ impl Automerge { .actors .lookup(&actor) .ok_or_else(|| AutomergeError::InvalidObjId(s.to_owned()))?; - Ok(ExId::Id( - counter, - self.ops.m.actors.cache[actor].clone(), - actor, - )) + let obj = ExId::Id(counter, self.ops.m.actors.cache[actor].clone(), actor); + let obj_type = self + .object_type(&obj) + .map_err(|_| AutomergeError::InvalidObjId(s.to_owned()))?; + Ok((obj, obj_type)) } } @@ -1238,10 +1249,114 @@ impl Automerge { /// visualised #[cfg(feature = "optree-visualisation")] pub fn visualise_optree(&self, objects: Option>) -> String { - let objects = - objects.map(|os| os.iter().filter_map(|o| self.exid_to_obj(o).ok()).collect()); + let objects = objects.map(|os| { + os.iter() + .filter_map(|o| 
self.exid_to_obj(o).ok()) + .map(|o| o.0) + .collect() + }); self.ops.visualise(objects) } + + pub(crate) fn insert_op(&mut self, obj: &ObjId, op: Op) -> Op { + let q = self.ops.search(obj, query::SeekOp::new(&op)); + + let succ = q.succ; + let pos = q.pos; + + self.ops.add_succ(obj, &succ, &op); + + if !op.is_delete() { + self.ops.insert(pos, obj, op.clone()); + } + op + } + + pub(crate) fn insert_op_with_observer( + &mut self, + obj: &ObjId, + op: Op, + observer: &mut Obs, + ) -> Op { + let obj_type = self.ops.object_type(obj); + let encoding = obj_type + .map(|o| ListEncoding::new(o, self.text_encoding)) + .unwrap_or_default(); + let q = self + .ops + .search(obj, query::SeekOpWithPatch::new(&op, encoding)); + + let query::SeekOpWithPatch { + pos, + succ, + seen, + last_width, + values, + had_value_before, + .. + } = q; + + let ex_obj = self.ops.id_to_exid(obj.0); + + let key = match op.key { + Key::Map(index) => self.ops.m.props[index].clone().into(), + Key::Seq(_) => seen.into(), + }; + + if op.insert { + if obj_type == Some(ObjType::Text) { + observer.splice_text(self, ex_obj, seen, op.to_str()); + } else { + let value = (op.value(), self.ops.id_to_exid(op.id)); + observer.insert(self, ex_obj, seen, value); + } + } else if op.is_delete() { + if let Some(winner) = &values.last() { + let value = (winner.value(), self.ops.id_to_exid(winner.id)); + let conflict = values.len() > 1; + observer.expose(self, ex_obj, key, value, conflict); + } else if had_value_before { + match key { + Prop::Map(k) => observer.delete_map(self, ex_obj, &k), + Prop::Seq(index) => observer.delete_seq(self, ex_obj, index, last_width), + } + } + } else if let Some(value) = op.get_increment_value() { + // only observe this increment if the counter is visible, i.e. 
the counter's + // create op is in the values + //if values.iter().any(|value| op.pred.contains(&value.id)) { + if values + .last() + .map(|value| op.pred.contains(&value.id)) + .unwrap_or_default() + { + // we have observed the value + observer.increment(self, ex_obj, key, (value, self.ops.id_to_exid(op.id))); + } + } else { + let just_conflict = values + .last() + .map(|value| self.ops.m.lamport_cmp(op.id, value.id) != Ordering::Greater) + .unwrap_or(false); + let value = (op.value(), self.ops.id_to_exid(op.id)); + if op.is_list_op() && !had_value_before { + observer.insert(self, ex_obj, seen, value); + } else if just_conflict { + observer.flag_conflict(self, ex_obj, key); + } else { + let conflict = !values.is_empty(); + observer.put(self, ex_obj, key, value, conflict); + } + } + + self.ops.add_succ(obj, &succ, &op); + + if !op.is_delete() { + self.ops.insert(pos, obj, op.clone()); + } + + op + } } impl Default for Automerge { diff --git a/rust/automerge/src/automerge/tests.rs b/rust/automerge/src/automerge/tests.rs index d35b2997..050b1fa9 100644 --- a/rust/automerge/src/automerge/tests.rs +++ b/rust/automerge/src/automerge/tests.rs @@ -1318,21 +1318,33 @@ fn compute_list_indexes_correctly_when_list_element_is_split_across_tree_nodes() fn get_parent_objects() { let mut doc = AutoCommit::new(); let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); - let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); + let list = doc.put_object(&map, "b", ObjType::List).unwrap(); doc.insert(&list, 0, 2).unwrap(); let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); assert_eq!( doc.parents(&map).unwrap().next(), - Some((ROOT, Prop::Map("a".into()))) + Some(Parent { + obj: ROOT, + prop: Prop::Map("a".into()), + visible: true + }) ); assert_eq!( doc.parents(&list).unwrap().next(), - Some((map, Prop::Seq(0))) + Some(Parent { + obj: map, + prop: Prop::Map("b".into()), + visible: true + }) ); assert_eq!( doc.parents(&text).unwrap().next(), - Some((list, 
Prop::Seq(0))) + Some(Parent { + obj: list, + prop: Prop::Seq(0), + visible: true + }) ); } @@ -1340,7 +1352,7 @@ fn get_parent_objects() { fn get_path_to_object() { let mut doc = AutoCommit::new(); let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); - let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); + let list = doc.put_object(&map, "b", ObjType::List).unwrap(); doc.insert(&list, 0, 2).unwrap(); let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); @@ -1350,13 +1362,16 @@ fn get_path_to_object() { ); assert_eq!( doc.path_to_object(&list).unwrap(), - vec![(ROOT, Prop::Map("a".into())), (map.clone(), Prop::Seq(0)),] + vec![ + (ROOT, Prop::Map("a".into())), + (map.clone(), Prop::Map("b".into())), + ] ); assert_eq!( doc.path_to_object(&text).unwrap(), vec![ (ROOT, Prop::Map("a".into())), - (map, Prop::Seq(0)), + (map, Prop::Map("b".into())), (list, Prop::Seq(0)), ] ); @@ -1366,14 +1381,35 @@ fn get_path_to_object() { fn parents_iterator() { let mut doc = AutoCommit::new(); let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); - let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); + let list = doc.put_object(&map, "b", ObjType::List).unwrap(); doc.insert(&list, 0, 2).unwrap(); let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); let mut parents = doc.parents(text).unwrap(); - assert_eq!(parents.next(), Some((list, Prop::Seq(0)))); - assert_eq!(parents.next(), Some((map, Prop::Seq(0)))); - assert_eq!(parents.next(), Some((ROOT, Prop::Map("a".into())))); + assert_eq!( + parents.next(), + Some(Parent { + obj: list, + prop: Prop::Seq(0), + visible: true + }) + ); + assert_eq!( + parents.next(), + Some(Parent { + obj: map, + prop: Prop::Map("b".into()), + visible: true + }) + ); + assert_eq!( + parents.next(), + Some(Parent { + obj: ROOT, + prop: Prop::Map("a".into()), + visible: true + }) + ); assert_eq!(parents.next(), None); } @@ -1383,27 +1419,28 @@ fn can_insert_a_grapheme_into_text() { let mut tx = 
doc.transaction(); let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); let polar_bear = "🐻‍❄️"; - tx.insert(&text, 0, polar_bear).unwrap(); + tx.splice_text(&text, 0, 0, polar_bear).unwrap(); tx.commit(); let s = doc.text(&text).unwrap(); assert_eq!(s, polar_bear); let len = doc.length(&text); - assert_eq!(len, 1); // just one grapheme + assert_eq!(len, 4); // 4 utf8 chars } #[test] -fn can_insert_long_string_into_text() { +fn long_strings_spliced_into_text_get_segmented_by_utf8_chars() { let mut doc = Automerge::new(); let mut tx = doc.transaction(); let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); let polar_bear = "🐻‍❄️"; let polar_bear_army = polar_bear.repeat(100); - tx.insert(&text, 0, &polar_bear_army).unwrap(); + tx.splice_text(&text, 0, 0, &polar_bear_army).unwrap(); tx.commit(); let s = doc.text(&text).unwrap(); assert_eq!(s, polar_bear_army); let len = doc.length(&text); - assert_eq!(len, 1); // many graphemes + assert_eq!(len, polar_bear.chars().count() * 100); + assert_eq!(len, 400); } #[test] diff --git a/rust/automerge/src/error.rs b/rust/automerge/src/error.rs index 4e25cfd1..0f024d86 100644 --- a/rust/automerge/src/error.rs +++ b/rust/automerge/src/error.rs @@ -1,7 +1,7 @@ use crate::storage::load::Error as LoadError; use crate::types::{ActorId, ScalarValue}; use crate::value::DataType; -use crate::ChangeHash; +use crate::{ChangeHash, ObjType}; use thiserror::Error; #[derive(Error, Debug)] @@ -28,6 +28,8 @@ pub enum AutomergeError { InvalidObjId(String), #[error("invalid obj id format `{0}`")] InvalidObjIdFormat(String), + #[error("invalid op for object of type `{0}`")] + InvalidOp(ObjType), #[error("seq {0} is out of bounds")] InvalidSeq(u64), #[error("invalid type of value, expected `{expected}` but received `{unexpected}`")] @@ -47,6 +49,12 @@ pub enum AutomergeError { NotAnObject, } +impl PartialEq for AutomergeError { + fn eq(&self, other: &Self) -> bool { + std::mem::discriminant(self) == 
std::mem::discriminant(other) + } +} + #[cfg(feature = "wasm")] impl From for wasm_bindgen::JsValue { fn from(err: AutomergeError) -> Self { diff --git a/rust/automerge/src/lib.rs b/rust/automerge/src/lib.rs index ed29d226..b8604c95 100644 --- a/rust/automerge/src/lib.rs +++ b/rust/automerge/src/lib.rs @@ -105,9 +105,9 @@ pub use map_range_at::MapRangeAt; pub use op_observer::OpObserver; pub use op_observer::Patch; pub use op_observer::VecOpObserver; -pub use parents::Parents; +pub use parents::{Parent, Parents}; pub use sequence_tree::SequenceTree; -pub use types::{ActorId, ChangeHash, ObjType, OpType, ParseChangeHashError, Prop}; +pub use types::{ActorId, ChangeHash, ObjType, OpType, ParseChangeHashError, Prop, TextEncoding}; pub use value::{ScalarValue, Value}; pub use values::Values; diff --git a/rust/automerge/src/op_observer.rs b/rust/automerge/src/op_observer.rs index 82e89277..2150b1de 100644 --- a/rust/automerge/src/op_observer.rs +++ b/rust/automerge/src/op_observer.rs @@ -1,5 +1,5 @@ use crate::exid::ExId; -use crate::Parents; +use crate::Automerge; use crate::Prop; use crate::Value; @@ -7,22 +7,24 @@ use crate::Value; pub trait OpObserver { /// A new value has been inserted into the given object. /// - /// - `parents`: A parents iterator that can be used to collect path information + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information /// - `objid`: the object that has been inserted into. /// - `index`: the index the new value has been inserted at. /// - `tagged_value`: the value that has been inserted and the id of the operation that did the /// insert. fn insert( &mut self, - parents: Parents<'_>, + doc: &Automerge, objid: ExId, index: usize, tagged_value: (Value<'_>, ExId), ); + fn splice_text(&mut self, _doc: &Automerge, _objid: ExId, _index: usize, _value: &str); + /// A new value has been put into the given object. 
/// - /// - `parents`: A parents iterator that can be used to collect path information + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information /// - `objid`: the object that has been put into. /// - `prop`: the prop that the value as been put at. /// - `tagged_value`: the value that has been put into the object and the id of the operation @@ -30,34 +32,74 @@ pub trait OpObserver { /// - `conflict`: whether this put conflicts with other operations. fn put( &mut self, - parents: Parents<'_>, + doc: &Automerge, objid: ExId, prop: Prop, tagged_value: (Value<'_>, ExId), conflict: bool, ); + /// When a delete op exposes a previously conflicted value + /// Similar to a put op - except for maps, lists and text, edits + /// may already exist and need to be queried + /// + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information + /// - `objid`: the object that has been put into. + /// - `prop`: the prop that the value as been put at. + /// - `tagged_value`: the value that has been put into the object and the id of the operation + /// that did the put. + /// - `conflict`: whether this put conflicts with other operations. + fn expose( + &mut self, + doc: &Automerge, + objid: ExId, + prop: Prop, + tagged_value: (Value<'_>, ExId), + conflict: bool, + ); + + /// Flag a new conflict on a value without changing it + /// + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information + /// - `objid`: the object that has been put into. + /// - `prop`: the prop that the value as been put at. + fn flag_conflict(&mut self, _doc: &Automerge, _objid: ExId, _prop: Prop) {} + /// A counter has been incremented. /// - /// - `parents`: A parents iterator that can be used to collect path information + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information /// - `objid`: the object that contains the counter. 
/// - `prop`: they prop that the chounter is at. /// - `tagged_value`: the amount the counter has been incremented by, and the the id of the /// increment operation. - fn increment( - &mut self, - parents: Parents<'_>, - objid: ExId, - prop: Prop, - tagged_value: (i64, ExId), - ); + fn increment(&mut self, doc: &Automerge, objid: ExId, prop: Prop, tagged_value: (i64, ExId)); - /// A value has beeen deleted. + /// A map value has beeen deleted. /// - /// - `parents`: A parents iterator that can be used to collect path information + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information /// - `objid`: the object that has been deleted in. - /// - `prop`: the prop of the value that has been deleted. - fn delete(&mut self, parents: Parents<'_>, objid: ExId, prop: Prop); + /// - `prop`: the prop to be deleted + fn delete(&mut self, doc: &Automerge, objid: ExId, prop: Prop) { + match prop { + Prop::Map(k) => self.delete_map(doc, objid, &k), + Prop::Seq(i) => self.delete_seq(doc, objid, i, 1), + } + } + + /// A map value has beeen deleted. + /// + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information + /// - `objid`: the object that has been deleted in. + /// - `key`: the map key to be deleted + fn delete_map(&mut self, doc: &Automerge, objid: ExId, key: &str); + + /// A one or more list values have beeen deleted. + /// + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information + /// - `objid`: the object that has been deleted in. 
+ /// - `index`: the index of the deletion + /// - `num`: the number of sequential elements deleted + fn delete_seq(&mut self, doc: &Automerge, objid: ExId, index: usize, num: usize); /// Branch of a new op_observer later to be merged /// @@ -77,16 +119,28 @@ pub trait OpObserver { impl OpObserver for () { fn insert( &mut self, - _parents: Parents<'_>, + _doc: &Automerge, _objid: ExId, _index: usize, _tagged_value: (Value<'_>, ExId), ) { } + fn splice_text(&mut self, _doc: &Automerge, _objid: ExId, _index: usize, _value: &str) {} + fn put( &mut self, - _parents: Parents<'_>, + _doc: &Automerge, + _objid: ExId, + _prop: Prop, + _tagged_value: (Value<'_>, ExId), + _conflict: bool, + ) { + } + + fn expose( + &mut self, + _doc: &Automerge, _objid: ExId, _prop: Prop, _tagged_value: (Value<'_>, ExId), @@ -96,14 +150,16 @@ impl OpObserver for () { fn increment( &mut self, - _parents: Parents<'_>, + _doc: &Automerge, _objid: ExId, _prop: Prop, _tagged_value: (i64, ExId), ) { } - fn delete(&mut self, _parents: Parents<'_>, _objid: ExId, _prop: Prop) {} + fn delete_map(&mut self, _doc: &Automerge, _objid: ExId, _key: &str) {} + + fn delete_seq(&mut self, _doc: &Automerge, _objid: ExId, _index: usize, _num: usize) {} fn merge(&mut self, _other: &Self) {} @@ -125,59 +181,97 @@ impl VecOpObserver { } impl OpObserver for VecOpObserver { - fn insert( - &mut self, - mut parents: Parents<'_>, - obj: ExId, - index: usize, - (value, id): (Value<'_>, ExId), - ) { - let path = parents.path(); - self.patches.push(Patch::Insert { - obj, - path, - index, - value: (value.into_owned(), id), - }); + fn insert(&mut self, doc: &Automerge, obj: ExId, index: usize, (value, id): (Value<'_>, ExId)) { + if let Ok(mut p) = doc.parents(&obj) { + self.patches.push(Patch::Insert { + obj, + path: p.path(), + index, + value: (value.into_owned(), id), + }); + } + } + + fn splice_text(&mut self, doc: &Automerge, obj: ExId, index: usize, value: &str) { + if let Ok(mut p) = doc.parents(&obj) { + 
self.patches.push(Patch::Splice { + obj, + path: p.path(), + index, + value: value.to_string(), + }) + } } fn put( &mut self, - mut parents: Parents<'_>, + doc: &Automerge, obj: ExId, prop: Prop, (value, id): (Value<'_>, ExId), conflict: bool, ) { - let path = parents.path(); - self.patches.push(Patch::Put { - obj, - path, - prop, - value: (value.into_owned(), id), - conflict, - }); + if let Ok(mut p) = doc.parents(&obj) { + self.patches.push(Patch::Put { + obj, + path: p.path(), + prop, + value: (value.into_owned(), id), + conflict, + }); + } } - fn increment( + fn expose( &mut self, - mut parents: Parents<'_>, + doc: &Automerge, obj: ExId, prop: Prop, - tagged_value: (i64, ExId), + (value, id): (Value<'_>, ExId), + conflict: bool, ) { - let path = parents.path(); - self.patches.push(Patch::Increment { - obj, - path, - prop, - value: tagged_value, - }); + if let Ok(mut p) = doc.parents(&obj) { + self.patches.push(Patch::Expose { + obj, + path: p.path(), + prop, + value: (value.into_owned(), id), + conflict, + }); + } } - fn delete(&mut self, mut parents: Parents<'_>, obj: ExId, prop: Prop) { - let path = parents.path(); - self.patches.push(Patch::Delete { obj, path, prop }) + fn increment(&mut self, doc: &Automerge, obj: ExId, prop: Prop, tagged_value: (i64, ExId)) { + if let Ok(mut p) = doc.parents(&obj) { + self.patches.push(Patch::Increment { + obj, + path: p.path(), + prop, + value: tagged_value, + }); + } + } + + fn delete_map(&mut self, doc: &Automerge, obj: ExId, key: &str) { + if let Ok(mut p) = doc.parents(&obj) { + self.patches.push(Patch::Delete { + obj, + path: p.path(), + prop: Prop::Map(key.to_owned()), + num: 1, + }) + } + } + + fn delete_seq(&mut self, doc: &Automerge, obj: ExId, index: usize, num: usize) { + if let Ok(mut p) = doc.parents(&obj) { + self.patches.push(Patch::Delete { + obj, + path: p.path(), + prop: Prop::Seq(index), + num, + }) + } } fn merge(&mut self, other: &Self) { @@ -205,7 +299,20 @@ pub enum Patch { /// Whether this put 
conflicts with another. conflict: bool, }, - /// Inserting a new element into a list/text + /// Exposing (via delete) an old but conflicted value with a prop in a map, or a list element + Expose { + /// path to the object + path: Vec<(ExId, Prop)>, + /// The object that was put into. + obj: ExId, + /// The prop that the new value was put at. + prop: Prop, + /// The value that was put, and the id of the operation that put it there. + value: (Value<'static>, ExId), + /// Whether this put conflicts with another. + conflict: bool, + }, + /// Inserting a new element into a list Insert { /// path to the object path: Vec<(ExId, Prop)>, @@ -216,6 +323,17 @@ pub enum Patch { /// The value that was inserted, and the id of the operation that inserted it there. value: (Value<'static>, ExId), }, + /// Splicing a text object + Splice { + /// path to the object + path: Vec<(ExId, Prop)>, + /// The object that was inserted into. + obj: ExId, + /// The index that the new value was inserted at. + index: usize, + /// The value that was spliced + value: String, + }, /// Incrementing a counter. Increment { /// path to the object @@ -236,5 +354,7 @@ pub enum Patch { obj: ExId, /// The prop that was deleted. 
prop: Prop, + /// number of items deleted (for seq) + num: usize, }, } diff --git a/rust/automerge/src/op_set.rs b/rust/automerge/src/op_set.rs index eaccd038..09bc256a 100644 --- a/rust/automerge/src/op_set.rs +++ b/rust/automerge/src/op_set.rs @@ -3,8 +3,8 @@ use crate::exid::ExId; use crate::indexed_cache::IndexedCache; use crate::op_tree::{self, OpTree}; use crate::parents::Parents; -use crate::query::{self, OpIdSearch, TreeQuery}; -use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpIds, OpType, Prop}; +use crate::query::{self, OpIdVisSearch, TreeQuery}; +use crate::types::{self, ActorId, Key, ListEncoding, ObjId, Op, OpId, OpIds, OpType, Prop}; use crate::{ObjType, OpObserver}; use fxhash::FxBuildHasher; use std::borrow::Borrow; @@ -73,18 +73,24 @@ impl OpSetInternal { Parents { obj, ops: self } } - pub(crate) fn parent_object(&self, obj: &ObjId) -> Option<(ObjId, Key)> { + pub(crate) fn parent_object(&self, obj: &ObjId) -> Option { let parent = self.trees.get(obj)?.parent?; - let key = self.search(&parent, OpIdSearch::new(obj.0)).key().unwrap(); - Some((parent, key)) + let query = self.search(&parent, OpIdVisSearch::new(obj.0)); + let key = query.key().unwrap(); + let visible = query.visible; + Some(Parent { + obj: parent, + key, + visible, + }) } - pub(crate) fn export_key(&self, obj: ObjId, key: Key) -> Prop { + pub(crate) fn export_key(&self, obj: ObjId, key: Key, encoding: ListEncoding) -> Prop { match key { Key::Map(m) => Prop::Map(self.m.props.get(m).into()), Key::Seq(opid) => { let i = self - .search(&obj, query::ElemIdPos::new(opid)) + .search(&obj, query::ElemIdPos::new(opid, encoding)) .index() .unwrap(); Prop::Seq(i) @@ -158,36 +164,37 @@ impl OpSetInternal { } } - pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, obj: &ObjId, query: Q) -> Q + pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, obj: &ObjId, mut query: Q) -> Q where Q: TreeQuery<'a>, { if let Some(tree) = self.trees.get(obj) { - tree.internal.search(query, &self.m) + if 
query.can_shortcut_search(tree) { + query + } else { + tree.internal.search(query, &self.m) + } } else { query } } - pub(crate) fn replace(&mut self, obj: &ObjId, index: usize, f: F) + pub(crate) fn change_vis(&mut self, obj: &ObjId, index: usize, f: F) where F: Fn(&mut Op), { if let Some(tree) = self.trees.get_mut(obj) { + tree.last_insert = None; tree.internal.update(index, f) } } /// Add `op` as a successor to each op at `op_indices` in `obj` - pub(crate) fn add_succ>( - &mut self, - obj: &ObjId, - op_indices: I, - op: &Op, - ) { + pub(crate) fn add_succ(&mut self, obj: &ObjId, op_indices: &[usize], op: &Op) { if let Some(tree) = self.trees.get_mut(obj) { + tree.last_insert = None; for i in op_indices { - tree.internal.update(i, |old_op| { + tree.internal.update(*i, |old_op| { old_op.add_succ(op, |left, right| self.m.lamport_cmp(*left, *right)) }); } @@ -198,6 +205,7 @@ impl OpSetInternal { // this happens on rollback - be sure to go back to the old state let tree = self.trees.get_mut(obj).unwrap(); self.length -= 1; + tree.last_insert = None; let op = tree.internal.remove(index); if let OpType::Make(_) = &op.action { self.trees.remove(&op.id.into()); @@ -209,6 +217,12 @@ impl OpSetInternal { self.length } + pub(crate) fn hint(&mut self, obj: &ObjId, index: usize, pos: usize) { + if let Some(tree) = self.trees.get_mut(obj) { + tree.last_insert = Some((index, pos)) + } + } + #[tracing::instrument(skip(self, index))] pub(crate) fn insert(&mut self, index: usize, obj: &ObjId, element: Op) { if let OpType::Make(typ) = element.action { @@ -217,13 +231,14 @@ impl OpSetInternal { OpTree { internal: Default::default(), objtype: typ, + last_insert: None, parent: Some(*obj), }, ); } if let Some(tree) = self.trees.get_mut(obj) { - //let tree = self.trees.get_mut(&element.obj).unwrap(); + tree.last_insert = None; tree.internal.insert(index, element); self.length += 1; } else { @@ -231,96 +246,6 @@ impl OpSetInternal { } } - pub(crate) fn insert_op(&mut self, obj: &ObjId, 
op: Op) -> Op { - let q = self.search(obj, query::SeekOp::new(&op)); - - let succ = q.succ; - let pos = q.pos; - - self.add_succ(obj, succ.iter().copied(), &op); - - if !op.is_delete() { - self.insert(pos, obj, op.clone()); - } - op - } - - pub(crate) fn insert_op_with_observer( - &mut self, - obj: &ObjId, - op: Op, - observer: &mut Obs, - ) -> Op { - let q = self.search(obj, query::SeekOpWithPatch::new(&op)); - - let query::SeekOpWithPatch { - pos, - succ, - seen, - values, - had_value_before, - .. - } = q; - - let ex_obj = self.id_to_exid(obj.0); - let parents = self.parents(*obj); - - let key = match op.key { - Key::Map(index) => self.m.props[index].clone().into(), - Key::Seq(_) => seen.into(), - }; - - if op.insert { - let value = (op.value(), self.id_to_exid(op.id)); - observer.insert(parents, ex_obj, seen, value); - } else if op.is_delete() { - if let Some(winner) = &values.last() { - let value = (winner.value(), self.id_to_exid(winner.id)); - let conflict = values.len() > 1; - observer.put(parents, ex_obj, key, value, conflict); - } else if had_value_before { - observer.delete(parents, ex_obj, key); - } - } else if let Some(value) = op.get_increment_value() { - // only observe this increment if the counter is visible, i.e. 
the counter's - // create op is in the values - //if values.iter().any(|value| op.pred.contains(&value.id)) { - if values - .last() - .map(|value| op.pred.contains(&value.id)) - .unwrap_or_default() - { - // we have observed the value - observer.increment(parents, ex_obj, key, (value, self.id_to_exid(op.id))); - } - } else { - let winner = if let Some(last_value) = values.last() { - if self.m.lamport_cmp(op.id, last_value.id) == Ordering::Greater { - &op - } else { - last_value - } - } else { - &op - }; - let value = (winner.value(), self.id_to_exid(winner.id)); - if op.is_list_op() && !had_value_before { - observer.insert(parents, ex_obj, seen, value); - } else { - let conflict = !values.is_empty(); - observer.put(parents, ex_obj, key, value, conflict); - } - } - - self.add_succ(obj, succ.iter().copied(), &op); - - if !op.is_delete() { - self.insert(pos, obj, op.clone()); - } - - op - } - pub(crate) fn object_type(&self, id: &ObjId) -> Option { self.trees.get(id).map(|tree| tree.objtype) } @@ -453,3 +378,9 @@ impl OpSetMetadata { self.props.cache(key.borrow().to_string()) } } + +pub(crate) struct Parent { + pub(crate) obj: ObjId, + pub(crate) key: Key, + pub(crate) visible: bool, +} diff --git a/rust/automerge/src/op_set/load.rs b/rust/automerge/src/op_set/load.rs index 0f810d15..6cc64e79 100644 --- a/rust/automerge/src/op_set/load.rs +++ b/rust/automerge/src/op_set/load.rs @@ -7,7 +7,7 @@ use crate::{ op_tree::OpTreeInternal, storage::load::{DocObserver, LoadedObject}, types::{ObjId, Op}, - OpObserver, + Automerge, OpObserver, }; /// An opset builder which creates an optree for each object as it finishes loading, inserting the @@ -37,6 +37,7 @@ impl DocObserver for OpSetBuilder { internal, objtype: loaded.obj_type, parent: loaded.parent, + last_insert: None, }; self.completed_objects.insert(loaded.id, tree); } @@ -78,10 +79,10 @@ impl<'a, O: OpObserver> DocObserver for ObservedOpSetBuilder<'a, O> { } fn finish(self, _metadata: super::OpSetMetadata) -> 
Self::Output { - let mut opset = OpSet::new(); + let mut opset = Automerge::new(); for (obj, op) in self.ops { opset.insert_op_with_observer(&obj, op, self.observer); } - opset + opset.ops } } diff --git a/rust/automerge/src/op_tree.rs b/rust/automerge/src/op_tree.rs index 6cd5bdf9..fae229e2 100644 --- a/rust/automerge/src/op_tree.rs +++ b/rust/automerge/src/op_tree.rs @@ -8,7 +8,7 @@ use std::{ pub(crate) use crate::op_set::OpSetMetadata; use crate::{ clock::Clock, - query::{self, Index, QueryResult, ReplaceArgs, TreeQuery}, + query::{self, ChangeVisibility, Index, QueryResult, TreeQuery}, }; use crate::{ types::{ObjId, Op, OpId}, @@ -27,6 +27,11 @@ pub(crate) struct OpTree { pub(crate) objtype: ObjType, /// The id of the parent object, root has no parent. pub(crate) parent: Option, + /// record the last list index and tree position + /// inserted into the op_set - this allows us to + /// short circuit the query if the follow op is another + /// insert or delete at the same spot + pub(crate) last_insert: Option<(usize, usize)>, } impl OpTree { @@ -35,6 +40,7 @@ impl OpTree { internal: Default::default(), objtype: ObjType::Map, parent: None, + last_insert: None, } } @@ -618,24 +624,19 @@ impl OpTreeNode { /// Update the operation at the given index using the provided function. /// /// This handles updating the indices after the update. 
- pub(crate) fn update(&mut self, index: usize, f: F) -> ReplaceArgs + pub(crate) fn update(&mut self, index: usize, f: F) -> ChangeVisibility<'_> where F: FnOnce(&mut Op), { if self.is_leaf() { let new_element = self.elements.get_mut(index).unwrap(); - let old_id = new_element.id; - let old_visible = new_element.visible(); + let old_vis = new_element.visible(); f(new_element); - let replace_args = ReplaceArgs { - old_id, - new_id: new_element.id, - old_visible, - new_visible: new_element.visible(), - new_key: new_element.elemid_or_key(), - }; - self.index.replace(&replace_args); - replace_args + self.index.change_vis(ChangeVisibility { + old_vis, + new_vis: new_element.visible(), + op: new_element, + }) } else { let mut cumulative_len = 0; let len = self.len(); @@ -646,23 +647,17 @@ impl OpTreeNode { } Ordering::Equal => { let new_element = self.elements.get_mut(child_index).unwrap(); - let old_id = new_element.id; - let old_visible = new_element.visible(); + let old_vis = new_element.visible(); f(new_element); - let replace_args = ReplaceArgs { - old_id, - new_id: new_element.id, - old_visible, - new_visible: new_element.visible(), - new_key: new_element.elemid_or_key(), - }; - self.index.replace(&replace_args); - return replace_args; + return self.index.change_vis(ChangeVisibility { + old_vis, + new_vis: new_element.visible(), + op: new_element, + }); } Ordering::Greater => { - let replace_args = child.update(index - cumulative_len, f); - self.index.replace(&replace_args); - return replace_args; + let vis_args = child.update(index - cumulative_len, f); + return self.index.change_vis(vis_args); } } } diff --git a/rust/automerge/src/parents.rs b/rust/automerge/src/parents.rs index 83e9b1c2..1d01ffbf 100644 --- a/rust/automerge/src/parents.rs +++ b/rust/automerge/src/parents.rs @@ -1,5 +1,6 @@ +use crate::op_set; use crate::op_set::OpSet; -use crate::types::ObjId; +use crate::types::{ListEncoding, ObjId}; use crate::{exid::ExId, Prop}; #[derive(Debug)] @@ -9,27 
+10,55 @@ pub struct Parents<'a> { } impl<'a> Parents<'a> { + // returns the path to the object + // works even if the object or a parent has been deleted pub fn path(&mut self) -> Vec<(ExId, Prop)> { - let mut path = self.collect::>(); + let mut path = self + .map(|Parent { obj, prop, .. }| (obj, prop)) + .collect::>(); path.reverse(); path } + + // returns the path to the object + // if the object or one of its parents has been deleted or conflicted out + // returns none + pub fn visible_path(&mut self) -> Option> { + let mut path = Vec::new(); + for Parent { obj, prop, visible } in self { + if !visible { + return None; + } + path.push((obj, prop)) + } + path.reverse(); + Some(path) + } } impl<'a> Iterator for Parents<'a> { - type Item = (ExId, Prop); + type Item = Parent; fn next(&mut self) -> Option { if self.obj.is_root() { None - } else if let Some((obj, key)) = self.ops.parent_object(&self.obj) { + } else if let Some(op_set::Parent { obj, key, visible }) = self.ops.parent_object(&self.obj) + { self.obj = obj; - Some(( - self.ops.id_to_exid(self.obj.0), - self.ops.export_key(self.obj, key), - )) + Some(Parent { + obj: self.ops.id_to_exid(self.obj.0), + prop: self.ops.export_key(self.obj, key, ListEncoding::List), + visible, + }) } else { None } } } + +#[derive(Debug, PartialEq, Eq)] +pub struct Parent { + pub obj: ExId, + pub prop: Prop, + pub visible: bool, +} diff --git a/rust/automerge/src/query.rs b/rust/automerge/src/query.rs index f09ed0c1..fefac401 100644 --- a/rust/automerge/src/query.rs +++ b/rust/automerge/src/query.rs @@ -1,5 +1,7 @@ -use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::types::{Clock, Counter, Key, Op, OpId, OpType, ScalarValue}; +use crate::op_tree::{OpSetMetadata, OpTree, OpTreeNode}; +use crate::types::{ + Clock, Counter, Key, ListEncoding, Op, OpId, OpType, ScalarValue, TextEncoding, +}; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::{HashMap, HashSet}; @@ -20,6 +22,7 @@ mod map_range_at; mod 
nth; mod nth_at; mod opid; +mod opid_vis; mod prop; mod prop_at; mod seek_op; @@ -40,6 +43,7 @@ pub(crate) use map_range_at::MapRangeAt; pub(crate) use nth::Nth; pub(crate) use nth_at::NthAt; pub(crate) use opid::OpIdSearch; +pub(crate) use opid_vis::OpIdVisSearch; pub(crate) use prop::Prop; pub(crate) use prop_at::PropAt; pub(crate) use seek_op::SeekOp; @@ -47,12 +51,10 @@ pub(crate) use seek_op_with_patch::SeekOpWithPatch; // use a struct for the args for clarity as they are passed up the update chain in the optree #[derive(Debug, Clone)] -pub(crate) struct ReplaceArgs { - pub(crate) old_id: OpId, - pub(crate) new_id: OpId, - pub(crate) old_visible: bool, - pub(crate) new_visible: bool, - pub(crate) new_key: Key, +pub(crate) struct ChangeVisibility<'a> { + pub(crate) old_vis: bool, + pub(crate) new_vis: bool, + pub(crate) op: &'a Op, } #[derive(Debug, Clone, PartialEq)] @@ -63,7 +65,15 @@ pub(crate) struct CounterData { op: Op, } -pub(crate) trait TreeQuery<'a> { +pub(crate) trait TreeQuery<'a>: Clone + Debug { + fn equiv(&mut self, _other: &Self) -> bool { + false + } + + fn can_shortcut_search(&mut self, _tree: &'a OpTree) -> bool { + false + } + #[inline(always)] fn query_node_with_metadata( &mut self, @@ -100,6 +110,8 @@ pub(crate) enum QueryResult { pub(crate) struct Index { /// The map of visible keys to the number of visible operations for that key. pub(crate) visible: HashMap, + pub(crate) visible16: usize, + pub(crate) visible8: usize, /// Set of opids found in this node and below. pub(crate) ops: HashSet, } @@ -108,53 +120,72 @@ impl Index { pub(crate) fn new() -> Self { Index { visible: Default::default(), + visible16: 0, + visible8: 0, ops: Default::default(), } } /// Get the number of visible elements in this index. 
- pub(crate) fn visible_len(&self) -> usize { - self.visible.len() + pub(crate) fn visible_len(&self, encoding: ListEncoding) -> usize { + match encoding { + ListEncoding::List => self.visible.len(), + ListEncoding::Text(TextEncoding::Utf8) => self.visible8, + ListEncoding::Text(TextEncoding::Utf16) => self.visible16, + } } pub(crate) fn has_visible(&self, seen: &Key) -> bool { self.visible.contains_key(seen) } - pub(crate) fn replace( + pub(crate) fn change_vis<'a>( &mut self, - ReplaceArgs { - old_id, - new_id, - old_visible, - new_visible, - new_key, - }: &ReplaceArgs, - ) { - if old_id != new_id { - self.ops.remove(old_id); - self.ops.insert(*new_id); - } - - match (new_visible, old_visible, new_key) { - (false, true, key) => match self.visible.get(key).copied() { + change_vis: ChangeVisibility<'a>, + ) -> ChangeVisibility<'a> { + let ChangeVisibility { + old_vis, + new_vis, + op, + } = &change_vis; + let key = op.elemid_or_key(); + match (old_vis, new_vis) { + (true, false) => match self.visible.get(&key).copied() { Some(n) if n == 1 => { - self.visible.remove(key); + self.visible.remove(&key); + self.visible8 -= op.width(ListEncoding::Text(TextEncoding::Utf8)); + self.visible16 -= op.width(ListEncoding::Text(TextEncoding::Utf16)); } Some(n) => { - self.visible.insert(*key, n - 1); + self.visible.insert(key, n - 1); } None => panic!("remove overun in index"), }, - (true, false, key) => *self.visible.entry(*key).or_default() += 1, + (false, true) => { + if let Some(n) = self.visible.get(&key) { + self.visible.insert(key, n + 1); + } else { + self.visible.insert(key, 1); + self.visible8 += op.width(ListEncoding::Text(TextEncoding::Utf8)); + self.visible16 += op.width(ListEncoding::Text(TextEncoding::Utf16)); + } + } _ => {} } + change_vis } pub(crate) fn insert(&mut self, op: &Op) { self.ops.insert(op.id); if op.visible() { - *self.visible.entry(op.elemid_or_key()).or_default() += 1; + let key = op.elemid_or_key(); + if let Some(n) = self.visible.get(&key) { + 
self.visible.insert(key, n + 1); + } else { + self.visible.insert(key, 1); + self.visible8 += op.width(ListEncoding::Text(TextEncoding::Utf8)); + self.visible16 += op.width(ListEncoding::Text(TextEncoding::Utf16)); + } } } @@ -165,6 +196,8 @@ impl Index { match self.visible.get(&key).copied() { Some(n) if n == 1 => { self.visible.remove(&key); + self.visible8 -= op.width(ListEncoding::Text(TextEncoding::Utf8)); + self.visible16 -= op.width(ListEncoding::Text(TextEncoding::Utf16)); } Some(n) => { self.visible.insert(key, n - 1); @@ -178,9 +211,14 @@ impl Index { for id in &other.ops { self.ops.insert(*id); } - for (elem, n) in other.visible.iter() { - *self.visible.entry(*elem).or_default() += n; + for (elem, other_len) in other.visible.iter() { + self.visible + .entry(*elem) + .and_modify(|len| *len += *other_len) + .or_insert(*other_len); } + self.visible16 += other.visible16; + self.visible8 += other.visible8; } } diff --git a/rust/automerge/src/query/elem_id_pos.rs b/rust/automerge/src/query/elem_id_pos.rs index 809b6061..250501fe 100644 --- a/rust/automerge/src/query/elem_id_pos.rs +++ b/rust/automerge/src/query/elem_id_pos.rs @@ -1,23 +1,26 @@ use crate::{ op_tree::OpTreeNode, - types::{ElemId, Key}, + types::{ElemId, Key, ListEncoding}, }; use super::{QueryResult, TreeQuery}; /// Lookup the index in the list that this elemid occupies. 
+#[derive(Clone, Debug)] pub(crate) struct ElemIdPos { elemid: ElemId, pos: usize, found: bool, + encoding: ListEncoding, } impl ElemIdPos { - pub(crate) fn new(elemid: ElemId) -> Self { + pub(crate) fn new(elemid: ElemId, encoding: ListEncoding) -> Self { Self { elemid, pos: 0, found: false, + encoding, } } @@ -38,7 +41,7 @@ impl<'a> TreeQuery<'a> for ElemIdPos { QueryResult::Descend } else { // not in this node, try the next one - self.pos += child.index.visible_len(); + self.pos += child.index.visible_len(self.encoding); QueryResult::Next } } @@ -49,7 +52,7 @@ impl<'a> TreeQuery<'a> for ElemIdPos { self.found = true; return QueryResult::Finish; } else if element.visible() { - self.pos += 1; + self.pos += element.width(self.encoding); } QueryResult::Next } diff --git a/rust/automerge/src/query/insert.rs b/rust/automerge/src/query/insert.rs index 9e495c49..12fae5b8 100644 --- a/rust/automerge/src/query/insert.rs +++ b/rust/automerge/src/query/insert.rs @@ -1,7 +1,7 @@ use crate::error::AutomergeError; use crate::op_tree::OpTreeNode; -use crate::query::{QueryResult, TreeQuery}; -use crate::types::{ElemId, Key, Op, HEAD}; +use crate::query::{OpTree, QueryResult, TreeQuery}; +use crate::types::{ElemId, Key, ListEncoding, Op, HEAD}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -10,6 +10,8 @@ pub(crate) struct InsertNth { target: usize, /// the number of visible operations seen seen: usize, + last_width: usize, + encoding: ListEncoding, //pub pos: usize, /// the number of operations (including non-visible) that we have seen n: usize, @@ -22,7 +24,7 @@ pub(crate) struct InsertNth { } impl InsertNth { - pub(crate) fn new(target: usize) -> Self { + pub(crate) fn new(target: usize, encoding: ListEncoding) -> Self { let (valid, last_valid_insert) = if target == 0 { (Some(0), Some(Key::Seq(HEAD))) } else { @@ -31,6 +33,8 @@ impl InsertNth { InsertNth { target, seen: 0, + last_width: 0, + encoding, n: 0, valid, last_seen: None, @@ -46,23 +50,30 @@ impl InsertNth 
{ pub(crate) fn key(&self) -> Result { self.last_valid_insert .ok_or(AutomergeError::InvalidIndex(self.target)) - //if self.target == 0 { - /* - if self.last_insert.is_none() { - Ok(HEAD.into()) - } else if self.seen == self.target && self.last_insert.is_some() { - Ok(Key::Seq(self.last_insert.unwrap())) - } else { - Err(AutomergeError::InvalidIndex(self.target)) - } - */ } } impl<'a> TreeQuery<'a> for InsertNth { + fn equiv(&mut self, other: &Self) -> bool { + self.pos() == other.pos() && self.key() == other.key() + } + + fn can_shortcut_search(&mut self, tree: &'a OpTree) -> bool { + if let Some((index, pos)) = &tree.last_insert { + if let Some(op) = tree.internal.get(*pos) { + if *index + op.width(self.encoding) == self.target { + self.valid = Some(*pos + 1); + self.last_valid_insert = Some(op.elemid_or_key()); + return true; + } + } + } + false + } + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { // if this node has some visible elements then we may find our target within - let mut num_vis = child.index.visible_len(); + let mut num_vis = child.index.visible_len(self.encoding); if let Some(last_seen) = self.last_seen { if child.index.has_visible(&last_seen) { num_vis -= 1; @@ -103,7 +114,8 @@ impl<'a> TreeQuery<'a> for InsertNth { if self.seen >= self.target { return QueryResult::Finish; } - self.seen += 1; + self.last_width = element.width(self.encoding); + self.seen += self.last_width; self.last_seen = Some(element.elemid_or_key()); self.last_valid_insert = self.last_seen } diff --git a/rust/automerge/src/query/len.rs b/rust/automerge/src/query/len.rs index 697d0430..0dce4f85 100644 --- a/rust/automerge/src/query/len.rs +++ b/rust/automerge/src/query/len.rs @@ -1,21 +1,23 @@ use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery}; +use crate::types::ListEncoding; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct Len { pub(crate) len: usize, + encoding: ListEncoding, } impl Len { - pub(crate) fn new() -> 
Self { - Len { len: 0 } + pub(crate) fn new(encoding: ListEncoding) -> Self { + Len { len: 0, encoding } } } impl<'a> TreeQuery<'a> for Len { fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { - self.len = child.index.visible_len(); + self.len = child.index.visible_len(self.encoding); QueryResult::Finish } } diff --git a/rust/automerge/src/query/len_at.rs b/rust/automerge/src/query/len_at.rs index 46744c84..9380501e 100644 --- a/rust/automerge/src/query/len_at.rs +++ b/rust/automerge/src/query/len_at.rs @@ -1,5 +1,5 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::types::{Clock, ElemId, Op}; +use crate::types::{Clock, ElemId, ListEncoding, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -7,16 +7,18 @@ pub(crate) struct LenAt { pub(crate) len: usize, clock: Clock, pos: usize, + encoding: ListEncoding, last: Option, window: VisWindow, } impl LenAt { - pub(crate) fn new(clock: Clock) -> Self { + pub(crate) fn new(clock: Clock, encoding: ListEncoding) -> Self { LenAt { clock, pos: 0, len: 0, + encoding, last: None, window: Default::default(), } @@ -31,7 +33,7 @@ impl<'a> TreeQuery<'a> for LenAt { let elem = op.elemid(); let visible = self.window.visible_at(op, self.pos, &self.clock); if elem != self.last && visible { - self.len += 1; + self.len += op.width(self.encoding); self.last = elem; } self.pos += 1; diff --git a/rust/automerge/src/query/nth.rs b/rust/automerge/src/query/nth.rs index f73f2a10..a286c4e2 100644 --- a/rust/automerge/src/query/nth.rs +++ b/rust/automerge/src/query/nth.rs @@ -1,13 +1,16 @@ use crate::error::AutomergeError; -use crate::op_tree::OpTreeNode; +use crate::op_set::OpSet; +use crate::op_tree::{OpTree, OpTreeNode}; use crate::query::{QueryResult, TreeQuery}; -use crate::types::{Key, Op}; +use crate::types::{Key, ListEncoding, Op, OpIds}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct Nth<'a> { target: usize, seen: usize, + encoding: ListEncoding, + last_width: usize, 
/// last_seen is the target elemid of the last `seen` operation. /// It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes. last_seen: Option, @@ -17,10 +20,12 @@ pub(crate) struct Nth<'a> { } impl<'a> Nth<'a> { - pub(crate) fn new(target: usize) -> Self { + pub(crate) fn new(target: usize, encoding: ListEncoding) -> Self { Nth { target, seen: 0, + last_width: 1, + encoding, last_seen: None, ops: vec![], ops_pos: vec![], @@ -28,6 +33,10 @@ impl<'a> Nth<'a> { } } + pub(crate) fn pred(&self, ops: &OpSet) -> OpIds { + ops.m.sorted_opids(self.ops.iter().map(|o| o.id)) + } + /// Get the key pub(crate) fn key(&self) -> Result { // the query collects the ops so we can use that to get the key they all use @@ -37,11 +46,35 @@ impl<'a> Nth<'a> { Err(AutomergeError::InvalidIndex(self.target)) } } + + pub(crate) fn index(&self) -> usize { + self.seen - self.last_width + } } impl<'a> TreeQuery<'a> for Nth<'a> { + fn equiv(&mut self, other: &Self) -> bool { + self.index() == other.index() && self.key() == other.key() + } + + fn can_shortcut_search(&mut self, tree: &'a OpTree) -> bool { + if let Some((index, pos)) = &tree.last_insert { + if *index == self.target { + if let Some(op) = tree.internal.get(*pos) { + self.last_width = op.width(self.encoding); + self.seen = *index + self.last_width; + self.ops.push(op); + self.ops_pos.push(*pos); + self.pos = *pos + 1; + return true; + } + } + } + false + } + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { - let mut num_vis = child.index.visible_len(); + let mut num_vis = child.index.visible_len(self.encoding); if let Some(last_seen) = self.last_seen { if child.index.has_visible(&last_seen) { num_vis -= 1; @@ -79,11 +112,12 @@ impl<'a> TreeQuery<'a> for Nth<'a> { } let visible = element.visible(); if visible && self.last_seen.is_none() { - self.seen += 1; + self.last_width = element.width(self.encoding); + self.seen += self.last_width; // we have a new visible 
element self.last_seen = Some(element.elemid_or_key()) } - if self.seen == self.target + 1 && visible { + if self.seen > self.target && visible { self.ops.push(element); self.ops_pos.push(self.pos); } diff --git a/rust/automerge/src/query/nth_at.rs b/rust/automerge/src/query/nth_at.rs index 10851e7c..e193ca03 100644 --- a/rust/automerge/src/query/nth_at.rs +++ b/rust/automerge/src/query/nth_at.rs @@ -1,5 +1,5 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::types::{Clock, ElemId, Op}; +use crate::types::{Clock, ElemId, ListEncoding, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -7,6 +7,7 @@ pub(crate) struct NthAt { clock: Clock, target: usize, seen: usize, + encoding: ListEncoding, last_seen: Option, window: VisWindow, pub(crate) ops: Vec, @@ -15,11 +16,12 @@ pub(crate) struct NthAt { } impl NthAt { - pub(crate) fn new(target: usize, clock: Clock) -> Self { + pub(crate) fn new(target: usize, clock: Clock, encoding: ListEncoding) -> Self { NthAt { clock, target, seen: 0, + encoding, last_seen: None, ops: vec![], ops_pos: vec![], @@ -39,10 +41,10 @@ impl<'a> TreeQuery<'a> for NthAt { } let visible = self.window.visible_at(element, self.pos, &self.clock); if visible && self.last_seen.is_none() { - self.seen += 1; + self.seen += element.width(self.encoding); self.last_seen = element.elemid() } - if self.seen == self.target + 1 && visible { + if self.seen > self.target && visible { for (vpos, vop) in self.window.seen_op(element, self.pos) { if vop.is_counter() { // this could be out of order because of inc's - we can find the right place diff --git a/rust/automerge/src/query/opid.rs b/rust/automerge/src/query/opid.rs index 6c29dcf6..aa3a45e6 100644 --- a/rust/automerge/src/query/opid.rs +++ b/rust/automerge/src/query/opid.rs @@ -1,6 +1,6 @@ use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery}; -use crate::types::{ElemId, Key, Op, OpId}; +use crate::types::{Key, Op, OpId}; /// Search for an OpId in a tree. 
/// Returns the index of the operation in the tree. @@ -30,10 +30,6 @@ impl OpIdSearch { None } } - - pub(crate) fn key(&self) -> &Option { - &self.key - } } impl<'a> TreeQuery<'a> for OpIdSearch { @@ -49,11 +45,6 @@ impl<'a> TreeQuery<'a> for OpIdSearch { fn query_element(&mut self, element: &Op) -> QueryResult { if element.id == self.target { self.found = true; - if element.insert { - self.key = Some(Key::Seq(ElemId(element.id))); - } else { - self.key = Some(element.key); - } QueryResult::Finish } else { self.pos += 1; diff --git a/rust/automerge/src/query/opid_vis.rs b/rust/automerge/src/query/opid_vis.rs new file mode 100644 index 00000000..8a4b6a10 --- /dev/null +++ b/rust/automerge/src/query/opid_vis.rs @@ -0,0 +1,62 @@ +use crate::op_tree::OpTreeNode; +use crate::query::{QueryResult, TreeQuery}; +use crate::types::{Key, Op, OpId}; + +/// Search for an OpId in a tree. +/// Returns the index of the operation in the tree. +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct OpIdVisSearch { + target: OpId, + found: bool, + pub(crate) visible: bool, + key: Option, +} + +impl OpIdVisSearch { + pub(crate) fn new(target: OpId) -> Self { + OpIdVisSearch { + target, + found: false, + visible: true, + key: None, + } + } + + pub(crate) fn key(&self) -> &Option { + &self.key + } +} + +impl<'a> TreeQuery<'a> for OpIdVisSearch { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + if child.index.ops.contains(&self.target) { + QueryResult::Descend + } else { + QueryResult::Next + } + } + + fn query_element(&mut self, element: &Op) -> QueryResult { + if element.id == self.target { + self.found = true; + self.key = Some(element.elemid_or_key()); + if element.visible() { + QueryResult::Next + } else { + self.visible = false; + QueryResult::Finish + } + } else if self.found { + if self.key != Some(element.elemid_or_key()) { + QueryResult::Finish + } else if element.visible() { + self.visible = false; + QueryResult::Finish + } else { + QueryResult::Next + } + } 
else { + QueryResult::Next + } + } +} diff --git a/rust/automerge/src/query/prop.rs b/rust/automerge/src/query/prop.rs index 8b59d698..89fa18f0 100644 --- a/rust/automerge/src/query/prop.rs +++ b/rust/automerge/src/query/prop.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::types::{Key, Op}; +use crate::types::{Key, ListEncoding, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -45,7 +45,7 @@ impl<'a> TreeQuery<'a> for Prop<'a> { { if self.pos + child.len() >= start { // skip empty nodes - if child.index.visible_len() == 0 { + if child.index.visible_len(ListEncoding::default()) == 0 { if self.pos + child.len() >= optree_len { self.pos = optree_len; QueryResult::Finish diff --git a/rust/automerge/src/query/seek_op.rs b/rust/automerge/src/query/seek_op.rs index 023c431a..70d52d45 100644 --- a/rust/automerge/src/query/seek_op.rs +++ b/rust/automerge/src/query/seek_op.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::types::{Key, Op, HEAD}; +use crate::types::{Key, ListEncoding, Op, HEAD}; use std::cmp::Ordering; use std::fmt::Debug; @@ -70,7 +70,7 @@ impl<'a> TreeQuery<'a> for SeekOp<'a> { if let Some(start) = self.start { if self.pos + child.len() >= start { // skip empty nodes - if child.index.visible_len() == 0 { + if child.index.visible_len(ListEncoding::List) == 0 { self.pos += child.len(); QueryResult::Next } else { diff --git a/rust/automerge/src/query/seek_op_with_patch.rs b/rust/automerge/src/query/seek_op_with_patch.rs index 06876038..f029c5db 100644 --- a/rust/automerge/src/query/seek_op_with_patch.rs +++ b/rust/automerge/src/query/seek_op_with_patch.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::types::{Key, Op, HEAD}; +use crate::types::{Key, ListEncoding, Op, 
HEAD}; use std::cmp::Ordering; use std::fmt::Debug; @@ -10,7 +10,9 @@ pub(crate) struct SeekOpWithPatch<'a> { pub(crate) pos: usize, pub(crate) succ: Vec, found: bool, + encoding: ListEncoding, pub(crate) seen: usize, + pub(crate) last_width: usize, last_seen: Option, pub(crate) values: Vec<&'a Op>, pub(crate) had_value_before: bool, @@ -19,13 +21,15 @@ pub(crate) struct SeekOpWithPatch<'a> { } impl<'a> SeekOpWithPatch<'a> { - pub(crate) fn new(op: &Op) -> Self { + pub(crate) fn new(op: &Op, encoding: ListEncoding) -> Self { SeekOpWithPatch { op: op.clone(), succ: vec![], pos: 0, found: false, + encoding, seen: 0, + last_width: 0, last_seen: None, values: vec![], had_value_before: false, @@ -57,7 +61,7 @@ impl<'a> SeekOpWithPatch<'a> { self.last_seen = None } if e.visible() && self.last_seen.is_none() { - self.seen += 1; + self.seen += e.width(self.encoding); self.last_seen = Some(e.elemid_or_key()) } } @@ -101,7 +105,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // elements it contains. However, it could happen that a visible element is // split across two tree nodes. To avoid double-counting in this situation, we // subtract one if the last visible element also appears in this tree node. - let mut num_vis = child.index.visible_len(); + let mut num_vis = child.index.visible_len(self.encoding); if num_vis > 0 { // FIXME: I think this is wrong: we should subtract one only if this // subtree contains a *visible* (i.e. 
empty succs) operation for the list @@ -130,7 +134,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { if let Some(start) = self.start { if self.pos + child.len() >= start { // skip empty nodes - if child.index.visible_len() == 0 { + if child.index.visible_len(self.encoding) == 0 { self.pos += child.len(); QueryResult::Next } else { @@ -173,6 +177,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { self.values.push(e); } self.succ.push(self.pos); + self.last_width = e.width(self.encoding); if e.visible() { self.had_value_before = true; @@ -218,6 +223,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { self.values.push(e); } self.succ.push(self.pos); + self.last_width = e.width(self.encoding); } if e.visible() { self.had_value_before = true; @@ -235,6 +241,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { self.values.push(e); } self.succ.push(self.pos); + self.last_width = e.width(self.encoding); } // If the new op is an insertion, skip over any existing list elements whose elemId is diff --git a/rust/automerge/src/transaction/inner.rs b/rust/automerge/src/transaction/inner.rs index 6f0e8b07..c9567b68 100644 --- a/rust/automerge/src/transaction/inner.rs +++ b/rust/automerge/src/transaction/inner.rs @@ -4,7 +4,7 @@ use crate::automerge::Actor; use crate::exid::ExId; use crate::query::{self, OpIdSearch}; use crate::storage::Change as StoredChange; -use crate::types::{Key, ObjId, OpId}; +use crate::types::{Key, ListEncoding, ObjId, OpId, OpIds, TextEncoding}; use crate::{op_tree::OpSetMetadata, types::Op, Automerge, Change, ChangeHash, OpObserver, Prop}; use crate::{AutomergeError, ObjType, OpType, ScalarValue}; @@ -16,7 +16,7 @@ pub(crate) struct TransactionInner { time: i64, message: Option, deps: Vec, - operations: Vec<(ObjId, Prop, Op)>, + operations: Vec<(ObjId, Op)>, } /// Arguments required to create a new transaction @@ -117,8 +117,6 @@ impl TransactionInner { use crate::storage::{change::PredOutOfOrder, convert::op_as_actor_id}; let actor = 
metadata.actors.get(self.actor).clone(); - let ops = self.operations.iter().map(|o| (&o.0, &o.2)); - //let (ops, other_actors) = encode_change_ops(ops, actor.clone(), actors, props); let deps = self.deps.clone(); let stored = match StoredChange::builder() .with_actor(actor) @@ -128,7 +126,8 @@ impl TransactionInner { .with_dependencies(deps) .with_timestamp(self.time) .build( - ops.into_iter() + self.operations + .iter() .map(|(obj, op)| op_as_actor_id(obj, op, metadata)), ) { Ok(s) => s, @@ -152,10 +151,10 @@ impl TransactionInner { pub(crate) fn rollback(self, doc: &mut Automerge) -> usize { let num = self.pending_ops(); // remove in reverse order so sets are removed before makes etc... - for (obj, _prop, op) in self.operations.into_iter().rev() { + for (obj, op) in self.operations.into_iter().rev() { for pred_id in &op.pred { if let Some(p) = doc.ops.search(&obj, OpIdSearch::new(*pred_id)).index() { - doc.ops.replace(&obj, p, |o| o.remove_succ(&op)); + doc.ops.change_vis(&obj, p, |o| o.remove_succ(&op)); } } if let Some(pos) = doc.ops.search(&obj, OpIdSearch::new(op.id)).index() { @@ -193,9 +192,14 @@ impl TransactionInner { prop: P, value: V, ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(ex_obj)?; + let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; let value = value.into(); let prop = prop.into(); + match (&prop, obj_type) { + (Prop::Map(_), ObjType::Map) => Ok(()), + (Prop::Seq(_), ObjType::List) => Ok(()), + _ => Err(AutomergeError::InvalidOp(obj_type)), + }?; self.local_op(doc, op_observer, obj, prop, value.into())?; Ok(()) } @@ -221,8 +225,13 @@ impl TransactionInner { prop: P, value: ObjType, ) -> Result { - let obj = doc.exid_to_obj(ex_obj)?; + let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; let prop = prop.into(); + match (&prop, obj_type) { + (Prop::Map(_), ObjType::Map) => Ok(()), + (Prop::Seq(_), ObjType::List) => Ok(()), + _ => Err(AutomergeError::InvalidOp(obj_type)), + }?; let id = self .local_op(doc, op_observer, obj, prop, 
value.into())? .unwrap(); @@ -234,6 +243,28 @@ impl TransactionInner { OpId(self.start_op.get() + self.pending_ops() as u64, self.actor) } + fn next_insert(&mut self, key: Key, value: ScalarValue) -> Op { + Op { + id: self.next_id(), + action: OpType::Put(value), + key, + succ: Default::default(), + pred: Default::default(), + insert: true, + } + } + + fn next_delete(&mut self, key: Key, pred: OpIds) -> Op { + Op { + id: self.next_id(), + action: OpType::Delete, + key, + succ: Default::default(), + pred, + insert: false, + } + } + #[allow(clippy::too_many_arguments)] fn insert_local_op( &mut self, @@ -245,7 +276,7 @@ impl TransactionInner { obj: ObjId, succ_pos: &[usize], ) { - doc.ops.add_succ(&obj, succ_pos.iter().copied(), &op); + doc.ops.add_succ(&obj, succ_pos, &op); if !op.is_delete() { doc.ops.insert(pos, &obj, op.clone()); @@ -262,7 +293,10 @@ impl TransactionInner { index: usize, value: V, ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(ex_obj)?; + let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; + if obj_type != ObjType::List { + return Err(AutomergeError::InvalidOp(obj_type)); + } let value = value.into(); tracing::trace!(obj=?obj, value=?value, "inserting value"); self.do_insert(doc, op_observer, obj, index, value.into())?; @@ -277,7 +311,10 @@ impl TransactionInner { index: usize, value: ObjType, ) -> Result { - let obj = doc.exid_to_obj(ex_obj)?; + let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; + if obj_type != ObjType::List { + return Err(AutomergeError::InvalidOp(obj_type)); + } let id = self.do_insert(doc, op_observer, obj, index, value.into())?; let id = doc.id_to_exid(id); Ok(id) @@ -293,7 +330,9 @@ impl TransactionInner { ) -> Result { let id = self.next_id(); - let query = doc.ops.search(&obj, query::InsertNth::new(index)); + let query = doc + .ops + .search(&obj, query::InsertNth::new(index, ListEncoding::List)); let key = query.key()?; @@ -384,7 +423,9 @@ impl TransactionInner { index: usize, action: OpType, ) -> Result, 
AutomergeError> { - let query = doc.ops.search(&obj, query::Nth::new(index)); + let query = doc + .ops + .search(&obj, query::Nth::new(index, ListEncoding::List)); let id = self.next_id(); let pred = doc.ops.m.sorted_opids(query.ops.iter().map(|o| o.id)); @@ -424,7 +465,7 @@ impl TransactionInner { prop: P, value: i64, ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(obj)?; + let obj = doc.exid_to_obj(obj)?.0; self.local_op(doc, op_observer, obj, prop.into(), OpType::Increment(value))?; Ok(()) } @@ -436,9 +477,24 @@ impl TransactionInner { ex_obj: &ExId, prop: P, ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(ex_obj)?; + let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; let prop = prop.into(); - self.local_op(doc, op_observer, obj, prop, OpType::Delete)?; + if obj_type == ObjType::Text { + let index = prop.to_index().ok_or(AutomergeError::InvalidOp(obj_type))?; + self.inner_splice( + doc, + op_observer, + SpliceArgs { + obj, + index, + del: 1, + values: vec![], + splice_type: SpliceType::Text("", doc.text_encoding), + }, + )?; + } else { + self.local_op(doc, op_observer, obj, prop, OpType::Delete)?; + } Ok(()) } @@ -447,30 +503,147 @@ impl TransactionInner { pub(crate) fn splice( &mut self, doc: &mut Automerge, - mut op_observer: Option<&mut Obs>, + op_observer: Option<&mut Obs>, ex_obj: &ExId, - mut pos: usize, + index: usize, del: usize, vals: impl IntoIterator, ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(ex_obj)?; - for _ in 0..del { - // This unwrap and rewrap of the option is necessary to appeas the borrow checker :( - if let Some(obs) = op_observer.as_mut() { - self.local_op(doc, Some(*obs), obj, pos.into(), OpType::Delete)?; + let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; + if obj_type != ObjType::List { + return Err(AutomergeError::InvalidOp(obj_type)); + } + let values = vals.into_iter().collect(); + self.inner_splice( + doc, + op_observer, + SpliceArgs { + obj, + index, + del, + values, + splice_type: 
SpliceType::List, + }, + ) + } + + /// Splice string into a text object + pub(crate) fn splice_text( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + ex_obj: &ExId, + index: usize, + del: usize, + text: &str, + ) -> Result<(), AutomergeError> { + let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; + if obj_type != ObjType::Text { + return Err(AutomergeError::InvalidOp(obj_type)); + } + let values = text.chars().map(ScalarValue::from).collect(); + self.inner_splice( + doc, + op_observer, + SpliceArgs { + obj, + index, + del, + values, + splice_type: SpliceType::Text(text, doc.text_encoding), + }, + ) + } + + fn inner_splice( + &mut self, + doc: &mut Automerge, + mut op_observer: Option<&mut Obs>, + SpliceArgs { + obj, + mut index, + mut del, + values, + splice_type, + }: SpliceArgs<'_>, + ) -> Result<(), AutomergeError> { + let ex_obj = doc.ops.id_to_exid(obj.0); + let encoding = splice_type.encoding(); + // delete `del` items - performing the query for each one + let mut deleted = 0; + while deleted < del { + // TODO: could do this with a single custom query + let query = doc.ops.search(&obj, query::Nth::new(index, encoding)); + + // if we delete in the middle of a multi-character + // move cursor back to the beginning and expand the del width + let adjusted_index = query.index(); + if adjusted_index < index { + del += index - adjusted_index; + index = adjusted_index; + } + + let step = if let Some(op) = query.ops.last() { + op.width(encoding) } else { - self.local_op::(doc, None, obj, pos.into(), OpType::Delete)?; + break; + }; + + let op = self.next_delete(query.key()?, query.pred(&doc.ops)); + + doc.ops.add_succ(&obj, &query.ops_pos, &op); + + self.operations.push((obj, op)); + + deleted += step; + } + + if deleted > 0 { + if let Some(obs) = op_observer.as_mut() { + obs.delete_seq(doc, ex_obj.clone(), index, deleted); } } - for v in vals { - // As above this unwrap and rewrap of the option is necessary to appeas the borrow checker :( - if let 
Some(obs) = op_observer.as_mut() { - self.do_insert(doc, Some(*obs), obj, pos, v.clone().into())?; - } else { - self.do_insert::(doc, None, obj, pos, v.clone().into())?; + + // do the insert query for the first item and then + // insert the remaining ops one after the other + if !values.is_empty() { + let query = doc.ops.search(&obj, query::InsertNth::new(index, encoding)); + let mut pos = query.pos(); + let mut key = query.key()?; + let mut cursor = index; + let mut width = 0; + + for v in &values { + let op = self.next_insert(key, v.clone()); + + doc.ops.insert(pos, &obj, op.clone()); + + width = op.width(encoding); + cursor += width; + pos += 1; + key = op.id.into(); + + self.operations.push((obj, op)); + } + + doc.ops.hint(&obj, cursor - width, pos - 1); + + // handle the observer + if let Some(obs) = op_observer.as_mut() { + match splice_type { + SpliceType::List => { + let start = self.operations.len() - values.len(); + for (offset, v) in values.iter().enumerate() { + let op = &self.operations[start + offset].1; + let value = (v.clone().into(), doc.ops.id_to_exid(op.id)); + obs.insert(doc, ex_obj.clone(), index + offset, value) + } + } + SpliceType::Text(text, _) => obs.splice_text(doc, ex_obj, index, text), + } } - pos += 1; } + Ok(()) } @@ -485,31 +658,55 @@ impl TransactionInner { // TODO - id_to_exid should be a noop if not used - change type to Into? 
if let Some(op_observer) = op_observer { let ex_obj = doc.ops.id_to_exid(obj.0); - let parents = doc.ops.parents(obj); if op.insert { - let value = (op.value(), doc.ops.id_to_exid(op.id)); - match prop { - Prop::Map(_) => panic!("insert into a map"), - Prop::Seq(index) => op_observer.insert(parents, ex_obj, index, value), + let obj_type = doc.ops.object_type(&obj); + assert!(obj_type.unwrap().is_sequence()); + match (obj_type, prop) { + (Some(ObjType::List), Prop::Seq(index)) => { + let value = (op.value(), doc.ops.id_to_exid(op.id)); + op_observer.insert(doc, ex_obj, index, value) + } + (Some(ObjType::Text), Prop::Seq(index)) => { + // FIXME + op_observer.splice_text(doc, ex_obj, index, op.to_str()) + } + _ => {} } } else if op.is_delete() { - op_observer.delete(parents, ex_obj, prop.clone()); + op_observer.delete(doc, ex_obj, prop); } else if let Some(value) = op.get_increment_value() { - op_observer.increment( - parents, - ex_obj, - prop.clone(), - (value, doc.ops.id_to_exid(op.id)), - ); + op_observer.increment(doc, ex_obj, prop, (value, doc.ops.id_to_exid(op.id))); } else { let value = (op.value(), doc.ops.id_to_exid(op.id)); - op_observer.put(parents, ex_obj, prop.clone(), value, false); + op_observer.put(doc, ex_obj, prop, value, false); } } - self.operations.push((obj, prop, op)); + self.operations.push((obj, op)); } } +enum SpliceType<'a> { + List, + Text(&'a str, TextEncoding), +} + +impl<'a> SpliceType<'a> { + fn encoding(&self) -> ListEncoding { + match self { + SpliceType::List => ListEncoding::List, + SpliceType::Text(_, encoding) => ListEncoding::Text(*encoding), + } + } +} + +struct SpliceArgs<'a> { + obj: ObjId, + index: usize, + del: usize, + values: Vec, + splice_type: SpliceType<'a>, +} + #[cfg(test)] mod tests { use crate::{transaction::Transactable, ROOT}; diff --git a/rust/automerge/src/transaction/manual_transaction.rs b/rust/automerge/src/transaction/manual_transaction.rs index cf3123df..22115aab 100644 --- 
a/rust/automerge/src/transaction/manual_transaction.rs +++ b/rust/automerge/src/transaction/manual_transaction.rs @@ -191,6 +191,16 @@ impl<'a, Obs: observation::Observation> Transactable for Transaction<'a, Obs> { self.do_tx(|tx, doc, obs| tx.splice(doc, obs, obj.as_ref(), pos, del, vals)) } + fn splice_text>( + &mut self, + obj: O, + pos: usize, + del: usize, + text: &str, + ) -> Result<(), AutomergeError> { + self.do_tx(|tx, doc, obs| tx.splice_text(doc, obs, obj.as_ref(), pos, del, text)) + } + fn keys>(&self, obj: O) -> Keys<'_, '_> { self.doc.keys(obj) } @@ -249,7 +259,7 @@ impl<'a, Obs: observation::Observation> Transactable for Transaction<'a, Obs> { self.doc.length_at(obj, heads) } - fn object_type>(&self, obj: O) -> Option { + fn object_type>(&self, obj: O) -> Result { self.doc.object_type(obj) } diff --git a/rust/automerge/src/transaction/transactable.rs b/rust/automerge/src/transaction/transactable.rs index bf4e2fe5..7f38edbe 100644 --- a/rust/automerge/src/transaction/transactable.rs +++ b/rust/automerge/src/transaction/transactable.rs @@ -91,10 +91,7 @@ pub trait Transactable { pos: usize, del: usize, text: &str, - ) -> Result<(), AutomergeError> { - let vals = text.chars().map(|c| c.into()); - self.splice(obj, pos, del, vals) - } + ) -> Result<(), AutomergeError>; /// Get the keys of the given object, it should be a map. fn keys>(&self, obj: O) -> Keys<'_, '_>; @@ -139,7 +136,7 @@ pub trait Transactable { fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize; /// Get type for object - fn object_type>(&self, obj: O) -> Option; + fn object_type>(&self, obj: O) -> Result; /// Get the string that this text object represents. 
fn text>(&self, obj: O) -> Result; @@ -193,9 +190,7 @@ pub trait Transactable { fn parents>(&self, obj: O) -> Result, AutomergeError>; fn path_to_object>(&self, obj: O) -> Result, AutomergeError> { - let mut path = self.parents(obj.as_ref().clone())?.collect::>(); - path.reverse(); - Ok(path) + Ok(self.parents(obj.as_ref().clone())?.path()) } /// The heads this transaction will be based on diff --git a/rust/automerge/src/types.rs b/rust/automerge/src/types.rs index 95b5505e..b5da60d7 100644 --- a/rust/automerge/src/types.rs +++ b/rust/automerge/src/types.rs @@ -393,6 +393,15 @@ pub enum Prop { Seq(usize), } +impl Prop { + pub(crate) fn to_index(&self) -> Option { + match self { + Prop::Map(_) => None, + Prop::Seq(n) => Some(*n), + } + } +} + impl Display for Prop { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { @@ -437,6 +446,40 @@ impl ObjId { } } +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum TextEncoding { + Utf8, + Utf16, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub(crate) enum ListEncoding { + List, + Text(TextEncoding), +} + +impl Default for ListEncoding { + fn default() -> Self { + ListEncoding::List + } +} + +impl Default for TextEncoding { + fn default() -> Self { + TextEncoding::Utf8 + } +} + +impl ListEncoding { + pub(crate) fn new(obj: ObjType, text_encoding: TextEncoding) -> Self { + if obj == ObjType::Text { + ListEncoding::Text(text_encoding) + } else { + ListEncoding::List + } + } +} + #[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] pub(crate) struct ElemId(pub(crate) OpId); @@ -491,6 +534,22 @@ impl Op { } } + pub(crate) fn width(&self, encoding: ListEncoding) -> usize { + match encoding { + ListEncoding::List => 1, + ListEncoding::Text(TextEncoding::Utf8) => self.to_str().chars().count(), + ListEncoding::Text(TextEncoding::Utf16) => self.to_str().encode_utf16().count(), + } + } + + pub(crate) fn to_str(&self) -> &str { + if let OpType::Put(ScalarValue::Str(s)) = 
&self.action { + s + } else { + "\u{fffc}" + } + } + pub(crate) fn visible(&self) -> bool { if self.is_inc() { false diff --git a/rust/automerge/tests/test.rs b/rust/automerge/tests/test.rs index 896c623a..876acb74 100644 --- a/rust/automerge/tests/test.rs +++ b/rust/automerge/tests/test.rs @@ -1123,8 +1123,7 @@ fn test_merging_test_conflicts_then_saving_and_loading() { let mut doc1 = new_doc_with_actor(actor1); let text = doc1.put_object(ROOT, "text", ObjType::Text).unwrap(); - doc1.splice(&text, 0, 0, "hello".chars().map(|c| c.to_string().into())) - .unwrap(); + doc1.splice_text(&text, 0, 0, "hello").unwrap(); let mut doc2 = AutoCommit::load(&doc1.save()).unwrap(); doc2.set_actor(actor2); @@ -1133,11 +1132,10 @@ fn test_merging_test_conflicts_then_saving_and_loading() { "text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"o"}]}, }}; - doc2.splice(&text, 4, 1, Vec::new()).unwrap(); - doc2.splice(&text, 4, 0, vec!["!".into()]).unwrap(); - doc2.splice(&text, 5, 0, vec![" ".into()]).unwrap(); - doc2.splice(&text, 6, 0, "world".chars().map(|c| c.into())) - .unwrap(); + doc2.splice_text(&text, 4, 1, "").unwrap(); + doc2.splice_text(&text, 4, 0, "!").unwrap(); + doc2.splice_text(&text, 5, 0, " ").unwrap(); + doc2.splice_text(&text, 6, 0, "world").unwrap(); assert_doc!( doc2.document(), @@ -1373,3 +1371,29 @@ fn simple_bad_saveload() { let bytes = doc.save(); Automerge::load(&bytes).unwrap(); } + +#[test] +fn ops_on_wrong_objets() -> Result<(), AutomergeError> { + let mut doc = AutoCommit::new(); + let list = doc.put_object(&automerge::ROOT, "list", ObjType::List)?; + doc.insert(&list, 0, "a")?; + doc.insert(&list, 1, "b")?; + let e1 = doc.put(&list, "a", "AAA"); + assert_eq!(e1, Err(AutomergeError::InvalidOp(ObjType::List))); + let e2 = doc.splice_text(&list, 0, 0, "hello world"); + assert_eq!(e2, Err(AutomergeError::InvalidOp(ObjType::List))); + let map = doc.put_object(&automerge::ROOT, "map", ObjType::Map)?; + doc.put(&map, "a", "AAA")?; + doc.put(&map, "b", "BBB")?; + 
let e3 = doc.insert(&map, 0, "b"); + assert_eq!(e3, Err(AutomergeError::InvalidOp(ObjType::Map))); + let e4 = doc.splice_text(&map, 0, 0, "hello world"); + assert_eq!(e4, Err(AutomergeError::InvalidOp(ObjType::Map))); + let text = doc.put_object(&automerge::ROOT, "text", ObjType::Text)?; + doc.splice_text(&text, 0, 0, "hello world")?; + let e5 = doc.put(&text, "a", "AAA"); + assert_eq!(e5, Err(AutomergeError::InvalidOp(ObjType::Text))); + let e6 = doc.insert(&text, 0, "b"); + assert_eq!(e6, Err(AutomergeError::InvalidOp(ObjType::Text))); + Ok(()) +} diff --git a/rust/edit-trace/.gitignore b/rust/edit-trace/.gitignore index bf54725a..55778aca 100644 --- a/rust/edit-trace/.gitignore +++ b/rust/edit-trace/.gitignore @@ -3,3 +3,4 @@ Cargo.lock node_modules yarn.lock flamegraph.svg +/prof diff --git a/rust/edit-trace/automerge-js.js b/rust/edit-trace/automerge-js.js index eae08634..6a6d3389 100644 --- a/rust/edit-trace/automerge-js.js +++ b/rust/edit-trace/automerge-js.js @@ -1,12 +1,9 @@ // Apply the paper editing trace to an Automerge.Text object, one char at a time const { edits, finalText } = require('./editing-trace') -const Automerge = require('../automerge-js') -const wasm_api = require('../automerge-wasm') - -Automerge.use(wasm_api) +const Automerge = require('../../javascript') const start = new Date() -let state = Automerge.from({text: new Automerge.Text()}) +let state = Automerge.from({text: ""}) state = Automerge.change(state, doc => { for (let i = 0; i < edits.length; i++) { @@ -14,14 +11,13 @@ state = Automerge.change(state, doc => { console.log(`Processed ${i} edits in ${new Date() - start} ms`) } let edit = edits[i] - if (edit[1] > 0) doc.text.deleteAt(edit[0], edit[1]) - if (edit.length > 2) doc.text.insertAt(edit[0], ...edit.slice(2)) + Automerge.splice(doc, 'text', ... 
edit) } }) let _ = Automerge.save(state) console.log(`Done in ${new Date() - start} ms`) -if (state.text.join('') !== finalText) { +if (state.text !== finalText) { throw new RangeError('ERROR: final text did not match expectation') } diff --git a/rust/edit-trace/automerge-rs.js b/rust/edit-trace/automerge-rs.js deleted file mode 100644 index 342f5268..00000000 --- a/rust/edit-trace/automerge-rs.js +++ /dev/null @@ -1,31 +0,0 @@ - -// this assumes that the automerge-rs folder is checked out along side this repo -// and someone has run - -// # cd automerge-rs/automerge-backend-wasm -// # yarn release - -const { edits, finalText } = require('./editing-trace') -const Automerge = require('../../automerge') -const path = require('path') -const wasmBackend = require(path.resolve("../../automerge-rs/automerge-backend-wasm")) -Automerge.setDefaultBackend(wasmBackend) - -const start = new Date() -let state = Automerge.from({text: new Automerge.Text()}) - -state = Automerge.change(state, doc => { - for (let i = 0; i < edits.length; i++) { - if (i % 10000 === 0) { - console.log(`Processed ${i} edits in ${new Date() - start} ms`) - } - if (edits[i][1] > 0) doc.text.deleteAt(edits[i][0], edits[i][1]) - if (edits[i].length > 2) doc.text.insertAt(edits[i][0], ...edits[i].slice(2)) - } -}) - -console.log(`Done in ${new Date() - start} ms`) - -if (state.text.join('') !== finalText) { - throw new RangeError('ERROR: final text did not match expectation') -} diff --git a/rust/edit-trace/automerge-wasm.js b/rust/edit-trace/automerge-wasm.js index e0f1454d..82786cd9 100644 --- a/rust/edit-trace/automerge-wasm.js +++ b/rust/edit-trace/automerge-wasm.js @@ -4,6 +4,8 @@ const Automerge = require('../automerge-wasm') const start = new Date() let doc = Automerge.create(); +doc.enablePatches(true) +let mat = doc.materialize("/") let text = doc.putObject("_root", "text", "", "text") for (let i = 0; i < edits.length; i++) { @@ -22,6 +24,11 @@ let t_time = new Date() let t = doc.text(text); 
console.log(`doc.text in ${new Date() - t_time} ms`) +t_time = new Date() +t = doc.text(text); +mat = doc.applyPatches(mat) +console.log(`doc.applyPatches() in ${new Date() - t_time} ms`) + if (doc.text(text) !== finalText) { throw new RangeError('ERROR: final text did not match expectation') } From 1222fc0df130a9883e3a967ba57b2df05d94b7ff Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sat, 10 Dec 2022 02:36:05 -0800 Subject: [PATCH 661/730] rewrite opnode to store usize instead of Op (#471) --- rust/automerge/src/op_tree.rs | 577 +++--------------- rust/automerge/src/op_tree/iter.rs | 16 +- rust/automerge/src/op_tree/node.rs | 480 +++++++++++++++ rust/automerge/src/query.rs | 9 +- rust/automerge/src/query/elem_id_pos.rs | 4 +- rust/automerge/src/query/insert.rs | 4 +- rust/automerge/src/query/keys.rs | 14 +- rust/automerge/src/query/keys_at.rs | 14 +- rust/automerge/src/query/len.rs | 4 +- rust/automerge/src/query/list_range.rs | 12 +- rust/automerge/src/query/list_range_at.rs | 12 +- rust/automerge/src/query/list_vals.rs | 4 +- rust/automerge/src/query/map_range.rs | 14 +- rust/automerge/src/query/map_range_at.rs | 14 +- rust/automerge/src/query/nth.rs | 4 +- rust/automerge/src/query/opid.rs | 2 +- rust/automerge/src/query/opid_vis.rs | 2 +- rust/automerge/src/query/prop.rs | 3 +- rust/automerge/src/query/prop_at.rs | 5 +- rust/automerge/src/query/seek_op.rs | 11 +- .../automerge/src/query/seek_op_with_patch.rs | 7 +- rust/automerge/src/visualisation.rs | 26 +- rust/edit-trace/automerge-js.js | 12 +- rust/edit-trace/automerge-wasm.js | 10 +- rust/edit-trace/package.json | 4 +- rust/edit-trace/src/main.rs | 16 +- 26 files changed, 682 insertions(+), 598 deletions(-) create mode 100644 rust/automerge/src/op_tree/node.rs diff --git a/rust/automerge/src/op_tree.rs b/rust/automerge/src/op_tree.rs index fae229e2..909a75a7 100644 --- a/rust/automerge/src/op_tree.rs +++ b/rust/automerge/src/op_tree.rs @@ -1,14 +1,9 @@ -use std::{ - cmp::{min, Ordering}, - fmt::Debug, - 
mem, - ops::RangeBounds, -}; +use std::{fmt::Debug, mem, ops::RangeBounds}; pub(crate) use crate::op_set::OpSetMetadata; use crate::{ clock::Clock, - query::{self, ChangeVisibility, Index, QueryResult, TreeQuery}, + query::{self, ChangeVisibility, QueryResult, TreeQuery}, }; use crate::{ types::{ObjId, Op, OpId}, @@ -16,10 +11,12 @@ use crate::{ }; use std::collections::HashSet; -pub(crate) const B: usize = 16; - mod iter; +mod node; + pub(crate) use iter::OpTreeIter; +#[allow(unused)] +pub(crate) use node::{OpTreeNode, B}; #[derive(Debug, Clone, PartialEq)] pub(crate) struct OpTree { @@ -56,20 +53,16 @@ impl OpTree { #[derive(Clone, Debug)] pub(crate) struct OpTreeInternal { pub(crate) root_node: Option, -} - -#[derive(Clone, Debug)] -pub(crate) struct OpTreeNode { - pub(crate) children: Vec, - pub(crate) elements: Vec, - pub(crate) index: Index, - length: usize, + pub(crate) ops: Vec, } impl OpTreeInternal { /// Construct a new, empty, sequence. pub(crate) fn new() -> Self { - Self { root_node: None } + Self { + root_node: None, + ops: vec![], + } } /// Get the length of the sequence. 
@@ -78,13 +71,19 @@ impl OpTreeInternal { } pub(crate) fn keys(&self) -> Option> { - self.root_node.as_ref().map(query::Keys::new) + if self.root_node.is_some() { + Some(query::Keys::new(self)) + } else { + None + } } pub(crate) fn keys_at(&self, clock: Clock) -> Option> { - self.root_node - .as_ref() - .map(|root| query::KeysAt::new(root, clock)) + if self.root_node.is_some() { + Some(query::KeysAt::new(self, clock)) + } else { + None + } } pub(crate) fn map_range<'a, R: RangeBounds>( @@ -92,9 +91,11 @@ impl OpTreeInternal { range: R, meta: &'a OpSetMetadata, ) -> Option> { - self.root_node - .as_ref() - .map(|node| query::MapRange::new(range, node, meta)) + if self.root_node.is_some() { + Some(query::MapRange::new(range, self, meta)) + } else { + None + } } pub(crate) fn map_range_at<'a, R: RangeBounds>( @@ -103,18 +104,22 @@ impl OpTreeInternal { meta: &'a OpSetMetadata, clock: Clock, ) -> Option> { - self.root_node - .as_ref() - .map(|node| query::MapRangeAt::new(range, node, meta, clock)) + if self.root_node.is_some() { + Some(query::MapRangeAt::new(range, self, meta, clock)) + } else { + None + } } pub(crate) fn list_range>( &self, range: R, ) -> Option> { - self.root_node - .as_ref() - .map(|node| query::ListRange::new(range, node)) + if self.root_node.is_some() { + Some(query::ListRange::new(range, self)) + } else { + None + } } pub(crate) fn list_range_at>( @@ -122,22 +127,24 @@ impl OpTreeInternal { range: R, clock: Clock, ) -> Option> { - self.root_node - .as_ref() - .map(|node| query::ListRangeAt::new(range, clock, node)) + if self.root_node.is_some() { + Some(query::ListRangeAt::new(range, clock, self)) + } else { + None + } } pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, mut query: Q, m: &OpSetMetadata) -> Q where Q: TreeQuery<'a>, { - self.root_node - .as_ref() - .map(|root| match query.query_node_with_metadata(root, m) { - QueryResult::Descend => root.search(&mut query, m, None), - QueryResult::Skip(skip) => root.search(&mut query, m, Some(skip)), + 
self.root_node.as_ref().map(|root| { + match query.query_node_with_metadata(root, m, &self.ops) { + QueryResult::Descend => root.search(&mut query, m, &self.ops, None), + QueryResult::Skip(skip) => root.search(&mut query, m, &self.ops, Some(skip)), _ => true, - }); + } + }); query } @@ -151,7 +158,7 @@ impl OpTreeInternal { /// # Panics /// /// Panics if `index > len`. - pub(crate) fn insert(&mut self, index: usize, element: Op) { + pub(crate) fn insert(&mut self, index: usize, op: Op) { assert!( index <= self.len(), "tried to insert at {} but len is {}", @@ -159,6 +166,9 @@ impl OpTreeInternal { self.len() ); + let element = self.ops.len(); + self.ops.push(op); + let old_len = self.len(); if let Some(root) = self.root_node.as_mut() { #[cfg(debug_assertions)] @@ -174,7 +184,7 @@ impl OpTreeInternal { root.length += old_root.len(); root.index = old_root.index.clone(); root.children.push(old_root); - root.split_child(0); + root.split_child(0, &self.ops); assert_eq!(original_len, root.len()); @@ -187,14 +197,14 @@ impl OpTreeInternal { (&mut root.children[0], index) }; root.length += 1; - root.index.insert(&element); - child.insert_into_non_full_node(insertion_index, element) + root.index.insert(&self.ops[element]); + child.insert_into_non_full_node(insertion_index, element, &self.ops) } else { - root.insert_into_non_full_node(index, element) + root.insert_into_non_full_node(index, element, &self.ops) } } else { let mut root = OpTreeNode::new(); - root.insert_into_non_full_node(index, element); + root.insert_into_non_full_node(index, element, &self.ops); self.root_node = Some(root) } assert_eq!(self.len(), old_len + 1, "{:#?}", self); @@ -202,16 +212,28 @@ impl OpTreeInternal { /// Get the `element` at `index` in the sequence. 
pub(crate) fn get(&self, index: usize) -> Option<&Op> { - self.root_node.as_ref().and_then(|n| n.get(index)) + self.root_node + .as_ref() + .and_then(|n| n.get(index)) + .map(|n| &self.ops[n]) } // this replaces get_mut() because it allows the indexes to update correctly pub(crate) fn update(&mut self, index: usize, f: F) where - F: FnMut(&mut Op), + F: FnOnce(&mut Op), { if self.len() > index { - self.root_node.as_mut().unwrap().update(index, f); + let n = self.root_node.as_ref().unwrap().get(index).unwrap(); + let new_element = self.ops.get_mut(n).unwrap(); + let old_vis = new_element.visible(); + f(new_element); + let vis = ChangeVisibility { + old_vis, + new_vis: new_element.visible(), + op: new_element, + }; + self.root_node.as_mut().unwrap().update(index, vis); } } @@ -224,7 +246,7 @@ impl OpTreeInternal { if let Some(root) = self.root_node.as_mut() { #[cfg(debug_assertions)] let len = root.check(); - let old = root.remove(index); + let old = root.remove(index, &self.ops); if root.elements.is_empty() { if root.is_leaf() { @@ -236,466 +258,13 @@ impl OpTreeInternal { #[cfg(debug_assertions)] debug_assert_eq!(len, self.root_node.as_ref().map_or(0, |r| r.check()) + 1); - old + self.ops[old].clone() } else { panic!("remove from empty tree") } } } -impl OpTreeNode { - fn new() -> Self { - Self { - elements: Vec::new(), - children: Vec::new(), - index: Default::default(), - length: 0, - } - } - - pub(crate) fn search<'a, 'b: 'a, Q>( - &'b self, - query: &mut Q, - m: &OpSetMetadata, - skip: Option, - ) -> bool - where - Q: TreeQuery<'a>, - { - if self.is_leaf() { - let skip = skip.unwrap_or(0); - for e in self.elements.iter().skip(skip) { - if query.query_element_with_metadata(e, m) == QueryResult::Finish { - return true; - } - } - false - } else { - let mut skip = skip.unwrap_or(0); - for (child_index, child) in self.children.iter().enumerate() { - match skip.cmp(&child.len()) { - Ordering::Greater => { - // not in this child at all - // take off the number of 
elements in the child as well as the next element - skip -= child.len() + 1; - } - Ordering::Equal => { - // just try the element - skip -= child.len(); - if let Some(e) = self.elements.get(child_index) { - if query.query_element_with_metadata(e, m) == QueryResult::Finish { - return true; - } - } - } - Ordering::Less => { - // descend and try find it - match query.query_node_with_metadata(child, m) { - QueryResult::Descend => { - // search in the child node, passing in the number of items left to - // skip - if child.search(query, m, Some(skip)) { - return true; - } - } - QueryResult::Finish => return true, - QueryResult::Next => (), - QueryResult::Skip(_) => panic!("had skip from non-root node"), - } - if let Some(e) = self.elements.get(child_index) { - if query.query_element_with_metadata(e, m) == QueryResult::Finish { - return true; - } - } - // reset the skip to zero so we continue iterating normally - skip = 0; - } - } - } - false - } - } - - pub(crate) fn len(&self) -> usize { - self.length - } - - fn reindex(&mut self) { - let mut index = Index::new(); - for c in &self.children { - index.merge(&c.index); - } - for e in &self.elements { - index.insert(e); - } - self.index = index - } - - fn is_leaf(&self) -> bool { - self.children.is_empty() - } - - fn is_full(&self) -> bool { - self.elements.len() >= 2 * B - 1 - } - - /// Returns the child index and the given index adjusted for the cumulative index before that - /// child. 
- fn find_child_index(&self, index: usize) -> (usize, usize) { - let mut cumulative_len = 0; - for (child_index, child) in self.children.iter().enumerate() { - if cumulative_len + child.len() >= index { - return (child_index, index - cumulative_len); - } else { - cumulative_len += child.len() + 1; - } - } - panic!("index {} not found in node with len {}", index, self.len()) - } - - fn insert_into_non_full_node(&mut self, index: usize, element: Op) { - assert!(!self.is_full()); - - self.index.insert(&element); - - if self.is_leaf() { - self.length += 1; - self.elements.insert(index, element); - } else { - let (child_index, sub_index) = self.find_child_index(index); - let child = &mut self.children[child_index]; - - if child.is_full() { - self.split_child(child_index); - - // child structure has changed so we need to find the index again - let (child_index, sub_index) = self.find_child_index(index); - let child = &mut self.children[child_index]; - child.insert_into_non_full_node(sub_index, element); - } else { - child.insert_into_non_full_node(sub_index, element); - } - self.length += 1; - } - } - - // A utility function to split the child `full_child_index` of this node - // Note that `full_child_index` must be full when this function is called. - fn split_child(&mut self, full_child_index: usize) { - let original_len_self = self.len(); - - let full_child = &mut self.children[full_child_index]; - - // Create a new node which is going to store (B-1) keys - // of the full child. 
- let mut successor_sibling = OpTreeNode::new(); - - let original_len = full_child.len(); - assert!(full_child.is_full()); - - successor_sibling.elements = full_child.elements.split_off(B); - - if !full_child.is_leaf() { - successor_sibling.children = full_child.children.split_off(B); - } - - let middle = full_child.elements.pop().unwrap(); - - full_child.length = - full_child.elements.len() + full_child.children.iter().map(|c| c.len()).sum::(); - - successor_sibling.length = successor_sibling.elements.len() - + successor_sibling - .children - .iter() - .map(|c| c.len()) - .sum::(); - - let z_len = successor_sibling.len(); - - let full_child_len = full_child.len(); - - full_child.reindex(); - successor_sibling.reindex(); - - self.children - .insert(full_child_index + 1, successor_sibling); - - self.elements.insert(full_child_index, middle); - - assert_eq!(full_child_len + z_len + 1, original_len, "{:#?}", self); - - assert_eq!(original_len_self, self.len()); - } - - fn remove_from_leaf(&mut self, index: usize) -> Op { - self.length -= 1; - self.elements.remove(index) - } - - fn remove_element_from_non_leaf(&mut self, index: usize, element_index: usize) -> Op { - self.length -= 1; - if self.children[element_index].elements.len() >= B { - let total_index = self.cumulative_index(element_index); - // recursively delete index - 1 in predecessor_node - let predecessor = self.children[element_index].remove(index - 1 - total_index); - // replace element with that one - mem::replace(&mut self.elements[element_index], predecessor) - } else if self.children[element_index + 1].elements.len() >= B { - // recursively delete index + 1 in successor_node - let total_index = self.cumulative_index(element_index + 1); - let successor = self.children[element_index + 1].remove(index + 1 - total_index); - // replace element with that one - mem::replace(&mut self.elements[element_index], successor) - } else { - let middle_element = self.elements.remove(element_index); - let 
successor_child = self.children.remove(element_index + 1); - self.children[element_index].merge(middle_element, successor_child); - - let total_index = self.cumulative_index(element_index); - self.children[element_index].remove(index - total_index) - } - } - - fn cumulative_index(&self, child_index: usize) -> usize { - self.children[0..child_index] - .iter() - .map(|c| c.len() + 1) - .sum() - } - - fn remove_from_internal_child(&mut self, index: usize, mut child_index: usize) -> Op { - if self.children[child_index].elements.len() < B - && if child_index > 0 { - self.children[child_index - 1].elements.len() < B - } else { - true - } - && if child_index + 1 < self.children.len() { - self.children[child_index + 1].elements.len() < B - } else { - true - } - { - // if the child and its immediate siblings have B-1 elements merge the child - // with one sibling, moving an element from this node into the new merged node - // to be the median - - if child_index > 0 { - let middle = self.elements.remove(child_index - 1); - - // use the predessor sibling - let successor = self.children.remove(child_index); - child_index -= 1; - - self.children[child_index].merge(middle, successor); - } else { - let middle = self.elements.remove(child_index); - - // use the sucessor sibling - let successor = self.children.remove(child_index + 1); - - self.children[child_index].merge(middle, successor); - } - } else if self.children[child_index].elements.len() < B { - if child_index > 0 - && self - .children - .get(child_index - 1) - .map_or(false, |c| c.elements.len() >= B) - { - let last_element = self.children[child_index - 1].elements.pop().unwrap(); - assert!(!self.children[child_index - 1].elements.is_empty()); - self.children[child_index - 1].length -= 1; - self.children[child_index - 1].index.remove(&last_element); - - let parent_element = - mem::replace(&mut self.elements[child_index - 1], last_element); - - self.children[child_index].index.insert(&parent_element); - 
self.children[child_index] - .elements - .insert(0, parent_element); - self.children[child_index].length += 1; - - if let Some(last_child) = self.children[child_index - 1].children.pop() { - self.children[child_index - 1].length -= last_child.len(); - self.children[child_index - 1].reindex(); - self.children[child_index].length += last_child.len(); - self.children[child_index].children.insert(0, last_child); - self.children[child_index].reindex(); - } - } else if self - .children - .get(child_index + 1) - .map_or(false, |c| c.elements.len() >= B) - { - let first_element = self.children[child_index + 1].elements.remove(0); - self.children[child_index + 1].index.remove(&first_element); - self.children[child_index + 1].length -= 1; - - assert!(!self.children[child_index + 1].elements.is_empty()); - - let parent_element = mem::replace(&mut self.elements[child_index], first_element); - - self.children[child_index].length += 1; - self.children[child_index].index.insert(&parent_element); - self.children[child_index].elements.push(parent_element); - - if !self.children[child_index + 1].is_leaf() { - let first_child = self.children[child_index + 1].children.remove(0); - self.children[child_index + 1].length -= first_child.len(); - self.children[child_index + 1].reindex(); - self.children[child_index].length += first_child.len(); - - self.children[child_index].children.push(first_child); - self.children[child_index].reindex(); - } - } - } - self.length -= 1; - let total_index = self.cumulative_index(child_index); - self.children[child_index].remove(index - total_index) - } - - fn check(&self) -> usize { - let l = self.elements.len() + self.children.iter().map(|c| c.check()).sum::(); - assert_eq!(self.len(), l, "{:#?}", self); - - l - } - - pub(crate) fn remove(&mut self, index: usize) -> Op { - let original_len = self.len(); - if self.is_leaf() { - let v = self.remove_from_leaf(index); - self.index.remove(&v); - assert_eq!(original_len, self.len() + 1); - 
debug_assert_eq!(self.check(), self.len()); - v - } else { - let mut total_index = 0; - for (child_index, child) in self.children.iter().enumerate() { - match (total_index + child.len()).cmp(&index) { - Ordering::Less => { - // should be later on in the loop - total_index += child.len() + 1; - continue; - } - Ordering::Equal => { - let v = self.remove_element_from_non_leaf( - index, - min(child_index, self.elements.len() - 1), - ); - self.index.remove(&v); - assert_eq!(original_len, self.len() + 1); - debug_assert_eq!(self.check(), self.len()); - return v; - } - Ordering::Greater => { - let v = self.remove_from_internal_child(index, child_index); - self.index.remove(&v); - assert_eq!(original_len, self.len() + 1); - debug_assert_eq!(self.check(), self.len()); - return v; - } - } - } - panic!( - "index not found to remove {} {} {} {}", - index, - total_index, - self.len(), - self.check() - ); - } - } - - fn merge(&mut self, middle: Op, successor_sibling: OpTreeNode) { - self.index.insert(&middle); - self.index.merge(&successor_sibling.index); - self.elements.push(middle); - self.elements.extend(successor_sibling.elements); - self.children.extend(successor_sibling.children); - self.length += successor_sibling.length + 1; - assert!(self.is_full()); - } - - /// Update the operation at the given index using the provided function. - /// - /// This handles updating the indices after the update. 
- pub(crate) fn update(&mut self, index: usize, f: F) -> ChangeVisibility<'_> - where - F: FnOnce(&mut Op), - { - if self.is_leaf() { - let new_element = self.elements.get_mut(index).unwrap(); - let old_vis = new_element.visible(); - f(new_element); - self.index.change_vis(ChangeVisibility { - old_vis, - new_vis: new_element.visible(), - op: new_element, - }) - } else { - let mut cumulative_len = 0; - let len = self.len(); - for (child_index, child) in self.children.iter_mut().enumerate() { - match (cumulative_len + child.len()).cmp(&index) { - Ordering::Less => { - cumulative_len += child.len() + 1; - } - Ordering::Equal => { - let new_element = self.elements.get_mut(child_index).unwrap(); - let old_vis = new_element.visible(); - f(new_element); - return self.index.change_vis(ChangeVisibility { - old_vis, - new_vis: new_element.visible(), - op: new_element, - }); - } - Ordering::Greater => { - let vis_args = child.update(index - cumulative_len, f); - return self.index.change_vis(vis_args); - } - } - } - panic!("Invalid index to set: {} but len was {}", index, len) - } - } - - pub(crate) fn last(&self) -> &Op { - if self.is_leaf() { - // node is never empty so this is safe - self.elements.last().unwrap() - } else { - // if not a leaf then there is always at least one child - self.children.last().unwrap().last() - } - } - - pub(crate) fn get(&self, index: usize) -> Option<&Op> { - if self.is_leaf() { - return self.elements.get(index); - } else { - let mut cumulative_len = 0; - for (child_index, child) in self.children.iter().enumerate() { - match (cumulative_len + child.len()).cmp(&index) { - Ordering::Less => { - cumulative_len += child.len() + 1; - } - Ordering::Equal => return self.elements.get(child_index), - Ordering::Greater => { - return child.get(index - cumulative_len); - } - } - } - } - None - } -} - impl Default for OpTreeInternal { fn default() -> Self { Self::new() diff --git a/rust/automerge/src/op_tree/iter.rs b/rust/automerge/src/op_tree/iter.rs 
index 8d070f11..5f2114c8 100644 --- a/rust/automerge/src/op_tree/iter.rs +++ b/rust/automerge/src/op_tree/iter.rs @@ -21,6 +21,7 @@ impl<'a> OpTreeIter<'a> { }, cumulative_index: 0, root_node: root, + ops: &tree.ops, }) .unwrap_or(Inner::Empty), ) @@ -50,6 +51,7 @@ enum Inner<'a> { // How far through the whole optree we are cumulative_index: usize, root_node: &'a OpTreeNode, + ops: &'a [Op], }, } @@ -75,6 +77,7 @@ impl<'a> Iterator for Inner<'a> { Inner::Empty => None, Inner::NonEmpty { ancestors, + ops, current, cumulative_index, .. @@ -83,10 +86,10 @@ impl<'a> Iterator for Inner<'a> { // If we're in a leaf node and we haven't exhausted it yet we just return the elements // of the leaf node if current.index < current.node.len() { - let result = ¤t.node.elements[current.index]; + let result = current.node.elements[current.index]; current.index += 1; *cumulative_index += 1; - Some(result) + Some(&ops[result]) } else { // We've exhausted the leaf node, we must find the nearest non-exhausted parent (lol) let node_iter = loop { @@ -113,10 +116,10 @@ impl<'a> Iterator for Inner<'a> { // return the element from the parent node which is one after the index at which we // descended into the child *current = node_iter; - let result = ¤t.node.elements[current.index]; + let result = current.node.elements[current.index]; current.index += 1; *cumulative_index += 1; - Some(result) + Some(&ops[result]) } } else { // If we're in a non-leaf node then the last iteration returned an element from the @@ -147,6 +150,7 @@ impl<'a> Iterator for Inner<'a> { Self::Empty => None, Self::NonEmpty { root_node, + ops, cumulative_index, current, ancestors, @@ -177,7 +181,7 @@ impl<'a> Iterator for Inner<'a> { Ordering::Equal => { *cumulative_index += child.len() + 1; current.index = child_index + 1; - return Some(¤t.node.elements[child_index]); + return Some(&ops[current.node.elements[child_index]]); } Ordering::Greater => { current.index = child_index; @@ -197,7 +201,7 @@ impl<'a> Iterator for 
Inner<'a> { // we're in a leaf node and we kept track of the cumulative index as we went, let index_in_this_node = n.saturating_sub(*cumulative_index); current.index = index_in_this_node + 1; - Some(¤t.node.elements[index_in_this_node]) + Some(&ops[current.node.elements[index_in_this_node]]) } } } diff --git a/rust/automerge/src/op_tree/node.rs b/rust/automerge/src/op_tree/node.rs new file mode 100644 index 00000000..ea7fbf48 --- /dev/null +++ b/rust/automerge/src/op_tree/node.rs @@ -0,0 +1,480 @@ +use std::{ + cmp::{min, Ordering}, + fmt::Debug, + mem, +}; + +pub(crate) use crate::op_set::OpSetMetadata; +use crate::query::{ChangeVisibility, Index, QueryResult, TreeQuery}; +use crate::types::Op; +pub(crate) const B: usize = 16; + +#[derive(Clone, Debug)] +pub(crate) struct OpTreeNode { + pub(crate) children: Vec, + pub(crate) elements: Vec, + pub(crate) index: Index, + pub(crate) length: usize, +} + +impl OpTreeNode { + pub(crate) fn new() -> Self { + Self { + elements: Vec::new(), + children: Vec::new(), + index: Default::default(), + length: 0, + } + } + + pub(crate) fn search<'a, 'b: 'a, Q>( + &'b self, + query: &mut Q, + m: &OpSetMetadata, + ops: &'a [Op], + skip: Option, + ) -> bool + where + Q: TreeQuery<'a>, + { + if self.is_leaf() { + let skip = skip.unwrap_or(0); + for e in self.elements.iter().skip(skip) { + if query.query_element_with_metadata(&ops[*e], m) == QueryResult::Finish { + return true; + } + } + false + } else { + let mut skip = skip.unwrap_or(0); + for (child_index, child) in self.children.iter().enumerate() { + match skip.cmp(&child.len()) { + Ordering::Greater => { + // not in this child at all + // take off the number of elements in the child as well as the next element + skip -= child.len() + 1; + } + Ordering::Equal => { + // just try the element + skip -= child.len(); + if let Some(e) = self.elements.get(child_index) { + if query.query_element_with_metadata(&ops[*e], m) == QueryResult::Finish + { + return true; + } + } + } + 
Ordering::Less => { + // descend and try find it + match query.query_node_with_metadata(child, m, ops) { + QueryResult::Descend => { + // search in the child node, passing in the number of items left to + // skip + if child.search(query, m, ops, Some(skip)) { + return true; + } + } + QueryResult::Finish => return true, + QueryResult::Next => (), + QueryResult::Skip(_) => panic!("had skip from non-root node"), + } + if let Some(e) = self.elements.get(child_index) { + if query.query_element_with_metadata(&ops[*e], m) == QueryResult::Finish + { + return true; + } + } + // reset the skip to zero so we continue iterating normally + skip = 0; + } + } + } + false + } + } + + pub(crate) fn len(&self) -> usize { + self.length + } + + fn reindex(&mut self, ops: &[Op]) { + let mut index = Index::new(); + for c in &self.children { + index.merge(&c.index); + } + for i in &self.elements { + index.insert(&ops[*i]); + } + self.index = index + } + + pub(crate) fn is_leaf(&self) -> bool { + self.children.is_empty() + } + + pub(crate) fn is_full(&self) -> bool { + self.elements.len() >= 2 * B - 1 + } + + /// Returns the child index and the given index adjusted for the cumulative index before that + /// child. 
+ fn find_child_index(&self, index: usize) -> (usize, usize) { + let mut cumulative_len = 0; + for (child_index, child) in self.children.iter().enumerate() { + if cumulative_len + child.len() >= index { + return (child_index, index - cumulative_len); + } else { + cumulative_len += child.len() + 1; + } + } + panic!("index {} not found in node with len {}", index, self.len()) + } + + pub(crate) fn insert_into_non_full_node(&mut self, index: usize, element: usize, ops: &[Op]) { + assert!(!self.is_full()); + + self.index.insert(&ops[element]); + + if self.is_leaf() { + self.length += 1; + self.elements.insert(index, element); + } else { + let (child_index, sub_index) = self.find_child_index(index); + let child = &mut self.children[child_index]; + + if child.is_full() { + self.split_child(child_index, ops); + + // child structure has changed so we need to find the index again + let (child_index, sub_index) = self.find_child_index(index); + let child = &mut self.children[child_index]; + child.insert_into_non_full_node(sub_index, element, ops); + } else { + child.insert_into_non_full_node(sub_index, element, ops); + } + self.length += 1; + } + } + + // A utility function to split the child `full_child_index` of this node + // Note that `full_child_index` must be full when this function is called. + pub(crate) fn split_child(&mut self, full_child_index: usize, ops: &[Op]) { + let original_len_self = self.len(); + + let full_child = &mut self.children[full_child_index]; + + // Create a new node which is going to store (B-1) keys + // of the full child. 
+ let mut successor_sibling = OpTreeNode::new(); + + let original_len = full_child.len(); + assert!(full_child.is_full()); + + successor_sibling.elements = full_child.elements.split_off(B); + + if !full_child.is_leaf() { + successor_sibling.children = full_child.children.split_off(B); + } + + let middle = full_child.elements.pop().unwrap(); + + full_child.length = + full_child.elements.len() + full_child.children.iter().map(|c| c.len()).sum::(); + + successor_sibling.length = successor_sibling.elements.len() + + successor_sibling + .children + .iter() + .map(|c| c.len()) + .sum::(); + + let z_len = successor_sibling.len(); + + let full_child_len = full_child.len(); + + full_child.reindex(ops); + successor_sibling.reindex(ops); + + self.children + .insert(full_child_index + 1, successor_sibling); + + self.elements.insert(full_child_index, middle); + + assert_eq!(full_child_len + z_len + 1, original_len, "{:#?}", self); + + assert_eq!(original_len_self, self.len()); + } + + fn remove_from_leaf(&mut self, index: usize) -> usize { + self.length -= 1; + self.elements.remove(index) + } + + fn remove_element_from_non_leaf( + &mut self, + index: usize, + element_index: usize, + ops: &[Op], + ) -> usize { + self.length -= 1; + if self.children[element_index].elements.len() >= B { + let total_index = self.cumulative_index(element_index); + // recursively delete index - 1 in predecessor_node + let predecessor = self.children[element_index].remove(index - 1 - total_index, ops); + // replace element with that one + mem::replace(&mut self.elements[element_index], predecessor) + } else if self.children[element_index + 1].elements.len() >= B { + // recursively delete index + 1 in successor_node + let total_index = self.cumulative_index(element_index + 1); + let successor = self.children[element_index + 1].remove(index + 1 - total_index, ops); + // replace element with that one + mem::replace(&mut self.elements[element_index], successor) + } else { + let middle_element = 
self.elements.remove(element_index); + let successor_child = self.children.remove(element_index + 1); + self.children[element_index].merge(middle_element, successor_child, ops); + + let total_index = self.cumulative_index(element_index); + self.children[element_index].remove(index - total_index, ops) + } + } + + fn cumulative_index(&self, child_index: usize) -> usize { + self.children[0..child_index] + .iter() + .map(|c| c.len() + 1) + .sum() + } + + fn remove_from_internal_child( + &mut self, + index: usize, + mut child_index: usize, + ops: &[Op], + ) -> usize { + if self.children[child_index].elements.len() < B + && if child_index > 0 { + self.children[child_index - 1].elements.len() < B + } else { + true + } + && if child_index + 1 < self.children.len() { + self.children[child_index + 1].elements.len() < B + } else { + true + } + { + // if the child and its immediate siblings have B-1 elements merge the child + // with one sibling, moving an element from this node into the new merged node + // to be the median + + if child_index > 0 { + let middle = self.elements.remove(child_index - 1); + + // use the predessor sibling + let successor = self.children.remove(child_index); + child_index -= 1; + + self.children[child_index].merge(middle, successor, ops); + } else { + let middle = self.elements.remove(child_index); + + // use the sucessor sibling + let successor = self.children.remove(child_index + 1); + + self.children[child_index].merge(middle, successor, ops); + } + } else if self.children[child_index].elements.len() < B { + if child_index > 0 + && self + .children + .get(child_index - 1) + .map_or(false, |c| c.elements.len() >= B) + { + let last_element = self.children[child_index - 1].elements.pop().unwrap(); + assert!(!self.children[child_index - 1].elements.is_empty()); + self.children[child_index - 1].length -= 1; + self.children[child_index - 1] + .index + .remove(&ops[last_element]); + + let parent_element = + mem::replace(&mut self.elements[child_index - 
1], last_element); + + self.children[child_index] + .index + .insert(&ops[parent_element]); + self.children[child_index] + .elements + .insert(0, parent_element); + self.children[child_index].length += 1; + + if let Some(last_child) = self.children[child_index - 1].children.pop() { + self.children[child_index - 1].length -= last_child.len(); + self.children[child_index - 1].reindex(ops); + self.children[child_index].length += last_child.len(); + self.children[child_index].children.insert(0, last_child); + self.children[child_index].reindex(ops); + } + } else if self + .children + .get(child_index + 1) + .map_or(false, |c| c.elements.len() >= B) + { + let first_element = self.children[child_index + 1].elements.remove(0); + self.children[child_index + 1] + .index + .remove(&ops[first_element]); + self.children[child_index + 1].length -= 1; + + assert!(!self.children[child_index + 1].elements.is_empty()); + + let parent_element = mem::replace(&mut self.elements[child_index], first_element); + + self.children[child_index].length += 1; + self.children[child_index] + .index + .insert(&ops[parent_element]); + self.children[child_index].elements.push(parent_element); + + if !self.children[child_index + 1].is_leaf() { + let first_child = self.children[child_index + 1].children.remove(0); + self.children[child_index + 1].length -= first_child.len(); + self.children[child_index + 1].reindex(ops); + self.children[child_index].length += first_child.len(); + + self.children[child_index].children.push(first_child); + self.children[child_index].reindex(ops); + } + } + } + self.length -= 1; + let total_index = self.cumulative_index(child_index); + self.children[child_index].remove(index - total_index, ops) + } + + pub(crate) fn check(&self) -> usize { + let l = self.elements.len() + self.children.iter().map(|c| c.check()).sum::(); + assert_eq!(self.len(), l, "{:#?}", self); + + l + } + + pub(crate) fn remove(&mut self, index: usize, ops: &[Op]) -> usize { + let original_len = 
self.len(); + if self.is_leaf() { + let v = self.remove_from_leaf(index); + self.index.remove(&ops[v]); + assert_eq!(original_len, self.len() + 1); + debug_assert_eq!(self.check(), self.len()); + v + } else { + let mut total_index = 0; + for (child_index, child) in self.children.iter().enumerate() { + match (total_index + child.len()).cmp(&index) { + Ordering::Less => { + // should be later on in the loop + total_index += child.len() + 1; + continue; + } + Ordering::Equal => { + let v = self.remove_element_from_non_leaf( + index, + min(child_index, self.elements.len() - 1), + ops, + ); + self.index.remove(&ops[v]); + assert_eq!(original_len, self.len() + 1); + debug_assert_eq!(self.check(), self.len()); + return v; + } + Ordering::Greater => { + let v = self.remove_from_internal_child(index, child_index, ops); + self.index.remove(&ops[v]); + assert_eq!(original_len, self.len() + 1); + debug_assert_eq!(self.check(), self.len()); + return v; + } + } + } + panic!( + "index not found to remove {} {} {} {}", + index, + total_index, + self.len(), + self.check() + ); + } + } + + fn merge(&mut self, middle: usize, successor_sibling: OpTreeNode, ops: &[Op]) { + self.index.insert(&ops[middle]); + self.index.merge(&successor_sibling.index); + self.elements.push(middle); + self.elements.extend(successor_sibling.elements); + self.children.extend(successor_sibling.children); + self.length += successor_sibling.length + 1; + assert!(self.is_full()); + } + + /// Update the operation at the given index using the provided function. + /// + /// This handles updating the indices after the update. 
+ pub(crate) fn update<'a>( + &mut self, + index: usize, + vis: ChangeVisibility<'a>, + ) -> ChangeVisibility<'a> { + if self.is_leaf() { + self.index.change_vis(vis) + } else { + let mut cumulative_len = 0; + let len = self.len(); + for (_child_index, child) in self.children.iter_mut().enumerate() { + match (cumulative_len + child.len()).cmp(&index) { + Ordering::Less => { + cumulative_len += child.len() + 1; + } + Ordering::Equal => { + return self.index.change_vis(vis); + } + Ordering::Greater => { + let vis = child.update(index - cumulative_len, vis); + return self.index.change_vis(vis); + } + } + } + panic!("Invalid index to set: {} but len was {}", index, len) + } + } + + pub(crate) fn last(&self) -> usize { + if self.is_leaf() { + // node is never empty so this is safe + *self.elements.last().unwrap() + } else { + // if not a leaf then there is always at least one child + self.children.last().unwrap().last() + } + } + + pub(crate) fn get(&self, index: usize) -> Option { + if self.is_leaf() { + return self.elements.get(index).copied(); + } else { + let mut cumulative_len = 0; + for (child_index, child) in self.children.iter().enumerate() { + match (cumulative_len + child.len()).cmp(&index) { + Ordering::Less => { + cumulative_len += child.len() + 1; + } + Ordering::Equal => return self.elements.get(child_index).copied(), + Ordering::Greater => { + return child.get(index - cumulative_len); + } + } + } + } + None + } +} diff --git a/rust/automerge/src/query.rs b/rust/automerge/src/query.rs index fefac401..9707da33 100644 --- a/rust/automerge/src/query.rs +++ b/rust/automerge/src/query.rs @@ -79,11 +79,12 @@ pub(crate) trait TreeQuery<'a>: Clone + Debug { &mut self, child: &'a OpTreeNode, _m: &OpSetMetadata, + ops: &[Op], ) -> QueryResult { - self.query_node(child) + self.query_node(child, ops) } - fn query_node(&mut self, _child: &'a OpTreeNode) -> QueryResult { + fn query_node(&mut self, _child: &'a OpTreeNode, _ops: &[Op]) -> QueryResult { 
QueryResult::Descend } @@ -291,7 +292,7 @@ impl VisWindow { } } -pub(crate) fn binary_search_by(node: &OpTreeNode, f: F) -> usize +pub(crate) fn binary_search_by(node: &OpTreeNode, ops: &[Op], f: F) -> usize where F: Fn(&Op) -> Ordering, { @@ -299,7 +300,7 @@ where let mut left = 0; while left < right { let seq = (left + right) / 2; - if f(node.get(seq).unwrap()) == Ordering::Less { + if f(&ops[node.get(seq).unwrap()]) == Ordering::Less { left = seq + 1; } else { right = seq; diff --git a/rust/automerge/src/query/elem_id_pos.rs b/rust/automerge/src/query/elem_id_pos.rs index 250501fe..8eecd7e0 100644 --- a/rust/automerge/src/query/elem_id_pos.rs +++ b/rust/automerge/src/query/elem_id_pos.rs @@ -1,6 +1,6 @@ use crate::{ op_tree::OpTreeNode, - types::{ElemId, Key, ListEncoding}, + types::{ElemId, Key, ListEncoding, Op}, }; use super::{QueryResult, TreeQuery}; @@ -34,7 +34,7 @@ impl ElemIdPos { } impl<'a> TreeQuery<'a> for ElemIdPos { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + fn query_node(&mut self, child: &OpTreeNode, _ops: &[Op]) -> QueryResult { // if index has our element then we can continue if child.index.has_visible(&Key::Seq(self.elemid)) { // element is in this node somewhere diff --git a/rust/automerge/src/query/insert.rs b/rust/automerge/src/query/insert.rs index 12fae5b8..0dc0e98d 100644 --- a/rust/automerge/src/query/insert.rs +++ b/rust/automerge/src/query/insert.rs @@ -71,7 +71,7 @@ impl<'a> TreeQuery<'a> for InsertNth { false } - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + fn query_node(&mut self, child: &OpTreeNode, ops: &[Op]) -> QueryResult { // if this node has some visible elements then we may find our target within let mut num_vis = child.index.visible_len(self.encoding); if let Some(last_seen) = self.last_seen { @@ -94,7 +94,7 @@ impl<'a> TreeQuery<'a> for InsertNth { // - the insert was at a previous node and this is a long run of overwrites so last_seen should already be set correctly // - the 
visible op is in this node and the elemid references it so it can be set here // - the visible op is in a future node and so it will be counted as seen there - let last_elemid = child.last().elemid_or_key(); + let last_elemid = ops[child.last()].elemid_or_key(); if child.index.has_visible(&last_elemid) { self.last_seen = Some(last_elemid); } diff --git a/rust/automerge/src/query/keys.rs b/rust/automerge/src/query/keys.rs index 30436f31..edda4fe9 100644 --- a/rust/automerge/src/query/keys.rs +++ b/rust/automerge/src/query/keys.rs @@ -1,4 +1,4 @@ -use crate::op_tree::OpTreeNode; +use crate::op_tree::OpTreeInternal; use crate::types::Key; use std::fmt::Debug; @@ -8,17 +8,17 @@ pub(crate) struct Keys<'a> { last_key: Option, index_back: usize, last_key_back: Option, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, } impl<'a> Keys<'a> { - pub(crate) fn new(root_child: &'a OpTreeNode) -> Self { + pub(crate) fn new(op_tree: &'a OpTreeInternal) -> Self { Self { index: 0, last_key: None, - index_back: root_child.len(), + index_back: op_tree.len(), last_key_back: None, - root_child, + op_tree, } } } @@ -28,7 +28,7 @@ impl<'a> Iterator for Keys<'a> { fn next(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; self.index += 1; if Some(op.elemid_or_key()) != self.last_key && op.visible() { self.last_key = Some(op.elemid_or_key()); @@ -42,7 +42,7 @@ impl<'a> Iterator for Keys<'a> { impl<'a> DoubleEndedIterator for Keys<'a> { fn next_back(&mut self) -> Option { for i in (self.index..self.index_back).rev() { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; self.index_back -= 1; if Some(op.elemid_or_key()) != self.last_key_back && op.visible() { self.last_key_back = Some(op.elemid_or_key()); diff --git a/rust/automerge/src/query/keys_at.rs b/rust/automerge/src/query/keys_at.rs index 71da2927..bf5b5e0e 100644 --- a/rust/automerge/src/query/keys_at.rs +++ 
b/rust/automerge/src/query/keys_at.rs @@ -1,4 +1,4 @@ -use crate::op_tree::OpTreeNode; +use crate::op_tree::OpTreeInternal; use crate::query::VisWindow; use crate::types::{Clock, Key}; use std::fmt::Debug; @@ -11,19 +11,19 @@ pub(crate) struct KeysAt<'a> { last_key: Option, index_back: usize, last_key_back: Option, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, } impl<'a> KeysAt<'a> { - pub(crate) fn new(root_child: &'a OpTreeNode, clock: Clock) -> Self { + pub(crate) fn new(op_tree: &'a OpTreeInternal, clock: Clock) -> Self { Self { clock, window: VisWindow::default(), index: 0, last_key: None, - index_back: root_child.len(), + index_back: op_tree.len(), last_key_back: None, - root_child, + op_tree, } } } @@ -33,7 +33,7 @@ impl<'a> Iterator for KeysAt<'a> { fn next(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; if Some(op.elemid_or_key()) != self.last_key && visible { @@ -48,7 +48,7 @@ impl<'a> Iterator for KeysAt<'a> { impl<'a> DoubleEndedIterator for KeysAt<'a> { fn next_back(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index_back -= 1; if Some(op.elemid_or_key()) != self.last_key_back && visible { diff --git a/rust/automerge/src/query/len.rs b/rust/automerge/src/query/len.rs index 0dce4f85..9134b11f 100644 --- a/rust/automerge/src/query/len.rs +++ b/rust/automerge/src/query/len.rs @@ -1,6 +1,6 @@ use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery}; -use crate::types::ListEncoding; +use crate::types::{ListEncoding, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -16,7 +16,7 @@ impl Len { } impl<'a> TreeQuery<'a> for Len { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + fn query_node(&mut 
self, child: &OpTreeNode, _ops: &[Op]) -> QueryResult { self.len = child.index.visible_len(self.encoding); QueryResult::Finish } diff --git a/rust/automerge/src/query/list_range.rs b/rust/automerge/src/query/list_range.rs index d3206af3..d01082ab 100644 --- a/rust/automerge/src/query/list_range.rs +++ b/rust/automerge/src/query/list_range.rs @@ -1,5 +1,5 @@ use crate::exid::ExId; -use crate::op_tree::OpTreeNode; +use crate::op_tree::OpTreeInternal; use crate::types::{ElemId, OpId}; use crate::values::ValueIter; use crate::{Automerge, Value}; @@ -14,19 +14,19 @@ pub(crate) struct ListRange<'a, R: RangeBounds> { last_elemid: Option, next_result: Option<(usize, Value<'a>, OpId)>, index_back: usize, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, } impl<'a, R: RangeBounds> ListRange<'a, R> { - pub(crate) fn new(range: R, root_child: &'a OpTreeNode) -> Self { + pub(crate) fn new(range: R, op_tree: &'a OpTreeInternal) -> Self { Self { range, index: 0, // FIXME root_child.seek_to_pos(range.start) pos: 0, // FIXME range.start last_elemid: None, next_result: None, - index_back: root_child.len(), - root_child, + index_back: op_tree.len(), + op_tree, } } } @@ -45,7 +45,7 @@ impl<'a, R: RangeBounds> Iterator for ListRange<'a, R> { // point and stop at the end point and not needless scan all the ops before and after the range fn next(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; self.index += 1; if op.visible() { if op.elemid() != self.last_elemid { diff --git a/rust/automerge/src/query/list_range_at.rs b/rust/automerge/src/query/list_range_at.rs index 5c7257af..33cdf548 100644 --- a/rust/automerge/src/query/list_range_at.rs +++ b/rust/automerge/src/query/list_range_at.rs @@ -1,6 +1,6 @@ use super::VisWindow; use crate::exid::ExId; -use crate::op_tree::OpTreeNode; +use crate::op_tree::OpTreeInternal; use crate::types::{Clock, ElemId, OpId}; use crate::values::ValueIter; use 
crate::{Automerge, Value}; @@ -15,7 +15,7 @@ pub(crate) struct ListRangeAt<'a, R: RangeBounds> { last_elemid: Option, next_result: Option<(usize, Value<'a>, OpId)>, index_back: usize, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, clock: Clock, window: VisWindow, } @@ -27,15 +27,15 @@ impl<'a, R: RangeBounds> ValueIter<'a> for ListRangeAt<'a, R> { } impl<'a, R: RangeBounds> ListRangeAt<'a, R> { - pub(crate) fn new(range: R, clock: Clock, root_child: &'a OpTreeNode) -> Self { + pub(crate) fn new(range: R, clock: Clock, op_tree: &'a OpTreeInternal) -> Self { Self { range, index: 0, // FIXME root_child.seek_to_pos(range.start) pos: 0, // FIXME range.start last_elemid: None, next_result: None, - index_back: root_child.len(), - root_child, + index_back: op_tree.len(), + op_tree, clock, window: VisWindow::default(), } @@ -47,7 +47,7 @@ impl<'a, R: RangeBounds> Iterator for ListRangeAt<'a, R> { fn next(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; if visible { diff --git a/rust/automerge/src/query/list_vals.rs b/rust/automerge/src/query/list_vals.rs index 4ad2f47b..6c056621 100644 --- a/rust/automerge/src/query/list_vals.rs +++ b/rust/automerge/src/query/list_vals.rs @@ -19,10 +19,10 @@ impl ListVals { } impl<'a> TreeQuery<'a> for ListVals { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + fn query_node(&mut self, child: &OpTreeNode, ops: &[Op]) -> QueryResult { let start = 0; for pos in start..child.len() { - let op = child.get(pos).unwrap(); + let op = &ops[child.get(pos).unwrap()]; if op.insert { self.last_elem = None; } diff --git a/rust/automerge/src/query/map_range.rs b/rust/automerge/src/query/map_range.rs index 81334ca4..909312db 100644 --- a/rust/automerge/src/query/map_range.rs +++ b/rust/automerge/src/query/map_range.rs @@ -1,5 +1,5 @@ use crate::exid::ExId; -use 
crate::op_tree::{OpSetMetadata, OpTreeNode}; +use crate::op_tree::{OpSetMetadata, OpTreeInternal}; use crate::types::{Key, OpId}; use crate::values::ValueIter; use crate::{Automerge, Value}; @@ -14,7 +14,7 @@ pub(crate) struct MapRange<'a, R: RangeBounds> { next_result: Option<(&'a str, Value<'a>, OpId)>, index_back: usize, last_key_back: Option, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, meta: &'a OpSetMetadata, } @@ -25,15 +25,15 @@ impl<'a, R: RangeBounds> ValueIter<'a> for MapRange<'a, R> { } impl<'a, R: RangeBounds> MapRange<'a, R> { - pub(crate) fn new(range: R, root_child: &'a OpTreeNode, meta: &'a OpSetMetadata) -> Self { + pub(crate) fn new(range: R, op_tree: &'a OpTreeInternal, meta: &'a OpSetMetadata) -> Self { Self { range, index: 0, last_key: None, next_result: None, - index_back: root_child.len(), + index_back: op_tree.len(), last_key_back: None, - root_child, + op_tree, meta, } } @@ -47,7 +47,7 @@ impl<'a, R: RangeBounds> Iterator for MapRange<'a, R> { // point and stop at the end point and not needless scan all the ops before and after the range fn next(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; self.index += 1; if op.visible() { let prop = match op.key { @@ -72,7 +72,7 @@ impl<'a, R: RangeBounds> Iterator for MapRange<'a, R> { impl<'a, R: RangeBounds> DoubleEndedIterator for MapRange<'a, R> { fn next_back(&mut self) -> Option { for i in (self.index..self.index_back).rev() { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; self.index_back -= 1; if Some(op.key) != self.last_key_back && op.visible() { diff --git a/rust/automerge/src/query/map_range_at.rs b/rust/automerge/src/query/map_range_at.rs index 84453955..c5c5af06 100644 --- a/rust/automerge/src/query/map_range_at.rs +++ b/rust/automerge/src/query/map_range_at.rs @@ -1,6 +1,6 @@ use crate::clock::Clock; use crate::exid::ExId; -use crate::op_tree::{OpSetMetadata, 
OpTreeNode}; +use crate::op_tree::{OpSetMetadata, OpTreeInternal}; use crate::types::{Key, OpId}; use crate::values::ValueIter; use crate::{Automerge, Value}; @@ -22,7 +22,7 @@ pub(crate) struct MapRangeAt<'a, R: RangeBounds> { index_back: usize, last_key_back: Option, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, meta: &'a OpSetMetadata, } @@ -35,7 +35,7 @@ impl<'a, R: RangeBounds> ValueIter<'a> for MapRangeAt<'a, R> { impl<'a, R: RangeBounds> MapRangeAt<'a, R> { pub(crate) fn new( range: R, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, meta: &'a OpSetMetadata, clock: Clock, ) -> Self { @@ -46,9 +46,9 @@ impl<'a, R: RangeBounds> MapRangeAt<'a, R> { index: 0, last_key: None, next_result: None, - index_back: root_child.len(), + index_back: op_tree.len(), last_key_back: None, - root_child, + op_tree, meta, } } @@ -59,7 +59,7 @@ impl<'a, R: RangeBounds> Iterator for MapRangeAt<'a, R> { fn next(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; if visible { @@ -85,7 +85,7 @@ impl<'a, R: RangeBounds> Iterator for MapRangeAt<'a, R> { impl<'a, R: RangeBounds> DoubleEndedIterator for MapRangeAt<'a, R> { fn next_back(&mut self) -> Option { for i in (self.index..self.index_back).rev() { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index_back -= 1; if Some(op.key) != self.last_key_back && visible { diff --git a/rust/automerge/src/query/nth.rs b/rust/automerge/src/query/nth.rs index a286c4e2..ed374b9b 100644 --- a/rust/automerge/src/query/nth.rs +++ b/rust/automerge/src/query/nth.rs @@ -73,7 +73,7 @@ impl<'a> TreeQuery<'a> for Nth<'a> { false } - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + fn query_node(&mut self, child: &OpTreeNode, ops: &[Op]) -> QueryResult { let mut num_vis = 
child.index.visible_len(self.encoding); if let Some(last_seen) = self.last_seen { if child.index.has_visible(&last_seen) { @@ -94,7 +94,7 @@ impl<'a> TreeQuery<'a> for Nth<'a> { // - the insert was at a previous node and this is a long run of overwrites so last_seen should already be set correctly // - the visible op is in this node and the elemid references it so it can be set here // - the visible op is in a future node and so it will be counted as seen there - let last_elemid = child.last().elemid_or_key(); + let last_elemid = ops[child.last()].elemid_or_key(); if child.index.has_visible(&last_elemid) { self.last_seen = Some(last_elemid); } diff --git a/rust/automerge/src/query/opid.rs b/rust/automerge/src/query/opid.rs index aa3a45e6..3d4c8b24 100644 --- a/rust/automerge/src/query/opid.rs +++ b/rust/automerge/src/query/opid.rs @@ -33,7 +33,7 @@ impl OpIdSearch { } impl<'a> TreeQuery<'a> for OpIdSearch { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + fn query_node(&mut self, child: &OpTreeNode, _ops: &[Op]) -> QueryResult { if child.index.ops.contains(&self.target) { QueryResult::Descend } else { diff --git a/rust/automerge/src/query/opid_vis.rs b/rust/automerge/src/query/opid_vis.rs index 8a4b6a10..c0d2cc89 100644 --- a/rust/automerge/src/query/opid_vis.rs +++ b/rust/automerge/src/query/opid_vis.rs @@ -28,7 +28,7 @@ impl OpIdVisSearch { } impl<'a> TreeQuery<'a> for OpIdVisSearch { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + fn query_node(&mut self, child: &OpTreeNode, _ops: &[Op]) -> QueryResult { if child.index.ops.contains(&self.target) { QueryResult::Descend } else { diff --git a/rust/automerge/src/query/prop.rs b/rust/automerge/src/query/prop.rs index 89fa18f0..f6062ec6 100644 --- a/rust/automerge/src/query/prop.rs +++ b/rust/automerge/src/query/prop.rs @@ -37,6 +37,7 @@ impl<'a> TreeQuery<'a> for Prop<'a> { &mut self, child: &'a OpTreeNode, m: &OpSetMetadata, + ops: &[Op], ) -> QueryResult { if let Some(Start { idx: 
start, @@ -62,7 +63,7 @@ impl<'a> TreeQuery<'a> for Prop<'a> { } } else { // in the root node find the first op position for the key - let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); + let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.key)); self.start = Some(Start { idx: start, optree_len: child.len(), diff --git a/rust/automerge/src/query/prop_at.rs b/rust/automerge/src/query/prop_at.rs index 08b1cb59..f0c2eedc 100644 --- a/rust/automerge/src/query/prop_at.rs +++ b/rust/automerge/src/query/prop_at.rs @@ -29,12 +29,13 @@ impl<'a> TreeQuery<'a> for PropAt { &mut self, child: &'a OpTreeNode, m: &OpSetMetadata, + ops: &[Op], ) -> QueryResult { - let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); + let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.key)); let mut window: VisWindow = Default::default(); self.pos = start; for pos in start..child.len() { - let op = child.get(pos).unwrap(); + let op = &ops[child.get(pos).unwrap()]; if op.key != self.key { break; } diff --git a/rust/automerge/src/query/seek_op.rs b/rust/automerge/src/query/seek_op.rs index 70d52d45..7ca3e9d4 100644 --- a/rust/automerge/src/query/seek_op.rs +++ b/rust/automerge/src/query/seek_op.rs @@ -43,14 +43,19 @@ impl<'a> SeekOp<'a> { } impl<'a> TreeQuery<'a> for SeekOp<'a> { - fn query_node_with_metadata(&mut self, child: &OpTreeNode, m: &OpSetMetadata) -> QueryResult { + fn query_node_with_metadata( + &mut self, + child: &OpTreeNode, + m: &OpSetMetadata, + ops: &[Op], + ) -> QueryResult { if self.found { return QueryResult::Descend; } match self.op.key { Key::Seq(HEAD) => { while self.pos < child.len() { - let op = child.get(self.pos).unwrap(); + let op = &ops[child.get(self.pos).unwrap()]; if op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less { break; } @@ -82,7 +87,7 @@ impl<'a> TreeQuery<'a> for SeekOp<'a> { } } else { // in the root node find the first op position for the key - let start = 
binary_search_by(child, |op| m.key_cmp(&op.key, &self.op.key)); + let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.op.key)); self.start = Some(start); self.pos = start; QueryResult::Skip(start) diff --git a/rust/automerge/src/query/seek_op_with_patch.rs b/rust/automerge/src/query/seek_op_with_patch.rs index f029c5db..0cc48b37 100644 --- a/rust/automerge/src/query/seek_op_with_patch.rs +++ b/rust/automerge/src/query/seek_op_with_patch.rs @@ -72,6 +72,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { &mut self, child: &'a OpTreeNode, m: &OpSetMetadata, + ops: &[Op], ) -> QueryResult { if self.found { return QueryResult::Descend; @@ -82,7 +83,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // the opId of the operation being inserted. Key::Seq(e) if e == HEAD => { while self.pos < child.len() { - let op = child.get(self.pos).unwrap(); + let op = &ops[child.get(self.pos).unwrap()]; if op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less { break; } @@ -123,7 +124,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // the last operation's elemId regardless of whether it's visible or not. // This will lead to incorrect counting if `last_seen` is not visible: it's // not counted towards `num_vis`, so we shouldn't be subtracting 1. - self.last_seen = Some(child.last().elemid_or_key()); + self.last_seen = Some(ops[child.last()].elemid_or_key()); } QueryResult::Next } @@ -148,7 +149,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // in the root node find the first op position for the key // Search for the place where we need to insert the new operation. 
First find the // first op with a key >= the key we're updating - let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.op.key)); + let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.op.key)); self.start = Some(start); self.pos = start; QueryResult::Skip(start) diff --git a/rust/automerge/src/visualisation.rs b/rust/automerge/src/visualisation.rs index 6894f46f..31e9bbdb 100644 --- a/rust/automerge/src/visualisation.rs +++ b/rust/automerge/src/visualisation.rs @@ -1,4 +1,4 @@ -use crate::types::ObjId; +use crate::types::{ObjId, Op}; use fxhash::FxHasher; use std::{borrow::Cow, collections::HashMap, hash::BuildHasherDefault}; @@ -26,7 +26,7 @@ pub(crate) struct Node<'a> { #[derive(Clone)] pub(crate) enum NodeType<'a> { ObjRoot(crate::types::ObjId), - ObjTreeNode(ObjId, &'a crate::op_tree::OpTreeNode), + ObjTreeNode(ObjId, &'a crate::op_tree::OpTreeNode, &'a [Op]), } #[derive(Clone)] @@ -52,7 +52,13 @@ impl<'a> GraphVisualisation<'a> { let mut nodes = HashMap::new(); for (obj_id, tree) in trees { if let Some(root_node) = &tree.internal.root_node { - let tree_id = Self::construct_nodes(root_node, obj_id, &mut nodes, metadata); + let tree_id = Self::construct_nodes( + root_node, + &tree.internal.ops, + obj_id, + &mut nodes, + metadata, + ); let obj_tree_id = NodeId::default(); nodes.insert( obj_tree_id, @@ -77,6 +83,7 @@ impl<'a> GraphVisualisation<'a> { fn construct_nodes( node: &'a crate::op_tree::OpTreeNode, + ops: &'a [Op], objid: &ObjId, nodes: &mut HashMap>, m: &'a crate::op_set::OpSetMetadata, @@ -84,7 +91,7 @@ impl<'a> GraphVisualisation<'a> { let node_id = NodeId::default(); let mut child_ids = Vec::new(); for child in &node.children { - let child_id = Self::construct_nodes(child, objid, nodes, m); + let child_id = Self::construct_nodes(child, ops, objid, nodes, m); child_ids.push(child_id); } nodes.insert( @@ -92,7 +99,7 @@ impl<'a> GraphVisualisation<'a> { Node { id: node_id, children: child_ids, - node_type: 
NodeType::ObjTreeNode(*objid, node), + node_type: NodeType::ObjTreeNode(*objid, node, ops), metadata: m, }, ); @@ -138,7 +145,7 @@ impl<'a> dot::Labeller<'a, &'a Node<'a>, Edge> for GraphVisualisation<'a> { fn node_shape(&'a self, node: &&'a Node<'a>) -> Option> { let shape = match node.node_type { - NodeType::ObjTreeNode(_, _) => dot::LabelText::label("none"), + NodeType::ObjTreeNode(_, _, _) => dot::LabelText::label("none"), NodeType::ObjRoot(_) => dot::LabelText::label("ellipse"), }; Some(shape) @@ -146,8 +153,8 @@ impl<'a> dot::Labeller<'a, &'a Node<'a>, Edge> for GraphVisualisation<'a> { fn node_label(&'a self, n: &&Node<'a>) -> dot::LabelText<'a> { match n.node_type { - NodeType::ObjTreeNode(objid, tree_node) => dot::LabelText::HtmlStr( - OpTable::create(tree_node, &objid, n.metadata, &self.actor_shorthands) + NodeType::ObjTreeNode(objid, tree_node, ops) => dot::LabelText::HtmlStr( + OpTable::create(tree_node, ops, &objid, n.metadata, &self.actor_shorthands) .to_html() .into(), ), @@ -165,6 +172,7 @@ struct OpTable { impl OpTable { fn create<'a>( node: &'a crate::op_tree::OpTreeNode, + ops: &'a [Op], obj: &ObjId, metadata: &crate::op_set::OpSetMetadata, actor_shorthands: &HashMap, @@ -172,7 +180,7 @@ impl OpTable { let rows = node .elements .iter() - .map(|e| OpTableRow::create(e, obj, metadata, actor_shorthands)) + .map(|e| OpTableRow::create(&ops[*e], obj, metadata, actor_shorthands)) .collect(); OpTable { rows } } diff --git a/rust/edit-trace/automerge-js.js b/rust/edit-trace/automerge-js.js index 6a6d3389..2956d5d5 100644 --- a/rust/edit-trace/automerge-js.js +++ b/rust/edit-trace/automerge-js.js @@ -2,7 +2,7 @@ const { edits, finalText } = require('./editing-trace') const Automerge = require('../../javascript') -const start = new Date() +let start = new Date() let state = Automerge.from({text: ""}) state = Automerge.change(state, doc => { @@ -14,10 +14,16 @@ state = Automerge.change(state, doc => { Automerge.splice(doc, 'text', ... 
edit) } }) - -let _ = Automerge.save(state) console.log(`Done in ${new Date() - start} ms`) +start = new Date() +let bytes = Automerge.save(state) +console.log(`Save in ${new Date() - start} ms`) + +start = new Date() +let _load = Automerge.load(bytes) +console.log(`Load in ${new Date() - start} ms`) + if (state.text !== finalText) { throw new RangeError('ERROR: final text did not match expectation') } diff --git a/rust/edit-trace/automerge-wasm.js b/rust/edit-trace/automerge-wasm.js index 82786cd9..8f6f51af 100644 --- a/rust/edit-trace/automerge-wasm.js +++ b/rust/edit-trace/automerge-wasm.js @@ -16,11 +16,17 @@ for (let i = 0; i < edits.length; i++) { doc.splice(text, ...edit) } -let _ = doc.save() - console.log(`Done in ${new Date() - start} ms`) let t_time = new Date() +let saved = doc.save() +console.log(`doc.save in ${new Date() - t_time} ms`) + +t_time = new Date() +Automerge.load(saved) +console.log(`doc.load in ${new Date() - t_time} ms`) + +t_time = new Date() let t = doc.text(text); console.log(`doc.text in ${new Date() - t_time} ms`) diff --git a/rust/edit-trace/package.json b/rust/edit-trace/package.json index a9d1e0e0..acd37ac0 100644 --- a/rust/edit-trace/package.json +++ b/rust/edit-trace/package.json @@ -4,9 +4,9 @@ "main": "wasm-text.js", "license": "MIT", "scripts": { - "wasm": "0x -D prof wasm-text.js" + "wasm": "0x -D prof automerge-wasm.js" }, "devDependencies": { - "0x": "^4.11.0" + "0x": "^5.4.1" } } diff --git a/rust/edit-trace/src/main.rs b/rust/edit-trace/src/main.rs index f6924c7d..debe52db 100644 --- a/rust/edit-trace/src/main.rs +++ b/rust/edit-trace/src/main.rs @@ -28,16 +28,18 @@ fn main() -> Result<(), AutomergeError> { tx.splice_text(&text, pos, del, &vals)?; } tx.commit(); + println!("Done in {} ms", now.elapsed().as_millis()); let save = Instant::now(); - let _bytes = doc.save(); + let bytes = doc.save(); println!("Saved in {} ms", save.elapsed().as_millis()); - /* - let load = Instant::now(); - let _ = 
Automerge::load(&bytes).unwrap(); - println!("Loaded in {} ms", load.elapsed().as_millis()); - */ + let load = Instant::now(); + let _ = Automerge::load(&bytes).unwrap(); + println!("Loaded in {} ms", load.elapsed().as_millis()); + + let get_txt = Instant::now(); + doc.text(&text)?; + println!("Text in {} ms", get_txt.elapsed().as_millis()); - println!("Done in {} ms", now.elapsed().as_millis()); Ok(()) } From b78211ca65ae49b0794b004f80ec8350eb39abcf Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sun, 11 Dec 2022 10:56:20 -0800 Subject: [PATCH 662/730] change opid to (u32,u32) - 10% performance uptick (#473) --- rust/automerge/src/automerge.rs | 15 ++++---- rust/automerge/src/change.rs | 2 +- rust/automerge/src/clock.rs | 20 +++++------ .../src/columnar/column_range/key.rs | 4 +-- .../src/columnar/column_range/obj_id.rs | 2 +- .../src/columnar/column_range/opid.rs | 2 +- .../src/columnar/column_range/opid_list.rs | 2 +- .../src/columnar/encoding/properties.rs | 2 +- rust/automerge/src/op_set.rs | 14 ++++---- rust/automerge/src/op_tree.rs | 2 +- rust/automerge/src/op_tree/iter.rs | 2 +- rust/automerge/src/transaction/inner.rs | 2 +- rust/automerge/src/types.rs | 36 +++++++++++-------- rust/automerge/src/types/opids.rs | 5 +-- 14 files changed, 60 insertions(+), 50 deletions(-) diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs index 7a5340e6..5502456c 100644 --- a/rust/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ -487,7 +487,7 @@ impl Automerge { // do a direct get here b/c this could be foriegn and not be within the array // bounds let obj = if self.ops.m.actors.cache.get(*idx) == Some(actor) { - ObjId(OpId(*ctr, *idx)) + ObjId(OpId::new(*ctr, *idx)) } else { // FIXME - make a real error let idx = self @@ -496,7 +496,7 @@ impl Automerge { .actors .lookup(actor) .ok_or(AutomergeError::Fail)?; - ObjId(OpId(*ctr, idx)) + ObjId(OpId::new(*ctr, idx)) }; if let Some(obj_type) = self.ops.object_type(&obj) { Ok((obj, 
obj_type)) @@ -859,23 +859,26 @@ impl Automerge { .iter_ops() .enumerate() .map(|(i, c)| { - let id = OpId(change.start_op().get() + i as u64, actor); + let id = OpId::new(change.start_op().get() + i as u64, actor); let key = match &c.key { EncodedKey::Prop(n) => Key::Map(self.ops.m.props.cache(n.to_string())), EncodedKey::Elem(e) if e.is_head() => Key::Seq(ElemId::head()), EncodedKey::Elem(ElemId(o)) => { - Key::Seq(ElemId(OpId::new(actors[o.actor()], o.counter()))) + Key::Seq(ElemId(OpId::new(o.counter(), actors[o.actor()]))) } }; let obj = if c.obj.is_root() { ObjId::root() } else { - ObjId(OpId(c.obj.opid().counter(), actors[c.obj.opid().actor()])) + ObjId(OpId::new( + c.obj.opid().counter(), + actors[c.obj.opid().actor()], + )) }; let pred = c .pred .iter() - .map(|p| OpId::new(actors[p.actor()], p.counter())); + .map(|p| OpId::new(p.counter(), actors[p.actor()])); let pred = self.ops.m.sorted_opids(pred); ( obj, diff --git a/rust/automerge/src/change.rs b/rust/automerge/src/change.rs index 198c68fb..b5cae7df 100644 --- a/rust/automerge/src/change.rs +++ b/rust/automerge/src/change.rs @@ -356,7 +356,7 @@ pub(crate) mod gen { (0_u64..10) .prop_map(|num_ops| { (0..num_ops) - .map(|counter| OpId::new(0, counter)) + .map(|counter| OpId::new(counter, 0)) .collect::>() }) .prop_flat_map(move |opids| { diff --git a/rust/automerge/src/clock.rs b/rust/automerge/src/clock.rs index 11890ffb..79125323 100644 --- a/rust/automerge/src/clock.rs +++ b/rust/automerge/src/clock.rs @@ -59,8 +59,8 @@ impl Clock { } pub(crate) fn covers(&self, id: &OpId) -> bool { - if let Some(data) = self.0.get(&id.1) { - data.max_op >= id.0 + if let Some(data) = self.0.get(&id.actor()) { + data.max_op >= id.counter() } else { false } @@ -123,16 +123,16 @@ mod tests { clock.include(1, ClockData { max_op: 20, seq: 1 }); clock.include(2, ClockData { max_op: 10, seq: 2 }); - assert!(clock.covers(&OpId(10, 1))); - assert!(clock.covers(&OpId(20, 1))); - assert!(!clock.covers(&OpId(30, 1))); + 
assert!(clock.covers(&OpId::new(10, 1))); + assert!(clock.covers(&OpId::new(20, 1))); + assert!(!clock.covers(&OpId::new(30, 1))); - assert!(clock.covers(&OpId(5, 2))); - assert!(clock.covers(&OpId(10, 2))); - assert!(!clock.covers(&OpId(15, 2))); + assert!(clock.covers(&OpId::new(5, 2))); + assert!(clock.covers(&OpId::new(10, 2))); + assert!(!clock.covers(&OpId::new(15, 2))); - assert!(!clock.covers(&OpId(1, 3))); - assert!(!clock.covers(&OpId(100, 3))); + assert!(!clock.covers(&OpId::new(1, 3))); + assert!(!clock.covers(&OpId::new(100, 3))); } #[test] diff --git a/rust/automerge/src/columnar/column_range/key.rs b/rust/automerge/src/columnar/column_range/key.rs index 5283fc39..70ea8e1e 100644 --- a/rust/automerge/src/columnar/column_range/key.rs +++ b/rust/automerge/src/columnar/column_range/key.rs @@ -167,11 +167,11 @@ impl<'a> KeyIter<'a> { Ok(Some(Key::Prop(string))) } (Some(None) | None, Some(Some(0)), Some(None) | None) => { - Ok(Some(Key::Elem(ElemId(OpId(0, 0))))) + Ok(Some(Key::Elem(ElemId(OpId::new(0, 0))))) } (Some(Some(actor)), Some(Some(ctr)), Some(None) | None) => match ctr.try_into() { //Ok(ctr) => Some(Ok(Key::Elem(ElemId(OpId(ctr, actor as usize))))), - Ok(ctr) => Ok(Some(Key::Elem(ElemId(OpId::new(actor as usize, ctr))))), + Ok(ctr) => Ok(Some(Key::Elem(ElemId(OpId::new(ctr, actor as usize))))), Err(_) => Err(DecodeColumnError::invalid_value( "counter", "negative value for counter", diff --git a/rust/automerge/src/columnar/column_range/obj_id.rs b/rust/automerge/src/columnar/column_range/obj_id.rs index f6525b44..6a3e2ef0 100644 --- a/rust/automerge/src/columnar/column_range/obj_id.rs +++ b/rust/automerge/src/columnar/column_range/obj_id.rs @@ -133,7 +133,7 @@ impl<'a> ObjIdIter<'a> { .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; match (actor, counter) { (None | Some(None), None | Some(None)) => Ok(Some(ObjId::root())), - (Some(Some(a)), Some(Some(c))) => Ok(Some(ObjId(OpId(c, a as usize)))), + (Some(Some(a)), Some(Some(c))) => 
Ok(Some(ObjId(OpId::new(c, a as usize)))), (_, Some(Some(0))) => Ok(Some(ObjId::root())), (Some(None) | None, _) => Err(DecodeColumnError::unexpected_null("actor")), (_, Some(None) | None) => Err(DecodeColumnError::unexpected_null("counter")), diff --git a/rust/automerge/src/columnar/column_range/opid.rs b/rust/automerge/src/columnar/column_range/opid.rs index 592f6041..ae95d758 100644 --- a/rust/automerge/src/columnar/column_range/opid.rs +++ b/rust/automerge/src/columnar/column_range/opid.rs @@ -105,7 +105,7 @@ impl<'a> OpIdIter<'a> { .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; match (actor, counter) { (Some(Some(a)), Some(Some(c))) => match c.try_into() { - Ok(c) => Ok(Some(OpId(c, a as usize))), + Ok(c) => Ok(Some(OpId::new(c, a as usize))), Err(_) => Err(DecodeColumnError::invalid_value( "counter", "negative value encountered", diff --git a/rust/automerge/src/columnar/column_range/opid_list.rs b/rust/automerge/src/columnar/column_range/opid_list.rs index 03b92ccf..12279c08 100644 --- a/rust/automerge/src/columnar/column_range/opid_list.rs +++ b/rust/automerge/src/columnar/column_range/opid_list.rs @@ -203,7 +203,7 @@ impl<'a> OpIdListIter<'a> { .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; match (actor, counter) { (Some(Some(a)), Some(Some(ctr))) => match ctr.try_into() { - Ok(ctr) => p.push(OpId(ctr, a as usize)), + Ok(ctr) => p.push(OpId::new(ctr, a as usize)), Err(_e) => { return Err(DecodeColumnError::invalid_value( "counter", diff --git a/rust/automerge/src/columnar/encoding/properties.rs b/rust/automerge/src/columnar/encoding/properties.rs index a6345cad..a3bf1ed0 100644 --- a/rust/automerge/src/columnar/encoding/properties.rs +++ b/rust/automerge/src/columnar/encoding/properties.rs @@ -139,7 +139,7 @@ pub(crate) fn option_splice_scenario< } pub(crate) fn opid() -> impl Strategy + Clone { - (0..(i64::MAX as usize), 0..(i64::MAX as u64)).prop_map(|(actor, ctr)| OpId(ctr, actor)) + (0..(i64::MAX as usize), 0..(i64::MAX as 
u64)).prop_map(|(actor, ctr)| OpId::new(ctr, actor)) } pub(crate) fn elemid() -> impl Strategy + Clone { diff --git a/rust/automerge/src/op_set.rs b/rust/automerge/src/op_set.rs index 09bc256a..1f5a4486 100644 --- a/rust/automerge/src/op_set.rs +++ b/rust/automerge/src/op_set.rs @@ -55,7 +55,11 @@ impl OpSetInternal { if id == types::ROOT { ExId::Root } else { - ExId::Id(id.0, self.m.actors.cache[id.1].clone(), id.1) + ExId::Id( + id.counter(), + self.m.actors.cache[id.actor()].clone(), + id.actor(), + ) } } @@ -355,13 +359,7 @@ impl OpSetMetadata { } pub(crate) fn lamport_cmp(&self, left: OpId, right: OpId) -> Ordering { - match (left, right) { - (OpId(0, _), OpId(0, _)) => Ordering::Equal, - (OpId(0, _), OpId(_, _)) => Ordering::Less, - (OpId(_, _), OpId(0, _)) => Ordering::Greater, - (OpId(a, x), OpId(b, y)) if a == b => self.actors[x].cmp(&self.actors[y]), - (OpId(a, _), OpId(b, _)) => a.cmp(&b), - } + left.lamport_cmp(&right, &self.actors.cache) } pub(crate) fn sorted_opids>(&self, opids: I) -> OpIds { diff --git a/rust/automerge/src/op_tree.rs b/rust/automerge/src/op_tree.rs index 909a75a7..7de00dc3 100644 --- a/rust/automerge/src/op_tree.rs +++ b/rust/automerge/src/op_tree.rs @@ -325,7 +325,7 @@ mod tests { use super::*; fn op() -> Op { - let zero = OpId(0, 0); + let zero = OpId::new(0, 0); Op { id: zero, action: amp::OpType::Put(0.into()), diff --git a/rust/automerge/src/op_tree/iter.rs b/rust/automerge/src/op_tree/iter.rs index 5f2114c8..0b19f359 100644 --- a/rust/automerge/src/op_tree/iter.rs +++ b/rust/automerge/src/op_tree/iter.rs @@ -262,7 +262,7 @@ mod tests { fn op(counter: u64) -> Op { Op { action: OpType::Put(ScalarValue::Uint(counter)), - id: OpId(counter, 0), + id: OpId::new(counter, 0), key: Key::Map(0), succ: Default::default(), pred: Default::default(), diff --git a/rust/automerge/src/transaction/inner.rs b/rust/automerge/src/transaction/inner.rs index c9567b68..2099acef 100644 --- a/rust/automerge/src/transaction/inner.rs +++ 
b/rust/automerge/src/transaction/inner.rs @@ -240,7 +240,7 @@ impl TransactionInner { } fn next_id(&mut self) -> OpId { - OpId(self.start_op.get() + self.pending_ops() as u64, self.actor) + OpId::new(self.start_op.get() + self.pending_ops() as u64, self.actor) } fn next_insert(&mut self, key: Key, value: ScalarValue) -> Op { diff --git a/rust/automerge/src/types.rs b/rust/automerge/src/types.rs index b5da60d7..7bbf4353 100644 --- a/rust/automerge/src/types.rs +++ b/rust/automerge/src/types.rs @@ -3,10 +3,12 @@ use crate::legacy as amp; use serde::{Deserialize, Serialize}; use std::borrow::Cow; use std::cmp::Eq; +use std::cmp::Ordering; use std::fmt; use std::fmt::Display; use std::str::FromStr; use tinyvec::{ArrayVec, TinyVec}; +//use crate::indexed_cache::IndexedCache; mod opids; pub(crate) use opids::OpIds; @@ -253,17 +255,6 @@ pub(crate) trait Exportable { fn export(&self) -> Export; } -impl OpId { - #[inline] - pub(crate) fn counter(&self) -> u64 { - self.0 - } - #[inline] - pub(crate) fn actor(&self) -> usize { - self.1 - } -} - impl Exportable for ObjId { fn export(&self) -> Export { if self.0 == ROOT { @@ -421,11 +412,28 @@ impl Key { } #[derive(Debug, Clone, PartialOrd, Ord, Eq, PartialEq, Copy, Hash, Default)] -pub(crate) struct OpId(pub(crate) u64, pub(crate) usize); +pub(crate) struct OpId(u32, u32); impl OpId { - pub(crate) fn new(actor: usize, counter: u64) -> Self { - Self(counter, actor) + pub(crate) fn new(counter: u64, actor: usize) -> Self { + Self(counter as u32, actor as u32) + } + + #[inline] + pub(crate) fn counter(&self) -> u64 { + self.0 as u64 + } + + #[inline] + pub(crate) fn actor(&self) -> usize { + self.1 as usize + } + + #[inline] + pub(crate) fn lamport_cmp(&self, other: &OpId, actors: &[ActorId]) -> Ordering { + self.0 + .cmp(&other.0) + .then_with(|| actors[self.1 as usize].cmp(&actors[other.1 as usize])) } } diff --git a/rust/automerge/src/types/opids.rs b/rust/automerge/src/types/opids.rs index 3ebac93c..eaeed471 100644 --- 
a/rust/automerge/src/types/opids.rs +++ b/rust/automerge/src/types/opids.rs @@ -129,7 +129,8 @@ mod tests { fn gen_opid(actors: Vec) -> impl Strategy { (0..actors.len()).prop_flat_map(|actor_idx| { - (Just(actor_idx), 0..u64::MAX).prop_map(|(actor_idx, counter)| OpId(counter, actor_idx)) + (Just(actor_idx), 0..u64::MAX) + .prop_map(|(actor_idx, counter)| OpId::new(counter, actor_idx)) }) } @@ -190,7 +191,7 @@ mod tests { (OpId(0, _), OpId(0, _)) => Ordering::Equal, (OpId(0, _), OpId(_, _)) => Ordering::Less, (OpId(_, _), OpId(0, _)) => Ordering::Greater, - (OpId(a, x), OpId(b, y)) if a == b => actors[*x].cmp(&actors[*y]), + (OpId(a, x), OpId(b, y)) if a == b => actors[*x as usize].cmp(&actors[*y as usize]), (OpId(a, _), OpId(b, _)) => a.cmp(b), } } From 3229548fc7393bf55a401e328ab677e14694522e Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sun, 11 Dec 2022 13:26:00 -0800 Subject: [PATCH 663/730] update js dependencies and some lint errors (#474) --- javascript/package.json | 20 +++++----- javascript/src/index.ts | 29 ++++++++------- javascript/src/proxies.ts | 61 ++++--------------------------- rust/automerge-wasm/package.json | 21 +++++------ rust/automerge-wasm/test/apply.ts | 6 +-- rust/automerge-wasm/test/test.ts | 16 ++++---- 6 files changed, 54 insertions(+), 99 deletions(-) diff --git a/javascript/package.json b/javascript/package.json index 0dae9684..5fd2213e 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -45,21 +45,21 @@ }, "devDependencies": { "@types/expect": "^24.3.0", - "@types/mocha": "^9.1.1", - "@types/uuid": "^8.3.4", - "@typescript-eslint/eslint-plugin": "^5.25.0", - "@typescript-eslint/parser": "^5.25.0", - "eslint": "^8.15.0", + "@types/mocha": "^10.0.1", + "@types/uuid": "^9.0.0", + "@typescript-eslint/eslint-plugin": "^5.46.0", + "@typescript-eslint/parser": "^5.46.0", + "eslint": "^8.29.0", "fast-sha256": "^1.3.0", - "mocha": "^10.0.0", - "pako": "^2.0.4", + "mocha": "^10.2.0", + "pako": "^2.1.0", "ts-mocha": 
"^10.0.0", "ts-node": "^10.9.1", - "typedoc": "^0.23.16", - "typescript": "^4.6.4" + "typedoc": "^0.23.22", + "typescript": "^4.9.4" }, "dependencies": { "@automerge/automerge-wasm": "0.1.19", - "uuid": "^8.3" + "uuid": "^9.0.0" } } diff --git a/javascript/src/index.ts b/javascript/src/index.ts index 50306b4c..581f50d1 100644 --- a/javascript/src/index.ts +++ b/javascript/src/index.ts @@ -141,9 +141,9 @@ function importOpts(_actor?: ActorId | InitOptions): InitOptions { * random actor ID */ export function init(_opts?: ActorId | InitOptions): Doc { - let opts = importOpts(_opts) - let freeze = !!opts.freeze - let patchCallback = opts.patchCallback + const opts = importOpts(_opts) + const freeze = !!opts.freeze + const patchCallback = opts.patchCallback const handle = ApiHandler.create(opts.actor) handle.enablePatches(true) handle.enableFreeze(!!opts.freeze) @@ -170,7 +170,7 @@ export function init(_opts?: ActorId | InitOptions): Doc { export function view(doc: Doc, heads: Heads): Doc { const state = _state(doc) const handle = state.handle - return state.handle.materialize("/", heads, { ...state, handle, heads }) as any + return state.handle.materialize("/", heads, { ...state, handle, heads }) as Doc } /** @@ -291,9 +291,9 @@ function progressDocument(doc: Doc, heads: Heads | null, callback?: PatchC if (heads == null) { return doc } - let state = _state(doc) - let nextState = {...state, heads: undefined}; - let nextDoc = state.handle.applyPatches(doc, nextState, callback) + const state = _state(doc) + const nextState = {...state, heads: undefined}; + const nextDoc = state.handle.applyPatches(doc, nextState, callback) state.heads = heads return nextDoc } @@ -392,7 +392,7 @@ export function load(data: Uint8Array, _opts?: ActorId | InitOptions): Doc handle.enablePatches(true) handle.enableFreeze(!!opts.freeze) handle.registerDatatype("counter", (n) => new Counter(n)) - const doc: any = handle.materialize("/", undefined, {handle, heads: undefined, patchCallback}) as Doc 
+ const doc = handle.materialize("/", undefined, {handle, heads: undefined, patchCallback}) as Doc return doc } @@ -599,7 +599,7 @@ export function getLastLocalChange(doc: Doc): Change | undefined { * This is useful to determine if something is actually an automerge document, * if `doc` is not an automerge document this will return null. */ -export function getObjectId(doc: any, prop?: Prop): ObjID | null { +export function getObjectId(doc: Doc, prop?: Prop): ObjID | null { if (prop) { const state = _state(doc, false) const objectId = _obj(doc) @@ -619,7 +619,6 @@ export function getObjectId(doc: any, prop?: Prop): ObjID | null { * Note that this will crash if there are changes in `oldState` which are not in `newState`. */ export function getChanges(oldState: Doc, newState: Doc): Change[] { - const o = _state(oldState) const n = _state(newState) return n.handle.getChanges(getHeads(oldState)) } @@ -709,8 +708,8 @@ export function encodeSyncState(state: SyncState): Uint8Array { * @group sync */ export function decodeSyncState(state: Uint8Array): SyncState { - let sync = ApiHandler.decodeSyncState(state) - let result = ApiHandler.exportSyncState(sync) + const sync = ApiHandler.decodeSyncState(state) + const result = ApiHandler.exportSyncState(sync) sync.free() return result } @@ -848,7 +847,11 @@ export function toJS(doc: Doc): T { } export function isAutomerge(doc: unknown): boolean { - return getObjectId(doc) === "_root" && !!Reflect.get(doc as Object, STATE) + if (typeof doc == "object" && doc !== null) { + return getObjectId(doc) === "_root" && !!Reflect.get(doc, STATE) + } else { + return false + } } function isObject(obj: unknown): obj is Record { diff --git a/javascript/src/proxies.ts b/javascript/src/proxies.ts index 6c0035de..ff03be4d 100644 --- a/javascript/src/proxies.ts +++ b/javascript/src/proxies.ts @@ -3,7 +3,7 @@ import { Automerge, Heads, ObjID } from "@automerge/automerge-wasm" import { Prop } from "@automerge/automerge-wasm" import { AutomergeValue, 
ScalarValue, MapValue, ListValue } from "./types" import { Counter, getWriteableCounter } from "./counter" -import { STATE, TRACE, IS_PROXY, OBJECT_ID, COUNTER, INT, UINT, F64, TEXT } from "./constants" +import { STATE, TRACE, IS_PROXY, OBJECT_ID, COUNTER, INT, UINT, F64 } from "./constants" function parseListIndex(key) { if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) @@ -95,7 +95,7 @@ function import_value(value) { const MapHandler = { get (target, key) : AutomergeValue { - const { context, objectId, readonly, frozen, heads, cache } = target + const { context, objectId, cache } = target if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (key === OBJECT_ID) return objectId if (key === IS_PROXY) return true @@ -187,7 +187,7 @@ const MapHandler = { const ListHandler = { get (target, index) { - const {context, objectId, readonly, frozen, heads } = target + const {context, objectId, heads } = target index = parseListIndex(index) if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } } if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } @@ -236,11 +236,10 @@ const ListHandler = { break; } case "text": { - let text if (index >= context.length(objectId)) { - text = context.insertObject(objectId, index, value, "text") + context.insertObject(objectId, index, value, "text") } else { - text = context.putObject(objectId, index, value, "text") + context.putObject(objectId, index, value, "text") } break; } @@ -534,7 +533,7 @@ function listMethods(target) { find(f: (AutomergeValue, number) => boolean) : AutomergeValue | undefined { let index = 0 - for (let v of this) { + for (const v of this) { if (f(v, index)) { return v } @@ -544,7 +543,7 @@ function listMethods(target) { findIndex(f: (AutomergeValue, number) => boolean) : number { let index = 0 - for (let v of this) { + for (const v of this) { if (f(v, index)) { return index } @@ -582,7 +581,7 @@ function 
listMethods(target) { some(f: (AutomergeValue, number) => boolean) : boolean { let index = 0; - for (let v of this) { + for (const v of this) { if (f(v,index)) { return true } @@ -604,47 +603,3 @@ function listMethods(target) { return methods } -function textMethods(target) { - const {context, objectId, heads } = target - const methods = { - set (index: number, value) { - return this[index] = value - }, - get (index: number) : AutomergeValue { - return this[index] - }, - toString () : string { - return context.text(objectId, heads).replace(//g,'') - }, - toSpans () : AutomergeValue[] { - const spans : AutomergeValue[] = [] - let chars = '' - const length = context.length(objectId) - for (let i = 0; i < length; i++) { - const value = this[i] - if (typeof value === 'string') { - chars += value - } else { - if (chars.length > 0) { - spans.push(chars) - chars = '' - } - spans.push(value) - } - } - if (chars.length > 0) { - spans.push(chars) - } - return spans - }, - toJSON () : string { - return this.toString() - }, - indexOf(o, start = 0) { - const text = context.text(objectId) - return text.indexOf(o,start) - } - } - return methods -} - diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 45e7950e..7c02d820 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -40,21 +40,18 @@ "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, "devDependencies": { - "@types/expect": "^24.3.0", - "@types/jest": "^27.4.0", - "@types/mocha": "^9.1.0", - "@types/node": "^17.0.13", - "@types/uuid": "^8.3.4", - "@typescript-eslint/eslint-plugin": "^5.25.0", - "@typescript-eslint/parser": "^5.25.0", + "@types/mocha": "^10.0.1", + "@types/node": "^18.11.13", + "@typescript-eslint/eslint-plugin": "^5.46.0", + "@typescript-eslint/parser": "^5.46.0", "cross-env": "^7.0.3", - "eslint": "^8.16.0", + "eslint": "^8.29.0", "fast-sha256": "^1.3.0", - "mocha": "^9.1.3", - "pako": "^2.0.4", + "mocha": 
"^10.2.0", + "pako": "^2.1.0", "rimraf": "^3.0.2", - "ts-mocha": "^9.0.2", - "typescript": "^4.6.4" + "ts-mocha": "^10.0.0", + "typescript": "^4.9.4" }, "exports": { "browser": "./bundler/automerge_wasm.js", diff --git a/rust/automerge-wasm/test/apply.ts b/rust/automerge-wasm/test/apply.ts index c96ad75c..d4b8c95e 100644 --- a/rust/automerge-wasm/test/apply.ts +++ b/rust/automerge-wasm/test/apply.ts @@ -164,7 +164,7 @@ describe('Automerge', () => { it('should set the OBJECT_ID property on lists, maps, and text objects and not on scalars', () => { const doc1 = create('aaaa') - let mat: any = doc1.materialize("/") + const mat: any = doc1.materialize("/") doc1.enablePatches(true) doc1.registerDatatype("counter", (n: number) => new Counter(n)) doc1.put("/", "string", "string", "str") @@ -194,11 +194,11 @@ describe('Automerge', () => { it('should set the root OBJECT_ID to "_root"', () => { const doc1 = create('aaaa') - let mat: any = doc1.materialize("/") + const mat: any = doc1.materialize("/") assert.equal(_obj(mat), "_root") doc1.enablePatches(true) doc1.put("/", "key", "value") - let applied = doc1.applyPatches(mat) + const applied = doc1.applyPatches(mat) assert.equal(_obj(applied), "_root") }) diff --git a/rust/automerge-wasm/test/test.ts b/rust/automerge-wasm/test/test.ts index 64690b90..70b56c55 100644 --- a/rust/automerge-wasm/test/test.ts +++ b/rust/automerge-wasm/test/test.ts @@ -1953,7 +1953,7 @@ describe('Automerge', () => { assert.deepEqual(doc.length("/width2"), 12); assert.deepEqual(doc.length("/mixed"), 9); - let heads1 = doc.getHeads(); + const heads1 = doc.getHeads(); mat = doc.applyPatches(mat) @@ -2013,7 +2013,7 @@ describe('Automerge', () => { }) it('can handle non-characters embedded in text', () => { - let change : any = { + const change : any = { ops: [ { action: 'makeText', obj: '_root', key: 'bad_text', pred: [] }, { action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'A', pred: [] }, @@ -2030,7 +2030,7 @@ describe('Automerge', 
() => { } const doc = load(encodeChange(change)); doc.enablePatches(true) - let mat : any = doc.materialize("/") + const mat : any = doc.materialize("/") // multi - char strings appear as a span of strings // non strings appear as an object replacement unicode char @@ -2039,27 +2039,27 @@ describe('Automerge', () => { assert.deepEqual(doc.materialize("/bad_text"), 'ABBBBBC') // deleting in the middle of a multi-byte character will delete the whole thing - let doc1 = doc.fork() + const doc1 = doc.fork() doc1.splice("/bad_text", 3, 3, "X"); assert.deepEqual(doc1.text("/bad_text"), 'AXC') // deleting in the middle of a multi-byte character will delete the whole thing // and characters past its end - let doc2 = doc.fork() + const doc2 = doc.fork() doc2.splice("/bad_text", 3, 4, "X"); assert.deepEqual(doc2.text("/bad_text"), 'AXC') - let doc3 = doc.fork() + const doc3 = doc.fork() doc3.splice("/bad_text", 3, 5, "X"); assert.deepEqual(doc3.text("/bad_text"), 'AX') // inserting in the middle of a mutli-bytes span inserts after - let doc4 = doc.fork() + const doc4 = doc.fork() doc4.splice("/bad_text", 3, 0, "X"); assert.deepEqual(doc4.text("/bad_text"), 'ABBBBBXC') // deleting into the middle of a multi-byte span deletes the whole thing - let doc5 = doc.fork() + const doc5 = doc.fork() doc5.splice("/bad_text", 0, 2, "X"); assert.deepEqual(doc5.text("/bad_text"), 'XC') From e75ca2a8342b99b68a12e1471393afd585636c49 Mon Sep 17 00:00:00 2001 From: patryk Date: Wed, 14 Dec 2022 12:41:21 +0100 Subject: [PATCH 664/730] Update README.md (Update Slack invite link) (#475) Slack invite link updated to the one used on the website, as the current one returns "This link is no longer active". 
--- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index b2037c13..d11e9d1c 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ If you're familiar with CRDTs and interested in the design of Automerge in particular take a look at https://automerge.org/docs/how-it-works/backend/ Finally, if you want to talk to us about this project please [join the -Slack](https://join.slack.com/t/automerge/shared_invite/zt-1ho1ieas2-DnWZcRR82BRu65vCD4t3Xw) +Slack](https://join.slack.com/t/automerge/shared_invite/zt-e4p3760n-kKh7r3KRH1YwwNfiZM8ktw) ## Status From 6dad2b7df16a31b5f9c02d46b18cd5a89f8e10ea Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 14 Dec 2022 10:34:22 -0700 Subject: [PATCH 665/730] Don't panic on invalid gzip stream (#477) * Don't panic on invalid gzip stream Before this change automerge-rs would panic if the gzip data in a raw column was invalid; after this change the error is propagated to the caller correctly. --- .../src/storage/columns/raw_column.rs | 18 ++-- rust/automerge/src/storage/document.rs | 3 +- .../src/storage/document/compression.rs | 94 +++++++++++-------- rust/automerge/tests/test.rs | 14 +++ 4 files changed, 84 insertions(+), 45 deletions(-) diff --git a/rust/automerge/src/storage/columns/raw_column.rs b/rust/automerge/src/storage/columns/raw_column.rs index 053c3c75..808b53cf 100644 --- a/rust/automerge/src/storage/columns/raw_column.rs +++ b/rust/automerge/src/storage/columns/raw_column.rs @@ -73,15 +73,19 @@ impl RawColumn { } } - fn decompress(&self, input: &[u8], out: &mut Vec) -> (ColumnSpec, usize) { + fn decompress( + &self, + input: &[u8], + out: &mut Vec, + ) -> Result<(ColumnSpec, usize), ParseError> { let len = if self.spec.deflate() { let mut inflater = flate2::bufread::DeflateDecoder::new(&input[self.data.clone()]); - inflater.read_to_end(out).unwrap() + inflater.read_to_end(out).map_err(ParseError::Deflate)? 
} else { out.extend(&input[self.data.clone()]); self.data.len() }; - (self.spec.inflated(), len) + Ok((self.spec.inflated(), len)) } } @@ -140,7 +144,7 @@ impl RawColumns { &self, input: &[u8], out: &mut Vec, - ) -> RawColumns { + ) -> Result, ParseError> { let mut result = Vec::with_capacity(self.0.len()); let mut start = 0; for col in &self.0 { @@ -148,7 +152,7 @@ impl RawColumns { out.extend(&input[decomp.data.clone()]); (decomp.spec, decomp.data.len()) } else { - col.decompress(input, out) + col.decompress(input, out)? }; result.push(RawColumn { spec, @@ -157,7 +161,7 @@ impl RawColumns { }); start += len; } - RawColumns(result) + Ok(RawColumns(result)) } } @@ -193,6 +197,8 @@ pub(crate) enum ParseError { NotInNormalOrder, #[error(transparent)] Leb128(#[from] parse::leb128::Error), + #[error(transparent)] + Deflate(#[from] std::io::Error), } impl RawColumns { diff --git a/rust/automerge/src/storage/document.rs b/rust/automerge/src/storage/document.rs index 500fbe85..ecef0bfd 100644 --- a/rust/automerge/src/storage/document.rs +++ b/rust/automerge/src/storage/document.rs @@ -173,7 +173,8 @@ impl<'a> Document<'a> { raw_columns: ops_meta, }, extra_args: (), - }); + }) + .map_err(|e| parse::ParseError::Error(ParseError::RawColumns(e)))?; let ops_layout = Columns::parse(op_bytes.len(), ops.iter()).map_err(|e| { parse::ParseError::Error(ParseError::BadColumnLayout { diff --git a/rust/automerge/src/storage/document/compression.rs b/rust/automerge/src/storage/document/compression.rs index f7daa127..2f0e96ce 100644 --- a/rust/automerge/src/storage/document/compression.rs +++ b/rust/automerge/src/storage/document/compression.rs @@ -1,6 +1,9 @@ -use std::{borrow::Cow, ops::Range}; +use std::{borrow::Cow, convert::Infallible, ops::Range}; -use crate::storage::{columns::compression, shift_range, ChunkType, Header, RawColumns}; +use crate::storage::{ + columns::{compression, raw_column}, + shift_range, ChunkType, Header, RawColumns, +}; pub(super) struct Args<'a, T: 
compression::ColumnCompression, DirArgs> { /// The original data of the entire document chunk (compressed or uncompressed) @@ -23,40 +26,50 @@ pub(super) struct CompressArgs { } /// Compress a document chunk returning the compressed bytes -pub(super) fn compress<'a>(args: Args<'a, compression::Uncompressed, CompressArgs>) -> Vec { +pub(super) fn compress(args: Args<'_, compression::Uncompressed, CompressArgs>) -> Vec { let header_len = args.extra_args.original_header_len; let threshold = args.extra_args.threshold; - Compression::<'a, Compressing, _>::new( - args, - Compressing { - threshold, - header_len, - }, - ) - .changes() - .ops() - .write_data() - .finish() + // Wrap in a closure so we can use `?` in the construction but still force the compiler + // to check that the error type is `Infallible` + let result: Result<_, Infallible> = (|| { + Ok(Compression::::new( + args, + Compressing { + threshold, + header_len, + }, + ) + .changes()? + .ops()? + .write_data() + .finish()) + })(); + // We just checked the error is `Infallible` so unwrap is fine + result.unwrap() } -pub(super) fn decompress<'a>(args: Args<'a, compression::Unknown, ()>) -> Decompressed<'a> { +pub(super) fn decompress<'a>( + args: Args<'a, compression::Unknown, ()>, +) -> Result, raw_column::ParseError> { match ( args.changes.raw_columns.uncompressed(), args.ops.raw_columns.uncompressed(), ) { - (Some(changes), Some(ops)) => Decompressed { + (Some(changes), Some(ops)) => Ok(Decompressed { changes, ops, compressed: None, uncompressed: args.original, change_bytes: args.changes.data, op_bytes: args.ops.data, - }, - _ => Compression::<'a, Decompressing, _>::new(args, Decompressing) - .changes() - .ops() - .write_data() - .finish(), + }), + _ => Ok( + Compression::<'a, Decompressing, _>::new(args, Decompressing) + .changes()? + .ops()? 
+ .write_data() + .finish(), + ), } } @@ -94,6 +107,7 @@ pub(super) struct Cols { trait Direction: std::fmt::Debug { type Out: compression::ColumnCompression; type In: compression::ColumnCompression; + type Error; type Args; /// This method represents the (de)compression process for a direction. The arguments are: @@ -108,7 +122,7 @@ trait Direction: std::fmt::Debug { input: &[u8], out: &mut Vec, meta_out: &mut Vec, - ) -> Cols; + ) -> Result, Self::Error>; } #[derive(Debug)] struct Compressing { @@ -117,6 +131,7 @@ struct Compressing { } impl Direction for Compressing { + type Error = Infallible; type Out = compression::Unknown; type In = compression::Uncompressed; type Args = CompressArgs; @@ -127,16 +142,16 @@ impl Direction for Compressing { input: &[u8], out: &mut Vec, meta_out: &mut Vec, - ) -> Cols { + ) -> Result, Self::Error> { let start = out.len(); let raw_columns = cols .raw_columns .compress(&input[cols.data.clone()], out, self.threshold); raw_columns.write(meta_out); - Cols { + Ok(Cols { data: start..out.len(), raw_columns, - } + }) } } @@ -144,6 +159,7 @@ impl Direction for Compressing { struct Decompressing; impl Direction for Decompressing { + type Error = raw_column::ParseError; type Out = compression::Uncompressed; type In = compression::Unknown; type Args = (); @@ -154,14 +170,16 @@ impl Direction for Decompressing { input: &[u8], out: &mut Vec, meta_out: &mut Vec, - ) -> Cols { + ) -> Result, raw_column::ParseError> { let start = out.len(); - let raw_columns = cols.raw_columns.uncompress(&input[cols.data.clone()], out); + let raw_columns = cols + .raw_columns + .uncompress(&input[cols.data.clone()], out)?; raw_columns.write(meta_out); - Cols { + Ok(Cols { data: start..out.len(), raw_columns, - } + }) } } @@ -233,7 +251,7 @@ impl<'a, D: Direction> Compression<'a, D, Starting> { } impl<'a, D: Direction> Compression<'a, D, Starting> { - fn changes(self) -> Compression<'a, D, Changes> { + fn changes(self) -> Result>, D::Error> { let Starting { mut 
data_out, mut meta_out, @@ -243,8 +261,8 @@ impl<'a, D: Direction> Compression<'a, D, Starting> { &self.args.original, &mut data_out, &mut meta_out, - ); - Compression { + )?; + Ok(Compression { args: self.args, direction: self.direction, state: Changes { @@ -252,12 +270,12 @@ impl<'a, D: Direction> Compression<'a, D, Starting> { meta_out, data_out, }, - } + }) } } impl<'a, D: Direction> Compression<'a, D, Changes> { - fn ops(self) -> Compression<'a, D, ChangesAndOps> { + fn ops(self) -> Result>, D::Error> { let Changes { change_cols, mut meta_out, @@ -268,8 +286,8 @@ impl<'a, D: Direction> Compression<'a, D, Changes> { &self.args.original, &mut data_out, &mut meta_out, - ); - Compression { + )?; + Ok(Compression { args: self.args, direction: self.direction, state: ChangesAndOps { @@ -278,7 +296,7 @@ impl<'a, D: Direction> Compression<'a, D, Changes> { meta_out, data_out, }, - } + }) } } diff --git a/rust/automerge/tests/test.rs b/rust/automerge/tests/test.rs index 876acb74..c1b653d3 100644 --- a/rust/automerge/tests/test.rs +++ b/rust/automerge/tests/test.rs @@ -1397,3 +1397,17 @@ fn ops_on_wrong_objets() -> Result<(), AutomergeError> { assert_eq!(e6, Err(AutomergeError::InvalidOp(ObjType::Text))); Ok(()) } + +#[test] +fn invalid_deflate_stream() { + let bytes: [u8; 123] = [ + 133, 111, 74, 131, 48, 48, 48, 48, 0, 113, 1, 16, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, + 48, 48, 48, 48, 48, 48, 1, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, + 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 6, 1, 2, 3, 2, 32, 2, 48, + 2, 49, 2, 49, 2, 8, 32, 4, 33, 2, 48, 2, 49, 1, 49, 2, 57, 2, 87, 3, 128, 1, 2, 127, 0, + 127, 1, 127, 1, 127, 0, 127, 0, 127, 7, 127, 2, 102, 122, 127, 0, 127, 1, 1, 127, 1, 127, + 54, 239, 191, 189, 127, 0, 0, + ]; + + assert!(Automerge::load(&bytes).is_err()); +} From 8aff1296b99b46f9ba08c833f9c77c5e0763a968 Mon Sep 17 00:00:00 2001 From: alexjg Date: Wed, 14 Dec 2022 18:06:19 +0000 Subject: [PATCH 666/730] 
automerge-cli: remove a bunch of bad dependencies (#478) Automerge CLI depends transitively (via and old version of `clap` and via `colored_json` on `atty` and `ansi_term`. These crates are both marked as unmaintained and this generates irritating `cargo deny` messages. To avoid this, implement colored JSON ourselves using the `termcolor` crate - colored JSON is pretty mechanical. Also update criterion and cbindgen dependencies and ignore the criterion tree in deny.toml as we only ever use it in benchmarks. All that's left now is a warning about atty in cbindgen, we'll just have to wait for cbindgen to fix that, it's a build time dependency anyway so it's not really an issue. --- rust/automerge-c/Cargo.toml | 2 +- rust/automerge-cli/Cargo.toml | 7 +- rust/automerge-cli/src/color_json.rs | 348 +++++++++++++++++++++++++++ rust/automerge-cli/src/examine.rs | 4 +- rust/automerge-cli/src/export.rs | 4 +- rust/automerge-cli/src/main.rs | 24 +- rust/automerge/Cargo.toml | 2 +- rust/deny.toml | 16 +- rust/edit-trace/Cargo.toml | 2 +- 9 files changed, 375 insertions(+), 34 deletions(-) create mode 100644 rust/automerge-cli/src/color_json.rs diff --git a/rust/automerge-c/Cargo.toml b/rust/automerge-c/Cargo.toml index 851a3470..d039e460 100644 --- a/rust/automerge-c/Cargo.toml +++ b/rust/automerge-c/Cargo.toml @@ -19,4 +19,4 @@ libc = "^0.2" smol_str = "^0.1.21" [build-dependencies] -cbindgen = "^0.20" +cbindgen = "^0.24" diff --git a/rust/automerge-cli/Cargo.toml b/rust/automerge-cli/Cargo.toml index f434bc69..430090a6 100644 --- a/rust/automerge-cli/Cargo.toml +++ b/rust/automerge-cli/Cargo.toml @@ -13,17 +13,18 @@ bench = false doc = false [dependencies] -clap = {version = "~3.1", features = ["derive"]} +clap = {version = "~4", features = ["derive"]} serde_json = "^1.0" anyhow = "1.0" -atty = "^0.2" thiserror = "^1.0" combine = "^4.5" maplit = "^1.0" -colored_json = "^2.1" tracing-subscriber = "~0.3" automerge = { path = "../automerge" } +is-terminal = "0.4.1" +termcolor = 
"1.1.3" +serde = "1.0.150" [dev-dependencies] duct = "^0.13" diff --git a/rust/automerge-cli/src/color_json.rs b/rust/automerge-cli/src/color_json.rs new file mode 100644 index 00000000..1d175026 --- /dev/null +++ b/rust/automerge-cli/src/color_json.rs @@ -0,0 +1,348 @@ +use std::io::Write; + +use serde::Serialize; +use serde_json::ser::Formatter; +use termcolor::{Buffer, BufferWriter, Color, ColorSpec, WriteColor}; + +struct Style { + /// style of object brackets + object_brackets: ColorSpec, + /// style of array brackets + array_brackets: ColorSpec, + /// style of object + key: ColorSpec, + /// style of string values + string_value: ColorSpec, + /// style of integer values + integer_value: ColorSpec, + /// style of float values + float_value: ColorSpec, + /// style of bool values + bool_value: ColorSpec, + /// style of the `nil` value + nil_value: ColorSpec, + /// should the quotation get the style of the inner string/key? + string_include_quotation: bool, +} + +impl Default for Style { + fn default() -> Self { + Self { + object_brackets: ColorSpec::new().set_bold(true).clone(), + array_brackets: ColorSpec::new().set_bold(true).clone(), + key: ColorSpec::new() + .set_fg(Some(Color::Blue)) + .set_bold(true) + .clone(), + string_value: ColorSpec::new().set_fg(Some(Color::Green)).clone(), + integer_value: ColorSpec::new(), + float_value: ColorSpec::new(), + bool_value: ColorSpec::new(), + nil_value: ColorSpec::new(), + string_include_quotation: true, + } + } +} + +/// Write pretty printed, colored json to stdout +pub(crate) fn print_colored_json(value: &serde_json::Value) -> std::io::Result<()> { + let formatter = ColoredFormatter { + formatter: serde_json::ser::PrettyFormatter::new(), + style: Style::default(), + in_object_key: false, + }; + let mut ignored_writer = Vec::new(); + let mut ser = serde_json::Serializer::with_formatter(&mut ignored_writer, formatter); + value + .serialize(&mut ser) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, 
e.to_string())) +} + +struct ColoredFormatter { + formatter: F, + style: Style, + in_object_key: bool, +} + +fn write_colored(color: ColorSpec, handler: H) -> std::io::Result<()> +where + H: FnOnce(&mut Buffer) -> std::io::Result<()>, +{ + let buf = BufferWriter::stdout(termcolor::ColorChoice::Auto); + let mut buffer = buf.buffer(); + buffer.set_color(&color)?; + handler(&mut buffer)?; + buffer.reset()?; + buf.print(&buffer)?; + Ok(()) +} + +impl Formatter for ColoredFormatter { + fn write_null(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.nil_value.clone(), |w| { + self.formatter.write_null(w) + }) + } + + fn write_bool(&mut self, _writer: &mut W, value: bool) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.bool_value.clone(), |w| { + self.formatter.write_bool(w, value) + }) + } + + fn write_i8(&mut self, _writer: &mut W, value: i8) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_i8(w, value) + }) + } + + fn write_i16(&mut self, _writer: &mut W, value: i16) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_i16(w, value) + }) + } + + fn write_i32(&mut self, _writer: &mut W, value: i32) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_i32(w, value) + }) + } + + fn write_i64(&mut self, _writer: &mut W, value: i64) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_i64(w, value) + }) + } + + fn write_u8(&mut self, _writer: &mut W, value: u8) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + 
self.formatter.write_u8(w, value) + }) + } + + fn write_u16(&mut self, _writer: &mut W, value: u16) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_u16(w, value) + }) + } + + fn write_u32(&mut self, _writer: &mut W, value: u32) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_u32(w, value) + }) + } + + fn write_u64(&mut self, _writer: &mut W, value: u64) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_u64(w, value) + }) + } + + fn write_f32(&mut self, _writer: &mut W, value: f32) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.float_value.clone(), |w| { + self.formatter.write_f32(w, value) + }) + } + + fn write_f64(&mut self, _writer: &mut W, value: f64) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.float_value.clone(), |w| { + self.formatter.write_f64(w, value) + }) + } + + fn write_number_str(&mut self, _writer: &mut W, value: &str) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_number_str(w, value) + }) + } + + fn begin_string(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + if self.style.string_include_quotation { + let style = if self.in_object_key { + &self.style.key + } else { + &self.style.string_value + }; + write_colored(style.clone(), |w| self.formatter.begin_string(w)) + } else { + self.formatter.begin_string(_writer) + } + } + + fn end_string(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + if self.style.string_include_quotation { + let style = if self.in_object_key { + &self.style.key + } else { 
+ &self.style.string_value + }; + write_colored(style.clone(), |w| self.formatter.end_string(w)) + } else { + self.formatter.end_string(_writer) + } + } + + fn write_string_fragment(&mut self, _writer: &mut W, fragment: &str) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + let style = if self.in_object_key { + &self.style.key + } else { + &self.style.string_value + }; + write_colored(style.clone(), |w| w.write_all(fragment.as_bytes())) + } + + fn write_char_escape( + &mut self, + _writer: &mut W, + char_escape: serde_json::ser::CharEscape, + ) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + let style = if self.in_object_key { + &self.style.key + } else { + &self.style.string_value + }; + write_colored(style.clone(), |w| { + self.formatter.write_char_escape(w, char_escape) + }) + } + + fn begin_array(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.array_brackets.clone(), |w| { + self.formatter.begin_array(w) + }) + } + + fn end_array(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.array_brackets.clone(), |w| { + self.formatter.end_array(w) + }) + } + + fn begin_array_value(&mut self, writer: &mut W, first: bool) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + self.formatter.begin_array_value(writer, first) + } + + fn end_array_value(&mut self, writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + self.formatter.end_array_value(writer) + } + + fn begin_object(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.object_brackets.clone(), |w| { + self.formatter.begin_object(w) + }) + } + + fn end_object(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.object_brackets.clone(), |w| { + 
self.formatter.end_object(w) + }) + } + + fn begin_object_key(&mut self, writer: &mut W, first: bool) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + self.in_object_key = true; + self.formatter.begin_object_key(writer, first) + } + + fn end_object_key(&mut self, writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + self.in_object_key = false; + self.formatter.end_object_key(writer) + } + + fn begin_object_value(&mut self, writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + self.in_object_key = false; + self.formatter.begin_object_value(writer) + } + + fn end_object_value(&mut self, writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + self.in_object_key = false; + self.formatter.end_object_value(writer) + } + + fn write_raw_fragment(&mut self, writer: &mut W, fragment: &str) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + self.formatter.write_raw_fragment(writer, fragment) + } +} diff --git a/rust/automerge-cli/src/examine.rs b/rust/automerge-cli/src/examine.rs index 847abd4f..0b8946d4 100644 --- a/rust/automerge-cli/src/examine.rs +++ b/rust/automerge-cli/src/examine.rs @@ -1,6 +1,8 @@ use automerge as am; use thiserror::Error; +use crate::color_json::print_colored_json; + #[derive(Error, Debug)] pub enum ExamineError { #[error("Error reading change file: {:?}", source)] @@ -39,7 +41,7 @@ pub fn examine( .collect(); if is_tty { let json_changes = serde_json::to_value(uncompressed_changes).unwrap(); - colored_json::write_colored_json(&json_changes, &mut output).unwrap(); + print_colored_json(&json_changes).unwrap(); writeln!(output).unwrap(); } else { let json_changes = serde_json::to_string_pretty(&uncompressed_changes).unwrap(); diff --git a/rust/automerge-cli/src/export.rs b/rust/automerge-cli/src/export.rs index 49cded8f..1d4d7965 100644 --- a/rust/automerge-cli/src/export.rs +++ b/rust/automerge-cli/src/export.rs @@ -1,6 +1,8 @@ use 
anyhow::Result; use automerge as am; +use crate::color_json::print_colored_json; + pub(crate) fn map_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value { let keys = doc.keys(obj); let mut map = serde_json::Map::new(); @@ -84,7 +86,7 @@ pub fn export_json( let state_json = get_state_json(input_data)?; if is_tty { - colored_json::write_colored_json(&state_json, &mut writer).unwrap(); + print_colored_json(&state_json).unwrap(); writeln!(writer).unwrap(); } else { writeln!( diff --git a/rust/automerge-cli/src/main.rs b/rust/automerge-cli/src/main.rs index ffc13012..b16d9449 100644 --- a/rust/automerge-cli/src/main.rs +++ b/rust/automerge-cli/src/main.rs @@ -2,8 +2,10 @@ use std::{fs::File, path::PathBuf, str::FromStr}; use anyhow::{anyhow, Result}; use clap::Parser; +use is_terminal::IsTerminal; //mod change; +mod color_json; mod examine; mod export; mod import; @@ -16,7 +18,7 @@ struct Opts { cmd: Command, } -#[derive(Debug)] +#[derive(clap::ValueEnum, Clone, Debug)] enum ExportFormat { Json, Toml, @@ -43,11 +45,10 @@ enum Command { format: ExportFormat, /// Path that contains Automerge changes - #[clap(parse(from_os_str))] changes_file: Option, /// The file to write to. 
If omitted assumes stdout - #[clap(parse(from_os_str), long("out"), short('o'))] + #[clap(long("out"), short('o'))] output_file: Option, }, @@ -56,11 +57,10 @@ enum Command { #[clap(long, short, default_value = "json")] format: ExportFormat, - #[clap(parse(from_os_str))] input_file: Option, /// Path to write Automerge changes to - #[clap(parse(from_os_str), long("out"), short('o'))] + #[clap(long("out"), short('o'))] changes_file: Option, }, @@ -94,11 +94,10 @@ enum Command { script: String, /// The file to change, if omitted will assume stdin - #[clap(parse(from_os_str))] input_file: Option, /// Path to write Automerge changes to, if omitted will write to stdout - #[clap(parse(from_os_str), long("out"), short('o'))] + #[clap(long("out"), short('o'))] output_file: Option, }, @@ -108,15 +107,16 @@ enum Command { /// Read one or more automerge documents and output a merged, compacted version of them Merge { /// The file to write to. If omitted assumes stdout - #[clap(parse(from_os_str), long("out"), short('o'))] + #[clap(long("out"), short('o'))] output_file: Option, + /// The file(s) to compact. 
If empty assumes stdin input: Vec, }, } fn open_file_or_stdin(maybe_path: Option) -> Result> { - if atty::is(atty::Stream::Stdin) { + if std::io::stdin().is_terminal() { if let Some(path) = maybe_path { Ok(Box::new(File::open(&path).unwrap())) } else { @@ -130,7 +130,7 @@ fn open_file_or_stdin(maybe_path: Option) -> Result) -> Result> { - if atty::is(atty::Stream::Stdout) { + if std::io::stdout().is_terminal() { if let Some(path) = maybe_path { Ok(Box::new(File::create(&path).unwrap())) } else { @@ -158,7 +158,7 @@ fn main() -> Result<()> { match format { ExportFormat::Json => { let mut in_buffer = open_file_or_stdin(changes_file)?; - export::export_json(&mut in_buffer, output, atty::is(atty::Stream::Stdout)) + export::export_json(&mut in_buffer, output, std::io::stdout().is_terminal()) } ExportFormat::Toml => unimplemented!(), } @@ -191,7 +191,7 @@ fn main() -> Result<()> { Command::Examine { input_file } => { let in_buffer = open_file_or_stdin(input_file)?; let out_buffer = std::io::stdout(); - match examine::examine(in_buffer, out_buffer, atty::is(atty::Stream::Stdout)) { + match examine::examine(in_buffer, out_buffer, std::io::stdout().is_terminal()) { Ok(()) => {} Err(e) => { eprintln!("Error: {:?}", e); diff --git a/rust/automerge/Cargo.toml b/rust/automerge/Cargo.toml index 8872dcdc..89b48020 100644 --- a/rust/automerge/Cargo.toml +++ b/rust/automerge/Cargo.toml @@ -42,7 +42,7 @@ pretty_assertions = "1.0.0" proptest = { version = "^1.0.0", default-features = false, features = ["std"] } serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-features=true } maplit = { version = "^1.0" } -criterion = "0.3.5" +criterion = "0.4.0" test-log = { version = "0.2.10", features=["trace"], default-features = false} tracing-subscriber = {version = "0.3.9", features = ["fmt", "env-filter"] } automerge-test = { path = "../automerge-test" } diff --git a/rust/deny.toml b/rust/deny.toml index f6985357..54a68a60 100644 --- a/rust/deny.toml +++ 
b/rust/deny.toml @@ -46,7 +46,6 @@ notice = "warn" # output a note when they are encountered. ignore = [ #"RUSTSEC-0000-0000", - "RUSTSEC-2021-0127", # serde_cbor is unmaintained, but we only use it in criterion for benchmarks ] # Threshold for security vulnerabilities, any vulnerability with a CVSS score # lower than the range specified will be ignored. Note that ignored advisories @@ -100,10 +99,6 @@ confidence-threshold = 0.8 # Allow 1 or more licenses on a per-crate basis, so that particular licenses # aren't accepted for every possible crate as with the normal allow list exceptions = [ - # this is a LGPL like license in the CLI - # since this is an application not a library people would link to it should be fine - { allow = ["EPL-2.0"], name = "colored_json" }, - # The Unicode-DFS--2016 license is necessary for unicode-ident because they # use data from the unicode tables to generate the tables which are # included in the application. We do not distribute those data files so @@ -177,21 +172,14 @@ deny = [ ] # Certain crates/versions that will be skipped when doing duplicate detection. skip = [ - # These are transitive depdendencies of criterion, which is only included for benchmarking anyway - { name = "itoa", version = "0.4.8" }, - { name = "textwrap", version = "0.11.0" }, - { name = "clap", version = "2.34.0" }, - - # These are transitive depdendencies of cbindgen - { name = "strsim", version = "0.8.0" }, - { name = "heck", version = "0.3.3" }, ] # Similarly to `skip` allows you to skip certain crates during duplicate # detection. Unlike skip, it also includes the entire tree of transitive # dependencies starting at the specified crate, up to a certain depth, which is # by default infinite skip-tree = [ - #{ name = "ansi_term", version = "=0.11.0", depth = 20 }, + # // We only ever use criterion in benchmarks + { name = "criterion", version = "0.4.0", depth=10}, ] # This section is considered when running `cargo deny check sources`. 
diff --git a/rust/edit-trace/Cargo.toml b/rust/edit-trace/Cargo.toml index 0107502b..eaebde46 100644 --- a/rust/edit-trace/Cargo.toml +++ b/rust/edit-trace/Cargo.toml @@ -6,7 +6,7 @@ license = "MIT" [dependencies] automerge = { path = "../automerge" } -criterion = "0.3.5" +criterion = "0.4.0" json = "0.12.4" rand = "^0.8" From 0f90fe4d02095713dbfd5c1767bcfa03087a4b97 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 19 Dec 2022 10:43:56 +0000 Subject: [PATCH 667/730] Add a method for loading a document without verifying heads This is primarily useful when debugging documents which have been corrupted somehow so you would like to see the ops even if you can't trust them. Note that this is _not_ currently useful for performance reasons as the hash graph is still constructed, just not verified. --- rust/automerge/src/automerge.rs | 15 +++++-- rust/automerge/src/storage.rs | 1 + rust/automerge/src/storage/load.rs | 4 +- .../src/storage/load/reconstruct_document.rs | 41 ++++++++++++++++--- 4 files changed, 49 insertions(+), 12 deletions(-) diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs index 5502456c..584f761d 100644 --- a/rust/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ -12,7 +12,7 @@ use crate::keys::Keys; use crate::op_observer::OpObserver; use crate::op_set::OpSet; use crate::parents::Parents; -use crate::storage::{self, load, CompressConfig}; +use crate::storage::{self, load, CompressConfig, VerificationMode}; use crate::transaction::{ self, CommitOptions, Failure, Observed, Success, Transaction, TransactionArgs, UnObserved, }; @@ -650,13 +650,18 @@ impl Automerge { /// Load a document. pub fn load(data: &[u8]) -> Result { - Self::load_with::<()>(data, None) + Self::load_with::<()>(data, VerificationMode::Check, None) + } + + pub fn load_unverified_heads(data: &[u8]) -> Result { + Self::load_with::<()>(data, VerificationMode::DontCheck, None) } /// Load a document. 
#[tracing::instrument(skip(data, observer), err)] pub fn load_with( data: &[u8], + mode: VerificationMode, mut observer: Option<&mut Obs>, ) -> Result { if data.is_empty() { @@ -679,8 +684,10 @@ impl Automerge { changes, heads, } = match &mut observer { - Some(o) => storage::load::reconstruct_document(&d, OpSet::observed_builder(*o)), - None => storage::load::reconstruct_document(&d, OpSet::builder()), + Some(o) => { + storage::load::reconstruct_document(&d, mode, OpSet::observed_builder(*o)) + } + None => storage::load::reconstruct_document(&d, mode, OpSet::builder()), } .map_err(|e| load::Error::InflateDocument(Box::new(e)))?; let mut hashes_by_index = HashMap::new(); diff --git a/rust/automerge/src/storage.rs b/rust/automerge/src/storage.rs index c8a2183d..5b3d03a7 100644 --- a/rust/automerge/src/storage.rs +++ b/rust/automerge/src/storage.rs @@ -14,6 +14,7 @@ pub(crate) use { chunk::{CheckSum, Chunk, ChunkType, Header}, columns::{Columns, MismatchingColumn, RawColumn, RawColumns}, document::{AsChangeMeta, AsDocOp, ChangeMetadata, CompressConfig, DocOp, Document}, + load::VerificationMode, }; fn shift_range(range: Range, by: usize) -> Range { diff --git a/rust/automerge/src/storage/load.rs b/rust/automerge/src/storage/load.rs index fe2e8429..80ab3d82 100644 --- a/rust/automerge/src/storage/load.rs +++ b/rust/automerge/src/storage/load.rs @@ -8,7 +8,7 @@ use crate::{ mod change_collector; mod reconstruct_document; pub(crate) use reconstruct_document::{ - reconstruct_document, DocObserver, LoadedObject, Reconstructed, + reconstruct_document, DocObserver, LoadedObject, Reconstructed, VerificationMode, }; #[derive(Debug, thiserror::Error)] @@ -84,7 +84,7 @@ fn load_next_change<'a>( let Reconstructed { changes: new_changes, .. 
- } = reconstruct_document(&d, NullObserver) + } = reconstruct_document(&d, VerificationMode::DontCheck, NullObserver) .map_err(|e| Error::InflateDocument(Box::new(e)))?; changes.extend(new_changes); } diff --git a/rust/automerge/src/storage/load/reconstruct_document.rs b/rust/automerge/src/storage/load/reconstruct_document.rs index e8221e5c..44ace72a 100644 --- a/rust/automerge/src/storage/load/reconstruct_document.rs +++ b/rust/automerge/src/storage/load/reconstruct_document.rs @@ -6,7 +6,7 @@ use crate::{ change::Change, columnar::Key as DocOpKey, op_tree::OpSetMetadata, - storage::{DocOp, Document}, + storage::{change::Verified, Change as StoredChange, DocOp, Document}, types::{ChangeHash, ElemId, Key, ObjId, ObjType, Op, OpId, OpIds, OpType}, ScalarValue, }; @@ -24,13 +24,29 @@ pub(crate) enum Error { #[error("invalid changes: {0}")] InvalidChanges(#[from] super::change_collector::Error), #[error("mismatching heads")] - MismatchingHeads, + MismatchingHeads(MismatchedHeads), #[error("missing operations")] MissingOps, #[error("succ out of order")] SuccOutOfOrder, } +pub(crate) struct MismatchedHeads { + changes: Vec>, + expected_heads: BTreeSet, + derived_heads: BTreeSet, +} + +impl std::fmt::Debug for MismatchedHeads { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("MismatchedHeads") + .field("changes", &self.changes.len()) + .field("expected_heads", &self.expected_heads) + .field("derived_heads", &self.derived_heads) + .finish() + } +} + /// All the operations loaded from an object in the document format pub(crate) struct LoadedObject { /// The id of the object @@ -67,9 +83,16 @@ pub(crate) struct Reconstructed { pub(crate) heads: BTreeSet, } +#[derive(Debug)] +pub enum VerificationMode { + Check, + DontCheck, +} + #[instrument(skip(doc, observer))] pub(crate) fn reconstruct_document<'a, O: DocObserver>( doc: &'a Document<'a>, + mode: VerificationMode, mut observer: O, ) -> Result, Error> { // The document format does 
not contain the bytes of the changes which are encoded in it @@ -185,10 +208,16 @@ pub(crate) fn reconstruct_document<'a, O: DocObserver>( let super::change_collector::CollectedChanges { history, heads } = collector.finish(&metadata)?; - let expected_heads: BTreeSet<_> = doc.heads().iter().cloned().collect(); - if expected_heads != heads { - tracing::error!(?expected_heads, ?heads, "mismatching heads"); - return Err(Error::MismatchingHeads); + if matches!(mode, VerificationMode::Check) { + let expected_heads: BTreeSet<_> = doc.heads().iter().cloned().collect(); + if expected_heads != heads { + tracing::error!(?expected_heads, ?heads, "mismatching heads"); + return Err(Error::MismatchingHeads(MismatchedHeads { + changes: history, + expected_heads, + derived_heads: heads, + })); + } } let result = observer.finish(metadata); From 6da93b6adc9aca6522b77f8d985a69ce2ebb5cc0 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 19 Dec 2022 10:52:45 +0000 Subject: [PATCH 668/730] Correctly implement colored json My quickly thrown together implementation had somem mistakes in it which meant that the JSON produced was malformed. 
--- rust/automerge-cli/src/color_json.rs | 98 +++++++++++++++++----------- 1 file changed, 60 insertions(+), 38 deletions(-) diff --git a/rust/automerge-cli/src/color_json.rs b/rust/automerge-cli/src/color_json.rs index 1d175026..9514da22 100644 --- a/rust/automerge-cli/src/color_json.rs +++ b/rust/automerge-cli/src/color_json.rs @@ -132,6 +132,15 @@ impl Formatter for ColoredFormatter { }) } + fn write_i128(&mut self, _writer: &mut W, value: i128) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_i128(w, value) + }) + } + fn write_u8(&mut self, _writer: &mut W, value: u8) -> std::io::Result<()> where W: ?Sized + std::io::Write, @@ -168,6 +177,15 @@ impl Formatter for ColoredFormatter { }) } + fn write_u128(&mut self, _writer: &mut W, value: u128) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_u128(w, value) + }) + } + fn write_f32(&mut self, _writer: &mut W, value: f32) -> std::io::Result<()> where W: ?Sized + std::io::Write, @@ -199,32 +217,32 @@ impl Formatter for ColoredFormatter { where W: ?Sized + std::io::Write, { - if self.style.string_include_quotation { - let style = if self.in_object_key { - &self.style.key + let style = if self.style.string_include_quotation { + if self.in_object_key { + self.style.key.clone() } else { - &self.style.string_value - }; - write_colored(style.clone(), |w| self.formatter.begin_string(w)) + self.style.string_value.clone() + } } else { - self.formatter.begin_string(_writer) - } + ColorSpec::new() + }; + write_colored(style, |w| self.formatter.begin_string(w)) } fn end_string(&mut self, _writer: &mut W) -> std::io::Result<()> where W: ?Sized + std::io::Write, { - if self.style.string_include_quotation { - let style = if self.in_object_key { - &self.style.key + let style = if self.style.string_include_quotation { + if self.in_object_key { + 
self.style.key.clone() } else { - &self.style.string_value - }; - write_colored(style.clone(), |w| self.formatter.end_string(w)) + self.style.string_value.clone() + } } else { - self.formatter.end_string(_writer) - } + ColorSpec::new() + }; + write_colored(style, |w| self.formatter.end_string(w)) } fn write_string_fragment(&mut self, _writer: &mut W, fragment: &str) -> std::io::Result<()> @@ -232,11 +250,11 @@ impl Formatter for ColoredFormatter { W: ?Sized + std::io::Write, { let style = if self.in_object_key { - &self.style.key + self.style.key.clone() } else { - &self.style.string_value + self.style.string_value.clone() }; - write_colored(style.clone(), |w| w.write_all(fragment.as_bytes())) + write_colored(style, |w| w.write_all(fragment.as_bytes())) } fn write_char_escape( @@ -248,13 +266,11 @@ impl Formatter for ColoredFormatter { W: ?Sized + std::io::Write, { let style = if self.in_object_key { - &self.style.key + self.style.key.clone() } else { - &self.style.string_value + self.style.string_value.clone() }; - write_colored(style.clone(), |w| { - self.formatter.write_char_escape(w, char_escape) - }) + write_colored(style, |w| self.formatter.write_char_escape(w, char_escape)) } fn begin_array(&mut self, _writer: &mut W) -> std::io::Result<()> @@ -275,18 +291,20 @@ impl Formatter for ColoredFormatter { }) } - fn begin_array_value(&mut self, writer: &mut W, first: bool) -> std::io::Result<()> + fn begin_array_value(&mut self, _writer: &mut W, first: bool) -> std::io::Result<()> where W: ?Sized + std::io::Write, { - self.formatter.begin_array_value(writer, first) + write_colored(ColorSpec::new(), |w| { + self.formatter.begin_array_value(w, first) + }) } - fn end_array_value(&mut self, writer: &mut W) -> std::io::Result<()> + fn end_array_value(&mut self, _writer: &mut W) -> std::io::Result<()> where W: ?Sized + std::io::Write, { - self.formatter.end_array_value(writer) + write_colored(ColorSpec::new(), |w| self.formatter.end_array_value(w)) } fn begin_object(&mut 
self, _writer: &mut W) -> std::io::Result<()> @@ -307,42 +325,46 @@ impl Formatter for ColoredFormatter { }) } - fn begin_object_key(&mut self, writer: &mut W, first: bool) -> std::io::Result<()> + fn begin_object_key(&mut self, _writer: &mut W, first: bool) -> std::io::Result<()> where W: ?Sized + std::io::Write, { self.in_object_key = true; - self.formatter.begin_object_key(writer, first) + write_colored(ColorSpec::new(), |w| { + self.formatter.begin_object_key(w, first) + }) } - fn end_object_key(&mut self, writer: &mut W) -> std::io::Result<()> + fn end_object_key(&mut self, _writer: &mut W) -> std::io::Result<()> where W: ?Sized + std::io::Write, { self.in_object_key = false; - self.formatter.end_object_key(writer) + write_colored(ColorSpec::new(), |w| self.formatter.end_object_key(w)) } - fn begin_object_value(&mut self, writer: &mut W) -> std::io::Result<()> + fn begin_object_value(&mut self, _writer: &mut W) -> std::io::Result<()> where W: ?Sized + std::io::Write, { self.in_object_key = false; - self.formatter.begin_object_value(writer) + write_colored(ColorSpec::new(), |w| self.formatter.begin_object_value(w)) } - fn end_object_value(&mut self, writer: &mut W) -> std::io::Result<()> + fn end_object_value(&mut self, _writer: &mut W) -> std::io::Result<()> where W: ?Sized + std::io::Write, { self.in_object_key = false; - self.formatter.end_object_value(writer) + write_colored(ColorSpec::new(), |w| self.formatter.end_object_value(w)) } - fn write_raw_fragment(&mut self, writer: &mut W, fragment: &str) -> std::io::Result<()> + fn write_raw_fragment(&mut self, _writer: &mut W, fragment: &str) -> std::io::Result<()> where W: ?Sized + std::io::Write, { - self.formatter.write_raw_fragment(writer, fragment) + write_colored(ColorSpec::new(), |w| { + self.formatter.write_raw_fragment(w, fragment) + }) } } From f682db303914434a7dfa914dcd3bafc8041d312f Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 19 Dec 2022 11:08:02 +0000 Subject: [PATCH 669/730] automerge-cli: 
Add a flag to skip verifiying heads --- rust/automerge-cli/src/examine.rs | 8 +- rust/automerge-cli/src/export.rs | 20 +++-- rust/automerge-cli/src/main.rs | 121 ++++++++++++++++-------------- 3 files changed, 83 insertions(+), 66 deletions(-) diff --git a/rust/automerge-cli/src/examine.rs b/rust/automerge-cli/src/examine.rs index 0b8946d4..0ee102fb 100644 --- a/rust/automerge-cli/src/examine.rs +++ b/rust/automerge-cli/src/examine.rs @@ -1,7 +1,7 @@ use automerge as am; use thiserror::Error; -use crate::color_json::print_colored_json; +use crate::{color_json::print_colored_json, SkipVerifyFlag}; #[derive(Error, Debug)] pub enum ExamineError { @@ -22,16 +22,18 @@ pub enum ExamineError { }, } -pub fn examine( +pub(crate) fn examine( mut input: impl std::io::Read, mut output: impl std::io::Write, + skip: SkipVerifyFlag, is_tty: bool, ) -> Result<(), ExamineError> { let mut buf: Vec = Vec::new(); input .read_to_end(&mut buf) .map_err(|e| ExamineError::ReadingChanges { source: e })?; - let doc = am::Automerge::load(&buf) + let doc = skip + .load(&buf) .map_err(|e| ExamineError::ApplyingInitialChanges { source: e })?; let uncompressed_changes: Vec<_> = doc .get_changes(&[]) diff --git a/rust/automerge-cli/src/export.rs b/rust/automerge-cli/src/export.rs index 1d4d7965..2a7b4130 100644 --- a/rust/automerge-cli/src/export.rs +++ b/rust/automerge-cli/src/export.rs @@ -1,7 +1,7 @@ use anyhow::Result; use automerge as am; -use crate::color_json::print_colored_json; +use crate::{color_json::print_colored_json, SkipVerifyFlag}; pub(crate) fn map_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value { let keys = doc.keys(obj); @@ -71,20 +71,21 @@ fn scalar_to_json(val: &am::ScalarValue) -> serde_json::Value { } } -fn get_state_json(input_data: Vec) -> Result { - let doc = am::Automerge::load(&input_data).unwrap(); // FIXME +fn get_state_json(input_data: Vec, skip: SkipVerifyFlag) -> Result { + let doc = skip.load(&input_data).unwrap(); // FIXME Ok(map_to_json(&doc, 
&am::ObjId::Root)) } -pub fn export_json( +pub(crate) fn export_json( mut changes_reader: impl std::io::Read, mut writer: impl std::io::Write, + skip: SkipVerifyFlag, is_tty: bool, ) -> Result<()> { let mut input_data = vec![]; changes_reader.read_to_end(&mut input_data)?; - let state_json = get_state_json(input_data)?; + let state_json = get_state_json(input_data, skip)?; if is_tty { print_colored_json(&state_json).unwrap(); writeln!(writer).unwrap(); @@ -105,7 +106,10 @@ mod tests { #[test] fn cli_export_with_empty_input() { - assert_eq!(get_state_json(vec![]).unwrap(), serde_json::json!({})) + assert_eq!( + get_state_json(vec![], Default::default()).unwrap(), + serde_json::json!({}) + ) } #[test] @@ -119,7 +123,7 @@ mod tests { let mut backend = initialize_from_json(&initial_state_json).unwrap(); let change_bytes = backend.save(); assert_eq!( - get_state_json(change_bytes).unwrap(), + get_state_json(change_bytes, Default::default()).unwrap(), serde_json::json!({"sparrows": 15.0}) ) } @@ -146,7 +150,7 @@ mod tests { */ let change_bytes = backend.save(); assert_eq!( - get_state_json(change_bytes).unwrap(), + get_state_json(change_bytes, Default::default()).unwrap(), serde_json::json!({ "birds": { "wrens": 3.0, diff --git a/rust/automerge-cli/src/main.rs b/rust/automerge-cli/src/main.rs index b16d9449..48513a92 100644 --- a/rust/automerge-cli/src/main.rs +++ b/rust/automerge-cli/src/main.rs @@ -1,10 +1,12 @@ use std::{fs::File, path::PathBuf, str::FromStr}; use anyhow::{anyhow, Result}; -use clap::Parser; +use clap::{ + builder::{BoolishValueParser, TypedValueParser, ValueParserFactory}, + Parser, +}; use is_terminal::IsTerminal; -//mod change; mod color_json; mod examine; mod export; @@ -24,6 +26,44 @@ enum ExportFormat { Toml, } +#[derive(Copy, Clone, Default, Debug)] +pub(crate) struct SkipVerifyFlag(bool); + +impl SkipVerifyFlag { + fn load(&self, buf: &[u8]) -> Result { + if self.0 { + automerge::Automerge::load(buf) + } else { + 
automerge::Automerge::load_unverified_heads(buf) + } + } +} + +#[derive(Clone)] +struct SkipVerifyFlagParser; +impl ValueParserFactory for SkipVerifyFlag { + type Parser = SkipVerifyFlagParser; + + fn value_parser() -> Self::Parser { + SkipVerifyFlagParser + } +} + +impl TypedValueParser for SkipVerifyFlagParser { + type Value = SkipVerifyFlag; + + fn parse_ref( + &self, + cmd: &clap::Command, + arg: Option<&clap::Arg>, + value: &std::ffi::OsStr, + ) -> Result { + BoolishValueParser::new() + .parse_ref(cmd, arg, value) + .map(SkipVerifyFlag) + } +} + impl FromStr for ExportFormat { type Err = anyhow::Error; @@ -50,6 +90,10 @@ enum Command { /// The file to write to. If omitted assumes stdout #[clap(long("out"), short('o'))] output_file: Option, + + /// Whether to verify the head hashes of a compressed document + #[clap(long, action = clap::ArgAction::SetFalse)] + skip_verifying_heads: SkipVerifyFlag, }, Import { @@ -64,45 +108,11 @@ enum Command { changes_file: Option, }, - /// Read an automerge document from a file or stdin, perform a change on it and write a new - /// document to stdout or the specified output file. - Change { - /// The change script to perform. Change scripts have the form []. - /// The possible commands are 'set', 'insert', 'delete', and 'increment'. - /// - /// Paths look like this: $["mapkey"][0]. They always lways start with a '$', then each - /// subsequent segment of the path is either a string in double quotes to index a key in a - /// map, or an integer index to address an array element. 
- /// - /// Examples - /// - /// ## set - /// - /// > automerge change 'set $["someobject"] {"items": []}' somefile - /// - /// ## insert - /// - /// > automerge change 'insert $["someobject"]["items"][0] "item1"' somefile - /// - /// ## increment - /// - /// > automerge change 'increment $["mycounter"]' - /// - /// ## delete - /// - /// > automerge change 'delete $["someobject"]["items"]' somefile - script: String, - - /// The file to change, if omitted will assume stdin - input_file: Option, - - /// Path to write Automerge changes to, if omitted will write to stdout - #[clap(long("out"), short('o'))] - output_file: Option, - }, - /// Read an automerge document and print a JSON representation of the changes in it to stdout - Examine { input_file: Option }, + Examine { + input_file: Option, + skip_verifying_heads: SkipVerifyFlag, + }, /// Read one or more automerge documents and output a merged, compacted version of them Merge { @@ -149,6 +159,7 @@ fn main() -> Result<()> { changes_file, format, output_file, + skip_verifying_heads, } => { let output: Box = if let Some(output_file) = output_file { Box::new(File::create(&output_file)?) @@ -158,7 +169,12 @@ fn main() -> Result<()> { match format { ExportFormat::Json => { let mut in_buffer = open_file_or_stdin(changes_file)?; - export::export_json(&mut in_buffer, output, std::io::stdout().is_terminal()) + export::export_json( + &mut in_buffer, + output, + skip_verifying_heads, + std::io::stdout().is_terminal(), + ) } ExportFormat::Toml => unimplemented!(), } @@ -175,23 +191,18 @@ fn main() -> Result<()> { } ExportFormat::Toml => unimplemented!(), }, - Command::Change { .. 
- //input_file, - //output_file, - //script, + Command::Examine { + input_file, + skip_verifying_heads, } => { - unimplemented!() -/* - let in_buffer = open_file_or_stdin(input_file)?; - let mut out_buffer = create_file_or_stdout(output_file)?; - change::change(in_buffer, &mut out_buffer, script.as_str()) - .map_err(|e| anyhow::format_err!("Unable to make changes: {:?}", e)) -*/ - } - Command::Examine { input_file } => { let in_buffer = open_file_or_stdin(input_file)?; let out_buffer = std::io::stdout(); - match examine::examine(in_buffer, out_buffer, std::io::stdout().is_terminal()) { + match examine::examine( + in_buffer, + out_buffer, + skip_verifying_heads, + std::io::stdout().is_terminal(), + ) { Ok(()) => {} Err(e) => { eprintln!("Error: {:?}", e); From d678280b57a7b03c104c7b8a4ed74930885fd96b Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 19 Dec 2022 11:33:12 +0000 Subject: [PATCH 670/730] automerge-cli: Add an examine-sync command This is useful when receiving sync messages that behave in unexptected ways --- rust/automerge-cli/src/examine_sync.rs | 38 ++++++++++++++++++++++++++ rust/automerge-cli/src/main.rs | 16 +++++++++++ 2 files changed, 54 insertions(+) create mode 100644 rust/automerge-cli/src/examine_sync.rs diff --git a/rust/automerge-cli/src/examine_sync.rs b/rust/automerge-cli/src/examine_sync.rs new file mode 100644 index 00000000..ad6699d4 --- /dev/null +++ b/rust/automerge-cli/src/examine_sync.rs @@ -0,0 +1,38 @@ +use automerge::sync::ReadMessageError; + +use crate::color_json::print_colored_json; + +#[derive(Debug, thiserror::Error)] +pub enum ExamineSyncError { + #[error("Error reading message: {0}")] + ReadMessage(#[source] std::io::Error), + + #[error("error writing message: {0}")] + WriteMessage(#[source] std::io::Error), + + #[error("error writing json to output: {0}")] + WriteJson(#[source] serde_json::Error), + + #[error("Error parsing message: {0}")] + ParseMessage(#[from] ReadMessageError), +} + +pub(crate) fn examine_sync( + 
mut input: Box, + output: W, + is_tty: bool, +) -> Result<(), ExamineSyncError> { + let mut buf: Vec = Vec::new(); + input + .read_to_end(&mut buf) + .map_err(ExamineSyncError::ReadMessage)?; + + let message = automerge::sync::Message::decode(&buf)?; + let json = serde_json::to_value(&message).unwrap(); + if is_tty { + print_colored_json(&json).map_err(ExamineSyncError::WriteMessage)?; + } else { + serde_json::to_writer(output, &json).map_err(ExamineSyncError::WriteJson)?; + } + Ok(()) +} diff --git a/rust/automerge-cli/src/main.rs b/rust/automerge-cli/src/main.rs index 48513a92..b0b456c8 100644 --- a/rust/automerge-cli/src/main.rs +++ b/rust/automerge-cli/src/main.rs @@ -9,6 +9,7 @@ use is_terminal::IsTerminal; mod color_json; mod examine; +mod examine_sync; mod export; mod import; mod merge; @@ -114,6 +115,9 @@ enum Command { skip_verifying_heads: SkipVerifyFlag, }, + /// Read an automerge sync messaage and print a JSON representation of it + ExamineSync { input_file: Option }, + /// Read one or more automerge documents and output a merged, compacted version of them Merge { /// The file to write to. If omitted assumes stdout @@ -210,6 +214,18 @@ fn main() -> Result<()> { } Ok(()) } + Command::ExamineSync { input_file } => { + let in_buffer = open_file_or_stdin(input_file)?; + let out_buffer = std::io::stdout(); + match examine_sync::examine_sync(in_buffer, out_buffer, std::io::stdout().is_terminal()) + { + Ok(()) => {} + Err(e) => { + eprintln!("Error: {:?}", e); + } + } + Ok(()) + } Command::Merge { input, output_file } => { let out_buffer = create_file_or_stdout(output_file)?; match merge::merge(input.into(), out_buffer) { From 4de0756bb482bf214fd5e8ac80302ada4b0d9fe0 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sun, 18 Dec 2022 20:21:26 +0000 Subject: [PATCH 671/730] Correctly handle ops on optree node boundaries The `SeekOp` query can produce incorrect results when the optree it is searching only has visible ops on the internal nodes. 
Add some tests to demonstrate the issue as well as a fix. --- rust/automerge/src/query/seek_op.rs | 119 +++++++++++++++++++++++++++- rust/automerge/tests/test.rs | 39 ++++++++- 2 files changed, 155 insertions(+), 3 deletions(-) diff --git a/rust/automerge/src/query/seek_op.rs b/rust/automerge/src/query/seek_op.rs index 7ca3e9d4..4d955f96 100644 --- a/rust/automerge/src/query/seek_op.rs +++ b/rust/automerge/src/query/seek_op.rs @@ -76,8 +76,19 @@ impl<'a> TreeQuery<'a> for SeekOp<'a> { if self.pos + child.len() >= start { // skip empty nodes if child.index.visible_len(ListEncoding::List) == 0 { - self.pos += child.len(); - QueryResult::Next + let child_contains_key = + child.elements.iter().any(|e| ops[*e].key == self.op.key); + if !child_contains_key { + // If we are in a node which has no visible ops, but none of the + // elements of the node match the key of the op, then we must have + // finished processing and so we can just return. + // See https://github.com/automerge/automerge-rs/pull/480 + QueryResult::Finish + } else { + // Otherwise, we need to proceed to the next node + self.pos += child.len(); + QueryResult::Next + } } else { QueryResult::Descend } @@ -148,3 +159,107 @@ impl<'a> TreeQuery<'a> for SeekOp<'a> { } } } + +#[cfg(test)] +mod tests { + use crate::{ + op_set::OpSet, + op_tree::B, + query::SeekOp, + types::{Key, ObjId, Op, OpId}, + ActorId, ScalarValue, + }; + + #[test] + fn seek_on_page_boundary() { + // Create an optree in which the only visible ops are on the boundaries of the nodes, + // i.e. the visible elements are in the internal nodes. Like so + // + // .----------------------. + // | id | key | succ | + // | B | "a" | | + // | 2B | "b" | | + // '----------------------' + // / | \ + // ;------------------------. | `------------------------------------. + // | id | op | succ | | | id | op | succ | + // | 0 |set "a" | 1 | | | 2B + 1 |set "c" | 2B + 2 | + // | 1 |set "a" | 2 | | | 2B + 2 |set "c" | 2B + 3 | + // | 2 |set "a" | 3 | | ... 
+ // ... | | 3B |set "c" | | + // | B - 1 |set "a" | B | | '------------------------------------' + // '--------'--------'------' | + // | + // .-----------------------------. + // | id | key | succ | + // | B + 1 | "b" | B + 2 | + // | B + 2 | "b" | B + 3 | + // .... + // | B + (B - 1 | "b" | 2B | + // '-----------------------------' + // + // The important point here is that the leaf nodes contain no visible ops for keys "a" and + // "b". + let mut set = OpSet::new(); + let actor = set.m.actors.cache(ActorId::random()); + let a = set.m.props.cache("a".to_string()); + let b = set.m.props.cache("b".to_string()); + let c = set.m.props.cache("c".to_string()); + + let mut counter = 0; + // For each key insert `B` operations with the `pred` and `succ` setup such that the final + // operation for each key is the only visible op. + for key in [a, b, c] { + for iteration in 0..B { + // Generate a value to insert + let keystr = set.m.props.get(key); + let val = keystr.repeat(iteration + 1); + + // Only the last op is visible + let pred = if iteration == 0 { + Default::default() + } else { + set.m + .sorted_opids(vec![OpId::new(counter - 1, actor)].into_iter()) + }; + + // only the last op is visible + let succ = if iteration == B - 1 { + Default::default() + } else { + set.m + .sorted_opids(vec![OpId::new(counter, actor)].into_iter()) + }; + + let op = Op { + id: OpId::new(counter, actor), + action: crate::OpType::Put(ScalarValue::Str(val.into())), + key: Key::Map(key), + succ, + pred, + insert: false, + }; + set.insert(counter as usize, &ObjId::root(), op); + counter += 1; + } + } + + // Now try and create an op which inserts at the next index of 'a' + let new_op = Op { + id: OpId::new(counter, actor), + action: crate::OpType::Put(ScalarValue::Str("test".into())), + key: Key::Map(a), + succ: Default::default(), + pred: set + .m + .sorted_opids(std::iter::once(OpId::new(B as u64 - 1, actor))), + insert: false, + }; + + let q = SeekOp::new(&new_op); + let q = 
set.search(&ObjId::root(), q); + + // we've inserted `B - 1` elements for "a", so the index should be `B` + assert_eq!(q.pos, B); + } +} diff --git a/rust/automerge/tests/test.rs b/rust/automerge/tests/test.rs index c1b653d3..069a664d 100644 --- a/rust/automerge/tests/test.rs +++ b/rust/automerge/tests/test.rs @@ -5,7 +5,7 @@ use automerge::{ }; // set up logging for all the tests -use test_log::test; +//use test_log::test; #[allow(unused_imports)] use automerge_test::{ @@ -1411,3 +1411,40 @@ fn invalid_deflate_stream() { assert!(Automerge::load(&bytes).is_err()); } + +#[test] +fn bad_change_on_optree_node_boundary() { + let mut doc = Automerge::new(); + doc.transact::<_, _, AutomergeError>(|d| { + d.put(ROOT, "a", "z")?; + d.put(ROOT, "b", 0)?; + d.put(ROOT, "c", 0)?; + Ok(()) + }) + .unwrap(); + let iterations = 15_u64; + for i in 0_u64..iterations { + doc.transact::<_, _, AutomergeError>(|d| { + let s = "a".repeat(i as usize); + d.put(ROOT, "a", s)?; + d.put(ROOT, "b", i + 1)?; + d.put(ROOT, "c", i + 1)?; + Ok(()) + }) + .unwrap(); + } + let mut doc2 = Automerge::load(doc.save().as_slice()).unwrap(); + doc.transact::<_, _, AutomergeError>(|d| { + let i = iterations + 2; + let s = "a".repeat(i as usize); + d.put(ROOT, "a", s)?; + d.put(ROOT, "b", i)?; + d.put(ROOT, "c", i)?; + Ok(()) + }) + .unwrap(); + let change = doc.get_changes(&doc2.get_heads()).unwrap(); + doc2.apply_changes(change.into_iter().cloned().collect::>()) + .unwrap(); + Automerge::load(doc2.save().as_slice()).unwrap(); +} From 8a645bb1932a504cfd76dc940a8cd0e5b1ad4de2 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 22 Dec 2022 09:59:16 +0000 Subject: [PATCH 672/730] js: Enable typescript for the JS tests The tsconfig.json was setup to not include the JS tests. Update the config to include the tests when checking typescript and fix all the consequent errors. None of this is semantically meaningful _except_ for a few incorrect usages of the API which were leading to flaky tests. 
Hooray for types! --- javascript/src/index.ts | 10 +- javascript/src/low_level.ts | 5 +- javascript/test/basic_test.ts | 48 +++---- javascript/test/columnar_test.ts | 97 -------------- javascript/test/extra_api_tests.ts | 4 +- javascript/test/helpers.ts | 12 +- javascript/test/legacy_tests.ts | 152 ++++++++++++---------- javascript/test/sync_test.ts | 181 +++++++++++++------------- javascript/test/text_test.ts | 201 +---------------------------- javascript/tsconfig.json | 2 +- rust/automerge-wasm/index.d.ts | 9 +- 11 files changed, 231 insertions(+), 490 deletions(-) delete mode 100644 javascript/test/columnar_test.ts diff --git a/javascript/src/index.ts b/javascript/src/index.ts index 581f50d1..df71c648 100644 --- a/javascript/src/index.ts +++ b/javascript/src/index.ts @@ -10,7 +10,7 @@ export {AutomergeValue, Counter, Int, Uint, Float64, ScalarValue} from "./types" import {type API, type Patch} from "@automerge/automerge-wasm"; export { type Patch, PutPatch, DelPatch, SplicePatch, IncPatch, SyncMessage, } from "@automerge/automerge-wasm" -import {ApiHandler, UseApi} from "./low_level" +import {ApiHandler, ChangeToEncode, UseApi} from "./low_level" import {Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue} from "@automerge/automerge-wasm" import {JsSyncState as SyncState, SyncMessage, DecodedSyncMessage} from "@automerge/automerge-wasm" @@ -56,7 +56,7 @@ export type ChangeFn = (doc: T) => void * @param before - The document before the change was made * @param after - The document after the change was made */ -export type PatchCallback = (patch: Patch, before: Doc, after: Doc) => void +export type PatchCallback = (patches: Array, before: Doc, after: Doc) => void /** @hidden **/ export interface State { @@ -224,8 +224,8 @@ export function free(doc: Doc) { * }) * ``` */ -export function from>(initialState: T | Doc, actor?: ActorId): Doc { - return change(init(actor), (d) => Object.assign(d, initialState)) +export function 
from>(initialState: T | Doc, _opts?: ActorId | InitOptions): Doc { + return change(init(_opts), (d) => Object.assign(d, initialState)) } /** @@ -779,7 +779,7 @@ export function initSyncState(): SyncState { } /** @hidden */ -export function encodeChange(change: DecodedChange): Change { +export function encodeChange(change: ChangeToEncode): Change { return ApiHandler.encodeChange(change) } diff --git a/javascript/src/low_level.ts b/javascript/src/low_level.ts index 9a5480b3..6eabfa52 100644 --- a/javascript/src/low_level.ts +++ b/javascript/src/low_level.ts @@ -1,5 +1,6 @@ -import { Automerge, Change, DecodedChange, Actor, SyncState, SyncMessage, JsSyncState, DecodedSyncMessage } from "@automerge/automerge-wasm" +import { Automerge, Change, DecodedChange, Actor, SyncState, SyncMessage, JsSyncState, DecodedSyncMessage, ChangeToEncode } from "@automerge/automerge-wasm" +export { ChangeToEncode } from "@automerge/automerge-wasm" import { API } from "@automerge/automerge-wasm" export function UseApi(api: API) { @@ -12,7 +13,7 @@ export function UseApi(api: API) { export const ApiHandler : API = { create(actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, load(data: Uint8Array, actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called (load)") }, - encodeChange(change: DecodedChange): Change { throw new RangeError("Automerge.use() not called (encodeChange)") }, + encodeChange(change: ChangeToEncode): Change { throw new RangeError("Automerge.use() not called (encodeChange)") }, decodeChange(change: Change): DecodedChange { throw new RangeError("Automerge.use() not called (decodeChange)") }, initSyncState(): SyncState { throw new RangeError("Automerge.use() not called (initSyncState)") }, encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { throw new RangeError("Automerge.use() not called (encodeSyncMessage)") }, diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts index 437af233..e50e8782 
100644 --- a/javascript/test/basic_test.ts +++ b/javascript/test/basic_test.ts @@ -1,5 +1,4 @@ import * as assert from 'assert' -import {Counter} from 'automerge' import * as Automerge from '../src' import * as WASM from "@automerge/automerge-wasm" @@ -15,7 +14,7 @@ describe('Automerge', () => { }) it('should be able to make a view with specifc heads', () => { - let doc1 = Automerge.init() + let doc1 = Automerge.init() let doc2 = Automerge.change(doc1, (d) => d.value = 1) let heads2 = Automerge.getHeads(doc2) let doc3 = Automerge.change(doc2, (d) => d.value = 2) @@ -38,7 +37,7 @@ describe('Automerge', () => { }) it('handle basic set and read on root object', () => { - let doc1 = Automerge.init() + let doc1 = Automerge.init() let doc2 = Automerge.change(doc1, (d) => { d.hello = "world" d.big = "little" @@ -62,8 +61,8 @@ describe('Automerge', () => { }) it('it should recursively freeze the document if requested', () => { - let doc1 = Automerge.init({ freeze: true } ) - let doc2 = Automerge.init() + let doc1 = Automerge.init({ freeze: true } ) + let doc2 = Automerge.init() assert(Object.isFrozen(doc1)) assert(!Object.isFrozen(doc2)) @@ -82,7 +81,7 @@ describe('Automerge', () => { assert(Object.isFrozen(doc3.sub)) // works on load - let doc4 = Automerge.load(Automerge.save(doc3), { freeze: true }) + let doc4 = Automerge.load(Automerge.save(doc3), { freeze: true }) assert(Object.isFrozen(doc4)) assert(Object.isFrozen(doc4.sub)) @@ -97,7 +96,7 @@ describe('Automerge', () => { }) it('handle basic sets over many changes', () => { - let doc1 = Automerge.init() + let doc1 = Automerge.init() let timestamp = new Date(); let counter = new Automerge.Counter(100); let bytes = new Uint8Array([10,11,12]); @@ -135,7 +134,7 @@ describe('Automerge', () => { }) it('handle overwrites to values', () => { - let doc1 = Automerge.init() + let doc1 = Automerge.init() let doc2 = Automerge.change(doc1, (d) => { d.hello = "world1" }) @@ -152,7 +151,7 @@ describe('Automerge', () => { }) 
it('handle set with object value', () => { - let doc1 = Automerge.init() + let doc1 = Automerge.init() let doc2 = Automerge.change(doc1, (d) => { d.subobj = { hello: "world", subsubobj: { zip: "zop" } } }) @@ -160,13 +159,13 @@ describe('Automerge', () => { }) it('handle simple list creation', () => { - let doc1 = Automerge.init() + let doc1 = Automerge.init() let doc2 = Automerge.change(doc1, (d) => d.list = []) assert.deepEqual(doc2, { list: []}) }) it('handle simple lists', () => { - let doc1 = Automerge.init() + let doc1 = Automerge.init() let doc2 = Automerge.change(doc1, (d) => { d.list = [ 1, 2, 3 ] }) @@ -188,7 +187,7 @@ describe('Automerge', () => { assert.deepEqual(doc3, { list: [1,"a",3] }) }) it('handle simple lists', () => { - let doc1 = Automerge.init() + let doc1 = Automerge.init() let doc2 = Automerge.change(doc1, (d) => { d.list = [ 1, 2, 3 ] }) @@ -198,7 +197,7 @@ describe('Automerge', () => { assert.deepEqual(docB2, doc2); }) it('handle text', () => { - let doc1 = Automerge.init() + let doc1 = Automerge.init() let doc2 = Automerge.change(doc1, (d) => { d.list = "hello" Automerge.splice(d, "list", 2, 0, "Z") @@ -212,7 +211,7 @@ describe('Automerge', () => { it('handle non-text strings', () => { let doc1 = WASM.create(); doc1.put("_root", "text", "hello world"); - let doc2 = Automerge.load(doc1.save()) + let doc2 = Automerge.load(doc1.save()) assert.throws(() => { Automerge.change(doc2, (d) => { Automerge.splice(d, "text", 1, 0, "Z") }) }, /Cannot splice/) @@ -238,6 +237,7 @@ describe('Automerge', () => { }) assert.deepEqual(doc5, { list: [2,1,9,10,3,11,12] }); let doc6 = Automerge.change(doc5, (d) => { + // @ts-ignore d.list.insertAt(3,100,101) }) assert.deepEqual(doc6, { list: [2,1,9,100,101,10,3,11,12] }); @@ -261,7 +261,7 @@ describe('Automerge', () => { doc = Automerge.change(doc, d => { d.key = "value" }) - let _ = Automerge.save(doc) + Automerge.save(doc) let headsBefore = Automerge.getHeads(doc) headsBefore.sort() doc = 
Automerge.emptyChange(doc, "empty change") @@ -278,24 +278,24 @@ describe('Automerge', () => { numbers: [20,3,100], repeats: [20,20,3,3,3,3,100,100] }) - let r1 = [] + let r1: Array = [] doc = Automerge.change(doc, (d) => { - assert.deepEqual(d.chars.concat([1,2]), ["a","b","c",1,2]) + assert.deepEqual((d.chars as any[]).concat([1,2]), ["a","b","c",1,2]) assert.deepEqual(d.chars.map((n) => n + "!"), ["a!", "b!", "c!"]) assert.deepEqual(d.numbers.map((n) => n + 10), [30, 13, 110]) assert.deepEqual(d.numbers.toString(), "20,3,100") assert.deepEqual(d.numbers.toLocaleString(), "20,3,100") - assert.deepEqual(d.numbers.forEach((n) => r1.push(n)), undefined) + assert.deepEqual(d.numbers.forEach((n: number) => r1.push(n)), undefined) assert.deepEqual(d.numbers.every((n) => n > 1), true) assert.deepEqual(d.numbers.every((n) => n > 10), false) assert.deepEqual(d.numbers.filter((n) => n > 10), [20,100]) assert.deepEqual(d.repeats.find((n) => n < 10), 3) - assert.deepEqual(d.repeats.toArray().find((n) => n < 10), 3) + assert.deepEqual(d.repeats.find((n) => n < 10), 3) assert.deepEqual(d.repeats.find((n) => n < 0), undefined) assert.deepEqual(d.repeats.findIndex((n) => n < 10), 2) assert.deepEqual(d.repeats.findIndex((n) => n < 0), -1) - assert.deepEqual(d.repeats.toArray().findIndex((n) => n < 10), 2) - assert.deepEqual(d.repeats.toArray().findIndex((n) => n < 0), -1) + assert.deepEqual(d.repeats.findIndex((n) => n < 10), 2) + assert.deepEqual(d.repeats.findIndex((n) => n < 0), -1) assert.deepEqual(d.numbers.includes(3), true) assert.deepEqual(d.numbers.includes(-3), false) assert.deepEqual(d.numbers.join("|"), "20|3|100") @@ -321,8 +321,8 @@ describe('Automerge', () => { }) it('should obtain the same conflicts, regardless of merge order', () => { - let s1 = Automerge.init() - let s2 = Automerge.init() + let s1 = Automerge.init() + let s2 = Automerge.init() s1 = Automerge.change(s1, doc => { doc.x = 1; doc.y = 2 }) s2 = Automerge.change(s2, doc => { doc.x = 3; doc.y = 4 }) 
const m1 = Automerge.merge(Automerge.clone(s1), Automerge.clone(s2)) @@ -346,7 +346,7 @@ describe('Automerge', () => { it("should return null for scalar values", () => { assert.equal(Automerge.getObjectId(s1.string), null) assert.equal(Automerge.getObjectId(s1.number), null) - assert.equal(Automerge.getObjectId(s1.null), null) + assert.equal(Automerge.getObjectId(s1.null!), null) assert.equal(Automerge.getObjectId(s1.date), null) assert.equal(Automerge.getObjectId(s1.counter), null) assert.equal(Automerge.getObjectId(s1.bytes), null) diff --git a/javascript/test/columnar_test.ts b/javascript/test/columnar_test.ts deleted file mode 100644 index ca670377..00000000 --- a/javascript/test/columnar_test.ts +++ /dev/null @@ -1,97 +0,0 @@ -import * as assert from 'assert' -import { checkEncoded } from './helpers' -import * as Automerge from '../src' -import { encodeChange, decodeChange } from '../src' - -describe('change encoding', () => { - it('should encode text edits', () => { - /* - const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: '', deps: [], ops: [ - {action: 'makeText', obj: '_root', key: 'text', insert: false, pred: []}, - {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []}, - {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', insert: false, pred: ['2@aaaa']}, - {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []}, - {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []} - ]} - */ - const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: null, deps: [], ops: [ - {action: 'makeText', obj: '_root', key: 'text', pred: []}, - {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []}, - {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', pred: ['2@aaaa']}, - {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []}, - {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: 
[]} - ]} - checkEncoded(encodeChange(change1), [ - 0x85, 0x6f, 0x4a, 0x83, // magic bytes - 0xe2, 0xbd, 0xfb, 0xf5, // checksum - 1, 94, 0, 2, 0xaa, 0xaa, // chunkType: change, length, deps, actor 'aaaa' - 1, 1, 9, 0, 0, // seq, startOp, time, message, actor list - 12, 0x01, 4, 0x02, 4, // column count, objActor, objCtr - 0x11, 8, 0x13, 7, 0x15, 8, // keyActor, keyCtr, keyStr - 0x34, 4, 0x42, 6, // insert, action - 0x56, 6, 0x57, 3, // valLen, valRaw - 0x70, 6, 0x71, 2, 0x73, 2, // predNum, predActor, predCtr - 0, 1, 4, 0, // objActor column: null, 0, 0, 0, 0 - 0, 1, 4, 1, // objCtr column: null, 1, 1, 1, 1 - 0, 2, 0x7f, 0, 0, 1, 0x7f, 0, // keyActor column: null, null, 0, null, 0 - 0, 1, 0x7c, 0, 2, 0x7e, 4, // keyCtr column: null, 0, 2, 0, 4 - 0x7f, 4, 0x74, 0x65, 0x78, 0x74, 0, 4, // keyStr column: 'text', null, null, null, null - 1, 1, 1, 2, // insert column: false, true, false, true, true - 0x7d, 4, 1, 3, 2, 1, // action column: makeText, set, del, set, set - 0x7d, 0, 0x16, 0, 2, 0x16, // valLen column: 0, 0x16, 0, 0x16, 0x16 - 0x68, 0x48, 0x69, // valRaw column: 'h', 'H', 'i' - 2, 0, 0x7f, 1, 2, 0, // predNum column: 0, 0, 1, 0, 0 - 0x7f, 0, // predActor column: 0 - 0x7f, 2 // predCtr column: 2 - ]) - const decoded = decodeChange(encodeChange(change1)) - assert.deepStrictEqual(decoded, Object.assign({hash: decoded.hash}, change1)) - }) - - // FIXME - skipping this b/c it was never implemented in the rust impl and isnt trivial -/* - it.skip('should require strict ordering of preds', () => { - const change = new Uint8Array([ - 133, 111, 74, 131, 31, 229, 112, 44, 1, 105, 1, 58, 30, 190, 100, 253, 180, 180, 66, 49, 126, - 81, 142, 10, 3, 35, 140, 189, 231, 34, 145, 57, 66, 23, 224, 149, 64, 97, 88, 140, 168, 194, - 229, 4, 244, 209, 58, 138, 67, 140, 1, 152, 236, 250, 2, 0, 1, 4, 55, 234, 66, 242, 8, 21, 11, - 52, 1, 66, 2, 86, 3, 87, 10, 112, 2, 113, 3, 115, 4, 127, 9, 99, 111, 109, 109, 111, 110, 86, - 97, 114, 1, 127, 1, 127, 166, 1, 52, 48, 57, 49, 52, 57, 
52, 53, 56, 50, 127, 2, 126, 0, 1, - 126, 139, 1, 0 - ]) - assert.throws(() => { decodeChange(change) }, /operation IDs are not in ascending order/) - }) -*/ - - describe('with trailing bytes', () => { - let change = new Uint8Array([ - 0x85, 0x6f, 0x4a, 0x83, // magic bytes - 0xb2, 0x98, 0x9e, 0xa9, // checksum - 1, 61, 0, 2, 0x12, 0x34, // chunkType: change, length, deps, actor '1234' - 1, 1, 252, 250, 220, 255, 5, // seq, startOp, time - 14, 73, 110, 105, 116, 105, 97, 108, 105, 122, 97, 116, 105, 111, 110, // message: 'Initialization' - 0, 6, // actor list, column count - 0x15, 3, 0x34, 1, 0x42, 2, // keyStr, insert, action - 0x56, 2, 0x57, 1, 0x70, 2, // valLen, valRaw, predNum - 0x7f, 1, 0x78, // keyStr: 'x' - 1, // insert: false - 0x7f, 1, // action: set - 0x7f, 19, // valLen: 1 byte of type uint - 1, // valRaw: 1 - 0x7f, 0, // predNum: 0 - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 // 10 trailing bytes - ]) - - it('should allow decoding and re-encoding', () => { - // NOTE: This calls the JavaScript encoding and decoding functions, even when the WebAssembly - // backend is loaded. Should the wasm backend export its own functions for testing? 
- checkEncoded(change, encodeChange(decodeChange(change))) - }) - - it('should be preserved in document encoding', () => { - const [doc] = Automerge.applyChanges(Automerge.init(), [change]) - const [reconstructed] = Automerge.getAllChanges(Automerge.load(Automerge.save(doc))) - checkEncoded(change, reconstructed) - }) - }) -}) diff --git a/javascript/test/extra_api_tests.ts b/javascript/test/extra_api_tests.ts index ce0438d5..c0c18177 100644 --- a/javascript/test/extra_api_tests.ts +++ b/javascript/test/extra_api_tests.ts @@ -5,8 +5,8 @@ import * as Automerge from '../src' describe('Automerge', () => { describe('basics', () => { it('should allow you to load incrementally', () => { - let doc1 = Automerge.from({ foo: "bar" }) - let doc2 = Automerge.init(); + let doc1 = Automerge.from({ foo: "bar" }) + let doc2 = Automerge.init(); doc2 = Automerge.loadIncremental(doc2, Automerge.save(doc1)) doc1 = Automerge.change(doc1, (d) => d.foo2 = "bar2") doc2 = Automerge.loadIncremental(doc2, Automerge.getBackend(doc1).saveIncremental() ) diff --git a/javascript/test/helpers.ts b/javascript/test/helpers.ts index d5292130..7799cb84 100644 --- a/javascript/test/helpers.ts +++ b/javascript/test/helpers.ts @@ -3,14 +3,18 @@ import { Encoder } from './legacy/encoding' // Assertion that succeeds if the first argument deepStrictEquals at least one of the // subsequent arguments (but we don't care which one) -function assertEqualsOneOf(actual, ...expected) { +export function assertEqualsOneOf(actual, ...expected) { assert(expected.length > 0) for (let i = 0; i < expected.length; i++) { try { assert.deepStrictEqual(actual, expected[i]) return // if we get here without an exception, that means success } catch (e) { - if (!e.name.match(/^AssertionError/) || i === expected.length - 1) throw e + if (e instanceof assert.AssertionError) { + if (!e.name.match(/^AssertionError/) || i === expected.length - 1) throw e + } else { + throw e + } } } } @@ -19,7 +23,7 @@ function 
assertEqualsOneOf(actual, ...expected) { * Asserts that the byte array maintained by `encoder` contains the same byte * sequence as the array `bytes`. */ -function checkEncoded(encoder, bytes, detail) { +export function checkEncoded(encoder, bytes, detail?) { const encoded = (encoder instanceof Encoder) ? encoder.buffer : encoder const expected = new Uint8Array(bytes) const message = (detail ? `${detail}: ` : '') + `${encoded} expected to equal ${expected}` @@ -28,5 +32,3 @@ function checkEncoded(encoder, bytes, detail) { assert(encoded[i] === expected[i], message) } } - -module.exports = { assertEqualsOneOf, checkEncoded } diff --git a/javascript/test/legacy_tests.ts b/javascript/test/legacy_tests.ts index 2320f909..c5c88275 100644 --- a/javascript/test/legacy_tests.ts +++ b/javascript/test/legacy_tests.ts @@ -50,30 +50,35 @@ describe('Automerge', () => { }) it('accepts an array as initial state, but converts it to an object', () => { + // @ts-ignore const doc = Automerge.from(['a', 'b', 'c']) assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' }) }) it('accepts strings as initial values, but treats them as an array of characters', () => { + // @ts-ignore const doc = Automerge.from('abc') assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' }) }) it('ignores numbers provided as initial values', () => { + // @ts-ignore const doc = Automerge.from(123) assert.deepStrictEqual(doc, {}) }) it('ignores booleans provided as initial values', () => { + // @ts-ignore const doc1 = Automerge.from(false) assert.deepStrictEqual(doc1, {}) + // @ts-ignore const doc2 = Automerge.from(true) assert.deepStrictEqual(doc2, {}) }) }) describe('sequential use', () => { - let s1, s2 + let s1: Automerge.Doc, s2: Automerge.Doc beforeEach(() => { s1 = Automerge.init("aabbcc") }) @@ -89,12 +94,12 @@ describe('Automerge', () => { s2 = Automerge.change(s1, doc => doc.foo = 'bar') const change2 = Automerge.getLastLocalChange(s2) assert.strictEqual(change1, undefined) - const change = 
decodeChange(change2) + const change = Automerge.decodeChange(change2!) assert.deepStrictEqual(change, { actor: change.actor, deps: [], seq: 1, startOp: 1, - hash: change.hash, message: '', time: change.time, + hash: change.hash, message: null, time: change.time, ops: [ - {obj: '_root', key: 'foo', action: 'makeText', insert: false, pred: []}, + {obj: '_root', key: 'foo', action: 'makeText', pred: []}, {action: 'set', elemId: '_head', insert: true, obj: '1@aabbcc', pred: [], value: 'b' }, {action: 'set', elemId: '2@aabbcc', insert: true, obj: '1@aabbcc', pred: [], value: 'a' }, {action: 'set', elemId: '3@aabbcc', insert: true, obj: '1@aabbcc', pred: [], value: 'r' }] @@ -127,12 +132,14 @@ describe('Automerge', () => { s1 = Automerge.init({freeze: true}) s2 = Automerge.change(s1, doc => doc.foo = 'bar') try { + // @ts-ignore s2.foo = 'lemon' } catch (e) { } assert.strictEqual(s2.foo, 'bar') let deleted = false try { + // @ts-ignore deleted = delete s2.foo } catch (e) { } assert.strictEqual(s2.foo, 'bar') @@ -140,6 +147,7 @@ describe('Automerge', () => { Automerge.change(s2, () => { try { + // @ts-ignore s2.foo = 'lemon' } catch (e) { } assert.strictEqual(s2.foo, 'bar') @@ -187,7 +195,7 @@ describe('Automerge', () => { s1 = Automerge.change(s1, doc => doc.field = 123) s2 = Automerge.change(s2, doc => doc.field = 321) s1 = Automerge.merge(s1, s2) - assert.strictEqual(Object.keys(Automerge.getConflicts(s1, 'field')).length, 2) + assert.strictEqual(Object.keys(Automerge.getConflicts(s1, 'field')!).length, 2) const resolved = Automerge.change(s1, doc => doc.field = s1.field) assert.notStrictEqual(resolved, s1) assert.deepStrictEqual(resolved, {field: s1.field}) @@ -218,7 +226,9 @@ describe('Automerge', () => { it('should sanity-check arguments', () => { s1 = Automerge.change(s1, doc => doc.nested = {}) + // @ts-ignore assert.throws(() => { Automerge.change({}, doc => doc.foo = 'bar') }, /must be the document root/) + // @ts-ignore assert.throws(() => { 
Automerge.change(s1.nested, doc => doc.foo = 'bar') }, /must be the document root/) }) @@ -226,6 +236,7 @@ describe('Automerge', () => { assert.throws(() => { Automerge.change(s1, doc1 => { Automerge.change(doc1, doc2 => { + // @ts-ignore doc2.foo = 'bar' }) }) @@ -285,32 +296,31 @@ describe('Automerge', () => { }) it('should call patchCallback if supplied', () => { - const callbacks = [], actor = Automerge.getActorId(s1) + const callbacks: Array<{patches: Array, before: Automerge.Doc, after: Automerge.Doc}> = [] const s2 = Automerge.change(s1, { - patchCallback: (patch, before, after) => callbacks.push({patch, before, after}) + patchCallback: (patches, before, after) => callbacks.push({patches, before, after}) }, doc => { doc.birds = ['Goldfinch'] }) assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patch[0], { action: "put", path: ["birds"], value: [] }) - assert.deepStrictEqual(callbacks[0].patch[1], { action: "insert", path: ["birds",0], values: [""] }) - assert.deepStrictEqual(callbacks[0].patch[2], { action: "splice", path: ["birds",0, 0], value: "Goldfinch" }) + assert.deepStrictEqual(callbacks[0].patches[0], { action: "put", path: ["birds"], value: [] }) + assert.deepStrictEqual(callbacks[0].patches[1], { action: "insert", path: ["birds",0], values: [""] }) + assert.deepStrictEqual(callbacks[0].patches[2], { action: "splice", path: ["birds",0, 0], value: "Goldfinch" }) assert.strictEqual(callbacks[0].before, s1) assert.strictEqual(callbacks[0].after, s2) }) it('should call a patchCallback set up on document initialisation', () => { - const callbacks = [] + const callbacks: Array<{patches: Array, before: Automerge.Doc, after: Automerge.Doc}> = [] s1 = Automerge.init({ - patchCallback: (patch, before, after) => callbacks.push({patch, before, after }) + patchCallback: (patches, before, after) => callbacks.push({patches, before, after }) }) const s2 = Automerge.change(s1, doc => doc.bird = 'Goldfinch') - const actor = 
Automerge.getActorId(s1) assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patch[0], { + assert.deepStrictEqual(callbacks[0].patches[0], { action: "put", path: ["bird"], value: "" }) - assert.deepStrictEqual(callbacks[0].patch[1], { + assert.deepStrictEqual(callbacks[0].patches[1], { action: "splice", path: ["bird", 0], value: "Goldfinch" }) assert.strictEqual(callbacks[0].before, s1) @@ -417,7 +427,7 @@ describe('Automerge', () => { it('should assign an objectId to nested maps', () => { s1 = Automerge.change(s1, doc => { doc.nested = {} }) let id = Automerge.getObjectId(s1.nested) - assert.strictEqual(OPID_PATTERN.test(Automerge.getObjectId(s1.nested)), true) + assert.strictEqual(OPID_PATTERN.test(Automerge.getObjectId(s1.nested)!), true) assert.notEqual(Automerge.getObjectId(s1.nested), '_root') }) @@ -472,7 +482,7 @@ describe('Automerge', () => { s1 = Automerge.change(s1, 'change 1', doc => { doc.myPet = {species: 'dog', legs: 4, breed: 'dachshund'} }) - s2 = Automerge.change(s1, 'change 2', doc => { + let s2 = Automerge.change(s1, 'change 2', doc => { doc.myPet = {species: 'koi', variety: '紅白', colors: {red: true, white: true, black: false}} }) assert.deepStrictEqual(s1.myPet, { @@ -483,6 +493,7 @@ describe('Automerge', () => { species: 'koi', variety: '紅白', colors: {red: true, white: true, black: false} }) + // @ts-ignore assert.strictEqual(s2.myPet.breed, undefined) assert.strictEqual(s2.myPet.variety, '紅白') }) @@ -743,15 +754,18 @@ describe('Automerge', () => { }) it('should allow adding and removing list elements in the same change callback', () => { - s1 = Automerge.change(Automerge.init(), doc => doc.noodles = []) + let s1 = Automerge.change(Automerge.init<{noodles: Array}>(), doc => doc.noodles = []) s1 = Automerge.change(s1, doc => { doc.noodles.push('udon') + // @ts-ignore doc.noodles.deleteAt(0) }) assert.deepStrictEqual(s1, {noodles: []}) // do the add-remove cycle twice, test for #151 
(https://github.com/automerge/automerge/issues/151) s1 = Automerge.change(s1, doc => { + // @ts-ignore doc.noodles.push('soba') + // @ts-ignore doc.noodles.deleteAt(0) }) assert.deepStrictEqual(s1, {noodles: []}) @@ -783,7 +797,7 @@ describe('Automerge', () => { describe('counters', () => { // counter it('should allow deleting counters from maps', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.birds = {wrens: new Automerge.Counter(1)}) + const s1 = Automerge.change(Automerge.init(), doc => doc.birds = {wrens: new Automerge.Counter(1)}) const s2 = Automerge.change(s1, doc => doc.birds.wrens.increment(2)) const s3 = Automerge.change(s2, doc => delete doc.birds.wrens) assert.deepStrictEqual(s2, {birds: {wrens: new Automerge.Counter(3)}}) @@ -803,12 +817,12 @@ describe('Automerge', () => { }) describe('concurrent use', () => { - let s1, s2, s3, s4 + let s1: Automerge.Doc, s2: Automerge.Doc, s3: Automerge.Doc, s4: Automerge.Doc beforeEach(() => { - s1 = Automerge.init() - s2 = Automerge.init() - s3 = Automerge.init() - s4 = Automerge.init() + s1 = Automerge.init() + s2 = Automerge.init() + s3 = Automerge.init() + s4 = Automerge.init() }) it('should merge concurrent updates of different properties', () => { @@ -927,7 +941,7 @@ describe('Automerge', () => { } else { assert.deepStrictEqual(s3.list, [{map2: true, key: 2}]) } - assert.deepStrictEqual(Automerge.getConflicts(s3.list, 0), { + assert.deepStrictEqual(Automerge.getConflicts(s3.list, 0), { [`8@${Automerge.getActorId(s1)}`]: {map1: true, key: 1}, [`8@${Automerge.getActorId(s2)}`]: {map2: true, key: 2} }) @@ -1130,22 +1144,22 @@ describe('Automerge', () => { }) it('should reconstitute complex datatypes', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.todos = [{title: 'water plants', done: false}]) + let s1 = Automerge.change(Automerge.init(), doc => doc.todos = [{title: 'water plants', done: false}]) let s2 = Automerge.load(Automerge.save(s1)) assert.deepStrictEqual(s2, {todos: 
[{title: 'water plants', done: false}]}) }) it('should save and load maps with @ symbols in the keys', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc["123@4567"] = "hello") + let s1 = Automerge.change(Automerge.init(), doc => doc["123@4567"] = "hello") let s2 = Automerge.load(Automerge.save(s1)) assert.deepStrictEqual(s2, { "123@4567": "hello" }) }) it('should reconstitute conflicts', () => { - let s1 = Automerge.change(Automerge.init('111111'), doc => doc.x = 3) - let s2 = Automerge.change(Automerge.init('222222'), doc => doc.x = 5) + let s1 = Automerge.change(Automerge.init('111111'), doc => doc.x = 3) + let s2 = Automerge.change(Automerge.init('222222'), doc => doc.x = 5) s1 = Automerge.merge(s1, s2) - let s3 = Automerge.load(Automerge.save(s1)) + let s3 = Automerge.load(Automerge.save(s1)) assert.strictEqual(s1.x, 5) assert.strictEqual(s3.x, 5) assert.deepStrictEqual(Automerge.getConflicts(s1, 'x'), {'1@111111': 3, '1@222222': 5}) @@ -1153,26 +1167,26 @@ describe('Automerge', () => { }) it('should reconstitute element ID counters', () => { - const s1 = Automerge.init('01234567') + const s1 = Automerge.init('01234567') const s2 = Automerge.change(s1, doc => doc.list = ['a']) const listId = Automerge.getObjectId(s2.list) - const changes12 = Automerge.getAllChanges(s2).map(decodeChange) + const changes12 = Automerge.getAllChanges(s2).map(Automerge.decodeChange) assert.deepStrictEqual(changes12, [{ hash: changes12[0].hash, actor: '01234567', seq: 1, startOp: 1, - time: changes12[0].time, message: '', deps: [], ops: [ - {obj: '_root', action: 'makeList', key: 'list', insert: false, pred: []}, + time: changes12[0].time, message: null, deps: [], ops: [ + {obj: '_root', action: 'makeList', key: 'list', pred: []}, {obj: listId, action: 'makeText', elemId: '_head', insert: true, pred: []}, {obj: "2@01234567", action: 'set', elemId: '_head', insert: true, value: 'a', pred: []} ] }]) const s3 = Automerge.change(s2, doc => doc.list.deleteAt(0)) - const s4 
= Automerge.load(Automerge.save(s3), '01234567') + const s4 = Automerge.load(Automerge.save(s3), '01234567') const s5 = Automerge.change(s4, doc => doc.list.push('b')) - const changes45 = Automerge.getAllChanges(s5).map(decodeChange) + const changes45 = Automerge.getAllChanges(s5).map(Automerge.decodeChange) assert.deepStrictEqual(s5, {list: ['b']}) assert.deepStrictEqual(changes45[2], { hash: changes45[2].hash, actor: '01234567', seq: 3, startOp: 5, - time: changes45[2].time, message: '', deps: [changes45[1].hash], ops: [ + time: changes45[2].time, message: null, deps: [changes45[1].hash], ops: [ {obj: listId, action: 'makeText', elemId: '_head', insert: true, pred: []}, {obj: "5@01234567", action: 'set', elemId: '_head', insert: true, value: 'b', pred: []} ] @@ -1180,7 +1194,7 @@ describe('Automerge', () => { }) it('should allow a reloaded list to be mutated', () => { - let doc = Automerge.change(Automerge.init(), doc => doc.foo = []) + let doc = Automerge.change(Automerge.init(), doc => doc.foo = []) doc = Automerge.load(Automerge.save(doc)) doc = Automerge.change(doc, 'add', doc => doc.foo.push(1)) doc = Automerge.load(Automerge.save(doc)) @@ -1191,23 +1205,23 @@ describe('Automerge', () => { // In this test, the keyCtr column is long enough for deflate compression to kick in, but the // keyStr column is short. Thus, the deflate bit gets set for keyCtr but not for keyStr. // When checking whether the columns appear in ascending order, we must ignore the deflate bit. 
- let doc = Automerge.change(Automerge.init(), doc => { + let doc = Automerge.change(Automerge.init(), doc => { doc.list = [] for (let i = 0; i < 200; i++) doc.list.insertAt(Math.floor(Math.random() * i), 'a') }) - Automerge.load(Automerge.save(doc)) - let expected = [] + Automerge.load(Automerge.save(doc)) + let expected: Array = [] for (let i = 0; i < 200; i++) expected.push('a') assert.deepStrictEqual(doc, {list: expected}) }) it.skip('should call patchCallback if supplied to load', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) + const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch')) - const callbacks = [], actor = Automerge.getActorId(s1) - const reloaded = Automerge.load(Automerge.save(s2), { - patchCallback(patch, before, after, local) { - callbacks.push({patch, before, after, local}) + const callbacks: Array = [], actor = Automerge.getActorId(s1) + const reloaded = Automerge.load(Automerge.save(s2), { + patchCallback(patch, before, after) { + callbacks.push({patch, before, after}) } }) assert.strictEqual(callbacks.length, 1) @@ -1231,7 +1245,7 @@ describe('Automerge', () => { }) it('should make past document states accessible', () => { - let s = Automerge.init() + let s = Automerge.init() s = Automerge.change(s, doc => doc.config = {background: 'blue'}) s = Automerge.change(s, doc => doc.birds = ['mallard']) s = Automerge.change(s, doc => doc.birds.unshift('oystercatcher')) @@ -1243,7 +1257,7 @@ describe('Automerge', () => { }) it('should make change messages accessible', () => { - let s = Automerge.init() + let s = Automerge.init() s = Automerge.change(s, 'Empty Bookshelf', doc => doc.books = []) s = Automerge.change(s, 'Add Orwell', doc => doc.books.push('Nineteen Eighty-Four')) s = Automerge.change(s, 'Add Huxley', doc => doc.books.push('Brave New World')) @@ -1260,32 +1274,32 @@ describe('Automerge', () => { }) 
it('should return an empty list when nothing changed', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) + let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) assert.deepStrictEqual(Automerge.getChanges(s1, s1), []) }) it('should do nothing when applying an empty list of changes', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) + let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) assert.deepStrictEqual(Automerge.applyChanges(s1, [])[0], s1) }) it('should return all changes when compared to an empty document', () => { - let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) + let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) let changes = Automerge.getChanges(Automerge.init(), s2) assert.strictEqual(changes.length, 2) }) it('should allow a document copy to be reconstructed from scratch', () => { - let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) + let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) let changes = Automerge.getAllChanges(s2) - let [s3] = Automerge.applyChanges(Automerge.init(), changes) + let [s3] = Automerge.applyChanges(Automerge.init(), changes) assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch']) }) it('should return changes since the last given version', () => { - let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) + let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) let changes1 = Automerge.getAllChanges(s1) let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) 
let changes2 = Automerge.getChanges(s1, s2) @@ -1294,29 +1308,29 @@ describe('Automerge', () => { }) it('should incrementally apply changes since the last given version', () => { - let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) + let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) let changes1 = Automerge.getAllChanges(s1) let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) let changes2 = Automerge.getChanges(s1, s2) - let [s3] = Automerge.applyChanges(Automerge.init(), changes1) + let [s3] = Automerge.applyChanges(Automerge.init(), changes1) let [s4] = Automerge.applyChanges(s3, changes2) assert.deepStrictEqual(s3.birds, ['Chaffinch']) assert.deepStrictEqual(s4.birds, ['Chaffinch', 'Bullfinch']) }) it('should handle updates to a list element', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch', 'Bullfinch']) + let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch', 'Bullfinch']) let s2 = Automerge.change(s1, doc => doc.birds[0] = 'Goldfinch') - let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2)) + let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2)) assert.deepStrictEqual(s3.birds, ['Goldfinch', 'Bullfinch']) assert.strictEqual(Automerge.getConflicts(s3.birds, 0), undefined) }) // TEXT it('should handle updates to a text object', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.text = 'ab') + let s1 = Automerge.change(Automerge.init(), doc => doc.text = 'ab') let s2 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 1, "A")) - let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2)) + let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2)) assert.deepStrictEqual([...s3.text], ['A', 'b']) }) @@ -1339,7 +1353,7 @@ describe('Automerge', () => { */ it('should 
report missing dependencies with out-of-order applyChanges', () => { - let s0 = Automerge.init() + let s0 = Automerge.init() let s1 = Automerge.change(s0, doc => doc.test = ['a']) let changes01 = Automerge.getAllChanges(s1) let s2 = Automerge.change(s1, doc => doc.test = ['b']) @@ -1349,14 +1363,14 @@ describe('Automerge', () => { let s4 = Automerge.init() let [s5] = Automerge.applyChanges(s4, changes23) let [s6] = Automerge.applyChanges(s5, changes12) - assert.deepStrictEqual(Automerge.getMissingDeps(s6), [decodeChange(changes01[0]).hash]) + assert.deepStrictEqual(Automerge.getMissingDeps(s6, []), [decodeChange(changes01[0]).hash]) }) it('should call patchCallback if supplied when applying changes', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) - const callbacks = [], actor = Automerge.getActorId(s1) + const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) + const callbacks: Array = [] const before = Automerge.init() - const [after, patch] = Automerge.applyChanges(before, Automerge.getAllChanges(s1), { + const [after] = Automerge.applyChanges(before, Automerge.getAllChanges(s1), { patchCallback(patch, before, after) { callbacks.push({patch, before, after}) } @@ -1370,9 +1384,9 @@ describe('Automerge', () => { }) it('should merge multiple applied changes into one patch', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) + const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch')) - const patches = [], actor = Automerge.getActorId(s2) + const patches: Array = [] Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2), {patchCallback: p => patches.push(... 
p)}) assert.deepStrictEqual(patches, [ @@ -1385,8 +1399,8 @@ describe('Automerge', () => { }) it('should call a patchCallback registered on doc initialisation', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.bird = 'Goldfinch') - const patches = [], actor = Automerge.getActorId(s1) + const s1 = Automerge.change(Automerge.init(), doc => doc.bird = 'Goldfinch') + const patches: Array = [] const before = Automerge.init({patchCallback: p => patches.push(... p)}) Automerge.applyChanges(before, Automerge.getAllChanges(s1)) assert.deepStrictEqual(patches, [ diff --git a/javascript/test/sync_test.ts b/javascript/test/sync_test.ts index 56b4bd87..8e03c18a 100644 --- a/javascript/test/sync_test.ts +++ b/javascript/test/sync_test.ts @@ -1,25 +1,19 @@ import * as assert from 'assert' import * as Automerge from '../src' import { BloomFilter } from './legacy/sync' -import { decodeChangeMeta } from './legacy/columnar' import { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } from "../src" -function inspect(a) { - const util = require("util"); - return util.inspect(a,false,null,true) -} - function getHeads(doc) { return Automerge.getHeads(doc) } function getMissingDeps(doc) { - return Automerge.getMissingDeps(doc) + return Automerge.getMissingDeps(doc, []) } function sync(a, b, aSyncState = initSyncState(), bSyncState = initSyncState()) { const MAX_ITER = 10 - let aToBmsg = null, bToAmsg = null, i = 0 + let aToBmsg: Automerge.SyncMessage | null = null, bToAmsg: Automerge.SyncMessage | null = null, i = 0 do { [aSyncState, aToBmsg] = Automerge.generateSyncMessage(a, aSyncState) ;[bSyncState, bToAmsg] = Automerge.generateSyncMessage(b, bSyncState) @@ -59,9 +53,11 @@ describe('Data sync protocol', () => { it('should not reply if we have no data as well', () => { let n1 = Automerge.init(), n2 = Automerge.init() let s1 = initSyncState(), s2 = initSyncState() - let m1 = null, m2 = null + let m1: Automerge.SyncMessage | null = 
null, m2: Automerge.SyncMessage | null = null ;[s1, m1] = Automerge.generateSyncMessage(n1, s1) - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) + if (m1 != null) { + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) + } ;[s2, m2] = Automerge.generateSyncMessage(n2, s2) assert.deepStrictEqual(m2, null) }) @@ -69,9 +65,9 @@ describe('Data sync protocol', () => { describe('documents with data', () => { it('repos with equal heads do not need a reply message', () => { - let n1 = Automerge.init(), n2 = Automerge.init() + let n1 = Automerge.init(), n2 = Automerge.init() let s1 = initSyncState(), s2 = initSyncState() - let m1 = null, m2 = null + let m1: Automerge.SyncMessage | null = null, m2: Automerge.SyncMessage | null = null // make two nodes with the same changes n1 = Automerge.change(n1, {time: 0}, doc => doc.n = []) @@ -84,13 +80,15 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(s1.lastSentHeads, getHeads(n1)) // heads are equal so this message should be null - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) + if (m1 != null) { + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) + } ;[s2, m2] = Automerge.generateSyncMessage(n2, s2) assert.strictEqual(m2, null) }) it('n1 should offer all changes to n2 when starting from nothing', () => { - let n1 = Automerge.init(), n2 = Automerge.init() + let n1 = Automerge.init(), n2 = Automerge.init() // make changes for n1 that n2 should request n1 = Automerge.change(n1, {time: 0}, doc => doc.n = []) @@ -102,7 +100,7 @@ describe('Data sync protocol', () => { }) it('should sync peers where one has commits the other does not', () => { - let n1 = Automerge.init(), n2 = Automerge.init() + let n1 = Automerge.init(), n2 = Automerge.init() // make changes for n1 that n2 should request n1 = Automerge.change(n1, {time: 0}, doc => doc.n = []) @@ -115,7 +113,7 @@ describe('Data sync protocol', () => { it('should work with prior sync state', () => { // create & synchronize two nodes - let n1 = 
Automerge.init(), n2 = Automerge.init() + let n1 = Automerge.init(), n2 = Automerge.init() let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) @@ -131,35 +129,35 @@ describe('Data sync protocol', () => { it('should not generate messages once synced', () => { // create & synchronize two nodes - let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') + let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') let s1 = initSyncState(), s2 = initSyncState() - let message, patch + let message for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) for (let i = 0; i < 5; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.y = i) // n1 reports what it has - ;[s1, message] = Automerge.generateSyncMessage(n1, s1, n1) + ;[s1, message] = Automerge.generateSyncMessage(n1, s1) // n2 receives that message and sends changes along with what it has - ;[n2, s2, patch] = Automerge.receiveSyncMessage(n2, s2, message) + ;[n2, s2, ] = Automerge.receiveSyncMessage(n2, s2, message) ;[s2, message] = Automerge.generateSyncMessage(n2, s2) assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) //assert.deepStrictEqual(patch, null) // no changes arrived // n1 receives the changes and replies with the changes it now knows n2 needs - ;[n1, s1, patch] = Automerge.receiveSyncMessage(n1, s1, message) + ;[n1, s1, ] = Automerge.receiveSyncMessage(n1, s1, message) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) //assert.deepStrictEqual(patch.diffs.props, {y: {'5@def456': {type: 'value', value: 4, datatype: 'int'}}}) // changes arrived // n2 applies the changes and sends confirmation ending the exchange - ;[n2, s2, patch] = Automerge.receiveSyncMessage(n2, s2, message) + ;[n2, s2, ] = Automerge.receiveSyncMessage(n2, s2, message) ;[s2, message] = Automerge.generateSyncMessage(n2, s2) 
//assert.deepStrictEqual(patch.diffs.props, {x: {'5@abc123': {type: 'value', value: 4, datatype: 'int'}}}) // changes arrived // n1 receives the message and has nothing more to say - ;[n1, s1, patch] = Automerge.receiveSyncMessage(n1, s1, message) + ;[n1, s1, ] = Automerge.receiveSyncMessage(n1, s1, message) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) assert.deepStrictEqual(message, null) //assert.deepStrictEqual(patch, null) // no changes arrived @@ -171,7 +169,7 @@ describe('Data sync protocol', () => { it('should allow simultaneous messages during synchronization', () => { // create & synchronize two nodes - let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') + let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) for (let i = 0; i < 5; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.y = i) @@ -187,10 +185,9 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0) // n1 and n2 receives that message and update sync state but make no patch - let patch1, patch2 - ;[n1, s1, patch1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) + ;[n1, s1, ] = Automerge.receiveSyncMessage(n1, s1, msg2to1) //assert.deepStrictEqual(patch1, null) // no changes arrived, so no patch - ;[n2, s2, patch2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) + ;[n2, s2, ] = Automerge.receiveSyncMessage(n2, s2, msg1to2) //assert.deepStrictEqual(patch2, null) // no changes arrived, so no patch // now both reply with their local changes the other lacks @@ -201,12 +198,12 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) // both should now apply the changes and update the frontend - ;[n1, s1, patch1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) + ;[n1, s1, ] = Automerge.receiveSyncMessage(n1, s1, msg2to1) 
assert.deepStrictEqual(getMissingDeps(n1), []) //assert.notDeepStrictEqual(patch1, null) assert.deepStrictEqual(n1, {x: 4, y: 4}) - ;[n2, s2, patch2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) + ;[n2, s2, ] = Automerge.receiveSyncMessage(n2, s2, msg1to2) assert.deepStrictEqual(getMissingDeps(n2), []) //assert.notDeepStrictEqual(patch2, null) assert.deepStrictEqual(n2, {x: 4, y: 4}) @@ -218,8 +215,8 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) // After receiving acknowledgements, their shared heads should be equal - ;[n1, s1, patch1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) - ;[n2, s2, patch2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) + ;[n1, s1, ] = Automerge.receiveSyncMessage(n1, s1, msg2to1) + ;[n2, s2, ] = Automerge.receiveSyncMessage(n2, s2, msg1to2) assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort()) assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort()) //assert.deepStrictEqual(patch1, null) @@ -238,29 +235,34 @@ describe('Data sync protocol', () => { }) it('should assume sent changes were recieved until we hear otherwise', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), message = null - let s2 + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), message: Automerge.SyncMessage | null = null n1 = Automerge.change(n1, {time: 0}, doc => doc.items = []) - ;[n1, n2, s1, s2 ] = sync(n1, n2) + ;[n1, n2, s1, ] = sync(n1, n2) n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('x')) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + if (message != null) { + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + } n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('y')) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) - 
assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + if (message != null) { + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + } n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('z')) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + if (message != null) { + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + } }) it('should work regardless of who initiates the exchange', () => { // create & synchronize two nodes - let n1 = Automerge.init(), n2 = Automerge.init() + let n1 = Automerge.init(), n2 = Automerge.init() let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) @@ -284,7 +286,7 @@ describe('Data sync protocol', () => { // lastSync is undefined. // create two peers both with divergent commits - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2] = sync(n1, n2) @@ -305,7 +307,7 @@ describe('Data sync protocol', () => { // lastSync is c9. 
// create two peers both with divergent commits - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) @@ -323,7 +325,7 @@ describe('Data sync protocol', () => { }) it('should ensure non-empty state after sync', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) @@ -338,7 +340,7 @@ describe('Data sync protocol', () => { // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. // we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') let s1 = initSyncState(), s2 = initSyncState() // n1 makes three changes, which we sync to n2 @@ -371,7 +373,7 @@ describe('Data sync protocol', () => { }) it('should resync after one node experiences data loss without disconnecting', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') let s1 = initSyncState(), s2 = initSyncState() // n1 makes three changes, which we sync to n2 @@ -391,7 +393,7 @@ describe('Data sync protocol', () => { }) it('should handle changes concurrent to the last sync heads', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') let s12 = initSyncState(), s21 = 
initSyncState(), s23 = initSyncState(), s32 = initSyncState() // Change 1 is known to all three nodes @@ -411,8 +413,8 @@ describe('Data sync protocol', () => { // Apply n3's latest change to n2. If running in Node, turn the Uint8Array into a Buffer, to // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) let change = Automerge.getLastLocalChange(n3) - if (typeof Buffer === 'function') change = Buffer.from(change) - ;[n2] = Automerge.applyChanges(n2, [change]) + if (typeof Buffer === 'function' && change != null) change = Buffer.from(change) + ;[n2] = change && Automerge.applyChanges(n2, [change]) || [n2] // Now sync n1 and n2. n3's change is concurrent to n1 and n2's last sync heads ;[n1, n2, s12, s21] = sync(n1, n2, s12, s21) @@ -421,10 +423,10 @@ describe('Data sync protocol', () => { }) it('should handle histories with lots of branching and merging', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 0) - ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n1)]) - ;[n3] = Automerge.applyChanges(n3, [Automerge.getLastLocalChange(n1)]) + ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n1)!]) + ;[n3] = Automerge.applyChanges(n3, [Automerge.getLastLocalChange(n1)!]) n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 1) // - n1c1 <------ n1c2 <------ n1c3 <-- etc. 
<-- n1c20 <------ n1c21 @@ -438,15 +440,15 @@ describe('Data sync protocol', () => { n2 = Automerge.change(n2, {time: 0}, doc => doc.n2 = i) const change1 = Automerge.getLastLocalChange(n1) const change2 = Automerge.getLastLocalChange(n2) - ;[n1] = Automerge.applyChanges(n1, [change2]) - ;[n2] = Automerge.applyChanges(n2, [change1]) + ;[n1] = Automerge.applyChanges(n1, [change2!]) + ;[n2] = Automerge.applyChanges(n2, [change1!]) } let s1 = initSyncState(), s2 = initSyncState() ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) // Having n3's last change concurrent to the last sync heads forces us into the slower code path - ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n3)]) + ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n3)!]) n1 = Automerge.change(n1, {time: 0}, doc => doc.n1 = 'final') n2 = Automerge.change(n2, {time: 0}, doc => doc.n2 = 'final') @@ -471,14 +473,14 @@ describe('Data sync protocol', () => { // `-- n2 // where n2 is a false positive in the Bloom filter containing {n1}. // lastSync is c9. 
- let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2) for (let i = 1; ; i++) { // search for false positive; see comment above - const n1up = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) - const n2up = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + const n1up = Automerge.change(Automerge.clone(n1, {actor: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) + const n2up = Automerge.change(Automerge.clone(n2, {actor: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) if (new BloomFilter(getHeads(n1up)).containsHash(getHeads(n2up)[0])) { n1 = n1up; n2 = n2up; break } @@ -500,20 +502,20 @@ describe('Data sync protocol', () => { // `-- n2c1 <-- n2c2 // where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}. // lastSync is c9. 
- n1 = Automerge.init('01234567') - n2 = Automerge.init('89abcdef') + n1 = Automerge.init('01234567') + n2 = Automerge.init('89abcdef') s1 = initSyncState() s2 = initSyncState() - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, (doc: any) => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2) let n1hash1, n2hash1 for (let i = 29; ; i++) { // search for false positive; see comment above - const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) - const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + const n1us1 = Automerge.change(Automerge.clone(n1, {actor: '01234567'}), {time: 0}, (doc: any) => doc.x = `${i} @ n1`) + const n2us1 = Automerge.change(Automerge.clone(n2, {actor: '89abcdef'}), {time: 0}, (doc: any) => doc.x = `${i} @ n2`) n1hash1 = getHeads(n1us1)[0]; n2hash1 = getHeads(n2us1)[0] - const n1us2 = Automerge.change(n1us1, {time: 0}, doc => doc.x = 'final @ n1') - const n2us2 = Automerge.change(n2us1, {time: 0}, doc => doc.x = 'final @ n2') + const n1us2 = Automerge.change(n1us1, {time: 0}, (doc: any) => doc.x = 'final @ n1') + const n2us2 = Automerge.change(n2us1, {time: 0}, (doc: any) => doc.x = 'final @ n2') n1hash2 = getHeads(n1us2)[0]; n2hash2 = getHeads(n2us2)[0] if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) { n1 = n1us2; n2 = n2us2; break @@ -569,15 +571,15 @@ describe('Data sync protocol', () => { // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}. // lastSync is c4. 
- let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') let s1 = initSyncState(), s2 = initSyncState() let n1hash3, n2hash3 for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2) for (let i = 86; ; i++) { // search for false positive; see comment above - const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) - const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + const n1us1 = Automerge.change(Automerge.clone(n1, {actor: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) + const n2us1 = Automerge.change(Automerge.clone(n2, {actor: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) const n1hash1 = getHeads(n1us1)[0] const n1us2 = Automerge.change(n1us1, {time: 0}, doc => doc.x = `${i + 1} @ n1`) const n2us2 = Automerge.change(n2us1, {time: 0}, doc => doc.x = `${i + 1} @ n2`) @@ -603,20 +605,20 @@ describe('Data sync protocol', () => { // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. // lastSync is c4. 
- let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 5) for (let i = 2; ; i++) { // search for false positive; see comment above - const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + const n2us1 = Automerge.change(Automerge.clone(n2, {actor: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) if (new BloomFilter(getHeads(n1)).containsHash(getHeads(n2us1)[0])) { n2 = n2us1; break } } for (let i = 141; ; i++) { // search for false positive; see comment above - const n2us2 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} again`) + const n2us2 = Automerge.change(Automerge.clone(n2, {actor: '89abcdef'}), {time: 0}, doc => doc.x = `${i} again`) if (new BloomFilter(getHeads(n1)).containsHash(getHeads(n2us2)[0])) { n2 = n2us2; break } @@ -636,7 +638,7 @@ describe('Data sync protocol', () => { // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- n2 // where n2 causes a false positive in the Bloom filter containing {n1}. 
- let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') let s1 = initSyncState(), s2 = initSyncState() let message @@ -646,8 +648,8 @@ describe('Data sync protocol', () => { s2 = decodeSyncState(encodeSyncState(s2)) for (let i = 1; ; i++) { // brute-force search for false positive; see comment above - const n1up = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) - const n2up = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + const n1up = Automerge.change(Automerge.clone(n1, {actor: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) + const n2up = Automerge.change(Automerge.clone(n2, {actor: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) // check if the bloom filter on n2 will believe n1 already has a particular hash // this will mean n2 won't offer that data to n2 by receiving a sync message from n1 if (new BloomFilter(getHeads(n1up)).containsHash(getHeads(n2up)[0])) { @@ -688,14 +690,14 @@ describe('Data sync protocol', () => { // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. 
- let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') - let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState() + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') + let s13 = initSyncState() let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() let message1, message2, message3 for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) // sync all 3 nodes - ;[n1, n2, s12, s21] = sync(n1, n2) // eslint-disable-line no-unused-vars -- kept for consistency + ;[n1, n2, , ] = sync(n1, n2) // eslint-disable-line no-unused-vars -- kept for consistency ;[n1, n3, s13, s31] = sync(n1, n3) ;[n3, n2, s32, s23] = sync(n3, n2) for (let i = 0; i < 2; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = `${i} @ n1`) @@ -742,9 +744,9 @@ describe('Data sync protocol', () => { }) it('should allow any change to be requested', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') let s1 = initSyncState(), s2 = initSyncState() - let message = null + let message: Automerge.SyncMessage | null = null for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) const lastSync = getHeads(n1) @@ -753,24 +755,26 @@ describe('Data sync protocol', () => { ;[n1, n2, s1, s2] = sync(n1, n2) s1.lastSentHeads = [] // force generateSyncMessage to return a message even though nothing changed ;[s1, message] = Automerge.generateSyncMessage(n1, s1) - const modMsg = decodeSyncMessage(message) + const modMsg = decodeSyncMessage(message!) 
modMsg.need = lastSync // re-request change 2 ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, encodeSyncMessage(modMsg)) ;[s1, message] = Automerge.generateSyncMessage(n2, s2) - assert.strictEqual(decodeSyncMessage(message).changes.length, 1) - assert.strictEqual(Automerge.decodeChange(decodeSyncMessage(message).changes[0]).hash, lastSync[0]) + assert.strictEqual(decodeSyncMessage(message!).changes.length, 1) + assert.strictEqual(Automerge.decodeChange(decodeSyncMessage(message!).changes[0]).hash, lastSync[0]) }) it('should ignore requests for a nonexistent change', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') let s1 = initSyncState(), s2 = initSyncState() - let message = null + let message: Automerge.SyncMessage | null = null for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n2] = Automerge.applyChanges(n2, Automerge.getAllChanges(n1)) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) - message.need = ['0000000000000000000000000000000000000000000000000000000000000000'] - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message) + const decoded = Automerge.decodeSyncMessage(message!) + decoded.need = ['0000000000000000000000000000000000000000000000000000000000000000'] + message = Automerge.encodeSyncMessage(decoded) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message!) 
;[s2, message] = Automerge.generateSyncMessage(n2, s2) assert.strictEqual(message, null) }) @@ -779,7 +783,7 @@ describe('Data sync protocol', () => { // ,-- c1 <-- c2 // c0 <-+ // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') let s1 = initSyncState(), s2 = initSyncState() let msg, decodedMsg @@ -813,9 +817,10 @@ describe('Data sync protocol', () => { decodedMsg = decodeSyncMessage(msg) decodedMsg.changes = [change5, change6] msg = encodeSyncMessage(decodedMsg) - const sentHashes = {} - sentHashes[decodeChangeMeta(change5, true).hash] = true - sentHashes[decodeChangeMeta(change6, true).hash] = true + const sentHashes = [ + Automerge.decodeChange(change5!).hash, + Automerge.decodeChange(change6!).hash, + ] s2.sentHashes = sentHashes ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg) assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) diff --git a/javascript/test/text_test.ts b/javascript/test/text_test.ts index 59890470..dd66e108 100644 --- a/javascript/test/text_test.ts +++ b/javascript/test/text_test.ts @@ -2,203 +2,16 @@ import * as assert from 'assert' import * as Automerge from '../src' import { assertEqualsOneOf } from './helpers' -function attributeStateToAttributes(accumulatedAttributes) { - const attributes = {} - Object.entries(accumulatedAttributes).forEach(([key, values]) => { - if (values.length && values[0] !== null) { - attributes[key] = values[0] - } - }) - return attributes -} - -function isEquivalent(a, b) { - const aProps = Object.getOwnPropertyNames(a) - const bProps = Object.getOwnPropertyNames(b) - - if (aProps.length != bProps.length) { - return false - } - - for (let i = 0; i < aProps.length; i++) { - const propName = aProps[i] - if (a[propName] !== b[propName]) { - return false - } - } - - return true -} - -function 
isControlMarker(pseudoCharacter) { - return typeof pseudoCharacter === 'object' && pseudoCharacter.attributes -} - -function opFrom(text, attributes) { - let op = { insert: text } - if (Object.keys(attributes).length > 0) { - op.attributes = attributes - } - return op -} - -function accumulateAttributes(span, accumulatedAttributes) { - Object.entries(span).forEach(([key, value]) => { - if (!accumulatedAttributes[key]) { - accumulatedAttributes[key] = [] - } - if (value === null) { - if (accumulatedAttributes[key].length === 0 || accumulatedAttributes[key] === null) { - accumulatedAttributes[key].unshift(null) - } else { - accumulatedAttributes[key].shift() - } - } else { - if (accumulatedAttributes[key][0] === null) { - accumulatedAttributes[key].shift() - } else { - accumulatedAttributes[key].unshift(value) - } - } - }) - return accumulatedAttributes -} - -function automergeTextToDeltaDoc(text) { - let ops = [] - let controlState = {} - let currentString = "" - let attributes = {} - text.toSpans().forEach((span) => { - if (isControlMarker(span)) { - controlState = accumulateAttributes(span.attributes, controlState) - } else { - let next = attributeStateToAttributes(controlState) - - // if the next span has the same calculated attributes as the current span - // don't bother outputting it as a separate span, just let it ride - if (typeof span === 'string' && isEquivalent(next, attributes)) { - currentString = currentString + span - return - } - - if (currentString) { - ops.push(opFrom(currentString, attributes)) - } - - // If we've got a string, we might be able to concatenate it to another - // same-attributed-string, so remember it and go to the next iteration. - if (typeof span === 'string') { - currentString = span - attributes = next - } else { - // otherwise we have an embed "character" and should output it immediately. - // embeds are always one-"character" in length. 
- ops.push(opFrom(span, next)) - currentString = '' - attributes = {} - } - } - }) - - // at the end, flush any accumulated string out - if (currentString) { - ops.push(opFrom(currentString, attributes)) - } - - return ops -} - -function inverseAttributes(attributes) { - let invertedAttributes = {} - Object.keys(attributes).forEach((key) => { - invertedAttributes[key] = null - }) - return invertedAttributes -} - -function applyDeleteOp(text, offset, op) { - let length = op.delete - while (length > 0) { - if (isControlMarker(text.get(offset))) { - offset += 1 - } else { - // we need to not delete control characters, but we do delete embed characters - text.deleteAt(offset, 1) - length -= 1 - } - } - return [text, offset] -} - -function applyRetainOp(text, offset, op) { - let length = op.retain - - if (op.attributes) { - text.insertAt(offset, { attributes: op.attributes }) - offset += 1 - } - - while (length > 0) { - const char = text.get(offset) - offset += 1 - if (!isControlMarker(char)) { - length -= 1 - } - } - - if (op.attributes) { - text.insertAt(offset, { attributes: inverseAttributes(op.attributes) }) - offset += 1 - } - - return [text, offset] -} - - -function applyInsertOp(text, offset, op) { - let originalOffset = offset - - if (typeof op.insert === 'string') { - text.insertAt(offset, ...op.insert.split('')) - offset += op.insert.length - } else { - // we have an embed or something similar - text.insertAt(offset, op.insert) - offset += 1 - } - - if (op.attributes) { - text.insertAt(originalOffset, { attributes: op.attributes }) - offset += 1 - } - if (op.attributes) { - text.insertAt(offset, { attributes: inverseAttributes(op.attributes) }) - offset += 1 - } - return [text, offset] -} - -// XXX: uhhhhh, why can't I pass in text? 
-function applyDeltaDocToAutomergeText(delta, doc) { - let offset = 0 - - delta.forEach(op => { - if (op.retain) { - [, offset] = applyRetainOp(doc.text, offset, op) - } else if (op.delete) { - [, offset] = applyDeleteOp(doc.text, offset, op) - } else if (op.insert) { - [, offset] = applyInsertOp(doc.text, offset, op) - } - }) +type DocType = { + text: string + [key: string]: any } describe('Automerge.Text', () => { - let s1, s2 + let s1: Automerge.Doc, s2: Automerge.Doc beforeEach(() => { - s1 = Automerge.change(Automerge.init(), doc => doc.text = "") - s2 = Automerge.merge(Automerge.init(), s1) + s1 = Automerge.change(Automerge.init(), doc => doc.text = "") + s2 = Automerge.merge(Automerge.init(), s1) }) it('should support insertion', () => { @@ -281,7 +94,7 @@ describe('Automerge.Text', () => { const s1 = Automerge.from({text: 'init'}) const changes = Automerge.getAllChanges(s1) assert.strictEqual(changes.length, 1) - const [s2] = Automerge.applyChanges(Automerge.init(), changes) + const [s2] = Automerge.applyChanges(Automerge.init(), changes) assert.strictEqual(s2.text, 'init') assert.strictEqual(s2.text, 'init') }) diff --git a/javascript/tsconfig.json b/javascript/tsconfig.json index 80dd7c76..8e934416 100644 --- a/javascript/tsconfig.json +++ b/javascript/tsconfig.json @@ -14,7 +14,7 @@ "skipLibCheck": true, "outDir": "./dist" }, - "include": [ "src/**/*" ], + "include": [ "src/**/*", "test/**/*" ], "exclude": [ "./dist/**/*", "./node_modules" diff --git a/rust/automerge-wasm/index.d.ts b/rust/automerge-wasm/index.d.ts index 90b7854a..0e0c38e6 100644 --- a/rust/automerge-wasm/index.d.ts +++ b/rust/automerge-wasm/index.d.ts @@ -82,6 +82,9 @@ export type DecodedChange = { ops: Op[] } +type PartialBy = Omit & Partial> +export type ChangeToEncode = PartialBy + export type Op = { action: string, obj: ObjID, @@ -120,7 +123,7 @@ export type SplicePatch = { export function create(actor?: Actor): Automerge; export function load(data: Uint8Array, actor?: Actor): 
Automerge; -export function encodeChange(change: DecodedChange): Change; +export function encodeChange(change: ChangeToEncode): Change; export function decodeChange(change: Change): DecodedChange; export function initSyncState(): SyncState; export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; @@ -133,7 +136,7 @@ export function importSyncState(state: JsSyncState): SyncState; export interface API { create(actor?: Actor): Automerge; load(data: Uint8Array, actor?: Actor): Automerge; - encodeChange(change: DecodedChange): Change; + encodeChange(change: ChangeToEncode): Change; decodeChange(change: Change): DecodedChange; initSyncState(): SyncState; encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; @@ -208,7 +211,7 @@ export class Automerge { dump(): void; // experimental api can go here - applyPatches(obj: Doc, meta?: unknown, callback?: (patch: Patch, before: Doc, after: Doc) => void): Doc; + applyPatches(obj: Doc, meta?: unknown, callback?: (patch: Array, before: Doc, after: Doc) => void): Doc; } export interface JsSyncState { From 1e7dcdedec03b1d6cfcb5ff3efacf0e4879f5afc Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 22 Dec 2022 12:03:49 +0000 Subject: [PATCH 673/730] automerge-js: Add prettier It's christmas, everyone is on holiday, it's time to change every single file in the repository! 
--- .github/workflows/ci.yaml | 10 + javascript/.eslintrc.cjs | 13 +- javascript/.prettierignore | 2 + javascript/.prettierrc | 4 + javascript/HACKING.md | 3 +- javascript/README.md | 41 +- javascript/config/cjs.json | 8 +- javascript/config/mjs.json | 12 +- javascript/e2e/README.md | 3 +- javascript/e2e/index.ts | 718 ++++--- javascript/e2e/tsconfig.json | 8 +- javascript/e2e/verdaccio.yaml | 24 +- .../examples/create-react-app/README.md | 2 +- .../examples/create-react-app/craco.config.js | 2 +- .../examples/create-react-app/src/App.js | 11 +- .../examples/create-react-app/src/App.test.js | 14 +- .../examples/create-react-app/src/index.css | 6 +- .../examples/create-react-app/src/index.js | 16 +- .../create-react-app/src/reportWebVitals.js | 18 +- .../create-react-app/src/setupTests.js | 2 +- javascript/examples/vite/README.md | 32 +- javascript/examples/vite/main.ts | 22 +- javascript/examples/vite/src/counter.ts | 2 +- javascript/examples/vite/src/main.ts | 17 +- javascript/examples/vite/vite.config.js | 28 +- javascript/examples/webpack/README.md | 26 +- javascript/examples/webpack/src/index.js | 9 +- javascript/examples/webpack/webpack.config.js | 37 +- javascript/package.json | 1 + javascript/src/constants.ts | 19 +- javascript/src/counter.ts | 36 +- javascript/src/index.ts | 853 ++++---- javascript/src/low_level.ts | 59 +- javascript/src/numbers.ts | 25 +- javascript/src/proxies.ts | 462 +++-- javascript/src/types.ts | 23 +- javascript/src/uuid.ts | 19 +- javascript/test/basic_test.ts | 815 ++++---- javascript/test/extra_api_tests.ts | 42 +- javascript/test/helpers.ts | 12 +- javascript/test/legacy/columnar.js | 661 ++++-- javascript/test/legacy/common.js | 14 +- javascript/test/legacy/encoding.js | 432 ++-- javascript/test/legacy/sync.js | 186 +- javascript/test/legacy_tests.ts | 1832 +++++++++++------ javascript/test/sync_test.ts | 693 ++++--- javascript/test/text_test.ts | 91 +- javascript/test/uuid_test.ts | 20 +- javascript/tsconfig.json | 37 +- 
javascript/typedoc-readme.md | 58 +- scripts/ci/fmt_js | 5 + scripts/ci/run | 1 + 52 files changed, 4564 insertions(+), 2922 deletions(-) create mode 100644 javascript/.prettierignore create mode 100644 javascript/.prettierrc create mode 100755 scripts/ci/fmt_js diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0550619e..361320a0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -90,6 +90,16 @@ jobs: run: rustup target add wasm32-unknown-unknown - name: run tests run: ./scripts/ci/deno_tests + + js_fmt: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: install + run: yarn global add prettier + - name: format + run: prettier -c javascript/.prettierrc javascript + js_tests: runs-on: ubuntu-latest steps: diff --git a/javascript/.eslintrc.cjs b/javascript/.eslintrc.cjs index 80e08d55..5d11eb94 100644 --- a/javascript/.eslintrc.cjs +++ b/javascript/.eslintrc.cjs @@ -1,11 +1,6 @@ module.exports = { root: true, - parser: '@typescript-eslint/parser', - plugins: [ - '@typescript-eslint', - ], - extends: [ - 'eslint:recommended', - 'plugin:@typescript-eslint/recommended', - ], -}; + parser: "@typescript-eslint/parser", + plugins: ["@typescript-eslint"], + extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"], +} diff --git a/javascript/.prettierignore b/javascript/.prettierignore new file mode 100644 index 00000000..8116ea24 --- /dev/null +++ b/javascript/.prettierignore @@ -0,0 +1,2 @@ +e2e/verdacciodb +dist diff --git a/javascript/.prettierrc b/javascript/.prettierrc new file mode 100644 index 00000000..18b9c97f --- /dev/null +++ b/javascript/.prettierrc @@ -0,0 +1,4 @@ +{ + "semi": false, + "arrowParens": "avoid" +} diff --git a/javascript/HACKING.md b/javascript/HACKING.md index c3203775..b7e92eef 100644 --- a/javascript/HACKING.md +++ b/javascript/HACKING.md @@ -8,7 +8,7 @@ Rust codebase and can be found in `~/automerge-wasm`). I.e. 
the responsibility of this codebase is - To map from the javascript data model to the underlying `set`, `make`, - `insert`, and `delete` operations of Automerge. + `insert`, and `delete` operations of Automerge. - To expose a more convenient interface to functions in `automerge-wasm` which generate messages to send over the network or compressed file formats to store on disk @@ -37,4 +37,3 @@ yarn test If you make changes to the `automerge-wasm` package you will need to re-run `yarn e2e buildjs` - diff --git a/javascript/README.md b/javascript/README.md index ffd2b38e..af8306ac 100644 --- a/javascript/README.md +++ b/javascript/README.md @@ -19,7 +19,6 @@ data](#make-some-data). If you're in a browser you need a bundler ### Bundler setup - `@automerge/automerge` is a wrapper around a core library which is written in rust, compiled to WebAssembly and distributed as a separate package called `@automerge/automerge-wasm`. Browsers don't currently support WebAssembly @@ -54,28 +53,28 @@ import * as automerge from "@automerge/automerge" import * as assert from "assert" let doc1 = automerge.from({ - tasks: [ - {description: "feed fish", done: false}, - {description: "water plants", done: false}, - ] + tasks: [ + { description: "feed fish", done: false }, + { description: "water plants", done: false }, + ], }) -// Create a new thread of execution +// Create a new thread of execution let doc2 = automerge.clone(doc1) // Now we concurrently make changes to doc1 and doc2 // Complete a task in doc2 doc2 = automerge.change(doc2, d => { - d.tasks[0].done = true + d.tasks[0].done = true }) // Add a task in doc1 doc1 = automerge.change(doc1, d => { - d.tasks.push({ - description: "water fish", - done: false - }) + d.tasks.push({ + description: "water fish", + done: false, + }) }) // Merge changes from both docs @@ -84,19 +83,19 @@ doc2 = automerge.merge(doc2, doc1) // Both docs are merged and identical assert.deepEqual(doc1, { - tasks: [ - {description: "feed fish", done: true}, - 
{description: "water plants", done: false}, - {description: "water fish", done: false}, - ] + tasks: [ + { description: "feed fish", done: true }, + { description: "water plants", done: false }, + { description: "water fish", done: false }, + ], }) assert.deepEqual(doc2, { - tasks: [ - {description: "feed fish", done: true}, - {description: "water plants", done: false}, - {description: "water fish", done: false}, - ] + tasks: [ + { description: "feed fish", done: true }, + { description: "water plants", done: false }, + { description: "water fish", done: false }, + ], }) ``` diff --git a/javascript/config/cjs.json b/javascript/config/cjs.json index d7f8c63f..9cfceed5 100644 --- a/javascript/config/cjs.json +++ b/javascript/config/cjs.json @@ -1,6 +1,6 @@ { - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "../dist/cjs" - } + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "../dist/cjs" + } } diff --git a/javascript/config/mjs.json b/javascript/config/mjs.json index 8f964400..5b02ee0e 100644 --- a/javascript/config/mjs.json +++ b/javascript/config/mjs.json @@ -1,8 +1,8 @@ { - "extends": "../tsconfig.json", - "compilerOptions": { - "target": "es6", - "module": "es6", - "outDir": "../dist/mjs" - } + "extends": "../tsconfig.json", + "compilerOptions": { + "target": "es6", + "module": "es6", + "outDir": "../dist/mjs" + } } diff --git a/javascript/e2e/README.md b/javascript/e2e/README.md index ff87bd60..9dcee471 100644 --- a/javascript/e2e/README.md +++ b/javascript/e2e/README.md @@ -54,7 +54,7 @@ yarn e2e buildexamples -e webpack If you're experimenting with a project which is not in the `examples` folder you'll need a running registry. `run-registry` builds and publishes `automerge-js` and `automerge-wasm` and then runs the registry at -`localhost:4873`. +`localhost:4873`. 
``` yarn e2e run-registry @@ -63,7 +63,6 @@ yarn e2e run-registry You can now run `yarn install --registry http://localhost:4873` to experiment with the built packages. - ## Using the `dev` build of `automerge-wasm` All the commands above take a `-p` flag which can be either `release` or diff --git a/javascript/e2e/index.ts b/javascript/e2e/index.ts index 828c0635..fb0b1599 100644 --- a/javascript/e2e/index.ts +++ b/javascript/e2e/index.ts @@ -1,15 +1,25 @@ -import {once} from "events" -import {setTimeout} from "timers/promises" -import {spawn, ChildProcess} from "child_process" +import { once } from "events" +import { setTimeout } from "timers/promises" +import { spawn, ChildProcess } from "child_process" import * as child_process from "child_process" -import {command, subcommands, run, array, multioption, option, Type} from "cmd-ts" +import { + command, + subcommands, + run, + array, + multioption, + option, + Type, +} from "cmd-ts" import * as path from "path" import * as fsPromises from "fs/promises" import fetch from "node-fetch" const VERDACCIO_DB_PATH = path.normalize(`${__dirname}/verdacciodb`) const VERDACCIO_CONFIG_PATH = path.normalize(`${__dirname}/verdaccio.yaml`) -const AUTOMERGE_WASM_PATH = path.normalize(`${__dirname}/../../rust/automerge-wasm`) +const AUTOMERGE_WASM_PATH = path.normalize( + `${__dirname}/../../rust/automerge-wasm` +) const AUTOMERGE_JS_PATH = path.normalize(`${__dirname}/..`) const EXAMPLES_DIR = path.normalize(path.join(__dirname, "../", "examples")) @@ -18,217 +28,286 @@ type Example = "webpack" | "vite" | "create-react-app" // Type to parse strings to `Example` so the types line up for the `buildExamples` commmand const ReadExample: Type = { - async from(str) { - if (str === "webpack") { - return "webpack" - } else if (str === "vite") { - return "vite" - } else if (str === "create-react-app") { - return "create-react-app" - } else { - throw new Error(`Unknown example type ${str}`) - } + async from(str) { + if (str === "webpack") 
{ + return "webpack" + } else if (str === "vite") { + return "vite" + } else if (str === "create-react-app") { + return "create-react-app" + } else { + throw new Error(`Unknown example type ${str}`) } + }, } type Profile = "dev" | "release" const ReadProfile: Type = { - async from(str) { - if (str === "dev") { - return "dev" - } else if (str === "release") { - return "release" - } else { - throw new Error(`Unknown profile ${str}`) - } + async from(str) { + if (str === "dev") { + return "dev" + } else if (str === "release") { + return "release" + } else { + throw new Error(`Unknown profile ${str}`) } + }, } const buildjs = command({ - name: "buildjs", - args: { - profile: option({ - type: ReadProfile, - long: "profile", - short: "p", - defaultValue: () => "dev" as Profile - }) - }, - handler: ({profile}) => { - console.log("building js") - withPublishedWasm(profile, async (registryUrl: string) => { - await buildAndPublishAutomergeJs(registryUrl) - }) - } + name: "buildjs", + args: { + profile: option({ + type: ReadProfile, + long: "profile", + short: "p", + defaultValue: () => "dev" as Profile, + }), + }, + handler: ({ profile }) => { + console.log("building js") + withPublishedWasm(profile, async (registryUrl: string) => { + await buildAndPublishAutomergeJs(registryUrl) + }) + }, }) const buildWasm = command({ - name: "buildwasm", - args: { - profile: option({ - type: ReadProfile, - long: "profile", - short: "p", - defaultValue: () => "dev" as Profile - }) - }, - handler: ({profile}) => { - console.log("building automerge-wasm") - withRegistry( - buildAutomergeWasm(profile), - ) - } + name: "buildwasm", + args: { + profile: option({ + type: ReadProfile, + long: "profile", + short: "p", + defaultValue: () => "dev" as Profile, + }), + }, + handler: ({ profile }) => { + console.log("building automerge-wasm") + withRegistry(buildAutomergeWasm(profile)) + }, }) const buildexamples = command({ - name: "buildexamples", - args: { - examples: multioption({ - long: 
"example", - short: "e", - type: array(ReadExample), - }), - profile: option({ - type: ReadProfile, - long: "profile", - short: "p", - defaultValue: () => "dev" as Profile - }) - }, - handler: ({examples, profile}) => { - if (examples.length === 0) { - examples = ["webpack", "vite", "create-react-app"] - } - buildExamples(examples, profile) + name: "buildexamples", + args: { + examples: multioption({ + long: "example", + short: "e", + type: array(ReadExample), + }), + profile: option({ + type: ReadProfile, + long: "profile", + short: "p", + defaultValue: () => "dev" as Profile, + }), + }, + handler: ({ examples, profile }) => { + if (examples.length === 0) { + examples = ["webpack", "vite", "create-react-app"] } + buildExamples(examples, profile) + }, }) - const runRegistry = command({ - name: "run-registry", - args: { - profile: option({ - type: ReadProfile, - long: "profile", - short: "p", - defaultValue: () => "dev" as Profile - }) - }, - handler: ({profile}) => { - withPublishedWasm(profile, async (registryUrl: string) => { - await buildAndPublishAutomergeJs(registryUrl) - console.log("\n************************") - console.log(` Verdaccio NPM registry is running at ${registryUrl}`) - console.log(" press CTRL-C to exit ") - console.log("************************") - await once(process, "SIGINT") - }).catch(e => { - console.error(`Failed: ${e}`) - }) - } + name: "run-registry", + args: { + profile: option({ + type: ReadProfile, + long: "profile", + short: "p", + defaultValue: () => "dev" as Profile, + }), + }, + handler: ({ profile }) => { + withPublishedWasm(profile, async (registryUrl: string) => { + await buildAndPublishAutomergeJs(registryUrl) + console.log("\n************************") + console.log(` Verdaccio NPM registry is running at ${registryUrl}`) + console.log(" press CTRL-C to exit ") + console.log("************************") + await once(process, "SIGINT") + }).catch(e => { + console.error(`Failed: ${e}`) + }) + }, }) - const app = subcommands({ - 
name: "e2e", - cmds: {buildjs, buildexamples, buildwasm: buildWasm, "run-registry": runRegistry} + name: "e2e", + cmds: { + buildjs, + buildexamples, + buildwasm: buildWasm, + "run-registry": runRegistry, + }, }) run(app, process.argv.slice(2)) async function buildExamples(examples: Array, profile: Profile) { - await withPublishedWasm(profile, async (registryUrl) => { - printHeader("building and publishing automerge") - await buildAndPublishAutomergeJs(registryUrl) - for (const example of examples) { - printHeader(`building ${example} example`) - if (example === "webpack") { - const projectPath = path.join(EXAMPLES_DIR, example) - await removeExistingAutomerge(projectPath) - await fsPromises.rm(path.join(projectPath, "yarn.lock"), {force: true}) - await spawnAndWait("yarn", ["--cwd", projectPath, "install", "--registry", registryUrl, "--check-files"], {stdio: "inherit"}) - await spawnAndWait("yarn", ["--cwd", projectPath, "build"], {stdio: "inherit"}) - } else if (example === "vite") { - const projectPath = path.join(EXAMPLES_DIR, example) - await removeExistingAutomerge(projectPath) - await fsPromises.rm(path.join(projectPath, "yarn.lock"), {force: true}) - await spawnAndWait("yarn", ["--cwd", projectPath, "install", "--registry", registryUrl, "--check-files"], {stdio: "inherit"}) - await spawnAndWait("yarn", ["--cwd", projectPath, "build"], {stdio: "inherit"}) - } else if (example === "create-react-app") { - const projectPath = path.join(EXAMPLES_DIR, example) - await removeExistingAutomerge(projectPath) - await fsPromises.rm(path.join(projectPath, "yarn.lock"), {force: true}) - await spawnAndWait("yarn", ["--cwd", projectPath, "install", "--registry", registryUrl, "--check-files"], {stdio: "inherit"}) - await spawnAndWait("yarn", ["--cwd", projectPath, "build"], {stdio: "inherit"}) - } - } - }) + await withPublishedWasm(profile, async registryUrl => { + printHeader("building and publishing automerge") + await buildAndPublishAutomergeJs(registryUrl) + for (const 
example of examples) { + printHeader(`building ${example} example`) + if (example === "webpack") { + const projectPath = path.join(EXAMPLES_DIR, example) + await removeExistingAutomerge(projectPath) + await fsPromises.rm(path.join(projectPath, "yarn.lock"), { + force: true, + }) + await spawnAndWait( + "yarn", + [ + "--cwd", + projectPath, + "install", + "--registry", + registryUrl, + "--check-files", + ], + { stdio: "inherit" } + ) + await spawnAndWait("yarn", ["--cwd", projectPath, "build"], { + stdio: "inherit", + }) + } else if (example === "vite") { + const projectPath = path.join(EXAMPLES_DIR, example) + await removeExistingAutomerge(projectPath) + await fsPromises.rm(path.join(projectPath, "yarn.lock"), { + force: true, + }) + await spawnAndWait( + "yarn", + [ + "--cwd", + projectPath, + "install", + "--registry", + registryUrl, + "--check-files", + ], + { stdio: "inherit" } + ) + await spawnAndWait("yarn", ["--cwd", projectPath, "build"], { + stdio: "inherit", + }) + } else if (example === "create-react-app") { + const projectPath = path.join(EXAMPLES_DIR, example) + await removeExistingAutomerge(projectPath) + await fsPromises.rm(path.join(projectPath, "yarn.lock"), { + force: true, + }) + await spawnAndWait( + "yarn", + [ + "--cwd", + projectPath, + "install", + "--registry", + registryUrl, + "--check-files", + ], + { stdio: "inherit" } + ) + await spawnAndWait("yarn", ["--cwd", projectPath, "build"], { + stdio: "inherit", + }) + } + } + }) } type WithRegistryAction = (registryUrl: string) => Promise -async function withRegistry(action: WithRegistryAction, ...actions: Array) { - // First, start verdaccio - printHeader("Starting verdaccio NPM server") - const verd = await VerdaccioProcess.start() - actions.unshift(action) +async function withRegistry( + action: WithRegistryAction, + ...actions: Array +) { + // First, start verdaccio + printHeader("Starting verdaccio NPM server") + const verd = await VerdaccioProcess.start() + actions.unshift(action) - for 
(const action of actions) { - try { - type Step = "verd-died" | "action-completed" - const verdDied: () => Promise = async () => { - await verd.died() - return "verd-died" - } - const actionComplete: () => Promise = async () => { - await action("http://localhost:4873") - return "action-completed" - } - const result = await Promise.race([verdDied(), actionComplete()]) - if (result === "verd-died") { - throw new Error("verdaccio unexpectedly exited") - } - } catch(e) { - await verd.kill() - throw e - } + for (const action of actions) { + try { + type Step = "verd-died" | "action-completed" + const verdDied: () => Promise = async () => { + await verd.died() + return "verd-died" + } + const actionComplete: () => Promise = async () => { + await action("http://localhost:4873") + return "action-completed" + } + const result = await Promise.race([verdDied(), actionComplete()]) + if (result === "verd-died") { + throw new Error("verdaccio unexpectedly exited") + } + } catch (e) { + await verd.kill() + throw e } - await verd.kill() + } + await verd.kill() } async function withPublishedWasm(profile: Profile, action: WithRegistryAction) { - await withRegistry( - buildAutomergeWasm(profile), - publishAutomergeWasm, - action - ) + await withRegistry(buildAutomergeWasm(profile), publishAutomergeWasm, action) } function buildAutomergeWasm(profile: Profile): WithRegistryAction { - return async (registryUrl: string) => { - printHeader("building automerge-wasm") - await spawnAndWait("yarn", ["--cwd", AUTOMERGE_WASM_PATH, "--registry", registryUrl, "install"], {stdio: "inherit"}) - const cmd = profile === "release" ? "release" : "debug" - await spawnAndWait("yarn", ["--cwd", AUTOMERGE_WASM_PATH, cmd], {stdio: "inherit"}) - } + return async (registryUrl: string) => { + printHeader("building automerge-wasm") + await spawnAndWait( + "yarn", + ["--cwd", AUTOMERGE_WASM_PATH, "--registry", registryUrl, "install"], + { stdio: "inherit" } + ) + const cmd = profile === "release" ? 
"release" : "debug" + await spawnAndWait("yarn", ["--cwd", AUTOMERGE_WASM_PATH, cmd], { + stdio: "inherit", + }) + } } async function publishAutomergeWasm(registryUrl: string) { - printHeader("Publishing automerge-wasm to verdaccio") - await fsPromises.rm(path.join(VERDACCIO_DB_PATH, "@automerge/automerge-wasm"), { recursive: true, force: true} ) - await yarnPublish(registryUrl, AUTOMERGE_WASM_PATH) + printHeader("Publishing automerge-wasm to verdaccio") + await fsPromises.rm( + path.join(VERDACCIO_DB_PATH, "@automerge/automerge-wasm"), + { recursive: true, force: true } + ) + await yarnPublish(registryUrl, AUTOMERGE_WASM_PATH) } async function buildAndPublishAutomergeJs(registryUrl: string) { - // Build the js package - printHeader("Building automerge") - await removeExistingAutomerge(AUTOMERGE_JS_PATH) - await removeFromVerdaccio("@automerge/automerge") - await fsPromises.rm(path.join(AUTOMERGE_JS_PATH, "yarn.lock"), {force: true}) - await spawnAndWait("yarn", ["--cwd", AUTOMERGE_JS_PATH, "install", "--registry", registryUrl, "--check-files"], {stdio: "inherit"}) - await spawnAndWait("yarn", ["--cwd", AUTOMERGE_JS_PATH, "build"], {stdio: "inherit"}) - await yarnPublish(registryUrl, AUTOMERGE_JS_PATH) + // Build the js package + printHeader("Building automerge") + await removeExistingAutomerge(AUTOMERGE_JS_PATH) + await removeFromVerdaccio("@automerge/automerge") + await fsPromises.rm(path.join(AUTOMERGE_JS_PATH, "yarn.lock"), { + force: true, + }) + await spawnAndWait( + "yarn", + [ + "--cwd", + AUTOMERGE_JS_PATH, + "install", + "--registry", + registryUrl, + "--check-files", + ], + { stdio: "inherit" } + ) + await spawnAndWait("yarn", ["--cwd", AUTOMERGE_JS_PATH, "build"], { + stdio: "inherit", + }) + await yarnPublish(registryUrl, AUTOMERGE_JS_PATH) } /** @@ -236,104 +315,110 @@ async function buildAndPublishAutomergeJs(registryUrl: string) { * */ class VerdaccioProcess { - child: ChildProcess - stdout: Array - stderr: Array + child: ChildProcess + stdout: 
Array + stderr: Array - constructor(child: ChildProcess) { - this.child = child + constructor(child: ChildProcess) { + this.child = child - // Collect stdout/stderr otherwise the subprocess gets blocked writing - this.stdout = [] - this.stderr = [] - this.child.stdout && this.child.stdout.on("data", (data) => this.stdout.push(data)) - this.child.stderr && this.child.stderr.on("data", (data) => this.stderr.push(data)) + // Collect stdout/stderr otherwise the subprocess gets blocked writing + this.stdout = [] + this.stderr = [] + this.child.stdout && + this.child.stdout.on("data", data => this.stdout.push(data)) + this.child.stderr && + this.child.stderr.on("data", data => this.stderr.push(data)) - const errCallback = (e: any) => { - console.error("!!!!!!!!!ERROR IN VERDACCIO PROCESS!!!!!!!!!") - console.error(" ", e) - if (this.stdout.length > 0) { - console.log("\n**Verdaccio stdout**") - const stdout = Buffer.concat(this.stdout) - process.stdout.write(stdout) - } + const errCallback = (e: any) => { + console.error("!!!!!!!!!ERROR IN VERDACCIO PROCESS!!!!!!!!!") + console.error(" ", e) + if (this.stdout.length > 0) { + console.log("\n**Verdaccio stdout**") + const stdout = Buffer.concat(this.stdout) + process.stdout.write(stdout) + } - if (this.stderr.length > 0) { - console.log("\n**Verdaccio stderr**") - const stdout = Buffer.concat(this.stderr) - process.stdout.write(stdout) - } - process.exit(-1) - } - this.child.on("error", errCallback) + if (this.stderr.length > 0) { + console.log("\n**Verdaccio stderr**") + const stdout = Buffer.concat(this.stderr) + process.stdout.write(stdout) + } + process.exit(-1) } + this.child.on("error", errCallback) + } - /** - * Spawn a verdaccio process and wait for it to respond succesfully to http requests - * - * The returned `VerdaccioProcess` can be used to control the subprocess - */ - static async start() { - const child = spawn("yarn", ["verdaccio", "--config", VERDACCIO_CONFIG_PATH], {env: { ...process.env, FORCE_COLOR: 
"true"}}) + /** + * Spawn a verdaccio process and wait for it to respond succesfully to http requests + * + * The returned `VerdaccioProcess` can be used to control the subprocess + */ + static async start() { + const child = spawn( + "yarn", + ["verdaccio", "--config", VERDACCIO_CONFIG_PATH], + { env: { ...process.env, FORCE_COLOR: "true" } } + ) - // Forward stdout and stderr whilst waiting for startup to complete - const stdoutCallback = (data: Buffer) => process.stdout.write(data) - const stderrCallback = (data: Buffer) => process.stderr.write(data) - child.stdout && child.stdout.on("data", stdoutCallback) - child.stderr && child.stderr.on("data", stderrCallback) + // Forward stdout and stderr whilst waiting for startup to complete + const stdoutCallback = (data: Buffer) => process.stdout.write(data) + const stderrCallback = (data: Buffer) => process.stderr.write(data) + child.stdout && child.stdout.on("data", stdoutCallback) + child.stderr && child.stderr.on("data", stderrCallback) - const healthCheck = async () => { - while (true) { - try { - const resp = await fetch("http://localhost:4873") - if (resp.status === 200) { - return - } else { - console.log(`Healthcheck failed: bad status ${resp.status}`) - } - } catch (e) { - console.error(`Healthcheck failed: ${e}`) - } - await setTimeout(500) - } - } - await withTimeout(healthCheck(), 10000) - - // Stop forwarding stdout/stderr - child.stdout && child.stdout.off("data", stdoutCallback) - child.stderr && child.stderr.off("data", stderrCallback) - return new VerdaccioProcess(child) - } - - /** - * Send a SIGKILL to the process and wait for it to stop - */ - async kill() { - this.child.stdout && this.child.stdout.destroy() - this.child.stderr && this.child.stderr.destroy() - this.child.kill(); + const healthCheck = async () => { + while (true) { try { - await withTimeout(once(this.child, "close"), 500) + const resp = await fetch("http://localhost:4873") + if (resp.status === 200) { + return + } else { + 
console.log(`Healthcheck failed: bad status ${resp.status}`) + } } catch (e) { - console.error("unable to kill verdaccio subprocess, trying -9") - this.child.kill(9) - await withTimeout(once(this.child, "close"), 500) + console.error(`Healthcheck failed: ${e}`) } + await setTimeout(500) + } } + await withTimeout(healthCheck(), 10000) - /** - * A promise which resolves if the subprocess exits for some reason - */ - async died(): Promise { - const [exit, _signal] = await once(this.child, "exit") - return exit + // Stop forwarding stdout/stderr + child.stdout && child.stdout.off("data", stdoutCallback) + child.stderr && child.stderr.off("data", stderrCallback) + return new VerdaccioProcess(child) + } + + /** + * Send a SIGKILL to the process and wait for it to stop + */ + async kill() { + this.child.stdout && this.child.stdout.destroy() + this.child.stderr && this.child.stderr.destroy() + this.child.kill() + try { + await withTimeout(once(this.child, "close"), 500) + } catch (e) { + console.error("unable to kill verdaccio subprocess, trying -9") + this.child.kill(9) + await withTimeout(once(this.child, "close"), 500) } + } + + /** + * A promise which resolves if the subprocess exits for some reason + */ + async died(): Promise { + const [exit, _signal] = await once(this.child, "exit") + return exit + } } function printHeader(header: string) { - console.log("\n===============================") - console.log(` ${header}`) - console.log("===============================") + console.log("\n===============================") + console.log(` ${header}`) + console.log("===============================") } /** @@ -347,36 +432,46 @@ function printHeader(header: string) { * @param packageDir - The directory containing the package.json of the target project */ async function removeExistingAutomerge(packageDir: string) { - await fsPromises.rm(path.join(packageDir, "node_modules", "@automerge"), {recursive: true, force: true}) - await fsPromises.rm(path.join(packageDir, 
"node_modules", "automerge"), {recursive: true, force: true}) + await fsPromises.rm(path.join(packageDir, "node_modules", "@automerge"), { + recursive: true, + force: true, + }) + await fsPromises.rm(path.join(packageDir, "node_modules", "automerge"), { + recursive: true, + force: true, + }) } type SpawnResult = { - stdout?: Buffer, - stderr?: Buffer, + stdout?: Buffer + stderr?: Buffer } -async function spawnAndWait(cmd: string, args: Array, options: child_process.SpawnOptions): Promise { - const child = spawn(cmd, args, options) - let stdout = null - let stderr = null - if (child.stdout) { - stdout = [] - child.stdout.on("data", data => stdout.push(data)) - } - if (child.stderr) { - stderr = [] - child.stderr.on("data", data => stderr.push(data)) - } +async function spawnAndWait( + cmd: string, + args: Array, + options: child_process.SpawnOptions +): Promise { + const child = spawn(cmd, args, options) + let stdout = null + let stderr = null + if (child.stdout) { + stdout = [] + child.stdout.on("data", data => stdout.push(data)) + } + if (child.stderr) { + stderr = [] + child.stderr.on("data", data => stderr.push(data)) + } - const [exit, _signal] = await once(child, "exit") - if (exit && exit !== 0) { - throw new Error("nonzero exit code") - } - return { - stderr: stderr? Buffer.concat(stderr) : null, - stdout: stdout ? Buffer.concat(stdout) : null - } + const [exit, _signal] = await once(child, "exit") + if (exit && exit !== 0) { + throw new Error("nonzero exit code") + } + return { + stderr: stderr ? Buffer.concat(stderr) : null, + stdout: stdout ? Buffer.concat(stdout) : null, + } } /** @@ -387,29 +482,27 @@ async function spawnAndWait(cmd: string, args: Array, options: child_pro * okay I Promise. 
*/ async function removeFromVerdaccio(packageName: string) { - await fsPromises.rm(path.join(VERDACCIO_DB_PATH, packageName), {force: true, recursive: true}) + await fsPromises.rm(path.join(VERDACCIO_DB_PATH, packageName), { + force: true, + recursive: true, + }) } async function yarnPublish(registryUrl: string, cwd: string) { - await spawnAndWait( - "yarn", - [ - "--registry", - registryUrl, - "--cwd", - cwd, - "publish", - "--non-interactive", - ], - { - stdio: "inherit", - env: { - ...process.env, - FORCE_COLOR: "true", - // This is a fake token, it just has to be the right format - npm_config__auth: "//localhost:4873/:_authToken=Gp2Mgxm4faa/7wp0dMSuRA==" - } - }) + await spawnAndWait( + "yarn", + ["--registry", registryUrl, "--cwd", cwd, "publish", "--non-interactive"], + { + stdio: "inherit", + env: { + ...process.env, + FORCE_COLOR: "true", + // This is a fake token, it just has to be the right format + npm_config__auth: + "//localhost:4873/:_authToken=Gp2Mgxm4faa/7wp0dMSuRA==", + }, + } + ) } /** @@ -419,20 +512,23 @@ async function yarnPublish(registryUrl: string, cwd: string) { * @param promise - the promise to wait for @param timeout - the delay in * milliseconds to wait before throwing */ -async function withTimeout(promise: Promise, timeout: number): Promise { - type Step = "timed-out" | {result: T} - const timedOut: () => Promise = async () => { - await setTimeout(timeout) - return "timed-out" - } - const succeeded: () => Promise = async () => { - const result = await promise - return {result} - } - const result = await Promise.race([timedOut(), succeeded()]) - if (result === "timed-out") { - throw new Error("timed out") - } else { - return result.result - } +async function withTimeout( + promise: Promise, + timeout: number +): Promise { + type Step = "timed-out" | { result: T } + const timedOut: () => Promise = async () => { + await setTimeout(timeout) + return "timed-out" + } + const succeeded: () => Promise = async () => { + const result = await 
promise + return { result } + } + const result = await Promise.race([timedOut(), succeeded()]) + if (result === "timed-out") { + throw new Error("timed out") + } else { + return result.result + } } diff --git a/javascript/e2e/tsconfig.json b/javascript/e2e/tsconfig.json index 9f0e2e76..a2109873 100644 --- a/javascript/e2e/tsconfig.json +++ b/javascript/e2e/tsconfig.json @@ -1,6 +1,6 @@ { - "compilerOptions": { - "types": ["node"] - }, - "module": "nodenext" + "compilerOptions": { + "types": ["node"] + }, + "module": "nodenext" } diff --git a/javascript/e2e/verdaccio.yaml b/javascript/e2e/verdaccio.yaml index 45920a16..865f5f05 100644 --- a/javascript/e2e/verdaccio.yaml +++ b/javascript/e2e/verdaccio.yaml @@ -4,22 +4,22 @@ auth: file: ./htpasswd publish: allow_offline: true -logs: {type: stdout, format: pretty, level: info} -packages: +logs: { type: stdout, format: pretty, level: info } +packages: "@automerge/automerge-wasm": - access: "$all" - publish: "$all" + access: "$all" + publish: "$all" "@automerge/automerge": - access: "$all" - publish: "$all" + access: "$all" + publish: "$all" "*": - access: "$all" - publish: "$all" - proxy: npmjs + access: "$all" + publish: "$all" + proxy: npmjs "@*/*": - access: "$all" - publish: "$all" - proxy: npmjs + access: "$all" + publish: "$all" + proxy: npmjs uplinks: npmjs: url: https://registry.npmjs.org/ diff --git a/javascript/examples/create-react-app/README.md b/javascript/examples/create-react-app/README.md index dc894080..baa135ac 100644 --- a/javascript/examples/create-react-app/README.md +++ b/javascript/examples/create-react-app/README.md @@ -54,6 +54,6 @@ In the root of the project add the following contents to `craco.config.js` const cracoWasm = require("craco-wasm") module.exports = { - plugins: [cracoWasm()] + plugins: [cracoWasm()], } ``` diff --git a/javascript/examples/create-react-app/craco.config.js b/javascript/examples/create-react-app/craco.config.js index ad806e67..489dad8f 100644 --- 
a/javascript/examples/create-react-app/craco.config.js +++ b/javascript/examples/create-react-app/craco.config.js @@ -1,5 +1,5 @@ const cracoWasm = require("craco-wasm") module.exports = { - plugins: [cracoWasm()] + plugins: [cracoWasm()], } diff --git a/javascript/examples/create-react-app/src/App.js b/javascript/examples/create-react-app/src/App.js index fc4805b4..7cfe997b 100644 --- a/javascript/examples/create-react-app/src/App.js +++ b/javascript/examples/create-react-app/src/App.js @@ -1,12 +1,11 @@ import * as Automerge from "@automerge/automerge" -import logo from './logo.svg'; -import './App.css'; +import logo from "./logo.svg" +import "./App.css" let doc = Automerge.init() -doc = Automerge.change(doc, (d) => d.hello = "from automerge") +doc = Automerge.change(doc, d => (d.hello = "from automerge")) const result = JSON.stringify(doc) - function App() { return (
@@ -15,7 +14,7 @@ function App() {

{result}

- ); + ) } -export default App; +export default App diff --git a/javascript/examples/create-react-app/src/App.test.js b/javascript/examples/create-react-app/src/App.test.js index 1f03afee..ea796120 100644 --- a/javascript/examples/create-react-app/src/App.test.js +++ b/javascript/examples/create-react-app/src/App.test.js @@ -1,8 +1,8 @@ -import { render, screen } from '@testing-library/react'; -import App from './App'; +import { render, screen } from "@testing-library/react" +import App from "./App" -test('renders learn react link', () => { - render(); - const linkElement = screen.getByText(/learn react/i); - expect(linkElement).toBeInTheDocument(); -}); +test("renders learn react link", () => { + render() + const linkElement = screen.getByText(/learn react/i) + expect(linkElement).toBeInTheDocument() +}) diff --git a/javascript/examples/create-react-app/src/index.css b/javascript/examples/create-react-app/src/index.css index ec2585e8..4a1df4db 100644 --- a/javascript/examples/create-react-app/src/index.css +++ b/javascript/examples/create-react-app/src/index.css @@ -1,13 +1,13 @@ body { margin: 0; - font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen', - 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue', + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", + "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", sans-serif; -webkit-font-smoothing: antialiased; -moz-osx-font-smoothing: grayscale; } code { - font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New', + font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", monospace; } diff --git a/javascript/examples/create-react-app/src/index.js b/javascript/examples/create-react-app/src/index.js index d563c0fb..58c21edc 100644 --- a/javascript/examples/create-react-app/src/index.js +++ b/javascript/examples/create-react-app/src/index.js @@ -1,17 +1,17 @@ -import React from 'react'; -import ReactDOM from 
'react-dom/client'; -import './index.css'; -import App from './App'; -import reportWebVitals from './reportWebVitals'; +import React from "react" +import ReactDOM from "react-dom/client" +import "./index.css" +import App from "./App" +import reportWebVitals from "./reportWebVitals" -const root = ReactDOM.createRoot(document.getElementById('root')); +const root = ReactDOM.createRoot(document.getElementById("root")) root.render( -); +) // If you want to start measuring performance in your app, pass a function // to log results (for example: reportWebVitals(console.log)) // or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals -reportWebVitals(); +reportWebVitals() diff --git a/javascript/examples/create-react-app/src/reportWebVitals.js b/javascript/examples/create-react-app/src/reportWebVitals.js index 5253d3ad..eee308db 100644 --- a/javascript/examples/create-react-app/src/reportWebVitals.js +++ b/javascript/examples/create-react-app/src/reportWebVitals.js @@ -1,13 +1,13 @@ const reportWebVitals = onPerfEntry => { if (onPerfEntry && onPerfEntry instanceof Function) { - import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => { - getCLS(onPerfEntry); - getFID(onPerfEntry); - getFCP(onPerfEntry); - getLCP(onPerfEntry); - getTTFB(onPerfEntry); - }); + import("web-vitals").then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => { + getCLS(onPerfEntry) + getFID(onPerfEntry) + getFCP(onPerfEntry) + getLCP(onPerfEntry) + getTTFB(onPerfEntry) + }) } -}; +} -export default reportWebVitals; +export default reportWebVitals diff --git a/javascript/examples/create-react-app/src/setupTests.js b/javascript/examples/create-react-app/src/setupTests.js index 8f2609b7..6a0fd123 100644 --- a/javascript/examples/create-react-app/src/setupTests.js +++ b/javascript/examples/create-react-app/src/setupTests.js @@ -2,4 +2,4 @@ // allows you to do things like: // expect(element).toHaveTextContent(/react/i) // learn more: 
https://github.com/testing-library/jest-dom -import '@testing-library/jest-dom'; +import "@testing-library/jest-dom" diff --git a/javascript/examples/vite/README.md b/javascript/examples/vite/README.md index efe44479..c84594f5 100644 --- a/javascript/examples/vite/README.md +++ b/javascript/examples/vite/README.md @@ -7,6 +7,7 @@ There are three things you need to do to get WASM packaging working with vite: 3. Exclude `automerge-wasm` from the optimizer First, install the packages we need: + ```bash yarn add vite-plugin-top-level-await yarn add vite-plugin-wasm @@ -20,22 +21,22 @@ import wasm from "vite-plugin-wasm" import topLevelAwait from "vite-plugin-top-level-await" export default defineConfig({ - plugins: [topLevelAwait(), wasm()], - - // This is only necessary if you are using `SharedWorker` or `WebWorker`, as - // documented in https://vitejs.dev/guide/features.html#import-with-constructors - worker: { - format: "es", - plugins: [topLevelAwait(), wasm()] - }, + plugins: [topLevelAwait(), wasm()], - optimizeDeps: { - // This is necessary because otherwise `vite dev` includes two separate - // versions of the JS wrapper. This causes problems because the JS - // wrapper has a module level variable to track JS side heap - // allocations, initializing this twice causes horrible breakage - exclude: ["@automerge/automerge-wasm"] - } + // This is only necessary if you are using `SharedWorker` or `WebWorker`, as + // documented in https://vitejs.dev/guide/features.html#import-with-constructors + worker: { + format: "es", + plugins: [topLevelAwait(), wasm()], + }, + + optimizeDeps: { + // This is necessary because otherwise `vite dev` includes two separate + // versions of the JS wrapper. 
This causes problems because the JS + // wrapper has a module level variable to track JS side heap + // allocations, initializing this twice causes horrible breakage + exclude: ["@automerge/automerge-wasm"], + }, }) ``` @@ -51,4 +52,3 @@ yarn vite yarn install yarn dev ``` - diff --git a/javascript/examples/vite/main.ts b/javascript/examples/vite/main.ts index 157c8e48..0ba18f48 100644 --- a/javascript/examples/vite/main.ts +++ b/javascript/examples/vite/main.ts @@ -1,15 +1,15 @@ -import * as Automerge from "/node_modules/.vite/deps/automerge-js.js?v=6e973f28"; -console.log(Automerge); -let doc = Automerge.init(); -doc = Automerge.change(doc, (d) => d.hello = "from automerge-js"); -console.log(doc); -const result = JSON.stringify(doc); +import * as Automerge from "/node_modules/.vite/deps/automerge-js.js?v=6e973f28" +console.log(Automerge) +let doc = Automerge.init() +doc = Automerge.change(doc, d => (d.hello = "from automerge-js")) +console.log(doc) +const result = JSON.stringify(doc) if (typeof document !== "undefined") { - const element = document.createElement("div"); - element.innerHTML = JSON.stringify(result); - document.body.appendChild(element); + const element = document.createElement("div") + element.innerHTML = JSON.stringify(result) + document.body.appendChild(element) } else { - console.log("node:", result); + console.log("node:", result) } -//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi9ob21lL2FsZXgvUHJvamVjdHMvYXV0b21lcmdlL2F1dG9tZXJnZS1ycy9hdXRvbWVyZ2UtanMvZXhhbXBsZXMvdml0ZS9zcmMvbWFpbi50cyJdLCJzb3VyY2VzQ29udGVudCI6WyJpbXBvcnQgKiBhcyBBdXRvbWVyZ2UgZnJvbSBcImF1dG9tZXJnZS1qc1wiXG5cbi8vIGhlbGxvIHdvcmxkIGNvZGUgdGhhdCB3aWxsIHJ1biBjb3JyZWN0bHkgb24gd2ViIG9yIG5vZGVcblxuY29uc29sZS5sb2coQXV0b21lcmdlKVxubGV0IGRvYyA9IEF1dG9tZXJnZS5pbml0KClcbmRvYyA9IEF1dG9tZXJnZS5jaGFuZ2UoZG9jLCAoZDogYW55KSA9PiBkLmhlbGxvID0gXCJmcm9tIGF1dG9tZXJnZS1qc1wiKVxuY29uc29sZS5sb2coZG9jKVxuY29uc3QgcmVzdWx0ID0gSlNPTi5zdHJpbmdpZnkoZG9jKVxuXG5pZiAodHlwZW9mIGRvY3VtZW50ICE9PSAndW5kZWZpbmVkJykge1xuICAgIC8vIGJyb3dzZXJcbiAgICBjb25zdCBlbGVtZW50ID0gZG9jdW1lbnQuY3JlYXRlRWxlbWVudCgnZGl2Jyk7XG4gICAgZWxlbWVudC5pbm5lckhUTUwgPSBKU09OLnN0cmluZ2lmeShyZXN1bHQpXG4gICAgZG9jdW1lbnQuYm9keS5hcHBlbmRDaGlsZChlbGVtZW50KTtcbn0gZWxzZSB7XG4gICAgLy8gc2VydmVyXG4gICAgY29uc29sZS5sb2coXCJub2RlOlwiLCByZXN1bHQpXG59XG5cbiJdLCJtYXBwaW5ncyI6IkFBQUEsWUFBWSxlQUFlO0FBSTNCLFFBQVEsSUFBSSxTQUFTO0FBQ3JCLElBQUksTUFBTSxVQUFVLEtBQUs7QUFDekIsTUFBTSxVQUFVLE9BQU8sS0FBSyxDQUFDLE1BQVcsRUFBRSxRQUFRLG1CQUFtQjtBQUNyRSxRQUFRLElBQUksR0FBRztBQUNmLE1BQU0sU0FBUyxLQUFLLFVBQVUsR0FBRztBQUVqQyxJQUFJLE9BQU8sYUFBYSxhQUFhO0FBRWpDLFFBQU0sVUFBVSxTQUFTLGNBQWMsS0FBSztBQUM1QyxVQUFRLFlBQVksS0FBSyxVQUFVLE1BQU07QUFDekMsV0FBUyxLQUFLLFlBQVksT0FBTztBQUNyQyxPQUFPO0FBRUgsVUFBUSxJQUFJLFNBQVMsTUFBTTtBQUMvQjsiLCJuYW1lcyI6W119 \ No newline at end of file +//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi9ob21lL2FsZXgvUHJvamVjdHMvYXV0b21lcmdlL2F1dG9tZXJnZS1ycy9hdXRvbWVyZ2UtanMvZXhhbXBsZXMvdml0ZS9zcmMvbWFpbi50cyJdLCJzb3VyY2VzQ29udGVudCI6WyJpbXBvcnQgKiBhcyBBdXRvbWVyZ2UgZnJvbSBcImF1dG9tZXJnZS1qc1wiXG5cbi8vIGhlbGxvIHdvcmxkIGNvZGUgdGhhdCB3aWxsIHJ1biBjb3JyZWN0bHkgb24gd2ViIG9yIG5vZGVcblxuY29uc29sZS5sb2coQXV0b21lcmdlKVxubGV0IGRvYyA9IEF1dG9tZXJnZS5pbml0KClcbmRvYyA9IEF1dG9tZXJnZS5jaGFuZ2UoZG9jLCAoZDogYW55KSA9PiBkLmhlbGxvID0gXCJmcm9tIGF1dG9tZXJnZS1qc1wiKVxuY29uc29sZS5sb2coZG9jKVxuY29uc3QgcmVzdWx0ID0gSlNPTi5zdHJpbmdpZnkoZG9jKVxuXG5pZiAodHlwZW9mIGRvY3VtZW50ICE9PSAndW5kZWZpbmVkJykge1xuICAgIC8vIGJyb3dzZXJcbiAgICBjb25zdCBlbGVtZW50ID0gZG9jdW1lbnQuY3JlYXRlRWxlbWVudCgnZGl2Jyk7XG4gICAgZWxlbWVudC5pbm5lckhUTUwgPSBKU09OLnN0cmluZ2lmeShyZXN1bHQpXG4gICAgZG9jdW1lbnQuYm9keS5hcHBlbmRDaGlsZChlbGVtZW50KTtcbn0gZWxzZSB7XG4gICAgLy8gc2VydmVyXG4gICAgY29uc29sZS5sb2coXCJub2RlOlwiLCByZXN1bHQpXG59XG5cbiJdLCJtYXBwaW5ncyI6IkFBQUEsWUFBWSxlQUFlO0FBSTNCLFFBQVEsSUFBSSxTQUFTO0FBQ3JCLElBQUksTUFBTSxVQUFVLEtBQUs7QUFDekIsTUFBTSxVQUFVLE9BQU8sS0FBSyxDQUFDLE1BQVcsRUFBRSxRQUFRLG1CQUFtQjtBQUNyRSxRQUFRLElBQUksR0FBRztBQUNmLE1BQU0sU0FBUyxLQUFLLFVBQVUsR0FBRztBQUVqQyxJQUFJLE9BQU8sYUFBYSxhQUFhO0FBRWpDLFFBQU0sVUFBVSxTQUFTLGNBQWMsS0FBSztBQUM1QyxVQUFRLFlBQVksS0FBSyxVQUFVLE1BQU07QUFDekMsV0FBUyxLQUFLLFlBQVksT0FBTztBQUNyQyxPQUFPO0FBRUgsVUFBUSxJQUFJLFNBQVMsTUFBTTtBQUMvQjsiLCJuYW1lcyI6W119 diff --git a/javascript/examples/vite/src/counter.ts b/javascript/examples/vite/src/counter.ts index a3529e1f..3e516b6d 100644 --- a/javascript/examples/vite/src/counter.ts +++ b/javascript/examples/vite/src/counter.ts @@ -4,6 +4,6 @@ export function setupCounter(element: HTMLButtonElement) { counter = count element.innerHTML = `count is ${counter}` } - element.addEventListener('click', () => setCounter(++counter)) + element.addEventListener("click", () => setCounter(++counter)) setCounter(0) } diff --git a/javascript/examples/vite/src/main.ts 
b/javascript/examples/vite/src/main.ts index 8f7551d5..8dc8f92c 100644 --- a/javascript/examples/vite/src/main.ts +++ b/javascript/examples/vite/src/main.ts @@ -3,16 +3,15 @@ import * as Automerge from "@automerge/automerge" // hello world code that will run correctly on web or node let doc = Automerge.init() -doc = Automerge.change(doc, (d: any) => d.hello = "from automerge") +doc = Automerge.change(doc, (d: any) => (d.hello = "from automerge")) const result = JSON.stringify(doc) -if (typeof document !== 'undefined') { - // browser - const element = document.createElement('div'); - element.innerHTML = JSON.stringify(result) - document.body.appendChild(element); +if (typeof document !== "undefined") { + // browser + const element = document.createElement("div") + element.innerHTML = JSON.stringify(result) + document.body.appendChild(element) } else { - // server - console.log("node:", result) + // server + console.log("node:", result) } - diff --git a/javascript/examples/vite/vite.config.js b/javascript/examples/vite/vite.config.js index 9716d674..d80981bf 100644 --- a/javascript/examples/vite/vite.config.js +++ b/javascript/examples/vite/vite.config.js @@ -3,20 +3,20 @@ import wasm from "vite-plugin-wasm" import topLevelAwait from "vite-plugin-top-level-await" export default defineConfig({ + plugins: [topLevelAwait(), wasm()], + + // This is only necessary if you are using `SharedWorker` or `WebWorker`, as + // documented in https://vitejs.dev/guide/features.html#import-with-constructors + worker: { + format: "es", plugins: [topLevelAwait(), wasm()], + }, - // This is only necessary if you are using `SharedWorker` or `WebWorker`, as - // documented in https://vitejs.dev/guide/features.html#import-with-constructors - worker: { - format: "es", - plugins: [topLevelAwait(), wasm()] - }, - - optimizeDeps: { - // This is necessary because otherwise `vite dev` includes two separate - // versions of the JS wrapper. 
This causes problems because the JS - // wrapper has a module level variable to track JS side heap - // allocations, initializing this twice causes horrible breakage - exclude: ["@automerge/automerge-wasm"] - } + optimizeDeps: { + // This is necessary because otherwise `vite dev` includes two separate + // versions of the JS wrapper. This causes problems because the JS + // wrapper has a module level variable to track JS side heap + // allocations, initializing this twice causes horrible breakage + exclude: ["@automerge/automerge-wasm"], + }, }) diff --git a/javascript/examples/webpack/README.md b/javascript/examples/webpack/README.md index 917f9c8a..7563f27d 100644 --- a/javascript/examples/webpack/README.md +++ b/javascript/examples/webpack/README.md @@ -1,36 +1,34 @@ # Webpack + Automerge - Getting WASM working in webpack 5 is very easy. You just need to enable the `asyncWebAssembly` [experiment](https://webpack.js.org/configuration/experiments/). For example: - ```javascript -const path = require('path'); +const path = require("path") const clientConfig = { experiments: { asyncWebAssembly: true }, - target: 'web', - entry: './src/index.js', + target: "web", + entry: "./src/index.js", output: { - filename: 'main.js', - path: path.resolve(__dirname, 'public'), + filename: "main.js", + path: path.resolve(__dirname, "public"), }, mode: "development", // or production - performance: { // we dont want the wasm blob to generate warnings - hints: false, - maxEntrypointSize: 512000, - maxAssetSize: 512000 - } -}; + performance: { + // we dont want the wasm blob to generate warnings + hints: false, + maxEntrypointSize: 512000, + maxAssetSize: 512000, + }, +} module.exports = clientConfig ``` ## Running the example - ```bash yarn install yarn start diff --git a/javascript/examples/webpack/src/index.js b/javascript/examples/webpack/src/index.js index e3307083..3a9086e4 100644 --- a/javascript/examples/webpack/src/index.js +++ b/javascript/examples/webpack/src/index.js @@ 
-3,16 +3,15 @@ import * as Automerge from "@automerge/automerge" // hello world code that will run correctly on web or node let doc = Automerge.init() -doc = Automerge.change(doc, (d) => d.hello = "from automerge") +doc = Automerge.change(doc, d => (d.hello = "from automerge")) const result = JSON.stringify(doc) -if (typeof document !== 'undefined') { +if (typeof document !== "undefined") { // browser - const element = document.createElement('div'); + const element = document.createElement("div") element.innerHTML = JSON.stringify(result) - document.body.appendChild(element); + document.body.appendChild(element) } else { // server console.log("node:", result) } - diff --git a/javascript/examples/webpack/webpack.config.js b/javascript/examples/webpack/webpack.config.js index 3a6d83ff..51fd5127 100644 --- a/javascript/examples/webpack/webpack.config.js +++ b/javascript/examples/webpack/webpack.config.js @@ -1,36 +1,37 @@ -const path = require('path'); -const nodeExternals = require('webpack-node-externals'); +const path = require("path") +const nodeExternals = require("webpack-node-externals") // the most basic webpack config for node or web targets for automerge-wasm const serverConfig = { // basic setup for bundling a node package - target: 'node', + target: "node", externals: [nodeExternals()], externalsPresets: { node: true }, - entry: './src/index.js', + entry: "./src/index.js", output: { - filename: 'node.js', - path: path.resolve(__dirname, 'dist'), + filename: "node.js", + path: path.resolve(__dirname, "dist"), }, mode: "development", // or production -}; +} const clientConfig = { experiments: { asyncWebAssembly: true }, - target: 'web', - entry: './src/index.js', + target: "web", + entry: "./src/index.js", output: { - filename: 'main.js', - path: path.resolve(__dirname, 'public'), + filename: "main.js", + path: path.resolve(__dirname, "public"), }, mode: "development", // or production - performance: { // we dont want the wasm blob to generate warnings - 
hints: false, - maxEntrypointSize: 512000, - maxAssetSize: 512000 - } -}; + performance: { + // we dont want the wasm blob to generate warnings + hints: false, + maxEntrypointSize: 512000, + maxAssetSize: 512000, + }, +} -module.exports = [serverConfig, clientConfig]; +module.exports = [serverConfig, clientConfig] diff --git a/javascript/package.json b/javascript/package.json index 5fd2213e..b7afb5b7 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -53,6 +53,7 @@ "fast-sha256": "^1.3.0", "mocha": "^10.2.0", "pako": "^2.1.0", + "prettier": "^2.8.1", "ts-mocha": "^10.0.0", "ts-node": "^10.9.1", "typedoc": "^0.23.22", diff --git a/javascript/src/constants.ts b/javascript/src/constants.ts index e9517a60..d3bd8138 100644 --- a/javascript/src/constants.ts +++ b/javascript/src/constants.ts @@ -1,13 +1,12 @@ // Properties of the document root object -export const STATE = Symbol.for('_am_meta') // symbol used to hide application metadata on automerge objects -export const TRACE = Symbol.for('_am_trace') // used for debugging -export const OBJECT_ID = Symbol.for('_am_objectId') // synbol used to hide the object id on automerge objects -export const IS_PROXY = Symbol.for('_am_isProxy') // symbol used to test if the document is a proxy object - -export const UINT = Symbol.for('_am_uint') -export const INT = Symbol.for('_am_int') -export const F64 = Symbol.for('_am_f64') -export const COUNTER = Symbol.for('_am_counter') -export const TEXT = Symbol.for('_am_text') +export const STATE = Symbol.for("_am_meta") // symbol used to hide application metadata on automerge objects +export const TRACE = Symbol.for("_am_trace") // used for debugging +export const OBJECT_ID = Symbol.for("_am_objectId") // synbol used to hide the object id on automerge objects +export const IS_PROXY = Symbol.for("_am_isProxy") // symbol used to test if the document is a proxy object +export const UINT = Symbol.for("_am_uint") +export const INT = Symbol.for("_am_int") +export const F64 = 
Symbol.for("_am_f64") +export const COUNTER = Symbol.for("_am_counter") +export const TEXT = Symbol.for("_am_text") diff --git a/javascript/src/counter.ts b/javascript/src/counter.ts index c20d7fcf..d94a3034 100644 --- a/javascript/src/counter.ts +++ b/javascript/src/counter.ts @@ -6,7 +6,7 @@ import { COUNTER } from "./constants" * the value trivially converges. */ export class Counter { - value : number; + value: number constructor(value?: number) { this.value = value || 0 @@ -21,7 +21,7 @@ export class Counter { * concatenating it with another string, as in `x + ''`. * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf */ - valueOf() : number { + valueOf(): number { return this.value } @@ -30,7 +30,7 @@ export class Counter { * this method is called e.g. when you do `['value: ', x].join('')` or when * you use string interpolation: `value: ${x}`. */ - toString() : string { + toString(): string { return this.valueOf().toString() } @@ -38,7 +38,7 @@ export class Counter { * Returns the counter value, so that a JSON serialization of an Automerge * document represents the counter simply as an integer. */ - toJSON() : number { + toJSON(): number { return this.value } } @@ -53,20 +53,26 @@ class WriteableCounter extends Counter { objectId: ObjID key: Prop - constructor(value: number, context: Automerge, path: string[], objectId: ObjID, key: Prop) { + constructor( + value: number, + context: Automerge, + path: string[], + objectId: ObjID, + key: Prop + ) { super(value) this.context = context this.path = path this.objectId = objectId this.key = key } - + /** * Increases the value of the counter by `delta`. If `delta` is not given, * increases the value of the counter by 1. */ - increment(delta: number) : number { - delta = typeof delta === 'number' ? delta : 1 + increment(delta: number): number { + delta = typeof delta === "number" ? 
delta : 1 this.context.increment(this.objectId, this.key, delta) this.value += delta return this.value @@ -76,8 +82,8 @@ class WriteableCounter extends Counter { * Decreases the value of the counter by `delta`. If `delta` is not given, * decreases the value of the counter by 1. */ - decrement(delta: number) : number { - return this.increment(typeof delta === 'number' ? -delta : -1) + decrement(delta: number): number { + return this.increment(typeof delta === "number" ? -delta : -1) } } @@ -87,8 +93,14 @@ class WriteableCounter extends Counter { * `objectId` is the ID of the object containing the counter, and `key` is * the property name (key in map, or index in list) where the counter is * located. -*/ -export function getWriteableCounter(value: number, context: Automerge, path: string[], objectId: ObjID, key: Prop) { + */ +export function getWriteableCounter( + value: number, + context: Automerge, + path: string[], + objectId: ObjID, + key: Prop +) { return new WriteableCounter(value, context, path, objectId, key) } diff --git a/javascript/src/index.ts b/javascript/src/index.ts index df71c648..23df47ce 100644 --- a/javascript/src/index.ts +++ b/javascript/src/index.ts @@ -1,45 +1,71 @@ - /** @hidden **/ -export {/** @hidden */ uuid} from './uuid' +export { /** @hidden */ uuid } from "./uuid" -import {rootProxy, listProxy, mapProxy} from "./proxies" -import {STATE, TRACE, IS_PROXY, OBJECT_ID } from "./constants" +import { rootProxy, listProxy, mapProxy } from "./proxies" +import { STATE, TRACE, IS_PROXY, OBJECT_ID } from "./constants" -import {AutomergeValue, Counter} from "./types" -export {AutomergeValue, Counter, Int, Uint, Float64, ScalarValue} from "./types" +import { AutomergeValue, Counter } from "./types" +export { + AutomergeValue, + Counter, + Int, + Uint, + Float64, + ScalarValue, +} from "./types" -import {type API, type Patch} from "@automerge/automerge-wasm"; -export { type Patch, PutPatch, DelPatch, SplicePatch, IncPatch, SyncMessage, } from 
"@automerge/automerge-wasm" -import {ApiHandler, ChangeToEncode, UseApi} from "./low_level" +import { type API, type Patch } from "@automerge/automerge-wasm" +export { + type Patch, + PutPatch, + DelPatch, + SplicePatch, + IncPatch, + SyncMessage, +} from "@automerge/automerge-wasm" +import { ApiHandler, ChangeToEncode, UseApi } from "./low_level" -import {Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue} from "@automerge/automerge-wasm" -import {JsSyncState as SyncState, SyncMessage, DecodedSyncMessage} from "@automerge/automerge-wasm" +import { + Actor as ActorId, + Prop, + ObjID, + Change, + DecodedChange, + Heads, + Automerge, + MaterializeValue, +} from "@automerge/automerge-wasm" +import { + JsSyncState as SyncState, + SyncMessage, + DecodedSyncMessage, +} from "@automerge/automerge-wasm" /** Options passed to {@link change}, and {@link emptyChange} * @typeParam T - The type of value contained in the document */ export type ChangeOptions = { - /** A message which describes the changes */ - message?: string, - /** The unix timestamp of the change (purely advisory, not used in conflict resolution) */ - time?: number, - /** A callback which will be called to notify the caller of any changes to the document */ - patchCallback?: PatchCallback + /** A message which describes the changes */ + message?: string + /** The unix timestamp of the change (purely advisory, not used in conflict resolution) */ + time?: number + /** A callback which will be called to notify the caller of any changes to the document */ + patchCallback?: PatchCallback } /** Options passed to {@link loadIncremental}, {@link applyChanges}, and {@link receiveSyncMessage} * @typeParam T - The type of value contained in the document */ -export type ApplyOptions = {patchCallback?: PatchCallback} +export type ApplyOptions = { patchCallback?: PatchCallback } -/** +/** * An automerge document. 
* @typeParam T - The type of the value contained in this document * * Note that this provides read only access to the fields of the value. To * modify the value use {@link change} */ -export type Doc = {readonly [P in keyof T]: T[P]} +export type Doc = { readonly [P in keyof T]: T[P] } /** * Function which is called by {@link change} when making changes to a `Doc` @@ -56,79 +82,86 @@ export type ChangeFn = (doc: T) => void * @param before - The document before the change was made * @param after - The document after the change was made */ -export type PatchCallback = (patches: Array, before: Doc, after: Doc) => void +export type PatchCallback = ( + patches: Array, + before: Doc, + after: Doc +) => void /** @hidden **/ export interface State { - change: DecodedChange - snapshot: T + change: DecodedChange + snapshot: T } /** @hidden **/ export function use(api: API) { - UseApi(api) + UseApi(api) } import * as wasm from "@automerge/automerge-wasm" use(wasm) -/** +/** * Options to be passed to {@link init} or {@link load} * @typeParam T - The type of the value the document contains */ export type InitOptions = { - /** The actor ID to use for this document, a random one will be generated if `null` is passed */ - actor?: ActorId, - freeze?: boolean, - /** A callback which will be called with the initial patch once the document has finished loading */ - patchCallback?: PatchCallback, -}; - + /** The actor ID to use for this document, a random one will be generated if `null` is passed */ + actor?: ActorId + freeze?: boolean + /** A callback which will be called with the initial patch once the document has finished loading */ + patchCallback?: PatchCallback +} interface InternalState { - handle: Automerge, - heads: Heads | undefined, - freeze: boolean, - patchCallback?: PatchCallback + handle: Automerge + heads: Heads | undefined + freeze: boolean + patchCallback?: PatchCallback } /** @hidden */ export function getBackend(doc: Doc): Automerge { - return _state(doc).handle + 
return _state(doc).handle } function _state(doc: Doc, checkroot = true): InternalState { - if (typeof doc !== 'object') { - throw new RangeError("must be the document root") - } - const state = Reflect.get(doc, STATE) as InternalState - if (state === undefined || state == null || (checkroot && _obj(doc) !== "_root")) { - throw new RangeError("must be the document root") - } - return state + if (typeof doc !== "object") { + throw new RangeError("must be the document root") + } + const state = Reflect.get(doc, STATE) as InternalState + if ( + state === undefined || + state == null || + (checkroot && _obj(doc) !== "_root") + ) { + throw new RangeError("must be the document root") + } + return state } function _trace(doc: Doc): string | undefined { - return Reflect.get(doc, TRACE) as string + return Reflect.get(doc, TRACE) as string } function _obj(doc: Doc): ObjID | null { - if (!(typeof doc === 'object') || doc === null) { - return null - } - return Reflect.get(doc, OBJECT_ID) as ObjID + if (!(typeof doc === "object") || doc === null) { + return null + } + return Reflect.get(doc, OBJECT_ID) as ObjID } function _is_proxy(doc: Doc): boolean { - return !!Reflect.get(doc, IS_PROXY) + return !!Reflect.get(doc, IS_PROXY) } function importOpts(_actor?: ActorId | InitOptions): InitOptions { - if (typeof _actor === 'object') { - return _actor - } else { - return {actor: _actor} - } + if (typeof _actor === "object") { + return _actor + } else { + return { actor: _actor } + } } /** @@ -141,22 +174,27 @@ function importOpts(_actor?: ActorId | InitOptions): InitOptions { * random actor ID */ export function init(_opts?: ActorId | InitOptions): Doc { - const opts = importOpts(_opts) - const freeze = !!opts.freeze - const patchCallback = opts.patchCallback - const handle = ApiHandler.create(opts.actor) - handle.enablePatches(true) - handle.enableFreeze(!!opts.freeze) - handle.registerDatatype("counter", (n) => new Counter(n)) - const doc = handle.materialize("/", undefined, 
{handle, heads: undefined, freeze, patchCallback}) as Doc - return doc + const opts = importOpts(_opts) + const freeze = !!opts.freeze + const patchCallback = opts.patchCallback + const handle = ApiHandler.create(opts.actor) + handle.enablePatches(true) + handle.enableFreeze(!!opts.freeze) + handle.registerDatatype("counter", n => new Counter(n)) + const doc = handle.materialize("/", undefined, { + handle, + heads: undefined, + freeze, + patchCallback, + }) as Doc + return doc } /** * Make an immutable view of an automerge document as at `heads` * * @remarks - * The document returned from this function cannot be passed to {@link change}. + * The document returned from this function cannot be passed to {@link change}. * This is because it shares the same underlying memory as `doc`, but it is * consequently a very cheap copy. * @@ -168,9 +206,13 @@ export function init(_opts?: ActorId | InitOptions): Doc { * @param heads - The hashes of the heads to create a view at */ export function view(doc: Doc, heads: Heads): Doc { - const state = _state(doc) - const handle = state.handle - return state.handle.materialize("/", heads, { ...state, handle, heads }) as Doc + const state = _state(doc) + const handle = state.handle + return state.handle.materialize("/", heads, { + ...state, + handle, + heads, + }) as Doc } /** @@ -188,16 +230,19 @@ export function view(doc: Doc, heads: Heads): Doc { * @param doc - The document to clone * @param _opts - Either an actor ID to use for the new doc or an {@link InitOptions} */ -export function clone(doc: Doc, _opts?: ActorId | InitOptions): Doc { - const state = _state(doc) - const heads = state.heads - const opts = importOpts(_opts) - const handle = state.handle.fork(opts.actor, heads) +export function clone( + doc: Doc, + _opts?: ActorId | InitOptions +): Doc { + const state = _state(doc) + const heads = state.heads + const opts = importOpts(_opts) + const handle = state.handle.fork(opts.actor, heads) - // `change` uses the presence of 
state.heads to determine if we are in a view - // set it to undefined to indicate that this is a full fat document - const {heads: oldHeads, ...stateSansHeads} = state - return handle.applyPatches(doc, { ... stateSansHeads, handle }) + // `change` uses the presence of state.heads to determine if we are in a view + // set it to undefined to indicate that this is a full fat document + const { heads: oldHeads, ...stateSansHeads } = state + return handle.applyPatches(doc, { ...stateSansHeads, handle }) } /** Explicity free the memory backing a document. Note that this is note @@ -205,10 +250,10 @@ export function clone(doc: Doc, _opts?: ActorId | InitOptions): Doc * [`FinalizationRegistry`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry) */ export function free(doc: Doc) { - return _state(doc).handle.free() + return _state(doc).handle.free() } -/** +/** * Create an automerge document from a POJO * * @param initialState - The initial state which will be copied into the document @@ -224,11 +269,14 @@ export function free(doc: Doc) { * }) * ``` */ -export function from>(initialState: T | Doc, _opts?: ActorId | InitOptions): Doc { - return change(init(_opts), (d) => Object.assign(d, initialState)) +export function from>( + initialState: T | Doc, + _opts?: ActorId | InitOptions +): Doc { + return change(init(_opts), d => Object.assign(d, initialState)) } -/** +/** * Update the contents of an automerge document * @typeParam T - The type of the value contained in the document * @param doc - The document to update @@ -255,7 +303,7 @@ export function from>(initialState: T | Doc * ``` * * @example A change with a message and a timestamp - * + * * ``` * doc1 = automerge.change(doc1, {message: "add another value", timestamp: 1640995200}, d => { * d.key2 = "value2" @@ -274,66 +322,82 @@ export function from>(initialState: T | Doc * assert.equal(patchedPath, ["key2"]) * ``` */ -export function change(doc: Doc, options: string | 
ChangeOptions | ChangeFn, callback?: ChangeFn): Doc { - if (typeof options === 'function') { - return _change(doc, {}, options) - } else if (typeof callback === 'function') { - if (typeof options === "string") { - options = {message: options} - } - return _change(doc, options, callback) - } else { - throw RangeError("Invalid args for change") +export function change( + doc: Doc, + options: string | ChangeOptions | ChangeFn, + callback?: ChangeFn +): Doc { + if (typeof options === "function") { + return _change(doc, {}, options) + } else if (typeof callback === "function") { + if (typeof options === "string") { + options = { message: options } } + return _change(doc, options, callback) + } else { + throw RangeError("Invalid args for change") + } } -function progressDocument(doc: Doc, heads: Heads | null, callback?: PatchCallback): Doc { - if (heads == null) { - return doc - } - const state = _state(doc) - const nextState = {...state, heads: undefined}; - const nextDoc = state.handle.applyPatches(doc, nextState, callback) +function progressDocument( + doc: Doc, + heads: Heads | null, + callback?: PatchCallback +): Doc { + if (heads == null) { + return doc + } + const state = _state(doc) + const nextState = { ...state, heads: undefined } + const nextDoc = state.handle.applyPatches(doc, nextState, callback) + state.heads = heads + return nextDoc +} + +function _change( + doc: Doc, + options: ChangeOptions, + callback: ChangeFn +): Doc { + if (typeof callback !== "function") { + throw new RangeError("invalid change function") + } + + const state = _state(doc) + + if (doc === undefined || state === undefined) { + throw new RangeError("must be the document root") + } + if (state.heads) { + throw new RangeError( + "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." 
+ ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + try { state.heads = heads - return nextDoc -} - -function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): Doc { - - - if (typeof callback !== "function") { - throw new RangeError("invalid change function"); - } - - const state = _state(doc) - - if (doc === undefined || state === undefined) { - throw new RangeError("must be the document root"); - } - if (state.heads) { - throw new RangeError("Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy.") - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - try { - state.heads = heads - const root: T = rootProxy(state.handle); - callback(root) - if (state.handle.pendingOps() === 0) { - state.heads = undefined - return doc - } else { - state.handle.commit(options.message, options.time) - return progressDocument(doc, heads, options.patchCallback || state.patchCallback); - } - } catch (e) { - //console.log("ERROR: ",e) - state.heads = undefined - state.handle.rollback() - throw e + const root: T = rootProxy(state.handle) + callback(root) + if (state.handle.pendingOps() === 0) { + state.heads = undefined + return doc + } else { + state.handle.commit(options.message, options.time) + return progressDocument( + doc, + heads, + options.patchCallback || state.patchCallback + ) } + } catch (e) { + //console.log("ERROR: ",e) + state.heads = undefined + state.handle.rollback() + throw e + } } /** @@ -347,26 +411,31 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn(doc: Doc, options: string | ChangeOptions | void) { - if (options === undefined) { - options = {} - } - if (typeof options === "string") { - options = {message: options} - } +export function emptyChange( + doc: Doc, + options: string | ChangeOptions | void +) 
{ + if (options === undefined) { + options = {} + } + if (typeof options === "string") { + options = { message: options } + } - const state = _state(doc) + const state = _state(doc) - if (state.heads) { - throw new RangeError("Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy.") - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } + if (state.heads) { + throw new RangeError( + "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." + ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } - const heads = state.handle.getHeads() - state.handle.emptyChange(options.message, options.time) - return progressDocument(doc, heads) + const heads = state.handle.getHeads() + state.handle.emptyChange(options.message, options.time) + return progressDocument(doc, heads) } /** @@ -384,16 +453,23 @@ export function emptyChange(doc: Doc, options: string | ChangeOptions | * have the complete document yet). If you need to handle incomplete content use * {@link init} followed by {@link loadIncremental}. 
*/ -export function load(data: Uint8Array, _opts?: ActorId | InitOptions): Doc { - const opts = importOpts(_opts) - const actor = opts.actor - const patchCallback = opts.patchCallback - const handle = ApiHandler.load(data, actor) - handle.enablePatches(true) - handle.enableFreeze(!!opts.freeze) - handle.registerDatatype("counter", (n) => new Counter(n)) - const doc = handle.materialize("/", undefined, {handle, heads: undefined, patchCallback}) as Doc - return doc +export function load( + data: Uint8Array, + _opts?: ActorId | InitOptions +): Doc { + const opts = importOpts(_opts) + const actor = opts.actor + const patchCallback = opts.patchCallback + const handle = ApiHandler.load(data, actor) + handle.enablePatches(true) + handle.enableFreeze(!!opts.freeze) + handle.registerDatatype("counter", n => new Counter(n)) + const doc = handle.materialize("/", undefined, { + handle, + heads: undefined, + patchCallback, + }) as Doc + return doc } /** @@ -413,18 +489,26 @@ export function load(data: Uint8Array, _opts?: ActorId | InitOptions): Doc * Note that this function will succesfully load the results of {@link save} as * well as {@link getLastLocalChange} or any other incremental change. 
*/ -export function loadIncremental(doc: Doc, data: Uint8Array, opts?: ApplyOptions): Doc { - if (!opts) {opts = {}} - const state = _state(doc) - if (state.heads) { - throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - state.handle.loadIncremental(data) - return progressDocument(doc, heads, opts.patchCallback || state.patchCallback) +export function loadIncremental( + doc: Doc, + data: Uint8Array, + opts?: ApplyOptions +): Doc { + if (!opts) { + opts = {} + } + const state = _state(doc) + if (state.heads) { + throw new RangeError( + "Attempting to change an out of date document - set at: " + _trace(doc) + ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + state.handle.loadIncremental(data) + return progressDocument(doc, heads, opts.patchCallback || state.patchCallback) } /** @@ -435,7 +519,7 @@ export function loadIncremental(doc: Doc, data: Uint8Array, opts?: ApplyOp * The returned bytes can be passed to {@link load} or {@link loadIncremental} */ export function save(doc: Doc): Uint8Array { - return _state(doc).handle.save() + return _state(doc).handle.save() } /** @@ -446,7 +530,7 @@ export function save(doc: Doc): Uint8Array { * * @returns - The merged document * - * Often when you are merging documents you will also need to clone them. Both + * Often when you are merging documents you will also need to clone them. Both * arguments to `merge` are frozen after the call so you can no longer call * mutating methods (such as {@link change}) on them. The symtom of this will be * an error which says "Attempting to change an out of date document". To @@ -454,29 +538,31 @@ export function save(doc: Doc): Uint8Array { * merge}. 
*/ export function merge(local: Doc, remote: Doc): Doc { - const localState = _state(local) + const localState = _state(local) - if (localState.heads) { - throw new RangeError("Attempting to change an out of date document - set at: " + _trace(local)); - } - const heads = localState.handle.getHeads() - const remoteState = _state(remote) - const changes = localState.handle.getChangesAdded(remoteState.handle) - localState.handle.applyChanges(changes) - return progressDocument(local, heads, localState.patchCallback) + if (localState.heads) { + throw new RangeError( + "Attempting to change an out of date document - set at: " + _trace(local) + ) + } + const heads = localState.handle.getHeads() + const remoteState = _state(remote) + const changes = localState.handle.getChangesAdded(remoteState.handle) + localState.handle.applyChanges(changes) + return progressDocument(local, heads, localState.patchCallback) } /** * Get the actor ID associated with the document */ export function getActorId(doc: Doc): ActorId { - const state = _state(doc) - return state.handle.getActorId() + const state = _state(doc) + return state.handle.getActorId() } /** * The type of conflicts for particular key or index - * + * * Maps and sequences in automerge can contain conflicting values for a * particular key or index. In this case {@link getConflicts} can be used to * obtain a `Conflicts` representing the multiple values present for the property @@ -484,47 +570,51 @@ export function getActorId(doc: Doc): ActorId { * A `Conflicts` is a map from a unique (per property or index) key to one of * the possible conflicting values for the given property. 
*/ -type Conflicts = {[key: string]: AutomergeValue} +type Conflicts = { [key: string]: AutomergeValue } -function conflictAt(context: Automerge, objectId: ObjID, prop: Prop): Conflicts | undefined { - const values = context.getAll(objectId, prop) - if (values.length <= 1) { - return +function conflictAt( + context: Automerge, + objectId: ObjID, + prop: Prop +): Conflicts | undefined { + const values = context.getAll(objectId, prop) + if (values.length <= 1) { + return + } + const result: Conflicts = {} + for (const fullVal of values) { + switch (fullVal[0]) { + case "map": + result[fullVal[1]] = mapProxy(context, fullVal[1], [prop], true) + break + case "list": + result[fullVal[1]] = listProxy(context, fullVal[1], [prop], true) + break + case "text": + result[fullVal[1]] = context.text(fullVal[1]) + break + //case "table": + //case "cursor": + case "str": + case "uint": + case "int": + case "f64": + case "boolean": + case "bytes": + case "null": + result[fullVal[2]] = fullVal[1] + break + case "counter": + result[fullVal[2]] = new Counter(fullVal[1]) + break + case "timestamp": + result[fullVal[2]] = new Date(fullVal[1]) + break + default: + throw RangeError(`datatype ${fullVal[0]} unimplemented`) } - const result: Conflicts = {} - for (const fullVal of values) { - switch (fullVal[0]) { - case "map": - result[fullVal[1]] = mapProxy(context, fullVal[1], [prop], true) - break; - case "list": - result[fullVal[1]] = listProxy(context, fullVal[1], [prop], true) - break; - case "text": - result[fullVal[1]] = context.text(fullVal[1]) - break; - //case "table": - //case "cursor": - case "str": - case "uint": - case "int": - case "f64": - case "boolean": - case "bytes": - case "null": - result[fullVal[2]] = fullVal[1] - break; - case "counter": - result[fullVal[2]] = new Counter(fullVal[1]) - break; - case "timestamp": - result[fullVal[2]] = new Date(fullVal[1]) - break; - default: - throw RangeError(`datatype ${fullVal[0]} unimplemented`) - } - } - return result + } + 
return result } /** @@ -532,36 +622,36 @@ function conflictAt(context: Automerge, objectId: ObjID, prop: Prop): Conflicts * * The values of properties in a map in automerge can be conflicted if there * are concurrent "put" operations to the same key. Automerge chooses one value - * arbitrarily (but deterministically, any two nodes who have the same set of - * changes will choose the same value) from the set of conflicting values to - * present as the value of the key. + * arbitrarily (but deterministically, any two nodes who have the same set of + * changes will choose the same value) from the set of conflicting values to + * present as the value of the key. * * Sometimes you may want to examine these conflicts, in this case you can use - * {@link getConflicts} to get the conflicts for the key. + * {@link getConflicts} to get the conflicts for the key. * * @example * ``` * import * as automerge from "@automerge/automerge" - * + * * type Profile = { * pets: Array<{name: string, type: string}> * } - * + * * let doc1 = automerge.init("aaaa") * doc1 = automerge.change(doc1, d => { * d.pets = [{name: "Lassie", type: "dog"}] * }) * let doc2 = automerge.init("bbbb") * doc2 = automerge.merge(doc2, automerge.clone(doc1)) - * + * * doc2 = automerge.change(doc2, d => { * d.pets[0].name = "Beethoven" * }) - * + * * doc1 = automerge.change(doc1, d => { * d.pets[0].name = "Babe" * }) - * + * * const doc3 = automerge.merge(doc1, doc2) * * // Note that here we pass `doc3.pets`, not `doc3` @@ -571,14 +661,17 @@ function conflictAt(context: Automerge, objectId: ObjID, prop: Prop): Conflicts * assert.deepEqual(Object.values(conflicts), ["Babe", Beethoven"]) * ``` */ -export function getConflicts(doc: Doc, prop: Prop): Conflicts | undefined { - const state = _state(doc, false) - const objectId = _obj(doc) - if (objectId != null) { - return conflictAt(state.handle, objectId, prop) - } else { - return undefined - } +export function getConflicts( + doc: Doc, + prop: Prop +): Conflicts | 
undefined { + const state = _state(doc, false) + const objectId = _obj(doc) + if (objectId != null) { + return conflictAt(state.handle, objectId, prop) + } else { + return undefined + } } /** @@ -589,8 +682,8 @@ export function getConflicts(doc: Doc, prop: Prop): Conflicts | undefined * getLastLocalChange} and send the result over the network to other peers. */ export function getLastLocalChange(doc: Doc): Change | undefined { - const state = _state(doc) - return state.handle.getLastLocalChange() || undefined + const state = _state(doc) + return state.handle.getLastLocalChange() || undefined } /** @@ -600,16 +693,16 @@ export function getLastLocalChange(doc: Doc): Change | undefined { * if `doc` is not an automerge document this will return null. */ export function getObjectId(doc: Doc, prop?: Prop): ObjID | null { - if (prop) { - const state = _state(doc, false) - const objectId = _obj(doc) - if (!state || !objectId) { - return null - } - return state.handle.get(objectId, prop) as ObjID - } else { - return _obj(doc) + if (prop) { + const state = _state(doc, false) + const objectId = _obj(doc) + if (!state || !objectId) { + return null } + return state.handle.get(objectId, prop) as ObjID + } else { + return _obj(doc) + } } /** @@ -619,11 +712,11 @@ export function getObjectId(doc: Doc, prop?: Prop): ObjID | null { * Note that this will crash if there are changes in `oldState` which are not in `newState`. 
*/ export function getChanges(oldState: Doc, newState: Doc): Change[] { - const n = _state(newState) - return n.handle.getChanges(getHeads(oldState)) + const n = _state(newState) + return n.handle.getChanges(getHeads(oldState)) } -/** +/** * Get all the changes in a document * * This is different to {@link save} because the output is an array of changes @@ -631,8 +724,8 @@ export function getChanges(oldState: Doc, newState: Doc): Change[] { * */ export function getAllChanges(doc: Doc): Change[] { - const state = _state(doc) - return state.handle.getChanges([]) + const state = _state(doc) + return state.handle.getChanges([]) } /** @@ -646,48 +739,58 @@ export function getAllChanges(doc: Doc): Change[] { * informed of any changes which occur as a result of applying the changes * */ -export function applyChanges(doc: Doc, changes: Change[], opts?: ApplyOptions): [Doc] { - const state = _state(doc) - if (!opts) {opts = {}} - if (state.heads) { - throw new RangeError("Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy.") - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads(); - state.handle.applyChanges(changes) - state.heads = heads; - return [progressDocument(doc, heads, opts.patchCallback || state.patchCallback)] +export function applyChanges( + doc: Doc, + changes: Change[], + opts?: ApplyOptions +): [Doc] { + const state = _state(doc) + if (!opts) { + opts = {} + } + if (state.heads) { + throw new RangeError( + "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." 
+ ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + state.handle.applyChanges(changes) + state.heads = heads + return [ + progressDocument(doc, heads, opts.patchCallback || state.patchCallback), + ] } /** @hidden */ export function getHistory(doc: Doc): State[] { - const history = getAllChanges(doc) - return history.map((change, index) => ({ - get change() { - return decodeChange(change) - }, - get snapshot() { - const [state] = applyChanges(init(), history.slice(0, index + 1)) - return state - } - }) - ) + const history = getAllChanges(doc) + return history.map((change, index) => ({ + get change() { + return decodeChange(change) + }, + get snapshot() { + const [state] = applyChanges(init(), history.slice(0, index + 1)) + return state + }, + })) } /** @hidden */ // FIXME : no tests // FIXME can we just use deep equals now? export function equals(val1: unknown, val2: unknown): boolean { - if (!isObject(val1) || !isObject(val2)) return val1 === val2 - const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort() - if (keys1.length !== keys2.length) return false - for (let i = 0; i < keys1.length; i++) { - if (keys1[i] !== keys2[i]) return false - if (!equals(val1[keys1[i]], val2[keys2[i]])) return false - } - return true + if (!isObject(val1) || !isObject(val2)) return val1 === val2 + const keys1 = Object.keys(val1).sort(), + keys2 = Object.keys(val2).sort() + if (keys1.length !== keys2.length) return false + for (let i = 0; i < keys1.length; i++) { + if (keys1[i] !== keys2[i]) return false + if (!equals(val1[keys1[i]], val2[keys2[i]])) return false + } + return true } /** @@ -696,10 +799,10 @@ export function equals(val1: unknown, val2: unknown): boolean { * @group sync * */ export function encodeSyncState(state: SyncState): Uint8Array { - const sync = ApiHandler.importSyncState(state) - const result = ApiHandler.encodeSyncState(sync) - sync.free() - return 
result + const sync = ApiHandler.importSyncState(state) + const result = ApiHandler.encodeSyncState(sync) + sync.free() + return result } /** @@ -708,10 +811,10 @@ export function encodeSyncState(state: SyncState): Uint8Array { * @group sync */ export function decodeSyncState(state: Uint8Array): SyncState { - const sync = ApiHandler.decodeSyncState(state) - const result = ApiHandler.exportSyncState(sync) - sync.free() - return result + const sync = ApiHandler.decodeSyncState(state) + const result = ApiHandler.exportSyncState(sync) + sync.free() + return result } /** @@ -725,12 +828,15 @@ export function decodeSyncState(state: Uint8Array): SyncState { * `newSyncState` should replace `inState` and `syncMessage` should be sent to * the peer if it is not null. If `syncMessage` is null then we are up to date. */ -export function generateSyncMessage(doc: Doc, inState: SyncState): [SyncState, SyncMessage | null] { - const state = _state(doc) - const syncState = ApiHandler.importSyncState(inState) - const message = state.handle.generateSyncMessage(syncState) - const outState = ApiHandler.exportSyncState(syncState) - return [outState, message] +export function generateSyncMessage( + doc: Doc, + inState: SyncState +): [SyncState, SyncMessage | null] { + const state = _state(doc) + const syncState = ApiHandler.importSyncState(inState) + const message = state.handle.generateSyncMessage(syncState) + const outState = ApiHandler.exportSyncState(syncState) + return [outState, message] } /** @@ -741,7 +847,7 @@ export function generateSyncMessage(doc: Doc, inState: SyncState): [SyncSt * @param doc - The doc the sync message is about * @param inState - The {@link SyncState} for the peer we are communicating with * @param message - The message which was received - * @param opts - Any {@link ApplyOption}s, used for passing a + * @param opts - Any {@link ApplyOption}s, used for passing a * {@link PatchCallback} which will be informed of any changes * in `doc` which occur because of the 
received sync message. * @@ -750,20 +856,33 @@ export function generateSyncMessage(doc: Doc, inState: SyncState): [SyncSt * `inState` and `syncMessage` should be sent to the peer if it is not null. If * `syncMessage` is null then we are up to date. */ -export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage, opts?: ApplyOptions): [Doc, SyncState, null] { - const syncState = ApiHandler.importSyncState(inState) - if (!opts) {opts = {}} - const state = _state(doc) - if (state.heads) { - throw new RangeError("Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy.") - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - state.handle.receiveSyncMessage(syncState, message) - const outSyncState = ApiHandler.exportSyncState(syncState) - return [progressDocument(doc, heads, opts.patchCallback || state.patchCallback), outSyncState, null]; +export function receiveSyncMessage( + doc: Doc, + inState: SyncState, + message: SyncMessage, + opts?: ApplyOptions +): [Doc, SyncState, null] { + const syncState = ApiHandler.importSyncState(inState) + if (!opts) { + opts = {} + } + const state = _state(doc) + if (state.heads) { + throw new RangeError( + "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." 
+ ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + state.handle.receiveSyncMessage(syncState, message) + const outSyncState = ApiHandler.exportSyncState(syncState) + return [ + progressDocument(doc, heads, opts.patchCallback || state.patchCallback), + outSyncState, + null, + ] } /** @@ -775,75 +894,81 @@ export function receiveSyncMessage(doc: Doc, inState: SyncState, message: * @group sync */ export function initSyncState(): SyncState { - return ApiHandler.exportSyncState(ApiHandler.initSyncState()) + return ApiHandler.exportSyncState(ApiHandler.initSyncState()) } /** @hidden */ export function encodeChange(change: ChangeToEncode): Change { - return ApiHandler.encodeChange(change) + return ApiHandler.encodeChange(change) } /** @hidden */ export function decodeChange(data: Change): DecodedChange { - return ApiHandler.decodeChange(data) + return ApiHandler.decodeChange(data) } /** @hidden */ export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { - return ApiHandler.encodeSyncMessage(message) + return ApiHandler.encodeSyncMessage(message) } /** @hidden */ export function decodeSyncMessage(message: SyncMessage): DecodedSyncMessage { - return ApiHandler.decodeSyncMessage(message) + return ApiHandler.decodeSyncMessage(message) } /** * Get any changes in `doc` which are not dependencies of `heads` */ export function getMissingDeps(doc: Doc, heads: Heads): Heads { - const state = _state(doc) - return state.handle.getMissingDeps(heads) + const state = _state(doc) + return state.handle.getMissingDeps(heads) } -export function splice(doc: Doc, prop: Prop, index: number, del: number, newText?: string) { - if (!_is_proxy(doc)) { - throw new RangeError("object cannot be modified outside of a change block") - } - const state = _state(doc, false) - const objectId = _obj(doc) - if (!objectId) { - throw new RangeError("invalid object for splice") - } - const value 
= `${objectId}/${prop}` - try { - return state.handle.splice(value, index, del, newText) - } catch (e) { - throw new RangeError(`Cannot splice: ${e}`) - } +export function splice( + doc: Doc, + prop: Prop, + index: number, + del: number, + newText?: string +) { + if (!_is_proxy(doc)) { + throw new RangeError("object cannot be modified outside of a change block") + } + const state = _state(doc, false) + const objectId = _obj(doc) + if (!objectId) { + throw new RangeError("invalid object for splice") + } + const value = `${objectId}/${prop}` + try { + return state.handle.splice(value, index, del, newText) + } catch (e) { + throw new RangeError(`Cannot splice: ${e}`) + } } /** * Get the hashes of the heads of this document */ export function getHeads(doc: Doc): Heads { - const state = _state(doc) - return state.heads || state.handle.getHeads() + const state = _state(doc) + return state.heads || state.handle.getHeads() } /** @hidden */ export function dump(doc: Doc) { - const state = _state(doc) - state.handle.dump() + const state = _state(doc) + state.handle.dump() } /** @hidden */ export function toJS(doc: Doc): T { - const state = _state(doc) - const enabled = state.handle.enableFreeze(false) - const result = state.handle.materialize() - state.handle.enableFreeze(enabled) - return result as T + const state = _state(doc) + const enabled = state.handle.enableFreeze(false) + const result = state.handle.materialize() + state.handle.enableFreeze(enabled) + return result as T } export function isAutomerge(doc: unknown): boolean { @@ -855,7 +980,19 @@ export function isAutomerge(doc: unknown): boolean { } function isObject(obj: unknown): obj is Record { - return typeof obj === 'object' && obj !== null + return typeof obj === "object" && obj !== null } -export type {API, SyncState, ActorId, Conflicts, Prop, Change, ObjID, DecodedChange, DecodedSyncMessage, Heads, MaterializeValue} +export type { + API, + SyncState, + ActorId, + Conflicts, + Prop, + Change, + ObjID, + 
DecodedChange, + DecodedSyncMessage, + Heads, + MaterializeValue, +} diff --git a/javascript/src/low_level.ts b/javascript/src/low_level.ts index 6eabfa52..51017cb3 100644 --- a/javascript/src/low_level.ts +++ b/javascript/src/low_level.ts @@ -1,5 +1,14 @@ - -import { Automerge, Change, DecodedChange, Actor, SyncState, SyncMessage, JsSyncState, DecodedSyncMessage, ChangeToEncode } from "@automerge/automerge-wasm" +import { + Automerge, + Change, + DecodedChange, + Actor, + SyncState, + SyncMessage, + JsSyncState, + DecodedSyncMessage, + ChangeToEncode, +} from "@automerge/automerge-wasm" export { ChangeToEncode } from "@automerge/automerge-wasm" import { API } from "@automerge/automerge-wasm" @@ -10,17 +19,39 @@ export function UseApi(api: API) { } /* eslint-disable */ -export const ApiHandler : API = { - create(actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, - load(data: Uint8Array, actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called (load)") }, - encodeChange(change: ChangeToEncode): Change { throw new RangeError("Automerge.use() not called (encodeChange)") }, - decodeChange(change: Change): DecodedChange { throw new RangeError("Automerge.use() not called (decodeChange)") }, - initSyncState(): SyncState { throw new RangeError("Automerge.use() not called (initSyncState)") }, - encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { throw new RangeError("Automerge.use() not called (encodeSyncMessage)") }, - decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage { throw new RangeError("Automerge.use() not called (decodeSyncMessage)") }, - encodeSyncState(state: SyncState): Uint8Array { throw new RangeError("Automerge.use() not called (encodeSyncState)") }, - decodeSyncState(data: Uint8Array): SyncState { throw new RangeError("Automerge.use() not called (decodeSyncState)") }, - exportSyncState(state: SyncState): JsSyncState { throw new RangeError("Automerge.use() not called (exportSyncState)") }, - 
importSyncState(state: JsSyncState): SyncState { throw new RangeError("Automerge.use() not called (importSyncState)") }, +export const ApiHandler: API = { + create(actor?: Actor): Automerge { + throw new RangeError("Automerge.use() not called") + }, + load(data: Uint8Array, actor?: Actor): Automerge { + throw new RangeError("Automerge.use() not called (load)") + }, + encodeChange(change: ChangeToEncode): Change { + throw new RangeError("Automerge.use() not called (encodeChange)") + }, + decodeChange(change: Change): DecodedChange { + throw new RangeError("Automerge.use() not called (decodeChange)") + }, + initSyncState(): SyncState { + throw new RangeError("Automerge.use() not called (initSyncState)") + }, + encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { + throw new RangeError("Automerge.use() not called (encodeSyncMessage)") + }, + decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage { + throw new RangeError("Automerge.use() not called (decodeSyncMessage)") + }, + encodeSyncState(state: SyncState): Uint8Array { + throw new RangeError("Automerge.use() not called (encodeSyncState)") + }, + decodeSyncState(data: Uint8Array): SyncState { + throw new RangeError("Automerge.use() not called (decodeSyncState)") + }, + exportSyncState(state: SyncState): JsSyncState { + throw new RangeError("Automerge.use() not called (exportSyncState)") + }, + importSyncState(state: JsSyncState): SyncState { + throw new RangeError("Automerge.use() not called (importSyncState)") + }, } /* eslint-enable */ diff --git a/javascript/src/numbers.ts b/javascript/src/numbers.ts index 9d63bcc5..d52a36c5 100644 --- a/javascript/src/numbers.ts +++ b/javascript/src/numbers.ts @@ -3,10 +3,16 @@ import { INT, UINT, F64 } from "./constants" export class Int { - value: number; + value: number constructor(value: number) { - if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= Number.MIN_SAFE_INTEGER)) { + if ( + !( + Number.isInteger(value) && + value <= 
Number.MAX_SAFE_INTEGER && + value >= Number.MIN_SAFE_INTEGER + ) + ) { throw new RangeError(`Value ${value} cannot be a uint`) } this.value = value @@ -16,10 +22,16 @@ export class Int { } export class Uint { - value: number; + value: number constructor(value: number) { - if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= 0)) { + if ( + !( + Number.isInteger(value) && + value <= Number.MAX_SAFE_INTEGER && + value >= 0 + ) + ) { throw new RangeError(`Value ${value} cannot be a uint`) } this.value = value @@ -29,10 +41,10 @@ export class Uint { } export class Float64 { - value: number; + value: number constructor(value: number) { - if (typeof value !== 'number') { + if (typeof value !== "number") { throw new RangeError(`Value ${value} cannot be a float64`) } this.value = value || 0.0 @@ -40,4 +52,3 @@ export class Float64 { Object.freeze(this) } } - diff --git a/javascript/src/proxies.ts b/javascript/src/proxies.ts index ff03be4d..523c4547 100644 --- a/javascript/src/proxies.ts +++ b/javascript/src/proxies.ts @@ -1,123 +1,149 @@ - import { Automerge, Heads, ObjID } from "@automerge/automerge-wasm" import { Prop } from "@automerge/automerge-wasm" import { AutomergeValue, ScalarValue, MapValue, ListValue } from "./types" import { Counter, getWriteableCounter } from "./counter" -import { STATE, TRACE, IS_PROXY, OBJECT_ID, COUNTER, INT, UINT, F64 } from "./constants" +import { + STATE, + TRACE, + IS_PROXY, + OBJECT_ID, + COUNTER, + INT, + UINT, + F64, +} from "./constants" function parseListIndex(key) { - if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) - if (typeof key !== 'number') { + if (typeof key === "string" && /^[0-9]+$/.test(key)) key = parseInt(key, 10) + if (typeof key !== "number") { return key } if (key < 0 || isNaN(key) || key === Infinity || key === -Infinity) { - throw new RangeError('A list index must be positive, but you passed ' + key) + throw new RangeError("A list index must be positive, but you 
passed " + key) } return key } -function valueAt(target, prop: Prop) : AutomergeValue | undefined { - const { context, objectId, path, readonly, heads} = target - const value = context.getWithType(objectId, prop, heads) - if (value === null) { - return - } - const datatype = value[0] - const val = value[1] - switch (datatype) { - case undefined: return; - case "map": return mapProxy(context, val, [ ... path, prop ], readonly, heads); - case "list": return listProxy(context, val, [ ... path, prop ], readonly, heads); - case "text": return context.text(val, heads); - case "str": return val; - case "uint": return val; - case "int": return val; - case "f64": return val; - case "boolean": return val; - case "null": return null; - case "bytes": return val; - case "timestamp": return val; - case "counter": { - if (readonly) { - return new Counter(val); - } else { - return getWriteableCounter(val, context, path, objectId, prop) - } - } - default: - throw RangeError(`datatype ${datatype} unimplemented`) +function valueAt(target, prop: Prop): AutomergeValue | undefined { + const { context, objectId, path, readonly, heads } = target + const value = context.getWithType(objectId, prop, heads) + if (value === null) { + return + } + const datatype = value[0] + const val = value[1] + switch (datatype) { + case undefined: + return + case "map": + return mapProxy(context, val, [...path, prop], readonly, heads) + case "list": + return listProxy(context, val, [...path, prop], readonly, heads) + case "text": + return context.text(val, heads) + case "str": + return val + case "uint": + return val + case "int": + return val + case "f64": + return val + case "boolean": + return val + case "null": + return null + case "bytes": + return val + case "timestamp": + return val + case "counter": { + if (readonly) { + return new Counter(val) + } else { + return getWriteableCounter(val, context, path, objectId, prop) } + } + default: + throw RangeError(`datatype ${datatype} unimplemented`) + } } 
function import_value(value) { - switch (typeof value) { - case 'object': - if (value == null) { - return [ null, "null"] - } else if (value[UINT]) { - return [ value.value, "uint" ] - } else if (value[INT]) { - return [ value.value, "int" ] - } else if (value[F64]) { - return [ value.value, "f64" ] - } else if (value[COUNTER]) { - return [ value.value, "counter" ] - } else if (value instanceof Date) { - return [ value.getTime(), "timestamp" ] - } else if (value instanceof Uint8Array) { - return [ value, "bytes" ] - } else if (value instanceof Array) { - return [ value, "list" ] - } else if (Object.getPrototypeOf(value) === Object.getPrototypeOf({})) { - return [ value, "map" ] - } else if (value[OBJECT_ID]) { - throw new RangeError('Cannot create a reference to an existing document object') - } else { - throw new RangeError(`Cannot assign unknown object: ${value}`) - } - break; - case 'boolean': - return [ value, "boolean" ] - case 'number': - if (Number.isInteger(value)) { - return [ value, "int" ] - } else { - return [ value, "f64" ] - } - break; - case 'string': - return [ value, "text" ] - break; - default: - throw new RangeError(`Unsupported type of value: ${typeof value}`) - } + switch (typeof value) { + case "object": + if (value == null) { + return [null, "null"] + } else if (value[UINT]) { + return [value.value, "uint"] + } else if (value[INT]) { + return [value.value, "int"] + } else if (value[F64]) { + return [value.value, "f64"] + } else if (value[COUNTER]) { + return [value.value, "counter"] + } else if (value instanceof Date) { + return [value.getTime(), "timestamp"] + } else if (value instanceof Uint8Array) { + return [value, "bytes"] + } else if (value instanceof Array) { + return [value, "list"] + } else if (Object.getPrototypeOf(value) === Object.getPrototypeOf({})) { + return [value, "map"] + } else if (value[OBJECT_ID]) { + throw new RangeError( + "Cannot create a reference to an existing document object" + ) + } else { + throw new 
RangeError(`Cannot assign unknown object: ${value}`) + } + break + case "boolean": + return [value, "boolean"] + case "number": + if (Number.isInteger(value)) { + return [value, "int"] + } else { + return [value, "f64"] + } + break + case "string": + return [value, "text"] + break + default: + throw new RangeError(`Unsupported type of value: ${typeof value}`) + } } const MapHandler = { - get (target, key) : AutomergeValue { + get(target, key): AutomergeValue { const { context, objectId, cache } = target - if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } + if (key === Symbol.toStringTag) { + return target[Symbol.toStringTag] + } if (key === OBJECT_ID) return objectId if (key === IS_PROXY) return true if (key === TRACE) return target.trace - if (key === STATE) return { handle: context }; + if (key === STATE) return { handle: context } if (!cache[key]) { cache[key] = valueAt(target, key) } return cache[key] }, - set (target, key, val) { - const { context, objectId, path, readonly, frozen} = target + set(target, key, val) { + const { context, objectId, path, readonly, frozen } = target target.cache = {} // reset cache on set if (val && val[OBJECT_ID]) { - throw new RangeError('Cannot create a reference to an existing document object') + throw new RangeError( + "Cannot create a reference to an existing document object" + ) } if (key === TRACE) { target.trace = val return true } - const [ value, datatype ] = import_value(val) + const [value, datatype] = import_value(val) if (frozen) { throw new RangeError("Attempting to use an outdated Automerge document") } @@ -127,7 +153,7 @@ const MapHandler = { switch (datatype) { case "list": { const list = context.putObject(objectId, key, []) - const proxyList = listProxy(context, list, [ ... 
path, key ], readonly ); + const proxyList = listProxy(context, list, [...path, key], readonly) for (let i = 0; i < value.length; i++) { proxyList[i] = value[i] } @@ -139,11 +165,11 @@ const MapHandler = { } case "map": { const map = context.putObject(objectId, key, {}) - const proxyMap = mapProxy(context, map, [ ... path, key ], readonly ); + const proxyMap = mapProxy(context, map, [...path, key], readonly) for (const key in value) { proxyMap[key] = value[key] } - break; + break } default: context.put(objectId, key, value, datatype) @@ -151,7 +177,7 @@ const MapHandler = { return true }, - deleteProperty (target, key) { + deleteProperty(target, key) { const { context, objectId, readonly } = target target.cache = {} // reset cache on delete if (readonly) { @@ -161,62 +187,71 @@ const MapHandler = { return true }, - has (target, key) { + has(target, key) { const value = this.get(target, key) return value !== undefined }, - getOwnPropertyDescriptor (target, key) { + getOwnPropertyDescriptor(target, key) { // const { context, objectId } = target const value = this.get(target, key) - if (typeof value !== 'undefined') { + if (typeof value !== "undefined") { return { - configurable: true, enumerable: true, value + configurable: true, + enumerable: true, + value, } } }, - ownKeys (target) { - const { context, objectId, heads} = target + ownKeys(target) { + const { context, objectId, heads } = target // FIXME - this is a tmp workaround until fix the dupe key bug in keys() const keys = context.keys(objectId, heads) return [...new Set(keys)] }, } - const ListHandler = { - get (target, index) { - const {context, objectId, heads } = target + get(target, index) { + const { context, objectId, heads } = target index = parseListIndex(index) - if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } } - if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } + if (index === Symbol.hasInstance) { + return instance => { + return 
Array.isArray(instance) + } + } + if (index === Symbol.toStringTag) { + return target[Symbol.toStringTag] + } if (index === OBJECT_ID) return objectId if (index === IS_PROXY) return true if (index === TRACE) return target.trace - if (index === STATE) return { handle: context }; - if (index === 'length') return context.length(objectId, heads); - if (typeof index === 'number') { + if (index === STATE) return { handle: context } + if (index === "length") return context.length(objectId, heads) + if (typeof index === "number") { return valueAt(target, index) } else { return listMethods(target)[index] } }, - set (target, index, val) { - const {context, objectId, path, readonly, frozen } = target + set(target, index, val) { + const { context, objectId, path, readonly, frozen } = target index = parseListIndex(index) if (val && val[OBJECT_ID]) { - throw new RangeError('Cannot create a reference to an existing document object') + throw new RangeError( + "Cannot create a reference to an existing document object" + ) } if (index === TRACE) { target.trace = val return true } if (typeof index == "string") { - throw new RangeError('list index must be a number') + throw new RangeError("list index must be a number") } - const [ value, datatype] = import_value(val) + const [value, datatype] = import_value(val) if (frozen) { throw new RangeError("Attempting to use an outdated Automerge document") } @@ -231,9 +266,9 @@ const ListHandler = { } else { list = context.putObject(objectId, index, []) } - const proxyList = listProxy(context, list, [ ... 
path, index ], readonly); - proxyList.splice(0,0,...value) - break; + const proxyList = listProxy(context, list, [...path, index], readonly) + proxyList.splice(0, 0, ...value) + break } case "text": { if (index >= context.length(objectId)) { @@ -241,7 +276,7 @@ const ListHandler = { } else { context.putObject(objectId, index, value, "text") } - break; + break } case "map": { let map @@ -250,11 +285,11 @@ const ListHandler = { } else { map = context.putObject(objectId, index, {}) } - const proxyMap = mapProxy(context, map, [ ... path, index ], readonly); + const proxyMap = mapProxy(context, map, [...path, index], readonly) for (const key in value) { proxyMap[key] = value[key] } - break; + break } default: if (index >= context.length(objectId)) { @@ -266,30 +301,34 @@ const ListHandler = { return true }, - deleteProperty (target, index) { - const {context, objectId} = target + deleteProperty(target, index) { + const { context, objectId } = target index = parseListIndex(index) if (context.get(objectId, index)[0] == "counter") { - throw new TypeError('Unsupported operation: deleting a counter from a list') + throw new TypeError( + "Unsupported operation: deleting a counter from a list" + ) } context.delete(objectId, index) return true }, - has (target, index) { - const {context, objectId, heads} = target + has(target, index) { + const { context, objectId, heads } = target index = parseListIndex(index) - if (typeof index === 'number') { + if (typeof index === "number") { return index < context.length(objectId, heads) } - return index === 'length' + return index === "length" }, - getOwnPropertyDescriptor (target, index) { - const {context, objectId, heads} = target + getOwnPropertyDescriptor(target, index) { + const { context, objectId, heads } = target - if (index === 'length') return {writable: true, value: context.length(objectId, heads) } - if (index === OBJECT_ID) return {configurable: false, enumerable: false, value: objectId} + if (index === "length") + return { 
writable: true, value: context.length(objectId, heads) } + if (index === OBJECT_ID) + return { configurable: false, enumerable: false, value: objectId } index = parseListIndex(index) @@ -297,38 +336,71 @@ const ListHandler = { return { configurable: true, enumerable: true, value } }, - getPrototypeOf(target) { return Object.getPrototypeOf(target) }, - ownKeys (/*target*/) : string[] { - const keys : string[] = [] + getPrototypeOf(target) { + return Object.getPrototypeOf(target) + }, + ownKeys(/*target*/): string[] { + const keys: string[] = [] // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array // but not uncommenting it causes for (i in list) {} to not enumerate values properly //const {context, objectId, heads } = target //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) } - keys.push("length"); + keys.push("length") return keys - } + }, } -export function mapProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : MapValue { - return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler) +export function mapProxy( + context: Automerge, + objectId: ObjID, + path?: Prop[], + readonly?: boolean, + heads?: Heads +): MapValue { + return new Proxy( + { + context, + objectId, + path, + readonly: !!readonly, + frozen: false, + heads, + cache: {}, + }, + MapHandler + ) } -export function listProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : ListValue { +export function listProxy( + context: Automerge, + objectId: ObjID, + path?: Prop[], + readonly?: boolean, + heads?: Heads +): ListValue { const target = [] - Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) + Object.assign(target, { + context, + objectId, + path, + readonly: !!readonly, + frozen: false, + heads, + cache: {}, + }) return new Proxy(target, 
ListHandler) } -export function rootProxy(context: Automerge, readonly?: boolean) : T { +export function rootProxy(context: Automerge, readonly?: boolean): T { /* eslint-disable-next-line */ return mapProxy(context, "_root", [], !!readonly) } function listMethods(target) { - const {context, objectId, path, readonly, frozen, heads} = target + const { context, objectId, path, readonly, frozen, heads } = target const methods = { deleteAt(index, numDelete) { - if (typeof numDelete === 'number') { + if (typeof numDelete === "number") { context.splice(objectId, index, numDelete) } else { context.delete(objectId, index) @@ -355,7 +427,7 @@ function listMethods(target) { const length = context.length(objectId) for (let i = start; i < length; i++) { const value = context.getWithType(objectId, i, heads) - if (value && value[1] === o[OBJECT_ID] || value[1] === o) { + if ((value && value[1] === o[OBJECT_ID]) || value[1] === o) { return i } } @@ -395,16 +467,20 @@ function listMethods(target) { del = parseListIndex(del) for (const val of vals) { if (val && val[OBJECT_ID]) { - throw new RangeError('Cannot create a reference to an existing document object') + throw new RangeError( + "Cannot create a reference to an existing document object" + ) } } if (frozen) { throw new RangeError("Attempting to use an outdated Automerge document") } if (readonly) { - throw new RangeError("Sequence object cannot be modified outside of a change block") + throw new RangeError( + "Sequence object cannot be modified outside of a change block" + ) } - const result : AutomergeValue[] = [] + const result: AutomergeValue[] = [] for (let i = 0; i < del; i++) { const value = valueAt(target, index) if (value !== undefined) { @@ -412,26 +488,31 @@ function listMethods(target) { } context.delete(objectId, index) } - const values = vals.map((val) => import_value(val)) - for (const [value,datatype] of values) { + const values = vals.map(val => import_value(val)) + for (const [value, datatype] of values) { 
switch (datatype) { case "list": { const list = context.insertObject(objectId, index, []) - const proxyList = listProxy(context, list, [ ... path, index ], readonly); - proxyList.splice(0,0,...value) - break; + const proxyList = listProxy( + context, + list, + [...path, index], + readonly + ) + proxyList.splice(0, 0, ...value) + break } case "text": { context.insertObject(objectId, index, value) - break; + break } case "map": { const map = context.insertObject(objectId, index, {}) - const proxyMap = mapProxy(context, map, [ ... path, index ], readonly); + const proxyMap = mapProxy(context, map, [...path, index], readonly) for (const key in value) { proxyMap[key] = value[key] } - break; + break } default: context.insert(objectId, index, value, datatype) @@ -447,35 +528,38 @@ function listMethods(target) { }, entries() { - const i = 0; + const i = 0 const iterator = { next: () => { const value = valueAt(target, i) if (value === undefined) { return { value: undefined, done: true } } else { - return { value: [ i, value ], done: false } + return { value: [i, value], done: false } } - } + }, } return iterator }, keys() { - let i = 0; + let i = 0 const len = context.length(objectId, heads) const iterator = { next: () => { - let value : undefined | number = undefined - if (i < len) { value = i; i++ } + let value: undefined | number = undefined + if (i < len) { + value = i + i++ + } return { value, done: true } - } + }, } return iterator }, values() { - const i = 0; + const i = 0 const iterator = { next: () => { const value = valueAt(target, i) @@ -484,13 +568,13 @@ function listMethods(target) { } else { return { value, done: false } } - } + }, } return iterator }, - toArray() : AutomergeValue[] { - const list : AutomergeValue = [] + toArray(): AutomergeValue[] { + const list: AutomergeValue = [] let value do { value = valueAt(target, list.length) @@ -502,36 +586,36 @@ function listMethods(target) { return list }, - map(f: (AutomergeValue, number) => T) : T[] { + map(f: 
(AutomergeValue, number) => T): T[] { return this.toArray().map(f) }, - toString() : string { + toString(): string { return this.toArray().toString() }, - toLocaleString() : string { + toLocaleString(): string { return this.toArray().toLocaleString() }, - forEach(f: (AutomergeValue, number) => undefined ) { + forEach(f: (AutomergeValue, number) => undefined) { return this.toArray().forEach(f) }, // todo: real concat function is different - concat(other: AutomergeValue[]) : AutomergeValue[] { + concat(other: AutomergeValue[]): AutomergeValue[] { return this.toArray().concat(other) }, - every(f: (AutomergeValue, number) => boolean) : boolean { + every(f: (AutomergeValue, number) => boolean): boolean { return this.toArray().every(f) }, - filter(f: (AutomergeValue, number) => boolean) : AutomergeValue[] { + filter(f: (AutomergeValue, number) => boolean): AutomergeValue[] { return this.toArray().filter(f) }, - find(f: (AutomergeValue, number) => boolean) : AutomergeValue | undefined { + find(f: (AutomergeValue, number) => boolean): AutomergeValue | undefined { let index = 0 for (const v of this) { if (f(v, index)) { @@ -541,7 +625,7 @@ function listMethods(target) { } }, - findIndex(f: (AutomergeValue, number) => boolean) : number { + findIndex(f: (AutomergeValue, number) => boolean): number { let index = 0 for (const v of this) { if (f(v, index)) { @@ -552,37 +636,40 @@ function listMethods(target) { return -1 }, - includes(elem: AutomergeValue) : boolean { - return this.find((e) => e === elem) !== undefined + includes(elem: AutomergeValue): boolean { + return this.find(e => e === elem) !== undefined }, - join(sep?: string) : string { + join(sep?: string): string { return this.toArray().join(sep) }, // todo: remove the any - reduce(f: (any, AutomergeValue) => T, initalValue?: T) : T | undefined { - return this.toArray().reduce(f,initalValue) + reduce(f: (any, AutomergeValue) => T, initalValue?: T): T | undefined { + return this.toArray().reduce(f, initalValue) }, // 
todo: remove the any - reduceRight(f: (any, AutomergeValue) => T, initalValue?: T) : T | undefined{ - return this.toArray().reduceRight(f,initalValue) + reduceRight( + f: (any, AutomergeValue) => T, + initalValue?: T + ): T | undefined { + return this.toArray().reduceRight(f, initalValue) }, - lastIndexOf(search: AutomergeValue, fromIndex = +Infinity) : number { + lastIndexOf(search: AutomergeValue, fromIndex = +Infinity): number { // this can be faster - return this.toArray().lastIndexOf(search,fromIndex) + return this.toArray().lastIndexOf(search, fromIndex) }, - slice(index?: number, num?: number) : AutomergeValue[] { - return this.toArray().slice(index,num) + slice(index?: number, num?: number): AutomergeValue[] { + return this.toArray().slice(index, num) }, - some(f: (AutomergeValue, number) => boolean) : boolean { - let index = 0; + some(f: (AutomergeValue, number) => boolean): boolean { + let index = 0 for (const v of this) { - if (f(v,index)) { + if (f(v, index)) { return true } index += 1 @@ -590,16 +677,15 @@ function listMethods(target) { return false }, - [Symbol.iterator]: function *() { - let i = 0; + [Symbol.iterator]: function* () { + let i = 0 let value = valueAt(target, i) while (value !== undefined) { - yield value - i += 1 - value = valueAt(target, i) + yield value + i += 1 + value = valueAt(target, i) } - } + }, } return methods } - diff --git a/javascript/src/types.ts b/javascript/src/types.ts index add3f492..62fdbba8 100644 --- a/javascript/src/types.ts +++ b/javascript/src/types.ts @@ -1,10 +1,19 @@ - -export { Counter } from "./counter" -export { Int, Uint, Float64 } from "./numbers" +export { Counter } from "./counter" +export { Int, Uint, Float64 } from "./numbers" import { Counter } from "./counter" -export type AutomergeValue = ScalarValue | { [key: string]: AutomergeValue } | Array -export type MapValue = { [key: string]: AutomergeValue } -export type ListValue = Array -export type ScalarValue = string | number | null | boolean | Date 
| Counter | Uint8Array +export type AutomergeValue = + | ScalarValue + | { [key: string]: AutomergeValue } + | Array +export type MapValue = { [key: string]: AutomergeValue } +export type ListValue = Array +export type ScalarValue = + | string + | number + | null + | boolean + | Date + | Counter + | Uint8Array diff --git a/javascript/src/uuid.ts b/javascript/src/uuid.ts index 5ddb5ae6..421ddf9d 100644 --- a/javascript/src/uuid.ts +++ b/javascript/src/uuid.ts @@ -1,21 +1,24 @@ -import { v4 } from 'uuid' +import { v4 } from "uuid" function defaultFactory() { - return v4().replace(/-/g, '') + return v4().replace(/-/g, "") } let factory = defaultFactory interface UUIDFactory extends Function { - setFactory(f: typeof factory): void; - reset(): void; + setFactory(f: typeof factory): void + reset(): void } -export const uuid : UUIDFactory = () => { +export const uuid: UUIDFactory = () => { return factory() } -uuid.setFactory = newFactory => { factory = newFactory } - -uuid.reset = () => { factory = defaultFactory } +uuid.setFactory = newFactory => { + factory = newFactory +} +uuid.reset = () => { + factory = defaultFactory +} diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts index e50e8782..8bf30914 100644 --- a/javascript/test/basic_test.ts +++ b/javascript/test/basic_test.ts @@ -1,366 +1,473 @@ -import * as assert from 'assert' -import * as Automerge from '../src' +import * as assert from "assert" +import * as Automerge from "../src" import * as WASM from "@automerge/automerge-wasm" -describe('Automerge', () => { - describe('basics', () => { - it('should init clone and free', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.clone(doc1); +describe("Automerge", () => { + describe("basics", () => { + it("should init clone and free", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.clone(doc1) - // this is only needed if weakrefs are not supported - Automerge.free(doc1) - Automerge.free(doc2) - }) - - it('should be able to 
make a view with specifc heads', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => d.value = 1) - let heads2 = Automerge.getHeads(doc2) - let doc3 = Automerge.change(doc2, (d) => d.value = 2) - let doc2_v2 = Automerge.view(doc3, heads2) - assert.deepEqual(doc2, doc2_v2) - let doc2_v2_clone = Automerge.clone(doc2, "aabbcc") - assert.deepEqual(doc2, doc2_v2_clone) - assert.equal(Automerge.getActorId(doc2_v2_clone), "aabbcc") - }) - - it("should allow you to change a clone of a view", () => { - let doc1 = Automerge.init() - doc1 = Automerge.change(doc1, d => d.key = "value") - let heads = Automerge.getHeads(doc1) - doc1 = Automerge.change(doc1, d => d.key = "value2") - let fork = Automerge.clone(Automerge.view(doc1, heads)) - assert.deepEqual(fork, {key: "value"}) - fork = Automerge.change(fork, d => d.key = "value3") - assert.deepEqual(fork, {key: "value3"}) - }) - - it('handle basic set and read on root object', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => { - d.hello = "world" - d.big = "little" - d.zip = "zop" - d.app = "dap" - assert.deepEqual(d, { hello: "world", big: "little", zip: "zop", app: "dap" }) - }) - assert.deepEqual(doc2, { hello: "world", big: "little", zip: "zop", app: "dap" }) - }) - - it('can detect an automerge doc with isAutomerge()', () => { - const doc1 = Automerge.from({ sub: { object: true } }) - assert(Automerge.isAutomerge(doc1)) - assert(!Automerge.isAutomerge(doc1.sub)) - assert(!Automerge.isAutomerge("String")) - assert(!Automerge.isAutomerge({ sub: { object: true }})) - assert(!Automerge.isAutomerge(undefined)) - const jsObj = Automerge.toJS(doc1) - assert(!Automerge.isAutomerge(jsObj)) - assert.deepEqual(jsObj, doc1) - }) - - it('it should recursively freeze the document if requested', () => { - let doc1 = Automerge.init({ freeze: true } ) - let doc2 = Automerge.init() - - assert(Object.isFrozen(doc1)) - assert(!Object.isFrozen(doc2)) - - // will also freeze sub 
objects - doc1 = Automerge.change(doc1, (doc) => doc.book = { title: "how to win friends" }) - doc2 = Automerge.merge(doc2,doc1) - assert(Object.isFrozen(doc1)) - assert(Object.isFrozen(doc1.book)) - assert(!Object.isFrozen(doc2)) - assert(!Object.isFrozen(doc2.book)) - - // works on from - let doc3 = Automerge.from({ sub: { obj: "inner" } }, { freeze: true }) - assert(Object.isFrozen(doc3)) - assert(Object.isFrozen(doc3.sub)) - - // works on load - let doc4 = Automerge.load(Automerge.save(doc3), { freeze: true }) - assert(Object.isFrozen(doc4)) - assert(Object.isFrozen(doc4.sub)) - - // follows clone - let doc5 = Automerge.clone(doc4) - assert(Object.isFrozen(doc5)) - assert(Object.isFrozen(doc5.sub)) - - // toJS does not freeze - let exported = Automerge.toJS(doc5) - assert(!Object.isFrozen(exported)) - }) - - it('handle basic sets over many changes', () => { - let doc1 = Automerge.init() - let timestamp = new Date(); - let counter = new Automerge.Counter(100); - let bytes = new Uint8Array([10,11,12]); - let doc2 = Automerge.change(doc1, (d) => { - d.hello = "world" - }) - let doc3 = Automerge.change(doc2, (d) => { - d.counter1 = counter - }) - let doc4 = Automerge.change(doc3, (d) => { - d.timestamp1 = timestamp - }) - let doc5 = Automerge.change(doc4, (d) => { - d.app = null - }) - let doc6 = Automerge.change(doc5, (d) => { - d.bytes1 = bytes - }) - let doc7 = Automerge.change(doc6, (d) => { - d.uint = new Automerge.Uint(1) - d.int = new Automerge.Int(-1) - d.float64 = new Automerge.Float64(5.5) - d.number1 = 100 - d.number2 = -45.67 - d.true = true - d.false = false - }) - - assert.deepEqual(doc7, { hello: "world", true: true, false: false, int: -1, uint: 1, float64: 5.5, number1: 100, number2: -45.67, counter1: counter, timestamp1: timestamp, bytes1: bytes, app: null }) - - let changes = Automerge.getAllChanges(doc7) - let t1 = Automerge.init() - ;let [t2] = Automerge.applyChanges(t1, changes) - assert.deepEqual(doc7,t2) - }) - - it('handle overwrites to 
values', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => { - d.hello = "world1" - }) - let doc3 = Automerge.change(doc2, (d) => { - d.hello = "world2" - }) - let doc4 = Automerge.change(doc3, (d) => { - d.hello = "world3" - }) - let doc5 = Automerge.change(doc4, (d) => { - d.hello = "world4" - }) - assert.deepEqual(doc5, { hello: "world4" } ) - }) - - it('handle set with object value', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => { - d.subobj = { hello: "world", subsubobj: { zip: "zop" } } - }) - assert.deepEqual(doc2, { subobj: { hello: "world", subsubobj: { zip: "zop" } } }) - }) - - it('handle simple list creation', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => d.list = []) - assert.deepEqual(doc2, { list: []}) - }) - - it('handle simple lists', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => { - d.list = [ 1, 2, 3 ] - }) - assert.deepEqual(doc2.list.length, 3) - assert.deepEqual(doc2.list[0], 1) - assert.deepEqual(doc2.list[1], 2) - assert.deepEqual(doc2.list[2], 3) - assert.deepEqual(doc2, { list: [1,2,3] }) - // assert.deepStrictEqual(Automerge.toJS(doc2), { list: [1,2,3] }) - - let doc3 = Automerge.change(doc2, (d) => { - d.list[1] = "a" - }) - - assert.deepEqual(doc3.list.length, 3) - assert.deepEqual(doc3.list[0], 1) - assert.deepEqual(doc3.list[1], "a") - assert.deepEqual(doc3.list[2], 3) - assert.deepEqual(doc3, { list: [1,"a",3] }) - }) - it('handle simple lists', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => { - d.list = [ 1, 2, 3 ] - }) - let changes = Automerge.getChanges(doc1, doc2) - let docB1 = Automerge.init() - ;let [docB2] = Automerge.applyChanges(docB1, changes) - assert.deepEqual(docB2, doc2); - }) - it('handle text', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => { - d.list = "hello" - Automerge.splice(d, "list", 2, 0, "Z") - }) - let 
changes = Automerge.getChanges(doc1, doc2) - let docB1 = Automerge.init() - ;let [docB2] = Automerge.applyChanges(docB1, changes) - assert.deepEqual(docB2, doc2); - }) - - it('handle non-text strings', () => { - let doc1 = WASM.create(); - doc1.put("_root", "text", "hello world"); - let doc2 = Automerge.load(doc1.save()) - assert.throws(() => { - Automerge.change(doc2, (d) => { Automerge.splice(d, "text", 1, 0, "Z") }) - }, /Cannot splice/) - }) - - it('have many list methods', () => { - let doc1 = Automerge.from({ list: [1,2,3] }) - assert.deepEqual(doc1, { list: [1,2,3] }); - let doc2 = Automerge.change(doc1, (d) => { - d.list.splice(1,1,9,10) - }) - assert.deepEqual(doc2, { list: [1,9,10,3] }); - let doc3 = Automerge.change(doc2, (d) => { - d.list.push(11,12) - }) - assert.deepEqual(doc3, { list: [1,9,10,3,11,12] }); - let doc4 = Automerge.change(doc3, (d) => { - d.list.unshift(2,2) - }) - assert.deepEqual(doc4, { list: [2,2,1,9,10,3,11,12] }); - let doc5 = Automerge.change(doc4, (d) => { - d.list.shift() - }) - assert.deepEqual(doc5, { list: [2,1,9,10,3,11,12] }); - let doc6 = Automerge.change(doc5, (d) => { - // @ts-ignore - d.list.insertAt(3,100,101) - }) - assert.deepEqual(doc6, { list: [2,1,9,100,101,10,3,11,12] }); - }) - - it('allows access to the backend', () => { - let doc = Automerge.init() - assert.deepEqual(Object.keys(Automerge.getBackend(doc)), ["ptr"]) - }) - - it('lists and text have indexof', () => { - let doc = Automerge.from({ list: [0,1,2,3,4,5,6], text: "hello world" }) - assert.deepEqual(doc.list.indexOf(5), 5) - assert.deepEqual(doc.text.indexOf("world"), 6) - }) + // this is only needed if weakrefs are not supported + Automerge.free(doc1) + Automerge.free(doc2) }) - describe('emptyChange', () => { - it('should generate a hash', () => { - let doc = Automerge.init() - doc = Automerge.change(doc, d => { - d.key = "value" - }) - Automerge.save(doc) - let headsBefore = Automerge.getHeads(doc) - headsBefore.sort() - doc = 
Automerge.emptyChange(doc, "empty change") - let headsAfter = Automerge.getHeads(doc) - headsAfter.sort() - assert.notDeepEqual(headsBefore, headsAfter) - }) + it("should be able to make a view with specifc heads", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => (d.value = 1)) + let heads2 = Automerge.getHeads(doc2) + let doc3 = Automerge.change(doc2, d => (d.value = 2)) + let doc2_v2 = Automerge.view(doc3, heads2) + assert.deepEqual(doc2, doc2_v2) + let doc2_v2_clone = Automerge.clone(doc2, "aabbcc") + assert.deepEqual(doc2, doc2_v2_clone) + assert.equal(Automerge.getActorId(doc2_v2_clone), "aabbcc") }) - describe('proxy lists', () => { - it('behave like arrays', () => { - let doc = Automerge.from({ - chars: ["a","b","c"], - numbers: [20,3,100], - repeats: [20,20,3,3,3,3,100,100] - }) - let r1: Array = [] - doc = Automerge.change(doc, (d) => { - assert.deepEqual((d.chars as any[]).concat([1,2]), ["a","b","c",1,2]) - assert.deepEqual(d.chars.map((n) => n + "!"), ["a!", "b!", "c!"]) - assert.deepEqual(d.numbers.map((n) => n + 10), [30, 13, 110]) - assert.deepEqual(d.numbers.toString(), "20,3,100") - assert.deepEqual(d.numbers.toLocaleString(), "20,3,100") - assert.deepEqual(d.numbers.forEach((n: number) => r1.push(n)), undefined) - assert.deepEqual(d.numbers.every((n) => n > 1), true) - assert.deepEqual(d.numbers.every((n) => n > 10), false) - assert.deepEqual(d.numbers.filter((n) => n > 10), [20,100]) - assert.deepEqual(d.repeats.find((n) => n < 10), 3) - assert.deepEqual(d.repeats.find((n) => n < 10), 3) - assert.deepEqual(d.repeats.find((n) => n < 0), undefined) - assert.deepEqual(d.repeats.findIndex((n) => n < 10), 2) - assert.deepEqual(d.repeats.findIndex((n) => n < 0), -1) - assert.deepEqual(d.repeats.findIndex((n) => n < 10), 2) - assert.deepEqual(d.repeats.findIndex((n) => n < 0), -1) - assert.deepEqual(d.numbers.includes(3), true) - assert.deepEqual(d.numbers.includes(-3), false) - assert.deepEqual(d.numbers.join("|"), 
"20|3|100") - assert.deepEqual(d.numbers.join(), "20,3,100") - assert.deepEqual(d.numbers.some((f) => f === 3), true) - assert.deepEqual(d.numbers.some((f) => f < 0), false) - assert.deepEqual(d.numbers.reduce((sum,n) => sum + n, 100), 223) - assert.deepEqual(d.repeats.reduce((sum,n) => sum + n, 100), 352) - assert.deepEqual(d.chars.reduce((sum,n) => sum + n, "="), "=abc") - assert.deepEqual(d.chars.reduceRight((sum,n) => sum + n, "="), "=cba") - assert.deepEqual(d.numbers.reduceRight((sum,n) => sum + n, 100), 223) - assert.deepEqual(d.repeats.lastIndexOf(3), 5) - assert.deepEqual(d.repeats.lastIndexOf(3,3), 3) - }) - doc = Automerge.change(doc, (d) => { - assert.deepEqual(d.numbers.fill(-1,1,2), [20,-1,100]) - assert.deepEqual(d.chars.fill("z",1,100), ["a","z","z"]) - }) - assert.deepEqual(r1, [20,3,100]) - assert.deepEqual(doc.numbers, [20,-1,100]) - assert.deepEqual(doc.chars, ["a","z","z"]) - }) - }) - - it('should obtain the same conflicts, regardless of merge order', () => { - let s1 = Automerge.init() - let s2 = Automerge.init() - s1 = Automerge.change(s1, doc => { doc.x = 1; doc.y = 2 }) - s2 = Automerge.change(s2, doc => { doc.x = 3; doc.y = 4 }) - const m1 = Automerge.merge(Automerge.clone(s1), Automerge.clone(s2)) - const m2 = Automerge.merge(Automerge.clone(s2), Automerge.clone(s1)) - assert.deepStrictEqual(Automerge.getConflicts(m1, 'x'), Automerge.getConflicts(m2, 'x')) + it("should allow you to change a clone of a view", () => { + let doc1 = Automerge.init() + doc1 = Automerge.change(doc1, d => (d.key = "value")) + let heads = Automerge.getHeads(doc1) + doc1 = Automerge.change(doc1, d => (d.key = "value2")) + let fork = Automerge.clone(Automerge.view(doc1, heads)) + assert.deepEqual(fork, { key: "value" }) + fork = Automerge.change(fork, d => (d.key = "value3")) + assert.deepEqual(fork, { key: "value3" }) }) - describe("getObjectId", () => { - let s1 = Automerge.from({ - "string": "string", - "number": 1, - "null": null, - "date": new Date(), - 
"counter": new Automerge.Counter(), - "bytes": new Uint8Array(10), - "text": "", - "list": [], - "map": {} - }) - - it("should return null for scalar values", () => { - assert.equal(Automerge.getObjectId(s1.string), null) - assert.equal(Automerge.getObjectId(s1.number), null) - assert.equal(Automerge.getObjectId(s1.null!), null) - assert.equal(Automerge.getObjectId(s1.date), null) - assert.equal(Automerge.getObjectId(s1.counter), null) - assert.equal(Automerge.getObjectId(s1.bytes), null) - }) - - it("should return _root for the root object", () => { - assert.equal(Automerge.getObjectId(s1), "_root") - }) - - it("should return non-null for map, list, text, and objects", () => { - assert.equal(Automerge.getObjectId(s1.text), null) - assert.notEqual(Automerge.getObjectId(s1.list), null) - assert.notEqual(Automerge.getObjectId(s1.map), null) + it("handle basic set and read on root object", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => { + d.hello = "world" + d.big = "little" + d.zip = "zop" + d.app = "dap" + assert.deepEqual(d, { + hello: "world", + big: "little", + zip: "zop", + app: "dap", }) + }) + assert.deepEqual(doc2, { + hello: "world", + big: "little", + zip: "zop", + app: "dap", + }) }) + + it("can detect an automerge doc with isAutomerge()", () => { + const doc1 = Automerge.from({ sub: { object: true } }) + assert(Automerge.isAutomerge(doc1)) + assert(!Automerge.isAutomerge(doc1.sub)) + assert(!Automerge.isAutomerge("String")) + assert(!Automerge.isAutomerge({ sub: { object: true } })) + assert(!Automerge.isAutomerge(undefined)) + const jsObj = Automerge.toJS(doc1) + assert(!Automerge.isAutomerge(jsObj)) + assert.deepEqual(jsObj, doc1) + }) + + it("it should recursively freeze the document if requested", () => { + let doc1 = Automerge.init({ freeze: true }) + let doc2 = Automerge.init() + + assert(Object.isFrozen(doc1)) + assert(!Object.isFrozen(doc2)) + + // will also freeze sub objects + doc1 = Automerge.change( + doc1, + 
doc => (doc.book = { title: "how to win friends" }) + ) + doc2 = Automerge.merge(doc2, doc1) + assert(Object.isFrozen(doc1)) + assert(Object.isFrozen(doc1.book)) + assert(!Object.isFrozen(doc2)) + assert(!Object.isFrozen(doc2.book)) + + // works on from + let doc3 = Automerge.from({ sub: { obj: "inner" } }, { freeze: true }) + assert(Object.isFrozen(doc3)) + assert(Object.isFrozen(doc3.sub)) + + // works on load + let doc4 = Automerge.load(Automerge.save(doc3), { freeze: true }) + assert(Object.isFrozen(doc4)) + assert(Object.isFrozen(doc4.sub)) + + // follows clone + let doc5 = Automerge.clone(doc4) + assert(Object.isFrozen(doc5)) + assert(Object.isFrozen(doc5.sub)) + + // toJS does not freeze + let exported = Automerge.toJS(doc5) + assert(!Object.isFrozen(exported)) + }) + + it("handle basic sets over many changes", () => { + let doc1 = Automerge.init() + let timestamp = new Date() + let counter = new Automerge.Counter(100) + let bytes = new Uint8Array([10, 11, 12]) + let doc2 = Automerge.change(doc1, d => { + d.hello = "world" + }) + let doc3 = Automerge.change(doc2, d => { + d.counter1 = counter + }) + let doc4 = Automerge.change(doc3, d => { + d.timestamp1 = timestamp + }) + let doc5 = Automerge.change(doc4, d => { + d.app = null + }) + let doc6 = Automerge.change(doc5, d => { + d.bytes1 = bytes + }) + let doc7 = Automerge.change(doc6, d => { + d.uint = new Automerge.Uint(1) + d.int = new Automerge.Int(-1) + d.float64 = new Automerge.Float64(5.5) + d.number1 = 100 + d.number2 = -45.67 + d.true = true + d.false = false + }) + + assert.deepEqual(doc7, { + hello: "world", + true: true, + false: false, + int: -1, + uint: 1, + float64: 5.5, + number1: 100, + number2: -45.67, + counter1: counter, + timestamp1: timestamp, + bytes1: bytes, + app: null, + }) + + let changes = Automerge.getAllChanges(doc7) + let t1 = Automerge.init() + let [t2] = Automerge.applyChanges(t1, changes) + assert.deepEqual(doc7, t2) + }) + + it("handle overwrites to values", () => { + let 
doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => { + d.hello = "world1" + }) + let doc3 = Automerge.change(doc2, d => { + d.hello = "world2" + }) + let doc4 = Automerge.change(doc3, d => { + d.hello = "world3" + }) + let doc5 = Automerge.change(doc4, d => { + d.hello = "world4" + }) + assert.deepEqual(doc5, { hello: "world4" }) + }) + + it("handle set with object value", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => { + d.subobj = { hello: "world", subsubobj: { zip: "zop" } } + }) + assert.deepEqual(doc2, { + subobj: { hello: "world", subsubobj: { zip: "zop" } }, + }) + }) + + it("handle simple list creation", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => (d.list = [])) + assert.deepEqual(doc2, { list: [] }) + }) + + it("handle simple lists", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => { + d.list = [1, 2, 3] + }) + assert.deepEqual(doc2.list.length, 3) + assert.deepEqual(doc2.list[0], 1) + assert.deepEqual(doc2.list[1], 2) + assert.deepEqual(doc2.list[2], 3) + assert.deepEqual(doc2, { list: [1, 2, 3] }) + // assert.deepStrictEqual(Automerge.toJS(doc2), { list: [1,2,3] }) + + let doc3 = Automerge.change(doc2, d => { + d.list[1] = "a" + }) + + assert.deepEqual(doc3.list.length, 3) + assert.deepEqual(doc3.list[0], 1) + assert.deepEqual(doc3.list[1], "a") + assert.deepEqual(doc3.list[2], 3) + assert.deepEqual(doc3, { list: [1, "a", 3] }) + }) + it("handle simple lists", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => { + d.list = [1, 2, 3] + }) + let changes = Automerge.getChanges(doc1, doc2) + let docB1 = Automerge.init() + let [docB2] = Automerge.applyChanges(docB1, changes) + assert.deepEqual(docB2, doc2) + }) + it("handle text", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => { + d.list = "hello" + Automerge.splice(d, "list", 2, 0, "Z") + }) + let changes = Automerge.getChanges(doc1, doc2) 
+ let docB1 = Automerge.init() + let [docB2] = Automerge.applyChanges(docB1, changes) + assert.deepEqual(docB2, doc2) + }) + + it("handle non-text strings", () => { + let doc1 = WASM.create() + doc1.put("_root", "text", "hello world") + let doc2 = Automerge.load(doc1.save()) + assert.throws(() => { + Automerge.change(doc2, d => { + Automerge.splice(d, "text", 1, 0, "Z") + }) + }, /Cannot splice/) + }) + + it("have many list methods", () => { + let doc1 = Automerge.from({ list: [1, 2, 3] }) + assert.deepEqual(doc1, { list: [1, 2, 3] }) + let doc2 = Automerge.change(doc1, d => { + d.list.splice(1, 1, 9, 10) + }) + assert.deepEqual(doc2, { list: [1, 9, 10, 3] }) + let doc3 = Automerge.change(doc2, d => { + d.list.push(11, 12) + }) + assert.deepEqual(doc3, { list: [1, 9, 10, 3, 11, 12] }) + let doc4 = Automerge.change(doc3, d => { + d.list.unshift(2, 2) + }) + assert.deepEqual(doc4, { list: [2, 2, 1, 9, 10, 3, 11, 12] }) + let doc5 = Automerge.change(doc4, d => { + d.list.shift() + }) + assert.deepEqual(doc5, { list: [2, 1, 9, 10, 3, 11, 12] }) + let doc6 = Automerge.change(doc5, d => { + // @ts-ignore + d.list.insertAt(3, 100, 101) + }) + assert.deepEqual(doc6, { list: [2, 1, 9, 100, 101, 10, 3, 11, 12] }) + }) + + it("allows access to the backend", () => { + let doc = Automerge.init() + assert.deepEqual(Object.keys(Automerge.getBackend(doc)), ["ptr"]) + }) + + it("lists and text have indexof", () => { + let doc = Automerge.from({ + list: [0, 1, 2, 3, 4, 5, 6], + text: "hello world", + }) + assert.deepEqual(doc.list.indexOf(5), 5) + assert.deepEqual(doc.text.indexOf("world"), 6) + }) + }) + + describe("emptyChange", () => { + it("should generate a hash", () => { + let doc = Automerge.init() + doc = Automerge.change(doc, d => { + d.key = "value" + }) + Automerge.save(doc) + let headsBefore = Automerge.getHeads(doc) + headsBefore.sort() + doc = Automerge.emptyChange(doc, "empty change") + let headsAfter = Automerge.getHeads(doc) + headsAfter.sort() + 
assert.notDeepEqual(headsBefore, headsAfter) + }) + }) + + describe("proxy lists", () => { + it("behave like arrays", () => { + let doc = Automerge.from({ + chars: ["a", "b", "c"], + numbers: [20, 3, 100], + repeats: [20, 20, 3, 3, 3, 3, 100, 100], + }) + let r1: Array = [] + doc = Automerge.change(doc, d => { + assert.deepEqual((d.chars as any[]).concat([1, 2]), [ + "a", + "b", + "c", + 1, + 2, + ]) + assert.deepEqual( + d.chars.map(n => n + "!"), + ["a!", "b!", "c!"] + ) + assert.deepEqual( + d.numbers.map(n => n + 10), + [30, 13, 110] + ) + assert.deepEqual(d.numbers.toString(), "20,3,100") + assert.deepEqual(d.numbers.toLocaleString(), "20,3,100") + assert.deepEqual( + d.numbers.forEach((n: number) => r1.push(n)), + undefined + ) + assert.deepEqual( + d.numbers.every(n => n > 1), + true + ) + assert.deepEqual( + d.numbers.every(n => n > 10), + false + ) + assert.deepEqual( + d.numbers.filter(n => n > 10), + [20, 100] + ) + assert.deepEqual( + d.repeats.find(n => n < 10), + 3 + ) + assert.deepEqual( + d.repeats.find(n => n < 10), + 3 + ) + assert.deepEqual( + d.repeats.find(n => n < 0), + undefined + ) + assert.deepEqual( + d.repeats.findIndex(n => n < 10), + 2 + ) + assert.deepEqual( + d.repeats.findIndex(n => n < 0), + -1 + ) + assert.deepEqual( + d.repeats.findIndex(n => n < 10), + 2 + ) + assert.deepEqual( + d.repeats.findIndex(n => n < 0), + -1 + ) + assert.deepEqual(d.numbers.includes(3), true) + assert.deepEqual(d.numbers.includes(-3), false) + assert.deepEqual(d.numbers.join("|"), "20|3|100") + assert.deepEqual(d.numbers.join(), "20,3,100") + assert.deepEqual( + d.numbers.some(f => f === 3), + true + ) + assert.deepEqual( + d.numbers.some(f => f < 0), + false + ) + assert.deepEqual( + d.numbers.reduce((sum, n) => sum + n, 100), + 223 + ) + assert.deepEqual( + d.repeats.reduce((sum, n) => sum + n, 100), + 352 + ) + assert.deepEqual( + d.chars.reduce((sum, n) => sum + n, "="), + "=abc" + ) + assert.deepEqual( + d.chars.reduceRight((sum, n) => sum + n, 
"="), + "=cba" + ) + assert.deepEqual( + d.numbers.reduceRight((sum, n) => sum + n, 100), + 223 + ) + assert.deepEqual(d.repeats.lastIndexOf(3), 5) + assert.deepEqual(d.repeats.lastIndexOf(3, 3), 3) + }) + doc = Automerge.change(doc, d => { + assert.deepEqual(d.numbers.fill(-1, 1, 2), [20, -1, 100]) + assert.deepEqual(d.chars.fill("z", 1, 100), ["a", "z", "z"]) + }) + assert.deepEqual(r1, [20, 3, 100]) + assert.deepEqual(doc.numbers, [20, -1, 100]) + assert.deepEqual(doc.chars, ["a", "z", "z"]) + }) + }) + + it("should obtain the same conflicts, regardless of merge order", () => { + let s1 = Automerge.init() + let s2 = Automerge.init() + s1 = Automerge.change(s1, doc => { + doc.x = 1 + doc.y = 2 + }) + s2 = Automerge.change(s2, doc => { + doc.x = 3 + doc.y = 4 + }) + const m1 = Automerge.merge(Automerge.clone(s1), Automerge.clone(s2)) + const m2 = Automerge.merge(Automerge.clone(s2), Automerge.clone(s1)) + assert.deepStrictEqual( + Automerge.getConflicts(m1, "x"), + Automerge.getConflicts(m2, "x") + ) + }) + + describe("getObjectId", () => { + let s1 = Automerge.from({ + string: "string", + number: 1, + null: null, + date: new Date(), + counter: new Automerge.Counter(), + bytes: new Uint8Array(10), + text: "", + list: [], + map: {}, + }) + + it("should return null for scalar values", () => { + assert.equal(Automerge.getObjectId(s1.string), null) + assert.equal(Automerge.getObjectId(s1.number), null) + assert.equal(Automerge.getObjectId(s1.null!), null) + assert.equal(Automerge.getObjectId(s1.date), null) + assert.equal(Automerge.getObjectId(s1.counter), null) + assert.equal(Automerge.getObjectId(s1.bytes), null) + }) + + it("should return _root for the root object", () => { + assert.equal(Automerge.getObjectId(s1), "_root") + }) + + it("should return non-null for map, list, text, and objects", () => { + assert.equal(Automerge.getObjectId(s1.text), null) + assert.notEqual(Automerge.getObjectId(s1.list), null) + assert.notEqual(Automerge.getObjectId(s1.map), null) + 
}) + }) }) - diff --git a/javascript/test/extra_api_tests.ts b/javascript/test/extra_api_tests.ts index c0c18177..69932d1f 100644 --- a/javascript/test/extra_api_tests.ts +++ b/javascript/test/extra_api_tests.ts @@ -1,20 +1,28 @@ +import * as assert from "assert" +import * as Automerge from "../src" -import * as assert from 'assert' -import * as Automerge from '../src' - -describe('Automerge', () => { - describe('basics', () => { - it('should allow you to load incrementally', () => { - let doc1 = Automerge.from({ foo: "bar" }) - let doc2 = Automerge.init(); - doc2 = Automerge.loadIncremental(doc2, Automerge.save(doc1)) - doc1 = Automerge.change(doc1, (d) => d.foo2 = "bar2") - doc2 = Automerge.loadIncremental(doc2, Automerge.getBackend(doc1).saveIncremental() ) - doc1 = Automerge.change(doc1, (d) => d.foo = "bar2") - doc2 = Automerge.loadIncremental(doc2, Automerge.getBackend(doc1).saveIncremental() ) - doc1 = Automerge.change(doc1, (d) => d.x = "y") - doc2 = Automerge.loadIncremental(doc2, Automerge.getBackend(doc1).saveIncremental() ) - assert.deepEqual(doc1,doc2) - }) +describe("Automerge", () => { + describe("basics", () => { + it("should allow you to load incrementally", () => { + let doc1 = Automerge.from({ foo: "bar" }) + let doc2 = Automerge.init() + doc2 = Automerge.loadIncremental(doc2, Automerge.save(doc1)) + doc1 = Automerge.change(doc1, d => (d.foo2 = "bar2")) + doc2 = Automerge.loadIncremental( + doc2, + Automerge.getBackend(doc1).saveIncremental() + ) + doc1 = Automerge.change(doc1, d => (d.foo = "bar2")) + doc2 = Automerge.loadIncremental( + doc2, + Automerge.getBackend(doc1).saveIncremental() + ) + doc1 = Automerge.change(doc1, d => (d.x = "y")) + doc2 = Automerge.loadIncremental( + doc2, + Automerge.getBackend(doc1).saveIncremental() + ) + assert.deepEqual(doc1, doc2) }) + }) }) diff --git a/javascript/test/helpers.ts b/javascript/test/helpers.ts index 7799cb84..df76e558 100644 --- a/javascript/test/helpers.ts +++ b/javascript/test/helpers.ts @@ 
-1,5 +1,5 @@ -import * as assert from 'assert' -import { Encoder } from './legacy/encoding' +import * as assert from "assert" +import { Encoder } from "./legacy/encoding" // Assertion that succeeds if the first argument deepStrictEquals at least one of the // subsequent arguments (but we don't care which one) @@ -11,7 +11,8 @@ export function assertEqualsOneOf(actual, ...expected) { return // if we get here without an exception, that means success } catch (e) { if (e instanceof assert.AssertionError) { - if (!e.name.match(/^AssertionError/) || i === expected.length - 1) throw e + if (!e.name.match(/^AssertionError/) || i === expected.length - 1) + throw e } else { throw e } @@ -24,9 +25,10 @@ export function assertEqualsOneOf(actual, ...expected) { * sequence as the array `bytes`. */ export function checkEncoded(encoder, bytes, detail?) { - const encoded = (encoder instanceof Encoder) ? encoder.buffer : encoder + const encoded = encoder instanceof Encoder ? encoder.buffer : encoder const expected = new Uint8Array(bytes) - const message = (detail ? `${detail}: ` : '') + `${encoded} expected to equal ${expected}` + const message = + (detail ? 
`${detail}: ` : "") + `${encoded} expected to equal ${expected}` assert(encoded.byteLength === expected.byteLength, message) for (let i = 0; i < encoded.byteLength; i++) { assert(encoded[i] === expected[i], message) diff --git a/javascript/test/legacy/columnar.js b/javascript/test/legacy/columnar.js index b97e6275..6a9b5874 100644 --- a/javascript/test/legacy/columnar.js +++ b/javascript/test/legacy/columnar.js @@ -1,9 +1,18 @@ -const pako = require('pako') -const { copyObject, parseOpId, equalBytes } = require('./common') +const pako = require("pako") +const { copyObject, parseOpId, equalBytes } = require("./common") const { - utf8ToString, hexStringToBytes, bytesToHexString, - Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder -} = require('./encoding') + utf8ToString, + hexStringToBytes, + bytesToHexString, + Encoder, + Decoder, + RLEEncoder, + RLEDecoder, + DeltaEncoder, + DeltaDecoder, + BooleanEncoder, + BooleanDecoder, +} = require("./encoding") // Maybe we should be using the platform's built-in hash implementation? // Node has the crypto module: https://nodejs.org/api/crypto.html and browsers have @@ -18,7 +27,7 @@ const { // - It does not need a secure source of random bits and does not need to be // constant-time; // - I have reviewed the source code and it seems pretty reasonable. 
-const { Hash } = require('fast-sha256') +const { Hash } = require("fast-sha256") // These bytes don't mean anything, they were generated randomly const MAGIC_BYTES = new Uint8Array([0x85, 0x6f, 0x4a, 0x83]) @@ -33,8 +42,14 @@ const DEFLATE_MIN_SIZE = 256 // The least-significant 3 bits of a columnId indicate its datatype const COLUMN_TYPE = { - GROUP_CARD: 0, ACTOR_ID: 1, INT_RLE: 2, INT_DELTA: 3, BOOLEAN: 4, - STRING_RLE: 5, VALUE_LEN: 6, VALUE_RAW: 7 + GROUP_CARD: 0, + ACTOR_ID: 1, + INT_RLE: 2, + INT_DELTA: 3, + BOOLEAN: 4, + STRING_RLE: 5, + VALUE_LEN: 6, + VALUE_RAW: 7, } // The 4th-least-significant bit of a columnId is set if the column is DEFLATE-compressed @@ -44,53 +59,77 @@ const COLUMN_TYPE_DEFLATE = 8 // one of the following types in VALUE_TYPE. The higher bits indicate the length of the value in the // associated VALUE_RAW column (in bytes). const VALUE_TYPE = { - NULL: 0, FALSE: 1, TRUE: 2, LEB128_UINT: 3, LEB128_INT: 4, IEEE754: 5, - UTF8: 6, BYTES: 7, COUNTER: 8, TIMESTAMP: 9, MIN_UNKNOWN: 10, MAX_UNKNOWN: 15 + NULL: 0, + FALSE: 1, + TRUE: 2, + LEB128_UINT: 3, + LEB128_INT: 4, + IEEE754: 5, + UTF8: 6, + BYTES: 7, + COUNTER: 8, + TIMESTAMP: 9, + MIN_UNKNOWN: 10, + MAX_UNKNOWN: 15, } // make* actions must be at even-numbered indexes in this list -const ACTIONS = ['makeMap', 'set', 'makeList', 'del', 'makeText', 'inc', 'makeTable', 'link'] +const ACTIONS = [ + "makeMap", + "set", + "makeList", + "del", + "makeText", + "inc", + "makeTable", + "link", +] -const OBJECT_TYPE = {makeMap: 'map', makeList: 'list', makeText: 'text', makeTable: 'table'} +const OBJECT_TYPE = { + makeMap: "map", + makeList: "list", + makeText: "text", + makeTable: "table", +} const COMMON_COLUMNS = [ - {columnName: 'objActor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'objCtr', columnId: 0 << 4 | COLUMN_TYPE.INT_RLE}, - {columnName: 'keyActor', columnId: 1 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'keyCtr', columnId: 1 << 4 | COLUMN_TYPE.INT_DELTA}, - 
{columnName: 'keyStr', columnId: 1 << 4 | COLUMN_TYPE.STRING_RLE}, - {columnName: 'idActor', columnId: 2 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'idCtr', columnId: 2 << 4 | COLUMN_TYPE.INT_DELTA}, - {columnName: 'insert', columnId: 3 << 4 | COLUMN_TYPE.BOOLEAN}, - {columnName: 'action', columnId: 4 << 4 | COLUMN_TYPE.INT_RLE}, - {columnName: 'valLen', columnId: 5 << 4 | COLUMN_TYPE.VALUE_LEN}, - {columnName: 'valRaw', columnId: 5 << 4 | COLUMN_TYPE.VALUE_RAW}, - {columnName: 'chldActor', columnId: 6 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'chldCtr', columnId: 6 << 4 | COLUMN_TYPE.INT_DELTA} + { columnName: "objActor", columnId: (0 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "objCtr", columnId: (0 << 4) | COLUMN_TYPE.INT_RLE }, + { columnName: "keyActor", columnId: (1 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "keyCtr", columnId: (1 << 4) | COLUMN_TYPE.INT_DELTA }, + { columnName: "keyStr", columnId: (1 << 4) | COLUMN_TYPE.STRING_RLE }, + { columnName: "idActor", columnId: (2 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "idCtr", columnId: (2 << 4) | COLUMN_TYPE.INT_DELTA }, + { columnName: "insert", columnId: (3 << 4) | COLUMN_TYPE.BOOLEAN }, + { columnName: "action", columnId: (4 << 4) | COLUMN_TYPE.INT_RLE }, + { columnName: "valLen", columnId: (5 << 4) | COLUMN_TYPE.VALUE_LEN }, + { columnName: "valRaw", columnId: (5 << 4) | COLUMN_TYPE.VALUE_RAW }, + { columnName: "chldActor", columnId: (6 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "chldCtr", columnId: (6 << 4) | COLUMN_TYPE.INT_DELTA }, ] const CHANGE_COLUMNS = COMMON_COLUMNS.concat([ - {columnName: 'predNum', columnId: 7 << 4 | COLUMN_TYPE.GROUP_CARD}, - {columnName: 'predActor', columnId: 7 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'predCtr', columnId: 7 << 4 | COLUMN_TYPE.INT_DELTA} + { columnName: "predNum", columnId: (7 << 4) | COLUMN_TYPE.GROUP_CARD }, + { columnName: "predActor", columnId: (7 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "predCtr", columnId: (7 << 4) | 
COLUMN_TYPE.INT_DELTA }, ]) const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([ - {columnName: 'succNum', columnId: 8 << 4 | COLUMN_TYPE.GROUP_CARD}, - {columnName: 'succActor', columnId: 8 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'succCtr', columnId: 8 << 4 | COLUMN_TYPE.INT_DELTA} + { columnName: "succNum", columnId: (8 << 4) | COLUMN_TYPE.GROUP_CARD }, + { columnName: "succActor", columnId: (8 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "succCtr", columnId: (8 << 4) | COLUMN_TYPE.INT_DELTA }, ]) const DOCUMENT_COLUMNS = [ - {columnName: 'actor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'seq', columnId: 0 << 4 | COLUMN_TYPE.INT_DELTA}, - {columnName: 'maxOp', columnId: 1 << 4 | COLUMN_TYPE.INT_DELTA}, - {columnName: 'time', columnId: 2 << 4 | COLUMN_TYPE.INT_DELTA}, - {columnName: 'message', columnId: 3 << 4 | COLUMN_TYPE.STRING_RLE}, - {columnName: 'depsNum', columnId: 4 << 4 | COLUMN_TYPE.GROUP_CARD}, - {columnName: 'depsIndex', columnId: 4 << 4 | COLUMN_TYPE.INT_DELTA}, - {columnName: 'extraLen', columnId: 5 << 4 | COLUMN_TYPE.VALUE_LEN}, - {columnName: 'extraRaw', columnId: 5 << 4 | COLUMN_TYPE.VALUE_RAW} + { columnName: "actor", columnId: (0 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "seq", columnId: (0 << 4) | COLUMN_TYPE.INT_DELTA }, + { columnName: "maxOp", columnId: (1 << 4) | COLUMN_TYPE.INT_DELTA }, + { columnName: "time", columnId: (2 << 4) | COLUMN_TYPE.INT_DELTA }, + { columnName: "message", columnId: (3 << 4) | COLUMN_TYPE.STRING_RLE }, + { columnName: "depsNum", columnId: (4 << 4) | COLUMN_TYPE.GROUP_CARD }, + { columnName: "depsIndex", columnId: (4 << 4) | COLUMN_TYPE.INT_DELTA }, + { columnName: "extraLen", columnId: (5 << 4) | COLUMN_TYPE.VALUE_LEN }, + { columnName: "extraRaw", columnId: (5 << 4) | COLUMN_TYPE.VALUE_RAW }, ] /** @@ -102,8 +141,8 @@ function actorIdToActorNum(opId, actorIds) { if (!opId || !opId.actorId) return opId const counter = opId.counter const actorNum = actorIds.indexOf(opId.actorId) - if 
(actorNum < 0) throw new RangeError('missing actorId') // should not happen - return {counter, actorNum, actorId: opId.actorId} + if (actorNum < 0) throw new RangeError("missing actorId") // should not happen + return { counter, actorNum, actorId: opId.actorId } } /** @@ -131,15 +170,16 @@ function compareParsedOpIds(id1, id2) { * false. */ function parseAllOpIds(changes, single) { - const actors = {}, newChanges = [] + const actors = {}, + newChanges = [] for (let change of changes) { change = copyObject(change) actors[change.actor] = true change.ops = expandMultiOps(change.ops, change.startOp, change.actor) change.ops = change.ops.map(op => { op = copyObject(op) - if (op.obj !== '_root') op.obj = parseOpId(op.obj) - if (op.elemId && op.elemId !== '_head') op.elemId = parseOpId(op.elemId) + if (op.obj !== "_root") op.obj = parseOpId(op.obj) + if (op.elemId && op.elemId !== "_head") op.elemId = parseOpId(op.elemId) if (op.child) op.child = parseOpId(op.child) if (op.pred) op.pred = op.pred.map(parseOpId) if (op.obj.actorId) actors[op.obj.actorId] = true @@ -153,20 +193,26 @@ function parseAllOpIds(changes, single) { let actorIds = Object.keys(actors).sort() if (single) { - actorIds = [changes[0].actor].concat(actorIds.filter(actor => actor !== changes[0].actor)) + actorIds = [changes[0].actor].concat( + actorIds.filter(actor => actor !== changes[0].actor) + ) } for (let change of newChanges) { change.actorNum = actorIds.indexOf(change.actor) for (let i = 0; i < change.ops.length; i++) { let op = change.ops[i] - op.id = {counter: change.startOp + i, actorNum: change.actorNum, actorId: change.actor} + op.id = { + counter: change.startOp + i, + actorNum: change.actorNum, + actorId: change.actor, + } op.obj = actorIdToActorNum(op.obj, actorIds) op.elemId = actorIdToActorNum(op.elemId, actorIds) op.child = actorIdToActorNum(op.child, actorIds) op.pred = op.pred.map(pred => actorIdToActorNum(pred, actorIds)) } } - return {changes: newChanges, actorIds} + return { 
changes: newChanges, actorIds } } /** @@ -174,14 +220,16 @@ function parseAllOpIds(changes, single) { * `objActor` and `objCtr`. */ function encodeObjectId(op, columns) { - if (op.obj === '_root') { + if (op.obj === "_root") { columns.objActor.appendValue(null) columns.objCtr.appendValue(null) } else if (op.obj.actorNum >= 0 && op.obj.counter > 0) { columns.objActor.appendValue(op.obj.actorNum) columns.objCtr.appendValue(op.obj.counter) } else { - throw new RangeError(`Unexpected objectId reference: ${JSON.stringify(op.obj)}`) + throw new RangeError( + `Unexpected objectId reference: ${JSON.stringify(op.obj)}` + ) } } @@ -194,7 +242,7 @@ function encodeOperationKey(op, columns) { columns.keyActor.appendValue(null) columns.keyCtr.appendValue(null) columns.keyStr.appendValue(op.key) - } else if (op.elemId === '_head' && op.insert) { + } else if (op.elemId === "_head" && op.insert) { columns.keyActor.appendValue(null) columns.keyCtr.appendValue(0) columns.keyStr.appendValue(null) @@ -214,7 +262,7 @@ function encodeOperationAction(op, columns) { const actionCode = ACTIONS.indexOf(op.action) if (actionCode >= 0) { columns.action.appendValue(actionCode) - } else if (typeof op.action === 'number') { + } else if (typeof op.action === "number") { columns.action.appendValue(op.action) } else { throw new RangeError(`Unexpected operation action: ${op.action}`) @@ -228,26 +276,32 @@ function encodeOperationAction(op, columns) { function getNumberTypeAndValue(op) { switch (op.datatype) { case "counter": - return [ VALUE_TYPE.COUNTER, op.value ] + return [VALUE_TYPE.COUNTER, op.value] case "timestamp": - return [ VALUE_TYPE.TIMESTAMP, op.value ] + return [VALUE_TYPE.TIMESTAMP, op.value] case "uint": - return [ VALUE_TYPE.LEB128_UINT, op.value ] + return [VALUE_TYPE.LEB128_UINT, op.value] case "int": - return [ VALUE_TYPE.LEB128_INT, op.value ] + return [VALUE_TYPE.LEB128_INT, op.value] case "float64": { - const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64) + const 
buf64 = new ArrayBuffer(8), + view64 = new DataView(buf64) view64.setFloat64(0, op.value, true) - return [ VALUE_TYPE.IEEE754, new Uint8Array(buf64) ] + return [VALUE_TYPE.IEEE754, new Uint8Array(buf64)] } default: // increment operators get resolved here ... - if (Number.isInteger(op.value) && op.value <= Number.MAX_SAFE_INTEGER && op.value >= Number.MIN_SAFE_INTEGER) { - return [ VALUE_TYPE.LEB128_INT, op.value ] + if ( + Number.isInteger(op.value) && + op.value <= Number.MAX_SAFE_INTEGER && + op.value >= Number.MIN_SAFE_INTEGER + ) { + return [VALUE_TYPE.LEB128_INT, op.value] } else { - const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64) + const buf64 = new ArrayBuffer(8), + view64 = new DataView(buf64) view64.setFloat64(0, op.value, true) - return [ VALUE_TYPE.IEEE754, new Uint8Array(buf64) ] + return [VALUE_TYPE.IEEE754, new Uint8Array(buf64)] } } } @@ -257,19 +311,21 @@ function getNumberTypeAndValue(op) { * `valLen` and `valRaw`. */ function encodeValue(op, columns) { - if ((op.action !== 'set' && op.action !== 'inc') || op.value === null) { + if ((op.action !== "set" && op.action !== "inc") || op.value === null) { columns.valLen.appendValue(VALUE_TYPE.NULL) } else if (op.value === false) { columns.valLen.appendValue(VALUE_TYPE.FALSE) } else if (op.value === true) { columns.valLen.appendValue(VALUE_TYPE.TRUE) - } else if (typeof op.value === 'string') { + } else if (typeof op.value === "string") { const numBytes = columns.valRaw.appendRawString(op.value) - columns.valLen.appendValue(numBytes << 4 | VALUE_TYPE.UTF8) + columns.valLen.appendValue((numBytes << 4) | VALUE_TYPE.UTF8) } else if (ArrayBuffer.isView(op.value)) { - const numBytes = columns.valRaw.appendRawBytes(new Uint8Array(op.value.buffer)) - columns.valLen.appendValue(numBytes << 4 | VALUE_TYPE.BYTES) - } else if (typeof op.value === 'number') { + const numBytes = columns.valRaw.appendRawBytes( + new Uint8Array(op.value.buffer) + ) + columns.valLen.appendValue((numBytes << 4) | 
VALUE_TYPE.BYTES) + } else if (typeof op.value === "number") { let [typeTag, value] = getNumberTypeAndValue(op) let numBytes if (typeTag === VALUE_TYPE.LEB128_UINT) { @@ -279,13 +335,19 @@ function encodeValue(op, columns) { } else { numBytes = columns.valRaw.appendInt53(value) } - columns.valLen.appendValue(numBytes << 4 | typeTag) - } else if (typeof op.datatype === 'number' && op.datatype >= VALUE_TYPE.MIN_UNKNOWN && - op.datatype <= VALUE_TYPE.MAX_UNKNOWN && op.value instanceof Uint8Array) { + columns.valLen.appendValue((numBytes << 4) | typeTag) + } else if ( + typeof op.datatype === "number" && + op.datatype >= VALUE_TYPE.MIN_UNKNOWN && + op.datatype <= VALUE_TYPE.MAX_UNKNOWN && + op.value instanceof Uint8Array + ) { const numBytes = columns.valRaw.appendRawBytes(op.value) - columns.valLen.appendValue(numBytes << 4 | op.datatype) + columns.valLen.appendValue((numBytes << 4) | op.datatype) } else if (op.datatype) { - throw new RangeError(`Unknown datatype ${op.datatype} for value ${op.value}`) + throw new RangeError( + `Unknown datatype ${op.datatype} for value ${op.value}` + ) } else { throw new RangeError(`Unsupported value in operation: ${op.value}`) } @@ -299,31 +361,37 @@ function encodeValue(op, columns) { */ function decodeValue(sizeTag, bytes) { if (sizeTag === VALUE_TYPE.NULL) { - return {value: null} + return { value: null } } else if (sizeTag === VALUE_TYPE.FALSE) { - return {value: false} + return { value: false } } else if (sizeTag === VALUE_TYPE.TRUE) { - return {value: true} + return { value: true } } else if (sizeTag % 16 === VALUE_TYPE.UTF8) { - return {value: utf8ToString(bytes)} + return { value: utf8ToString(bytes) } } else { if (sizeTag % 16 === VALUE_TYPE.LEB128_UINT) { - return {value: new Decoder(bytes).readUint53(), datatype: "uint"} + return { value: new Decoder(bytes).readUint53(), datatype: "uint" } } else if (sizeTag % 16 === VALUE_TYPE.LEB128_INT) { - return {value: new Decoder(bytes).readInt53(), datatype: "int"} + return { 
value: new Decoder(bytes).readInt53(), datatype: "int" } } else if (sizeTag % 16 === VALUE_TYPE.IEEE754) { - const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength) + const view = new DataView( + bytes.buffer, + bytes.byteOffset, + bytes.byteLength + ) if (bytes.byteLength === 8) { - return {value: view.getFloat64(0, true), datatype: "float64"} + return { value: view.getFloat64(0, true), datatype: "float64" } } else { - throw new RangeError(`Invalid length for floating point number: ${bytes.byteLength}`) + throw new RangeError( + `Invalid length for floating point number: ${bytes.byteLength}` + ) } } else if (sizeTag % 16 === VALUE_TYPE.COUNTER) { - return {value: new Decoder(bytes).readInt53(), datatype: 'counter'} + return { value: new Decoder(bytes).readInt53(), datatype: "counter" } } else if (sizeTag % 16 === VALUE_TYPE.TIMESTAMP) { - return {value: new Decoder(bytes).readInt53(), datatype: 'timestamp'} + return { value: new Decoder(bytes).readInt53(), datatype: "timestamp" } } else { - return {value: bytes, datatype: sizeTag % 16} + return { value: bytes, datatype: sizeTag % 16 } } } } @@ -338,20 +406,24 @@ function decodeValue(sizeTag, bytes) { */ function decodeValueColumns(columns, colIndex, actorIds, result) { const { columnId, columnName, decoder } = columns[colIndex] - if (columnId % 8 === COLUMN_TYPE.VALUE_LEN && colIndex + 1 < columns.length && - columns[colIndex + 1].columnId === columnId + 1) { + if ( + columnId % 8 === COLUMN_TYPE.VALUE_LEN && + colIndex + 1 < columns.length && + columns[colIndex + 1].columnId === columnId + 1 + ) { const sizeTag = decoder.readValue() const rawValue = columns[colIndex + 1].decoder.readRawBytes(sizeTag >> 4) const { value, datatype } = decodeValue(sizeTag, rawValue) result[columnName] = value - if (datatype) result[columnName + '_datatype'] = datatype + if (datatype) result[columnName + "_datatype"] = datatype return 2 } else if (columnId % 8 === COLUMN_TYPE.ACTOR_ID) { const actorNum = 
decoder.readValue() if (actorNum === null) { result[columnName] = null } else { - if (!actorIds[actorNum]) throw new RangeError(`No actor index ${actorNum}`) + if (!actorIds[actorNum]) + throw new RangeError(`No actor index ${actorNum}`) result[columnName] = actorIds[actorNum] } } else { @@ -369,29 +441,29 @@ function decodeValueColumns(columns, colIndex, actorIds, result) { */ function encodeOps(ops, forDocument) { const columns = { - objActor : new RLEEncoder('uint'), - objCtr : new RLEEncoder('uint'), - keyActor : new RLEEncoder('uint'), - keyCtr : new DeltaEncoder(), - keyStr : new RLEEncoder('utf8'), - insert : new BooleanEncoder(), - action : new RLEEncoder('uint'), - valLen : new RLEEncoder('uint'), - valRaw : new Encoder(), - chldActor : new RLEEncoder('uint'), - chldCtr : new DeltaEncoder() + objActor: new RLEEncoder("uint"), + objCtr: new RLEEncoder("uint"), + keyActor: new RLEEncoder("uint"), + keyCtr: new DeltaEncoder(), + keyStr: new RLEEncoder("utf8"), + insert: new BooleanEncoder(), + action: new RLEEncoder("uint"), + valLen: new RLEEncoder("uint"), + valRaw: new Encoder(), + chldActor: new RLEEncoder("uint"), + chldCtr: new DeltaEncoder(), } if (forDocument) { - columns.idActor = new RLEEncoder('uint') - columns.idCtr = new DeltaEncoder() - columns.succNum = new RLEEncoder('uint') - columns.succActor = new RLEEncoder('uint') - columns.succCtr = new DeltaEncoder() + columns.idActor = new RLEEncoder("uint") + columns.idCtr = new DeltaEncoder() + columns.succNum = new RLEEncoder("uint") + columns.succActor = new RLEEncoder("uint") + columns.succCtr = new DeltaEncoder() } else { - columns.predNum = new RLEEncoder('uint') - columns.predCtr = new DeltaEncoder() - columns.predActor = new RLEEncoder('uint') + columns.predNum = new RLEEncoder("uint") + columns.predCtr = new DeltaEncoder() + columns.predActor = new RLEEncoder("uint") } for (let op of ops) { @@ -429,17 +501,22 @@ function encodeOps(ops, forDocument) { } let columnList = [] - for (let 
{columnName, columnId} of forDocument ? DOC_OPS_COLUMNS : CHANGE_COLUMNS) { - if (columns[columnName]) columnList.push({columnId, columnName, encoder: columns[columnName]}) + for (let { columnName, columnId } of forDocument + ? DOC_OPS_COLUMNS + : CHANGE_COLUMNS) { + if (columns[columnName]) + columnList.push({ columnId, columnName, encoder: columns[columnName] }) } return columnList.sort((a, b) => a.columnId - b.columnId) } function validDatatype(value, datatype) { if (datatype === undefined) { - return (typeof value === 'string' || typeof value === 'boolean' || value === null) + return ( + typeof value === "string" || typeof value === "boolean" || value === null + ) } else { - return typeof value === 'number' + return typeof value === "number" } } @@ -447,23 +524,37 @@ function expandMultiOps(ops, startOp, actor) { let opNum = startOp let expandedOps = [] for (const op of ops) { - if (op.action === 'set' && op.values && op.insert) { - if (op.pred.length !== 0) throw new RangeError('multi-insert pred must be empty') + if (op.action === "set" && op.values && op.insert) { + if (op.pred.length !== 0) + throw new RangeError("multi-insert pred must be empty") let lastElemId = op.elemId const datatype = op.datatype for (const value of op.values) { - if (!validDatatype(value, datatype)) throw new RangeError(`Decode failed: bad value/datatype association (${value},${datatype})`) - expandedOps.push({action: 'set', obj: op.obj, elemId: lastElemId, datatype, value, pred: [], insert: true}) + if (!validDatatype(value, datatype)) + throw new RangeError( + `Decode failed: bad value/datatype association (${value},${datatype})` + ) + expandedOps.push({ + action: "set", + obj: op.obj, + elemId: lastElemId, + datatype, + value, + pred: [], + insert: true, + }) lastElemId = `${opNum}@${actor}` opNum += 1 } - } else if (op.action === 'del' && op.multiOp > 1) { - if (op.pred.length !== 1) throw new RangeError('multiOp deletion must have exactly one pred') - const startElemId = 
parseOpId(op.elemId), startPred = parseOpId(op.pred[0]) + } else if (op.action === "del" && op.multiOp > 1) { + if (op.pred.length !== 1) + throw new RangeError("multiOp deletion must have exactly one pred") + const startElemId = parseOpId(op.elemId), + startPred = parseOpId(op.pred[0]) for (let i = 0; i < op.multiOp; i++) { const elemId = `${startElemId.counter + i}@${startElemId.actorId}` const pred = [`${startPred.counter + i}@${startPred.actorId}`] - expandedOps.push({action: 'del', obj: op.obj, elemId, pred}) + expandedOps.push({ action: "del", obj: op.obj, elemId, pred }) opNum += 1 } } else { @@ -483,26 +574,44 @@ function expandMultiOps(ops, startOp, actor) { function decodeOps(ops, forDocument) { const newOps = [] for (let op of ops) { - const obj = (op.objCtr === null) ? '_root' : `${op.objCtr}@${op.objActor}` - const elemId = op.keyStr ? undefined : (op.keyCtr === 0 ? '_head' : `${op.keyCtr}@${op.keyActor}`) + const obj = op.objCtr === null ? "_root" : `${op.objCtr}@${op.objActor}` + const elemId = op.keyStr + ? undefined + : op.keyCtr === 0 + ? "_head" + : `${op.keyCtr}@${op.keyActor}` const action = ACTIONS[op.action] || op.action - const newOp = elemId ? {obj, elemId, action} : {obj, key: op.keyStr, action} + const newOp = elemId + ? 
{ obj, elemId, action } + : { obj, key: op.keyStr, action } newOp.insert = !!op.insert - if (ACTIONS[op.action] === 'set' || ACTIONS[op.action] === 'inc') { + if (ACTIONS[op.action] === "set" || ACTIONS[op.action] === "inc") { newOp.value = op.valLen if (op.valLen_datatype) newOp.datatype = op.valLen_datatype } if (!!op.chldCtr !== !!op.chldActor) { - throw new RangeError(`Mismatched child columns: ${op.chldCtr} and ${op.chldActor}`) + throw new RangeError( + `Mismatched child columns: ${op.chldCtr} and ${op.chldActor}` + ) } if (op.chldCtr !== null) newOp.child = `${op.chldCtr}@${op.chldActor}` if (forDocument) { newOp.id = `${op.idCtr}@${op.idActor}` newOp.succ = op.succNum.map(succ => `${succ.succCtr}@${succ.succActor}`) - checkSortedOpIds(op.succNum.map(succ => ({counter: succ.succCtr, actorId: succ.succActor}))) + checkSortedOpIds( + op.succNum.map(succ => ({ + counter: succ.succCtr, + actorId: succ.succActor, + })) + ) } else { newOp.pred = op.predNum.map(pred => `${pred.predCtr}@${pred.predActor}`) - checkSortedOpIds(op.predNum.map(pred => ({counter: pred.predCtr, actorId: pred.predActor}))) + checkSortedOpIds( + op.predNum.map(pred => ({ + counter: pred.predCtr, + actorId: pred.predActor, + })) + ) } newOps.push(newOp) } @@ -516,7 +625,7 @@ function checkSortedOpIds(opIds) { let last = null for (let opId of opIds) { if (last && compareParsedOpIds(last, opId) !== -1) { - throw new RangeError('operation IDs are not in ascending order') + throw new RangeError("operation IDs are not in ascending order") } last = opId } @@ -528,11 +637,11 @@ function encoderByColumnId(columnId) { } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { return new BooleanEncoder() } else if ((columnId & 7) === COLUMN_TYPE.STRING_RLE) { - return new RLEEncoder('utf8') + return new RLEEncoder("utf8") } else if ((columnId & 7) === COLUMN_TYPE.VALUE_RAW) { return new Encoder() } else { - return new RLEEncoder('uint') + return new RLEEncoder("uint") } } @@ -542,31 +651,49 @@ function 
decoderByColumnId(columnId, buffer) { } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { return new BooleanDecoder(buffer) } else if ((columnId & 7) === COLUMN_TYPE.STRING_RLE) { - return new RLEDecoder('utf8', buffer) + return new RLEDecoder("utf8", buffer) } else if ((columnId & 7) === COLUMN_TYPE.VALUE_RAW) { return new Decoder(buffer) } else { - return new RLEDecoder('uint', buffer) + return new RLEDecoder("uint", buffer) } } function makeDecoders(columns, columnSpec) { const emptyBuf = new Uint8Array(0) - let decoders = [], columnIndex = 0, specIndex = 0 + let decoders = [], + columnIndex = 0, + specIndex = 0 while (columnIndex < columns.length || specIndex < columnSpec.length) { - if (columnIndex === columns.length || - (specIndex < columnSpec.length && columnSpec[specIndex].columnId < columns[columnIndex].columnId)) { - const {columnId, columnName} = columnSpec[specIndex] - decoders.push({columnId, columnName, decoder: decoderByColumnId(columnId, emptyBuf)}) + if ( + columnIndex === columns.length || + (specIndex < columnSpec.length && + columnSpec[specIndex].columnId < columns[columnIndex].columnId) + ) { + const { columnId, columnName } = columnSpec[specIndex] + decoders.push({ + columnId, + columnName, + decoder: decoderByColumnId(columnId, emptyBuf), + }) specIndex++ - } else if (specIndex === columnSpec.length || columns[columnIndex].columnId < columnSpec[specIndex].columnId) { - const {columnId, buffer} = columns[columnIndex] - decoders.push({columnId, decoder: decoderByColumnId(columnId, buffer)}) + } else if ( + specIndex === columnSpec.length || + columns[columnIndex].columnId < columnSpec[specIndex].columnId + ) { + const { columnId, buffer } = columns[columnIndex] + decoders.push({ columnId, decoder: decoderByColumnId(columnId, buffer) }) columnIndex++ - } else { // columns[columnIndex].columnId === columnSpec[specIndex].columnId - const {columnId, buffer} = columns[columnIndex], {columnName} = columnSpec[specIndex] - decoders.push({columnId, 
columnName, decoder: decoderByColumnId(columnId, buffer)}) + } else { + // columns[columnIndex].columnId === columnSpec[specIndex].columnId + const { columnId, buffer } = columns[columnIndex], + { columnName } = columnSpec[specIndex] + decoders.push({ + columnId, + columnName, + decoder: decoderByColumnId(columnId, buffer), + }) columnIndex++ specIndex++ } @@ -578,16 +705,22 @@ function decodeColumns(columns, actorIds, columnSpec) { columns = makeDecoders(columns, columnSpec) let parsedRows = [] while (columns.some(col => !col.decoder.done)) { - let row = {}, col = 0 + let row = {}, + col = 0 while (col < columns.length) { const columnId = columns[col].columnId - let groupId = columnId >> 4, groupCols = 1 - while (col + groupCols < columns.length && columns[col + groupCols].columnId >> 4 === groupId) { + let groupId = columnId >> 4, + groupCols = 1 + while ( + col + groupCols < columns.length && + columns[col + groupCols].columnId >> 4 === groupId + ) { groupCols++ } if (columnId % 8 === COLUMN_TYPE.GROUP_CARD) { - const values = [], count = columns[col].decoder.readValue() + const values = [], + count = columns[col].decoder.readValue() for (let i = 0; i < count; i++) { let value = {} for (let colOffset = 1; colOffset < groupCols; colOffset++) { @@ -611,20 +744,25 @@ function decodeColumnInfo(decoder) { // deflate-compressed. We ignore this bit when checking whether columns are sorted by ID. 
const COLUMN_ID_MASK = (-1 ^ COLUMN_TYPE_DEFLATE) >>> 0 - let lastColumnId = -1, columns = [], numColumns = decoder.readUint53() + let lastColumnId = -1, + columns = [], + numColumns = decoder.readUint53() for (let i = 0; i < numColumns; i++) { - const columnId = decoder.readUint53(), bufferLen = decoder.readUint53() + const columnId = decoder.readUint53(), + bufferLen = decoder.readUint53() if ((columnId & COLUMN_ID_MASK) <= (lastColumnId & COLUMN_ID_MASK)) { - throw new RangeError('Columns must be in ascending order') + throw new RangeError("Columns must be in ascending order") } lastColumnId = columnId - columns.push({columnId, bufferLen}) + columns.push({ columnId, bufferLen }) } return columns } function encodeColumnInfo(encoder, columns) { - const nonEmptyColumns = columns.filter(column => column.encoder.buffer.byteLength > 0) + const nonEmptyColumns = columns.filter( + column => column.encoder.buffer.byteLength > 0 + ) encoder.appendUint53(nonEmptyColumns.length) for (let column of nonEmptyColumns) { encoder.appendUint53(column.columnId) @@ -633,19 +771,21 @@ function encodeColumnInfo(encoder, columns) { } function decodeChangeHeader(decoder) { - const numDeps = decoder.readUint53(), deps = [] + const numDeps = decoder.readUint53(), + deps = [] for (let i = 0; i < numDeps; i++) { deps.push(bytesToHexString(decoder.readRawBytes(32))) } let change = { - actor: decoder.readHexString(), - seq: decoder.readUint53(), + actor: decoder.readHexString(), + seq: decoder.readUint53(), startOp: decoder.readUint53(), - time: decoder.readInt53(), + time: decoder.readInt53(), message: decoder.readPrefixedString(), - deps + deps, } - const actorIds = [change.actor], numActorIds = decoder.readUint53() + const actorIds = [change.actor], + numActorIds = decoder.readUint53() for (let i = 0; i < numActorIds; i++) actorIds.push(decoder.readHexString()) change.actorIds = actorIds return change @@ -676,31 +816,47 @@ function encodeContainer(chunkType, encodeContentsCallback) { const 
sha256 = new Hash() sha256.update(headerBuf) sha256.update(bodyBuf.subarray(HEADER_SPACE)) - const hash = sha256.digest(), checksum = hash.subarray(0, CHECKSUM_SIZE) + const hash = sha256.digest(), + checksum = hash.subarray(0, CHECKSUM_SIZE) // Copy header into the body buffer so that they are contiguous - bodyBuf.set(MAGIC_BYTES, HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE - MAGIC_BYTES.byteLength) - bodyBuf.set(checksum, HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE) - bodyBuf.set(headerBuf, HEADER_SPACE - headerBuf.byteLength) - return {hash, bytes: bodyBuf.subarray(HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE - MAGIC_BYTES.byteLength)} + bodyBuf.set( + MAGIC_BYTES, + HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE - MAGIC_BYTES.byteLength + ) + bodyBuf.set(checksum, HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE) + bodyBuf.set(headerBuf, HEADER_SPACE - headerBuf.byteLength) + return { + hash, + bytes: bodyBuf.subarray( + HEADER_SPACE - + headerBuf.byteLength - + CHECKSUM_SIZE - + MAGIC_BYTES.byteLength + ), + } } function decodeContainerHeader(decoder, computeHash) { if (!equalBytes(decoder.readRawBytes(MAGIC_BYTES.byteLength), MAGIC_BYTES)) { - throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83') + throw new RangeError("Data does not begin with magic bytes 85 6f 4a 83") } const expectedHash = decoder.readRawBytes(4) const hashStartOffset = decoder.offset const chunkType = decoder.readByte() const chunkLength = decoder.readUint53() - const header = {chunkType, chunkLength, chunkData: decoder.readRawBytes(chunkLength)} + const header = { + chunkType, + chunkLength, + chunkData: decoder.readRawBytes(chunkLength), + } if (computeHash) { const sha256 = new Hash() sha256.update(decoder.buf.subarray(hashStartOffset, decoder.offset)) const binaryHash = sha256.digest() if (!equalBytes(binaryHash.subarray(0, 4), expectedHash)) { - throw new RangeError('checksum does not match data') + throw new RangeError("checksum does 
not match data") } header.hash = bytesToHexString(binaryHash) } @@ -712,7 +868,7 @@ function encodeChange(changeObj) { const change = changes[0] const { hash, bytes } = encodeContainer(CHUNK_TYPE_CHANGE, encoder => { - if (!Array.isArray(change.deps)) throw new TypeError('deps is not an array') + if (!Array.isArray(change.deps)) throw new TypeError("deps is not an array") encoder.appendUint53(change.deps.length) for (let hash of change.deps.slice().sort()) { encoder.appendRawBytes(hexStringToBytes(hash)) @@ -721,7 +877,7 @@ function encodeChange(changeObj) { encoder.appendUint53(change.seq) encoder.appendUint53(change.startOp) encoder.appendInt53(change.time) - encoder.appendPrefixedString(change.message || '') + encoder.appendPrefixedString(change.message || "") encoder.appendUint53(actorIds.length - 1) for (let actor of actorIds.slice(1)) encoder.appendHexString(actor) @@ -733,9 +889,11 @@ function encodeChange(changeObj) { const hexHash = bytesToHexString(hash) if (changeObj.hash && changeObj.hash !== hexHash) { - throw new RangeError(`Change hash does not match encoding: ${changeObj.hash} != ${hexHash}`) + throw new RangeError( + `Change hash does not match encoding: ${changeObj.hash} != ${hexHash}` + ) } - return (bytes.byteLength >= DEFLATE_MIN_SIZE) ? deflateChange(bytes) : bytes + return bytes.byteLength >= DEFLATE_MIN_SIZE ? 
deflateChange(bytes) : bytes } function decodeChangeColumns(buffer) { @@ -743,14 +901,15 @@ function decodeChangeColumns(buffer) { const decoder = new Decoder(buffer) const header = decodeContainerHeader(decoder, true) const chunkDecoder = new Decoder(header.chunkData) - if (!decoder.done) throw new RangeError('Encoded change has trailing data') - if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + if (!decoder.done) throw new RangeError("Encoded change has trailing data") + if (header.chunkType !== CHUNK_TYPE_CHANGE) + throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) const change = decodeChangeHeader(chunkDecoder) const columns = decodeColumnInfo(chunkDecoder) for (let i = 0; i < columns.length; i++) { if ((columns[i].columnId & COLUMN_TYPE_DEFLATE) !== 0) { - throw new RangeError('change must not contain deflated columns') + throw new RangeError("change must not contain deflated columns") } columns[i].buffer = chunkDecoder.readRawBytes(columns[i].bufferLen) } @@ -769,7 +928,10 @@ function decodeChangeColumns(buffer) { */ function decodeChange(buffer) { const change = decodeChangeColumns(buffer) - change.ops = decodeOps(decodeColumns(change.columns, change.actorIds, CHANGE_COLUMNS), false) + change.ops = decodeOps( + decodeColumns(change.columns, change.actorIds, CHANGE_COLUMNS), + false + ) delete change.actorIds delete change.columns return change @@ -784,7 +946,7 @@ function decodeChangeMeta(buffer, computeHash) { if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) const header = decodeContainerHeader(new Decoder(buffer), computeHash) if (header.chunkType !== CHUNK_TYPE_CHANGE) { - throw new RangeError('Buffer chunk type is not a change') + throw new RangeError("Buffer chunk type is not a change") } const meta = decodeChangeHeader(new Decoder(header.chunkData)) meta.change = buffer @@ -797,7 +959,8 @@ function decodeChangeMeta(buffer, computeHash) { */ function 
deflateChange(buffer) { const header = decodeContainerHeader(new Decoder(buffer), false) - if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + if (header.chunkType !== CHUNK_TYPE_CHANGE) + throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) const compressed = pako.deflateRaw(header.chunkData) const encoder = new Encoder() encoder.appendRawBytes(buffer.subarray(0, 8)) // copy MAGIC_BYTES and checksum @@ -812,7 +975,8 @@ function deflateChange(buffer) { */ function inflateChange(buffer) { const header = decodeContainerHeader(new Decoder(buffer), false) - if (header.chunkType !== CHUNK_TYPE_DEFLATE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + if (header.chunkType !== CHUNK_TYPE_DEFLATE) + throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) const decompressed = pako.inflateRaw(header.chunkData) const encoder = new Encoder() encoder.appendRawBytes(buffer.subarray(0, 8)) // copy MAGIC_BYTES and checksum @@ -827,7 +991,9 @@ function inflateChange(buffer) { * returns an array of subarrays, each subarray containing one change. 
*/ function splitContainers(buffer) { - let decoder = new Decoder(buffer), chunks = [], startOffset = 0 + let decoder = new Decoder(buffer), + chunks = [], + startOffset = 0 while (!decoder.done) { decodeContainerHeader(decoder, false) chunks.push(buffer.subarray(startOffset, decoder.offset)) @@ -846,7 +1012,10 @@ function decodeChanges(binaryChanges) { for (let chunk of splitContainers(binaryChange)) { if (chunk[8] === CHUNK_TYPE_DOCUMENT) { decoded = decoded.concat(decodeDocument(chunk)) - } else if (chunk[8] === CHUNK_TYPE_CHANGE || chunk[8] === CHUNK_TYPE_DEFLATE) { + } else if ( + chunk[8] === CHUNK_TYPE_CHANGE || + chunk[8] === CHUNK_TYPE_DEFLATE + ) { decoded.push(decodeChange(chunk)) } else { // ignoring chunk of unknown type @@ -858,9 +1027,10 @@ function decodeChanges(binaryChanges) { function sortOpIds(a, b) { if (a === b) return 0 - if (a === '_root') return -1 - if (b === '_root') return +1 - const a_ = parseOpId(a), b_ = parseOpId(b) + if (a === "_root") return -1 + if (b === "_root") return +1 + const a_ = parseOpId(a), + b_ = parseOpId(b) if (a_.counter < b_.counter) return -1 if (a_.counter > b_.counter) return +1 if (a_.actorId < b_.actorId) return -1 @@ -879,26 +1049,46 @@ function groupChangeOps(changes, ops) { change.ops = [] if (!changesByActor[change.actor]) changesByActor[change.actor] = [] if (change.seq !== changesByActor[change.actor].length + 1) { - throw new RangeError(`Expected seq = ${changesByActor[change.actor].length + 1}, got ${change.seq}`) + throw new RangeError( + `Expected seq = ${changesByActor[change.actor].length + 1}, got ${ + change.seq + }` + ) } - if (change.seq > 1 && changesByActor[change.actor][change.seq - 2].maxOp > change.maxOp) { - throw new RangeError('maxOp must increase monotonically per actor') + if ( + change.seq > 1 && + changesByActor[change.actor][change.seq - 2].maxOp > change.maxOp + ) { + throw new RangeError("maxOp must increase monotonically per actor") } changesByActor[change.actor].push(change) } 
let opsById = {} for (let op of ops) { - if (op.action === 'del') throw new RangeError('document should not contain del operations') + if (op.action === "del") + throw new RangeError("document should not contain del operations") op.pred = opsById[op.id] ? opsById[op.id].pred : [] opsById[op.id] = op for (let succ of op.succ) { if (!opsById[succ]) { if (op.elemId) { const elemId = op.insert ? op.id : op.elemId - opsById[succ] = {id: succ, action: 'del', obj: op.obj, elemId, pred: []} + opsById[succ] = { + id: succ, + action: "del", + obj: op.obj, + elemId, + pred: [], + } } else { - opsById[succ] = {id: succ, action: 'del', obj: op.obj, key: op.key, pred: []} + opsById[succ] = { + id: succ, + action: "del", + obj: op.obj, + key: op.key, + pred: [], + } } } opsById[succ].pred.push(op.id) @@ -906,14 +1096,15 @@ function groupChangeOps(changes, ops) { delete op.succ } for (let op of Object.values(opsById)) { - if (op.action === 'del') ops.push(op) + if (op.action === "del") ops.push(op) } for (let op of ops) { const { counter, actorId } = parseOpId(op.id) const actorChanges = changesByActor[actorId] // Binary search to find the change that should contain this operation - let left = 0, right = actorChanges.length + let left = 0, + right = actorChanges.length while (left < right) { const index = Math.floor((left + right) / 2) if (actorChanges[index].maxOp < counter) { @@ -933,7 +1124,8 @@ function groupChangeOps(changes, ops) { change.startOp = change.maxOp - change.ops.length + 1 delete change.maxOp for (let i = 0; i < change.ops.length; i++) { - const op = change.ops[i], expectedId = `${change.startOp + i}@${change.actor}` + const op = change.ops[i], + expectedId = `${change.startOp + i}@${change.actor}` if (op.id !== expectedId) { throw new RangeError(`Expected opId ${expectedId}, got ${op.id}`) } @@ -949,7 +1141,9 @@ function decodeDocumentChanges(changes, expectedHeads) { change.deps = [] for (let index of change.depsNum.map(d => d.depsIndex)) { if (!changes[index] 
|| !changes[index].hash) { - throw new RangeError(`No hash for index ${index} while processing index ${i}`) + throw new RangeError( + `No hash for index ${index} while processing index ${i}` + ) } const hash = changes[index].hash change.deps.push(hash) @@ -970,18 +1164,30 @@ function decodeDocumentChanges(changes, expectedHeads) { } const actualHeads = Object.keys(heads).sort() - let headsEqual = (actualHeads.length === expectedHeads.length), i = 0 + let headsEqual = actualHeads.length === expectedHeads.length, + i = 0 while (headsEqual && i < actualHeads.length) { - headsEqual = (actualHeads[i] === expectedHeads[i]) + headsEqual = actualHeads[i] === expectedHeads[i] i++ } if (!headsEqual) { - throw new RangeError(`Mismatched heads hashes: expected ${expectedHeads.join(', ')}, got ${actualHeads.join(', ')}`) + throw new RangeError( + `Mismatched heads hashes: expected ${expectedHeads.join( + ", " + )}, got ${actualHeads.join(", ")}` + ) } } function encodeDocumentHeader(doc) { - const { changesColumns, opsColumns, actorIds, heads, headsIndexes, extraBytes } = doc + const { + changesColumns, + opsColumns, + actorIds, + heads, + headsIndexes, + extraBytes, + } = doc for (let column of changesColumns) deflateColumn(column) for (let column of opsColumns) deflateColumn(column) @@ -996,7 +1202,8 @@ function encodeDocumentHeader(doc) { } encodeColumnInfo(encoder, changesColumns) encodeColumnInfo(encoder, opsColumns) - for (let column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) + for (let column of changesColumns) + encoder.appendRawBytes(column.encoder.buffer) for (let column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) for (let index of headsIndexes) encoder.appendUint53(index) if (extraBytes) encoder.appendRawBytes(extraBytes) @@ -1007,14 +1214,19 @@ function decodeDocumentHeader(buffer) { const documentDecoder = new Decoder(buffer) const header = decodeContainerHeader(documentDecoder, true) const decoder = new 
Decoder(header.chunkData) - if (!documentDecoder.done) throw new RangeError('Encoded document has trailing data') - if (header.chunkType !== CHUNK_TYPE_DOCUMENT) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + if (!documentDecoder.done) + throw new RangeError("Encoded document has trailing data") + if (header.chunkType !== CHUNK_TYPE_DOCUMENT) + throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - const actorIds = [], numActors = decoder.readUint53() + const actorIds = [], + numActors = decoder.readUint53() for (let i = 0; i < numActors; i++) { actorIds.push(decoder.readHexString()) } - const heads = [], headsIndexes = [], numHeads = decoder.readUint53() + const heads = [], + headsIndexes = [], + numHeads = decoder.readUint53() for (let i = 0; i < numHeads; i++) { heads.push(bytesToHexString(decoder.readRawBytes(32))) } @@ -1033,14 +1245,27 @@ function decodeDocumentHeader(buffer) { for (let i = 0; i < numHeads; i++) headsIndexes.push(decoder.readUint53()) } - const extraBytes = decoder.readRawBytes(decoder.buf.byteLength - decoder.offset) - return { changesColumns, opsColumns, actorIds, heads, headsIndexes, extraBytes } + const extraBytes = decoder.readRawBytes( + decoder.buf.byteLength - decoder.offset + ) + return { + changesColumns, + opsColumns, + actorIds, + heads, + headsIndexes, + extraBytes, + } } function decodeDocument(buffer) { - const { changesColumns, opsColumns, actorIds, heads } = decodeDocumentHeader(buffer) + const { changesColumns, opsColumns, actorIds, heads } = + decodeDocumentHeader(buffer) const changes = decodeColumns(changesColumns, actorIds, DOCUMENT_COLUMNS) - const ops = decodeOps(decodeColumns(opsColumns, actorIds, DOC_OPS_COLUMNS), true) + const ops = decodeOps( + decodeColumns(opsColumns, actorIds, DOC_OPS_COLUMNS), + true + ) groupChangeOps(changes, ops) decodeDocumentChanges(changes, heads) return changes @@ -1051,7 +1276,7 @@ function decodeDocument(buffer) { */ function deflateColumn(column) { 
if (column.encoder.buffer.byteLength >= DEFLATE_MIN_SIZE) { - column.encoder = {buffer: pako.deflateRaw(column.encoder.buffer)} + column.encoder = { buffer: pako.deflateRaw(column.encoder.buffer) } column.columnId |= COLUMN_TYPE_DEFLATE } } @@ -1067,8 +1292,24 @@ function inflateColumn(column) { } module.exports = { - COLUMN_TYPE, VALUE_TYPE, ACTIONS, OBJECT_TYPE, DOC_OPS_COLUMNS, CHANGE_COLUMNS, DOCUMENT_COLUMNS, - encoderByColumnId, decoderByColumnId, makeDecoders, decodeValue, - splitContainers, encodeChange, decodeChangeColumns, decodeChange, decodeChangeMeta, decodeChanges, - encodeDocumentHeader, decodeDocumentHeader, decodeDocument + COLUMN_TYPE, + VALUE_TYPE, + ACTIONS, + OBJECT_TYPE, + DOC_OPS_COLUMNS, + CHANGE_COLUMNS, + DOCUMENT_COLUMNS, + encoderByColumnId, + decoderByColumnId, + makeDecoders, + decodeValue, + splitContainers, + encodeChange, + decodeChangeColumns, + decodeChange, + decodeChangeMeta, + decodeChanges, + encodeDocumentHeader, + decodeDocumentHeader, + decodeDocument, } diff --git a/javascript/test/legacy/common.js b/javascript/test/legacy/common.js index 02e91392..7668e982 100644 --- a/javascript/test/legacy/common.js +++ b/javascript/test/legacy/common.js @@ -1,5 +1,5 @@ function isObject(obj) { - return typeof obj === 'object' && obj !== null + return typeof obj === "object" && obj !== null } /** @@ -20,11 +20,11 @@ function copyObject(obj) { * with an actor ID, separated by an `@` sign) and returns an object `{counter, actorId}`. 
*/ function parseOpId(opId) { - const match = /^(\d+)@(.*)$/.exec(opId || '') + const match = /^(\d+)@(.*)$/.exec(opId || "") if (!match) { throw new RangeError(`Not a valid opId: ${opId}`) } - return {counter: parseInt(match[1], 10), actorId: match[2]} + return { counter: parseInt(match[1], 10), actorId: match[2] } } /** @@ -32,7 +32,7 @@ function parseOpId(opId) { */ function equalBytes(array1, array2) { if (!(array1 instanceof Uint8Array) || !(array2 instanceof Uint8Array)) { - throw new TypeError('equalBytes can only compare Uint8Arrays') + throw new TypeError("equalBytes can only compare Uint8Arrays") } if (array1.byteLength !== array2.byteLength) return false for (let i = 0; i < array1.byteLength; i++) { @@ -51,5 +51,9 @@ function createArrayOfNulls(length) { } module.exports = { - isObject, copyObject, parseOpId, equalBytes, createArrayOfNulls + isObject, + copyObject, + parseOpId, + equalBytes, + createArrayOfNulls, } diff --git a/javascript/test/legacy/encoding.js b/javascript/test/legacy/encoding.js index 92b62df6..f7650faf 100644 --- a/javascript/test/legacy/encoding.js +++ b/javascript/test/legacy/encoding.js @@ -6,7 +6,7 @@ * https://github.com/anonyco/FastestSmallestTextEncoderDecoder */ const utf8encoder = new TextEncoder() -const utf8decoder = new TextDecoder('utf-8') +const utf8decoder = new TextDecoder("utf-8") function stringToUtf8(string) { return utf8encoder.encode(string) @@ -20,30 +20,48 @@ function utf8ToString(buffer) { * Converts a string consisting of hexadecimal digits into an Uint8Array. 
*/ function hexStringToBytes(value) { - if (typeof value !== 'string') { - throw new TypeError('value is not a string') + if (typeof value !== "string") { + throw new TypeError("value is not a string") } if (!/^([0-9a-f][0-9a-f])*$/.test(value)) { - throw new RangeError('value is not hexadecimal') + throw new RangeError("value is not hexadecimal") } - if (value === '') { + if (value === "") { return new Uint8Array(0) } else { return new Uint8Array(value.match(/../g).map(b => parseInt(b, 16))) } } -const NIBBLE_TO_HEX = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'] +const NIBBLE_TO_HEX = [ + "0", + "1", + "2", + "3", + "4", + "5", + "6", + "7", + "8", + "9", + "a", + "b", + "c", + "d", + "e", + "f", +] const BYTE_TO_HEX = new Array(256) for (let i = 0; i < 256; i++) { - BYTE_TO_HEX[i] = `${NIBBLE_TO_HEX[(i >>> 4) & 0xf]}${NIBBLE_TO_HEX[i & 0xf]}`; + BYTE_TO_HEX[i] = `${NIBBLE_TO_HEX[(i >>> 4) & 0xf]}${NIBBLE_TO_HEX[i & 0xf]}` } /** * Converts a Uint8Array into the equivalent hexadecimal string. */ function bytesToHexString(bytes) { - let hex = '', len = bytes.byteLength + let hex = "", + len = bytes.byteLength for (let i = 0; i < len; i++) { hex += BYTE_TO_HEX[bytes[i]] } @@ -95,14 +113,17 @@ class Encoder { * appends it to the buffer. Returns the number of bytes written. */ appendUint32(value) { - if (!Number.isInteger(value)) throw new RangeError('value is not an integer') - if (value < 0 || value > 0xffffffff) throw new RangeError('number out of range') + if (!Number.isInteger(value)) + throw new RangeError("value is not an integer") + if (value < 0 || value > 0xffffffff) + throw new RangeError("number out of range") const numBytes = Math.max(1, Math.ceil((32 - Math.clz32(value)) / 7)) if (this.offset + numBytes > this.buf.byteLength) this.grow() for (let i = 0; i < numBytes; i++) { - this.buf[this.offset + i] = (value & 0x7f) | (i === numBytes - 1 ? 
0x00 : 0x80) + this.buf[this.offset + i] = + (value & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) value >>>= 7 // zero-filling right shift } this.offset += numBytes @@ -115,14 +136,19 @@ class Encoder { * it to the buffer. Returns the number of bytes written. */ appendInt32(value) { - if (!Number.isInteger(value)) throw new RangeError('value is not an integer') - if (value < -0x80000000 || value > 0x7fffffff) throw new RangeError('number out of range') + if (!Number.isInteger(value)) + throw new RangeError("value is not an integer") + if (value < -0x80000000 || value > 0x7fffffff) + throw new RangeError("number out of range") - const numBytes = Math.ceil((33 - Math.clz32(value >= 0 ? value : -value - 1)) / 7) + const numBytes = Math.ceil( + (33 - Math.clz32(value >= 0 ? value : -value - 1)) / 7 + ) if (this.offset + numBytes > this.buf.byteLength) this.grow() for (let i = 0; i < numBytes; i++) { - this.buf[this.offset + i] = (value & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) + this.buf[this.offset + i] = + (value & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) value >>= 7 // sign-propagating right shift } this.offset += numBytes @@ -135,9 +161,10 @@ class Encoder { * (53 bits). */ appendUint53(value) { - if (!Number.isInteger(value)) throw new RangeError('value is not an integer') + if (!Number.isInteger(value)) + throw new RangeError("value is not an integer") if (value < 0 || value > Number.MAX_SAFE_INTEGER) { - throw new RangeError('number out of range') + throw new RangeError("number out of range") } const high32 = Math.floor(value / 0x100000000) const low32 = (value & 0xffffffff) >>> 0 // right shift to interpret as unsigned @@ -150,9 +177,10 @@ class Encoder { * (53 bits). 
*/ appendInt53(value) { - if (!Number.isInteger(value)) throw new RangeError('value is not an integer') + if (!Number.isInteger(value)) + throw new RangeError("value is not an integer") if (value < Number.MIN_SAFE_INTEGER || value > Number.MAX_SAFE_INTEGER) { - throw new RangeError('number out of range') + throw new RangeError("number out of range") } const high32 = Math.floor(value / 0x100000000) const low32 = (value & 0xffffffff) >>> 0 // right shift to interpret as unsigned @@ -167,10 +195,10 @@ class Encoder { */ appendUint64(high32, low32) { if (!Number.isInteger(high32) || !Number.isInteger(low32)) { - throw new RangeError('value is not an integer') + throw new RangeError("value is not an integer") } if (high32 < 0 || high32 > 0xffffffff || low32 < 0 || low32 > 0xffffffff) { - throw new RangeError('number out of range') + throw new RangeError("number out of range") } if (high32 === 0) return this.appendUint32(low32) @@ -180,10 +208,12 @@ class Encoder { this.buf[this.offset + i] = (low32 & 0x7f) | 0x80 low32 >>>= 7 // zero-filling right shift } - this.buf[this.offset + 4] = (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80) + this.buf[this.offset + 4] = + (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80) high32 >>>= 3 for (let i = 5; i < numBytes; i++) { - this.buf[this.offset + i] = (high32 & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) + this.buf[this.offset + i] = + (high32 & 0x7f) | (i === numBytes - 1 ? 
0x00 : 0x80) high32 >>>= 7 } this.offset += numBytes @@ -200,25 +230,35 @@ class Encoder { */ appendInt64(high32, low32) { if (!Number.isInteger(high32) || !Number.isInteger(low32)) { - throw new RangeError('value is not an integer') + throw new RangeError("value is not an integer") } - if (high32 < -0x80000000 || high32 > 0x7fffffff || low32 < -0x80000000 || low32 > 0xffffffff) { - throw new RangeError('number out of range') + if ( + high32 < -0x80000000 || + high32 > 0x7fffffff || + low32 < -0x80000000 || + low32 > 0xffffffff + ) { + throw new RangeError("number out of range") } low32 >>>= 0 // interpret as unsigned if (high32 === 0 && low32 <= 0x7fffffff) return this.appendInt32(low32) - if (high32 === -1 && low32 >= 0x80000000) return this.appendInt32(low32 - 0x100000000) + if (high32 === -1 && low32 >= 0x80000000) + return this.appendInt32(low32 - 0x100000000) - const numBytes = Math.ceil((65 - Math.clz32(high32 >= 0 ? high32 : -high32 - 1)) / 7) + const numBytes = Math.ceil( + (65 - Math.clz32(high32 >= 0 ? high32 : -high32 - 1)) / 7 + ) if (this.offset + numBytes > this.buf.byteLength) this.grow() for (let i = 0; i < 4; i++) { this.buf[this.offset + i] = (low32 & 0x7f) | 0x80 low32 >>>= 7 // zero-filling right shift } - this.buf[this.offset + 4] = (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80) + this.buf[this.offset + 4] = + (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80) high32 >>= 3 // sign-propagating right shift for (let i = 5; i < numBytes; i++) { - this.buf[this.offset + i] = (high32 & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) + this.buf[this.offset + i] = + (high32 & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) high32 >>= 7 } this.offset += numBytes @@ -243,7 +283,7 @@ class Encoder { * number of bytes appended. 
*/ appendRawString(value) { - if (typeof value !== 'string') throw new TypeError('value is not a string') + if (typeof value !== "string") throw new TypeError("value is not a string") return this.appendRawBytes(stringToUtf8(value)) } @@ -262,7 +302,7 @@ class Encoder { * (where the length is encoded as an unsigned LEB128 integer). */ appendPrefixedString(value) { - if (typeof value !== 'string') throw new TypeError('value is not a string') + if (typeof value !== "string") throw new TypeError("value is not a string") this.appendPrefixedBytes(stringToUtf8(value)) return this } @@ -281,8 +321,7 @@ class Encoder { * Flushes any unwritten data to the buffer. Call this before reading from * the buffer constructed by this Encoder. */ - finish() { - } + finish() {} } /** @@ -321,7 +360,7 @@ class Decoder { */ skip(bytes) { if (this.offset + bytes > this.buf.byteLength) { - throw new RangeError('cannot skip beyond end of buffer') + throw new RangeError("cannot skip beyond end of buffer") } this.offset += bytes } @@ -339,18 +378,20 @@ class Decoder { * Throws an exception if the value doesn't fit in a 32-bit unsigned int. 
*/ readUint32() { - let result = 0, shift = 0 + let result = 0, + shift = 0 while (this.offset < this.buf.byteLength) { const nextByte = this.buf[this.offset] - if (shift === 28 && (nextByte & 0xf0) !== 0) { // more than 5 bytes, or value > 0xffffffff - throw new RangeError('number out of range') + if (shift === 28 && (nextByte & 0xf0) !== 0) { + // more than 5 bytes, or value > 0xffffffff + throw new RangeError("number out of range") } - result = (result | (nextByte & 0x7f) << shift) >>> 0 // right shift to interpret value as unsigned + result = (result | ((nextByte & 0x7f) << shift)) >>> 0 // right shift to interpret value as unsigned shift += 7 this.offset++ if ((nextByte & 0x80) === 0) return result } - throw new RangeError('buffer ended with incomplete number') + throw new RangeError("buffer ended with incomplete number") } /** @@ -358,13 +399,17 @@ class Decoder { * Throws an exception if the value doesn't fit in a 32-bit signed int. */ readInt32() { - let result = 0, shift = 0 + let result = 0, + shift = 0 while (this.offset < this.buf.byteLength) { const nextByte = this.buf[this.offset] - if ((shift === 28 && (nextByte & 0x80) !== 0) || // more than 5 bytes - (shift === 28 && (nextByte & 0x40) === 0 && (nextByte & 0x38) !== 0) || // positive int > 0x7fffffff - (shift === 28 && (nextByte & 0x40) !== 0 && (nextByte & 0x38) !== 0x38)) { // negative int < -0x80000000 - throw new RangeError('number out of range') + if ( + (shift === 28 && (nextByte & 0x80) !== 0) || // more than 5 bytes + (shift === 28 && (nextByte & 0x40) === 0 && (nextByte & 0x38) !== 0) || // positive int > 0x7fffffff + (shift === 28 && (nextByte & 0x40) !== 0 && (nextByte & 0x38) !== 0x38) + ) { + // negative int < -0x80000000 + throw new RangeError("number out of range") } result |= (nextByte & 0x7f) << shift shift += 7 @@ -378,7 +423,7 @@ class Decoder { } } } - throw new RangeError('buffer ended with incomplete number') + throw new RangeError("buffer ended with incomplete number") } /** 
@@ -389,7 +434,7 @@ class Decoder { readUint53() { const { low32, high32 } = this.readUint64() if (high32 < 0 || high32 > 0x1fffff) { - throw new RangeError('number out of range') + throw new RangeError("number out of range") } return high32 * 0x100000000 + low32 } @@ -401,8 +446,12 @@ class Decoder { */ readInt53() { const { low32, high32 } = this.readInt64() - if (high32 < -0x200000 || (high32 === -0x200000 && low32 === 0) || high32 > 0x1fffff) { - throw new RangeError('number out of range') + if ( + high32 < -0x200000 || + (high32 === -0x200000 && low32 === 0) || + high32 > 0x1fffff + ) { + throw new RangeError("number out of range") } return high32 * 0x100000000 + low32 } @@ -414,10 +463,12 @@ class Decoder { * `{high32, low32}`. */ readUint64() { - let low32 = 0, high32 = 0, shift = 0 + let low32 = 0, + high32 = 0, + shift = 0 while (this.offset < this.buf.byteLength && shift <= 28) { const nextByte = this.buf[this.offset] - low32 = (low32 | (nextByte & 0x7f) << shift) >>> 0 // right shift to interpret value as unsigned + low32 = (low32 | ((nextByte & 0x7f) << shift)) >>> 0 // right shift to interpret value as unsigned if (shift === 28) { high32 = (nextByte & 0x70) >>> 4 } @@ -429,15 +480,16 @@ class Decoder { shift = 3 while (this.offset < this.buf.byteLength) { const nextByte = this.buf[this.offset] - if (shift === 31 && (nextByte & 0xfe) !== 0) { // more than 10 bytes, or value > 2^64 - 1 - throw new RangeError('number out of range') + if (shift === 31 && (nextByte & 0xfe) !== 0) { + // more than 10 bytes, or value > 2^64 - 1 + throw new RangeError("number out of range") } - high32 = (high32 | (nextByte & 0x7f) << shift) >>> 0 + high32 = (high32 | ((nextByte & 0x7f) << shift)) >>> 0 shift += 7 this.offset++ if ((nextByte & 0x80) === 0) return { high32, low32 } } - throw new RangeError('buffer ended with incomplete number') + throw new RangeError("buffer ended with incomplete number") } /** @@ -448,17 +500,20 @@ class Decoder { * sign of the `high32` half 
indicates the sign of the 64-bit number. */ readInt64() { - let low32 = 0, high32 = 0, shift = 0 + let low32 = 0, + high32 = 0, + shift = 0 while (this.offset < this.buf.byteLength && shift <= 28) { const nextByte = this.buf[this.offset] - low32 = (low32 | (nextByte & 0x7f) << shift) >>> 0 // right shift to interpret value as unsigned + low32 = (low32 | ((nextByte & 0x7f) << shift)) >>> 0 // right shift to interpret value as unsigned if (shift === 28) { high32 = (nextByte & 0x70) >>> 4 } shift += 7 this.offset++ if ((nextByte & 0x80) === 0) { - if ((nextByte & 0x40) !== 0) { // sign-extend negative integer + if ((nextByte & 0x40) !== 0) { + // sign-extend negative integer if (shift < 32) low32 = (low32 | (-1 << shift)) >>> 0 high32 |= -1 << Math.max(shift - 32, 0) } @@ -472,19 +527,20 @@ class Decoder { // On the 10th byte there are only two valid values: all 7 value bits zero // (if the value is positive) or all 7 bits one (if the value is negative) if (shift === 31 && nextByte !== 0 && nextByte !== 0x7f) { - throw new RangeError('number out of range') + throw new RangeError("number out of range") } high32 |= (nextByte & 0x7f) << shift shift += 7 this.offset++ if ((nextByte & 0x80) === 0) { - if ((nextByte & 0x40) !== 0 && shift < 32) { // sign-extend negative integer + if ((nextByte & 0x40) !== 0 && shift < 32) { + // sign-extend negative integer high32 |= -1 << shift } return { high32, low32 } } } - throw new RangeError('buffer ended with incomplete number') + throw new RangeError("buffer ended with incomplete number") } /** @@ -494,7 +550,7 @@ class Decoder { readRawBytes(length) { const start = this.offset if (start + length > this.buf.byteLength) { - throw new RangeError('subarray exceeds buffer size') + throw new RangeError("subarray exceeds buffer size") } this.offset += length return this.buf.subarray(start, this.offset) @@ -559,7 +615,7 @@ class RLEEncoder extends Encoder { constructor(type) { super() this.type = type - this.state = 'empty' + this.state = 
"empty" this.lastValue = undefined this.count = 0 this.literal = [] @@ -578,76 +634,81 @@ class RLEEncoder extends Encoder { */ _appendValue(value, repetitions = 1) { if (repetitions <= 0) return - if (this.state === 'empty') { - this.state = (value === null ? 'nulls' : (repetitions === 1 ? 'loneValue' : 'repetition')) + if (this.state === "empty") { + this.state = + value === null + ? "nulls" + : repetitions === 1 + ? "loneValue" + : "repetition" this.lastValue = value this.count = repetitions - } else if (this.state === 'loneValue') { + } else if (this.state === "loneValue") { if (value === null) { this.flush() - this.state = 'nulls' + this.state = "nulls" this.count = repetitions } else if (value === this.lastValue) { - this.state = 'repetition' + this.state = "repetition" this.count = 1 + repetitions } else if (repetitions > 1) { this.flush() - this.state = 'repetition' + this.state = "repetition" this.count = repetitions this.lastValue = value } else { - this.state = 'literal' + this.state = "literal" this.literal = [this.lastValue] this.lastValue = value } - } else if (this.state === 'repetition') { + } else if (this.state === "repetition") { if (value === null) { this.flush() - this.state = 'nulls' + this.state = "nulls" this.count = repetitions } else if (value === this.lastValue) { this.count += repetitions } else if (repetitions > 1) { this.flush() - this.state = 'repetition' + this.state = "repetition" this.count = repetitions this.lastValue = value } else { this.flush() - this.state = 'loneValue' + this.state = "loneValue" this.lastValue = value } - } else if (this.state === 'literal') { + } else if (this.state === "literal") { if (value === null) { this.literal.push(this.lastValue) this.flush() - this.state = 'nulls' + this.state = "nulls" this.count = repetitions } else if (value === this.lastValue) { this.flush() - this.state = 'repetition' + this.state = "repetition" this.count = 1 + repetitions } else if (repetitions > 1) { 
this.literal.push(this.lastValue) this.flush() - this.state = 'repetition' + this.state = "repetition" this.count = repetitions this.lastValue = value } else { this.literal.push(this.lastValue) this.lastValue = value } - } else if (this.state === 'nulls') { + } else if (this.state === "nulls") { if (value === null) { this.count += repetitions } else if (repetitions > 1) { this.flush() - this.state = 'repetition' + this.state = "repetition" this.count = repetitions this.lastValue = value } else { this.flush() - this.state = 'loneValue' + this.state = "loneValue" this.lastValue = value } } @@ -666,13 +727,16 @@ class RLEEncoder extends Encoder { */ copyFrom(decoder, options = {}) { const { count, sumValues, sumShift } = options - if (!(decoder instanceof RLEDecoder) || (decoder.type !== this.type)) { - throw new TypeError('incompatible type of decoder') + if (!(decoder instanceof RLEDecoder) || decoder.type !== this.type) { + throw new TypeError("incompatible type of decoder") } - let remaining = (typeof count === 'number' ? count : Number.MAX_SAFE_INTEGER) - let nonNullValues = 0, sum = 0 - if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) - if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues} + let remaining = typeof count === "number" ? count : Number.MAX_SAFE_INTEGER + let nonNullValues = 0, + sum = 0 + if (count && remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${count} values`) + if (remaining === 0 || decoder.done) + return sumValues ? { nonNullValues, sum } : { nonNullValues } // Copy a value so that we have a well-defined starting state. 
NB: when super.copyFrom() is // called by the DeltaEncoder subclass, the following calls to readValue() and appendValue() @@ -684,87 +748,101 @@ class RLEEncoder extends Encoder { remaining -= numNulls decoder.count -= numNulls - 1 this.appendValue(null, numNulls) - if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) - if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues} + if (count && remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${count} values`) + if (remaining === 0 || decoder.done) + return sumValues ? { nonNullValues, sum } : { nonNullValues } firstValue = decoder.readValue() - if (firstValue === null) throw new RangeError('null run must be followed by non-null value') + if (firstValue === null) + throw new RangeError("null run must be followed by non-null value") } this.appendValue(firstValue) remaining-- nonNullValues++ - if (sumValues) sum += (sumShift ? (firstValue >>> sumShift) : firstValue) - if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) - if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues} + if (sumValues) sum += sumShift ? firstValue >>> sumShift : firstValue + if (count && remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${count} values`) + if (remaining === 0 || decoder.done) + return sumValues ? 
{ nonNullValues, sum } : { nonNullValues } // Copy data at the record level without expanding repetitions - let firstRun = (decoder.count > 0) + let firstRun = decoder.count > 0 while (remaining > 0 && !decoder.done) { if (!firstRun) decoder.readRecord() const numValues = Math.min(decoder.count, remaining) decoder.count -= numValues - if (decoder.state === 'literal') { + if (decoder.state === "literal") { nonNullValues += numValues for (let i = 0; i < numValues; i++) { - if (decoder.done) throw new RangeError('incomplete literal') + if (decoder.done) throw new RangeError("incomplete literal") const value = decoder.readRawValue() - if (value === decoder.lastValue) throw new RangeError('Repetition of values is not allowed in literal') + if (value === decoder.lastValue) + throw new RangeError( + "Repetition of values is not allowed in literal" + ) decoder.lastValue = value this._appendValue(value) - if (sumValues) sum += (sumShift ? (value >>> sumShift) : value) + if (sumValues) sum += sumShift ? value >>> sumShift : value } - } else if (decoder.state === 'repetition') { + } else if (decoder.state === "repetition") { nonNullValues += numValues - if (sumValues) sum += numValues * (sumShift ? (decoder.lastValue >>> sumShift) : decoder.lastValue) + if (sumValues) + sum += + numValues * + (sumShift ? 
decoder.lastValue >>> sumShift : decoder.lastValue) const value = decoder.lastValue this._appendValue(value) if (numValues > 1) { this._appendValue(value) - if (this.state !== 'repetition') throw new RangeError(`Unexpected state ${this.state}`) + if (this.state !== "repetition") + throw new RangeError(`Unexpected state ${this.state}`) this.count += numValues - 2 } - } else if (decoder.state === 'nulls') { + } else if (decoder.state === "nulls") { this._appendValue(null) - if (this.state !== 'nulls') throw new RangeError(`Unexpected state ${this.state}`) + if (this.state !== "nulls") + throw new RangeError(`Unexpected state ${this.state}`) this.count += numValues - 1 } firstRun = false remaining -= numValues } - if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) - return sumValues ? {nonNullValues, sum} : {nonNullValues} + if (count && remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${count} values`) + return sumValues ? { nonNullValues, sum } : { nonNullValues } } /** * Private method, do not call from outside the class. */ flush() { - if (this.state === 'loneValue') { + if (this.state === "loneValue") { this.appendInt32(-1) this.appendRawValue(this.lastValue) - } else if (this.state === 'repetition') { + } else if (this.state === "repetition") { this.appendInt53(this.count) this.appendRawValue(this.lastValue) - } else if (this.state === 'literal') { + } else if (this.state === "literal") { this.appendInt53(-this.literal.length) for (let v of this.literal) this.appendRawValue(v) - } else if (this.state === 'nulls') { + } else if (this.state === "nulls") { this.appendInt32(0) this.appendUint53(this.count) } - this.state = 'empty' + this.state = "empty" } /** * Private method, do not call from outside the class. 
*/ appendRawValue(value) { - if (this.type === 'int') { + if (this.type === "int") { this.appendInt53(value) - } else if (this.type === 'uint') { + } else if (this.type === "uint") { this.appendUint53(value) - } else if (this.type === 'utf8') { + } else if (this.type === "utf8") { this.appendPrefixedString(value) } else { throw new RangeError(`Unknown RLEEncoder datatype: ${this.type}`) @@ -776,9 +854,9 @@ class RLEEncoder extends Encoder { * the buffer constructed by this Encoder. */ finish() { - if (this.state === 'literal') this.literal.push(this.lastValue) + if (this.state === "literal") this.literal.push(this.lastValue) // Don't write anything if the only values we have seen are nulls - if (this.state !== 'nulls' || this.offset > 0) this.flush() + if (this.state !== "nulls" || this.offset > 0) this.flush() } } @@ -800,7 +878,7 @@ class RLEDecoder extends Decoder { * position, and true if we are at the end of the buffer. */ get done() { - return (this.count === 0) && (this.offset === this.buf.byteLength) + return this.count === 0 && this.offset === this.buf.byteLength } /** @@ -821,9 +899,10 @@ class RLEDecoder extends Decoder { if (this.done) return null if (this.count === 0) this.readRecord() this.count -= 1 - if (this.state === 'literal') { + if (this.state === "literal") { const value = this.readRawValue() - if (value === this.lastValue) throw new RangeError('Repetition of values is not allowed in literal') + if (value === this.lastValue) + throw new RangeError("Repetition of values is not allowed in literal") this.lastValue = value return value } else { @@ -839,20 +918,22 @@ class RLEDecoder extends Decoder { if (this.count === 0) { this.count = this.readInt53() if (this.count > 0) { - this.lastValue = (this.count <= numSkip) ? this.skipRawValues(1) : this.readRawValue() - this.state = 'repetition' + this.lastValue = + this.count <= numSkip ? 
this.skipRawValues(1) : this.readRawValue() + this.state = "repetition" } else if (this.count < 0) { this.count = -this.count - this.state = 'literal' - } else { // this.count == 0 + this.state = "literal" + } else { + // this.count == 0 this.count = this.readUint53() this.lastValue = null - this.state = 'nulls' + this.state = "nulls" } } const consume = Math.min(numSkip, this.count) - if (this.state === 'literal') this.skipRawValues(consume) + if (this.state === "literal") this.skipRawValues(consume) numSkip -= consume this.count -= consume } @@ -866,23 +947,34 @@ class RLEDecoder extends Decoder { this.count = this.readInt53() if (this.count > 1) { const value = this.readRawValue() - if ((this.state === 'repetition' || this.state === 'literal') && this.lastValue === value) { - throw new RangeError('Successive repetitions with the same value are not allowed') + if ( + (this.state === "repetition" || this.state === "literal") && + this.lastValue === value + ) { + throw new RangeError( + "Successive repetitions with the same value are not allowed" + ) } - this.state = 'repetition' + this.state = "repetition" this.lastValue = value } else if (this.count === 1) { - throw new RangeError('Repetition count of 1 is not allowed, use a literal instead') + throw new RangeError( + "Repetition count of 1 is not allowed, use a literal instead" + ) } else if (this.count < 0) { this.count = -this.count - if (this.state === 'literal') throw new RangeError('Successive literals are not allowed') - this.state = 'literal' - } else { // this.count == 0 - if (this.state === 'nulls') throw new RangeError('Successive null runs are not allowed') + if (this.state === "literal") + throw new RangeError("Successive literals are not allowed") + this.state = "literal" + } else { + // this.count == 0 + if (this.state === "nulls") + throw new RangeError("Successive null runs are not allowed") this.count = this.readUint53() - if (this.count === 0) throw new RangeError('Zero-length null runs are not 
allowed') + if (this.count === 0) + throw new RangeError("Zero-length null runs are not allowed") this.lastValue = null - this.state = 'nulls' + this.state = "nulls" } } @@ -891,11 +983,11 @@ class RLEDecoder extends Decoder { * Reads one value of the datatype configured on construction. */ readRawValue() { - if (this.type === 'int') { + if (this.type === "int") { return this.readInt53() - } else if (this.type === 'uint') { + } else if (this.type === "uint") { return this.readUint53() - } else if (this.type === 'utf8') { + } else if (this.type === "utf8") { return this.readPrefixedString() } else { throw new RangeError(`Unknown RLEDecoder datatype: ${this.type}`) @@ -907,14 +999,14 @@ class RLEDecoder extends Decoder { * Skips over `num` values of the datatype configured on construction. */ skipRawValues(num) { - if (this.type === 'utf8') { + if (this.type === "utf8") { for (let i = 0; i < num; i++) this.skip(this.readUint53()) } else { while (num > 0 && this.offset < this.buf.byteLength) { if ((this.buf[this.offset] & 0x80) === 0) num-- this.offset++ } - if (num > 0) throw new RangeError('cannot skip beyond end of buffer') + if (num > 0) throw new RangeError("cannot skip beyond end of buffer") } } } @@ -931,7 +1023,7 @@ class RLEDecoder extends Decoder { */ class DeltaEncoder extends RLEEncoder { constructor() { - super('int') + super("int") this.absoluteValue = 0 } @@ -941,7 +1033,7 @@ class DeltaEncoder extends RLEEncoder { */ appendValue(value, repetitions = 1) { if (repetitions <= 0) return - if (typeof value === 'number') { + if (typeof value === "number") { super.appendValue(value - this.absoluteValue, 1) this.absoluteValue = value if (repetitions > 1) super.appendValue(0, repetitions - 1) @@ -957,26 +1049,29 @@ class DeltaEncoder extends RLEEncoder { */ copyFrom(decoder, options = {}) { if (options.sumValues) { - throw new RangeError('unsupported options for DeltaEncoder.copyFrom()') + throw new RangeError("unsupported options for DeltaEncoder.copyFrom()") 
} if (!(decoder instanceof DeltaDecoder)) { - throw new TypeError('incompatible type of decoder') + throw new TypeError("incompatible type of decoder") } let remaining = options.count - if (remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${remaining} values`) + if (remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${remaining} values`) if (remaining === 0 || decoder.done) return // Copy any null values, and the first non-null value, so that appendValue() computes the // difference between the encoder's last value and the decoder's first (absolute) value. - let value = decoder.readValue(), nulls = 0 + let value = decoder.readValue(), + nulls = 0 this.appendValue(value) if (value === null) { nulls = decoder.count + 1 if (remaining !== undefined && remaining < nulls) nulls = remaining decoder.count -= nulls - 1 this.count += nulls - 1 - if (remaining > nulls && decoder.done) throw new RangeError(`cannot copy ${remaining} values`) + if (remaining > nulls && decoder.done) + throw new RangeError(`cannot copy ${remaining} values`) if (remaining === nulls || decoder.done) return // The next value read is certain to be non-null because we're not at the end of the decoder, @@ -989,7 +1084,10 @@ class DeltaEncoder extends RLEEncoder { // value, while subsequent values are relative. Thus, the sum of all of the (non-null) copied // values must equal the absolute value of the final element copied. 
if (remaining !== undefined) remaining -= nulls + 1 - const { nonNullValues, sum } = super.copyFrom(decoder, {count: remaining, sumValues: true}) + const { nonNullValues, sum } = super.copyFrom(decoder, { + count: remaining, + sumValues: true, + }) if (nonNullValues > 0) { this.absoluteValue = sum decoder.absoluteValue = sum @@ -1003,7 +1101,7 @@ class DeltaEncoder extends RLEEncoder { */ class DeltaDecoder extends RLEDecoder { constructor(buffer) { - super('int', buffer) + super("int", buffer) this.absoluteValue = 0 } @@ -1036,12 +1134,12 @@ class DeltaDecoder extends RLEDecoder { while (numSkip > 0 && !this.done) { if (this.count === 0) this.readRecord() const consume = Math.min(numSkip, this.count) - if (this.state === 'literal') { + if (this.state === "literal") { for (let i = 0; i < consume; i++) { this.lastValue = this.readRawValue() this.absoluteValue += this.lastValue } - } else if (this.state === 'repetition') { + } else if (this.state === "repetition") { this.absoluteValue += consume * this.lastValue } numSkip -= consume @@ -1090,12 +1188,13 @@ class BooleanEncoder extends Encoder { */ copyFrom(decoder, options = {}) { if (!(decoder instanceof BooleanDecoder)) { - throw new TypeError('incompatible type of decoder') + throw new TypeError("incompatible type of decoder") } const { count } = options - let remaining = (typeof count === 'number' ? count : Number.MAX_SAFE_INTEGER) - if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) + let remaining = typeof count === "number" ? 
count : Number.MAX_SAFE_INTEGER + if (count && remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${count} values`) if (remaining === 0 || decoder.done) return // Copy one value to bring decoder and encoder state into sync, then finish that value's repetitions @@ -1108,7 +1207,8 @@ class BooleanEncoder extends Encoder { while (remaining > 0 && !decoder.done) { decoder.count = decoder.readUint53() - if (decoder.count === 0) throw new RangeError('Zero-length runs are not allowed') + if (decoder.count === 0) + throw new RangeError("Zero-length runs are not allowed") decoder.lastValue = !decoder.lastValue this.appendUint53(this.count) @@ -1119,7 +1219,8 @@ class BooleanEncoder extends Encoder { remaining -= numCopied } - if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) + if (count && remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${count} values`) } /** @@ -1151,7 +1252,7 @@ class BooleanDecoder extends Decoder { * position, and true if we are at the end of the buffer. 
*/ get done() { - return (this.count === 0) && (this.offset === this.buf.byteLength) + return this.count === 0 && this.offset === this.buf.byteLength } /** @@ -1174,7 +1275,7 @@ class BooleanDecoder extends Decoder { this.count = this.readUint53() this.lastValue = !this.lastValue if (this.count === 0 && !this.firstRun) { - throw new RangeError('Zero-length runs are not allowed') + throw new RangeError("Zero-length runs are not allowed") } this.firstRun = false } @@ -1190,7 +1291,8 @@ class BooleanDecoder extends Decoder { if (this.count === 0) { this.count = this.readUint53() this.lastValue = !this.lastValue - if (this.count === 0) throw new RangeError('Zero-length runs are not allowed') + if (this.count === 0) + throw new RangeError("Zero-length runs are not allowed") } if (this.count < numSkip) { numSkip -= this.count @@ -1204,6 +1306,16 @@ class BooleanDecoder extends Decoder { } module.exports = { - stringToUtf8, utf8ToString, hexStringToBytes, bytesToHexString, - Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder + stringToUtf8, + utf8ToString, + hexStringToBytes, + bytesToHexString, + Encoder, + Decoder, + RLEEncoder, + RLEDecoder, + DeltaEncoder, + DeltaDecoder, + BooleanEncoder, + BooleanDecoder, } diff --git a/javascript/test/legacy/sync.js b/javascript/test/legacy/sync.js index 3bb1571d..233c4292 100644 --- a/javascript/test/legacy/sync.js +++ b/javascript/test/legacy/sync.js @@ -17,9 +17,14 @@ */ const Backend = null //require('./backend') -const { hexStringToBytes, bytesToHexString, Encoder, Decoder } = require('./encoding') -const { decodeChangeMeta } = require('./columnar') -const { copyObject } = require('./common') +const { + hexStringToBytes, + bytesToHexString, + Encoder, + Decoder, +} = require("./encoding") +const { decodeChangeMeta } = require("./columnar") +const { copyObject } = require("./common") const HASH_SIZE = 32 // 256 bits = 32 bytes const MESSAGE_TYPE_SYNC = 0x42 // first byte of a 
sync message, for identification @@ -28,7 +33,8 @@ const PEER_STATE_TYPE = 0x43 // first byte of an encoded peer state, for identif // These constants correspond to a 1% false positive rate. The values can be changed without // breaking compatibility of the network protocol, since the parameters used for a particular // Bloom filter are encoded in the wire format. -const BITS_PER_ENTRY = 10, NUM_PROBES = 7 +const BITS_PER_ENTRY = 10, + NUM_PROBES = 7 /** * A Bloom filter implementation that can be serialised to a byte array for transmission @@ -36,13 +42,15 @@ const BITS_PER_ENTRY = 10, NUM_PROBES = 7 * so this implementation does not perform its own hashing. */ class BloomFilter { - constructor (arg) { + constructor(arg) { if (Array.isArray(arg)) { // arg is an array of SHA256 hashes in hexadecimal encoding this.numEntries = arg.length this.numBitsPerEntry = BITS_PER_ENTRY this.numProbes = NUM_PROBES - this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) + this.bits = new Uint8Array( + Math.ceil((this.numEntries * this.numBitsPerEntry) / 8) + ) for (let hash of arg) this.addHash(hash) } else if (arg instanceof Uint8Array) { if (arg.byteLength === 0) { @@ -55,10 +63,12 @@ class BloomFilter { this.numEntries = decoder.readUint32() this.numBitsPerEntry = decoder.readUint32() this.numProbes = decoder.readUint32() - this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) + this.bits = decoder.readRawBytes( + Math.ceil((this.numEntries * this.numBitsPerEntry) / 8) + ) } } else { - throw new TypeError('invalid argument') + throw new TypeError("invalid argument") } } @@ -86,12 +96,32 @@ class BloomFilter { * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf */ getProbes(hash) { - const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength - if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`) + const hashBytes = hexStringToBytes(hash), + 
modulo = 8 * this.bits.byteLength + if (hashBytes.byteLength !== 32) + throw new RangeError(`Not a 256-bit hash: ${hash}`) // on the next three lines, the right shift means interpret value as unsigned - let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo - let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo - let z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo + let x = + ((hashBytes[0] | + (hashBytes[1] << 8) | + (hashBytes[2] << 16) | + (hashBytes[3] << 24)) >>> + 0) % + modulo + let y = + ((hashBytes[4] | + (hashBytes[5] << 8) | + (hashBytes[6] << 16) | + (hashBytes[7] << 24)) >>> + 0) % + modulo + let z = + ((hashBytes[8] | + (hashBytes[9] << 8) | + (hashBytes[10] << 16) | + (hashBytes[11] << 24)) >>> + 0) % + modulo const probes = [x] for (let i = 1; i < this.numProbes; i++) { x = (x + y) % modulo @@ -128,12 +158,14 @@ class BloomFilter { * Encodes a sorted array of SHA-256 hashes (as hexadecimal strings) into a byte array. */ function encodeHashes(encoder, hashes) { - if (!Array.isArray(hashes)) throw new TypeError('hashes must be an array') + if (!Array.isArray(hashes)) throw new TypeError("hashes must be an array") encoder.appendUint32(hashes.length) for (let i = 0; i < hashes.length; i++) { - if (i > 0 && hashes[i - 1] >= hashes[i]) throw new RangeError('hashes must be sorted') + if (i > 0 && hashes[i - 1] >= hashes[i]) + throw new RangeError("hashes must be sorted") const bytes = hexStringToBytes(hashes[i]) - if (bytes.byteLength !== HASH_SIZE) throw new TypeError('heads hashes must be 256 bits') + if (bytes.byteLength !== HASH_SIZE) + throw new TypeError("heads hashes must be 256 bits") encoder.appendRawBytes(bytes) } } @@ -143,7 +175,8 @@ function encodeHashes(encoder, hashes) { * array of hex strings. 
*/ function decodeHashes(decoder) { - let length = decoder.readUint32(), hashes = [] + let length = decoder.readUint32(), + hashes = [] for (let i = 0; i < length; i++) { hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE))) } @@ -183,11 +216,11 @@ function decodeSyncMessage(bytes) { const heads = decodeHashes(decoder) const need = decodeHashes(decoder) const haveCount = decoder.readUint32() - let message = {heads, need, have: [], changes: []} + let message = { heads, need, have: [], changes: [] } for (let i = 0; i < haveCount; i++) { const lastSync = decodeHashes(decoder) const bloom = decoder.readPrefixedBytes(decoder) - message.have.push({lastSync, bloom}) + message.have.push({ lastSync, bloom }) } const changeCount = decoder.readUint32() for (let i = 0; i < changeCount; i++) { @@ -234,7 +267,7 @@ function decodeSyncState(bytes) { function makeBloomFilter(backend, lastSync) { const newChanges = Backend.getChanges(backend, lastSync) const hashes = newChanges.map(change => decodeChangeMeta(change, true).hash) - return {lastSync, bloom: new BloomFilter(hashes).bytes} + return { lastSync, bloom: new BloomFilter(hashes).bytes } } /** @@ -245,20 +278,26 @@ function makeBloomFilter(backend, lastSync) { */ function getChangesToSend(backend, have, need) { if (have.length === 0) { - return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined) + return need + .map(hash => Backend.getChangeByHash(backend, hash)) + .filter(change => change !== undefined) } - let lastSyncHashes = {}, bloomFilters = [] + let lastSyncHashes = {}, + bloomFilters = [] for (let h of have) { for (let hash of h.lastSync) lastSyncHashes[hash] = true bloomFilters.push(new BloomFilter(h.bloom)) } // Get all changes that were added since the last sync - const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)) - .map(change => decodeChangeMeta(change, true)) + const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)).map( + 
change => decodeChangeMeta(change, true) + ) - let changeHashes = {}, dependents = {}, hashesToSend = {} + let changeHashes = {}, + dependents = {}, + hashesToSend = {} for (let change of changes) { changeHashes[change.hash] = true @@ -292,7 +331,8 @@ function getChangesToSend(backend, have, need) { let changesToSend = [] for (let hash of need) { hashesToSend[hash] = true - if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()? + if (!changeHashes[hash]) { + // Change is not among those returned by getMissingChanges()? const change = Backend.getChangeByHash(backend, hash) if (change) changesToSend.push(change) } @@ -317,7 +357,7 @@ function initSyncState() { } function compareArrays(a, b) { - return (a.length === b.length) && a.every((v, i) => v === b[i]) + return a.length === b.length && a.every((v, i) => v === b[i]) } /** @@ -329,10 +369,19 @@ function generateSyncMessage(backend, syncState) { throw new Error("generateSyncMessage called with no Automerge document") } if (!syncState) { - throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()") + throw new Error( + "generateSyncMessage requires a syncState, which can be created with initSyncState()" + ) } - let { sharedHeads, lastSentHeads, theirHeads, theirNeed, theirHave, sentHashes } = syncState + let { + sharedHeads, + lastSentHeads, + theirHeads, + theirNeed, + theirHave, + sentHashes, + } = syncState const ourHeads = Backend.getHeads(backend) // Hashes to explicitly request from the remote peer: any missing dependencies of unapplied @@ -356,18 +405,28 @@ function generateSyncMessage(backend, syncState) { const lastSync = theirHave[0].lastSync if (!lastSync.every(hash => Backend.getChangeByHash(backend, hash))) { // we need to queue them to send us a fresh sync message, the one they sent is uninteligible so we don't know what they need - const resetMsg = {heads: ourHeads, need: [], have: [{ lastSync: [], bloom: new Uint8Array(0) 
}], changes: []} + const resetMsg = { + heads: ourHeads, + need: [], + have: [{ lastSync: [], bloom: new Uint8Array(0) }], + changes: [], + } return [syncState, encodeSyncMessage(resetMsg)] } } // XXX: we should limit ourselves to only sending a subset of all the messages, probably limited by a total message size // these changes should ideally be RLE encoded but we haven't implemented that yet. - let changesToSend = Array.isArray(theirHave) && Array.isArray(theirNeed) ? getChangesToSend(backend, theirHave, theirNeed) : [] + let changesToSend = + Array.isArray(theirHave) && Array.isArray(theirNeed) + ? getChangesToSend(backend, theirHave, theirNeed) + : [] // If the heads are equal, we're in sync and don't need to do anything further - const headsUnchanged = Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads) - const headsEqual = Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads) + const headsUnchanged = + Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads) + const headsEqual = + Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads) if (headsUnchanged && headsEqual && changesToSend.length === 0) { // no need to send a sync message if we know we're synced! return [syncState, null] @@ -375,12 +434,19 @@ function generateSyncMessage(backend, syncState) { // TODO: this recomputes the SHA-256 hash of each change; we should restructure this to avoid the // unnecessary recomputation - changesToSend = changesToSend.filter(change => !sentHashes[decodeChangeMeta(change, true).hash]) + changesToSend = changesToSend.filter( + change => !sentHashes[decodeChangeMeta(change, true).hash] + ) // Regular response to a sync message: send any changes that the other node // doesn't have. We leave the "have" field empty because the previous message // generated by `syncStart` already indicated what changes we have. 
- const syncMessage = {heads: ourHeads, have: ourHave, need: ourNeed, changes: changesToSend} + const syncMessage = { + heads: ourHeads, + have: ourHave, + need: ourNeed, + changes: changesToSend, + } if (changesToSend.length > 0) { sentHashes = copyObject(sentHashes) for (const change of changesToSend) { @@ -388,7 +454,10 @@ function generateSyncMessage(backend, syncState) { } } - syncState = Object.assign({}, syncState, {lastSentHeads: ourHeads, sentHashes}) + syncState = Object.assign({}, syncState, { + lastSentHeads: ourHeads, + sentHashes, + }) return [syncState, encodeSyncMessage(syncMessage)] } @@ -406,13 +475,14 @@ function generateSyncMessage(backend, syncState) { * another peer, that means that peer had those changes, and therefore we now both know about them. */ function advanceHeads(myOldHeads, myNewHeads, ourOldSharedHeads) { - const newHeads = myNewHeads.filter((head) => !myOldHeads.includes(head)) - const commonHeads = ourOldSharedHeads.filter((head) => myNewHeads.includes(head)) + const newHeads = myNewHeads.filter(head => !myOldHeads.includes(head)) + const commonHeads = ourOldSharedHeads.filter(head => + myNewHeads.includes(head) + ) const advancedHeads = [...new Set([...newHeads, ...commonHeads])].sort() return advancedHeads } - /** * Given a backend, a message message and the state of our peer, apply any changes, update what * we believe about the peer, and (if there were applied changes) produce a patch for the frontend @@ -422,10 +492,13 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) { throw new Error("generateSyncMessage called with no Automerge document") } if (!oldSyncState) { - throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()") + throw new Error( + "generateSyncMessage requires a syncState, which can be created with initSyncState()" + ) } - let { sharedHeads, lastSentHeads, sentHashes } = oldSyncState, patch = null + let { sharedHeads, lastSentHeads, sentHashes } = 
oldSyncState, + patch = null const message = decodeSyncMessage(binaryMessage) const beforeHeads = Backend.getHeads(backend) @@ -434,18 +507,27 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) { // changes without applying them. The set of changes may also be incomplete if the sender decided // to break a large set of changes into chunks. if (message.changes.length > 0) { - [backend, patch] = Backend.applyChanges(backend, message.changes) - sharedHeads = advanceHeads(beforeHeads, Backend.getHeads(backend), sharedHeads) + ;[backend, patch] = Backend.applyChanges(backend, message.changes) + sharedHeads = advanceHeads( + beforeHeads, + Backend.getHeads(backend), + sharedHeads + ) } // If heads are equal, indicate we don't need to send a response message - if (message.changes.length === 0 && compareArrays(message.heads, beforeHeads)) { + if ( + message.changes.length === 0 && + compareArrays(message.heads, beforeHeads) + ) { lastSentHeads = message.heads } // If all of the remote heads are known to us, that means either our heads are equal, or we are // ahead of the remote peer. In this case, take the remote heads to be our shared heads. 
- const knownHeads = message.heads.filter(head => Backend.getChangeByHash(backend, head)) + const knownHeads = message.heads.filter(head => + Backend.getChangeByHash(backend, head) + ) if (knownHeads.length === message.heads.length) { sharedHeads = message.heads // If the remote peer has lost all its data, reset our state to perform a full resync @@ -467,14 +549,18 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) { theirHave: message.have, // the information we need to calculate the changes they need theirHeads: message.heads, theirNeed: message.need, - sentHashes + sentHashes, } return [backend, syncState, patch] } module.exports = { - receiveSyncMessage, generateSyncMessage, - encodeSyncMessage, decodeSyncMessage, - initSyncState, encodeSyncState, decodeSyncState, - BloomFilter // BloomFilter is a private API, exported only for testing purposes + receiveSyncMessage, + generateSyncMessage, + encodeSyncMessage, + decodeSyncMessage, + initSyncState, + encodeSyncState, + decodeSyncState, + BloomFilter, // BloomFilter is a private API, exported only for testing purposes } diff --git a/javascript/test/legacy_tests.ts b/javascript/test/legacy_tests.ts index c5c88275..477a5545 100644 --- a/javascript/test/legacy_tests.ts +++ b/javascript/test/legacy_tests.ts @@ -1,7 +1,7 @@ -import * as assert from 'assert' -import * as Automerge from '../src' -import { assertEqualsOneOf } from './helpers' -import { decodeChange } from './legacy/columnar' +import * as assert from "assert" +import * as Automerge from "../src" +import { assertEqualsOneOf } from "./helpers" +import { decodeChange } from "./legacy/columnar" const UUID_PATTERN = /^[0-9a-f]{32}$/ const OPID_PATTERN = /^[0-9]+@([0-9a-f][0-9a-f])*$/ @@ -13,61 +13,60 @@ const OPID_PATTERN = /^[0-9]+@([0-9a-f][0-9a-f])*$/ // TODO - on-pass load() & reconstruct change from opset // TODO - micro-patches (needed for fully hydrated object in js) // TODO - valueAt(heads) / GC -// +// // AUTOMERGE UNSUPPORTED // // 
TODO - patchCallback - -describe('Automerge', () => { - describe('initialization ', () => { - it('should initially be an empty map', () => { +describe("Automerge", () => { + describe("initialization ", () => { + it("should initially be an empty map", () => { const doc = Automerge.init() assert.deepStrictEqual(doc, {}) }) - it('should allow instantiating from an existing object', () => { + it("should allow instantiating from an existing object", () => { const initialState = { birds: { wrens: 3, magpies: 4 } } const doc = Automerge.from(initialState) assert.deepStrictEqual(doc, initialState) }) - it('should allow merging of an object initialized with `from`', () => { + it("should allow merging of an object initialized with `from`", () => { let doc1 = Automerge.from({ cards: [] }) let doc2 = Automerge.merge(Automerge.init(), doc1) assert.deepStrictEqual(doc2, { cards: [] }) }) - it('should allow passing an actorId when instantiating from an existing object', () => { - const actorId = '1234' + it("should allow passing an actorId when instantiating from an existing object", () => { + const actorId = "1234" let doc = Automerge.from({ foo: 1 }, actorId) - assert.strictEqual(Automerge.getActorId(doc), '1234') + assert.strictEqual(Automerge.getActorId(doc), "1234") }) - it('accepts an empty object as initial state', () => { + it("accepts an empty object as initial state", () => { const doc = Automerge.from({}) assert.deepStrictEqual(doc, {}) }) - it('accepts an array as initial state, but converts it to an object', () => { + it("accepts an array as initial state, but converts it to an object", () => { // @ts-ignore - const doc = Automerge.from(['a', 'b', 'c']) - assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' }) + const doc = Automerge.from(["a", "b", "c"]) + assert.deepStrictEqual(doc, { "0": "a", "1": "b", "2": "c" }) }) - it('accepts strings as initial values, but treats them as an array of characters', () => { + it("accepts strings as initial values, but 
treats them as an array of characters", () => { // @ts-ignore - const doc = Automerge.from('abc') - assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' }) + const doc = Automerge.from("abc") + assert.deepStrictEqual(doc, { "0": "a", "1": "b", "2": "c" }) }) - it('ignores numbers provided as initial values', () => { + it("ignores numbers provided as initial values", () => { // @ts-ignore const doc = Automerge.from(123) assert.deepStrictEqual(doc, {}) }) - it('ignores booleans provided as initial values', () => { + it("ignores booleans provided as initial values", () => { // @ts-ignore const doc1 = Automerge.from(false) assert.deepStrictEqual(doc1, {}) @@ -77,550 +76,701 @@ describe('Automerge', () => { }) }) - describe('sequential use', () => { + describe("sequential use", () => { let s1: Automerge.Doc, s2: Automerge.Doc beforeEach(() => { s1 = Automerge.init("aabbcc") }) - it('should not mutate objects', () => { - s2 = Automerge.change(s1, doc => doc.foo = 'bar') + it("should not mutate objects", () => { + s2 = Automerge.change(s1, doc => (doc.foo = "bar")) assert.strictEqual(s1.foo, undefined) - assert.strictEqual(s2.foo, 'bar') + assert.strictEqual(s2.foo, "bar") }) - it('changes should be retrievable', () => { + it("changes should be retrievable", () => { const change1 = Automerge.getLastLocalChange(s1) - s2 = Automerge.change(s1, doc => doc.foo = 'bar') + s2 = Automerge.change(s1, doc => (doc.foo = "bar")) const change2 = Automerge.getLastLocalChange(s2) assert.strictEqual(change1, undefined) const change = Automerge.decodeChange(change2!) 
assert.deepStrictEqual(change, { - actor: change.actor, deps: [], seq: 1, startOp: 1, - hash: change.hash, message: null, time: change.time, + actor: change.actor, + deps: [], + seq: 1, + startOp: 1, + hash: change.hash, + message: null, + time: change.time, ops: [ - {obj: '_root', key: 'foo', action: 'makeText', pred: []}, - {action: 'set', elemId: '_head', insert: true, obj: '1@aabbcc', pred: [], value: 'b' }, - {action: 'set', elemId: '2@aabbcc', insert: true, obj: '1@aabbcc', pred: [], value: 'a' }, - {action: 'set', elemId: '3@aabbcc', insert: true, obj: '1@aabbcc', pred: [], value: 'r' }] + { obj: "_root", key: "foo", action: "makeText", pred: [] }, + { + action: "set", + elemId: "_head", + insert: true, + obj: "1@aabbcc", + pred: [], + value: "b", + }, + { + action: "set", + elemId: "2@aabbcc", + insert: true, + obj: "1@aabbcc", + pred: [], + value: "a", + }, + { + action: "set", + elemId: "3@aabbcc", + insert: true, + obj: "1@aabbcc", + pred: [], + value: "r", + }, + ], }) }) - it('should not register any conflicts on repeated assignment', () => { - assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined) - s1 = Automerge.change(s1, 'change', doc => doc.foo = 'one') - assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined) - s1 = Automerge.change(s1, 'change', doc => doc.foo = 'two') - assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined) + it("should not register any conflicts on repeated assignment", () => { + assert.strictEqual(Automerge.getConflicts(s1, "foo"), undefined) + s1 = Automerge.change(s1, "change", doc => (doc.foo = "one")) + assert.strictEqual(Automerge.getConflicts(s1, "foo"), undefined) + s1 = Automerge.change(s1, "change", doc => (doc.foo = "two")) + assert.strictEqual(Automerge.getConflicts(s1, "foo"), undefined) }) - describe('changes', () => { - it('should group several changes', () => { - s2 = Automerge.change(s1, 'change message', doc => { - doc.first = 'one' - assert.strictEqual(doc.first, 'one') - 
doc.second = 'two' + describe("changes", () => { + it("should group several changes", () => { + s2 = Automerge.change(s1, "change message", doc => { + doc.first = "one" + assert.strictEqual(doc.first, "one") + doc.second = "two" assert.deepStrictEqual(doc, { - first: 'one', second: 'two' + first: "one", + second: "two", }) }) assert.deepStrictEqual(s1, {}) - assert.deepStrictEqual(s2, {first: 'one', second: 'two'}) + assert.deepStrictEqual(s2, { first: "one", second: "two" }) }) - it('should freeze objects if desired', () => { - s1 = Automerge.init({freeze: true}) - s2 = Automerge.change(s1, doc => doc.foo = 'bar') + it("should freeze objects if desired", () => { + s1 = Automerge.init({ freeze: true }) + s2 = Automerge.change(s1, doc => (doc.foo = "bar")) try { // @ts-ignore - s2.foo = 'lemon' - } catch (e) { } - assert.strictEqual(s2.foo, 'bar') + s2.foo = "lemon" + } catch (e) {} + assert.strictEqual(s2.foo, "bar") let deleted = false try { // @ts-ignore deleted = delete s2.foo - } catch (e) { } - assert.strictEqual(s2.foo, 'bar') + } catch (e) {} + assert.strictEqual(s2.foo, "bar") assert.strictEqual(deleted, false) Automerge.change(s2, () => { try { // @ts-ignore - s2.foo = 'lemon' - } catch (e) { } - assert.strictEqual(s2.foo, 'bar') + s2.foo = "lemon" + } catch (e) {} + assert.strictEqual(s2.foo, "bar") }) - assert.throws(() => { Object.assign(s2, {x: 4}) }) + assert.throws(() => { + Object.assign(s2, { x: 4 }) + }) assert.strictEqual(s2.x, undefined) }) - it('should allow repeated reading and writing of values', () => { - s2 = Automerge.change(s1, 'change message', doc => { - doc.value = 'a' - assert.strictEqual(doc.value, 'a') - doc.value = 'b' - doc.value = 'c' - assert.strictEqual(doc.value, 'c') + it("should allow repeated reading and writing of values", () => { + s2 = Automerge.change(s1, "change message", doc => { + doc.value = "a" + assert.strictEqual(doc.value, "a") + doc.value = "b" + doc.value = "c" + assert.strictEqual(doc.value, "c") }) 
assert.deepStrictEqual(s1, {}) - assert.deepStrictEqual(s2, {value: 'c'}) + assert.deepStrictEqual(s2, { value: "c" }) }) - it('should not record conflicts when writing the same field several times within one change', () => { - s1 = Automerge.change(s1, 'change message', doc => { - doc.value = 'a' - doc.value = 'b' - doc.value = 'c' + it("should not record conflicts when writing the same field several times within one change", () => { + s1 = Automerge.change(s1, "change message", doc => { + doc.value = "a" + doc.value = "b" + doc.value = "c" }) - assert.strictEqual(s1.value, 'c') - assert.strictEqual(Automerge.getConflicts(s1, 'value'), undefined) + assert.strictEqual(s1.value, "c") + assert.strictEqual(Automerge.getConflicts(s1, "value"), undefined) }) - it('should return the unchanged state object if nothing changed', () => { + it("should return the unchanged state object if nothing changed", () => { s2 = Automerge.change(s1, () => {}) assert.strictEqual(s2, s1) }) - it('should ignore field updates that write the existing value', () => { - s1 = Automerge.change(s1, doc => doc.field = 123) - s2 = Automerge.change(s1, doc => doc.field = 123) + it("should ignore field updates that write the existing value", () => { + s1 = Automerge.change(s1, doc => (doc.field = 123)) + s2 = Automerge.change(s1, doc => (doc.field = 123)) assert.strictEqual(s2, s1) }) - it('should not ignore field updates that resolve a conflict', () => { + it("should not ignore field updates that resolve a conflict", () => { s2 = Automerge.merge(Automerge.init(), s1) - s1 = Automerge.change(s1, doc => doc.field = 123) - s2 = Automerge.change(s2, doc => doc.field = 321) + s1 = Automerge.change(s1, doc => (doc.field = 123)) + s2 = Automerge.change(s2, doc => (doc.field = 321)) s1 = Automerge.merge(s1, s2) - assert.strictEqual(Object.keys(Automerge.getConflicts(s1, 'field')!).length, 2) - const resolved = Automerge.change(s1, doc => doc.field = s1.field) + assert.strictEqual( + 
Object.keys(Automerge.getConflicts(s1, "field")!).length, + 2 + ) + const resolved = Automerge.change(s1, doc => (doc.field = s1.field)) assert.notStrictEqual(resolved, s1) - assert.deepStrictEqual(resolved, {field: s1.field}) - assert.strictEqual(Automerge.getConflicts(resolved, 'field'), undefined) + assert.deepStrictEqual(resolved, { field: s1.field }) + assert.strictEqual(Automerge.getConflicts(resolved, "field"), undefined) }) - it('should ignore list element updates that write the existing value', () => { - s1 = Automerge.change(s1, doc => doc.list = [123]) - s2 = Automerge.change(s1, doc => doc.list[0] = 123) + it("should ignore list element updates that write the existing value", () => { + s1 = Automerge.change(s1, doc => (doc.list = [123])) + s2 = Automerge.change(s1, doc => (doc.list[0] = 123)) assert.strictEqual(s2, s1) }) - it('should not ignore list element updates that resolve a conflict', () => { - s1 = Automerge.change(s1, doc => doc.list = [1]) + it("should not ignore list element updates that resolve a conflict", () => { + s1 = Automerge.change(s1, doc => (doc.list = [1])) s2 = Automerge.merge(Automerge.init(), s1) - s1 = Automerge.change(s1, doc => doc.list[0] = 123) - s2 = Automerge.change(s2, doc => doc.list[0] = 321) + s1 = Automerge.change(s1, doc => (doc.list[0] = 123)) + s2 = Automerge.change(s2, doc => (doc.list[0] = 321)) s1 = Automerge.merge(s1, s2) assert.deepStrictEqual(Automerge.getConflicts(s1.list, 0), { [`3@${Automerge.getActorId(s1)}`]: 123, - [`3@${Automerge.getActorId(s2)}`]: 321 + [`3@${Automerge.getActorId(s2)}`]: 321, }) - const resolved = Automerge.change(s1, doc => doc.list[0] = s1.list[0]) + const resolved = Automerge.change(s1, doc => (doc.list[0] = s1.list[0])) assert.deepStrictEqual(resolved, s1) assert.notStrictEqual(resolved, s1) assert.strictEqual(Automerge.getConflicts(resolved.list, 0), undefined) }) - it('should sanity-check arguments', () => { - s1 = Automerge.change(s1, doc => doc.nested = {}) - // @ts-ignore - 
assert.throws(() => { Automerge.change({}, doc => doc.foo = 'bar') }, /must be the document root/) - // @ts-ignore - assert.throws(() => { Automerge.change(s1.nested, doc => doc.foo = 'bar') }, /must be the document root/) + it("should sanity-check arguments", () => { + s1 = Automerge.change(s1, doc => (doc.nested = {})) + assert.throws(() => { + // @ts-ignore + Automerge.change({}, doc => (doc.foo = "bar")) + }, /must be the document root/) + assert.throws(() => { + // @ts-ignore + Automerge.change(s1.nested, doc => (doc.foo = "bar")) + }, /must be the document root/) }) - it('should not allow nested change blocks', () => { + it("should not allow nested change blocks", () => { assert.throws(() => { Automerge.change(s1, doc1 => { Automerge.change(doc1, doc2 => { // @ts-ignore - doc2.foo = 'bar' + doc2.foo = "bar" }) }) }, /Calls to Automerge.change cannot be nested/) assert.throws(() => { s1 = Automerge.change(s1, doc1 => { - s2 = Automerge.change(s1, doc2 => doc2.two = 2) + s2 = Automerge.change(s1, doc2 => (doc2.two = 2)) doc1.one = 1 }) }, /Attempting to change an outdated document/) }) - it('should not allow the same base document to be used for multiple changes', () => { + it("should not allow the same base document to be used for multiple changes", () => { assert.throws(() => { - Automerge.change(s1, doc => doc.one = 1) - Automerge.change(s1, doc => doc.two = 2) + Automerge.change(s1, doc => (doc.one = 1)) + Automerge.change(s1, doc => (doc.two = 2)) }, /Attempting to change an outdated document/) }) - it('should allow a document to be cloned', () => { - s1 = Automerge.change(s1, doc => doc.zero = 0) + it("should allow a document to be cloned", () => { + s1 = Automerge.change(s1, doc => (doc.zero = 0)) s2 = Automerge.clone(s1) - s1 = Automerge.change(s1, doc => doc.one = 1) - s2 = Automerge.change(s2, doc => doc.two = 2) - assert.deepStrictEqual(s1, {zero: 0, one: 1}) - assert.deepStrictEqual(s2, {zero: 0, two: 2}) + s1 = Automerge.change(s1, doc => (doc.one 
= 1)) + s2 = Automerge.change(s2, doc => (doc.two = 2)) + assert.deepStrictEqual(s1, { zero: 0, one: 1 }) + assert.deepStrictEqual(s2, { zero: 0, two: 2 }) Automerge.free(s1) Automerge.free(s2) }) - it('should work with Object.assign merges', () => { + it("should work with Object.assign merges", () => { s1 = Automerge.change(s1, doc1 => { - doc1.stuff = {foo: 'bar', baz: 'blur'} + doc1.stuff = { foo: "bar", baz: "blur" } }) s1 = Automerge.change(s1, doc1 => { - doc1.stuff = Object.assign({}, doc1.stuff, {baz: 'updated!'}) + doc1.stuff = Object.assign({}, doc1.stuff, { baz: "updated!" }) }) - assert.deepStrictEqual(s1, {stuff: {foo: 'bar', baz: 'updated!'}}) + assert.deepStrictEqual(s1, { stuff: { foo: "bar", baz: "updated!" } }) }) - it('should support Date objects in maps', () => { + it("should support Date objects in maps", () => { const now = new Date() - s1 = Automerge.change(s1, doc => doc.now = now) + s1 = Automerge.change(s1, doc => (doc.now = now)) let changes = Automerge.getAllChanges(s1) ;[s2] = Automerge.applyChanges(Automerge.init(), changes) assert.strictEqual(s2.now instanceof Date, true) assert.strictEqual(s2.now.getTime(), now.getTime()) }) - it('should support Date objects in lists', () => { + it("should support Date objects in lists", () => { const now = new Date() - s1 = Automerge.change(s1, doc => doc.list = [now]) + s1 = Automerge.change(s1, doc => (doc.list = [now])) let changes = Automerge.getAllChanges(s1) ;[s2] = Automerge.applyChanges(Automerge.init(), changes) assert.strictEqual(s2.list[0] instanceof Date, true) assert.strictEqual(s2.list[0].getTime(), now.getTime()) }) - it('should call patchCallback if supplied', () => { - const callbacks: Array<{patches: Array, before: Automerge.Doc, after: Automerge.Doc}> = [] - const s2 = Automerge.change(s1, { - patchCallback: (patches, before, after) => callbacks.push({patches, before, after}) - }, doc => { - doc.birds = ['Goldfinch'] - }) + it("should call patchCallback if supplied", () => { + 
const callbacks: Array<{ + patches: Array + before: Automerge.Doc + after: Automerge.Doc + }> = [] + const s2 = Automerge.change( + s1, + { + patchCallback: (patches, before, after) => + callbacks.push({ patches, before, after }), + }, + doc => { + doc.birds = ["Goldfinch"] + } + ) assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patches[0], { action: "put", path: ["birds"], value: [] }) - assert.deepStrictEqual(callbacks[0].patches[1], { action: "insert", path: ["birds",0], values: [""] }) - assert.deepStrictEqual(callbacks[0].patches[2], { action: "splice", path: ["birds",0, 0], value: "Goldfinch" }) + assert.deepStrictEqual(callbacks[0].patches[0], { + action: "put", + path: ["birds"], + value: [], + }) + assert.deepStrictEqual(callbacks[0].patches[1], { + action: "insert", + path: ["birds", 0], + values: [""], + }) + assert.deepStrictEqual(callbacks[0].patches[2], { + action: "splice", + path: ["birds", 0, 0], + value: "Goldfinch", + }) assert.strictEqual(callbacks[0].before, s1) assert.strictEqual(callbacks[0].after, s2) }) - it('should call a patchCallback set up on document initialisation', () => { - const callbacks: Array<{patches: Array, before: Automerge.Doc, after: Automerge.Doc}> = [] + it("should call a patchCallback set up on document initialisation", () => { + const callbacks: Array<{ + patches: Array + before: Automerge.Doc + after: Automerge.Doc + }> = [] s1 = Automerge.init({ - patchCallback: (patches, before, after) => callbacks.push({patches, before, after }) + patchCallback: (patches, before, after) => + callbacks.push({ patches, before, after }), }) - const s2 = Automerge.change(s1, doc => doc.bird = 'Goldfinch') + const s2 = Automerge.change(s1, doc => (doc.bird = "Goldfinch")) assert.strictEqual(callbacks.length, 1) assert.deepStrictEqual(callbacks[0].patches[0], { - action: "put", path: ["bird"], value: "" + action: "put", + path: ["bird"], + value: "", }) assert.deepStrictEqual(callbacks[0].patches[1], { - 
action: "splice", path: ["bird", 0], value: "Goldfinch" + action: "splice", + path: ["bird", 0], + value: "Goldfinch", }) assert.strictEqual(callbacks[0].before, s1) assert.strictEqual(callbacks[0].after, s2) }) }) - describe('emptyChange()', () => { - it('should append an empty change to the history', () => { - s1 = Automerge.change(s1, 'first change', doc => doc.field = 123) - s2 = Automerge.emptyChange(s1, 'empty change') + describe("emptyChange()", () => { + it("should append an empty change to the history", () => { + s1 = Automerge.change(s1, "first change", doc => (doc.field = 123)) + s2 = Automerge.emptyChange(s1, "empty change") assert.notStrictEqual(s2, s1) assert.deepStrictEqual(s2, s1) - assert.deepStrictEqual(Automerge.getHistory(s2).map(state => state.change.message), ['first change', 'empty change']) + assert.deepStrictEqual( + Automerge.getHistory(s2).map(state => state.change.message), + ["first change", "empty change"] + ) }) - it('should reference dependencies', () => { - s1 = Automerge.change(s1, doc => doc.field = 123) + it("should reference dependencies", () => { + s1 = Automerge.change(s1, doc => (doc.field = 123)) s2 = Automerge.merge(Automerge.init(), s1) - s2 = Automerge.change(s2, doc => doc.other = 'hello') + s2 = Automerge.change(s2, doc => (doc.other = "hello")) s1 = Automerge.emptyChange(Automerge.merge(s1, s2)) const history = Automerge.getHistory(s1) const emptyChange = history[2].change - assert.deepStrictEqual(emptyChange.deps, [history[0].change.hash, history[1].change.hash].sort()) + assert.deepStrictEqual( + emptyChange.deps, + [history[0].change.hash, history[1].change.hash].sort() + ) assert.deepStrictEqual(emptyChange.ops, []) }) }) - describe('root object', () => { - it('should handle single-property assignment', () => { - s1 = Automerge.change(s1, 'set bar', doc => doc.foo = 'bar') - s1 = Automerge.change(s1, 'set zap', doc => doc.zip = 'zap') - assert.strictEqual(s1.foo, 'bar') - assert.strictEqual(s1.zip, 'zap') - 
assert.deepStrictEqual(s1, {foo: 'bar', zip: 'zap'}) + describe("root object", () => { + it("should handle single-property assignment", () => { + s1 = Automerge.change(s1, "set bar", doc => (doc.foo = "bar")) + s1 = Automerge.change(s1, "set zap", doc => (doc.zip = "zap")) + assert.strictEqual(s1.foo, "bar") + assert.strictEqual(s1.zip, "zap") + assert.deepStrictEqual(s1, { foo: "bar", zip: "zap" }) }) - it('should allow floating-point values', () => { - s1 = Automerge.change(s1, doc => doc.number = 1589032171.1) + it("should allow floating-point values", () => { + s1 = Automerge.change(s1, doc => (doc.number = 1589032171.1)) assert.strictEqual(s1.number, 1589032171.1) }) - it('should handle multi-property assignment', () => { - s1 = Automerge.change(s1, 'multi-assign', doc => { - Object.assign(doc, {foo: 'bar', answer: 42}) + it("should handle multi-property assignment", () => { + s1 = Automerge.change(s1, "multi-assign", doc => { + Object.assign(doc, { foo: "bar", answer: 42 }) }) - assert.strictEqual(s1.foo, 'bar') + assert.strictEqual(s1.foo, "bar") assert.strictEqual(s1.answer, 42) - assert.deepStrictEqual(s1, {foo: 'bar', answer: 42}) + assert.deepStrictEqual(s1, { foo: "bar", answer: 42 }) }) - it('should handle root property deletion', () => { - s1 = Automerge.change(s1, 'set foo', doc => { doc.foo = 'bar'; doc.something = null }) - s1 = Automerge.change(s1, 'del foo', doc => { delete doc.foo }) + it("should handle root property deletion", () => { + s1 = Automerge.change(s1, "set foo", doc => { + doc.foo = "bar" + doc.something = null + }) + s1 = Automerge.change(s1, "del foo", doc => { + delete doc.foo + }) assert.strictEqual(s1.foo, undefined) assert.strictEqual(s1.something, null) - assert.deepStrictEqual(s1, {something: null}) + assert.deepStrictEqual(s1, { something: null }) }) - it('should follow JS delete behavior', () => { - s1 = Automerge.change(s1, 'set foo', doc => { doc.foo = 'bar' }) + it("should follow JS delete behavior", () => { + s1 = 
Automerge.change(s1, "set foo", doc => { + doc.foo = "bar" + }) let deleted - s1 = Automerge.change(s1, 'del foo', doc => { + s1 = Automerge.change(s1, "del foo", doc => { deleted = delete doc.foo }) assert.strictEqual(deleted, true) let deleted2 assert.doesNotThrow(() => { - s1 = Automerge.change(s1, 'del baz', doc => { + s1 = Automerge.change(s1, "del baz", doc => { deleted2 = delete doc.baz }) }) assert.strictEqual(deleted2, true) }) - it('should allow the type of a property to be changed', () => { - s1 = Automerge.change(s1, 'set number', doc => doc.prop = 123) + it("should allow the type of a property to be changed", () => { + s1 = Automerge.change(s1, "set number", doc => (doc.prop = 123)) assert.strictEqual(s1.prop, 123) - s1 = Automerge.change(s1, 'set string', doc => doc.prop = '123') - assert.strictEqual(s1.prop, '123') - s1 = Automerge.change(s1, 'set null', doc => doc.prop = null) + s1 = Automerge.change(s1, "set string", doc => (doc.prop = "123")) + assert.strictEqual(s1.prop, "123") + s1 = Automerge.change(s1, "set null", doc => (doc.prop = null)) assert.strictEqual(s1.prop, null) - s1 = Automerge.change(s1, 'set bool', doc => doc.prop = true) + s1 = Automerge.change(s1, "set bool", doc => (doc.prop = true)) assert.strictEqual(s1.prop, true) }) - it('should require property names to be valid', () => { + it("should require property names to be valid", () => { assert.throws(() => { - Automerge.change(s1, 'foo', doc => doc[''] = 'x') + Automerge.change(s1, "foo", doc => (doc[""] = "x")) }, /must not be an empty string/) }) - it('should not allow assignment of unsupported datatypes', () => { + it("should not allow assignment of unsupported datatypes", () => { Automerge.change(s1, doc => { - assert.throws(() => { doc.foo = undefined }, /Unsupported type of value: undefined/) - assert.throws(() => { doc.foo = {prop: undefined} }, /Unsupported type of value: undefined/) - assert.throws(() => { doc.foo = () => {} }, /Unsupported type of value: function/) - 
assert.throws(() => { doc.foo = Symbol('foo') }, /Unsupported type of value: symbol/) + assert.throws(() => { + doc.foo = undefined + }, /Unsupported type of value: undefined/) + assert.throws(() => { + doc.foo = { prop: undefined } + }, /Unsupported type of value: undefined/) + assert.throws(() => { + doc.foo = () => {} + }, /Unsupported type of value: function/) + assert.throws(() => { + doc.foo = Symbol("foo") + }, /Unsupported type of value: symbol/) }) }) }) - describe('nested maps', () => { - it('should assign an objectId to nested maps', () => { - s1 = Automerge.change(s1, doc => { doc.nested = {} }) + describe("nested maps", () => { + it("should assign an objectId to nested maps", () => { + s1 = Automerge.change(s1, doc => { + doc.nested = {} + }) let id = Automerge.getObjectId(s1.nested) - assert.strictEqual(OPID_PATTERN.test(Automerge.getObjectId(s1.nested)!), true) - assert.notEqual(Automerge.getObjectId(s1.nested), '_root') + assert.strictEqual( + OPID_PATTERN.test(Automerge.getObjectId(s1.nested)!), + true + ) + assert.notEqual(Automerge.getObjectId(s1.nested), "_root") }) - it('should handle assignment of a nested property', () => { - s1 = Automerge.change(s1, 'first change', doc => { + it("should handle assignment of a nested property", () => { + s1 = Automerge.change(s1, "first change", doc => { doc.nested = {} - doc.nested.foo = 'bar' + doc.nested.foo = "bar" }) - s1 = Automerge.change(s1, 'second change', doc => { + s1 = Automerge.change(s1, "second change", doc => { doc.nested.one = 1 }) - assert.deepStrictEqual(s1, {nested: {foo: 'bar', one: 1}}) - assert.deepStrictEqual(s1.nested, {foo: 'bar', one: 1}) - assert.strictEqual(s1.nested.foo, 'bar') + assert.deepStrictEqual(s1, { nested: { foo: "bar", one: 1 } }) + assert.deepStrictEqual(s1.nested, { foo: "bar", one: 1 }) + assert.strictEqual(s1.nested.foo, "bar") assert.strictEqual(s1.nested.one, 1) }) - it('should handle assignment of an object literal', () => { + it("should handle assignment of 
an object literal", () => { s1 = Automerge.change(s1, doc => { - doc.textStyle = {bold: false, fontSize: 12} + doc.textStyle = { bold: false, fontSize: 12 } }) - assert.deepStrictEqual(s1, {textStyle: {bold: false, fontSize: 12}}) - assert.deepStrictEqual(s1.textStyle, {bold: false, fontSize: 12}) + assert.deepStrictEqual(s1, { + textStyle: { bold: false, fontSize: 12 }, + }) + assert.deepStrictEqual(s1.textStyle, { bold: false, fontSize: 12 }) assert.strictEqual(s1.textStyle.bold, false) assert.strictEqual(s1.textStyle.fontSize, 12) }) - it('should handle assignment of multiple nested properties', () => { + it("should handle assignment of multiple nested properties", () => { s1 = Automerge.change(s1, doc => { - doc.textStyle = {bold: false, fontSize: 12} - Object.assign(doc.textStyle, {typeface: 'Optima', fontSize: 14}) + doc.textStyle = { bold: false, fontSize: 12 } + Object.assign(doc.textStyle, { typeface: "Optima", fontSize: 14 }) }) - assert.strictEqual(s1.textStyle.typeface, 'Optima') + assert.strictEqual(s1.textStyle.typeface, "Optima") assert.strictEqual(s1.textStyle.bold, false) assert.strictEqual(s1.textStyle.fontSize, 14) - assert.deepStrictEqual(s1.textStyle, {typeface: 'Optima', bold: false, fontSize: 14}) + assert.deepStrictEqual(s1.textStyle, { + typeface: "Optima", + bold: false, + fontSize: 14, + }) }) - it('should handle arbitrary-depth nesting', () => { + it("should handle arbitrary-depth nesting", () => { s1 = Automerge.change(s1, doc => { - doc.a = {b: {c: {d: {e: {f: {g: 'h'}}}}}} + doc.a = { b: { c: { d: { e: { f: { g: "h" } } } } } } }) s1 = Automerge.change(s1, doc => { - doc.a.b.c.d.e.f.i = 'j' + doc.a.b.c.d.e.f.i = "j" }) - assert.deepStrictEqual(s1, {a: { b: { c: { d: { e: { f: { g: 'h', i: 'j'}}}}}}}) - assert.strictEqual(s1.a.b.c.d.e.f.g, 'h') - assert.strictEqual(s1.a.b.c.d.e.f.i, 'j') + assert.deepStrictEqual(s1, { + a: { b: { c: { d: { e: { f: { g: "h", i: "j" } } } } } }, + }) + assert.strictEqual(s1.a.b.c.d.e.f.g, "h") + 
assert.strictEqual(s1.a.b.c.d.e.f.i, "j") }) - it('should allow an old object to be replaced with a new one', () => { - s1 = Automerge.change(s1, 'change 1', doc => { - doc.myPet = {species: 'dog', legs: 4, breed: 'dachshund'} + it("should allow an old object to be replaced with a new one", () => { + s1 = Automerge.change(s1, "change 1", doc => { + doc.myPet = { species: "dog", legs: 4, breed: "dachshund" } }) - let s2 = Automerge.change(s1, 'change 2', doc => { - doc.myPet = {species: 'koi', variety: '紅白', colors: {red: true, white: true, black: false}} + let s2 = Automerge.change(s1, "change 2", doc => { + doc.myPet = { + species: "koi", + variety: "紅白", + colors: { red: true, white: true, black: false }, + } }) assert.deepStrictEqual(s1.myPet, { - species: 'dog', legs: 4, breed: 'dachshund' + species: "dog", + legs: 4, + breed: "dachshund", }) - assert.strictEqual(s1.myPet.breed, 'dachshund') + assert.strictEqual(s1.myPet.breed, "dachshund") assert.deepStrictEqual(s2.myPet, { - species: 'koi', variety: '紅白', - colors: {red: true, white: true, black: false} + species: "koi", + variety: "紅白", + colors: { red: true, white: true, black: false }, }) // @ts-ignore assert.strictEqual(s2.myPet.breed, undefined) - assert.strictEqual(s2.myPet.variety, '紅白') + assert.strictEqual(s2.myPet.variety, "紅白") }) - it('should allow fields to be changed between primitive and nested map', () => { - s1 = Automerge.change(s1, doc => doc.color = '#ff7f00') - assert.strictEqual(s1.color, '#ff7f00') - s1 = Automerge.change(s1, doc => doc.color = {red: 255, green: 127, blue: 0}) - assert.deepStrictEqual(s1.color, {red: 255, green: 127, blue: 0}) - s1 = Automerge.change(s1, doc => doc.color = '#ff7f00') - assert.strictEqual(s1.color, '#ff7f00') + it("should allow fields to be changed between primitive and nested map", () => { + s1 = Automerge.change(s1, doc => (doc.color = "#ff7f00")) + assert.strictEqual(s1.color, "#ff7f00") + s1 = Automerge.change( + s1, + doc => (doc.color = { red: 255, 
green: 127, blue: 0 }) + ) + assert.deepStrictEqual(s1.color, { red: 255, green: 127, blue: 0 }) + s1 = Automerge.change(s1, doc => (doc.color = "#ff7f00")) + assert.strictEqual(s1.color, "#ff7f00") }) - it('should not allow several references to the same map object', () => { - s1 = Automerge.change(s1, doc => doc.object = {}) + it("should not allow several references to the same map object", () => { + s1 = Automerge.change(s1, doc => (doc.object = {})) assert.throws(() => { - Automerge.change(s1, doc => { doc.x = doc.object }) + Automerge.change(s1, doc => { + doc.x = doc.object + }) }, /Cannot create a reference to an existing document object/) assert.throws(() => { - Automerge.change(s1, doc => { doc.x = s1.object }) + Automerge.change(s1, doc => { + doc.x = s1.object + }) }, /Cannot create a reference to an existing document object/) assert.throws(() => { - Automerge.change(s1, doc => { doc.x = {}; doc.y = doc.x }) + Automerge.change(s1, doc => { + doc.x = {} + doc.y = doc.x + }) }, /Cannot create a reference to an existing document object/) }) - it('should not allow object-copying idioms', () => { + it("should not allow object-copying idioms", () => { s1 = Automerge.change(s1, doc => { - doc.items = [{id: 'id1', name: 'one'}, {id: 'id2', name: 'two'}] + doc.items = [ + { id: "id1", name: "one" }, + { id: "id2", name: "two" }, + ] }) // People who have previously worked with immutable state in JavaScript may be tempted // to use idioms like this, which don't work well with Automerge -- see e.g. 
// https://github.com/automerge/automerge/issues/260 assert.throws(() => { Automerge.change(s1, doc => { - doc.items = [...doc.items, {id: 'id3', name: 'three'}] + doc.items = [...doc.items, { id: "id3", name: "three" }] }) }, /Cannot create a reference to an existing document object/) }) - it('should handle deletion of properties within a map', () => { - s1 = Automerge.change(s1, 'set style', doc => { - doc.textStyle = {typeface: 'Optima', bold: false, fontSize: 12} + it("should handle deletion of properties within a map", () => { + s1 = Automerge.change(s1, "set style", doc => { + doc.textStyle = { typeface: "Optima", bold: false, fontSize: 12 } }) - s1 = Automerge.change(s1, 'non-bold', doc => delete doc.textStyle.bold) + s1 = Automerge.change(s1, "non-bold", doc => delete doc.textStyle.bold) assert.strictEqual(s1.textStyle.bold, undefined) - assert.deepStrictEqual(s1.textStyle, {typeface: 'Optima', fontSize: 12}) + assert.deepStrictEqual(s1.textStyle, { + typeface: "Optima", + fontSize: 12, + }) }) - it('should handle deletion of references to a map', () => { - s1 = Automerge.change(s1, 'make rich text doc', doc => { - Object.assign(doc, {title: 'Hello', textStyle: {typeface: 'Optima', fontSize: 12}}) + it("should handle deletion of references to a map", () => { + s1 = Automerge.change(s1, "make rich text doc", doc => { + Object.assign(doc, { + title: "Hello", + textStyle: { typeface: "Optima", fontSize: 12 }, + }) }) s1 = Automerge.change(s1, doc => delete doc.textStyle) assert.strictEqual(s1.textStyle, undefined) - assert.deepStrictEqual(s1, {title: 'Hello'}) + assert.deepStrictEqual(s1, { title: "Hello" }) }) - it('should validate field names', () => { - s1 = Automerge.change(s1, doc => doc.nested = {}) - assert.throws(() => { Automerge.change(s1, doc => doc.nested[''] = 'x') }, /must not be an empty string/) - assert.throws(() => { Automerge.change(s1, doc => doc.nested = {'': 'x'}) }, /must not be an empty string/) + it("should validate field names", () => 
{ + s1 = Automerge.change(s1, doc => (doc.nested = {})) + assert.throws(() => { + Automerge.change(s1, doc => (doc.nested[""] = "x")) + }, /must not be an empty string/) + assert.throws(() => { + Automerge.change(s1, doc => (doc.nested = { "": "x" })) + }, /must not be an empty string/) }) }) - describe('lists', () => { - it('should allow elements to be inserted', () => { - s1 = Automerge.change(s1, doc => doc.noodles = []) - s1 = Automerge.change(s1, doc => doc.noodles.insertAt(0, 'udon', 'soba')) - s1 = Automerge.change(s1, doc => doc.noodles.insertAt(1, 'ramen')) - assert.deepStrictEqual(s1, {noodles: ['udon', 'ramen', 'soba']}) - assert.deepStrictEqual(s1.noodles, ['udon', 'ramen', 'soba']) - assert.strictEqual(s1.noodles[0], 'udon') - assert.strictEqual(s1.noodles[1], 'ramen') - assert.strictEqual(s1.noodles[2], 'soba') + describe("lists", () => { + it("should allow elements to be inserted", () => { + s1 = Automerge.change(s1, doc => (doc.noodles = [])) + s1 = Automerge.change(s1, doc => + doc.noodles.insertAt(0, "udon", "soba") + ) + s1 = Automerge.change(s1, doc => doc.noodles.insertAt(1, "ramen")) + assert.deepStrictEqual(s1, { noodles: ["udon", "ramen", "soba"] }) + assert.deepStrictEqual(s1.noodles, ["udon", "ramen", "soba"]) + assert.strictEqual(s1.noodles[0], "udon") + assert.strictEqual(s1.noodles[1], "ramen") + assert.strictEqual(s1.noodles[2], "soba") assert.strictEqual(s1.noodles.length, 3) }) - it('should handle assignment of a list literal', () => { - s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba']) - assert.deepStrictEqual(s1, {noodles: ['udon', 'ramen', 'soba']}) - assert.deepStrictEqual(s1.noodles, ['udon', 'ramen', 'soba']) - assert.strictEqual(s1.noodles[0], 'udon') - assert.strictEqual(s1.noodles[1], 'ramen') - assert.strictEqual(s1.noodles[2], 'soba') + it("should handle assignment of a list literal", () => { + s1 = Automerge.change( + s1, + doc => (doc.noodles = ["udon", "ramen", "soba"]) + ) + 
assert.deepStrictEqual(s1, { noodles: ["udon", "ramen", "soba"] }) + assert.deepStrictEqual(s1.noodles, ["udon", "ramen", "soba"]) + assert.strictEqual(s1.noodles[0], "udon") + assert.strictEqual(s1.noodles[1], "ramen") + assert.strictEqual(s1.noodles[2], "soba") assert.strictEqual(s1.noodles[3], undefined) assert.strictEqual(s1.noodles.length, 3) }) - it('should only allow numeric indexes', () => { - s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba']) - s1 = Automerge.change(s1, doc => doc.noodles[1] = 'Ramen!') - assert.strictEqual(s1.noodles[1], 'Ramen!') - s1 = Automerge.change(s1, doc => doc.noodles['1'] = 'RAMEN!!!') - assert.strictEqual(s1.noodles[1], 'RAMEN!!!') - assert.throws(() => { Automerge.change(s1, doc => doc.noodles.favourite = 'udon') }, /list index must be a number/) - assert.throws(() => { Automerge.change(s1, doc => doc.noodles[''] = 'udon') }, /list index must be a number/) - assert.throws(() => { Automerge.change(s1, doc => doc.noodles['1e6'] = 'udon') }, /list index must be a number/) + it("should only allow numeric indexes", () => { + s1 = Automerge.change( + s1, + doc => (doc.noodles = ["udon", "ramen", "soba"]) + ) + s1 = Automerge.change(s1, doc => (doc.noodles[1] = "Ramen!")) + assert.strictEqual(s1.noodles[1], "Ramen!") + s1 = Automerge.change(s1, doc => (doc.noodles["1"] = "RAMEN!!!")) + assert.strictEqual(s1.noodles[1], "RAMEN!!!") + assert.throws(() => { + Automerge.change(s1, doc => (doc.noodles.favourite = "udon")) + }, /list index must be a number/) + assert.throws(() => { + Automerge.change(s1, doc => (doc.noodles[""] = "udon")) + }, /list index must be a number/) + assert.throws(() => { + Automerge.change(s1, doc => (doc.noodles["1e6"] = "udon")) + }, /list index must be a number/) }) - it('should handle deletion of list elements', () => { - s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba']) + it("should handle deletion of list elements", () => { + s1 = Automerge.change( + s1, + doc 
=> (doc.noodles = ["udon", "ramen", "soba"]) + ) s1 = Automerge.change(s1, doc => delete doc.noodles[1]) - assert.deepStrictEqual(s1.noodles, ['udon', 'soba']) + assert.deepStrictEqual(s1.noodles, ["udon", "soba"]) s1 = Automerge.change(s1, doc => doc.noodles.deleteAt(1)) - assert.deepStrictEqual(s1.noodles, ['udon']) - assert.strictEqual(s1.noodles[0], 'udon') + assert.deepStrictEqual(s1.noodles, ["udon"]) + assert.strictEqual(s1.noodles[0], "udon") assert.strictEqual(s1.noodles[1], undefined) assert.strictEqual(s1.noodles[2], undefined) assert.strictEqual(s1.noodles.length, 1) }) - it('should handle assignment of individual list indexes', () => { - s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon', 'ramen', 'soba']) - s1 = Automerge.change(s1, doc => doc.japaneseFood[1] = 'sushi') - assert.deepStrictEqual(s1.japaneseFood, ['udon', 'sushi', 'soba']) - assert.strictEqual(s1.japaneseFood[0], 'udon') - assert.strictEqual(s1.japaneseFood[1], 'sushi') - assert.strictEqual(s1.japaneseFood[2], 'soba') + it("should handle assignment of individual list indexes", () => { + s1 = Automerge.change( + s1, + doc => (doc.japaneseFood = ["udon", "ramen", "soba"]) + ) + s1 = Automerge.change(s1, doc => (doc.japaneseFood[1] = "sushi")) + assert.deepStrictEqual(s1.japaneseFood, ["udon", "sushi", "soba"]) + assert.strictEqual(s1.japaneseFood[0], "udon") + assert.strictEqual(s1.japaneseFood[1], "sushi") + assert.strictEqual(s1.japaneseFood[2], "soba") assert.strictEqual(s1.japaneseFood[3], undefined) assert.strictEqual(s1.japaneseFood.length, 3) }) - it('concurrent edits insert in reverse actorid order if counters equal', () => { - s1 = Automerge.init('aaaa') - s2 = Automerge.init('bbbb') - s1 = Automerge.change(s1, doc => doc.list = []) + it("concurrent edits insert in reverse actorid order if counters equal", () => { + s1 = Automerge.init("aaaa") + s2 = Automerge.init("bbbb") + s1 = Automerge.change(s1, doc => (doc.list = [])) s2 = Automerge.merge(s2, s1) s1 = 
Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa")) s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "2@bbbb")) @@ -628,75 +778,112 @@ describe('Automerge', () => { assert.deepStrictEqual(Automerge.toJS(s2).list, ["2@bbbb", "2@aaaa"]) }) - it('concurrent edits insert in reverse counter order if different', () => { - s1 = Automerge.init('aaaa') - s2 = Automerge.init('bbbb') - s1 = Automerge.change(s1, doc => doc.list = []) + it("concurrent edits insert in reverse counter order if different", () => { + s1 = Automerge.init("aaaa") + s2 = Automerge.init("bbbb") + s1 = Automerge.change(s1, doc => (doc.list = [])) s2 = Automerge.merge(s2, s1) s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa")) - s2 = Automerge.change(s2, doc => doc.foo = "2@bbbb") - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "3@bbbb")) - s2 = Automerge.merge(s2, s1) - assert.deepStrictEqual(s2.list, ["3@bbbb", "2@aaaa"]) + s2 = Automerge.change(s2, doc => (doc.foo = "2@bbbb")) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "3@bbbb")) + s2 = Automerge.merge(s2, s1) + assert.deepStrictEqual(s2.list, ["3@bbbb", "2@aaaa"]) }) - it('should treat out-by-one assignment as insertion', () => { - s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon']) - s1 = Automerge.change(s1, doc => doc.japaneseFood[1] = 'sushi') - assert.deepStrictEqual(s1.japaneseFood, ['udon', 'sushi']) - assert.strictEqual(s1.japaneseFood[0], 'udon') - assert.strictEqual(s1.japaneseFood[1], 'sushi') + it("should treat out-by-one assignment as insertion", () => { + s1 = Automerge.change(s1, doc => (doc.japaneseFood = ["udon"])) + s1 = Automerge.change(s1, doc => (doc.japaneseFood[1] = "sushi")) + assert.deepStrictEqual(s1.japaneseFood, ["udon", "sushi"]) + assert.strictEqual(s1.japaneseFood[0], "udon") + assert.strictEqual(s1.japaneseFood[1], "sushi") assert.strictEqual(s1.japaneseFood[2], undefined) assert.strictEqual(s1.japaneseFood.length, 2) }) - it('should not allow out-of-range 
assignment', () => { - s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon']) - assert.throws(() => { Automerge.change(s1, doc => doc.japaneseFood[4] = 'ramen') }, /is out of bounds/) + it("should not allow out-of-range assignment", () => { + s1 = Automerge.change(s1, doc => (doc.japaneseFood = ["udon"])) + assert.throws(() => { + Automerge.change(s1, doc => (doc.japaneseFood[4] = "ramen")) + }, /is out of bounds/) }) - it('should allow bulk assignment of multiple list indexes', () => { - s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba']) - s1 = Automerge.change(s1, doc => Object.assign(doc.noodles, {0: 'うどん', 2: 'そば'})) - assert.deepStrictEqual(s1.noodles, ['うどん', 'ramen', 'そば']) - assert.strictEqual(s1.noodles[0], 'うどん') - assert.strictEqual(s1.noodles[1], 'ramen') - assert.strictEqual(s1.noodles[2], 'そば') + it("should allow bulk assignment of multiple list indexes", () => { + s1 = Automerge.change( + s1, + doc => (doc.noodles = ["udon", "ramen", "soba"]) + ) + s1 = Automerge.change(s1, doc => + Object.assign(doc.noodles, { 0: "うどん", 2: "そば" }) + ) + assert.deepStrictEqual(s1.noodles, ["うどん", "ramen", "そば"]) + assert.strictEqual(s1.noodles[0], "うどん") + assert.strictEqual(s1.noodles[1], "ramen") + assert.strictEqual(s1.noodles[2], "そば") assert.strictEqual(s1.noodles.length, 3) }) - it('should handle nested objects', () => { - s1 = Automerge.change(s1, doc => doc.noodles = [{type: 'ramen', dishes: ['tonkotsu', 'shoyu']}]) - s1 = Automerge.change(s1, doc => doc.noodles.push({type: 'udon', dishes: ['tempura udon']})) - s1 = Automerge.change(s1, doc => doc.noodles[0].dishes.push('miso')) - assert.deepStrictEqual(s1, {noodles: [ - {type: 'ramen', dishes: ['tonkotsu', 'shoyu', 'miso']}, - {type: 'udon', dishes: ['tempura udon']} - ]}) + it("should handle nested objects", () => { + s1 = Automerge.change( + s1, + doc => + (doc.noodles = [{ type: "ramen", dishes: ["tonkotsu", "shoyu"] }]) + ) + s1 = Automerge.change(s1, doc => + 
doc.noodles.push({ type: "udon", dishes: ["tempura udon"] }) + ) + s1 = Automerge.change(s1, doc => doc.noodles[0].dishes.push("miso")) + assert.deepStrictEqual(s1, { + noodles: [ + { type: "ramen", dishes: ["tonkotsu", "shoyu", "miso"] }, + { type: "udon", dishes: ["tempura udon"] }, + ], + }) assert.deepStrictEqual(s1.noodles[0], { - type: 'ramen', dishes: ['tonkotsu', 'shoyu', 'miso'] + type: "ramen", + dishes: ["tonkotsu", "shoyu", "miso"], }) assert.deepStrictEqual(s1.noodles[1], { - type: 'udon', dishes: ['tempura udon'] + type: "udon", + dishes: ["tempura udon"], }) }) - it('should handle nested lists', () => { - s1 = Automerge.change(s1, doc => doc.noodleMatrix = [['ramen', 'tonkotsu', 'shoyu']]) - s1 = Automerge.change(s1, doc => doc.noodleMatrix.push(['udon', 'tempura udon'])) - s1 = Automerge.change(s1, doc => doc.noodleMatrix[0].push('miso')) - assert.deepStrictEqual(s1.noodleMatrix, [['ramen', 'tonkotsu', 'shoyu', 'miso'], ['udon', 'tempura udon']]) - assert.deepStrictEqual(s1.noodleMatrix[0], ['ramen', 'tonkotsu', 'shoyu', 'miso']) - assert.deepStrictEqual(s1.noodleMatrix[1], ['udon', 'tempura udon']) + it("should handle nested lists", () => { + s1 = Automerge.change( + s1, + doc => (doc.noodleMatrix = [["ramen", "tonkotsu", "shoyu"]]) + ) + s1 = Automerge.change(s1, doc => + doc.noodleMatrix.push(["udon", "tempura udon"]) + ) + s1 = Automerge.change(s1, doc => doc.noodleMatrix[0].push("miso")) + assert.deepStrictEqual(s1.noodleMatrix, [ + ["ramen", "tonkotsu", "shoyu", "miso"], + ["udon", "tempura udon"], + ]) + assert.deepStrictEqual(s1.noodleMatrix[0], [ + "ramen", + "tonkotsu", + "shoyu", + "miso", + ]) + assert.deepStrictEqual(s1.noodleMatrix[1], ["udon", "tempura udon"]) }) - it('should handle deep nesting', () => { - s1 = Automerge.change(s1, doc => doc.nesting = { - maps: { m1: { m2: { foo: "bar", baz: {} }, m2a: { } } }, - lists: [ [ 1, 2, 3 ], [ [ 3, 4, 5, [6]], 7 ] ], - mapsinlists: [ { foo: "bar" }, [ { bar: "baz" } ] ], - listsinmaps: { 
foo: [1, 2, 3], bar: [ [ { baz: "123" } ] ] } - }) + it("should handle deep nesting", () => { + s1 = Automerge.change( + s1, + doc => + (doc.nesting = { + maps: { m1: { m2: { foo: "bar", baz: {} }, m2a: {} } }, + lists: [ + [1, 2, 3], + [[3, 4, 5, [6]], 7], + ], + mapsinlists: [{ foo: "bar" }, [{ bar: "baz" }]], + listsinmaps: { foo: [1, 2, 3], bar: [[{ baz: "123" }]] }, + }) + ) s1 = Automerge.change(s1, doc => { doc.nesting.maps.m1a = "123" doc.nesting.maps.m1.m2.baz.xxx = "123" @@ -711,97 +898,151 @@ describe('Automerge', () => { doc.nesting.listsinmaps.bar[0][0].baz = "456" delete doc.nesting.listsinmaps.bar }) - assert.deepStrictEqual(s1, { nesting: { - maps: { m1: { m2: { foo: "bar", baz: { xxx: "123" } } }, m1a: "123" }, - lists: [ [ [ 3, 4, 5, 100 ], 7 ] ], - mapsinlists: [ { foo: "baz" } ], - listsinmaps: { foo: [1, 2, 3, 4] } - }}) + assert.deepStrictEqual(s1, { + nesting: { + maps: { + m1: { m2: { foo: "bar", baz: { xxx: "123" } } }, + m1a: "123", + }, + lists: [[[3, 4, 5, 100], 7]], + mapsinlists: [{ foo: "baz" }], + listsinmaps: { foo: [1, 2, 3, 4] }, + }, + }) }) - it('should handle replacement of the entire list', () => { - s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'soba', 'ramen']) - s1 = Automerge.change(s1, doc => doc.japaneseNoodles = doc.noodles.slice()) - s1 = Automerge.change(s1, doc => doc.noodles = ['wonton', 'pho']) + it("should handle replacement of the entire list", () => { + s1 = Automerge.change( + s1, + doc => (doc.noodles = ["udon", "soba", "ramen"]) + ) + s1 = Automerge.change( + s1, + doc => (doc.japaneseNoodles = doc.noodles.slice()) + ) + s1 = Automerge.change(s1, doc => (doc.noodles = ["wonton", "pho"])) assert.deepStrictEqual(s1, { - noodles: ['wonton', 'pho'], - japaneseNoodles: ['udon', 'soba', 'ramen'] + noodles: ["wonton", "pho"], + japaneseNoodles: ["udon", "soba", "ramen"], }) - assert.deepStrictEqual(s1.noodles, ['wonton', 'pho']) - assert.strictEqual(s1.noodles[0], 'wonton') - 
assert.strictEqual(s1.noodles[1], 'pho') + assert.deepStrictEqual(s1.noodles, ["wonton", "pho"]) + assert.strictEqual(s1.noodles[0], "wonton") + assert.strictEqual(s1.noodles[1], "pho") assert.strictEqual(s1.noodles[2], undefined) assert.strictEqual(s1.noodles.length, 2) }) - it('should allow assignment to change the type of a list element', () => { - s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'soba', 'ramen']) - assert.deepStrictEqual(s1.noodles, ['udon', 'soba', 'ramen']) - s1 = Automerge.change(s1, doc => doc.noodles[1] = {type: 'soba', options: ['hot', 'cold']}) - assert.deepStrictEqual(s1.noodles, ['udon', {type: 'soba', options: ['hot', 'cold']}, 'ramen']) - s1 = Automerge.change(s1, doc => doc.noodles[1] = ['hot soba', 'cold soba']) - assert.deepStrictEqual(s1.noodles, ['udon', ['hot soba', 'cold soba'], 'ramen']) - s1 = Automerge.change(s1, doc => doc.noodles[1] = 'soba is the best') - assert.deepStrictEqual(s1.noodles, ['udon', 'soba is the best', 'ramen']) + it("should allow assignment to change the type of a list element", () => { + s1 = Automerge.change( + s1, + doc => (doc.noodles = ["udon", "soba", "ramen"]) + ) + assert.deepStrictEqual(s1.noodles, ["udon", "soba", "ramen"]) + s1 = Automerge.change( + s1, + doc => (doc.noodles[1] = { type: "soba", options: ["hot", "cold"] }) + ) + assert.deepStrictEqual(s1.noodles, [ + "udon", + { type: "soba", options: ["hot", "cold"] }, + "ramen", + ]) + s1 = Automerge.change( + s1, + doc => (doc.noodles[1] = ["hot soba", "cold soba"]) + ) + assert.deepStrictEqual(s1.noodles, [ + "udon", + ["hot soba", "cold soba"], + "ramen", + ]) + s1 = Automerge.change(s1, doc => (doc.noodles[1] = "soba is the best")) + assert.deepStrictEqual(s1.noodles, [ + "udon", + "soba is the best", + "ramen", + ]) }) - it('should allow list creation and assignment in the same change callback', () => { + it("should allow list creation and assignment in the same change callback", () => { s1 = Automerge.change(Automerge.init(), 
doc => { - doc.letters = ['a', 'b', 'c'] - doc.letters[1] = 'd' + doc.letters = ["a", "b", "c"] + doc.letters[1] = "d" }) - assert.strictEqual(s1.letters[1], 'd') + assert.strictEqual(s1.letters[1], "d") }) - it('should allow adding and removing list elements in the same change callback', () => { - let s1 = Automerge.change(Automerge.init<{noodles: Array}>(), doc => doc.noodles = []) + it("should allow adding and removing list elements in the same change callback", () => { + let s1 = Automerge.change( + Automerge.init<{ noodles: Array }>(), + doc => (doc.noodles = []) + ) s1 = Automerge.change(s1, doc => { - doc.noodles.push('udon') + doc.noodles.push("udon") // @ts-ignore doc.noodles.deleteAt(0) }) - assert.deepStrictEqual(s1, {noodles: []}) + assert.deepStrictEqual(s1, { noodles: [] }) // do the add-remove cycle twice, test for #151 (https://github.com/automerge/automerge/issues/151) s1 = Automerge.change(s1, doc => { // @ts-ignore - doc.noodles.push('soba') + doc.noodles.push("soba") // @ts-ignore doc.noodles.deleteAt(0) }) - assert.deepStrictEqual(s1, {noodles: []}) + assert.deepStrictEqual(s1, { noodles: [] }) }) - it('should handle arbitrary-depth nesting', () => { - s1 = Automerge.change(s1, doc => doc.maze = [[[[[[[['noodles', ['here']]]]]]]]]) - s1 = Automerge.change(s1, doc => doc.maze[0][0][0][0][0][0][0][1].unshift('found')) - assert.deepStrictEqual(s1.maze, [[[[[[[['noodles', ['found', 'here']]]]]]]]]) - assert.deepStrictEqual(s1.maze[0][0][0][0][0][0][0][1][1], 'here') + it("should handle arbitrary-depth nesting", () => { + s1 = Automerge.change( + s1, + doc => (doc.maze = [[[[[[[["noodles", ["here"]]]]]]]]]) + ) + s1 = Automerge.change(s1, doc => + doc.maze[0][0][0][0][0][0][0][1].unshift("found") + ) + assert.deepStrictEqual(s1.maze, [ + [[[[[[["noodles", ["found", "here"]]]]]]]], + ]) + assert.deepStrictEqual(s1.maze[0][0][0][0][0][0][0][1][1], "here") s2 = Automerge.load(Automerge.save(s1)) - assert.deepStrictEqual(s1,s2) + 
assert.deepStrictEqual(s1, s2) }) - it('should not allow several references to the same list object', () => { - s1 = Automerge.change(s1, doc => doc.list = []) + it("should not allow several references to the same list object", () => { + s1 = Automerge.change(s1, doc => (doc.list = [])) assert.throws(() => { - Automerge.change(s1, doc => { doc.x = doc.list }) + Automerge.change(s1, doc => { + doc.x = doc.list + }) }, /Cannot create a reference to an existing document object/) assert.throws(() => { - Automerge.change(s1, doc => { doc.x = s1.list }) + Automerge.change(s1, doc => { + doc.x = s1.list + }) }, /Cannot create a reference to an existing document object/) assert.throws(() => { - Automerge.change(s1, doc => { doc.x = []; doc.y = doc.x }) + Automerge.change(s1, doc => { + doc.x = [] + doc.y = doc.x + }) }, /Cannot create a reference to an existing document object/) }) }) - describe('counters', () => { + describe("counters", () => { // counter - it('should allow deleting counters from maps', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.birds = {wrens: new Automerge.Counter(1)}) + it("should allow deleting counters from maps", () => { + const s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = { wrens: new Automerge.Counter(1) }) + ) const s2 = Automerge.change(s1, doc => doc.birds.wrens.increment(2)) const s3 = Automerge.change(s2, doc => delete doc.birds.wrens) - assert.deepStrictEqual(s2, {birds: {wrens: new Automerge.Counter(3)}}) - assert.deepStrictEqual(s3, {birds: {}}) + assert.deepStrictEqual(s2, { + birds: { wrens: new Automerge.Counter(3) }, + }) + assert.deepStrictEqual(s3, { birds: {} }) }) // counter @@ -816,8 +1057,11 @@ describe('Automerge', () => { }) }) - describe('concurrent use', () => { - let s1: Automerge.Doc, s2: Automerge.Doc, s3: Automerge.Doc, s4: Automerge.Doc + describe("concurrent use", () => { + let s1: Automerge.Doc, + s2: Automerge.Doc, + s3: Automerge.Doc, + s4: Automerge.Doc beforeEach(() => { 
s1 = Automerge.init() s2 = Automerge.init() @@ -825,21 +1069,21 @@ describe('Automerge', () => { s4 = Automerge.init() }) - it('should merge concurrent updates of different properties', () => { - s1 = Automerge.change(s1, doc => doc.foo = 'bar') - s2 = Automerge.change(s2, doc => doc.hello = 'world') + it("should merge concurrent updates of different properties", () => { + s1 = Automerge.change(s1, doc => (doc.foo = "bar")) + s2 = Automerge.change(s2, doc => (doc.hello = "world")) s3 = Automerge.merge(s1, s2) - assert.strictEqual(s3.foo, 'bar') - assert.strictEqual(s3.hello, 'world') - assert.deepStrictEqual(s3, {foo: 'bar', hello: 'world'}) - assert.strictEqual(Automerge.getConflicts(s3, 'foo'), undefined) - assert.strictEqual(Automerge.getConflicts(s3, 'hello'), undefined) + assert.strictEqual(s3.foo, "bar") + assert.strictEqual(s3.hello, "world") + assert.deepStrictEqual(s3, { foo: "bar", hello: "world" }) + assert.strictEqual(Automerge.getConflicts(s3, "foo"), undefined) + assert.strictEqual(Automerge.getConflicts(s3, "hello"), undefined) s4 = Automerge.load(Automerge.save(s3)) - assert.deepEqual(s3,s4) + assert.deepEqual(s3, s4) }) - it('should add concurrent increments of the same property', () => { - s1 = Automerge.change(s1, doc => doc.counter = new Automerge.Counter()) + it("should add concurrent increments of the same property", () => { + s1 = Automerge.change(s1, doc => (doc.counter = new Automerge.Counter())) s2 = Automerge.merge(s2, s1) s1 = Automerge.change(s1, doc => doc.counter.increment()) s2 = Automerge.change(s2, doc => doc.counter.increment(2)) @@ -847,391 +1091,523 @@ describe('Automerge', () => { assert.strictEqual(s1.counter.value, 1) assert.strictEqual(s2.counter.value, 2) assert.strictEqual(s3.counter.value, 3) - assert.strictEqual(Automerge.getConflicts(s3, 'counter'), undefined) + assert.strictEqual(Automerge.getConflicts(s3, "counter"), undefined) s4 = Automerge.load(Automerge.save(s3)) - assert.deepEqual(s3,s4) + assert.deepEqual(s3, 
s4) }) - it('should add increments only to the values they precede', () => { - s1 = Automerge.change(s1, doc => doc.counter = new Automerge.Counter(0)) + it("should add increments only to the values they precede", () => { + s1 = Automerge.change(s1, doc => (doc.counter = new Automerge.Counter(0))) s1 = Automerge.change(s1, doc => doc.counter.increment()) - s2 = Automerge.change(s2, doc => doc.counter = new Automerge.Counter(100)) + s2 = Automerge.change( + s2, + doc => (doc.counter = new Automerge.Counter(100)) + ) s2 = Automerge.change(s2, doc => doc.counter.increment(3)) s3 = Automerge.merge(s1, s2) if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { - assert.deepStrictEqual(s3, {counter: new Automerge.Counter(1)}) + assert.deepStrictEqual(s3, { counter: new Automerge.Counter(1) }) } else { - assert.deepStrictEqual(s3, {counter: new Automerge.Counter(103)}) + assert.deepStrictEqual(s3, { counter: new Automerge.Counter(103) }) } - assert.deepStrictEqual(Automerge.getConflicts(s3, 'counter'), { + assert.deepStrictEqual(Automerge.getConflicts(s3, "counter"), { [`1@${Automerge.getActorId(s1)}`]: new Automerge.Counter(1), - [`1@${Automerge.getActorId(s2)}`]: new Automerge.Counter(103) + [`1@${Automerge.getActorId(s2)}`]: new Automerge.Counter(103), }) s4 = Automerge.load(Automerge.save(s3)) - assert.deepEqual(s3,s4) + assert.deepEqual(s3, s4) }) - it('should detect concurrent updates of the same field', () => { - s1 = Automerge.change(s1, doc => doc.field = 'one') - s2 = Automerge.change(s2, doc => doc.field = 'two') + it("should detect concurrent updates of the same field", () => { + s1 = Automerge.change(s1, doc => (doc.field = "one")) + s2 = Automerge.change(s2, doc => (doc.field = "two")) s3 = Automerge.merge(s1, s2) if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { - assert.deepStrictEqual(s3, {field: 'one'}) + assert.deepStrictEqual(s3, { field: "one" }) } else { - assert.deepStrictEqual(s3, {field: 'two'}) + assert.deepStrictEqual(s3, { field: 
"two" }) } - assert.deepStrictEqual(Automerge.getConflicts(s3, 'field'), { - [`1@${Automerge.getActorId(s1)}`]: 'one', - [`1@${Automerge.getActorId(s2)}`]: 'two' + assert.deepStrictEqual(Automerge.getConflicts(s3, "field"), { + [`1@${Automerge.getActorId(s1)}`]: "one", + [`1@${Automerge.getActorId(s2)}`]: "two", }) }) - it('should detect concurrent updates of the same list element', () => { - s1 = Automerge.change(s1, doc => doc.birds = ['finch']) + it("should detect concurrent updates of the same list element", () => { + s1 = Automerge.change(s1, doc => (doc.birds = ["finch"])) s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.birds[0] = 'greenfinch') - s2 = Automerge.change(s2, doc => doc.birds[0] = 'goldfinch_') + s1 = Automerge.change(s1, doc => (doc.birds[0] = "greenfinch")) + s2 = Automerge.change(s2, doc => (doc.birds[0] = "goldfinch_")) s3 = Automerge.merge(s1, s2) if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { - assert.deepStrictEqual(s3.birds, ['greenfinch']) + assert.deepStrictEqual(s3.birds, ["greenfinch"]) } else { - assert.deepStrictEqual(s3.birds, ['goldfinch_']) + assert.deepStrictEqual(s3.birds, ["goldfinch_"]) } assert.deepStrictEqual(Automerge.getConflicts(s3.birds, 0), { - [`8@${Automerge.getActorId(s1)}`]: 'greenfinch', - [`8@${Automerge.getActorId(s2)}`]: 'goldfinch_' + [`8@${Automerge.getActorId(s1)}`]: "greenfinch", + [`8@${Automerge.getActorId(s2)}`]: "goldfinch_", }) }) - it('should handle assignment conflicts of different types', () => { - s1 = Automerge.change(s1, doc => doc.field = 'string') - s2 = Automerge.change(s2, doc => doc.field = ['list']) - s3 = Automerge.change(s3, doc => doc.field = {thing: 'map'}) + it("should handle assignment conflicts of different types", () => { + s1 = Automerge.change(s1, doc => (doc.field = "string")) + s2 = Automerge.change(s2, doc => (doc.field = ["list"])) + s3 = Automerge.change(s3, doc => (doc.field = { thing: "map" })) s1 = Automerge.merge(Automerge.merge(s1, s2), 
s3) - assertEqualsOneOf(s1.field, 'string', ['list'], {thing: 'map'}) - assert.deepStrictEqual(Automerge.getConflicts(s1, 'field'), { - [`1@${Automerge.getActorId(s1)}`]: 'string', - [`1@${Automerge.getActorId(s2)}`]: ['list'], - [`1@${Automerge.getActorId(s3)}`]: {thing: 'map'} + assertEqualsOneOf(s1.field, "string", ["list"], { thing: "map" }) + assert.deepStrictEqual(Automerge.getConflicts(s1, "field"), { + [`1@${Automerge.getActorId(s1)}`]: "string", + [`1@${Automerge.getActorId(s2)}`]: ["list"], + [`1@${Automerge.getActorId(s3)}`]: { thing: "map" }, }) }) - it('should handle changes within a conflicting map field', () => { - s1 = Automerge.change(s1, doc => doc.field = 'string') - s2 = Automerge.change(s2, doc => doc.field = {}) - s2 = Automerge.change(s2, doc => doc.field.innerKey = 42) + it("should handle changes within a conflicting map field", () => { + s1 = Automerge.change(s1, doc => (doc.field = "string")) + s2 = Automerge.change(s2, doc => (doc.field = {})) + s2 = Automerge.change(s2, doc => (doc.field.innerKey = 42)) s3 = Automerge.merge(s1, s2) - assertEqualsOneOf(s3.field, 'string', {innerKey: 42}) - assert.deepStrictEqual(Automerge.getConflicts(s3, 'field'), { - [`1@${Automerge.getActorId(s1)}`]: 'string', - [`1@${Automerge.getActorId(s2)}`]: {innerKey: 42} + assertEqualsOneOf(s3.field, "string", { innerKey: 42 }) + assert.deepStrictEqual(Automerge.getConflicts(s3, "field"), { + [`1@${Automerge.getActorId(s1)}`]: "string", + [`1@${Automerge.getActorId(s2)}`]: { innerKey: 42 }, }) }) - it('should handle changes within a conflicting list element', () => { - s1 = Automerge.change(s1, doc => doc.list = ['hello']) + it("should handle changes within a conflicting list element", () => { + s1 = Automerge.change(s1, doc => (doc.list = ["hello"])) s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.list[0] = {map1: true}) - s1 = Automerge.change(s1, doc => doc.list[0].key = 1) - s2 = Automerge.change(s2, doc => doc.list[0] = {map2: true}) - 
s2 = Automerge.change(s2, doc => doc.list[0].key = 2) + s1 = Automerge.change(s1, doc => (doc.list[0] = { map1: true })) + s1 = Automerge.change(s1, doc => (doc.list[0].key = 1)) + s2 = Automerge.change(s2, doc => (doc.list[0] = { map2: true })) + s2 = Automerge.change(s2, doc => (doc.list[0].key = 2)) s3 = Automerge.merge(s1, s2) if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { - assert.deepStrictEqual(s3.list, [{map1: true, key: 1}]) + assert.deepStrictEqual(s3.list, [{ map1: true, key: 1 }]) } else { - assert.deepStrictEqual(s3.list, [{map2: true, key: 2}]) + assert.deepStrictEqual(s3.list, [{ map2: true, key: 2 }]) } assert.deepStrictEqual(Automerge.getConflicts(s3.list, 0), { - [`8@${Automerge.getActorId(s1)}`]: {map1: true, key: 1}, - [`8@${Automerge.getActorId(s2)}`]: {map2: true, key: 2} + [`8@${Automerge.getActorId(s1)}`]: { map1: true, key: 1 }, + [`8@${Automerge.getActorId(s2)}`]: { map2: true, key: 2 }, }) }) - it('should not merge concurrently assigned nested maps', () => { - s1 = Automerge.change(s1, doc => doc.config = {background: 'blue'}) - s2 = Automerge.change(s2, doc => doc.config = {logo_url: 'logo.png'}) + it("should not merge concurrently assigned nested maps", () => { + s1 = Automerge.change(s1, doc => (doc.config = { background: "blue" })) + s2 = Automerge.change(s2, doc => (doc.config = { logo_url: "logo.png" })) s3 = Automerge.merge(s1, s2) - assertEqualsOneOf(s3.config, {background: 'blue'}, {logo_url: 'logo.png'}) - assert.deepStrictEqual(Automerge.getConflicts(s3, 'config'), { - [`1@${Automerge.getActorId(s1)}`]: {background: 'blue'}, - [`1@${Automerge.getActorId(s2)}`]: {logo_url: 'logo.png'} + assertEqualsOneOf( + s3.config, + { background: "blue" }, + { logo_url: "logo.png" } + ) + assert.deepStrictEqual(Automerge.getConflicts(s3, "config"), { + [`1@${Automerge.getActorId(s1)}`]: { background: "blue" }, + [`1@${Automerge.getActorId(s2)}`]: { logo_url: "logo.png" }, }) }) - it('should clear conflicts after assigning a new 
value', () => { - s1 = Automerge.change(s1, doc => doc.field = 'one') - s2 = Automerge.change(s2, doc => doc.field = 'two') + it("should clear conflicts after assigning a new value", () => { + s1 = Automerge.change(s1, doc => (doc.field = "one")) + s2 = Automerge.change(s2, doc => (doc.field = "two")) s3 = Automerge.merge(s1, s2) - s3 = Automerge.change(s3, doc => doc.field = 'three') - assert.deepStrictEqual(s3, {field: 'three'}) - assert.strictEqual(Automerge.getConflicts(s3, 'field'), undefined) + s3 = Automerge.change(s3, doc => (doc.field = "three")) + assert.deepStrictEqual(s3, { field: "three" }) + assert.strictEqual(Automerge.getConflicts(s3, "field"), undefined) s2 = Automerge.merge(s2, s3) - assert.deepStrictEqual(s2, {field: 'three'}) - assert.strictEqual(Automerge.getConflicts(s2, 'field'), undefined) + assert.deepStrictEqual(s2, { field: "three" }) + assert.strictEqual(Automerge.getConflicts(s2, "field"), undefined) }) - it('should handle concurrent insertions at different list positions', () => { - s1 = Automerge.change(s1, doc => doc.list = ['one', 'three']) + it("should handle concurrent insertions at different list positions", () => { + s1 = Automerge.change(s1, doc => (doc.list = ["one", "three"])) s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.list.splice(1, 0, 'two')) - s2 = Automerge.change(s2, doc => doc.list.push('four')) + s1 = Automerge.change(s1, doc => doc.list.splice(1, 0, "two")) + s2 = Automerge.change(s2, doc => doc.list.push("four")) s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s3, {list: ['one', 'two', 'three', 'four']}) - assert.strictEqual(Automerge.getConflicts(s3, 'list'), undefined) + assert.deepStrictEqual(s3, { list: ["one", "two", "three", "four"] }) + assert.strictEqual(Automerge.getConflicts(s3, "list"), undefined) }) - it('should handle concurrent insertions at the same list position', () => { - s1 = Automerge.change(s1, doc => doc.birds = ['parakeet']) + it("should handle concurrent 
insertions at the same list position", () => { + s1 = Automerge.change(s1, doc => (doc.birds = ["parakeet"])) s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.birds.push('starling')) - s2 = Automerge.change(s2, doc => doc.birds.push('chaffinch')) + s1 = Automerge.change(s1, doc => doc.birds.push("starling")) + s2 = Automerge.change(s2, doc => doc.birds.push("chaffinch")) s3 = Automerge.merge(s1, s2) - assertEqualsOneOf(s3.birds, ['parakeet', 'starling', 'chaffinch'], ['parakeet', 'chaffinch', 'starling']) + assertEqualsOneOf( + s3.birds, + ["parakeet", "starling", "chaffinch"], + ["parakeet", "chaffinch", "starling"] + ) s2 = Automerge.merge(s2, s3) assert.deepStrictEqual(s2, s3) }) - it('should handle concurrent assignment and deletion of a map entry', () => { + it("should handle concurrent assignment and deletion of a map entry", () => { // Add-wins semantics - s1 = Automerge.change(s1, doc => doc.bestBird = 'robin') + s1 = Automerge.change(s1, doc => (doc.bestBird = "robin")) s2 = Automerge.merge(s2, s1) s1 = Automerge.change(s1, doc => delete doc.bestBird) - s2 = Automerge.change(s2, doc => doc.bestBird = 'magpie') + s2 = Automerge.change(s2, doc => (doc.bestBird = "magpie")) s3 = Automerge.merge(s1, s2) assert.deepStrictEqual(s1, {}) - assert.deepStrictEqual(s2, {bestBird: 'magpie'}) - assert.deepStrictEqual(s3, {bestBird: 'magpie'}) - assert.strictEqual(Automerge.getConflicts(s3, 'bestBird'), undefined) + assert.deepStrictEqual(s2, { bestBird: "magpie" }) + assert.deepStrictEqual(s3, { bestBird: "magpie" }) + assert.strictEqual(Automerge.getConflicts(s3, "bestBird"), undefined) }) - it('should handle concurrent assignment and deletion of a list element', () => { + it("should handle concurrent assignment and deletion of a list element", () => { // Concurrent assignment ressurects a deleted list element. 
Perhaps a little // surprising, but consistent with add-wins semantics of maps (see test above) - s1 = Automerge.change(s1, doc => doc.birds = ['blackbird', 'thrush', 'goldfinch']) + s1 = Automerge.change( + s1, + doc => (doc.birds = ["blackbird", "thrush", "goldfinch"]) + ) s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.birds[1] = 'starling') + s1 = Automerge.change(s1, doc => (doc.birds[1] = "starling")) s2 = Automerge.change(s2, doc => doc.birds.splice(1, 1)) s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s1.birds, ['blackbird', 'starling', 'goldfinch']) - assert.deepStrictEqual(s2.birds, ['blackbird', 'goldfinch']) - assert.deepStrictEqual(s3.birds, ['blackbird', 'starling', 'goldfinch']) + assert.deepStrictEqual(s1.birds, ["blackbird", "starling", "goldfinch"]) + assert.deepStrictEqual(s2.birds, ["blackbird", "goldfinch"]) + assert.deepStrictEqual(s3.birds, ["blackbird", "starling", "goldfinch"]) s4 = Automerge.load(Automerge.save(s3)) - assert.deepStrictEqual(s3, s4); + assert.deepStrictEqual(s3, s4) }) - it('should handle insertion after a deleted list element', () => { - s1 = Automerge.change(s1, doc => doc.birds = ['blackbird', 'thrush', 'goldfinch']) + it("should handle insertion after a deleted list element", () => { + s1 = Automerge.change( + s1, + doc => (doc.birds = ["blackbird", "thrush", "goldfinch"]) + ) s2 = Automerge.merge(s2, s1) s1 = Automerge.change(s1, doc => doc.birds.splice(1, 2)) - s2 = Automerge.change(s2, doc => doc.birds.splice(2, 0, 'starling')) + s2 = Automerge.change(s2, doc => doc.birds.splice(2, 0, "starling")) s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s3, {birds: ['blackbird', 'starling']}) - assert.deepStrictEqual(Automerge.merge(s2, s3), {birds: ['blackbird', 'starling']}) + assert.deepStrictEqual(s3, { birds: ["blackbird", "starling"] }) + assert.deepStrictEqual(Automerge.merge(s2, s3), { + birds: ["blackbird", "starling"], + }) }) - it('should handle concurrent deletion of the same 
element', () => { - s1 = Automerge.change(s1, doc => doc.birds = ['albatross', 'buzzard', 'cormorant']) + it("should handle concurrent deletion of the same element", () => { + s1 = Automerge.change( + s1, + doc => (doc.birds = ["albatross", "buzzard", "cormorant"]) + ) s2 = Automerge.merge(s2, s1) s1 = Automerge.change(s1, doc => doc.birds.deleteAt(1)) // buzzard s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s3.birds, ['albatross', 'cormorant']) + assert.deepStrictEqual(s3.birds, ["albatross", "cormorant"]) }) - it('should handle concurrent deletion of different elements', () => { - s1 = Automerge.change(s1, doc => doc.birds = ['albatross', 'buzzard', 'cormorant']) + it("should handle concurrent deletion of different elements", () => { + s1 = Automerge.change( + s1, + doc => (doc.birds = ["albatross", "buzzard", "cormorant"]) + ) s2 = Automerge.merge(s2, s1) s1 = Automerge.change(s1, doc => doc.birds.deleteAt(0)) // albatross s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s3.birds, ['cormorant']) + assert.deepStrictEqual(s3.birds, ["cormorant"]) }) - it('should handle concurrent updates at different levels of the tree', () => { + it("should handle concurrent updates at different levels of the tree", () => { // A delete higher up in the tree overrides an update in a subtree - s1 = Automerge.change(s1, doc => doc.animals = {birds: {pink: 'flamingo', black: 'starling'}, mammals: ['badger']}) + s1 = Automerge.change( + s1, + doc => + (doc.animals = { + birds: { pink: "flamingo", black: "starling" }, + mammals: ["badger"], + }) + ) s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.animals.birds.brown = 'sparrow') + s1 = Automerge.change(s1, doc => (doc.animals.birds.brown = "sparrow")) s2 = Automerge.change(s2, doc => delete doc.animals.birds) s3 = Automerge.merge(s1, s2) assert.deepStrictEqual(s1.animals, 
{ birds: { - pink: 'flamingo', brown: 'sparrow', black: 'starling' + pink: "flamingo", + brown: "sparrow", + black: "starling", }, - mammals: ['badger'] + mammals: ["badger"], }) - assert.deepStrictEqual(s2.animals, {mammals: ['badger']}) - assert.deepStrictEqual(s3.animals, {mammals: ['badger']}) + assert.deepStrictEqual(s2.animals, { mammals: ["badger"] }) + assert.deepStrictEqual(s3.animals, { mammals: ["badger"] }) }) - it('should handle updates of concurrently deleted objects', () => { - s1 = Automerge.change(s1, doc => doc.birds = {blackbird: {feathers: 'black'}}) + it("should handle updates of concurrently deleted objects", () => { + s1 = Automerge.change( + s1, + doc => (doc.birds = { blackbird: { feathers: "black" } }) + ) s2 = Automerge.merge(s2, s1) s1 = Automerge.change(s1, doc => delete doc.birds.blackbird) - s2 = Automerge.change(s2, doc => doc.birds.blackbird.beak = 'orange') + s2 = Automerge.change(s2, doc => (doc.birds.blackbird.beak = "orange")) s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s1, {birds: {}}) + assert.deepStrictEqual(s1, { birds: {} }) }) - it('should not interleave sequence insertions at the same position', () => { - s1 = Automerge.change(s1, doc => doc.wisdom = []) + it("should not interleave sequence insertions at the same position", () => { + s1 = Automerge.change(s1, doc => (doc.wisdom = [])) s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.wisdom.push('to', 'be', 'is', 'to', 'do')) - s2 = Automerge.change(s2, doc => doc.wisdom.push('to', 'do', 'is', 'to', 'be')) + s1 = Automerge.change(s1, doc => + doc.wisdom.push("to", "be", "is", "to", "do") + ) + s2 = Automerge.change(s2, doc => + doc.wisdom.push("to", "do", "is", "to", "be") + ) s3 = Automerge.merge(s1, s2) - assertEqualsOneOf(s3.wisdom, - ['to', 'be', 'is', 'to', 'do', 'to', 'do', 'is', 'to', 'be'], - ['to', 'do', 'is', 'to', 'be', 'to', 'be', 'is', 'to', 'do']) + assertEqualsOneOf( + s3.wisdom, + ["to", "be", "is", "to", "do", "to", "do", "is", 
"to", "be"], + ["to", "do", "is", "to", "be", "to", "be", "is", "to", "do"] + ) // In case you're wondering: http://quoteinvestigator.com/2013/09/16/do-be-do/ }) - describe('multiple insertions at the same list position', () => { - it('should handle insertion by greater actor ID', () => { - s1 = Automerge.init('aaaa') - s2 = Automerge.init('bbbb') - s1 = Automerge.change(s1, doc => doc.list = ['two']) + describe("multiple insertions at the same list position", () => { + it("should handle insertion by greater actor ID", () => { + s1 = Automerge.init("aaaa") + s2 = Automerge.init("bbbb") + s1 = Automerge.change(s1, doc => (doc.list = ["two"])) s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one')) - assert.deepStrictEqual(s2.list, ['one', 'two']) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "one")) + assert.deepStrictEqual(s2.list, ["one", "two"]) }) - it('should handle insertion by lesser actor ID', () => { - s1 = Automerge.init('bbbb') - s2 = Automerge.init('aaaa') - s1 = Automerge.change(s1, doc => doc.list = ['two']) + it("should handle insertion by lesser actor ID", () => { + s1 = Automerge.init("bbbb") + s2 = Automerge.init("aaaa") + s1 = Automerge.change(s1, doc => (doc.list = ["two"])) s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one')) - assert.deepStrictEqual(s2.list, ['one', 'two']) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "one")) + assert.deepStrictEqual(s2.list, ["one", "two"]) }) - it('should handle insertion regardless of actor ID', () => { - s1 = Automerge.change(s1, doc => doc.list = ['two']) + it("should handle insertion regardless of actor ID", () => { + s1 = Automerge.change(s1, doc => (doc.list = ["two"])) s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one')) - assert.deepStrictEqual(s2.list, ['one', 'two']) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "one")) + 
assert.deepStrictEqual(s2.list, ["one", "two"]) }) - it('should make insertion order consistent with causality', () => { - s1 = Automerge.change(s1, doc => doc.list = ['four']) + it("should make insertion order consistent with causality", () => { + s1 = Automerge.change(s1, doc => (doc.list = ["four"])) s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.unshift('three')) + s2 = Automerge.change(s2, doc => doc.list.unshift("three")) s1 = Automerge.merge(s1, s2) - s1 = Automerge.change(s1, doc => doc.list.unshift('two')) + s1 = Automerge.change(s1, doc => doc.list.unshift("two")) s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.unshift('one')) - assert.deepStrictEqual(s2.list, ['one', 'two', 'three', 'four']) + s2 = Automerge.change(s2, doc => doc.list.unshift("one")) + assert.deepStrictEqual(s2.list, ["one", "two", "three", "four"]) }) }) }) - describe('saving and loading', () => { - it('should save and restore an empty document', () => { + describe("saving and loading", () => { + it("should save and restore an empty document", () => { let s = Automerge.load(Automerge.save(Automerge.init())) assert.deepStrictEqual(s, {}) }) - it('should generate a new random actor ID', () => { + it("should generate a new random actor ID", () => { let s1 = Automerge.init() let s2 = Automerge.load(Automerge.save(s1)) - assert.strictEqual(UUID_PATTERN.test(Automerge.getActorId(s1).toString()), true) - assert.strictEqual(UUID_PATTERN.test(Automerge.getActorId(s2).toString()), true) + assert.strictEqual( + UUID_PATTERN.test(Automerge.getActorId(s1).toString()), + true + ) + assert.strictEqual( + UUID_PATTERN.test(Automerge.getActorId(s2).toString()), + true + ) assert.notEqual(Automerge.getActorId(s1), Automerge.getActorId(s2)) }) - it('should allow a custom actor ID to be set', () => { - let s = Automerge.load(Automerge.save(Automerge.init()), '333333') - assert.strictEqual(Automerge.getActorId(s), '333333') + it("should allow a custom actor 
ID to be set", () => { + let s = Automerge.load(Automerge.save(Automerge.init()), "333333") + assert.strictEqual(Automerge.getActorId(s), "333333") }) - it('should reconstitute complex datatypes', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.todos = [{title: 'water plants', done: false}]) + it("should reconstitute complex datatypes", () => { + let s1 = Automerge.change( + Automerge.init(), + doc => (doc.todos = [{ title: "water plants", done: false }]) + ) let s2 = Automerge.load(Automerge.save(s1)) - assert.deepStrictEqual(s2, {todos: [{title: 'water plants', done: false}]}) + assert.deepStrictEqual(s2, { + todos: [{ title: "water plants", done: false }], + }) }) - it('should save and load maps with @ symbols in the keys', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc["123@4567"] = "hello") + it("should save and load maps with @ symbols in the keys", () => { + let s1 = Automerge.change( + Automerge.init(), + doc => (doc["123@4567"] = "hello") + ) let s2 = Automerge.load(Automerge.save(s1)) assert.deepStrictEqual(s2, { "123@4567": "hello" }) }) - it('should reconstitute conflicts', () => { - let s1 = Automerge.change(Automerge.init('111111'), doc => doc.x = 3) - let s2 = Automerge.change(Automerge.init('222222'), doc => doc.x = 5) + it("should reconstitute conflicts", () => { + let s1 = Automerge.change( + Automerge.init("111111"), + doc => (doc.x = 3) + ) + let s2 = Automerge.change( + Automerge.init("222222"), + doc => (doc.x = 5) + ) s1 = Automerge.merge(s1, s2) let s3 = Automerge.load(Automerge.save(s1)) assert.strictEqual(s1.x, 5) assert.strictEqual(s3.x, 5) - assert.deepStrictEqual(Automerge.getConflicts(s1, 'x'), {'1@111111': 3, '1@222222': 5}) - assert.deepStrictEqual(Automerge.getConflicts(s3, 'x'), {'1@111111': 3, '1@222222': 5}) - }) - - it('should reconstitute element ID counters', () => { - const s1 = Automerge.init('01234567') - const s2 = Automerge.change(s1, doc => doc.list = ['a']) - const listId = 
Automerge.getObjectId(s2.list) - const changes12 = Automerge.getAllChanges(s2).map(Automerge.decodeChange) - assert.deepStrictEqual(changes12, [{ - hash: changes12[0].hash, actor: '01234567', seq: 1, startOp: 1, - time: changes12[0].time, message: null, deps: [], ops: [ - {obj: '_root', action: 'makeList', key: 'list', pred: []}, - {obj: listId, action: 'makeText', elemId: '_head', insert: true, pred: []}, - {obj: "2@01234567", action: 'set', elemId: '_head', insert: true, value: 'a', pred: []} - ] - }]) - const s3 = Automerge.change(s2, doc => doc.list.deleteAt(0)) - const s4 = Automerge.load(Automerge.save(s3), '01234567') - const s5 = Automerge.change(s4, doc => doc.list.push('b')) - const changes45 = Automerge.getAllChanges(s5).map(Automerge.decodeChange) - assert.deepStrictEqual(s5, {list: ['b']}) - assert.deepStrictEqual(changes45[2], { - hash: changes45[2].hash, actor: '01234567', seq: 3, startOp: 5, - time: changes45[2].time, message: null, deps: [changes45[1].hash], ops: [ - {obj: listId, action: 'makeText', elemId: '_head', insert: true, pred: []}, - {obj: "5@01234567", action: 'set', elemId: '_head', insert: true, value: 'b', pred: []} - ] + assert.deepStrictEqual(Automerge.getConflicts(s1, "x"), { + "1@111111": 3, + "1@222222": 5, + }) + assert.deepStrictEqual(Automerge.getConflicts(s3, "x"), { + "1@111111": 3, + "1@222222": 5, }) }) - it('should allow a reloaded list to be mutated', () => { - let doc = Automerge.change(Automerge.init(), doc => doc.foo = []) + it("should reconstitute element ID counters", () => { + const s1 = Automerge.init("01234567") + const s2 = Automerge.change(s1, doc => (doc.list = ["a"])) + const listId = Automerge.getObjectId(s2.list) + const changes12 = Automerge.getAllChanges(s2).map(Automerge.decodeChange) + assert.deepStrictEqual(changes12, [ + { + hash: changes12[0].hash, + actor: "01234567", + seq: 1, + startOp: 1, + time: changes12[0].time, + message: null, + deps: [], + ops: [ + { obj: "_root", action: "makeList", key: 
"list", pred: [] }, + { + obj: listId, + action: "makeText", + elemId: "_head", + insert: true, + pred: [], + }, + { + obj: "2@01234567", + action: "set", + elemId: "_head", + insert: true, + value: "a", + pred: [], + }, + ], + }, + ]) + const s3 = Automerge.change(s2, doc => doc.list.deleteAt(0)) + const s4 = Automerge.load(Automerge.save(s3), "01234567") + const s5 = Automerge.change(s4, doc => doc.list.push("b")) + const changes45 = Automerge.getAllChanges(s5).map(Automerge.decodeChange) + assert.deepStrictEqual(s5, { list: ["b"] }) + assert.deepStrictEqual(changes45[2], { + hash: changes45[2].hash, + actor: "01234567", + seq: 3, + startOp: 5, + time: changes45[2].time, + message: null, + deps: [changes45[1].hash], + ops: [ + { + obj: listId, + action: "makeText", + elemId: "_head", + insert: true, + pred: [], + }, + { + obj: "5@01234567", + action: "set", + elemId: "_head", + insert: true, + value: "b", + pred: [], + }, + ], + }) + }) + + it("should allow a reloaded list to be mutated", () => { + let doc = Automerge.change(Automerge.init(), doc => (doc.foo = [])) doc = Automerge.load(Automerge.save(doc)) - doc = Automerge.change(doc, 'add', doc => doc.foo.push(1)) + doc = Automerge.change(doc, "add", doc => doc.foo.push(1)) doc = Automerge.load(Automerge.save(doc)) assert.deepStrictEqual(doc.foo, [1]) }) - it('should reload a document containing deflated columns', () => { + it("should reload a document containing deflated columns", () => { // In this test, the keyCtr column is long enough for deflate compression to kick in, but the // keyStr column is short. Thus, the deflate bit gets set for keyCtr but not for keyStr. // When checking whether the columns appear in ascending order, we must ignore the deflate bit. 
let doc = Automerge.change(Automerge.init(), doc => { doc.list = [] - for (let i = 0; i < 200; i++) doc.list.insertAt(Math.floor(Math.random() * i), 'a') + for (let i = 0; i < 200; i++) + doc.list.insertAt(Math.floor(Math.random() * i), "a") }) Automerge.load(Automerge.save(doc)) let expected: Array = [] - for (let i = 0; i < 200; i++) expected.push('a') - assert.deepStrictEqual(doc, {list: expected}) + for (let i = 0; i < 200; i++) expected.push("a") + assert.deepStrictEqual(doc, { list: expected }) }) - it.skip('should call patchCallback if supplied to load', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) - const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch')) - const callbacks: Array = [], actor = Automerge.getActorId(s1) + it.skip("should call patchCallback if supplied to load", () => { + const s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = ["Goldfinch"]) + ) + const s2 = Automerge.change(s1, doc => doc.birds.push("Chaffinch")) + const callbacks: Array = [], + actor = Automerge.getActorId(s1) const reloaded = Automerge.load(Automerge.save(s2), { patchCallback(patch, before, after) { - callbacks.push({patch, before, after}) - } + callbacks.push({ patch, before, after }) + }, }) assert.strictEqual(callbacks.length, 1) assert.deepStrictEqual(callbacks[0].patch, { - maxOp: 3, deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], clock: {[actor]: 2}, pendingChanges: 0, - diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: { - objectId: `1@${actor}`, type: 'list', edits: [ - {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['Goldfinch', 'Chaffinch']} - ] - }}}} + maxOp: 3, + deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], + clock: { [actor]: 2 }, + pendingChanges: 0, + diffs: { + objectId: "_root", + type: "map", + props: { + birds: { + [`1@${actor}`]: { + objectId: `1@${actor}`, + type: "list", + edits: [ + { + action: "multi-insert", + 
index: 0, + elemId: `2@${actor}`, + values: ["Goldfinch", "Chaffinch"], + }, + ], + }, + }, + }, + }, }) assert.deepStrictEqual(callbacks[0].before, {}) assert.strictEqual(callbacks[0].after, reloaded) @@ -1239,99 +1615,155 @@ describe('Automerge', () => { }) }) - describe('history API', () => { - it('should return an empty history for an empty document', () => { + describe("history API", () => { + it("should return an empty history for an empty document", () => { assert.deepStrictEqual(Automerge.getHistory(Automerge.init()), []) }) - it('should make past document states accessible', () => { + it("should make past document states accessible", () => { let s = Automerge.init() - s = Automerge.change(s, doc => doc.config = {background: 'blue'}) - s = Automerge.change(s, doc => doc.birds = ['mallard']) - s = Automerge.change(s, doc => doc.birds.unshift('oystercatcher')) - assert.deepStrictEqual(Automerge.getHistory(s).map(state => state.snapshot), [ - {config: {background: 'blue'}}, - {config: {background: 'blue'}, birds: ['mallard']}, - {config: {background: 'blue'}, birds: ['oystercatcher', 'mallard']} - ]) + s = Automerge.change(s, doc => (doc.config = { background: "blue" })) + s = Automerge.change(s, doc => (doc.birds = ["mallard"])) + s = Automerge.change(s, doc => doc.birds.unshift("oystercatcher")) + assert.deepStrictEqual( + Automerge.getHistory(s).map(state => state.snapshot), + [ + { config: { background: "blue" } }, + { config: { background: "blue" }, birds: ["mallard"] }, + { + config: { background: "blue" }, + birds: ["oystercatcher", "mallard"], + }, + ] + ) }) - it('should make change messages accessible', () => { + it("should make change messages accessible", () => { let s = Automerge.init() - s = Automerge.change(s, 'Empty Bookshelf', doc => doc.books = []) - s = Automerge.change(s, 'Add Orwell', doc => doc.books.push('Nineteen Eighty-Four')) - s = Automerge.change(s, 'Add Huxley', doc => doc.books.push('Brave New World')) - 
assert.deepStrictEqual(s.books, ['Nineteen Eighty-Four', 'Brave New World']) - assert.deepStrictEqual(Automerge.getHistory(s).map(state => state.change.message), - ['Empty Bookshelf', 'Add Orwell', 'Add Huxley']) + s = Automerge.change(s, "Empty Bookshelf", doc => (doc.books = [])) + s = Automerge.change(s, "Add Orwell", doc => + doc.books.push("Nineteen Eighty-Four") + ) + s = Automerge.change(s, "Add Huxley", doc => + doc.books.push("Brave New World") + ) + assert.deepStrictEqual(s.books, [ + "Nineteen Eighty-Four", + "Brave New World", + ]) + assert.deepStrictEqual( + Automerge.getHistory(s).map(state => state.change.message), + ["Empty Bookshelf", "Add Orwell", "Add Huxley"] + ) }) }) - describe('changes API', () => { - it('should return an empty list on an empty document', () => { + describe("changes API", () => { + it("should return an empty list on an empty document", () => { let changes = Automerge.getAllChanges(Automerge.init()) assert.deepStrictEqual(changes, []) }) - it('should return an empty list when nothing changed', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) + it("should return an empty list when nothing changed", () => { + let s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = ["Chaffinch"]) + ) assert.deepStrictEqual(Automerge.getChanges(s1, s1), []) }) - it('should do nothing when applying an empty list of changes', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) + it("should do nothing when applying an empty list of changes", () => { + let s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = ["Chaffinch"]) + ) assert.deepStrictEqual(Automerge.applyChanges(s1, [])[0], s1) }) - it('should return all changes when compared to an empty document', () => { - let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) - let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) + 
it("should return all changes when compared to an empty document", () => { + let s1 = Automerge.change( + Automerge.init(), + "Add Chaffinch", + doc => (doc.birds = ["Chaffinch"]) + ) + let s2 = Automerge.change(s1, "Add Bullfinch", doc => + doc.birds.push("Bullfinch") + ) let changes = Automerge.getChanges(Automerge.init(), s2) assert.strictEqual(changes.length, 2) }) - it('should allow a document copy to be reconstructed from scratch', () => { - let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) - let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) + it("should allow a document copy to be reconstructed from scratch", () => { + let s1 = Automerge.change( + Automerge.init(), + "Add Chaffinch", + doc => (doc.birds = ["Chaffinch"]) + ) + let s2 = Automerge.change(s1, "Add Bullfinch", doc => + doc.birds.push("Bullfinch") + ) let changes = Automerge.getAllChanges(s2) let [s3] = Automerge.applyChanges(Automerge.init(), changes) - assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch']) + assert.deepStrictEqual(s3.birds, ["Chaffinch", "Bullfinch"]) }) - it('should return changes since the last given version', () => { - let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) + it("should return changes since the last given version", () => { + let s1 = Automerge.change( + Automerge.init(), + "Add Chaffinch", + doc => (doc.birds = ["Chaffinch"]) + ) let changes1 = Automerge.getAllChanges(s1) - let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) + let s2 = Automerge.change(s1, "Add Bullfinch", doc => + doc.birds.push("Bullfinch") + ) let changes2 = Automerge.getChanges(s1, s2) assert.strictEqual(changes1.length, 1) // Add Chaffinch assert.strictEqual(changes2.length, 1) // Add Bullfinch }) - it('should incrementally apply changes since the last given version', () => { - let s1 = Automerge.change(Automerge.init(), 'Add 
Chaffinch', doc => doc.birds = ['Chaffinch']) + it("should incrementally apply changes since the last given version", () => { + let s1 = Automerge.change( + Automerge.init(), + "Add Chaffinch", + doc => (doc.birds = ["Chaffinch"]) + ) let changes1 = Automerge.getAllChanges(s1) - let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) + let s2 = Automerge.change(s1, "Add Bullfinch", doc => + doc.birds.push("Bullfinch") + ) let changes2 = Automerge.getChanges(s1, s2) let [s3] = Automerge.applyChanges(Automerge.init(), changes1) let [s4] = Automerge.applyChanges(s3, changes2) - assert.deepStrictEqual(s3.birds, ['Chaffinch']) - assert.deepStrictEqual(s4.birds, ['Chaffinch', 'Bullfinch']) + assert.deepStrictEqual(s3.birds, ["Chaffinch"]) + assert.deepStrictEqual(s4.birds, ["Chaffinch", "Bullfinch"]) }) - it('should handle updates to a list element', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch', 'Bullfinch']) - let s2 = Automerge.change(s1, doc => doc.birds[0] = 'Goldfinch') - let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2)) - assert.deepStrictEqual(s3.birds, ['Goldfinch', 'Bullfinch']) + it("should handle updates to a list element", () => { + let s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = ["Chaffinch", "Bullfinch"]) + ) + let s2 = Automerge.change(s1, doc => (doc.birds[0] = "Goldfinch")) + let [s3] = Automerge.applyChanges( + Automerge.init(), + Automerge.getAllChanges(s2) + ) + assert.deepStrictEqual(s3.birds, ["Goldfinch", "Bullfinch"]) assert.strictEqual(Automerge.getConflicts(s3.birds, 0), undefined) }) // TEXT - it('should handle updates to a text object', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.text = 'ab') - let s2 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 1, "A")) - let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2)) - assert.deepStrictEqual([...s3.text], ['A', 'b']) + 
it("should handle updates to a text object", () => { + let s1 = Automerge.change(Automerge.init(), doc => (doc.text = "ab")) + let s2 = Automerge.change(s1, doc => + Automerge.splice(doc, "text", 0, 1, "A") + ) + let [s3] = Automerge.applyChanges( + Automerge.init(), + Automerge.getAllChanges(s2) + ) + assert.deepStrictEqual([...s3.text], ["A", "b"]) }) /* @@ -1352,60 +1784,90 @@ describe('Automerge', () => { }) */ - it('should report missing dependencies with out-of-order applyChanges', () => { + it("should report missing dependencies with out-of-order applyChanges", () => { let s0 = Automerge.init() - let s1 = Automerge.change(s0, doc => doc.test = ['a']) + let s1 = Automerge.change(s0, doc => (doc.test = ["a"])) let changes01 = Automerge.getAllChanges(s1) - let s2 = Automerge.change(s1, doc => doc.test = ['b']) + let s2 = Automerge.change(s1, doc => (doc.test = ["b"])) let changes12 = Automerge.getChanges(s1, s2) - let s3 = Automerge.change(s2, doc => doc.test = ['c']) + let s3 = Automerge.change(s2, doc => (doc.test = ["c"])) let changes23 = Automerge.getChanges(s2, s3) let s4 = Automerge.init() let [s5] = Automerge.applyChanges(s4, changes23) let [s6] = Automerge.applyChanges(s5, changes12) - assert.deepStrictEqual(Automerge.getMissingDeps(s6, []), [decodeChange(changes01[0]).hash]) + assert.deepStrictEqual(Automerge.getMissingDeps(s6, []), [ + decodeChange(changes01[0]).hash, + ]) }) - it('should call patchCallback if supplied when applying changes', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) + it("should call patchCallback if supplied when applying changes", () => { + const s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = ["Goldfinch"]) + ) const callbacks: Array = [] const before = Automerge.init() - const [after] = Automerge.applyChanges(before, Automerge.getAllChanges(s1), { - patchCallback(patch, before, after) { - callbacks.push({patch, before, after}) + const [after] = 
Automerge.applyChanges( + before, + Automerge.getAllChanges(s1), + { + patchCallback(patch, before, after) { + callbacks.push({ patch, before, after }) + }, } - }) + ) assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patch[0], { action: 'put', path: ["birds"], value: [] }) - assert.deepStrictEqual(callbacks[0].patch[1], { action: 'insert', path: ["birds",0], values: [""] }) - assert.deepStrictEqual(callbacks[0].patch[2], { action: 'splice', path: ["birds",0,0], value: "Goldfinch" }) + assert.deepStrictEqual(callbacks[0].patch[0], { + action: "put", + path: ["birds"], + value: [], + }) + assert.deepStrictEqual(callbacks[0].patch[1], { + action: "insert", + path: ["birds", 0], + values: [""], + }) + assert.deepStrictEqual(callbacks[0].patch[2], { + action: "splice", + path: ["birds", 0, 0], + value: "Goldfinch", + }) assert.strictEqual(callbacks[0].before, before) assert.strictEqual(callbacks[0].after, after) }) - it('should merge multiple applied changes into one patch', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) - const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch')) + it("should merge multiple applied changes into one patch", () => { + const s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = ["Goldfinch"]) + ) + const s2 = Automerge.change(s1, doc => doc.birds.push("Chaffinch")) const patches: Array = [] - Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2), - {patchCallback: p => patches.push(... 
p)}) + Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2), { + patchCallback: p => patches.push(...p), + }) assert.deepStrictEqual(patches, [ - { action: 'put', path: [ 'birds' ], value: [] }, - { action: "insert", path: [ "birds", 0 ], values: [ "" ] }, - { action: "splice", path: [ "birds", 0, 0 ], value: "Goldfinch" }, - { action: "insert", path: [ "birds", 1 ], values: [ "" ] }, - { action: "splice", path: [ "birds", 1, 0 ], value: "Chaffinch" } + { action: "put", path: ["birds"], value: [] }, + { action: "insert", path: ["birds", 0], values: [""] }, + { action: "splice", path: ["birds", 0, 0], value: "Goldfinch" }, + { action: "insert", path: ["birds", 1], values: [""] }, + { action: "splice", path: ["birds", 1, 0], value: "Chaffinch" }, ]) }) - it('should call a patchCallback registered on doc initialisation', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.bird = 'Goldfinch') + it("should call a patchCallback registered on doc initialisation", () => { + const s1 = Automerge.change( + Automerge.init(), + doc => (doc.bird = "Goldfinch") + ) const patches: Array = [] - const before = Automerge.init({patchCallback: p => patches.push(... 
p)}) + const before = Automerge.init({ + patchCallback: p => patches.push(...p), + }) Automerge.applyChanges(before, Automerge.getAllChanges(s1)) assert.deepStrictEqual(patches, [ - { action: "put", path: [ "bird" ], value: "" }, - { action: "splice", path: [ "bird", 0 ], value: "Goldfinch" } + { action: "put", path: ["bird"], value: "" }, + { action: "splice", path: ["bird", 0], value: "Goldfinch" }, ]) }) }) diff --git a/javascript/test/sync_test.ts b/javascript/test/sync_test.ts index 8e03c18a..5724985c 100644 --- a/javascript/test/sync_test.ts +++ b/javascript/test/sync_test.ts @@ -1,7 +1,13 @@ -import * as assert from 'assert' -import * as Automerge from '../src' -import { BloomFilter } from './legacy/sync' -import { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } from "../src" +import * as assert from "assert" +import * as Automerge from "../src" +import { BloomFilter } from "./legacy/sync" +import { + decodeSyncMessage, + encodeSyncMessage, + decodeSyncState, + encodeSyncState, + initSyncState, +} from "../src" function getHeads(doc) { return Automerge.getHeads(doc) @@ -11,32 +17,41 @@ function getMissingDeps(doc) { return Automerge.getMissingDeps(doc, []) } -function sync(a, b, aSyncState = initSyncState(), bSyncState = initSyncState()) { +function sync( + a, + b, + aSyncState = initSyncState(), + bSyncState = initSyncState() +) { const MAX_ITER = 10 - let aToBmsg: Automerge.SyncMessage | null = null, bToAmsg: Automerge.SyncMessage | null = null, i = 0 + let aToBmsg: Automerge.SyncMessage | null = null, + bToAmsg: Automerge.SyncMessage | null = null, + i = 0 do { - [aSyncState, aToBmsg] = Automerge.generateSyncMessage(a, aSyncState) + ;[aSyncState, aToBmsg] = Automerge.generateSyncMessage(a, aSyncState) ;[bSyncState, bToAmsg] = Automerge.generateSyncMessage(b, bSyncState) if (aToBmsg) { - [b, bSyncState] = Automerge.receiveSyncMessage(b, bSyncState, aToBmsg) + ;[b, bSyncState] = Automerge.receiveSyncMessage(b, 
bSyncState, aToBmsg) } if (bToAmsg) { - [a, aSyncState] = Automerge.receiveSyncMessage(a, aSyncState, bToAmsg) + ;[a, aSyncState] = Automerge.receiveSyncMessage(a, aSyncState, bToAmsg) } if (i++ > MAX_ITER) { - throw new Error(`Did not synchronize within ${MAX_ITER} iterations. Do you have a bug causing an infinite loop?`) + throw new Error( + `Did not synchronize within ${MAX_ITER} iterations. Do you have a bug causing an infinite loop?` + ) } } while (aToBmsg || bToAmsg) return [a, b, aSyncState, bSyncState] } -describe('Data sync protocol', () => { - describe('with docs already in sync', () => { - describe('an empty local doc', () => { - it('should send a sync message implying no local data', () => { +describe("Data sync protocol", () => { + describe("with docs already in sync", () => { + describe("an empty local doc", () => { + it("should send a sync message implying no local data", () => { let n1 = Automerge.init() let s1 = initSyncState() let m1 @@ -50,28 +65,35 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(message.changes, []) }) - it('should not reply if we have no data as well', () => { - let n1 = Automerge.init(), n2 = Automerge.init() - let s1 = initSyncState(), s2 = initSyncState() - let m1: Automerge.SyncMessage | null = null, m2: Automerge.SyncMessage | null = null + it("should not reply if we have no data as well", () => { + let n1 = Automerge.init(), + n2 = Automerge.init() + let s1 = initSyncState(), + s2 = initSyncState() + let m1: Automerge.SyncMessage | null = null, + m2: Automerge.SyncMessage | null = null ;[s1, m1] = Automerge.generateSyncMessage(n1, s1) if (m1 != null) { - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) } ;[s2, m2] = Automerge.generateSyncMessage(n2, s2) assert.deepStrictEqual(m2, null) }) }) - describe('documents with data', () => { - it('repos with equal heads do not need a reply message', () => { - let n1 = Automerge.init(), n2 = 
Automerge.init() - let s1 = initSyncState(), s2 = initSyncState() - let m1: Automerge.SyncMessage | null = null, m2: Automerge.SyncMessage | null = null + describe("documents with data", () => { + it("repos with equal heads do not need a reply message", () => { + let n1 = Automerge.init(), + n2 = Automerge.init() + let s1 = initSyncState(), + s2 = initSyncState() + let m1: Automerge.SyncMessage | null = null, + m2: Automerge.SyncMessage | null = null // make two nodes with the same changes - n1 = Automerge.change(n1, {time: 0}, doc => doc.n = []) - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.n.push(i)) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n = [])) + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => doc.n.push(i)) ;[n2] = Automerge.applyChanges(n2, Automerge.getAllChanges(n1)) assert.deepStrictEqual(n1, n2) @@ -81,83 +103,95 @@ describe('Data sync protocol', () => { // heads are equal so this message should be null if (m1 != null) { - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) } ;[s2, m2] = Automerge.generateSyncMessage(n2, s2) assert.strictEqual(m2, null) }) - it('n1 should offer all changes to n2 when starting from nothing', () => { - let n1 = Automerge.init(), n2 = Automerge.init() + it("n1 should offer all changes to n2 when starting from nothing", () => { + let n1 = Automerge.init(), + n2 = Automerge.init() // make changes for n1 that n2 should request - n1 = Automerge.change(n1, {time: 0}, doc => doc.n = []) - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.n.push(i)) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n = [])) + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => doc.n.push(i)) assert.notDeepStrictEqual(n1, n2) const [after1, after2] = sync(n1, n2) assert.deepStrictEqual(after1, after2) }) - it('should sync peers where one has commits the other does 
not', () => { - let n1 = Automerge.init(), n2 = Automerge.init() + it("should sync peers where one has commits the other does not", () => { + let n1 = Automerge.init(), + n2 = Automerge.init() // make changes for n1 that n2 should request - n1 = Automerge.change(n1, {time: 0}, doc => doc.n = []) - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.n.push(i)) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n = [])) + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => doc.n.push(i)) assert.notDeepStrictEqual(n1, n2) ;[n1, n2] = sync(n1, n2) assert.deepStrictEqual(n1, n2) }) - it('should work with prior sync state', () => { + it("should work with prior sync state", () => { // create & synchronize two nodes - let n1 = Automerge.init(), n2 = Automerge.init() - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init(), + n2 = Automerge.init() + let s1 = initSyncState(), + s2 = initSyncState() - for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 5; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2) // modify the first node further - for (let i = 5; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 5; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) assert.notDeepStrictEqual(n1, n2) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(n1, n2) }) - it('should not generate messages once synced', () => { + it("should not generate messages once synced", () => { // create & synchronize two nodes - let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("abc123"), + n2 = Automerge.init("def456") + let s1 = initSyncState(), + s2 = initSyncState() let message - for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) - for (let i = 
0; i < 5; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.y = i) + for (let i = 0; i < 5; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 5; i++) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.y = i)) - // n1 reports what it has + // n1 reports what it has ;[s1, message] = Automerge.generateSyncMessage(n1, s1) // n2 receives that message and sends changes along with what it has - ;[n2, s2, ] = Automerge.receiveSyncMessage(n2, s2, message) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message) ;[s2, message] = Automerge.generateSyncMessage(n2, s2) assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) //assert.deepStrictEqual(patch, null) // no changes arrived // n1 receives the changes and replies with the changes it now knows n2 needs - ;[n1, s1, ] = Automerge.receiveSyncMessage(n1, s1, message) + ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, message) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) //assert.deepStrictEqual(patch.diffs.props, {y: {'5@def456': {type: 'value', value: 4, datatype: 'int'}}}) // changes arrived // n2 applies the changes and sends confirmation ending the exchange - ;[n2, s2, ] = Automerge.receiveSyncMessage(n2, s2, message) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message) ;[s2, message] = Automerge.generateSyncMessage(n2, s2) //assert.deepStrictEqual(patch.diffs.props, {x: {'5@abc123': {type: 'value', value: 4, datatype: 'int'}}}) // changes arrived // n1 receives the message and has nothing more to say - ;[n1, s1, ] = Automerge.receiveSyncMessage(n1, s1, message) + ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, message) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) assert.deepStrictEqual(message, null) //assert.deepStrictEqual(patch, null) // no changes arrived @@ -167,27 +201,38 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(message, null) }) - 
it('should allow simultaneous messages during synchronization', () => { + it("should allow simultaneous messages during synchronization", () => { // create & synchronize two nodes - let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') - let s1 = initSyncState(), s2 = initSyncState() - for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) - for (let i = 0; i < 5; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.y = i) - const head1 = getHeads(n1)[0], head2 = getHeads(n2)[0] + let n1 = Automerge.init("abc123"), + n2 = Automerge.init("def456") + let s1 = initSyncState(), + s2 = initSyncState() + for (let i = 0; i < 5; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 5; i++) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.y = i)) + const head1 = getHeads(n1)[0], + head2 = getHeads(n2)[0] // both sides report what they have but have no shared peer state let msg1to2, msg2to1 ;[s1, msg1to2] = Automerge.generateSyncMessage(n1, s1) ;[s2, msg2to1] = Automerge.generateSyncMessage(n2, s2) assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) - assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0) + assert.deepStrictEqual( + decodeSyncMessage(msg1to2).have[0].lastSync.length, + 0 + ) assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) - assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0) + assert.deepStrictEqual( + decodeSyncMessage(msg2to1).have[0].lastSync.length, + 0 + ) // n1 and n2 receives that message and update sync state but make no patch - ;[n1, s1, ] = Automerge.receiveSyncMessage(n1, s1, msg2to1) + ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) //assert.deepStrictEqual(patch1, null) // no changes arrived, so no patch - ;[n2, s2, ] = Automerge.receiveSyncMessage(n2, s2, msg1to2) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) //assert.deepStrictEqual(patch2, null) // no 
changes arrived, so no patch // now both reply with their local changes the other lacks @@ -198,15 +243,14 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) // both should now apply the changes and update the frontend - ;[n1, s1, ] = Automerge.receiveSyncMessage(n1, s1, msg2to1) + ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) assert.deepStrictEqual(getMissingDeps(n1), []) //assert.notDeepStrictEqual(patch1, null) - assert.deepStrictEqual(n1, {x: 4, y: 4}) - - ;[n2, s2, ] = Automerge.receiveSyncMessage(n2, s2, msg1to2) + assert.deepStrictEqual(n1, { x: 4, y: 4 }) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) assert.deepStrictEqual(getMissingDeps(n2), []) //assert.notDeepStrictEqual(patch2, null) - assert.deepStrictEqual(n2, {x: 4, y: 4}) + assert.deepStrictEqual(n2, { x: 4, y: 4 }) // The response acknowledges the changes received, and sends no further changes ;[s1, msg1to2] = Automerge.generateSyncMessage(n1, s1) @@ -215,8 +259,8 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) // After receiving acknowledgements, their shared heads should be equal - ;[n1, s1, ] = Automerge.receiveSyncMessage(n1, s1, msg2to1) - ;[n2, s2, ] = Automerge.receiveSyncMessage(n2, s2, msg1to2) + ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort()) assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort()) //assert.deepStrictEqual(patch1, null) @@ -229,47 +273,56 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(msg2to1, null) // If we make one more change, and start another sync, its lastSync should be updated - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 5) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 5)) ;[s1, msg1to2] = Automerge.generateSyncMessage(n1, s1) - 
assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()) + assert.deepStrictEqual( + decodeSyncMessage(msg1to2).have[0].lastSync, + [head1, head2].sort() + ) }) - it('should assume sent changes were recieved until we hear otherwise', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), message: Automerge.SyncMessage | null = null + it("should assume sent changes were recieved until we hear otherwise", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + message: Automerge.SyncMessage | null = null - n1 = Automerge.change(n1, {time: 0}, doc => doc.items = []) - ;[n1, n2, s1, ] = sync(n1, n2) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.items = [])) + ;[n1, n2, s1] = sync(n1, n2) - n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('x')) + n1 = Automerge.change(n1, { time: 0 }, doc => doc.items.push("x")) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) if (message != null) { - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) } - n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('y')) + n1 = Automerge.change(n1, { time: 0 }, doc => doc.items.push("y")) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) if (message != null) { - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) } - n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('z')) + n1 = Automerge.change(n1, { time: 0 }, doc => doc.items.push("z")) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) if (message != null) { - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) } }) - it('should work regardless of who initiates the exchange', () => { + it("should 
work regardless of who initiates the exchange", () => { // create & synchronize two nodes - let n1 = Automerge.init(), n2 = Automerge.init() - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init(), + n2 = Automerge.init() + let s1 = initSyncState(), + s2 = initSyncState() - for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 5; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) // modify the first node further - for (let i = 5; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 5; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) assert.notDeepStrictEqual(n1, n2) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) @@ -278,21 +331,24 @@ describe('Data sync protocol', () => { }) }) - describe('with diverged documents', () => { - it('should work without prior sync state', () => { + describe("with diverged documents", () => { + it("should work without prior sync state", () => { // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- c15 <-- c16 <-- c17 // lastSync is undefined. 
// create two peers both with divergent commits - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) - + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2] = sync(n1, n2) - for (let i = 10; i < 15; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) - for (let i = 15; i < 18; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.x = i) + for (let i = 10; i < 15; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 15; i < 18; i++) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = i)) assert.notDeepStrictEqual(n1, n2) ;[n1, n2] = sync(n1, n2) @@ -300,21 +356,26 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(n1, n2) }) - it('should work with prior sync state', () => { + it("should work with prior sync state", () => { // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- c15 <-- c16 <-- c17 // lastSync is c9. 
// create two peers both with divergent commits - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) - for (let i = 10; i < 15; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) - for (let i = 15; i < 18; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.x = i) + for (let i = 10; i < 15; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 15; i < 18; i++) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = i)) s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) @@ -324,27 +385,33 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(n1, n2) }) - it('should ensure non-empty state after sync', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + it("should ensure non-empty state after sync", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() - for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 3; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(s1.sharedHeads, getHeads(n1)) assert.deepStrictEqual(s2.sharedHeads, getHeads(n1)) }) - it('should re-sync after one node crashed with data loss', () => { + it("should re-sync after one node crashed with data loss", () => { // Scenario: (r) (n2) (n1) // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 // n2 has changes {c0, c1, c2}, n1's 
lastSync is c5, and n2's lastSync is c2. // we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() // n1 makes three changes, which we sync to n2 - for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 3; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) // save a copy of n2 as "r" to simulate recovering from crash @@ -352,38 +419,43 @@ describe('Data sync protocol', () => { ;[r, rSyncState] = [Automerge.clone(n2), s2] // sync another few commits - for (let i = 3; i < 6; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 3; i < 6; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) // everyone should be on the same page here assert.deepStrictEqual(getHeads(n1), getHeads(n2)) assert.deepStrictEqual(n1, n2) // now make a few more changes, then attempt to sync the fully-up-to-date n1 with the confused r - for (let i = 6; i < 9; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 6; i < 9; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) s1 = decodeSyncState(encodeSyncState(s1)) rSyncState = decodeSyncState(encodeSyncState(rSyncState)) assert.notDeepStrictEqual(getHeads(n1), getHeads(r)) assert.notDeepStrictEqual(n1, r) - assert.deepStrictEqual(n1, {x: 8}) - assert.deepStrictEqual(r, {x: 2}) + assert.deepStrictEqual(n1, { x: 8 }) + assert.deepStrictEqual(r, { x: 2 }) ;[n1, r, s1, rSyncState] = sync(n1, r, s1, rSyncState) assert.deepStrictEqual(getHeads(n1), getHeads(r)) assert.deepStrictEqual(n1, r) }) - it('should resync after one node experiences data loss without 
disconnecting', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + it("should resync after one node experiences data loss without disconnecting", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() // n1 makes three changes, which we sync to n2 - for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 3; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(getHeads(n1), getHeads(n2)) assert.deepStrictEqual(n1, n2) - let n2AfterDataLoss = Automerge.init('89abcdef') + let n2AfterDataLoss = Automerge.init("89abcdef") // "n2" now has no data, but n1 still thinks it does. Note we don't do // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting @@ -392,29 +464,35 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(n1, n2) }) - it('should handle changes concurrent to the last sync heads', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') - let s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() + it("should handle changes concurrent to the last sync heads", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef"), + n3 = Automerge.init("fedcba98") + let s12 = initSyncState(), + s21 = initSyncState(), + s23 = initSyncState(), + s32 = initSyncState() // Change 1 is known to all three nodes - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 1)) ;[n1, n2, s12, s21] = sync(n1, n2, s12, s21) ;[n2, n3, s23, s32] = sync(n2, n3, s23, s32) // Change 2 is known to n1 and n2 - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 2) + n1 = Automerge.change(n1, { time: 0 }, 
doc => (doc.x = 2)) ;[n1, n2, s12, s21] = sync(n1, n2, s12, s21) // Each of the three nodes makes one change (changes 3, 4, 5) - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 3) - n2 = Automerge.change(n2, {time: 0}, doc => doc.x = 4) - n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 5) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 3)) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = 4)) + n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 5)) // Apply n3's latest change to n2. If running in Node, turn the Uint8Array into a Buffer, to // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) let change = Automerge.getLastLocalChange(n3) - if (typeof Buffer === 'function' && change != null) change = Buffer.from(change) - ;[n2] = change && Automerge.applyChanges(n2, [change]) || [n2] + if (typeof Buffer === "function" && change != null) + change = Buffer.from(change) + ;[n2] = (change && Automerge.applyChanges(n2, [change])) || [n2] // Now sync n1 and n2. 
n3's change is concurrent to n1 and n2's last sync heads ;[n1, n2, s12, s21] = sync(n1, n2, s12, s21) @@ -422,12 +500,14 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(n1, n2) }) - it('should handle histories with lots of branching and merging', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 0) + it("should handle histories with lots of branching and merging", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef"), + n3 = Automerge.init("fedcba98") + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 0)) ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n1)!]) ;[n3] = Automerge.applyChanges(n3, [Automerge.getLastLocalChange(n1)!]) - n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 1) + n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 1)) // - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 // / \/ \/ \/ @@ -436,29 +516,29 @@ describe('Data sync protocol', () => { // \ / // ---------------------------------------------- n3c1 <----- for (let i = 1; i < 20; i++) { - n1 = Automerge.change(n1, {time: 0}, doc => doc.n1 = i) - n2 = Automerge.change(n2, {time: 0}, doc => doc.n2 = i) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n1 = i)) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.n2 = i)) const change1 = Automerge.getLastLocalChange(n1) const change2 = Automerge.getLastLocalChange(n2) ;[n1] = Automerge.applyChanges(n1, [change2!]) ;[n2] = Automerge.applyChanges(n2, [change1!]) } - let s1 = initSyncState(), s2 = initSyncState() + let s1 = initSyncState(), + s2 = initSyncState() ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) // Having n3's last change concurrent to the last sync heads forces us into the slower code path ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n3)!]) - n1 = Automerge.change(n1, {time: 0}, doc => doc.n1 = 'final') - n2 = 
Automerge.change(n2, {time: 0}, doc => doc.n2 = 'final') - + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n1 = "final")) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.n2 = "final")) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(getHeads(n1), getHeads(n2)) assert.deepStrictEqual(n1, n2) }) }) - describe('with false positives', () => { + describe("with false positives", () => { // NOTE: the following tests use brute force to search for Bloom filter false positives. The // tests make change hashes deterministic by fixing the actorId and change timestamp to be // constants. The loop that searches for false positives is then initialised such that it finds @@ -467,22 +547,36 @@ describe('Data sync protocol', () => { // then the false positive will no longer be the first loop iteration. The tests should still // pass because the loop will run until a false positive is found, but they will be slower. - it('should handle a false-positive head', () => { + it("should handle a false-positive head", () => { // Scenario: ,-- n1 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- n2 // where n2 is a false positive in the Bloom filter containing {n1}. // lastSync is c9. 
- let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2) - for (let i = 1; ; i++) { // search for false positive; see comment above - const n1up = Automerge.change(Automerge.clone(n1, {actor: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) - const n2up = Automerge.change(Automerge.clone(n2, {actor: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + for (let i = 1; ; i++) { + // search for false positive; see comment above + const n1up = Automerge.change( + Automerge.clone(n1, { actor: "01234567" }), + { time: 0 }, + doc => (doc.x = `${i} @ n1`) + ) + const n2up = Automerge.change( + Automerge.clone(n2, { actor: "89abcdef" }), + { time: 0 }, + doc => (doc.x = `${i} @ n2`) + ) if (new BloomFilter(getHeads(n1up)).containsHash(getHeads(n2up)[0])) { - n1 = n1up; n2 = n2up; break + n1 = n1up + n2 = n2up + break } } const allHeads = [...getHeads(n1), ...getHeads(n2)].sort() @@ -493,7 +587,7 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n2), allHeads) }) - describe('with a false-positive dependency', () => { + describe("with a false-positive dependency", () => { let n1, n2, s1, s2, n1hash2, n2hash2 beforeEach(() => { @@ -502,35 +596,57 @@ describe('Data sync protocol', () => { // `-- n2c1 <-- n2c2 // where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}. // lastSync is c9. 
- n1 = Automerge.init('01234567') - n2 = Automerge.init('89abcdef') + n1 = Automerge.init("01234567") + n2 = Automerge.init("89abcdef") s1 = initSyncState() s2 = initSyncState() - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, (doc: any) => doc.x = i) + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, (doc: any) => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2) let n1hash1, n2hash1 - for (let i = 29; ; i++) { // search for false positive; see comment above - const n1us1 = Automerge.change(Automerge.clone(n1, {actor: '01234567'}), {time: 0}, (doc: any) => doc.x = `${i} @ n1`) - const n2us1 = Automerge.change(Automerge.clone(n2, {actor: '89abcdef'}), {time: 0}, (doc: any) => doc.x = `${i} @ n2`) - n1hash1 = getHeads(n1us1)[0]; n2hash1 = getHeads(n2us1)[0] - const n1us2 = Automerge.change(n1us1, {time: 0}, (doc: any) => doc.x = 'final @ n1') - const n2us2 = Automerge.change(n2us1, {time: 0}, (doc: any) => doc.x = 'final @ n2') - n1hash2 = getHeads(n1us2)[0]; n2hash2 = getHeads(n2us2)[0] + for (let i = 29; ; i++) { + // search for false positive; see comment above + const n1us1 = Automerge.change( + Automerge.clone(n1, { actor: "01234567" }), + { time: 0 }, + (doc: any) => (doc.x = `${i} @ n1`) + ) + const n2us1 = Automerge.change( + Automerge.clone(n2, { actor: "89abcdef" }), + { time: 0 }, + (doc: any) => (doc.x = `${i} @ n2`) + ) + n1hash1 = getHeads(n1us1)[0] + n2hash1 = getHeads(n2us1)[0] + const n1us2 = Automerge.change( + n1us1, + { time: 0 }, + (doc: any) => (doc.x = "final @ n1") + ) + const n2us2 = Automerge.change( + n2us1, + { time: 0 }, + (doc: any) => (doc.x = "final @ n2") + ) + n1hash2 = getHeads(n1us2)[0] + n2hash2 = getHeads(n2us2)[0] if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) { - n1 = n1us2; n2 = n2us2; break + n1 = n1us2 + n2 = n2us2 + break } } }) - it('should sync two nodes without connection reset', () => { - [n1, n2, s1, s2] = sync(n1, n2, s1, s2) + it("should sync two nodes without 
connection reset", () => { + ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(getHeads(n1), [n1hash2, n2hash2].sort()) assert.deepStrictEqual(getHeads(n2), [n1hash2, n2hash2].sort()) }) // FIXME - this has a periodic failure - it('should sync two nodes with connection reset', () => { + it("should sync two nodes with connection reset", () => { s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) @@ -538,7 +654,7 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n2), [n1hash2, n2hash2].sort()) }) - it.skip('should sync three nodes', () => { + it.skip("should sync three nodes", () => { s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) @@ -558,37 +674,73 @@ describe('Data sync protocol', () => { assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent // n3 is a node that doesn't have the missing change. Nevertheless n1 is going to ask n3 for it - let n3 = Automerge.init('fedcba98'), s13 = initSyncState(), s31 = initSyncState() + let n3 = Automerge.init("fedcba98"), + s13 = initSyncState(), + s31 = initSyncState() ;[n1, n3, s13, s31] = sync(n1, n3, s13, s31) assert.deepStrictEqual(getHeads(n1), [n1hash2]) assert.deepStrictEqual(getHeads(n3), [n1hash2]) }) }) - it('should not require an additional request when a false-positive depends on a true-negative', () => { + it("should not require an additional request when a false-positive depends on a true-negative", () => { // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}. // lastSync is c4. 
- let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() let n1hash3, n2hash3 - for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 5; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2) - for (let i = 86; ; i++) { // search for false positive; see comment above - const n1us1 = Automerge.change(Automerge.clone(n1, {actor: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) - const n2us1 = Automerge.change(Automerge.clone(n2, {actor: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + for (let i = 86; ; i++) { + // search for false positive; see comment above + const n1us1 = Automerge.change( + Automerge.clone(n1, { actor: "01234567" }), + { time: 0 }, + doc => (doc.x = `${i} @ n1`) + ) + const n2us1 = Automerge.change( + Automerge.clone(n2, { actor: "89abcdef" }), + { time: 0 }, + doc => (doc.x = `${i} @ n2`) + ) const n1hash1 = getHeads(n1us1)[0] - const n1us2 = Automerge.change(n1us1, {time: 0}, doc => doc.x = `${i + 1} @ n1`) - const n2us2 = Automerge.change(n2us1, {time: 0}, doc => doc.x = `${i + 1} @ n2`) - const n1hash2 = getHeads(n1us2)[0], n2hash2 = getHeads(n2us2)[0] - const n1up3 = Automerge.change(n1us2, {time: 0}, doc => doc.x = 'final @ n1') - const n2up3 = Automerge.change(n2us2, {time: 0}, doc => doc.x = 'final @ n2') - n1hash3 = getHeads(n1up3)[0]; n2hash3 = getHeads(n2up3)[0] - if (new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2)) { - n1 = n1up3; n2 = n2up3; break + const n1us2 = Automerge.change( + n1us1, + { time: 0 }, + doc => (doc.x = `${i + 1} @ n1`) + ) + const n2us2 = Automerge.change( + n2us1, + { time: 0 }, + doc => (doc.x = `${i + 1} @ n2`) + ) + const n1hash2 = getHeads(n1us2)[0], + n2hash2 = getHeads(n2us2)[0] + const n1up3 = 
Automerge.change( + n1us2, + { time: 0 }, + doc => (doc.x = "final @ n1") + ) + const n2up3 = Automerge.change( + n2us2, + { time: 0 }, + doc => (doc.x = "final @ n2") + ) + n1hash3 = getHeads(n1up3)[0] + n2hash3 = getHeads(n2up3)[0] + if ( + new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2) + ) { + n1 = n1up3 + n2 = n2up3 + break } } const bothHeads = [n1hash3, n2hash3].sort() @@ -599,31 +751,46 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n2), bothHeads) }) - it('should handle chains of false-positives', () => { + it("should handle chains of false-positives", () => { // Scenario: ,-- c5 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. // lastSync is c4. - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() - for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 5; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 5) - for (let i = 2; ; i++) { // search for false positive; see comment above - const n2us1 = Automerge.change(Automerge.clone(n2, {actor: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 5)) + for (let i = 2; ; i++) { + // search for false positive; see comment above + const n2us1 = Automerge.change( + Automerge.clone(n2, { actor: "89abcdef" }), + { time: 0 }, + doc => (doc.x = `${i} @ n2`) + ) if (new BloomFilter(getHeads(n1)).containsHash(getHeads(n2us1)[0])) { - n2 = n2us1; break + n2 = n2us1 + break } } - for (let i = 141; ; i++) { // search for false positive; see comment above - const n2us2 = 
Automerge.change(Automerge.clone(n2, {actor: '89abcdef'}), {time: 0}, doc => doc.x = `${i} again`) + for (let i = 141; ; i++) { + // search for false positive; see comment above + const n2us2 = Automerge.change( + Automerge.clone(n2, { actor: "89abcdef" }), + { time: 0 }, + doc => (doc.x = `${i} again`) + ) if (new BloomFilter(getHeads(n1)).containsHash(getHeads(n2us2)[0])) { - n2 = n2us2; break + n2 = n2us2 + break } } - n2 = Automerge.change(n2, {time: 0}, doc => doc.x = 'final @ n2') + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = "final @ n2")) const allHeads = [...getHeads(n1), ...getHeads(n2)].sort() s1 = decodeSyncState(encodeSyncState(s1)) @@ -633,32 +800,46 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n2), allHeads) }) - it('should allow the false-positive hash to be explicitly requested', () => { + it("should allow the false-positive hash to be explicitly requested", () => { // Scenario: ,-- n1 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- n2 // where n2 causes a false positive in the Bloom filter containing {n1}. 
- let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() let message - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2) s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) - for (let i = 1; ; i++) { // brute-force search for false positive; see comment above - const n1up = Automerge.change(Automerge.clone(n1, {actor: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) - const n2up = Automerge.change(Automerge.clone(n2, {actor: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + for (let i = 1; ; i++) { + // brute-force search for false positive; see comment above + const n1up = Automerge.change( + Automerge.clone(n1, { actor: "01234567" }), + { time: 0 }, + doc => (doc.x = `${i} @ n1`) + ) + const n2up = Automerge.change( + Automerge.clone(n2, { actor: "89abcdef" }), + { time: 0 }, + doc => (doc.x = `${i} @ n2`) + ) // check if the bloom filter on n2 will believe n1 already has a particular hash // this will mean n2 won't offer that data to n2 by receiving a sync message from n1 if (new BloomFilter(getHeads(n1up)).containsHash(getHeads(n2up)[0])) { - n1 = n1up; n2 = n2up; break + n1 = n1up + n2 = n2up + break } } // n1 creates a sync message for n2 with an ill-fated bloom - [s1, message] = Automerge.generateSyncMessage(n1, s1) + ;[s1, message] = Automerge.generateSyncMessage(n1, s1) assert.strictEqual(decodeSyncMessage(message).changes.length, 0) // n2 receives it and DOESN'T send a change back @@ -682,32 +863,42 @@ describe('Data sync protocol', () => { }) }) - describe('protocol features', () => { - it('should allow multiple Bloom filters', () => { + describe("protocol features", () => { 
+ it("should allow multiple Bloom filters", () => { // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 // c0 <-- c1 <-- c2 <-+--- n2c1 <-- n2c2 <-- n2c3 // `-- n3c1 <-- n3c2 <-- n3c3 // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef"), + n3 = Automerge.init("76543210") let s13 = initSyncState() - let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() + let s32 = initSyncState(), + s31 = initSyncState(), + s23 = initSyncState() let message1, message2, message3 - for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) - // sync all 3 nodes - ;[n1, n2, , ] = sync(n1, n2) // eslint-disable-line no-unused-vars -- kept for consistency + for (let i = 0; i < 3; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + // sync all 3 nodes + ;[n1, n2, ,] = sync(n1, n2) // eslint-disable-line no-unused-vars -- kept for consistency ;[n1, n3, s13, s31] = sync(n1, n3) ;[n3, n2, s32, s23] = sync(n3, n2) - for (let i = 0; i < 2; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = `${i} @ n1`) - for (let i = 0; i < 2; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.x = `${i} @ n2`) + for (let i = 0; i < 2; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = `${i} @ n1`)) + for (let i = 0; i < 2; i++) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = `${i} @ n2`)) ;[n1] = Automerge.applyChanges(n1, Automerge.getAllChanges(n2)) ;[n2] = Automerge.applyChanges(n2, Automerge.getAllChanges(n1)) - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = `3 @ n1`) - n2 = Automerge.change(n2, {time: 0}, doc => doc.x = `3 @ n2`) - for (let i = 0; i < 3; i++) n3 = Automerge.change(n3, {time: 0}, doc => doc.x = `${i} @ n3`) - const n1c3 = getHeads(n1)[0], n2c3 
= getHeads(n2)[0], n3c3 = getHeads(n3)[0] + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = `3 @ n1`)) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = `3 @ n2`)) + for (let i = 0; i < 3; i++) + n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = `${i} @ n3`)) + const n1c3 = getHeads(n1)[0], + n2c3 = getHeads(n2)[0], + n3c3 = getHeads(n3)[0] s13 = decodeSyncState(encodeSyncState(s13)) s31 = decodeSyncState(encodeSyncState(s31)) s23 = decodeSyncState(encodeSyncState(s23)) @@ -729,7 +920,11 @@ describe('Data sync protocol', () => { const modifiedMessage = decodeSyncMessage(message3) modifiedMessage.have.push(decodeSyncMessage(message1).have[0]) assert.strictEqual(modifiedMessage.changes.length, 0) - ;[n2, s23] = Automerge.receiveSyncMessage(n2, s23, encodeSyncMessage(modifiedMessage)) + ;[n2, s23] = Automerge.receiveSyncMessage( + n2, + s23, + encodeSyncMessage(modifiedMessage) + ) // n2 replies to n3, sending only n2c3 (the one change that n2 has but n1 doesn't) ;[s23, message2] = Automerge.generateSyncMessage(n2, s23) @@ -743,55 +938,76 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n3), [n1c3, n2c3, n3c3].sort()) }) - it('should allow any change to be requested', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + it("should allow any change to be requested", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() let message: Automerge.SyncMessage | null = null - for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 3; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) const lastSync = getHeads(n1) - for (let i = 3; i < 6; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) - + for (let i = 3; i < 6; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, 
n2) s1.lastSentHeads = [] // force generateSyncMessage to return a message even though nothing changed ;[s1, message] = Automerge.generateSyncMessage(n1, s1) const modMsg = decodeSyncMessage(message!) modMsg.need = lastSync // re-request change 2 - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, encodeSyncMessage(modMsg)) + ;[n2, s2] = Automerge.receiveSyncMessage( + n2, + s2, + encodeSyncMessage(modMsg) + ) ;[s1, message] = Automerge.generateSyncMessage(n2, s2) assert.strictEqual(decodeSyncMessage(message!).changes.length, 1) - assert.strictEqual(Automerge.decodeChange(decodeSyncMessage(message!).changes[0]).hash, lastSync[0]) + assert.strictEqual( + Automerge.decodeChange(decodeSyncMessage(message!).changes[0]).hash, + lastSync[0] + ) }) - it('should ignore requests for a nonexistent change', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + it("should ignore requests for a nonexistent change", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() let message: Automerge.SyncMessage | null = null - for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 3; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n2] = Automerge.applyChanges(n2, Automerge.getAllChanges(n1)) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) const decoded = Automerge.decodeSyncMessage(message!) - decoded.need = ['0000000000000000000000000000000000000000000000000000000000000000'] + decoded.need = [ + "0000000000000000000000000000000000000000000000000000000000000000", + ] message = Automerge.encodeSyncMessage(decoded) ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message!) 
;[s2, message] = Automerge.generateSyncMessage(n2, s2) assert.strictEqual(message, null) }) - it('should allow a subset of changes to be sent', () => { + it("should allow a subset of changes to be sent", () => { // ,-- c1 <-- c2 // c0 <-+ // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef"), + n3 = Automerge.init("76543210") + let s1 = initSyncState(), + s2 = initSyncState() let msg, decodedMsg - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 0) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 0)) n3 = Automerge.merge(n3, n1) - for (let i = 1; i <= 2; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) // n1 has {c0, c1, c2} - for (let i = 3; i <= 4; i++) n3 = Automerge.change(n3, {time: 0}, doc => doc.x = i) // n3 has {c0, c3, c4} - const c2 = getHeads(n1)[0], c4 = getHeads(n3)[0] + for (let i = 1; i <= 2; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) // n1 has {c0, c1, c2} + for (let i = 3; i <= 4; i++) + n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = i)) // n3 has {c0, c3, c4} + const c2 = getHeads(n1)[0], + c4 = getHeads(n3)[0] n2 = Automerge.merge(n2, n3) // n2 has {c0, c3, c4} // Sync n1 and n2, so their shared heads are {c2, c4} @@ -802,11 +1018,13 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(s2.sharedHeads, [c2, c4].sort()) // n2 and n3 apply {c5, c6, c7, c8} - n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 5) + n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 5)) const change5 = Automerge.getLastLocalChange(n3) - n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 6) - const change6 = Automerge.getLastLocalChange(n3), c6 = getHeads(n3)[0] - for (let i = 7; i <= 8; i++) n3 = Automerge.change(n3, {time: 0}, doc => doc.x = i) + n3 = Automerge.change(n3, { time: 0 }, 
doc => (doc.x = 6)) + const change6 = Automerge.getLastLocalChange(n3), + c6 = getHeads(n3)[0] + for (let i = 7; i <= 8; i++) + n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = i)) const c8 = getHeads(n3)[0] n2 = Automerge.merge(n2, n3) @@ -829,7 +1047,10 @@ describe('Data sync protocol', () => { ;[s1, msg] = Automerge.generateSyncMessage(n1, s1) ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg) assert.deepStrictEqual(decodeSyncMessage(msg).need, [c8]) - assert.deepStrictEqual(decodeSyncMessage(msg).have[0].lastSync, [c2, c6].sort()) + assert.deepStrictEqual( + decodeSyncMessage(msg).have[0].lastSync, + [c2, c6].sort() + ) assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) assert.deepStrictEqual(s2.sharedHeads, [c2, c6].sort()) diff --git a/javascript/test/text_test.ts b/javascript/test/text_test.ts index dd66e108..076e20b2 100644 --- a/javascript/test/text_test.ts +++ b/javascript/test/text_test.ts @@ -1,34 +1,34 @@ -import * as assert from 'assert' -import * as Automerge from '../src' -import { assertEqualsOneOf } from './helpers' +import * as assert from "assert" +import * as Automerge from "../src" +import { assertEqualsOneOf } from "./helpers" type DocType = { text: string [key: string]: any } -describe('Automerge.Text', () => { +describe("Automerge.Text", () => { let s1: Automerge.Doc, s2: Automerge.Doc beforeEach(() => { - s1 = Automerge.change(Automerge.init(), doc => doc.text = "") + s1 = Automerge.change(Automerge.init(), doc => (doc.text = "")) s2 = Automerge.merge(Automerge.init(), s1) }) - it('should support insertion', () => { + it("should support insertion", () => { s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "a")) assert.strictEqual(s1.text.length, 1) - assert.strictEqual(s1.text[0], 'a') - assert.strictEqual(s1.text, 'a') + assert.strictEqual(s1.text[0], "a") + assert.strictEqual(s1.text, "a") //assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`) }) - it('should support deletion', () => 
{ + it("should support deletion", () => { s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "abc")) s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 1, 1)) assert.strictEqual(s1.text.length, 2) - assert.strictEqual(s1.text[0], 'a') - assert.strictEqual(s1.text[1], 'c') - assert.strictEqual(s1.text, 'ac') + assert.strictEqual(s1.text[0], "a") + assert.strictEqual(s1.text[1], "c") + assert.strictEqual(s1.text, "ac") }) it("should support implicit and explicit deletion", () => { @@ -41,70 +41,71 @@ describe('Automerge.Text', () => { assert.strictEqual(s1.text, "ac") }) - it('should handle concurrent insertion', () => { + it("should handle concurrent insertion", () => { s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "abc")) s2 = Automerge.change(s2, doc => Automerge.splice(doc, "text", 0, 0, "xyz")) s1 = Automerge.merge(s1, s2) assert.strictEqual(s1.text.length, 6) - assertEqualsOneOf(s1.text, 'abcxyz', 'xyzabc') + assertEqualsOneOf(s1.text, "abcxyz", "xyzabc") }) - it('should handle text and other ops in the same change', () => { + it("should handle text and other ops in the same change", () => { s1 = Automerge.change(s1, doc => { - doc.foo = 'bar' - Automerge.splice(doc, "text", 0, 0, 'a') + doc.foo = "bar" + Automerge.splice(doc, "text", 0, 0, "a") }) - assert.strictEqual(s1.foo, 'bar') - assert.strictEqual(s1.text, 'a') - assert.strictEqual(s1.text, 'a') + assert.strictEqual(s1.foo, "bar") + assert.strictEqual(s1.text, "a") + assert.strictEqual(s1.text, "a") }) - it('should serialize to JSON as a simple string', () => { + it("should serialize to JSON as a simple string", () => { s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, 'a"b')) assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}') }) - it('should allow modification after an object is assigned to a document', () => { + it("should allow modification after an object is assigned to a document", () => { s1 = 
Automerge.change(Automerge.init(), doc => { doc.text = "" - Automerge.splice(doc ,"text", 0, 0, 'abcd') - Automerge.splice(doc ,"text", 2, 1) - assert.strictEqual(doc.text, 'abd') + Automerge.splice(doc, "text", 0, 0, "abcd") + Automerge.splice(doc, "text", 2, 1) + assert.strictEqual(doc.text, "abd") }) - assert.strictEqual(s1.text, 'abd') + assert.strictEqual(s1.text, "abd") }) - it('should not allow modification outside of a change callback', () => { - assert.throws(() => Automerge.splice(s1 ,"text", 0, 0, 'a'), /object cannot be modified outside of a change block/) + it("should not allow modification outside of a change callback", () => { + assert.throws( + () => Automerge.splice(s1, "text", 0, 0, "a"), + /object cannot be modified outside of a change block/ + ) }) - describe('with initial value', () => { - - it('should initialize text in Automerge.from()', () => { - let s1 = Automerge.from({text: 'init'}) + describe("with initial value", () => { + it("should initialize text in Automerge.from()", () => { + let s1 = Automerge.from({ text: "init" }) assert.strictEqual(s1.text.length, 4) - assert.strictEqual(s1.text[0], 'i') - assert.strictEqual(s1.text[1], 'n') - assert.strictEqual(s1.text[2], 'i') - assert.strictEqual(s1.text[3], 't') - assert.strictEqual(s1.text, 'init') + assert.strictEqual(s1.text[0], "i") + assert.strictEqual(s1.text[1], "n") + assert.strictEqual(s1.text[2], "i") + assert.strictEqual(s1.text[3], "t") + assert.strictEqual(s1.text, "init") }) - it('should encode the initial value as a change', () => { - const s1 = Automerge.from({text: 'init'}) + it("should encode the initial value as a change", () => { + const s1 = Automerge.from({ text: "init" }) const changes = Automerge.getAllChanges(s1) assert.strictEqual(changes.length, 1) const [s2] = Automerge.applyChanges(Automerge.init(), changes) - assert.strictEqual(s2.text, 'init') - assert.strictEqual(s2.text, 'init') + assert.strictEqual(s2.text, "init") + assert.strictEqual(s2.text, "init") }) - 
}) - it('should support unicode when creating text', () => { + it("should support unicode when creating text", () => { s1 = Automerge.from({ - text: '🐦' + text: "🐦", }) - assert.strictEqual(s1.text, '🐦') + assert.strictEqual(s1.text, "🐦") }) }) diff --git a/javascript/test/uuid_test.ts b/javascript/test/uuid_test.ts index 4182a8c4..f6a0bde4 100644 --- a/javascript/test/uuid_test.ts +++ b/javascript/test/uuid_test.ts @@ -1,20 +1,20 @@ -import * as assert from 'assert' -import * as Automerge from '../src' +import * as assert from "assert" +import * as Automerge from "../src" const uuid = Automerge.uuid -describe('uuid', () => { +describe("uuid", () => { afterEach(() => { uuid.reset() }) - describe('default implementation', () => { - it('generates unique values', () => { + describe("default implementation", () => { + it("generates unique values", () => { assert.notEqual(uuid(), uuid()) }) }) - describe('custom implementation', () => { + describe("custom implementation", () => { let counter function customUuid() { @@ -22,11 +22,11 @@ describe('uuid', () => { } before(() => uuid.setFactory(customUuid)) - beforeEach(() => counter = 0) + beforeEach(() => (counter = 0)) - it('invokes the custom factory', () => { - assert.equal(uuid(), 'custom-uuid-0') - assert.equal(uuid(), 'custom-uuid-1') + it("invokes the custom factory", () => { + assert.equal(uuid(), "custom-uuid-0") + assert.equal(uuid(), "custom-uuid-1") }) }) }) diff --git a/javascript/tsconfig.json b/javascript/tsconfig.json index 8e934416..c6684ca0 100644 --- a/javascript/tsconfig.json +++ b/javascript/tsconfig.json @@ -1,22 +1,19 @@ { - "compilerOptions": { - "target": "es2016", - "sourceMap": false, - "declaration": true, - "resolveJsonModule": true, - "module": "commonjs", - "moduleResolution": "node", - "noImplicitAny": false, - "allowSyntheticDefaultImports": true, - "forceConsistentCasingInFileNames": true, - "strict": true, - "noFallthroughCasesInSwitch": true, - "skipLibCheck": true, - "outDir": "./dist" 
- }, - "include": [ "src/**/*", "test/**/*" ], - "exclude": [ - "./dist/**/*", - "./node_modules" - ] + "compilerOptions": { + "target": "es2016", + "sourceMap": false, + "declaration": true, + "resolveJsonModule": true, + "module": "commonjs", + "moduleResolution": "node", + "noImplicitAny": false, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "strict": true, + "noFallthroughCasesInSwitch": true, + "skipLibCheck": true, + "outDir": "./dist" + }, + "include": ["src/**/*", "test/**/*"], + "exclude": ["./dist/**/*", "./node_modules"] } diff --git a/javascript/typedoc-readme.md b/javascript/typedoc-readme.md index 05025ac1..258b9e20 100644 --- a/javascript/typedoc-readme.md +++ b/javascript/typedoc-readme.md @@ -74,24 +74,32 @@ import * as automerge from "@automerge/automerge" import * as assert from "assert" let doc = automerge.from({ - "key1": "value1" + key1: "value1", }) // Make a clone of the document at this point, maybe this is actually on another // peer. 
-let doc2 = automerge.clone(doc) +let doc2 = automerge.clone < any > doc let heads = automerge.getHeads(doc) -doc = automerge.change(doc, d => { +doc = + automerge.change < + any > + (doc, + d => { d.key2 = "value2" -}) + }) -doc = automerge.change(doc, d => { +doc = + automerge.change < + any > + (doc, + d => { d.key3 = "value3" -}) + }) -// At this point we've generated two separate changes, now we want to send +// At this point we've generated two separate changes, now we want to send // just those changes to someone else // view is a cheap reference based copy of a document at a given set of heads @@ -99,18 +107,18 @@ let before = automerge.view(doc, heads) // This view doesn't show the last two changes in the document state assert.deepEqual(before, { - key1: "value1" + key1: "value1", }) // Get the changes to send to doc2 let changes = automerge.getChanges(before, doc) // Apply the changes at doc2 -doc2 = automerge.applyChanges(doc2, changes)[0] +doc2 = automerge.applyChanges < any > (doc2, changes)[0] assert.deepEqual(doc2, { - key1: "value1", - key2: "value2", - key3: "value3" + key1: "value1", + key2: "value2", + key3: "value3", }) ``` @@ -126,23 +134,22 @@ generateSyncMessage}. When we receive a message from the peer we call {@link receiveSyncMessage}. Here's a simple example of a loop which just keeps two peers in sync. 
- ```javascript let sync1 = automerge.initSyncState() let msg: Uint8Array | null -[sync1, msg] = automerge.generateSyncMessage(doc1, sync1) +;[sync1, msg] = automerge.generateSyncMessage(doc1, sync1) while (true) { - if (msg != null) { - network.send(msg) - } - let resp: Uint8Array = network.receive() - [doc1, sync1, _ignore] = automerge.receiveSyncMessage(doc1, sync1, resp) - [sync1, msg] = automerge.generateSyncMessage(doc1, sync1) + if (msg != null) { + network.send(msg) + } + let resp: Uint8Array = + (network.receive()[(doc1, sync1, _ignore)] = + automerge.receiveSyncMessage(doc1, sync1, resp)[(sync1, msg)] = + automerge.generateSyncMessage(doc1, sync1)) } ``` - ## Conflicts The only time conflicts occur in automerge documents is in concurrent @@ -187,8 +194,7 @@ By default automerge will generate a random actor ID for you, but most methods for creating a document allow you to set the actor ID. You can get the actor ID associated with the document by calling {@link getActorId}. Actor IDs must not be used in concurrent threads of executiong - all changes by a given actor ID -are expected to be sequential. - +are expected to be sequential. ## Listening to patches @@ -203,18 +209,18 @@ document which you have two pointers to. For example, in this code: ```javascript let doc1 = automerge.init() -let doc2 = automerge.change(doc1, d => d.key = "value") +let doc2 = automerge.change(doc1, d => (d.key = "value")) ``` `doc1` and `doc2` are both pointers to the same state. 
Any attempt to call mutating methods on `doc1` will now result in an error like Attempting to change an out of date document - + If you encounter this you need to clone the original document, the above sample would work as: ```javascript let doc1 = automerge.init() -let doc2 = automerge.change(automerge.clone(doc1), d => d.key = "value") +let doc2 = automerge.change(automerge.clone(doc1), d => (d.key = "value")) ``` diff --git a/scripts/ci/fmt_js b/scripts/ci/fmt_js new file mode 100755 index 00000000..acaf1e08 --- /dev/null +++ b/scripts/ci/fmt_js @@ -0,0 +1,5 @@ +#!/usr/bin/env bash +set -eoux pipefail + +yarn --cwd javascript prettier -c . + diff --git a/scripts/ci/run b/scripts/ci/run index db3f1aaf..aebfe4c4 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -2,6 +2,7 @@ set -eou pipefail ./scripts/ci/fmt +./scripts/ci/fmt_js ./scripts/ci/lint ./scripts/ci/build-test ./scripts/ci/rust-docs From 0306ade93903800332fb539c5ba826b537b0cb00 Mon Sep 17 00:00:00 2001 From: Alex Currie-Clark Date: Fri, 6 Jan 2023 12:47:23 +0000 Subject: [PATCH 674/730] Update action name on `IncPatch` type --- rust/automerge-wasm/index.d.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust/automerge-wasm/index.d.ts b/rust/automerge-wasm/index.d.ts index 0e0c38e6..06399f0a 100644 --- a/rust/automerge-wasm/index.d.ts +++ b/rust/automerge-wasm/index.d.ts @@ -104,7 +104,7 @@ export type PutPatch = { } export type IncPatch = { - action: 'put' + action: 'inc' path: Prop[], value: number } From 18a3f617043fd53bd05fdea96ff5d079a8654509 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 10 Jan 2023 12:14:30 +0000 Subject: [PATCH 675/730] Update rust toolchain to 1.66 --- .github/workflows/ci.yaml | 12 ++++++------ rust/automerge-c/build.rs | 2 +- rust/automerge-cli/src/examine_sync.rs | 2 +- rust/automerge-cli/src/export.rs | 2 +- rust/automerge-cli/src/main.rs | 6 +++--- rust/automerge-wasm/src/interop.rs | 4 ++-- rust/automerge-wasm/src/lib.rs | 3 --- 
rust/automerge/src/automerge/tests.rs | 2 +- rust/automerge/src/columnar/column_range/obj_id.rs | 2 +- rust/automerge/src/lib.rs | 1 - .../src/storage/change/change_op_columns.rs | 2 +- rust/automerge/src/storage/chunk.rs | 2 +- .../automerge/src/storage/document/doc_op_columns.rs | 2 +- rust/automerge/src/sync/bloom.rs | 2 +- 14 files changed, 20 insertions(+), 24 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 361320a0..a5d42010 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -14,7 +14,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.64.0 + toolchain: 1.66.0 default: true components: rustfmt - uses: Swatinem/rust-cache@v1 @@ -28,7 +28,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.64.0 + toolchain: 1.66.0 default: true components: clippy - uses: Swatinem/rust-cache@v1 @@ -42,7 +42,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.64.0 + toolchain: 1.66.0 default: true - uses: Swatinem/rust-cache@v1 - name: Build rust docs @@ -118,7 +118,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.64.0 + toolchain: 1.66.0 default: true - uses: Swatinem/rust-cache@v1 - name: Install CMocka @@ -157,7 +157,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.64.0 + toolchain: 1.66.0 default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test @@ -170,7 +170,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.64.0 + toolchain: 1.66.0 default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test diff --git a/rust/automerge-c/build.rs b/rust/automerge-c/build.rs index 00fd0f87..bf12a105 100644 --- a/rust/automerge-c/build.rs +++ b/rust/automerge-c/build.rs @@ -10,7 +10,7 @@ fn main() { let config = cbindgen::Config::from_file("cbindgen.toml") .expect("Unable to find cbindgen.toml configuration file"); - if 
let Ok(writer) = cbindgen::generate_with_config(&crate_dir, config) { + if let Ok(writer) = cbindgen::generate_with_config(crate_dir, config) { // \note CMake sets this environment variable before invoking Cargo so // that it can direct the generated header file into its // out-of-source build directory for post-processing. diff --git a/rust/automerge-cli/src/examine_sync.rs b/rust/automerge-cli/src/examine_sync.rs index ad6699d4..c0d5df97 100644 --- a/rust/automerge-cli/src/examine_sync.rs +++ b/rust/automerge-cli/src/examine_sync.rs @@ -28,7 +28,7 @@ pub(crate) fn examine_sync( .map_err(ExamineSyncError::ReadMessage)?; let message = automerge::sync::Message::decode(&buf)?; - let json = serde_json::to_value(&message).unwrap(); + let json = serde_json::to_value(message).unwrap(); if is_tty { print_colored_json(&json).map_err(ExamineSyncError::WriteMessage)?; } else { diff --git a/rust/automerge-cli/src/export.rs b/rust/automerge-cli/src/export.rs index 2a7b4130..45fd7b3b 100644 --- a/rust/automerge-cli/src/export.rs +++ b/rust/automerge-cli/src/export.rs @@ -30,7 +30,7 @@ fn list_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value { let len = doc.length(obj); let mut array = Vec::new(); for i in 0..len { - let val = doc.get(obj, i as usize); + let val = doc.get(obj, i); match val { Ok(Some((am::Value::Object(o), exid))) if o == am::ObjType::Map || o == am::ObjType::Table => diff --git a/rust/automerge-cli/src/main.rs b/rust/automerge-cli/src/main.rs index b0b456c8..8f3f816d 100644 --- a/rust/automerge-cli/src/main.rs +++ b/rust/automerge-cli/src/main.rs @@ -132,7 +132,7 @@ enum Command { fn open_file_or_stdin(maybe_path: Option) -> Result> { if std::io::stdin().is_terminal() { if let Some(path) = maybe_path { - Ok(Box::new(File::open(&path).unwrap())) + Ok(Box::new(File::open(path).unwrap())) } else { Err(anyhow!( "Must provide file path if not providing input via stdin" @@ -146,7 +146,7 @@ fn open_file_or_stdin(maybe_path: Option) -> Result) -> 
Result> { if std::io::stdout().is_terminal() { if let Some(path) = maybe_path { - Ok(Box::new(File::create(&path).unwrap())) + Ok(Box::new(File::create(path).unwrap())) } else { Err(anyhow!("Must provide file path if not piping to stdout")) } @@ -166,7 +166,7 @@ fn main() -> Result<()> { skip_verifying_heads, } => { let output: Box = if let Some(output_file) = output_file { - Box::new(File::create(&output_file)?) + Box::new(File::create(output_file)?) } else { Box::new(std::io::stdout()) }; diff --git a/rust/automerge-wasm/src/interop.rs b/rust/automerge-wasm/src/interop.rs index 20b42bf1..540722df 100644 --- a/rust/automerge-wasm/src/interop.rs +++ b/rust/automerge-wasm/src/interop.rs @@ -589,9 +589,9 @@ impl Automerge { let array = Array::new(); for i in 0..len { let val_and_id = if let Some(heads) = heads { - self.doc.get_at(obj, i as usize, heads) + self.doc.get_at(obj, i, heads) } else { - self.doc.get(obj, i as usize) + self.doc.get(obj, i) }; if let Ok(Some((val, id))) = val_and_id { let subval = match val { diff --git a/rust/automerge-wasm/src/lib.rs b/rust/automerge-wasm/src/lib.rs index ce57f66f..e6f5bed8 100644 --- a/rust/automerge-wasm/src/lib.rs +++ b/rust/automerge-wasm/src/lib.rs @@ -9,7 +9,6 @@ rust_2018_idioms, unreachable_pub, bad_style, - const_err, dead_code, improper_ctypes, non_shorthand_field_patterns, @@ -264,7 +263,6 @@ impl Automerge { datatype: JsValue, ) -> Result<(), error::Insert> { let (obj, _) = self.import(obj)?; - let index = index as f64; let value = self .import_scalar(&value, &datatype.as_string()) .ok_or(error::Insert::ValueNotPrimitive)?; @@ -280,7 +278,6 @@ impl Automerge { value: JsValue, ) -> Result, error::InsertObject> { let (obj, _) = self.import(obj)?; - let index = index as f64; let imported_obj = import_obj(&value, &None)?; let opid = self .doc diff --git a/rust/automerge/src/automerge/tests.rs b/rust/automerge/src/automerge/tests.rs index 050b1fa9..7eadaedd 100644 --- a/rust/automerge/src/automerge/tests.rs +++ 
b/rust/automerge/src/automerge/tests.rs @@ -1368,7 +1368,7 @@ fn get_path_to_object() { ] ); assert_eq!( - doc.path_to_object(&text).unwrap(), + doc.path_to_object(text).unwrap(), vec![ (ROOT, Prop::Map("a".into())), (map, Prop::Map("b".into())), diff --git a/rust/automerge/src/columnar/column_range/obj_id.rs b/rust/automerge/src/columnar/column_range/obj_id.rs index 6a3e2ef0..d282563e 100644 --- a/rust/automerge/src/columnar/column_range/obj_id.rs +++ b/rust/automerge/src/columnar/column_range/obj_id.rs @@ -166,7 +166,7 @@ impl ObjIdEncoder { } convert::ObjId::Op(o) => { self.actor.append_value(o.actor() as u64); - self.counter.append_value(o.counter() as u64); + self.counter.append_value(o.counter()); } } } diff --git a/rust/automerge/src/lib.rs b/rust/automerge/src/lib.rs index b8604c95..97ff0650 100644 --- a/rust/automerge/src/lib.rs +++ b/rust/automerge/src/lib.rs @@ -8,7 +8,6 @@ rust_2018_idioms, unreachable_pub, bad_style, - const_err, dead_code, improper_ctypes, non_shorthand_field_patterns, diff --git a/rust/automerge/src/storage/change/change_op_columns.rs b/rust/automerge/src/storage/change/change_op_columns.rs index c50c67ae..7c3a65ec 100644 --- a/rust/automerge/src/storage/change/change_op_columns.rs +++ b/rust/automerge/src/storage/change/change_op_columns.rs @@ -177,7 +177,7 @@ impl ChangeOpsColumns { obj.append(op.obj()); key.append(op.key()); insert.append(op.insert()); - action.append_value(op.action() as u64); + action.append_value(op.action()); val.append(&op.val()); pred.append(op.pred()); } diff --git a/rust/automerge/src/storage/chunk.rs b/rust/automerge/src/storage/chunk.rs index 821c2c55..06e31973 100644 --- a/rust/automerge/src/storage/chunk.rs +++ b/rust/automerge/src/storage/chunk.rs @@ -258,7 +258,7 @@ impl Header { Header { checksum: checksum_bytes.into(), chunk_type, - data_len: data.len() as usize, + data_len: data.len(), header_size: header.len(), hash, }, diff --git a/rust/automerge/src/storage/document/doc_op_columns.rs 
b/rust/automerge/src/storage/document/doc_op_columns.rs index 5f61dff8..82de17eb 100644 --- a/rust/automerge/src/storage/document/doc_op_columns.rs +++ b/rust/automerge/src/storage/document/doc_op_columns.rs @@ -116,7 +116,7 @@ impl DocOpColumns { let key = KeyRange::encode(ops.clone().map(|o| o.key()), out); let id = OpIdRange::encode(ops.clone().map(|o| o.id()), out); let insert = BooleanRange::encode(ops.clone().map(|o| o.insert()), out); - let action = RleRange::encode(ops.clone().map(|o| Some(o.action() as u64)), out); + let action = RleRange::encode(ops.clone().map(|o| Some(o.action())), out); let val = ValueRange::encode(ops.clone().map(|o| o.val()), out); let succ = OpIdListRange::encode(ops.map(|o| o.succ()), out); Self { diff --git a/rust/automerge/src/sync/bloom.rs b/rust/automerge/src/sync/bloom.rs index c02acbc0..8523061e 100644 --- a/rust/automerge/src/sync/bloom.rs +++ b/rust/automerge/src/sync/bloom.rs @@ -126,7 +126,7 @@ impl BloomFilter { let num_entries = hashes.len() as u32; let num_bits_per_entry = BITS_PER_ENTRY; let num_probes = NUM_PROBES; - let bits = vec![0; bits_capacity(num_entries, num_bits_per_entry) as usize]; + let bits = vec![0; bits_capacity(num_entries, num_bits_per_entry)]; let mut filter = Self { num_entries, num_bits_per_entry, From 5763210b079edf2de53fd337590a26d6bb775f53 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 21 Dec 2022 17:42:33 +0000 Subject: [PATCH 676/730] wasm: Allow a choice of text representations The wasm codebase assumed that clients want to represent text as a string of characters. This is faster, but in order to enable backwards compatibility we add a `TextRepresentation` argument to `automerge_wasm::Automerge::new` to allow clients to choose between a `string` or `Array` representation. The `automerge_wasm::Observer` will consult this setting to determine what kind of diffs to generate. 
--- javascript/src/index.ts | 4 +- javascript/src/low_level.ts | 4 +- javascript/test/basic_test.ts | 2 +- .../test/ported_wasm/basic_tests.c | 25 -- rust/automerge-wasm/deno-tests/deno.ts | 2 +- rust/automerge-wasm/index.d.ts | 8 +- rust/automerge-wasm/src/interop.rs | 116 +++++-- rust/automerge-wasm/src/lib.rs | 141 +++++++-- rust/automerge-wasm/src/observer.rs | 55 +++- rust/automerge-wasm/test/apply.ts | 22 +- rust/automerge-wasm/test/readme.ts | 34 +- rust/automerge-wasm/test/test.ts | 294 ++++++++++++------ rust/automerge/src/op_observer.rs | 7 + rust/automerge/src/transaction/inner.rs | 20 +- rust/automerge/tests/test.rs | 4 +- 15 files changed, 510 insertions(+), 228 deletions(-) diff --git a/javascript/src/index.ts b/javascript/src/index.ts index 23df47ce..a5b3a0bb 100644 --- a/javascript/src/index.ts +++ b/javascript/src/index.ts @@ -177,7 +177,7 @@ export function init(_opts?: ActorId | InitOptions): Doc { const opts = importOpts(_opts) const freeze = !!opts.freeze const patchCallback = opts.patchCallback - const handle = ApiHandler.create(opts.actor) + const handle = ApiHandler.create(true, opts.actor) handle.enablePatches(true) handle.enableFreeze(!!opts.freeze) handle.registerDatatype("counter", n => new Counter(n)) @@ -460,7 +460,7 @@ export function load( const opts = importOpts(_opts) const actor = opts.actor const patchCallback = opts.patchCallback - const handle = ApiHandler.load(data, actor) + const handle = ApiHandler.load(data, true, actor) handle.enablePatches(true) handle.enableFreeze(!!opts.freeze) handle.registerDatatype("counter", n => new Counter(n)) diff --git a/javascript/src/low_level.ts b/javascript/src/low_level.ts index 51017cb3..94ac63db 100644 --- a/javascript/src/low_level.ts +++ b/javascript/src/low_level.ts @@ -20,10 +20,10 @@ export function UseApi(api: API) { /* eslint-disable */ export const ApiHandler: API = { - create(actor?: Actor): Automerge { + create(textV2: boolean, actor?: Actor): Automerge { throw new 
RangeError("Automerge.use() not called") }, - load(data: Uint8Array, actor?: Actor): Automerge { + load(data: Uint8Array, textV2: boolean, actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called (load)") }, encodeChange(change: ChangeToEncode): Change { diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts index 8bf30914..c14c0e20 100644 --- a/javascript/test/basic_test.ts +++ b/javascript/test/basic_test.ts @@ -237,7 +237,7 @@ describe("Automerge", () => { }) it("handle non-text strings", () => { - let doc1 = WASM.create() + let doc1 = WASM.create(true) doc1.put("_root", "text", "hello world") let doc2 = Automerge.load(doc1.save()) assert.throws(() => { diff --git a/rust/automerge-c/test/ported_wasm/basic_tests.c b/rust/automerge-c/test/ported_wasm/basic_tests.c index 4b275300..e2659d62 100644 --- a/rust/automerge-c/test/ported_wasm/basic_tests.c +++ b/rust/automerge-c/test/ported_wasm/basic_tests.c @@ -757,30 +757,6 @@ static void test_should_be_able_to_splice_text(void** state) { assert_memory_equal(str.src, "?", str.count); } -/** - * \brief should NOT be able to insert objects into text - */ -static void test_should_be_unable_to_insert_objects_into_text(void** state) { - AMresultStack* stack = *state; - /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* const text = doc.putObject("/", "text", "Hello world"); */ - AMobjId const* const text = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMspliceText(doc, text, 0, 0, AMstr("Hello world"))); - /* assert.throws(() => { - doc.insertObject(text, 6, { hello: "world" }); - }) */ - AMpush(&stack, - AMlistPutObject(doc, text, 6, true, AM_OBJ_TYPE_MAP), - AM_VALUE_VOID, - NULL); - assert_int_not_equal(AMresultStatus(stack->result), AM_STATUS_OK); -} - /** * \brief should be able to save all or incrementally */ @@ -1848,7 +1824,6 @@ 
int run_ported_wasm_basic_tests(void) { cmocka_unit_test_setup_teardown(test_should_be_able_to_del, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_should_be_able_to_use_counters, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_unable_to_insert_objects_into_text, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_should_be_able_to_save_all_or_incrementally, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text_2, setup_stack, teardown_stack), cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_map, setup_stack, teardown_stack), diff --git a/rust/automerge-wasm/deno-tests/deno.ts b/rust/automerge-wasm/deno-tests/deno.ts index 1b4c2e07..b346435a 100644 --- a/rust/automerge-wasm/deno-tests/deno.ts +++ b/rust/automerge-wasm/deno-tests/deno.ts @@ -2,7 +2,7 @@ import { create } from '../deno/automerge_wasm.js' Deno.test("It should create, clone and free", () => { - const doc1 = create() + const doc1 = create(false) const doc2 = doc1.clone() doc2.free() }); diff --git a/rust/automerge-wasm/index.d.ts b/rust/automerge-wasm/index.d.ts index 06399f0a..29586b47 100644 --- a/rust/automerge-wasm/index.d.ts +++ b/rust/automerge-wasm/index.d.ts @@ -121,9 +121,9 @@ export type SplicePatch = { values: Value[], } -export function create(actor?: Actor): Automerge; -export function load(data: Uint8Array, actor?: Actor): Automerge; export function encodeChange(change: ChangeToEncode): Change; +export function create(text_v2: boolean, actor?: Actor): Automerge; +export function load(data: Uint8Array, text_v2: boolean, actor?: Actor): Automerge; export function decodeChange(change: Change): DecodedChange; export function initSyncState(): SyncState; export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; @@ -134,8 +134,8 @@ export 
function exportSyncState(state: SyncState): JsSyncState; export function importSyncState(state: JsSyncState): SyncState; export interface API { - create(actor?: Actor): Automerge; - load(data: Uint8Array, actor?: Actor): Automerge; + create(text_v2: boolean, actor?: Actor): Automerge; + load(data: Uint8Array, text_v2: boolean, actor?: Actor): Automerge; encodeChange(change: ChangeToEncode): Change; decodeChange(change: Change): DecodedChange; initSyncState(): SyncState; diff --git a/rust/automerge-wasm/src/interop.rs b/rust/automerge-wasm/src/interop.rs index 540722df..2881209a 100644 --- a/rust/automerge-wasm/src/interop.rs +++ b/rust/automerge-wasm/src/interop.rs @@ -1,11 +1,12 @@ use crate::error::InsertObject; use crate::value::Datatype; -use crate::Automerge; +use crate::{Automerge, TextRepresentation}; use automerge as am; use automerge::transaction::Transactable; use automerge::ROOT; use automerge::{Change, ChangeHash, ObjType, Prop}; use js_sys::{Array, Function, JsString, Object, Reflect, Symbol, Uint8Array}; +use std::borrow::Cow; use std::collections::{BTreeSet, HashSet}; use std::fmt::Display; use wasm_bindgen::prelude::*; @@ -445,11 +446,32 @@ impl JsObjType { } } - pub(crate) fn subvals(&self) -> &[(Prop, JsValue)] { + pub(crate) fn subvals(&self) -> impl Iterator, JsValue)> + '_ + Clone { match self { - Self::Text(_) => &[], - Self::Map(sub) => sub.as_slice(), - Self::List(sub) => sub.as_slice(), + Self::Text(s) => SubValIter::Str(s.chars().enumerate()), + Self::Map(sub) => SubValIter::Slice(sub.as_slice().iter()), + Self::List(sub) => SubValIter::Slice(sub.as_slice().iter()), + } + } +} + +#[derive(Debug, Clone)] +pub(crate) enum SubValIter<'a> { + Slice(std::slice::Iter<'a, (Prop, JsValue)>), + Str(std::iter::Enumerate>), +} + +impl<'a> Iterator for SubValIter<'a> { + type Item = (std::borrow::Cow<'a, Prop>, JsValue); + + fn next(&mut self) -> Option { + match self { + Self::Slice(i) => i + .next() + .map(|(p, v)| (std::borrow::Cow::Borrowed(p), 
v.clone())), + Self::Str(i) => i + .next() + .map(|(n, c)| (std::borrow::Cow::Owned(Prop::Seq(n)), c.to_string().into())), } } } @@ -536,13 +558,18 @@ impl Automerge { meta: &JsValue, ) -> Result { let result = match datatype { - Datatype::Text => { - if let Some(heads) = heads { - self.doc.text_at(obj, heads)?.into() - } else { - self.doc.text(obj)?.into() + Datatype::Text => match self.text_rep { + TextRepresentation::String => { + if let Some(heads) = heads { + self.doc.text_at(obj, heads)?.into() + } else { + self.doc.text(obj)?.into() + } } - } + TextRepresentation::Array => self + .wrap_object(self.export_list(obj, heads, meta)?, datatype, obj, meta)? + .into(), + }, Datatype::List => self .wrap_object(self.export_list(obj, heads, meta)?, datatype, obj, meta)? .into(), @@ -570,7 +597,7 @@ impl Automerge { if let Ok(Some((val, id))) = val_and_id { let subval = match val { Value::Object(o) => self.export_object(&id, o.into(), heads, meta)?, - Value::Scalar(_) => self.export_value(alloc(&val))?, + Value::Scalar(_) => self.export_value(alloc(&val, self.text_rep))?, }; js_set(&map, &k, &subval)?; }; @@ -596,7 +623,7 @@ impl Automerge { if let Ok(Some((val, id))) = val_and_id { let subval = match val { Value::Object(o) => self.export_object(&id, o.into(), heads, meta)?, - Value::Scalar(_) => self.export_value(alloc(&val))?, + Value::Scalar(_) => self.export_value(alloc(&val, self.text_rep))?, }; array.push(&subval); }; @@ -699,7 +726,9 @@ impl Automerge { } else { value }; - if matches!(datatype, Datatype::Map | Datatype::List) { + if matches!(datatype, Datatype::Map | Datatype::List) + || (datatype == Datatype::Text && self.text_rep == TextRepresentation::Array) + { set_hidden_value( &value, &Symbol::for_(RAW_OBJECT_SYMBOL), @@ -733,7 +762,8 @@ impl Automerge { exposed.insert(value.1.clone()); js_set(&result, *index as f64, &JsValue::null())?; } else { - let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; + let sub_val = + 
self.maybe_wrap_object(alloc(&value.0, self.text_rep), &value.1, meta)?; js_set(&result, *index as f64, &sub_val)?; } Ok(result.into()) @@ -752,7 +782,11 @@ impl Automerge { if let Some(old) = old_val.as_f64() { let new_value: Value<'_> = am::ScalarValue::counter(old as i64 + *value).into(); - js_set(&result, index, &self.export_value(alloc(&new_value))?)?; + js_set( + &result, + index, + &self.export_value(alloc(&new_value, self.text_rep))?, + )?; Ok(result.into()) } else { Err(error::ApplyPatch::IncrementNonNumeric) @@ -763,8 +797,28 @@ impl Automerge { } Patch::DeleteMap { .. } => Err(error::ApplyPatch::DeleteKeyFromSeq), Patch::PutMap { .. } => Err(error::ApplyPatch::PutKeyInSeq), - //Patch::SpliceText { .. } => Err(to_js_err("cannot splice text in seq")), - Patch::SpliceText { .. } => Err(error::ApplyPatch::SpliceTextInSeq), + Patch::SpliceText { index, value, .. } => { + match self.text_rep { + TextRepresentation::String => Err(error::ApplyPatch::SpliceTextInSeq), + TextRepresentation::Array => { + let bytes: Vec = value.iter().cloned().collect(); + let val = String::from_utf16_lossy(bytes.as_slice()); + let elems = val + .chars() + .map(|c| { + ( + Value::Scalar(std::borrow::Cow::Owned(am::ScalarValue::Str( + c.to_string().into(), + ))), + ObjId::Root, // Using ROOT is okay because this ID is never used as + // we're producing ScalarValue::Str + ) + }) + .collect::>(); + Ok(self.sub_splice(result, *index, 0, &elems, meta)?) 
+ } + } + } } } @@ -784,7 +838,8 @@ impl Automerge { exposed.insert(value.1.clone()); js_set(&result, key, &JsValue::null())?; } else { - let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?; + let sub_val = + self.maybe_wrap_object(alloc(&value.0, self.text_rep), &value.1, meta)?; js_set(&result, key, &sub_val)?; } Ok(result) @@ -805,7 +860,11 @@ impl Automerge { if let Some(old) = old_val.as_f64() { let new_value: Value<'_> = am::ScalarValue::counter(old as i64 + *value).into(); - js_set(&result, key, &self.export_value(alloc(&new_value))?)?; + js_set( + &result, + key, + &self.export_value(alloc(&new_value, self.text_rep))?, + )?; Ok(result) } else { Err(error::ApplyPatch::IncrementNonNumeric) @@ -908,7 +967,7 @@ impl Automerge { ) -> Result { let args: Array = values .into_iter() - .map(|v| self.maybe_wrap_object(alloc(&v.0), &v.1, meta)) + .map(|v| self.maybe_wrap_object(alloc(&v.0, self.text_rep), &v.1, meta)) .collect::>()?; args.unshift(&(num_del as u32).into()); args.unshift(&(index as u32).into()); @@ -1054,7 +1113,13 @@ impl Automerge { Some(val) => Ok((val.into(), vec![])), None => { if let Ok(js_obj) = import_obj(value, &datatype) { - Ok((js_obj.objtype().into(), js_obj.subvals().to_vec())) + Ok(( + js_obj.objtype().into(), + js_obj + .subvals() + .map(|(p, v)| (p.into_owned(), v)) + .collect::>(), + )) } else { web_sys::console::log_2(&"Invalid value".into(), value); Err(error::InvalidValue) @@ -1093,13 +1158,16 @@ impl Automerge { } } -pub(crate) fn alloc(value: &Value<'_>) -> (Datatype, JsValue) { +pub(crate) fn alloc(value: &Value<'_>, text_rep: TextRepresentation) -> (Datatype, JsValue) { match value { am::Value::Object(o) => match o { ObjType::Map => (Datatype::Map, Object::new().into()), ObjType::Table => (Datatype::Table, Object::new().into()), ObjType::List => (Datatype::List, Array::new().into()), - ObjType::Text => (Datatype::Text, "".into()), + ObjType::Text => match text_rep { + TextRepresentation::String => 
(Datatype::Text, "".into()), + TextRepresentation::Array => (Datatype::Text, Array::new().into()), + }, }, am::Value::Scalar(s) => match s.as_ref() { am::ScalarValue::Bytes(v) => (Datatype::Bytes, Uint8Array::from(v.as_slice()).into()), diff --git a/rust/automerge-wasm/src/lib.rs b/rust/automerge-wasm/src/lib.rs index e6f5bed8..d6ccc8c8 100644 --- a/rust/automerge-wasm/src/lib.rs +++ b/rust/automerge-wasm/src/lib.rs @@ -27,10 +27,12 @@ #![allow(clippy::unused_unit)] use am::transaction::CommitOptions; use am::transaction::{Observed, Transactable, UnObserved}; +use am::ScalarValue; use automerge as am; use automerge::{Change, ObjId, Prop, TextEncoding, Value, ROOT}; use js_sys::{Array, Function, Object, Uint8Array}; use serde::ser::Serialize; +use std::borrow::Cow; use std::collections::HashMap; use std::collections::HashSet; use std::convert::TryInto; @@ -48,6 +50,8 @@ use interop::{alloc, get_heads, import_obj, js_set, to_js_err, to_prop, AR, JS}; use sync::SyncState; use value::Datatype; +use crate::interop::SubValIter; + #[allow(unused_macros)] macro_rules! 
log { ( $( $t:tt )* ) => { @@ -61,17 +65,37 @@ type AutoCommit = am::AutoCommitWithObs>; #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; +/// How text is represented in materialized objects on the JS side +#[derive(Debug, Eq, PartialEq, Clone, Copy)] +#[wasm_bindgen] +pub enum TextRepresentation { + /// As an array of characters and objects + Array, + /// As a single JS string + String, +} + +impl std::default::Default for TextRepresentation { + fn default() -> Self { + TextRepresentation::Array + } +} + #[wasm_bindgen] #[derive(Debug)] pub struct Automerge { doc: AutoCommit, freeze: bool, external_types: HashMap, + text_rep: TextRepresentation, } #[wasm_bindgen] impl Automerge { - pub fn new(actor: Option) -> Result { + pub fn new( + actor: Option, + text_rep: TextRepresentation, + ) -> Result { let mut doc = AutoCommit::default().with_encoding(TextEncoding::Utf16); if let Some(a) = actor { let a = automerge::ActorId::from(hex::decode(a)?.to_vec()); @@ -81,6 +105,7 @@ impl Automerge { doc, freeze: false, external_types: HashMap::default(), + text_rep, }) } @@ -90,6 +115,7 @@ impl Automerge { doc: self.doc.clone(), freeze: self.freeze, external_types: self.external_types.clone(), + text_rep: self.text_rep, }; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s)?.to_vec()); @@ -113,6 +139,7 @@ impl Automerge { doc, freeze: self.freeze, external_types: self.external_types.clone(), + text_rep: self.text_rep, }; if let Some(s) = actor { let actor = @@ -187,21 +214,27 @@ impl Automerge { let (obj, obj_type) = self.import(obj)?; let start = start as usize; let delete_count = delete_count as usize; - if let Some(t) = text.as_string() { - if obj_type == am::ObjType::Text { + let vals = if let Some(t) = text.as_string() { + if obj_type == am::ObjType::Text && self.text_rep == TextRepresentation::String { self.doc.splice_text(&obj, start, delete_count, &t)?; return Ok(()); + } else { + t.chars() + .map(|c| 
ScalarValue::Str(c.to_string().into())) + .collect::>() } - } - let mut vals = vec![]; - if let Ok(array) = text.dyn_into::() { - for (index, i) in array.iter().enumerate() { - let value = self - .import_scalar(&i, &None) - .ok_or(error::Splice::ValueNotPrimitive(index))?; - vals.push(value); + } else { + let mut vals = vec![]; + if let Ok(array) = text.dyn_into::() { + for (index, i) in array.iter().enumerate() { + let value = self + .import_scalar(&i, &None) + .ok_or(error::Splice::ValueNotPrimitive(index))?; + vals.push(value); + } } - } + vals + }; if !vals.is_empty() { self.doc.splice(&obj, start, delete_count, vals)?; } else { @@ -211,9 +244,14 @@ impl Automerge { am::ObjType::List => { self.doc.splice(&obj, start, delete_count, vals)?; } - am::ObjType::Text => { - self.doc.splice_text(&obj, start, delete_count, "")?; - } + am::ObjType::Text => match self.text_rep { + TextRepresentation::String => { + self.doc.splice_text(&obj, start, delete_count, "")?; + } + TextRepresentation::Array => { + self.doc.splice(&obj, start, delete_count, vals)?; + } + }, _ => {} } } @@ -248,9 +286,16 @@ impl Automerge { .doc .insert_object(&obj, index, imported_obj.objtype())?; if let Some(s) = imported_obj.text() { - self.doc.splice_text(&opid, 0, 0, s)?; + match self.text_rep { + TextRepresentation::String => { + self.doc.splice_text(&opid, 0, 0, s)?; + } + TextRepresentation::Array => { + self.subset::(&opid, imported_obj.subvals())?; + } + } } else { - self.subset::(&opid, imported_obj.subvals())?; + self.subset::(&opid, imported_obj.subvals())?; } Ok(opid.to_string().into()) } @@ -283,9 +328,16 @@ impl Automerge { .doc .insert_object(&obj, index as usize, imported_obj.objtype())?; if let Some(s) = imported_obj.text() { - self.doc.splice_text(&opid, 0, 0, s)?; + match self.text_rep { + TextRepresentation::String => { + self.doc.splice_text(&opid, 0, 0, s)?; + } + TextRepresentation::Array => { + self.subset::(&opid, imported_obj.subvals())?; + } + } } else { - 
self.subset::(&opid, imported_obj.subvals())?; + self.subset::(&opid, imported_obj.subvals())?; } Ok(opid.to_string().into()) } @@ -318,23 +370,31 @@ impl Automerge { let imported_obj = import_obj(&value, &None)?; let opid = self.doc.put_object(&obj, prop, imported_obj.objtype())?; if let Some(s) = imported_obj.text() { - self.doc.splice_text(&opid, 0, 0, s)?; + match self.text_rep { + TextRepresentation::String => { + self.doc.splice_text(&opid, 0, 0, s)?; + } + TextRepresentation::Array => { + self.subset::(&opid, imported_obj.subvals())?; + } + } } else { - self.subset::(&opid, imported_obj.subvals())?; + self.subset::(&opid, imported_obj.subvals())?; } Ok(opid.to_string().into()) } - fn subset(&mut self, obj: &am::ObjId, vals: &[(am::Prop, JsValue)]) -> Result<(), E> + fn subset<'a, E, I>(&mut self, obj: &am::ObjId, vals: I) -> Result<(), E> where + I: IntoIterator, JsValue)>, E: From + From + From, { for (p, v) in vals { - let (value, subvals) = self.import_value(v, None)?; + let (value, subvals) = self.import_value(v.as_ref(), None)?; //let opid = self.0.set(id, p, value)?; - let opid = match (p, value) { + let opid = match (p.as_ref(), value) { (Prop::Map(s), Value::Object(objtype)) => { Some(self.doc.put_object(obj, s, objtype)?) } @@ -351,7 +411,7 @@ impl Automerge { } }; if let Some(opid) = opid { - self.subset::(&opid, &subvals)?; + self.subset::(&opid, SubValIter::Slice(subvals.as_slice().iter()))?; } } Ok(()) @@ -387,7 +447,7 @@ impl Automerge { self.doc.get(&obj, prop)? 
}; if let Some((value, id)) = value { - match alloc(&value) { + match alloc(&value, self.text_rep) { (datatype, js_value) if datatype.is_scalar() => Ok(js_value), _ => Ok(id.to_string().into()), } @@ -425,7 +485,7 @@ impl Automerge { } (Value::Scalar(_), _) => { let result = Array::new(); - let (datatype, value) = alloc(&value.0); + let (datatype, value) = alloc(&value.0, self.text_rep); result.push(&datatype.into()); result.push(&value); Ok(result.into()) @@ -457,7 +517,7 @@ impl Automerge { }?; for (value, id) in values { let sub = Array::new(); - let (datatype, js_value) = alloc(&value); + let (datatype, js_value) = alloc(&value, self.text_rep); sub.push(&datatype.into()); if value.is_scalar() { sub.push(&js_value); @@ -485,6 +545,7 @@ impl Automerge { .as_bool() .ok_or_else(|| to_js_err("must pass a bool to enablePatches"))?; let old_enabled = self.doc.observer().enable(enable); + self.doc.observer().set_text_rep(self.text_rep); Ok(old_enabled.into()) } @@ -714,6 +775,7 @@ impl Automerge { let _patches = self.doc.observer().take_patches(); // throw away patches Ok(self.export_object(&obj, obj_type.into(), heads.as_ref(), &meta)?) 
} + #[wasm_bindgen(js_name = emptyChange)] pub fn empty_change(&mut self, message: Option, time: Option) -> JsValue { let time = time.map(|f| f as i64); @@ -724,16 +786,30 @@ impl Automerge { } #[wasm_bindgen(js_name = create)] -pub fn init(actor: Option) -> Result { +pub fn init(text_v2: bool, actor: Option) -> Result { console_error_panic_hook::set_once(); - Automerge::new(actor) + let text_rep = if text_v2 { + TextRepresentation::String + } else { + TextRepresentation::Array + }; + Automerge::new(actor, text_rep) } #[wasm_bindgen(js_name = load)] -pub fn load(data: Uint8Array, actor: Option) -> Result { +pub fn load( + data: Uint8Array, + text_v2: bool, + actor: Option, +) -> Result { let data = data.to_vec(); + let text_rep = if text_v2 { + TextRepresentation::String + } else { + TextRepresentation::Array + }; let mut doc = am::AutoCommitWithObs::::load(&data)? - .with_observer(Observer::default()) + .with_observer(Observer::default().with_text_rep(text_rep)) .with_encoding(TextEncoding::Utf16); if let Some(s) = actor { let actor = @@ -744,6 +820,7 @@ pub fn load(data: Uint8Array, actor: Option) -> Result, + text_rep: TextRepresentation, } impl Observer { @@ -33,6 +39,15 @@ impl Observer { } } } + + pub(crate) fn with_text_rep(mut self, text_rep: TextRepresentation) -> Self { + self.text_rep = text_rep; + self + } + + pub(crate) fn set_text_rep(&mut self, text_rep: TextRepresentation) { + self.text_rep = text_rep; + } } #[derive(Debug, Clone)] @@ -121,6 +136,20 @@ impl OpObserver for Observer { fn splice_text(&mut self, doc: &Automerge, obj: ObjId, index: usize, value: &str) { if self.enabled { + if self.text_rep == TextRepresentation::Array { + for (i, c) in value.chars().enumerate() { + self.insert( + doc, + obj.clone(), + index + i, + ( + Value::Scalar(Cow::Owned(ScalarValue::Str(c.to_string().into()))), + ObjId::Root, // We hope this is okay + ), + ); + } + return; + } if let Some(Patch::SpliceText { obj: tail_obj, index: tail_index, @@ -316,8 +345,13 @@ 
impl OpObserver for Observer { Observer { patches: vec![], enabled: self.enabled, + text_rep: self.text_rep, } } + + fn text_as_seq(&self) -> bool { + self.text_rep == TextRepresentation::Array + } } fn prop_to_js(p: &Prop) -> JsValue { @@ -377,7 +411,11 @@ impl TryFrom for JsValue { "path", export_path(path.as_slice(), &Prop::Map(key)), )?; - js_set(&result, "value", alloc(&value.0).1)?; + js_set( + &result, + "value", + alloc(&value.0, TextRepresentation::String).1, + )?; Ok(result.into()) } Patch::PutSeq { @@ -389,7 +427,11 @@ impl TryFrom for JsValue { "path", export_path(path.as_slice(), &Prop::Seq(index)), )?; - js_set(&result, "value", alloc(&value.0).1)?; + js_set( + &result, + "value", + alloc(&value.0, TextRepresentation::String).1, + )?; Ok(result.into()) } Patch::Insert { @@ -407,7 +449,10 @@ impl TryFrom for JsValue { js_set( &result, "values", - values.iter().map(|v| alloc(&v.0).1).collect::(), + values + .iter() + .map(|v| alloc(&v.0, TextRepresentation::String).1) + .collect::(), )?; Ok(result.into()) } diff --git a/rust/automerge-wasm/test/apply.ts b/rust/automerge-wasm/test/apply.ts index d4b8c95e..453b4c26 100644 --- a/rust/automerge-wasm/test/apply.ts +++ b/rust/automerge-wasm/test/apply.ts @@ -24,10 +24,10 @@ describe('Automerge', () => { describe('Patch Apply', () => { it('apply nested sets on maps', () => { const start = { hello: { mellow: { yellow: "world", x: 1 }, y : 2 } } - const doc1 = create() + const doc1 = create(true) doc1.putObject("/", "hello", start.hello); let mat = doc1.materialize("/") - const doc2 = create() + const doc2 = create(true) doc2.enablePatches(true) doc2.merge(doc1) @@ -47,10 +47,10 @@ describe('Automerge', () => { it('apply patches on lists', () => { const start = { list: [1,2,3,4] } - const doc1 = create() + const doc1 = create(true) doc1.putObject("/", "list", start.list); let mat = doc1.materialize("/") - const doc2 = create() + const doc2 = create(true) doc2.enablePatches(true) doc2.merge(doc1) mat = 
doc1.materialize("/") @@ -78,7 +78,7 @@ describe('Automerge', () => { ] ] } - const doc1 = create() + const doc1 = create(true) doc1.enablePatches(true) doc1.putObject("/", "list", start.list); let base = doc1.applyPatches({}) @@ -99,7 +99,7 @@ describe('Automerge', () => { }) it('large inserts should make one splice patch', () => { - const doc1 = create() + const doc1 = create(true) doc1.enablePatches(true) doc1.putObject("/", "list", "abc"); const patches = doc1.popPatches() @@ -109,7 +109,7 @@ describe('Automerge', () => { }) it('it should allow registering type wrappers', () => { - const doc1 = create() + const doc1 = create(true) doc1.enablePatches(true) doc1.registerDatatype("counter", (n: number) => new Counter(n)) const doc2 = doc1.fork() @@ -133,7 +133,7 @@ describe('Automerge', () => { }) it('text can be managed as an array or a string', () => { - const doc1 = create("aaaa") + const doc1 = create(true, "aaaa") doc1.enablePatches(true) doc1.putObject("/", "notes", "hello world") @@ -142,7 +142,7 @@ describe('Automerge', () => { assert.deepEqual( mat, { notes: "hello world" } ) - const doc2 = create() + const doc2 = create(true) let apply : any = doc2.materialize("/") doc2.enablePatches(true) apply = doc2.applyPatches(apply) @@ -163,7 +163,7 @@ describe('Automerge', () => { }) it('should set the OBJECT_ID property on lists, maps, and text objects and not on scalars', () => { - const doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') const mat: any = doc1.materialize("/") doc1.enablePatches(true) doc1.registerDatatype("counter", (n: number) => new Counter(n)) @@ -193,7 +193,7 @@ describe('Automerge', () => { }) it('should set the root OBJECT_ID to "_root"', () => { - const doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') const mat: any = doc1.materialize("/") assert.equal(_obj(mat), "_root") doc1.enablePatches(true) diff --git a/rust/automerge-wasm/test/readme.ts b/rust/automerge-wasm/test/readme.ts index 18c55055..e5823556 100644 --- 
a/rust/automerge-wasm/test/readme.ts +++ b/rust/automerge-wasm/test/readme.ts @@ -6,13 +6,13 @@ import { create, load, initSyncState } from '..' describe('Automerge', () => { describe('Readme Examples', () => { it('Using the Library and Creating a Document', () => { - const doc = create() + const doc = create(true) const sync = initSyncState() doc.free() sync.free() }) it('Automerge Scalar Types (1)', () => { - const doc = create() + const doc = create(true) doc.put("/", "prop1", 100) // int doc.put("/", "prop2", 3.14) // f64 doc.put("/", "prop3", "hello world") @@ -32,7 +32,7 @@ describe('Automerge', () => { }) }) it('Automerge Scalar Types (2)', () => { - const doc = create() + const doc = create(true) doc.put("/", "prop1", 100, "int") doc.put("/", "prop2", 100, "uint") doc.put("/", "prop3", 100.5, "f64") @@ -45,7 +45,7 @@ describe('Automerge', () => { doc.put("/", "prop10", null, "null") }) it('Automerge Object Types (1)', () => { - const doc = create() + const doc = create(true) // you can create an object by passing in the inital state - if blank pass in `{}` // the return value is the Object Id @@ -64,7 +64,7 @@ describe('Automerge', () => { const notes = doc.putObject("/", "notes", "Hello world!") }) it('Automerge Object Types (2)', () => { - const doc = create() + const doc = create(true) const config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) @@ -85,7 +85,7 @@ describe('Automerge', () => { }) }) it('Maps (1)', () => { - const doc = create() + const doc = create(true) const mymap = doc.putObject("_root", "mymap", { foo: "bar"}) // make a new map with the foo key @@ -99,7 +99,7 @@ describe('Automerge', () => { assert.deepEqual(doc.materialize("_root"), { mymap: { bytes: new Uint8Array([1,2,3]), foo: "bar", sub: {} }}) }) it('Lists (1)', () => { - const doc = create() + const doc = create(true) const items = doc.putObject("_root", "items", [10,"box"]) // init a new list with two elements doc.push(items, true) // 
push `true` to the end of the list @@ -113,14 +113,14 @@ describe('Automerge', () => { assert.deepEqual(doc.length(items),6) }) it('Text (1)', () => { - const doc = create("aaaaaa") + const doc = create(true, "aaaaaa") const notes = doc.putObject("_root", "notes", "Hello world") doc.splice(notes, 6, 5, "everyone") assert.deepEqual(doc.text(notes), "Hello everyone") }) it('Querying Data (1)', () => { - const doc1 = create("aabbcc") + const doc1 = create(true, "aabbcc") doc1.put("_root", "key1", "val1") const key2 = doc1.putObject("_root", "key2", []) @@ -140,7 +140,7 @@ describe('Automerge', () => { assert.deepEqual(doc1.getAll("_root","key3"),[[ "str", "doc1val", "3@aabbcc"], ["str", "doc2val", "3@ffaaff"]]) }) it('Counters (1)', () => { - const doc1 = create("aaaaaa") + const doc1 = create(true, "aaaaaa") doc1.put("_root", "number", 0) doc1.put("_root", "total", 0, "counter") @@ -156,7 +156,7 @@ describe('Automerge', () => { assert.deepEqual(doc1.materialize("_root"), { number: 10, total: 33 }) }) it('Transactions (1)', () => { - const doc = create() + const doc = create(true) doc.put("_root", "key", "val1") @@ -178,7 +178,7 @@ describe('Automerge', () => { assert.deepEqual(doc.pendingOps(),0) }) it('Viewing Old Versions of the Document (1)', () => { - const doc = create() + const doc = create(true) doc.put("_root", "key", "val1") const heads1 = doc.getHeads() @@ -194,7 +194,7 @@ describe('Automerge', () => { assert.deepEqual(doc.get("_root","key",[]), undefined) }) it('Forking And Merging (1)', () => { - const doc1 = create() + const doc1 = create(true) doc1.put("_root", "key1", "val1") const doc2 = doc1.fork() @@ -208,13 +208,13 @@ describe('Automerge', () => { assert.deepEqual(doc2.materialize("_root"), { key1: "val1", key3: "val3" }) }) it('Saving And Loading (1)', () => { - const doc1 = create() + const doc1 = create(true) doc1.put("_root", "key1", "value1") const save1 = doc1.save() - const doc2 = load(save1) + const doc2 = load(save1, true) 
doc2.materialize("_root") // returns { key1: "value1" } @@ -230,9 +230,9 @@ describe('Automerge', () => { doc2.loadIncremental(saveIncremental) - const doc3 = load(save2) + const doc3 = load(save2, true) - const doc4 = load(save3) + const doc4 = load(save3, true) assert.deepEqual(doc1.materialize("_root"), { key1: "value1", key2: "value2" }) assert.deepEqual(doc2.materialize("_root"), { key1: "value1", key2: "value2" }) diff --git a/rust/automerge-wasm/test/test.ts b/rust/automerge-wasm/test/test.ts index 70b56c55..56aaae74 100644 --- a/rust/automerge-wasm/test/test.ts +++ b/rust/automerge-wasm/test/test.ts @@ -4,6 +4,7 @@ import assert from 'assert' import { BloomFilter } from './helpers/sync' import { create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' import { Value, DecodedSyncMessage, Hash } from '..'; +import {kill} from 'process'; function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { const MAX_ITER = 10 @@ -29,25 +30,25 @@ describe('Automerge', () => { describe('basics', () => { it('should create, clone and free', () => { - const doc1 = create() + const doc1 = create(true) const doc2 = doc1.clone() doc2.free() }) it('should be able to start and commit', () => { - const doc = create() + const doc = create(true) doc.commit() }) it('getting a nonexistent prop does not throw an error', () => { - const doc = create() + const doc = create(true) const root = "_root" const result = doc.getWithType(root, "hello") assert.deepEqual(result, undefined) }) it('should be able to set and get a simple value', () => { - const doc: Automerge = create("aabbcc") + const doc: Automerge = create(true, "aabbcc") const root = "_root" let result @@ -105,7 +106,7 @@ describe('Automerge', () => { }) it('should be able to use bytes', () => { - const doc = create() + const doc = create(true) doc.put("_root", "data1", new 
Uint8Array([10, 11, 12])); doc.put("_root", "data2", new Uint8Array([13, 14, 15]), "bytes"); const value1 = doc.getWithType("_root", "data1") @@ -115,7 +116,7 @@ describe('Automerge', () => { }) it('should be able to make subobjects', () => { - const doc = create() + const doc = create(true) const root = "_root" let result @@ -131,7 +132,7 @@ describe('Automerge', () => { }) it('should be able to make lists', () => { - const doc = create() + const doc = create(true) const root = "_root" const sublist = doc.putObject(root, "numbers", []) @@ -153,7 +154,7 @@ describe('Automerge', () => { }) it('lists have insert, set, splice, and push ops', () => { - const doc = create() + const doc = create(true) const root = "_root" const sublist = doc.putObject(root, "letters", []) @@ -175,7 +176,7 @@ describe('Automerge', () => { }) it('should be able delete non-existent props', () => { - const doc = create() + const doc = create(true) doc.put("_root", "foo", "bar") doc.put("_root", "bip", "bap") @@ -195,7 +196,7 @@ describe('Automerge', () => { }) it('should be able to del', () => { - const doc = create() + const doc = create(true) const root = "_root" doc.put(root, "xxx", "xxx"); @@ -205,7 +206,7 @@ describe('Automerge', () => { }) it('should be able to use counters', () => { - const doc = create() + const doc = create(true) const root = "_root" doc.put(root, "counter", 10, "counter"); @@ -217,7 +218,7 @@ describe('Automerge', () => { }) it('should be able to splice text', () => { - const doc = create() + const doc = create(true) const root = "_root"; const text = doc.putObject(root, "text", ""); @@ -232,8 +233,8 @@ describe('Automerge', () => { assert.deepEqual(doc.getWithType(text, 12), ["str", "?"]) }) - it('should NOT be able to insert objects into text', () => { - const doc = create() + it.skip('should NOT be able to insert objects into text', () => { + const doc = create(true) const text = doc.putObject("/", "text", "Hello world"); assert.throws(() => { 
doc.insertObject(text, 6, { hello: "world" }); @@ -241,7 +242,7 @@ describe('Automerge', () => { }) it('should be able save all or incrementally', () => { - const doc = create() + const doc = create(true) doc.put("_root", "foo", 1) @@ -262,9 +263,9 @@ describe('Automerge', () => { assert.notDeepEqual(saveA, saveB); - const docA = load(saveA); - const docB = load(saveB); - const docC = load(saveMidway) + const docA = load(saveA, true); + const docB = load(saveB, true); + const docC = load(saveMidway, true) docC.loadIncremental(save3) assert.deepEqual(docA.keys("_root"), docB.keys("_root")); @@ -273,7 +274,7 @@ describe('Automerge', () => { }) it('should be able to splice text', () => { - const doc = create() + const doc = create(true) const text = doc.putObject("_root", "text", ""); doc.splice(text, 0, 0, "hello world"); const hash1 = doc.commit(); @@ -291,10 +292,10 @@ describe('Automerge', () => { }) it('local inc increments all visible counters in a map', () => { - const doc1 = create("aaaa") + const doc1 = create(true, "aaaa") doc1.put("_root", "hello", "world") - const doc2 = load(doc1.save(), "bbbb"); - const doc3 = load(doc1.save(), "cccc"); + const doc2 = load(doc1.save(), true, "bbbb"); + const doc3 = load(doc1.save(), true, "cccc"); const heads = doc1.getHeads() doc1.put("_root", "cnt", 20) doc2.put("_root", "cnt", 0, "counter") @@ -315,16 +316,16 @@ describe('Automerge', () => { ]) const save1 = doc1.save() - const doc4 = load(save1) + const doc4 = load(save1, true) assert.deepEqual(doc4.save(), save1); }) it('local inc increments all visible counters in a sequence', () => { - const doc1 = create("aaaa") + const doc1 = create(true, "aaaa") const seq = doc1.putObject("_root", "seq", []) doc1.insert(seq, 0, "hello") - const doc2 = load(doc1.save(), "bbbb"); - const doc3 = load(doc1.save(), "cccc"); + const doc2 = load(doc1.save(), true, "bbbb"); + const doc3 = load(doc1.save(), true, "cccc"); const heads = doc1.getHeads() doc1.put(seq, 0, 20) doc2.put(seq, 
0, 0, "counter") @@ -345,12 +346,12 @@ describe('Automerge', () => { ]) const save = doc1.save() - const doc4 = load(save) + const doc4 = load(save, true) assert.deepEqual(doc4.save(), save); }) it('paths can be used instead of objids', () => { - const doc = create("aaaa") + const doc = create(true, "aaaa") doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]]) assert.deepEqual(doc.materialize("/"), { list: [{ foo: "bar" }, [1, 2, 3]] }) assert.deepEqual(doc.materialize("/list"), [{ foo: "bar" }, [1, 2, 3]]) @@ -358,8 +359,8 @@ describe('Automerge', () => { }) it('should be able to fetch changes by hash', () => { - const doc1 = create("aaaa") - const doc2 = create("bbbb") + const doc1 = create(true, "aaaa") + const doc2 = create(true, "bbbb") doc1.put("/", "a", "b") doc2.put("/", "b", "c") const head1 = doc1.getHeads() @@ -372,7 +373,7 @@ describe('Automerge', () => { }) it('recursive sets are possible', () => { - const doc = create("aaaa") + const doc = create(true, "aaaa") const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]]) const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) doc.putObject("_root", "info1", "hello world") // 'text' object @@ -390,7 +391,7 @@ describe('Automerge', () => { }) it('only returns an object id when objects are created', () => { - const doc = create("aaaa") + const doc = create(true, "aaaa") const r1 = doc.put("_root", "foo", "bar") const r2 = doc.putObject("_root", "list", []) const r3 = doc.put("_root", "counter", 10, "counter") @@ -412,13 +413,13 @@ describe('Automerge', () => { }) it('objects without properties are preserved', () => { - const doc1 = create("aaaa") + const doc1 = create(true, "aaaa") const a = doc1.putObject("_root", "a", {}); const b = doc1.putObject("_root", "b", {}); const c = doc1.putObject("_root", "c", {}); doc1.put(c, "d", "dd"); const saved = doc1.save(); - const doc2 = load(saved); + const doc2 = load(saved, true); assert.deepEqual(doc2.getWithType("_root", "a"), ["map", a]) 
assert.deepEqual(doc2.keys(a), []) assert.deepEqual(doc2.getWithType("_root", "b"), ["map", b]) @@ -429,7 +430,7 @@ describe('Automerge', () => { }) it('should allow you to fork at a heads', () => { - const A = create("aaaaaa") + const A = create(true, "aaaaaa") A.put("/", "key1", "val1"); A.put("/", "key2", "val2"); const heads1 = A.getHeads(); @@ -444,7 +445,7 @@ describe('Automerge', () => { }) it('should handle merging text conflicts then saving & loading', () => { - const A = create("aabbcc") + const A = create(true, "aabbcc") const At = A.putObject('_root', 'text', "") A.splice(At, 0, 0, 'hello') @@ -461,7 +462,7 @@ describe('Automerge', () => { const binary = A.save() - const C = load(binary) + const C = load(binary, true) assert.deepEqual(C.getWithType('_root', 'text'), ['text', '1@aabbcc']) assert.deepEqual(C.text(At), 'hell! world') @@ -470,7 +471,7 @@ describe('Automerge', () => { describe('patch generation', () => { it('should include root object key updates', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.put('_root', 'hello', 'world') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -480,7 +481,7 @@ describe('Automerge', () => { }) it('should include nested object creation', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.putObject('_root', 'birds', { friday: { robins: 3 } }) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -492,7 +493,7 @@ describe('Automerge', () => { }) it('should delete map keys', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.put('_root', 'favouriteBird', 'Robin') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -505,7 +506,7 @@ describe('Automerge', () => { }) it('should include list element insertion', () => { 
- const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -516,7 +517,7 @@ describe('Automerge', () => { }) it('should insert nested maps into a list', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.putObject('_root', 'birds', []) doc2.loadIncremental(doc1.saveIncremental()) doc1.insertObject('1@aaaa', 0, { species: 'Goldfinch', count: 3 }) @@ -530,7 +531,7 @@ describe('Automerge', () => { }) it('should calculate list indexes based on visible elements', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) doc2.loadIncremental(doc1.saveIncremental()) doc1.delete('1@aaaa', 0) @@ -546,7 +547,7 @@ describe('Automerge', () => { }) it('should handle concurrent insertions at the head of a list', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') doc1.putObject('_root', 'values', []) const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) @@ -572,7 +573,7 @@ describe('Automerge', () => { }) it('should handle concurrent insertions beyond the head', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') doc1.putObject('_root', 'values', ['a', 'b']) const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) @@ -598,7 +599,7 @@ describe('Automerge', () => { }) it('should handle conflicts on root object 
keys', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') doc1.put('_root', 'bird', 'Greenfinch') doc2.put('_root', 'bird', 'Goldfinch') const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() @@ -620,7 +621,7 @@ describe('Automerge', () => { }) it('should handle three-way conflicts', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc') doc1.put('_root', 'bird', 'Greenfinch') doc2.put('_root', 'bird', 'Chaffinch') doc3.put('_root', 'bird', 'Goldfinch') @@ -654,7 +655,7 @@ describe('Automerge', () => { }) it('should allow a conflict to be resolved', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc') doc1.put('_root', 'bird', 'Greenfinch') doc2.put('_root', 'bird', 'Chaffinch') doc3.enablePatches(true) @@ -672,7 +673,7 @@ describe('Automerge', () => { }) it('should handle a concurrent map key overwrite and delete', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.put('_root', 'bird', 'Greenfinch') doc2.loadIncremental(doc1.saveIncremental()) doc1.put('_root', 'bird', 'Goldfinch') @@ -695,7 +696,7 @@ describe('Automerge', () => { }) it('should handle a conflict on a list element', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') doc1.putObject('_root', 'birds', ['Thrush', 'Magpie']) const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) @@ -722,7 +723,7 @@ 
describe('Automerge', () => { }) it('should handle a concurrent list element overwrite and delete', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') doc1.putObject('_root', 'birds', ['Parakeet', 'Magpie', 'Thrush']) const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) @@ -755,7 +756,7 @@ describe('Automerge', () => { }) it('should handle deletion of a conflict value', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc') doc1.put('_root', 'bird', 'Robin') doc2.put('_root', 'bird', 'Wren') const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() @@ -778,7 +779,7 @@ describe('Automerge', () => { }) it('should handle conflicting nested objects', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.putObject('_root', 'birds', ['Parakeet']) doc2.putObject('_root', 'birds', { 'Sparrowhawk': 1 }) const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() @@ -796,7 +797,7 @@ describe('Automerge', () => { }) it('should support date objects', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), now = new Date() + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), now = new Date() doc1.put('_root', 'createdAt', now) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -807,7 +808,7 @@ describe('Automerge', () => { }) it('should capture local put ops', () => { - const doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') doc1.enablePatches(true) doc1.put('_root', 'key1', 1) doc1.put('_root', 'key1', 2) @@ -825,7 +826,7 @@ describe('Automerge', () => { }) it('should capture local insert ops', () => { - const 
doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') doc1.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc1.insert(list, 0, 1) @@ -841,7 +842,7 @@ describe('Automerge', () => { }) it('should capture local push ops', () => { - const doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') doc1.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc1.push(list, 1) @@ -855,7 +856,7 @@ describe('Automerge', () => { }) it('should capture local splice ops', () => { - const doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') doc1.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc1.splice(list, 0, 0, [1, 2, 3, 4]) @@ -868,7 +869,7 @@ describe('Automerge', () => { }) it('should capture local increment ops', () => { - const doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') doc1.enablePatches(true) doc1.put('_root', 'counter', 2, 'counter') doc1.increment('_root', 'counter', 4) @@ -881,7 +882,7 @@ describe('Automerge', () => { it('should capture local delete ops', () => { - const doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') doc1.enablePatches(true) doc1.put('_root', 'key1', 1) doc1.put('_root', 'key2', 2) @@ -896,7 +897,7 @@ describe('Automerge', () => { }) it('should support counters in a map', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc2.enablePatches(true) doc1.put('_root', 'starlings', 2, 'counter') doc2.loadIncremental(doc1.saveIncremental()) @@ -910,7 +911,7 @@ describe('Automerge', () => { }) it('should support counters in a list', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc2.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc2.loadIncremental(doc1.saveIncremental()) @@ -934,7 +935,7 @@ describe('Automerge', () => { describe('sync', () => { it('should send a sync message implying no 
local data', () => { - const doc = create() + const doc = create(true) const s1 = initSyncState() const m1 = doc.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") } @@ -948,7 +949,7 @@ describe('Automerge', () => { }) it('should not reply if we have no data as well', () => { - const n1 = create(), n2 = create() + const n1 = create(true), n2 = create(true) const s1 = initSyncState(), s2 = initSyncState() const m1 = n1.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") } @@ -958,7 +959,7 @@ describe('Automerge', () => { }) it('repos with equal heads do not need a reply message', () => { - const n1 = create(), n2 = create() + const n1 = create(true), n2 = create(true) const s1 = initSyncState(), s2 = initSyncState() // make two nodes with the same changes @@ -983,7 +984,7 @@ describe('Automerge', () => { }) it('n1 should offer all changes to n2 when starting from nothing', () => { - const n1 = create(), n2 = create() + const n1 = create(true), n2 = create(true) // make changes for n1 that n2 should request const list = n1.putObject("_root", "n", []) @@ -999,7 +1000,7 @@ describe('Automerge', () => { }) it('should sync peers where one has commits the other does not', () => { - const n1 = create(), n2 = create() + const n1 = create(true), n2 = create(true) // make changes for n1 that n2 should request const list = n1.putObject("_root", "n", []) @@ -1016,7 +1017,7 @@ describe('Automerge', () => { it('should work with prior sync state', () => { // create & synchronize two nodes - const n1 = create(), n2 = create() + const n1 = create(true), n2 = create(true) const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -1039,7 +1040,7 @@ describe('Automerge', () => { it('should not generate messages once synced', () => { // create & synchronize two nodes - const n1 = create('abc123'), n2 = create('def456') + const n1 = create(true, 'abc123'), n2 = create(true, 
'def456') const s1 = initSyncState(), s2 = initSyncState() let message @@ -1087,7 +1088,7 @@ describe('Automerge', () => { it('should allow simultaneous messages during synchronization', () => { // create & synchronize two nodes - const n1 = create('abc123'), n2 = create('def456') + const n1 = create(true, 'abc123'), n2 = create(true, 'def456') const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -1166,7 +1167,7 @@ describe('Automerge', () => { }) it('should assume sent changes were received until we hear otherwise', () => { - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') const s1 = initSyncState(), s2 = initSyncState() let message = null @@ -1197,7 +1198,7 @@ describe('Automerge', () => { it('should work regardless of who initiates the exchange', () => { // create & synchronize two nodes - const n1 = create(), n2 = create() + const n1 = create(true), n2 = create(true) const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -1225,7 +1226,7 @@ describe('Automerge', () => { // lastSync is undefined. // create two peers both with divergent commits - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') //const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -1258,7 +1259,7 @@ describe('Automerge', () => { // lastSync is c9. 
// create two peers both with divergent commits - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -1287,7 +1288,7 @@ describe('Automerge', () => { }) it('should ensure non-empty state after sync', () => { - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 3; i++) { @@ -1306,7 +1307,7 @@ describe('Automerge', () => { // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. // we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') let s1 = initSyncState() const s2 = initSyncState() @@ -1355,7 +1356,7 @@ describe('Automerge', () => { }) it('should re-sync after one node experiences data loss without disconnecting', () => { - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') const s1 = initSyncState(), s2 = initSyncState() // n1 makes three changes, which we sync to n2 @@ -1369,7 +1370,7 @@ describe('Automerge', () => { assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) assert.deepStrictEqual(n1.materialize(), n2.materialize()) - const n2AfterDataLoss = create('89abcdef') + const n2AfterDataLoss = create(true, '89abcdef') // "n2" now has no data, but n1 still thinks it does. 
Note we don't do // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting @@ -1379,7 +1380,7 @@ describe('Automerge', () => { }) it('should handle changes concurrent to the last sync heads', () => { - const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef'), n3 = create(true, 'fedcba98') const s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() // Change 1 is known to all three nodes @@ -1415,7 +1416,7 @@ describe('Automerge', () => { }) it('should handle histories with lots of branching and merging', () => { - const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef'), n3 = create(true, 'fedcba98') n1.put("_root", "x", 0); n1.commit("", 0) const change1 = n1.getLastLocalChange() if (change1 === null) throw new RangeError("no local change") @@ -1463,7 +1464,7 @@ describe('Automerge', () => { // `-- n2 // where n2 is a false positive in the Bloom filter containing {n1}. // lastSync is c9. - let n1 = create('01234567'), n2 = create('89abcdef') + let n1 = create(true, '01234567'), n2 = create(true, '89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -1498,8 +1499,8 @@ describe('Automerge', () => { // `-- n2c1 <-- n2c2 // where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}. // lastSync is c9. - n1 = create('01234567') - n2 = create('89abcdef') + n1 = create(true, '01234567') + n2 = create(true, '89abcdef') s1 = initSyncState() s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -1568,7 +1569,7 @@ describe('Automerge', () => { assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent // n3 is a node that doesn't have the missing change. 
Nevertheless n1 is going to ask n3 for it - const n3 = create('fedcba98'), s13 = initSyncState(), s31 = initSyncState() + const n3 = create(true, 'fedcba98'), s13 = initSyncState(), s31 = initSyncState() sync(n1, n3, s13, s31) assert.deepStrictEqual(n1.getHeads(), [n1hash2]) assert.deepStrictEqual(n3.getHeads(), [n1hash2]) @@ -1581,7 +1582,7 @@ describe('Automerge', () => { // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}. // lastSync is c4. - let n1 = create('01234567'), n2 = create('89abcdef') + let n1 = create(true, '01234567'), n2 = create(true, '89abcdef') let s1 = initSyncState(), s2 = initSyncState() let n1hash3, n2hash3 @@ -1634,8 +1635,8 @@ describe('Automerge', () => { // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. // lastSync is c4. - const n1 = create('01234567') - let n2 = create('89abcdef') + const n1 = create(true, '01234567') + let n2 = create(true, '89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -1675,7 +1676,7 @@ describe('Automerge', () => { // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- n2 // where n2 causes a false positive in the Bloom filter containing {n1}. - let n1 = create('01234567'), n2 = create('89abcdef') + let n1 = create(true, '01234567'), n2 = create(true, '89abcdef') let s1 = initSyncState(), s2 = initSyncState() let message @@ -1735,7 +1736,7 @@ describe('Automerge', () => { // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. 
- const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef'), n3 = create(true, '76543210') let s13 = initSyncState() const s12 = initSyncState() const s21 = initSyncState() @@ -1807,7 +1808,7 @@ describe('Automerge', () => { }) it('should allow any change to be requested', () => { - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') const s1 = initSyncState(), s2 = initSyncState() let message = null @@ -1835,7 +1836,7 @@ describe('Automerge', () => { }) it('should ignore requests for a nonexistent change', () => { - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') const s1 = initSyncState(), s2 = initSyncState() let message = null @@ -1858,7 +1859,7 @@ describe('Automerge', () => { // ,-- c1 <-- c2 // c0 <-+ // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef'), n3 = create(true, '76543210') let s1 = initSyncState(), s2 = initSyncState() let msg @@ -1930,7 +1931,7 @@ describe('Automerge', () => { }) it('can handle overlappying splices', () => { - const doc = create() + const doc = create(true) doc.enablePatches(true) let mat : any = doc.materialize("/") doc.putObject("/", "text", "abcdefghij") @@ -1941,7 +1942,7 @@ describe('Automerge', () => { }) it('can handle utf16 text', () => { - const doc = create() + const doc = create(true) doc.enablePatches(true) let mat : any = doc.materialize("/") @@ -1957,7 +1958,7 @@ describe('Automerge', () => { mat = doc.applyPatches(mat) - const remote = load(doc.save()) + const remote = load(doc.save(), true) remote.enablePatches(true) let r_mat : any = remote.materialize("/") @@ -2028,7 +2029,7 @@ describe('Automerge', () => { message: null, deps: [] } - 
const doc = load(encodeChange(change)); + const doc = load(encodeChange(change), true); doc.enablePatches(true) const mat : any = doc.materialize("/") @@ -2068,4 +2069,105 @@ describe('Automerge', () => { assert.deepEqual(doc5.getAll("/bad_text", 2, doc.getHeads()), [['str', 'BBBBB', '3@aaaa' ]]) }) }) + + describe("the legacy text implementation", () => { + const root = "_root" + class FakeText { + elems: Array + constructor(elems: string | Array) { + if (typeof elems === "string") { + this.elems = Array.from(elems) + } else { + this.elems = elems + } + } + } + it("should materialize old style text", () => { + let doc = create(false); + doc.registerDatatype("text", (e: any) => new FakeText(e)) + doc.enablePatches(true) + let txt = doc.putObject(root, "text", "") + doc.splice(txt, 0, 0, "hello") + let mat: any = doc.materialize() + assert.deepEqual(mat.text, new FakeText("hello")) + }) + + it("should apply patches to old style text", () => { + let doc = create(false); + doc.registerDatatype("text", (e: any) => new FakeText(e)) + doc.enablePatches(true) + let mat : any = doc.materialize("/") + doc.putObject("/", "text", "abcdefghij") + doc.splice("/text", 2, 2, "00") + doc.splice("/text", 3, 5, "11") + mat = doc.applyPatches(mat) + assert.deepEqual(mat.text, new FakeText("ab011ij")) + }) + + it("should apply list patches to old style text", () => { + let doc = create(false); + doc.registerDatatype("text", (e: any) => new FakeText(e)) + doc.enablePatches(true) + let mat : any = doc.materialize("/") + doc.putObject("/", "text", "abc") + doc.insert("/text", 0, "0") + doc.insert("/text", 1, "1") + mat = doc.applyPatches(mat) + assert.deepEqual(mat.text, new FakeText("01abc")) + }) + + it("should allow inserting using list methods", () => { + let doc = create(false); + doc.registerDatatype("text", (e: any) => new FakeText(e)) + doc.enablePatches(true) + let mat : any = doc.materialize("/") + const txt = doc.putObject("/", "text", "abc") + doc.insert(txt, 3, "d") + 
doc.insert(txt, 0, "0") + mat = doc.applyPatches(mat) + assert.deepEqual(mat.text, new FakeText("0abcd")) + }) + + it("should allow inserting objects in old style text", () => { + let doc = create(false); + doc.registerDatatype("text", (e: any) => new FakeText(e)) + doc.enablePatches(true) + let mat : any = doc.materialize("/") + const txt = doc.putObject("/", "text", "abc") + doc.insertObject(txt, 0, {"key": "value"}) + doc.insertObject(txt, 2, ["elem"]) + doc.insert(txt, 2, "m") + mat = doc.applyPatches(mat) + assert.deepEqual(mat.text, new FakeText([ + {"key": "value"}, "a", "m", ["elem"], "b", "c" + ])) + }) + + class RawString { + val: string; + constructor(s: string) { + this.val = s + } + } + + it("should allow registering a different type for strings", () => { + let doc = create(false); + doc.registerDatatype("str", (e: any) => new RawString(e)) + doc.enablePatches(true) + doc.put("/", "key", "value") + let mat: any = doc.materialize() + assert.deepStrictEqual(mat.key, new RawString("value")) + }) + + it("should generate patches correctly for raw strings", () => { + let doc = create(false); + doc.registerDatatype("str", (e: any) => new RawString(e)) + doc.enablePatches(true) + let mat: any = doc.materialize() + doc.put("/", "key", "value") + mat = doc.applyPatches(mat) + assert.deepStrictEqual(mat.key, new RawString("value")) + }) + + }) }) diff --git a/rust/automerge/src/op_observer.rs b/rust/automerge/src/op_observer.rs index 2150b1de..0d082219 100644 --- a/rust/automerge/src/op_observer.rs +++ b/rust/automerge/src/op_observer.rs @@ -114,6 +114,13 @@ pub trait OpObserver { /// /// - `other`: Another Op Observer of the same type fn merge(&mut self, other: &Self); + + /// Whether to call sequence methods or `splice_text` when encountering changes in text + /// + /// Returns `false` by default + fn text_as_seq(&self) -> bool { + false + } } impl OpObserver for () { diff --git a/rust/automerge/src/transaction/inner.rs b/rust/automerge/src/transaction/inner.rs 
index 2099acef..cba4e723 100644 --- a/rust/automerge/src/transaction/inner.rs +++ b/rust/automerge/src/transaction/inner.rs @@ -198,6 +198,7 @@ impl TransactionInner { match (&prop, obj_type) { (Prop::Map(_), ObjType::Map) => Ok(()), (Prop::Seq(_), ObjType::List) => Ok(()), + (Prop::Seq(_), ObjType::Text) => Ok(()), _ => Err(AutomergeError::InvalidOp(obj_type)), }?; self.local_op(doc, op_observer, obj, prop, value.into())?; @@ -294,7 +295,7 @@ impl TransactionInner { value: V, ) -> Result<(), AutomergeError> { let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; - if obj_type != ObjType::List { + if !matches!(obj_type, ObjType::List | ObjType::Text) { return Err(AutomergeError::InvalidOp(obj_type)); } let value = value.into(); @@ -312,7 +313,7 @@ impl TransactionInner { value: ObjType, ) -> Result { let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; - if obj_type != ObjType::List { + if !matches!(obj_type, ObjType::List | ObjType::Text) { return Err(AutomergeError::InvalidOp(obj_type)); } let id = self.do_insert(doc, op_observer, obj, index, value.into())?; @@ -510,7 +511,7 @@ impl TransactionInner { vals: impl IntoIterator, ) -> Result<(), AutomergeError> { let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; - if obj_type != ObjType::List { + if !matches!(obj_type, ObjType::List | ObjType::Text) { return Err(AutomergeError::InvalidOp(obj_type)); } let values = vals.into_iter().collect(); @@ -631,7 +632,10 @@ impl TransactionInner { // handle the observer if let Some(obs) = op_observer.as_mut() { match splice_type { - SpliceType::List => { + SpliceType::Text(text, _) if !obs.text_as_seq() => { + obs.splice_text(doc, ex_obj, index, text) + } + SpliceType::List | SpliceType::Text(..) 
=> { let start = self.operations.len() - values.len(); for (offset, v) in values.iter().enumerate() { let op = &self.operations[start + offset].1; @@ -639,7 +643,6 @@ impl TransactionInner { obs.insert(doc, ex_obj.clone(), index + offset, value) } } - SpliceType::Text(text, _) => obs.splice_text(doc, ex_obj, index, text), } } } @@ -668,7 +671,12 @@ impl TransactionInner { } (Some(ObjType::Text), Prop::Seq(index)) => { // FIXME - op_observer.splice_text(doc, ex_obj, index, op.to_str()) + if op_observer.text_as_seq() { + let value = (op.value(), doc.ops.id_to_exid(op.id)); + op_observer.insert(doc, ex_obj, index, value) + } else { + op_observer.splice_text(doc, ex_obj, index, op.to_str()) + } } _ => {} } diff --git a/rust/automerge/tests/test.rs b/rust/automerge/tests/test.rs index 069a664d..6ab797f0 100644 --- a/rust/automerge/tests/test.rs +++ b/rust/automerge/tests/test.rs @@ -1393,8 +1393,8 @@ fn ops_on_wrong_objets() -> Result<(), AutomergeError> { doc.splice_text(&text, 0, 0, "hello world")?; let e5 = doc.put(&text, "a", "AAA"); assert_eq!(e5, Err(AutomergeError::InvalidOp(ObjType::Text))); - let e6 = doc.insert(&text, 0, "b"); - assert_eq!(e6, Err(AutomergeError::InvalidOp(ObjType::Text))); + //let e6 = doc.insert(&text, 0, "b"); + //assert_eq!(e6, Err(AutomergeError::InvalidOp(ObjType::Text))); Ok(()) } From 6c0d102032c066166cc4dab7770360d51d67504e Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 22 Dec 2022 09:17:10 +0000 Subject: [PATCH 677/730] automerge-js: Add backwards compatibility text layer The new text features are faster and more ergonomic but not backwards compatible. In order to make them backwards compatible re-expose the original functionality and move the new API under a `future` export. This allows users to interoperably use both implementations. 
--- javascript/.gitignore | 1 + javascript/config/cjs.json | 1 + javascript/config/mjs.json | 1 + javascript/package.json | 6 +- javascript/src/counter.ts | 6 +- javascript/src/index.ts | 1001 +------------------- javascript/src/internal_state.ts | 43 + javascript/src/proxies.ts | 343 +++++-- javascript/src/raw_string.ts | 6 + javascript/src/stable.ts | 955 +++++++++++++++++++ javascript/src/text.ts | 218 +++++ javascript/src/types.ts | 26 + javascript/src/unstable.ts | 292 ++++++ javascript/test/basic_test.ts | 2 +- javascript/test/extra_api_tests.ts | 2 +- javascript/test/legacy_tests.ts | 2 +- javascript/test/stable_unstable_interop.ts | 41 + javascript/test/text_test.ts | 2 +- javascript/test/text_v1.ts | 281 ++++++ 19 files changed, 2159 insertions(+), 1070 deletions(-) create mode 100644 javascript/src/internal_state.ts create mode 100644 javascript/src/raw_string.ts create mode 100644 javascript/src/stable.ts create mode 100644 javascript/src/text.ts create mode 100644 javascript/src/unstable.ts create mode 100644 javascript/test/stable_unstable_interop.ts create mode 100644 javascript/test/text_v1.ts diff --git a/javascript/.gitignore b/javascript/.gitignore index bf2aad08..ab4ec70d 100644 --- a/javascript/.gitignore +++ b/javascript/.gitignore @@ -2,3 +2,4 @@ /yarn.lock dist docs/ +.vim diff --git a/javascript/config/cjs.json b/javascript/config/cjs.json index 9cfceed5..fc500311 100644 --- a/javascript/config/cjs.json +++ b/javascript/config/cjs.json @@ -1,5 +1,6 @@ { "extends": "../tsconfig.json", + "exclude": ["../dist/**/*", "../node_modules", "../test/**/*"], "compilerOptions": { "outDir": "../dist/cjs" } diff --git a/javascript/config/mjs.json b/javascript/config/mjs.json index 5b02ee0e..2ee7a8b8 100644 --- a/javascript/config/mjs.json +++ b/javascript/config/mjs.json @@ -1,5 +1,6 @@ { "extends": "../tsconfig.json", + "exclude": ["../dist/**/*", "../node_modules", "../test/**/*"], "compilerOptions": { "target": "es6", "module": "es6", diff --git 
a/javascript/package.json b/javascript/package.json index b7afb5b7..33523370 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -21,17 +21,21 @@ "dist/cjs/uuid.js", "dist/cjs/counter.js", "dist/cjs/low_level.js", + "dist/cjs/next.js", "dist/cjs/text.js", "dist/cjs/proxies.js", + "dist/cjs/raw_string.js", "dist/mjs/constants.js", "dist/mjs/types.js", "dist/mjs/numbers.js", + "dist/mjs/next.js", "dist/mjs/index.js", "dist/mjs/uuid.js", "dist/mjs/counter.js", "dist/mjs/low_level.js", "dist/mjs/text.js", - "dist/mjs/proxies.js" + "dist/mjs/proxies.js", + "dist/mjs/raw_string.js" ], "types": "./dist/index.d.ts", "module": "./dist/mjs/index.js", diff --git a/javascript/src/counter.ts b/javascript/src/counter.ts index d94a3034..6b9ad277 100644 --- a/javascript/src/counter.ts +++ b/javascript/src/counter.ts @@ -49,14 +49,14 @@ export class Counter { */ class WriteableCounter extends Counter { context: Automerge - path: string[] + path: Prop[] objectId: ObjID key: Prop constructor( value: number, context: Automerge, - path: string[], + path: Prop[], objectId: ObjID, key: Prop ) { @@ -97,7 +97,7 @@ class WriteableCounter extends Counter { export function getWriteableCounter( value: number, context: Automerge, - path: string[], + path: Prop[], objectId: ObjID, key: Prop ) { diff --git a/javascript/src/index.ts b/javascript/src/index.ts index a5b3a0bb..7d4a68ba 100644 --- a/javascript/src/index.ts +++ b/javascript/src/index.ts @@ -1,998 +1,3 @@ -/** @hidden **/ -export { /** @hidden */ uuid } from "./uuid" - -import { rootProxy, listProxy, mapProxy } from "./proxies" -import { STATE, TRACE, IS_PROXY, OBJECT_ID } from "./constants" - -import { AutomergeValue, Counter } from "./types" -export { - AutomergeValue, - Counter, - Int, - Uint, - Float64, - ScalarValue, -} from "./types" - -import { type API, type Patch } from "@automerge/automerge-wasm" -export { - type Patch, - PutPatch, - DelPatch, - SplicePatch, - IncPatch, - SyncMessage, -} from 
"@automerge/automerge-wasm" -import { ApiHandler, ChangeToEncode, UseApi } from "./low_level" - -import { - Actor as ActorId, - Prop, - ObjID, - Change, - DecodedChange, - Heads, - Automerge, - MaterializeValue, -} from "@automerge/automerge-wasm" -import { - JsSyncState as SyncState, - SyncMessage, - DecodedSyncMessage, -} from "@automerge/automerge-wasm" - -/** Options passed to {@link change}, and {@link emptyChange} - * @typeParam T - The type of value contained in the document - */ -export type ChangeOptions = { - /** A message which describes the changes */ - message?: string - /** The unix timestamp of the change (purely advisory, not used in conflict resolution) */ - time?: number - /** A callback which will be called to notify the caller of any changes to the document */ - patchCallback?: PatchCallback -} - -/** Options passed to {@link loadIncremental}, {@link applyChanges}, and {@link receiveSyncMessage} - * @typeParam T - The type of value contained in the document - */ -export type ApplyOptions = { patchCallback?: PatchCallback } - -/** - * An automerge document. - * @typeParam T - The type of the value contained in this document - * - * Note that this provides read only access to the fields of the value. To - * modify the value use {@link change} - */ -export type Doc = { readonly [P in keyof T]: T[P] } - -/** - * Function which is called by {@link change} when making changes to a `Doc` - * @typeParam T - The type of value contained in the document - * - * This function may mutate `doc` - */ -export type ChangeFn = (doc: T) => void - -/** - * Callback which is called by various methods in this library to notify the - * user of what changes have been made. 
- * @param patch - A description of the changes made - * @param before - The document before the change was made - * @param after - The document after the change was made - */ -export type PatchCallback = ( - patches: Array, - before: Doc, - after: Doc -) => void - -/** @hidden **/ -export interface State { - change: DecodedChange - snapshot: T -} - -/** @hidden **/ -export function use(api: API) { - UseApi(api) -} - -import * as wasm from "@automerge/automerge-wasm" -use(wasm) - -/** - * Options to be passed to {@link init} or {@link load} - * @typeParam T - The type of the value the document contains - */ -export type InitOptions = { - /** The actor ID to use for this document, a random one will be generated if `null` is passed */ - actor?: ActorId - freeze?: boolean - /** A callback which will be called with the initial patch once the document has finished loading */ - patchCallback?: PatchCallback -} - -interface InternalState { - handle: Automerge - heads: Heads | undefined - freeze: boolean - patchCallback?: PatchCallback -} - -/** @hidden */ -export function getBackend(doc: Doc): Automerge { - return _state(doc).handle -} - -function _state(doc: Doc, checkroot = true): InternalState { - if (typeof doc !== "object") { - throw new RangeError("must be the document root") - } - const state = Reflect.get(doc, STATE) as InternalState - if ( - state === undefined || - state == null || - (checkroot && _obj(doc) !== "_root") - ) { - throw new RangeError("must be the document root") - } - return state -} - -function _trace(doc: Doc): string | undefined { - return Reflect.get(doc, TRACE) as string -} - -function _obj(doc: Doc): ObjID | null { - if (!(typeof doc === "object") || doc === null) { - return null - } - return Reflect.get(doc, OBJECT_ID) as ObjID -} - -function _is_proxy(doc: Doc): boolean { - return !!Reflect.get(doc, IS_PROXY) -} - -function importOpts(_actor?: ActorId | InitOptions): InitOptions { - if (typeof _actor === "object") { - return _actor - } 
else { - return { actor: _actor } - } -} - -/** - * Create a new automerge document - * - * @typeParam T - The type of value contained in the document. This will be the - * type that is passed to the change closure in {@link change} - * @param _opts - Either an actorId or an {@link InitOptions} (which may - * contain an actorId). If this is null the document will be initialised with a - * random actor ID - */ -export function init(_opts?: ActorId | InitOptions): Doc { - const opts = importOpts(_opts) - const freeze = !!opts.freeze - const patchCallback = opts.patchCallback - const handle = ApiHandler.create(true, opts.actor) - handle.enablePatches(true) - handle.enableFreeze(!!opts.freeze) - handle.registerDatatype("counter", n => new Counter(n)) - const doc = handle.materialize("/", undefined, { - handle, - heads: undefined, - freeze, - patchCallback, - }) as Doc - return doc -} - -/** - * Make an immutable view of an automerge document as at `heads` - * - * @remarks - * The document returned from this function cannot be passed to {@link change}. - * This is because it shares the same underlying memory as `doc`, but it is - * consequently a very cheap copy. - * - * Note that this function will throw an error if any of the hashes in `heads` - * are not in the document. - * - * @typeParam T - The type of the value contained in the document - * @param doc - The document to create a view of - * @param heads - The hashes of the heads to create a view at - */ -export function view(doc: Doc, heads: Heads): Doc { - const state = _state(doc) - const handle = state.handle - return state.handle.materialize("/", heads, { - ...state, - handle, - heads, - }) as Doc -} - -/** - * Make a full writable copy of an automerge document - * - * @remarks - * Unlike {@link view} this function makes a full copy of the memory backing - * the document and can thus be passed to {@link change}. 
It also generates a - * new actor ID so that changes made in the new document do not create duplicate - * sequence numbers with respect to the old document. If you need control over - * the actor ID which is generated you can pass the actor ID as the second - * argument - * - * @typeParam T - The type of the value contained in the document - * @param doc - The document to clone - * @param _opts - Either an actor ID to use for the new doc or an {@link InitOptions} - */ -export function clone( - doc: Doc, - _opts?: ActorId | InitOptions -): Doc { - const state = _state(doc) - const heads = state.heads - const opts = importOpts(_opts) - const handle = state.handle.fork(opts.actor, heads) - - // `change` uses the presence of state.heads to determine if we are in a view - // set it to undefined to indicate that this is a full fat document - const { heads: oldHeads, ...stateSansHeads } = state - return handle.applyPatches(doc, { ...stateSansHeads, handle }) -} - -/** Explicity free the memory backing a document. 
Note that this is note - * necessary in environments which support - * [`FinalizationRegistry`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry) - */ -export function free(doc: Doc) { - return _state(doc).handle.free() -} - -/** - * Create an automerge document from a POJO - * - * @param initialState - The initial state which will be copied into the document - * @typeParam T - The type of the value passed to `from` _and_ the type the resulting document will contain - * @typeParam actor - The actor ID of the resulting document, if this is null a random actor ID will be used - * - * @example - * ``` - * const doc = automerge.from({ - * tasks: [ - * {description: "feed dogs", done: false} - * ] - * }) - * ``` - */ -export function from>( - initialState: T | Doc, - _opts?: ActorId | InitOptions -): Doc { - return change(init(_opts), d => Object.assign(d, initialState)) -} - -/** - * Update the contents of an automerge document - * @typeParam T - The type of the value contained in the document - * @param doc - The document to update - * @param options - Either a message, an {@link ChangeOptions}, or a {@link ChangeFn} - * @param callback - A `ChangeFn` to be used if `options` was a `string` - * - * Note that if the second argument is a function it will be used as the `ChangeFn` regardless of what the third argument is. 
- * - * @example A simple change - * ``` - * let doc1 = automerge.init() - * doc1 = automerge.change(doc1, d => { - * d.key = "value" - * }) - * assert.equal(doc1.key, "value") - * ``` - * - * @example A change with a message - * - * ``` - * doc1 = automerge.change(doc1, "add another value", d => { - * d.key2 = "value2" - * }) - * ``` - * - * @example A change with a message and a timestamp - * - * ``` - * doc1 = automerge.change(doc1, {message: "add another value", timestamp: 1640995200}, d => { - * d.key2 = "value2" - * }) - * ``` - * - * @example responding to a patch callback - * ``` - * let patchedPath - * let patchCallback = patch => { - * patchedPath = patch.path - * } - * doc1 = automerge.change(doc1, {message, "add another value", timestamp: 1640995200, patchCallback}, d => { - * d.key2 = "value2" - * }) - * assert.equal(patchedPath, ["key2"]) - * ``` - */ -export function change( - doc: Doc, - options: string | ChangeOptions | ChangeFn, - callback?: ChangeFn -): Doc { - if (typeof options === "function") { - return _change(doc, {}, options) - } else if (typeof callback === "function") { - if (typeof options === "string") { - options = { message: options } - } - return _change(doc, options, callback) - } else { - throw RangeError("Invalid args for change") - } -} - -function progressDocument( - doc: Doc, - heads: Heads | null, - callback?: PatchCallback -): Doc { - if (heads == null) { - return doc - } - const state = _state(doc) - const nextState = { ...state, heads: undefined } - const nextDoc = state.handle.applyPatches(doc, nextState, callback) - state.heads = heads - return nextDoc -} - -function _change( - doc: Doc, - options: ChangeOptions, - callback: ChangeFn -): Doc { - if (typeof callback !== "function") { - throw new RangeError("invalid change function") - } - - const state = _state(doc) - - if (doc === undefined || state === undefined) { - throw new RangeError("must be the document root") - } - if (state.heads) { - throw new RangeError( - 
"Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." - ) - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - try { - state.heads = heads - const root: T = rootProxy(state.handle) - callback(root) - if (state.handle.pendingOps() === 0) { - state.heads = undefined - return doc - } else { - state.handle.commit(options.message, options.time) - return progressDocument( - doc, - heads, - options.patchCallback || state.patchCallback - ) - } - } catch (e) { - //console.log("ERROR: ",e) - state.heads = undefined - state.handle.rollback() - throw e - } -} - -/** - * Make a change to a document which does not modify the document - * - * @param doc - The doc to add the empty change to - * @param options - Either a message or a {@link ChangeOptions} for the new change - * - * Why would you want to do this? One reason might be that you have merged - * changes from some other peers and you want to generate a change which - * depends on those merged changes so that you can sign the new change with all - * of the merged changes as part of the new change. - */ -export function emptyChange( - doc: Doc, - options: string | ChangeOptions | void -) { - if (options === undefined) { - options = {} - } - if (typeof options === "string") { - options = { message: options } - } - - const state = _state(doc) - - if (state.heads) { - throw new RangeError( - "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." 
- ) - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - - const heads = state.handle.getHeads() - state.handle.emptyChange(options.message, options.time) - return progressDocument(doc, heads) -} - -/** - * Load an automerge document from a compressed document produce by {@link save} - * - * @typeParam T - The type of the value which is contained in the document. - * Note that no validation is done to make sure this type is in - * fact the type of the contained value so be a bit careful - * @param data - The compressed document - * @param _opts - Either an actor ID or some {@link InitOptions}, if the actor - * ID is null a random actor ID will be created - * - * Note that `load` will throw an error if passed incomplete content (for - * example if you are receiving content over the network and don't know if you - * have the complete document yet). If you need to handle incomplete content use - * {@link init} followed by {@link loadIncremental}. - */ -export function load( - data: Uint8Array, - _opts?: ActorId | InitOptions -): Doc { - const opts = importOpts(_opts) - const actor = opts.actor - const patchCallback = opts.patchCallback - const handle = ApiHandler.load(data, true, actor) - handle.enablePatches(true) - handle.enableFreeze(!!opts.freeze) - handle.registerDatatype("counter", n => new Counter(n)) - const doc = handle.materialize("/", undefined, { - handle, - heads: undefined, - patchCallback, - }) as Doc - return doc -} - -/** - * Load changes produced by {@link saveIncremental}, or partial changes - * - * @typeParam T - The type of the value which is contained in the document. - * Note that no validation is done to make sure this type is in - * fact the type of the contained value so be a bit careful - * @param data - The compressedchanges - * @param opts - an {@link ApplyOptions} - * - * This function is useful when staying up to date with a connected peer. 
- * Perhaps the other end sent you a full compresed document which you loaded - * with {@link load} and they're sending you the result of - * {@link getLastLocalChange} every time they make a change. - * - * Note that this function will succesfully load the results of {@link save} as - * well as {@link getLastLocalChange} or any other incremental change. - */ -export function loadIncremental( - doc: Doc, - data: Uint8Array, - opts?: ApplyOptions -): Doc { - if (!opts) { - opts = {} - } - const state = _state(doc) - if (state.heads) { - throw new RangeError( - "Attempting to change an out of date document - set at: " + _trace(doc) - ) - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - state.handle.loadIncremental(data) - return progressDocument(doc, heads, opts.patchCallback || state.patchCallback) -} - -/** - * Export the contents of a document to a compressed format - * - * @param doc - The doc to save - * - * The returned bytes can be passed to {@link load} or {@link loadIncremental} - */ -export function save(doc: Doc): Uint8Array { - return _state(doc).handle.save() -} - -/** - * Merge `local` into `remote` - * @typeParam T - The type of values contained in each document - * @param local - The document to merge changes into - * @param remote - The document to merge changes from - * - * @returns - The merged document - * - * Often when you are merging documents you will also need to clone them. Both - * arguments to `merge` are frozen after the call so you can no longer call - * mutating methods (such as {@link change}) on them. The symtom of this will be - * an error which says "Attempting to change an out of date document". To - * overcome this call {@link clone} on the argument before passing it to {@link - * merge}. 
- */ -export function merge(local: Doc, remote: Doc): Doc { - const localState = _state(local) - - if (localState.heads) { - throw new RangeError( - "Attempting to change an out of date document - set at: " + _trace(local) - ) - } - const heads = localState.handle.getHeads() - const remoteState = _state(remote) - const changes = localState.handle.getChangesAdded(remoteState.handle) - localState.handle.applyChanges(changes) - return progressDocument(local, heads, localState.patchCallback) -} - -/** - * Get the actor ID associated with the document - */ -export function getActorId(doc: Doc): ActorId { - const state = _state(doc) - return state.handle.getActorId() -} - -/** - * The type of conflicts for particular key or index - * - * Maps and sequences in automerge can contain conflicting values for a - * particular key or index. In this case {@link getConflicts} can be used to - * obtain a `Conflicts` representing the multiple values present for the property - * - * A `Conflicts` is a map from a unique (per property or index) key to one of - * the possible conflicting values for the given property. 
- */ -type Conflicts = { [key: string]: AutomergeValue } - -function conflictAt( - context: Automerge, - objectId: ObjID, - prop: Prop -): Conflicts | undefined { - const values = context.getAll(objectId, prop) - if (values.length <= 1) { - return - } - const result: Conflicts = {} - for (const fullVal of values) { - switch (fullVal[0]) { - case "map": - result[fullVal[1]] = mapProxy(context, fullVal[1], [prop], true) - break - case "list": - result[fullVal[1]] = listProxy(context, fullVal[1], [prop], true) - break - case "text": - result[fullVal[1]] = context.text(fullVal[1]) - break - //case "table": - //case "cursor": - case "str": - case "uint": - case "int": - case "f64": - case "boolean": - case "bytes": - case "null": - result[fullVal[2]] = fullVal[1] - break - case "counter": - result[fullVal[2]] = new Counter(fullVal[1]) - break - case "timestamp": - result[fullVal[2]] = new Date(fullVal[1]) - break - default: - throw RangeError(`datatype ${fullVal[0]} unimplemented`) - } - } - return result -} - -/** - * Get the conflicts associated with a property - * - * The values of properties in a map in automerge can be conflicted if there - * are concurrent "put" operations to the same key. Automerge chooses one value - * arbitrarily (but deterministically, any two nodes who have the same set of - * changes will choose the same value) from the set of conflicting values to - * present as the value of the key. - * - * Sometimes you may want to examine these conflicts, in this case you can use - * {@link getConflicts} to get the conflicts for the key. 
- * - * @example - * ``` - * import * as automerge from "@automerge/automerge" - * - * type Profile = { - * pets: Array<{name: string, type: string}> - * } - * - * let doc1 = automerge.init("aaaa") - * doc1 = automerge.change(doc1, d => { - * d.pets = [{name: "Lassie", type: "dog"}] - * }) - * let doc2 = automerge.init("bbbb") - * doc2 = automerge.merge(doc2, automerge.clone(doc1)) - * - * doc2 = automerge.change(doc2, d => { - * d.pets[0].name = "Beethoven" - * }) - * - * doc1 = automerge.change(doc1, d => { - * d.pets[0].name = "Babe" - * }) - * - * const doc3 = automerge.merge(doc1, doc2) - * - * // Note that here we pass `doc3.pets`, not `doc3` - * let conflicts = automerge.getConflicts(doc3.pets[0], "name") - * - * // The two conflicting values are the keys of the conflicts object - * assert.deepEqual(Object.values(conflicts), ["Babe", Beethoven"]) - * ``` - */ -export function getConflicts( - doc: Doc, - prop: Prop -): Conflicts | undefined { - const state = _state(doc, false) - const objectId = _obj(doc) - if (objectId != null) { - return conflictAt(state.handle, objectId, prop) - } else { - return undefined - } -} - -/** - * Get the binary representation of the last change which was made to this doc - * - * This is most useful when staying in sync with other peers, every time you - * make a change locally via {@link change} you immediately call {@link - * getLastLocalChange} and send the result over the network to other peers. - */ -export function getLastLocalChange(doc: Doc): Change | undefined { - const state = _state(doc) - return state.handle.getLastLocalChange() || undefined -} - -/** - * Return the object ID of an arbitrary javascript value - * - * This is useful to determine if something is actually an automerge document, - * if `doc` is not an automerge document this will return null. 
- */ -export function getObjectId(doc: Doc, prop?: Prop): ObjID | null { - if (prop) { - const state = _state(doc, false) - const objectId = _obj(doc) - if (!state || !objectId) { - return null - } - return state.handle.get(objectId, prop) as ObjID - } else { - return _obj(doc) - } -} - -/** - * Get the changes which are in `newState` but not in `oldState`. The returned - * changes can be loaded in `oldState` via {@link applyChanges}. - * - * Note that this will crash if there are changes in `oldState` which are not in `newState`. - */ -export function getChanges(oldState: Doc, newState: Doc): Change[] { - const n = _state(newState) - return n.handle.getChanges(getHeads(oldState)) -} - -/** - * Get all the changes in a document - * - * This is different to {@link save} because the output is an array of changes - * which can be individually applied via {@link applyChanges}` - * - */ -export function getAllChanges(doc: Doc): Change[] { - const state = _state(doc) - return state.handle.getChanges([]) -} - -/** - * Apply changes received from another document - * - * `doc` will be updated to reflect the `changes`. If there are changes which - * we do not have dependencies for yet those will be stored in the document and - * applied when the depended on changes arrive. - * - * You can use the {@link ApplyOptions} to pass a patchcallback which will be - * informed of any changes which occur as a result of applying the changes - * - */ -export function applyChanges( - doc: Doc, - changes: Change[], - opts?: ApplyOptions -): [Doc] { - const state = _state(doc) - if (!opts) { - opts = {} - } - if (state.heads) { - throw new RangeError( - "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." 
- ) - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - state.handle.applyChanges(changes) - state.heads = heads - return [ - progressDocument(doc, heads, opts.patchCallback || state.patchCallback), - ] -} - -/** @hidden */ -export function getHistory(doc: Doc): State[] { - const history = getAllChanges(doc) - return history.map((change, index) => ({ - get change() { - return decodeChange(change) - }, - get snapshot() { - const [state] = applyChanges(init(), history.slice(0, index + 1)) - return state - }, - })) -} - -/** @hidden */ -// FIXME : no tests -// FIXME can we just use deep equals now? -export function equals(val1: unknown, val2: unknown): boolean { - if (!isObject(val1) || !isObject(val2)) return val1 === val2 - const keys1 = Object.keys(val1).sort(), - keys2 = Object.keys(val2).sort() - if (keys1.length !== keys2.length) return false - for (let i = 0; i < keys1.length; i++) { - if (keys1[i] !== keys2[i]) return false - if (!equals(val1[keys1[i]], val2[keys2[i]])) return false - } - return true -} - -/** - * encode a {@link SyncState} into binary to send over the network - * - * @group sync - * */ -export function encodeSyncState(state: SyncState): Uint8Array { - const sync = ApiHandler.importSyncState(state) - const result = ApiHandler.encodeSyncState(sync) - sync.free() - return result -} - -/** - * Decode some binary data into a {@link SyncState} - * - * @group sync - */ -export function decodeSyncState(state: Uint8Array): SyncState { - const sync = ApiHandler.decodeSyncState(state) - const result = ApiHandler.exportSyncState(sync) - sync.free() - return result -} - -/** - * Generate a sync message to send to the peer represented by `inState` - * @param doc - The doc to generate messages about - * @param inState - The {@link SyncState} representing the peer we are talking to - * - * @group sync - * - * @returns An array of `[newSyncState, syncMessage | null]` 
where - * `newSyncState` should replace `inState` and `syncMessage` should be sent to - * the peer if it is not null. If `syncMessage` is null then we are up to date. - */ -export function generateSyncMessage( - doc: Doc, - inState: SyncState -): [SyncState, SyncMessage | null] { - const state = _state(doc) - const syncState = ApiHandler.importSyncState(inState) - const message = state.handle.generateSyncMessage(syncState) - const outState = ApiHandler.exportSyncState(syncState) - return [outState, message] -} - -/** - * Update a document and our sync state on receiving a sync message - * - * @group sync - * - * @param doc - The doc the sync message is about - * @param inState - The {@link SyncState} for the peer we are communicating with - * @param message - The message which was received - * @param opts - Any {@link ApplyOption}s, used for passing a - * {@link PatchCallback} which will be informed of any changes - * in `doc` which occur because of the received sync message. - * - * @returns An array of `[newDoc, newSyncState, syncMessage | null]` where - * `newDoc` is the updated state of `doc`, `newSyncState` should replace - * `inState` and `syncMessage` should be sent to the peer if it is not null. If - * `syncMessage` is null then we are up to date. - */ -export function receiveSyncMessage( - doc: Doc, - inState: SyncState, - message: SyncMessage, - opts?: ApplyOptions -): [Doc, SyncState, null] { - const syncState = ApiHandler.importSyncState(inState) - if (!opts) { - opts = {} - } - const state = _state(doc) - if (state.heads) { - throw new RangeError( - "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." 
- ) - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - state.handle.receiveSyncMessage(syncState, message) - const outSyncState = ApiHandler.exportSyncState(syncState) - return [ - progressDocument(doc, heads, opts.patchCallback || state.patchCallback), - outSyncState, - null, - ] -} - -/** - * Create a new, blank {@link SyncState} - * - * When communicating with a peer for the first time use this to generate a new - * {@link SyncState} for them - * - * @group sync - */ -export function initSyncState(): SyncState { - return ApiHandler.exportSyncState(ApiHandler.initSyncState()) -} - -/** @hidden */ -export function encodeChange(change: ChangeToEncode): Change { - return ApiHandler.encodeChange(change) -} - -/** @hidden */ -export function decodeChange(data: Change): DecodedChange { - return ApiHandler.decodeChange(data) -} - -/** @hidden */ -export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { - return ApiHandler.encodeSyncMessage(message) -} - -/** @hidden */ -export function decodeSyncMessage(message: SyncMessage): DecodedSyncMessage { - return ApiHandler.decodeSyncMessage(message) -} - -/** - * Get any changes in `doc` which are not dependencies of `heads` - */ -export function getMissingDeps(doc: Doc, heads: Heads): Heads { - const state = _state(doc) - return state.handle.getMissingDeps(heads) -} - -export function splice( - doc: Doc, - prop: Prop, - index: number, - del: number, - newText?: string -) { - if (!_is_proxy(doc)) { - throw new RangeError("object cannot be modified outside of a change block") - } - const state = _state(doc, false) - const objectId = _obj(doc) - if (!objectId) { - throw new RangeError("invalid object for splice") - } - const value = `${objectId}/${prop}` - try { - return state.handle.splice(value, index, del, newText) - } catch (e) { - throw new RangeError(`Cannot splice: ${e}`) - } -} - -/** - * Get the hashes of the 
heads of this document - */ -export function getHeads(doc: Doc): Heads { - const state = _state(doc) - return state.heads || state.handle.getHeads() -} - -/** @hidden */ -export function dump(doc: Doc) { - const state = _state(doc) - state.handle.dump() -} - -/** @hidden */ -export function toJS(doc: Doc): T { - const state = _state(doc) - const enabled = state.handle.enableFreeze(false) - const result = state.handle.materialize() - state.handle.enableFreeze(enabled) - return result as T -} - -export function isAutomerge(doc: unknown): boolean { - if (typeof doc == "object" && doc !== null) { - return getObjectId(doc) === "_root" && !!Reflect.get(doc, STATE) - } else { - return false - } -} - -function isObject(obj: unknown): obj is Record { - return typeof obj === "object" && obj !== null -} - -export type { - API, - SyncState, - ActorId, - Conflicts, - Prop, - Change, - ObjID, - DecodedChange, - DecodedSyncMessage, - Heads, - MaterializeValue, -} +export * from "./stable" +import * as unstable from "./unstable" +export { unstable } diff --git a/javascript/src/internal_state.ts b/javascript/src/internal_state.ts new file mode 100644 index 00000000..92ab648e --- /dev/null +++ b/javascript/src/internal_state.ts @@ -0,0 +1,43 @@ +import { ObjID, Heads, Automerge } from "@automerge/automerge-wasm" + +import { STATE, OBJECT_ID, TRACE, IS_PROXY } from "./constants" + +import { type Doc, PatchCallback } from "./types" + +export interface InternalState { + handle: Automerge + heads: Heads | undefined + freeze: boolean + patchCallback?: PatchCallback + textV2: boolean +} + +export function _state(doc: Doc, checkroot = true): InternalState { + if (typeof doc !== "object") { + throw new RangeError("must be the document root") + } + const state = Reflect.get(doc, STATE) as InternalState + if ( + state === undefined || + state == null || + (checkroot && _obj(doc) !== "_root") + ) { + throw new RangeError("must be the document root") + } + return state +} + +export function 
_trace(doc: Doc): string | undefined { + return Reflect.get(doc, TRACE) as string +} + +export function _obj(doc: Doc): ObjID | null { + if (!(typeof doc === "object") || doc === null) { + return null + } + return Reflect.get(doc, OBJECT_ID) as ObjID +} + +export function _is_proxy(doc: Doc): boolean { + return !!Reflect.get(doc, IS_PROXY) +} diff --git a/javascript/src/proxies.ts b/javascript/src/proxies.ts index 523c4547..3fb3a825 100644 --- a/javascript/src/proxies.ts +++ b/javascript/src/proxies.ts @@ -1,6 +1,13 @@ +import { Text } from "./text" import { Automerge, Heads, ObjID } from "@automerge/automerge-wasm" import { Prop } from "@automerge/automerge-wasm" -import { AutomergeValue, ScalarValue, MapValue, ListValue } from "./types" +import { + AutomergeValue, + ScalarValue, + MapValue, + ListValue, + TextValue, +} from "./types" import { Counter, getWriteableCounter } from "./counter" import { STATE, @@ -12,6 +19,19 @@ import { UINT, F64, } from "./constants" +import { RawString } from "./raw_string" + +type Target = { + context: Automerge + objectId: ObjID + path: Array + readonly: boolean + heads?: Array + cache: {} + trace?: any + frozen: boolean + textV2: boolean +} function parseListIndex(key) { if (typeof key === "string" && /^[0-9]+$/.test(key)) key = parseInt(key, 10) @@ -24,8 +44,8 @@ function parseListIndex(key) { return key } -function valueAt(target, prop: Prop): AutomergeValue | undefined { - const { context, objectId, path, readonly, heads } = target +function valueAt(target: Target, prop: Prop): AutomergeValue | undefined { + const { context, objectId, path, readonly, heads, textV2 } = target const value = context.getWithType(objectId, prop, heads) if (value === null) { return @@ -36,11 +56,35 @@ function valueAt(target, prop: Prop): AutomergeValue | undefined { case undefined: return case "map": - return mapProxy(context, val, [...path, prop], readonly, heads) + return mapProxy( + context, + val as ObjID, + textV2, + [...path, prop], + 
readonly, + heads + ) case "list": - return listProxy(context, val, [...path, prop], readonly, heads) + return listProxy( + context, + val as ObjID, + textV2, + [...path, prop], + readonly, + heads + ) case "text": - return context.text(val, heads) + if (textV2) { + return context.text(val as ObjID, heads) + } else { + return textProxy( + context, + val as ObjID, + [...path, prop], + readonly, + heads + ) + } case "str": return val case "uint": @@ -59,9 +103,9 @@ function valueAt(target, prop: Prop): AutomergeValue | undefined { return val case "counter": { if (readonly) { - return new Counter(val) + return new Counter(val as number) } else { - return getWriteableCounter(val, context, path, objectId, prop) + return getWriteableCounter(val as number, context, path, objectId, prop) } } default: @@ -69,7 +113,7 @@ function valueAt(target, prop: Prop): AutomergeValue | undefined { } } -function import_value(value) { +function import_value(value: any, textV2: boolean) { switch (typeof value) { case "object": if (value == null) { @@ -84,6 +128,10 @@ function import_value(value) { return [value.value, "counter"] } else if (value instanceof Date) { return [value.getTime(), "timestamp"] + } else if (value instanceof RawString) { + return [value.val, "str"] + } else if (value instanceof Text) { + return [value, "text"] } else if (value instanceof Uint8Array) { return [value, "bytes"] } else if (value instanceof Array) { @@ -97,7 +145,6 @@ function import_value(value) { } else { throw new RangeError(`Cannot assign unknown object: ${value}`) } - break case "boolean": return [value, "boolean"] case "number": @@ -106,17 +153,19 @@ function import_value(value) { } else { return [value, "f64"] } - break case "string": - return [value, "text"] - break + if (textV2) { + return [value, "text"] + } else { + return [value, "str"] + } default: throw new RangeError(`Unsupported type of value: ${typeof value}`) } } const MapHandler = { - get(target, key): AutomergeValue { + get(target: 
Target, key): AutomergeValue | { handle: Automerge } { const { context, objectId, cache } = target if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] @@ -131,8 +180,8 @@ const MapHandler = { return cache[key] }, - set(target, key, val) { - const { context, objectId, path, readonly, frozen } = target + set(target: Target, key, val) { + const { context, objectId, path, readonly, frozen, textV2 } = target target.cache = {} // reset cache on set if (val && val[OBJECT_ID]) { throw new RangeError( @@ -143,7 +192,7 @@ const MapHandler = { target.trace = val return true } - const [value, datatype] = import_value(val) + const [value, datatype] = import_value(val, textV2) if (frozen) { throw new RangeError("Attempting to use an outdated Automerge document") } @@ -153,19 +202,39 @@ const MapHandler = { switch (datatype) { case "list": { const list = context.putObject(objectId, key, []) - const proxyList = listProxy(context, list, [...path, key], readonly) + const proxyList = listProxy( + context, + list, + textV2, + [...path, key], + readonly + ) for (let i = 0; i < value.length; i++) { proxyList[i] = value[i] } break } case "text": { - context.putObject(objectId, key, value, "text") + if (textV2) { + context.putObject(objectId, key, value) + } else { + const text = context.putObject(objectId, key, "") + const proxyText = textProxy(context, text, [...path, key], readonly) + for (let i = 0; i < value.length; i++) { + proxyText[i] = value.get(i) + } + } break } case "map": { const map = context.putObject(objectId, key, {}) - const proxyMap = mapProxy(context, map, [...path, key], readonly) + const proxyMap = mapProxy( + context, + map, + textV2, + [...path, key], + readonly + ) for (const key in value) { proxyMap[key] = value[key] } @@ -177,7 +246,7 @@ const MapHandler = { return true }, - deleteProperty(target, key) { + deleteProperty(target: Target, key) { const { context, objectId, readonly } = target target.cache = {} // reset cache on delete if (readonly) 
{ @@ -187,12 +256,12 @@ const MapHandler = { return true }, - has(target, key) { + has(target: Target, key) { const value = this.get(target, key) return value !== undefined }, - getOwnPropertyDescriptor(target, key) { + getOwnPropertyDescriptor(target: Target, key) { // const { context, objectId } = target const value = this.get(target, key) if (typeof value !== "undefined") { @@ -204,7 +273,7 @@ const MapHandler = { } }, - ownKeys(target) { + ownKeys(target: Target) { const { context, objectId, heads } = target // FIXME - this is a tmp workaround until fix the dupe key bug in keys() const keys = context.keys(objectId, heads) @@ -213,7 +282,7 @@ const MapHandler = { } const ListHandler = { - get(target, index) { + get(target: Target, index) { const { context, objectId, heads } = target index = parseListIndex(index) if (index === Symbol.hasInstance) { @@ -236,8 +305,8 @@ const ListHandler = { } }, - set(target, index, val) { - const { context, objectId, path, readonly, frozen } = target + set(target: Target, index, val) { + const { context, objectId, path, readonly, frozen, textV2 } = target index = parseListIndex(index) if (val && val[OBJECT_ID]) { throw new RangeError( @@ -251,7 +320,7 @@ const ListHandler = { if (typeof index == "string") { throw new RangeError("list index must be a number") } - const [value, datatype] = import_value(val) + const [value, datatype] = import_value(val, textV2) if (frozen) { throw new RangeError("Attempting to use an outdated Automerge document") } @@ -266,15 +335,32 @@ const ListHandler = { } else { list = context.putObject(objectId, index, []) } - const proxyList = listProxy(context, list, [...path, index], readonly) + const proxyList = listProxy( + context, + list, + textV2, + [...path, index], + readonly + ) proxyList.splice(0, 0, ...value) break } case "text": { - if (index >= context.length(objectId)) { - context.insertObject(objectId, index, value, "text") + if (textV2) { + if (index >= context.length(objectId)) { + 
context.insertObject(objectId, index, value) + } else { + context.putObject(objectId, index, value) + } } else { - context.putObject(objectId, index, value, "text") + let text + if (index >= context.length(objectId)) { + text = context.insertObject(objectId, index, "") + } else { + text = context.putObject(objectId, index, "") + } + const proxyText = textProxy(context, text, [...path, index], readonly) + proxyText.splice(0, 0, ...value) } break } @@ -285,7 +371,13 @@ const ListHandler = { } else { map = context.putObject(objectId, index, {}) } - const proxyMap = mapProxy(context, map, [...path, index], readonly) + const proxyMap = mapProxy( + context, + map, + textV2, + [...path, index], + readonly + ) for (const key in value) { proxyMap[key] = value[key] } @@ -301,10 +393,11 @@ const ListHandler = { return true }, - deleteProperty(target, index) { + deleteProperty(target: Target, index) { const { context, objectId } = target index = parseListIndex(index) - if (context.get(objectId, index)[0] == "counter") { + const elem = context.get(objectId, index) + if (elem != null && elem[0] == "counter") { throw new TypeError( "Unsupported operation: deleting a counter from a list" ) @@ -313,7 +406,7 @@ const ListHandler = { return true }, - has(target, index) { + has(target: Target, index) { const { context, objectId, heads } = target index = parseListIndex(index) if (typeof index === "number") { @@ -322,7 +415,7 @@ const ListHandler = { return index === "length" }, - getOwnPropertyDescriptor(target, index) { + getOwnPropertyDescriptor(target: Target, index) { const { context, objectId, heads } = target if (index === "length") @@ -350,54 +443,114 @@ const ListHandler = { }, } +const TextHandler = Object.assign({}, ListHandler, { + get(target: Target, index: any) { + const { context, objectId, heads } = target + index = parseListIndex(index) + if (index === Symbol.hasInstance) { + return (instance: any) => { + return Array.isArray(instance) + } + } + if (index === 
Symbol.toStringTag) { + return target[Symbol.toStringTag] + } + if (index === OBJECT_ID) return objectId + if (index === IS_PROXY) return true + if (index === TRACE) return target.trace + if (index === STATE) return { handle: context } + if (index === "length") return context.length(objectId, heads) + if (typeof index === "number") { + return valueAt(target, index) + } else { + return textMethods(target)[index] || listMethods(target)[index] + } + }, + getPrototypeOf(/*target*/) { + return Object.getPrototypeOf(new Text()) + }, +}) + export function mapProxy( context: Automerge, objectId: ObjID, + textV2: boolean, path?: Prop[], readonly?: boolean, heads?: Heads ): MapValue { - return new Proxy( - { - context, - objectId, - path, - readonly: !!readonly, - frozen: false, - heads, - cache: {}, - }, - MapHandler - ) + const target: Target = { + context, + objectId, + path: path || [], + readonly: !!readonly, + frozen: false, + heads, + cache: {}, + textV2, + } + const proxied = {} + Object.assign(proxied, target) + let result = new Proxy(proxied, MapHandler) + // conversion through unknown is necessary because the types are so different + return result as unknown as MapValue } export function listProxy( context: Automerge, objectId: ObjID, + textV2: boolean, path?: Prop[], readonly?: boolean, heads?: Heads ): ListValue { - const target = [] - Object.assign(target, { + const target: Target = { context, objectId, - path, + path: path || [], readonly: !!readonly, frozen: false, heads, cache: {}, - }) - return new Proxy(target, ListHandler) + textV2, + } + const proxied = [] + Object.assign(proxied, target) + // @ts-ignore + return new Proxy(proxied, ListHandler) as unknown as ListValue } -export function rootProxy(context: Automerge, readonly?: boolean): T { +export function textProxy( + context: Automerge, + objectId: ObjID, + path?: Prop[], + readonly?: boolean, + heads?: Heads +): TextValue { + const target: Target = { + context, + objectId, + path: path || [], + 
readonly: !!readonly, + frozen: false, + heads, + cache: {}, + textV2: false, + } + return new Proxy(target, TextHandler) as unknown as TextValue +} + +export function rootProxy( + context: Automerge, + textV2: boolean, + readonly?: boolean +): T { /* eslint-disable-next-line */ - return mapProxy(context, "_root", [], !!readonly) + return mapProxy(context, "_root", textV2, [], !!readonly) } -function listMethods(target) { - const { context, objectId, path, readonly, frozen, heads } = target +function listMethods(target: Target) { + const { context, objectId, path, readonly, frozen, heads, textV2 } = target const methods = { deleteAt(index, numDelete) { if (typeof numDelete === "number") { @@ -409,13 +562,13 @@ function listMethods(target) { }, fill(val: ScalarValue, start: number, end: number) { - const [value, datatype] = import_value(val) + const [value, datatype] = import_value(val, textV2) const length = context.length(objectId) start = parseListIndex(start || 0) end = parseListIndex(end || length) for (let i = start; i < Math.min(end, length); i++) { if (datatype === "text" || datatype === "list" || datatype === "map") { - context.putObject(objectId, i, value, datatype) + context.putObject(objectId, i, value) } else { context.put(objectId, i, value, datatype) } @@ -427,7 +580,7 @@ function listMethods(target) { const length = context.length(objectId) for (let i = start; i < length; i++) { const value = context.getWithType(objectId, i, heads) - if ((value && value[1] === o[OBJECT_ID]) || value[1] === o) { + if (value && (value[1] === o[OBJECT_ID] || value[1] === o)) { return i } } @@ -488,7 +641,7 @@ function listMethods(target) { } context.delete(objectId, index) } - const values = vals.map(val => import_value(val)) + const values = vals.map(val => import_value(val, textV2)) for (const [value, datatype] of values) { switch (datatype) { case "list": { @@ -496,6 +649,7 @@ function listMethods(target) { const proxyList = listProxy( context, list, + textV2, 
[...path, index], readonly ) @@ -503,12 +657,29 @@ function listMethods(target) { break } case "text": { - context.insertObject(objectId, index, value) + if (textV2) { + context.insertObject(objectId, index, value) + } else { + const text = context.insertObject(objectId, index, "") + const proxyText = textProxy( + context, + text, + [...path, index], + readonly + ) + proxyText.splice(0, 0, ...value) + } break } case "map": { const map = context.insertObject(objectId, index, {}) - const proxyMap = mapProxy(context, map, [...path, index], readonly) + const proxyMap = mapProxy( + context, + map, + textV2, + [...path, index], + readonly + ) for (const key in value) { proxyMap[key] = value[key] } @@ -689,3 +860,47 @@ function listMethods(target) { } return methods } + +function textMethods(target: Target) { + const { context, objectId, heads } = target + const methods = { + set(index: number, value) { + return (this[index] = value) + }, + get(index: number): AutomergeValue { + return this[index] + }, + toString(): string { + return context.text(objectId, heads).replace(//g, "") + }, + toSpans(): AutomergeValue[] { + const spans: AutomergeValue[] = [] + let chars = "" + const length = context.length(objectId) + for (let i = 0; i < length; i++) { + const value = this[i] + if (typeof value === "string") { + chars += value + } else { + if (chars.length > 0) { + spans.push(chars) + chars = "" + } + spans.push(value) + } + } + if (chars.length > 0) { + spans.push(chars) + } + return spans + }, + toJSON(): string { + return this.toString() + }, + indexOf(o, start = 0) { + const text = context.text(objectId) + return text.indexOf(o, start) + }, + } + return methods +} diff --git a/javascript/src/raw_string.ts b/javascript/src/raw_string.ts new file mode 100644 index 00000000..7fc02084 --- /dev/null +++ b/javascript/src/raw_string.ts @@ -0,0 +1,6 @@ +export class RawString { + val: string + constructor(val: string) { + this.val = val + } +} diff --git a/javascript/src/stable.ts 
b/javascript/src/stable.ts new file mode 100644 index 00000000..c52d0a4c --- /dev/null +++ b/javascript/src/stable.ts @@ -0,0 +1,955 @@ +/** @hidden **/ +export { /** @hidden */ uuid } from "./uuid" + +import { rootProxy, listProxy, mapProxy, textProxy } from "./proxies" +import { STATE } from "./constants" + +import { AutomergeValue, Counter, Doc, PatchCallback } from "./types" +export { + AutomergeValue, + Counter, + Doc, + Int, + Uint, + Float64, + Patch, + PatchCallback, + ScalarValue, + Text, +} from "./types" + +import { Text } from "./text" + +import { type API } from "@automerge/automerge-wasm" +export { + PutPatch, + DelPatch, + SplicePatch, + IncPatch, + SyncMessage, +} from "@automerge/automerge-wasm" +import { ApiHandler, ChangeToEncode, UseApi } from "./low_level" + +import { + Actor as ActorId, + Prop, + ObjID, + Change, + DecodedChange, + Heads, + Automerge, + MaterializeValue, +} from "@automerge/automerge-wasm" +import { + JsSyncState as SyncState, + SyncMessage, + DecodedSyncMessage, +} from "@automerge/automerge-wasm" + +import { RawString } from "./raw_string" + +import { _state, _is_proxy, _trace, _obj } from "./internal_state" + +/** Options passed to {@link change}, and {@link emptyChange} + * @typeParam T - The type of value contained in the document + */ +export type ChangeOptions = { + /** A message which describes the changes */ + message?: string + /** The unix timestamp of the change (purely advisory, not used in conflict resolution) */ + time?: number + /** A callback which will be called to notify the caller of any changes to the document */ + patchCallback?: PatchCallback +} + +/** Options passed to {@link loadIncremental}, {@link applyChanges}, and {@link receiveSyncMessage} + * @typeParam T - The type of value contained in the document + */ +export type ApplyOptions = { patchCallback?: PatchCallback } + +/** + * Function which is called by {@link change} when making changes to a `Doc` + * @typeParam T - The type of value contained 
in the document + * + * This function may mutate `doc` + */ +export type ChangeFn = (doc: T) => void + +/** @hidden **/ +export interface State { + change: DecodedChange + snapshot: T +} + +/** @hidden **/ +export function use(api: API) { + UseApi(api) +} + +import * as wasm from "@automerge/automerge-wasm" +use(wasm) + +/** + * Options to be passed to {@link init} or {@link load} + * @typeParam T - The type of the value the document contains + */ +export type InitOptions = { + /** The actor ID to use for this document, a random one will be generated if `null` is passed */ + actor?: ActorId + freeze?: boolean + /** A callback which will be called with the initial patch once the document has finished loading */ + patchCallback?: PatchCallback + /** @hidden */ + enableTextV2?: boolean +} + +/** @hidden */ +export function getBackend(doc: Doc): Automerge { + return _state(doc).handle +} + +function importOpts(_actor?: ActorId | InitOptions): InitOptions { + if (typeof _actor === "object") { + return _actor + } else { + return { actor: _actor } + } +} + +/** + * Create a new automerge document + * + * @typeParam T - The type of value contained in the document. This will be the + * type that is passed to the change closure in {@link change} + * @param _opts - Either an actorId or an {@link InitOptions} (which may + * contain an actorId). 
If this is null the document will be initialised with a + * random actor ID + */ +export function init(_opts?: ActorId | InitOptions): Doc { + const opts = importOpts(_opts) + const freeze = !!opts.freeze + const patchCallback = opts.patchCallback + const handle = ApiHandler.create(opts.enableTextV2 || false, opts.actor) + handle.enablePatches(true) + handle.enableFreeze(!!opts.freeze) + handle.registerDatatype("counter", (n: any) => new Counter(n)) + let textV2 = opts.enableTextV2 || false + if (textV2) { + handle.registerDatatype("str", (n: string) => new RawString(n)) + } else { + handle.registerDatatype("text", (n: any) => new Text(n)) + } + const doc = handle.materialize("/", undefined, { + handle, + heads: undefined, + freeze, + patchCallback, + textV2, + }) as Doc + return doc +} + +/** + * Make an immutable view of an automerge document as at `heads` + * + * @remarks + * The document returned from this function cannot be passed to {@link change}. + * This is because it shares the same underlying memory as `doc`, but it is + * consequently a very cheap copy. + * + * Note that this function will throw an error if any of the hashes in `heads` + * are not in the document. + * + * @typeParam T - The type of the value contained in the document + * @param doc - The document to create a view of + * @param heads - The hashes of the heads to create a view at + */ +export function view(doc: Doc, heads: Heads): Doc { + const state = _state(doc) + const handle = state.handle + return state.handle.materialize("/", heads, { + ...state, + handle, + heads, + }) as Doc +} + +/** + * Make a full writable copy of an automerge document + * + * @remarks + * Unlike {@link view} this function makes a full copy of the memory backing + * the document and can thus be passed to {@link change}. It also generates a + * new actor ID so that changes made in the new document do not create duplicate + * sequence numbers with respect to the old document. 
If you need control over + * the actor ID which is generated you can pass the actor ID as the second + * argument + * + * @typeParam T - The type of the value contained in the document + * @param doc - The document to clone + * @param _opts - Either an actor ID to use for the new doc or an {@link InitOptions} + */ +export function clone( + doc: Doc, + _opts?: ActorId | InitOptions +): Doc { + const state = _state(doc) + const heads = state.heads + const opts = importOpts(_opts) + const handle = state.handle.fork(opts.actor, heads) + + // `change` uses the presence of state.heads to determine if we are in a view + // set it to undefined to indicate that this is a full fat document + const { heads: oldHeads, ...stateSansHeads } = state + return handle.applyPatches(doc, { ...stateSansHeads, handle }) +} + +/** Explicity free the memory backing a document. Note that this is note + * necessary in environments which support + * [`FinalizationRegistry`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry) + */ +export function free(doc: Doc) { + return _state(doc).handle.free() +} + +/** + * Create an automerge document from a POJO + * + * @param initialState - The initial state which will be copied into the document + * @typeParam T - The type of the value passed to `from` _and_ the type the resulting document will contain + * @typeParam actor - The actor ID of the resulting document, if this is null a random actor ID will be used + * + * @example + * ``` + * const doc = automerge.from({ + * tasks: [ + * {description: "feed dogs", done: false} + * ] + * }) + * ``` + */ +export function from>( + initialState: T | Doc, + _opts?: ActorId | InitOptions +): Doc { + return change(init(_opts), d => Object.assign(d, initialState)) +} + +/** + * Update the contents of an automerge document + * @typeParam T - The type of the value contained in the document + * @param doc - The document to update + * @param options - Either a 
message, an {@link ChangeOptions}, or a {@link ChangeFn} + * @param callback - A `ChangeFn` to be used if `options` was a `string` + * + * Note that if the second argument is a function it will be used as the `ChangeFn` regardless of what the third argument is. + * + * @example A simple change + * ``` + * let doc1 = automerge.init() + * doc1 = automerge.change(doc1, d => { + * d.key = "value" + * }) + * assert.equal(doc1.key, "value") + * ``` + * + * @example A change with a message + * + * ``` + * doc1 = automerge.change(doc1, "add another value", d => { + * d.key2 = "value2" + * }) + * ``` + * + * @example A change with a message and a timestamp + * + * ``` + * doc1 = automerge.change(doc1, {message: "add another value", timestamp: 1640995200}, d => { + * d.key2 = "value2" + * }) + * ``` + * + * @example responding to a patch callback + * ``` + * let patchedPath + * let patchCallback = patch => { + * patchedPath = patch.path + * } + * doc1 = automerge.change(doc1, {message, "add another value", timestamp: 1640995200, patchCallback}, d => { + * d.key2 = "value2" + * }) + * assert.equal(patchedPath, ["key2"]) + * ``` + */ +export function change( + doc: Doc, + options: string | ChangeOptions | ChangeFn, + callback?: ChangeFn +): Doc { + if (typeof options === "function") { + return _change(doc, {}, options) + } else if (typeof callback === "function") { + if (typeof options === "string") { + options = { message: options } + } + return _change(doc, options, callback) + } else { + throw RangeError("Invalid args for change") + } +} + +function progressDocument( + doc: Doc, + heads: Heads | null, + callback?: PatchCallback +): Doc { + if (heads == null) { + return doc + } + const state = _state(doc) + const nextState = { ...state, heads: undefined } + const nextDoc = state.handle.applyPatches(doc, nextState, callback) + state.heads = heads + return nextDoc +} + +function _change( + doc: Doc, + options: ChangeOptions, + callback: ChangeFn +): Doc { + if (typeof callback 
!== "function") { + throw new RangeError("invalid change function") + } + + const state = _state(doc) + + if (doc === undefined || state === undefined) { + throw new RangeError("must be the document root") + } + if (state.heads) { + throw new RangeError( + "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." + ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + try { + state.heads = heads + const root: T = rootProxy(state.handle, state.textV2) + callback(root) + if (state.handle.pendingOps() === 0) { + state.heads = undefined + return doc + } else { + state.handle.commit(options.message, options.time) + return progressDocument( + doc, + heads, + options.patchCallback || state.patchCallback + ) + } + } catch (e) { + state.heads = undefined + state.handle.rollback() + throw e + } +} + +/** + * Make a change to a document which does not modify the document + * + * @param doc - The doc to add the empty change to + * @param options - Either a message or a {@link ChangeOptions} for the new change + * + * Why would you want to do this? One reason might be that you have merged + * changes from some other peers and you want to generate a change which + * depends on those merged changes so that you can sign the new change with all + * of the merged changes as part of the new change. + */ +export function emptyChange( + doc: Doc, + options: string | ChangeOptions | void +) { + if (options === undefined) { + options = {} + } + if (typeof options === "string") { + options = { message: options } + } + + const state = _state(doc) + + if (state.heads) { + throw new RangeError( + "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." 
+ ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + + const heads = state.handle.getHeads() + state.handle.emptyChange(options.message, options.time) + return progressDocument(doc, heads) +} + +/** + * Load an automerge document from a compressed document produce by {@link save} + * + * @typeParam T - The type of the value which is contained in the document. + * Note that no validation is done to make sure this type is in + * fact the type of the contained value so be a bit careful + * @param data - The compressed document + * @param _opts - Either an actor ID or some {@link InitOptions}, if the actor + * ID is null a random actor ID will be created + * + * Note that `load` will throw an error if passed incomplete content (for + * example if you are receiving content over the network and don't know if you + * have the complete document yet). If you need to handle incomplete content use + * {@link init} followed by {@link loadIncremental}. + */ +export function load( + data: Uint8Array, + _opts?: ActorId | InitOptions +): Doc { + const opts = importOpts(_opts) + const actor = opts.actor + const patchCallback = opts.patchCallback + const handle = ApiHandler.load(data, opts.enableTextV2 || false, actor) + handle.enablePatches(true) + handle.enableFreeze(!!opts.freeze) + handle.registerDatatype("counter", (n: number) => new Counter(n)) + const textV2 = opts.enableTextV2 || false + if (textV2) { + handle.registerDatatype("str", (n: string) => new RawString(n)) + } else { + handle.registerDatatype("text", (n: string) => new Text(n)) + } + const doc = handle.materialize("/", undefined, { + handle, + heads: undefined, + patchCallback, + textV2, + }) as Doc + return doc +} + +/** + * Load changes produced by {@link saveIncremental}, or partial changes + * + * @typeParam T - The type of the value which is contained in the document. 
+ * Note that no validation is done to make sure this type is in + * fact the type of the contained value so be a bit careful + * @param data - The compressedchanges + * @param opts - an {@link ApplyOptions} + * + * This function is useful when staying up to date with a connected peer. + * Perhaps the other end sent you a full compresed document which you loaded + * with {@link load} and they're sending you the result of + * {@link getLastLocalChange} every time they make a change. + * + * Note that this function will succesfully load the results of {@link save} as + * well as {@link getLastLocalChange} or any other incremental change. + */ +export function loadIncremental( + doc: Doc, + data: Uint8Array, + opts?: ApplyOptions +): Doc { + if (!opts) { + opts = {} + } + const state = _state(doc) + if (state.heads) { + throw new RangeError( + "Attempting to change an out of date document - set at: " + _trace(doc) + ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + state.handle.loadIncremental(data) + return progressDocument(doc, heads, opts.patchCallback || state.patchCallback) +} + +/** + * Export the contents of a document to a compressed format + * + * @param doc - The doc to save + * + * The returned bytes can be passed to {@link load} or {@link loadIncremental} + */ +export function save(doc: Doc): Uint8Array { + return _state(doc).handle.save() +} + +/** + * Merge `local` into `remote` + * @typeParam T - The type of values contained in each document + * @param local - The document to merge changes into + * @param remote - The document to merge changes from + * + * @returns - The merged document + * + * Often when you are merging documents you will also need to clone them. Both + * arguments to `merge` are frozen after the call so you can no longer call + * mutating methods (such as {@link change}) on them. 
The symtom of this will be + * an error which says "Attempting to change an out of date document". To + * overcome this call {@link clone} on the argument before passing it to {@link + * merge}. + */ +export function merge(local: Doc, remote: Doc): Doc { + const localState = _state(local) + + if (localState.heads) { + throw new RangeError( + "Attempting to change an out of date document - set at: " + _trace(local) + ) + } + const heads = localState.handle.getHeads() + const remoteState = _state(remote) + const changes = localState.handle.getChangesAdded(remoteState.handle) + localState.handle.applyChanges(changes) + return progressDocument(local, heads, localState.patchCallback) +} + +/** + * Get the actor ID associated with the document + */ +export function getActorId(doc: Doc): ActorId { + const state = _state(doc) + return state.handle.getActorId() +} + +/** + * The type of conflicts for particular key or index + * + * Maps and sequences in automerge can contain conflicting values for a + * particular key or index. In this case {@link getConflicts} can be used to + * obtain a `Conflicts` representing the multiple values present for the property + * + * A `Conflicts` is a map from a unique (per property or index) key to one of + * the possible conflicting values for the given property. 
+ */ +type Conflicts = { [key: string]: AutomergeValue } + +function conflictAt( + context: Automerge, + objectId: ObjID, + prop: Prop, + textV2: boolean +): Conflicts | undefined { + const values = context.getAll(objectId, prop) + if (values.length <= 1) { + return + } + const result: Conflicts = {} + for (const fullVal of values) { + switch (fullVal[0]) { + case "map": + result[fullVal[1]] = mapProxy(context, fullVal[1], textV2, [prop], true) + break + case "list": + result[fullVal[1]] = listProxy( + context, + fullVal[1], + textV2, + [prop], + true + ) + break + case "text": + if (textV2) { + result[fullVal[1]] = context.text(fullVal[1]) + } else { + result[fullVal[1]] = textProxy(context, objectId, [prop], true) + } + break + //case "table": + //case "cursor": + case "str": + case "uint": + case "int": + case "f64": + case "boolean": + case "bytes": + case "null": + result[fullVal[2]] = fullVal[1] + break + case "counter": + result[fullVal[2]] = new Counter(fullVal[1]) + break + case "timestamp": + result[fullVal[2]] = new Date(fullVal[1]) + break + default: + throw RangeError(`datatype ${fullVal[0]} unimplemented`) + } + } + return result +} + +/** + * Get the conflicts associated with a property + * + * The values of properties in a map in automerge can be conflicted if there + * are concurrent "put" operations to the same key. Automerge chooses one value + * arbitrarily (but deterministically, any two nodes who have the same set of + * changes will choose the same value) from the set of conflicting values to + * present as the value of the key. + * + * Sometimes you may want to examine these conflicts, in this case you can use + * {@link getConflicts} to get the conflicts for the key. 
+ * + * @example + * ``` + * import * as automerge from "@automerge/automerge" + * + * type Profile = { + * pets: Array<{name: string, type: string}> + * } + * + * let doc1 = automerge.init("aaaa") + * doc1 = automerge.change(doc1, d => { + * d.pets = [{name: "Lassie", type: "dog"}] + * }) + * let doc2 = automerge.init("bbbb") + * doc2 = automerge.merge(doc2, automerge.clone(doc1)) + * + * doc2 = automerge.change(doc2, d => { + * d.pets[0].name = "Beethoven" + * }) + * + * doc1 = automerge.change(doc1, d => { + * d.pets[0].name = "Babe" + * }) + * + * const doc3 = automerge.merge(doc1, doc2) + * + * // Note that here we pass `doc3.pets`, not `doc3` + * let conflicts = automerge.getConflicts(doc3.pets[0], "name") + * + * // The two conflicting values are the keys of the conflicts object + * assert.deepEqual(Object.values(conflicts), ["Babe", Beethoven"]) + * ``` + */ +export function getConflicts( + doc: Doc, + prop: Prop +): Conflicts | undefined { + const state = _state(doc, false) + const objectId = _obj(doc) + if (objectId != null) { + return conflictAt(state.handle, objectId, prop, state.textV2) + } else { + return undefined + } +} + +/** + * Get the binary representation of the last change which was made to this doc + * + * This is most useful when staying in sync with other peers, every time you + * make a change locally via {@link change} you immediately call {@link + * getLastLocalChange} and send the result over the network to other peers. + */ +export function getLastLocalChange(doc: Doc): Change | undefined { + const state = _state(doc) + return state.handle.getLastLocalChange() || undefined +} + +/** + * Return the object ID of an arbitrary javascript value + * + * This is useful to determine if something is actually an automerge document, + * if `doc` is not an automerge document this will return null. 
+ */ +export function getObjectId(doc: any, prop?: Prop): ObjID | null { + if (prop) { + const state = _state(doc, false) + const objectId = _obj(doc) + if (!state || !objectId) { + return null + } + return state.handle.get(objectId, prop) as ObjID + } else { + return _obj(doc) + } +} + +/** + * Get the changes which are in `newState` but not in `oldState`. The returned + * changes can be loaded in `oldState` via {@link applyChanges}. + * + * Note that this will crash if there are changes in `oldState` which are not in `newState`. + */ +export function getChanges(oldState: Doc, newState: Doc): Change[] { + const n = _state(newState) + return n.handle.getChanges(getHeads(oldState)) +} + +/** + * Get all the changes in a document + * + * This is different to {@link save} because the output is an array of changes + * which can be individually applied via {@link applyChanges}` + * + */ +export function getAllChanges(doc: Doc): Change[] { + const state = _state(doc) + return state.handle.getChanges([]) +} + +/** + * Apply changes received from another document + * + * `doc` will be updated to reflect the `changes`. If there are changes which + * we do not have dependencies for yet those will be stored in the document and + * applied when the depended on changes arrive. + * + * You can use the {@link ApplyOptions} to pass a patchcallback which will be + * informed of any changes which occur as a result of applying the changes + * + */ +export function applyChanges( + doc: Doc, + changes: Change[], + opts?: ApplyOptions +): [Doc] { + const state = _state(doc) + if (!opts) { + opts = {} + } + if (state.heads) { + throw new RangeError( + "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." 
+ ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + state.handle.applyChanges(changes) + state.heads = heads + return [ + progressDocument(doc, heads, opts.patchCallback || state.patchCallback), + ] +} + +/** @hidden */ +export function getHistory(doc: Doc): State[] { + const textV2 = _state(doc).textV2 + const history = getAllChanges(doc) + return history.map((change, index) => ({ + get change() { + return decodeChange(change) + }, + get snapshot() { + const [state] = applyChanges( + init({ enableTextV2: textV2 }), + history.slice(0, index + 1) + ) + return state + }, + })) +} + +/** @hidden */ +// FIXME : no tests +// FIXME can we just use deep equals now? +export function equals(val1: unknown, val2: unknown): boolean { + if (!isObject(val1) || !isObject(val2)) return val1 === val2 + const keys1 = Object.keys(val1).sort(), + keys2 = Object.keys(val2).sort() + if (keys1.length !== keys2.length) return false + for (let i = 0; i < keys1.length; i++) { + if (keys1[i] !== keys2[i]) return false + if (!equals(val1[keys1[i]], val2[keys2[i]])) return false + } + return true +} + +/** + * encode a {@link SyncState} into binary to send over the network + * + * @group sync + * */ +export function encodeSyncState(state: SyncState): Uint8Array { + const sync = ApiHandler.importSyncState(state) + const result = ApiHandler.encodeSyncState(sync) + sync.free() + return result +} + +/** + * Decode some binary data into a {@link SyncState} + * + * @group sync + */ +export function decodeSyncState(state: Uint8Array): SyncState { + const sync = ApiHandler.decodeSyncState(state) + const result = ApiHandler.exportSyncState(sync) + sync.free() + return result +} + +/** + * Generate a sync message to send to the peer represented by `inState` + * @param doc - The doc to generate messages about + * @param inState - The {@link SyncState} representing the peer we are talking to + * + * @group sync 
+ * + * @returns An array of `[newSyncState, syncMessage | null]` where + * `newSyncState` should replace `inState` and `syncMessage` should be sent to + * the peer if it is not null. If `syncMessage` is null then we are up to date. + */ +export function generateSyncMessage( + doc: Doc, + inState: SyncState +): [SyncState, SyncMessage | null] { + const state = _state(doc) + const syncState = ApiHandler.importSyncState(inState) + const message = state.handle.generateSyncMessage(syncState) + const outState = ApiHandler.exportSyncState(syncState) + return [outState, message] +} + +/** + * Update a document and our sync state on receiving a sync message + * + * @group sync + * + * @param doc - The doc the sync message is about + * @param inState - The {@link SyncState} for the peer we are communicating with + * @param message - The message which was received + * @param opts - Any {@link ApplyOption}s, used for passing a + * {@link PatchCallback} which will be informed of any changes + * in `doc` which occur because of the received sync message. + * + * @returns An array of `[newDoc, newSyncState, syncMessage | null]` where + * `newDoc` is the updated state of `doc`, `newSyncState` should replace + * `inState` and `syncMessage` should be sent to the peer if it is not null. If + * `syncMessage` is null then we are up to date. + */ +export function receiveSyncMessage( + doc: Doc, + inState: SyncState, + message: SyncMessage, + opts?: ApplyOptions +): [Doc, SyncState, null] { + const syncState = ApiHandler.importSyncState(inState) + if (!opts) { + opts = {} + } + const state = _state(doc) + if (state.heads) { + throw new RangeError( + "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." 
+ ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + state.handle.receiveSyncMessage(syncState, message) + const outSyncState = ApiHandler.exportSyncState(syncState) + return [ + progressDocument(doc, heads, opts.patchCallback || state.patchCallback), + outSyncState, + null, + ] +} + +/** + * Create a new, blank {@link SyncState} + * + * When communicating with a peer for the first time use this to generate a new + * {@link SyncState} for them + * + * @group sync + */ +export function initSyncState(): SyncState { + return ApiHandler.exportSyncState(ApiHandler.initSyncState()) +} + +/** @hidden */ +export function encodeChange(change: ChangeToEncode): Change { + return ApiHandler.encodeChange(change) +} + +/** @hidden */ +export function decodeChange(data: Change): DecodedChange { + return ApiHandler.decodeChange(data) +} + +/** @hidden */ +export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { + return ApiHandler.encodeSyncMessage(message) +} + +/** @hidden */ +export function decodeSyncMessage(message: SyncMessage): DecodedSyncMessage { + return ApiHandler.decodeSyncMessage(message) +} + +/** + * Get any changes in `doc` which are not dependencies of `heads` + */ +export function getMissingDeps(doc: Doc, heads: Heads): Heads { + const state = _state(doc) + return state.handle.getMissingDeps(heads) +} + +/** + * Get the hashes of the heads of this document + */ +export function getHeads(doc: Doc): Heads { + const state = _state(doc) + return state.heads || state.handle.getHeads() +} + +/** @hidden */ +export function dump(doc: Doc) { + const state = _state(doc) + state.handle.dump() +} + +/** @hidden */ +export function toJS(doc: Doc): T { + const state = _state(doc) + const enabled = state.handle.enableFreeze(false) + const result = state.handle.materialize() + state.handle.enableFreeze(enabled) + return result as T +} + +export function 
isAutomerge(doc: unknown): boolean { + if (typeof doc == "object" && doc !== null) { + return getObjectId(doc) === "_root" && !!Reflect.get(doc, STATE) + } else { + return false + } +} + +function isObject(obj: unknown): obj is Record { + return typeof obj === "object" && obj !== null +} + +export type { + API, + SyncState, + ActorId, + Conflicts, + Prop, + Change, + ObjID, + DecodedChange, + DecodedSyncMessage, + Heads, + MaterializeValue, +} diff --git a/javascript/src/text.ts b/javascript/src/text.ts new file mode 100644 index 00000000..bb0a868d --- /dev/null +++ b/javascript/src/text.ts @@ -0,0 +1,218 @@ +import { Value } from "@automerge/automerge-wasm" +import { TEXT, STATE } from "./constants" + +export class Text { + elems: Array + str: string | undefined + spans: Array | undefined + + constructor(text?: string | string[] | Value[]) { + if (typeof text === "string") { + this.elems = [...text] + } else if (Array.isArray(text)) { + this.elems = text + } else if (text === undefined) { + this.elems = [] + } else { + throw new TypeError(`Unsupported initial value for Text: ${text}`) + } + Reflect.defineProperty(this, TEXT, { value: true }) + } + + get length(): number { + return this.elems.length + } + + get(index: number): any { + return this.elems[index] + } + + /** + * Iterates over the text elements character by character, including any + * inline objects. + */ + [Symbol.iterator]() { + const elems = this.elems + let index = -1 + return { + next() { + index += 1 + if (index < elems.length) { + return { done: false, value: elems[index] } + } else { + return { done: true } + } + }, + } + } + + /** + * Returns the content of the Text object as a simple string, ignoring any + * non-character elements. + */ + toString(): string { + if (!this.str) { + // Concatting to a string is faster than creating an array and then + // .join()ing for small (<100KB) arrays. 
+ // https://jsperf.com/join-vs-loop-w-type-test + this.str = "" + for (const elem of this.elems) { + if (typeof elem === "string") this.str += elem + else this.str += "\uFFFC" + } + } + return this.str + } + + /** + * Returns the content of the Text object as a sequence of strings, + * interleaved with non-character elements. + * + * For example, the value `['a', 'b', {x: 3}, 'c', 'd']` has spans: + * `=> ['ab', {x: 3}, 'cd']` + */ + toSpans(): Array { + if (!this.spans) { + this.spans = [] + let chars = "" + for (const elem of this.elems) { + if (typeof elem === "string") { + chars += elem + } else { + if (chars.length > 0) { + this.spans.push(chars) + chars = "" + } + this.spans.push(elem) + } + } + if (chars.length > 0) { + this.spans.push(chars) + } + } + return this.spans + } + + /** + * Returns the content of the Text object as a simple string, so that the + * JSON serialization of an Automerge document represents text nicely. + */ + toJSON(): string { + return this.toString() + } + + /** + * Updates the list item at position `index` to a new value `value`. + */ + set(index: number, value: Value) { + if (this[STATE]) { + throw new RangeError( + "object cannot be modified outside of a change block" + ) + } + this.elems[index] = value + } + + /** + * Inserts new list items `values` starting at position `index`. + */ + insertAt(index: number, ...values: Array) { + if (this[STATE]) { + throw new RangeError( + "object cannot be modified outside of a change block" + ) + } + this.elems.splice(index, 0, ...values) + } + + /** + * Deletes `numDelete` list items starting at position `index`. + * if `numDelete` is not given, one item is deleted. 
+ */ + deleteAt(index: number, numDelete = 1) { + if (this[STATE]) { + throw new RangeError( + "object cannot be modified outside of a change block" + ) + } + this.elems.splice(index, numDelete) + } + + map(callback: (e: Value | Object) => T) { + this.elems.map(callback) + } + + lastIndexOf(searchElement: Value, fromIndex?: number) { + this.elems.lastIndexOf(searchElement, fromIndex) + } + + concat(other: Text): Text { + return new Text(this.elems.concat(other.elems)) + } + + every(test: (v: Value) => boolean): boolean { + return this.elems.every(test) + } + + filter(test: (v: Value) => boolean): Text { + return new Text(this.elems.filter(test)) + } + + find(test: (v: Value) => boolean): Value | undefined { + return this.elems.find(test) + } + + findIndex(test: (v: Value) => boolean): number | undefined { + return this.elems.findIndex(test) + } + + forEach(f: (v: Value) => undefined) { + this.elems.forEach(f) + } + + includes(elem: Value): boolean { + return this.elems.includes(elem) + } + + indexOf(elem: Value) { + return this.elems.indexOf(elem) + } + + join(sep?: string): string { + return this.elems.join(sep) + } + + reduce( + f: ( + previousValue: Value, + currentValue: Value, + currentIndex: number, + array: Value[] + ) => Value + ) { + this.elems.reduce(f) + } + + reduceRight( + f: ( + previousValue: Value, + currentValue: Value, + currentIndex: number, + array: Value[] + ) => Value + ) { + this.elems.reduceRight(f) + } + + slice(start?: number, end?: number) { + new Text(this.elems.slice(start, end)) + } + + some(test: (Value) => boolean): boolean { + return this.elems.some(test) + } + + toLocaleString() { + this.toString() + } +} diff --git a/javascript/src/types.ts b/javascript/src/types.ts index 62fdbba8..e3cb81f8 100644 --- a/javascript/src/types.ts +++ b/javascript/src/types.ts @@ -1,7 +1,10 @@ +export { Text } from "./text" export { Counter } from "./counter" export { Int, Uint, Float64 } from "./numbers" import { Counter } from "./counter" +import 
type { Patch } from "@automerge/automerge-wasm" +export type { Patch } from "@automerge/automerge-wasm" export type AutomergeValue = | ScalarValue @@ -9,6 +12,7 @@ export type AutomergeValue = | Array export type MapValue = { [key: string]: AutomergeValue } export type ListValue = Array +export type TextValue = Array export type ScalarValue = | string | number @@ -17,3 +21,25 @@ export type ScalarValue = | Date | Counter | Uint8Array + +/** + * An automerge document. + * @typeParam T - The type of the value contained in this document + * + * Note that this provides read only access to the fields of the value. To + * modify the value use {@link change} + */ +export type Doc = { readonly [P in keyof T]: T[P] } + +/** + * Callback which is called by various methods in this library to notify the + * user of what changes have been made. + * @param patch - A description of the changes made + * @param before - The document before the change was made + * @param after - The document after the change was made + */ +export type PatchCallback = ( + patches: Array, + before: Doc, + after: Doc +) => void diff --git a/javascript/src/unstable.ts b/javascript/src/unstable.ts new file mode 100644 index 00000000..8f25586c --- /dev/null +++ b/javascript/src/unstable.ts @@ -0,0 +1,292 @@ +/** + * # The unstable API + * + * This module contains new features we are working on which are either not yet + * ready for a stable release and/or which will result in backwards incompatible + * API changes. The API of this module may change in arbitrary ways between + * point releases - we will always document what these changes are in the + * CHANGELOG below, but only depend on this module if you are prepared to deal + * with frequent changes. + * + * ## Differences from stable + * + * In the stable API text objects are represented using the {@link Text} class. 
+ * This means you must decide up front whether your string data might need + * concurrent merges in the future and if you change your mind you have to + * figure out how to migrate your data. In the unstable API the `Text` class is + * gone and all `string`s are represented using the text CRDT, allowing for + * concurrent changes. Modifying a string is done using the {@link splice} + * function. You can still access the old behaviour of strings which do not + * support merging behaviour via the {@link RawString} class. + * + * This leads to the following differences from `stable`: + * + * * There is no `unstable.Text` class, all strings are text objects + * * Reading strings in a `future` document is the same as reading any other + * javascript string + * * To modify strings in a `future` document use {@link splice} + * * The {@link AutomergeValue} type does not include the {@link Text} + * class but the {@link RawString} class is included in the {@link ScalarValue} + * type + * + * ## CHANGELOG + * * Introduce this module to expose the new API which has no `Text` class + * + * + * @module + */ +import { Counter } from "./types" + +export { Counter, Doc, Int, Uint, Float64, Patch, PatchCallback } from "./types" + +import type { PatchCallback } from "./stable" + +export type AutomergeValue = + | ScalarValue + | { [key: string]: AutomergeValue } + | Array +export type MapValue = { [key: string]: AutomergeValue } +export type ListValue = Array +export type ScalarValue = + | string + | number + | null + | boolean + | Date + | Counter + | Uint8Array + | RawString + +export type Conflicts = { [key: string]: AutomergeValue } + +export { + PutPatch, + DelPatch, + SplicePatch, + IncPatch, + SyncMessage, +} from "@automerge/automerge-wasm" + +export type { ChangeOptions, ApplyOptions, ChangeFn } from "./stable" +export { + view, + free, + getHeads, + change, + emptyChange, + loadIncremental, + save, + merge, + getActorId, + getLastLocalChange, + getChanges, + getAllChanges, 
+ applyChanges, + getHistory, + equals, + encodeSyncState, + decodeSyncState, + generateSyncMessage, + receiveSyncMessage, + initSyncState, + encodeChange, + decodeChange, + encodeSyncMessage, + decodeSyncMessage, + getMissingDeps, + dump, + toJS, + isAutomerge, + getObjectId, +} from "./stable" + +export type InitOptions = { + /** The actor ID to use for this document, a random one will be generated if `null` is passed */ + actor?: ActorId + freeze?: boolean + /** A callback which will be called with the initial patch once the document has finished loading */ + patchCallback?: PatchCallback +} + +import { ActorId, Doc } from "./stable" +import * as stable from "./stable" +export { RawString } from "./raw_string" + +/** @hidden */ +export const getBackend = stable.getBackend + +import { _is_proxy, _state, _obj } from "./internal_state" +import { RawString } from "./raw_string" + +/** + * Create a new automerge document + * + * @typeParam T - The type of value contained in the document. This will be the + * type that is passed to the change closure in {@link change} + * @param _opts - Either an actorId or an {@link InitOptions} (which may + * contain an actorId). If this is null the document will be initialised with a + * random actor ID + */ +export function init(_opts?: ActorId | InitOptions): Doc { + let opts = importOpts(_opts) + opts.enableTextV2 = true + return stable.init(opts) +} + +/** + * Make a full writable copy of an automerge document + * + * @remarks + * Unlike {@link view} this function makes a full copy of the memory backing + * the document and can thus be passed to {@link change}. It also generates a + * new actor ID so that changes made in the new document do not create duplicate + * sequence numbers with respect to the old document. 
If you need control over + * the actor ID which is generated you can pass the actor ID as the second + * argument + * + * @typeParam T - The type of the value contained in the document + * @param doc - The document to clone + * @param _opts - Either an actor ID to use for the new doc or an {@link InitOptions} + */ +export function clone( + doc: Doc, + _opts?: ActorId | InitOptions +): Doc { + let opts = importOpts(_opts) + opts.enableTextV2 = true + return stable.clone(doc, opts) +} + +/** + * Create an automerge document from a POJO + * + * @param initialState - The initial state which will be copied into the document + * @typeParam T - The type of the value passed to `from` _and_ the type the resulting document will contain + * @typeParam actor - The actor ID of the resulting document, if this is null a random actor ID will be used + * + * @example + * ``` + * const doc = automerge.from({ + * tasks: [ + * {description: "feed dogs", done: false} + * ] + * }) + * ``` + */ +export function from>( + initialState: T | Doc, + _opts?: ActorId | InitOptions +): Doc { + const opts = importOpts(_opts) + opts.enableTextV2 = true + return stable.from(initialState, opts) +} + +/** + * Load an automerge document from a compressed document produce by {@link save} + * + * @typeParam T - The type of the value which is contained in the document. + * Note that no validation is done to make sure this type is in + * fact the type of the contained value so be a bit careful + * @param data - The compressed document + * @param _opts - Either an actor ID or some {@link InitOptions}, if the actor + * ID is null a random actor ID will be created + * + * Note that `load` will throw an error if passed incomplete content (for + * example if you are receiving content over the network and don't know if you + * have the complete document yet). If you need to handle incomplete content use + * {@link init} followed by {@link loadIncremental}. 
+ */ +export function load( + data: Uint8Array, + _opts?: ActorId | InitOptions +): Doc { + const opts = importOpts(_opts) + opts.enableTextV2 = true + return stable.load(data, opts) +} + +function importOpts( + _actor?: ActorId | InitOptions +): stable.InitOptions { + if (typeof _actor === "object") { + return _actor + } else { + return { actor: _actor } + } +} + +export function splice( + doc: Doc, + prop: stable.Prop, + index: number, + del: number, + newText?: string +) { + if (!_is_proxy(doc)) { + throw new RangeError("object cannot be modified outside of a change block") + } + const state = _state(doc, false) + const objectId = _obj(doc) + if (!objectId) { + throw new RangeError("invalid object for splice") + } + const value = `${objectId}/${prop}` + try { + return state.handle.splice(value, index, del, newText) + } catch (e) { + throw new RangeError(`Cannot splice: ${e}`) + } +} + +/** + * Get the conflicts associated with a property + * + * The values of properties in a map in automerge can be conflicted if there + * are concurrent "put" operations to the same key. Automerge chooses one value + * arbitrarily (but deterministically, any two nodes who have the same set of + * changes will choose the same value) from the set of conflicting values to + * present as the value of the key. + * + * Sometimes you may want to examine these conflicts, in this case you can use + * {@link getConflicts} to get the conflicts for the key. 
+ * + * @example + * ``` + * import * as automerge from "@automerge/automerge" + * + * type Profile = { + * pets: Array<{name: string, type: string}> + * } + * + * let doc1 = automerge.init("aaaa") + * doc1 = automerge.change(doc1, d => { + * d.pets = [{name: "Lassie", type: "dog"}] + * }) + * let doc2 = automerge.init("bbbb") + * doc2 = automerge.merge(doc2, automerge.clone(doc1)) + * + * doc2 = automerge.change(doc2, d => { + * d.pets[0].name = "Beethoven" + * }) + * + * doc1 = automerge.change(doc1, d => { + * d.pets[0].name = "Babe" + * }) + * + * const doc3 = automerge.merge(doc1, doc2) + * + * // Note that here we pass `doc3.pets`, not `doc3` + * let conflicts = automerge.getConflicts(doc3.pets[0], "name") + * + * // The two conflicting values are the keys of the conflicts object + * assert.deepEqual(Object.values(conflicts), ["Babe", Beethoven"]) + * ``` + */ +export function getConflicts( + doc: Doc, + prop: stable.Prop +): Conflicts | undefined { + // this function only exists to get the types to line up with future.AutomergeValue + return stable.getConflicts(doc, prop) +} diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts index c14c0e20..90e7a99d 100644 --- a/javascript/test/basic_test.ts +++ b/javascript/test/basic_test.ts @@ -1,5 +1,5 @@ import * as assert from "assert" -import * as Automerge from "../src" +import { unstable as Automerge } from "../src" import * as WASM from "@automerge/automerge-wasm" describe("Automerge", () => { diff --git a/javascript/test/extra_api_tests.ts b/javascript/test/extra_api_tests.ts index 69932d1f..84fa4c39 100644 --- a/javascript/test/extra_api_tests.ts +++ b/javascript/test/extra_api_tests.ts @@ -1,5 +1,5 @@ import * as assert from "assert" -import * as Automerge from "../src" +import { unstable as Automerge } from "../src" describe("Automerge", () => { describe("basics", () => { diff --git a/javascript/test/legacy_tests.ts b/javascript/test/legacy_tests.ts index 477a5545..a423b51f 100644 --- 
a/javascript/test/legacy_tests.ts +++ b/javascript/test/legacy_tests.ts @@ -1,5 +1,5 @@ import * as assert from "assert" -import * as Automerge from "../src" +import { unstable as Automerge } from "../src" import { assertEqualsOneOf } from "./helpers" import { decodeChange } from "./legacy/columnar" diff --git a/javascript/test/stable_unstable_interop.ts b/javascript/test/stable_unstable_interop.ts new file mode 100644 index 00000000..2f58c256 --- /dev/null +++ b/javascript/test/stable_unstable_interop.ts @@ -0,0 +1,41 @@ +import * as assert from "assert" +import * as stable from "../src" +import { unstable } from "../src" + +describe("stable/unstable interop", () => { + it("should allow reading Text from stable as strings in unstable", () => { + let stableDoc = stable.from({ + text: new stable.Text("abc"), + }) + let unstableDoc = unstable.init() + unstableDoc = unstable.merge(unstableDoc, stableDoc) + assert.deepStrictEqual(unstableDoc.text, "abc") + }) + + it("should allow string from stable as Text in unstable", () => { + let unstableDoc = unstable.from({ + text: "abc", + }) + let stableDoc = stable.init() + stableDoc = unstable.merge(stableDoc, unstableDoc) + assert.deepStrictEqual(stableDoc.text, new stable.Text("abc")) + }) + + it("should allow reading strings from stable as RawString in unstable", () => { + let stableDoc = stable.from({ + text: "abc", + }) + let unstableDoc = unstable.init() + unstableDoc = unstable.merge(unstableDoc, stableDoc) + assert.deepStrictEqual(unstableDoc.text, new unstable.RawString("abc")) + }) + + it("should allow reading RawString from unstable as string in stable", () => { + let unstableDoc = unstable.from({ + text: new unstable.RawString("abc"), + }) + let stableDoc = stable.init() + stableDoc = unstable.merge(stableDoc, unstableDoc) + assert.deepStrictEqual(stableDoc.text, "abc") + }) +}) diff --git a/javascript/test/text_test.ts b/javascript/test/text_test.ts index 076e20b2..518c7d2b 100644 --- 
a/javascript/test/text_test.ts +++ b/javascript/test/text_test.ts @@ -1,5 +1,5 @@ import * as assert from "assert" -import * as Automerge from "../src" +import { unstable as Automerge } from "../src" import { assertEqualsOneOf } from "./helpers" type DocType = { diff --git a/javascript/test/text_v1.ts b/javascript/test/text_v1.ts new file mode 100644 index 00000000..b111530f --- /dev/null +++ b/javascript/test/text_v1.ts @@ -0,0 +1,281 @@ +import * as assert from "assert" +import * as Automerge from "../src" +import { assertEqualsOneOf } from "./helpers" + +type DocType = { text: Automerge.Text; [key: string]: any } + +describe("Automerge.Text", () => { + let s1: Automerge.Doc, s2: Automerge.Doc + beforeEach(() => { + s1 = Automerge.change( + Automerge.init(), + doc => (doc.text = new Automerge.Text()) + ) + s2 = Automerge.merge(Automerge.init(), s1) + }) + + it("should support insertion", () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a")) + assert.strictEqual(s1.text.length, 1) + assert.strictEqual(s1.text.get(0), "a") + assert.strictEqual(s1.text.toString(), "a") + //assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`) + }) + + it("should support deletion", () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c")) + s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 1)) + assert.strictEqual(s1.text.length, 2) + assert.strictEqual(s1.text.get(0), "a") + assert.strictEqual(s1.text.get(1), "c") + assert.strictEqual(s1.text.toString(), "ac") + }) + + it("should support implicit and explicit deletion", () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c")) + s1 = Automerge.change(s1, doc => doc.text.deleteAt(1)) + s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 0)) + assert.strictEqual(s1.text.length, 2) + assert.strictEqual(s1.text.get(0), "a") + assert.strictEqual(s1.text.get(1), "c") + assert.strictEqual(s1.text.toString(), "ac") + }) + + it("should handle concurrent 
insertion", () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c")) + s2 = Automerge.change(s2, doc => doc.text.insertAt(0, "x", "y", "z")) + s1 = Automerge.merge(s1, s2) + assert.strictEqual(s1.text.length, 6) + assertEqualsOneOf(s1.text.toString(), "abcxyz", "xyzabc") + assertEqualsOneOf(s1.text.join(""), "abcxyz", "xyzabc") + }) + + it("should handle text and other ops in the same change", () => { + s1 = Automerge.change(s1, doc => { + doc.foo = "bar" + doc.text.insertAt(0, "a") + }) + assert.strictEqual(s1.foo, "bar") + assert.strictEqual(s1.text.toString(), "a") + assert.strictEqual(s1.text.join(""), "a") + }) + + it("should serialize to JSON as a simple string", () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", '"', "b")) + assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}') + }) + + it("should allow modification before an object is assigned to a document", () => { + s1 = Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text() + text.insertAt(0, "a", "b", "c", "d") + text.deleteAt(2) + doc.text = text + assert.strictEqual(doc.text.toString(), "abd") + assert.strictEqual(doc.text.join(""), "abd") + }) + assert.strictEqual(s1.text.toString(), "abd") + assert.strictEqual(s1.text.join(""), "abd") + }) + + it("should allow modification after an object is assigned to a document", () => { + s1 = Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text() + doc.text = text + doc.text.insertAt(0, "a", "b", "c", "d") + doc.text.deleteAt(2) + assert.strictEqual(doc.text.toString(), "abd") + assert.strictEqual(doc.text.join(""), "abd") + }) + assert.strictEqual(s1.text.join(""), "abd") + }) + + it("should not allow modification outside of a change callback", () => { + assert.throws( + () => s1.text.insertAt(0, "a"), + /object cannot be modified outside of a change block/ + ) + }) + + describe("with initial value", () => { + it("should accept a string as initial value", () => { + 
let s1 = Automerge.change( + Automerge.init(), + doc => (doc.text = new Automerge.Text("init")) + ) + assert.strictEqual(s1.text.length, 4) + assert.strictEqual(s1.text.get(0), "i") + assert.strictEqual(s1.text.get(1), "n") + assert.strictEqual(s1.text.get(2), "i") + assert.strictEqual(s1.text.get(3), "t") + assert.strictEqual(s1.text.toString(), "init") + }) + + it("should accept an array as initial value", () => { + let s1 = Automerge.change( + Automerge.init(), + doc => (doc.text = new Automerge.Text(["i", "n", "i", "t"])) + ) + assert.strictEqual(s1.text.length, 4) + assert.strictEqual(s1.text.get(0), "i") + assert.strictEqual(s1.text.get(1), "n") + assert.strictEqual(s1.text.get(2), "i") + assert.strictEqual(s1.text.get(3), "t") + assert.strictEqual(s1.text.toString(), "init") + }) + + it("should initialize text in Automerge.from()", () => { + let s1 = Automerge.from({ text: new Automerge.Text("init") }) + assert.strictEqual(s1.text.length, 4) + assert.strictEqual(s1.text.get(0), "i") + assert.strictEqual(s1.text.get(1), "n") + assert.strictEqual(s1.text.get(2), "i") + assert.strictEqual(s1.text.get(3), "t") + assert.strictEqual(s1.text.toString(), "init") + }) + + it("should encode the initial value as a change", () => { + const s1 = Automerge.from({ text: new Automerge.Text("init") }) + const changes = Automerge.getAllChanges(s1) + assert.strictEqual(changes.length, 1) + const [s2] = Automerge.applyChanges(Automerge.init(), changes) + assert.strictEqual(s2.text instanceof Automerge.Text, true) + assert.strictEqual(s2.text.toString(), "init") + assert.strictEqual(s2.text.join(""), "init") + }) + + it("should allow immediate access to the value", () => { + Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text("init") + assert.strictEqual(text.length, 4) + assert.strictEqual(text.get(0), "i") + assert.strictEqual(text.toString(), "init") + doc.text = text + assert.strictEqual(doc.text.length, 4) + assert.strictEqual(doc.text.get(0), "i") 
+ assert.strictEqual(doc.text.toString(), "init") + }) + }) + + it("should allow pre-assignment modification of the initial value", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text("init") + text.deleteAt(3) + assert.strictEqual(text.join(""), "ini") + doc.text = text + assert.strictEqual(doc.text.join(""), "ini") + assert.strictEqual(doc.text.toString(), "ini") + }) + assert.strictEqual(s1.text.toString(), "ini") + assert.strictEqual(s1.text.join(""), "ini") + }) + + it("should allow post-assignment modification of the initial value", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text("init") + doc.text = text + doc.text.deleteAt(0) + doc.text.insertAt(0, "I") + assert.strictEqual(doc.text.join(""), "Init") + assert.strictEqual(doc.text.toString(), "Init") + }) + assert.strictEqual(s1.text.join(""), "Init") + assert.strictEqual(s1.text.toString(), "Init") + }) + }) + + describe("non-textual control characters", () => { + let s1: Automerge.Doc + beforeEach(() => { + s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text() + doc.text.insertAt(0, "a") + doc.text.insertAt(1, { attribute: "bold" }) + }) + }) + + it("should allow fetching non-textual characters", () => { + assert.deepEqual(s1.text.get(1), { attribute: "bold" }) + //assert.strictEqual(s1.text.getElemId(1), `3@${Automerge.getActorId(s1)}`) + }) + + it("should include control characters in string length", () => { + assert.strictEqual(s1.text.length, 2) + assert.strictEqual(s1.text.get(0), "a") + }) + + it("should replace control characters from toString()", () => { + assert.strictEqual(s1.text.toString(), "a\uFFFC") + }) + + it("should allow control characters to be updated", () => { + const s2 = Automerge.change( + s1, + doc => (doc.text.get(1)!.attribute = "italic") + ) + const s3 = Automerge.load(Automerge.save(s2)) + assert.strictEqual(s1.text.get(1).attribute, "bold") + 
assert.strictEqual(s2.text.get(1).attribute, "italic") + assert.strictEqual(s3.text.get(1).attribute, "italic") + }) + + describe("spans interface to Text", () => { + it("should return a simple string as a single span", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text("hello world") + }) + assert.deepEqual(s1.text.toSpans(), ["hello world"]) + }) + it("should return an empty string as an empty array", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text() + }) + assert.deepEqual(s1.text.toSpans(), []) + }) + it("should split a span at a control character", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text("hello world") + doc.text.insertAt(5, { attributes: { bold: true } }) + }) + assert.deepEqual(s1.text.toSpans(), [ + "hello", + { attributes: { bold: true } }, + " world", + ]) + }) + it("should allow consecutive control characters", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text("hello world") + doc.text.insertAt(5, { attributes: { bold: true } }) + doc.text.insertAt(6, { attributes: { italic: true } }) + }) + assert.deepEqual(s1.text.toSpans(), [ + "hello", + { attributes: { bold: true } }, + { attributes: { italic: true } }, + " world", + ]) + }) + it("should allow non-consecutive control characters", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text("hello world") + doc.text.insertAt(5, { attributes: { bold: true } }) + doc.text.insertAt(12, { attributes: { italic: true } }) + }) + assert.deepEqual(s1.text.toSpans(), [ + "hello", + { attributes: { bold: true } }, + " world", + { attributes: { italic: true } }, + ]) + }) + }) + }) + + it("should support unicode when creating text", () => { + s1 = Automerge.from({ + text: new Automerge.Text("🐦"), + }) + assert.strictEqual(s1.text.get(0), "🐦") + }) +}) From 
d1220b9dd08e0a9e4206634ffb4956634453c26b Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 10 Jan 2023 11:25:06 +0000 Subject: [PATCH 678/730] javascript: Use glob to list files in package.json We have been listing all the files to be included in the distributed package in package.json:files. This is tedious and error prone. We change to using globs instead, to do this without also including the test and src files when outputting declarations we add a new typescript config file for the declaration generation which excludes tests. --- javascript/config/declonly.json | 8 ++++++++ javascript/package.json | 30 +++++------------------------- 2 files changed, 13 insertions(+), 25 deletions(-) create mode 100644 javascript/config/declonly.json diff --git a/javascript/config/declonly.json b/javascript/config/declonly.json new file mode 100644 index 00000000..df615930 --- /dev/null +++ b/javascript/config/declonly.json @@ -0,0 +1,8 @@ +{ + "extends": "../tsconfig.json", + "exclude": ["../dist/**/*", "../node_modules", "../test/**/*"], + "emitDeclarationOnly": true, + "compilerOptions": { + "outDir": "../dist" + } +} diff --git a/javascript/package.json b/javascript/package.json index 33523370..a7412c70 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -12,30 +12,10 @@ "README.md", "LICENSE", "package.json", - "index.d.ts", - "dist/*.d.ts", - "dist/cjs/constants.js", - "dist/cjs/types.js", - "dist/cjs/numbers.js", - "dist/cjs/index.js", - "dist/cjs/uuid.js", - "dist/cjs/counter.js", - "dist/cjs/low_level.js", - "dist/cjs/next.js", - "dist/cjs/text.js", - "dist/cjs/proxies.js", - "dist/cjs/raw_string.js", - "dist/mjs/constants.js", - "dist/mjs/types.js", - "dist/mjs/numbers.js", - "dist/mjs/next.js", - "dist/mjs/index.js", - "dist/mjs/uuid.js", - "dist/mjs/counter.js", - "dist/mjs/low_level.js", - "dist/mjs/text.js", - "dist/mjs/proxies.js", - "dist/mjs/raw_string.js" + "dist/index.d.ts", + "dist/cjs/**/*.js", + "dist/mjs/**/*.js", + "dist/*.d.ts" ], 
"types": "./dist/index.d.ts", "module": "./dist/mjs/index.js", @@ -43,7 +23,7 @@ "license": "MIT", "scripts": { "lint": "eslint src", - "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc --emitDeclarationOnly", + "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc -p config/declonly.json --emitDeclarationOnly", "test": "ts-mocha test/*.ts", "watch-docs": "typedoc src/index.ts --watch --readme typedoc-readme.md" }, From 0e7fb6cc10c0fac0aaa4dc799f05b9aed6c17f31 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 10 Jan 2023 11:49:16 +0000 Subject: [PATCH 679/730] javascript: Add @packageDocumentation TSDoc Instead of using the `--readme` argument to `typedoc` use the `@packageDocumentation` TSDoc tag to include the readme text in the typedoc output. --- javascript/.prettierignore | 1 + javascript/package.json | 2 +- javascript/src/index.ts | 239 +++++++++++++++++++++++++++++++++++ javascript/src/unstable.ts | 2 +- javascript/typedoc-readme.md | 226 --------------------------------- 5 files changed, 242 insertions(+), 228 deletions(-) delete mode 100644 javascript/typedoc-readme.md diff --git a/javascript/.prettierignore b/javascript/.prettierignore index 8116ea24..c2dcd4bb 100644 --- a/javascript/.prettierignore +++ b/javascript/.prettierignore @@ -1,2 +1,3 @@ e2e/verdacciodb dist +docs diff --git a/javascript/package.json b/javascript/package.json index a7412c70..a424de48 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -25,7 +25,7 @@ "lint": "eslint src", "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc -p config/declonly.json --emitDeclarationOnly", "test": "ts-mocha test/*.ts", - "watch-docs": "typedoc src/index.ts --watch --readme typedoc-readme.md" + "watch-docs": "typedoc src/index.ts --watch --readme none" }, "devDependencies": { "@types/expect": "^24.3.0", diff --git a/javascript/src/index.ts b/javascript/src/index.ts index 7d4a68ba..bf84c68d 100644 --- a/javascript/src/index.ts +++ 
b/javascript/src/index.ts @@ -1,3 +1,242 @@ +/** + * # Automerge + * + * This library provides the core automerge data structure and sync algorithms. + * Other libraries can be built on top of this one which provide IO and + * persistence. + * + * An automerge document can be thought of as an immutable POJO (plain old javascript + * object) which `automerge` tracks the history of, allowing it to be merged with + * any other automerge document. + * + * ## Creating and modifying a document + * + * You can create a document with {@link init} or {@link from} and then make + * changes to it with {@link change}, you can merge two documents with {@link + * merge}. + * + * ```ts + * import * as automerge from "@automerge/automerge" + * + * type DocType = {ideas: Array} + * + * let doc1 = automerge.init() + * doc1 = automerge.change(doc1, d => { + * d.ideas = [new automerge.Text("an immutable document")] + * }) + * + * let doc2 = automerge.init() + * doc2 = automerge.merge(doc2, automerge.clone(doc1)) + * doc2 = automerge.change(doc2, d => { + * d.ideas.push(new automerge.Text("which records it's history")) + * }) + * + * // Note the `automerge.clone` call, see the "cloning" section of this readme for + * // more detail + * doc1 = automerge.merge(doc1, automerge.clone(doc2)) + * doc1 = automerge.change(doc1, d => { + * d.ideas[0].deleteAt(13, 8) + * d.ideas[0].insertAt(13, "object") + * }) + * + * let doc3 = automerge.merge(doc1, doc2) + * // doc3 is now {ideas: ["an immutable object", "which records it's history"]} + * ``` + * + * ## Applying changes from another document + * + * You can get a representation of the result of the last {@link change} you made + * to a document with {@link getLastLocalChange} and you can apply that change to + * another document using {@link applyChanges}.
+ * + * If you need to get just the changes which are in one document but not in another + * you can use {@link getHeads} to get the heads of the document without the + * changes and then {@link getMissingDeps}, passing the result of {@link getHeads} + * on the document with the changes. + * + * ## Saving and loading documents + * + * You can {@link save} a document to generate a compressed binary representation of + * the document which can be loaded with {@link load}. If you have a document which + * you have recently made changes to you can generate recent changes with {@link + * saveIncremental}, this will generate all the changes since you last called + * `saveIncremental`, the changes generated can be applied to another document with + * {@link loadIncremental}. + * + * ## Viewing different versions of a document + * + * Occasionally you may wish to explicitly step to a different point in a document + * history. One common reason to do this is if you need to obtain a set of changes + * which take the document from one state to another in order to send those changes + * to another peer (or to save them somewhere). You can use {@link view} to do this. + * + * ```ts + * import * as automerge from "@automerge/automerge" + * import * as assert from "assert" + * + * let doc = automerge.from({ + * key1: "value1", + * }) + * + * // Make a clone of the document at this point, maybe this is actually on another + * // peer.
+ * let doc2 = automerge.clone < any > doc + * + * let heads = automerge.getHeads(doc) + * + * doc = + * automerge.change < + * any > + * (doc, + * d => { + * d.key2 = "value2" + * }) + * + * doc = + * automerge.change < + * any > + * (doc, + * d => { + * d.key3 = "value3" + * }) + * + * // At this point we've generated two separate changes, now we want to send + * // just those changes to someone else + * + * // view is a cheap reference based copy of a document at a given set of heads + * let before = automerge.view(doc, heads) + * + * // This view doesn't show the last two changes in the document state + * assert.deepEqual(before, { + * key1: "value1", + * }) + * + * // Get the changes to send to doc2 + * let changes = automerge.getChanges(before, doc) + * + * // Apply the changes at doc2 + * doc2 = automerge.applyChanges < any > (doc2, changes)[0] + * assert.deepEqual(doc2, { + * key1: "value1", + * key2: "value2", + * key3: "value3", + * }) + * ``` + * + * If you have a {@link view} of a document which you want to make changes to you + * can {@link clone} the viewed document. + * + * ## Syncing + * + * The sync protocol is stateful. This means that we start by creating a {@link + * SyncState} for each peer we are communicating with using {@link initSyncState}. + * Then we generate a message to send to the peer by calling {@link + * generateSyncMessage}. When we receive a message from the peer we call {@link + * receiveSyncMessage}. Here's a simple example of a loop which just keeps two + * peers in sync. 
+ * + * ```ts + * let sync1 = automerge.initSyncState() + * let msg: Uint8Array | null + * ;[sync1, msg] = automerge.generateSyncMessage(doc1, sync1) + * + * while (true) { + * if (msg != null) { + * network.send(msg) + * } + * let resp: Uint8Array = + * (network.receive()[(doc1, sync1, _ignore)] = + * automerge.receiveSyncMessage(doc1, sync1, resp)[(sync1, msg)] = + * automerge.generateSyncMessage(doc1, sync1)) + * } + * ``` + * + * ## Conflicts + * + * The only time conflicts occur in automerge documents is in concurrent + * assignments to the same key in an object. In this case automerge + * deterministically chooses an arbitrary value to present to the application but + * you can examine the conflicts using {@link getConflicts}. + * + * ``` + * import * as automerge from "@automerge/automerge" + * + * type Profile = { + * pets: Array<{name: string, type: string}> + * } + * + * let doc1 = automerge.init("aaaa") + * doc1 = automerge.change(doc1, d => { + * d.pets = [{name: "Lassie", type: "dog"}] + * }) + * let doc2 = automerge.init("bbbb") + * doc2 = automerge.merge(doc2, automerge.clone(doc1)) + * + * doc2 = automerge.change(doc2, d => { + * d.pets[0].name = "Beethoven" + * }) + * + * doc1 = automerge.change(doc1, d => { + * d.pets[0].name = "Babe" + * }) + * + * const doc3 = automerge.merge(doc1, doc2) + * + * // Note that here we pass `doc3.pets`, not `doc3` + * let conflicts = automerge.getConflicts(doc3.pets[0], "name") + * + * // The two conflicting values are the keys of the conflicts object + * assert.deepEqual(Object.values(conflicts), ["Babe", "Beethoven"]) + * ``` + * + * ## Actor IDs + * + * By default automerge will generate a random actor ID for you, but most methods + * for creating a document allow you to set the actor ID. You can get the actor ID + * associated with the document by calling {@link getActorId}. Actor IDs must not + * be used in concurrent threads of execution - all changes by a given actor ID + * are expected to be sequential.
+ * + * ## Listening to patches + * + * Sometimes you want to respond to changes made to an automerge document. In this + * case you can use the {@link PatchCallback} type to receive notifications when + * changes have been made. + * + * ## Cloning + * + * Currently you cannot make mutating changes (i.e. call {@link change}) to a + * document which you have two pointers to. For example, in this code: + * + * ```javascript + * let doc1 = automerge.init() + * let doc2 = automerge.change(doc1, d => (d.key = "value")) + * ``` + * + * `doc1` and `doc2` are both pointers to the same state. Any attempt to call + * mutating methods on `doc1` will now result in an error like + * + * Attempting to change an out of date document + * + * If you encounter this you need to clone the original document, the above sample + * would work as: + * + * ```javascript + * let doc1 = automerge.init() + * let doc2 = automerge.change(automerge.clone(doc1), d => (d.key = "value")) + * ``` + * @packageDocumentation + * + * ## The {@link unstable} module + * + * We are working on some changes to automerge which are not yet complete and + * will result in backwards incompatible API changes. Once these changes are + * ready for production use we will release a new major version of automerge. + * However, until that point you can use the {@link unstable} module to try out + * the new features, documents from the {@link unstable} module are + * interoperable with documents from the main module. Please see the docs for + * the {@link unstable} module for more details. + */ export * from "./stable" import * as unstable from "./unstable" export { unstable } diff --git a/javascript/src/unstable.ts b/javascript/src/unstable.ts index 8f25586c..3ee18dbc 100644 --- a/javascript/src/unstable.ts +++ b/javascript/src/unstable.ts @@ -5,7 +5,7 @@ * ready for a stable release and/or which will result in backwards incompatible * API changes. 
The API of this module may change in arbitrary ways between * point releases - we will always document what these changes are in the - * CHANGELOG below, but only depend on this module if you are prepared to deal + * [CHANGELOG](#changelog) below, but only depend on this module if you are prepared to deal * with frequent changes. * * ## Differences from stable diff --git a/javascript/typedoc-readme.md b/javascript/typedoc-readme.md deleted file mode 100644 index 258b9e20..00000000 --- a/javascript/typedoc-readme.md +++ /dev/null @@ -1,226 +0,0 @@ -# Automerge - -This library provides the core automerge data structure and sync algorithms. -Other libraries can be built on top of this one which provide IO and -persistence. - -An automerge document can be though of an immutable POJO (plain old javascript -object) which `automerge` tracks the history of, allowing it to be merged with -any other automerge document. - -## Creating and modifying a document - -You can create a document with {@link init} or {@link from} and then make -changes to it with {@link change}, you can merge two documents with {@link -merge}. 
- -```javascript -import * as automerge from "@automerge/automerge" - -type DocType = {ideas: Array} - -let doc1 = automerge.init() -doc1 = automerge.change(doc1, d => { - d.ideas = [new automerge.Text("an immutable document")] -}) - -let doc2 = automerge.init() -doc2 = automerge.merge(doc2, automerge.clone(doc1)) -doc2 = automerge.change(doc2, d => { - d.ideas.push(new automerge.Text("which records it's history")) -}) - -// Note the `automerge.clone` call, see the "cloning" section of this readme for -// more detail -doc1 = automerge.merge(doc1, automerge.clone(doc2)) -doc1 = automerge.change(doc1, d => { - d.ideas[0].deleteAt(13, 8) - d.ideas[0].insertAt(13, "object") -}) - -let doc3 = automerge.merge(doc1, doc2) -// doc3 is now {ideas: ["an immutable object", "which records it's history"]} -``` - -## Applying changes from another document - -You can get a representation of the result of the last {@link change} you made -to a document with {@link getLastLocalChange} and you can apply that change to -another document using {@link applyChanges}. - -If you need to get just the changes which are in one document but not in another -you can use {@link getHeads} to get the heads of the document without the -changes and then {@link getMissingDeps}, passing the result of {@link getHeads} -on the document with the changes. - -## Saving and loading documents - -You can {@link save} a document to generate a compresed binary representation of -the document which can be loaded with {@link load}. If you have a document which -you have recently made changes to you can generate recent changes with {@link -saveIncremental}, this will generate all the changes since you last called -`saveIncremental`, the changes generated can be applied to another document with -{@link loadIncremental}. - -## Viewing different versions of a document - -Occasionally you may wish to explicitly step to a different point in a document -history. 
One common reason to do this is if you need to obtain a set of changes -which take the document from one state to another in order to send those changes -to another peer (or to save them somewhere). You can use {@link view} to do this. - -```javascript -import * as automerge from "@automerge/automerge" -import * as assert from "assert" - -let doc = automerge.from({ - key1: "value1", -}) - -// Make a clone of the document at this point, maybe this is actually on another -// peer. -let doc2 = automerge.clone < any > doc - -let heads = automerge.getHeads(doc) - -doc = - automerge.change < - any > - (doc, - d => { - d.key2 = "value2" - }) - -doc = - automerge.change < - any > - (doc, - d => { - d.key3 = "value3" - }) - -// At this point we've generated two separate changes, now we want to send -// just those changes to someone else - -// view is a cheap reference based copy of a document at a given set of heads -let before = automerge.view(doc, heads) - -// This view doesn't show the last two changes in the document state -assert.deepEqual(before, { - key1: "value1", -}) - -// Get the changes to send to doc2 -let changes = automerge.getChanges(before, doc) - -// Apply the changes at doc2 -doc2 = automerge.applyChanges < any > (doc2, changes)[0] -assert.deepEqual(doc2, { - key1: "value1", - key2: "value2", - key3: "value3", -}) -``` - -If you have a {@link view} of a document which you want to make changes to you -can {@link clone} the viewed document. - -## Syncing - -The sync protocol is stateful. This means that we start by creating a {@link -SyncState} for each peer we are communicating with using {@link initSyncState}. -Then we generate a message to send to the peer by calling {@link -generateSyncMessage}. When we receive a message from the peer we call {@link -receiveSyncMessage}. Here's a simple example of a loop which just keeps two -peers in sync. 
- -```javascript -let sync1 = automerge.initSyncState() -let msg: Uint8Array | null -;[sync1, msg] = automerge.generateSyncMessage(doc1, sync1) - -while (true) { - if (msg != null) { - network.send(msg) - } - let resp: Uint8Array = - (network.receive()[(doc1, sync1, _ignore)] = - automerge.receiveSyncMessage(doc1, sync1, resp)[(sync1, msg)] = - automerge.generateSyncMessage(doc1, sync1)) -} -``` - -## Conflicts - -The only time conflicts occur in automerge documents is in concurrent -assignments to the same key in an object. In this case automerge -deterministically chooses an arbitrary value to present to the application but -you can examine the conflicts using {@link getConflicts}. - -``` -import * as automerge from "@automerge/automerge" - -type Profile = { - pets: Array<{name: string, type: string}> -} - -let doc1 = automerge.init("aaaa") -doc1 = automerge.change(doc1, d => { - d.pets = [{name: "Lassie", type: "dog"}] -}) -let doc2 = automerge.init("bbbb") -doc2 = automerge.merge(doc2, automerge.clone(doc1)) - -doc2 = automerge.change(doc2, d => { - d.pets[0].name = "Beethoven" -}) - -doc1 = automerge.change(doc1, d => { - d.pets[0].name = "Babe" -}) - -const doc3 = automerge.merge(doc1, doc2) - -// Note that here we pass `doc3.pets`, not `doc3` -let conflicts = automerge.getConflicts(doc3.pets[0], "name") - -// The two conflicting values are the keys of the conflicts object -assert.deepEqual(Object.values(conflicts), ["Babe", Beethoven"]) -``` - -## Actor IDs - -By default automerge will generate a random actor ID for you, but most methods -for creating a document allow you to set the actor ID. You can get the actor ID -associated with the document by calling {@link getActorId}. Actor IDs must not -be used in concurrent threads of executiong - all changes by a given actor ID -are expected to be sequential. - -## Listening to patches - -Sometimes you want to respond to changes made to an automerge document. 
In this -case you can use the {@link PatchCallback} type to receive notifications when -changes have been made. - -## Cloning - -Currently you cannot make mutating changes (i.e. call {@link change}) to a -document which you have two pointers to. For example, in this code: - -```javascript -let doc1 = automerge.init() -let doc2 = automerge.change(doc1, d => (d.key = "value")) -``` - -`doc1` and `doc2` are both pointers to the same state. Any attempt to call -mutating methods on `doc1` will now result in an error like - - Attempting to change an out of date document - -If you encounter this you need to clone the original document, the above sample -would work as: - -```javascript -let doc1 = automerge.init() -let doc2 = automerge.change(automerge.clone(doc1), d => (d.key = "value")) -``` From 9c3d0976c8b9d740184b291b96fedb27fddcb783 Mon Sep 17 00:00:00 2001 From: Alex Currie-Clark Date: Wed, 11 Jan 2023 16:00:03 +0000 Subject: [PATCH 680/730] Add workflow to generate a deno.land and npm release when pushing a new `automerge-wasm` version to #main --- .github/workflows/release.yaml | 96 ++++++++++++++++++++++++++++++++++ 1 file changed, 96 insertions(+) create mode 100644 .github/workflows/release.yaml diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 00000000..9bc2a72b --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,96 @@ +name: Release +on: + push: + branches: + - main + +jobs: + check_if_wasm_version_upgraded: + name: Check if WASM version has been upgraded + runs-on: ubuntu-latest + outputs: + wasm_version: ${{ steps.version-updated.outputs.current-package-version }} + wasm_has_updated: ${{ steps.version-updated.outputs.has-updated }} + steps: + - uses: JiPaix/package-json-updated-action@v1.0.3 + id: version-updated + with: + path: rust/automerge-wasm/package.json + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + INPUT_PATH: ${{ github.workspace }}/rust/automerge-wasm/package.json + 
publish-wasm: + runs-on: ubuntu-latest + needs: + - check_if_wasm_version_upgraded + # We create release only if the version in the package.json has been upgraded + if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated + steps: + - uses: denoland/setup-deno@v1 + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + ref: ${{ github.ref }} + - name: Get rid of local github workflows + run: rm -r .github/workflows + - name: Remove tmp_branch if it exists + run: git push origin :tmp_branch || true + - run: git checkout -b tmp_branch + - name: Install wasm-bindgen-cli + run: cargo install wasm-bindgen-cli wasm-opt + - name: Install wasm32 target + run: rustup target add wasm32-unknown-unknown + - name: run wasm js tests + id: wasm_js_tests + run: ./scripts/ci/wasm_tests + - name: run wasm deno tests + id: wasm_deno_tests + run: ./scripts/ci/deno_tests + - name: Collate deno release files + if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success' + run: | + mkdir $GITHUB_WORKSPACE/deno_wasm_dist + cp $GITHUB_WORKSPACE/rust/automerge-wasm/deno/* $GITHUB_WORKSPACE/deno_wasm_dist + cp $GITHUB_WORKSPACE/rust/automerge-wasm/index.d.ts $GITHUB_WORKSPACE/deno_wasm_dist + sed -i '1i /// ' $GITHUB_WORKSPACE/deno_wasm_dist/automerge_wasm.js + - name: Create npm release + if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success' + run: | + if [ "$(npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm show . version)" = "$VERSION" ]; then + echo "This version is already published" + exit 0 + fi + EXTRA_ARGS="--access public" + if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then + echo "Is pre-release version" + EXTRA_ARGS="$EXTRA_ARGS --tag next" + fi + if [ "$NODE_AUTH_TOKEN" = "" ]; then + echo "Can't publish on NPM, You need a NPM_TOKEN secret." 
+ false + fi + npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm publish $EXTRA_ARGS + env: + NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} + VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }} + - name: Commit wasm deno release files + run: | + git config --global user.name "actions" + git config --global user.email actions@github.com + git add $GITHUB_WORKSPACE/deno_wasm_dist + git commit -am "Add deno release files" + git push origin tmp_branch + - name: Tag wasm release + if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success' + uses: softprops/action-gh-release@v1 + with: + name: Automerge Wasm v${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }} + tag_name: js/automerge-wasm-${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }} + target_commitish: tmp_branch + generate_release_notes: false + draft: false + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Remove tmp_branch + run: git push origin :tmp_branch + From 93a257896eecfe683541a483a5b4d1122ce63a76 Mon Sep 17 00:00:00 2001 From: Alex Currie-Clark Date: Wed, 11 Jan 2023 20:08:45 +0000 Subject: [PATCH 681/730] Release action: Fix for check that WASM version has been updated before publishing --- .github/workflows/release.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 9bc2a72b..cd405b03 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -12,19 +12,19 @@ jobs: wasm_version: ${{ steps.version-updated.outputs.current-package-version }} wasm_has_updated: ${{ steps.version-updated.outputs.has-updated }} steps: - - uses: JiPaix/package-json-updated-action@v1.0.3 + - uses: JiPaix/package-json-updated-action@v1.0.5 id: version-updated with: path: rust/automerge-wasm/package.json env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - INPUT_PATH: ${{ github.workspace }}/rust/automerge-wasm/package.json 
publish-wasm: + name: Publish WASM package runs-on: ubuntu-latest needs: - check_if_wasm_version_upgraded # We create release only if the version in the package.json has been upgraded - if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated + if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true' steps: - uses: denoland/setup-deno@v1 - uses: actions/checkout@v3 From a0d698dc8e00a4f3b7925c90b7dd35f65277d398 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 12 Jan 2023 09:55:12 +0000 Subject: [PATCH 682/730] Version bump js and wasm js: 2.0.1-alpha.3 wasm: 0.1.20 --- javascript/package.json | 4 ++-- rust/automerge-wasm/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/javascript/package.json b/javascript/package.json index a424de48..5e2efbda 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.1-alpha.2", + "version": "2.0.1-alpha.3", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", @@ -44,7 +44,7 @@ "typescript": "^4.9.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.19", + "@automerge/automerge-wasm": "0.1.20", "uuid": "^9.0.0" } } diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 7c02d820..47dd7f32 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.19", + "version": "0.1.20", "license": "MIT", "files": [ "README.md", From d12bd3bb06b683a39dbe110ac2c3d1cb9df7662f Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 12 Jan 
2023 10:27:03 +0000 Subject: [PATCH 683/730] correctly call npm publish in release action --- .github/workflows/release.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index cd405b03..282bd8a6 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -69,7 +69,7 @@ jobs: echo "Can't publish on NPM, You need a NPM_TOKEN secret." false fi - npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm publish $EXTRA_ARGS + npm publish $GITHUB_WORKSPACE/rust/automerge-wasm $EXTRA_ARGS env: NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }} From 3ef60747f458f870801cd1a15108588011db3726 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 12 Jan 2023 10:37:11 +0000 Subject: [PATCH 684/730] Roll back automerge-wasm to test release action The release action we are working conditionally executes based on the version of `automerge-wasm` in the previous commit. We need to trigger it even though the version has not changed so we roll back the version in this commit and the commit immediately following this will bump it again. 
--- rust/automerge-wasm/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 47dd7f32..7c02d820 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.20", + "version": "0.1.19", "license": "MIT", "files": [ "README.md", From 5c02445bee66e1ce3cc981920902b851fe1bb668 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 12 Jan 2023 10:39:11 +0000 Subject: [PATCH 685/730] Bump automerge-wasm, again In order to re-trigger the release action we are testing we bump the version which was de-bumped in the last commit. --- rust/automerge-wasm/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 7c02d820..47dd7f32 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.19", + "version": "0.1.20", "license": "MIT", "files": [ "README.md", From f073dbf70142cb17ed1369e2046350fbdcdb1302 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 12 Jan 2023 11:04:22 +0000 Subject: [PATCH 686/730] use setup-node prior to attempting to publish in release action --- .github/workflows/release.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 282bd8a6..530f07c7 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -24,8 +24,12 @@ jobs: needs: - 
check_if_wasm_version_upgraded # We create release only if the version in the package.json has been upgraded - if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true' + #if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true' steps: + - uses: actions/setup-node@v3 + with: + node-version: '16.x' + registry-url: 'https://registry.npmjs.org' - uses: denoland/setup-deno@v1 - uses: actions/checkout@v3 with: From 2d8df125224a251da729efb149dda7f8bb255d26 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 12 Jan 2023 11:35:48 +0000 Subject: [PATCH 687/730] re-enable version check for WASM release --- .github/workflows/release.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 530f07c7..15495233 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -24,7 +24,7 @@ jobs: needs: - check_if_wasm_version_upgraded # We create release only if the version in the package.json has been upgraded - #if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true' + if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true' steps: - uses: actions/setup-node@v3 with: From 22e9915fac632adb213e4675c6169953167d3349 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 12 Jan 2023 12:32:53 +0000 Subject: [PATCH 688/730] automerge-wasm: publish release build in Github Action --- .github/workflows/release.yaml | 6 ++++++ javascript/package.json | 2 +- rust/automerge-wasm/package.json | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 15495233..b3c0aed1 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -50,12 +50,18 @@ jobs: - name: run wasm deno tests id: wasm_deno_tests run: ./scripts/ci/deno_tests + - name: build release + id: build_release + run: | + npm --prefix 
$GITHUB_WORKSPACE/rust/automerge-wasm run release - name: Collate deno release files if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success' run: | mkdir $GITHUB_WORKSPACE/deno_wasm_dist cp $GITHUB_WORKSPACE/rust/automerge-wasm/deno/* $GITHUB_WORKSPACE/deno_wasm_dist cp $GITHUB_WORKSPACE/rust/automerge-wasm/index.d.ts $GITHUB_WORKSPACE/deno_wasm_dist + cp $GITHUB_WORKSPACE/rust/automerge-wasm/README.md $GITHUB_WORKSPACE/deno_wasm_dist + cp $GITHUB_WORKSPACE/rust/automerge-wasm/LICENSE $GITHUB_WORKSPACE/deno_wasm_dist sed -i '1i /// ' $GITHUB_WORKSPACE/deno_wasm_dist/automerge_wasm.js - name: Create npm release if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success' diff --git a/javascript/package.json b/javascript/package.json index 5e2efbda..53cc6fdc 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -44,7 +44,7 @@ "typescript": "^4.9.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.20", + "@automerge/automerge-wasm": "0.1.21", "uuid": "^9.0.0" } } diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 47dd7f32..76167a3e 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.20", + "version": "0.1.21", "license": "MIT", "files": [ "README.md", From 681a3f1f3fd6161cb7733e07cdfe46d68b6967fe Mon Sep 17 00:00:00 2001 From: Alex Currie-Clark Date: Thu, 12 Jan 2023 07:04:40 +0000 Subject: [PATCH 689/730] Add github action to deploy deno package --- .github/workflows/release.yaml | 110 +++++++++++++++++++++++- javascript/.denoifyrc.json | 3 + javascript/.gitignore | 1 + javascript/config/cjs.json | 7 +- javascript/config/declonly.json | 7 +- javascript/config/mjs.json | 7 +- 
javascript/deno-tests/deno.ts | 10 +++ javascript/package.json | 5 +- javascript/scripts/deno-prefixer.mjs | 9 ++ javascript/scripts/denoify-replacer.mjs | 42 +++++++++ javascript/src/constants.ts | 2 +- javascript/src/counter.ts | 2 +- javascript/src/internal_state.ts | 4 +- javascript/src/low_level.ts | 20 ++--- javascript/src/numbers.ts | 2 +- javascript/src/proxies.ts | 9 +- javascript/src/stable.ts | 45 +++++----- javascript/src/text.ts | 8 +- javascript/src/unstable.ts | 12 ++- javascript/src/uuid.deno.ts | 26 ++++++ javascript/tsconfig.json | 2 +- scripts/ci/deno_tests | 13 ++- 22 files changed, 296 insertions(+), 50 deletions(-) create mode 100644 javascript/.denoifyrc.json create mode 100644 javascript/deno-tests/deno.ts create mode 100644 javascript/scripts/deno-prefixer.mjs create mode 100644 javascript/scripts/denoify-replacer.mjs create mode 100644 javascript/src/uuid.deno.ts diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index b3c0aed1..762671ff 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -103,4 +103,112 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Remove tmp_branch run: git push origin :tmp_branch - + check_if_js_version_upgraded: + name: Check if JS version has been upgraded + runs-on: ubuntu-latest + outputs: + js_version: ${{ steps.version-updated.outputs.current-package-version }} + js_has_updated: ${{ steps.version-updated.outputs.has-updated }} + steps: + - uses: JiPaix/package-json-updated-action@v1.0.5 + id: version-updated + with: + path: javascript/package.json + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + publish-js: + name: Publish JS package + runs-on: ubuntu-latest + needs: + - check_if_js_version_upgraded + - check_if_wasm_version_upgraded + - publish-wasm + # We create release only if the version in the package.json has been upgraded and after the WASM release + if: | + (always() && ! 
cancelled()) && + (needs.publish-wasm.result == 'success' || needs.publish-wasm.result == 'skipped') && + needs.check_if_js_version_upgraded.outputs.js_has_updated == 'true' + steps: + - uses: actions/setup-node@v3 + with: + node-version: '16.x' + registry-url: 'https://registry.npmjs.org' + - uses: denoland/setup-deno@v1 + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + ref: ${{ github.ref }} + - name: Get rid of local github workflows + run: rm -r .github/workflows + - name: Remove js_tmp_branch if it exists + run: git push origin :js_tmp_branch || true + - run: git checkout -b js_tmp_branch + - name: check js formatting + run: | + yarn global add prettier + prettier -c javascript/.prettierrc javascript + - name: run js tests + id: js_tests + run: | + cargo install wasm-bindgen-cli wasm-opt + rustup target add wasm32-unknown-unknown + ./scripts/ci/js_tests + - name: build js release + id: build_release + run: | + npm --prefix $GITHUB_WORKSPACE/javascript run build + - name: build js deno release + id: build_deno_release + run: | + VERSION=$WASM_VERSION npm --prefix $GITHUB_WORKSPACE/javascript run deno:build + env: + WASM_VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }} + - name: run deno tests + id: deno_tests + run: | + npm --prefix $GITHUB_WORKSPACE/javascript run deno:test + - name: Collate deno release files + if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success' + run: | + mkdir $GITHUB_WORKSPACE/deno_js_dist + cp $GITHUB_WORKSPACE/javascript/deno_dist/* $GITHUB_WORKSPACE/deno_js_dist + - name: Create npm release + if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success' + run: | + if [ "$(npm --prefix $GITHUB_WORKSPACE/javascript show . 
version)" = "$VERSION" ]; then + echo "This version is already published" + exit 0 + fi + EXTRA_ARGS="--access public" + if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then + echo "Is pre-release version" + EXTRA_ARGS="$EXTRA_ARGS --tag next" + fi + if [ "$NODE_AUTH_TOKEN" = "" ]; then + echo "Can't publish on NPM, You need a NPM_TOKEN secret." + false + fi + npm publish $GITHUB_WORKSPACE/javascript $EXTRA_ARGS + env: + NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} + VERSION: ${{ needs.check_if_js_version_upgraded.outputs.js_version }} + - name: Commit js deno release files + run: | + git config --global user.name "actions" + git config --global user.email actions@github.com + git add $GITHUB_WORKSPACE/deno_js_dist + git commit -am "Add deno js release files" + git push origin js_tmp_branch + - name: Tag JS release + if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success' + uses: softprops/action-gh-release@v1 + with: + name: Automerge v${{ needs.check_if_js_version_upgraded.outputs.js_version }} + tag_name: js/automerge-${{ needs.check_if_js_version_upgraded.outputs.js_version }} + target_commitish: js_tmp_branch + generate_release_notes: false + draft: false + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Remove js_tmp_branch + run: git push origin :js_tmp_branch diff --git a/javascript/.denoifyrc.json b/javascript/.denoifyrc.json new file mode 100644 index 00000000..9453a31f --- /dev/null +++ b/javascript/.denoifyrc.json @@ -0,0 +1,3 @@ +{ + "replacer": "scripts/denoify-replacer.mjs" +} diff --git a/javascript/.gitignore b/javascript/.gitignore index ab4ec70d..f98d9db2 100644 --- a/javascript/.gitignore +++ b/javascript/.gitignore @@ -3,3 +3,4 @@ dist docs/ .vim +deno_dist/ diff --git a/javascript/config/cjs.json b/javascript/config/cjs.json index fc500311..0b135067 100644 --- a/javascript/config/cjs.json +++ b/javascript/config/cjs.json @@ -1,6 +1,11 @@ { "extends": 
"../tsconfig.json", - "exclude": ["../dist/**/*", "../node_modules", "../test/**/*"], + "exclude": [ + "../dist/**/*", + "../node_modules", + "../test/**/*", + "../src/**/*.deno.ts" + ], "compilerOptions": { "outDir": "../dist/cjs" } diff --git a/javascript/config/declonly.json b/javascript/config/declonly.json index df615930..7c1df687 100644 --- a/javascript/config/declonly.json +++ b/javascript/config/declonly.json @@ -1,6 +1,11 @@ { "extends": "../tsconfig.json", - "exclude": ["../dist/**/*", "../node_modules", "../test/**/*"], + "exclude": [ + "../dist/**/*", + "../node_modules", + "../test/**/*", + "../src/**/*.deno.ts" + ], "emitDeclarationOnly": true, "compilerOptions": { "outDir": "../dist" diff --git a/javascript/config/mjs.json b/javascript/config/mjs.json index 2ee7a8b8..ecf3ce36 100644 --- a/javascript/config/mjs.json +++ b/javascript/config/mjs.json @@ -1,6 +1,11 @@ { "extends": "../tsconfig.json", - "exclude": ["../dist/**/*", "../node_modules", "../test/**/*"], + "exclude": [ + "../dist/**/*", + "../node_modules", + "../test/**/*", + "../src/**/*.deno.ts" + ], "compilerOptions": { "target": "es6", "module": "es6", diff --git a/javascript/deno-tests/deno.ts b/javascript/deno-tests/deno.ts new file mode 100644 index 00000000..fc0a4dad --- /dev/null +++ b/javascript/deno-tests/deno.ts @@ -0,0 +1,10 @@ +import * as Automerge from "../deno_dist/index.ts" + +Deno.test("It should create, clone and free", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.clone(doc1) + + // this is only needed if weakrefs are not supported + Automerge.free(doc1) + Automerge.free(doc2) +}) diff --git a/javascript/package.json b/javascript/package.json index 53cc6fdc..39464fac 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.1-alpha.3", + "version": "2.0.1-alpha.4", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": 
"https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", @@ -25,6 +25,8 @@ "lint": "eslint src", "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc -p config/declonly.json --emitDeclarationOnly", "test": "ts-mocha test/*.ts", + "deno:build": "denoify && node ./scripts/deno-prefixer.mjs", + "deno:test": "deno test ./deno-tests/deno.ts --allow-read --allow-net", "watch-docs": "typedoc src/index.ts --watch --readme none" }, "devDependencies": { @@ -33,6 +35,7 @@ "@types/uuid": "^9.0.0", "@typescript-eslint/eslint-plugin": "^5.46.0", "@typescript-eslint/parser": "^5.46.0", + "denoify": "^1.4.5", "eslint": "^8.29.0", "fast-sha256": "^1.3.0", "mocha": "^10.2.0", diff --git a/javascript/scripts/deno-prefixer.mjs b/javascript/scripts/deno-prefixer.mjs new file mode 100644 index 00000000..28544102 --- /dev/null +++ b/javascript/scripts/deno-prefixer.mjs @@ -0,0 +1,9 @@ +import * as fs from "fs" + +const files = ["./deno_dist/proxies.ts"] +for (const filepath of files) { + const data = fs.readFileSync(filepath) + fs.writeFileSync(filepath, "// @ts-nocheck \n" + data) + + console.log('Prepended "// @ts-nocheck" to ' + filepath) +} diff --git a/javascript/scripts/denoify-replacer.mjs b/javascript/scripts/denoify-replacer.mjs new file mode 100644 index 00000000..fcf4bc45 --- /dev/null +++ b/javascript/scripts/denoify-replacer.mjs @@ -0,0 +1,42 @@ +// @denoify-ignore + +import { makeThisModuleAnExecutableReplacer } from "denoify" +// import { assert } from "tsafe"; +// import * as path from "path"; + +makeThisModuleAnExecutableReplacer( + async ({ parsedImportExportStatement, destDirPath, version }) => { + version = process.env.VERSION || version + + switch (parsedImportExportStatement.parsedArgument.nodeModuleName) { + case "@automerge/automerge-wasm": + { + const moduleRoot = + process.env.MODULE_ROOT || + `https://deno.land/x/automerge_wasm@${version}` + /* + *We expect not to run against 
statements like + *import(..).then(...) + *or + *export * from "..." + *in our code. + */ + if ( + !parsedImportExportStatement.isAsyncImport && + (parsedImportExportStatement.statementType === "import" || + parsedImportExportStatement.statementType === "export") + ) { + if (parsedImportExportStatement.isTypeOnly) { + return `${parsedImportExportStatement.statementType} type ${parsedImportExportStatement.target} from "${moduleRoot}/index.d.ts";` + } else { + return `${parsedImportExportStatement.statementType} ${parsedImportExportStatement.target} from "${moduleRoot}/automerge_wasm.js";` + } + } + } + break + } + + //The replacer should return undefined when we want to let denoify replace the statement + return undefined + } +) diff --git a/javascript/src/constants.ts b/javascript/src/constants.ts index d3bd8138..7b714772 100644 --- a/javascript/src/constants.ts +++ b/javascript/src/constants.ts @@ -2,7 +2,7 @@ export const STATE = Symbol.for("_am_meta") // symbol used to hide application metadata on automerge objects export const TRACE = Symbol.for("_am_trace") // used for debugging -export const OBJECT_ID = Symbol.for("_am_objectId") // synbol used to hide the object id on automerge objects +export const OBJECT_ID = Symbol.for("_am_objectId") // symbol used to hide the object id on automerge objects export const IS_PROXY = Symbol.for("_am_isProxy") // symbol used to test if the document is a proxy object export const UINT = Symbol.for("_am_uint") diff --git a/javascript/src/counter.ts b/javascript/src/counter.ts index 6b9ad277..873fa157 100644 --- a/javascript/src/counter.ts +++ b/javascript/src/counter.ts @@ -1,4 +1,4 @@ -import { Automerge, ObjID, Prop } from "@automerge/automerge-wasm" +import { Automerge, type ObjID, type Prop } from "@automerge/automerge-wasm" import { COUNTER } from "./constants" /** * The most basic CRDT: an integer value that can be changed only by diff --git a/javascript/src/internal_state.ts b/javascript/src/internal_state.ts index 
92ab648e..f3da49b1 100644 --- a/javascript/src/internal_state.ts +++ b/javascript/src/internal_state.ts @@ -1,8 +1,8 @@ -import { ObjID, Heads, Automerge } from "@automerge/automerge-wasm" +import { type ObjID, type Heads, Automerge } from "@automerge/automerge-wasm" import { STATE, OBJECT_ID, TRACE, IS_PROXY } from "./constants" -import { type Doc, PatchCallback } from "./types" +import type { Doc, PatchCallback } from "./types" export interface InternalState { handle: Automerge diff --git a/javascript/src/low_level.ts b/javascript/src/low_level.ts index 94ac63db..63ef5546 100644 --- a/javascript/src/low_level.ts +++ b/javascript/src/low_level.ts @@ -1,20 +1,20 @@ import { + type API, Automerge, - Change, - DecodedChange, - Actor, + type Change, + type DecodedChange, + type Actor, SyncState, - SyncMessage, - JsSyncState, - DecodedSyncMessage, - ChangeToEncode, + type SyncMessage, + type JsSyncState, + type DecodedSyncMessage, + type ChangeToEncode, } from "@automerge/automerge-wasm" -export { ChangeToEncode } from "@automerge/automerge-wasm" -import { API } from "@automerge/automerge-wasm" +export type { ChangeToEncode } from "@automerge/automerge-wasm" export function UseApi(api: API) { for (const k in api) { - ApiHandler[k] = api[k] + ;(ApiHandler as any)[k] = (api as any)[k] } } diff --git a/javascript/src/numbers.ts b/javascript/src/numbers.ts index d52a36c5..7ad95998 100644 --- a/javascript/src/numbers.ts +++ b/javascript/src/numbers.ts @@ -1,4 +1,4 @@ -// Convience classes to allow users to stricly specify the number type they want +// Convenience classes to allow users to strictly specify the number type they want import { INT, UINT, F64 } from "./constants" diff --git a/javascript/src/proxies.ts b/javascript/src/proxies.ts index 3fb3a825..7a99cf80 100644 --- a/javascript/src/proxies.ts +++ b/javascript/src/proxies.ts @@ -1,7 +1,12 @@ import { Text } from "./text" -import { Automerge, Heads, ObjID } from "@automerge/automerge-wasm" -import { Prop } from 
"@automerge/automerge-wasm" import { + Automerge, + type Heads, + type ObjID, + type Prop, +} from "@automerge/automerge-wasm" + +import type { AutomergeValue, ScalarValue, MapValue, diff --git a/javascript/src/stable.ts b/javascript/src/stable.ts index c52d0a4c..1f38cb27 100644 --- a/javascript/src/stable.ts +++ b/javascript/src/stable.ts @@ -4,47 +4,50 @@ export { /** @hidden */ uuid } from "./uuid" import { rootProxy, listProxy, mapProxy, textProxy } from "./proxies" import { STATE } from "./constants" -import { AutomergeValue, Counter, Doc, PatchCallback } from "./types" -export { - AutomergeValue, +import { + type AutomergeValue, Counter, - Doc, + type Doc, + type PatchCallback, +} from "./types" +export { + type AutomergeValue, + Counter, + type Doc, Int, Uint, Float64, - Patch, - PatchCallback, - ScalarValue, + type Patch, + type PatchCallback, + type ScalarValue, Text, } from "./types" import { Text } from "./text" -import { type API } from "@automerge/automerge-wasm" -export { - PutPatch, - DelPatch, - SplicePatch, - IncPatch, - SyncMessage, -} from "@automerge/automerge-wasm" -import { ApiHandler, ChangeToEncode, UseApi } from "./low_level" - -import { +import type { + API, Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, - Automerge, MaterializeValue, -} from "@automerge/automerge-wasm" -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage, } from "@automerge/automerge-wasm" +export type { + PutPatch, + DelPatch, + SplicePatch, + IncPatch, + SyncMessage, +} from "@automerge/automerge-wasm" +import { ApiHandler, type ChangeToEncode, UseApi } from "./low_level" + +import { Automerge } from "@automerge/automerge-wasm" import { RawString } from "./raw_string" diff --git a/javascript/src/text.ts b/javascript/src/text.ts index bb0a868d..f87af891 100644 --- a/javascript/src/text.ts +++ b/javascript/src/text.ts @@ -1,10 +1,12 @@ -import { Value } from "@automerge/automerge-wasm" +import type { Value } from "@automerge/automerge-wasm" 
import { TEXT, STATE } from "./constants" +import type { InternalState } from "./internal_state" export class Text { elems: Array str: string | undefined - spans: Array | undefined + spans: Array | undefined; + [STATE]?: InternalState constructor(text?: string | string[] | Value[]) { if (typeof text === "string") { @@ -208,7 +210,7 @@ export class Text { new Text(this.elems.slice(start, end)) } - some(test: (Value) => boolean): boolean { + some(test: (arg: Value) => boolean): boolean { return this.elems.some(test) } diff --git a/javascript/src/unstable.ts b/javascript/src/unstable.ts index 3ee18dbc..b448d955 100644 --- a/javascript/src/unstable.ts +++ b/javascript/src/unstable.ts @@ -37,7 +37,15 @@ */ import { Counter } from "./types" -export { Counter, Doc, Int, Uint, Float64, Patch, PatchCallback } from "./types" +export { + Counter, + type Doc, + Int, + Uint, + Float64, + type Patch, + type PatchCallback, +} from "./types" import type { PatchCallback } from "./stable" @@ -59,7 +67,7 @@ export type ScalarValue = export type Conflicts = { [key: string]: AutomergeValue } -export { +export type { PutPatch, DelPatch, SplicePatch, diff --git a/javascript/src/uuid.deno.ts b/javascript/src/uuid.deno.ts new file mode 100644 index 00000000..04c9b93d --- /dev/null +++ b/javascript/src/uuid.deno.ts @@ -0,0 +1,26 @@ +import * as v4 from "https://deno.land/x/uuid@v0.1.2/mod.ts" + +// this file is a deno only port of the uuid module + +function defaultFactory() { + return v4.uuid().replace(/-/g, "") +} + +let factory = defaultFactory + +interface UUIDFactory extends Function { + setFactory(f: typeof factory): void + reset(): void +} + +export const uuid: UUIDFactory = () => { + return factory() +} + +uuid.setFactory = newFactory => { + factory = newFactory +} + +uuid.reset = () => { + factory = defaultFactory +} diff --git a/javascript/tsconfig.json b/javascript/tsconfig.json index c6684ca0..628aea8e 100644 --- a/javascript/tsconfig.json +++ b/javascript/tsconfig.json @@ -15,5 
+15,5 @@ "outDir": "./dist" }, "include": ["src/**/*", "test/**/*"], - "exclude": ["./dist/**/*", "./node_modules"] + "exclude": ["./dist/**/*", "./node_modules", "./src/**/*.deno.ts"] } diff --git a/scripts/ci/deno_tests b/scripts/ci/deno_tests index bc655468..bdec9b95 100755 --- a/scripts/ci/deno_tests +++ b/scripts/ci/deno_tests @@ -1,6 +1,17 @@ THIS_SCRIPT=$(dirname "$0"); WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; +JS_PROJECT=$THIS_SCRIPT/../../javascript; +echo "Running Wasm Deno tests"; yarn --cwd $WASM_PROJECT install; yarn --cwd $WASM_PROJECT build; -deno test $WASM_PROJECT/deno-tests/deno.ts --allow-read +deno test $WASM_PROJECT/deno-tests/deno.ts --allow-read; + +cp $WASM_PROJECT/index.d.ts $WASM_PROJECT/deno; +sed -i '1i /// ' $WASM_PROJECT/deno/automerge_wasm.js; + +echo "Running JS Deno tests"; +yarn --cwd $JS_PROJECT install; +ROOT_MODULE=$WASM_PROJECT/deno yarn --cwd $JS_PROJECT deno:build; +yarn --cwd $JS_PROJECT deno:test; + From d8df1707d903497417a74d6febf7675b8f8695c4 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sat, 14 Jan 2023 11:06:58 +0000 Subject: [PATCH 690/730] Update rust toolchain for "linux" step --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a5d42010..c2d469d5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -136,7 +136,7 @@ jobs: strategy: matrix: toolchain: - - 1.60.0 + - 1.66.0 - nightly continue-on-error: ${{ matrix.toolchain == 'nightly' }} steps: From 964ae2bd818bd3176092aa35083bfeaee4eeca84 Mon Sep 17 00:00:00 2001 From: alexjg Date: Sat, 14 Jan 2023 11:27:48 +0000 Subject: [PATCH 691/730] Fix SeekOpWithPatch on optrees with only internal optrees (#496) In #480 we fixed an issue where `SeekOp` calculated an incorrect insertion index on optrees where the only visible ops were on internal nodes. 
We forgot to port this fix to `SeekOpWithPatch`, which has almost the same logic just with additional work done in order to notify an `OpObserver` of changes. Add a test and fix to `SeekOpWithPatch` --- rust/automerge/src/query/seek_op.rs | 75 +++++++++++-------- .../automerge/src/query/seek_op_with_patch.rs | 34 ++++++++- 2 files changed, 76 insertions(+), 33 deletions(-) diff --git a/rust/automerge/src/query/seek_op.rs b/rust/automerge/src/query/seek_op.rs index 4d955f96..22d1f58d 100644 --- a/rust/automerge/src/query/seek_op.rs +++ b/rust/automerge/src/query/seek_op.rs @@ -161,7 +161,7 @@ impl<'a> TreeQuery<'a> for SeekOp<'a> { } #[cfg(test)] -mod tests { +pub(crate) mod tests { use crate::{ op_set::OpSet, op_tree::B, @@ -170,36 +170,43 @@ mod tests { ActorId, ScalarValue, }; - #[test] - fn seek_on_page_boundary() { - // Create an optree in which the only visible ops are on the boundaries of the nodes, - // i.e. the visible elements are in the internal nodes. Like so - // - // .----------------------. - // | id | key | succ | - // | B | "a" | | - // | 2B | "b" | | - // '----------------------' - // / | \ - // ;------------------------. | `------------------------------------. - // | id | op | succ | | | id | op | succ | - // | 0 |set "a" | 1 | | | 2B + 1 |set "c" | 2B + 2 | - // | 1 |set "a" | 2 | | | 2B + 2 |set "c" | 2B + 3 | - // | 2 |set "a" | 3 | | ... - // ... | | 3B |set "c" | | - // | B - 1 |set "a" | B | | '------------------------------------' - // '--------'--------'------' | - // | - // .-----------------------------. - // | id | key | succ | - // | B + 1 | "b" | B + 2 | - // | B + 2 | "b" | B + 3 | - // .... - // | B + (B - 1 | "b" | 2B | - // '-----------------------------' - // - // The important point here is that the leaf nodes contain no visible ops for keys "a" and - // "b". + /// Create an optree in which the only visible ops are on the boundaries of the nodes, + /// i.e. the visible elements are in the internal nodes. 
Like so + /// + /// ```notrust + /// + /// .----------------------. + /// | id | key | succ | + /// | B | "a" | | + /// | 2B | "b" | | + /// '----------------------' + /// / | \ + /// ;------------------------. | `------------------------------------. + /// | id | op | succ | | | id | op | succ | + /// | 0 |set "a" | 1 | | | 2B + 1 |set "c" | 2B + 2 | + /// | 1 |set "a" | 2 | | | 2B + 2 |set "c" | 2B + 3 | + /// | 2 |set "a" | 3 | | ... + /// ... | | 3B |set "c" | | + /// | B - 1 |set "a" | B | | '------------------------------------' + /// '--------'--------'------' | + /// | + /// .-----------------------------. + /// | id | key | succ | + /// | B + 1 | "b" | B + 2 | + /// | B + 2 | "b" | B + 3 | + /// .... + /// | B + (B - 1 | "b" | 2B | + /// '-----------------------------' + /// ``` + /// + /// The important point here is that the leaf nodes contain no visible ops for keys "a" and + /// "b". + /// + /// # Returns + /// + /// The opset in question and an op which should be inserted at the next position after the + /// internally visible ops. 
+ pub(crate) fn optree_with_only_internally_visible_ops() -> (OpSet, Op) { let mut set = OpSet::new(); let actor = set.m.actors.cache(ActorId::random()); let a = set.m.props.cache("a".to_string()); @@ -255,6 +262,12 @@ mod tests { .sorted_opids(std::iter::once(OpId::new(B as u64 - 1, actor))), insert: false, }; + (set, new_op) + } + + #[test] + fn seek_on_page_boundary() { + let (set, new_op) = optree_with_only_internally_visible_ops(); let q = SeekOp::new(&new_op); let q = set.search(&ObjId::root(), q); diff --git a/rust/automerge/src/query/seek_op_with_patch.rs b/rust/automerge/src/query/seek_op_with_patch.rs index 0cc48b37..7cacb032 100644 --- a/rust/automerge/src/query/seek_op_with_patch.rs +++ b/rust/automerge/src/query/seek_op_with_patch.rs @@ -136,8 +136,18 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { if self.pos + child.len() >= start { // skip empty nodes if child.index.visible_len(self.encoding) == 0 { - self.pos += child.len(); - QueryResult::Next + let child_contains_key = + child.elements.iter().any(|e| ops[*e].key == self.op.key); + if !child_contains_key { + // If we are in a node which has no visible ops, but none of the + // elements of the node match the key of the op, then we must have + // finished processing and so we can just return. 
+ // See https://github.com/automerge/automerge-rs/pull/480 + QueryResult::Finish + } else { + self.pos += child.len(); + QueryResult::Next + } } else { QueryResult::Descend } @@ -291,3 +301,23 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { } } } + +#[cfg(test)] +mod tests { + use super::{super::seek_op::tests::optree_with_only_internally_visible_ops, SeekOpWithPatch}; + use crate::{ + op_tree::B, + types::{ListEncoding, ObjId}, + }; + + #[test] + fn test_insert_on_internal_only_nodes() { + let (set, new_op) = optree_with_only_internally_visible_ops(); + + let q = SeekOpWithPatch::new(&new_op, ListEncoding::List); + let q = set.search(&ObjId::root(), q); + + // we've inserted `B - 1` elements for "a", so the index should be `B` + assert_eq!(q.pos, B); + } +} From 5629a7bec4ccf5be72bd38776c26167ba54bea4c Mon Sep 17 00:00:00 2001 From: alexjg Date: Thu, 19 Jan 2023 15:38:27 +0000 Subject: [PATCH 692/730] Various CI script fixes (#501) Some of the scripts in scripts/ci were not reliable detecting the path they were operating in. Additionally the deno_tests script was not correctly picking up the ROOT_MODULE environment variable. Add more robust path handling and fix the deno_tests script. 
--- javascript/.prettierignore | 1 + javascript/scripts/denoify-replacer.mjs | 2 +- scripts/ci/cmake-build | 3 ++- scripts/ci/deno_tests | 20 ++++++++++++-------- scripts/ci/fmt_js | 4 +++- scripts/ci/js_tests | 6 ++++-- scripts/ci/lint | 5 ++++- scripts/ci/rust-docs | 4 +++- scripts/ci/wasm_tests | 3 ++- 9 files changed, 32 insertions(+), 16 deletions(-) diff --git a/javascript/.prettierignore b/javascript/.prettierignore index c2dcd4bb..6ab2f796 100644 --- a/javascript/.prettierignore +++ b/javascript/.prettierignore @@ -1,3 +1,4 @@ e2e/verdacciodb dist docs +deno_dist diff --git a/javascript/scripts/denoify-replacer.mjs b/javascript/scripts/denoify-replacer.mjs index fcf4bc45..e183ba0d 100644 --- a/javascript/scripts/denoify-replacer.mjs +++ b/javascript/scripts/denoify-replacer.mjs @@ -12,7 +12,7 @@ makeThisModuleAnExecutableReplacer( case "@automerge/automerge-wasm": { const moduleRoot = - process.env.MODULE_ROOT || + process.env.ROOT_MODULE || `https://deno.land/x/automerge_wasm@${version}` /* *We expect not to run against statements like diff --git a/scripts/ci/cmake-build b/scripts/ci/cmake-build index 3924dc4a..f6f9f9b1 100755 --- a/scripts/ci/cmake-build +++ b/scripts/ci/cmake-build @@ -1,7 +1,8 @@ #!/usr/bin/env bash set -eoux pipefail -THIS_SCRIPT=$(dirname "$0"); +# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" # \note CMake's default build types are "Debug", "MinSizeRel", "Release" and # "RelWithDebInfo" but custom ones can also be defined so we pass it verbatim. 
BUILD_TYPE=$1; diff --git a/scripts/ci/deno_tests b/scripts/ci/deno_tests index bdec9b95..9f297557 100755 --- a/scripts/ci/deno_tests +++ b/scripts/ci/deno_tests @@ -1,17 +1,21 @@ -THIS_SCRIPT=$(dirname "$0"); +#!/usr/bin/env bash +set -eou pipefail +# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; JS_PROJECT=$THIS_SCRIPT/../../javascript; +E2E_PROJECT=$THIS_SCRIPT/../../javascript/e2e; -echo "Running Wasm Deno tests"; -yarn --cwd $WASM_PROJECT install; -yarn --cwd $WASM_PROJECT build; -deno test $WASM_PROJECT/deno-tests/deno.ts --allow-read; - -cp $WASM_PROJECT/index.d.ts $WASM_PROJECT/deno; +echo "building wasm and js" +yarn --cwd $E2E_PROJECT install; +yarn --cwd $E2E_PROJECT e2e buildjs; +cp $WASM_PROJECT/index.d.ts $WASM_PROJECT/deno/; sed -i '1i /// ' $WASM_PROJECT/deno/automerge_wasm.js; +echo "Running Wasm Deno tests"; +deno test $WASM_PROJECT/deno-tests/deno.ts --allow-read; + echo "Running JS Deno tests"; -yarn --cwd $JS_PROJECT install; ROOT_MODULE=$WASM_PROJECT/deno yarn --cwd $JS_PROJECT deno:build; yarn --cwd $JS_PROJECT deno:test; diff --git a/scripts/ci/fmt_js b/scripts/ci/fmt_js index acaf1e08..8f387b6a 100755 --- a/scripts/ci/fmt_js +++ b/scripts/ci/fmt_js @@ -1,5 +1,7 @@ #!/usr/bin/env bash set -eoux pipefail -yarn --cwd javascript prettier -c . +# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" +yarn --cwd $THIS_SCRIPT/../../javascript prettier -c . 
diff --git a/scripts/ci/js_tests b/scripts/ci/js_tests index b05edd1c..68205a33 100755 --- a/scripts/ci/js_tests +++ b/scripts/ci/js_tests @@ -1,6 +1,8 @@ -set -e +#!/usr/bin/env bash +set -eoux pipefail -THIS_SCRIPT=$(dirname "$0"); +# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; JS_PROJECT=$THIS_SCRIPT/../../javascript; E2E_PROJECT=$THIS_SCRIPT/../../javascript/e2e; diff --git a/scripts/ci/lint b/scripts/ci/lint index 15a0228d..87a16765 100755 --- a/scripts/ci/lint +++ b/scripts/ci/lint @@ -1,7 +1,10 @@ #!/usr/bin/env bash set -eoux pipefail -cd rust +# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" + +cd $THIS_SCRIPT/../../rust # Force clippy to consider all local sources # https://github.com/rust-lang/rust-clippy/issues/4612 find . 
-name "*.rs" -not -path "./target/*" -exec touch "{}" + diff --git a/scripts/ci/rust-docs b/scripts/ci/rust-docs index bbbc4fe1..4be0ed9a 100755 --- a/scripts/ci/rust-docs +++ b/scripts/ci/rust-docs @@ -1,6 +1,8 @@ #!/usr/bin/env bash set -eoux pipefail -cd rust +# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" +cd $THIS_SCRIPT/../../rust RUSTDOCFLAGS="-D rustdoc::broken-intra-doc-links -D warnings" \ cargo doc --no-deps --workspace --document-private-items diff --git a/scripts/ci/wasm_tests b/scripts/ci/wasm_tests index 2f273d99..fac344d8 100755 --- a/scripts/ci/wasm_tests +++ b/scripts/ci/wasm_tests @@ -1,4 +1,5 @@ -THIS_SCRIPT=$(dirname "$0"); +# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; yarn --cwd $WASM_PROJECT install; From d8baa116e7bc6f1f25e56bbbd75fc2ffc7140170 Mon Sep 17 00:00:00 2001 From: alexjg Date: Thu, 19 Jan 2023 17:02:47 +0000 Subject: [PATCH 693/730] automerge-rs: Add `ExId::to_bytes` (#491) The `ExId` structure has some internal details which make lookups for object IDs which were produced by the document doing the looking up faster. These internal details are quite specific to the implementation so we don't want to expose them as a public API. On the other hand, we need to be able to serialize `ExId`s so that FFI clients can hold on to them without referencing memory which is owned by the document (ahem, looking at you Java). Introduce `ExId::to_bytes` and `TryFrom<&[u8]> ExId` implementing a canonical serialization which includes a version tag, giving us compatibility options if we decide to change the implementation. 
--- rust/automerge/src/exid.rs | 135 +++++++++++++++++++++++++++++++++++++ rust/automerge/src/lib.rs | 2 +- 2 files changed, 136 insertions(+), 1 deletion(-) diff --git a/rust/automerge/src/exid.rs b/rust/automerge/src/exid.rs index 2c174e28..3ff8fbb5 100644 --- a/rust/automerge/src/exid.rs +++ b/rust/automerge/src/exid.rs @@ -1,3 +1,4 @@ +use crate::storage::parse; use crate::ActorId; use serde::Serialize; use serde::Serializer; @@ -11,6 +12,102 @@ pub enum ExId { Id(u64, ActorId, usize), } +const SERIALIZATION_VERSION_TAG: u8 = 0; +const TYPE_ROOT: u8 = 0; +const TYPE_ID: u8 = 1; + +impl ExId { + /// Serialize the ExId to a byte array. + pub fn to_bytes(&self) -> Vec { + // The serialized format is + // + // .--------------------------------. + // | version | type | data | + // +--------------------------------+ + // | 4 bytes |4 bytes | variable | + // '--------------------------------' + // + // Version is currently always `0` + // + // `data` depends on the type + // + // * If the type is `TYPE_ROOT` (0) then there is no data + // * If the type is `TYPE_ID` (1) then the data is + // + // .-------------------------------------------------------. + // | actor ID len | actor ID bytes | counter | actor index | + // '-------------------------------------------------------' + // + // Where the actor ID len, counter, and actor index are all uLEB encoded + // integers. The actor ID bytes is just an array of bytes. 
+ // + match self { + ExId::Root => { + let val: u8 = SERIALIZATION_VERSION_TAG | (TYPE_ROOT << 4); + vec![val] + } + ExId::Id(id, actor, counter) => { + let actor_bytes = actor.to_bytes(); + let mut bytes = Vec::with_capacity(actor_bytes.len() + 4 + 4); + let tag = SERIALIZATION_VERSION_TAG | (TYPE_ID << 4); + bytes.push(tag); + leb128::write::unsigned(&mut bytes, actor_bytes.len() as u64).unwrap(); + bytes.extend_from_slice(actor_bytes); + leb128::write::unsigned(&mut bytes, *counter as u64).unwrap(); + leb128::write::unsigned(&mut bytes, *id).unwrap(); + bytes + } + } + } +} + +#[derive(Debug, thiserror::Error)] +pub enum ObjIdFromBytesError { + #[error("no version tag")] + NoVersion, + #[error("invalid version tag")] + InvalidVersion(u8), + #[error("invalid type tag")] + InvalidType(u8), + #[error("invalid Actor ID length: {0}")] + ParseActorLen(String), + #[error("Not enough bytes in actor ID")] + ParseActor, + #[error("invalid counter: {0}")] + ParseCounter(String), + #[error("invalid actor index hint: {0}")] + ParseActorIdxHint(String), +} + +impl<'a> TryFrom<&'a [u8]> for ExId { + type Error = ObjIdFromBytesError; + + fn try_from(value: &'a [u8]) -> Result { + let i = parse::Input::new(value); + let (i, tag) = parse::take1::<()>(i).map_err(|_| ObjIdFromBytesError::NoVersion)?; + let version = tag & 0b1111; + if version != SERIALIZATION_VERSION_TAG { + return Err(ObjIdFromBytesError::InvalidVersion(version)); + } + let type_tag = tag >> 4; + match type_tag { + TYPE_ROOT => Ok(ExId::Root), + TYPE_ID => { + let (i, len) = parse::leb128_u64::(i) + .map_err(|e| ObjIdFromBytesError::ParseActorLen(e.to_string()))?; + let (i, actor) = parse::take_n::<()>(len as usize, i) + .map_err(|_| ObjIdFromBytesError::ParseActor)?; + let (i, counter) = parse::leb128_u64::(i) + .map_err(|e| ObjIdFromBytesError::ParseCounter(e.to_string()))?; + let (_i, actor_idx_hint) = parse::leb128_u64::(i) + .map_err(|e| ObjIdFromBytesError::ParseActorIdxHint(e.to_string()))?; + 
Ok(Self::Id(actor_idx_hint, actor.into(), counter as usize)) + } + other => Err(ObjIdFromBytesError::InvalidType(other)), + } + } +} + impl PartialEq for ExId { fn eq(&self, other: &Self) -> bool { match (self, other) { @@ -80,3 +177,41 @@ impl AsRef for ExId { self } } + +#[cfg(test)] +mod tests { + use super::ExId; + use proptest::prelude::*; + + use crate::ActorId; + + fn gen_actorid() -> impl Strategy { + proptest::collection::vec(any::(), 0..100).prop_map(ActorId::from) + } + + prop_compose! { + fn gen_non_root_objid()(actor in gen_actorid(), counter in any::(), idx in any::()) -> ExId { + ExId::Id(idx as u64, actor, counter) + } + } + + fn gen_obji() -> impl Strategy { + prop_oneof![Just(ExId::Root), gen_non_root_objid()] + } + + proptest! { + #[test] + fn objid_roundtrip(objid in gen_obji()) { + let bytes = objid.to_bytes(); + let objid2 = ExId::try_from(&bytes[..]).unwrap(); + assert_eq!(objid, objid2); + } + } + + #[test] + fn test_root_roundtrip() { + let bytes = ExId::Root.to_bytes(); + let objid2 = ExId::try_from(&bytes[..]).unwrap(); + assert_eq!(ExId::Root, objid2); + } +} diff --git a/rust/automerge/src/lib.rs b/rust/automerge/src/lib.rs index 97ff0650..58f5b263 100644 --- a/rust/automerge/src/lib.rs +++ b/rust/automerge/src/lib.rs @@ -93,7 +93,7 @@ pub use change::{Change, LoadError as LoadChangeError}; pub use error::AutomergeError; pub use error::InvalidActorId; pub use error::InvalidChangeHashSlice; -pub use exid::ExId as ObjId; +pub use exid::{ExId as ObjId, ObjIdFromBytesError}; pub use keys::Keys; pub use keys_at::KeysAt; pub use legacy::Change as ExpandedChange; From 9b44a75f69e0b6bcca7a8054395ff887bda92b7e Mon Sep 17 00:00:00 2001 From: alexjg Date: Thu, 19 Jan 2023 21:11:36 +0000 Subject: [PATCH 694/730] fix: don't panic when generating parents for hidden objects (#500) Problem: the `OpSet::export_key` method uses `query::ElemIdPos` to determine the index of sequence elements when exporting a key. 
This query returned `None` for invisible elements. The `Parents` iterator which is used to generate paths to objects in patches in `automerge-wasm` used `export_key`. The end result is that applying a remote change which deletes an object in a sequence would panic as it tries to generate a path for an invisible object. Solution: modify `query::ElemIdPos` to include invisible objects. This does mean that the path generated will refer to the previous visible object in the sequence as its index, but this is probably fine as for an invisible object the path shouldn't be used anyway. While we're here also change the return value of `OpSet::export_key` to an `Option` and make `query::Index::ops` private as obeisance to the Lady of the Golden Blade. --- rust/automerge/src/op_set.rs | 16 +++++---- rust/automerge/src/parents.rs | 44 ++++++++++++++++++++++++- rust/automerge/src/query.rs | 7 +++- rust/automerge/src/query/elem_id_pos.rs | 35 ++++++++++++++------ 4 files changed, 83 insertions(+), 19 deletions(-) diff --git a/rust/automerge/src/op_set.rs b/rust/automerge/src/op_set.rs index 1f5a4486..5b50d2b0 100644 --- a/rust/automerge/src/op_set.rs +++ b/rust/automerge/src/op_set.rs @@ -89,15 +89,17 @@ impl OpSetInternal { }) } - pub(crate) fn export_key(&self, obj: ObjId, key: Key, encoding: ListEncoding) -> Prop { + pub(crate) fn export_key(&self, obj: ObjId, key: Key, encoding: ListEncoding) -> Option { match key { - Key::Map(m) => Prop::Map(self.m.props.get(m).into()), + Key::Map(m) => self.m.props.safe_get(m).map(|s| Prop::Map(s.to_string())), Key::Seq(opid) => { - let i = self - .search(&obj, query::ElemIdPos::new(opid, encoding)) - .index() - .unwrap(); - Prop::Seq(i) + if opid.is_head() { + Some(Prop::Seq(0)) + } else { + self.search(&obj, query::ElemIdPos::new(opid, encoding)) + .index() + .map(Prop::Seq) + } } } } diff --git a/rust/automerge/src/parents.rs b/rust/automerge/src/parents.rs index 1d01ffbf..76c4bba1 100644 --- a/rust/automerge/src/parents.rs +++ 
b/rust/automerge/src/parents.rs @@ -47,7 +47,10 @@ impl<'a> Iterator for Parents<'a> { self.obj = obj; Some(Parent { obj: self.ops.id_to_exid(self.obj.0), - prop: self.ops.export_key(self.obj, key, ListEncoding::List), + prop: self + .ops + .export_key(self.obj, key, ListEncoding::List) + .unwrap(), visible, }) } else { @@ -62,3 +65,42 @@ pub struct Parent { pub prop: Prop, pub visible: bool, } + +#[cfg(test)] +mod tests { + use super::Parent; + use crate::{transaction::Transactable, Prop}; + + #[test] + fn test_invisible_parents() { + // Create a document with a list of objects, then delete one of the objects, then generate + // a path to the deleted object. + + let mut doc = crate::AutoCommit::new(); + let list = doc + .put_object(crate::ROOT, "list", crate::ObjType::List) + .unwrap(); + let obj1 = doc.insert_object(&list, 0, crate::ObjType::Map).unwrap(); + let _obj2 = doc.insert_object(&list, 1, crate::ObjType::Map).unwrap(); + doc.put(&obj1, "key", "value").unwrap(); + doc.delete(&list, 0).unwrap(); + + let mut parents = doc.parents(&obj1).unwrap().collect::>(); + parents.reverse(); + assert_eq!( + parents, + vec![ + Parent { + obj: crate::ROOT, + prop: Prop::Map("list".to_string()), + visible: true, + }, + Parent { + obj: list, + prop: Prop::Seq(0), + visible: false, + }, + ] + ); + } +} diff --git a/rust/automerge/src/query.rs b/rust/automerge/src/query.rs index 9707da33..721756c1 100644 --- a/rust/automerge/src/query.rs +++ b/rust/automerge/src/query.rs @@ -114,7 +114,7 @@ pub(crate) struct Index { pub(crate) visible16: usize, pub(crate) visible8: usize, /// Set of opids found in this node and below. 
- pub(crate) ops: HashSet, + ops: HashSet, } impl Index { @@ -140,6 +140,11 @@ impl Index { self.visible.contains_key(seen) } + /// Whether `opid` is in this node or any below it + pub(crate) fn has_op(&self, opid: &OpId) -> bool { + self.ops.contains(opid) + } + pub(crate) fn change_vis<'a>( &mut self, change_vis: ChangeVisibility<'a>, diff --git a/rust/automerge/src/query/elem_id_pos.rs b/rust/automerge/src/query/elem_id_pos.rs index 8eecd7e0..cb559216 100644 --- a/rust/automerge/src/query/elem_id_pos.rs +++ b/rust/automerge/src/query/elem_id_pos.rs @@ -1,14 +1,14 @@ use crate::{ op_tree::OpTreeNode, - types::{ElemId, Key, ListEncoding, Op}, + types::{ElemId, ListEncoding, Op, OpId}, }; use super::{QueryResult, TreeQuery}; -/// Lookup the index in the list that this elemid occupies. +/// Lookup the index in the list that this elemid occupies, includes hidden elements. #[derive(Clone, Debug)] pub(crate) struct ElemIdPos { - elemid: ElemId, + elem_opid: OpId, pos: usize, found: bool, encoding: ListEncoding, @@ -16,11 +16,20 @@ pub(crate) struct ElemIdPos { impl ElemIdPos { pub(crate) fn new(elemid: ElemId, encoding: ListEncoding) -> Self { - Self { - elemid, - pos: 0, - found: false, - encoding, + if elemid.is_head() { + Self { + elem_opid: elemid.0, + pos: 0, + found: true, + encoding, + } + } else { + Self { + elem_opid: elemid.0, + pos: 0, + found: false, + encoding, + } } } @@ -35,8 +44,11 @@ impl ElemIdPos { impl<'a> TreeQuery<'a> for ElemIdPos { fn query_node(&mut self, child: &OpTreeNode, _ops: &[Op]) -> QueryResult { + if self.found { + return QueryResult::Finish; + } // if index has our element then we can continue - if child.index.has_visible(&Key::Seq(self.elemid)) { + if child.index.has_op(&self.elem_opid) { // element is in this node somewhere QueryResult::Descend } else { @@ -47,7 +59,10 @@ impl<'a> TreeQuery<'a> for ElemIdPos { } fn query_element(&mut self, element: &crate::types::Op) -> QueryResult { - if element.elemid() == Some(self.elemid) { + if 
self.found { + return QueryResult::Finish; + } + if element.elemid() == Some(ElemId(self.elem_opid)) { // this is it self.found = true; return QueryResult::Finish; From 6b0ee6da2e7e0dfe9341c6fa4d3cc8c4b9b87549 Mon Sep 17 00:00:00 2001 From: alexjg Date: Thu, 19 Jan 2023 22:15:06 +0000 Subject: [PATCH 695/730] Bump js to 2.0.1-alpha.5 and automerge-wasm to 0.1.22 (#497) --- javascript/package.json | 4 ++-- rust/automerge-wasm/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/javascript/package.json b/javascript/package.json index 39464fac..caeeb647 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.1-alpha.4", + "version": "2.0.1-alpha.5", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", @@ -47,7 +47,7 @@ "typescript": "^4.9.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.21", + "@automerge/automerge-wasm": "0.1.22", "uuid": "^9.0.0" } } diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 76167a3e..0f133468 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.21", + "version": "0.1.22", "license": "MIT", "files": [ "README.md", From 98e755106f5d44e6cff2897921138ac3f95de3d0 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 23 Jan 2023 04:01:05 -0700 Subject: [PATCH 696/730] Fix and simplify lebsize calculations (#503) Before this change numbits_i64() was incorrect for every value of the form 0 - 2^x. 
This only manifested in a visible error if x%7 == 6 (so for -64, -8192, etc.) at which point `lebsize` would return a value one too large, causing a panic in commit(). --- .../automerge/src/columnar/encoding/leb128.rs | 47 +++++++++++-------- rust/automerge/tests/test.rs | 6 +++ 2 files changed, 34 insertions(+), 19 deletions(-) diff --git a/rust/automerge/src/columnar/encoding/leb128.rs b/rust/automerge/src/columnar/encoding/leb128.rs index 036cfba8..cbb82c31 100644 --- a/rust/automerge/src/columnar/encoding/leb128.rs +++ b/rust/automerge/src/columnar/encoding/leb128.rs @@ -1,29 +1,22 @@ /// The number of bytes required to encode `val` as a LEB128 integer -pub(crate) fn lebsize(val: i64) -> u64 { - let numbits = numbits_i64(val); - (numbits as f64 / 7.0).floor() as u64 + 1 +pub(crate) fn lebsize(mut val: i64) -> u64 { + if val < 0 { + val = !val + } + // 1 extra for the sign bit + leb_bytes(1 + 64 - val.leading_zeros() as u64) } /// The number of bytes required to encode `val` as a uLEB128 integer pub(crate) fn ulebsize(val: u64) -> u64 { - if val <= 1 { + if val == 0 { return 1; } - let numbits = numbits_u64(val); - let mut numblocks = (numbits as f64 / 7.0).floor() as u64; - if numbits % 7 != 0 { - numblocks += 1; - } - numblocks + leb_bytes(64 - val.leading_zeros() as u64) } -fn numbits_i64(val: i64) -> u64 { - // Is this right? 
This feels like it's not right - (std::mem::size_of::() as u32 * 8 - val.abs().leading_zeros()) as u64 -} - -fn numbits_u64(val: u64) -> u64 { - (std::mem::size_of::() as u32 * 8 - val.leading_zeros()) as u64 +fn leb_bytes(bits: u64) -> u64 { + (bits + 6) / 7 } #[cfg(test)] @@ -51,7 +44,7 @@ mod tests { #[test] fn ulebsize_examples() { - let scenarios = vec![0, 1, 127, 128, 129, 169]; + let scenarios = vec![0, 1, 127, 128, 129, 169, u64::MAX]; for val in scenarios { let mut out = Vec::new(); leb128::write::unsigned(&mut out, val).unwrap(); @@ -62,7 +55,23 @@ mod tests { #[test] fn lebsize_examples() { - let scenarios = vec![0, 1, -1, 127, 128, -127, -128, -2097152, 169]; + let scenarios = vec![ + 0, + 1, + -1, + 63, + 64, + -64, + -65, + 127, + 128, + -127, + -128, + -2097152, + 169, + i64::MIN, + i64::MAX, + ]; for val in scenarios { let mut out = Vec::new(); leb128::write::signed(&mut out, val).unwrap(); diff --git a/rust/automerge/tests/test.rs b/rust/automerge/tests/test.rs index 6ab797f0..4648cf87 100644 --- a/rust/automerge/tests/test.rs +++ b/rust/automerge/tests/test.rs @@ -1412,6 +1412,12 @@ fn invalid_deflate_stream() { assert!(Automerge::load(&bytes).is_err()); } +#[test] +fn negative_64() { + let mut doc = Automerge::new(); + assert!(doc.transact(|d| { d.put(ROOT, "a", -64_i64) }).is_ok()) +} + #[test] fn bad_change_on_optree_node_boundary() { let mut doc = Automerge::new(); From 1f7b109dcdb735366c5eff8ff0736738e740fee4 Mon Sep 17 00:00:00 2001 From: Andrew Jeffery Date: Mon, 23 Jan 2023 17:01:41 +0000 Subject: [PATCH 697/730] Add From for ScalarValue::Str (#506) --- rust/automerge/src/value.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/rust/automerge/src/value.rs b/rust/automerge/src/value.rs index b3142bdf..d8429f4e 100644 --- a/rust/automerge/src/value.rs +++ b/rust/automerge/src/value.rs @@ -266,6 +266,12 @@ impl<'a> From for Value<'a> { } } +impl<'a> From for Value<'a> { + fn from(s: SmolStr) -> Self { + 
Value::Scalar(Cow::Owned(ScalarValue::Str(s))) + } +} + impl<'a> From for Value<'a> { fn from(c: char) -> Self { Value::Scalar(Cow::Owned(ScalarValue::Str(SmolStr::new(c.to_string())))) From 78adbc4ff94b8ff62df0e02de1cd4fb519c8e9a9 Mon Sep 17 00:00:00 2001 From: Alex Currie-Clark <1306728+acurrieclark@users.noreply.github.com> Date: Mon, 23 Jan 2023 17:02:02 +0000 Subject: [PATCH 698/730] Update patch types (#499) * Update `Patch` types * Clarify that the splice patch applies to text * Add Splice patch type to exports * Add new patches to javascript --- javascript/src/stable.ts | 3 ++- javascript/src/unstable.ts | 3 ++- rust/automerge-wasm/index.d.ts | 10 ++++++++-- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/javascript/src/stable.ts b/javascript/src/stable.ts index 1f38cb27..9db4d0e2 100644 --- a/javascript/src/stable.ts +++ b/javascript/src/stable.ts @@ -41,7 +41,8 @@ import type { export type { PutPatch, DelPatch, - SplicePatch, + SpliceTextPatch, + InsertPatch, IncPatch, SyncMessage, } from "@automerge/automerge-wasm" diff --git a/javascript/src/unstable.ts b/javascript/src/unstable.ts index b448d955..21b5be08 100644 --- a/javascript/src/unstable.ts +++ b/javascript/src/unstable.ts @@ -70,7 +70,8 @@ export type Conflicts = { [key: string]: AutomergeValue } export type { PutPatch, DelPatch, - SplicePatch, + SpliceTextPatch, + InsertPatch, IncPatch, SyncMessage, } from "@automerge/automerge-wasm" diff --git a/rust/automerge-wasm/index.d.ts b/rust/automerge-wasm/index.d.ts index 29586b47..be12e4c1 100644 --- a/rust/automerge-wasm/index.d.ts +++ b/rust/automerge-wasm/index.d.ts @@ -94,7 +94,7 @@ export type Op = { pred: string[], } -export type Patch = PutPatch | DelPatch | SplicePatch | IncPatch; +export type Patch = PutPatch | DelPatch | SpliceTextPatch | IncPatch | InsertPatch; export type PutPatch = { action: 'put' @@ -115,9 +115,15 @@ export type DelPatch = { length?: number, } -export type SplicePatch = { +export type SpliceTextPatch = { 
action: 'splice' path: Prop[], + value: string, +} + +export type InsertPatch = { + action: 'insert' + path: Prop[], values: Value[], } From 819767cc3327ed6e5724970aae39173775c9e5c1 Mon Sep 17 00:00:00 2001 From: alexjg Date: Mon, 23 Jan 2023 19:19:55 +0000 Subject: [PATCH 699/730] fix: use saturating_sub when updating cached text width (#505) Problem: In `automerge::query::Index::change_vis` we use `-=` to subtract the width of an operation which is being hidden from the text widths which we store on the index of each node in the optree. This index represents the width of all the visible text operations in this node and below. This was causing an integer underflow error when encountering some list operations. More specifically, when a `ScalarValue::Str` in a list was made invisible by a later operation which contained a _shorter_ string, the width subtracted from the indexed text widths could be longer than the current index. Solution: use `saturating_sub` instead. This is technically papering over the problem because really the width should never go below zero, but the text widths are only relevant for text objects where the existing logic works as advertised because we don't have a `set` operation for text indices. A more robust solution would be to track the type of the Index (and consequently of the `OpTree`) at the type level, but time is limited and problems are infinite. Also, add a lengthy description of the reason we are using `saturating_sub` so that when I read it in about a month I don't have to redo the painful debugging process that got me to this commit. 
--- rust/automerge/src/query.rs | 81 +++++++++++++++++++++++++++++-------- 1 file changed, 64 insertions(+), 17 deletions(-) diff --git a/rust/automerge/src/query.rs b/rust/automerge/src/query.rs index 721756c1..640ecf8d 100644 --- a/rust/automerge/src/query.rs +++ b/rust/automerge/src/query.rs @@ -107,12 +107,65 @@ pub(crate) enum QueryResult { Finish, } +#[derive(Clone, Debug, PartialEq)] +struct TextWidth { + utf8: usize, + utf16: usize, +} + +impl TextWidth { + fn add_op(&mut self, op: &Op) { + self.utf8 += op.width(ListEncoding::Text(TextEncoding::Utf8)); + self.utf16 += op.width(ListEncoding::Text(TextEncoding::Utf16)); + } + + fn remove_op(&mut self, op: &Op) { + // Why are we using saturating_sub here? Shouldn't this always be greater than 0? + // + // In the case of objects which are _not_ `Text` we may end up subtracting more than the + // current width. This can happen if the elements in a list are `ScalarValue::str` and + // there are conflicting elements for the same index in the list. Like so: + // + // ```notrust + // [ + // "element", + // ["conflict1", "conflict2_longer"], + // "element" + // ] + // ``` + // + // Where there are two conflicted elements at index 1 + // + // in `Index::insert` and `Index::change_visibility` we add the width of the inserted op in + // utf8 and utf16 to the current width, but only if there was not a previous element for + // that index. Imagine that we encounter the "conflict1" op first, then we will add the + // length of 'conflict1' to the text widths. When 'conflict2_longer' is added we don't do + // anything because we've already seen an op for this index. Imagine that later we remove + // the `conflict2_longer` op, then we will end up subtracting the length of + // 'conflict2_longer' from the text widths, hence, `saturating_sub`. This isn't a problem + // because for non text objects we don't need the text widths to be accurate anyway. 
+ // + // Really this is a sign that we should be tracking the type of the Index (List or Text) at + // the type level, but for now we just look the other way. + self.utf8 = self + .utf8 + .saturating_sub(op.width(ListEncoding::Text(TextEncoding::Utf8))); + self.utf16 = self + .utf16 + .saturating_sub(op.width(ListEncoding::Text(TextEncoding::Utf16))); + } + + fn merge(&mut self, other: &TextWidth) { + self.utf8 += other.utf8; + self.utf16 += other.utf16; + } +} + #[derive(Clone, Debug, PartialEq)] pub(crate) struct Index { /// The map of visible keys to the number of visible operations for that key. - pub(crate) visible: HashMap, - pub(crate) visible16: usize, - pub(crate) visible8: usize, + visible: HashMap, + visible_text: TextWidth, /// Set of opids found in this node and below. ops: HashSet, } @@ -121,8 +174,7 @@ impl Index { pub(crate) fn new() -> Self { Index { visible: Default::default(), - visible16: 0, - visible8: 0, + visible_text: TextWidth { utf8: 0, utf16: 0 }, ops: Default::default(), } } @@ -131,8 +183,8 @@ impl Index { pub(crate) fn visible_len(&self, encoding: ListEncoding) -> usize { match encoding { ListEncoding::List => self.visible.len(), - ListEncoding::Text(TextEncoding::Utf8) => self.visible8, - ListEncoding::Text(TextEncoding::Utf16) => self.visible16, + ListEncoding::Text(TextEncoding::Utf8) => self.visible_text.utf8, + ListEncoding::Text(TextEncoding::Utf16) => self.visible_text.utf16, } } @@ -159,8 +211,7 @@ impl Index { (true, false) => match self.visible.get(&key).copied() { Some(n) if n == 1 => { self.visible.remove(&key); - self.visible8 -= op.width(ListEncoding::Text(TextEncoding::Utf8)); - self.visible16 -= op.width(ListEncoding::Text(TextEncoding::Utf16)); + self.visible_text.remove_op(op); } Some(n) => { self.visible.insert(key, n - 1); @@ -172,8 +223,7 @@ impl Index { self.visible.insert(key, n + 1); } else { self.visible.insert(key, 1); - self.visible8 += op.width(ListEncoding::Text(TextEncoding::Utf8)); - self.visible16 += 
op.width(ListEncoding::Text(TextEncoding::Utf16)); + self.visible_text.add_op(op); } } _ => {} @@ -189,8 +239,7 @@ impl Index { self.visible.insert(key, n + 1); } else { self.visible.insert(key, 1); - self.visible8 += op.width(ListEncoding::Text(TextEncoding::Utf8)); - self.visible16 += op.width(ListEncoding::Text(TextEncoding::Utf16)); + self.visible_text.add_op(op); } } } @@ -202,8 +251,7 @@ impl Index { match self.visible.get(&key).copied() { Some(n) if n == 1 => { self.visible.remove(&key); - self.visible8 -= op.width(ListEncoding::Text(TextEncoding::Utf8)); - self.visible16 -= op.width(ListEncoding::Text(TextEncoding::Utf16)); + self.visible_text.remove_op(op); } Some(n) => { self.visible.insert(key, n - 1); @@ -223,8 +271,7 @@ impl Index { .and_modify(|len| *len += *other_len) .or_insert(*other_len); } - self.visible16 += other.visible16; - self.visible8 += other.visible8; + self.visible_text.merge(&other.visible_text); } } From 931ee7e77bd83d5c8b52c79fc2c99143171a33a5 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 25 Jan 2023 09:03:05 -0700 Subject: [PATCH 700/730] Add Fuzz Testing (#498) * Add fuzz testing for document load * Fix fuzz crashers and add to test suite --- rust/automerge/fuzz/.gitignore | 3 ++ rust/automerge/fuzz/Cargo.toml | 29 ++++++++++++++ rust/automerge/fuzz/fuzz_targets/load.rs | 37 ++++++++++++++++++ .../src/columnar/column_range/deps.rs | 6 ++- .../src/columnar/column_range/opid_list.rs | 7 +++- .../src/storage/columns/raw_column.rs | 5 ++- .../src/storage/load/change_collector.rs | 15 ++++++- ...h-da39a3ee5e6b4b0d3255bfef95601890afd80709 | Bin 0 -> 10 bytes .../fuzz-crashers/incorrect_max_op.automerge | Bin 0 -> 126 bytes .../invalid_deflate_stream.automerge | Bin 0 -> 123 bytes .../fuzz-crashers/missing_actor.automerge | Bin 0 -> 126 bytes .../overflow_in_length.automerge | Bin 0 -> 182 bytes .../fuzz-crashers/too_many_deps.automerge | Bin 0 -> 134 bytes .../fuzz-crashers/too_many_ops.automerge | Bin 0 -> 134 bytes 
rust/automerge/tests/test.rs | 20 +++++----- 15 files changed, 108 insertions(+), 14 deletions(-) create mode 100644 rust/automerge/fuzz/.gitignore create mode 100644 rust/automerge/fuzz/Cargo.toml create mode 100644 rust/automerge/fuzz/fuzz_targets/load.rs create mode 100644 rust/automerge/tests/fuzz-crashers/crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 create mode 100644 rust/automerge/tests/fuzz-crashers/incorrect_max_op.automerge create mode 100644 rust/automerge/tests/fuzz-crashers/invalid_deflate_stream.automerge create mode 100644 rust/automerge/tests/fuzz-crashers/missing_actor.automerge create mode 100644 rust/automerge/tests/fuzz-crashers/overflow_in_length.automerge create mode 100644 rust/automerge/tests/fuzz-crashers/too_many_deps.automerge create mode 100644 rust/automerge/tests/fuzz-crashers/too_many_ops.automerge diff --git a/rust/automerge/fuzz/.gitignore b/rust/automerge/fuzz/.gitignore new file mode 100644 index 00000000..2eb15f8e --- /dev/null +++ b/rust/automerge/fuzz/.gitignore @@ -0,0 +1,3 @@ +target +corpus +coverage diff --git a/rust/automerge/fuzz/Cargo.toml b/rust/automerge/fuzz/Cargo.toml new file mode 100644 index 00000000..3461e9f3 --- /dev/null +++ b/rust/automerge/fuzz/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "automerge-fuzz" +version = "0.0.0" +publish = false +edition = "2021" + +[package.metadata] +cargo-fuzz = true + +[dependencies] +libfuzzer-sys = "0.4" +leb128 = "^0.2.5" +sha2 = "^0.10.0" + +[dependencies.automerge] +path = ".." 
+ +# Prevent this from interfering with workspaces +[workspace] +members = ["."] + +[profile.release] +debug = 1 + +[[bin]] +name = "load" +path = "fuzz_targets/load.rs" +test = false +doc = false \ No newline at end of file diff --git a/rust/automerge/fuzz/fuzz_targets/load.rs b/rust/automerge/fuzz/fuzz_targets/load.rs new file mode 100644 index 00000000..0dea2624 --- /dev/null +++ b/rust/automerge/fuzz/fuzz_targets/load.rs @@ -0,0 +1,37 @@ +#![no_main] + +use sha2::{Sha256, Digest}; +use automerge::{Automerge}; +use libfuzzer_sys::arbitrary::{Arbitrary, Result, Unstructured}; +use libfuzzer_sys::fuzz_target; + +#[derive(Debug)] +struct DocumentChunk { + bytes: Vec, +} + +fn add_header(typ: u8, data: &[u8]) -> Vec { + let mut input = vec![u8::from(typ)]; + leb128::write::unsigned(&mut input, data.len() as u64).unwrap(); + input.extend(data.as_ref()); + let hash_result = Sha256::digest(input.clone()); + let array: [u8; 32] = hash_result.into(); + + let mut out = vec![133, 111, 74, 131, array[0], array[1], array[2], array[3]]; + out.extend(input); + out +} + +impl<'a> Arbitrary<'a> for DocumentChunk +{ + fn arbitrary(u: &mut Unstructured<'a>) -> Result { + let input = u.bytes(u.len())?; + let contents = add_header(0, input); + + return Ok(DocumentChunk{bytes: contents}) + } +} + +fuzz_target!(|doc: DocumentChunk| { + Automerge::load(&doc.bytes); +}); diff --git a/rust/automerge/src/columnar/column_range/deps.rs b/rust/automerge/src/columnar/column_range/deps.rs index df49192a..1956acd1 100644 --- a/rust/automerge/src/columnar/column_range/deps.rs +++ b/rust/automerge/src/columnar/column_range/deps.rs @@ -62,7 +62,11 @@ impl<'a> DepsIter<'a> { } None => return Ok(None), }; - let mut result = Vec::with_capacity(num); + // We cannot trust `num` because it is provided over the network, + // but in the common case it will be correct and small (so we + // use with_capacity to make sure the vector is precisely the right + // size). 
+ let mut result = Vec::with_capacity(std::cmp::min(num, 100)); while result.len() < num { match self .deps diff --git a/rust/automerge/src/columnar/column_range/opid_list.rs b/rust/automerge/src/columnar/column_range/opid_list.rs index 12279c08..6a9c8a38 100644 --- a/rust/automerge/src/columnar/column_range/opid_list.rs +++ b/rust/automerge/src/columnar/column_range/opid_list.rs @@ -189,7 +189,12 @@ impl<'a> OpIdListIter<'a> { Some(None) => return Err(DecodeColumnError::unexpected_null("num")), None => return Ok(None), }; - let mut p = Vec::with_capacity(num as usize); + + // We cannot trust `num` because it is provided over the network, + // but in the common case it will be correct and small (so we + // use with_capacity to make sure the vector is precisely the right + // size). + let mut p = Vec::with_capacity(std::cmp::min(num, 100) as usize); for _ in 0..num { let actor = self .actor diff --git a/rust/automerge/src/storage/columns/raw_column.rs b/rust/automerge/src/storage/columns/raw_column.rs index 808b53cf..ac9a5759 100644 --- a/rust/automerge/src/storage/columns/raw_column.rs +++ b/rust/automerge/src/storage/columns/raw_column.rs @@ -219,7 +219,10 @@ impl RawColumns { let columns: Vec> = specs_and_lens .into_iter() .scan(0_usize, |offset, (spec, len)| { - let end = *offset + len as usize; + // Note: we use a saturating add here as len was passed over the network + // and so could be anything. If the addition does every saturate we would + // expect parsing to fail later (but at least it won't panic!). 
+ let end = offset.saturating_add(len as usize); let data = *offset..end; *offset = end; Some(RawColumn { diff --git a/rust/automerge/src/storage/load/change_collector.rs b/rust/automerge/src/storage/load/change_collector.rs index 75ef98f1..d05367a9 100644 --- a/rust/automerge/src/storage/load/change_collector.rs +++ b/rust/automerge/src/storage/load/change_collector.rs @@ -26,6 +26,8 @@ pub(crate) enum Error { MissingChange, #[error("unable to read change metadata: {0}")] ReadChange(Box), + #[error("incorrect max op")] + IncorrectMaxOp, #[error("missing ops")] MissingOps, } @@ -180,7 +182,18 @@ impl<'a> PartialChange<'a> { .ops .iter() .map(|(obj, op)| op_as_actor_id(obj, op, metadata)); - let actor = metadata.actors.get(self.actor).clone(); + let actor = metadata + .actors + .safe_get(self.actor) + .ok_or_else(|| { + tracing::error!(actor_index = self.actor, "actor out of bounds"); + Error::MissingActor + })? + .clone(); + + if num_ops > self.max_op { + return Err(Error::IncorrectMaxOp); + } let change = match StoredChange::builder() .with_dependencies(deps) diff --git a/rust/automerge/tests/fuzz-crashers/crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 b/rust/automerge/tests/fuzz-crashers/crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 new file mode 100644 index 0000000000000000000000000000000000000000..bcb12cddc6980d44c13dd0351899abe297817f70 GIT binary patch literal 10 RcmZq8_iCQDXxb$P1^^m_1Y!UH literal 0 HcmV?d00001 diff --git a/rust/automerge/tests/fuzz-crashers/incorrect_max_op.automerge b/rust/automerge/tests/fuzz-crashers/incorrect_max_op.automerge new file mode 100644 index 0000000000000000000000000000000000000000..05cc2c82681529ae087bc4ab88c3ebc7ffbf73a7 GIT binary patch literal 126 zcmZq8_iFy6Eq;Zegi(Mga9P2Di~d0kS!`#NOG_3rZg0ucpBfVWKQ9lyTY8rUT zb)+h5Oppy)Q?ugCCKWbDCT1pKCS@iErZ6TBQ8q;&(}d9p$O&g@U}UOisAmLX5M-}s S$!CP{K$KfPLqTyp0|Nj9lO`qr literal 0 HcmV?d00001 diff --git a/rust/automerge/tests/fuzz-crashers/invalid_deflate_stream.automerge 
b/rust/automerge/tests/fuzz-crashers/invalid_deflate_stream.automerge new file mode 100644 index 0000000000000000000000000000000000000000..21e869eb4bafd66b9f2a3bb7f856fd2b312c61fa GIT binary patch literal 123 zcmZq8_i8o(0)|3H0T7K07?C;H*ldhU%uEVQ226%P$f3ZZ2x2lCGFdW(GdD0Y)icyH Z0wDtsvez@ERe|^*0kik}_trBo004Sr7)}5H literal 0 HcmV?d00001 diff --git a/rust/automerge/tests/fuzz-crashers/missing_actor.automerge b/rust/automerge/tests/fuzz-crashers/missing_actor.automerge new file mode 100644 index 0000000000000000000000000000000000000000..cc8c61b14d4873ab1a117ad4d1b6eb39d9037e25 GIT binary patch literal 126 zcmZq8_iAP@etLtUgi+xAhCLyj-A82@#BJP1t8G;SXSckWBrGZ zVG=09K&AgdKpjk?5Kt>X6i6IIbASji09nky!obAH9t+h3vXupFBO?P)mWhcymXQGf DM*mhW literal 0 HcmV?d00001 diff --git a/rust/automerge/tests/fuzz-crashers/too_many_deps.automerge b/rust/automerge/tests/fuzz-crashers/too_many_deps.automerge new file mode 100644 index 0000000000000000000000000000000000000000..657ce9930f000a3b8d4585e3889220b3b48e1db0 GIT binary patch literal 134 zcmZq8_iCP-9<9Jo!zl26!=8}NZl_EAt>0%B6p6fGoFREZtGfJf_fnlJA6~mF{yla} zlIf?86Z5p}SdGt-$7i!KGBGm=GbuAUaD_2(h_WdHnI?=*OkqsnEDelI^$hilK*&)4 b3JMtN+3Q*I848L)GK{+!>)rD6K^z7E|5`CV literal 0 HcmV?d00001 diff --git a/rust/automerge/tests/fuzz-crashers/too_many_ops.automerge b/rust/automerge/tests/fuzz-crashers/too_many_ops.automerge new file mode 100644 index 0000000000000000000000000000000000000000..661258b0933e854bde60d741b6a47c731029de3b GIT binary patch literal 134 zcmZq8_i7G3?{Jo(hEd@ChCLyjvz;#Ww|<{lP$cq#afajtt?Kf_-Ai?@e0c4y`1jZ? 
zNv5AVPR!G?V>LcU9-qy|$i&Pf%%sfZz!b*BA Result<(), AutomergeError> { } #[test] -fn invalid_deflate_stream() { - let bytes: [u8; 123] = [ - 133, 111, 74, 131, 48, 48, 48, 48, 0, 113, 1, 16, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, - 48, 48, 48, 48, 48, 48, 1, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, - 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 6, 1, 2, 3, 2, 32, 2, 48, - 2, 49, 2, 49, 2, 8, 32, 4, 33, 2, 48, 2, 49, 1, 49, 2, 57, 2, 87, 3, 128, 1, 2, 127, 0, - 127, 1, 127, 1, 127, 0, 127, 0, 127, 7, 127, 2, 102, 122, 127, 0, 127, 1, 1, 127, 1, 127, - 54, 239, 191, 189, 127, 0, 0, - ]; +fn fuzz_crashers() { + let paths = fs::read_dir("./tests/fuzz-crashers").unwrap(); - assert!(Automerge::load(&bytes).is_err()); + for path in paths { + // uncomment this line to figure out which fixture is crashing: + // println!("{:?}", path.as_ref().unwrap().path().display()); + let bytes = fs::read(path.as_ref().unwrap().path()); + let res = Automerge::load(&bytes.unwrap()); + assert!(res.is_err()); + } } #[test] From f428fe0169434782254b9f4320e9b4e7269c7bdb Mon Sep 17 00:00:00 2001 From: alexjg Date: Fri, 27 Jan 2023 17:23:13 +0000 Subject: [PATCH 701/730] Improve typescript types (#508) --- javascript/.eslintrc.cjs | 9 + javascript/src/conflicts.ts | 100 ++++++++ javascript/src/counter.ts | 2 +- javascript/src/low_level.ts | 1 + javascript/src/proxies.ts | 268 ++++++++++++++------- javascript/src/stable.ts | 102 +++----- javascript/src/text.ts | 10 +- javascript/src/types.ts | 3 +- javascript/src/unstable.ts | 45 ++-- javascript/src/unstable_types.ts | 30 +++ javascript/test/basic_test.ts | 1 - javascript/test/legacy_tests.ts | 7 +- javascript/test/stable_unstable_interop.ts | 58 +++++ 13 files changed, 450 insertions(+), 186 deletions(-) create mode 100644 javascript/src/conflicts.ts create mode 100644 javascript/src/unstable_types.ts diff --git a/javascript/.eslintrc.cjs b/javascript/.eslintrc.cjs index 5d11eb94..88776271 100644 --- 
a/javascript/.eslintrc.cjs +++ b/javascript/.eslintrc.cjs @@ -3,4 +3,13 @@ module.exports = { parser: "@typescript-eslint/parser", plugins: ["@typescript-eslint"], extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"], + rules: { + "@typescript-eslint/no-unused-vars": [ + "error", + { + argsIgnorePattern: "^_", + varsIgnorePattern: "^_", + }, + ], + }, } diff --git a/javascript/src/conflicts.ts b/javascript/src/conflicts.ts new file mode 100644 index 00000000..52af23e1 --- /dev/null +++ b/javascript/src/conflicts.ts @@ -0,0 +1,100 @@ +import { Counter, type AutomergeValue } from "./types" +import { Text } from "./text" +import { type AutomergeValue as UnstableAutomergeValue } from "./unstable_types" +import { type Target, Text1Target, Text2Target } from "./proxies" +import { mapProxy, listProxy, ValueType } from "./proxies" +import type { Prop, ObjID } from "@automerge/automerge-wasm" +import { Automerge } from "@automerge/automerge-wasm" + +export type ConflictsF = { [key: string]: ValueType } +export type Conflicts = ConflictsF +export type UnstableConflicts = ConflictsF + +export function stableConflictAt( + context: Automerge, + objectId: ObjID, + prop: Prop +): Conflicts | undefined { + return conflictAt( + context, + objectId, + prop, + true, + (context: Automerge, conflictId: ObjID): AutomergeValue => { + return new Text(context.text(conflictId)) + } + ) +} + +export function unstableConflictAt( + context: Automerge, + objectId: ObjID, + prop: Prop +): UnstableConflicts | undefined { + return conflictAt( + context, + objectId, + prop, + true, + (context: Automerge, conflictId: ObjID): UnstableAutomergeValue => { + return context.text(conflictId) + } + ) +} + +function conflictAt( + context: Automerge, + objectId: ObjID, + prop: Prop, + textV2: boolean, + handleText: (a: Automerge, conflictId: ObjID) => ValueType +): ConflictsF | undefined { + const values = context.getAll(objectId, prop) + if (values.length <= 1) { + return + } + const 
result: ConflictsF = {} + for (const fullVal of values) { + switch (fullVal[0]) { + case "map": + result[fullVal[1]] = mapProxy( + context, + fullVal[1], + textV2, + [prop], + true + ) + break + case "list": + result[fullVal[1]] = listProxy( + context, + fullVal[1], + textV2, + [prop], + true + ) + break + case "text": + result[fullVal[1]] = handleText(context, fullVal[1] as ObjID) + break + case "str": + case "uint": + case "int": + case "f64": + case "boolean": + case "bytes": + case "null": + result[fullVal[2]] = fullVal[1] as ValueType + break + case "counter": + result[fullVal[2]] = new Counter(fullVal[1]) as ValueType + break + case "timestamp": + result[fullVal[2]] = new Date(fullVal[1]) as ValueType + break + default: + throw RangeError(`datatype ${fullVal[0]} unimplemented`) + } + } + return result +} diff --git a/javascript/src/counter.ts b/javascript/src/counter.ts index 873fa157..88adb840 100644 --- a/javascript/src/counter.ts +++ b/javascript/src/counter.ts @@ -100,7 +100,7 @@ export function getWriteableCounter( path: Prop[], objectId: ObjID, key: Prop -) { +): WriteableCounter { return new WriteableCounter(value, context, path, objectId, key) } diff --git a/javascript/src/low_level.ts b/javascript/src/low_level.ts index 63ef5546..f44f3a32 100644 --- a/javascript/src/low_level.ts +++ b/javascript/src/low_level.ts @@ -14,6 +14,7 @@ export type { ChangeToEncode } from "@automerge/automerge-wasm" export function UseApi(api: API) { for (const k in api) { + // eslint-disable-next-line @typescript-eslint/no-extra-semi,@typescript-eslint/no-explicit-any ;(ApiHandler as any)[k] = (api as any)[k] } } diff --git a/javascript/src/proxies.ts b/javascript/src/proxies.ts index 7a99cf80..54a8dd71 100644 --- a/javascript/src/proxies.ts +++ b/javascript/src/proxies.ts @@ -1,3 +1,4 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ import { Text } from "./text" import { Automerge, @@ -6,13 +7,12 @@ import { type Prop, } from "@automerge/automerge-wasm" -import 
type { - AutomergeValue, - ScalarValue, - MapValue, - ListValue, - TextValue, -} from "./types" +import type { AutomergeValue, ScalarValue, MapValue, ListValue } from "./types" +import { + type AutomergeValue as UnstableAutomergeValue, + MapValue as UnstableMapValue, + ListValue as UnstableListValue, +} from "./unstable_types" import { Counter, getWriteableCounter } from "./counter" import { STATE, @@ -26,19 +26,38 @@ import { } from "./constants" import { RawString } from "./raw_string" -type Target = { +type TargetCommon = { context: Automerge objectId: ObjID path: Array readonly: boolean heads?: Array - cache: {} + cache: object trace?: any frozen: boolean - textV2: boolean } -function parseListIndex(key) { +export type Text2Target = TargetCommon & { textV2: true } +export type Text1Target = TargetCommon & { textV2: false } +export type Target = Text1Target | Text2Target + +export type ValueType = T extends Text2Target + ? UnstableAutomergeValue + : T extends Text1Target + ? AutomergeValue + : never +type MapValueType = T extends Text2Target + ? UnstableMapValue + : T extends Text1Target + ? MapValue + : never +type ListValueType = T extends Text2Target + ? UnstableListValue + : T extends Text1Target + ? 
ListValue + : never + +function parseListIndex(key: any) { if (typeof key === "string" && /^[0-9]+$/.test(key)) key = parseInt(key, 10) if (typeof key !== "number") { return key @@ -49,7 +68,10 @@ function parseListIndex(key) { return key } -function valueAt(target: Target, prop: Prop): AutomergeValue | undefined { +function valueAt( + target: T, + prop: Prop +): ValueType | undefined { const { context, objectId, path, readonly, heads, textV2 } = target const value = context.getWithType(objectId, prop, heads) if (value === null) { @@ -61,7 +83,7 @@ function valueAt(target: Target, prop: Prop): AutomergeValue | undefined { case undefined: return case "map": - return mapProxy( + return mapProxy( context, val as ObjID, textV2, @@ -70,7 +92,7 @@ function valueAt(target: Target, prop: Prop): AutomergeValue | undefined { heads ) case "list": - return listProxy( + return listProxy( context, val as ObjID, textV2, @@ -80,7 +102,7 @@ function valueAt(target: Target, prop: Prop): AutomergeValue | undefined { ) case "text": if (textV2) { - return context.text(val as ObjID, heads) + return context.text(val as ObjID, heads) as ValueType } else { return textProxy( context, @@ -88,29 +110,36 @@ function valueAt(target: Target, prop: Prop): AutomergeValue | undefined { [...path, prop], readonly, heads - ) + ) as unknown as ValueType } case "str": - return val + return val as ValueType case "uint": - return val + return val as ValueType case "int": - return val + return val as ValueType case "f64": - return val + return val as ValueType case "boolean": - return val + return val as ValueType case "null": - return null + return null as ValueType case "bytes": - return val + return val as ValueType case "timestamp": - return val + return val as ValueType case "counter": { if (readonly) { - return new Counter(val as number) + return new Counter(val as number) as ValueType } else { - return getWriteableCounter(val as number, context, path, objectId, prop) + const counter: Counter = 
getWriteableCounter( + val as number, + context, + path, + objectId, + prop + ) + return counter as ValueType } } default: @@ -118,7 +147,21 @@ function valueAt(target: Target, prop: Prop): AutomergeValue | undefined { } } -function import_value(value: any, textV2: boolean) { +type ImportedValue = + | [null, "null"] + | [number, "uint"] + | [number, "int"] + | [number, "f64"] + | [number, "counter"] + | [number, "timestamp"] + | [string, "str"] + | [Text | string, "text"] + | [Uint8Array, "bytes"] + | [Array, "list"] + | [Record, "map"] + | [boolean, "boolean"] + +function import_value(value: any, textV2: boolean): ImportedValue { switch (typeof value) { case "object": if (value == null) { @@ -170,7 +213,10 @@ function import_value(value: any, textV2: boolean) { } const MapHandler = { - get(target: Target, key): AutomergeValue | { handle: Automerge } { + get( + target: T, + key: any + ): ValueType | ObjID | boolean | { handle: Automerge } { const { context, objectId, cache } = target if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] @@ -185,7 +231,7 @@ const MapHandler = { return cache[key] }, - set(target: Target, key, val) { + set(target: Target, key: any, val: any) { const { context, objectId, path, readonly, frozen, textV2 } = target target.cache = {} // reset cache on set if (val && val[OBJECT_ID]) { @@ -221,8 +267,10 @@ const MapHandler = { } case "text": { if (textV2) { + assertString(value) context.putObject(objectId, key, value) } else { + assertText(value) const text = context.putObject(objectId, key, "") const proxyText = textProxy(context, text, [...path, key], readonly) for (let i = 0; i < value.length; i++) { @@ -251,7 +299,7 @@ const MapHandler = { return true }, - deleteProperty(target: Target, key) { + deleteProperty(target: Target, key: any) { const { context, objectId, readonly } = target target.cache = {} // reset cache on delete if (readonly) { @@ -261,12 +309,12 @@ const MapHandler = { return true }, - has(target: Target, 
key) { + has(target: Target, key: any) { const value = this.get(target, key) return value !== undefined }, - getOwnPropertyDescriptor(target: Target, key) { + getOwnPropertyDescriptor(target: Target, key: any) { // const { context, objectId } = target const value = this.get(target, key) if (typeof value !== "undefined") { @@ -287,11 +335,20 @@ const MapHandler = { } const ListHandler = { - get(target: Target, index) { + get( + target: T, + index: any + ): + | ValueType + | boolean + | ObjID + | { handle: Automerge } + | number + | ((_: any) => boolean) { const { context, objectId, heads } = target index = parseListIndex(index) if (index === Symbol.hasInstance) { - return instance => { + return (instance: any) => { return Array.isArray(instance) } } @@ -304,13 +361,13 @@ const ListHandler = { if (index === STATE) return { handle: context } if (index === "length") return context.length(objectId, heads) if (typeof index === "number") { - return valueAt(target, index) + return valueAt(target, index) as ValueType } else { return listMethods(target)[index] } }, - set(target: Target, index, val) { + set(target: Target, index: any, val: any) { const { context, objectId, path, readonly, frozen, textV2 } = target index = parseListIndex(index) if (val && val[OBJECT_ID]) { @@ -334,7 +391,7 @@ const ListHandler = { } switch (datatype) { case "list": { - let list + let list: ObjID if (index >= context.length(objectId)) { list = context.insertObject(objectId, index, []) } else { @@ -352,13 +409,15 @@ const ListHandler = { } case "text": { if (textV2) { + assertString(value) if (index >= context.length(objectId)) { context.insertObject(objectId, index, value) } else { context.putObject(objectId, index, value) } } else { - let text + let text: ObjID + assertText(value) if (index >= context.length(objectId)) { text = context.insertObject(objectId, index, "") } else { @@ -370,7 +429,7 @@ const ListHandler = { break } case "map": { - let map + let map: ObjID if (index >= 
context.length(objectId)) { map = context.insertObject(objectId, index, {}) } else { @@ -398,7 +457,7 @@ const ListHandler = { return true }, - deleteProperty(target: Target, index) { + deleteProperty(target: Target, index: any) { const { context, objectId } = target index = parseListIndex(index) const elem = context.get(objectId, index) @@ -411,7 +470,7 @@ const ListHandler = { return true }, - has(target: Target, index) { + has(target: Target, index: any) { const { context, objectId, heads } = target index = parseListIndex(index) if (typeof index === "number") { @@ -420,7 +479,7 @@ const ListHandler = { return index === "length" }, - getOwnPropertyDescriptor(target: Target, index) { + getOwnPropertyDescriptor(target: Target, index: any) { const { context, objectId, heads } = target if (index === "length") @@ -434,7 +493,7 @@ const ListHandler = { return { configurable: true, enumerable: true, value } }, - getPrototypeOf(target) { + getPrototypeOf(target: Target) { return Object.getPrototypeOf(target) }, ownKeys(/*target*/): string[] { @@ -476,14 +535,14 @@ const TextHandler = Object.assign({}, ListHandler, { }, }) -export function mapProxy( +export function mapProxy( context: Automerge, objectId: ObjID, textV2: boolean, path?: Prop[], readonly?: boolean, heads?: Heads -): MapValue { +): MapValueType { const target: Target = { context, objectId, @@ -496,19 +555,19 @@ export function mapProxy( } const proxied = {} Object.assign(proxied, target) - let result = new Proxy(proxied, MapHandler) + const result = new Proxy(proxied, MapHandler) // conversion through unknown is necessary because the types are so different - return result as unknown as MapValue + return result as unknown as MapValueType } -export function listProxy( +export function listProxy( context: Automerge, objectId: ObjID, textV2: boolean, path?: Prop[], readonly?: boolean, heads?: Heads -): ListValue { +): ListValueType { const target: Target = { context, objectId, @@ -521,17 +580,22 @@ export 
function listProxy( } const proxied = [] Object.assign(proxied, target) + // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore return new Proxy(proxied, ListHandler) as unknown as ListValue } +interface TextProxy extends Text { + splice: (index: any, del: any, ...vals: any[]) => void +} + export function textProxy( context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads -): TextValue { +): TextProxy { const target: Target = { context, objectId, @@ -542,7 +606,9 @@ export function textProxy( cache: {}, textV2: false, } - return new Proxy(target, TextHandler) as unknown as TextValue + const proxied = {} + Object.assign(proxied, target) + return new Proxy(proxied, TextHandler) as unknown as TextProxy } export function rootProxy( @@ -554,10 +620,10 @@ export function rootProxy( return mapProxy(context, "_root", textV2, [], !!readonly) } -function listMethods(target: Target) { +function listMethods(target: T) { const { context, objectId, path, readonly, frozen, heads, textV2 } = target const methods = { - deleteAt(index, numDelete) { + deleteAt(index: number, numDelete: number) { if (typeof numDelete === "number") { context.splice(objectId, index, numDelete) } else { @@ -572,8 +638,20 @@ function listMethods(target: Target) { start = parseListIndex(start || 0) end = parseListIndex(end || length) for (let i = start; i < Math.min(end, length); i++) { - if (datatype === "text" || datatype === "list" || datatype === "map") { + if (datatype === "list" || datatype === "map") { context.putObject(objectId, i, value) + } else if (datatype === "text") { + if (textV2) { + assertString(value) + context.putObject(objectId, i, value) + } else { + assertText(value) + const text = context.putObject(objectId, i, "") + const proxyText = textProxy(context, text, [...path, i], readonly) + for (let i = 0; i < value.length; i++) { + proxyText[i] = value.get(i) + } + } } else { context.put(objectId, i, value, datatype) } @@ -581,7 +659,7 @@ 
function listMethods(target: Target) { return this }, - indexOf(o, start = 0) { + indexOf(o: any, start = 0) { const length = context.length(objectId) for (let i = start; i < length; i++) { const value = context.getWithType(objectId, i, heads) @@ -592,7 +670,7 @@ function listMethods(target: Target) { return -1 }, - insertAt(index, ...values) { + insertAt(index: number, ...values: any[]) { this.splice(index, 0, ...values) return this }, @@ -607,7 +685,7 @@ function listMethods(target: Target) { return last }, - push(...values) { + push(...values: any[]) { const len = context.length(objectId) this.splice(len, 0, ...values) return context.length(objectId) @@ -620,7 +698,7 @@ function listMethods(target: Target) { return first }, - splice(index, del, ...vals) { + splice(index: any, del: any, ...vals: any[]) { index = parseListIndex(index) del = parseListIndex(del) for (const val of vals) { @@ -638,9 +716,9 @@ function listMethods(target: Target) { "Sequence object cannot be modified outside of a change block" ) } - const result: AutomergeValue[] = [] + const result: ValueType[] = [] for (let i = 0; i < del; i++) { - const value = valueAt(target, index) + const value = valueAt(target, index) if (value !== undefined) { result.push(value) } @@ -663,6 +741,7 @@ function listMethods(target: Target) { } case "text": { if (textV2) { + assertString(value) context.insertObject(objectId, index, value) } else { const text = context.insertObject(objectId, index, "") @@ -698,7 +777,7 @@ function listMethods(target: Target) { return result }, - unshift(...values) { + unshift(...values: any) { this.splice(0, 0, ...values) return context.length(objectId) }, @@ -749,11 +828,11 @@ function listMethods(target: Target) { return iterator }, - toArray(): AutomergeValue[] { - const list: AutomergeValue = [] - let value + toArray(): ValueType[] { + const list: Array> = [] + let value: ValueType | undefined do { - value = valueAt(target, list.length) + value = valueAt(target, list.length) if 
(value !== undefined) { list.push(value) } @@ -762,7 +841,7 @@ function listMethods(target: Target) { return list }, - map(f: (AutomergeValue, number) => T): T[] { + map(f: (_a: ValueType, _n: number) => U): U[] { return this.toArray().map(f) }, @@ -774,24 +853,26 @@ function listMethods(target: Target) { return this.toArray().toLocaleString() }, - forEach(f: (AutomergeValue, number) => undefined) { + forEach(f: (_a: ValueType, _n: number) => undefined) { return this.toArray().forEach(f) }, // todo: real concat function is different - concat(other: AutomergeValue[]): AutomergeValue[] { + concat(other: ValueType[]): ValueType[] { return this.toArray().concat(other) }, - every(f: (AutomergeValue, number) => boolean): boolean { + every(f: (_a: ValueType, _n: number) => boolean): boolean { return this.toArray().every(f) }, - filter(f: (AutomergeValue, number) => boolean): AutomergeValue[] { + filter(f: (_a: ValueType, _n: number) => boolean): ValueType[] { return this.toArray().filter(f) }, - find(f: (AutomergeValue, number) => boolean): AutomergeValue | undefined { + find( + f: (_a: ValueType, _n: number) => boolean + ): ValueType | undefined { let index = 0 for (const v of this) { if (f(v, index)) { @@ -801,7 +882,7 @@ function listMethods(target: Target) { } }, - findIndex(f: (AutomergeValue, number) => boolean): number { + findIndex(f: (_a: ValueType, _n: number) => boolean): number { let index = 0 for (const v of this) { if (f(v, index)) { @@ -812,7 +893,7 @@ function listMethods(target: Target) { return -1 }, - includes(elem: AutomergeValue): boolean { + includes(elem: ValueType): boolean { return this.find(e => e === elem) !== undefined }, @@ -820,29 +901,30 @@ function listMethods(target: Target) { return this.toArray().join(sep) }, - // todo: remove the any - reduce(f: (any, AutomergeValue) => T, initalValue?: T): T | undefined { - return this.toArray().reduce(f, initalValue) + reduce( + f: (acc: U, currentValue: ValueType) => U, + initialValue: U + ): U | 
undefined { + return this.toArray().reduce(f, initialValue) }, - // todo: remove the any - reduceRight( - f: (any, AutomergeValue) => T, - initalValue?: T - ): T | undefined { - return this.toArray().reduceRight(f, initalValue) + reduceRight( + f: (acc: U, item: ValueType) => U, + initialValue: U + ): U | undefined { + return this.toArray().reduceRight(f, initialValue) }, - lastIndexOf(search: AutomergeValue, fromIndex = +Infinity): number { + lastIndexOf(search: ValueType, fromIndex = +Infinity): number { // this can be faster return this.toArray().lastIndexOf(search, fromIndex) }, - slice(index?: number, num?: number): AutomergeValue[] { + slice(index?: number, num?: number): ValueType[] { return this.toArray().slice(index, num) }, - some(f: (AutomergeValue, number) => boolean): boolean { + some(f: (v: ValueType, i: number) => boolean): boolean { let index = 0 for (const v of this) { if (f(v, index)) { @@ -869,7 +951,7 @@ function listMethods(target: Target) { function textMethods(target: Target) { const { context, objectId, heads } = target const methods = { - set(index: number, value) { + set(index: number, value: any) { return (this[index] = value) }, get(index: number): AutomergeValue { @@ -902,10 +984,22 @@ function textMethods(target: Target) { toJSON(): string { return this.toString() }, - indexOf(o, start = 0) { + indexOf(o: any, start = 0) { const text = context.text(objectId) return text.indexOf(o, start) }, } return methods } + +function assertText(value: Text | string): asserts value is Text { + if (!(value instanceof Text)) { + throw new Error("value was not a Text instance") + } +} + +function assertString(value: Text | string): asserts value is string { + if (typeof value !== "string") { + throw new Error("value was not a string") + } +} diff --git a/javascript/src/stable.ts b/javascript/src/stable.ts index 9db4d0e2..3b328240 100644 --- a/javascript/src/stable.ts +++ b/javascript/src/stable.ts @@ -1,7 +1,7 @@ /** @hidden **/ export { /** @hidden */ 
uuid } from "./uuid" -import { rootProxy, listProxy, mapProxy, textProxy } from "./proxies" +import { rootProxy } from "./proxies" import { STATE } from "./constants" import { @@ -20,10 +20,10 @@ export { type Patch, type PatchCallback, type ScalarValue, - Text, } from "./types" import { Text } from "./text" +export { Text } from "./text" import type { API, @@ -54,6 +54,8 @@ import { RawString } from "./raw_string" import { _state, _is_proxy, _trace, _obj } from "./internal_state" +import { stableConflictAt } from "./conflicts" + /** Options passed to {@link change}, and {@link emptyChange} * @typeParam T - The type of value contained in the document */ @@ -71,13 +73,36 @@ export type ChangeOptions = { */ export type ApplyOptions = { patchCallback?: PatchCallback } +/** + * A List is an extended Array that adds the two helper methods `deleteAt` and `insertAt`. + */ +export interface List extends Array { + insertAt(index: number, ...args: T[]): List + deleteAt(index: number, numDelete?: number): List +} + +/** + * To extend an arbitrary type, we have to turn any arrays that are part of the type's definition into Lists. + * So we recurse through the properties of T, turning any Arrays we find into Lists. + */ +export type Extend = + // is it an array? make it a list (we recursively extend the type of the array's elements as well) + T extends Array + ? List> + : // is it an object? recursively extend all of its properties + // eslint-disable-next-line @typescript-eslint/ban-types + T extends Object + ? 
{ [P in keyof T]: Extend } + : // otherwise leave the type alone + T + /** * Function which is called by {@link change} when making changes to a `Doc` * @typeParam T - The type of value contained in the document * * This function may mutate `doc` */ -export type ChangeFn = (doc: T) => void +export type ChangeFn = (doc: Extend) => void /** @hidden **/ export interface State { @@ -136,11 +161,12 @@ export function init(_opts?: ActorId | InitOptions): Doc { const handle = ApiHandler.create(opts.enableTextV2 || false, opts.actor) handle.enablePatches(true) handle.enableFreeze(!!opts.freeze) - handle.registerDatatype("counter", (n: any) => new Counter(n)) - let textV2 = opts.enableTextV2 || false + handle.registerDatatype("counter", (n: number) => new Counter(n)) + const textV2 = opts.enableTextV2 || false if (textV2) { handle.registerDatatype("str", (n: string) => new RawString(n)) } else { + // eslint-disable-next-line @typescript-eslint/no-explicit-any handle.registerDatatype("text", (n: any) => new Text(n)) } const doc = handle.materialize("/", undefined, { @@ -204,7 +230,7 @@ export function clone( // `change` uses the presence of state.heads to determine if we are in a view // set it to undefined to indicate that this is a full fat document - const { heads: oldHeads, ...stateSansHeads } = state + const { heads: _oldHeads, ...stateSansHeads } = state return handle.applyPatches(doc, { ...stateSansHeads, handle }) } @@ -343,7 +369,7 @@ function _change( try { state.heads = heads const root: T = rootProxy(state.handle, state.textV2) - callback(root) + callback(root as Extend) if (state.handle.pendingOps() === 0) { state.heads = undefined return doc @@ -541,62 +567,6 @@ export function getActorId(doc: Doc): ActorId { */ type Conflicts = { [key: string]: AutomergeValue } -function conflictAt( - context: Automerge, - objectId: ObjID, - prop: Prop, - textV2: boolean -): Conflicts | undefined { - const values = context.getAll(objectId, prop) - if (values.length <= 1) { - 
return - } - const result: Conflicts = {} - for (const fullVal of values) { - switch (fullVal[0]) { - case "map": - result[fullVal[1]] = mapProxy(context, fullVal[1], textV2, [prop], true) - break - case "list": - result[fullVal[1]] = listProxy( - context, - fullVal[1], - textV2, - [prop], - true - ) - break - case "text": - if (textV2) { - result[fullVal[1]] = context.text(fullVal[1]) - } else { - result[fullVal[1]] = textProxy(context, objectId, [prop], true) - } - break - //case "table": - //case "cursor": - case "str": - case "uint": - case "int": - case "f64": - case "boolean": - case "bytes": - case "null": - result[fullVal[2]] = fullVal[1] - break - case "counter": - result[fullVal[2]] = new Counter(fullVal[1]) - break - case "timestamp": - result[fullVal[2]] = new Date(fullVal[1]) - break - default: - throw RangeError(`datatype ${fullVal[0]} unimplemented`) - } - } - return result -} - /** * Get the conflicts associated with a property * @@ -646,9 +616,12 @@ export function getConflicts( prop: Prop ): Conflicts | undefined { const state = _state(doc, false) + if (state.textV2) { + throw new Error("use unstable.getConflicts for an unstable document") + } const objectId = _obj(doc) if (objectId != null) { - return conflictAt(state.handle, objectId, prop, state.textV2) + return stableConflictAt(state.handle, objectId, prop) } else { return undefined } @@ -672,6 +645,7 @@ export function getLastLocalChange(doc: Doc): Change | undefined { * This is useful to determine if something is actually an automerge document, * if `doc` is not an automerge document this will return null. 
*/ +// eslint-disable-next-line @typescript-eslint/no-explicit-any export function getObjectId(doc: any, prop?: Prop): ObjID | null { if (prop) { const state = _state(doc, false) diff --git a/javascript/src/text.ts b/javascript/src/text.ts index f87af891..b01bd7db 100644 --- a/javascript/src/text.ts +++ b/javascript/src/text.ts @@ -3,9 +3,12 @@ import { TEXT, STATE } from "./constants" import type { InternalState } from "./internal_state" export class Text { + //eslint-disable-next-line @typescript-eslint/no-explicit-any elems: Array str: string | undefined + //eslint-disable-next-line @typescript-eslint/no-explicit-any spans: Array | undefined; + //eslint-disable-next-line @typescript-eslint/no-explicit-any [STATE]?: InternalState constructor(text?: string | string[] | Value[]) { @@ -25,6 +28,7 @@ export class Text { return this.elems.length } + //eslint-disable-next-line @typescript-eslint/no-explicit-any get(index: number): any { return this.elems[index] } @@ -73,7 +77,7 @@ export class Text { * For example, the value `['a', 'b', {x: 3}, 'c', 'd']` has spans: * `=> ['ab', {x: 3}, 'cd']` */ - toSpans(): Array { + toSpans(): Array { if (!this.spans) { this.spans = [] let chars = "" @@ -118,7 +122,7 @@ export class Text { /** * Inserts new list items `values` starting at position `index`. 
*/ - insertAt(index: number, ...values: Array) { + insertAt(index: number, ...values: Array) { if (this[STATE]) { throw new RangeError( "object cannot be modified outside of a change block" @@ -140,7 +144,7 @@ export class Text { this.elems.splice(index, numDelete) } - map(callback: (e: Value | Object) => T) { + map(callback: (e: Value | object) => T) { this.elems.map(callback) } diff --git a/javascript/src/types.ts b/javascript/src/types.ts index e3cb81f8..beb5cf70 100644 --- a/javascript/src/types.ts +++ b/javascript/src/types.ts @@ -1,4 +1,5 @@ export { Text } from "./text" +import { Text } from "./text" export { Counter } from "./counter" export { Int, Uint, Float64 } from "./numbers" @@ -10,9 +11,9 @@ export type AutomergeValue = | ScalarValue | { [key: string]: AutomergeValue } | Array + | Text export type MapValue = { [key: string]: AutomergeValue } export type ListValue = Array -export type TextValue = Array export type ScalarValue = | string | number diff --git a/javascript/src/unstable.ts b/javascript/src/unstable.ts index 21b5be08..7c73afb9 100644 --- a/javascript/src/unstable.ts +++ b/javascript/src/unstable.ts @@ -22,9 +22,9 @@ * This leads to the following differences from `stable`: * * * There is no `unstable.Text` class, all strings are text objects - * * Reading strings in a `future` document is the same as reading any other + * * Reading strings in an `unstable` document is the same as reading any other * javascript string - * * To modify strings in a `future` document use {@link splice} + * * To modify strings in an `unstable` document use {@link splice} * * The {@link AutomergeValue} type does not include the {@link Text} * class but the {@link RawString} class is included in the {@link ScalarValue} * type @@ -35,7 +35,6 @@ * * @module */ -import { Counter } from "./types" export { Counter, @@ -45,27 +44,14 @@ export { Float64, type Patch, type PatchCallback, -} from "./types" + type AutomergeValue, + type ScalarValue, +} from "./unstable_types" 
import type { PatchCallback } from "./stable" -export type AutomergeValue = - | ScalarValue - | { [key: string]: AutomergeValue } - | Array -export type MapValue = { [key: string]: AutomergeValue } -export type ListValue = Array -export type ScalarValue = - | string - | number - | null - | boolean - | Date - | Counter - | Uint8Array - | RawString - -export type Conflicts = { [key: string]: AutomergeValue } +import { type UnstableConflicts as Conflicts } from "./conflicts" +import { unstableConflictAt } from "./conflicts" export type { PutPatch, @@ -125,7 +111,6 @@ export { RawString } from "./raw_string" export const getBackend = stable.getBackend import { _is_proxy, _state, _obj } from "./internal_state" -import { RawString } from "./raw_string" /** * Create a new automerge document @@ -137,7 +122,7 @@ import { RawString } from "./raw_string" * random actor ID */ export function init(_opts?: ActorId | InitOptions): Doc { - let opts = importOpts(_opts) + const opts = importOpts(_opts) opts.enableTextV2 = true return stable.init(opts) } @@ -161,7 +146,7 @@ export function clone( doc: Doc, _opts?: ActorId | InitOptions ): Doc { - let opts = importOpts(_opts) + const opts = importOpts(_opts) opts.enableTextV2 = true return stable.clone(doc, opts) } @@ -296,6 +281,14 @@ export function getConflicts( doc: Doc, prop: stable.Prop ): Conflicts | undefined { - // this function only exists to get the types to line up with future.AutomergeValue - return stable.getConflicts(doc, prop) + const state = _state(doc, false) + if (!state.textV2) { + throw new Error("use getConflicts for a stable document") + } + const objectId = _obj(doc) + if (objectId != null) { + return unstableConflictAt(state.handle, objectId, prop) + } else { + return undefined + } } diff --git a/javascript/src/unstable_types.ts b/javascript/src/unstable_types.ts new file mode 100644 index 00000000..071e2cc4 --- /dev/null +++ b/javascript/src/unstable_types.ts @@ -0,0 +1,30 @@ +import { Counter } from 
"./types" + +export { + Counter, + type Doc, + Int, + Uint, + Float64, + type Patch, + type PatchCallback, +} from "./types" + +import { RawString } from "./raw_string" +export { RawString } from "./raw_string" + +export type AutomergeValue = + | ScalarValue + | { [key: string]: AutomergeValue } + | Array +export type MapValue = { [key: string]: AutomergeValue } +export type ListValue = Array +export type ScalarValue = + | string + | number + | null + | boolean + | Date + | Counter + | Uint8Array + | RawString diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts index 90e7a99d..5aa1ac34 100644 --- a/javascript/test/basic_test.ts +++ b/javascript/test/basic_test.ts @@ -267,7 +267,6 @@ describe("Automerge", () => { }) assert.deepEqual(doc5, { list: [2, 1, 9, 10, 3, 11, 12] }) let doc6 = Automerge.change(doc5, d => { - // @ts-ignore d.list.insertAt(3, 100, 101) }) assert.deepEqual(doc6, { list: [2, 1, 9, 100, 101, 10, 3, 11, 12] }) diff --git a/javascript/test/legacy_tests.ts b/javascript/test/legacy_tests.ts index a423b51f..90c731d9 100644 --- a/javascript/test/legacy_tests.ts +++ b/javascript/test/legacy_tests.ts @@ -461,12 +461,12 @@ describe("Automerge", () => { s1 = Automerge.change(s1, "set foo", doc => { doc.foo = "bar" }) - let deleted + let deleted: any s1 = Automerge.change(s1, "del foo", doc => { deleted = delete doc.foo }) assert.strictEqual(deleted, true) - let deleted2 + let deleted2: any assert.doesNotThrow(() => { s1 = Automerge.change(s1, "del baz", doc => { deleted2 = delete doc.baz @@ -515,7 +515,7 @@ describe("Automerge", () => { s1 = Automerge.change(s1, doc => { doc.nested = {} }) - let id = Automerge.getObjectId(s1.nested) + Automerge.getObjectId(s1.nested) assert.strictEqual( OPID_PATTERN.test(Automerge.getObjectId(s1.nested)!), true @@ -975,6 +975,7 @@ describe("Automerge", () => { it("should allow adding and removing list elements in the same change callback", () => { let s1 = Automerge.change( Automerge.init<{ noodles: 
Array }>(), + // @ts-ignore doc => (doc.noodles = []) ) s1 = Automerge.change(s1, doc => { diff --git a/javascript/test/stable_unstable_interop.ts b/javascript/test/stable_unstable_interop.ts index 2f58c256..dc57f338 100644 --- a/javascript/test/stable_unstable_interop.ts +++ b/javascript/test/stable_unstable_interop.ts @@ -38,4 +38,62 @@ describe("stable/unstable interop", () => { stableDoc = unstable.merge(stableDoc, unstableDoc) assert.deepStrictEqual(stableDoc.text, "abc") }) + + it("should show conflicts on text objects", () => { + let doc1 = stable.from({ text: new stable.Text("abc") }, "bb") + let doc2 = stable.from({ text: new stable.Text("def") }, "aa") + doc1 = stable.merge(doc1, doc2) + let conflicts = stable.getConflicts(doc1, "text")! + assert.equal(conflicts["1@bb"]!.toString(), "abc") + assert.equal(conflicts["1@aa"]!.toString(), "def") + + let unstableDoc = unstable.init() + unstableDoc = unstable.merge(unstableDoc, doc1) + let conflicts2 = unstable.getConflicts(unstableDoc, "text")! 
+ assert.equal(conflicts2["1@bb"]!.toString(), "abc") + assert.equal(conflicts2["1@aa"]!.toString(), "def") + }) + + it("should allow filling a list with text in stable", () => { + let doc = stable.from<{ list: Array }>({ + list: [null, null, null], + }) + doc = stable.change(doc, doc => { + doc.list.fill(new stable.Text("abc"), 0, 3) + }) + assert.deepStrictEqual(doc.list, [ + new stable.Text("abc"), + new stable.Text("abc"), + new stable.Text("abc"), + ]) + }) + + it("should allow filling a list with text in unstable", () => { + let doc = unstable.from<{ list: Array }>({ + list: [null, null, null], + }) + doc = stable.change(doc, doc => { + doc.list.fill("abc", 0, 3) + }) + assert.deepStrictEqual(doc.list, ["abc", "abc", "abc"]) + }) + + it("should allow splicing text into a list on stable", () => { + let doc = stable.from<{ list: Array }>({ list: [] }) + doc = stable.change(doc, doc => { + doc.list.splice(0, 0, new stable.Text("abc"), new stable.Text("def")) + }) + assert.deepStrictEqual(doc.list, [ + new stable.Text("abc"), + new stable.Text("def"), + ]) + }) + + it("should allow splicing text into a list on unstable", () => { + let doc = unstable.from<{ list: Array }>({ list: [] }) + doc = unstable.change(doc, doc => { + doc.list.splice(0, 0, "abc", "def") + }) + assert.deepStrictEqual(doc.list, ["abc", "def"]) + }) }) From 58a7a06b754f58bee961012a96485634c9efa854 Mon Sep 17 00:00:00 2001 From: alexjg Date: Fri, 27 Jan 2023 20:27:11 +0000 Subject: [PATCH 702/730] @automerge/automerge-wasm@0.1.23 and @automerge/automerge@2.0.1-alpha.6 (#509) --- javascript/package.json | 4 ++-- rust/automerge-wasm/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/javascript/package.json b/javascript/package.json index caeeb647..05358703 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.1-alpha.5", + "version": "2.0.1-alpha.6", "description": "Javascript 
implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", @@ -47,7 +47,7 @@ "typescript": "^4.9.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.22", + "@automerge/automerge-wasm": "0.1.23", "uuid": "^9.0.0" } } diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 0f133468..cce3199f 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.22", + "version": "0.1.23", "license": "MIT", "files": [ "README.md", From 9b6a3c8691de47f1751c916776555db18e012f80 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sat, 28 Jan 2023 09:32:21 +0000 Subject: [PATCH 703/730] Update README --- README.md | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index d11e9d1c..94e1bbb8 100644 --- a/README.md +++ b/README.md @@ -42,9 +42,10 @@ In general we try and respect semver. ### JavaScript -An alpha release of the javascript package is currently available as -`@automerge/automerge@2.0.0-alpha.n` where `n` is an integer. We are gathering -feedback on the API and looking to release a `2.0.0` in the next few weeks. +A stable release of the javascript package is currently available as +`@automerge/automerge@2.0.0` where. pre-release verisions of the `2.0.1` are +available as `2.0.1-alpha.n`. `2.0.1*` packages are also available for Deno at +https://deno.land/x/automerge ### Rust @@ -52,7 +53,10 @@ The rust codebase is currently oriented around producing a performant backend for the Javascript wrapper and as such the API for Rust code is low level and not well documented. 
We will be returning to this over the next few months but for now you will need to be comfortable reading the tests and asking questions -to figure out how to use it. +to figure out how to use it. If you are looking to build rust applications which +use automerge you may want to look into +[autosurgeon](https://github.com/alexjg/autosurgeon) + ## Repository Organisation From 89a0866272502f6360221d6585e93990f932de24 Mon Sep 17 00:00:00 2001 From: alexjg Date: Sat, 28 Jan 2023 21:22:45 +0000 Subject: [PATCH 704/730] @automerge/automerge@2.0.1 (#510) --- javascript/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/javascript/package.json b/javascript/package.json index 05358703..017c5a54 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.1-alpha.6", + "version": "2.0.1", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", From 08801ab580e31df472f5c33858aa85b94d99d0fe Mon Sep 17 00:00:00 2001 From: alexjg Date: Mon, 30 Jan 2023 19:37:03 +0000 Subject: [PATCH 705/730] automerge-rs: Introduce ReadDoc and SyncDoc traits and add documentation (#511) The Rust API has so far grown somewhat organically driven by the needs of the javascript implementation. This has led to an API which is quite awkward and unfamiliar to Rust programmers. Additionally there is no documentation to speak of. This commit is the first movement towards cleaning things up a bit. We touch a lot of files but the changes are all very mechanical. We introduce a few traits to abstract over the common operations between `Automerge` and `AutoCommit`, and add a whole bunch of documentation. * Add a `ReadDoc` trait to describe methods which read value from a document. 
make `Transactable` extend `ReadDoc` * Add a `SyncDoc` trait to describe methods necessary for synchronizing documents. * Put the `SyncDoc` implementation for `AutoCommit` behind `AutoCommit::sync` to ensure that any open transactions are closed before taking part in the sync protocol * Split `OpObserver` into two traits: `OpObserver` + `BranchableObserver`. `BranchableObserver` captures the methods which are only needed for observing transactions. * Add a whole bunch of documentation. The main changes Rust users will need to make is: * Import the `ReadDoc` trait wherever you are using the methods which have been moved to it. Optionally change concrete paramters on functions to `ReadDoc` constraints. * Likewise import the `SyncDoc` trait wherever you are doing synchronisation work * If you are using the `AutoCommit::*_sync_message` methods you will need to add a call to `AutoCommit::sync()` first. E.g. `doc.generate_sync_message` becomes `doc.sync().generate_sync_message` * If you have an implementation of `OpObserver` which you are using in an `AutoCommit` then split it into an implementation of `OpObserver` and `BranchableObserver` --- rust/automerge-c/src/doc.rs | 9 +- rust/automerge-c/src/doc/list.rs | 1 + rust/automerge-c/src/doc/map.rs | 1 + rust/automerge-cli/src/export.rs | 1 + rust/automerge-test/src/lib.rs | 21 +- rust/automerge-wasm/src/interop.rs | 2 +- rust/automerge-wasm/src/lib.rs | 8 +- rust/automerge-wasm/src/observer.rs | 42 +- rust/automerge/Cargo.toml | 1 + rust/automerge/README.md | 5 + rust/automerge/benches/range.rs | 18 +- rust/automerge/benches/sync.rs | 6 +- rust/automerge/examples/quickstart.rs | 2 +- rust/automerge/examples/watch.rs | 1 + rust/automerge/src/autocommit.rs | 286 +++++-- rust/automerge/src/automerge.rs | 810 +++++++++--------- rust/automerge/src/automerge/tests.rs | 2 +- rust/automerge/src/autoserde.rs | 45 +- rust/automerge/src/exid.rs | 9 +- rust/automerge/src/keys.rs | 4 + rust/automerge/src/keys_at.rs | 4 + 
rust/automerge/src/lib.rs | 193 ++++- rust/automerge/src/list_range.rs | 3 + rust/automerge/src/list_range_at.rs | 3 + rust/automerge/src/map_range.rs | 3 + rust/automerge/src/map_range_at.rs | 3 + rust/automerge/src/op_observer.rs | 135 +-- rust/automerge/src/op_observer/compose.rs | 102 +++ rust/automerge/src/parents.rs | 31 +- rust/automerge/src/read.rs | 199 +++++ rust/automerge/src/sync.rs | 278 ++++-- rust/automerge/src/sync/state.rs | 10 + rust/automerge/src/transaction/inner.rs | 2 +- .../src/transaction/manual_transaction.rs | 199 +++-- rust/automerge/src/transaction/observation.rs | 14 +- .../automerge/src/transaction/transactable.rs | 109 +-- rust/automerge/src/types.rs | 19 + rust/automerge/src/value.rs | 10 +- rust/automerge/src/values.rs | 9 +- rust/automerge/tests/test.rs | 72 +- rust/edit-trace/src/main.rs | 1 + 41 files changed, 1720 insertions(+), 953 deletions(-) create mode 100644 rust/automerge/README.md create mode 100644 rust/automerge/src/op_observer/compose.rs create mode 100644 rust/automerge/src/read.rs diff --git a/rust/automerge-c/src/doc.rs b/rust/automerge-c/src/doc.rs index 58625798..f02c01bf 100644 --- a/rust/automerge-c/src/doc.rs +++ b/rust/automerge-c/src/doc.rs @@ -1,5 +1,7 @@ use automerge as am; +use automerge::sync::SyncDoc; use automerge::transaction::{CommitOptions, Transactable}; +use automerge::ReadDoc; use std::ops::{Deref, DerefMut}; use crate::actor_id::{to_actor_id, AMactorId}; @@ -291,7 +293,7 @@ pub unsafe extern "C" fn AMgenerateSyncMessage( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let sync_state = to_sync_state_mut!(sync_state); - to_result(doc.generate_sync_message(sync_state.as_mut())) + to_result(doc.sync().generate_sync_message(sync_state.as_mut())) } /// \memberof AMdoc @@ -708,7 +710,10 @@ pub unsafe extern "C" fn AMreceiveSyncMessage( let doc = to_doc_mut!(doc); let sync_state = to_sync_state_mut!(sync_state); let sync_message = to_sync_message!(sync_message); - 
to_result(doc.receive_sync_message(sync_state.as_mut(), sync_message.as_ref().clone())) + to_result( + doc.sync() + .receive_sync_message(sync_state.as_mut(), sync_message.as_ref().clone()), + ) } /// \memberof AMdoc diff --git a/rust/automerge-c/src/doc/list.rs b/rust/automerge-c/src/doc/list.rs index 48f26c21..6bcdeabf 100644 --- a/rust/automerge-c/src/doc/list.rs +++ b/rust/automerge-c/src/doc/list.rs @@ -1,5 +1,6 @@ use automerge as am; use automerge::transaction::Transactable; +use automerge::ReadDoc; use crate::byte_span::{to_str, AMbyteSpan}; use crate::change_hashes::AMchangeHashes; diff --git a/rust/automerge-c/src/doc/map.rs b/rust/automerge-c/src/doc/map.rs index a5801323..86c6b4a2 100644 --- a/rust/automerge-c/src/doc/map.rs +++ b/rust/automerge-c/src/doc/map.rs @@ -1,5 +1,6 @@ use automerge as am; use automerge::transaction::Transactable; +use automerge::ReadDoc; use crate::byte_span::{to_str, AMbyteSpan}; use crate::change_hashes::AMchangeHashes; diff --git a/rust/automerge-cli/src/export.rs b/rust/automerge-cli/src/export.rs index 45fd7b3b..45f39101 100644 --- a/rust/automerge-cli/src/export.rs +++ b/rust/automerge-cli/src/export.rs @@ -1,5 +1,6 @@ use anyhow::Result; use automerge as am; +use automerge::ReadDoc; use crate::{color_json::print_colored_json, SkipVerifyFlag}; diff --git a/rust/automerge-test/src/lib.rs b/rust/automerge-test/src/lib.rs index b2af72e1..a1d4ea89 100644 --- a/rust/automerge-test/src/lib.rs +++ b/rust/automerge-test/src/lib.rs @@ -4,6 +4,8 @@ use std::{ hash::Hash, }; +use automerge::ReadDoc; + use serde::ser::{SerializeMap, SerializeSeq}; pub fn new_doc() -> automerge::AutoCommit { @@ -48,7 +50,7 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// let title = doc.put(todo, "title", "water plants").unwrap(); /// /// assert_doc!( -/// &doc.document(), +/// &doc, /// map!{ /// "todos" => { /// list![ @@ -67,6 +69,7 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// ```rust /// # 
use automerge_test::{assert_doc, map}; /// # use automerge::transaction::Transactable; +/// # use automerge::ReadDoc; /// /// let mut doc1 = automerge::AutoCommit::new(); /// let mut doc2 = automerge::AutoCommit::new(); @@ -74,7 +77,7 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// doc2.put(automerge::ROOT, "field", "two").unwrap(); /// doc1.merge(&mut doc2); /// assert_doc!( -/// &doc1.document(), +/// doc1.document(), /// map!{ /// "field" => { /// "one", @@ -330,12 +333,12 @@ impl serde::Serialize for RealizedObject { } } -pub fn realize(doc: &automerge::Automerge) -> RealizedObject { +pub fn realize(doc: &R) -> RealizedObject { realize_obj(doc, &automerge::ROOT, automerge::ObjType::Map) } -pub fn realize_prop>( - doc: &automerge::Automerge, +pub fn realize_prop>( + doc: &R, obj_id: &automerge::ObjId, prop: P, ) -> RealizedObject { @@ -346,8 +349,8 @@ pub fn realize_prop>( } } -pub fn realize_obj( - doc: &automerge::Automerge, +pub fn realize_obj( + doc: &R, obj_id: &automerge::ObjId, objtype: automerge::ObjType, ) -> RealizedObject { @@ -370,8 +373,8 @@ pub fn realize_obj( } } -fn realize_values>( - doc: &automerge::Automerge, +fn realize_values>( + doc: &R, obj_id: &automerge::ObjId, key: K, ) -> BTreeSet { diff --git a/rust/automerge-wasm/src/interop.rs b/rust/automerge-wasm/src/interop.rs index 2881209a..1546ff10 100644 --- a/rust/automerge-wasm/src/interop.rs +++ b/rust/automerge-wasm/src/interop.rs @@ -2,7 +2,7 @@ use crate::error::InsertObject; use crate::value::Datatype; use crate::{Automerge, TextRepresentation}; use automerge as am; -use automerge::transaction::Transactable; +use automerge::ReadDoc; use automerge::ROOT; use automerge::{Change, ChangeHash, ObjType, Prop}; use js_sys::{Array, Function, JsString, Object, Reflect, Symbol, Uint8Array}; diff --git a/rust/automerge-wasm/src/lib.rs b/rust/automerge-wasm/src/lib.rs index d6ccc8c8..b53bf3b9 100644 --- a/rust/automerge-wasm/src/lib.rs +++ 
b/rust/automerge-wasm/src/lib.rs @@ -29,7 +29,7 @@ use am::transaction::CommitOptions; use am::transaction::{Observed, Transactable, UnObserved}; use am::ScalarValue; use automerge as am; -use automerge::{Change, ObjId, Prop, TextEncoding, Value, ROOT}; +use automerge::{sync::SyncDoc, Change, ObjId, Prop, ReadDoc, TextEncoding, Value, ROOT}; use js_sys::{Array, Function, Object, Uint8Array}; use serde::ser::Serialize; use std::borrow::Cow; @@ -746,13 +746,15 @@ impl Automerge { ) -> Result<(), error::ReceiveSyncMessage> { let message = message.to_vec(); let message = am::sync::Message::decode(message.as_slice())?; - self.doc.receive_sync_message(&mut state.0, message)?; + self.doc + .sync() + .receive_sync_message(&mut state.0, message)?; Ok(()) } #[wasm_bindgen(js_name = generateSyncMessage)] pub fn generate_sync_message(&mut self, state: &mut SyncState) -> JsValue { - if let Some(message) = self.doc.generate_sync_message(&mut state.0) { + if let Some(message) = self.doc.sync().generate_sync_message(&mut state.0) { Uint8Array::from(message.encode().as_slice()).into() } else { JsValue::null() diff --git a/rust/automerge-wasm/src/observer.rs b/rust/automerge-wasm/src/observer.rs index 83516597..c0b462a6 100644 --- a/rust/automerge-wasm/src/observer.rs +++ b/rust/automerge-wasm/src/observer.rs @@ -6,7 +6,7 @@ use crate::{ interop::{self, alloc, js_set}, TextRepresentation, }; -use automerge::{Automerge, ObjId, OpObserver, Prop, ScalarValue, SequenceTree, Value}; +use automerge::{ObjId, OpObserver, Prop, ReadDoc, ScalarValue, SequenceTree, Value}; use js_sys::{Array, Object}; use wasm_bindgen::prelude::*; @@ -30,9 +30,9 @@ impl Observer { old_enabled } - fn get_path(&mut self, doc: &Automerge, obj: &ObjId) -> Option> { + fn get_path(&mut self, doc: &R, obj: &ObjId) -> Option> { match doc.parents(obj) { - Ok(mut parents) => parents.visible_path(), + Ok(parents) => parents.visible_path(), Err(e) => { automerge::log!("error generating patch : {:?}", e); None @@ -98,9 
+98,9 @@ pub(crate) enum Patch { } impl OpObserver for Observer { - fn insert( + fn insert( &mut self, - doc: &Automerge, + doc: &R, obj: ObjId, index: usize, tagged_value: (Value<'_>, ObjId), @@ -134,7 +134,7 @@ impl OpObserver for Observer { } } - fn splice_text(&mut self, doc: &Automerge, obj: ObjId, index: usize, value: &str) { + fn splice_text(&mut self, doc: &R, obj: ObjId, index: usize, value: &str) { if self.enabled { if self.text_rep == TextRepresentation::Array { for (i, c) in value.chars().enumerate() { @@ -182,7 +182,7 @@ impl OpObserver for Observer { } } - fn delete_seq(&mut self, doc: &Automerge, obj: ObjId, index: usize, length: usize) { + fn delete_seq(&mut self, doc: &R, obj: ObjId, index: usize, length: usize) { if self.enabled { match self.patches.last_mut() { Some(Patch::SpliceText { @@ -244,7 +244,7 @@ impl OpObserver for Observer { } } - fn delete_map(&mut self, doc: &Automerge, obj: ObjId, key: &str) { + fn delete_map(&mut self, doc: &R, obj: ObjId, key: &str) { if self.enabled { if let Some(path) = self.get_path(doc, &obj) { let patch = Patch::DeleteMap { @@ -257,9 +257,9 @@ impl OpObserver for Observer { } } - fn put( + fn put( &mut self, - doc: &Automerge, + doc: &R, obj: ObjId, prop: Prop, tagged_value: (Value<'_>, ObjId), @@ -290,9 +290,9 @@ impl OpObserver for Observer { } } - fn expose( + fn expose( &mut self, - doc: &Automerge, + doc: &R, obj: ObjId, prop: Prop, tagged_value: (Value<'_>, ObjId), @@ -323,7 +323,13 @@ impl OpObserver for Observer { } } - fn increment(&mut self, doc: &Automerge, obj: ObjId, prop: Prop, tagged_value: (i64, ObjId)) { + fn increment( + &mut self, + doc: &R, + obj: ObjId, + prop: Prop, + tagged_value: (i64, ObjId), + ) { if self.enabled { if let Some(path) = self.get_path(doc, &obj) { let value = tagged_value.0; @@ -337,6 +343,12 @@ impl OpObserver for Observer { } } + fn text_as_seq(&self) -> bool { + self.text_rep == TextRepresentation::Array + } +} + +impl automerge::op_observer::BranchableObserver for 
Observer { fn merge(&mut self, other: &Self) { self.patches.extend_from_slice(other.patches.as_slice()) } @@ -348,10 +360,6 @@ impl OpObserver for Observer { text_rep: self.text_rep, } } - - fn text_as_seq(&self) -> bool { - self.text_rep == TextRepresentation::Array - } } fn prop_to_js(p: &Prop) -> JsValue { diff --git a/rust/automerge/Cargo.toml b/rust/automerge/Cargo.toml index 89b48020..578878ae 100644 --- a/rust/automerge/Cargo.toml +++ b/rust/automerge/Cargo.toml @@ -7,6 +7,7 @@ repository = "https://github.com/automerge/automerge-rs" documentation = "https://automerge.org/automerge-rs/automerge/" rust-version = "1.57.0" description = "A JSON-like data structure (a CRDT) that can be modified concurrently by different users, and merged again automatically" +readme = "./README.md" [features] optree-visualisation = ["dot", "rand"] diff --git a/rust/automerge/README.md b/rust/automerge/README.md new file mode 100644 index 00000000..97dbe4f8 --- /dev/null +++ b/rust/automerge/README.md @@ -0,0 +1,5 @@ +# Automerge + +Automerge is a library of data structures for building collaborative +[local-first](https://www.inkandswitch.com/local-first/) applications. This is +the Rust implementation. 
See [automerge.org](https://automerge.org/) diff --git a/rust/automerge/benches/range.rs b/rust/automerge/benches/range.rs index aec5c293..008ae159 100644 --- a/rust/automerge/benches/range.rs +++ b/rust/automerge/benches/range.rs @@ -1,4 +1,4 @@ -use automerge::{transaction::Transactable, Automerge, ROOT}; +use automerge::{transaction::Transactable, Automerge, ReadDoc, ROOT}; use criterion::{black_box, criterion_group, criterion_main, Criterion}; fn doc(n: u64) -> Automerge { @@ -16,36 +16,20 @@ fn range(doc: &Automerge) { range.for_each(drop); } -fn range_rev(doc: &Automerge) { - let range = doc.values(ROOT).rev(); - range.for_each(drop); -} - fn range_at(doc: &Automerge) { let range = doc.values_at(ROOT, &doc.get_heads()); range.for_each(drop); } -fn range_at_rev(doc: &Automerge) { - let range = doc.values_at(ROOT, &doc.get_heads()).rev(); - range.for_each(drop); -} - fn criterion_benchmark(c: &mut Criterion) { let n = 100_000; let doc = doc(n); c.bench_function(&format!("range {}", n), |b| { b.iter(|| range(black_box(&doc))) }); - c.bench_function(&format!("range rev {}", n), |b| { - b.iter(|| range_rev(black_box(&doc))) - }); c.bench_function(&format!("range_at {}", n), |b| { b.iter(|| range_at(black_box(&doc))) }); - c.bench_function(&format!("range_at rev {}", n), |b| { - b.iter(|| range_at_rev(black_box(&doc))) - }); } criterion_group!(benches, criterion_benchmark); diff --git a/rust/automerge/benches/sync.rs b/rust/automerge/benches/sync.rs index 483fd2b4..13965792 100644 --- a/rust/automerge/benches/sync.rs +++ b/rust/automerge/benches/sync.rs @@ -1,4 +1,8 @@ -use automerge::{sync, transaction::Transactable, Automerge, ROOT}; +use automerge::{ + sync::{self, SyncDoc}, + transaction::Transactable, + Automerge, ROOT, +}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; #[derive(Default)] diff --git a/rust/automerge/examples/quickstart.rs b/rust/automerge/examples/quickstart.rs index 76ef0470..fcb23d5e 100644 --- 
a/rust/automerge/examples/quickstart.rs +++ b/rust/automerge/examples/quickstart.rs @@ -2,7 +2,7 @@ use automerge::transaction::CommitOptions; use automerge::transaction::Transactable; use automerge::AutomergeError; use automerge::ObjType; -use automerge::{Automerge, ROOT}; +use automerge::{Automerge, ReadDoc, ROOT}; // Based on https://automerge.github.io/docs/quickstart fn main() { diff --git a/rust/automerge/examples/watch.rs b/rust/automerge/examples/watch.rs index 1618d6c4..4cd8f4ea 100644 --- a/rust/automerge/examples/watch.rs +++ b/rust/automerge/examples/watch.rs @@ -3,6 +3,7 @@ use automerge::transaction::Transactable; use automerge::Automerge; use automerge::AutomergeError; use automerge::Patch; +use automerge::ReadDoc; use automerge::VecOpObserver; use automerge::ROOT; diff --git a/rust/automerge/src/autocommit.rs b/rust/automerge/src/autocommit.rs index 2258fa2e..2c1c3adf 100644 --- a/rust/automerge/src/autocommit.rs +++ b/rust/automerge/src/autocommit.rs @@ -1,10 +1,12 @@ use std::ops::RangeBounds; use crate::exid::ExId; -use crate::op_observer::OpObserver; +use crate::op_observer::{BranchableObserver, OpObserver}; +use crate::sync::SyncDoc; use crate::transaction::{CommitOptions, Transactable}; use crate::{ - sync, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, Parents, ScalarValue, + sync, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, Parents, ReadDoc, + ScalarValue, }; use crate::{ transaction::{Observation, Observed, TransactionInner, UnObserved}, @@ -12,6 +14,41 @@ use crate::{ }; /// An automerge document that automatically manages transactions. +/// +/// An `AutoCommit` can optionally manage an [`OpObserver`]. This observer will be notified of all +/// changes made by both remote and local changes. The type parameter `O` tracks whether this +/// document is observed or not. 
+/// +/// ## Creating, loading, merging and forking documents +/// +/// A new document can be created with [`Self::new`], which will create a document with a random +/// [`ActorId`]. Existing documents can be loaded with [`Self::load`]. +/// +/// If you have two documents and you want to merge the changes from one into the other you can use +/// [`Self::merge`]. +/// +/// If you have a document you want to split into two concurrent threads of execution you can use +/// [`Self::fork`]. If you want to split a document from ealier in its history you can use +/// [`Self::fork_at`]. +/// +/// ## Reading values +/// +/// [`Self`] implements [`ReadDoc`], which provides methods for reading values from the document. +/// +/// ## Modifying a document +/// +/// This type implements [`Transactable`] directly, so you can modify it using methods from [`Transactable`]. +/// +/// ## Synchronization +/// +/// To synchronise call [`Self::sync`] which returns an implementation of [`SyncDoc`] +/// +/// ## Observers +/// +/// An `AutoCommit` can optionally manage an [`OpObserver`]. [`Self::new`] will return a document +/// with no observer but you can set an observer using [`Self::with_observer`]. The observer must +/// implement both [`OpObserver`] and [`BranchableObserver`]. 
If you have an observed autocommit +/// then you can obtain a mutable reference to the observer with [`Self::observer`] #[derive(Debug, Clone)] pub struct AutoCommitWithObs { doc: Automerge, @@ -19,19 +56,12 @@ pub struct AutoCommitWithObs { observation: Obs, } +/// An autocommit document with no observer +/// +/// See [`AutoCommitWithObs`] pub type AutoCommit = AutoCommitWithObs; -impl AutoCommitWithObs { - pub fn unobserved() -> AutoCommitWithObs { - AutoCommitWithObs { - doc: Automerge::new(), - transaction: None, - observation: UnObserved::new(), - } - } -} - -impl Default for AutoCommitWithObs> { +impl Default for AutoCommitWithObs> { fn default() -> Self { let op_observer = O::default(); AutoCommitWithObs { @@ -61,7 +91,7 @@ impl AutoCommit { } } -impl AutoCommitWithObs> { +impl AutoCommitWithObs> { pub fn observer(&mut self) -> &mut Obs { self.ensure_transaction_closed(); self.observation.observer() @@ -89,7 +119,7 @@ impl AutoCommitWithObs { } impl AutoCommitWithObs { - pub fn with_observer( + pub fn with_observer( self, op_observer: Obs2, ) -> AutoCommitWithObs> { @@ -125,6 +155,9 @@ impl AutoCommitWithObs { self.doc.get_actor() } + /// Change the text encoding of this view of the document + /// + /// This is a cheap operation, it just changes the way indexes are calculated pub fn with_encoding(mut self, encoding: TextEncoding) -> Self { self.doc.text_encoding = encoding; self @@ -145,6 +178,13 @@ impl AutoCommitWithObs { } } + /// Load an incremental save of a document. + /// + /// Unlike `load` this imports changes into an existing document. It will work with both the + /// output of [`Self::save`] and [`Self::save_incremental`] + /// + /// The return value is the number of ops which were applied, this is not useful and will + /// change in future. 
pub fn load_incremental(&mut self, data: &[u8]) -> Result { self.ensure_transaction_closed(); // TODO - would be nice to pass None here instead of &mut () @@ -181,17 +221,24 @@ impl AutoCommitWithObs { } } + /// Save the entirety of this document in a compact form. pub fn save(&mut self) -> Vec { self.ensure_transaction_closed(); self.doc.save() } + /// Save this document, but don't run it through DEFLATE afterwards pub fn save_nocompress(&mut self) -> Vec { self.ensure_transaction_closed(); self.doc.save_nocompress() } - // should this return an empty vec instead of None? + /// Save the changes since the last call to [Self::save`] + /// + /// The output of this will not be a compressed document format, but a series of individual + /// changes. This is useful if you know you have only made a small change since the last `save` + /// and you want to immediately send it somewhere (e.g. you've inserted a single character in a + /// text object). pub fn save_incremental(&mut self) -> Vec { self.ensure_transaction_closed(); self.doc.save_incremental() @@ -202,6 +249,7 @@ impl AutoCommitWithObs { self.doc.get_missing_deps(heads) } + /// Get the last change made by this documents actor ID pub fn get_last_local_change(&mut self) -> Option<&Change> { self.ensure_transaction_closed(); self.doc.get_last_local_change() @@ -220,40 +268,24 @@ impl AutoCommitWithObs { self.doc.get_change_by_hash(hash) } + /// Get changes in `other` that are not in `self pub fn get_changes_added<'a>(&mut self, other: &'a mut Self) -> Vec<&'a Change> { self.ensure_transaction_closed(); other.ensure_transaction_closed(); self.doc.get_changes_added(&other.doc) } + #[doc(hidden)] pub fn import(&self, s: &str) -> Result<(ExId, ObjType), AutomergeError> { self.doc.import(s) } + #[doc(hidden)] pub fn dump(&mut self) { self.ensure_transaction_closed(); self.doc.dump() } - pub fn generate_sync_message(&mut self, sync_state: &mut sync::State) -> Option { - self.ensure_transaction_closed(); - 
self.doc.generate_sync_message(sync_state) - } - - pub fn receive_sync_message( - &mut self, - sync_state: &mut sync::State, - message: sync::Message, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_closed(); - if let Some(observer) = self.observation.observer() { - self.doc - .receive_sync_message_with(sync_state, message, Some(observer)) - } else { - self.doc.receive_sync_message(sync_state, message) - } - } - /// Return a graphviz representation of the opset. /// /// # Arguments @@ -305,6 +337,7 @@ impl AutoCommitWithObs { tx.commit(&mut self.doc, options.message, options.time) } + /// Remove any changes that have been made in the current transaction from the document pub fn rollback(&mut self) -> usize { self.transaction .take() @@ -326,14 +359,24 @@ impl AutoCommitWithObs { let args = self.doc.transaction_args(); TransactionInner::empty(&mut self.doc, args, options.message, options.time) } + + /// An implementation of [`crate::sync::SyncDoc`] for this autocommit + /// + /// This ensures that any outstanding transactions for this document are committed before + /// taking part in the sync protocol + pub fn sync(&mut self) -> impl SyncDoc + '_ { + self.ensure_transaction_closed(); + SyncWrapper { inner: self } + } } -impl Transactable for AutoCommitWithObs { - fn pending_ops(&self) -> usize { - self.transaction - .as_ref() - .map(|(_, t)| t.pending_ops()) - .unwrap_or(0) +impl ReadDoc for AutoCommitWithObs { + fn parents>(&self, obj: O) -> Result, AutomergeError> { + self.doc.parents(obj) + } + + fn path_to_object>(&self, obj: O) -> Result, AutomergeError> { + self.doc.path_to_object(obj) } fn keys>(&self, obj: O) -> Keys<'_, '_> { @@ -398,6 +441,69 @@ impl Transactable for AutoCommitWithObs { self.doc.object_type(obj) } + fn text>(&self, obj: O) -> Result { + self.doc.text(obj) + } + + fn text_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result { + self.doc.text_at(obj, heads) + } + + fn get, P: Into>( + &self, + obj: O, + prop: P, + ) 
-> Result, ExId)>, AutomergeError> { + self.doc.get(obj, prop) + } + + fn get_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, ExId)>, AutomergeError> { + self.doc.get_at(obj, prop, heads) + } + + fn get_all, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, ExId)>, AutomergeError> { + self.doc.get_all(obj, prop) + } + + fn get_all_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, ExId)>, AutomergeError> { + self.doc.get_all_at(obj, prop, heads) + } + + fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { + self.doc.get_missing_deps(heads) + } + + fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { + self.doc.get_change_by_hash(hash) + } +} + +impl Transactable for AutoCommitWithObs { + fn pending_ops(&self) -> usize { + self.transaction + .as_ref() + .map(|(_, t)| t.pending_ops()) + .unwrap_or(0) + } + fn put, P: Into, V: Into>( &mut self, obj: O, @@ -515,60 +621,52 @@ impl Transactable for AutoCommitWithObs { ) } - fn text>(&self, obj: O) -> Result { - self.doc.text(obj) - } - - fn text_at>( - &self, - obj: O, - heads: &[ChangeHash], - ) -> Result { - self.doc.text_at(obj, heads) - } - - // TODO - I need to return these OpId's here **only** to get - // the legacy conflicts format of { [opid]: value } - // Something better? 
- fn get, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError> { - self.doc.get(obj, prop) - } - - fn get_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError> { - self.doc.get_at(obj, prop, heads) - } - - fn get_all, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError> { - self.doc.get_all(obj, prop) - } - - fn get_all_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError> { - self.doc.get_all_at(obj, prop, heads) - } - - fn parents>(&self, obj: O) -> Result, AutomergeError> { - self.doc.parents(obj) - } - fn base_heads(&self) -> Vec { self.doc.get_heads() } } + +// A wrapper we return from `AutoCommit::sync` to ensure that transactions are closed before we +// start syncing +struct SyncWrapper<'a, Obs: Observation> { + inner: &'a mut AutoCommitWithObs, +} + +impl<'a, Obs: Observation> SyncDoc for SyncWrapper<'a, Obs> { + fn generate_sync_message(&self, sync_state: &mut sync::State) -> Option { + self.inner.doc.generate_sync_message(sync_state) + } + + fn receive_sync_message( + &mut self, + sync_state: &mut sync::State, + message: sync::Message, + ) -> Result<(), AutomergeError> { + self.inner.ensure_transaction_closed(); + if let Some(observer) = self.inner.observation.observer() { + self.inner + .doc + .receive_sync_message_with(sync_state, message, observer) + } else { + self.inner.doc.receive_sync_message(sync_state, message) + } + } + + fn receive_sync_message_with( + &mut self, + sync_state: &mut sync::State, + message: sync::Message, + op_observer: &mut Obs2, + ) -> Result<(), AutomergeError> { + if let Some(our_observer) = self.inner.observation.observer() { + let mut composed = crate::op_observer::compose(our_observer, op_observer); + self.inner + .doc + .receive_sync_message_with(sync_state, message, &mut composed) + } else { + self.inner + .doc + .receive_sync_message_with(sync_state, message, 
op_observer) + } + } +} diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs index 584f761d..86aa5f63 100644 --- a/rust/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ -9,7 +9,7 @@ use crate::clocks::Clocks; use crate::columnar::Key as EncodedKey; use crate::exid::ExId; use crate::keys::Keys; -use crate::op_observer::OpObserver; +use crate::op_observer::{BranchableObserver, OpObserver}; use crate::op_set::OpSet; use crate::parents::Parents; use crate::storage::{self, load, CompressConfig, VerificationMode}; @@ -22,7 +22,7 @@ use crate::types::{ }; use crate::{ query, AutomergeError, Change, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, - Prop, Values, + Prop, ReadDoc, Values, }; use serde::Serialize; @@ -35,7 +35,39 @@ pub(crate) enum Actor { Cached(usize), } -/// An automerge document. +/// An automerge document which does not manage transactions for you. +/// +/// ## Creating, loading, merging and forking documents +/// +/// A new document can be created with [`Self::new`], which will create a document with a random +/// [`ActorId`]. Existing documents can be loaded with [`Self::load`], or [`Self::load_with`]. +/// +/// If you have two documents and you want to merge the changes from one into the other you can use +/// [`Self::merge`] or [`Self::merge_with`]. +/// +/// If you have a document you want to split into two concurrent threads of execution you can use +/// [`Self::fork`]. If you want to split a document from ealier in its history you can use +/// [`Self::fork_at`]. +/// +/// ## Reading values +/// +/// [`Self`] implements [`ReadDoc`], which provides methods for reading values from the document. +/// +/// ## Modifying a document (Transactions) +/// +/// [`Automerge`] provides an interface for viewing and modifying automerge documents which does +/// not manage transactions for you. 
To create changes you use either [`Automerge::transaction`] or +/// [`Automerge::transact`] (or the `_with` variants). +/// +/// ## Sync +/// +/// This type implements [`crate::sync::SyncDoc`] +/// +/// ## Observers +/// +/// Many of the methods on this type have an `_with` or `_observed` variant +/// which allow you to pass in an [`OpObserver`] to observe any changes which +/// occur. #[derive(Debug, Clone)] pub struct Automerge { /// The list of unapplied changes that are not causally ready. @@ -79,6 +111,9 @@ impl Automerge { } } + /// Change the text encoding of this view of the document + /// + /// This is a cheap operation, it just changes the way indexes are calculated pub fn with_encoding(mut self, encoding: TextEncoding) -> Self { self.text_encoding = encoding; self @@ -125,7 +160,8 @@ impl Automerge { Transaction::new(self, args, UnObserved) } - pub fn transaction_with_observer( + /// Start a transaction with an observer + pub fn transaction_with_observer( &mut self, op_observer: Obs, ) -> Transaction<'_, Observed> { @@ -172,7 +208,6 @@ impl Automerge { self.transact_with_impl(Some(c), f) } - /// Like [`Self::transact`] but with a function for generating the commit options. 
fn transact_with_impl( &mut self, c: Option, @@ -210,7 +245,7 @@ impl Automerge { pub fn transact_observed(&mut self, f: F) -> transaction::Result where F: FnOnce(&mut Transaction<'_, Observed>) -> Result, - Obs: OpObserver + Default, + Obs: OpObserver + BranchableObserver + Default, { self.transact_observed_with_impl(None::<&dyn Fn(&O) -> CommitOptions>, f) } @@ -224,7 +259,7 @@ impl Automerge { where F: FnOnce(&mut Transaction<'_, Observed>) -> Result, C: FnOnce(&O) -> CommitOptions, - Obs: OpObserver + Default, + Obs: OpObserver + BranchableObserver + Default, { self.transact_observed_with_impl(Some(c), f) } @@ -237,7 +272,7 @@ impl Automerge { where F: FnOnce(&mut Transaction<'_, Observed>) -> Result, C: FnOnce(&O) -> CommitOptions, - Obs: OpObserver + Default, + Obs: OpObserver + BranchableObserver + Default, { let observer = Obs::default(); let mut tx = self.transaction_with_observer(observer); @@ -273,13 +308,17 @@ impl Automerge { } /// Fork this document at the current point for use by a different actor. + /// + /// This will create a new actor ID for the forked document pub fn fork(&self) -> Self { let mut f = self.clone(); f.set_actor(ActorId::random()); f } - /// Fork this document at the give heads + /// Fork this document at the given heads + /// + /// This will create a new actor ID for the forked document pub fn fork_at(&self, heads: &[ChangeHash]) -> Result { let mut seen = heads.iter().cloned().collect::>(); let mut heads = heads.to_vec(); @@ -304,182 +343,6 @@ impl Automerge { Ok(f) } - // KeysAt::() - // LenAt::() - // PropAt::() - // NthAt::() - - /// Get the parents of an object in the document tree. - /// - /// ### Errors - /// - /// Returns an error when the id given is not the id of an object in this document. - /// This function does not get the parents of scalar values contained within objects. - /// - /// ### Experimental - /// - /// This function may in future be changed to allow getting the parents from the id of a scalar - /// value. 
- pub fn parents>(&self, obj: O) -> Result, AutomergeError> { - let (obj_id, _) = self.exid_to_obj(obj.as_ref())?; - Ok(self.ops.parents(obj_id)) - } - - pub fn path_to_object>( - &self, - obj: O, - ) -> Result, AutomergeError> { - Ok(self.parents(obj.as_ref().clone())?.path()) - } - - /// Get the keys of the object `obj`. - /// - /// For a map this returns the keys of the map. - /// For a list this returns the element ids (opids) encoded as strings. - pub fn keys>(&self, obj: O) -> Keys<'_, '_> { - if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - let iter_keys = self.ops.keys(obj); - Keys::new(self, iter_keys) - } else { - Keys::new(self, None) - } - } - - /// Historical version of [`keys`](Self::keys). - pub fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> { - if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - return KeysAt::new(self, self.ops.keys_at(obj, clock)); - } - } - KeysAt::new(self, None) - } - - /// Iterate over the keys and values of the map `obj` in the given range. - pub fn map_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> MapRange<'_, R> { - if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - MapRange::new(self, self.ops.map_range(obj, range)) - } else { - MapRange::new(self, None) - } - } - - /// Historical version of [`map_range`](Self::map_range). - pub fn map_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> MapRangeAt<'_, R> { - if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - let iter_range = self.ops.map_range_at(obj, range, clock); - return MapRangeAt::new(self, iter_range); - } - } - MapRangeAt::new(self, None) - } - - /// Iterate over the indexes and values of the list `obj` in the given range. 
- pub fn list_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> ListRange<'_, R> { - if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - ListRange::new(self, self.ops.list_range(obj, range)) - } else { - ListRange::new(self, None) - } - } - - /// Historical version of [`list_range`](Self::list_range). - pub fn list_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> ListRangeAt<'_, R> { - if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - let iter_range = self.ops.list_range_at(obj, range, clock); - return ListRangeAt::new(self, iter_range); - } - } - ListRangeAt::new(self, None) - } - - pub fn values>(&self, obj: O) -> Values<'_> { - if let Ok((obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { - if obj_type.is_sequence() { - Values::new(self, self.ops.list_range(obj, ..)) - } else { - Values::new(self, self.ops.map_range(obj, ..)) - } - } else { - Values::empty(self) - } - } - - pub fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> { - if let Ok((obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - return match obj_type { - ObjType::Map | ObjType::Table => { - let iter_range = self.ops.map_range_at(obj, .., clock); - Values::new(self, iter_range) - } - ObjType::List | ObjType::Text => { - let iter_range = self.ops.list_range_at(obj, .., clock); - Values::new(self, iter_range) - } - }; - } - } - Values::empty(self) - } - - /// Get the length of the given object. - pub fn length>(&self, obj: O) -> usize { - if let Ok((inner_obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { - if obj_type == ObjType::Map || obj_type == ObjType::Table { - self.keys(obj).count() - } else { - let encoding = ListEncoding::new(obj_type, self.text_encoding); - self.ops.search(&inner_obj, query::Len::new(encoding)).len - } - } else { - 0 - } - } - - /// Historical version of [`length`](Self::length). 
- pub fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { - if let Ok((inner_obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - return if obj_type == ObjType::Map || obj_type == ObjType::Table { - self.keys_at(obj, heads).count() - } else { - let encoding = ListEncoding::new(obj_type, self.text_encoding); - self.ops - .search(&inner_obj, query::LenAt::new(clock, encoding)) - .len - }; - } - } - 0 - } - - /// Get the type of this object, if it is an object. - pub fn object_type>(&self, obj: O) -> Result { - let (_, obj_type) = self.exid_to_obj(obj.as_ref())?; - Ok(obj_type) - } - pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result<(ObjId, ObjType), AutomergeError> { match id { ExId::Root => Ok((ObjId::root(), ObjType::Map)), @@ -511,153 +374,19 @@ impl Automerge { self.ops.id_to_exid(id) } - /// Get the string represented by the given text object. - pub fn text>(&self, obj: O) -> Result { - let obj = self.exid_to_obj(obj.as_ref())?.0; - let query = self.ops.search(&obj, query::ListVals::new()); - let mut buffer = String::new(); - for q in &query.ops { - buffer.push_str(q.to_str()); - } - Ok(buffer) - } - - /// Historical version of [`text`](Self::text). - pub fn text_at>( - &self, - obj: O, - heads: &[ChangeHash], - ) -> Result { - let obj = self.exid_to_obj(obj.as_ref())?.0; - let clock = self.clock_at(heads)?; - let query = self.ops.search(&obj, query::ListValsAt::new(clock)); - let mut buffer = String::new(); - for q in &query.ops { - if let OpType::Put(ScalarValue::Str(s)) = &q.action { - buffer.push_str(s); - } else { - buffer.push('\u{fffc}'); - } - } - Ok(buffer) - } - - // TODO - I need to return these OpId's here **only** to get - // the legacy conflicts format of { [opid]: value } - // Something better? - /// Get a value out of the document. 
- /// - /// Returns both the value and the id of the operation that created it, useful for handling - /// conflicts and serves as the object id if the value is an object. - pub fn get, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError> { - Ok(self.get_all(obj, prop.into())?.last().cloned()) - } - - /// Historical version of [`get`](Self::get). - pub fn get_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError> { - Ok(self.get_all_at(obj, prop, heads)?.last().cloned()) - } - - /// Get all conflicting values out of the document at this prop that conflict. - /// - /// Returns both the value and the id of the operation that created it, useful for handling - /// conflicts and serves as the object id if the value is an object. - pub fn get_all, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError> { - let obj = self.exid_to_obj(obj.as_ref())?.0; - let mut result = match prop.into() { - Prop::Map(p) => { - let prop = self.ops.m.props.lookup(&p); - if let Some(p) = prop { - self.ops - .search(&obj, query::Prop::new(p)) - .ops - .into_iter() - .map(|o| (o.value(), self.id_to_exid(o.id))) - .collect() - } else { - vec![] - } - } - Prop::Seq(n) => { - let obj_type = self.ops.object_type(&obj); - let encoding = obj_type - .map(|o| ListEncoding::new(o, self.text_encoding)) - .unwrap_or_default(); - self.ops - .search(&obj, query::Nth::new(n, encoding)) - .ops - .into_iter() - .map(|o| (o.value(), self.id_to_exid(o.id))) - .collect() - } - }; - result.sort_by(|a, b| b.1.cmp(&a.1)); - Ok(result) - } - - /// Historical version of [`get_all`](Self::get_all). 
- pub fn get_all_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError> { - let prop = prop.into(); - let obj = self.exid_to_obj(obj.as_ref())?.0; - let clock = self.clock_at(heads)?; - let result = match prop { - Prop::Map(p) => { - let prop = self.ops.m.props.lookup(&p); - if let Some(p) = prop { - self.ops - .search(&obj, query::PropAt::new(p, clock)) - .ops - .into_iter() - .map(|o| (o.clone_value(), self.id_to_exid(o.id))) - .collect() - } else { - vec![] - } - } - Prop::Seq(n) => { - let obj_type = self.ops.object_type(&obj); - let encoding = obj_type - .map(|o| ListEncoding::new(o, self.text_encoding)) - .unwrap_or_default(); - self.ops - .search(&obj, query::NthAt::new(n, clock, encoding)) - .ops - .into_iter() - .map(|o| (o.clone_value(), self.id_to_exid(o.id))) - .collect() - } - }; - Ok(result) - } - /// Load a document. pub fn load(data: &[u8]) -> Result { Self::load_with::<()>(data, VerificationMode::Check, None) } + /// Load a document without verifying the head hashes + /// + /// This is useful for debugging as it allows you to examine a corrupted document. pub fn load_unverified_heads(data: &[u8]) -> Result { Self::load_with::<()>(data, VerificationMode::DontCheck, None) } - /// Load a document. + /// Load a document with an observer #[tracing::instrument(skip(data, observer), err)] pub fn load_with( data: &[u8], @@ -749,11 +478,17 @@ impl Automerge { } /// Load an incremental save of a document. + /// + /// Unlike `load` this imports changes into an existing document. It will work with both the + /// output of [`Self::save`] and [`Self::save_incremental`] + /// + /// The return value is the number of ops which were applied, this is not useful and will + /// change in future. pub fn load_incremental(&mut self, data: &[u8]) -> Result { self.load_incremental_with::<()>(data, None) } - /// Load an incremental save of a document. 
+ /// Like [`Self::load_incremental`] but with an observer pub fn load_incremental_with( &mut self, data: &[u8], @@ -783,6 +518,9 @@ impl Automerge { } /// Apply changes to this document. + /// + /// This is idemptotent in the sense that if a change has already been applied it will be + /// ignored. pub fn apply_changes( &mut self, changes: impl IntoIterator, @@ -790,7 +528,7 @@ impl Automerge { self.apply_changes_with::<_, ()>(changes, None) } - /// Apply changes to this document. + /// Like [`Self::apply_changes`] but with an observer pub fn apply_changes_with, Obs: OpObserver>( &mut self, changes: I, @@ -925,6 +663,10 @@ impl Automerge { } /// Save the entirety of this document in a compact form. + /// + /// This takes a mutable reference to self because it saves the heads of the last save so that + /// `save_incremental` can be used to produce only the changes since the last `save`. This API + /// will be changing in future. pub fn save(&mut self) -> Vec { let heads = self.get_heads(); let c = self.history.iter(); @@ -940,6 +682,7 @@ impl Automerge { bytes } + /// Save this document, but don't run it through DEFLATE afterwards pub fn save_nocompress(&mut self) -> Vec { let heads = self.get_heads(); let c = self.history.iter(); @@ -955,7 +698,12 @@ impl Automerge { bytes } - /// Save the changes since last save in a compact form. + /// Save the changes since the last call to [Self::save`] + /// + /// The output of this will not be a compressed document format, but a series of individual + /// changes. This is useful if you know you have only made a small change since the last `save` + /// and you want to immediately send it somewhere (e.g. you've inserted a single character in a + /// text object). pub fn save_incremental(&mut self) -> Vec { let changes = self .get_changes(self.saved.as_slice()) @@ -997,33 +745,6 @@ impl Automerge { Ok(()) } - /// Get the hashes of the changes in this document that aren't transitive dependencies of the - /// given `heads`. 
- pub fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { - let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash()).collect(); - let mut missing = HashSet::new(); - - for head in self.queue.iter().flat_map(|change| change.deps()) { - if !self.history_index.contains_key(head) { - missing.insert(head); - } - } - - for head in heads { - if !self.history_index.contains_key(head) { - missing.insert(head); - } - } - - let mut missing = missing - .into_iter() - .filter(|hash| !in_queue.contains(hash)) - .copied() - .collect::>(); - missing.sort(); - missing - } - /// Get the changes since `have_deps` in this document using a clock internally. fn get_changes_clock(&self, have_deps: &[ChangeHash]) -> Result, AutomergeError> { // get the clock for the given deps @@ -1052,10 +773,6 @@ impl Automerge { .collect()) } - pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Result, AutomergeError> { - self.get_changes_clock(have_deps) - } - /// Get the last change this actor made to the document. pub fn get_last_local_change(&self) -> Option<&Change> { return self @@ -1087,47 +804,6 @@ impl Automerge { } } - /// Get a change by its hash. - pub fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { - self.history_index - .get(hash) - .and_then(|index| self.history.get(*index)) - } - - /// Get the changes that the other document added compared to this document. 
- #[tracing::instrument(skip(self, other))] - pub fn get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { - // Depth-first traversal from the heads through the dependency graph, - // until we reach a change that is already present in other - let mut stack: Vec<_> = other.get_heads(); - tracing::trace!(their_heads=?stack, "finding changes to merge"); - let mut seen_hashes = HashSet::new(); - let mut added_change_hashes = Vec::new(); - while let Some(hash) = stack.pop() { - if !seen_hashes.contains(&hash) && self.get_change_by_hash(&hash).is_none() { - seen_hashes.insert(hash); - added_change_hashes.push(hash); - if let Some(change) = other.get_change_by_hash(&hash) { - stack.extend(change.deps()); - } - } - } - // Return those changes in the reverse of the order in which the depth-first search - // found them. This is not necessarily a topological sort, but should usually be close. - added_change_hashes.reverse(); - added_change_hashes - .into_iter() - .filter_map(|h| other.get_change_by_hash(&h)) - .collect() - } - - /// Get the heads of this document. - pub fn get_heads(&self) -> Vec { - let mut deps: Vec<_> = self.deps.iter().copied().collect(); - deps.sort_unstable(); - deps - } - fn get_hash(&self, actor: usize, seq: u64) -> Result { self.states .get(&actor) @@ -1181,6 +857,7 @@ impl Automerge { self.deps.insert(change.hash()); } + #[doc(hidden)] pub fn import(&self, s: &str) -> Result<(ExId, ObjType), AutomergeError> { if s == "_root" { Ok((ExId::Root, ObjType::Map)) @@ -1367,6 +1044,343 @@ impl Automerge { op } + + /// Get the heads of this document. 
+ pub fn get_heads(&self) -> Vec { + let mut deps: Vec<_> = self.deps.iter().copied().collect(); + deps.sort_unstable(); + deps + } + + pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Result, AutomergeError> { + self.get_changes_clock(have_deps) + } + + /// Get changes in `other` that are not in `self + pub fn get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { + // Depth-first traversal from the heads through the dependency graph, + // until we reach a change that is already present in other + let mut stack: Vec<_> = other.get_heads(); + tracing::trace!(their_heads=?stack, "finding changes to merge"); + let mut seen_hashes = HashSet::new(); + let mut added_change_hashes = Vec::new(); + while let Some(hash) = stack.pop() { + if !seen_hashes.contains(&hash) && self.get_change_by_hash(&hash).is_none() { + seen_hashes.insert(hash); + added_change_hashes.push(hash); + if let Some(change) = other.get_change_by_hash(&hash) { + stack.extend(change.deps()); + } + } + } + // Return those changes in the reverse of the order in which the depth-first search + // found them. This is not necessarily a topological sort, but should usually be close. 
+ added_change_hashes.reverse(); + added_change_hashes + .into_iter() + .filter_map(|h| other.get_change_by_hash(&h)) + .collect() + } +} + +impl ReadDoc for Automerge { + fn parents>(&self, obj: O) -> Result, AutomergeError> { + let (obj_id, _) = self.exid_to_obj(obj.as_ref())?; + Ok(self.ops.parents(obj_id)) + } + + fn path_to_object>(&self, obj: O) -> Result, AutomergeError> { + Ok(self.parents(obj.as_ref().clone())?.path()) + } + + fn keys>(&self, obj: O) -> Keys<'_, '_> { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { + let iter_keys = self.ops.keys(obj); + Keys::new(self, iter_keys) + } else { + Keys::new(self, None) + } + } + + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { + if let Ok(clock) = self.clock_at(heads) { + return KeysAt::new(self, self.ops.keys_at(obj, clock)); + } + } + KeysAt::new(self, None) + } + + fn map_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> MapRange<'_, R> { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { + MapRange::new(self, self.ops.map_range(obj, range)) + } else { + MapRange::new(self, None) + } + } + + fn map_range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> MapRangeAt<'_, R> { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { + if let Ok(clock) = self.clock_at(heads) { + let iter_range = self.ops.map_range_at(obj, range, clock); + return MapRangeAt::new(self, iter_range); + } + } + MapRangeAt::new(self, None) + } + + fn list_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> ListRange<'_, R> { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { + ListRange::new(self, self.ops.list_range(obj, range)) + } else { + ListRange::new(self, None) + } + } + + fn list_range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> ListRangeAt<'_, R> { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { + if let Ok(clock) = 
self.clock_at(heads) { + let iter_range = self.ops.list_range_at(obj, range, clock); + return ListRangeAt::new(self, iter_range); + } + } + ListRangeAt::new(self, None) + } + + fn values>(&self, obj: O) -> Values<'_> { + if let Ok((obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { + if obj_type.is_sequence() { + Values::new(self, self.ops.list_range(obj, ..)) + } else { + Values::new(self, self.ops.map_range(obj, ..)) + } + } else { + Values::empty(self) + } + } + + fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> { + if let Ok((obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { + if let Ok(clock) = self.clock_at(heads) { + return match obj_type { + ObjType::Map | ObjType::Table => { + let iter_range = self.ops.map_range_at(obj, .., clock); + Values::new(self, iter_range) + } + ObjType::List | ObjType::Text => { + let iter_range = self.ops.list_range_at(obj, .., clock); + Values::new(self, iter_range) + } + }; + } + } + Values::empty(self) + } + + fn length>(&self, obj: O) -> usize { + if let Ok((inner_obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { + if obj_type == ObjType::Map || obj_type == ObjType::Table { + self.keys(obj).count() + } else { + let encoding = ListEncoding::new(obj_type, self.text_encoding); + self.ops.search(&inner_obj, query::Len::new(encoding)).len + } + } else { + 0 + } + } + + fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { + if let Ok((inner_obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { + if let Ok(clock) = self.clock_at(heads) { + return if obj_type == ObjType::Map || obj_type == ObjType::Table { + self.keys_at(obj, heads).count() + } else { + let encoding = ListEncoding::new(obj_type, self.text_encoding); + self.ops + .search(&inner_obj, query::LenAt::new(clock, encoding)) + .len + }; + } + } + 0 + } + + fn object_type>(&self, obj: O) -> Result { + let (_, obj_type) = self.exid_to_obj(obj.as_ref())?; + Ok(obj_type) + } + + fn text>(&self, obj: O) -> Result { + let obj = 
self.exid_to_obj(obj.as_ref())?.0; + let query = self.ops.search(&obj, query::ListVals::new()); + let mut buffer = String::new(); + for q in &query.ops { + buffer.push_str(q.to_str()); + } + Ok(buffer) + } + + fn text_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result { + let obj = self.exid_to_obj(obj.as_ref())?.0; + let clock = self.clock_at(heads)?; + let query = self.ops.search(&obj, query::ListValsAt::new(clock)); + let mut buffer = String::new(); + for q in &query.ops { + if let OpType::Put(ScalarValue::Str(s)) = &q.action { + buffer.push_str(s); + } else { + buffer.push('\u{fffc}'); + } + } + Ok(buffer) + } + + fn get, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, ExId)>, AutomergeError> { + Ok(self.get_all(obj, prop.into())?.last().cloned()) + } + + fn get_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, ExId)>, AutomergeError> { + Ok(self.get_all_at(obj, prop, heads)?.last().cloned()) + } + + fn get_all, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, ExId)>, AutomergeError> { + let obj = self.exid_to_obj(obj.as_ref())?.0; + let mut result = match prop.into() { + Prop::Map(p) => { + let prop = self.ops.m.props.lookup(&p); + if let Some(p) = prop { + self.ops + .search(&obj, query::Prop::new(p)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect() + } else { + vec![] + } + } + Prop::Seq(n) => { + let obj_type = self.ops.object_type(&obj); + let encoding = obj_type + .map(|o| ListEncoding::new(o, self.text_encoding)) + .unwrap_or_default(); + self.ops + .search(&obj, query::Nth::new(n, encoding)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect() + } + }; + result.sort_by(|a, b| b.1.cmp(&a.1)); + Ok(result) + } + + fn get_all_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, ExId)>, AutomergeError> { + let prop = prop.into(); + let obj = self.exid_to_obj(obj.as_ref())?.0; + let clock = 
self.clock_at(heads)?; + let result = match prop { + Prop::Map(p) => { + let prop = self.ops.m.props.lookup(&p); + if let Some(p) = prop { + self.ops + .search(&obj, query::PropAt::new(p, clock)) + .ops + .into_iter() + .map(|o| (o.clone_value(), self.id_to_exid(o.id))) + .collect() + } else { + vec![] + } + } + Prop::Seq(n) => { + let obj_type = self.ops.object_type(&obj); + let encoding = obj_type + .map(|o| ListEncoding::new(o, self.text_encoding)) + .unwrap_or_default(); + self.ops + .search(&obj, query::NthAt::new(n, clock, encoding)) + .ops + .into_iter() + .map(|o| (o.clone_value(), self.id_to_exid(o.id))) + .collect() + } + }; + Ok(result) + } + + fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { + let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash()).collect(); + let mut missing = HashSet::new(); + + for head in self.queue.iter().flat_map(|change| change.deps()) { + if !self.history_index.contains_key(head) { + missing.insert(head); + } + } + + for head in heads { + if !self.history_index.contains_key(head) { + missing.insert(head); + } + } + + let mut missing = missing + .into_iter() + .filter(|hash| !in_queue.contains(hash)) + .copied() + .collect::>(); + missing.sort(); + missing + } + + fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { + self.history_index + .get(hash) + .and_then(|index| self.history.get(*index)) + } } impl Default for Automerge { diff --git a/rust/automerge/src/automerge/tests.rs b/rust/automerge/src/automerge/tests.rs index 7eadaedd..8d533fed 100644 --- a/rust/automerge/src/automerge/tests.rs +++ b/rust/automerge/src/automerge/tests.rs @@ -1539,7 +1539,7 @@ fn observe_counter_change_application() { #[test] fn get_changes_heads_empty() { - let mut doc = AutoCommit::unobserved(); + let mut doc = AutoCommit::new(); doc.put(ROOT, "key1", 1).unwrap(); doc.commit(); doc.put(ROOT, "key2", 1).unwrap(); diff --git a/rust/automerge/src/autoserde.rs b/rust/automerge/src/autoserde.rs index 
63b0848a..ccfc6ae6 100644 --- a/rust/automerge/src/autoserde.rs +++ b/rust/automerge/src/autoserde.rs @@ -1,18 +1,33 @@ use serde::ser::{SerializeMap, SerializeSeq}; -use crate::{Automerge, ObjId, ObjType, Value}; +use crate::{ObjId, ObjType, ReadDoc, Value}; -/// A wrapper type which implements [`serde::Serialize`] for an [`Automerge`]. +/// A wrapper type which implements [`serde::Serialize`] for a [`ReadDoc`]. +/// +/// # Example +/// +/// ``` +/// # fn main() -> Result<(), Box> { +/// use automerge::{AutoCommit, AutomergeError, Value, transaction::Transactable}; +/// let mut doc = AutoCommit::new(); +/// doc.put(automerge::ROOT, "key", "value")?; +/// +/// let serialized = serde_json::to_string(&automerge::AutoSerde::from(&doc)).unwrap(); +/// +/// assert_eq!(serialized, r#"{"key":"value"}"#); +/// # Ok(()) +/// # } +/// ``` #[derive(Debug)] -pub struct AutoSerde<'a>(&'a Automerge); +pub struct AutoSerde<'a, R: crate::ReadDoc>(&'a R); -impl<'a> From<&'a Automerge> for AutoSerde<'a> { - fn from(a: &'a Automerge) -> Self { +impl<'a, R: ReadDoc> From<&'a R> for AutoSerde<'a, R> { + fn from(a: &'a R) -> Self { AutoSerde(a) } } -impl<'a> serde::Serialize for AutoSerde<'a> { +impl<'a, R: crate::ReadDoc> serde::Serialize for AutoSerde<'a, R> { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -25,12 +40,12 @@ impl<'a> serde::Serialize for AutoSerde<'a> { } } -struct AutoSerdeMap<'a> { - doc: &'a Automerge, +struct AutoSerdeMap<'a, R> { + doc: &'a R, obj: ObjId, } -impl<'a> serde::Serialize for AutoSerdeMap<'a> { +impl<'a, R: crate::ReadDoc> serde::Serialize for AutoSerdeMap<'a, R> { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -51,12 +66,12 @@ impl<'a> serde::Serialize for AutoSerdeMap<'a> { } } -struct AutoSerdeSeq<'a> { - doc: &'a Automerge, +struct AutoSerdeSeq<'a, R> { + doc: &'a R, obj: ObjId, } -impl<'a> serde::Serialize for AutoSerdeSeq<'a> { +impl<'a, R: crate::ReadDoc> serde::Serialize for 
AutoSerdeSeq<'a, R> { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -77,13 +92,13 @@ impl<'a> serde::Serialize for AutoSerdeSeq<'a> { } } -struct AutoSerdeVal<'a> { - doc: &'a Automerge, +struct AutoSerdeVal<'a, R> { + doc: &'a R, val: Value<'a>, obj: ObjId, } -impl<'a> serde::Serialize for AutoSerdeVal<'a> { +impl<'a, R: crate::ReadDoc> serde::Serialize for AutoSerdeVal<'a, R> { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, diff --git a/rust/automerge/src/exid.rs b/rust/automerge/src/exid.rs index 3ff8fbb5..3a5a2ca2 100644 --- a/rust/automerge/src/exid.rs +++ b/rust/automerge/src/exid.rs @@ -6,6 +6,10 @@ use std::cmp::{Ord, Ordering}; use std::fmt; use std::hash::{Hash, Hasher}; +/// An identifier for an object in a document +/// +/// This can be persisted using `to_bytes` and `TryFrom<&[u8]>` breaking changes to the +/// serialization format will be considered breaking changes for this library version. #[derive(Debug, Clone)] pub enum ExId { Root, @@ -17,7 +21,10 @@ const TYPE_ROOT: u8 = 0; const TYPE_ID: u8 = 1; impl ExId { - /// Serialize the ExId to a byte array. + /// Serialize this object ID to a byte array. + /// + /// This serialization format is versioned and incompatible changes to it will be considered a + /// breaking change for the version of this library. pub fn to_bytes(&self) -> Vec { // The serialized format is // diff --git a/rust/automerge/src/keys.rs b/rust/automerge/src/keys.rs index f8e0c676..838015ef 100644 --- a/rust/automerge/src/keys.rs +++ b/rust/automerge/src/keys.rs @@ -1,5 +1,9 @@ use crate::{query, Automerge}; +/// An iterator over the keys of an object +/// +/// This is returned by [`crate::ReadDoc::keys`] and method. The returned item is either +/// the keys of a map, or the encoded element IDs of a sequence. 
#[derive(Debug)] pub struct Keys<'a, 'k> { keys: Option>, diff --git a/rust/automerge/src/keys_at.rs b/rust/automerge/src/keys_at.rs index c957e175..fd747bbc 100644 --- a/rust/automerge/src/keys_at.rs +++ b/rust/automerge/src/keys_at.rs @@ -1,5 +1,9 @@ use crate::{query, Automerge}; +/// An iterator over the keys of an object at a particular point in history +/// +/// This is returned by [`crate::ReadDoc::keys_at`] method. The returned item is either the keys of a map, +/// or the encoded element IDs of a sequence. #[derive(Debug)] pub struct KeysAt<'a, 'k> { keys: Option>, diff --git a/rust/automerge/src/lib.rs b/rust/automerge/src/lib.rs index 58f5b263..bafd8983 100644 --- a/rust/automerge/src/lib.rs +++ b/rust/automerge/src/lib.rs @@ -1,3 +1,190 @@ +//! # Automerge +//! +//! Automerge is a library of data structures for building collaborative, +//! [local-first](https://www.inkandswitch.com/local-first/) applications. The +//! idea of automerge is to provide a data structure which is quite general, +//! \- consisting of nested key/value maps and/or lists - which can be modified +//! entirely locally but which can at any time be merged with other instances of +//! the same data structure. +//! +//! In addition to the core data structure (which we generally refer to as a +//! "document"), we also provide an implementation of a sync protocol (in +//! [`crate::sync`]) which can be used over any reliable in-order transport; and +//! an efficient binary storage format. +//! +//! This crate is organised around two representations of a document - +//! [`Automerge`] and [`AutoCommit`]. The difference between the two is that +//! [`AutoCommit`] manages transactions for you. Both of these representations +//! implement [`ReadDoc`] for reading values from a document and +//! [`sync::SyncDoc`] for taking part in the sync protocol. [`AutoCommit`] +//! directly implements [`transaction::Transactable`] for making changes to a +//! 
document, whilst [`Automerge`] requires you to explicitly create a +//! [`transaction::Transaction`]. +//! +//! NOTE: The API this library provides for modifying data is quite low level +//! (somewhat analogous to directly creating JSON values rather than using +//! `serde` derive macros or equivalent). If you're writing a Rust application which uses automerge +//! you may want to look at [autosurgeon](https://github.com/automerge/autosurgeon). +//! +//! ## Data Model +//! +//! An automerge document is a map from strings to values +//! ([`Value`]) where values can be either +//! +//! * A nested composite value which is either +//! * A map from strings to values ([`ObjType::Map`]) +//! * A list of values ([`ObjType::List`]) +//! * A text object (a sequence of unicode characters) ([`ObjType::Text`]) +//! * A primitive value ([`ScalarValue`]) which is one of +//! * A string +//! * A 64 bit floating point number +//! * A signed 64 bit integer +//! * An unsigned 64 bit integer +//! * A boolean +//! * A counter object (a 64 bit integer which merges by addition) +//! ([`ScalarValue::Counter`]) +//! * A timestamp (a 64 bit integer which is milliseconds since the unix epoch) +//! +//! All composite values have an ID ([`ObjId`]) which is created when the value +//! is inserted into the document or is the root object ID [`ROOT`]. Values in +//! the document are then referred to by the pair (`object ID`, `key`). The +//! `key` is represented by the [`Prop`] type and is either a string for a maps, +//! or an index for sequences. +//! +//! ### Conflicts +//! +//! There are some things automerge cannot merge sensibly. For example, two +//! actors concurrently setting the key "name" to different values. In this case +//! automerge will pick a winning value in a random but deterministic way, but +//! the conflicting value is still available via the [`ReadDoc::get_all`] method. +//! +//! ### Change hashes and historical values +//! +//! 
Like git, points in the history of a document are identified by hash. Unlike +//! git there can be multiple hashes representing a particular point (because +//! automerge supports concurrent changes). These hashes can be obtained using +//! either [`Automerge::get_heads`] or [`AutoCommit::get_heads`] (note these +//! methods are not part of [`ReadDoc`] because in the case of [`AutoCommit`] it +//! requires a mutable reference to the document). +//! +//! These hashes can be used to read values from the document at a particular +//! point in history using the various `*_at` methods on [`ReadDoc`] which take a +//! slice of [`ChangeHash`] as an argument. +//! +//! ### Actor IDs +//! +//! Any change to an automerge document is made by an actor, represented by an +//! [`ActorId`]. An actor ID is any random sequence of bytes but each change by +//! the same actor ID must be sequential. This often means you will want to +//! maintain at least one actor ID per device. It is fine to generate a new +//! actor ID for each change, but be aware that each actor ID takes up space in +//! a document so if you expect a document to be long lived and/or to have many +//! changes then you should try to reuse actor IDs where possible. +//! +//! ### Text Encoding +//! +//! Both [`Automerge`] and [`AutoCommit`] provide a `with_encoding` method which +//! allows you to specify the [`crate::TextEncoding`] which is used for +//! interpreting the indexes passed to methods like [`ReadDoc::list_range`] or +//! [`transaction::Transactable::splice`]. The default encoding is UTF-8, but +//! you can switch to UTF-16. +//! +//! ## Sync Protocol +//! +//! See the [`sync`] module. +//! +//! ## Serde serialization +//! +//! Sometimes you just want to get the JSON value of an automerge document. For +//! this you can use [`AutoSerde`], which implements `serde::Serialize` for an +//! automerge document. +//! +//! ## Example +//! +//! Let's create a document representing an address book. +//! +//! 
``` +//! use automerge::{ObjType, AutoCommit, transaction::Transactable, ReadDoc}; +//! +//! # fn main() -> Result<(), Box> { +//! let mut doc = AutoCommit::new(); +//! +//! // `put_object` creates a nested object in the root key/value map and +//! // returns the ID of the new object, in this case a list. +//! let contacts = doc.put_object(automerge::ROOT, "contacts", ObjType::List)?; +//! +//! // Now we can insert objects into the list +//! let alice = doc.insert_object(&contacts, 0, ObjType::Map)?; +//! +//! // Finally we can set keys in the "alice" map +//! doc.put(&alice, "name", "Alice")?; +//! doc.put(&alice, "email", "alice@example.com")?; +//! +//! // Create another contact +//! let bob = doc.insert_object(&contacts, 1, ObjType::Map)?; +//! doc.put(&bob, "name", "Bob")?; +//! doc.put(&bob, "email", "bob@example.com")?; +//! +//! // Now we save the address book, we can put this in a file +//! let data: Vec = doc.save(); +//! # Ok(()) +//! # } +//! ``` +//! +//! Now modify this document on two separate devices and merge the modifications. +//! +//! ``` +//! use std::borrow::Cow; +//! use automerge::{ObjType, AutoCommit, transaction::Transactable, ReadDoc}; +//! +//! # fn main() -> Result<(), Box> { +//! # let mut doc = AutoCommit::new(); +//! # let contacts = doc.put_object(automerge::ROOT, "contacts", ObjType::List)?; +//! # let alice = doc.insert_object(&contacts, 0, ObjType::Map)?; +//! # doc.put(&alice, "name", "Alice")?; +//! # doc.put(&alice, "email", "alice@example.com")?; +//! # let bob = doc.insert_object(&contacts, 1, ObjType::Map)?; +//! # doc.put(&bob, "name", "Bob")?; +//! # doc.put(&bob, "email", "bob@example.com")?; +//! # let saved: Vec = doc.save(); +//! +//! // Load the document on the first device and change alices email +//! let mut doc1 = AutoCommit::load(&saved)?; +//! let contacts = match doc1.get(automerge::ROOT, "contacts")? { +//! Some((automerge::Value::Object(ObjType::List), contacts)) => contacts, +//! 
_ => panic!("contacts should be a list"), +//! }; +//! let alice = match doc1.get(&contacts, 0)? { +//! Some((automerge::Value::Object(ObjType::Map), alice)) => alice, +//! _ => panic!("alice should be a map"), +//! }; +//! doc1.put(&alice, "email", "alicesnewemail@example.com")?; +//! +//! +//! // Load the document on the second device and change bobs name +//! let mut doc2 = AutoCommit::load(&saved)?; +//! let contacts = match doc2.get(automerge::ROOT, "contacts")? { +//! Some((automerge::Value::Object(ObjType::List), contacts)) => contacts, +//! _ => panic!("contacts should be a list"), +//! }; +//! let bob = match doc2.get(&contacts, 1)? { +//! Some((automerge::Value::Object(ObjType::Map), bob)) => bob, +//! _ => panic!("bob should be a map"), +//! }; +//! doc2.put(&bob, "name", "Robert")?; +//! +//! // Finally, we can merge the changes from the two devices +//! doc1.merge(&mut doc2)?; +//! let bobsname: Option = doc1.get(&bob, "name")?.map(|(v, _)| v); +//! assert_eq!(bobsname, Some(automerge::Value::Scalar(Cow::Owned("Robert".into())))); +//! +//! let alices_email: Option = doc1.get(&alice, "email")?.map(|(v, _)| v); +//! assert_eq!(alices_email, Some(automerge::Value::Scalar(Cow::Owned("alicesnewemail@example.com".into())))); +//! # Ok(()) +//! # } +//! ``` +//! 
+ #![doc( html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg", html_favicon_url = "https:///raw.githubusercontent.com/automerge/automerge-rs/main/img/favicon.ico" @@ -71,11 +258,12 @@ mod list_range; mod list_range_at; mod map_range; mod map_range_at; -mod op_observer; +pub mod op_observer; mod op_set; mod op_tree; mod parents; mod query; +mod read; mod sequence_tree; mod storage; pub mod sync; @@ -105,9 +293,12 @@ pub use op_observer::OpObserver; pub use op_observer::Patch; pub use op_observer::VecOpObserver; pub use parents::{Parent, Parents}; +pub use read::ReadDoc; +#[doc(hidden)] pub use sequence_tree::SequenceTree; pub use types::{ActorId, ChangeHash, ObjType, OpType, ParseChangeHashError, Prop, TextEncoding}; pub use value::{ScalarValue, Value}; pub use values::Values; +/// The object ID for the root map of a document pub const ROOT: ObjId = ObjId::Root; diff --git a/rust/automerge/src/list_range.rs b/rust/automerge/src/list_range.rs index ae7b2aa5..a043da72 100644 --- a/rust/automerge/src/list_range.rs +++ b/rust/automerge/src/list_range.rs @@ -3,6 +3,9 @@ use crate::{exid::ExId, Value}; use crate::{query, Automerge}; use std::ops::RangeBounds; +/// An iterator over the elements of a list object +/// +/// This is returned by the [`crate::ReadDoc::list_range`] method #[derive(Debug)] pub struct ListRange<'a, R: RangeBounds> { range: Option>, diff --git a/rust/automerge/src/list_range_at.rs b/rust/automerge/src/list_range_at.rs index 37db9677..ce8f5a46 100644 --- a/rust/automerge/src/list_range_at.rs +++ b/rust/automerge/src/list_range_at.rs @@ -3,6 +3,9 @@ use std::ops::RangeBounds; use crate::{query, Automerge}; +/// An iterator over the elements of a list object at a particular set of heads +/// +/// This is returned by the [`crate::ReadDoc::list_range_at`] method #[derive(Debug)] pub struct ListRangeAt<'a, R: RangeBounds> { range: Option>, diff --git a/rust/automerge/src/map_range.rs 
b/rust/automerge/src/map_range.rs index 8029b84d..ad33ebf5 100644 --- a/rust/automerge/src/map_range.rs +++ b/rust/automerge/src/map_range.rs @@ -3,6 +3,9 @@ use std::ops::RangeBounds; use crate::{query, Automerge}; +/// An iterator over the keys and values of a map object +/// +/// This is returned by the [`crate::ReadDoc::map_range`] method #[derive(Debug)] pub struct MapRange<'a, R: RangeBounds> { range: Option>, diff --git a/rust/automerge/src/map_range_at.rs b/rust/automerge/src/map_range_at.rs index b2eb3fb2..8d008e89 100644 --- a/rust/automerge/src/map_range_at.rs +++ b/rust/automerge/src/map_range_at.rs @@ -3,6 +3,9 @@ use std::ops::RangeBounds; use crate::{query, Automerge}; +/// An iterator over the keys and values of a map object as at a particuar heads +/// +/// This is returned by the [`crate::ReadDoc::map_range_at`] method #[derive(Debug)] pub struct MapRangeAt<'a, R: RangeBounds> { range: Option>, diff --git a/rust/automerge/src/op_observer.rs b/rust/automerge/src/op_observer.rs index 0d082219..5b33c21f 100644 --- a/rust/automerge/src/op_observer.rs +++ b/rust/automerge/src/op_observer.rs @@ -1,8 +1,11 @@ use crate::exid::ExId; -use crate::Automerge; use crate::Prop; +use crate::ReadDoc; use crate::Value; +mod compose; +pub use compose::compose; + /// An observer of operations applied to the document. pub trait OpObserver { /// A new value has been inserted into the given object. @@ -12,15 +15,16 @@ pub trait OpObserver { /// - `index`: the index the new value has been inserted at. /// - `tagged_value`: the value that has been inserted and the id of the operation that did the /// insert. 
- fn insert( + fn insert( &mut self, - doc: &Automerge, + doc: &R, objid: ExId, index: usize, tagged_value: (Value<'_>, ExId), ); - fn splice_text(&mut self, _doc: &Automerge, _objid: ExId, _index: usize, _value: &str); + /// Some text has been spliced into a text object + fn splice_text(&mut self, _doc: &R, _objid: ExId, _index: usize, _value: &str); /// A new value has been put into the given object. /// @@ -30,9 +34,9 @@ pub trait OpObserver { /// - `tagged_value`: the value that has been put into the object and the id of the operation /// that did the put. /// - `conflict`: whether this put conflicts with other operations. - fn put( + fn put( &mut self, - doc: &Automerge, + doc: &R, objid: ExId, prop: Prop, tagged_value: (Value<'_>, ExId), @@ -49,9 +53,9 @@ pub trait OpObserver { /// - `tagged_value`: the value that has been put into the object and the id of the operation /// that did the put. /// - `conflict`: whether this put conflicts with other operations. - fn expose( + fn expose( &mut self, - doc: &Automerge, + doc: &R, objid: ExId, prop: Prop, tagged_value: (Value<'_>, ExId), @@ -63,7 +67,7 @@ pub trait OpObserver { /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information /// - `objid`: the object that has been put into. /// - `prop`: the prop that the value as been put at. - fn flag_conflict(&mut self, _doc: &Automerge, _objid: ExId, _prop: Prop) {} + fn flag_conflict(&mut self, _doc: &R, _objid: ExId, _prop: Prop) {} /// A counter has been incremented. /// @@ -72,14 +76,20 @@ pub trait OpObserver { /// - `prop`: they prop that the chounter is at. /// - `tagged_value`: the amount the counter has been incremented by, and the the id of the /// increment operation. - fn increment(&mut self, doc: &Automerge, objid: ExId, prop: Prop, tagged_value: (i64, ExId)); + fn increment( + &mut self, + doc: &R, + objid: ExId, + prop: Prop, + tagged_value: (i64, ExId), + ); /// A map value has beeen deleted. 
/// /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information /// - `objid`: the object that has been deleted in. /// - `prop`: the prop to be deleted - fn delete(&mut self, doc: &Automerge, objid: ExId, prop: Prop) { + fn delete(&mut self, doc: &R, objid: ExId, prop: Prop) { match prop { Prop::Map(k) => self.delete_map(doc, objid, &k), Prop::Seq(i) => self.delete_seq(doc, objid, i, 1), @@ -91,7 +101,7 @@ pub trait OpObserver { /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information /// - `objid`: the object that has been deleted in. /// - `key`: the map key to be deleted - fn delete_map(&mut self, doc: &Automerge, objid: ExId, key: &str); + fn delete_map(&mut self, doc: &R, objid: ExId, key: &str); /// A one or more list values have beeen deleted. /// @@ -99,21 +109,7 @@ pub trait OpObserver { /// - `objid`: the object that has been deleted in. /// - `index`: the index of the deletion /// - `num`: the number of sequential elements deleted - fn delete_seq(&mut self, doc: &Automerge, objid: ExId, index: usize, num: usize); - - /// Branch of a new op_observer later to be merged - /// - /// Called by AutoCommit when creating a new transaction. Observer branch - /// will be merged on `commit()` or thrown away on `rollback()` - /// - fn branch(&self) -> Self; - - /// Merge observed information from a transaction. - /// - /// Called by AutoCommit on `commit()` - /// - /// - `other`: Another Op Observer of the same type - fn merge(&mut self, other: &Self); + fn delete_seq(&mut self, doc: &R, objid: ExId, index: usize, num: usize); /// Whether to call sequence methods or `splice_text` when encountering changes in text /// @@ -123,21 +119,41 @@ pub trait OpObserver { } } +/// An observer which can be branched +/// +/// This is used when observing operations in a transaction. 
In this case `branch` will be called +/// at the beginning of the transaction to return a new observer and then `merge` will be called +/// with the branched observer as `other` when the transaction is comitted. +pub trait BranchableObserver { + /// Branch of a new op_observer later to be merged + /// + /// Called when creating a new transaction. Observer branch will be merged on `commit()` or + /// thrown away on `rollback()` + fn branch(&self) -> Self; + + /// Merge observed information from a transaction. + /// + /// Called by AutoCommit on `commit()` + /// + /// - `other`: Another Op Observer of the same type + fn merge(&mut self, other: &Self); +} + impl OpObserver for () { - fn insert( + fn insert( &mut self, - _doc: &Automerge, + _doc: &R, _objid: ExId, _index: usize, _tagged_value: (Value<'_>, ExId), ) { } - fn splice_text(&mut self, _doc: &Automerge, _objid: ExId, _index: usize, _value: &str) {} + fn splice_text(&mut self, _doc: &R, _objid: ExId, _index: usize, _value: &str) {} - fn put( + fn put( &mut self, - _doc: &Automerge, + _doc: &R, _objid: ExId, _prop: Prop, _tagged_value: (Value<'_>, ExId), @@ -145,9 +161,9 @@ impl OpObserver for () { ) { } - fn expose( + fn expose( &mut self, - _doc: &Automerge, + _doc: &R, _objid: ExId, _prop: Prop, _tagged_value: (Value<'_>, ExId), @@ -155,21 +171,22 @@ impl OpObserver for () { ) { } - fn increment( + fn increment( &mut self, - _doc: &Automerge, + _doc: &R, _objid: ExId, _prop: Prop, _tagged_value: (i64, ExId), ) { } - fn delete_map(&mut self, _doc: &Automerge, _objid: ExId, _key: &str) {} + fn delete_map(&mut self, _doc: &R, _objid: ExId, _key: &str) {} - fn delete_seq(&mut self, _doc: &Automerge, _objid: ExId, _index: usize, _num: usize) {} + fn delete_seq(&mut self, _doc: &R, _objid: ExId, _index: usize, _num: usize) {} +} +impl BranchableObserver for () { fn merge(&mut self, _other: &Self) {} - fn branch(&self) -> Self {} } @@ -188,8 +205,14 @@ impl VecOpObserver { } impl OpObserver for VecOpObserver { - fn 
insert(&mut self, doc: &Automerge, obj: ExId, index: usize, (value, id): (Value<'_>, ExId)) { - if let Ok(mut p) = doc.parents(&obj) { + fn insert( + &mut self, + doc: &R, + obj: ExId, + index: usize, + (value, id): (Value<'_>, ExId), + ) { + if let Ok(p) = doc.parents(&obj) { self.patches.push(Patch::Insert { obj, path: p.path(), @@ -199,8 +222,8 @@ impl OpObserver for VecOpObserver { } } - fn splice_text(&mut self, doc: &Automerge, obj: ExId, index: usize, value: &str) { - if let Ok(mut p) = doc.parents(&obj) { + fn splice_text(&mut self, doc: &R, obj: ExId, index: usize, value: &str) { + if let Ok(p) = doc.parents(&obj) { self.patches.push(Patch::Splice { obj, path: p.path(), @@ -210,15 +233,15 @@ impl OpObserver for VecOpObserver { } } - fn put( + fn put( &mut self, - doc: &Automerge, + doc: &R, obj: ExId, prop: Prop, (value, id): (Value<'_>, ExId), conflict: bool, ) { - if let Ok(mut p) = doc.parents(&obj) { + if let Ok(p) = doc.parents(&obj) { self.patches.push(Patch::Put { obj, path: p.path(), @@ -229,15 +252,15 @@ impl OpObserver for VecOpObserver { } } - fn expose( + fn expose( &mut self, - doc: &Automerge, + doc: &R, obj: ExId, prop: Prop, (value, id): (Value<'_>, ExId), conflict: bool, ) { - if let Ok(mut p) = doc.parents(&obj) { + if let Ok(p) = doc.parents(&obj) { self.patches.push(Patch::Expose { obj, path: p.path(), @@ -248,8 +271,8 @@ impl OpObserver for VecOpObserver { } } - fn increment(&mut self, doc: &Automerge, obj: ExId, prop: Prop, tagged_value: (i64, ExId)) { - if let Ok(mut p) = doc.parents(&obj) { + fn increment(&mut self, doc: &R, obj: ExId, prop: Prop, tagged_value: (i64, ExId)) { + if let Ok(p) = doc.parents(&obj) { self.patches.push(Patch::Increment { obj, path: p.path(), @@ -259,8 +282,8 @@ impl OpObserver for VecOpObserver { } } - fn delete_map(&mut self, doc: &Automerge, obj: ExId, key: &str) { - if let Ok(mut p) = doc.parents(&obj) { + fn delete_map(&mut self, doc: &R, obj: ExId, key: &str) { + if let Ok(p) = doc.parents(&obj) { 
self.patches.push(Patch::Delete { obj, path: p.path(), @@ -270,8 +293,8 @@ impl OpObserver for VecOpObserver { } } - fn delete_seq(&mut self, doc: &Automerge, obj: ExId, index: usize, num: usize) { - if let Ok(mut p) = doc.parents(&obj) { + fn delete_seq(&mut self, doc: &R, obj: ExId, index: usize, num: usize) { + if let Ok(p) = doc.parents(&obj) { self.patches.push(Patch::Delete { obj, path: p.path(), @@ -280,7 +303,9 @@ impl OpObserver for VecOpObserver { }) } } +} +impl BranchableObserver for VecOpObserver { fn merge(&mut self, other: &Self) { self.patches.extend_from_slice(other.patches.as_slice()) } diff --git a/rust/automerge/src/op_observer/compose.rs b/rust/automerge/src/op_observer/compose.rs new file mode 100644 index 00000000..92fe3b1e --- /dev/null +++ b/rust/automerge/src/op_observer/compose.rs @@ -0,0 +1,102 @@ +use super::OpObserver; + +pub fn compose<'a, O1: OpObserver, O2: OpObserver>( + obs1: &'a mut O1, + obs2: &'a mut O2, +) -> impl OpObserver + 'a { + ComposeObservers { obs1, obs2 } +} + +struct ComposeObservers<'a, O1: OpObserver, O2: OpObserver> { + obs1: &'a mut O1, + obs2: &'a mut O2, +} + +impl<'a, O1: OpObserver, O2: OpObserver> OpObserver for ComposeObservers<'a, O1, O2> { + fn insert( + &mut self, + doc: &R, + objid: crate::ObjId, + index: usize, + tagged_value: (crate::Value<'_>, crate::ObjId), + ) { + self.obs1 + .insert(doc, objid.clone(), index, tagged_value.clone()); + self.obs2.insert(doc, objid, index, tagged_value); + } + + fn splice_text( + &mut self, + doc: &R, + objid: crate::ObjId, + index: usize, + value: &str, + ) { + self.obs1.splice_text(doc, objid.clone(), index, value); + self.obs2.splice_text(doc, objid, index, value); + } + + fn put( + &mut self, + doc: &R, + objid: crate::ObjId, + prop: crate::Prop, + tagged_value: (crate::Value<'_>, crate::ObjId), + conflict: bool, + ) { + self.obs1.put( + doc, + objid.clone(), + prop.clone(), + tagged_value.clone(), + conflict, + ); + self.obs2.put(doc, objid, prop, tagged_value, 
conflict); + } + + fn expose( + &mut self, + doc: &R, + objid: crate::ObjId, + prop: crate::Prop, + tagged_value: (crate::Value<'_>, crate::ObjId), + conflict: bool, + ) { + self.obs1.expose( + doc, + objid.clone(), + prop.clone(), + tagged_value.clone(), + conflict, + ); + self.obs2.expose(doc, objid, prop, tagged_value, conflict); + } + + fn increment( + &mut self, + doc: &R, + objid: crate::ObjId, + prop: crate::Prop, + tagged_value: (i64, crate::ObjId), + ) { + self.obs1 + .increment(doc, objid.clone(), prop.clone(), tagged_value.clone()); + self.obs2.increment(doc, objid, prop, tagged_value); + } + + fn delete_map(&mut self, doc: &R, objid: crate::ObjId, key: &str) { + self.obs1.delete_map(doc, objid.clone(), key); + self.obs2.delete_map(doc, objid, key); + } + + fn delete_seq( + &mut self, + doc: &R, + objid: crate::ObjId, + index: usize, + num: usize, + ) { + self.obs2.delete_seq(doc, objid.clone(), index, num); + self.obs2.delete_seq(doc, objid, index, num); + } +} diff --git a/rust/automerge/src/parents.rs b/rust/automerge/src/parents.rs index 76c4bba1..e1c5cc66 100644 --- a/rust/automerge/src/parents.rs +++ b/rust/automerge/src/parents.rs @@ -3,6 +3,14 @@ use crate::op_set::OpSet; use crate::types::{ListEncoding, ObjId}; use crate::{exid::ExId, Prop}; +/// An iterator over the "parents" of an object +/// +/// The "parent" of an object in this context is the ([`ExId`], [`Prop`]) pair which specifies the +/// location of this object in the composite object which contains it. Each element in the iterator +/// is a [`Parent`], yielded in reverse order. This means that once the iterator returns `None` you +/// have reached the root of the document. 
+/// +/// This is returned by [`crate::ReadDoc::parents`] #[derive(Debug)] pub struct Parents<'a> { pub(crate) obj: ObjId, @@ -10,9 +18,10 @@ pub struct Parents<'a> { } impl<'a> Parents<'a> { - // returns the path to the object - // works even if the object or a parent has been deleted - pub fn path(&mut self) -> Vec<(ExId, Prop)> { + /// Return the path this `Parents` represents + /// + /// This is _not_ in reverse order. + pub fn path(self) -> Vec<(ExId, Prop)> { let mut path = self .map(|Parent { obj, prop, .. }| (obj, prop)) .collect::>(); @@ -20,10 +29,8 @@ impl<'a> Parents<'a> { path } - // returns the path to the object - // if the object or one of its parents has been deleted or conflicted out - // returns none - pub fn visible_path(&mut self) -> Option> { + /// Like `path` but returns `None` if the target is not visible + pub fn visible_path(self) -> Option> { let mut path = Vec::new(); for Parent { obj, prop, visible } in self { if !visible { @@ -59,17 +66,25 @@ impl<'a> Iterator for Parents<'a> { } } +/// A component of a path to an object #[derive(Debug, PartialEq, Eq)] pub struct Parent { + /// The object ID this component refers to pub obj: ExId, + /// The property within `obj` this component refers to pub prop: Prop, + /// Whether this component is "visible" + /// + /// An "invisible" component is one where the property is hidden, either because it has been + /// deleted or because there is a conflict on this (object, property) pair and this value does + /// not win the conflict. 
pub visible: bool, } #[cfg(test)] mod tests { use super::Parent; - use crate::{transaction::Transactable, Prop}; + use crate::{transaction::Transactable, Prop, ReadDoc}; #[test] fn test_invisible_parents() { diff --git a/rust/automerge/src/read.rs b/rust/automerge/src/read.rs new file mode 100644 index 00000000..6d479718 --- /dev/null +++ b/rust/automerge/src/read.rs @@ -0,0 +1,199 @@ +use crate::{ + error::AutomergeError, exid::ExId, keys::Keys, keys_at::KeysAt, list_range::ListRange, + list_range_at::ListRangeAt, map_range::MapRange, map_range_at::MapRangeAt, parents::Parents, + values::Values, Change, ChangeHash, ObjType, Prop, Value, +}; + +use std::ops::RangeBounds; + +/// Methods for reading values from an automerge document +/// +/// Many of the methods on this trait have an alternate `*_at` version which +/// takes an additional argument of `&[ChangeHash]`. This allows you to retrieve +/// the value at a particular point in the document history identified by the +/// given change hashes. +pub trait ReadDoc { + /// Get the parents of an object in the document tree. + /// + /// See the documentation for [`Parents`] for more details. + /// + /// ### Errors + /// + /// Returns an error when the id given is not the id of an object in this document. + /// This function does not get the parents of scalar values contained within objects. + /// + /// ### Experimental + /// + /// This function may in future be changed to allow getting the parents from the id of a scalar + /// value. + fn parents>(&self, obj: O) -> Result, AutomergeError>; + + /// Get the path to an object + /// + /// "path" here means the sequence of `(object Id, key)` pairs which leads + /// to the object in question. + /// + /// ### Errors + /// + /// * If the object ID `obj` is not in the document + fn path_to_object>(&self, obj: O) -> Result, AutomergeError>; + + /// Get the keys of the object `obj`. + /// + /// For a map this returns the keys of the map. 
+ /// For a list this returns the element ids (opids) encoded as strings. + fn keys>(&self, obj: O) -> Keys<'_, '_>; + + /// Get the keys of the object `obj` as at `heads` + /// + /// See [`Self::keys`] + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_>; + + /// Iterate over the keys and values of the map `obj` in the given range. + /// + /// If the object correspoding to `obj` is a list then this will return an empty iterator + /// + /// The returned iterator yields `(key, value, exid)` tuples, where the + /// third element is the ID of the operation which created the value. + fn map_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> MapRange<'_, R>; + + /// Iterate over the keys and values of the map `obj` in the given range as + /// at `heads` + /// + /// If the object correspoding to `obj` is a list then this will return an empty iterator + /// + /// The returned iterator yields `(key, value, exid)` tuples, where the + /// third element is the ID of the operation which created the value. + /// + /// See [`Self::map_range`] + fn map_range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> MapRangeAt<'_, R>; + + /// Iterate over the indexes and values of the list or text `obj` in the given range. + /// + /// The reuturned iterator yields `(index, value, exid)` tuples, where the third + /// element is the ID of the operation which created the value. + fn list_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> ListRange<'_, R>; + + /// Iterate over the indexes and values of the list or text `obj` in the given range as at `heads` + /// + /// The returned iterator yields `(index, value, exid)` tuples, where the third + /// element is the ID of the operation which created the value. 
+ /// + /// See [`Self::list_range`] + fn list_range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> ListRangeAt<'_, R>; + + /// Iterate over the values in a map, list, or text object + /// + /// The returned iterator yields `(value, exid)` tuples, where the second element + /// is the ID of the operation which created the value. + fn values>(&self, obj: O) -> Values<'_>; + + /// Iterate over the values in a map, list, or text object as at `heads` + /// + /// The returned iterator yields `(value, exid)` tuples, where the second element + /// is the ID of the operation which created the value. + /// + /// See [`Self::values`] + fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_>; + + /// Get the length of the given object. + /// + /// If the given object is not in this document this method will return `0` + fn length>(&self, obj: O) -> usize; + + /// Get the length of the given object as at `heads` + /// + /// If the given object is not in this document this method will return `0` + /// + /// See [`Self::length`] + fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize; + + /// Get the type of this object, if it is an object. + fn object_type>(&self, obj: O) -> Result; + + /// Get the string represented by the given text object. + fn text>(&self, obj: O) -> Result; + + /// Get the string represented by the given text object as at `heads`, see + /// [`Self::text`] + fn text_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result; + + /// Get a value out of the document. + /// + /// This returns a tuple of `(value, object ID)`. This is for two reasons: + /// + /// 1. If `value` is an object (represented by `Value::Object`) then the ID + /// is the ID of that object. This can then be used to retrieve nested + /// values from the document. + /// 2. Even if `value` is a scalar, the ID represents the operation which + /// created the value. 
This is useful if there are conflicting values for + /// this key as each value is tagged with the ID. + /// + /// In the case of a key which has conflicting values, this method will + /// return a single arbitrarily chosen value. This value will be chosen + /// deterministically on all nodes. If you want to get all the values for a + /// key use [`Self::get_all`]. + fn get, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, ExId)>, AutomergeError>; + + /// Get the value of the given key as at `heads`, see `[Self::get]` + fn get_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, ExId)>, AutomergeError>; + + /// Get all conflicting values out of the document at this prop that conflict. + /// + /// If there are multiple conflicting values for a given key this method + /// will return all of them, with each value tagged by the ID of the + /// operation which created it. + fn get_all, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, ExId)>, AutomergeError>; + + /// Get all possibly conflicting values for a key as at `heads` + /// + /// See `[Self::get_all]` + fn get_all_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, ExId)>, AutomergeError>; + + /// Get the hashes of the changes in this document that aren't transitive dependencies of the + /// given `heads`. + fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec; + + /// Get a change by its hash. + fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change>; +} diff --git a/rust/automerge/src/sync.rs b/rust/automerge/src/sync.rs index 1545f954..5d71d989 100644 --- a/rust/automerge/src/sync.rs +++ b/rust/automerge/src/sync.rs @@ -1,10 +1,79 @@ +//! # Sync Protocol +//! +//! The sync protocol is based on this paper: +//! , it assumes a reliable in-order stream +//! between two peers who are synchronizing a document. +//! +//! Each peer maintains a [`State`] for each peer they are synchronizing with. +//! 
This state tracks things like what the heads of the other peer are and +//! whether there are in-flight messages. Anything which implements [`SyncDoc`] +//! can take part in the sync protocol. The flow goes something like this: +//! +//! * The initiating peer creates an empty [`State`] and then calls +//! [`SyncDoc::generate_sync_message`] to generate new sync message and sends +//! it to the receiving peer. +//! * The receiving peer receives a message from the initiator, creates a new +//! [`State`], and calls [`SyncDoc::receive_sync_message`] on it's view of the +//! document +//! * The receiving peer then calls [`SyncDoc::generate_sync_message`] to generate +//! a new sync message and send it back to the initiator +//! * From this point on each peer operates in a loop, receiving a sync message +//! from the other peer and then generating a new message to send back. +//! +//! ## Example +//! +//! ``` +//! use automerge::{transaction::Transactable, sync::{self, SyncDoc}, ReadDoc}; +//! # fn main() -> Result<(), automerge::AutomergeError> { +//! // Create a document on peer1 +//! let mut peer1 = automerge::AutoCommit::new(); +//! peer1.put(automerge::ROOT, "key", "value")?; +//! +//! // Create a state to track our sync with peer2 +//! let mut peer1_state = sync::State::new(); +//! // Generate the initial message to send to peer2, unwrap for brevity +//! let message1to2 = peer1.sync().generate_sync_message(&mut peer1_state).unwrap(); +//! +//! // We receive the message on peer2. We don't have a document at all yet +//! // so we create one +//! let mut peer2 = automerge::AutoCommit::new(); +//! // We don't have a state for peer1 (it's a new connection), so we create one +//! let mut peer2_state = sync::State::new(); +//! // Now receive the message from peer 1 +//! peer2.sync().receive_sync_message(&mut peer2_state, message1to2)?; +//! +//! // Now we loop, sending messages from one to two and two to one until +//! // neither has anything new to send +//! +//! 
loop { +//! let two_to_one = peer2.sync().generate_sync_message(&mut peer2_state); +//! if let Some(message) = two_to_one.as_ref() { +//! println!("two to one"); +//! peer1.sync().receive_sync_message(&mut peer1_state, message.clone())?; +//! } +//! let one_to_two = peer1.sync().generate_sync_message(&mut peer1_state); +//! if let Some(message) = one_to_two.as_ref() { +//! println!("one to two"); +//! peer2.sync().receive_sync_message(&mut peer2_state, message.clone())?; +//! } +//! if two_to_one.is_none() && one_to_two.is_none() { +//! break; +//! } +//! } +//! +//! assert_eq!(peer2.get(automerge::ROOT, "key")?.unwrap().0.to_str(), Some("value")); +//! +//! # Ok(()) +//! # } +//! ``` + use itertools::Itertools; use serde::ser::SerializeMap; use std::collections::{HashMap, HashSet}; use crate::{ storage::{parse, Change as StoredChange, ReadChangeOpError}, - Automerge, AutomergeError, Change, ChangeHash, OpObserver, + Automerge, AutomergeError, Change, ChangeHash, OpObserver, ReadDoc, }; mod bloom; @@ -14,10 +83,38 @@ pub use bloom::{BloomFilter, DecodeError as DecodeBloomError}; pub use state::DecodeError as DecodeStateError; pub use state::{Have, State}; +/// A document which can take part in the sync protocol +/// +/// See the [module level documentation](crate::sync) for more details. +pub trait SyncDoc { + /// Generate a sync message for the remote peer represented by `sync_state` + /// + /// If this returns `None` then there are no new messages to send, either because we are + /// waiting for an acknolwedgement of an in-flight message, or because the remote is up to + /// date. 
+ fn generate_sync_message(&self, sync_state: &mut State) -> Option; + + /// Apply a received sync message to this document and `sync_state` + fn receive_sync_message( + &mut self, + sync_state: &mut State, + message: Message, + ) -> Result<(), AutomergeError>; + + /// Apply a received sync message to this document and `sync_state`, observing any changes with + /// `op_observer` + fn receive_sync_message_with( + &mut self, + sync_state: &mut State, + message: Message, + op_observer: &mut Obs, + ) -> Result<(), AutomergeError>; +} + const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identification -impl Automerge { - pub fn generate_sync_message(&self, sync_state: &mut State) -> Option { +impl SyncDoc for Automerge { + fn generate_sync_message(&self, sync_state: &mut State) -> Option { let our_heads = self.get_heads(); let our_need = self.get_missing_deps(sync_state.their_heads.as_ref().unwrap_or(&vec![])); @@ -106,80 +203,25 @@ impl Automerge { Some(sync_message) } - pub fn receive_sync_message( + fn receive_sync_message( &mut self, sync_state: &mut State, message: Message, ) -> Result<(), AutomergeError> { - self.receive_sync_message_with::<()>(sync_state, message, None) + self.do_receive_sync_message::<()>(sync_state, message, None) } - pub fn receive_sync_message_with( + fn receive_sync_message_with( &mut self, sync_state: &mut State, message: Message, - op_observer: Option<&mut Obs>, + op_observer: &mut Obs, ) -> Result<(), AutomergeError> { - let before_heads = self.get_heads(); - - let Message { - heads: message_heads, - changes: message_changes, - need: message_need, - have: message_have, - } = message; - - let changes_is_empty = message_changes.is_empty(); - if !changes_is_empty { - self.apply_changes_with(message_changes, op_observer)?; - sync_state.shared_heads = advance_heads( - &before_heads.iter().collect(), - &self.get_heads().into_iter().collect(), - &sync_state.shared_heads, - ); - } - - // trim down the sent hashes to those 
that we know they haven't seen - self.filter_changes(&message_heads, &mut sync_state.sent_hashes)?; - - if changes_is_empty && message_heads == before_heads { - sync_state.last_sent_heads = message_heads.clone(); - } - - if sync_state.sent_hashes.is_empty() { - sync_state.in_flight = false; - } - - let known_heads = message_heads - .iter() - .filter(|head| self.get_change_by_hash(head).is_some()) - .collect::>(); - if known_heads.len() == message_heads.len() { - sync_state.shared_heads = message_heads.clone(); - sync_state.in_flight = false; - // If the remote peer has lost all its data, reset our state to perform a full resync - if message_heads.is_empty() { - sync_state.last_sent_heads = Default::default(); - sync_state.sent_hashes = Default::default(); - } - } else { - sync_state.shared_heads = sync_state - .shared_heads - .iter() - .chain(known_heads) - .copied() - .unique() - .sorted() - .collect::>(); - } - - sync_state.their_have = Some(message_have); - sync_state.their_heads = Some(message_heads); - sync_state.their_need = Some(message_need); - - Ok(()) + self.do_receive_sync_message(sync_state, message, Some(op_observer)) } +} +impl Automerge { fn make_bloom_filter(&self, last_sync: Vec) -> Have { let new_changes = self .get_changes(&last_sync) @@ -261,6 +303,72 @@ impl Automerge { Ok(changes_to_send) } } + + fn do_receive_sync_message( + &mut self, + sync_state: &mut State, + message: Message, + op_observer: Option<&mut Obs>, + ) -> Result<(), AutomergeError> { + let before_heads = self.get_heads(); + + let Message { + heads: message_heads, + changes: message_changes, + need: message_need, + have: message_have, + } = message; + + let changes_is_empty = message_changes.is_empty(); + if !changes_is_empty { + self.apply_changes_with(message_changes, op_observer)?; + sync_state.shared_heads = advance_heads( + &before_heads.iter().collect(), + &self.get_heads().into_iter().collect(), + &sync_state.shared_heads, + ); + } + + // trim down the sent hashes to 
those that we know they haven't seen + self.filter_changes(&message_heads, &mut sync_state.sent_hashes)?; + + if changes_is_empty && message_heads == before_heads { + sync_state.last_sent_heads = message_heads.clone(); + } + + if sync_state.sent_hashes.is_empty() { + sync_state.in_flight = false; + } + + let known_heads = message_heads + .iter() + .filter(|head| self.get_change_by_hash(head).is_some()) + .collect::>(); + if known_heads.len() == message_heads.len() { + sync_state.shared_heads = message_heads.clone(); + sync_state.in_flight = false; + // If the remote peer has lost all its data, reset our state to perform a full resync + if message_heads.is_empty() { + sync_state.last_sent_heads = Default::default(); + sync_state.sent_hashes = Default::default(); + } + } else { + sync_state.shared_heads = sync_state + .shared_heads + .iter() + .chain(known_heads) + .copied() + .unique() + .sorted() + .collect::>(); + } + + sync_state.their_have = Some(message_have); + sync_state.their_heads = Some(message_heads); + sync_state.their_need = Some(message_need); + + Ok(()) + } } #[derive(Debug, thiserror::Error)] @@ -545,8 +653,8 @@ mod tests { doc.put(crate::ROOT, "key", "value").unwrap(); let mut sync_state = State::new(); - assert!(doc.generate_sync_message(&mut sync_state).is_some()); - assert!(doc.generate_sync_message(&mut sync_state).is_none()); + assert!(doc.sync().generate_sync_message(&mut sync_state).is_some()); + assert!(doc.sync().generate_sync_message(&mut sync_state).is_none()); } #[test] @@ -556,11 +664,12 @@ mod tests { let mut s1 = State::new(); let mut s2 = State::new(); let m1 = doc1 + .sync() .generate_sync_message(&mut s1) .expect("message was none"); - doc2.receive_sync_message(&mut s2, m1).unwrap(); - let m2 = doc2.generate_sync_message(&mut s2); + doc2.sync().receive_sync_message(&mut s2, m1).unwrap(); + let m2 = doc2.sync().generate_sync_message(&mut s2); assert!(m2.is_none()); } @@ -584,9 +693,11 @@ mod tests { //// both sides report what they 
have but have no shared peer state let msg1to2 = doc1 + .sync() .generate_sync_message(&mut s1) .expect("initial sync from 1 to 2 was None"); let msg2to1 = doc2 + .sync() .generate_sync_message(&mut s2) .expect("initial sync message from 2 to 1 was None"); assert_eq!(msg1to2.changes.len(), 0); @@ -595,52 +706,57 @@ mod tests { assert_eq!(msg2to1.have[0].last_sync.len(), 0); //// doc1 and doc2 receive that message and update sync state - doc1.receive_sync_message(&mut s1, msg2to1).unwrap(); - doc2.receive_sync_message(&mut s2, msg1to2).unwrap(); + doc1.sync().receive_sync_message(&mut s1, msg2to1).unwrap(); + doc2.sync().receive_sync_message(&mut s2, msg1to2).unwrap(); //// now both reply with their local changes the other lacks //// (standard warning that 1% of the time this will result in a "need" message) let msg1to2 = doc1 + .sync() .generate_sync_message(&mut s1) .expect("first reply from 1 to 2 was None"); assert_eq!(msg1to2.changes.len(), 5); let msg2to1 = doc2 + .sync() .generate_sync_message(&mut s2) .expect("first reply from 2 to 1 was None"); assert_eq!(msg2to1.changes.len(), 5); //// both should now apply the changes - doc1.receive_sync_message(&mut s1, msg2to1).unwrap(); + doc1.sync().receive_sync_message(&mut s1, msg2to1).unwrap(); assert_eq!(doc1.get_missing_deps(&[]), Vec::new()); - doc2.receive_sync_message(&mut s2, msg1to2).unwrap(); + doc2.sync().receive_sync_message(&mut s2, msg1to2).unwrap(); assert_eq!(doc2.get_missing_deps(&[]), Vec::new()); //// The response acknowledges the changes received and sends no further changes let msg1to2 = doc1 + .sync() .generate_sync_message(&mut s1) .expect("second reply from 1 to 2 was None"); assert_eq!(msg1to2.changes.len(), 0); let msg2to1 = doc2 + .sync() .generate_sync_message(&mut s2) .expect("second reply from 2 to 1 was None"); assert_eq!(msg2to1.changes.len(), 0); //// After receiving acknowledgements, their shared heads should be equal - doc1.receive_sync_message(&mut s1, msg2to1).unwrap(); - 
doc2.receive_sync_message(&mut s2, msg1to2).unwrap(); + doc1.sync().receive_sync_message(&mut s1, msg2to1).unwrap(); + doc2.sync().receive_sync_message(&mut s2, msg1to2).unwrap(); assert_eq!(s1.shared_heads, s2.shared_heads); //// We're in sync, no more messages required - assert!(doc1.generate_sync_message(&mut s1).is_none()); - assert!(doc2.generate_sync_message(&mut s2).is_none()); + assert!(doc1.sync().generate_sync_message(&mut s1).is_none()); + assert!(doc2.sync().generate_sync_message(&mut s2).is_none()); //// If we make one more change and start another sync then its lastSync should be updated doc1.put(crate::ROOT, "x", 5).unwrap(); doc1.commit(); let msg1to2 = doc1 + .sync() .generate_sync_message(&mut s1) .expect("third reply from 1 to 2 was None"); let mut expected_heads = vec![head1, head2]; @@ -782,8 +898,8 @@ mod tests { let mut iterations = 0; loop { - let a_to_b = a.generate_sync_message(a_sync_state); - let b_to_a = b.generate_sync_message(b_sync_state); + let a_to_b = a.sync().generate_sync_message(a_sync_state); + let b_to_a = b.sync().generate_sync_message(b_sync_state); if a_to_b.is_none() && b_to_a.is_none() { break; } @@ -791,10 +907,10 @@ mod tests { panic!("failed to sync in {} iterations", MAX_ITER); } if let Some(msg) = a_to_b { - b.receive_sync_message(b_sync_state, msg).unwrap() + b.sync().receive_sync_message(b_sync_state, msg).unwrap() } if let Some(msg) = b_to_a { - a.receive_sync_message(a_sync_state, msg).unwrap() + a.sync().receive_sync_message(a_sync_state, msg).unwrap() } iterations += 1; } diff --git a/rust/automerge/src/sync/state.rs b/rust/automerge/src/sync/state.rs index 00775196..354c605f 100644 --- a/rust/automerge/src/sync/state.rs +++ b/rust/automerge/src/sync/state.rs @@ -23,13 +23,23 @@ impl From for DecodeError { } /// The state of synchronisation with a peer. +/// +/// This should be persisted using [`Self::encode`] when you know you will be interacting with the +/// same peer in multiple sessions. 
[`Self::encode`] only encodes state which should be reused +/// across connections. #[derive(Debug, Clone, Default, PartialEq, Eq, Hash)] pub struct State { + /// The hashes which we know both peers have pub shared_heads: Vec, + /// The heads we last sent pub last_sent_heads: Vec, + /// The heads we last received from them pub their_heads: Option>, + /// Any specific changes they last said they needed pub their_need: Option>, + /// The bloom filters summarising what they said they have pub their_have: Option>, + /// The hashes we have sent in this session pub sent_hashes: BTreeSet, /// `generate_sync_message` should return `None` if there are no new changes to send. In diff --git a/rust/automerge/src/transaction/inner.rs b/rust/automerge/src/transaction/inner.rs index cba4e723..7e7db17d 100644 --- a/rust/automerge/src/transaction/inner.rs +++ b/rust/automerge/src/transaction/inner.rs @@ -717,7 +717,7 @@ struct SpliceArgs<'a> { #[cfg(test)] mod tests { - use crate::{transaction::Transactable, ROOT}; + use crate::{transaction::Transactable, ReadDoc, ROOT}; use super::*; diff --git a/rust/automerge/src/transaction/manual_transaction.rs b/rust/automerge/src/transaction/manual_transaction.rs index 22115aab..fa5f6340 100644 --- a/rust/automerge/src/transaction/manual_transaction.rs +++ b/rust/automerge/src/transaction/manual_transaction.rs @@ -1,7 +1,10 @@ use std::ops::RangeBounds; use crate::exid::ExId; -use crate::{Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ScalarValue, Value, Values}; +use crate::op_observer::BranchableObserver; +use crate::{ + Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ReadDoc, ScalarValue, Value, Values, +}; use crate::{AutomergeError, Keys}; use crate::{ListRange, ListRangeAt, MapRange, MapRangeAt}; @@ -49,7 +52,7 @@ impl<'a> Transaction<'a, observation::UnObserved> { } } -impl<'a, Obs: OpObserver> Transaction<'a, observation::Observed> { +impl<'a, Obs: OpObserver + BranchableObserver> Transaction<'a, 
observation::Observed> { pub fn observer(&mut self) -> &mut Obs { self.observation.as_mut().unwrap().observer() } @@ -112,95 +115,7 @@ impl<'a, Obs: observation::Observation> Transaction<'a, Obs> { } } -impl<'a, Obs: observation::Observation> Transactable for Transaction<'a, Obs> { - /// Get the number of pending operations in this transaction. - fn pending_ops(&self) -> usize { - self.inner.as_ref().unwrap().pending_ops() - } - - /// Set the value of property `P` to value `V` in object `obj`. - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - fn put, P: Into, V: Into>( - &mut self, - obj: O, - prop: P, - value: V, - ) -> Result<(), AutomergeError> { - self.do_tx(|tx, doc, obs| tx.put(doc, obs, obj.as_ref(), prop, value)) - } - - fn put_object, P: Into>( - &mut self, - obj: O, - prop: P, - value: ObjType, - ) -> Result { - self.do_tx(|tx, doc, obs| tx.put_object(doc, obs, obj.as_ref(), prop, value)) - } - - fn insert, V: Into>( - &mut self, - obj: O, - index: usize, - value: V, - ) -> Result<(), AutomergeError> { - self.do_tx(|tx, doc, obs| tx.insert(doc, obs, obj.as_ref(), index, value)) - } - - fn insert_object>( - &mut self, - obj: O, - index: usize, - value: ObjType, - ) -> Result { - self.do_tx(|tx, doc, obs| tx.insert_object(doc, obs, obj.as_ref(), index, value)) - } - - fn increment, P: Into>( - &mut self, - obj: O, - prop: P, - value: i64, - ) -> Result<(), AutomergeError> { - self.do_tx(|tx, doc, obs| tx.increment(doc, obs, obj.as_ref(), prop, value)) - } - - fn delete, P: Into>( - &mut self, - obj: O, - prop: P, - ) -> Result<(), AutomergeError> { - self.do_tx(|tx, doc, obs| tx.delete(doc, obs, obj.as_ref(), prop)) - } - - /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert - /// the new elements - fn splice, V: IntoIterator>( - &mut self, - obj: O, - pos: usize, - del: usize, - vals: V, - ) -> Result<(), AutomergeError> { - self.do_tx(|tx, doc, obs| tx.splice(doc, obs, obj.as_ref(), pos, del, vals)) - } - - fn splice_text>( - &mut self, - obj: O, - pos: usize, - del: usize, - text: &str, - ) -> Result<(), AutomergeError> { - self.do_tx(|tx, doc, obs| tx.splice_text(doc, obs, obj.as_ref(), pos, del, text)) - } - +impl<'a, Obs: observation::Observation> ReadDoc for Transaction<'a, Obs> { fn keys>(&self, obj: O) -> Keys<'_, '_> { self.doc.keys(obj) } @@ -313,6 +228,108 @@ impl<'a, Obs: observation::Observation> Transactable for Transaction<'a, Obs> { self.doc.parents(obj) } + fn path_to_object>(&self, obj: O) -> Result, AutomergeError> { + self.doc.path_to_object(obj) + } + + fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { + self.doc.get_missing_deps(heads) + } + + fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&crate::Change> { + self.doc.get_change_by_hash(hash) + } +} + +impl<'a, Obs: observation::Observation> Transactable for Transaction<'a, Obs> { + /// Get the number of pending operations in this transaction. + fn pending_ops(&self) -> usize { + self.inner.as_ref().unwrap().pending_ops() + } + + /// Set the value of property `P` to value `V` in object `obj`. 
+ /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + fn put, P: Into, V: Into>( + &mut self, + obj: O, + prop: P, + value: V, + ) -> Result<(), AutomergeError> { + self.do_tx(|tx, doc, obs| tx.put(doc, obs, obj.as_ref(), prop, value)) + } + + fn put_object, P: Into>( + &mut self, + obj: O, + prop: P, + value: ObjType, + ) -> Result { + self.do_tx(|tx, doc, obs| tx.put_object(doc, obs, obj.as_ref(), prop, value)) + } + + fn insert, V: Into>( + &mut self, + obj: O, + index: usize, + value: V, + ) -> Result<(), AutomergeError> { + self.do_tx(|tx, doc, obs| tx.insert(doc, obs, obj.as_ref(), index, value)) + } + + fn insert_object>( + &mut self, + obj: O, + index: usize, + value: ObjType, + ) -> Result { + self.do_tx(|tx, doc, obs| tx.insert_object(doc, obs, obj.as_ref(), index, value)) + } + + fn increment, P: Into>( + &mut self, + obj: O, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + self.do_tx(|tx, doc, obs| tx.increment(doc, obs, obj.as_ref(), prop, value)) + } + + fn delete, P: Into>( + &mut self, + obj: O, + prop: P, + ) -> Result<(), AutomergeError> { + self.do_tx(|tx, doc, obs| tx.delete(doc, obs, obj.as_ref(), prop)) + } + + /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert + /// the new elements + fn splice, V: IntoIterator>( + &mut self, + obj: O, + pos: usize, + del: usize, + vals: V, + ) -> Result<(), AutomergeError> { + self.do_tx(|tx, doc, obs| tx.splice(doc, obs, obj.as_ref(), pos, del, vals)) + } + + fn splice_text>( + &mut self, + obj: O, + pos: usize, + del: usize, + text: &str, + ) -> Result<(), AutomergeError> { + self.do_tx(|tx, doc, obs| tx.splice_text(doc, obs, obj.as_ref(), pos, del, text)) + } + fn base_heads(&self) -> Vec { self.doc.get_heads() } diff --git a/rust/automerge/src/transaction/observation.rs b/rust/automerge/src/transaction/observation.rs index 974004cf..53723711 100644 --- a/rust/automerge/src/transaction/observation.rs +++ b/rust/automerge/src/transaction/observation.rs @@ -1,15 +1,17 @@ //! This module is essentially a type level Option. It is used in sitations where we know at //! compile time whether an `OpObserver` is available to track changes in a transaction. -use crate::{ChangeHash, OpObserver}; +use crate::{op_observer::BranchableObserver, ChangeHash, OpObserver}; mod private { + use crate::op_observer::BranchableObserver; + pub trait Sealed {} - impl Sealed for super::Observed {} + impl Sealed for super::Observed {} impl Sealed for super::UnObserved {} } pub trait Observation: private::Sealed { - type Obs: OpObserver; + type Obs: OpObserver + BranchableObserver; type CommitResult; fn observer(&mut self) -> Option<&mut Self::Obs>; @@ -19,9 +21,9 @@ pub trait Observation: private::Sealed { } #[derive(Clone, Debug)] -pub struct Observed(Obs); +pub struct Observed(Obs); -impl Observed { +impl Observed { pub(crate) fn new(o: O) -> Self { Self(o) } @@ -31,7 +33,7 @@ impl Observed { } } -impl Observation for Observed { +impl Observation for Observed { type Obs = Obs; type CommitResult = (Obs, Option); fn observer(&mut self) -> Option<&mut Self::Obs> { diff --git a/rust/automerge/src/transaction/transactable.rs 
b/rust/automerge/src/transaction/transactable.rs index 7f38edbe..05c48c79 100644 --- a/rust/automerge/src/transaction/transactable.rs +++ b/rust/automerge/src/transaction/transactable.rs @@ -1,13 +1,8 @@ -use std::ops::RangeBounds; - use crate::exid::ExId; -use crate::{ - AutomergeError, ChangeHash, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, - ObjType, Parents, Prop, ScalarValue, Value, Values, -}; +use crate::{AutomergeError, ChangeHash, ObjType, Prop, ReadDoc, ScalarValue}; /// A way of mutating a document within a single change. -pub trait Transactable { +pub trait Transactable: ReadDoc { /// Get the number of pending operations in this transaction. fn pending_ops(&self) -> usize; @@ -93,106 +88,6 @@ pub trait Transactable { text: &str, ) -> Result<(), AutomergeError>; - /// Get the keys of the given object, it should be a map. - fn keys>(&self, obj: O) -> Keys<'_, '_>; - - /// Get the keys of the given object at a point in history. - fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_>; - - fn map_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> MapRange<'_, R>; - - fn map_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> MapRangeAt<'_, R>; - - fn list_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> ListRange<'_, R>; - - fn list_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> ListRangeAt<'_, R>; - - fn values>(&self, obj: O) -> Values<'_>; - - fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_>; - - /// Get the length of the given object. - fn length>(&self, obj: O) -> usize; - - /// Get the length of the given object at a point in history. - fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize; - - /// Get type for object - fn object_type>(&self, obj: O) -> Result; - - /// Get the string that this text object represents. 
- fn text>(&self, obj: O) -> Result; - - /// Get the string that this text object represents at a point in history. - fn text_at>( - &self, - obj: O, - heads: &[ChangeHash], - ) -> Result; - - /// Get the value at this prop in the object. - fn get, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError>; - - /// Get the value at this prop in the object at a point in history. - fn get_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError>; - - fn get_all, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError>; - - fn get_all_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError>; - - /// Get the parents of an object in the document tree. - /// - /// ### Errors - /// - /// Returns an error when the id given is not the id of an object in this document. - /// This function does not get the parents of scalar values contained within objects. - /// - /// ### Experimental - /// - /// This function may in future be changed to allow getting the parents from the id of a scalar - /// value. 
- fn parents>(&self, obj: O) -> Result, AutomergeError>; - - fn path_to_object>(&self, obj: O) -> Result, AutomergeError> { - Ok(self.parents(obj.as_ref().clone())?.path()) - } - /// The heads this transaction will be based on fn base_heads(&self) -> Vec; } diff --git a/rust/automerge/src/types.rs b/rust/automerge/src/types.rs index 7bbf4353..870569e9 100644 --- a/rust/automerge/src/types.rs +++ b/rust/automerge/src/types.rs @@ -143,12 +143,17 @@ impl fmt::Display for ActorId { } } +/// The type of an object #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Copy, Hash)] #[serde(rename_all = "camelCase", untagged)] pub enum ObjType { + /// A map Map, + /// Retained for backwards compatibility, tables are identical to maps Table, + /// A sequence of arbitrary values List, + /// A sequence of characters Text, } @@ -378,9 +383,15 @@ pub(crate) enum Key { Seq(ElemId), } +/// A property of an object +/// +/// This is either a string representing a property in a map, or an integer +/// which is the index into a sequence #[derive(Debug, PartialEq, PartialOrd, Eq, Ord, Clone)] pub enum Prop { + /// A property in a map Map(String), + /// An index into a sequence Seq(usize), } @@ -454,9 +465,17 @@ impl ObjId { } } +/// How indexes into text sequeces are calculated +/// +/// Automerge text objects are internally sequences of utf8 characters. This +/// means that in environments (such as javascript) which use a different +/// encoding the indexes into the text sequence will be different. This enum +/// represents the different ways indexes can be calculated. 
#[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum TextEncoding { + /// The indexes are calculated using the utf8 encoding Utf8, + /// The indexes are calculated using the utf16 encoding Utf16, } diff --git a/rust/automerge/src/value.rs b/rust/automerge/src/value.rs index d8429f4e..be128787 100644 --- a/rust/automerge/src/value.rs +++ b/rust/automerge/src/value.rs @@ -5,9 +5,12 @@ use smol_str::SmolStr; use std::borrow::Cow; use std::fmt; +/// The type of values in an automerge document #[derive(Debug, Clone, PartialEq)] pub enum Value<'a> { + /// An composite object of type `ObjType` Object(ObjType), + /// A non composite value // TODO: if we don't have to store this in patches any more then it might be able to be just a // &'a ScalarValue rather than a Cow Scalar(Cow<'a, ScalarValue>), @@ -431,6 +434,7 @@ impl From<&Counter> for f64 { } } +/// A value which is not a composite value #[derive(Serialize, PartialEq, Debug, Clone)] #[serde(untagged)] pub enum ScalarValue { @@ -442,7 +446,11 @@ pub enum ScalarValue { Counter(Counter), Timestamp(i64), Boolean(bool), - Unknown { type_code: u8, bytes: Vec }, + /// A value from a future version of automerge + Unknown { + type_code: u8, + bytes: Vec, + }, Null, } diff --git a/rust/automerge/src/values.rs b/rust/automerge/src/values.rs index 90f596f3..15ccb4cb 100644 --- a/rust/automerge/src/values.rs +++ b/rust/automerge/src/values.rs @@ -2,6 +2,9 @@ use crate::exid::ExId; use crate::{Automerge, Value}; use std::fmt; +/// An iterator over the values in an object +/// +/// This is returned by the [`crate::ReadDoc::values`] and [`crate::ReadDoc::values_at`] methods pub struct Values<'a> { range: Box>, doc: &'a Automerge, @@ -52,9 +55,3 @@ impl<'a> Iterator for Values<'a> { self.range.next_value(self.doc) } } - -impl<'a> DoubleEndedIterator for Values<'a> { - fn next_back(&mut self) -> Option { - unimplemented!() - } -} diff --git a/rust/automerge/tests/test.rs b/rust/automerge/tests/test.rs index df0e4cff..ca6c64c0 100644 
--- a/rust/automerge/tests/test.rs +++ b/rust/automerge/tests/test.rs @@ -1,7 +1,7 @@ use automerge::transaction::Transactable; use automerge::{ - ActorId, AutoCommit, Automerge, AutomergeError, Change, ExpandedChange, ObjType, ScalarValue, - VecOpObserver, ROOT, + ActorId, AutoCommit, Automerge, AutomergeError, Change, ExpandedChange, ObjType, ReadDoc, + ScalarValue, VecOpObserver, ROOT, }; use std::fs; @@ -21,7 +21,7 @@ fn no_conflict_on_repeated_assignment() { doc.put(&automerge::ROOT, "foo", 1).unwrap(); doc.put(&automerge::ROOT, "foo", 2).unwrap(); assert_doc!( - doc.document(), + &doc, map! { "foo" => { 2 }, } @@ -41,7 +41,7 @@ fn repeated_map_assignment_which_resolves_conflict_not_ignored() { doc1.put(&automerge::ROOT, "field", 123).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "field" => { 123 } } @@ -62,7 +62,7 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { doc1.put(&list_id, 0, 789).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "list" => { list![ @@ -84,7 +84,7 @@ fn list_deletion() { doc.insert(&list_id, 2, 789).unwrap(); doc.delete(&list_id, 1).unwrap(); assert_doc!( - doc.document(), + &doc, map! { "list" => { list![ { 123 }, @@ -106,7 +106,7 @@ fn merge_concurrent_map_prop_updates() { "bar".into() ); assert_doc!( - doc1.document(), + &doc1, map! { "foo" => { "bar" }, "hello" => { "world" }, @@ -114,7 +114,7 @@ fn merge_concurrent_map_prop_updates() { ); doc2.merge(&mut doc1).unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "foo" => { "bar" }, "hello" => { "world" }, @@ -134,7 +134,7 @@ fn add_concurrent_increments_of_same_property() { doc2.increment(&automerge::ROOT, "counter", 2).unwrap(); doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "counter" => { mk_counter(3) @@ -161,7 +161,7 @@ fn add_increments_only_to_preceeded_values() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! 
{ "counter" => { mk_counter(1), @@ -181,7 +181,7 @@ fn concurrent_updates_of_same_field() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "field" => { "one", @@ -206,7 +206,7 @@ fn concurrent_updates_of_same_list_element() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "birds" => { list![{ @@ -232,7 +232,7 @@ fn assignment_conflicts_of_different_types() { doc1.merge(&mut doc3).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "field" => { "string", @@ -255,7 +255,7 @@ fn changes_within_conflicting_map_field() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "field" => { "string", @@ -292,7 +292,7 @@ fn changes_within_conflicting_list_element() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "list" => { list![ @@ -330,7 +330,7 @@ fn concurrently_assigned_nested_maps_should_not_merge() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "config" => { map!{ @@ -364,7 +364,7 @@ fn concurrent_insertions_at_different_list_positions() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "list" => { list![ @@ -396,7 +396,7 @@ fn concurrent_insertions_at_same_list_position() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "birds" => { list![ @@ -427,7 +427,7 @@ fn concurrent_assignment_and_deletion_of_a_map_entry() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "bestBird" => { "magpie", @@ -451,7 +451,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { doc2.delete(&list_id, 1).unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "birds" => {list![ {"blackbird"}, @@ -461,7 +461,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { ); assert_doc!( - doc1.document(), + &doc1, map! 
{ "birds" => {list![ { "blackbird" }, @@ -474,7 +474,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "birds" => {list![ { "blackbird" }, @@ -507,7 +507,7 @@ fn insertion_after_a_deleted_list_element() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "birds" => {list![ { "blackbird" }, @@ -518,7 +518,7 @@ fn insertion_after_a_deleted_list_element() { doc2.merge(&mut doc1).unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "birds" => {list![ { "blackbird" }, @@ -549,7 +549,7 @@ fn concurrent_deletion_of_same_list_element() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "birds" => {list![ { "albatross" }, @@ -560,7 +560,7 @@ fn concurrent_deletion_of_same_list_element() { doc2.merge(&mut doc1).unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "birds" => {list![ { "albatross" }, @@ -593,7 +593,7 @@ fn concurrent_updates_at_different_levels() { doc1.merge(&mut doc2).unwrap(); assert_obj!( - doc1.document(), + &doc1, &automerge::ROOT, "animals", map! { @@ -635,7 +635,7 @@ fn concurrent_updates_of_concurrently_deleted_objects() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "birds" => { map!{}, @@ -686,7 +686,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "wisdom" => {list![ {"to"}, @@ -719,7 +719,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id( doc2.insert(&list, 0, "one").unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "list" => { list![ { "one" }, @@ -744,7 +744,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() doc2.insert(&list, 0, "one").unwrap(); assert_doc!( - doc2.document(), + &doc2, map! 
{ "list" => { list![ { "one" }, @@ -771,7 +771,7 @@ fn insertion_consistent_with_causality() { doc2.insert(&list, 0, "one").unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "list" => { list![ {"one"}, @@ -1129,7 +1129,7 @@ fn test_merging_test_conflicts_then_saving_and_loading() { let mut doc2 = AutoCommit::load(&doc1.save()).unwrap(); doc2.set_actor(actor2); - assert_doc! {doc2.document(), map!{ + assert_doc! {&doc2, map!{ "text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"o"}]}, }}; @@ -1139,16 +1139,16 @@ fn test_merging_test_conflicts_then_saving_and_loading() { doc2.splice_text(&text, 6, 0, "world").unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"!"}, {" "}, {"w"} , {"o"}, {"r"}, {"l"}, {"d"}]} } ); - let mut doc3 = AutoCommit::load(&doc2.save()).unwrap(); + let doc3 = AutoCommit::load(&doc2.save()).unwrap(); assert_doc!( - doc3.document(), + &doc3, map! { "text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"!"}, {" "}, {"w"} , {"o"}, {"r"}, {"l"}, {"d"}]} } diff --git a/rust/edit-trace/src/main.rs b/rust/edit-trace/src/main.rs index debe52db..9724a109 100644 --- a/rust/edit-trace/src/main.rs +++ b/rust/edit-trace/src/main.rs @@ -1,4 +1,5 @@ use automerge::ObjType; +use automerge::ReadDoc; use automerge::{transaction::Transactable, Automerge, AutomergeError, ROOT}; use std::time::Instant; From de5af2fffa957a0dda7cfb388a57389e216621aa Mon Sep 17 00:00:00 2001 From: alexjg Date: Mon, 30 Jan 2023 19:58:35 +0000 Subject: [PATCH 706/730] automerge-rs 0.3.0 and automerge-test 0.2.0 (#512) --- rust/automerge-test/Cargo.toml | 4 ++-- rust/automerge/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rust/automerge-test/Cargo.toml b/rust/automerge-test/Cargo.toml index 4fba0379..9290d7ac 100644 --- a/rust/automerge-test/Cargo.toml +++ b/rust/automerge-test/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "automerge-test" -version = "0.1.0" +version = "0.2.0" edition = "2021" license = 
"MIT" repository = "https://github.com/automerge/automerge-rs" @@ -10,7 +10,7 @@ description = "Utilities for testing automerge libraries" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -automerge = { version = "^0.2", path = "../automerge" } +automerge = { version = "^0.3", path = "../automerge" } smol_str = { version = "^0.1.21", features=["serde"] } serde = { version = "^1.0", features=["derive"] } decorum = "0.3.1" diff --git a/rust/automerge/Cargo.toml b/rust/automerge/Cargo.toml index 578878ae..e5a9125d 100644 --- a/rust/automerge/Cargo.toml +++ b/rust/automerge/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "automerge" -version = "0.2.0" +version = "0.3.0" edition = "2021" license = "MIT" repository = "https://github.com/automerge/automerge-rs" From a6959e70e87aa9d882f68683144ede925ce62042 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 31 Jan 2023 10:54:54 -0700 Subject: [PATCH 707/730] More robust leb128 parsing (#515) Before this change i64 decoding did not work for negative numbers (not a real problem because it is only used for the timestamp of a change), and both u64 and i64 would allow overlong LEB encodings. 
--- rust/automerge/src/storage/parse.rs | 2 +- rust/automerge/src/storage/parse/leb128.rs | 292 +++++++++++++++++---- 2 files changed, 239 insertions(+), 55 deletions(-) diff --git a/rust/automerge/src/storage/parse.rs b/rust/automerge/src/storage/parse.rs index 64419fda..54668da4 100644 --- a/rust/automerge/src/storage/parse.rs +++ b/rust/automerge/src/storage/parse.rs @@ -110,7 +110,7 @@ use crate::{ActorId, ChangeHash}; const HASH_SIZE: usize = 32; // 256 bits = 32 bytes #[allow(unused_imports)] -pub(crate) use self::leb128::{leb128_i32, leb128_i64, leb128_u32, leb128_u64, nonzero_leb128_u64}; +pub(crate) use self::leb128::{leb128_i64, leb128_u32, leb128_u64, nonzero_leb128_u64}; pub(crate) type ParseResult<'a, O, E> = Result<(Input<'a>, O), ParseError>; diff --git a/rust/automerge/src/storage/parse/leb128.rs b/rust/automerge/src/storage/parse/leb128.rs index 800253c9..9f5e72a2 100644 --- a/rust/automerge/src/storage/parse/leb128.rs +++ b/rust/automerge/src/storage/parse/leb128.rs @@ -1,4 +1,3 @@ -use core::mem::size_of; use std::num::NonZeroU64; use super::{take1, Input, ParseError, ParseResult}; @@ -7,44 +6,83 @@ use super::{take1, Input, ParseError, ParseResult}; pub(crate) enum Error { #[error("leb128 was too large for the destination type")] Leb128TooLarge, + #[error("leb128 was improperly encoded")] + Leb128Overlong, #[error("leb128 was zero when it was expected to be nonzero")] UnexpectedZero, } -macro_rules! 
impl_leb { - ($parser_name: ident, $ty: ty) => { - #[allow(dead_code)] - pub(crate) fn $parser_name<'a, E>(input: Input<'a>) -> ParseResult<'a, $ty, E> - where - E: From, - { - let mut res = 0; - let mut shift = 0; +pub(crate) fn leb128_u64(input: Input<'_>) -> ParseResult<'_, u64, E> +where + E: From, +{ + let mut res = 0; + let mut shift = 0; + let mut input = input; - let mut input = input; - let mut pos = 0; - loop { - let (i, byte) = take1(input)?; - input = i; - if (byte & 0x80) == 0 { - res |= (byte as $ty) << shift; - return Ok((input, res)); - } else if pos == leb128_size::<$ty>() - 1 { - return Err(ParseError::Error(Error::Leb128TooLarge.into())); - } else { - res |= ((byte & 0x7F) as $ty) << shift; - } - pos += 1; - shift += 7; + loop { + let (i, byte) = take1(input)?; + input = i; + res |= ((byte & 0x7F) as u64) << shift; + shift += 7; + + if (byte & 0x80) == 0 { + if shift > 64 && byte > 1 { + return Err(ParseError::Error(Error::Leb128TooLarge.into())); + } else if shift > 7 && byte == 0 { + return Err(ParseError::Error(Error::Leb128Overlong.into())); } + return Ok((input, res)); + } else if shift > 64 { + return Err(ParseError::Error(Error::Leb128TooLarge.into())); } - }; + } } -impl_leb!(leb128_u64, u64); -impl_leb!(leb128_u32, u32); -impl_leb!(leb128_i64, i64); -impl_leb!(leb128_i32, i32); +pub(crate) fn leb128_i64(input: Input<'_>) -> ParseResult<'_, i64, E> +where + E: From, +{ + let mut res = 0; + let mut shift = 0; + + let mut input = input; + let mut prev = 0; + loop { + let (i, byte) = take1(input)?; + input = i; + res |= ((byte & 0x7F) as i64) << shift; + shift += 7; + + if (byte & 0x80) == 0 { + if shift > 64 && byte != 0 && byte != 0x7f { + // the 10th byte (if present) must contain only the sign-extended sign bit + return Err(ParseError::Error(Error::Leb128TooLarge.into())); + } else if shift > 7 + && ((byte == 0 && prev & 0x40 == 0) || (byte == 0x7f && prev & 0x40 > 0)) + { + // overlong if the sign bit of penultimate byte has been 
extended + return Err(ParseError::Error(Error::Leb128Overlong.into())); + } else if shift < 64 && byte & 0x40 > 0 { + // sign extend negative numbers + res |= -1 << shift; + } + return Ok((input, res)); + } else if shift > 64 { + return Err(ParseError::Error(Error::Leb128TooLarge.into())); + } + prev = byte; + } +} + +pub(crate) fn leb128_u32(input: Input<'_>) -> ParseResult<'_, u32, E> +where + E: From, +{ + let (i, num) = leb128_u64(input)?; + let result = u32::try_from(num).map_err(|_| ParseError::Error(Error::Leb128TooLarge.into()))?; + Ok((i, result)) +} /// Parse a LEB128 encoded u64 from the input, throwing an error if it is `0` pub(crate) fn nonzero_leb128_u64(input: Input<'_>) -> ParseResult<'_, NonZeroU64, E> @@ -57,38 +95,27 @@ where Ok((input, result)) } -/// Maximum LEB128-encoded size of an integer type -const fn leb128_size() -> usize { - let bits = size_of::() * 8; - (bits + 6) / 7 // equivalent to ceil(bits/7) w/o floats -} - #[cfg(test)] mod tests { use super::super::Needed; use super::*; - use std::{convert::TryFrom, num::NonZeroUsize}; + use std::num::NonZeroUsize; const NEED_ONE: Needed = Needed::Size(unsafe { NonZeroUsize::new_unchecked(1) }); #[test] - fn leb_128_unsigned() { + fn leb_128_u64() { let one = &[0b00000001_u8]; let one_two_nine = &[0b10000001, 0b00000001]; let one_and_more = &[0b00000001, 0b00000011]; let scenarios: Vec<(&'static [u8], ParseResult<'_, u64, Error>)> = vec![ (one, Ok((Input::with_position(one, 1), 1))), - (&[0b10000001_u8], Err(ParseError::Incomplete(NEED_ONE))), ( one_two_nine, Ok((Input::with_position(one_two_nine, 2), 129)), ), (one_and_more, Ok((Input::with_position(one_and_more, 1), 1))), - ( - &[129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129], - Err(ParseError::Error(Error::Leb128TooLarge)), - ), ]; for (index, (input, expected)) in scenarios.clone().into_iter().enumerate() { let result = leb128_u64(Input::new(input)); @@ -102,17 +129,174 @@ mod tests { } } - for (index, (input, expected)) in 
scenarios.into_iter().enumerate() { - let u32_expected = expected.map(|(i, e)| (i, u32::try_from(e).unwrap())); - let result = leb128_u32(Input::new(input)); - if result != u32_expected { - panic!( - "Scenario {} failed for u32: expected {:?} got {:?}", - index + 1, - u32_expected, - result - ); + let error_cases: Vec<(&'static str, &'static [u8], ParseError<_>)> = vec![ + ( + "too many bytes", + &[129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129], + ParseError::Error(Error::Leb128TooLarge), + ), + ( + "too many bits", + &[129, 129, 129, 129, 129, 129, 129, 129, 129, 2], + ParseError::Error(Error::Leb128TooLarge), + ), + ( + "overlong encoding", + &[129, 0], + ParseError::Error(Error::Leb128Overlong), + ), + ("missing data", &[255], ParseError::Incomplete(NEED_ONE)), + ]; + error_cases.into_iter().for_each(|(desc, input, expected)| { + match leb128_u64::(Input::new(input)) { + Ok((_, x)) => panic!("leb128_u64 should fail with {}, got {}", desc, x), + Err(error) => { + if error != expected { + panic!("leb128_u64 should fail with {}, got {}", expected, error) + } + } } - } + }); + + let success_cases: Vec<(&'static [u8], u64)> = vec![ + (&[0], 0), + (&[0x7f], 127), + (&[0x80, 0x01], 128), + (&[0xff, 0x7f], 16383), + ( + &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1], + u64::MAX, + ), + ]; + success_cases.into_iter().for_each(|(input, expected)| { + match leb128_u64::(Input::new(input)) { + Ok((_, x)) => { + if x != expected { + panic!("leb128_u64 should succeed with {}, got {}", expected, x) + } + } + Err(error) => panic!("leb128_u64 should succeed with {}, got {}", expected, error), + } + }); + } + + #[test] + fn leb_128_u32() { + let error_cases: Vec<(&'static str, &'static [u8], ParseError<_>)> = vec![ + ( + "too many bytes", + &[129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129], + ParseError::Error(Error::Leb128TooLarge), + ), + ( + "too many bits", + &[0xff, 0xff, 0xff, 0xff, 0x1f], + ParseError::Error(Error::Leb128TooLarge), + 
), + ( + "overlong encoding", + &[129, 0], + ParseError::Error(Error::Leb128Overlong), + ), + ("missing data", &[0xaa], ParseError::Incomplete(NEED_ONE)), + ]; + error_cases.into_iter().for_each(|(desc, input, expected)| { + match leb128_u32::(Input::new(input)) { + Ok((_, x)) => panic!("leb128_u32 should fail with {}, got {}", desc, x), + Err(error) => { + if error != expected { + panic!("leb128_u32 should fail with {}, got {}", expected, error) + } + } + } + }); + + let success_cases: Vec<(&'static [u8], u32)> = vec![ + (&[0], 0), + (&[0x7f], 127), + (&[0x80, 0x01], 128), + (&[0xff, 0x7f], 16383), + (&[0xff, 0xff, 0xff, 0xff, 0x0f], u32::MAX), + ]; + success_cases.into_iter().for_each(|(input, expected)| { + match leb128_u32::(Input::new(input)) { + Ok((_, x)) => { + if x != expected { + panic!("leb128_u32 should succeed with {}, got {}", expected, x) + } + } + Err(error) => panic!("leb128_u64 should succeed with {}, got {}", expected, error), + } + }); + } + + #[test] + fn leb_128_i64() { + let error_cases: Vec<(&'static str, &'static [u8], ParseError<_>)> = vec![ + ( + "too many bytes", + &[129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129], + ParseError::Error(Error::Leb128TooLarge), + ), + ( + "too many positive bits", + &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01], + ParseError::Error(Error::Leb128TooLarge), + ), + ( + "too many negative bits", + &[0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7e], + ParseError::Error(Error::Leb128TooLarge), + ), + ( + "overlong positive encoding", + &[0xbf, 0], + ParseError::Error(Error::Leb128Overlong), + ), + ( + "overlong negative encoding", + &[0x81, 0xff, 0x7f], + ParseError::Error(Error::Leb128Overlong), + ), + ("missing data", &[0x90], ParseError::Incomplete(NEED_ONE)), + ]; + error_cases.into_iter().for_each(|(desc, input, expected)| { + match leb128_i64::(Input::new(input)) { + Ok((_, x)) => panic!("leb128_i64 should fail with {}, got {}", desc, x), + Err(error) => { + if error 
!= expected { + panic!("leb128_i64 should fail with {}, got {}", expected, error) + } + } + } + }); + + let success_cases: Vec<(&'static [u8], i64)> = vec![ + (&[0], 0), + (&[0x7f], -1), + (&[0x3f], 63), + (&[0x40], -64), + (&[0x80, 0x01], 128), + (&[0xff, 0x3f], 8191), + (&[0x80, 0x40], -8192), + ( + &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0], + i64::MAX, + ), + ( + &[0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f], + i64::MIN, + ), + ]; + success_cases.into_iter().for_each(|(input, expected)| { + match leb128_i64::(Input::new(input)) { + Ok((_, x)) => { + if x != expected { + panic!("leb128_i64 should succeed with {}, got {}", expected, x) + } + } + Err(error) => panic!("leb128_u64 should succeed with {}, got {}", expected, error), + } + }); } } From 2a9652e642fbf7296a85180d790d4e297559f93f Mon Sep 17 00:00:00 2001 From: alexjg Date: Wed, 1 Feb 2023 09:15:00 +0000 Subject: [PATCH 708/730] typescript: Hide API type and make SyncState opaque (#514) --- javascript/src/stable.ts | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/javascript/src/stable.ts b/javascript/src/stable.ts index 3b328240..74410346 100644 --- a/javascript/src/stable.ts +++ b/javascript/src/stable.ts @@ -26,7 +26,7 @@ import { Text } from "./text" export { Text } from "./text" import type { - API, + API as WasmAPI, Actor as ActorId, Prop, ObjID, @@ -34,7 +34,7 @@ import type { DecodedChange, Heads, MaterializeValue, - JsSyncState as SyncState, + JsSyncState, SyncMessage, DecodedSyncMessage, } from "@automerge/automerge-wasm" @@ -46,6 +46,17 @@ export type { IncPatch, SyncMessage, } from "@automerge/automerge-wasm" + +/** @hidden **/ +type API = WasmAPI + +const SyncStateSymbol = Symbol("_syncstate") + +/** + * An opaque type tracking the state of sync with a remote peer + */ +type SyncState = JsSyncState & { _opaque: typeof SyncStateSymbol } + import { ApiHandler, type ChangeToEncode, UseApi } from "./low_level" import { 
Automerge } from "@automerge/automerge-wasm" @@ -772,7 +783,7 @@ export function decodeSyncState(state: Uint8Array): SyncState { const sync = ApiHandler.decodeSyncState(state) const result = ApiHandler.exportSyncState(sync) sync.free() - return result + return result as SyncState } /** @@ -793,7 +804,7 @@ export function generateSyncMessage( const state = _state(doc) const syncState = ApiHandler.importSyncState(inState) const message = state.handle.generateSyncMessage(syncState) - const outState = ApiHandler.exportSyncState(syncState) + const outState = ApiHandler.exportSyncState(syncState) as SyncState return [outState, message] } @@ -835,7 +846,7 @@ export function receiveSyncMessage( } const heads = state.handle.getHeads() state.handle.receiveSyncMessage(syncState, message) - const outSyncState = ApiHandler.exportSyncState(syncState) + const outSyncState = ApiHandler.exportSyncState(syncState) as SyncState return [ progressDocument(doc, heads, opts.patchCallback || state.patchCallback), outSyncState, @@ -852,7 +863,7 @@ export function receiveSyncMessage( * @group sync */ export function initSyncState(): SyncState { - return ApiHandler.exportSyncState(ApiHandler.initSyncState()) + return ApiHandler.exportSyncState(ApiHandler.initSyncState()) as SyncState } /** @hidden */ From f8d5a8ea989580ab54d0dc541859a79b31a70107 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 Feb 2023 09:15:54 +0000 Subject: [PATCH 709/730] Bump json5 from 1.0.1 to 1.0.2 in /javascript/examples/create-react-app (#487) Bumps [json5](https://github.com/json5/json5) from 1.0.1 to 1.0.2. 
in javascript/examples/create-react-app --- javascript/examples/create-react-app/yarn.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/javascript/examples/create-react-app/yarn.lock b/javascript/examples/create-react-app/yarn.lock index d6e5d93f..ec83af3b 100644 --- a/javascript/examples/create-react-app/yarn.lock +++ b/javascript/examples/create-react-app/yarn.lock @@ -5845,9 +5845,9 @@ json-stable-stringify-without-jsonify@^1.0.1: integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== json5@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" - integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + version "1.0.2" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" + integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== dependencies: minimist "^1.2.0" @@ -6165,9 +6165,9 @@ minimatch@^5.0.1: brace-expansion "^2.0.1" minimist@^1.2.0, minimist@^1.2.6: - version "1.2.6" - resolved "http://localhost:4873/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" - integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + version "1.2.7" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18" + integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g== mkdirp@~0.5.1: version "0.5.6" From 9195e9cb7628ad380650d4e6ec727fbd481bfb7a Mon Sep 17 00:00:00 2001 From: alexjg Date: Thu, 2 Feb 2023 15:02:53 +0000 Subject: [PATCH 710/730] Fix deny errors (#518) * Ignore deny errors on duplicate windows-sys * Delete spurious lockfile in automerge-cli --- rust/automerge-cli/Cargo.lock | 857 
---------------------------------- rust/deny.toml | 6 + 2 files changed, 6 insertions(+), 857 deletions(-) delete mode 100644 rust/automerge-cli/Cargo.lock diff --git a/rust/automerge-cli/Cargo.lock b/rust/automerge-cli/Cargo.lock deleted file mode 100644 index a330ee89..00000000 --- a/rust/automerge-cli/Cargo.lock +++ /dev/null @@ -1,857 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 3 - -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - -[[package]] -name = "ansi_term" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" -dependencies = [ - "winapi", -] - -[[package]] -name = "anyhow" -version = "1.0.55" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "159bb86af3a200e19a068f4224eae4c8bb2d0fa054c7e5d1cacd5cef95e684cd" - -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi", - "libc", - "winapi", -] - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "automerge" -version = "0.1.0" -dependencies = [ - "flate2", - "fxhash", - "hex", - "itertools", - "js-sys", - "leb128", - "nonzero_ext", - "rand", - "serde", - "sha2", - "smol_str", - "thiserror", - "tinyvec", - "tracing", - "unicode-segmentation", - "uuid", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "automerge-cli" -version = "0.1.0" -dependencies = [ - "anyhow", - "atty", - "automerge", - "clap", - "colored_json", - 
"combine", - "duct", - "maplit", - "serde_json", - "thiserror", - "tracing-subscriber", -] - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "block-buffer" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" -dependencies = [ - "generic-array", -] - -[[package]] -name = "bumpalo" -version = "3.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" - -[[package]] -name = "byteorder" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" - -[[package]] -name = "bytes" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "clap" -version = "3.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced1892c55c910c1219e98d6fc8d71f6bddba7905866ce740066d8bfea859312" -dependencies = [ - "atty", - "bitflags", - "clap_derive", - "indexmap", - "lazy_static", - "os_str_bytes", - "strsim", - "termcolor", - "textwrap", -] - -[[package]] -name = "clap_derive" -version = "3.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da95d038ede1a964ce99f49cbe27a7fb538d1da595e4b4f70b8c8f338d17bf16" -dependencies = [ - "heck", - "proc-macro-error", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "colored_json" 
-version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd32eb54d016e203b7c2600e3a7802c75843a92e38ccc4869aefeca21771a64" -dependencies = [ - "ansi_term", - "atty", - "libc", - "serde", - "serde_json", -] - -[[package]] -name = "combine" -version = "4.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50b727aacc797f9fc28e355d21f34709ac4fc9adecfe470ad07b8f4464f53062" -dependencies = [ - "bytes", - "memchr", -] - -[[package]] -name = "cpufeatures" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469" -dependencies = [ - "libc", -] - -[[package]] -name = "crc32fast" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crypto-common" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "digest" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" -dependencies = [ - "block-buffer", - "crypto-common", -] - -[[package]] -name = "duct" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc6a0a59ed0888e0041cf708e66357b7ae1a82f1c67247e1f93b5e0818f7d8d" -dependencies = [ - "libc", - "once_cell", - "os_pipe", - "shared_child", -] - -[[package]] -name = "either" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" - -[[package]] -name = "flate2" -version = "1.0.22" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" -dependencies = [ - "cfg-if", - "crc32fast", - "libc", - "miniz_oxide", -] - -[[package]] -name = "fxhash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] - -[[package]] -name = "generic-array" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d39cd93900197114fa1fcb7ae84ca742095eed9442088988ae74fa744e930e77" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi", - "wasm-bindgen", -] - -[[package]] -name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" - -[[package]] -name = "heck" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" - -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "indexmap" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" 
-dependencies = [ - "autocfg", - "hashbrown", -] - -[[package]] -name = "itertools" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" - -[[package]] -name = "js-sys" -version = "0.3.56" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "leb128" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" - -[[package]] -name = "libc" -version = "0.2.119" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4" - -[[package]] -name = "log" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "maplit" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" - -[[package]] -name = "memchr" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" - -[[package]] -name = "miniz_oxide" -version = "0.4.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" -dependencies = [ - "adler", - "autocfg", -] - -[[package]] -name = "nonzero_ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44a1290799eababa63ea60af0cbc3f03363e328e58f32fb0294798ed3e85f444" - -[[package]] -name = "once_cell" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" - -[[package]] -name = "os_pipe" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb233f06c2307e1f5ce2ecad9f8121cffbbee2c95428f44ea85222e460d0d213" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "os_str_bytes" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64" -dependencies = [ - "memchr", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" - -[[package]] -name = "ppv-lite86" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" - -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = 
[ - "proc-macro2", - "quote", - "version_check", -] - -[[package]] -name = "proc-macro2" -version = "1.0.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" -dependencies = [ - "unicode-xid", -] - -[[package]] -name = "quote" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha", - "rand_core", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" -dependencies = [ - "getrandom", -] - -[[package]] -name = "ryu" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" - -[[package]] -name = "serde" -version = "1.0.136" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.136" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "serde_json" 
-version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "sha2" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sharded-slab" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" -dependencies = [ - "lazy_static", -] - -[[package]] -name = "shared_child" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6be9f7d5565b1483af3e72975e2dee33879b3b86bd48c0929fccf6585d79e65a" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "smallvec" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" - -[[package]] -name = "smol_str" -version = "0.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61d15c83e300cce35b7c8cd39ff567c1ef42dde6d4a1a38dbdbf9a59902261bd" -dependencies = [ - "serde", -] - -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - -[[package]] -name = "syn" -version = "1.0.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - -[[package]] -name = "termcolor" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "textwrap" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb" - -[[package]] -name = "thiserror" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "thread_local" -version = "1.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" -dependencies = [ - "once_cell", -] - -[[package]] -name = "tinyvec" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" - -[[package]] -name = "tracing" -version = "0.1.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6c650a8ef0cd2dd93736f033d21cbd1224c5a967aa0c258d00fcf7dafef9b9f" -dependencies = [ - "cfg-if", - "log", - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8276d9a4a3a558d7b7ad5303ad50b53d58264641b82914b7ada36bd762e7a716" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tracing-core" -version = "0.1.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03cfcb51380632a72d3111cb8d3447a8d908e577d31beeac006f836383d29a23" -dependencies = [ - "lazy_static", - "valuable", -] - -[[package]] -name = "tracing-log" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6923477a48e41c1951f1999ef8bb5a3023eb723ceadafe78ffb65dc366761e3" -dependencies = [ - "lazy_static", - "log", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e0ab7bdc962035a87fba73f3acca9b8a8d0034c2e6f60b84aeaaddddc155dce" -dependencies = [ - "ansi_term", - "sharded-slab", - "smallvec", - "thread_local", - "tracing-core", - "tracing-log", -] - -[[package]] -name = "typenum" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" - -[[package]] -name = "unicode-segmentation" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" - -[[package]] -name = "unicode-xid" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" - -[[package]] -name = "uuid" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" -dependencies = [ - "getrandom", - "serde", -] - -[[package]] -name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "wasi" -version = "0.10.2+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" - -[[package]] -name = "wasm-bindgen" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" -dependencies = [ - "cfg-if", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" -dependencies = [ - "bumpalo", - "lazy_static", - "log", - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" - -[[package]] -name = "web-sys" -version = "0.3.56" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" -dependencies = [ - "winapi", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/rust/deny.toml b/rust/deny.toml index 54a68a60..12a562ce 100644 --- a/rust/deny.toml +++ b/rust/deny.toml @@ -172,6 +172,12 @@ deny = [ ] # Certain crates/versions that will be skipped when doing duplicate detection. skip = [ + # duct, which we only depend on for integration tests in automerge-cli, + # pulls in a version of os_pipe which in turn pulls in a version of + # windows-sys which is different to the version in pulled in by is-terminal. + # This is fine to ignore for now because it doesn't end up in downstream + # dependencies. + { name = "windows-sys", version = "0.42.0" } ] # Similarly to `skip` allows you to skip certain crates during duplicate # detection. 
Unlike skip, it also includes the entire tree of transitive From da55dfac7ae3baa0892d98b64fcd41be61733c37 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Mon, 23 Jan 2023 18:30:54 +0000 Subject: [PATCH 711/730] refactor: make fields of Automerge private The fields of `automerge::Automerge` were crate public, which made it hard to change the structure of `Automerge` with confidence. Make all fields private and put them behind accessors where necessary to allow for easy internal changes. --- rust/automerge/src/autocommit.rs | 2 +- rust/automerge/src/automerge.rs | 65 +++++++++++++++++++---- rust/automerge/src/op_set/load.rs | 6 +-- rust/automerge/src/transaction/inner.rs | 69 ++++++++++++------------- 4 files changed, 92 insertions(+), 50 deletions(-) diff --git a/rust/automerge/src/autocommit.rs b/rust/automerge/src/autocommit.rs index 2c1c3adf..ae28596e 100644 --- a/rust/automerge/src/autocommit.rs +++ b/rust/automerge/src/autocommit.rs @@ -159,7 +159,7 @@ impl AutoCommitWithObs { /// /// This is a cheap operation, it just changes the way indexes are calculated pub fn with_encoding(mut self, encoding: TextEncoding) -> Self { - self.doc.text_encoding = encoding; + self.doc = self.doc.with_encoding(encoding); self } diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs index 86aa5f63..1b789337 100644 --- a/rust/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ -71,26 +71,26 @@ pub(crate) enum Actor { #[derive(Debug, Clone)] pub struct Automerge { /// The list of unapplied changes that are not causally ready. - pub(crate) queue: Vec, + queue: Vec, /// The history of changes that form this document, topologically sorted too. - pub(crate) history: Vec, + history: Vec, /// Mapping from change hash to index into the history list. - pub(crate) history_index: HashMap, + history_index: HashMap, /// Mapping from change hash to vector clock at this state. 
- pub(crate) clocks: HashMap, + clocks: HashMap, /// Mapping from actor index to list of seqs seen for them. - pub(crate) states: HashMap>, + states: HashMap>, /// Current dependencies of this document (heads hashes). - pub(crate) deps: HashSet, + deps: HashSet, /// Heads at the last save. - pub(crate) saved: Vec, + saved: Vec, /// The set of operations that form this document. - pub(crate) ops: OpSet, + ops: OpSet, /// The current actor. - pub(crate) actor: Actor, + actor: Actor, /// The maximum operation counter this document has seen. - pub(crate) max_op: u64, - pub(crate) text_encoding: TextEncoding, + max_op: u64, + text_encoding: TextEncoding, } impl Automerge { @@ -111,6 +111,49 @@ impl Automerge { } } + pub(crate) fn ops_mut(&mut self) -> &mut OpSet { + &mut self.ops + } + + pub(crate) fn ops(&self) -> &OpSet { + &self.ops + } + + pub(crate) fn into_ops(self) -> OpSet { + self.ops + } + + pub(crate) fn actor_id(&self) -> &ActorId { + match &self.actor { + Actor::Unused(id) => id, + Actor::Cached(idx) => self.ops.m.actors.get(*idx), + } + } + + /// Remove the current actor from the opset if it has no ops + /// + /// If the current actor ID has no ops in the opset then remove it from the cache of actor IDs. + /// This us used when rolling back a transaction. If the rolled back ops are the only ops for + /// the current actor then we want to remove that actor from the opset so it doesn't end up in + /// any saved version of the document. 
+ /// + /// # Panics + /// + /// If the last actor in the OpSet is not the actor ID of this document + pub(crate) fn rollback_last_actor(&mut self) { + if let Actor::Cached(actor_idx) = self.actor { + if self.states.get(&actor_idx).is_none() && self.ops.m.actors.len() > 0 { + assert!(self.ops.m.actors.len() == actor_idx + 1); + let actor = self.ops.m.actors.remove_last(); + self.actor = Actor::Unused(actor); + } + } + } + + pub(crate) fn text_encoding(&self) -> TextEncoding { + self.text_encoding + } + /// Change the text encoding of this view of the document /// /// This is a cheap operation, it just changes the way indexes are calculated diff --git a/rust/automerge/src/op_set/load.rs b/rust/automerge/src/op_set/load.rs index 6cc64e79..0df7f6ef 100644 --- a/rust/automerge/src/op_set/load.rs +++ b/rust/automerge/src/op_set/load.rs @@ -79,10 +79,10 @@ impl<'a, O: OpObserver> DocObserver for ObservedOpSetBuilder<'a, O> { } fn finish(self, _metadata: super::OpSetMetadata) -> Self::Output { - let mut opset = Automerge::new(); + let mut doc = Automerge::new(); for (obj, op) in self.ops { - opset.insert_op_with_observer(&obj, op, self.observer); + doc.insert_op_with_observer(&obj, op, self.observer); } - opset.ops + doc.into_ops() } } diff --git a/rust/automerge/src/transaction/inner.rs b/rust/automerge/src/transaction/inner.rs index 7e7db17d..95f922f3 100644 --- a/rust/automerge/src/transaction/inner.rs +++ b/rust/automerge/src/transaction/inner.rs @@ -98,7 +98,7 @@ impl TransactionInner { } let num_ops = self.pending_ops(); - let change = self.export(&doc.ops.m); + let change = self.export(&doc.ops().m); let hash = change.hash(); #[cfg(not(debug_assertions))] tracing::trace!(commit=?hash, deps=?change.deps(), "committing transaction"); @@ -153,20 +153,16 @@ impl TransactionInner { // remove in reverse order so sets are removed before makes etc... 
for (obj, op) in self.operations.into_iter().rev() { for pred_id in &op.pred { - if let Some(p) = doc.ops.search(&obj, OpIdSearch::new(*pred_id)).index() { - doc.ops.change_vis(&obj, p, |o| o.remove_succ(&op)); + if let Some(p) = doc.ops().search(&obj, OpIdSearch::new(*pred_id)).index() { + doc.ops_mut().change_vis(&obj, p, |o| o.remove_succ(&op)); } } - if let Some(pos) = doc.ops.search(&obj, OpIdSearch::new(op.id)).index() { - doc.ops.remove(&obj, pos); + if let Some(pos) = doc.ops().search(&obj, OpIdSearch::new(op.id)).index() { + doc.ops_mut().remove(&obj, pos); } } - // remove the actor from the cache so that it doesn't end up in the saved document - if doc.states.get(&self.actor).is_none() && doc.ops.m.actors.len() > 0 { - let actor = doc.ops.m.actors.remove_last(); - doc.actor = Actor::Unused(actor); - } + doc.rollback_last_actor(); num } @@ -277,10 +273,10 @@ impl TransactionInner { obj: ObjId, succ_pos: &[usize], ) { - doc.ops.add_succ(&obj, succ_pos, &op); + doc.ops_mut().add_succ(&obj, succ_pos, &op); if !op.is_delete() { - doc.ops.insert(pos, &obj, op.clone()); + doc.ops_mut().insert(pos, &obj, op.clone()); } self.finalize_op(doc, op_observer, obj, prop, op); @@ -332,7 +328,7 @@ impl TransactionInner { let id = self.next_id(); let query = doc - .ops + .ops() .search(&obj, query::InsertNth::new(index, ListEncoding::List)); let key = query.key()?; @@ -346,7 +342,7 @@ impl TransactionInner { insert: true, }; - doc.ops.insert(query.pos(), &obj, op.clone()); + doc.ops_mut().insert(query.pos(), &obj, op.clone()); self.finalize_op(doc, op_observer, obj, Prop::Seq(index), op); @@ -380,8 +376,8 @@ impl TransactionInner { } let id = self.next_id(); - let prop_index = doc.ops.m.props.cache(prop.clone()); - let query = doc.ops.search(&obj, query::Prop::new(prop_index)); + let prop_index = doc.ops_mut().m.props.cache(prop.clone()); + let query = doc.ops().search(&obj, query::Prop::new(prop_index)); // no key present to delete if query.ops.is_empty() && action == 
OpType::Delete { @@ -398,7 +394,7 @@ impl TransactionInner { return Err(AutomergeError::MissingCounter); } - let pred = doc.ops.m.sorted_opids(query.ops.iter().map(|o| o.id)); + let pred = doc.ops().m.sorted_opids(query.ops.iter().map(|o| o.id)); let op = Op { id, @@ -425,11 +421,11 @@ impl TransactionInner { action: OpType, ) -> Result, AutomergeError> { let query = doc - .ops + .ops() .search(&obj, query::Nth::new(index, ListEncoding::List)); let id = self.next_id(); - let pred = doc.ops.m.sorted_opids(query.ops.iter().map(|o| o.id)); + let pred = doc.ops().m.sorted_opids(query.ops.iter().map(|o| o.id)); let key = query.key()?; if query.ops.len() == 1 && query.ops[0].is_noop(&action) { @@ -490,7 +486,7 @@ impl TransactionInner { index, del: 1, values: vec![], - splice_type: SpliceType::Text("", doc.text_encoding), + splice_type: SpliceType::Text("", doc.text_encoding()), }, )?; } else { @@ -551,7 +547,7 @@ impl TransactionInner { index, del, values, - splice_type: SpliceType::Text(text, doc.text_encoding), + splice_type: SpliceType::Text(text, doc.text_encoding()), }, ) } @@ -568,13 +564,13 @@ impl TransactionInner { splice_type, }: SpliceArgs<'_>, ) -> Result<(), AutomergeError> { - let ex_obj = doc.ops.id_to_exid(obj.0); + let ex_obj = doc.ops().id_to_exid(obj.0); let encoding = splice_type.encoding(); // delete `del` items - performing the query for each one let mut deleted = 0; while deleted < del { // TODO: could do this with a single custom query - let query = doc.ops.search(&obj, query::Nth::new(index, encoding)); + let query = doc.ops().search(&obj, query::Nth::new(index, encoding)); // if we delete in the middle of a multi-character // move cursor back to the beginning and expand the del width @@ -590,9 +586,10 @@ impl TransactionInner { break; }; - let op = self.next_delete(query.key()?, query.pred(&doc.ops)); + let op = self.next_delete(query.key()?, query.pred(doc.ops())); - doc.ops.add_succ(&obj, &query.ops_pos, &op); + let ops_pos = query.ops_pos; + 
doc.ops_mut().add_succ(&obj, &ops_pos, &op); self.operations.push((obj, op)); @@ -608,7 +605,9 @@ impl TransactionInner { // do the insert query for the first item and then // insert the remaining ops one after the other if !values.is_empty() { - let query = doc.ops.search(&obj, query::InsertNth::new(index, encoding)); + let query = doc + .ops() + .search(&obj, query::InsertNth::new(index, encoding)); let mut pos = query.pos(); let mut key = query.key()?; let mut cursor = index; @@ -617,7 +616,7 @@ impl TransactionInner { for v in &values { let op = self.next_insert(key, v.clone()); - doc.ops.insert(pos, &obj, op.clone()); + doc.ops_mut().insert(pos, &obj, op.clone()); width = op.width(encoding); cursor += width; @@ -627,7 +626,7 @@ impl TransactionInner { self.operations.push((obj, op)); } - doc.ops.hint(&obj, cursor - width, pos - 1); + doc.ops_mut().hint(&obj, cursor - width, pos - 1); // handle the observer if let Some(obs) = op_observer.as_mut() { @@ -639,7 +638,7 @@ impl TransactionInner { let start = self.operations.len() - values.len(); for (offset, v) in values.iter().enumerate() { let op = &self.operations[start + offset].1; - let value = (v.clone().into(), doc.ops.id_to_exid(op.id)); + let value = (v.clone().into(), doc.ops().id_to_exid(op.id)); obs.insert(doc, ex_obj.clone(), index + offset, value) } } @@ -660,19 +659,19 @@ impl TransactionInner { ) { // TODO - id_to_exid should be a noop if not used - change type to Into? 
if let Some(op_observer) = op_observer { - let ex_obj = doc.ops.id_to_exid(obj.0); + let ex_obj = doc.ops().id_to_exid(obj.0); if op.insert { - let obj_type = doc.ops.object_type(&obj); + let obj_type = doc.ops().object_type(&obj); assert!(obj_type.unwrap().is_sequence()); match (obj_type, prop) { (Some(ObjType::List), Prop::Seq(index)) => { - let value = (op.value(), doc.ops.id_to_exid(op.id)); + let value = (op.value(), doc.ops().id_to_exid(op.id)); op_observer.insert(doc, ex_obj, index, value) } (Some(ObjType::Text), Prop::Seq(index)) => { // FIXME if op_observer.text_as_seq() { - let value = (op.value(), doc.ops.id_to_exid(op.id)); + let value = (op.value(), doc.ops().id_to_exid(op.id)); op_observer.insert(doc, ex_obj, index, value) } else { op_observer.splice_text(doc, ex_obj, index, op.to_str()) @@ -683,9 +682,9 @@ impl TransactionInner { } else if op.is_delete() { op_observer.delete(doc, ex_obj, prop); } else if let Some(value) = op.get_increment_value() { - op_observer.increment(doc, ex_obj, prop, (value, doc.ops.id_to_exid(op.id))); + op_observer.increment(doc, ex_obj, prop, (value, doc.ops().id_to_exid(op.id))); } else { - let value = (op.value(), doc.ops.id_to_exid(op.id)); + let value = (op.value(), doc.ops().id_to_exid(op.id)); op_observer.put(doc, ex_obj, prop, value, false); } } From c3c04128f5f1703007f650ea3104d98334334aab Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 26 Jan 2023 09:45:26 +0000 Subject: [PATCH 712/730] Only observe the current state on load Problem: When loading a document whilst passing an `OpObserver` we call the OpObserver for every change in the loaded document. This slows down the loading process for two reasons: 1) we have to make a call to the observer for every op 2) we cannot just stream the ops into the OpSet in topological order but must instead buffer them to pass to the observer. Solution: Construct the OpSet first, then only traverse the visible ops in the OpSet, calling the observer. 
For documents with a deep history this results in vastly fewer calls to the observer and also allows us to construct the OpSet much more quickly. It is slightly different semantically because the observer never gets notified of changes which are not visible, but that shouldn't matter to most observers. --- rust/automerge/Cargo.toml | 1 + rust/automerge/src/automerge.rs | 31 +- rust/automerge/src/automerge/current_state.rs | 890 ++++++++++++++++++ rust/automerge/src/op_set.rs | 55 +- rust/automerge/src/op_set/load.rs | 38 +- rust/automerge/src/storage/chunk.rs | 2 +- rust/automerge/src/sync.rs | 2 +- rust/automerge/src/transaction/inner.rs | 1 - rust/deny.toml | 3 + 9 files changed, 944 insertions(+), 79 deletions(-) create mode 100644 rust/automerge/src/automerge/current_state.rs diff --git a/rust/automerge/Cargo.toml b/rust/automerge/Cargo.toml index e5a9125d..0c10cc2b 100644 --- a/rust/automerge/Cargo.toml +++ b/rust/automerge/Cargo.toml @@ -47,6 +47,7 @@ criterion = "0.4.0" test-log = { version = "0.2.10", features=["trace"], default-features = false} tracing-subscriber = {version = "0.3.9", features = ["fmt", "env-filter"] } automerge-test = { path = "../automerge-test" } +prettytable = "0.10.0" [[bench]] name = "range" diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs index 1b789337..e0db8b5a 100644 --- a/rust/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ -26,6 +26,8 @@ use crate::{ }; use serde::Serialize; +mod current_state; + #[cfg(test)] mod tests; @@ -119,17 +121,6 @@ impl Automerge { &self.ops } - pub(crate) fn into_ops(self) -> OpSet { - self.ops - } - - pub(crate) fn actor_id(&self) -> &ActorId { - match &self.actor { - Actor::Unused(id) => id, - Actor::Cached(idx) => self.ops.m.actors.get(*idx), - } - } - /// Remove the current actor from the opset if it has no ops /// /// If the current actor ID has no ops in the opset then remove it from the cache of actor IDs. 
@@ -455,13 +446,8 @@ impl Automerge { result: op_set, changes, heads, - } = match &mut observer { - Some(o) => { - storage::load::reconstruct_document(&d, mode, OpSet::observed_builder(*o)) - } - None => storage::load::reconstruct_document(&d, mode, OpSet::builder()), - } - .map_err(|e| load::Error::InflateDocument(Box::new(e)))?; + } = storage::load::reconstruct_document(&d, mode, OpSet::builder()) + .map_err(|e| load::Error::InflateDocument(Box::new(e)))?; let mut hashes_by_index = HashMap::new(); let mut actor_to_history: HashMap> = HashMap::new(); let mut clocks = Clocks::new(); @@ -517,6 +503,9 @@ impl Automerge { } load::LoadedChanges::Partial { error, .. } => return Err(error.into()), } + if let Some(observer) = &mut observer { + current_state::observe_current_state(&am, *observer); + } Ok(am) } @@ -715,7 +704,7 @@ impl Automerge { let c = self.history.iter(); let bytes = crate::storage::save::save_document( c, - self.ops.iter(), + self.ops.iter().map(|(objid, _, op)| (objid, op)), &self.ops.m.actors, &self.ops.m.props, &heads, @@ -731,7 +720,7 @@ impl Automerge { let c = self.history.iter(); let bytes = crate::storage::save::save_document( c, - self.ops.iter(), + self.ops.iter().map(|(objid, _, op)| (objid, op)), &self.ops.m.actors, &self.ops.m.props, &heads, @@ -944,7 +933,7 @@ impl Automerge { "pred", "succ" ); - for (obj, op) in self.ops.iter() { + for (obj, _, op) in self.ops.iter() { let id = self.to_string(op.id); let obj = self.to_string(obj); let key = match op.key { diff --git a/rust/automerge/src/automerge/current_state.rs b/rust/automerge/src/automerge/current_state.rs new file mode 100644 index 00000000..1c1bceed --- /dev/null +++ b/rust/automerge/src/automerge/current_state.rs @@ -0,0 +1,890 @@ +use std::{borrow::Cow, collections::HashSet, iter::Peekable}; + +use itertools::Itertools; + +use crate::{ + types::{ElemId, Key, ListEncoding, ObjId, Op, OpId}, + ObjType, OpObserver, OpType, ScalarValue, Value, +}; + +/// Traverse the "current" state 
of the document, notifying `observer` +/// +/// The "current" state of the document is the set of visible operations. This function will +/// traverse that set of operations and call the corresponding methods on the `observer` as it +/// encounters values. The `observer` methods will be called in the order in which they appear in +/// the document. That is to say that the observer will be notified of parent objects before the +/// objects they contain and elements of a sequence will be notified in the order they occur. +/// +/// Due to only notifying of visible operations the observer will only be called with `put`, +/// `insert`, and `splice`, operations. +pub(super) fn observe_current_state(doc: &crate::Automerge, observer: &mut O) { + // The OpSet already exposes operations in the order they appear in the document. + // `OpSet::iter_objs` iterates over the objects in causal order, this means that parent objects + // will always appear before their children. Furthermore, the operations within each object are + // ordered by key (which means by their position in a sequence for sequences). + // + // Effectively then we iterate over each object, then we group the operations in the object by + // key and for each key find the visible operations for that key. Then we notify the observer + // for each of those visible operations. 
+ let mut visible_objs = HashSet::new(); + visible_objs.insert(ObjId::root()); + for (obj, typ, ops) in doc.ops().iter_objs() { + if !visible_objs.contains(obj) { + continue; + } + let ops_by_key = ops.group_by(|o| o.key); + let actions = ops_by_key + .into_iter() + .flat_map(|(key, key_ops)| key_actions(key, key_ops)); + if typ == ObjType::Text && !observer.text_as_seq() { + track_new_objs_and_notify( + &mut visible_objs, + doc, + obj, + typ, + observer, + text_actions(actions), + ) + } else if typ == ObjType::List { + track_new_objs_and_notify( + &mut visible_objs, + doc, + obj, + typ, + observer, + list_actions(actions), + ) + } else { + track_new_objs_and_notify(&mut visible_objs, doc, obj, typ, observer, actions) + } + } +} + +fn track_new_objs_and_notify, O: OpObserver>( + visible_objs: &mut HashSet, + doc: &crate::Automerge, + obj: &ObjId, + typ: ObjType, + observer: &mut O, + actions: I, +) { + let exid = doc.id_to_exid(obj.0); + for action in actions { + if let Some(obj) = action.made_object() { + visible_objs.insert(obj); + } + action.notify_observer(doc, &exid, obj, typ, observer); + } +} + +trait Action { + /// Notify an observer of whatever this action does + fn notify_observer( + self, + doc: &crate::Automerge, + exid: &crate::ObjId, + obj: &ObjId, + typ: ObjType, + observer: &mut O, + ); + + /// If this action created an object, return the ID of that object + fn made_object(&self) -> Option; +} + +fn key_actions<'a, I: Iterator>( + key: Key, + key_ops: I, +) -> impl Iterator> { + #[derive(Clone)] + enum CurrentOp<'a> { + Put { + value: Value<'a>, + id: OpId, + conflicted: bool, + }, + Insert(Value<'a>, OpId), + } + let current_ops = key_ops + .filter(|o| o.visible()) + .filter_map(|o| match o.action { + OpType::Make(obj_type) => { + let value = Value::Object(obj_type); + if o.insert { + Some(CurrentOp::Insert(value, o.id)) + } else { + Some(CurrentOp::Put { + value, + id: o.id, + conflicted: false, + }) + } + } + OpType::Put(ref value) => { + let 
value = Value::Scalar(Cow::Borrowed(value)); + if o.insert { + Some(CurrentOp::Insert(value, o.id)) + } else { + Some(CurrentOp::Put { + value, + id: o.id, + conflicted: false, + }) + } + } + _ => None, + }); + current_ops + .coalesce(|previous, current| match (previous, current) { + (CurrentOp::Put { .. }, CurrentOp::Put { value, id, .. }) => Ok(CurrentOp::Put { + value, + id, + conflicted: true, + }), + (previous, current) => Err((previous, current)), + }) + .map(move |op| match op { + CurrentOp::Put { + value, + id, + conflicted, + } => SimpleAction::Put { + prop: key, + tagged_value: (value, id), + conflict: conflicted, + }, + CurrentOp::Insert(val, id) => SimpleAction::Insert { + elem_id: ElemId(id), + tagged_value: (val, id), + }, + }) +} + +/// Either a "put" or "insert" action. i.e. not splicing for text values +enum SimpleAction<'a> { + Put { + prop: Key, + tagged_value: (Value<'a>, OpId), + conflict: bool, + }, + Insert { + elem_id: ElemId, + tagged_value: (Value<'a>, OpId), + }, +} + +impl<'a> Action for SimpleAction<'a> { + fn notify_observer( + self, + doc: &crate::Automerge, + exid: &crate::ObjId, + obj: &ObjId, + typ: ObjType, + observer: &mut O, + ) { + let encoding = match typ { + ObjType::Text => ListEncoding::Text(doc.text_encoding()), + _ => ListEncoding::List, + }; + match self { + Self::Put { + prop, + tagged_value, + conflict, + } => { + let tagged_value = (tagged_value.0, doc.id_to_exid(tagged_value.1)); + let prop = doc.ops().export_key(*obj, prop, encoding).unwrap(); + observer.put(doc, exid.clone(), prop, tagged_value, conflict); + } + Self::Insert { + elem_id, + tagged_value: (value, opid), + } => { + let index = doc + .ops() + .search(obj, crate::query::ElemIdPos::new(elem_id, encoding)) + .index() + .unwrap(); + let tagged_value = (value, doc.id_to_exid(opid)); + observer.insert(doc, doc.id_to_exid(obj.0), index, tagged_value); + } + } + } + + fn made_object(&self) -> Option { + match self { + Self::Put { + tagged_value: 
(Value::Object(_), id), + .. + } => Some((*id).into()), + Self::Insert { + tagged_value: (Value::Object(_), id), + .. + } => Some((*id).into()), + _ => None, + } + } +} + +/// An `Action` which splices for text values +enum TextAction<'a> { + Action(SimpleAction<'a>), + Splice { start: ElemId, chars: String }, +} + +impl<'a> Action for TextAction<'a> { + fn notify_observer( + self, + doc: &crate::Automerge, + exid: &crate::ObjId, + obj: &ObjId, + typ: ObjType, + observer: &mut O, + ) { + match self { + Self::Action(action) => action.notify_observer(doc, exid, obj, typ, observer), + Self::Splice { start, chars } => { + let index = doc + .ops() + .search( + obj, + crate::query::ElemIdPos::new( + start, + ListEncoding::Text(doc.text_encoding()), + ), + ) + .index() + .unwrap(); + observer.splice_text(doc, doc.id_to_exid(obj.0), index, chars.as_str()); + } + } + } + + fn made_object(&self) -> Option { + match self { + Self::Action(action) => action.made_object(), + _ => None, + } + } +} + +fn list_actions<'a, I: Iterator>>( + actions: I, +) -> impl Iterator> { + actions.map(|a| match a { + SimpleAction::Put { + prop: Key::Seq(elem_id), + tagged_value, + .. + } => SimpleAction::Insert { + elem_id, + tagged_value, + }, + a => a, + }) +} + +/// Condense consecutive `SimpleAction::Insert` actions into one `TextAction::Splice` +fn text_actions<'a, I>(actions: I) -> impl Iterator> +where + I: Iterator>, +{ + TextActions { + ops: actions.peekable(), + } +} + +struct TextActions<'a, I: Iterator>> { + ops: Peekable, +} + +impl<'a, I: Iterator>> Iterator for TextActions<'a, I> { + type Item = TextAction<'a>; + + fn next(&mut self) -> Option { + if let Some(SimpleAction::Insert { .. }) = self.ops.peek() { + let (start, value) = match self.ops.next() { + Some(SimpleAction::Insert { + tagged_value: (value, opid), + .. 
+ }) => (opid, value), + _ => unreachable!(), + }; + let mut chars = match value { + Value::Scalar(Cow::Borrowed(ScalarValue::Str(s))) => s.to_string(), + _ => "\u{fffc}".to_string(), + }; + while let Some(SimpleAction::Insert { .. }) = self.ops.peek() { + if let Some(SimpleAction::Insert { + tagged_value: (value, _), + .. + }) = self.ops.next() + { + match value { + Value::Scalar(Cow::Borrowed(ScalarValue::Str(s))) => chars.push_str(s), + _ => chars.push('\u{fffc}'), + } + } + } + Some(TextAction::Splice { + start: ElemId(start), + chars, + }) + } else { + self.ops.next().map(TextAction::Action) + } + } +} + +#[cfg(test)] +mod tests { + use std::borrow::Cow; + + use crate::{transaction::Transactable, ObjType, OpObserver, Prop, ReadDoc, Value}; + + // Observer ops often carry a "tagged value", which is a value and the OpID of the op which + // created that value. For a lot of values (i.e. any scalar value) we don't care about the + // opid. This type implements `PartialEq` for the `Untagged` variant by ignoring the tag, which + // allows us to express tests which don't care about the tag. 
+ #[derive(Clone, Debug)] + enum ObservedValue { + Tagged(crate::Value<'static>, crate::ObjId), + Untagged(crate::Value<'static>), + } + + impl<'a> From<(Value<'a>, crate::ObjId)> for ObservedValue { + fn from(value: (Value<'a>, crate::ObjId)) -> Self { + Self::Tagged(value.0.into_owned(), value.1) + } + } + + impl PartialEq for ObservedValue { + fn eq(&self, other: &ObservedValue) -> bool { + match (self, other) { + (Self::Tagged(v1, o1), Self::Tagged(v2, o2)) => equal_vals(v1, v2) && o1 == o2, + (Self::Untagged(v1), Self::Untagged(v2)) => equal_vals(v1, v2), + (Self::Tagged(v1, _), Self::Untagged(v2)) => equal_vals(v1, v2), + (Self::Untagged(v1), Self::Tagged(v2, _)) => equal_vals(v1, v2), + } + } + } + + /// Consider counters equal if they have the same current value + fn equal_vals(v1: &Value<'_>, v2: &Value<'_>) -> bool { + match (v1, v2) { + (Value::Scalar(v1), Value::Scalar(v2)) => match (v1.as_ref(), v2.as_ref()) { + (crate::ScalarValue::Counter(c1), crate::ScalarValue::Counter(c2)) => { + c1.current == c2.current + } + _ => v1 == v2, + }, + _ => v1 == v2, + } + } + + #[derive(Debug, Clone, PartialEq)] + enum ObserverCall { + Put { + obj: crate::ObjId, + prop: Prop, + value: ObservedValue, + conflict: bool, + }, + Insert { + obj: crate::ObjId, + index: usize, + value: ObservedValue, + }, + SpliceText { + obj: crate::ObjId, + index: usize, + chars: String, + }, + } + + // A Vec is pretty hard to look at in a test failure. 
This wrapper prints the + // calls out in a nice table so it's easier to see what's different + #[derive(Clone, PartialEq)] + struct Calls(Vec); + + impl std::fmt::Debug for Calls { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut table = prettytable::Table::new(); + table.set_format(*prettytable::format::consts::FORMAT_NO_BORDER_LINE_SEPARATOR); + table.set_titles(prettytable::row![ + "Op", "Object", "Property", "Value", "Conflict" + ]); + for call in &self.0 { + match call { + ObserverCall::Put { + obj, + prop, + value, + conflict, + } => { + table.add_row(prettytable::row![ + "Put", + format!("{}", obj), + prop, + match value { + ObservedValue::Tagged(v, o) => format!("{} ({})", v, o), + ObservedValue::Untagged(v) => format!("{}", v), + }, + conflict + ]); + } + ObserverCall::Insert { obj, index, value } => { + table.add_row(prettytable::row![ + "Insert", + format!("{}", obj), + index, + match value { + ObservedValue::Tagged(v, o) => format!("{} ({})", v, o), + ObservedValue::Untagged(v) => format!("{}", v), + }, + "" + ]); + } + ObserverCall::SpliceText { obj, index, chars } => { + table.add_row(prettytable::row![ + "SpliceText", + format!("{}", obj), + index, + chars, + "" + ]); + } + } + } + let mut out = Vec::new(); + table.print(&mut out).unwrap(); + write!(f, "\n{}\n", String::from_utf8(out).unwrap()) + } + } + + struct ObserverStub { + ops: Vec, + text_as_seq: bool, + } + + impl ObserverStub { + fn new() -> Self { + Self { + ops: Vec::new(), + text_as_seq: true, + } + } + + fn new_text_v2() -> Self { + Self { + ops: Vec::new(), + text_as_seq: false, + } + } + } + + impl OpObserver for ObserverStub { + fn insert( + &mut self, + _doc: &R, + objid: crate::ObjId, + index: usize, + tagged_value: (crate::Value<'_>, crate::ObjId), + ) { + self.ops.push(ObserverCall::Insert { + obj: objid, + index, + value: tagged_value.into(), + }); + } + + fn splice_text( + &mut self, + _doc: &R, + objid: crate::ObjId, + index: usize, + value: 
&str, + ) { + self.ops.push(ObserverCall::SpliceText { + obj: objid, + index, + chars: value.to_string(), + }); + } + + fn put( + &mut self, + _doc: &R, + objid: crate::ObjId, + prop: crate::Prop, + tagged_value: (crate::Value<'_>, crate::ObjId), + conflict: bool, + ) { + self.ops.push(ObserverCall::Put { + obj: objid, + prop, + value: tagged_value.into(), + conflict, + }); + } + + fn expose( + &mut self, + _doc: &R, + _objid: crate::ObjId, + _prop: crate::Prop, + _tagged_value: (crate::Value<'_>, crate::ObjId), + _conflict: bool, + ) { + panic!("expose not expected"); + } + + fn increment( + &mut self, + _doc: &R, + _objid: crate::ObjId, + _prop: crate::Prop, + _tagged_value: (i64, crate::ObjId), + ) { + panic!("increment not expected"); + } + + fn delete_map(&mut self, _doc: &R, _objid: crate::ObjId, _key: &str) { + panic!("delete not expected"); + } + + fn delete_seq( + &mut self, + _doc: &R, + _objid: crate::ObjId, + _index: usize, + _num: usize, + ) { + panic!("delete not expected"); + } + + fn text_as_seq(&self) -> bool { + self.text_as_seq + } + } + + #[test] + fn basic_test() { + let mut doc = crate::AutoCommit::new(); + doc.put(crate::ROOT, "key", "value").unwrap(); + let map = doc.put_object(crate::ROOT, "map", ObjType::Map).unwrap(); + doc.put(&map, "nested_key", "value").unwrap(); + let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); + doc.insert(&list, 0, "value").unwrap(); + let text = doc.put_object(crate::ROOT, "text", ObjType::Text).unwrap(); + doc.insert(&text, 0, "a").unwrap(); + + let mut obs = ObserverStub::new(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "key".into(), + value: ObservedValue::Untagged("value".into()), + conflict: false, + }, + ObserverCall::Put { + obj: crate::ROOT, + prop: "list".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), + conflict: false, + }, + 
ObserverCall::Put { + obj: crate::ROOT, + prop: "map".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::Map), map.clone()), + conflict: false, + }, + ObserverCall::Put { + obj: crate::ROOT, + prop: "text".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::Text), text.clone()), + conflict: false, + }, + ObserverCall::Put { + obj: map.clone(), + prop: "nested_key".into(), + value: ObservedValue::Untagged("value".into()), + conflict: false, + }, + ObserverCall::Insert { + obj: list, + index: 0, + value: ObservedValue::Untagged("value".into()), + }, + ObserverCall::Insert { + obj: text, + index: 0, + value: ObservedValue::Untagged("a".into()), + }, + ]) + ); + } + + #[test] + fn test_deleted_ops_omitted() { + let mut doc = crate::AutoCommit::new(); + doc.put(crate::ROOT, "key", "value").unwrap(); + doc.delete(crate::ROOT, "key").unwrap(); + let map = doc.put_object(crate::ROOT, "map", ObjType::Map).unwrap(); + doc.put(&map, "nested_key", "value").unwrap(); + doc.delete(&map, "nested_key").unwrap(); + let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); + doc.insert(&list, 0, "value").unwrap(); + doc.delete(&list, 0).unwrap(); + let text = doc.put_object(crate::ROOT, "text", ObjType::Text).unwrap(); + doc.insert(&text, 0, "a").unwrap(); + doc.delete(&text, 0).unwrap(); + + doc.put_object(crate::ROOT, "deleted_map", ObjType::Map) + .unwrap(); + doc.delete(crate::ROOT, "deleted_map").unwrap(); + doc.put_object(crate::ROOT, "deleted_list", ObjType::List) + .unwrap(); + doc.delete(crate::ROOT, "deleted_list").unwrap(); + doc.put_object(crate::ROOT, "deleted_text", ObjType::Text) + .unwrap(); + doc.delete(crate::ROOT, "deleted_text").unwrap(); + + let mut obs = ObserverStub::new(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "list".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), + 
conflict: false, + }, + ObserverCall::Put { + obj: crate::ROOT, + prop: "map".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::Map), map.clone()), + conflict: false, + }, + ObserverCall::Put { + obj: crate::ROOT, + prop: "text".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::Text), text.clone()), + conflict: false, + }, + ]) + ); + } + + #[test] + fn test_text_spliced() { + let mut doc = crate::AutoCommit::new(); + let text = doc.put_object(crate::ROOT, "text", ObjType::Text).unwrap(); + doc.insert(&text, 0, "a").unwrap(); + doc.splice_text(&text, 1, 0, "bcdef").unwrap(); + doc.splice_text(&text, 2, 2, "g").unwrap(); + + let mut obs = ObserverStub::new_text_v2(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "text".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::Text), text.clone()), + conflict: false, + }, + ObserverCall::SpliceText { + obj: text, + index: 0, + chars: "abgef".to_string() + } + ]) + ); + } + + #[test] + fn test_counters() { + let actor1 = crate::ActorId::from("aa".as_bytes()); + let actor2 = crate::ActorId::from("bb".as_bytes()); + let mut doc = crate::AutoCommit::new().with_actor(actor2); + + let mut doc2 = doc.fork().with_actor(actor1); + doc2.put(crate::ROOT, "key", "someval").unwrap(); + + doc.put(crate::ROOT, "key", crate::ScalarValue::Counter(1.into())) + .unwrap(); + doc.increment(crate::ROOT, "key", 2).unwrap(); + doc.increment(crate::ROOT, "key", 3).unwrap(); + + doc.merge(&mut doc2).unwrap(); + + let mut obs = ObserverStub::new_text_v2(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ObserverCall::Put { + obj: crate::ROOT, + prop: "key".into(), + value: ObservedValue::Untagged(Value::Scalar(Cow::Owned( + crate::ScalarValue::Counter(6.into()) + ))), + conflict: true, + },]) + ); + } + + #[test] + fn test_multiple_list_insertions() { 
+ let mut doc = crate::AutoCommit::new(); + + let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); + doc.insert(&list, 0, 1).unwrap(); + doc.insert(&list, 1, 2).unwrap(); + + let mut obs = ObserverStub::new_text_v2(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "list".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), + conflict: false, + }, + ObserverCall::Insert { + obj: list.clone(), + index: 0, + value: ObservedValue::Untagged(1.into()), + }, + ObserverCall::Insert { + obj: list, + index: 1, + value: ObservedValue::Untagged(2.into()), + }, + ]) + ); + } + + #[test] + fn test_concurrent_insertions_at_same_index() { + let mut doc = crate::AutoCommit::new().with_actor(crate::ActorId::from("aa".as_bytes())); + + let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); + + let mut doc2 = doc.fork().with_actor(crate::ActorId::from("bb".as_bytes())); + + doc.insert(&list, 0, 1).unwrap(); + doc2.insert(&list, 0, 2).unwrap(); + doc.merge(&mut doc2).unwrap(); + + let mut obs = ObserverStub::new_text_v2(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "list".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), + conflict: false, + }, + ObserverCall::Insert { + obj: list.clone(), + index: 0, + value: ObservedValue::Untagged(2.into()), + }, + ObserverCall::Insert { + obj: list, + index: 1, + value: ObservedValue::Untagged(1.into()), + }, + ]) + ); + } + + #[test] + fn test_insert_objects() { + let mut doc = crate::AutoCommit::new().with_actor(crate::ActorId::from("aa".as_bytes())); + + let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); + + let map = doc.insert_object(&list, 0, ObjType::Map).unwrap(); + doc.put(&map, "key", 
"value").unwrap(); + + let mut obs = ObserverStub::new_text_v2(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "list".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), + conflict: false, + }, + ObserverCall::Insert { + obj: list.clone(), + index: 0, + value: ObservedValue::Tagged(Value::Object(ObjType::Map), map.clone()), + }, + ObserverCall::Put { + obj: map, + prop: "key".into(), + value: ObservedValue::Untagged("value".into()), + conflict: false + }, + ]) + ); + } + + #[test] + fn test_insert_and_update() { + let mut doc = crate::AutoCommit::new(); + + let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); + + doc.insert(&list, 0, "one").unwrap(); + doc.insert(&list, 1, "two").unwrap(); + doc.put(&list, 0, "three").unwrap(); + doc.put(&list, 1, "four").unwrap(); + + let mut obs = ObserverStub::new_text_v2(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "list".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), + conflict: false, + }, + ObserverCall::Insert { + obj: list.clone(), + index: 0, + value: ObservedValue::Untagged("three".into()), + }, + ObserverCall::Insert { + obj: list.clone(), + index: 1, + value: ObservedValue::Untagged("four".into()), + }, + ]) + ); + } +} diff --git a/rust/automerge/src/op_set.rs b/rust/automerge/src/op_set.rs index 5b50d2b0..aab8ce74 100644 --- a/rust/automerge/src/op_set.rs +++ b/rust/automerge/src/op_set.rs @@ -5,7 +5,7 @@ use crate::op_tree::{self, OpTree}; use crate::parents::Parents; use crate::query::{self, OpIdVisSearch, TreeQuery}; use crate::types::{self, ActorId, Key, ListEncoding, ObjId, Op, OpId, OpIds, OpType, Prop}; -use crate::{ObjType, OpObserver}; +use crate::ObjType; use fxhash::FxBuildHasher; use 
std::borrow::Borrow; use std::cmp::Ordering; @@ -13,7 +13,7 @@ use std::collections::HashMap; use std::ops::RangeBounds; mod load; -pub(crate) use load::{ObservedOpSetBuilder, OpSetBuilder}; +pub(crate) use load::OpSetBuilder; pub(crate) type OpSet = OpSetInternal; @@ -32,12 +32,6 @@ impl OpSetInternal { OpSetBuilder::new() } - /// Create a builder which passes each operation to `observer`. This will be significantly - /// slower than `OpSetBuilder` - pub(crate) fn observed_builder(observer: &mut O) -> ObservedOpSetBuilder<'_, O> { - ObservedOpSetBuilder::new(observer) - } - pub(crate) fn new() -> Self { let mut trees: HashMap<_, _, _> = Default::default(); trees.insert(ObjId::root(), OpTree::new()); @@ -64,7 +58,7 @@ impl OpSetInternal { } pub(crate) fn iter(&self) -> Iter<'_> { - let mut objs: Vec<_> = self.trees.iter().collect(); + let mut objs: Vec<_> = self.trees.iter().map(|t| (t.0, t.1.objtype, t.1)).collect(); objs.sort_by(|a, b| self.m.lamport_cmp((a.0).0, (b.0).0)); Iter { opset: self, @@ -73,6 +67,17 @@ impl OpSetInternal { } } + /// Iterate over objects in the opset in causal order + pub(crate) fn iter_objs( + &self, + ) -> impl Iterator)> + '_ { + let mut objs: Vec<_> = self.trees.iter().map(|t| (t.0, t.1.objtype, t.1)).collect(); + objs.sort_by(|a, b| self.m.lamport_cmp((a.0).0, (b.0).0)); + IterObjs { + trees: objs.into_iter(), + } + } + pub(crate) fn parents(&self, obj: ObjId) -> Parents<'_> { Parents { obj, ops: self } } @@ -286,7 +291,7 @@ impl Default for OpSetInternal { } impl<'a> IntoIterator for &'a OpSetInternal { - type Item = (&'a ObjId, &'a Op); + type Item = (&'a ObjId, ObjType, &'a Op); type IntoIter = Iter<'a>; @@ -295,27 +300,41 @@ impl<'a> IntoIterator for &'a OpSetInternal { } } +pub(crate) struct IterObjs<'a> { + trees: std::vec::IntoIter<(&'a ObjId, ObjType, &'a op_tree::OpTree)>, +} + +impl<'a> Iterator for IterObjs<'a> { + type Item = (&'a ObjId, ObjType, op_tree::OpTreeIter<'a>); + + fn next(&mut self) -> Option { + self.trees + 
.next() + .map(|(id, typ, tree)| (id, typ, tree.iter())) + } +} + #[derive(Clone)] pub(crate) struct Iter<'a> { opset: &'a OpSet, - trees: std::vec::IntoIter<(&'a ObjId, &'a op_tree::OpTree)>, - current: Option<(&'a ObjId, op_tree::OpTreeIter<'a>)>, + trees: std::vec::IntoIter<(&'a ObjId, ObjType, &'a op_tree::OpTree)>, + current: Option<(&'a ObjId, ObjType, op_tree::OpTreeIter<'a>)>, } impl<'a> Iterator for Iter<'a> { - type Item = (&'a ObjId, &'a Op); + type Item = (&'a ObjId, ObjType, &'a Op); fn next(&mut self) -> Option { - if let Some((id, tree)) = &mut self.current { + if let Some((id, typ, tree)) = &mut self.current { if let Some(next) = tree.next() { - return Some((id, next)); + return Some((id, *typ, next)); } } loop { - self.current = self.trees.next().map(|o| (o.0, o.1.iter())); - if let Some((obj, tree)) = &mut self.current { + self.current = self.trees.next().map(|o| (o.0, o.1, o.2.iter())); + if let Some((obj, typ, tree)) = &mut self.current { if let Some(next) = tree.next() { - return Some((obj, next)); + return Some((obj, *typ, next)); } } else { return None; diff --git a/rust/automerge/src/op_set/load.rs b/rust/automerge/src/op_set/load.rs index 0df7f6ef..e14f46b7 100644 --- a/rust/automerge/src/op_set/load.rs +++ b/rust/automerge/src/op_set/load.rs @@ -6,8 +6,7 @@ use super::{OpSet, OpTree}; use crate::{ op_tree::OpTreeInternal, storage::load::{DocObserver, LoadedObject}, - types::{ObjId, Op}, - Automerge, OpObserver, + types::ObjId, }; /// An opset builder which creates an optree for each object as it finishes loading, inserting the @@ -51,38 +50,3 @@ impl DocObserver for OpSetBuilder { } } } - -/// A DocObserver which just accumulates ops until the document has finished reconstructing and -/// then inserts all of the ops using `OpSet::insert_op_with_observer` -pub(crate) struct ObservedOpSetBuilder<'a, O: OpObserver> { - observer: &'a mut O, - ops: Vec<(ObjId, Op)>, -} - -impl<'a, O: OpObserver> ObservedOpSetBuilder<'a, O> { - pub(crate) fn 
new(observer: &'a mut O) -> Self { - Self { - observer, - ops: Vec::new(), - } - } -} - -impl<'a, O: OpObserver> DocObserver for ObservedOpSetBuilder<'a, O> { - type Output = OpSet; - - fn object_loaded(&mut self, object: LoadedObject) { - self.ops.reserve(object.ops.len()); - for op in object.ops { - self.ops.push((object.id, op)); - } - } - - fn finish(self, _metadata: super::OpSetMetadata) -> Self::Output { - let mut doc = Automerge::new(); - for (obj, op) in self.ops { - doc.insert_op_with_observer(&obj, op, self.observer); - } - doc.into_ops() - } -} diff --git a/rust/automerge/src/storage/chunk.rs b/rust/automerge/src/storage/chunk.rs index 06e31973..d0048528 100644 --- a/rust/automerge/src/storage/chunk.rs +++ b/rust/automerge/src/storage/chunk.rs @@ -286,7 +286,7 @@ impl Header { fn hash(typ: ChunkType, data: &[u8]) -> ChangeHash { let mut out = vec![u8::from(typ)]; leb128::write::unsigned(&mut out, data.len() as u64).unwrap(); - out.extend(data.as_ref()); + out.extend(data); let hash_result = Sha256::digest(out); let array: [u8; 32] = hash_result.into(); ChangeHash(array) diff --git a/rust/automerge/src/sync.rs b/rust/automerge/src/sync.rs index 5d71d989..d3b6b3fa 100644 --- a/rust/automerge/src/sync.rs +++ b/rust/automerge/src/sync.rs @@ -524,7 +524,7 @@ impl Message { encode_many(&mut buf, self.changes.iter_mut(), |buf, change| { leb128::write::unsigned(buf, change.raw_bytes().len() as u64).unwrap(); - buf.extend(change.raw_bytes().as_ref()) + buf.extend::<&[u8]>(change.raw_bytes().as_ref()) }); buf diff --git a/rust/automerge/src/transaction/inner.rs b/rust/automerge/src/transaction/inner.rs index 95f922f3..0fe735d5 100644 --- a/rust/automerge/src/transaction/inner.rs +++ b/rust/automerge/src/transaction/inner.rs @@ -1,6 +1,5 @@ use std::num::NonZeroU64; -use crate::automerge::Actor; use crate::exid::ExId; use crate::query::{self, OpIdSearch}; use crate::storage::Change as StoredChange; diff --git a/rust/deny.toml b/rust/deny.toml index 
12a562ce..473cdae8 100644 --- a/rust/deny.toml +++ b/rust/deny.toml @@ -110,6 +110,9 @@ exceptions = [ # should be revied more fully before release { allow = ["MPL-2.0"], name = "cbindgen" }, { allow = ["BSD-3-Clause"], name = "instant" }, + + # we only use prettytable in tests + { allow = ["BSD-3-Clause"], name = "prettytable" }, ] # Some crates don't have (easily) machine readable licensing information, From 1e33c9d9e0eb33e32dfffe5dd4045aac85822e6a Mon Sep 17 00:00:00 2001 From: Alex Good Date: Wed, 1 Feb 2023 18:08:22 +0000 Subject: [PATCH 713/730] Use Automerge::load instead of load_incremental if empty Problem: when running the sync protocol for a new document the API requires that the user create an empty document and then call `receive_sync_message` on that document. This results in the OpObserver for the new document being called with every single op in the document history. For documents with a large history this can be extremely time consuming, but the OpObserver doesn't need to know about all the hidden states. Solution: Modify `Automerge::load_with` and `Automerge::apply_changes_with` to check if the document is empty before applying changes. If the document _is_ empty then we don't call the observer for every change, but instead use `automerge::observe_current_state` to notify the observer of the new state once all the changes have been applied. 
--- javascript/test/legacy_tests.ts | 3 +- rust/automerge/src/automerge.rs | 71 +++++++++++++++++++++++++-- rust/automerge/src/automerge/tests.rs | 5 ++ rust/automerge/src/lib.rs | 2 +- 4 files changed, 73 insertions(+), 8 deletions(-) diff --git a/javascript/test/legacy_tests.ts b/javascript/test/legacy_tests.ts index 90c731d9..8c2e552e 100644 --- a/javascript/test/legacy_tests.ts +++ b/javascript/test/legacy_tests.ts @@ -1849,9 +1849,8 @@ describe("Automerge", () => { }) assert.deepStrictEqual(patches, [ { action: "put", path: ["birds"], value: [] }, - { action: "insert", path: ["birds", 0], values: [""] }, + { action: "insert", path: ["birds", 0], values: ["", ""] }, { action: "splice", path: ["birds", 0, 0], value: "Goldfinch" }, - { action: "insert", path: ["birds", 1], values: [""] }, { action: "splice", path: ["birds", 1, 0], value: "Chaffinch" }, ]) }) diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs index e0db8b5a..a7223c7c 100644 --- a/rust/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ -37,6 +37,15 @@ pub(crate) enum Actor { Cached(usize), } +/// What to do when loading a document partially succeeds +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum OnPartialLoad { + /// Ignore the error and return the loaded changes + Ignore, + /// Fail the entire load + Error, +} + /// An automerge document which does not manage transactions for you. 
/// /// ## Creating, loading, merging and forking documents @@ -121,6 +130,18 @@ impl Automerge { &self.ops } + /// Whether this document has any operations + pub fn is_empty(&self) -> bool { + self.history.is_empty() && self.queue.is_empty() + } + + pub(crate) fn actor_id(&self) -> ActorId { + match &self.actor { + Actor::Unused(id) => id.clone(), + Actor::Cached(idx) => self.ops.m.actors[*idx].clone(), + } + } + /// Remove the current actor from the opset if it has no ops /// /// If the current actor ID has no ops in the opset then remove it from the cache of actor IDs. @@ -410,20 +431,26 @@ impl Automerge { /// Load a document. pub fn load(data: &[u8]) -> Result { - Self::load_with::<()>(data, VerificationMode::Check, None) + Self::load_with::<()>(data, OnPartialLoad::Error, VerificationMode::Check, None) } /// Load a document without verifying the head hashes /// /// This is useful for debugging as it allows you to examine a corrupted document. pub fn load_unverified_heads(data: &[u8]) -> Result { - Self::load_with::<()>(data, VerificationMode::DontCheck, None) + Self::load_with::<()>( + data, + OnPartialLoad::Error, + VerificationMode::DontCheck, + None, + ) } /// Load a document with an observer #[tracing::instrument(skip(data, observer), err)] pub fn load_with( data: &[u8], + on_error: OnPartialLoad, mode: VerificationMode, mut observer: Option<&mut Obs>, ) -> Result { @@ -501,7 +528,11 @@ impl Automerge { am.apply_change(change, &mut observer); } } - load::LoadedChanges::Partial { error, .. } => return Err(error.into()), + load::LoadedChanges::Partial { error, .. 
} => { + if on_error == OnPartialLoad::Error { + return Err(error.into()); + } + } } if let Some(observer) = &mut observer { current_state::observe_current_state(&am, *observer); @@ -526,6 +557,18 @@ impl Automerge { data: &[u8], op_observer: Option<&mut Obs>, ) -> Result { + if self.is_empty() { + let mut doc = + Self::load_with::<()>(data, OnPartialLoad::Ignore, VerificationMode::Check, None)?; + doc = doc + .with_encoding(self.text_encoding) + .with_actor(self.actor_id()); + if let Some(obs) = op_observer { + current_state::observe_current_state(&doc, obs); + } + *self = doc; + return Ok(self.ops.len()); + } let changes = match load::load_changes(storage::parse::Input::new(data)) { load::LoadedChanges::Complete(c) => c, load::LoadedChanges::Partial { error, loaded, .. } => { @@ -566,6 +609,11 @@ impl Automerge { changes: I, mut op_observer: Option<&mut Obs>, ) -> Result<(), AutomergeError> { + // Record this so we can avoid observing each individual change and instead just observe + // the final state after all the changes have been applied. We can only do this for an + // empty document right now, once we have logic to produce the diffs between arbitrary + // states of the OpSet we can make this cleaner. 
+ let empty_at_start = self.is_empty(); for c in changes { if !self.history_index.contains_key(&c.hash()) { if self.duplicate_seq(&c) { @@ -575,7 +623,11 @@ impl Automerge { )); } if self.is_causally_ready(&c) { - self.apply_change(c, &mut op_observer); + if empty_at_start { + self.apply_change::<()>(c, &mut None); + } else { + self.apply_change(c, &mut op_observer); + } } else { self.queue.push(c); } @@ -583,7 +635,16 @@ impl Automerge { } while let Some(c) = self.pop_next_causally_ready_change() { if !self.history_index.contains_key(&c.hash()) { - self.apply_change(c, &mut op_observer); + if empty_at_start { + self.apply_change::<()>(c, &mut None); + } else { + self.apply_change(c, &mut op_observer); + } + } + } + if empty_at_start { + if let Some(observer) = &mut op_observer { + current_state::observe_current_state(self, *observer); } } Ok(()) diff --git a/rust/automerge/src/automerge/tests.rs b/rust/automerge/src/automerge/tests.rs index 8d533fed..3511c4ed 100644 --- a/rust/automerge/src/automerge/tests.rs +++ b/rust/automerge/src/automerge/tests.rs @@ -1507,6 +1507,11 @@ fn observe_counter_change_application() { let changes = doc.get_changes(&[]).unwrap().into_iter().cloned(); let mut new_doc = AutoCommit::new().with_observer(VecOpObserver::default()); + // make a new change to the doc to stop the empty doc logic from skipping the intermediate + // patches. The is probably not really necessary, we could update this test to just test that + // the correct final state is emitted. For now though, we leave it as is. 
+ new_doc.put(ROOT, "foo", "bar").unwrap(); + new_doc.observer().take_patches(); new_doc.apply_changes(changes).unwrap(); assert_eq!( new_doc.observer().take_patches(), diff --git a/rust/automerge/src/lib.rs b/rust/automerge/src/lib.rs index bafd8983..0b4cd743 100644 --- a/rust/automerge/src/lib.rs +++ b/rust/automerge/src/lib.rs @@ -274,7 +274,7 @@ mod values; #[cfg(feature = "optree-visualisation")] mod visualisation; -pub use crate::automerge::Automerge; +pub use crate::automerge::{Automerge, OnPartialLoad}; pub use autocommit::{AutoCommit, AutoCommitWithObs}; pub use autoserde::AutoSerde; pub use change::{Change, LoadError as LoadChangeError}; From 13a775ed9adc04c55067e3dc2eaa294fc862cb09 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 2 Feb 2023 13:28:22 +0000 Subject: [PATCH 714/730] Speed up loading by generating clocks on demand Context: currently we store a mapping from ChangeHash -> Clock, where `Clock` is the set of (ActorId, (Sequence number, max Op)) pairs derived from the given change and it's dependencies. This clock is used to determine what operations are visible at a given set of heads. Problem: populating this mapping for documents with large histories containing many actors can be very slow as for each change we have to allocate and merge a bunch of hashmaps. Solution: instead of creating the clocks on load, create an adjacency list based representation of the change graph and then derive the clock from this graph when it is needed. Traversing even large graphs is still almost as fast as looking up the clock in a hashmap. 
--- rust/automerge/src/automerge.rs | 135 ++++------- rust/automerge/src/change_graph.rs | 344 +++++++++++++++++++++++++++++ rust/automerge/src/clock.rs | 6 - rust/automerge/src/clocks.rs | 44 ---- rust/automerge/src/error.rs | 2 +- rust/automerge/src/lib.rs | 2 +- 6 files changed, 392 insertions(+), 141 deletions(-) create mode 100644 rust/automerge/src/change_graph.rs delete mode 100644 rust/automerge/src/clocks.rs diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs index a7223c7c..128d4418 100644 --- a/rust/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ -4,8 +4,7 @@ use std::fmt::Debug; use std::num::NonZeroU64; use std::ops::RangeBounds; -use crate::clock::ClockData; -use crate::clocks::Clocks; +use crate::change_graph::ChangeGraph; use crate::columnar::Key as EncodedKey; use crate::exid::ExId; use crate::keys::Keys; @@ -87,8 +86,8 @@ pub struct Automerge { history: Vec, /// Mapping from change hash to index into the history list. history_index: HashMap, - /// Mapping from change hash to vector clock at this state. - clocks: HashMap, + /// Graph of changes + change_graph: ChangeGraph, /// Mapping from actor index to list of seqs seen for them. states: HashMap>, /// Current dependencies of this document (heads hashes). 
@@ -111,7 +110,7 @@ impl Automerge { queue: vec![], history: vec![], history_index: HashMap::new(), - clocks: HashMap::new(), + change_graph: ChangeGraph::new(), states: HashMap::new(), ops: Default::default(), deps: Default::default(), @@ -477,14 +476,14 @@ impl Automerge { .map_err(|e| load::Error::InflateDocument(Box::new(e)))?; let mut hashes_by_index = HashMap::new(); let mut actor_to_history: HashMap> = HashMap::new(); - let mut clocks = Clocks::new(); + let mut change_graph = ChangeGraph::new(); for (index, change) in changes.iter().enumerate() { // SAFETY: This should be fine because we just constructed an opset containing // all the changes let actor_index = op_set.m.actors.lookup(change.actor_id()).unwrap(); actor_to_history.entry(actor_index).or_default().push(index); hashes_by_index.insert(index, change.hash()); - clocks.add_change(change, actor_index)?; + change_graph.add_change(change, actor_index)?; } let history_index = hashes_by_index.into_iter().map(|(k, v)| (v, k)).collect(); Self { @@ -492,7 +491,7 @@ impl Automerge { history: changes, history_index, states: actor_to_history, - clocks: clocks.into(), + change_graph, ops: op_set, deps: heads.into_iter().collect(), saved: Default::default(), @@ -824,16 +823,8 @@ impl Automerge { .filter(|hash| self.history_index.contains_key(hash)) .copied() .collect::>(); - let heads_clock = self.clock_at(&heads)?; - // keep the hashes that are concurrent or after the heads - changes.retain(|hash| { - self.clocks - .get(hash) - .unwrap() - .partial_cmp(&heads_clock) - .map_or(true, |o| o == Ordering::Greater) - }); + self.change_graph.remove_ancestors(changes, &heads); Ok(()) } @@ -841,7 +832,7 @@ impl Automerge { /// Get the changes since `have_deps` in this document using a clock internally. 
fn get_changes_clock(&self, have_deps: &[ChangeHash]) -> Result, AutomergeError> { // get the clock for the given deps - let clock = self.clock_at(have_deps)?; + let clock = self.clock_at(have_deps); // get the documents current clock @@ -875,26 +866,8 @@ impl Automerge { .find(|c| c.actor_id() == self.get_actor()); } - fn clock_at(&self, heads: &[ChangeHash]) -> Result { - if let Some(first_hash) = heads.first() { - let mut clock = self - .clocks - .get(first_hash) - .ok_or(AutomergeError::MissingHash(*first_hash))? - .clone(); - - for hash in &heads[1..] { - let c = self - .clocks - .get(hash) - .ok_or(AutomergeError::MissingHash(*hash))?; - clock.merge(c); - } - - Ok(clock) - } else { - Ok(Clock::new()) - } + fn clock_at(&self, heads: &[ChangeHash]) -> Clock { + self.change_graph.clock_for_heads(heads) } fn get_hash(&self, actor: usize, seq: u64) -> Result { @@ -920,22 +893,9 @@ impl Automerge { .push(history_index); self.history_index.insert(change.hash(), history_index); - let mut clock = Clock::new(); - for hash in change.deps() { - let c = self - .clocks - .get(hash) - .expect("Change's deps should already be in the document"); - clock.merge(c); - } - clock.include( - actor_index, - ClockData { - max_op: change.max_op(), - seq: change.seq(), - }, - ); - self.clocks.insert(change.hash(), clock); + self.change_graph + .add_change(&change, actor_index) + .expect("Change's deps should already be in the document"); self.history_index.insert(change.hash(), history_index); self.history.push(change); @@ -1197,9 +1157,8 @@ impl ReadDoc for Automerge { fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> { if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - return KeysAt::new(self, self.ops.keys_at(obj, clock)); - } + let clock = self.clock_at(heads); + return KeysAt::new(self, self.ops.keys_at(obj, clock)); } KeysAt::new(self, None) } @@ -1223,10 +1182,9 @@ impl ReadDoc for Automerge { heads: 
&[ChangeHash], ) -> MapRangeAt<'_, R> { if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - let iter_range = self.ops.map_range_at(obj, range, clock); - return MapRangeAt::new(self, iter_range); - } + let clock = self.clock_at(heads); + let iter_range = self.ops.map_range_at(obj, range, clock); + return MapRangeAt::new(self, iter_range); } MapRangeAt::new(self, None) } @@ -1250,10 +1208,9 @@ impl ReadDoc for Automerge { heads: &[ChangeHash], ) -> ListRangeAt<'_, R> { if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - let iter_range = self.ops.list_range_at(obj, range, clock); - return ListRangeAt::new(self, iter_range); - } + let clock = self.clock_at(heads); + let iter_range = self.ops.list_range_at(obj, range, clock); + return ListRangeAt::new(self, iter_range); } ListRangeAt::new(self, None) } @@ -1272,20 +1229,20 @@ impl ReadDoc for Automerge { fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> { if let Ok((obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - return match obj_type { - ObjType::Map | ObjType::Table => { - let iter_range = self.ops.map_range_at(obj, .., clock); - Values::new(self, iter_range) - } - ObjType::List | ObjType::Text => { - let iter_range = self.ops.list_range_at(obj, .., clock); - Values::new(self, iter_range) - } - }; + let clock = self.clock_at(heads); + match obj_type { + ObjType::Map | ObjType::Table => { + let iter_range = self.ops.map_range_at(obj, .., clock); + Values::new(self, iter_range) + } + ObjType::List | ObjType::Text => { + let iter_range = self.ops.list_range_at(obj, .., clock); + Values::new(self, iter_range) + } } + } else { + Values::empty(self) } - Values::empty(self) } fn length>(&self, obj: O) -> usize { @@ -1303,18 +1260,18 @@ impl ReadDoc for Automerge { fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { if let Ok((inner_obj, obj_type)) = 
self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - return if obj_type == ObjType::Map || obj_type == ObjType::Table { - self.keys_at(obj, heads).count() - } else { - let encoding = ListEncoding::new(obj_type, self.text_encoding); - self.ops - .search(&inner_obj, query::LenAt::new(clock, encoding)) - .len - }; + let clock = self.clock_at(heads); + if obj_type == ObjType::Map || obj_type == ObjType::Table { + self.keys_at(obj, heads).count() + } else { + let encoding = ListEncoding::new(obj_type, self.text_encoding); + self.ops + .search(&inner_obj, query::LenAt::new(clock, encoding)) + .len } + } else { + 0 } - 0 } fn object_type>(&self, obj: O) -> Result { @@ -1338,7 +1295,7 @@ impl ReadDoc for Automerge { heads: &[ChangeHash], ) -> Result { let obj = self.exid_to_obj(obj.as_ref())?.0; - let clock = self.clock_at(heads)?; + let clock = self.clock_at(heads); let query = self.ops.search(&obj, query::ListValsAt::new(clock)); let mut buffer = String::new(); for q in &query.ops { @@ -1413,7 +1370,7 @@ impl ReadDoc for Automerge { ) -> Result, ExId)>, AutomergeError> { let prop = prop.into(); let obj = self.exid_to_obj(obj.as_ref())?.0; - let clock = self.clock_at(heads)?; + let clock = self.clock_at(heads); let result = match prop { Prop::Map(p) => { let prop = self.ops.m.props.lookup(&p); diff --git a/rust/automerge/src/change_graph.rs b/rust/automerge/src/change_graph.rs new file mode 100644 index 00000000..01d269d8 --- /dev/null +++ b/rust/automerge/src/change_graph.rs @@ -0,0 +1,344 @@ +use std::collections::{BTreeMap, BTreeSet}; + +use crate::{ + clock::{Clock, ClockData}, + Change, ChangeHash, +}; + +/// The graph of changes +/// +/// This is a sort of adjacency list based representation, except that instead of using linked +/// lists, we keep all the edges and nodes in two vecs and reference them by index which plays nice +/// with the cache +#[derive(Debug, Clone)] +pub(crate) struct ChangeGraph { + nodes: Vec, + edges: Vec, + hashes: 
Vec, + nodes_by_hash: BTreeMap, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +struct NodeIdx(u32); + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +struct EdgeIdx(u32); + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +struct HashIdx(u32); + +#[derive(Debug, Clone)] +struct Edge { + // Edges are always child -> parent so we only store the target, the child is implicit + // as you get the edge from the child + target: NodeIdx, + next: Option, +} + +#[derive(Debug, Clone)] +struct ChangeNode { + hash_idx: HashIdx, + actor_index: usize, + seq: u64, + max_op: u64, + parents: Option, +} + +impl ChangeGraph { + pub(crate) fn new() -> Self { + Self { + nodes: Vec::new(), + edges: Vec::new(), + nodes_by_hash: BTreeMap::new(), + hashes: Vec::new(), + } + } + + pub(crate) fn add_change( + &mut self, + change: &Change, + actor_idx: usize, + ) -> Result<(), MissingDep> { + let hash = change.hash(); + if self.nodes_by_hash.contains_key(&hash) { + return Ok(()); + } + let parent_indices = change + .deps() + .iter() + .map(|h| self.nodes_by_hash.get(h).copied().ok_or(MissingDep(*h))) + .collect::, _>>()?; + let node_idx = self.add_node(actor_idx, change); + self.nodes_by_hash.insert(hash, node_idx); + for parent_idx in parent_indices { + self.add_parent(node_idx, parent_idx); + } + Ok(()) + } + + fn add_node(&mut self, actor_index: usize, change: &Change) -> NodeIdx { + let idx = NodeIdx(self.nodes.len() as u32); + let hash_idx = self.add_hash(change.hash()); + self.nodes.push(ChangeNode { + hash_idx, + actor_index, + seq: change.seq(), + max_op: change.max_op(), + parents: None, + }); + idx + } + + fn add_hash(&mut self, hash: ChangeHash) -> HashIdx { + let idx = HashIdx(self.hashes.len() as u32); + self.hashes.push(hash); + idx + } + + fn add_parent(&mut self, child_idx: NodeIdx, parent_idx: NodeIdx) { + let new_edge_idx = EdgeIdx(self.edges.len() as u32); + let new_edge = Edge { + target: parent_idx, + next: None, + }; + 
self.edges.push(new_edge); + + let child = &mut self.nodes[child_idx.0 as usize]; + if let Some(edge_idx) = child.parents { + let mut edge = &mut self.edges[edge_idx.0 as usize]; + while let Some(next) = edge.next { + edge = &mut self.edges[next.0 as usize]; + } + edge.next = Some(new_edge_idx); + } else { + child.parents = Some(new_edge_idx); + } + } + + fn parents(&self, node_idx: NodeIdx) -> impl Iterator + '_ { + let mut edge_idx = self.nodes[node_idx.0 as usize].parents; + std::iter::from_fn(move || { + let this_edge_idx = edge_idx?; + let edge = &self.edges[this_edge_idx.0 as usize]; + edge_idx = edge.next; + Some(edge.target) + }) + } + + pub(crate) fn clock_for_heads(&self, heads: &[ChangeHash]) -> Clock { + let mut clock = Clock::new(); + + self.traverse_ancestors(heads, |node, _hash| { + clock.include( + node.actor_index, + ClockData { + max_op: node.max_op, + seq: node.seq, + }, + ); + }); + + clock + } + + pub(crate) fn remove_ancestors( + &self, + changes: &mut BTreeSet, + heads: &[ChangeHash], + ) { + self.traverse_ancestors(heads, |_node, hash| { + changes.remove(hash); + }); + } + + /// Call `f` for each (node, hash) in the graph, starting from the given heads + /// + /// No guarantees are made about the order of traversal but each node will only be visited + /// once. 
+ fn traverse_ancestors( + &self, + heads: &[ChangeHash], + mut f: F, + ) { + let mut to_visit = heads + .iter() + .filter_map(|h| self.nodes_by_hash.get(h)) + .copied() + .collect::>(); + + let mut visited = BTreeSet::new(); + + while let Some(idx) = to_visit.pop() { + if visited.contains(&idx) { + continue; + } else { + visited.insert(idx); + } + let node = &self.nodes[idx.0 as usize]; + let hash = &self.hashes[node.hash_idx.0 as usize]; + f(node, hash); + to_visit.extend(self.parents(idx)); + } + } +} + +#[derive(Debug, thiserror::Error)] +#[error("attempted to derive a clock for a change with dependencies we don't have")] +pub struct MissingDep(ChangeHash); + +#[cfg(test)] +mod tests { + use std::{ + num::NonZeroU64, + time::{SystemTime, UNIX_EPOCH}, + }; + + use crate::{ + clock::ClockData, + op_tree::OpSetMetadata, + storage::{change::ChangeBuilder, convert::op_as_actor_id}, + types::{Key, ObjId, Op, OpId, OpIds}, + ActorId, + }; + + use super::*; + + #[test] + fn clock_by_heads() { + let mut builder = TestGraphBuilder::new(); + let actor1 = builder.actor(); + let actor2 = builder.actor(); + let actor3 = builder.actor(); + let change1 = builder.change(&actor1, 10, &[]); + let change2 = builder.change(&actor2, 20, &[change1]); + let change3 = builder.change(&actor3, 30, &[change1]); + let change4 = builder.change(&actor1, 10, &[change2, change3]); + let graph = builder.build(); + + let mut expected_clock = Clock::new(); + expected_clock.include(builder.index(&actor1), ClockData { max_op: 50, seq: 2 }); + expected_clock.include(builder.index(&actor2), ClockData { max_op: 30, seq: 1 }); + expected_clock.include(builder.index(&actor3), ClockData { max_op: 40, seq: 1 }); + + let clock = graph.clock_for_heads(&[change4]); + assert_eq!(clock, expected_clock); + } + + #[test] + fn remove_ancestors() { + let mut builder = TestGraphBuilder::new(); + let actor1 = builder.actor(); + let actor2 = builder.actor(); + let actor3 = builder.actor(); + let change1 = 
builder.change(&actor1, 10, &[]); + let change2 = builder.change(&actor2, 20, &[change1]); + let change3 = builder.change(&actor3, 30, &[change1]); + let change4 = builder.change(&actor1, 10, &[change2, change3]); + let graph = builder.build(); + + let mut changes = vec![change1, change2, change3, change4] + .into_iter() + .collect::>(); + let heads = vec![change2]; + graph.remove_ancestors(&mut changes, &heads); + + let expected_changes = vec![change3, change4].into_iter().collect::>(); + + assert_eq!(changes, expected_changes); + } + + struct TestGraphBuilder { + actors: Vec, + changes: Vec, + seqs_by_actor: BTreeMap, + } + + impl TestGraphBuilder { + fn new() -> Self { + TestGraphBuilder { + actors: Vec::new(), + changes: Vec::new(), + seqs_by_actor: BTreeMap::new(), + } + } + + fn actor(&mut self) -> ActorId { + let actor = ActorId::random(); + self.actors.push(actor.clone()); + actor + } + + fn index(&self, actor: &ActorId) -> usize { + self.actors.iter().position(|a| a == actor).unwrap() + } + + /// Create a change with `num_new_ops` and `parents` for `actor` + /// + /// The `start_op` and `seq` of the change will be computed from the + /// previous changes for the same actor. 
+ fn change( + &mut self, + actor: &ActorId, + num_new_ops: usize, + parents: &[ChangeHash], + ) -> ChangeHash { + let mut meta = OpSetMetadata::from_actors(self.actors.clone()); + let key = meta.props.cache("key".to_string()); + + let start_op = parents + .iter() + .map(|c| { + self.changes + .iter() + .find(|change| change.hash() == *c) + .unwrap() + .max_op() + }) + .max() + .unwrap_or(0) + + 1; + + let actor_idx = self.index(actor); + let ops = (0..num_new_ops) + .map(|opnum| Op { + id: OpId::new(start_op + opnum as u64, actor_idx), + action: crate::OpType::Put("value".into()), + key: Key::Map(key), + succ: OpIds::empty(), + pred: OpIds::empty(), + insert: false, + }) + .collect::>(); + + let root = ObjId::root(); + let timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_millis() as i64; + let seq = self.seqs_by_actor.entry(actor.clone()).or_insert(1); + let change = Change::new( + ChangeBuilder::new() + .with_dependencies(parents.to_vec()) + .with_start_op(NonZeroU64::new(start_op).unwrap()) + .with_actor(actor.clone()) + .with_seq(*seq) + .with_timestamp(timestamp) + .build(ops.iter().map(|op| op_as_actor_id(&root, op, &meta))) + .unwrap(), + ); + *seq = seq.checked_add(1).unwrap(); + let hash = change.hash(); + self.changes.push(change); + hash + } + + fn build(&self) -> ChangeGraph { + let mut graph = ChangeGraph::new(); + for change in &self.changes { + let actor_idx = self.index(change.actor_id()); + graph.add_change(change, actor_idx).unwrap(); + } + graph + } + } +} diff --git a/rust/automerge/src/clock.rs b/rust/automerge/src/clock.rs index 79125323..64d00fcf 100644 --- a/rust/automerge/src/clock.rs +++ b/rust/automerge/src/clock.rs @@ -71,12 +71,6 @@ impl Clock { self.0.get(actor_index) } - pub(crate) fn merge(&mut self, other: &Self) { - for (actor, data) in &other.0 { - self.include(*actor, *data); - } - } - fn is_greater(&self, other: &Self) -> bool { let mut has_greater = false; diff --git a/rust/automerge/src/clocks.rs 
b/rust/automerge/src/clocks.rs deleted file mode 100644 index 60fc5c71..00000000 --- a/rust/automerge/src/clocks.rs +++ /dev/null @@ -1,44 +0,0 @@ -use crate::{ - clock::{Clock, ClockData}, - Change, ChangeHash, -}; -use std::collections::HashMap; - -pub(crate) struct Clocks(HashMap); - -#[derive(Debug, thiserror::Error)] -#[error("attempted to derive a clock for a change with dependencies we don't have")] -pub struct MissingDep(ChangeHash); - -impl Clocks { - pub(crate) fn new() -> Self { - Self(HashMap::new()) - } - - pub(crate) fn add_change( - &mut self, - change: &Change, - actor_index: usize, - ) -> Result<(), MissingDep> { - let mut clock = Clock::new(); - for hash in change.deps() { - let c = self.0.get(hash).ok_or(MissingDep(*hash))?; - clock.merge(c); - } - clock.include( - actor_index, - ClockData { - max_op: change.max_op(), - seq: change.seq(), - }, - ); - self.0.insert(change.hash(), clock); - Ok(()) - } -} - -impl From for HashMap { - fn from(c: Clocks) -> Self { - c.0 - } -} diff --git a/rust/automerge/src/error.rs b/rust/automerge/src/error.rs index 0f024d86..57a87167 100644 --- a/rust/automerge/src/error.rs +++ b/rust/automerge/src/error.rs @@ -7,7 +7,7 @@ use thiserror::Error; #[derive(Error, Debug)] pub enum AutomergeError { #[error(transparent)] - Clocks(#[from] crate::clocks::MissingDep), + ChangeGraph(#[from] crate::change_graph::MissingDep), #[error("failed to load compressed data: {0}")] Deflate(#[source] std::io::Error), #[error("duplicate seq {0} found for actor {1}")] diff --git a/rust/automerge/src/lib.rs b/rust/automerge/src/lib.rs index 0b4cd743..fb8a3793 100644 --- a/rust/automerge/src/lib.rs +++ b/rust/automerge/src/lib.rs @@ -244,8 +244,8 @@ mod autocommit; mod automerge; mod autoserde; mod change; +mod change_graph; mod clock; -mod clocks; mod columnar; mod convert; mod error; From c5fde2802f8dfeaadd2394942d1deebbb7a590d7 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Fri, 3 Feb 2023 15:53:09 +0000 Subject: [PATCH 715/730] 
@automerge/automerge-wasm@0.1.24 and @automerge/automerge@2.0.2-alpha.1 --- javascript/package.json | 4 ++-- rust/automerge-wasm/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/javascript/package.json b/javascript/package.json index 017c5a54..8712920c 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.1", + "version": "2.0.2-alpha.1", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", @@ -47,7 +47,7 @@ "typescript": "^4.9.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.23", + "@automerge/automerge-wasm": "0.1.24", "uuid": "^9.0.0" } } diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index cce3199f..57354ce1 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.23", + "version": "0.1.24", "license": "MIT", "files": [ "README.md", From a24d536d16f2adeea7bbdf094402665a80f400ab Mon Sep 17 00:00:00 2001 From: Alex Good Date: Sat, 4 Feb 2023 14:05:10 +0000 Subject: [PATCH 716/730] Move automerge::SequenceTree to automerge_wasm::SequenceTree The `SequenceTree` is only ever used in `automerge_wasm` so move it there. 
--- rust/automerge-wasm/Cargo.toml | 1 + rust/automerge-wasm/src/lib.rs | 1 + rust/automerge-wasm/src/observer.rs | 4 +- .../src/sequence_tree.rs | 81 +++---------------- rust/automerge/src/lib.rs | 3 - 5 files changed, 14 insertions(+), 76 deletions(-) rename rust/{automerge => automerge-wasm}/src/sequence_tree.rs (87%) diff --git a/rust/automerge-wasm/Cargo.toml b/rust/automerge-wasm/Cargo.toml index 3d2fafe4..b6055a7d 100644 --- a/rust/automerge-wasm/Cargo.toml +++ b/rust/automerge-wasm/Cargo.toml @@ -57,5 +57,6 @@ features = ["console"] [dev-dependencies] futures = "^0.1" +proptest = { version = "^1.0.0", default-features = false, features = ["std"] } wasm-bindgen-futures = "^0.4" wasm-bindgen-test = "^0.3" diff --git a/rust/automerge-wasm/src/lib.rs b/rust/automerge-wasm/src/lib.rs index b53bf3b9..09072ca7 100644 --- a/rust/automerge-wasm/src/lib.rs +++ b/rust/automerge-wasm/src/lib.rs @@ -41,6 +41,7 @@ use wasm_bindgen::JsCast; mod interop; mod observer; +mod sequence_tree; mod sync; mod value; diff --git a/rust/automerge-wasm/src/observer.rs b/rust/automerge-wasm/src/observer.rs index c0b462a6..2351c762 100644 --- a/rust/automerge-wasm/src/observer.rs +++ b/rust/automerge-wasm/src/observer.rs @@ -6,10 +6,12 @@ use crate::{ interop::{self, alloc, js_set}, TextRepresentation, }; -use automerge::{ObjId, OpObserver, Prop, ReadDoc, ScalarValue, SequenceTree, Value}; +use automerge::{ObjId, OpObserver, Prop, ReadDoc, ScalarValue, Value}; use js_sys::{Array, Object}; use wasm_bindgen::prelude::*; +use crate::sequence_tree::SequenceTree; + #[derive(Debug, Clone, Default)] pub(crate) struct Observer { enabled: bool, diff --git a/rust/automerge/src/sequence_tree.rs b/rust/automerge-wasm/src/sequence_tree.rs similarity index 87% rename from rust/automerge/src/sequence_tree.rs rename to rust/automerge-wasm/src/sequence_tree.rs index f95ceab3..91b183a2 100644 --- a/rust/automerge/src/sequence_tree.rs +++ b/rust/automerge-wasm/src/sequence_tree.rs @@ -5,10 +5,10 @@ use 
std::{ }; pub(crate) const B: usize = 16; -pub type SequenceTree = SequenceTreeInternal; +pub(crate) type SequenceTree = SequenceTreeInternal; #[derive(Clone, Debug)] -pub struct SequenceTreeInternal { +pub(crate) struct SequenceTreeInternal { root_node: Option>, } @@ -24,22 +24,17 @@ where T: Clone + Debug, { /// Construct a new, empty, sequence. - pub fn new() -> Self { + pub(crate) fn new() -> Self { Self { root_node: None } } /// Get the length of the sequence. - pub fn len(&self) -> usize { + pub(crate) fn len(&self) -> usize { self.root_node.as_ref().map_or(0, |n| n.len()) } - /// Check if the sequence is empty. - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - /// Create an iterator through the sequence. - pub fn iter(&self) -> Iter<'_, T> { + pub(crate) fn iter(&self) -> Iter<'_, T> { Iter { inner: self, index: 0, @@ -51,7 +46,7 @@ where /// # Panics /// /// Panics if `index > len`. - pub fn insert(&mut self, index: usize, element: T) { + pub(crate) fn insert(&mut self, index: usize, element: T) { let old_len = self.len(); if let Some(root) = self.root_node.as_mut() { #[cfg(debug_assertions)] @@ -94,27 +89,22 @@ where } /// Push the `element` onto the back of the sequence. - pub fn push(&mut self, element: T) { + pub(crate) fn push(&mut self, element: T) { let l = self.len(); self.insert(l, element) } /// Get the `element` at `index` in the sequence. - pub fn get(&self, index: usize) -> Option<&T> { + pub(crate) fn get(&self, index: usize) -> Option<&T> { self.root_node.as_ref().and_then(|n| n.get(index)) } - /// Get the `element` at `index` in the sequence. - pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { - self.root_node.as_mut().and_then(|n| n.get_mut(index)) - } - /// Removes the element at `index` from the sequence. /// /// # Panics /// /// Panics if `index` is out of bounds. 
- pub fn remove(&mut self, index: usize) -> T { + pub(crate) fn remove(&mut self, index: usize) -> T { if let Some(root) = self.root_node.as_mut() { #[cfg(debug_assertions)] let len = root.check(); @@ -135,15 +125,6 @@ where panic!("remove from empty tree") } } - - /// Update the `element` at `index` in the sequence, returning the old value. - /// - /// # Panics - /// - /// Panics if `index > len` - pub fn set(&mut self, index: usize, element: T) -> T { - self.root_node.as_mut().unwrap().set(index, element) - } } impl SequenceTreeNode @@ -432,30 +413,6 @@ where assert!(self.is_full()); } - pub(crate) fn set(&mut self, index: usize, element: T) -> T { - if self.is_leaf() { - let old_element = self.elements.get_mut(index).unwrap(); - mem::replace(old_element, element) - } else { - let mut cumulative_len = 0; - for (child_index, child) in self.children.iter_mut().enumerate() { - match (cumulative_len + child.len()).cmp(&index) { - Ordering::Less => { - cumulative_len += child.len() + 1; - } - Ordering::Equal => { - let old_element = self.elements.get_mut(child_index).unwrap(); - return mem::replace(old_element, element); - } - Ordering::Greater => { - return child.set(index - cumulative_len, element); - } - } - } - panic!("Invalid index to set: {} but len was {}", index, self.len()) - } - } - pub(crate) fn get(&self, index: usize) -> Option<&T> { if self.is_leaf() { return self.elements.get(index); @@ -475,26 +432,6 @@ where } None } - - pub(crate) fn get_mut(&mut self, index: usize) -> Option<&mut T> { - if self.is_leaf() { - return self.elements.get_mut(index); - } else { - let mut cumulative_len = 0; - for (child_index, child) in self.children.iter_mut().enumerate() { - match (cumulative_len + child.len()).cmp(&index) { - Ordering::Less => { - cumulative_len += child.len() + 1; - } - Ordering::Equal => return self.elements.get_mut(child_index), - Ordering::Greater => { - return child.get_mut(index - cumulative_len); - } - } - } - } - None - } } impl Default for 
SequenceTreeInternal diff --git a/rust/automerge/src/lib.rs b/rust/automerge/src/lib.rs index fb8a3793..cbb535af 100644 --- a/rust/automerge/src/lib.rs +++ b/rust/automerge/src/lib.rs @@ -264,7 +264,6 @@ mod op_tree; mod parents; mod query; mod read; -mod sequence_tree; mod storage; pub mod sync; pub mod transaction; @@ -294,8 +293,6 @@ pub use op_observer::Patch; pub use op_observer::VecOpObserver; pub use parents::{Parent, Parents}; pub use read::ReadDoc; -#[doc(hidden)] -pub use sequence_tree::SequenceTree; pub use types::{ActorId, ChangeHash, ObjType, OpType, ParseChangeHashError, Prop, TextEncoding}; pub use value::{ScalarValue, Value}; pub use values::Values; From 11f063cbfe71bb81d849baca89f5eba8d441d594 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Thu, 9 Feb 2023 11:06:08 +0000 Subject: [PATCH 717/730] Remove nightly from CI --- .github/workflows/ci.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c2d469d5..bfa31bd5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -137,8 +137,6 @@ jobs: matrix: toolchain: - 1.66.0 - - nightly - continue-on-error: ${{ matrix.toolchain == 'nightly' }} steps: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 From 2cd7427f35e3b9b4a6b4d22d21dd083872015b57 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 17 Jan 2023 14:51:02 -0700 Subject: [PATCH 718/730] Use our leb128 parser for values This ensures that values in automerge documents are encoded correctly, and that no extra data is smuggled in any LEB fields. 
--- .../src/columnar/column_range/value.rs | 62 +++++++++--------- rust/automerge/src/columnar/encoding.rs | 2 + ...counter_value_has_incorrect_meta.automerge | Bin 0 -> 63 bytes .../fixtures/counter_value_is_ok.automerge | Bin 0 -> 63 bytes .../counter_value_is_overlong.automerge | Bin 0 -> 63 bytes rust/automerge/tests/test.rs | 14 ++++ 6 files changed, 48 insertions(+), 30 deletions(-) create mode 100644 rust/automerge/tests/fixtures/counter_value_has_incorrect_meta.automerge create mode 100644 rust/automerge/tests/fixtures/counter_value_is_ok.automerge create mode 100644 rust/automerge/tests/fixtures/counter_value_is_overlong.automerge diff --git a/rust/automerge/src/columnar/column_range/value.rs b/rust/automerge/src/columnar/column_range/value.rs index 43f63437..03a5aa60 100644 --- a/rust/automerge/src/columnar/column_range/value.rs +++ b/rust/automerge/src/columnar/column_range/value.rs @@ -4,10 +4,15 @@ use crate::{ columnar::{ encoding::{ leb128::{lebsize, ulebsize}, - raw, DecodeColumnError, RawBytes, RawDecoder, RawEncoder, RleDecoder, RleEncoder, Sink, + raw, DecodeColumnError, DecodeError, RawBytes, RawDecoder, RawEncoder, RleDecoder, + RleEncoder, Sink, }, SpliceError, }, + storage::parse::{ + leb128::{leb128_i64, leb128_u64}, + Input, ParseResult, + }, ScalarValue, }; @@ -217,18 +222,8 @@ impl<'a> Iterator for ValueIter<'a> { ValueType::Null => Some(Ok(ScalarValue::Null)), ValueType::True => Some(Ok(ScalarValue::Boolean(true))), ValueType::False => Some(Ok(ScalarValue::Boolean(false))), - ValueType::Uleb => self.parse_raw(val_meta, |mut bytes| { - let val = leb128::read::unsigned(&mut bytes).map_err(|e| { - DecodeColumnError::invalid_value("value", e.to_string()) - })?; - Ok(ScalarValue::Uint(val)) - }), - ValueType::Leb => self.parse_raw(val_meta, |mut bytes| { - let val = leb128::read::signed(&mut bytes).map_err(|e| { - DecodeColumnError::invalid_value("value", e.to_string()) - })?; - Ok(ScalarValue::Int(val)) - }), + ValueType::Uleb => 
self.parse_input(val_meta, leb128_u64), + ValueType::Leb => self.parse_input(val_meta, leb128_i64), ValueType::String => self.parse_raw(val_meta, |bytes| { let val = std::str::from_utf8(bytes) .map_err(|e| DecodeColumnError::invalid_value("value", e.to_string()))? @@ -250,17 +245,11 @@ impl<'a> Iterator for ValueIter<'a> { let val = f64::from_le_bytes(raw); Ok(ScalarValue::F64(val)) }), - ValueType::Counter => self.parse_raw(val_meta, |mut bytes| { - let val = leb128::read::signed(&mut bytes).map_err(|e| { - DecodeColumnError::invalid_value("value", e.to_string()) - })?; - Ok(ScalarValue::Counter(val.into())) + ValueType::Counter => self.parse_input(val_meta, |input| { + leb128_i64(input).map(|(i, n)| (i, ScalarValue::Counter(n.into()))) }), - ValueType::Timestamp => self.parse_raw(val_meta, |mut bytes| { - let val = leb128::read::signed(&mut bytes).map_err(|e| { - DecodeColumnError::invalid_value("value", e.to_string()) - })?; - Ok(ScalarValue::Timestamp(val)) + ValueType::Timestamp => self.parse_input(val_meta, |input| { + leb128_i64(input).map(|(i, n)| (i, ScalarValue::Timestamp(n))) }), ValueType::Unknown(code) => self.parse_raw(val_meta, |bytes| { Ok(ScalarValue::Unknown { @@ -284,8 +273,8 @@ impl<'a> Iterator for ValueIter<'a> { } impl<'a> ValueIter<'a> { - fn parse_raw Result>( - &mut self, + fn parse_raw<'b, R, F: Fn(&'b [u8]) -> Result>( + &'b mut self, meta: ValueMeta, f: F, ) -> Option> { @@ -298,11 +287,24 @@ impl<'a> ValueIter<'a> { } Ok(bytes) => bytes, }; - let val = match f(raw) { - Ok(v) => v, - Err(e) => return Some(Err(e)), - }; - Some(Ok(val)) + Some(f(raw)) + } + + fn parse_input<'b, R, F: Fn(Input<'b>) -> ParseResult<'b, R, DecodeError>>( + &'b mut self, + meta: ValueMeta, + f: F, + ) -> Option> + where + R: Into, + { + self.parse_raw(meta, |raw| match f(Input::new(raw)) { + Err(e) => Err(DecodeColumnError::invalid_value("value", e.to_string())), + Ok((i, _)) if !i.is_empty() => { + Err(DecodeColumnError::invalid_value("value", "extra bytes")) 
+ } + Ok((_, v)) => Ok(v.into()), + }) } pub(crate) fn done(&self) -> bool { diff --git a/rust/automerge/src/columnar/encoding.rs b/rust/automerge/src/columnar/encoding.rs index bbdb34a8..c9435448 100644 --- a/rust/automerge/src/columnar/encoding.rs +++ b/rust/automerge/src/columnar/encoding.rs @@ -46,6 +46,8 @@ pub(crate) enum DecodeError { FromInt(#[from] std::num::TryFromIntError), #[error("bad leb128")] BadLeb(#[from] ::leb128::read::Error), + #[error(transparent)] + BadLeb128(#[from] crate::storage::parse::leb128::Error), #[error("attempted to allocate {attempted} which is larger than the maximum of {maximum}")] OverlargeAllocation { attempted: usize, maximum: usize }, #[error("invalid string encoding")] diff --git a/rust/automerge/tests/fixtures/counter_value_has_incorrect_meta.automerge b/rust/automerge/tests/fixtures/counter_value_has_incorrect_meta.automerge new file mode 100644 index 0000000000000000000000000000000000000000..2290b446ca661f302f6591c522a6653ba0be54a6 GIT binary patch literal 63 zcmZq8_iDCFPJPB`${^6qmb+L*-z{NbN`A*m!H-iI8Mkb^bm5T!0|T2Vvk9XUQy5b? 
TQvp*wVH2@I&u}A*O5KaD{l&S)MXnSh`0lxRq(Bd!v00tEUGyy^a VRsvT7Z~}h;VF7;ue<;uoe*j$F7aafq literal 0 HcmV?d00001 diff --git a/rust/automerge/tests/fixtures/counter_value_is_overlong.automerge b/rust/automerge/tests/fixtures/counter_value_is_overlong.automerge new file mode 100644 index 0000000000000000000000000000000000000000..831346f7f4109e2f292e502e13b326ca2485b351 GIT binary patch literal 63 zcmZq8_iD~Rd#9GsltG}IEqAeszFWe=l>CmBf*+?aGH%&+>B1ue1_m}!W)nsyrZA>( TrUIsV#ze+?#(Iql_4Nz@=B*VY literal 0 HcmV?d00001 diff --git a/rust/automerge/tests/test.rs b/rust/automerge/tests/test.rs index ca6c64c0..191ce2f9 100644 --- a/rust/automerge/tests/test.rs +++ b/rust/automerge/tests/test.rs @@ -1412,6 +1412,20 @@ fn fuzz_crashers() { } } +fn fixture(name: &str) -> Vec { + fs::read("./tests/fixtures/".to_owned() + name).unwrap() +} + +#[test] +fn overlong_leb() { + // the value metadata says "2", but the LEB is only 1-byte long and there's an extra 0 + assert!(Automerge::load(&fixture("counter_value_has_incorrect_meta.automerge")).is_err()); + // the LEB is overlong (using 2 bytes where one would have sufficed) + assert!(Automerge::load(&fixture("counter_value_is_overlong.automerge")).is_err()); + // the LEB is correct + assert!(Automerge::load(&fixture("counter_value_is_ok.automerge")).is_ok()); +} + #[test] fn negative_64() { let mut doc = Automerge::new(); From 5e82dbc3c83c2336ca675ba8f167db5dba9b17cb Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 13 Feb 2023 21:17:27 -0600 Subject: [PATCH 719/730] rework how skip works to push the logic into node --- javascript/test/basic_test.ts | 16 +++++ rust/automerge/src/op_tree/node.rs | 68 +++++++++++-------- rust/automerge/src/query/prop.rs | 47 ++----------- rust/automerge/src/query/seek_op.rs | 39 ++--------- .../automerge/src/query/seek_op_with_patch.rs | 38 +---------- 5 files changed, 67 insertions(+), 141 deletions(-) diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts index 5aa1ac34..0e30dc7c 100644 
--- a/javascript/test/basic_test.ts +++ b/javascript/test/basic_test.ts @@ -58,6 +58,22 @@ describe("Automerge", () => { }) }) + it("should be able to insert and delete a large number of properties", () => { + let doc = Automerge.init() + + doc = Automerge.change(doc, doc => { + doc['k1'] = true; + }); + + for (let idx = 1; idx <= 200; idx++) { + doc = Automerge.change(doc, doc => { + delete doc['k' + idx]; + doc['k' + (idx + 1)] = true; + assert(Object.keys(doc).length == 1) + }); + } + }) + it("can detect an automerge doc with isAutomerge()", () => { const doc1 = Automerge.from({ sub: { object: true } }) assert(Automerge.isAutomerge(doc1)) diff --git a/rust/automerge/src/op_tree/node.rs b/rust/automerge/src/op_tree/node.rs index ea7fbf48..8f2de662 100644 --- a/rust/automerge/src/op_tree/node.rs +++ b/rust/automerge/src/op_tree/node.rs @@ -27,50 +27,67 @@ impl OpTreeNode { } } + fn search_element<'a, 'b: 'a, Q>( + &'b self, + query: &mut Q, + m: &OpSetMetadata, + ops: &'a [Op], + index: usize, + ) -> bool + where + Q: TreeQuery<'a>, + { + if let Some(e) = self.elements.get(index) { + if query.query_element_with_metadata(&ops[*e], m) == QueryResult::Finish { + return true; + } + } + false + } + pub(crate) fn search<'a, 'b: 'a, Q>( &'b self, query: &mut Q, m: &OpSetMetadata, ops: &'a [Op], - skip: Option, + mut skip: Option, ) -> bool where Q: TreeQuery<'a>, { if self.is_leaf() { - let skip = skip.unwrap_or(0); - for e in self.elements.iter().skip(skip) { + for e in self.elements.iter().skip(skip.unwrap_or(0)) { if query.query_element_with_metadata(&ops[*e], m) == QueryResult::Finish { return true; } } false } else { - let mut skip = skip.unwrap_or(0); for (child_index, child) in self.children.iter().enumerate() { - match skip.cmp(&child.len()) { - Ordering::Greater => { - // not in this child at all - // take off the number of elements in the child as well as the next element - skip -= child.len() + 1; + match skip { + Some(n) if n > child.len() => { + skip = 
Some(n - child.len() - 1); } - Ordering::Equal => { - // just try the element - skip -= child.len(); - if let Some(e) = self.elements.get(child_index) { - if query.query_element_with_metadata(&ops[*e], m) == QueryResult::Finish - { - return true; - } + Some(n) if n == child.len() => { + skip = None; + if self.search_element(query, m, ops, child_index) { + return true; } } - Ordering::Less => { + Some(n) => { + if child.search(query, m, ops, Some(n)) { + return true; + } + skip = Some(0); // important to not be None so we never call query_node again + if self.search_element(query, m, ops, child_index) { + return true; + } + } + None => { // descend and try find it match query.query_node_with_metadata(child, m, ops) { QueryResult::Descend => { - // search in the child node, passing in the number of items left to - // skip - if child.search(query, m, ops, Some(skip)) { + if child.search(query, m, ops, None) { return true; } } @@ -78,14 +95,9 @@ impl OpTreeNode { QueryResult::Next => (), QueryResult::Skip(_) => panic!("had skip from non-root node"), } - if let Some(e) = self.elements.get(child_index) { - if query.query_element_with_metadata(&ops[*e], m) == QueryResult::Finish - { - return true; - } + if self.search_element(query, m, ops, child_index) { + return true; } - // reset the skip to zero so we continue iterating normally - skip = 0; } } } diff --git a/rust/automerge/src/query/prop.rs b/rust/automerge/src/query/prop.rs index f6062ec6..d2a11361 100644 --- a/rust/automerge/src/query/prop.rs +++ b/rust/automerge/src/query/prop.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::types::{Key, ListEncoding, Op}; +use crate::types::{Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -9,15 +9,6 @@ pub(crate) struct Prop<'a> { pub(crate) ops: Vec<&'a Op>, pub(crate) ops_pos: Vec, pub(crate) pos: usize, - start: Option, -} - -#[derive(Debug, Clone, PartialEq)] 
-struct Start { - /// The index to start searching for in the optree - idx: usize, - /// The total length of the optree - optree_len: usize, } impl<'a> Prop<'a> { @@ -27,7 +18,6 @@ impl<'a> Prop<'a> { ops: vec![], ops_pos: vec![], pos: 0, - start: None, } } } @@ -39,38 +29,9 @@ impl<'a> TreeQuery<'a> for Prop<'a> { m: &OpSetMetadata, ops: &[Op], ) -> QueryResult { - if let Some(Start { - idx: start, - optree_len, - }) = self.start - { - if self.pos + child.len() >= start { - // skip empty nodes - if child.index.visible_len(ListEncoding::default()) == 0 { - if self.pos + child.len() >= optree_len { - self.pos = optree_len; - QueryResult::Finish - } else { - self.pos += child.len(); - QueryResult::Next - } - } else { - QueryResult::Descend - } - } else { - self.pos += child.len(); - QueryResult::Next - } - } else { - // in the root node find the first op position for the key - let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.key)); - self.start = Some(Start { - idx: start, - optree_len: child.len(), - }); - self.pos = start; - QueryResult::Skip(start) - } + let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.key)); + self.pos = start; + QueryResult::Skip(start) } fn query_element(&mut self, op: &'a Op) -> QueryResult { diff --git a/rust/automerge/src/query/seek_op.rs b/rust/automerge/src/query/seek_op.rs index 22d1f58d..2ed875d2 100644 --- a/rust/automerge/src/query/seek_op.rs +++ b/rust/automerge/src/query/seek_op.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::types::{Key, ListEncoding, Op, HEAD}; +use crate::types::{Key, Op, HEAD}; use std::cmp::Ordering; use std::fmt::Debug; @@ -14,8 +14,6 @@ pub(crate) struct SeekOp<'a> { pub(crate) succ: Vec, /// whether a position has been found found: bool, - /// The found start position of the key if there is one yet (for map objects). 
- start: Option, } impl<'a> SeekOp<'a> { @@ -25,7 +23,6 @@ impl<'a> SeekOp<'a> { succ: vec![], pos: 0, found: false, - start: None, } } @@ -72,37 +69,9 @@ impl<'a> TreeQuery<'a> for SeekOp<'a> { } } Key::Map(_) => { - if let Some(start) = self.start { - if self.pos + child.len() >= start { - // skip empty nodes - if child.index.visible_len(ListEncoding::List) == 0 { - let child_contains_key = - child.elements.iter().any(|e| ops[*e].key == self.op.key); - if !child_contains_key { - // If we are in a node which has no visible ops, but none of the - // elements of the node match the key of the op, then we must have - // finished processing and so we can just return. - // See https://github.com/automerge/automerge-rs/pull/480 - QueryResult::Finish - } else { - // Otherwise, we need to proceed to the next node - self.pos += child.len(); - QueryResult::Next - } - } else { - QueryResult::Descend - } - } else { - self.pos += child.len(); - QueryResult::Next - } - } else { - // in the root node find the first op position for the key - let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.op.key)); - self.start = Some(start); - self.pos = start; - QueryResult::Skip(start) - } + let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.op.key)); + self.pos = start; + QueryResult::Skip(start) } } } diff --git a/rust/automerge/src/query/seek_op_with_patch.rs b/rust/automerge/src/query/seek_op_with_patch.rs index 7cacb032..cd30f5bb 100644 --- a/rust/automerge/src/query/seek_op_with_patch.rs +++ b/rust/automerge/src/query/seek_op_with_patch.rs @@ -16,8 +16,6 @@ pub(crate) struct SeekOpWithPatch<'a> { last_seen: Option, pub(crate) values: Vec<&'a Op>, pub(crate) had_value_before: bool, - /// The found start position of the key if there is one yet (for map objects). 
- start: Option, } impl<'a> SeekOpWithPatch<'a> { @@ -33,7 +31,6 @@ impl<'a> SeekOpWithPatch<'a> { last_seen: None, values: vec![], had_value_before: false, - start: None, } } @@ -132,38 +129,9 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // Updating a map: operations appear in sorted order by key Key::Map(_) => { - if let Some(start) = self.start { - if self.pos + child.len() >= start { - // skip empty nodes - if child.index.visible_len(self.encoding) == 0 { - let child_contains_key = - child.elements.iter().any(|e| ops[*e].key == self.op.key); - if !child_contains_key { - // If we are in a node which has no visible ops, but none of the - // elements of the node match the key of the op, then we must have - // finished processing and so we can just return. - // See https://github.com/automerge/automerge-rs/pull/480 - QueryResult::Finish - } else { - self.pos += child.len(); - QueryResult::Next - } - } else { - QueryResult::Descend - } - } else { - self.pos += child.len(); - QueryResult::Next - } - } else { - // in the root node find the first op position for the key - // Search for the place where we need to insert the new operation. First find the - // first op with a key >= the key we're updating - let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.op.key)); - self.start = Some(start); - self.pos = start; - QueryResult::Skip(start) - } + let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.op.key)); + self.pos = start; + QueryResult::Skip(start) } } } From 9271b20cf5442369f21dec43ebeed097e8092da8 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 14 Feb 2023 16:24:25 +0000 Subject: [PATCH 720/730] Correct logic when skip = B and fix formatting A few tests were failing which exposed the fact that if skip is `B` (the out factor of the OpTree) then we set `skip = None` and this causes us to attempt to return `Skip` in a non root node. I ported the failing test from JS to Rust and fixed the problem. 
I also fixed the formatting issues. --- javascript/test/basic_test.ts | 10 +++---- rust/automerge-wasm/test/test.ts | 2 +- rust/automerge/src/op_tree/node.rs | 4 +-- rust/automerge/src/sync.rs | 45 ++++++++++++++++++++++++++++++ 4 files changed, 53 insertions(+), 8 deletions(-) diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts index 0e30dc7c..e34484c4 100644 --- a/javascript/test/basic_test.ts +++ b/javascript/test/basic_test.ts @@ -62,15 +62,15 @@ describe("Automerge", () => { let doc = Automerge.init() doc = Automerge.change(doc, doc => { - doc['k1'] = true; - }); + doc["k1"] = true + }) for (let idx = 1; idx <= 200; idx++) { doc = Automerge.change(doc, doc => { - delete doc['k' + idx]; - doc['k' + (idx + 1)] = true; + delete doc["k" + idx] + doc["k" + (idx + 1)] = true assert(Object.keys(doc).length == 1) - }); + }) } }) diff --git a/rust/automerge-wasm/test/test.ts b/rust/automerge-wasm/test/test.ts index 56aaae74..bb4f71e3 100644 --- a/rust/automerge-wasm/test/test.ts +++ b/rust/automerge-wasm/test/test.ts @@ -1447,7 +1447,7 @@ describe('Automerge', () => { sync(n1, n2, s1, s2) // Having n3's last change concurrent to the last sync heads forces us into the slower code path - const change3 = n2.getLastLocalChange() + const change3 = n3.getLastLocalChange() if (change3 === null) throw new RangeError("no local change") n2.applyChanges([change3]) n1.put("_root", "n1", "final"); n1.commit("", 0) diff --git a/rust/automerge/src/op_tree/node.rs b/rust/automerge/src/op_tree/node.rs index 8f2de662..ed1b7646 100644 --- a/rust/automerge/src/op_tree/node.rs +++ b/rust/automerge/src/op_tree/node.rs @@ -69,7 +69,7 @@ impl OpTreeNode { skip = Some(n - child.len() - 1); } Some(n) if n == child.len() => { - skip = None; + skip = Some(0); // important to not be None so we never call query_node again if self.search_element(query, m, ops, child_index) { return true; } @@ -78,7 +78,7 @@ impl OpTreeNode { if child.search(query, m, ops, Some(n)) { return 
true; } - skip = Some(0); // important to not be None so we never call query_node again + skip = Some(0); // important to not be None so we never call query_node again if self.search_element(query, m, ops, child_index) { return true; } diff --git a/rust/automerge/src/sync.rs b/rust/automerge/src/sync.rs index d3b6b3fa..d6dc2580 100644 --- a/rust/automerge/src/sync.rs +++ b/rust/automerge/src/sync.rs @@ -887,6 +887,51 @@ mod tests { assert_eq!(doc2.get_heads(), all_heads); } + #[test] + fn should_handle_lots_of_branching_and_merging() { + let mut doc1 = crate::AutoCommit::new().with_actor(ActorId::try_from("01234567").unwrap()); + let mut doc2 = crate::AutoCommit::new().with_actor(ActorId::try_from("89abcdef").unwrap()); + let mut doc3 = crate::AutoCommit::new().with_actor(ActorId::try_from("fedcba98").unwrap()); + let mut s1 = State::new(); + let mut s2 = State::new(); + + doc1.put(crate::ROOT, "x", 0).unwrap(); + let change1 = doc1.get_last_local_change().unwrap().clone(); + + doc2.apply_changes([change1.clone()]).unwrap(); + doc3.apply_changes([change1]).unwrap(); + + doc3.put(crate::ROOT, "x", 1).unwrap(); + + //// - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 + //// / \/ \/ \/ + //// / /\ /\ /\ + //// c0 <---- n2c1 <------ n2c2 <------ n2c3 <-- etc. 
<-- n2c20 <------ n2c21 + //// \ / + //// ---------------------------------------------- n3c1 <----- + for i in 1..20 { + doc1.put(crate::ROOT, "n1", i).unwrap(); + doc2.put(crate::ROOT, "n2", i).unwrap(); + let change1 = doc1.get_last_local_change().unwrap().clone(); + let change2 = doc2.get_last_local_change().unwrap().clone(); + doc1.apply_changes([change2.clone()]).unwrap(); + doc2.apply_changes([change1]).unwrap(); + } + + sync(&mut doc1, &mut doc2, &mut s1, &mut s2); + + //// Having n3's last change concurrent to the last sync heads forces us into the slower code path + let change3 = doc3.get_last_local_change().unwrap().clone(); + doc2.apply_changes([change3]).unwrap(); + + doc1.put(crate::ROOT, "n1", "final").unwrap(); + doc2.put(crate::ROOT, "n1", "final").unwrap(); + + sync(&mut doc1, &mut doc2, &mut s1, &mut s2); + + assert_eq!(doc1.get_heads(), doc2.get_heads()); + } + fn sync( a: &mut crate::AutoCommit, b: &mut crate::AutoCommit, From c92d042c87eb724e4878a4df0f8d31177c410c01 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 14 Feb 2023 17:25:25 +0000 Subject: [PATCH 721/730] @automerge/automerge-wasm@0.1.24 and @automerge/automerge@2.0.2-alpha.2 --- javascript/package.json | 4 ++-- rust/automerge-wasm/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/javascript/package.json b/javascript/package.json index 8712920c..e39f398a 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.2-alpha.1", + "version": "2.0.2-alpha.2", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", @@ -47,7 +47,7 @@ "typescript": "^4.9.4" }, "dependencies": { - "@automerge/automerge-wasm": "0.1.24", + "@automerge/automerge-wasm": "0.1.25", "uuid": "^9.0.0" } } diff --git 
a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json index 57354ce1..80b39fd4 100644 --- a/rust/automerge-wasm/package.json +++ b/rust/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.24", + "version": "0.1.25", "license": "MIT", "files": [ "README.md", From 1425af43cdcd61295e0e65bf47fbce0076353682 Mon Sep 17 00:00:00 2001 From: Alex Good Date: Tue, 14 Feb 2023 19:47:53 +0000 Subject: [PATCH 722/730] @automerge/automerge@2.0.2 --- javascript/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/javascript/package.json b/javascript/package.json index e39f398a..79309907 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -4,7 +4,7 @@ "Orion Henry ", "Martin Kleppmann" ], - "version": "2.0.2-alpha.2", + "version": "2.0.2", "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", "repository": "github:automerge/automerge-rs", From 407faefa6e838abe0bd8526716c98eab592aa123 Mon Sep 17 00:00:00 2001 From: Philip Schatz <253202+philschatz@users.noreply.github.com> Date: Wed, 15 Feb 2023 03:23:02 -0600 Subject: [PATCH 723/730] A few setup fixes (#529) * include deno in dependencies * install javascript dependencies * remove redundant operation --- README.md | 3 +++ flake.nix | 1 + rust/automerge/src/automerge.rs | 1 - 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 94e1bbb8..76d48ddd 100644 --- a/README.md +++ b/README.md @@ -113,6 +113,9 @@ brew install cmake node cmocka # install yarn npm install --global yarn +# install javascript dependencies +yarn --cwd ./javascript + # install rust dependencies cargo install wasm-bindgen-cli wasm-opt 
cargo-deny diff --git a/flake.nix b/flake.nix index 4f9ba1fe..37835738 100644 --- a/flake.nix +++ b/flake.nix @@ -54,6 +54,7 @@ nodejs yarn + deno # c deps cmake diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs index 128d4418..09c3cc9d 100644 --- a/rust/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ -897,7 +897,6 @@ impl Automerge { .add_change(&change, actor_index) .expect("Change's deps should already be in the document"); - self.history_index.insert(change.hash(), history_index); self.history.push(change); history_index From 8de2fa9bd49e1bf04f2a864b3a57f911419a86ba Mon Sep 17 00:00:00 2001 From: Jason Kankiewicz Date: Sat, 25 Feb 2023 10:47:00 -0800 Subject: [PATCH 724/730] C API 2 (#530) The AMvalue union, AMlistItem struct, AMmapItem struct, and AMobjItem struct are gone, replaced by the AMitem struct. The AMchangeHashes, AMchanges, AMlistItems, AMmapItems, AMobjItems, AMstrs, and AMsyncHaves iterators are gone, replaced by the AMitems iterator. The AMitem struct is opaque, getting and setting values is now achieved exclusively through function calls. The AMitemsNext(), AMitemsPrev(), and AMresultItem() functions return a pointer to an AMitem struct so you ultimately get the same thing whether you're iterating over a sequence or calling AMmapGet() or AMlistGet(). Calling AMitemResult() on an AMitem struct will produce a new AMresult struct referencing its storage so now the AMresult struct for an iterator can be subsequently freed without affecting the AMitem structs that were filtered out of it. The storage for a set of AMitem structs can be recombined into a single AMresult struct by passing pointers to their corresponding AMresult structs to AMresultCat(). For C/C++ programmers, I've added AMstrCmp(), AMstrdup(), AM{idxType,objType,status,valType}ToString() and AM{idxType,objType,status,valType}FromString(). 
It's also now possible to pass arbitrary parameters through AMstack{Item,Items,Result}() to a callback function. --- rust/automerge-c/.clang-format | 250 +++ rust/automerge-c/.gitignore | 8 +- rust/automerge-c/CMakeLists.txt | 344 ++- rust/automerge-c/Cargo.toml | 4 +- rust/automerge-c/README.md | 197 +- rust/automerge-c/cbindgen.toml | 20 +- rust/automerge-c/cmake/Cargo.toml.in | 22 + rust/automerge-c/cmake/cbindgen.toml.in | 48 + rust/automerge-c/cmake/config.h.in | 31 +- .../cmake/enum-string-functions-gen.cmake | 183 ++ ...replace.cmake => file-regex-replace.cmake} | 4 +- .../{file_touch.cmake => file-touch.cmake} | 4 +- rust/automerge-c/docs/CMakeLists.txt | 35 + rust/automerge-c/{ => docs}/img/brandmark.png | Bin rust/automerge-c/examples/CMakeLists.txt | 20 +- rust/automerge-c/examples/README.md | 2 +- rust/automerge-c/examples/quickstart.c | 195 +- .../include/automerge-c/utils/result.h | 30 + .../include/automerge-c/utils/stack.h | 130 ++ .../automerge-c/utils/stack_callback_data.h | 53 + .../include/automerge-c/utils/string.h | 29 + rust/automerge-c/src/CMakeLists.txt | 250 --- rust/automerge-c/src/actor_id.rs | 84 +- rust/automerge-c/src/byte_span.rs | 146 +- rust/automerge-c/src/change.rs | 148 +- rust/automerge-c/src/change_hashes.rs | 400 ---- rust/automerge-c/src/changes.rs | 399 ---- rust/automerge-c/src/doc.rs | 607 +++-- rust/automerge-c/src/doc/list.rs | 555 ++--- rust/automerge-c/src/doc/list/item.rs | 97 - rust/automerge-c/src/doc/list/items.rs | 348 --- rust/automerge-c/src/doc/map.rs | 324 +-- rust/automerge-c/src/doc/map/item.rs | 98 - rust/automerge-c/src/doc/map/items.rs | 340 --- rust/automerge-c/src/doc/utils.rs | 27 +- rust/automerge-c/src/index.rs | 84 + rust/automerge-c/src/item.rs | 1963 ++++++++++++++++ rust/automerge-c/src/items.rs | 401 ++++ rust/automerge-c/src/lib.rs | 9 +- rust/automerge-c/src/obj.rs | 86 +- rust/automerge-c/src/obj/item.rs | 73 - rust/automerge-c/src/obj/items.rs | 341 --- rust/automerge-c/src/result.rs | 1039 
++++----- rust/automerge-c/src/result_stack.rs | 156 -- rust/automerge-c/src/strs.rs | 359 --- rust/automerge-c/src/sync.rs | 2 +- rust/automerge-c/src/sync/have.rs | 25 +- rust/automerge-c/src/sync/haves.rs | 378 ---- rust/automerge-c/src/sync/message.rs | 114 +- rust/automerge-c/src/sync/state.rs | 149 +- rust/automerge-c/src/utils/result.c | 33 + rust/automerge-c/src/utils/stack.c | 106 + .../src/utils/stack_callback_data.c | 9 + rust/automerge-c/src/utils/string.c | 46 + rust/automerge-c/test/CMakeLists.txt | 44 +- rust/automerge-c/test/actor_id_tests.c | 145 +- rust/automerge-c/test/base_state.c | 17 + rust/automerge-c/test/base_state.h | 39 + rust/automerge-c/test/byte_span_tests.c | 118 + rust/automerge-c/test/cmocka_utils.c | 88 + rust/automerge-c/test/cmocka_utils.h | 42 +- rust/automerge-c/test/doc_state.c | 27 + rust/automerge-c/test/doc_state.h | 17 + rust/automerge-c/test/doc_tests.c | 351 ++- rust/automerge-c/test/enum_string_tests.c | 148 ++ rust/automerge-c/test/group_state.c | 27 - rust/automerge-c/test/group_state.h | 16 - rust/automerge-c/test/item_tests.c | 94 + rust/automerge-c/test/list_tests.c | 720 +++--- rust/automerge-c/test/macro_utils.c | 47 +- rust/automerge-c/test/macro_utils.h | 29 +- rust/automerge-c/test/main.c | 17 +- rust/automerge-c/test/map_tests.c | 1754 ++++++++------- .../test/ported_wasm/basic_tests.c | 1986 ++++++++--------- rust/automerge-c/test/ported_wasm/suite.c | 7 +- .../automerge-c/test/ported_wasm/sync_tests.c | 1276 +++++------ rust/automerge-c/test/stack_utils.c | 31 - rust/automerge-c/test/stack_utils.h | 38 - rust/automerge-c/test/str_utils.c | 2 +- rust/automerge-c/test/str_utils.h | 19 +- rust/automerge/src/error.rs | 5 + scripts/ci/cmake-build | 2 +- 82 files changed, 9304 insertions(+), 8607 deletions(-) create mode 100644 rust/automerge-c/.clang-format create mode 100644 rust/automerge-c/cmake/Cargo.toml.in create mode 100644 rust/automerge-c/cmake/cbindgen.toml.in create mode 100644 
rust/automerge-c/cmake/enum-string-functions-gen.cmake rename rust/automerge-c/cmake/{file_regex_replace.cmake => file-regex-replace.cmake} (87%) rename rust/automerge-c/cmake/{file_touch.cmake => file-touch.cmake} (82%) create mode 100644 rust/automerge-c/docs/CMakeLists.txt rename rust/automerge-c/{ => docs}/img/brandmark.png (100%) create mode 100644 rust/automerge-c/include/automerge-c/utils/result.h create mode 100644 rust/automerge-c/include/automerge-c/utils/stack.h create mode 100644 rust/automerge-c/include/automerge-c/utils/stack_callback_data.h create mode 100644 rust/automerge-c/include/automerge-c/utils/string.h delete mode 100644 rust/automerge-c/src/CMakeLists.txt delete mode 100644 rust/automerge-c/src/change_hashes.rs delete mode 100644 rust/automerge-c/src/changes.rs delete mode 100644 rust/automerge-c/src/doc/list/item.rs delete mode 100644 rust/automerge-c/src/doc/list/items.rs delete mode 100644 rust/automerge-c/src/doc/map/item.rs delete mode 100644 rust/automerge-c/src/doc/map/items.rs create mode 100644 rust/automerge-c/src/index.rs create mode 100644 rust/automerge-c/src/item.rs create mode 100644 rust/automerge-c/src/items.rs delete mode 100644 rust/automerge-c/src/obj/item.rs delete mode 100644 rust/automerge-c/src/obj/items.rs delete mode 100644 rust/automerge-c/src/result_stack.rs delete mode 100644 rust/automerge-c/src/strs.rs delete mode 100644 rust/automerge-c/src/sync/haves.rs create mode 100644 rust/automerge-c/src/utils/result.c create mode 100644 rust/automerge-c/src/utils/stack.c create mode 100644 rust/automerge-c/src/utils/stack_callback_data.c create mode 100644 rust/automerge-c/src/utils/string.c create mode 100644 rust/automerge-c/test/base_state.c create mode 100644 rust/automerge-c/test/base_state.h create mode 100644 rust/automerge-c/test/byte_span_tests.c create mode 100644 rust/automerge-c/test/cmocka_utils.c create mode 100644 rust/automerge-c/test/doc_state.c create mode 100644 rust/automerge-c/test/doc_state.h 
create mode 100644 rust/automerge-c/test/enum_string_tests.c delete mode 100644 rust/automerge-c/test/group_state.c delete mode 100644 rust/automerge-c/test/group_state.h create mode 100644 rust/automerge-c/test/item_tests.c delete mode 100644 rust/automerge-c/test/stack_utils.c delete mode 100644 rust/automerge-c/test/stack_utils.h diff --git a/rust/automerge-c/.clang-format b/rust/automerge-c/.clang-format new file mode 100644 index 00000000..dbf16c21 --- /dev/null +++ b/rust/automerge-c/.clang-format @@ -0,0 +1,250 @@ +--- +Language: Cpp +# BasedOnStyle: Chromium +AccessModifierOffset: -1 +AlignAfterOpenBracket: Align +AlignArrayOfStructures: None +AlignConsecutiveAssignments: + Enabled: false + AcrossEmptyLines: false + AcrossComments: false + AlignCompound: false + PadOperators: true +AlignConsecutiveBitFields: + Enabled: false + AcrossEmptyLines: false + AcrossComments: false + AlignCompound: false + PadOperators: false +AlignConsecutiveDeclarations: + Enabled: false + AcrossEmptyLines: false + AcrossComments: false + AlignCompound: false + PadOperators: false +AlignConsecutiveMacros: + Enabled: false + AcrossEmptyLines: false + AcrossComments: false + AlignCompound: false + PadOperators: false +AlignEscapedNewlines: Left +AlignOperands: Align +AlignTrailingComments: true +AllowAllArgumentsOnNextLine: true +AllowAllParametersOfDeclarationOnNextLine: false +AllowShortEnumsOnASingleLine: true +AllowShortBlocksOnASingleLine: Never +AllowShortCaseLabelsOnASingleLine: false +AllowShortFunctionsOnASingleLine: Inline +AllowShortLambdasOnASingleLine: All +AllowShortIfStatementsOnASingleLine: Never +AllowShortLoopsOnASingleLine: false +AlwaysBreakAfterDefinitionReturnType: None +AlwaysBreakAfterReturnType: None +AlwaysBreakBeforeMultilineStrings: true +AlwaysBreakTemplateDeclarations: Yes +AttributeMacros: + - __capability +BinPackArguments: true +BinPackParameters: false +BraceWrapping: + AfterCaseLabel: false + AfterClass: false + AfterControlStatement: Never + 
AfterEnum: false + AfterFunction: false + AfterNamespace: false + AfterObjCDeclaration: false + AfterStruct: false + AfterUnion: false + AfterExternBlock: false + BeforeCatch: false + BeforeElse: false + BeforeLambdaBody: false + BeforeWhile: false + IndentBraces: false + SplitEmptyFunction: true + SplitEmptyRecord: true + SplitEmptyNamespace: true +BreakBeforeBinaryOperators: None +BreakBeforeConceptDeclarations: Always +BreakBeforeBraces: Attach +BreakBeforeInheritanceComma: false +BreakInheritanceList: BeforeColon +BreakBeforeTernaryOperators: true +BreakConstructorInitializersBeforeComma: false +BreakConstructorInitializers: BeforeColon +BreakAfterJavaFieldAnnotations: false +BreakStringLiterals: true +ColumnLimit: 120 +CommentPragmas: '^ IWYU pragma:' +QualifierAlignment: Leave +CompactNamespaces: false +ConstructorInitializerIndentWidth: 4 +ContinuationIndentWidth: 4 +Cpp11BracedListStyle: true +DeriveLineEnding: true +DerivePointerAlignment: false +DisableFormat: false +EmptyLineAfterAccessModifier: Never +EmptyLineBeforeAccessModifier: LogicalBlock +ExperimentalAutoDetectBinPacking: false +PackConstructorInitializers: NextLine +BasedOnStyle: '' +ConstructorInitializerAllOnOneLineOrOnePerLine: false +AllowAllConstructorInitializersOnNextLine: true +FixNamespaceComments: true +ForEachMacros: + - foreach + - Q_FOREACH + - BOOST_FOREACH +IfMacros: + - KJ_IF_MAYBE +IncludeBlocks: Preserve +IncludeCategories: + - Regex: '^' + Priority: 2 + SortPriority: 0 + CaseSensitive: false + - Regex: '^<.*\.h>' + Priority: 1 + SortPriority: 0 + CaseSensitive: false + - Regex: '^<.*' + Priority: 2 + SortPriority: 0 + CaseSensitive: false + - Regex: '.*' + Priority: 3 + SortPriority: 0 + CaseSensitive: false +IncludeIsMainRegex: '([-_](test|unittest))?$' +IncludeIsMainSourceRegex: '' +IndentAccessModifiers: false +IndentCaseLabels: true +IndentCaseBlocks: false +IndentGotoLabels: true +IndentPPDirectives: None +IndentExternBlock: AfterExternBlock +IndentRequiresClause: true 
+IndentWidth: 4 +IndentWrappedFunctionNames: false +InsertBraces: false +InsertTrailingCommas: None +JavaScriptQuotes: Leave +JavaScriptWrapImports: true +KeepEmptyLinesAtTheStartOfBlocks: false +LambdaBodyIndentation: Signature +MacroBlockBegin: '' +MacroBlockEnd: '' +MaxEmptyLinesToKeep: 1 +NamespaceIndentation: None +ObjCBinPackProtocolList: Never +ObjCBlockIndentWidth: 2 +ObjCBreakBeforeNestedBlockParam: true +ObjCSpaceAfterProperty: false +ObjCSpaceBeforeProtocolList: true +PenaltyBreakAssignment: 2 +PenaltyBreakBeforeFirstCallParameter: 1 +PenaltyBreakComment: 300 +PenaltyBreakFirstLessLess: 120 +PenaltyBreakOpenParenthesis: 0 +PenaltyBreakString: 1000 +PenaltyBreakTemplateDeclaration: 10 +PenaltyExcessCharacter: 1000000 +PenaltyReturnTypeOnItsOwnLine: 200 +PenaltyIndentedWhitespace: 0 +PointerAlignment: Left +PPIndentWidth: -1 +RawStringFormats: + - Language: Cpp + Delimiters: + - cc + - CC + - cpp + - Cpp + - CPP + - 'c++' + - 'C++' + CanonicalDelimiter: '' + BasedOnStyle: google + - Language: TextProto + Delimiters: + - pb + - PB + - proto + - PROTO + EnclosingFunctions: + - EqualsProto + - EquivToProto + - PARSE_PARTIAL_TEXT_PROTO + - PARSE_TEST_PROTO + - PARSE_TEXT_PROTO + - ParseTextOrDie + - ParseTextProtoOrDie + - ParseTestProto + - ParsePartialTestProto + CanonicalDelimiter: pb + BasedOnStyle: google +ReferenceAlignment: Pointer +ReflowComments: true +RemoveBracesLLVM: false +RequiresClausePosition: OwnLine +SeparateDefinitionBlocks: Leave +ShortNamespaceLines: 1 +SortIncludes: CaseSensitive +SortJavaStaticImport: Before +SortUsingDeclarations: true +SpaceAfterCStyleCast: false +SpaceAfterLogicalNot: false +SpaceAfterTemplateKeyword: true +SpaceBeforeAssignmentOperators: true +SpaceBeforeCaseColon: false +SpaceBeforeCpp11BracedList: false +SpaceBeforeCtorInitializerColon: true +SpaceBeforeInheritanceColon: true +SpaceBeforeParens: ControlStatements +SpaceBeforeParensOptions: + AfterControlStatements: true + AfterForeachMacros: true + 
AfterFunctionDefinitionName: false + AfterFunctionDeclarationName: false + AfterIfMacros: true + AfterOverloadedOperator: false + AfterRequiresInClause: false + AfterRequiresInExpression: false + BeforeNonEmptyParentheses: false +SpaceAroundPointerQualifiers: Default +SpaceBeforeRangeBasedForLoopColon: true +SpaceInEmptyBlock: false +SpaceInEmptyParentheses: false +SpacesBeforeTrailingComments: 2 +SpacesInAngles: Never +SpacesInConditionalStatement: false +SpacesInContainerLiterals: true +SpacesInCStyleCastParentheses: false +SpacesInLineCommentPrefix: + Minimum: 1 + Maximum: -1 +SpacesInParentheses: false +SpacesInSquareBrackets: false +SpaceBeforeSquareBrackets: false +BitFieldColonSpacing: Both +Standard: Auto +StatementAttributeLikeMacros: + - Q_EMIT +StatementMacros: + - Q_UNUSED + - QT_REQUIRE_VERSION +TabWidth: 8 +UseCRLF: false +UseTab: Never +WhitespaceSensitiveMacros: + - STRINGIZE + - PP_STRINGIZE + - BOOST_PP_STRINGIZE + - NS_SWIFT_NAME + - CF_SWIFT_NAME +... + diff --git a/rust/automerge-c/.gitignore b/rust/automerge-c/.gitignore index f04de582..14d74973 100644 --- a/rust/automerge-c/.gitignore +++ b/rust/automerge-c/.gitignore @@ -1,10 +1,10 @@ automerge automerge.h automerge.o -*.cmake +build/ +CMakeCache.txt CMakeFiles +CMakePresets.json Makefile DartConfiguration.tcl -config.h -CMakeCache.txt -Cargo +out/ diff --git a/rust/automerge-c/CMakeLists.txt b/rust/automerge-c/CMakeLists.txt index 1b68669a..056d111b 100644 --- a/rust/automerge-c/CMakeLists.txt +++ b/rust/automerge-c/CMakeLists.txt @@ -1,97 +1,279 @@ -cmake_minimum_required(VERSION 3.18 FATAL_ERROR) +cmake_minimum_required(VERSION 3.23 FATAL_ERROR) -set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake") +project(automerge-c VERSION 0.1.0 + LANGUAGES C + DESCRIPTION "C bindings for the Automerge Rust library.") -# Parse the library name, project name and project version out of Cargo's TOML file. 
-set(CARGO_LIB_SECTION OFF) +set(LIBRARY_NAME "automerge") -set(LIBRARY_NAME "") - -set(CARGO_PKG_SECTION OFF) - -set(CARGO_PKG_NAME "") - -set(CARGO_PKG_VERSION "") - -file(READ Cargo.toml TOML_STRING) - -string(REPLACE ";" "\\\\;" TOML_STRING "${TOML_STRING}") - -string(REPLACE "\n" ";" TOML_LINES "${TOML_STRING}") - -foreach(TOML_LINE IN ITEMS ${TOML_LINES}) - string(REGEX MATCH "^\\[(lib|package)\\]$" _ ${TOML_LINE}) - - if(CMAKE_MATCH_1 STREQUAL "lib") - set(CARGO_LIB_SECTION ON) - - set(CARGO_PKG_SECTION OFF) - elseif(CMAKE_MATCH_1 STREQUAL "package") - set(CARGO_LIB_SECTION OFF) - - set(CARGO_PKG_SECTION ON) - endif() - - string(REGEX MATCH "^name += +\"([^\"]+)\"$" _ ${TOML_LINE}) - - if(CMAKE_MATCH_1 AND (CARGO_LIB_SECTION AND NOT CARGO_PKG_SECTION)) - set(LIBRARY_NAME "${CMAKE_MATCH_1}") - elseif(CMAKE_MATCH_1 AND (NOT CARGO_LIB_SECTION AND CARGO_PKG_SECTION)) - set(CARGO_PKG_NAME "${CMAKE_MATCH_1}") - endif() - - string(REGEX MATCH "^version += +\"([^\"]+)\"$" _ ${TOML_LINE}) - - if(CMAKE_MATCH_1 AND CARGO_PKG_SECTION) - set(CARGO_PKG_VERSION "${CMAKE_MATCH_1}") - endif() - - if(LIBRARY_NAME AND (CARGO_PKG_NAME AND CARGO_PKG_VERSION)) - break() - endif() -endforeach() - -project(${CARGO_PKG_NAME} VERSION 0.0.1 LANGUAGES C DESCRIPTION "C bindings for the Automerge Rust backend.") - -include(CTest) +set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON) option(BUILD_SHARED_LIBS "Enable the choice of a shared or static library.") +include(CTest) + include(CMakePackageConfigHelpers) include(GNUInstallDirs) +set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake") + string(MAKE_C_IDENTIFIER ${PROJECT_NAME} SYMBOL_PREFIX) string(TOUPPER ${SYMBOL_PREFIX} SYMBOL_PREFIX) -set(CARGO_TARGET_DIR "${CMAKE_CURRENT_BINARY_DIR}/Cargo/target") +set(CARGO_TARGET_DIR "${CMAKE_BINARY_DIR}/Cargo/target") -set(CBINDGEN_INCLUDEDIR "${CARGO_TARGET_DIR}/${CMAKE_INSTALL_INCLUDEDIR}") +set(CBINDGEN_INCLUDEDIR "${CMAKE_BINARY_DIR}/${CMAKE_INSTALL_INCLUDEDIR}") set(CBINDGEN_TARGET_DIR 
"${CBINDGEN_INCLUDEDIR}/${PROJECT_NAME}") -add_subdirectory(src) +find_program ( + CARGO_CMD + "cargo" + PATHS "$ENV{CARGO_HOME}/bin" + DOC "The Cargo command" +) -# Generate and install the configuration header. +if(NOT CARGO_CMD) + message(FATAL_ERROR "Cargo (Rust package manager) not found! " + "Please install it and/or set the CARGO_HOME " + "environment variable to its path.") +endif() + +string(TOLOWER "${CMAKE_BUILD_TYPE}" BUILD_TYPE_LOWER) + +if(BUILD_TYPE_LOWER STREQUAL debug) + set(CARGO_BUILD_TYPE "debug") + + set(CARGO_FLAG "") +else() + set(CARGO_BUILD_TYPE "release") + + set(CARGO_FLAG "--release") +endif() + +set(CARGO_FEATURES "") + +set(CARGO_BINARY_DIR "${CARGO_TARGET_DIR}/${CARGO_BUILD_TYPE}") + +set(BINDINGS_NAME "${LIBRARY_NAME}_core") + +configure_file( + ${CMAKE_MODULE_PATH}/Cargo.toml.in + ${CMAKE_SOURCE_DIR}/Cargo.toml + @ONLY + NEWLINE_STYLE LF +) + +set(INCLUDE_GUARD_PREFIX "${SYMBOL_PREFIX}") + +configure_file( + ${CMAKE_MODULE_PATH}/cbindgen.toml.in + ${CMAKE_SOURCE_DIR}/cbindgen.toml + @ONLY + NEWLINE_STYLE LF +) + +set(CARGO_OUTPUT + ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h + ${CARGO_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${BINDINGS_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX} +) + +# \note cbindgen's naming behavior isn't fully configurable and it ignores +# `const fn` calls (https://github.com/eqrion/cbindgen/issues/252). +add_custom_command( + OUTPUT + ${CARGO_OUTPUT} + COMMAND + # \note cbindgen won't regenerate its output header file after it's been removed but it will after its + # configuration file has been updated. 
+ ${CMAKE_COMMAND} -DCONDITION=NOT_EXISTS -P ${CMAKE_SOURCE_DIR}/cmake/file-touch.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h ${CMAKE_SOURCE_DIR}/cbindgen.toml + COMMAND + ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} CBINDGEN_TARGET_DIR=${CBINDGEN_TARGET_DIR} ${CARGO_CMD} build ${CARGO_FLAG} ${CARGO_FEATURES} + COMMAND + # Compensate for cbindgen's translation of consecutive uppercase letters to "ScreamingSnakeCase". + ${CMAKE_COMMAND} -DMATCH_REGEX=A_M\([^_]+\)_ -DREPLACE_EXPR=AM_\\1_ -P ${CMAKE_SOURCE_DIR}/cmake/file-regex-replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h + COMMAND + # Compensate for cbindgen ignoring `std:mem::size_of()` calls. + ${CMAKE_COMMAND} -DMATCH_REGEX=USIZE_ -DREPLACE_EXPR=\+${CMAKE_SIZEOF_VOID_P} -P ${CMAKE_SOURCE_DIR}/cmake/file-regex-replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h + MAIN_DEPENDENCY + src/lib.rs + DEPENDS + src/actor_id.rs + src/byte_span.rs + src/change.rs + src/doc.rs + src/doc/list.rs + src/doc/map.rs + src/doc/utils.rs + src/index.rs + src/item.rs + src/items.rs + src/obj.rs + src/result.rs + src/sync.rs + src/sync/have.rs + src/sync/message.rs + src/sync/state.rs + ${CMAKE_SOURCE_DIR}/build.rs + ${CMAKE_MODULE_PATH}/Cargo.toml.in + ${CMAKE_MODULE_PATH}/cbindgen.toml.in + WORKING_DIRECTORY + ${CMAKE_SOURCE_DIR} + COMMENT + "Producing the bindings' artifacts with Cargo..." + VERBATIM +) + +add_custom_target(${BINDINGS_NAME}_artifacts ALL + DEPENDS ${CARGO_OUTPUT} +) + +add_library(${BINDINGS_NAME} STATIC IMPORTED GLOBAL) + +target_include_directories(${BINDINGS_NAME} INTERFACE "${CBINDGEN_INCLUDEDIR}") + +set_target_properties( + ${BINDINGS_NAME} + PROPERTIES + # \note Cargo writes a debug build into a nested directory instead of + # decorating its name. 
+ DEBUG_POSTFIX "" + DEFINE_SYMBOL "" + IMPORTED_IMPLIB "" + IMPORTED_LOCATION "${CARGO_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${BINDINGS_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}" + IMPORTED_NO_SONAME "TRUE" + IMPORTED_SONAME "" + LINKER_LANGUAGE C + PUBLIC_HEADER "${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h" + SOVERSION "${PROJECT_VERSION_MAJOR}" + VERSION "${PROJECT_VERSION}" + # \note Cargo exports all of the symbols automatically. + WINDOWS_EXPORT_ALL_SYMBOLS "TRUE" +) + +target_compile_definitions(${BINDINGS_NAME} INTERFACE $) + +set(UTILS_SUBDIR "utils") + +add_custom_command( + OUTPUT + ${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h + ${CMAKE_BINARY_DIR}/src/${UTILS_SUBDIR}/enum_string.c + COMMAND + ${CMAKE_COMMAND} -DPROJECT_NAME=${PROJECT_NAME} -DLIBRARY_NAME=${LIBRARY_NAME} -DSUBDIR=${UTILS_SUBDIR} -P ${CMAKE_SOURCE_DIR}/cmake/enum-string-functions-gen.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h ${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h ${CMAKE_BINARY_DIR}/src/${UTILS_SUBDIR}/enum_string.c + MAIN_DEPENDENCY + ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h + DEPENDS + ${CMAKE_SOURCE_DIR}/cmake/enum-string-functions-gen.cmake + WORKING_DIRECTORY + ${CMAKE_SOURCE_DIR} + COMMENT + "Generating the enum string functions with CMake..." 
+ VERBATIM +) + +add_custom_target(${LIBRARY_NAME}_utilities + DEPENDS ${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h + ${CMAKE_BINARY_DIR}/src/${UTILS_SUBDIR}/enum_string.c +) + +add_library(${LIBRARY_NAME}) + +target_compile_features(${LIBRARY_NAME} PRIVATE c_std_99) + +set(CMAKE_THREAD_PREFER_PTHREAD TRUE) + +set(THREADS_PREFER_PTHREAD_FLAG TRUE) + +find_package(Threads REQUIRED) + +set(LIBRARY_DEPENDENCIES Threads::Threads ${CMAKE_DL_LIBS}) + +if(WIN32) + list(APPEND LIBRARY_DEPENDENCIES Bcrypt userenv ws2_32) +else() + list(APPEND LIBRARY_DEPENDENCIES m) +endif() + +target_link_libraries(${LIBRARY_NAME} + PUBLIC ${BINDINGS_NAME} + ${LIBRARY_DEPENDENCIES} +) + +# \note An imported library's INTERFACE_INCLUDE_DIRECTORIES property can't +# contain a non-existent path so its build-time include directory +# must be specified for all of its dependent targets instead. +target_include_directories(${LIBRARY_NAME} + PUBLIC "$" + "$" +) + +add_dependencies(${LIBRARY_NAME} ${BINDINGS_NAME}_artifacts) + +# Generate the configuration header. 
math(EXPR INTEGER_PROJECT_VERSION_MAJOR "${PROJECT_VERSION_MAJOR} * 100000") math(EXPR INTEGER_PROJECT_VERSION_MINOR "${PROJECT_VERSION_MINOR} * 100") math(EXPR INTEGER_PROJECT_VERSION_PATCH "${PROJECT_VERSION_PATCH}") -math(EXPR INTEGER_PROJECT_VERSION "${INTEGER_PROJECT_VERSION_MAJOR} + ${INTEGER_PROJECT_VERSION_MINOR} + ${INTEGER_PROJECT_VERSION_PATCH}") +math(EXPR INTEGER_PROJECT_VERSION "${INTEGER_PROJECT_VERSION_MAJOR} + \ + ${INTEGER_PROJECT_VERSION_MINOR} + \ + ${INTEGER_PROJECT_VERSION_PATCH}") configure_file( ${CMAKE_MODULE_PATH}/config.h.in - config.h + ${CBINDGEN_TARGET_DIR}/config.h @ONLY NEWLINE_STYLE LF ) +target_sources(${LIBRARY_NAME} + PRIVATE + src/${UTILS_SUBDIR}/result.c + src/${UTILS_SUBDIR}/stack_callback_data.c + src/${UTILS_SUBDIR}/stack.c + src/${UTILS_SUBDIR}/string.c + ${CMAKE_BINARY_DIR}/src/${UTILS_SUBDIR}/enum_string.c + PUBLIC + FILE_SET api TYPE HEADERS + BASE_DIRS + ${CBINDGEN_INCLUDEDIR} + ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR} + FILES + ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h + ${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h + ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/result.h + ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/stack_callback_data.h + ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/stack.h + ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/string.h + INTERFACE + FILE_SET config TYPE HEADERS + BASE_DIRS + ${CBINDGEN_INCLUDEDIR} + FILES + ${CBINDGEN_TARGET_DIR}/config.h +) + install( - FILES ${CMAKE_BINARY_DIR}/config.h - DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME} + TARGETS ${LIBRARY_NAME} + EXPORT ${PROJECT_NAME}-config + FILE_SET api + FILE_SET config +) + +# \note Install the Cargo-built core bindings to enable direct linkage. 
+install( + FILES $ + DESTINATION ${CMAKE_INSTALL_LIBDIR} +) + +install(EXPORT ${PROJECT_NAME}-config + FILE ${PROJECT_NAME}-config.cmake + NAMESPACE "${PROJECT_NAME}::" + DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/${LIB} ) if(BUILD_TESTING) @@ -100,42 +282,6 @@ if(BUILD_TESTING) enable_testing() endif() +add_subdirectory(docs) + add_subdirectory(examples EXCLUDE_FROM_ALL) - -# Generate and install .cmake files -set(PROJECT_CONFIG_NAME "${PROJECT_NAME}-config") - -set(PROJECT_CONFIG_VERSION_NAME "${PROJECT_CONFIG_NAME}-version") - -write_basic_package_version_file( - ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_VERSION_NAME}.cmake - VERSION ${PROJECT_VERSION} - COMPATIBILITY ExactVersion -) - -# The namespace label starts with the title-cased library name. -string(SUBSTRING ${LIBRARY_NAME} 0 1 NS_FIRST) - -string(SUBSTRING ${LIBRARY_NAME} 1 -1 NS_REST) - -string(TOUPPER ${NS_FIRST} NS_FIRST) - -string(TOLOWER ${NS_REST} NS_REST) - -string(CONCAT NAMESPACE ${NS_FIRST} ${NS_REST} "::") - -# \note CMake doesn't automate the exporting of an imported library's targets -# so the package configuration script must do it. 
-configure_package_config_file( - ${CMAKE_MODULE_PATH}/${PROJECT_CONFIG_NAME}.cmake.in - ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_NAME}.cmake - INSTALL_DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME} -) - -install( - FILES - ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_NAME}.cmake - ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_VERSION_NAME}.cmake - DESTINATION - ${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME} -) diff --git a/rust/automerge-c/Cargo.toml b/rust/automerge-c/Cargo.toml index d039e460..95a3a29c 100644 --- a/rust/automerge-c/Cargo.toml +++ b/rust/automerge-c/Cargo.toml @@ -7,8 +7,8 @@ license = "MIT" rust-version = "1.57.0" [lib] -name = "automerge" -crate-type = ["cdylib", "staticlib"] +name = "automerge_core" +crate-type = ["staticlib"] bench = false doc = false diff --git a/rust/automerge-c/README.md b/rust/automerge-c/README.md index a9f097e2..1fbca3df 100644 --- a/rust/automerge-c/README.md +++ b/rust/automerge-c/README.md @@ -1,22 +1,29 @@ -automerge-c exposes an API to C that can either be used directly or as a basis -for other language bindings that have good support for calling into C functions. +# Overview -# Building +automerge-c exposes a C API that can either be used directly or as the basis +for other language bindings that have good support for calling C functions. -See the main README for instructions on getting your environment set up, then -you can use `./scripts/ci/cmake-build Release static` to build automerge-c. +# Installing -It will output two files: +See the main README for instructions on getting your environment set up and then +you can build the automerge-c library and install its constituent files within +a root directory of your choosing (e.g. 
"/usr/local") like so: +```shell +cmake -E make_directory automerge-c/build +cmake -S automerge-c -B automerge-c/build +cmake --build automerge-c/build +cmake --install automerge-c/build --prefix "/usr/local" +``` +Installation is important because the name, location and structure of CMake's +out-of-source build subdirectory is subject to change based on the platform and +the release version; generated headers like `automerge-c/config.h` and +`automerge-c/utils/enum_string.h` are only sure to be found within their +installed locations. -- ./build/Cargo/target/include/automerge-c/automerge.h -- ./build/Cargo/target/release/libautomerge.a - -To use these in your application you must arrange for your C compiler to find -these files, either by moving them to the right location on your computer, or -by configuring the compiler to reference these directories. - -- `export LDFLAGS=-L./build/Cargo/target/release -lautomerge` -- `export CFLAGS=-I./build/Cargo/target/include` +It's not obvious because they are versioned but the `Cargo.toml` and +`cbindgen.toml` configuration files are also generated in order to ensure that +the project name, project version and library name that they contain match those +specified within the top-level `CMakeLists.txt` file. If you'd like to cross compile the library for different platforms you can do so using [cross](https://github.com/cross-rs/cross). For example: @@ -25,134 +32,176 @@ using [cross](https://github.com/cross-rs/cross). For example: This will output a shared library in the directory `rust/target/aarch64-unknown-linux-gnu/release/`. -You can replace `aarch64-unknown-linux-gnu` with any [cross supported targets](https://github.com/cross-rs/cross#supported-targets). The targets below are known to work, though other targets are expected to work too: +You can replace `aarch64-unknown-linux-gnu` with any +[cross supported targets](https://github.com/cross-rs/cross#supported-targets). 
+The targets below are known to work, though other targets are expected to work +too: - `x86_64-apple-darwin` - `aarch64-apple-darwin` - `x86_64-unknown-linux-gnu` - `aarch64-unknown-linux-gnu` -As a caveat, the header file is currently 32/64-bit dependant. You can re-use it -for all 64-bit architectures, but you must generate a specific header for 32-bit -targets. +As a caveat, CMake generates the `automerge.h` header file in terms of the +processor architecture of the computer on which it was built so, for example, +don't use a header generated for a 64-bit processor if your target is a 32-bit +processor. # Usage -For full reference, read through `automerge.h`, or to get started quickly look -at the +You can build and view the C API's HTML reference documentation like so: +```shell +cmake -E make_directory automerge-c/build +cmake -S automerge-c -B automerge-c/build +cmake --build automerge-c/build --target automerge_docs +firefox automerge-c/build/src/html/index.html +``` + +To get started quickly, look at the [examples](https://github.com/automerge/automerge-rs/tree/main/rust/automerge-c/examples). -Almost all operations in automerge-c act on an AMdoc struct which you can get -from `AMcreate()` or `AMload()`. Operations on a given doc are not thread safe -so you must use a mutex or similar to avoid calling more than one function with -the same AMdoc pointer concurrently. +Almost all operations in automerge-c act on an Automerge document +(`AMdoc` struct) which is structurally similar to a JSON document. -As with all functions that either allocate memory, or could fail if given -invalid input, `AMcreate()` returns an `AMresult`. The `AMresult` contains the -returned doc (or error message), and must be freed with `AMfree()` after you are -done to avoid leaking memory. +You can get a document by calling either `AMcreate()` or `AMload()`. 
Operations +on a given document are not thread-safe so you must use a mutex or similar to +avoid calling more than one function on the same one concurrently. +A C API function that could succeed or fail returns a result (`AMresult` struct) +containing a status code (`AMstatus` enum) and either a sequence of at least one +item (`AMitem` struct) or a read-only view onto a UTF-8 error message string +(`AMbyteSpan` struct). +An item contains up to three components: an index within its parent object +(`AMbyteSpan` struct or `size_t`), a unique identifier (`AMobjId` struct) and a +value. +The result of a successful function call that doesn't produce any values will +contain a single item that is void (`AM_VAL_TYPE_VOID`). +A returned result **must** be passed to `AMresultFree()` once the item(s) or +error message it contains is no longer needed in order to avoid a memory leak. ``` -#include #include +#include +#include +#include int main(int argc, char** argv) { AMresult *docResult = AMcreate(NULL); if (AMresultStatus(docResult) != AM_STATUS_OK) { - printf("failed to create doc: %s", AMerrorMessage(docResult).src); + char* const err_msg = AMstrdup(AMresultError(docResult), NULL); + printf("failed to create doc: %s", err_msg); + free(err_msg); goto cleanup; } - AMdoc *doc = AMresultValue(docResult).doc; + AMdoc *doc; + AMitemToDoc(AMresultItem(docResult), &doc); // useful code goes here! cleanup: - AMfree(docResult); + AMresultFree(docResult); } ``` -If you are writing code in C directly, you can use the `AMpush()` helper -function to reduce the boilerplate of error handling and freeing for you (see -examples/quickstart.c). +If you are writing an application in C, the `AMstackItem()`, `AMstackItems()` +and `AMstackResult()` functions enable the lifetimes of anonymous results to be +centrally managed and allow the same validation logic to be reused without +relying upon the `goto` statement (see examples/quickstart.c). 
If you are wrapping automerge-c in another language, particularly one that has a -garbage collector, you can call `AMfree` within a finalizer to ensure that memory -is reclaimed when it is no longer needed. +garbage collector, you can call the `AMresultFree()` function within a finalizer +to ensure that memory is reclaimed when it is no longer needed. -An AMdoc wraps an automerge document which are very similar to JSON documents. -Automerge documents consist of a mutable root, which is always a map from string -keys to values. Values can have the following types: +Automerge documents consist of a mutable root which is always a map from string +keys to values. A value can be one of the following types: - A number of type double / int64_t / uint64_t -- An explicit true / false / nul -- An immutable utf-8 string (AMbyteSpan) -- An immutable array of arbitrary bytes (AMbyteSpan) -- A mutable map from string keys to values (AMmap) -- A mutable list of values (AMlist) -- A mutable string (AMtext) +- An explicit true / false / null +- An immutable UTF-8 string (`AMbyteSpan`). +- An immutable array of arbitrary bytes (`AMbyteSpan`). +- A mutable map from string keys to values. +- A mutable list of values. +- A mutable UTF-8 string. -If you read from a location in the document with no value a value with -`.tag == AM_VALUE_VOID` will be returned, but you cannot write such a value explicitly. +If you read from a location in the document with no value, an item with type +`AM_VAL_TYPE_VOID` will be returned, but you cannot write such a value +explicitly. -Under the hood, automerge references mutable objects by the internal object id, -and `AM_ROOT` is always the object id of the root value. +Under the hood, automerge references a mutable object by its object identifier +where `AM_ROOT` signifies a document's root map object. -There is a function to put each type of value into either a map or a list, and a -function to read the current value from a list. 
As (in general) collaborators +There are functions to put each type of value into either a map or a list, and +functions to read the current or a historical value from a map or a list. As (in general) collaborators may edit the document at any time, you cannot guarantee that the type of the -value at a given part of the document will stay the same. As a result reading -from the document will return an `AMvalue` union that you can inspect to -determine its type. +value at a given part of the document will stay the same. As a result, reading +from the document will return an `AMitem` struct that you can inspect to +determine the type of value that it contains. Strings in automerge-c are represented using an `AMbyteSpan` which contains a -pointer and a length. Strings must be valid utf-8 and may contain null bytes. -As a convenience you can use `AMstr()` to get the representation of a -null-terminated C string as an `AMbyteSpan`. +pointer and a length. Strings must be valid UTF-8 and may contain NUL (`0`) +characters. +For your convenience, you can call `AMstr()` to get the `AMbyteSpan` struct +equivalent of a null-terminated byte string or `AMstrdup()` to get the +representation of an `AMbyteSpan` struct as a null-terminated byte string +wherein its NUL characters have been removed/replaced as you choose. Putting all of that together, to read and write from the root of the document you can do this: ``` -#include #include +#include +#include +#include int main(int argc, char** argv) { // ...previous example... 
- AMdoc *doc = AMresultValue(docResult).doc; + AMdoc *doc; + AMitemToDoc(AMresultItem(docResult), &doc); AMresult *putResult = AMmapPutStr(doc, AM_ROOT, AMstr("key"), AMstr("value")); if (AMresultStatus(putResult) != AM_STATUS_OK) { - printf("failed to put: %s", AMerrorMessage(putResult).src); + char* const err_msg = AMstrdup(AMresultError(putResult), NULL); + printf("failed to put: %s", err_msg); + free(err_msg); goto cleanup; } AMresult *getResult = AMmapGet(doc, AM_ROOT, AMstr("key"), NULL); if (AMresultStatus(getResult) != AM_STATUS_OK) { - printf("failed to get: %s", AMerrorMessage(getResult).src); + char* const err_msg = AMstrdup(AMresultError(putResult), NULL); + printf("failed to get: %s", err_msg); + free(err_msg); goto cleanup; } - AMvalue got = AMresultValue(getResult); - if (got.tag != AM_VALUE_STR) { + AMbyteSpan got; + if (AMitemToStr(AMresultItem(getResult), &got)) { + char* const c_str = AMstrdup(got, NULL); + printf("Got %zu-character string \"%s\"", got.count, c_str); + free(c_str); + } else { printf("expected to read a string!"); goto cleanup; } - printf("Got %zu-character string `%s`", got.str.count, got.str.src); cleanup: - AMfree(getResult); - AMfree(putResult); - AMfree(docResult); + AMresultFree(getResult); + AMresultFree(putResult); + AMresultFree(docResult); } ``` -Functions that do not return an `AMresult` (for example `AMmapItemValue()`) do -not allocate memory, but continue to reference memory that was previously -allocated. It's thus important to keep the original `AMresult` alive (in this -case the one returned by `AMmapRange()`) until after you are done with the return -values of these functions. +Functions that do not return an `AMresult` (for example `AMitemKey()`) do +not allocate memory but rather reference memory that was previously +allocated. It's therefore important to keep the original `AMresult` alive (in +this case the one returned by `AMmapRange()`) until after you are finished with +the items that it contains. 
However, the memory for an individual `AMitem` can +be shared with a new `AMresult` by calling `AMitemResult()` on it. In other +words, a select group of items can be filtered out of a collection and only each +one's corresponding `AMresult` must be kept alive from that point forward; the +originating collection's `AMresult` can be safely freed. Beyond that, good luck! diff --git a/rust/automerge-c/cbindgen.toml b/rust/automerge-c/cbindgen.toml index ada7f48d..21eaaadd 100644 --- a/rust/automerge-c/cbindgen.toml +++ b/rust/automerge-c/cbindgen.toml @@ -1,7 +1,7 @@ after_includes = """\n /** * \\defgroup enumerations Public Enumerations - Symbolic names for integer constants. + * Symbolic names for integer constants. */ /** @@ -12,21 +12,23 @@ after_includes = """\n #define AM_ROOT NULL /** - * \\memberof AMchangeHash + * \\memberof AMdoc * \\def AM_CHANGE_HASH_SIZE * \\brief The count of bytes in a change hash. */ #define AM_CHANGE_HASH_SIZE 32 """ -autogen_warning = "/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */" +autogen_warning = """ +/** + * \\file + * \\brief All constants, functions and types in the core Automerge C API. + * + * \\warning This file is auto-generated by cbindgen. + */ +""" documentation = true documentation_style = "doxy" -header = """ -/** \\file - * All constants, functions and types in the Automerge library's C API. 
- */ - """ -include_guard = "AUTOMERGE_H" +include_guard = "AUTOMERGE_C_H" includes = [] language = "C" line_length = 140 diff --git a/rust/automerge-c/cmake/Cargo.toml.in b/rust/automerge-c/cmake/Cargo.toml.in new file mode 100644 index 00000000..781e2fef --- /dev/null +++ b/rust/automerge-c/cmake/Cargo.toml.in @@ -0,0 +1,22 @@ +[package] +name = "@PROJECT_NAME@" +version = "@PROJECT_VERSION@" +authors = ["Orion Henry ", "Jason Kankiewicz "] +edition = "2021" +license = "MIT" +rust-version = "1.57.0" + +[lib] +name = "@BINDINGS_NAME@" +crate-type = ["staticlib"] +bench = false +doc = false + +[dependencies] +@LIBRARY_NAME@ = { path = "../@LIBRARY_NAME@" } +hex = "^0.4.3" +libc = "^0.2" +smol_str = "^0.1.21" + +[build-dependencies] +cbindgen = "^0.24" diff --git a/rust/automerge-c/cmake/cbindgen.toml.in b/rust/automerge-c/cmake/cbindgen.toml.in new file mode 100644 index 00000000..5122b75c --- /dev/null +++ b/rust/automerge-c/cmake/cbindgen.toml.in @@ -0,0 +1,48 @@ +after_includes = """\n +/** + * \\defgroup enumerations Public Enumerations + * Symbolic names for integer constants. + */ + +/** + * \\memberof AMdoc + * \\def AM_ROOT + * \\brief The root object of a document. + */ +#define AM_ROOT NULL + +/** + * \\memberof AMdoc + * \\def AM_CHANGE_HASH_SIZE + * \\brief The count of bytes in a change hash. + */ +#define AM_CHANGE_HASH_SIZE 32 +""" +autogen_warning = """ +/** + * \\file + * \\brief All constants, functions and types in the core Automerge C API. + * + * \\warning This file is auto-generated by cbindgen. 
+ */ +""" +documentation = true +documentation_style = "doxy" +include_guard = "@INCLUDE_GUARD_PREFIX@_H" +includes = [] +language = "C" +line_length = 140 +no_includes = true +style = "both" +sys_includes = ["stdbool.h", "stddef.h", "stdint.h", "time.h"] +usize_is_size_t = true + +[enum] +derive_const_casts = true +enum_class = true +must_use = "MUST_USE_ENUM" +prefix_with_name = true +rename_variants = "ScreamingSnakeCase" + +[export] +item_types = ["constants", "enums", "functions", "opaque", "structs", "typedefs"] diff --git a/rust/automerge-c/cmake/config.h.in b/rust/automerge-c/cmake/config.h.in index 44ba5213..40482cb9 100644 --- a/rust/automerge-c/cmake/config.h.in +++ b/rust/automerge-c/cmake/config.h.in @@ -1,14 +1,35 @@ -#ifndef @SYMBOL_PREFIX@_CONFIG_H -#define @SYMBOL_PREFIX@_CONFIG_H - -/* This header is auto-generated by CMake. */ +#ifndef @INCLUDE_GUARD_PREFIX@_CONFIG_H +#define @INCLUDE_GUARD_PREFIX@_CONFIG_H +/** + * \file + * \brief Configuration pararameters defined by the build system. + * + * \warning This file is auto-generated by CMake. + */ +/** + * \def @SYMBOL_PREFIX@_VERSION + * \brief Denotes a semantic version of the form {MAJOR}{MINOR}{PATCH} as three, + * two-digit decimal numbers without leading zeros (e.g. 100 is 0.1.0). + */ #define @SYMBOL_PREFIX@_VERSION @INTEGER_PROJECT_VERSION@ +/** + * \def @SYMBOL_PREFIX@_MAJOR_VERSION + * \brief Denotes a semantic major version as a decimal number. + */ #define @SYMBOL_PREFIX@_MAJOR_VERSION (@SYMBOL_PREFIX@_VERSION / 100000) +/** + * \def @SYMBOL_PREFIX@_MINOR_VERSION + * \brief Denotes a semantic minor version as a decimal number. + */ #define @SYMBOL_PREFIX@_MINOR_VERSION ((@SYMBOL_PREFIX@_VERSION / 100) % 1000) +/** + * \def @SYMBOL_PREFIX@_PATCH_VERSION + * \brief Denotes a semantic patch version as a decimal number. 
+ */ #define @SYMBOL_PREFIX@_PATCH_VERSION (@SYMBOL_PREFIX@_VERSION % 100) -#endif /* @SYMBOL_PREFIX@_CONFIG_H */ +#endif /* @INCLUDE_GUARD_PREFIX@_CONFIG_H */ diff --git a/rust/automerge-c/cmake/enum-string-functions-gen.cmake b/rust/automerge-c/cmake/enum-string-functions-gen.cmake new file mode 100644 index 00000000..77080e8d --- /dev/null +++ b/rust/automerge-c/cmake/enum-string-functions-gen.cmake @@ -0,0 +1,183 @@ +# This CMake script is used to generate a header and a source file for utility +# functions that convert the tags of generated enum types into strings and +# strings into the tags of generated enum types. +cmake_minimum_required(VERSION 3.23 FATAL_ERROR) + +# Seeks the starting line of the source enum's declaration. +macro(seek_enum_mode) + if (line MATCHES "^(typedef[ \t]+)?enum ") + string(REGEX REPLACE "^enum ([0-9a-zA-Z_]+).*$" "\\1" enum_name "${line}") + set(mode "read_tags") + endif() +endmacro() + +# Scans the input for the current enum's tags. +macro(read_tags_mode) + if(line MATCHES "^}") + set(mode "generate") + elseif(line MATCHES "^[A-Z0-9_]+.*$") + string(REGEX REPLACE "^([A-Za-z0-9_]+).*$" "\\1" tmp "${line}") + list(APPEND enum_tags "${tmp}") + endif() +endmacro() + +macro(write_header_file) + # Generate a to-string function declaration. + list(APPEND header_body + "/**\n" + " * \\ingroup enumerations\n" + " * \\brief Gets the string representation of an `${enum_name}` enum tag.\n" + " *\n" + " * \\param[in] tag An `${enum_name}` enum tag.\n" + " * \\return A null-terminated byte string.\n" + " */\n" + "char const* ${enum_name}ToString(${enum_name} const tag)\;\n" + "\n") + # Generate a from-string function declaration. 
+ list(APPEND header_body + "/**\n" + " * \\ingroup enumerations\n" + " * \\brief Gets an `${enum_name}` enum tag from its string representation.\n" + " *\n" + " * \\param[out] dest An `${enum_name}` enum tag pointer.\n" + " * \\param[in] src A null-terminated byte string.\n" + " * \\return `true` if \\p src matches the string representation of an\n" + " * `${enum_name}` enum tag, `false` otherwise.\n" + " */\n" + "bool ${enum_name}FromString(${enum_name}* dest, char const* const src)\;\n" + "\n") +endmacro() + +macro(write_source_file) + # Generate a to-string function implementation. + list(APPEND source_body + "char const* ${enum_name}ToString(${enum_name} const tag) {\n" + " switch (tag) {\n" + " default:\n" + " return \"???\"\;\n") + foreach(label IN LISTS enum_tags) + list(APPEND source_body + " case ${label}:\n" + " return \"${label}\"\;\n") + endforeach() + list(APPEND source_body + " }\n" + "}\n" + "\n") + # Generate a from-string function implementation. + list(APPEND source_body + "bool ${enum_name}FromString(${enum_name}* dest, char const* const src) {\n") + foreach(label IN LISTS enum_tags) + list(APPEND source_body + " if (!strcmp(src, \"${label}\")) {\n" + " *dest = ${label}\;\n" + " return true\;\n" + " }\n") + endforeach() + list(APPEND source_body + " return false\;\n" + "}\n" + "\n") +endmacro() + +function(main) + set(header_body "") + # File header and includes. + list(APPEND header_body + "#ifndef ${include_guard}\n" + "#define ${include_guard}\n" + "/**\n" + " * \\file\n" + " * \\brief Utility functions for converting enum tags into null-terminated\n" + " * byte strings and vice versa.\n" + " *\n" + " * \\warning This file is auto-generated by CMake.\n" + " */\n" + "\n" + "#include \n" + "\n" + "#include <${library_include}>\n" + "\n") + set(source_body "") + # File includes. + list(APPEND source_body + "/** \\warning This file is auto-generated by CMake. 
*/\n" + "\n" + "#include \"stdio.h\"\n" + "#include \"string.h\"\n" + "\n" + "#include <${header_include}>\n" + "\n") + set(enum_name "") + set(enum_tags "") + set(mode "seek_enum") + file(STRINGS "${input_path}" lines) + foreach(line IN LISTS lines) + string(REGEX REPLACE "^(.+)(//.*)?" "\\1" line "${line}") + string(STRIP "${line}" line) + if(mode STREQUAL "seek_enum") + seek_enum_mode() + elseif(mode STREQUAL "read_tags") + read_tags_mode() + else() + # The end of the enum declaration was reached. + if(NOT enum_name) + # The end of the file was reached. + return() + endif() + if(NOT enum_tags) + message(FATAL_ERROR "No tags found for `${enum_name}`.") + endif() + string(TOLOWER "${enum_name}" output_stem_prefix) + string(CONCAT output_stem "${output_stem_prefix}" "_string") + cmake_path(REPLACE_EXTENSION output_stem "h" OUTPUT_VARIABLE output_header_basename) + write_header_file() + write_source_file() + set(enum_name "") + set(enum_tags "") + set(mode "seek_enum") + endif() + endforeach() + # File footer. 
+ list(APPEND header_body + "#endif /* ${include_guard} */\n") + message(STATUS "Generating header file \"${output_header_path}\"...") + file(WRITE "${output_header_path}" ${header_body}) + message(STATUS "Generating source file \"${output_source_path}\"...") + file(WRITE "${output_source_path}" ${source_body}) +endfunction() + +if(NOT DEFINED PROJECT_NAME) + message(FATAL_ERROR "Variable PROJECT_NAME is not defined.") +elseif(NOT DEFINED LIBRARY_NAME) + message(FATAL_ERROR "Variable LIBRARY_NAME is not defined.") +elseif(NOT DEFINED SUBDIR) + message(FATAL_ERROR "Variable SUBDIR is not defined.") +elseif(${CMAKE_ARGC} LESS 9) + message(FATAL_ERROR "Too few arguments.") +elseif(${CMAKE_ARGC} GREATER 10) + message(FATAL_ERROR "Too many arguments.") +elseif(NOT EXISTS ${CMAKE_ARGV5}) + message(FATAL_ERROR "Input header \"${CMAKE_ARGV7}\" not found.") +endif() +cmake_path(CONVERT "${CMAKE_ARGV7}" TO_CMAKE_PATH_LIST input_path NORMALIZE) +cmake_path(CONVERT "${CMAKE_ARGV8}" TO_CMAKE_PATH_LIST output_header_path NORMALIZE) +cmake_path(CONVERT "${CMAKE_ARGV9}" TO_CMAKE_PATH_LIST output_source_path NORMALIZE) +string(TOLOWER "${PROJECT_NAME}" project_root) +cmake_path(CONVERT "${SUBDIR}" TO_CMAKE_PATH_LIST project_subdir NORMALIZE) +string(TOLOWER "${project_subdir}" project_subdir) +string(TOLOWER "${LIBRARY_NAME}" library_stem) +cmake_path(REPLACE_EXTENSION library_stem "h" OUTPUT_VARIABLE library_basename) +string(JOIN "/" library_include "${project_root}" "${library_basename}") +string(TOUPPER "${PROJECT_NAME}" project_name_upper) +string(TOUPPER "${project_subdir}" include_guard_infix) +string(REGEX REPLACE "/" "_" include_guard_infix "${include_guard_infix}") +string(REGEX REPLACE "-" "_" include_guard_prefix "${project_name_upper}") +string(JOIN "_" include_guard_prefix "${include_guard_prefix}" "${include_guard_infix}") +string(JOIN "/" output_header_prefix "${project_root}" "${project_subdir}") +cmake_path(GET output_header_path STEM output_header_stem) 
+string(TOUPPER "${output_header_stem}" include_guard_stem) +string(JOIN "_" include_guard "${include_guard_prefix}" "${include_guard_stem}" "H") +cmake_path(GET output_header_path FILENAME output_header_basename) +string(JOIN "/" header_include "${output_header_prefix}" "${output_header_basename}") +main() diff --git a/rust/automerge-c/cmake/file_regex_replace.cmake b/rust/automerge-c/cmake/file-regex-replace.cmake similarity index 87% rename from rust/automerge-c/cmake/file_regex_replace.cmake rename to rust/automerge-c/cmake/file-regex-replace.cmake index 27306458..09005bc2 100644 --- a/rust/automerge-c/cmake/file_regex_replace.cmake +++ b/rust/automerge-c/cmake/file-regex-replace.cmake @@ -1,4 +1,6 @@ -cmake_minimum_required(VERSION 3.18 FATAL_ERROR) +# This CMake script is used to perform string substitutions within a generated +# file. +cmake_minimum_required(VERSION 3.23 FATAL_ERROR) if(NOT DEFINED MATCH_REGEX) message(FATAL_ERROR "Variable \"MATCH_REGEX\" is not defined.") diff --git a/rust/automerge-c/cmake/file_touch.cmake b/rust/automerge-c/cmake/file-touch.cmake similarity index 82% rename from rust/automerge-c/cmake/file_touch.cmake rename to rust/automerge-c/cmake/file-touch.cmake index 087d59b6..2c196755 100644 --- a/rust/automerge-c/cmake/file_touch.cmake +++ b/rust/automerge-c/cmake/file-touch.cmake @@ -1,4 +1,6 @@ -cmake_minimum_required(VERSION 3.18 FATAL_ERROR) +# This CMake script is used to force Cargo to regenerate the header file for the +# core bindings after the out-of-source build directory has been cleaned. 
+cmake_minimum_required(VERSION 3.23 FATAL_ERROR) if(NOT DEFINED CONDITION) message(FATAL_ERROR "Variable \"CONDITION\" is not defined.") diff --git a/rust/automerge-c/docs/CMakeLists.txt b/rust/automerge-c/docs/CMakeLists.txt new file mode 100644 index 00000000..1d94c872 --- /dev/null +++ b/rust/automerge-c/docs/CMakeLists.txt @@ -0,0 +1,35 @@ +find_package(Doxygen OPTIONAL_COMPONENTS dot) + +if(DOXYGEN_FOUND) + set(DOXYGEN_ALIASES "installed_headerfile=\\headerfile ${LIBRARY_NAME}.h <${PROJECT_NAME}/${LIBRARY_NAME}.h>") + + set(DOXYGEN_GENERATE_LATEX YES) + + set(DOXYGEN_PDF_HYPERLINKS YES) + + set(DOXYGEN_PROJECT_LOGO "${CMAKE_CURRENT_SOURCE_DIR}/img/brandmark.png") + + set(DOXYGEN_SORT_BRIEF_DOCS YES) + + set(DOXYGEN_USE_MDFILE_AS_MAINPAGE "${CMAKE_SOURCE_DIR}/README.md") + + doxygen_add_docs( + ${LIBRARY_NAME}_docs + "${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h" + "${CBINDGEN_TARGET_DIR}/config.h" + "${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h" + "${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/result.h" + "${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/stack_callback_data.h" + "${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/stack.h" + "${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/string.h" + "${CMAKE_SOURCE_DIR}/README.md" + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + COMMENT "Producing documentation with Doxygen..." + ) + + # \note A Doxygen input file isn't a file-level dependency so the Doxygen + # command must instead depend upon a target that either outputs the + # file or depends upon it also or it will just output an error message + # when it can't be found. 
+ add_dependencies(${LIBRARY_NAME}_docs ${BINDINGS_NAME}_artifacts ${LIBRARY_NAME}_utilities) +endif() diff --git a/rust/automerge-c/img/brandmark.png b/rust/automerge-c/docs/img/brandmark.png similarity index 100% rename from rust/automerge-c/img/brandmark.png rename to rust/automerge-c/docs/img/brandmark.png diff --git a/rust/automerge-c/examples/CMakeLists.txt b/rust/automerge-c/examples/CMakeLists.txt index 3395124c..f080237b 100644 --- a/rust/automerge-c/examples/CMakeLists.txt +++ b/rust/automerge-c/examples/CMakeLists.txt @@ -1,41 +1,39 @@ -cmake_minimum_required(VERSION 3.18 FATAL_ERROR) - add_executable( - example_quickstart + ${LIBRARY_NAME}_quickstart quickstart.c ) -set_target_properties(example_quickstart PROPERTIES LINKER_LANGUAGE C) +set_target_properties(${LIBRARY_NAME}_quickstart PROPERTIES LINKER_LANGUAGE C) # \note An imported library's INTERFACE_INCLUDE_DIRECTORIES property can't # contain a non-existent path so its build-time include directory # must be specified for all of its dependent targets instead. target_include_directories( - example_quickstart + ${LIBRARY_NAME}_quickstart PRIVATE "$" ) -target_link_libraries(example_quickstart PRIVATE ${LIBRARY_NAME}) +target_link_libraries(${LIBRARY_NAME}_quickstart PRIVATE ${LIBRARY_NAME}) -add_dependencies(example_quickstart ${LIBRARY_NAME}_artifacts) +add_dependencies(${LIBRARY_NAME}_quickstart ${BINDINGS_NAME}_artifacts) if(BUILD_SHARED_LIBS AND WIN32) add_custom_command( - TARGET example_quickstart + TARGET ${LIBRARY_NAME}_quickstart POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy_if_different ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX} - ${CMAKE_CURRENT_BINARY_DIR} + ${CMAKE_BINARY_DIR} COMMENT "Copying the DLL built by Cargo into the examples directory..." 
VERBATIM ) endif() add_custom_command( - TARGET example_quickstart + TARGET ${LIBRARY_NAME}_quickstart POST_BUILD COMMAND - example_quickstart + ${LIBRARY_NAME}_quickstart COMMENT "Running the example quickstart..." VERBATIM diff --git a/rust/automerge-c/examples/README.md b/rust/automerge-c/examples/README.md index 17aa2227..17e69412 100644 --- a/rust/automerge-c/examples/README.md +++ b/rust/automerge-c/examples/README.md @@ -5,5 +5,5 @@ ```shell cmake -E make_directory automerge-c/build cmake -S automerge-c -B automerge-c/build -cmake --build automerge-c/build --target example_quickstart +cmake --build automerge-c/build --target automerge_quickstart ``` diff --git a/rust/automerge-c/examples/quickstart.c b/rust/automerge-c/examples/quickstart.c index bc418511..ab6769ef 100644 --- a/rust/automerge-c/examples/quickstart.c +++ b/rust/automerge-c/examples/quickstart.c @@ -3,152 +3,127 @@ #include #include +#include +#include +#include +#include -static void abort_cb(AMresultStack**, uint8_t); +static bool abort_cb(AMstack**, void*); /** * \brief Based on https://automerge.github.io/docs/quickstart */ int main(int argc, char** argv) { - AMresultStack* stack = NULL; - AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, abort_cb).doc; - AMobjId const* const cards = AMpush(&stack, - AMmapPutObject(doc1, AM_ROOT, AMstr("cards"), AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - abort_cb).obj_id; - AMobjId const* const card1 = AMpush(&stack, - AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - abort_cb).obj_id; - AMfree(AMmapPutStr(doc1, card1, AMstr("title"), AMstr("Rewrite everything in Clojure"))); - AMfree(AMmapPutBool(doc1, card1, AMstr("done"), false)); - AMobjId const* const card2 = AMpush(&stack, - AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - abort_cb).obj_id; - AMfree(AMmapPutStr(doc1, card2, AMstr("title"), AMstr("Rewrite everything in Haskell"))); - AMfree(AMmapPutBool(doc1, card2, 
AMstr("done"), false)); - AMfree(AMcommit(doc1, AMstr("Add card"), NULL)); + AMstack* stack = NULL; + AMdoc* doc1; + AMitemToDoc(AMstackItem(&stack, AMcreate(NULL), abort_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1); + AMobjId const* const cards = + AMitemObjId(AMstackItem(&stack, AMmapPutObject(doc1, AM_ROOT, AMstr("cards"), AM_OBJ_TYPE_LIST), abort_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMobjId const* const card1 = + AMitemObjId(AMstackItem(&stack, AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), abort_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMmapPutStr(doc1, card1, AMstr("title"), AMstr("Rewrite everything in Clojure")), abort_cb, + AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutBool(doc1, card1, AMstr("done"), false), abort_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMobjId const* const card2 = + AMitemObjId(AMstackItem(&stack, AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), abort_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMmapPutStr(doc1, card2, AMstr("title"), AMstr("Rewrite everything in Haskell")), abort_cb, + AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutBool(doc1, card2, AMstr("done"), false), abort_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc1, AMstr("Add card"), NULL), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMdoc* doc2 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, abort_cb).doc; - AMfree(AMmerge(doc2, doc1)); + AMdoc* doc2; + AMitemToDoc(AMstackItem(&stack, AMcreate(NULL), abort_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2); + AMstackItem(NULL, AMmerge(doc2, doc1), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMbyteSpan const binary = AMpush(&stack, AMsave(doc1), AM_VALUE_BYTES, abort_cb).bytes; - doc2 = AMpush(&stack, AMload(binary.src, binary.count), AM_VALUE_DOC, abort_cb).doc; + AMbyteSpan binary; + AMitemToBytes(AMstackItem(&stack, AMsave(doc1), abort_cb, AMexpect(AM_VAL_TYPE_BYTES)), &binary); + AMitemToDoc(AMstackItem(&stack, AMload(binary.src, 
binary.count), abort_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2); - AMfree(AMmapPutBool(doc1, card1, AMstr("done"), true)); - AMfree(AMcommit(doc1, AMstr("Mark card as done"), NULL)); + AMstackItem(NULL, AMmapPutBool(doc1, card1, AMstr("done"), true), abort_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc1, AMstr("Mark card as done"), NULL), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMfree(AMlistDelete(doc2, cards, 0)); - AMfree(AMcommit(doc2, AMstr("Delete card"), NULL)); + AMstackItem(NULL, AMlistDelete(doc2, cards, 0), abort_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc2, AMstr("Delete card"), NULL), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMfree(AMmerge(doc1, doc2)); + AMstackItem(NULL, AMmerge(doc1, doc2), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMchanges changes = AMpush(&stack, AMgetChanges(doc1, NULL), AM_VALUE_CHANGES, abort_cb).changes; - AMchange const* change = NULL; - while ((change = AMchangesNext(&changes, 1)) != NULL) { - AMbyteSpan const change_hash = AMchangeHash(change); - AMchangeHashes const heads = AMpush(&stack, - AMchangeHashesInit(&change_hash, 1), - AM_VALUE_CHANGE_HASHES, - abort_cb).change_hashes; - AMbyteSpan const msg = AMchangeMessage(change); - char* const c_msg = calloc(1, msg.count + 1); - strncpy(c_msg, msg.src, msg.count); - printf("%s %ld\n", c_msg, AMobjSize(doc1, cards, &heads)); + AMitems changes = AMstackItems(&stack, AMgetChanges(doc1, NULL), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + AMitem* item = NULL; + while ((item = AMitemsNext(&changes, 1)) != NULL) { + AMchange const* change; + AMitemToChange(item, &change); + AMitems const heads = AMstackItems(&stack, AMitemFromChangeHash(AMchangeHash(change)), abort_cb, + AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + char* const c_msg = AMstrdup(AMchangeMessage(change), NULL); + printf("%s %zu\n", c_msg, AMobjSize(doc1, cards, &heads)); free(c_msg); } - AMfreeStack(&stack); + AMstackFree(&stack); } -static char const* 
discriminant_suffix(AMvalueVariant const); - /** - * \brief Prints an error message to `stderr`, deallocates all results in the - * given stack and exits. + * \brief Examines the result at the top of the given stack and, if it's + * invalid, prints an error message to `stderr`, deallocates all results + * in the stack and exits. * - * \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. - * \param[in] discriminant An `AMvalueVariant` enum tag. - * \pre \p stack` != NULL`. - * \post `*stack == NULL`. + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] data A pointer to an owned `AMstackCallbackData` struct or `NULL`. + * \return `true` if the top `AMresult` in \p stack is valid, `false` otherwise. + * \pre \p stack `!= NULL`. */ -static void abort_cb(AMresultStack** stack, uint8_t discriminant) { +static bool abort_cb(AMstack** stack, void* data) { static char buffer[512] = {0}; char const* suffix = NULL; if (!stack) { suffix = "Stack*"; - } - else if (!*stack) { + } else if (!*stack) { suffix = "Stack"; - } - else if (!(*stack)->result) { + } else if (!(*stack)->result) { suffix = ""; } if (suffix) { - fprintf(stderr, "Null `AMresult%s*`.", suffix); - AMfreeStack(stack); + fprintf(stderr, "Null `AMresult%s*`.\n", suffix); + AMstackFree(stack); exit(EXIT_FAILURE); - return; + return false; } AMstatus const status = AMresultStatus((*stack)->result); switch (status) { - case AM_STATUS_ERROR: strcpy(buffer, "Error"); break; - case AM_STATUS_INVALID_RESULT: strcpy(buffer, "Invalid result"); break; - case AM_STATUS_OK: break; - default: sprintf(buffer, "Unknown `AMstatus` tag %d", status); + case AM_STATUS_ERROR: + strcpy(buffer, "Error"); + break; + case AM_STATUS_INVALID_RESULT: + strcpy(buffer, "Invalid result"); + break; + case AM_STATUS_OK: + break; + default: + sprintf(buffer, "Unknown `AMstatus` tag %d", status); } if (buffer[0]) { - AMbyteSpan const msg = AMerrorMessage((*stack)->result); - char* const c_msg 
= calloc(1, msg.count + 1); - strncpy(c_msg, msg.src, msg.count); - fprintf(stderr, "%s; %s.", buffer, c_msg); + char* const c_msg = AMstrdup(AMresultError((*stack)->result), NULL); + fprintf(stderr, "%s; %s.\n", buffer, c_msg); free(c_msg); - AMfreeStack(stack); + AMstackFree(stack); exit(EXIT_FAILURE); - return; + return false; } - AMvalue const value = AMresultValue((*stack)->result); - fprintf(stderr, "Unexpected tag `AM_VALUE_%s` (%d); expected `AM_VALUE_%s`.", - discriminant_suffix(value.tag), - value.tag, - discriminant_suffix(discriminant)); - AMfreeStack(stack); - exit(EXIT_FAILURE); -} - -/** - * \brief Gets the suffix for a discriminant's corresponding string - * representation. - * - * \param[in] discriminant An `AMvalueVariant` enum tag. - * \return A UTF-8 string. - */ -static char const* discriminant_suffix(AMvalueVariant const discriminant) { - char const* suffix = NULL; - switch (discriminant) { - case AM_VALUE_ACTOR_ID: suffix = "ACTOR_ID"; break; - case AM_VALUE_BOOLEAN: suffix = "BOOLEAN"; break; - case AM_VALUE_BYTES: suffix = "BYTES"; break; - case AM_VALUE_CHANGE_HASHES: suffix = "CHANGE_HASHES"; break; - case AM_VALUE_CHANGES: suffix = "CHANGES"; break; - case AM_VALUE_COUNTER: suffix = "COUNTER"; break; - case AM_VALUE_DOC: suffix = "DOC"; break; - case AM_VALUE_F64: suffix = "F64"; break; - case AM_VALUE_INT: suffix = "INT"; break; - case AM_VALUE_LIST_ITEMS: suffix = "LIST_ITEMS"; break; - case AM_VALUE_MAP_ITEMS: suffix = "MAP_ITEMS"; break; - case AM_VALUE_NULL: suffix = "NULL"; break; - case AM_VALUE_OBJ_ID: suffix = "OBJ_ID"; break; - case AM_VALUE_OBJ_ITEMS: suffix = "OBJ_ITEMS"; break; - case AM_VALUE_STR: suffix = "STR"; break; - case AM_VALUE_STRS: suffix = "STRINGS"; break; - case AM_VALUE_SYNC_MESSAGE: suffix = "SYNC_MESSAGE"; break; - case AM_VALUE_SYNC_STATE: suffix = "SYNC_STATE"; break; - case AM_VALUE_TIMESTAMP: suffix = "TIMESTAMP"; break; - case AM_VALUE_UINT: suffix = "UINT"; break; - case AM_VALUE_VOID: suffix = "VOID"; 
break; - default: suffix = "..."; + if (data) { + AMstackCallbackData* sc_data = (AMstackCallbackData*)data; + AMvalType const tag = AMitemValType(AMresultItem((*stack)->result)); + if (tag != sc_data->bitmask) { + fprintf(stderr, "Unexpected tag `%s` (%d) instead of `%s` at %s:%d.\n", AMvalTypeToString(tag), tag, + AMvalTypeToString(sc_data->bitmask), sc_data->file, sc_data->line); + free(sc_data); + AMstackFree(stack); + exit(EXIT_FAILURE); + return false; + } } - return suffix; + free(data); + return true; } diff --git a/rust/automerge-c/include/automerge-c/utils/result.h b/rust/automerge-c/include/automerge-c/utils/result.h new file mode 100644 index 00000000..ab8a2f93 --- /dev/null +++ b/rust/automerge-c/include/automerge-c/utils/result.h @@ -0,0 +1,30 @@ +#ifndef AUTOMERGE_C_UTILS_RESULT_H +#define AUTOMERGE_C_UTILS_RESULT_H +/** + * \file + * \brief Utility functions for use with `AMresult` structs. + */ + +#include + +#include + +/** + * \brief Transfers the items within an arbitrary list of results into a + * new result in their order of specification. + * \param[in] count The count of subsequent arguments. + * \param[in] ... A \p count list of arguments, each of which is a pointer to + * an `AMresult` struct whose items will be transferred out of it + * and which is subsequently freed. + * \return A pointer to an `AMresult` struct or `NULL`. + * \pre `∀𝑥 ∈` \p ... `, AMresultStatus(𝑥) == AM_STATUS_OK` + * \post `(∃𝑥 ∈` \p ... `, AMresultStatus(𝑥) != AM_STATUS_OK) -> NULL` + * \attention All `AMresult` struct pointer arguments are passed to + * `AMresultFree()` regardless of success; use `AMresultCat()` + * instead if you wish to pass them to `AMresultFree()` yourself. + * \warning The returned `AMresult` struct pointer must be passed to + * `AMresultFree()` in order to avoid a memory leak. 
+ */ +AMresult* AMresultFrom(int count, ...); + +#endif /* AUTOMERGE_C_UTILS_RESULT_H */ diff --git a/rust/automerge-c/include/automerge-c/utils/stack.h b/rust/automerge-c/include/automerge-c/utils/stack.h new file mode 100644 index 00000000..a8e9fd08 --- /dev/null +++ b/rust/automerge-c/include/automerge-c/utils/stack.h @@ -0,0 +1,130 @@ +#ifndef AUTOMERGE_C_UTILS_STACK_H +#define AUTOMERGE_C_UTILS_STACK_H +/** + * \file + * \brief Utility data structures and functions for hiding `AMresult` structs, + * managing their lifetimes, and automatically applying custom + * validation logic to the `AMitem` structs that they contain. + * + * \note The `AMstack` struct and its related functions drastically reduce the + * need for boilerplate code and/or `goto` statement usage within a C + * application but a higher-level programming language offers even better + * ways to do the same things. + */ + +#include + +/** + * \struct AMstack + * \brief A node in a singly-linked list of result pointers. + */ +typedef struct AMstack { + /** A result to be deallocated. */ + AMresult* result; + /** The previous node in the singly-linked list or `NULL`. */ + struct AMstack* prev; +} AMstack; + +/** + * \memberof AMstack + * \brief The prototype of a function that examines the result at the top of + * the given stack in terms of some arbitrary data. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] data A pointer to arbitrary data or `NULL`. + * \return `true` if the top `AMresult` struct in \p stack is valid, `false` + * otherwise. + * \pre \p stack `!= NULL`. + */ +typedef bool (*AMstackCallback)(AMstack** stack, void* data); + +/** + * \memberof AMstack + * \brief Deallocates the storage for a stack of results. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. 
+ * \pre \p stack `!= NULL` + * \post `*stack == NULL` + */ +void AMstackFree(AMstack** stack); + +/** + * \memberof AMstack + * \brief Gets a result from the stack after removing it. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] result A pointer to the `AMresult` to be popped or `NULL` to + * select the top result in \p stack. + * \return A pointer to an `AMresult` struct or `NULL`. + * \pre \p stack `!= NULL` + * \warning The returned `AMresult` struct pointer must be passed to + * `AMresultFree()` in order to avoid a memory leak. + */ +AMresult* AMstackPop(AMstack** stack, AMresult const* result); + +/** + * \memberof AMstack + * \brief Pushes the given result onto the given stack, calls the given + * callback with the given data to validate it and then either gets the + * result if it's valid or gets `NULL` instead. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] result A pointer to an `AMresult` struct. + * \param[in] callback A pointer to a function with the same signature as + * `AMstackCallback()` or `NULL`. + * \param[in] data A pointer to arbitrary data or `NULL` which is passed to + * \p callback. + * \return \p result or `NULL`. + * \warning If \p stack `== NULL` then \p result is deallocated in order to + * avoid a memory leak. + */ +AMresult* AMstackResult(AMstack** stack, AMresult* result, AMstackCallback callback, void* data); + +/** + * \memberof AMstack + * \brief Pushes the given result onto the given stack, calls the given + * callback with the given data to validate it and then either gets the + * first item in the sequence of items within that result if it's valid + * or gets `NULL` instead. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] result A pointer to an `AMresult` struct. + * \param[in] callback A pointer to a function with the same signature as + * `AMstackCallback()` or `NULL`. 
+ * \param[in] data A pointer to arbitrary data or `NULL` which is passed to + * \p callback. + * \return A pointer to an `AMitem` struct or `NULL`. + * \warning If \p stack `== NULL` then \p result is deallocated in order to + * avoid a memory leak. + */ +AMitem* AMstackItem(AMstack** stack, AMresult* result, AMstackCallback callback, void* data); + +/** + * \memberof AMstack + * \brief Pushes the given result onto the given stack, calls the given + * callback with the given data to validate it and then either gets an + * `AMitems` struct over the sequence of items within that result if it's + * valid or gets an empty `AMitems` instead. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] result A pointer to an `AMresult` struct. + * \param[in] callback A pointer to a function with the same signature as + * `AMstackCallback()` or `NULL`. + * \param[in] data A pointer to arbitrary data or `NULL` which is passed to + * \p callback. + * \return An `AMitems` struct. + * \warning If \p stack `== NULL` then \p result is deallocated immediately + * in order to avoid a memory leak. + */ +AMitems AMstackItems(AMstack** stack, AMresult* result, AMstackCallback callback, void* data); + +/** + * \memberof AMstack + * \brief Gets the count of results that have been pushed onto the stack. + * + * \param[in,out] stack A pointer to an `AMstack` struct. + * \return A 64-bit unsigned integer. 
+ */ +size_t AMstackSize(AMstack const* const stack); + +#endif /* AUTOMERGE_C_UTILS_STACK_H */ diff --git a/rust/automerge-c/include/automerge-c/utils/stack_callback_data.h b/rust/automerge-c/include/automerge-c/utils/stack_callback_data.h new file mode 100644 index 00000000..6f9f1edb --- /dev/null +++ b/rust/automerge-c/include/automerge-c/utils/stack_callback_data.h @@ -0,0 +1,53 @@ +#ifndef AUTOMERGE_C_UTILS_PUSH_CALLBACK_DATA_H +#define AUTOMERGE_C_UTILS_PUSH_CALLBACK_DATA_H +/** + * \file + * \brief Utility data structures, functions and macros for supplying + * parameters to the custom validation logic applied to `AMitem` + * structs. + */ + +#include + +/** + * \struct AMstackCallbackData + * \brief A data structure for passing the parameters of an item value test + * to an implementation of the `AMstackCallback` function prototype. + */ +typedef struct { + /** A bitmask of `AMvalType` tags. */ + AMvalType bitmask; + /** A null-terminated file path string. */ + char const* file; + /** The ordinal number of a line within a file. */ + int line; +} AMstackCallbackData; + +/** + * \memberof AMstackCallbackData + * \brief Allocates a new `AMstackCallbackData` struct and initializes its + * members from their corresponding arguments. + * + * \param[in] bitmask A bitmask of `AMvalType` tags. + * \param[in] file A null-terminated file path string. + * \param[in] line The ordinal number of a line within a file. + * \return A pointer to a disowned `AMstackCallbackData` struct. + * \warning The returned pointer must be passed to `free()` to avoid a memory + * leak. + */ +AMstackCallbackData* AMstackCallbackDataInit(AMvalType const bitmask, char const* const file, int const line); + +/** + * \memberof AMstackCallbackData + * \def AMexpect + * \brief Allocates a new `AMstackCallbackData` struct and initializes it from + * an `AMvalueType` bitmask. + * + * \param[in] bitmask A bitmask of `AMvalType` tags. + * \return A pointer to a disowned `AMstackCallbackData` struct. 
+ * \warning The returned pointer must be passed to `free()` to avoid a memory + * leak. + */ +#define AMexpect(bitmask) AMstackCallbackDataInit(bitmask, __FILE__, __LINE__) + +#endif /* AUTOMERGE_C_UTILS_PUSH_CALLBACK_DATA_H */ diff --git a/rust/automerge-c/include/automerge-c/utils/string.h b/rust/automerge-c/include/automerge-c/utils/string.h new file mode 100644 index 00000000..4d61c2e9 --- /dev/null +++ b/rust/automerge-c/include/automerge-c/utils/string.h @@ -0,0 +1,29 @@ +#ifndef AUTOMERGE_C_UTILS_STRING_H +#define AUTOMERGE_C_UTILS_STRING_H +/** + * \file + * \brief Utility functions for use with `AMbyteSpan` structs that provide + * UTF-8 string views. + */ + +#include + +/** + * \memberof AMbyteSpan + * \brief Returns a pointer to a null-terminated byte string which is a + * duplicate of the given UTF-8 string view except for the substitution + * of its NUL (0) characters with the specified null-terminated byte + * string. + * + * \param[in] str A UTF-8 string view as an `AMbyteSpan` struct. + * \param[in] nul A null-terminated byte string to substitute for NUL characters + * or `NULL` to substitute `"\\0"` for NUL characters. + * \return A disowned null-terminated byte string. + * \pre \p str.src `!= NULL` + * \pre \p str.count `<= sizeof(`\p str.src `)` + * \warning The returned pointer must be passed to `free()` to avoid a memory + * leak. + */ +char* AMstrdup(AMbyteSpan const str, char const* nul); + +#endif /* AUTOMERGE_C_UTILS_STRING_H */ diff --git a/rust/automerge-c/src/CMakeLists.txt b/rust/automerge-c/src/CMakeLists.txt deleted file mode 100644 index e02c0a96..00000000 --- a/rust/automerge-c/src/CMakeLists.txt +++ /dev/null @@ -1,250 +0,0 @@ -cmake_minimum_required(VERSION 3.18 FATAL_ERROR) - -find_program ( - CARGO_CMD - "cargo" - PATHS "$ENV{CARGO_HOME}/bin" - DOC "The Cargo command" -) - -if(NOT CARGO_CMD) - message(FATAL_ERROR "Cargo (Rust package manager) not found! 
Install it and/or set the CARGO_HOME environment variable.") -endif() - -string(TOLOWER "${CMAKE_BUILD_TYPE}" BUILD_TYPE_LOWER) - -if(BUILD_TYPE_LOWER STREQUAL debug) - set(CARGO_BUILD_TYPE "debug") - - set(CARGO_FLAG "") -else() - set(CARGO_BUILD_TYPE "release") - - set(CARGO_FLAG "--release") -endif() - -set(CARGO_FEATURES "") - -set(CARGO_CURRENT_BINARY_DIR "${CARGO_TARGET_DIR}/${CARGO_BUILD_TYPE}") - -set( - CARGO_OUTPUT - ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX} - ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX} -) - -if(WIN32) - # \note The basename of an import library output by Cargo is the filename - # of its corresponding shared library. - list(APPEND CARGO_OUTPUT ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}) -endif() - -add_custom_command( - OUTPUT - ${CARGO_OUTPUT} - COMMAND - # \note cbindgen won't regenerate its output header file after it's - # been removed but it will after its configuration file has been - # updated. 
- ${CMAKE_COMMAND} -DCONDITION=NOT_EXISTS -P ${CMAKE_SOURCE_DIR}/cmake/file_touch.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h ${CMAKE_SOURCE_DIR}/cbindgen.toml - COMMAND - ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} CBINDGEN_TARGET_DIR=${CBINDGEN_TARGET_DIR} ${CARGO_CMD} build ${CARGO_FLAG} ${CARGO_FEATURES} - MAIN_DEPENDENCY - lib.rs - DEPENDS - actor_id.rs - byte_span.rs - change_hashes.rs - change.rs - changes.rs - doc.rs - doc/list.rs - doc/list/item.rs - doc/list/items.rs - doc/map.rs - doc/map/item.rs - doc/map/items.rs - doc/utils.rs - obj.rs - obj/item.rs - obj/items.rs - result.rs - result_stack.rs - strs.rs - sync.rs - sync/have.rs - sync/haves.rs - sync/message.rs - sync/state.rs - ${CMAKE_SOURCE_DIR}/build.rs - ${CMAKE_SOURCE_DIR}/Cargo.toml - ${CMAKE_SOURCE_DIR}/cbindgen.toml - WORKING_DIRECTORY - ${CMAKE_SOURCE_DIR} - COMMENT - "Producing the library artifacts with Cargo..." - VERBATIM -) - -add_custom_target( - ${LIBRARY_NAME}_artifacts ALL - DEPENDS ${CARGO_OUTPUT} -) - -# \note cbindgen's naming behavior isn't fully configurable and it ignores -# `const fn` calls (https://github.com/eqrion/cbindgen/issues/252). -add_custom_command( - TARGET ${LIBRARY_NAME}_artifacts - POST_BUILD - COMMAND - # Compensate for cbindgen's variant struct naming. - ${CMAKE_COMMAND} -DMATCH_REGEX=AM\([^_]+_[^_]+\)_Body -DREPLACE_EXPR=AM\\1 -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - COMMAND - # Compensate for cbindgen's union tag enum type naming. - ${CMAKE_COMMAND} -DMATCH_REGEX=AM\([^_]+\)_Tag -DREPLACE_EXPR=AM\\1Variant -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - COMMAND - # Compensate for cbindgen's translation of consecutive uppercase letters to "ScreamingSnakeCase". 
- ${CMAKE_COMMAND} -DMATCH_REGEX=A_M\([^_]+\)_ -DREPLACE_EXPR=AM_\\1_ -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - COMMAND - # Compensate for cbindgen ignoring `std:mem::size_of()` calls. - ${CMAKE_COMMAND} -DMATCH_REGEX=USIZE_ -DREPLACE_EXPR=\+${CMAKE_SIZEOF_VOID_P} -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - WORKING_DIRECTORY - ${CMAKE_SOURCE_DIR} - COMMENT - "Compensating for cbindgen deficits..." - VERBATIM -) - -if(BUILD_SHARED_LIBS) - if(WIN32) - set(LIBRARY_DESTINATION "${CMAKE_INSTALL_BINDIR}") - else() - set(LIBRARY_DESTINATION "${CMAKE_INSTALL_LIBDIR}") - endif() - - set(LIBRARY_DEFINE_SYMBOL "${SYMBOL_PREFIX}_EXPORTS") - - # \note The basename of an import library output by Cargo is the filename - # of its corresponding shared library. - set(LIBRARY_IMPLIB "${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}") - - set(LIBRARY_LOCATION "${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}") - - set(LIBRARY_NO_SONAME "${WIN32}") - - set(LIBRARY_SONAME "${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX}") - - set(LIBRARY_TYPE "SHARED") -else() - set(LIBRARY_DEFINE_SYMBOL "") - - set(LIBRARY_DESTINATION "${CMAKE_INSTALL_LIBDIR}") - - set(LIBRARY_IMPLIB "") - - set(LIBRARY_LOCATION "${CARGO_CURRENT_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}") - - set(LIBRARY_NO_SONAME "TRUE") - - set(LIBRARY_SONAME "") - - set(LIBRARY_TYPE "STATIC") -endif() - -add_library(${LIBRARY_NAME} ${LIBRARY_TYPE} IMPORTED GLOBAL) - -set_target_properties( - ${LIBRARY_NAME} - PROPERTIES - # \note Cargo writes a debug build into a nested directory instead of - # decorating its name. 
- DEBUG_POSTFIX "" - DEFINE_SYMBOL "${LIBRARY_DEFINE_SYMBOL}" - IMPORTED_IMPLIB "${LIBRARY_IMPLIB}" - IMPORTED_LOCATION "${LIBRARY_LOCATION}" - IMPORTED_NO_SONAME "${LIBRARY_NO_SONAME}" - IMPORTED_SONAME "${LIBRARY_SONAME}" - LINKER_LANGUAGE C - PUBLIC_HEADER "${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h" - SOVERSION "${PROJECT_VERSION_MAJOR}" - VERSION "${PROJECT_VERSION}" - # \note Cargo exports all of the symbols automatically. - WINDOWS_EXPORT_ALL_SYMBOLS "TRUE" -) - -target_compile_definitions(${LIBRARY_NAME} INTERFACE $) - -target_include_directories( - ${LIBRARY_NAME} - INTERFACE - "$" -) - -set(CMAKE_THREAD_PREFER_PTHREAD TRUE) - -set(THREADS_PREFER_PTHREAD_FLAG TRUE) - -find_package(Threads REQUIRED) - -set(LIBRARY_DEPENDENCIES Threads::Threads ${CMAKE_DL_LIBS}) - -if(WIN32) - list(APPEND LIBRARY_DEPENDENCIES Bcrypt userenv ws2_32) -else() - list(APPEND LIBRARY_DEPENDENCIES m) -endif() - -target_link_libraries(${LIBRARY_NAME} INTERFACE ${LIBRARY_DEPENDENCIES}) - -install( - FILES $ - TYPE LIB - # \note The basename of an import library output by Cargo is the filename - # of its corresponding shared library. 
- RENAME "${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}" - OPTIONAL -) - -set(LIBRARY_FILE_NAME "${CMAKE_${LIBRARY_TYPE}_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_${LIBRARY_TYPE}_LIBRARY_SUFFIX}") - -install( - FILES $ - RENAME "${LIBRARY_FILE_NAME}" - DESTINATION ${LIBRARY_DESTINATION} -) - -install( - FILES $ - DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME} -) - -find_package(Doxygen OPTIONAL_COMPONENTS dot) - -if(DOXYGEN_FOUND) - set(DOXYGEN_ALIASES "installed_headerfile=\\headerfile ${LIBRARY_NAME}.h <${PROJECT_NAME}/${LIBRARY_NAME}.h>") - - set(DOXYGEN_GENERATE_LATEX YES) - - set(DOXYGEN_PDF_HYPERLINKS YES) - - set(DOXYGEN_PROJECT_LOGO "${CMAKE_SOURCE_DIR}/img/brandmark.png") - - set(DOXYGEN_SORT_BRIEF_DOCS YES) - - set(DOXYGEN_USE_MDFILE_AS_MAINPAGE "${CMAKE_SOURCE_DIR}/README.md") - - doxygen_add_docs( - ${LIBRARY_NAME}_docs - "${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h" - "${CMAKE_SOURCE_DIR}/README.md" - USE_STAMP_FILE - WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} - COMMENT "Producing documentation with Doxygen..." - ) - - # \note A Doxygen input file isn't a file-level dependency so the Doxygen - # command must instead depend upon a target that outputs the file or - # it will just output an error message when it can't be found. - add_dependencies(${LIBRARY_NAME}_docs ${LIBRARY_NAME}_artifacts) -endif() diff --git a/rust/automerge-c/src/actor_id.rs b/rust/automerge-c/src/actor_id.rs index bc86d5ef..5a28959e 100644 --- a/rust/automerge-c/src/actor_id.rs +++ b/rust/automerge-c/src/actor_id.rs @@ -1,4 +1,5 @@ use automerge as am; +use libc::c_int; use std::cell::RefCell; use std::cmp::Ordering; use std::str::FromStr; @@ -11,7 +12,7 @@ macro_rules! 
to_actor_id { let handle = $handle.as_ref(); match handle { Some(b) => b, - None => return AMresult::err("Invalid AMactorId pointer").into(), + None => return AMresult::error("Invalid `AMactorId*`").into(), } }}; } @@ -57,11 +58,11 @@ impl AsRef for AMactorId { } /// \memberof AMactorId -/// \brief Gets the value of an actor identifier as a sequence of bytes. +/// \brief Gets the value of an actor identifier as an array of bytes. /// /// \param[in] actor_id A pointer to an `AMactorId` struct. -/// \pre \p actor_id `!= NULL`. -/// \return An `AMbyteSpan` struct. +/// \return An `AMbyteSpan` struct for an array of bytes. +/// \pre \p actor_id `!= NULL` /// \internal /// /// # Safety @@ -82,8 +83,8 @@ pub unsafe extern "C" fn AMactorIdBytes(actor_id: *const AMactorId) -> AMbyteSpa /// \return `-1` if \p actor_id1 `<` \p actor_id2, `0` if /// \p actor_id1 `==` \p actor_id2 and `1` if /// \p actor_id1 `>` \p actor_id2. -/// \pre \p actor_id1 `!= NULL`. -/// \pre \p actor_id2 `!= NULL`. +/// \pre \p actor_id1 `!= NULL` +/// \pre \p actor_id2 `!= NULL` /// \internal /// /// #Safety @@ -93,7 +94,7 @@ pub unsafe extern "C" fn AMactorIdBytes(actor_id: *const AMactorId) -> AMbyteSpa pub unsafe extern "C" fn AMactorIdCmp( actor_id1: *const AMactorId, actor_id2: *const AMactorId, -) -> isize { +) -> c_int { match (actor_id1.as_ref(), actor_id2.as_ref()) { (Some(actor_id1), Some(actor_id2)) => match actor_id1.as_ref().cmp(actor_id2.as_ref()) { Ordering::Less => -1, @@ -101,65 +102,69 @@ pub unsafe extern "C" fn AMactorIdCmp( Ordering::Greater => 1, }, (None, Some(_)) => -1, - (Some(_), None) => 1, (None, None) => 0, + (Some(_), None) => 1, } } /// \memberof AMactorId -/// \brief Allocates a new actor identifier and initializes it with a random -/// UUID. +/// \brief Allocates a new actor identifier and initializes it from a random +/// UUID value. /// -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMactorId` struct. 
-/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. #[no_mangle] pub unsafe extern "C" fn AMactorIdInit() -> *mut AMresult { to_result(Ok::(am::ActorId::random())) } /// \memberof AMactorId -/// \brief Allocates a new actor identifier and initializes it from a sequence -/// of bytes. +/// \brief Allocates a new actor identifier and initializes it from an array of +/// bytes value. /// -/// \param[in] src A pointer to a contiguous sequence of bytes. -/// \param[in] count The number of bytes to copy from \p src. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMactorId` struct. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The count of bytes to copy from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety -/// src must be a byte array of size `>= count` +/// src must be a byte array of length `>= count` #[no_mangle] -pub unsafe extern "C" fn AMactorIdInitBytes(src: *const u8, count: usize) -> *mut AMresult { - let slice = std::slice::from_raw_parts(src, count); - to_result(Ok::(am::ActorId::from( - slice, - ))) +pub unsafe extern "C" fn AMactorIdFromBytes(src: *const u8, count: usize) -> *mut AMresult { + if !src.is_null() { + let value = std::slice::from_raw_parts(src, count); + to_result(Ok::(am::ActorId::from( + value, + ))) + } else { + AMresult::error("Invalid uint8_t*").into() + } } /// \memberof AMactorId /// \brief Allocates a new actor identifier and initializes it from a -/// hexadecimal string. +/// hexadecimal UTF-8 string view value. /// -/// \param[in] hex_str A UTF-8 string view as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMactorId` struct. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// hex_str must be a valid pointer to an AMbyteSpan #[no_mangle] -pub unsafe extern "C" fn AMactorIdInitStr(hex_str: AMbyteSpan) -> *mut AMresult { +pub unsafe extern "C" fn AMactorIdFromStr(value: AMbyteSpan) -> *mut AMresult { use am::AutomergeError::InvalidActorId; - to_result(match (&hex_str).try_into() { + to_result(match (&value).try_into() { Ok(s) => match am::ActorId::from_str(s) { Ok(actor_id) => Ok(actor_id), Err(_) => Err(InvalidActorId(String::from(s))), @@ -169,11 +174,12 @@ pub unsafe extern "C" fn AMactorIdInitStr(hex_str: AMbyteSpan) -> *mut AMresult } /// \memberof AMactorId -/// \brief Gets the value of an actor identifier as a hexadecimal string. +/// \brief Gets the value of an actor identifier as a UTF-8 hexadecimal string +/// view. /// /// \param[in] actor_id A pointer to an `AMactorId` struct. -/// \pre \p actor_id `!= NULL`. /// \return A UTF-8 string view as an `AMbyteSpan` struct. +/// \pre \p actor_id `!= NULL` /// \internal /// /// # Safety diff --git a/rust/automerge-c/src/byte_span.rs b/rust/automerge-c/src/byte_span.rs index fd4c3ca0..5855cfc7 100644 --- a/rust/automerge-c/src/byte_span.rs +++ b/rust/automerge-c/src/byte_span.rs @@ -1,14 +1,17 @@ use automerge as am; -use libc::strlen; +use std::cmp::Ordering; use std::convert::TryFrom; use std::os::raw::c_char; +use libc::{c_int, strlen}; +use smol_str::SmolStr; + macro_rules! to_str { - ($span:expr) => {{ - let result: Result<&str, am::AutomergeError> = (&$span).try_into(); + ($byte_span:expr) => {{ + let result: Result<&str, am::AutomergeError> = (&$byte_span).try_into(); match result { Ok(s) => s, - Err(e) => return AMresult::err(&e.to_string()).into(), + Err(e) => return AMresult::error(&e.to_string()).into(), } }}; } @@ -17,16 +20,17 @@ pub(crate) use to_str; /// \struct AMbyteSpan /// \installed_headerfile -/// \brief A view onto a contiguous sequence of bytes. +/// \brief A view onto an array of bytes. 
#[repr(C)] pub struct AMbyteSpan { - /// A pointer to an array of bytes. - /// \attention NEVER CALL `free()` ON \p src! - /// \warning \p src is only valid until the `AMfree()` function is called - /// on the `AMresult` struct that stores the array of bytes to - /// which it points. + /// A pointer to the first byte of an array of bytes. + /// \warning \p src is only valid until the array of bytes to which it + /// points is freed. + /// \note If the `AMbyteSpan` came from within an `AMitem` struct then + /// \p src will be freed when the pointer to the `AMresult` struct + /// containing the `AMitem` struct is passed to `AMresultFree()`. pub src: *const u8, - /// The number of bytes in the array. + /// The count of bytes in the array. pub count: usize, } @@ -52,9 +56,7 @@ impl PartialEq for AMbyteSpan { } else if self.src == other.src { return true; } - let slice = unsafe { std::slice::from_raw_parts(self.src, self.count) }; - let other_slice = unsafe { std::slice::from_raw_parts(other.src, other.count) }; - slice == other_slice + <&[u8]>::from(self) == <&[u8]>::from(other) } } @@ -72,10 +74,15 @@ impl From<&am::ActorId> for AMbyteSpan { impl From<&mut am::ActorId> for AMbyteSpan { fn from(actor: &mut am::ActorId) -> Self { - let slice = actor.to_bytes(); + actor.as_ref().into() + } +} + +impl From<&am::ChangeHash> for AMbyteSpan { + fn from(change_hash: &am::ChangeHash) -> Self { Self { - src: slice.as_ptr(), - count: slice.len(), + src: change_hash.0.as_ptr(), + count: change_hash.0.len(), } } } @@ -93,12 +100,9 @@ impl From<*const c_char> for AMbyteSpan { } } -impl From<&am::ChangeHash> for AMbyteSpan { - fn from(change_hash: &am::ChangeHash) -> Self { - Self { - src: change_hash.0.as_ptr(), - count: change_hash.0.len(), - } +impl From<&SmolStr> for AMbyteSpan { + fn from(smol_str: &SmolStr) -> Self { + smol_str.as_bytes().into() } } @@ -111,13 +115,39 @@ impl From<&[u8]> for AMbyteSpan { } } +impl From<&AMbyteSpan> for &[u8] { + fn from(byte_span: &AMbyteSpan) 
-> Self { + unsafe { std::slice::from_raw_parts(byte_span.src, byte_span.count) } + } +} + +impl From<&AMbyteSpan> for Vec { + fn from(byte_span: &AMbyteSpan) -> Self { + <&[u8]>::from(byte_span).to_vec() + } +} + +impl TryFrom<&AMbyteSpan> for am::ChangeHash { + type Error = am::AutomergeError; + + fn try_from(byte_span: &AMbyteSpan) -> Result { + use am::AutomergeError::InvalidChangeHashBytes; + + let slice: &[u8] = byte_span.into(); + match slice.try_into() { + Ok(change_hash) => Ok(change_hash), + Err(e) => Err(InvalidChangeHashBytes(e)), + } + } +} + impl TryFrom<&AMbyteSpan> for &str { type Error = am::AutomergeError; - fn try_from(span: &AMbyteSpan) -> Result { + fn try_from(byte_span: &AMbyteSpan) -> Result { use am::AutomergeError::InvalidCharacter; - let slice = unsafe { std::slice::from_raw_parts(span.src, span.count) }; + let slice = byte_span.into(); match std::str::from_utf8(slice) { Ok(str_) => Ok(str_), Err(e) => Err(InvalidCharacter(e.valid_up_to())), @@ -125,17 +155,69 @@ impl TryFrom<&AMbyteSpan> for &str { } } -/// \brief Creates an AMbyteSpan from a pointer + length +/// \memberof AMbyteSpan +/// \brief Creates a view onto an array of bytes. /// -/// \param[in] src A pointer to a span of bytes -/// \param[in] count The number of bytes in the span -/// \return An `AMbyteSpan` struct +/// \param[in] src A pointer to an array of bytes or `NULL`. +/// \param[in] count The count of bytes to view from the array pointed to by +/// \p src. +/// \return An `AMbyteSpan` struct. +/// \pre \p count `<= sizeof(`\p src `)` +/// \post `(`\p src `== NULL) -> (AMbyteSpan){NULL, 0}` /// \internal /// /// #Safety -/// AMbytes does not retain the underlying storage, so you must discard the -/// return value before freeing the bytes. 
+/// src must be a byte array of length `>= count` or `std::ptr::null()` #[no_mangle] pub unsafe extern "C" fn AMbytes(src: *const u8, count: usize) -> AMbyteSpan { - AMbyteSpan { src, count } + AMbyteSpan { + src, + count: if src.is_null() { 0 } else { count }, + } +} + +/// \memberof AMbyteSpan +/// \brief Creates a view onto a C string. +/// +/// \param[in] c_str A null-terminated byte string or `NULL`. +/// \return An `AMbyteSpan` struct. +/// \pre Each byte in \p c_str encodes one UTF-8 character. +/// \internal +/// +/// #Safety +/// c_str must be a null-terminated array of `std::os::raw::c_char` or `std::ptr::null()`. +#[no_mangle] +pub unsafe extern "C" fn AMstr(c_str: *const c_char) -> AMbyteSpan { + c_str.into() +} + +/// \memberof AMbyteSpan +/// \brief Compares two UTF-8 string views lexicographically. +/// +/// \param[in] lhs A UTF-8 string view as an `AMbyteSpan` struct. +/// \param[in] rhs A UTF-8 string view as an `AMbyteSpan` struct. +/// \return Negative value if \p lhs appears before \p rhs in lexicographical order. +/// Zero if \p lhs and \p rhs compare equal. +/// Positive value if \p lhs appears after \p rhs in lexicographical order. 
+/// \pre \p lhs.src `!= NULL` +/// \pre \p lhs.count `<= sizeof(`\p lhs.src `)` +/// \pre \p rhs.src `!= NULL` +/// \pre \p rhs.count `<= sizeof(`\p rhs.src `)` +/// \internal +/// +/// #Safety +/// lhs.src must be a byte array of length >= lhs.count +/// rhs.src must be a a byte array of length >= rhs.count +#[no_mangle] +pub unsafe extern "C" fn AMstrCmp(lhs: AMbyteSpan, rhs: AMbyteSpan) -> c_int { + match (<&str>::try_from(&lhs), <&str>::try_from(&rhs)) { + (Ok(lhs), Ok(rhs)) => match lhs.cmp(rhs) { + Ordering::Less => -1, + Ordering::Equal => 0, + Ordering::Greater => 1, + }, + (Err(_), Ok(_)) => -1, + (Err(_), Err(_)) => 0, + (Ok(_), Err(_)) => 1, + } } diff --git a/rust/automerge-c/src/change.rs b/rust/automerge-c/src/change.rs index d64a2635..8529ed94 100644 --- a/rust/automerge-c/src/change.rs +++ b/rust/automerge-c/src/change.rs @@ -2,7 +2,6 @@ use automerge as am; use std::cell::RefCell; use crate::byte_span::AMbyteSpan; -use crate::change_hashes::AMchangeHashes; use crate::result::{to_result, AMresult}; macro_rules! to_change { @@ -10,7 +9,7 @@ macro_rules! to_change { let handle = $handle.as_ref(); match handle { Some(b) => b, - None => return AMresult::err("Invalid AMchange pointer").into(), + None => return AMresult::error("Invalid `AMchange*`").into(), } }}; } @@ -21,14 +20,14 @@ macro_rules! 
to_change { #[derive(Eq, PartialEq)] pub struct AMchange { body: *mut am::Change, - changehash: RefCell>, + change_hash: RefCell>, } impl AMchange { pub fn new(change: &mut am::Change) -> Self { Self { body: change, - changehash: Default::default(), + change_hash: Default::default(), } } @@ -40,12 +39,12 @@ impl AMchange { } pub fn hash(&self) -> AMbyteSpan { - let mut changehash = self.changehash.borrow_mut(); - if let Some(changehash) = changehash.as_ref() { - changehash.into() + let mut change_hash = self.change_hash.borrow_mut(); + if let Some(change_hash) = change_hash.as_ref() { + change_hash.into() } else { let hash = unsafe { (*self.body).hash() }; - let ptr = changehash.insert(hash); + let ptr = change_hash.insert(hash); AMbyteSpan { src: ptr.0.as_ptr(), count: hash.as_ref().len(), @@ -70,11 +69,10 @@ impl AsRef for AMchange { /// \brief Gets the first referenced actor identifier in a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \pre \p change `!= NULL`. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMactorId` struct. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. +/// \pre \p change `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -90,8 +88,8 @@ pub unsafe extern "C" fn AMchangeActorId(change: *const AMchange) -> *mut AMresu /// \memberof AMchange /// \brief Compresses the raw bytes of a change. /// -/// \param[in,out] change A pointer to an `AMchange` struct. -/// \pre \p change `!= NULL`. +/// \param[in] change A pointer to an `AMchange` struct. 
+/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -107,18 +105,20 @@ pub unsafe extern "C" fn AMchangeCompress(change: *mut AMchange) { /// \brief Gets the dependencies of a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \return A pointer to an `AMchangeHashes` struct or `NULL`. -/// \pre \p change `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p change `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// change must be a valid pointer to an AMchange #[no_mangle] -pub unsafe extern "C" fn AMchangeDeps(change: *const AMchange) -> AMchangeHashes { - match change.as_ref() { - Some(change) => AMchangeHashes::new(change.as_ref().deps()), +pub unsafe extern "C" fn AMchangeDeps(change: *const AMchange) -> *mut AMresult { + to_result(match change.as_ref() { + Some(change) => change.as_ref().deps(), None => Default::default(), - } + }) } /// \memberof AMchange @@ -126,7 +126,7 @@ pub unsafe extern "C" fn AMchangeDeps(change: *const AMchange) -> AMchangeHashes /// /// \param[in] change A pointer to an `AMchange` struct. /// \return An `AMbyteSpan` struct. -/// \pre \p change `!= NULL`. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -141,32 +141,33 @@ pub unsafe extern "C" fn AMchangeExtraBytes(change: *const AMchange) -> AMbyteSp } /// \memberof AMchange -/// \brief Loads a sequence of bytes into a change. +/// \brief Allocates a new change and initializes it from an array of bytes value. /// /// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to load. -/// \return A pointer to an `AMresult` struct containing an `AMchange` struct. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. 
-/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] count The count of bytes to load from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_CHANGE` item. +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety -/// src must be a byte array of size `>= count` +/// src must be a byte array of length `>= count` #[no_mangle] pub unsafe extern "C" fn AMchangeFromBytes(src: *const u8, count: usize) -> *mut AMresult { - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(am::Change::from_bytes(data)) + let data = std::slice::from_raw_parts(src, count); + to_result(am::Change::from_bytes(data.to_vec())) } /// \memberof AMchange /// \brief Gets the hash of a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \return A change hash as an `AMbyteSpan` struct. -/// \pre \p change `!= NULL`. +/// \return An `AMbyteSpan` struct for a change hash. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -183,8 +184,8 @@ pub unsafe extern "C" fn AMchangeHash(change: *const AMchange) -> AMbyteSpan { /// \brief Tests the emptiness of a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \return A boolean. -/// \pre \p change `!= NULL`. +/// \return `true` if \p change is empty, `false` otherwise. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -198,12 +199,37 @@ pub unsafe extern "C" fn AMchangeIsEmpty(change: *const AMchange) -> bool { } } +/// \memberof AMchange +/// \brief Loads a document into a sequence of changes. +/// +/// \param[in] src A pointer to an array of bytes. 
+/// \param[in] count The count of bytes to load from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE` items. +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMchangeLoadDocument(src: *const u8, count: usize) -> *mut AMresult { + let data = std::slice::from_raw_parts(src, count); + to_result::, _>>( + am::Automerge::load(data) + .and_then(|d| d.get_changes(&[]).map(|c| c.into_iter().cloned().collect())), + ) +} + /// \memberof AMchange /// \brief Gets the maximum operation index of a change. /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change `!= NULL`. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -221,8 +247,8 @@ pub unsafe extern "C" fn AMchangeMaxOp(change: *const AMchange) -> u64 { /// \brief Gets the message of a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \return A UTF-8 string view as an `AMbyteSpan` struct. -/// \pre \p change `!= NULL`. +/// \return An `AMbyteSpan` struct for a UTF-8 string. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -240,7 +266,7 @@ pub unsafe extern "C" fn AMchangeMessage(change: *const AMchange) -> AMbyteSpan /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change `!= NULL`. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -259,7 +285,7 @@ pub unsafe extern "C" fn AMchangeSeq(change: *const AMchange) -> u64 { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change `!= NULL`. 
+/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -267,10 +293,9 @@ pub unsafe extern "C" fn AMchangeSeq(change: *const AMchange) -> u64 { #[no_mangle] pub unsafe extern "C" fn AMchangeSize(change: *const AMchange) -> usize { if let Some(change) = change.as_ref() { - change.as_ref().len() - } else { - 0 + return change.as_ref().len(); } + 0 } /// \memberof AMchange @@ -278,7 +303,7 @@ pub unsafe extern "C" fn AMchangeSize(change: *const AMchange) -> usize { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change `!= NULL`. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -297,7 +322,7 @@ pub unsafe extern "C" fn AMchangeStartOp(change: *const AMchange) -> u64 { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit signed integer. -/// \pre \p change `!= NULL`. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -315,8 +340,8 @@ pub unsafe extern "C" fn AMchangeTime(change: *const AMchange) -> i64 { /// \brief Gets the raw bytes of a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \return An `AMbyteSpan` struct. -/// \pre \p change `!= NULL`. +/// \return An `AMbyteSpan` struct for an array of bytes. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -329,28 +354,3 @@ pub unsafe extern "C" fn AMchangeRawBytes(change: *const AMchange) -> AMbyteSpan Default::default() } } - -/// \memberof AMchange -/// \brief Loads a document into a sequence of changes. -/// -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to load. -/// \return A pointer to an `AMresult` struct containing a sequence of -/// `AMchange` structs. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// -/// # Safety -/// src must be a byte array of size `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMchangeLoadDocument(src: *const u8, count: usize) -> *mut AMresult { - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result::, _>>( - am::Automerge::load(&data) - .and_then(|d| d.get_changes(&[]).map(|c| c.into_iter().cloned().collect())), - ) -} diff --git a/rust/automerge-c/src/change_hashes.rs b/rust/automerge-c/src/change_hashes.rs deleted file mode 100644 index 029612e9..00000000 --- a/rust/automerge-c/src/change_hashes.rs +++ /dev/null @@ -1,400 +0,0 @@ -use automerge as am; -use std::cmp::Ordering; -use std::ffi::c_void; -use std::mem::size_of; - -use crate::byte_span::AMbyteSpan; -use crate::result::{to_result, AMresult}; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new(change_hashes: &[am::ChangeHash], offset: isize) -> Self { - Self { - len: change_hashes.len(), - offset, - ptr: change_hashes.as_ptr() as *const c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. 
- -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<&am::ChangeHash> { - if self.is_stopped() { - return None; - } - let slice: &[am::ChangeHash] = - unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) }; - let value = &slice[self.get_index()]; - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<&am::ChangeHash> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &[am::ChangeHash] = - unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) }; - Some(&slice[self.get_index()]) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) - .try_into() - .unwrap() - } - } -} - -/// \struct AMchangeHashes -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of change hashes. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMchangeHashes { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. 
- detail: [u8; USIZE_USIZE_USIZE_], -} - -impl AMchangeHashes { - pub fn new(change_hashes: &[am::ChangeHash]) -> Self { - Self { - detail: Detail::new(change_hashes, 0).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<&am::ChangeHash> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<&am::ChangeHash> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[am::ChangeHash]> for AMchangeHashes { - fn as_ref(&self) -> &[am::ChangeHash] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const am::ChangeHash, detail.len) } - } -} - -impl Default for AMchangeHashes { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMchangeHashes -/// \brief Advances an iterator over a sequence of change hashes by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] change_hashes A pointer to an `AMchangeHashes` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p change_hashes `!= NULL`. 
-/// \internal -/// -/// #Safety -/// change_hashes must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesAdvance(change_hashes: *mut AMchangeHashes, n: isize) { - if let Some(change_hashes) = change_hashes.as_mut() { - change_hashes.advance(n); - }; -} - -/// \memberof AMchangeHashes -/// \brief Compares the sequences of change hashes underlying a pair of -/// iterators. -/// -/// \param[in] change_hashes1 A pointer to an `AMchangeHashes` struct. -/// \param[in] change_hashes2 A pointer to an `AMchangeHashes` struct. -/// \return `-1` if \p change_hashes1 `<` \p change_hashes2, `0` if -/// \p change_hashes1 `==` \p change_hashes2 and `1` if -/// \p change_hashes1 `>` \p change_hashes2. -/// \pre \p change_hashes1 `!= NULL`. -/// \pre \p change_hashes2 `!= NULL`. -/// \internal -/// -/// #Safety -/// change_hashes1 must be a valid pointer to an AMchangeHashes -/// change_hashes2 must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesCmp( - change_hashes1: *const AMchangeHashes, - change_hashes2: *const AMchangeHashes, -) -> isize { - match (change_hashes1.as_ref(), change_hashes2.as_ref()) { - (Some(change_hashes1), Some(change_hashes2)) => { - match change_hashes1.as_ref().cmp(change_hashes2.as_ref()) { - Ordering::Less => -1, - Ordering::Equal => 0, - Ordering::Greater => 1, - } - } - (None, Some(_)) => -1, - (Some(_), None) => 1, - (None, None) => 0, - } -} - -/// \memberof AMchangeHashes -/// \brief Allocates an iterator over a sequence of change hashes and -/// initializes it from a sequence of byte spans. -/// -/// \param[in] src A pointer to an array of `AMbyteSpan` structs. -/// \param[in] count The number of `AMbyteSpan` structs to copy from \p src. -/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` -/// struct. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`) / sizeof(AMbyteSpan)`. 
-/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// -/// # Safety -/// src must be an AMbyteSpan array of size `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesInit(src: *const AMbyteSpan, count: usize) -> *mut AMresult { - let mut change_hashes = Vec::::new(); - for n in 0..count { - let byte_span = &*src.add(n); - let slice = std::slice::from_raw_parts(byte_span.src, byte_span.count); - match slice.try_into() { - Ok(change_hash) => { - change_hashes.push(change_hash); - } - Err(e) => { - return to_result(Err(e)); - } - } - } - to_result(Ok::, am::InvalidChangeHashSlice>( - change_hashes, - )) -} - -/// \memberof AMchangeHashes -/// \brief Gets the change hash at the current position of an iterator over a -/// sequence of change hashes and then advances it by at most \p |n| -/// positions where the sign of \p n is relative to the iterator's -/// direction. -/// -/// \param[in,out] change_hashes A pointer to an `AMchangeHashes` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes -/// was previously advanced past its forward/reverse limit. -/// \pre \p change_hashes `!= NULL`. -/// \internal -/// -/// #Safety -/// change_hashes must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesNext( - change_hashes: *mut AMchangeHashes, - n: isize, -) -> AMbyteSpan { - if let Some(change_hashes) = change_hashes.as_mut() { - if let Some(change_hash) = change_hashes.next(n) { - return change_hash.into(); - } - } - Default::default() -} - -/// \memberof AMchangeHashes -/// \brief Advances an iterator over a sequence of change hashes by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction and then gets the change hash at its new -/// position. 
-/// -/// \param[in,out] change_hashes A pointer to an `AMchangeHashes` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes is -/// presently advanced past its forward/reverse limit. -/// \pre \p change_hashes `!= NULL`. -/// \internal -/// -/// #Safety -/// change_hashes must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesPrev( - change_hashes: *mut AMchangeHashes, - n: isize, -) -> AMbyteSpan { - if let Some(change_hashes) = change_hashes.as_mut() { - if let Some(change_hash) = change_hashes.prev(n) { - return change_hash.into(); - } - } - Default::default() -} - -/// \memberof AMchangeHashes -/// \brief Gets the size of the sequence of change hashes underlying an -/// iterator. -/// -/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. -/// \return The count of values in \p change_hashes. -/// \pre \p change_hashes `!= NULL`. -/// \internal -/// -/// #Safety -/// change_hashes must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesSize(change_hashes: *const AMchangeHashes) -> usize { - if let Some(change_hashes) = change_hashes.as_ref() { - change_hashes.len() - } else { - 0 - } -} - -/// \memberof AMchangeHashes -/// \brief Creates an iterator over the same sequence of change hashes as the -/// given one but with the opposite position and direction. -/// -/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. -/// \return An `AMchangeHashes` struct -/// \pre \p change_hashes `!= NULL`. 
-/// \internal -/// -/// #Safety -/// change_hashes must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesReversed( - change_hashes: *const AMchangeHashes, -) -> AMchangeHashes { - if let Some(change_hashes) = change_hashes.as_ref() { - change_hashes.reversed() - } else { - Default::default() - } -} - -/// \memberof AMchangeHashes -/// \brief Creates an iterator at the starting position over the same sequence -/// of change hashes as the given one. -/// -/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. -/// \return An `AMchangeHashes` struct -/// \pre \p change_hashes `!= NULL`. -/// \internal -/// -/// #Safety -/// change_hashes must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesRewound( - change_hashes: *const AMchangeHashes, -) -> AMchangeHashes { - if let Some(change_hashes) = change_hashes.as_ref() { - change_hashes.rewound() - } else { - Default::default() - } -} diff --git a/rust/automerge-c/src/changes.rs b/rust/automerge-c/src/changes.rs deleted file mode 100644 index 1bff35c8..00000000 --- a/rust/automerge-c/src/changes.rs +++ /dev/null @@ -1,399 +0,0 @@ -use automerge as am; -use std::collections::BTreeMap; -use std::ffi::c_void; -use std::mem::size_of; - -use crate::byte_span::AMbyteSpan; -use crate::change::AMchange; -use crate::result::{to_result, AMresult}; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, - storage: *mut c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. 
-pub const USIZE_USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new(changes: &[am::Change], offset: isize, storage: &mut BTreeMap) -> Self { - let storage: *mut BTreeMap = storage; - Self { - len: changes.len(), - offset, - ptr: changes.as_ptr() as *const c_void, - storage: storage as *mut c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. - -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<*const AMchange> { - if self.is_stopped() { - return None; - } - let slice: &mut [am::Change] = - unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, self.len) }; - let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; - let index = self.get_index(); - let value = match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, AMchange::new(&mut slice[index])); - storage.get_mut(&index).unwrap() - } - }; - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<*const AMchange> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &mut [am::Change] = - unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, self.len) }; - let storage = unsafe { &mut *(self.storage as *mut 
BTreeMap) }; - let index = self.get_index(); - Some(match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, AMchange::new(&mut slice[index])); - storage.get_mut(&index).unwrap() - } - }) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - storage: self.storage, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - storage: self.storage, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts( - (&detail as *const Detail) as *const u8, - USIZE_USIZE_USIZE_USIZE_, - ) - .try_into() - .unwrap() - } - } -} - -/// \struct AMchanges -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of changes. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMchanges { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. 
- detail: [u8; USIZE_USIZE_USIZE_USIZE_], -} - -impl AMchanges { - pub fn new(changes: &[am::Change], storage: &mut BTreeMap) -> Self { - Self { - detail: Detail::new(changes, 0, &mut *storage).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<*const AMchange> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<*const AMchange> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[am::Change]> for AMchanges { - fn as_ref(&self) -> &[am::Change] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const am::Change, detail.len) } - } -} - -impl Default for AMchanges { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMchanges -/// \brief Advances an iterator over a sequence of changes by at most \p |n| -/// positions where the sign of \p n is relative to the iterator's -/// direction. -/// -/// \param[in,out] changes A pointer to an `AMchanges` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p changes `!= NULL`. 
-/// \internal -/// -/// #Safety -/// changes must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesAdvance(changes: *mut AMchanges, n: isize) { - if let Some(changes) = changes.as_mut() { - changes.advance(n); - }; -} - -/// \memberof AMchanges -/// \brief Tests the equality of two sequences of changes underlying a pair of -/// iterators. -/// -/// \param[in] changes1 A pointer to an `AMchanges` struct. -/// \param[in] changes2 A pointer to an `AMchanges` struct. -/// \return `true` if \p changes1 `==` \p changes2 and `false` otherwise. -/// \pre \p changes1 `!= NULL`. -/// \pre \p changes2 `!= NULL`. -/// \internal -/// -/// #Safety -/// changes1 must be a valid pointer to an AMchanges -/// changes2 must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesEqual( - changes1: *const AMchanges, - changes2: *const AMchanges, -) -> bool { - match (changes1.as_ref(), changes2.as_ref()) { - (Some(changes1), Some(changes2)) => changes1.as_ref() == changes2.as_ref(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, - } -} - -/// \memberof AMchanges -/// \brief Allocates an iterator over a sequence of changes and initializes it -/// from a sequence of byte spans. -/// -/// \param[in] src A pointer to an array of `AMbyteSpan` structs. -/// \param[in] count The number of `AMbyteSpan` structs to copy from \p src. -/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`) / sizeof(AMbyteSpan)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// -/// # Safety -/// src must be an AMbyteSpan array of size `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMchangesInit(src: *const AMbyteSpan, count: usize) -> *mut AMresult { - let mut changes = Vec::::new(); - for n in 0..count { - let byte_span = &*src.add(n); - let slice = std::slice::from_raw_parts(byte_span.src, byte_span.count); - match slice.try_into() { - Ok(change) => { - changes.push(change); - } - Err(e) => { - return to_result(Err::, am::LoadChangeError>(e)); - } - } - } - to_result(Ok::, am::LoadChangeError>(changes)) -} - -/// \memberof AMchanges -/// \brief Gets the change at the current position of an iterator over a -/// sequence of changes and then advances it by at most \p |n| positions -/// where the sign of \p n is relative to the iterator's direction. -/// -/// \param[in,out] changes A pointer to an `AMchanges` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMchange` struct that's `NULL` when \p changes was -/// previously advanced past its forward/reverse limit. -/// \pre \p changes `!= NULL`. -/// \internal -/// -/// #Safety -/// changes must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesNext(changes: *mut AMchanges, n: isize) -> *const AMchange { - if let Some(changes) = changes.as_mut() { - if let Some(change) = changes.next(n) { - return change; - } - } - std::ptr::null() -} - -/// \memberof AMchanges -/// \brief Advances an iterator over a sequence of changes by at most \p |n| -/// positions where the sign of \p n is relative to the iterator's -/// direction and then gets the change at its new position. -/// -/// \param[in,out] changes A pointer to an `AMchanges` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. 
-/// \return A pointer to an `AMchange` struct that's `NULL` when \p changes is -/// presently advanced past its forward/reverse limit. -/// \pre \p changes `!= NULL`. -/// \internal -/// -/// #Safety -/// changes must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesPrev(changes: *mut AMchanges, n: isize) -> *const AMchange { - if let Some(changes) = changes.as_mut() { - if let Some(change) = changes.prev(n) { - return change; - } - } - std::ptr::null() -} - -/// \memberof AMchanges -/// \brief Gets the size of the sequence of changes underlying an iterator. -/// -/// \param[in] changes A pointer to an `AMchanges` struct. -/// \return The count of values in \p changes. -/// \pre \p changes `!= NULL`. -/// \internal -/// -/// #Safety -/// changes must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesSize(changes: *const AMchanges) -> usize { - if let Some(changes) = changes.as_ref() { - changes.len() - } else { - 0 - } -} - -/// \memberof AMchanges -/// \brief Creates an iterator over the same sequence of changes as the given -/// one but with the opposite position and direction. -/// -/// \param[in] changes A pointer to an `AMchanges` struct. -/// \return An `AMchanges` struct. -/// \pre \p changes `!= NULL`. -/// \internal -/// -/// #Safety -/// changes must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesReversed(changes: *const AMchanges) -> AMchanges { - if let Some(changes) = changes.as_ref() { - changes.reversed() - } else { - Default::default() - } -} - -/// \memberof AMchanges -/// \brief Creates an iterator at the starting position over the same sequence -/// of changes as the given one. -/// -/// \param[in] changes A pointer to an `AMchanges` struct. -/// \return An `AMchanges` struct -/// \pre \p changes `!= NULL`. 
-/// \internal -/// -/// #Safety -/// changes must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesRewound(changes: *const AMchanges) -> AMchanges { - if let Some(changes) = changes.as_ref() { - changes.rewound() - } else { - Default::default() - } -} diff --git a/rust/automerge-c/src/doc.rs b/rust/automerge-c/src/doc.rs index f02c01bf..82f52bf7 100644 --- a/rust/automerge-c/src/doc.rs +++ b/rust/automerge-c/src/doc.rs @@ -6,43 +6,23 @@ use std::ops::{Deref, DerefMut}; use crate::actor_id::{to_actor_id, AMactorId}; use crate::byte_span::{to_str, AMbyteSpan}; -use crate::change_hashes::AMchangeHashes; +use crate::items::AMitems; use crate::obj::{to_obj_id, AMobjId, AMobjType}; -use crate::result::{to_result, AMresult, AMvalue}; +use crate::result::{to_result, AMresult}; use crate::sync::{to_sync_message, AMsyncMessage, AMsyncState}; pub mod list; pub mod map; pub mod utils; -use crate::changes::AMchanges; -use crate::doc::utils::{to_doc, to_doc_mut}; - -macro_rules! to_changes { - ($handle:expr) => {{ - let handle = $handle.as_ref(); - match handle { - Some(b) => b, - None => return AMresult::err("Invalid AMchanges pointer").into(), - } - }}; -} - -macro_rules! to_index { - ($index:expr, $len:expr, $param_name:expr) => {{ - if $index > $len && $index != usize::MAX { - return AMresult::err(&format!("Invalid {} {}", $param_name, $index)).into(); - } - std::cmp::min($index, $len) - }}; -} +use crate::doc::utils::{clamp, to_doc, to_doc_mut, to_items}; macro_rules! 
to_sync_state_mut { ($handle:expr) => {{ let handle = $handle.as_mut(); match handle { Some(b) => b, - None => return AMresult::err("Invalid AMsyncState pointer").into(), + None => return AMresult::error("Invalid `AMsyncState*`").into(), } }}; } @@ -57,6 +37,10 @@ impl AMdoc { pub fn new(auto_commit: am::AutoCommit) -> Self { Self(auto_commit) } + + pub fn is_equal_to(&mut self, other: &mut Self) -> bool { + self.document().get_heads() == other.document().get_heads() + } } impl AsRef for AMdoc { @@ -82,38 +66,38 @@ impl DerefMut for AMdoc { /// \memberof AMdoc /// \brief Applies a sequence of changes to a document. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] changes A pointer to an `AMchanges` struct. -/// \pre \p doc `!= NULL`. -/// \pre \p changes `!= NULL`. -/// \return A pointer to an `AMresult` struct containing a void. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] items A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE` +/// items. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p items `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc -/// changes must be a valid pointer to an AMchanges. +/// items must be a valid pointer to an AMitems. 
#[no_mangle] -pub unsafe extern "C" fn AMapplyChanges( - doc: *mut AMdoc, - changes: *const AMchanges, -) -> *mut AMresult { +pub unsafe extern "C" fn AMapplyChanges(doc: *mut AMdoc, items: *const AMitems) -> *mut AMresult { let doc = to_doc_mut!(doc); - let changes = to_changes!(changes); - to_result(doc.apply_changes(changes.as_ref().to_vec())) + let items = to_items!(items); + match Vec::::try_from(items) { + Ok(changes) => to_result(doc.apply_changes(changes)), + Err(e) => AMresult::error(&e.to_string()).into(), + } } /// \memberof AMdoc /// \brief Allocates storage for a document and initializes it by duplicating /// the given document. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMdoc` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_DOC` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -129,10 +113,9 @@ pub unsafe extern "C" fn AMclone(doc: *const AMdoc) -> *mut AMresult { /// /// \param[in] actor_id A pointer to an `AMactorId` struct or `NULL` for a /// random one. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMdoc` struct. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_DOC` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety @@ -149,15 +132,15 @@ pub unsafe extern "C" fn AMcreate(actor_id: *const AMactorId) -> *mut AMresult { /// \brief Commits the current operations on a document with an optional /// message and/or *nix timestamp (milliseconds). /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] message A UTF-8 string view as an `AMbyteSpan` struct. /// \param[in] timestamp A pointer to a 64-bit integer or `NULL`. -/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` -/// with one element if there were operations to commit, or void if -/// there were no operations to commit. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with one `AM_VAL_TYPE_CHANGE_HASH` +/// item if there were operations to commit or an `AM_VAL_TYPE_VOID` item +/// if there were no operations to commit. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -183,24 +166,24 @@ pub unsafe extern "C" fn AMcommit( /// \brief Creates an empty change with an optional message and/or *nix /// timestamp (milliseconds). /// -/// This is useful if you wish to create a "merge commit" which has as its -/// dependents the current heads of the document but you don't have any -/// operations to add to the document. +/// \details This is useful if you wish to create a "merge commit" which has as +/// its dependents the current heads of the document but you don't have +/// any operations to add to the document. /// /// \note If there are outstanding uncommitted changes to the document -/// then two changes will be created: one for creating the outstanding changes -/// and one for the empty change. 
The empty change will always be the -/// latest change in the document after this call and the returned hash will be -/// the hash of that empty change. +/// then two changes will be created: one for creating the outstanding +/// changes and one for the empty change. The empty change will always be +/// the latest change in the document after this call and the returned +/// hash will be the hash of that empty change. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] message A UTF-8 string view as an `AMbyteSpan` struct. /// \param[in] timestamp A pointer to a 64-bit integer or `NULL`. -/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` -/// with one element. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with one `AM_VAL_TYPE_CHANGE_HASH` +/// item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -226,11 +209,11 @@ pub unsafe extern "C" fn AMemptyChange( /// \brief Tests the equality of two documents after closing their respective /// transactions. /// -/// \param[in,out] doc1 An `AMdoc` struct. -/// \param[in,out] doc2 An `AMdoc` struct. +/// \param[in] doc1 A pointer to an `AMdoc` struct. +/// \param[in] doc2 A pointer to an `AMdoc` struct. /// \return `true` if \p doc1 `==` \p doc2 and `false` otherwise. -/// \pre \p doc1 `!= NULL`. -/// \pre \p doc2 `!= NULL`. 
+/// \pre \p doc1 `!= NULL` +/// \pre \p doc2 `!= NULL` /// \internal /// /// #Safety @@ -239,33 +222,36 @@ pub unsafe extern "C" fn AMemptyChange( #[no_mangle] pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { match (doc1.as_mut(), doc2.as_mut()) { - (Some(doc1), Some(doc2)) => doc1.document().get_heads() == doc2.document().get_heads(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, + (Some(doc1), Some(doc2)) => doc1.is_equal_to(doc2), + (None, None) | (None, Some(_)) | (Some(_), None) => false, } } /// \memberof AMdoc -/// \brief Forks this document at the current or a historical point for use by +/// \brief Forks this document at its current or a historical point for use by /// a different actor. -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical -/// point or `NULL` for the current point. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMdoc` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select a historical point or `NULL` to select its +/// current point. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() #[no_mangle] -pub unsafe extern "C" fn AMfork(doc: *mut AMdoc, heads: *const AMchangeHashes) -> *mut AMresult { +pub unsafe extern "C" fn AMfork(doc: *mut AMdoc, heads: *const AMitems) -> *mut AMresult { let doc = to_doc_mut!(doc); match heads.as_ref() { None => to_result(doc.fork()), - Some(heads) => to_result(doc.fork_at(heads.as_ref())), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.fork_at(&heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, } } @@ -273,14 +259,14 @@ pub unsafe extern "C" fn AMfork(doc: *mut AMdoc, heads: *const AMchangeHashes) - /// \brief Generates a synchronization message for a peer based upon the given /// synchronization state. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in,out] sync_state A pointer to an `AMsyncState` struct. -/// \return A pointer to an `AMresult` struct containing either a pointer to an -/// `AMsyncMessage` struct or a void. -/// \pre \p doc `!= NULL`. -/// \pre \p sync_state `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \return A pointer to an `AMresult` struct with either an +/// `AM_VAL_TYPE_SYNC_MESSAGE` or `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p sync_state `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -300,11 +286,10 @@ pub unsafe extern "C" fn AMgenerateSyncMessage( /// \brief Gets a document's actor identifier. /// /// \param[in] doc A pointer to an `AMdoc` struct. 
-/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMactorId` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -320,20 +305,22 @@ pub unsafe extern "C" fn AMgetActorId(doc: *const AMdoc) -> *mut AMresult { /// \memberof AMdoc /// \brief Gets the change added to a document by its respective hash. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src. -/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. -/// \pre \p doc `!= NULL`. -/// \pre \p src `!= NULL`. -/// \pre \p count `>= AM_CHANGE_HASH_SIZE`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] count The count of bytes to copy from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_CHANGE` item. +/// \pre \p doc `!= NULL` +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src') >= AM_CHANGE_HASH_SIZE` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc -/// src must be a byte array of size `>= automerge::types::HASH_SIZE` +/// src must be a byte array of length `>= automerge::types::HASH_SIZE` #[no_mangle] pub unsafe extern "C" fn AMgetChangeByHash( doc: *mut AMdoc, @@ -344,48 +331,48 @@ pub unsafe extern "C" fn AMgetChangeByHash( let slice = std::slice::from_raw_parts(src, count); match slice.try_into() { Ok(change_hash) => to_result(doc.get_change_by_hash(&change_hash)), - Err(e) => AMresult::err(&e.to_string()).into(), + Err(e) => AMresult::error(&e.to_string()).into(), } } /// \memberof AMdoc /// \brief Gets the changes added to a document by their respective hashes. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] have_deps A pointer to an `AMchangeHashes` struct or `NULL`. -/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] have_deps A pointer to an `AMitems` struct with +/// `AM_VAL_TYPE_CHANGE_HASH` items or `NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE` items. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc #[no_mangle] -pub unsafe extern "C" fn AMgetChanges( - doc: *mut AMdoc, - have_deps: *const AMchangeHashes, -) -> *mut AMresult { +pub unsafe extern "C" fn AMgetChanges(doc: *mut AMdoc, have_deps: *const AMitems) -> *mut AMresult { let doc = to_doc_mut!(doc); - let empty_deps = Vec::::new(); let have_deps = match have_deps.as_ref() { - Some(have_deps) => have_deps.as_ref(), - None => &empty_deps, + Some(have_deps) => match Vec::::try_from(have_deps) { + Ok(change_hashes) => change_hashes, + Err(e) => return AMresult::error(&e.to_string()).into(), + }, + None => Vec::::new(), }; - to_result(doc.get_changes(have_deps)) + to_result(doc.get_changes(&have_deps)) } /// \memberof AMdoc /// \brief Gets the changes added to a second document that weren't added to /// a first document. /// -/// \param[in,out] doc1 An `AMdoc` struct. -/// \param[in,out] doc2 An `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. -/// \pre \p doc1 `!= NULL`. -/// \pre \p doc2 `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] doc1 A pointer to an `AMdoc` struct. +/// \param[in] doc2 A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE` items. +/// \pre \p doc1 `!= NULL` +/// \pre \p doc2 `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -401,12 +388,11 @@ pub unsafe extern "C" fn AMgetChangesAdded(doc1: *mut AMdoc, doc2: *mut AMdoc) - /// \memberof AMdoc /// \brief Gets the current heads of a document. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` -/// struct. -/// \pre \p doc `!= NULL`. 
-/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -423,41 +409,42 @@ pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { /// \brief Gets the hashes of the changes in a document that aren't transitive /// dependencies of the given hashes of changes. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] heads A pointer to an `AMchangeHashes` struct or `NULL`. -/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` -/// struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items or `NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() #[no_mangle] -pub unsafe extern "C" fn AMgetMissingDeps( - doc: *mut AMdoc, - heads: *const AMchangeHashes, -) -> *mut AMresult { +pub unsafe extern "C" fn AMgetMissingDeps(doc: *mut AMdoc, heads: *const AMitems) -> *mut AMresult { let doc = to_doc_mut!(doc); - let empty_heads = Vec::::new(); let heads = match heads.as_ref() { - Some(heads) => heads.as_ref(), - None => &empty_heads, + None => Vec::::new(), + Some(heads) => match >::try_from(heads) { + Ok(heads) => heads, + Err(e) => { + return AMresult::error(&e.to_string()).into(); + } + }, }; - to_result(doc.get_missing_deps(heads)) + to_result(doc.get_missing_deps(heads.as_slice())) } /// \memberof AMdoc /// \brief Gets the last change made to a document. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing either an `AMchange` -/// struct or a void. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing either an +/// `AM_VAL_TYPE_CHANGE` or `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -473,29 +460,33 @@ pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical -/// keys or `NULL` for current keys. 
-/// \return A pointer to an `AMresult` struct containing an `AMstrs` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select historical keys or `NULL` to select current +/// keys. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_STR` items. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMkeys( doc: *const AMdoc, obj_id: *const AMobjId, - heads: *const AMchangeHashes, + heads: *const AMitems, ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); match heads.as_ref() { None => to_result(doc.keys(obj_id)), - Some(heads) => to_result(doc.keys_at(obj_id, heads.as_ref())), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.keys_at(obj_id, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, } } @@ -504,42 +495,43 @@ pub unsafe extern "C" fn AMkeys( /// form of an incremental save. /// /// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to load. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMdoc` struct. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
+/// \param[in] count The count of bytes to load from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_DOC` item. +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety -/// src must be a byte array of size `>= count` +/// src must be a byte array of length `>= count` #[no_mangle] pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMresult { - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(am::AutoCommit::load(&data)) + let data = std::slice::from_raw_parts(src, count); + to_result(am::AutoCommit::load(data)) } /// \memberof AMdoc /// \brief Loads the compact form of an incremental save into a document. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to load. -/// \return A pointer to an `AMresult` struct containing the number of -/// operations loaded from \p src. -/// \pre \p doc `!= NULL`. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] count The count of bytes to load from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_UINT` item. +/// \pre \p doc `!= NULL` +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc -/// src must be a byte array of size `>= count` +/// src must be a byte array of length `>= count` #[no_mangle] pub unsafe extern "C" fn AMloadIncremental( doc: *mut AMdoc, @@ -547,23 +539,21 @@ pub unsafe extern "C" fn AMloadIncremental( count: usize, ) -> *mut AMresult { let doc = to_doc_mut!(doc); - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(doc.load_incremental(&data)) + let data = std::slice::from_raw_parts(src, count); + to_result(doc.load_incremental(data)) } /// \memberof AMdoc /// \brief Applies all of the changes in \p src which are not in \p dest to /// \p dest. /// -/// \param[in,out] dest A pointer to an `AMdoc` struct. -/// \param[in,out] src A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` -/// struct. -/// \pre \p dest `!= NULL`. -/// \pre \p src `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] dest A pointer to an `AMdoc` struct. +/// \param[in] src A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p dest `!= NULL` +/// \pre \p src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -580,31 +570,37 @@ pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMre /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical -/// size or `NULL` for current size. -/// \return A 64-bit unsigned integer. -/// \pre \p doc `!= NULL`. 
+/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select a historical size or `NULL` to select its +/// current size. +/// \return The count of items in the object identified by \p obj_id. +/// \pre \p doc `!= NULL` /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMobjSize( doc: *const AMdoc, obj_id: *const AMobjId, - heads: *const AMchangeHashes, + heads: *const AMitems, ) -> usize { if let Some(doc) = doc.as_ref() { let obj_id = to_obj_id!(obj_id); match heads.as_ref() { - None => doc.length(obj_id), - Some(heads) => doc.length_at(obj_id, heads.as_ref()), + None => { + return doc.length(obj_id); + } + Some(heads) => { + if let Ok(heads) = >::try_from(heads) { + return doc.length_at(obj_id, &heads); + } + } } - } else { - 0 } + 0 } /// \memberof AMdoc @@ -612,8 +608,9 @@ pub unsafe extern "C" fn AMobjSize( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \return An `AMobjType`. -/// \pre \p doc `!= NULL`. +/// \return An `AMobjType` tag or `0`. 
+/// \pre \p doc `!= NULL` +/// \pre \p obj_id `!= NULL` /// \internal /// /// # Safety @@ -623,44 +620,45 @@ pub unsafe extern "C" fn AMobjSize( pub unsafe extern "C" fn AMobjObjType(doc: *const AMdoc, obj_id: *const AMobjId) -> AMobjType { if let Some(doc) = doc.as_ref() { let obj_id = to_obj_id!(obj_id); - match doc.object_type(obj_id) { - Err(_) => AMobjType::Void, - Ok(obj_type) => obj_type.into(), + if let Ok(obj_type) = doc.object_type(obj_id) { + return (&obj_type).into(); } - } else { - AMobjType::Void } + Default::default() } /// \memberof AMdoc -/// \brief Gets the current or historical values of an object within its entire -/// range. +/// \brief Gets the current or historical items of an entire object. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical -/// items or `NULL` for current items. -/// \return A pointer to an `AMresult` struct containing an `AMobjItems` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select its historical items or `NULL` to select +/// its current items. +/// \return A pointer to an `AMresult` struct with an `AMitems` struct. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() #[no_mangle] -pub unsafe extern "C" fn AMobjValues( +pub unsafe extern "C" fn AMobjItems( doc: *const AMdoc, obj_id: *const AMobjId, - heads: *const AMchangeHashes, + heads: *const AMitems, ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); match heads.as_ref() { None => to_result(doc.values(obj_id)), - Some(heads) => to_result(doc.values_at(obj_id, heads.as_ref())), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.values_at(obj_id, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, } } @@ -670,7 +668,7 @@ pub unsafe extern "C" fn AMobjValues( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \return The count of pending operations for \p doc. -/// \pre \p doc `!= NULL`. +/// \pre \p doc `!= NULL` /// \internal /// /// # Safety @@ -678,23 +676,22 @@ pub unsafe extern "C" fn AMobjValues( #[no_mangle] pub unsafe extern "C" fn AMpendingOps(doc: *const AMdoc) -> usize { if let Some(doc) = doc.as_ref() { - doc.pending_ops() - } else { - 0 + return doc.pending_ops(); } + 0 } /// \memberof AMdoc /// \brief Receives a synchronization message from a peer based upon a given /// synchronization state. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in,out] sync_state A pointer to an `AMsyncState` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] sync_state A pointer to an `AMsyncState` struct. /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p sync_state `!= NULL`. -/// \pre \p sync_message `!= NULL`. 
+/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p sync_state `!= NULL` +/// \pre \p sync_message `!= NULL` /// \internal /// /// # Safety @@ -720,9 +717,9 @@ pub unsafe extern "C" fn AMreceiveSyncMessage( /// \brief Cancels the pending operations added during a document's current /// transaction and gets the number of cancellations. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \return The count of pending operations for \p doc that were cancelled. -/// \pre \p doc `!= NULL`. +/// \pre \p doc `!= NULL` /// \internal /// /// # Safety @@ -730,21 +727,19 @@ pub unsafe extern "C" fn AMreceiveSyncMessage( #[no_mangle] pub unsafe extern "C" fn AMrollback(doc: *mut AMdoc) -> usize { if let Some(doc) = doc.as_mut() { - doc.rollback() - } else { - 0 + return doc.rollback(); } + 0 } /// \memberof AMdoc /// \brief Saves the entirety of a document into a compact form. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an array of bytes as -/// an `AMbyteSpan` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTES` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -759,12 +754,11 @@ pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { /// \brief Saves the changes to a document since its last save into a compact /// form. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an array of bytes as -/// an `AMbyteSpan` struct. 
-/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTES` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -778,13 +772,13 @@ pub unsafe extern "C" fn AMsaveIncremental(doc: *mut AMdoc) -> *mut AMresult { /// \memberof AMdoc /// \brief Puts the actor identifier of a document. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] actor_id A pointer to an `AMactorId` struct. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p actor_id `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p actor_id `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -805,76 +799,65 @@ pub unsafe extern "C" fn AMsetActorId( /// \brief Splices values into and/or removes values from the identified object /// at a given position within it. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] pos A position in the object identified by \p obj_id or /// `SIZE_MAX` to indicate one past its end. 
-/// \param[in] del The number of characters to delete or `SIZE_MAX` to indicate +/// \param[in] del The number of values to delete or `SIZE_MAX` to indicate /// all of them. -/// \param[in] src A pointer to an array of `AMvalue` structs. -/// \param[in] count The number of `AMvalue` structs in \p src to load. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id`)` or \p pos `== SIZE_MAX`. -/// \pre `0 <=` \p del `<= AMobjSize(`\p obj_id`)` or \p del `== SIZE_MAX`. -/// \pre `(`\p src `!= NULL and 1 <=` \p count `<= sizeof(`\p src`)/ -/// sizeof(AMvalue)) or `\p src `== NULL or `\p count `== 0`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] values A copy of an `AMitems` struct from which values will be +/// spliced starting at its current position; call +/// `AMitemsRewound()` on a used `AMitems` first to ensure +/// that all of its values are spliced in. Pass `(AMitems){0}` +/// when zero values should be spliced in. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \pre `0 <=` \p del `<= AMobjSize(`\p obj_id `)` or \p del `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// src must be an AMvalue array of size `>= count` or std::ptr::null() +/// values must be a valid pointer to an AMitems or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMsplice( doc: *mut AMdoc, obj_id: *const AMobjId, pos: usize, del: usize, - src: *const AMvalue, - count: usize, + values: AMitems, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let len = doc.length(obj_id); - let pos = to_index!(pos, len, "pos"); - let del = to_index!(del, len, "del"); - let mut vals: Vec = vec![]; - if !(src.is_null() || count == 0) { - let c_vals = std::slice::from_raw_parts(src, count); - for c_val in c_vals { - match c_val.try_into() { - Ok(s) => { - vals.push(s); - } - Err(e) => { - return AMresult::err(&e.to_string()).into(); - } - } - } + let pos = clamp!(pos, len, "pos"); + let del = clamp!(del, len, "del"); + match Vec::::try_from(&values) { + Ok(vals) => to_result(doc.splice(obj_id, pos, del, vals)), + Err(e) => AMresult::error(&e.to_string()).into(), } - to_result(doc.splice(obj_id, pos, del, vals)) } /// \memberof AMdoc /// \brief Splices characters into and/or removes characters from the /// identified object at a given position within it. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] pos A position in the text object identified by \p obj_id or /// `SIZE_MAX` to indicate one past its end. /// \param[in] del The number of characters to delete or `SIZE_MAX` to indicate /// all of them. /// \param[in] text A UTF-8 string view as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id`)` or \p pos `== SIZE_MAX`. 
-/// \pre `0 <=` \p del `<= AMobjSize(`\p obj_id`)` or \p del `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \pre `0 <=` \p del `<= AMobjSize(`\p obj_id `)` or \p del `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -891,8 +874,8 @@ pub unsafe extern "C" fn AMspliceText( let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); let len = doc.length(obj_id); - let pos = to_index!(pos, len, "pos"); - let del = to_index!(del, len, "del"); + let pos = clamp!(pos, len, "pos"); + let del = clamp!(del, len, "del"); to_result(doc.splice_text(obj_id, pos, del, to_str!(text))) } @@ -901,28 +884,32 @@ pub unsafe extern "C" fn AMspliceText( /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical -/// keys or `NULL` for current keys. -/// \return A pointer to an `AMresult` struct containing a UTF-8 string. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] heads A pointer to an `AMitems` struct containing +/// `AM_VAL_TYPE_CHANGE_HASH` items to select a historical string +/// or `NULL` to select the current string. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_STR` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMtext( doc: *const AMdoc, obj_id: *const AMobjId, - heads: *const AMchangeHashes, + heads: *const AMitems, ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); match heads.as_ref() { None => to_result(doc.text(obj_id)), - Some(heads) => to_result(doc.text_at(obj_id, heads.as_ref())), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.text_at(obj_id, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, } } diff --git a/rust/automerge-c/src/doc/list.rs b/rust/automerge-c/src/doc/list.rs index 6bcdeabf..c4503322 100644 --- a/rust/automerge-c/src/doc/list.rs +++ b/rust/automerge-c/src/doc/list.rs @@ -3,47 +3,44 @@ use automerge::transaction::Transactable; use automerge::ReadDoc; use crate::byte_span::{to_str, AMbyteSpan}; -use crate::change_hashes::AMchangeHashes; -use crate::doc::{to_doc, to_doc_mut, to_obj_id, AMdoc}; -use crate::obj::{to_obj_type, AMobjId, AMobjType}; +use crate::doc::{to_doc, to_doc_mut, AMdoc}; +use crate::items::AMitems; +use crate::obj::{to_obj_id, to_obj_type, AMobjId, AMobjType}; use crate::result::{to_result, AMresult}; -pub mod item; -pub mod items; - macro_rules! adjust { - ($index:expr, $insert:expr, $len:expr) => {{ + ($pos:expr, $insert:expr, $len:expr) => {{ // An empty object can only be inserted into. 
let insert = $insert || $len == 0; let end = if insert { $len } else { $len - 1 }; - if $index > end && $index != usize::MAX { - return AMresult::err(&format!("Invalid index {}", $index)).into(); + if $pos > end && $pos != usize::MAX { + return AMresult::error(&format!("Invalid pos {}", $pos)).into(); } - (std::cmp::min($index, end), insert) + (std::cmp::min($pos, end), insert) }}; } macro_rules! to_range { ($begin:expr, $end:expr) => {{ if $begin > $end { - return AMresult::err(&format!("Invalid range [{}-{})", $begin, $end)).into(); + return AMresult::error(&format!("Invalid range [{}-{})", $begin, $end)).into(); }; ($begin..$end) }}; } /// \memberof AMdoc -/// \brief Deletes an index in a list object. +/// \brief Deletes an item from a list object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -53,101 +50,109 @@ macro_rules! 
to_range { pub unsafe extern "C" fn AMlistDelete( doc: *mut AMdoc, obj_id: *const AMobjId, - index: usize, + pos: usize, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let (index, _) = adjust!(index, false, doc.length(obj_id)); - to_result(doc.delete(obj_id, index)) + let (pos, _) = adjust!(pos, false, doc.length(obj_id)); + to_result(doc.delete(obj_id, pos)) } /// \memberof AMdoc -/// \brief Gets the current or historical value at an index in a list object. +/// \brief Gets a current or historical item within a list object. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical -/// value or `NULL` for the current value. -/// \return A pointer to an `AMresult` struct that doesn't contain a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select a historical item at \p pos or `NULL` +/// to select the current item at \p pos. +/// \return A pointer to an `AMresult` struct with an `AMitem` struct. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistGet( doc: *const AMdoc, obj_id: *const AMobjId, - index: usize, - heads: *const AMchangeHashes, + pos: usize, + heads: *const AMitems, ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); - let (index, _) = adjust!(index, false, doc.length(obj_id)); - to_result(match heads.as_ref() { - None => doc.get(obj_id, index), - Some(heads) => doc.get_at(obj_id, index, heads.as_ref()), - }) + let (pos, _) = adjust!(pos, false, doc.length(obj_id)); + match heads.as_ref() { + None => to_result(doc.get(obj_id, pos)), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.get_at(obj_id, pos, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, + } } /// \memberof AMdoc -/// \brief Gets all of the historical values at an index in a list object until -/// its current one or a specific one. +/// \brief Gets all of the historical items at a position within a list object +/// until its current one or a specific one. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical -/// last value or `NULL` for the current last value. -/// \return A pointer to an `AMresult` struct containing an `AMobjItems` struct. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
+/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select a historical last item or `NULL` to select +/// the current last item. +/// \return A pointer to an `AMresult` struct with an `AMitems` struct. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistGetAll( doc: *const AMdoc, obj_id: *const AMobjId, - index: usize, - heads: *const AMchangeHashes, + pos: usize, + heads: *const AMitems, ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); - let (index, _) = adjust!(index, false, doc.length(obj_id)); + let (pos, _) = adjust!(pos, false, doc.length(obj_id)); match heads.as_ref() { - None => to_result(doc.get_all(obj_id, index)), - Some(heads) => to_result(doc.get_all_at(obj_id, index, heads.as_ref())), + None => to_result(doc.get_all(obj_id, pos)), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.get_all_at(obj_id, pos, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, } } /// \memberof AMdoc -/// \brief Increments a counter at an index in a list object by the given -/// value. +/// \brief Increments a counter value in an item within a list object by the +/// given value. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. 
/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -157,32 +162,33 @@ pub unsafe extern "C" fn AMlistGetAll( pub unsafe extern "C" fn AMlistIncrement( doc: *mut AMdoc, obj_id: *const AMobjId, - index: usize, + pos: usize, value: i64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let (index, _) = adjust!(index, false, doc.length(obj_id)); - to_result(doc.increment(obj_id, index, value)) + let (pos, _) = adjust!(pos, false, doc.length(obj_id)); + to_result(doc.increment(obj_id, pos, value)) } /// \memberof AMdoc -/// \brief Puts a boolean as the value at an index in a list object. +/// \brief Puts a boolean value into an item within a list object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. 
-/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. /// \param[in] value A boolean. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety @@ -192,84 +198,85 @@ pub unsafe extern "C" fn AMlistIncrement( pub unsafe extern "C" fn AMlistPutBool( doc: *mut AMdoc, obj_id: *const AMobjId, - index: usize, + pos: usize, insert: bool, value: bool, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); let value = am::ScalarValue::Boolean(value); to_result(if insert { - doc.insert(obj_id, index, value) + doc.insert(obj_id, pos, value) } else { - doc.put(obj_id, index, value) + doc.put(obj_id, pos, value) }) } /// \memberof AMdoc -/// \brief Puts a sequence of bytes as the value at an index in a list object. +/// \brief Puts an array of bytes value at a position within a list object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p src before \p index instead of -/// writing \p src over \p index. -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes to copy from \p src. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
+/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. +/// \param[in] value A view onto the array of bytes to copy from as an +/// `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \pre \p value.src `!= NULL` +/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// src must be a byte array of size `>= count` +/// value.src must be a byte array of length >= value.count #[no_mangle] pub unsafe extern "C" fn AMlistPutBytes( doc: *mut AMdoc, obj_id: *const AMobjId, - index: usize, + pos: usize, insert: bool, - val: AMbyteSpan, + value: AMbyteSpan, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - let mut value = Vec::new(); - value.extend_from_slice(std::slice::from_raw_parts(val.src, val.count)); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); + let value: Vec = (&value).into(); to_result(if insert { - doc.insert(obj_id, index, value) + doc.insert(obj_id, pos, value) } else { - doc.put(obj_id, index, value) + doc.put(obj_id, pos, value) }) } /// \memberof AMdoc -/// \brief Puts a CRDT counter as the value at an index in a list object. +/// \brief Puts a CRDT counter value into an item within a list object. 
/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety @@ -279,38 +286,39 @@ pub unsafe extern "C" fn AMlistPutBytes( pub unsafe extern "C" fn AMlistPutCounter( doc: *mut AMdoc, obj_id: *const AMobjId, - index: usize, + pos: usize, insert: bool, value: i64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); let value = am::ScalarValue::Counter(value.into()); to_result(if insert { - doc.insert(obj_id, index, value) + doc.insert(obj_id, pos, value) } else { - doc.put(obj_id, index, value) + doc.put(obj_id, pos, value) }) } /// \memberof AMdoc -/// \brief Puts a float as the value at an index in a list object. +/// \brief Puts a float value into an item within a list object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. /// \param[in] value A 64-bit float. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. 
-/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -320,37 +328,38 @@ pub unsafe extern "C" fn AMlistPutCounter( pub unsafe extern "C" fn AMlistPutF64( doc: *mut AMdoc, obj_id: *const AMobjId, - index: usize, + pos: usize, insert: bool, value: f64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); to_result(if insert { - doc.insert(obj_id, index, value) + doc.insert(obj_id, pos, value) } else { - doc.put(obj_id, index, value) + doc.put(obj_id, pos, value) }) } /// \memberof AMdoc -/// \brief Puts a signed integer as the value at an index in a list object. +/// \brief Puts a signed integer value into an item within a list object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. 
+/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -360,36 +369,37 @@ pub unsafe extern "C" fn AMlistPutF64( pub unsafe extern "C" fn AMlistPutInt( doc: *mut AMdoc, obj_id: *const AMobjId, - index: usize, + pos: usize, insert: bool, value: i64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); to_result(if insert { - doc.insert(obj_id, index, value) + doc.insert(obj_id, pos, value) } else { - doc.put(obj_id, index, value) + doc.put(obj_id, pos, value) }) } /// \memberof AMdoc -/// \brief Puts null as the value at an index in a list object. +/// \brief Puts a null value into an item within a list object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. 
-/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -399,38 +409,37 @@ pub unsafe extern "C" fn AMlistPutInt( pub unsafe extern "C" fn AMlistPutNull( doc: *mut AMdoc, obj_id: *const AMobjId, - index: usize, + pos: usize, insert: bool, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); to_result(if insert { - doc.insert(obj_id, index, ()) + doc.insert(obj_id, pos, ()) } else { - doc.put(obj_id, index, ()) + doc.put(obj_id, pos, ()) }) } /// \memberof AMdoc -/// \brief Puts an empty object as the value at an index in a list object. +/// \brief Puts an empty object value into an item within a list object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. 
/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. /// \param[in] obj_type An `AMobjIdType` enum tag. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMobjId` struct. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \pre \p obj_type != `AM_OBJ_TYPE_VOID`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_OBJ_TYPE` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety @@ -440,82 +449,85 @@ pub unsafe extern "C" fn AMlistPutNull( pub unsafe extern "C" fn AMlistPutObject( doc: *mut AMdoc, obj_id: *const AMobjId, - index: usize, + pos: usize, insert: bool, obj_type: AMobjType, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - let object = to_obj_type!(obj_type); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); + let obj_type = to_obj_type!(obj_type); to_result(if insert { - doc.insert_object(obj_id, index, object) + (doc.insert_object(obj_id, pos, obj_type), obj_type) } else { - doc.put_object(obj_id, index, object) + (doc.put_object(obj_id, pos, obj_type), obj_type) }) } /// \memberof AMdoc -/// \brief Puts a UTF-8 string as the value at an index in a list object. +/// \brief Puts a UTF-8 string value into an item within a list object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. /// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. 
-/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \pre \p value `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \pre \p value.src `!= NULL` +/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// value must be a null-terminated array of `c_char` +/// value.src must be a byte array of length >= value.count #[no_mangle] pub unsafe extern "C" fn AMlistPutStr( doc: *mut AMdoc, obj_id: *const AMobjId, - index: usize, + pos: usize, insert: bool, value: AMbyteSpan, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); let value = to_str!(value); to_result(if insert { - doc.insert(obj_id, index, value) + doc.insert(obj_id, pos, value) } else { - doc.put(obj_id, index, value) + doc.put(obj_id, pos, value) }) } /// \memberof AMdoc -/// \brief Puts a *nix timestamp (milliseconds) as the value at an index in a +/// \brief Puts a *nix timestamp (milliseconds) value into an item within a /// list object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. 
-/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety @@ -525,38 +537,39 @@ pub unsafe extern "C" fn AMlistPutStr( pub unsafe extern "C" fn AMlistPutTimestamp( doc: *mut AMdoc, obj_id: *const AMobjId, - index: usize, + pos: usize, insert: bool, value: i64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); let value = am::ScalarValue::Timestamp(value); to_result(if insert { - doc.insert(obj_id, index, value) + doc.insert(obj_id, pos, value) } else { - doc.put(obj_id, index, value) + doc.put(obj_id, pos, value) }) } /// \memberof AMdoc -/// \brief Puts an unsigned integer as the value at an index in a list object. +/// \brief Puts an unsigned integer value into an item within a list object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. /// \param[in] value A 64-bit unsigned integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. 
-/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -566,56 +579,58 @@ pub unsafe extern "C" fn AMlistPutTimestamp( pub unsafe extern "C" fn AMlistPutUint( doc: *mut AMdoc, obj_id: *const AMobjId, - index: usize, + pos: usize, insert: bool, value: u64, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); to_result(if insert { - doc.insert(obj_id, index, value) + doc.insert(obj_id, pos, value) } else { - doc.put(obj_id, index, value) + doc.put(obj_id, pos, value) }) } /// \memberof AMdoc -/// \brief Gets the current or historical indices and values of the list object -/// within the given range. +/// \brief Gets the current or historical items in the list object within the +/// given range. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] begin The first index in a range of indices. -/// \param[in] end At least one past the last index in a range of indices. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical -/// indices and values or `NULL` for current indices and -/// values. -/// \return A pointer to an `AMresult` struct containing an `AMlistItems` -/// struct. -/// \pre \p doc `!= NULL`. -/// \pre \p begin `<=` \p end `<= SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
+/// \param[in] begin The first pos in a range of indices. +/// \param[in] end At least one past the last pos in a range of indices. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select historical items or `NULL` to select +/// current items. +/// \return A pointer to an `AMresult` struct with an `AMitems` struct. +/// \pre \p doc `!= NULL` +/// \pre \p begin `<=` \p end `<= SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMlistRange( doc: *const AMdoc, obj_id: *const AMobjId, begin: usize, end: usize, - heads: *const AMchangeHashes, + heads: *const AMitems, ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); let range = to_range!(begin, end); match heads.as_ref() { None => to_result(doc.list_range(obj_id, range)), - Some(heads) => to_result(doc.list_range_at(obj_id, range, heads.as_ref())), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.list_range_at(obj_id, range, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, } } diff --git a/rust/automerge-c/src/doc/list/item.rs b/rust/automerge-c/src/doc/list/item.rs deleted file mode 100644 index 7a3869f3..00000000 --- a/rust/automerge-c/src/doc/list/item.rs +++ /dev/null @@ -1,97 +0,0 @@ -use automerge as am; - -use crate::obj::AMobjId; -use crate::result::AMvalue; - -/// \struct AMlistItem -/// \installed_headerfile -/// \brief An item in a list object. -pub struct AMlistItem { - /// The index of an item in a list object. - index: usize, - /// The object identifier of an item in a list object. 
- obj_id: AMobjId, - /// The value of an item in a list object. - value: am::Value<'static>, -} - -impl AMlistItem { - pub fn new(index: usize, value: am::Value<'static>, obj_id: am::ObjId) -> Self { - Self { - index, - obj_id: AMobjId::new(obj_id), - value, - } - } -} - -impl PartialEq for AMlistItem { - fn eq(&self, other: &Self) -> bool { - self.index == other.index && self.obj_id == other.obj_id && self.value == other.value - } -} - -/* -impl From<&AMlistItem> for (usize, am::Value<'static>, am::ObjId) { - fn from(list_item: &AMlistItem) -> Self { - (list_item.index, list_item.value.0.clone(), list_item.obj_id.as_ref().clone()) - } -} -*/ - -/// \memberof AMlistItem -/// \brief Gets the index of an item in a list object. -/// -/// \param[in] list_item A pointer to an `AMlistItem` struct. -/// \return A 64-bit unsigned integer. -/// \pre \p list_item `!= NULL`. -/// \internal -/// -/// # Safety -/// list_item must be a valid pointer to an AMlistItem -#[no_mangle] -pub unsafe extern "C" fn AMlistItemIndex(list_item: *const AMlistItem) -> usize { - if let Some(list_item) = list_item.as_ref() { - list_item.index - } else { - usize::MAX - } -} - -/// \memberof AMlistItem -/// \brief Gets the object identifier of an item in a list object. -/// -/// \param[in] list_item A pointer to an `AMlistItem` struct. -/// \return A pointer to an `AMobjId` struct. -/// \pre \p list_item `!= NULL`. -/// \internal -/// -/// # Safety -/// list_item must be a valid pointer to an AMlistItem -#[no_mangle] -pub unsafe extern "C" fn AMlistItemObjId(list_item: *const AMlistItem) -> *const AMobjId { - if let Some(list_item) = list_item.as_ref() { - &list_item.obj_id - } else { - std::ptr::null() - } -} - -/// \memberof AMlistItem -/// \brief Gets the value of an item in a list object. -/// -/// \param[in] list_item A pointer to an `AMlistItem` struct. -/// \return An `AMvalue` struct. -/// \pre \p list_item `!= NULL`. 
-/// \internal -/// -/// # Safety -/// list_item must be a valid pointer to an AMlistItem -#[no_mangle] -pub unsafe extern "C" fn AMlistItemValue<'a>(list_item: *const AMlistItem) -> AMvalue<'a> { - if let Some(list_item) = list_item.as_ref() { - (&list_item.value).into() - } else { - AMvalue::Void - } -} diff --git a/rust/automerge-c/src/doc/list/items.rs b/rust/automerge-c/src/doc/list/items.rs deleted file mode 100644 index 5b4a11fd..00000000 --- a/rust/automerge-c/src/doc/list/items.rs +++ /dev/null @@ -1,348 +0,0 @@ -use std::ffi::c_void; -use std::mem::size_of; - -use crate::doc::list::item::AMlistItem; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new(list_items: &[AMlistItem], offset: isize) -> Self { - Self { - len: list_items.len(), - offset, - ptr: list_items.as_ptr() as *const c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. 
- -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<&AMlistItem> { - if self.is_stopped() { - return None; - } - let slice: &[AMlistItem] = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMlistItem, self.len) }; - let value = &slice[self.get_index()]; - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<&AMlistItem> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &[AMlistItem] = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMlistItem, self.len) }; - Some(&slice[self.get_index()]) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) - .try_into() - .unwrap() - } - } -} - -/// \struct AMlistItems -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of list object items. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMlistItems { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. 
- detail: [u8; USIZE_USIZE_USIZE_], -} - -impl AMlistItems { - pub fn new(list_items: &[AMlistItem]) -> Self { - Self { - detail: Detail::new(list_items, 0).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<&AMlistItem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<&AMlistItem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[AMlistItem]> for AMlistItems { - fn as_ref(&self) -> &[AMlistItem] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const AMlistItem, detail.len) } - } -} - -impl Default for AMlistItems { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMlistItems -/// \brief Advances an iterator over a sequence of list object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] list_items A pointer to an `AMlistItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p list_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// list_items must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsAdvance(list_items: *mut AMlistItems, n: isize) { - if let Some(list_items) = list_items.as_mut() { - list_items.advance(n); - }; -} - -/// \memberof AMlistItems -/// \brief Tests the equality of two sequences of list object items underlying -/// a pair of iterators. -/// -/// \param[in] list_items1 A pointer to an `AMlistItems` struct. -/// \param[in] list_items2 A pointer to an `AMlistItems` struct. -/// \return `true` if \p list_items1 `==` \p list_items2 and `false` otherwise. -/// \pre \p list_items1 `!= NULL`. -/// \pre \p list_items2 `!= NULL`. -/// \internal -/// -/// #Safety -/// list_items1 must be a valid pointer to an AMlistItems -/// list_items2 must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsEqual( - list_items1: *const AMlistItems, - list_items2: *const AMlistItems, -) -> bool { - match (list_items1.as_ref(), list_items2.as_ref()) { - (Some(list_items1), Some(list_items2)) => list_items1.as_ref() == list_items2.as_ref(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, - } -} - -/// \memberof AMlistItems -/// \brief Gets the list object item at the current position of an iterator -/// over a sequence of list object items and then advances it by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] list_items A pointer to an `AMlistItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMlistItem` struct that's `NULL` when -/// \p list_items was previously advanced past its forward/reverse -/// limit. -/// \pre \p list_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// list_items must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsNext( - list_items: *mut AMlistItems, - n: isize, -) -> *const AMlistItem { - if let Some(list_items) = list_items.as_mut() { - if let Some(list_item) = list_items.next(n) { - return list_item; - } - } - std::ptr::null() -} - -/// \memberof AMlistItems -/// \brief Advances an iterator over a sequence of list object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction and then gets the list object item at its new -/// position. -/// -/// \param[in,out] list_items A pointer to an `AMlistItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMlistItem` struct that's `NULL` when -/// \p list_items is presently advanced past its forward/reverse limit. -/// \pre \p list_items `!= NULL`. -/// \internal -/// -/// #Safety -/// list_items must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsPrev( - list_items: *mut AMlistItems, - n: isize, -) -> *const AMlistItem { - if let Some(list_items) = list_items.as_mut() { - if let Some(list_item) = list_items.prev(n) { - return list_item; - } - } - std::ptr::null() -} - -/// \memberof AMlistItems -/// \brief Gets the size of the sequence of list object items underlying an -/// iterator. -/// -/// \param[in] list_items A pointer to an `AMlistItems` struct. -/// \return The count of values in \p list_items. -/// \pre \p list_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// list_items must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsSize(list_items: *const AMlistItems) -> usize { - if let Some(list_items) = list_items.as_ref() { - list_items.len() - } else { - 0 - } -} - -/// \memberof AMlistItems -/// \brief Creates an iterator over the same sequence of list object items as -/// the given one but with the opposite position and direction. -/// -/// \param[in] list_items A pointer to an `AMlistItems` struct. -/// \return An `AMlistItems` struct -/// \pre \p list_items `!= NULL`. -/// \internal -/// -/// #Safety -/// list_items must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsReversed(list_items: *const AMlistItems) -> AMlistItems { - if let Some(list_items) = list_items.as_ref() { - list_items.reversed() - } else { - Default::default() - } -} - -/// \memberof AMlistItems -/// \brief Creates an iterator at the starting position over the same sequence -/// of list object items as the given one. -/// -/// \param[in] list_items A pointer to an `AMlistItems` struct. -/// \return An `AMlistItems` struct -/// \pre \p list_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// list_items must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsRewound(list_items: *const AMlistItems) -> AMlistItems { - if let Some(list_items) = list_items.as_ref() { - list_items.rewound() - } else { - Default::default() - } -} diff --git a/rust/automerge-c/src/doc/map.rs b/rust/automerge-c/src/doc/map.rs index 86c6b4a2..b2f7db02 100644 --- a/rust/automerge-c/src/doc/map.rs +++ b/rust/automerge-c/src/doc/map.rs @@ -3,31 +3,29 @@ use automerge::transaction::Transactable; use automerge::ReadDoc; use crate::byte_span::{to_str, AMbyteSpan}; -use crate::change_hashes::AMchangeHashes; -use crate::doc::{to_doc, to_doc_mut, to_obj_id, AMdoc}; -use crate::obj::{to_obj_type, AMobjId, AMobjType}; +use crate::doc::{to_doc, to_doc_mut, AMdoc}; +use crate::items::AMitems; +use crate::obj::{to_obj_id, to_obj_type, AMobjId, AMobjType}; use crate::result::{to_result, AMresult}; -pub mod item; -pub mod items; - /// \memberof AMdoc -/// \brief Deletes a key in a map object. +/// \brief Deletes an item from a map object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] key The UTF-8 string view key of an item within the map object +/// identified by \p obj_id as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. 
+/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count #[no_mangle] pub unsafe extern "C" fn AMmapDelete( doc: *mut AMdoc, @@ -40,96 +38,107 @@ pub unsafe extern "C" fn AMmapDelete( } /// \memberof AMdoc -/// \brief Gets the current or historical value for a key in a map object. +/// \brief Gets a current or historical item within a map object. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical -/// value or `NULL` for the current value. -/// \return A pointer to an `AMresult` struct that doesn't contain a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] key The UTF-8 string view key of an item within the map object +/// identified by \p obj_id as an `AMbyteSpan` struct. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select a historical item at \p key or `NULL` +/// to select the current item at \p key. +/// \return A pointer to an `AMresult` struct with an `AMitem` struct. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() +/// key.src must be a byte array of length >= key.count +/// heads must be a valid pointer to an AMitems or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMmapGet( doc: *const AMdoc, obj_id: *const AMobjId, key: AMbyteSpan, - heads: *const AMchangeHashes, + heads: *const AMitems, ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); let key = to_str!(key); match heads.as_ref() { None => to_result(doc.get(obj_id, key)), - Some(heads) => to_result(doc.get_at(obj_id, key, heads.as_ref())), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.get_at(obj_id, key, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, } } /// \memberof AMdoc -/// \brief Gets all of the historical values for a key in a map object until +/// \brief Gets all of the historical items at a key within a map object until /// its current one or a specific one. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical -/// last value or `NULL` for the current last value. -/// \return A pointer to an `AMresult` struct containing an `AMobjItems` struct. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] key The UTF-8 string view key of an item within the map object +/// identified by \p obj_id as an `AMbyteSpan` struct. 
+/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select a historical last item or `NULL` to +/// select the current last item. +/// \return A pointer to an `AMresult` struct with an `AMItems` struct. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() +/// key.src must be a byte array of length >= key.count +/// heads must be a valid pointer to an AMitems or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMmapGetAll( doc: *const AMdoc, obj_id: *const AMobjId, key: AMbyteSpan, - heads: *const AMchangeHashes, + heads: *const AMitems, ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); let key = to_str!(key); match heads.as_ref() { None => to_result(doc.get_all(obj_id, key)), - Some(heads) => to_result(doc.get_all_at(obj_id, key, heads.as_ref())), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.get_all_at(obj_id, key, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, } } /// \memberof AMdoc -/// \brief Increments a counter for a key in a map object by the given value. +/// \brief Increments a counter at a key in a map object by the given value. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. +/// \param[in] key The UTF-8 string view key of an item within the map object +/// identified by \p obj_id as an `AMbyteSpan` struct. 
/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count #[no_mangle] pub unsafe extern "C" fn AMmapIncrement( doc: *mut AMdoc, @@ -145,21 +154,22 @@ pub unsafe extern "C" fn AMmapIncrement( /// \memberof AMdoc /// \brief Puts a boolean as the value of a key in a map object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. +/// \param[in] key The UTF-8 string view key of an item within the map object +/// identified by \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A boolean. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count #[no_mangle] pub unsafe extern "C" fn AMmapPutBool( doc: *mut AMdoc, @@ -173,59 +183,58 @@ pub unsafe extern "C" fn AMmapPutBool( } /// \memberof AMdoc -/// \brief Puts a sequence of bytes as the value of a key in a map object. +/// \brief Puts an array of bytes value at a key in a map object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes to copy from \p src. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] key The UTF-8 string view key of an item within the map object +/// identified by \p obj_id as an `AMbyteSpan` struct. +/// \param[in] value A view onto an array of bytes as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \pre \p value.src `!= NULL` +/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// src must be a byte array of size `>= count` +/// key.src must be a byte array of length >= key.count +/// value.src must be a byte array of length >= value.count #[no_mangle] pub unsafe extern "C" fn AMmapPutBytes( doc: *mut AMdoc, obj_id: *const AMobjId, key: AMbyteSpan, - val: AMbyteSpan, + value: AMbyteSpan, ) -> *mut AMresult { let doc = to_doc_mut!(doc); let key = to_str!(key); - let mut vec = Vec::new(); - vec.extend_from_slice(std::slice::from_raw_parts(val.src, val.count)); - to_result(doc.put(to_obj_id!(obj_id), key, vec)) + to_result(doc.put(to_obj_id!(obj_id), key, Vec::::from(&value))) } /// \memberof AMdoc /// \brief Puts a CRDT counter as the value of a key in a map object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string view key for the map object identified by /// \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count #[no_mangle] pub unsafe extern "C" fn AMmapPutCounter( doc: *mut AMdoc, @@ -245,20 +254,21 @@ pub unsafe extern "C" fn AMmapPutCounter( /// \memberof AMdoc /// \brief Puts null as the value of a key in a map object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string view key for the map object identified by /// \p obj_id as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count #[no_mangle] pub unsafe extern "C" fn AMmapPutNull( doc: *mut AMdoc, @@ -273,23 +283,22 @@ pub unsafe extern "C" fn AMmapPutNull( /// \memberof AMdoc /// \brief Puts an empty object as the value of a key in a map object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string view key for the map object identified by /// \p obj_id as an `AMbyteSpan` struct. /// \param[in] obj_type An `AMobjIdType` enum tag. 
-/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMobjId` struct. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \pre \p obj_type != `AM_OBJ_TYPE_VOID`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_OBJ_TYPE` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count #[no_mangle] pub unsafe extern "C" fn AMmapPutObject( doc: *mut AMdoc, @@ -299,27 +308,29 @@ pub unsafe extern "C" fn AMmapPutObject( ) -> *mut AMresult { let doc = to_doc_mut!(doc); let key = to_str!(key); - to_result(doc.put_object(to_obj_id!(obj_id), key, to_obj_type!(obj_type))) + let obj_type = to_obj_type!(obj_type); + to_result((doc.put_object(to_obj_id!(obj_id), key, obj_type), obj_type)) } /// \memberof AMdoc /// \brief Puts a float as the value of a key in a map object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string view key for the map object identified by /// \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A 64-bit float. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. 
+/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count #[no_mangle] pub unsafe extern "C" fn AMmapPutF64( doc: *mut AMdoc, @@ -335,21 +346,22 @@ pub unsafe extern "C" fn AMmapPutF64( /// \memberof AMdoc /// \brief Puts a signed integer as the value of a key in a map object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string view key for the map object identified by /// \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count #[no_mangle] pub unsafe extern "C" fn AMmapPutInt( doc: *mut AMdoc, @@ -365,21 +377,22 @@ pub unsafe extern "C" fn AMmapPutInt( /// \memberof AMdoc /// \brief Puts a UTF-8 string as the value of a key in a map object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. 
/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string view key for the map object identified by /// \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count #[no_mangle] pub unsafe extern "C" fn AMmapPutStr( doc: *mut AMdoc, @@ -395,21 +408,22 @@ pub unsafe extern "C" fn AMmapPutStr( /// \brief Puts a *nix timestamp (milliseconds) as the value of a key in a map /// object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string view key for the map object identified by /// \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. 
+/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count #[no_mangle] pub unsafe extern "C" fn AMmapPutTimestamp( doc: *mut AMdoc, @@ -425,21 +439,22 @@ pub unsafe extern "C" fn AMmapPutTimestamp( /// \memberof AMdoc /// \brief Puts an unsigned integer as the value of a key in a map object. /// -/// \param[in,out] doc A pointer to an `AMdoc` struct. +/// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] key A UTF-8 string view key for the map object identified by /// \p obj_id as an `AMbyteSpan` struct. /// \param[in] value A 64-bit unsigned integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count #[no_mangle] pub unsafe extern "C" fn AMmapPutUint( doc: *mut AMdoc, @@ -453,71 +468,82 @@ pub unsafe extern "C" fn AMmapPutUint( } /// \memberof AMdoc -/// \brief Gets the current or historical keys and values of the map object -/// within the given range. 
+/// \brief Gets the current or historical items of the map object within the +/// given range. /// /// \param[in] doc A pointer to an `AMdoc` struct. /// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. /// \param[in] begin The first key in a subrange or `AMstr(NULL)` to indicate the /// absolute first key. -/// \param[in] end The key one past the last key in a subrange or `AMstr(NULL)` to -/// indicate one past the absolute last key. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical -/// keys and values or `NULL` for current keys and values. -/// \return A pointer to an `AMresult` struct containing an `AMmapItems` -/// struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] end The key one past the last key in a subrange or `AMstr(NULL)` +/// to indicate one past the absolute last key. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select historical items or `NULL` to select +/// current items. +/// \return A pointer to an `AMresult` struct with an `AMitems` struct. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// doc must be a valid pointer to an AMdoc /// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() +/// begin.src must be a byte array of length >= begin.count or std::ptr::null() +/// end.src must be a byte array of length >= end.count or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() #[no_mangle] pub unsafe extern "C" fn AMmapRange( doc: *const AMdoc, obj_id: *const AMobjId, begin: AMbyteSpan, end: AMbyteSpan, - heads: *const AMchangeHashes, + heads: *const AMitems, ) -> *mut AMresult { let doc = to_doc!(doc); let obj_id = to_obj_id!(obj_id); + let heads = match heads.as_ref() { + None => None, + Some(heads) => match >::try_from(heads) { + Ok(heads) => Some(heads), + Err(e) => { + return AMresult::error(&e.to_string()).into(); + } + }, + }; match (begin.is_null(), end.is_null()) { (false, false) => { let (begin, end) = (to_str!(begin).to_string(), to_str!(end).to_string()); if begin > end { - return AMresult::err(&format!("Invalid range [{}-{})", begin, end)).into(); + return AMresult::error(&format!("Invalid range [{}-{})", begin, end)).into(); }; let bounds = begin..end; - if let Some(heads) = heads.as_ref() { - to_result(doc.map_range_at(obj_id, bounds, heads.as_ref())) + if let Some(heads) = heads { + to_result(doc.map_range_at(obj_id, bounds, &heads)) } else { to_result(doc.map_range(obj_id, bounds)) } } (false, true) => { let bounds = to_str!(begin).to_string()..; - if let Some(heads) = heads.as_ref() { - to_result(doc.map_range_at(obj_id, bounds, heads.as_ref())) + if let Some(heads) = heads { + to_result(doc.map_range_at(obj_id, bounds, &heads)) } else { to_result(doc.map_range(obj_id, bounds)) } } (true, false) => { let bounds = ..to_str!(end).to_string(); - if let Some(heads) = heads.as_ref() { - to_result(doc.map_range_at(obj_id, bounds, heads.as_ref())) + if let Some(heads) = heads { + 
to_result(doc.map_range_at(obj_id, bounds, &heads)) } else { to_result(doc.map_range(obj_id, bounds)) } } (true, true) => { let bounds = ..; - if let Some(heads) = heads.as_ref() { - to_result(doc.map_range_at(obj_id, bounds, heads.as_ref())) + if let Some(heads) = heads { + to_result(doc.map_range_at(obj_id, bounds, &heads)) } else { to_result(doc.map_range(obj_id, bounds)) } diff --git a/rust/automerge-c/src/doc/map/item.rs b/rust/automerge-c/src/doc/map/item.rs deleted file mode 100644 index 7914fdc4..00000000 --- a/rust/automerge-c/src/doc/map/item.rs +++ /dev/null @@ -1,98 +0,0 @@ -use automerge as am; - -use crate::byte_span::AMbyteSpan; -use crate::obj::AMobjId; -use crate::result::AMvalue; - -/// \struct AMmapItem -/// \installed_headerfile -/// \brief An item in a map object. -pub struct AMmapItem { - /// The key of an item in a map object. - key: String, - /// The object identifier of an item in a map object. - obj_id: AMobjId, - /// The value of an item in a map object. - value: am::Value<'static>, -} - -impl AMmapItem { - pub fn new(key: &'static str, value: am::Value<'static>, obj_id: am::ObjId) -> Self { - Self { - key: key.to_string(), - obj_id: AMobjId::new(obj_id), - value, - } - } -} - -impl PartialEq for AMmapItem { - fn eq(&self, other: &Self) -> bool { - self.key == other.key && self.obj_id == other.obj_id && self.value == other.value - } -} - -/* -impl From<&AMmapItem> for (String, am::Value<'static>, am::ObjId) { - fn from(map_item: &AMmapItem) -> Self { - (map_item.key.into_string().unwrap(), map_item.value.0.clone(), map_item.obj_id.as_ref().clone()) - } -} -*/ - -/// \memberof AMmapItem -/// \brief Gets the key of an item in a map object. -/// -/// \param[in] map_item A pointer to an `AMmapItem` struct. -/// \return An `AMbyteSpan` view of a UTF-8 string. -/// \pre \p map_item `!= NULL`. 
-/// \internal -/// -/// # Safety -/// map_item must be a valid pointer to an AMmapItem -#[no_mangle] -pub unsafe extern "C" fn AMmapItemKey(map_item: *const AMmapItem) -> AMbyteSpan { - if let Some(map_item) = map_item.as_ref() { - map_item.key.as_bytes().into() - } else { - Default::default() - } -} - -/// \memberof AMmapItem -/// \brief Gets the object identifier of an item in a map object. -/// -/// \param[in] map_item A pointer to an `AMmapItem` struct. -/// \return A pointer to an `AMobjId` struct. -/// \pre \p map_item `!= NULL`. -/// \internal -/// -/// # Safety -/// map_item must be a valid pointer to an AMmapItem -#[no_mangle] -pub unsafe extern "C" fn AMmapItemObjId(map_item: *const AMmapItem) -> *const AMobjId { - if let Some(map_item) = map_item.as_ref() { - &map_item.obj_id - } else { - std::ptr::null() - } -} - -/// \memberof AMmapItem -/// \brief Gets the value of an item in a map object. -/// -/// \param[in] map_item A pointer to an `AMmapItem` struct. -/// \return An `AMvalue` struct. -/// \pre \p map_item `!= NULL`. 
-/// \internal -/// -/// # Safety -/// map_item must be a valid pointer to an AMmapItem -#[no_mangle] -pub unsafe extern "C" fn AMmapItemValue<'a>(map_item: *const AMmapItem) -> AMvalue<'a> { - if let Some(map_item) = map_item.as_ref() { - (&map_item.value).into() - } else { - AMvalue::Void - } -} diff --git a/rust/automerge-c/src/doc/map/items.rs b/rust/automerge-c/src/doc/map/items.rs deleted file mode 100644 index cd305971..00000000 --- a/rust/automerge-c/src/doc/map/items.rs +++ /dev/null @@ -1,340 +0,0 @@ -use std::ffi::c_void; -use std::mem::size_of; - -use crate::doc::map::item::AMmapItem; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new(map_items: &[AMmapItem], offset: isize) -> Self { - Self { - len: map_items.len(), - offset, - ptr: map_items.as_ptr() as *const c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. 
- -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<&AMmapItem> { - if self.is_stopped() { - return None; - } - let slice: &[AMmapItem] = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMmapItem, self.len) }; - let value = &slice[self.get_index()]; - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<&AMmapItem> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &[AMmapItem] = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMmapItem, self.len) }; - Some(&slice[self.get_index()]) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) - .try_into() - .unwrap() - } - } -} - -/// \struct AMmapItems -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of map object items. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMmapItems { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. 
- detail: [u8; USIZE_USIZE_USIZE_], -} - -impl AMmapItems { - pub fn new(map_items: &[AMmapItem]) -> Self { - Self { - detail: Detail::new(map_items, 0).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<&AMmapItem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<&AMmapItem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[AMmapItem]> for AMmapItems { - fn as_ref(&self) -> &[AMmapItem] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const AMmapItem, detail.len) } - } -} - -impl Default for AMmapItems { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMmapItems -/// \brief Advances an iterator over a sequence of map object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] map_items A pointer to an `AMmapItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p map_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// map_items must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsAdvance(map_items: *mut AMmapItems, n: isize) { - if let Some(map_items) = map_items.as_mut() { - map_items.advance(n); - }; -} - -/// \memberof AMmapItems -/// \brief Tests the equality of two sequences of map object items underlying -/// a pair of iterators. -/// -/// \param[in] map_items1 A pointer to an `AMmapItems` struct. -/// \param[in] map_items2 A pointer to an `AMmapItems` struct. -/// \return `true` if \p map_items1 `==` \p map_items2 and `false` otherwise. -/// \pre \p map_items1 `!= NULL`. -/// \pre \p map_items2 `!= NULL`. -/// \internal -/// -/// #Safety -/// map_items1 must be a valid pointer to an AMmapItems -/// map_items2 must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsEqual( - map_items1: *const AMmapItems, - map_items2: *const AMmapItems, -) -> bool { - match (map_items1.as_ref(), map_items2.as_ref()) { - (Some(map_items1), Some(map_items2)) => map_items1.as_ref() == map_items2.as_ref(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, - } -} - -/// \memberof AMmapItems -/// \brief Gets the map object item at the current position of an iterator -/// over a sequence of map object items and then advances it by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] map_items A pointer to an `AMmapItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMmapItem` struct that's `NULL` when \p map_items -/// was previously advanced past its forward/reverse limit. -/// \pre \p map_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// map_items must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsNext(map_items: *mut AMmapItems, n: isize) -> *const AMmapItem { - if let Some(map_items) = map_items.as_mut() { - if let Some(map_item) = map_items.next(n) { - return map_item; - } - } - std::ptr::null() -} - -/// \memberof AMmapItems -/// \brief Advances an iterator over a sequence of map object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction and then gets the map object item at its new -/// position. -/// -/// \param[in,out] map_items A pointer to an `AMmapItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMmapItem` struct that's `NULL` when \p map_items -/// is presently advanced past its forward/reverse limit. -/// \pre \p map_items `!= NULL`. -/// \internal -/// -/// #Safety -/// map_items must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsPrev(map_items: *mut AMmapItems, n: isize) -> *const AMmapItem { - if let Some(map_items) = map_items.as_mut() { - if let Some(map_item) = map_items.prev(n) { - return map_item; - } - } - std::ptr::null() -} - -/// \memberof AMmapItems -/// \brief Gets the size of the sequence of map object items underlying an -/// iterator. -/// -/// \param[in] map_items A pointer to an `AMmapItems` struct. -/// \return The count of values in \p map_items. -/// \pre \p map_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// map_items must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsSize(map_items: *const AMmapItems) -> usize { - if let Some(map_items) = map_items.as_ref() { - map_items.len() - } else { - 0 - } -} - -/// \memberof AMmapItems -/// \brief Creates an iterator over the same sequence of map object items as -/// the given one but with the opposite position and direction. -/// -/// \param[in] map_items A pointer to an `AMmapItems` struct. -/// \return An `AMmapItems` struct -/// \pre \p map_items `!= NULL`. -/// \internal -/// -/// #Safety -/// map_items must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsReversed(map_items: *const AMmapItems) -> AMmapItems { - if let Some(map_items) = map_items.as_ref() { - map_items.reversed() - } else { - Default::default() - } -} - -/// \memberof AMmapItems -/// \brief Creates an iterator at the starting position over the same sequence of map object items as the given one. -/// -/// \param[in] map_items A pointer to an `AMmapItems` struct. -/// \return An `AMmapItems` struct -/// \pre \p map_items `!= NULL`. -/// \internal -/// -/// #Safety -/// map_items must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsRewound(map_items: *const AMmapItems) -> AMmapItems { - if let Some(map_items) = map_items.as_ref() { - map_items.rewound() - } else { - Default::default() - } -} diff --git a/rust/automerge-c/src/doc/utils.rs b/rust/automerge-c/src/doc/utils.rs index d98a9a8b..ce465b84 100644 --- a/rust/automerge-c/src/doc/utils.rs +++ b/rust/automerge-c/src/doc/utils.rs @@ -1,9 +1,20 @@ +macro_rules! clamp { + ($index:expr, $len:expr, $param_name:expr) => {{ + if $index > $len && $index != usize::MAX { + return AMresult::error(&format!("Invalid {} {}", $param_name, $index)).into(); + } + std::cmp::min($index, $len) + }}; +} + +pub(crate) use clamp; + macro_rules! 
to_doc { ($handle:expr) => {{ let handle = $handle.as_ref(); match handle { Some(b) => b, - None => return AMresult::err("Invalid AMdoc pointer").into(), + None => return AMresult::error("Invalid `AMdoc*`").into(), } }}; } @@ -15,9 +26,21 @@ macro_rules! to_doc_mut { let handle = $handle.as_mut(); match handle { Some(b) => b, - None => return AMresult::err("Invalid AMdoc pointer").into(), + None => return AMresult::error("Invalid `AMdoc*`").into(), } }}; } pub(crate) use to_doc_mut; + +macro_rules! to_items { + ($handle:expr) => {{ + let handle = $handle.as_ref(); + match handle { + Some(b) => b, + None => return AMresult::error("Invalid `AMitems*`").into(), + } + }}; +} + +pub(crate) use to_items; diff --git a/rust/automerge-c/src/index.rs b/rust/automerge-c/src/index.rs new file mode 100644 index 00000000..f1ea153b --- /dev/null +++ b/rust/automerge-c/src/index.rs @@ -0,0 +1,84 @@ +use automerge as am; + +use std::any::type_name; + +use smol_str::SmolStr; + +use crate::byte_span::AMbyteSpan; + +/// \struct AMindex +/// \installed_headerfile +/// \brief An item index. +#[derive(PartialEq)] +pub enum AMindex { + /// A UTF-8 string key variant. + Key(SmolStr), + /// A 64-bit unsigned integer position variant. 
+ Pos(usize), +} + +impl TryFrom<&AMindex> for AMbyteSpan { + type Error = am::AutomergeError; + + fn try_from(item: &AMindex) -> Result { + use am::AutomergeError::InvalidValueType; + use AMindex::*; + + if let Key(key) = item { + return Ok(key.into()); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl TryFrom<&AMindex> for usize { + type Error = am::AutomergeError; + + fn try_from(item: &AMindex) -> Result { + use am::AutomergeError::InvalidValueType; + use AMindex::*; + + if let Pos(pos) = item { + return Ok(*pos); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +/// \ingroup enumerations +/// \enum AMidxType +/// \installed_headerfile +/// \brief The type of an item's index. +#[derive(PartialEq, Eq)] +#[repr(u8)] +pub enum AMidxType { + /// The default tag, not a type signifier. + Default = 0, + /// A UTF-8 string view key. + Key, + /// A 64-bit unsigned integer position. 
+ Pos, +} + +impl Default for AMidxType { + fn default() -> Self { + Self::Default + } +} + +impl From<&AMindex> for AMidxType { + fn from(index: &AMindex) -> Self { + use AMindex::*; + + match index { + Key(_) => Self::Key, + Pos(_) => Self::Pos, + } + } +} diff --git a/rust/automerge-c/src/item.rs b/rust/automerge-c/src/item.rs new file mode 100644 index 00000000..94735464 --- /dev/null +++ b/rust/automerge-c/src/item.rs @@ -0,0 +1,1963 @@ +use automerge as am; + +use std::any::type_name; +use std::borrow::Cow; +use std::cell::{RefCell, UnsafeCell}; +use std::rc::Rc; + +use crate::actor_id::AMactorId; +use crate::byte_span::{to_str, AMbyteSpan}; +use crate::change::AMchange; +use crate::doc::AMdoc; +use crate::index::{AMidxType, AMindex}; +use crate::obj::AMobjId; +use crate::result::{to_result, AMresult}; +use crate::sync::{AMsyncHave, AMsyncMessage, AMsyncState}; + +/// \struct AMunknownValue +/// \installed_headerfile +/// \brief A value (typically for a `set` operation) whose type is unknown. +#[derive(Default, Eq, PartialEq)] +#[repr(C)] +pub struct AMunknownValue { + /// The value's raw bytes. + bytes: AMbyteSpan, + /// The value's encoded type identifier. 
+ type_code: u8, +} + +pub enum Value { + ActorId(am::ActorId, UnsafeCell>), + Change(Box, UnsafeCell>), + ChangeHash(am::ChangeHash), + Doc(RefCell), + SyncHave(AMsyncHave), + SyncMessage(AMsyncMessage), + SyncState(RefCell), + Value(am::Value<'static>), +} + +impl Value { + pub fn try_into_bytes(&self) -> Result { + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Self::Value(Scalar(scalar)) = &self { + if let Bytes(vector) = scalar.as_ref() { + return Ok(vector.as_slice().into()); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } + + pub fn try_into_change_hash(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Self::ChangeHash(change_hash) = &self { + return Ok(change_hash.into()); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } + + pub fn try_into_counter(&self) -> Result { + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Self::Value(Scalar(scalar)) = &self { + if let Counter(counter) = scalar.as_ref() { + return Ok(counter.into()); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } + + pub fn try_into_int(&self) -> Result { + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Self::Value(Scalar(scalar)) = &self { + if let Int(int) = scalar.as_ref() { + return Ok(*int); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } + + pub fn try_into_str(&self) -> Result { + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Self::Value(Scalar(scalar)) = &self { + if let Str(smol_str) = scalar.as_ref() { + return Ok(smol_str.into()); + } + } + Err(InvalidValueType { + 
expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } + + pub fn try_into_timestamp(&self) -> Result { + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Self::Value(Scalar(scalar)) = &self { + if let Timestamp(timestamp) = scalar.as_ref() { + return Ok(*timestamp); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl From for Value { + fn from(actor_id: am::ActorId) -> Self { + Self::ActorId(actor_id, Default::default()) + } +} + +impl From for Value { + fn from(auto_commit: am::AutoCommit) -> Self { + Self::Doc(RefCell::new(AMdoc::new(auto_commit))) + } +} + +impl From for Value { + fn from(change: am::Change) -> Self { + Self::Change(Box::new(change), Default::default()) + } +} + +impl From for Value { + fn from(change_hash: am::ChangeHash) -> Self { + Self::ChangeHash(change_hash) + } +} + +impl From for Value { + fn from(have: am::sync::Have) -> Self { + Self::SyncHave(AMsyncHave::new(have)) + } +} + +impl From for Value { + fn from(message: am::sync::Message) -> Self { + Self::SyncMessage(AMsyncMessage::new(message)) + } +} + +impl From for Value { + fn from(state: am::sync::State) -> Self { + Self::SyncState(RefCell::new(AMsyncState::new(state))) + } +} + +impl From> for Value { + fn from(value: am::Value<'static>) -> Self { + Self::Value(value) + } +} + +impl From for Value { + fn from(string: String) -> Self { + Self::Value(am::Value::Scalar(Cow::Owned(am::ScalarValue::Str( + string.into(), + )))) + } +} + +impl<'a> TryFrom<&'a Value> for &'a am::Change { + type Error = am::AutomergeError; + + fn try_from(value: &'a Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + Change(change, _) => Ok(change), + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> 
TryFrom<&'a Value> for &'a am::ChangeHash { + type Error = am::AutomergeError; + + fn try_from(value: &'a Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + ChangeHash(change_hash) => Ok(change_hash), + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> TryFrom<&'a Value> for &'a am::ScalarValue { + type Error = am::AutomergeError; + + fn try_from(value: &'a Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + use am::Value::*; + + if let Value(Scalar(scalar)) = value { + return Ok(scalar.as_ref()); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl<'a> TryFrom<&'a Value> for &'a AMactorId { + type Error = am::AutomergeError; + + fn try_from(value: &'a Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + ActorId(actor_id, c_actor_id) => unsafe { + Ok((*c_actor_id.get()).get_or_insert(AMactorId::new(actor_id))) + }, + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> TryFrom<&'a mut Value> for &'a mut AMchange { + type Error = am::AutomergeError; + + fn try_from(value: &'a mut Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + Change(change, c_change) => unsafe { + Ok((*c_change.get()).get_or_insert(AMchange::new(change))) + }, + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> TryFrom<&'a mut Value> for &'a mut AMdoc { + type Error = am::AutomergeError; + + fn try_from(value: &'a mut Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + Doc(doc) => Ok(doc.get_mut()), + _ => 
Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> TryFrom<&'a Value> for &'a AMsyncHave { + type Error = am::AutomergeError; + + fn try_from(value: &'a Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + SyncHave(sync_have) => Ok(sync_have), + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> TryFrom<&'a Value> for &'a AMsyncMessage { + type Error = am::AutomergeError; + + fn try_from(value: &'a Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + SyncMessage(sync_message) => Ok(sync_message), + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> TryFrom<&'a mut Value> for &'a mut AMsyncState { + type Error = am::AutomergeError; + + fn try_from(value: &'a mut Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + SyncState(sync_state) => Ok(sync_state.get_mut()), + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl TryFrom<&Value> for bool { + type Error = am::AutomergeError; + + fn try_from(value: &Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Value(Scalar(scalar)) = value { + if let Boolean(boolean) = scalar.as_ref() { + return Ok(*boolean); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl TryFrom<&Value> for f64 { + type Error = am::AutomergeError; + + fn try_from(value: &Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + 
if let Value(Scalar(scalar)) = value { + if let F64(float) = scalar.as_ref() { + return Ok(*float); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl TryFrom<&Value> for u64 { + type Error = am::AutomergeError; + + fn try_from(value: &Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Value(Scalar(scalar)) = value { + if let Uint(uint) = scalar.as_ref() { + return Ok(*uint); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl TryFrom<&Value> for AMunknownValue { + type Error = am::AutomergeError; + + fn try_from(value: &Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Value(Scalar(scalar)) = value { + if let Unknown { bytes, type_code } = scalar.as_ref() { + return Ok(Self { + bytes: bytes.as_slice().into(), + type_code: *type_code, + }); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl PartialEq for Value { + fn eq(&self, other: &Self) -> bool { + use self::Value::*; + + match (self, other) { + (ActorId(lhs, _), ActorId(rhs, _)) => *lhs == *rhs, + (Change(lhs, _), Change(rhs, _)) => lhs == rhs, + (ChangeHash(lhs), ChangeHash(rhs)) => lhs == rhs, + (Doc(lhs), Doc(rhs)) => lhs.as_ptr() == rhs.as_ptr(), + (SyncMessage(lhs), SyncMessage(rhs)) => *lhs == *rhs, + (SyncState(lhs), SyncState(rhs)) => *lhs == *rhs, + (Value(lhs), Value(rhs)) => lhs == rhs, + _ => false, + } + } +} + +#[derive(Default)] +pub struct Item { + /// The item's index. + index: Option, + /// The item's identifier. + obj_id: Option, + /// The item's value. 
+ value: Option, +} + +impl Item { + pub fn try_into_bytes(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &self.value { + return value.try_into_bytes(); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + + pub fn try_into_change_hash(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &self.value { + return value.try_into_change_hash(); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + + pub fn try_into_counter(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &self.value { + return value.try_into_counter(); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + + pub fn try_into_int(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &self.value { + return value.try_into_int(); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + + pub fn try_into_str(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &self.value { + return value.try_into_str(); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + + pub fn try_into_timestamp(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &self.value { + return value.try_into_timestamp(); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } +} + +impl From for Item { + fn from(actor_id: am::ActorId) -> Self { + Value::from(actor_id).into() + } +} + +impl From for Item { + fn from(auto_commit: am::AutoCommit) -> Self { + Value::from(auto_commit).into() + } +} + +impl From for Item { + fn from(change: 
am::Change) -> Self { + Value::from(change).into() + } +} + +impl From for Item { + fn from(change_hash: am::ChangeHash) -> Self { + Value::from(change_hash).into() + } +} + +impl From<(am::ObjId, am::ObjType)> for Item { + fn from((obj_id, obj_type): (am::ObjId, am::ObjType)) -> Self { + Self { + index: None, + obj_id: Some(AMobjId::new(obj_id)), + value: Some(am::Value::Object(obj_type).into()), + } + } +} + +impl From for Item { + fn from(have: am::sync::Have) -> Self { + Value::from(have).into() + } +} + +impl From for Item { + fn from(message: am::sync::Message) -> Self { + Value::from(message).into() + } +} + +impl From for Item { + fn from(state: am::sync::State) -> Self { + Value::from(state).into() + } +} + +impl From> for Item { + fn from(value: am::Value<'static>) -> Self { + Value::from(value).into() + } +} + +impl From for Item { + fn from(string: String) -> Self { + Value::from(string).into() + } +} + +impl From for Item { + fn from(value: Value) -> Self { + Self { + index: None, + obj_id: None, + value: Some(value), + } + } +} + +impl PartialEq for Item { + fn eq(&self, other: &Self) -> bool { + self.index == other.index && self.obj_id == other.obj_id && self.value == other.value + } +} + +impl<'a> TryFrom<&'a Item> for &'a am::Change { + type Error = am::AutomergeError; + + fn try_from(item: &'a Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> TryFrom<&'a Item> for &'a am::ChangeHash { + type Error = am::AutomergeError; + + fn try_from(item: &'a Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> TryFrom<&'a Item> for &'a 
am::ScalarValue { + type Error = am::AutomergeError; + + fn try_from(item: &'a Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> TryFrom<&'a Item> for &'a AMactorId { + type Error = am::AutomergeError; + + fn try_from(item: &'a Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> TryFrom<&'a mut Item> for &'a mut AMchange { + type Error = am::AutomergeError; + + fn try_from(item: &'a mut Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &mut item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> TryFrom<&'a mut Item> for &'a mut AMdoc { + type Error = am::AutomergeError; + + fn try_from(item: &'a mut Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &mut item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl From<&Item> for AMidxType { + fn from(item: &Item) -> Self { + if let Some(index) = &item.index { + return index.into(); + } + Default::default() + } +} + +impl<'a> TryFrom<&'a Item> for &'a AMsyncHave { + type Error = am::AutomergeError; + + fn try_from(item: &'a Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> 
TryFrom<&'a Item> for &'a AMsyncMessage { + type Error = am::AutomergeError; + + fn try_from(item: &'a Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> TryFrom<&'a mut Item> for &'a mut AMsyncState { + type Error = am::AutomergeError; + + fn try_from(item: &'a mut Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &mut item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl TryFrom<&Item> for bool { + type Error = am::AutomergeError; + + fn try_from(item: &Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl TryFrom<&Item> for f64 { + type Error = am::AutomergeError; + + fn try_from(item: &Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl TryFrom<&Item> for u64 { + type Error = am::AutomergeError; + + fn try_from(item: &Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl TryFrom<&Item> for AMunknownValue { + type Error = am::AutomergeError; + + fn try_from(item: &Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } 
else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl TryFrom<&Item> for (am::Value<'static>, am::ObjId) { + type Error = am::AutomergeError; + + fn try_from(item: &Item) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidObjId; + use am::AutomergeError::InvalidValueType; + + let expected = type_name::().to_string(); + match (&item.obj_id, &item.value) { + (None, None) | (None, Some(_)) => Err(InvalidObjId("".to_string())), + (Some(_), None) => Err(InvalidValueType { + expected, + unexpected: type_name::>().to_string(), + }), + (Some(obj_id), Some(value)) => match value { + ActorId(_, _) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + ChangeHash(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + Change(_, _) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + Doc(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + SyncHave(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + SyncMessage(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + SyncState(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + Value(v) => Ok((v.clone(), obj_id.as_ref().clone())), + }, + } + } +} + +/// \struct AMitem +/// \installed_headerfile +/// \brief An item within a result. 
+#[derive(Clone)] +pub struct AMitem(Rc); + +impl AMitem { + pub fn exact(obj_id: am::ObjId, value: Value) -> Self { + Self(Rc::new(Item { + index: None, + obj_id: Some(AMobjId::new(obj_id)), + value: Some(value), + })) + } + + pub fn indexed(index: AMindex, obj_id: am::ObjId, value: Value) -> Self { + Self(Rc::new(Item { + index: Some(index), + obj_id: Some(AMobjId::new(obj_id)), + value: Some(value), + })) + } +} + +impl AsRef for AMitem { + fn as_ref(&self) -> &Item { + self.0.as_ref() + } +} + +impl Default for AMitem { + fn default() -> Self { + Self(Rc::new(Item { + index: None, + obj_id: None, + value: None, + })) + } +} + +impl From for AMitem { + fn from(actor_id: am::ActorId) -> Self { + Value::from(actor_id).into() + } +} + +impl From for AMitem { + fn from(auto_commit: am::AutoCommit) -> Self { + Value::from(auto_commit).into() + } +} + +impl From for AMitem { + fn from(change: am::Change) -> Self { + Value::from(change).into() + } +} + +impl From for AMitem { + fn from(change_hash: am::ChangeHash) -> Self { + Value::from(change_hash).into() + } +} + +impl From<(am::ObjId, am::ObjType)> for AMitem { + fn from((obj_id, obj_type): (am::ObjId, am::ObjType)) -> Self { + Self(Rc::new(Item::from((obj_id, obj_type)))) + } +} + +impl From for AMitem { + fn from(have: am::sync::Have) -> Self { + Value::from(have).into() + } +} + +impl From for AMitem { + fn from(message: am::sync::Message) -> Self { + Value::from(message).into() + } +} + +impl From for AMitem { + fn from(state: am::sync::State) -> Self { + Value::from(state).into() + } +} + +impl From> for AMitem { + fn from(value: am::Value<'static>) -> Self { + Value::from(value).into() + } +} + +impl From for AMitem { + fn from(string: String) -> Self { + Value::from(string).into() + } +} + +impl From for AMitem { + fn from(value: Value) -> Self { + Self(Rc::new(Item::from(value))) + } +} + +impl PartialEq for AMitem { + fn eq(&self, other: &Self) -> bool { + self.as_ref() == other.as_ref() + } +} + +impl<'a> 
TryFrom<&'a AMitem> for &'a am::Change { + type Error = am::AutomergeError; + + fn try_from(item: &'a AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl<'a> TryFrom<&'a AMitem> for &'a am::ChangeHash { + type Error = am::AutomergeError; + + fn try_from(item: &'a AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl<'a> TryFrom<&'a AMitem> for &'a am::ScalarValue { + type Error = am::AutomergeError; + + fn try_from(item: &'a AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl<'a> TryFrom<&'a AMitem> for &'a AMactorId { + type Error = am::AutomergeError; + + fn try_from(item: &'a AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl<'a> TryFrom<&'a mut AMitem> for &'a mut AMchange { + type Error = am::AutomergeError; + + fn try_from(item: &'a mut AMitem) -> Result { + if let Some(item) = Rc::get_mut(&mut item.0) { + item.try_into() + } else { + Err(Self::Error::Fail) + } + } +} + +impl<'a> TryFrom<&'a mut AMitem> for &'a mut AMdoc { + type Error = am::AutomergeError; + + fn try_from(item: &'a mut AMitem) -> Result { + if let Some(item) = Rc::get_mut(&mut item.0) { + item.try_into() + } else { + Err(Self::Error::Fail) + } + } +} + +impl<'a> TryFrom<&'a AMitem> for &'a AMsyncHave { + type Error = am::AutomergeError; + + fn try_from(item: &'a AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl<'a> TryFrom<&'a AMitem> for &'a AMsyncMessage { + type Error = am::AutomergeError; + + fn try_from(item: &'a AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl<'a> TryFrom<&'a mut AMitem> for &'a mut AMsyncState { + type Error = am::AutomergeError; + + fn try_from(item: &'a mut AMitem) -> Result { + if let Some(item) = Rc::get_mut(&mut item.0) { + item.try_into() + } else { + Err(Self::Error::Fail) + } + } +} + +impl TryFrom<&AMitem> for bool { + type Error = am::AutomergeError; + + fn try_from(item: &AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl TryFrom<&AMitem> for f64 { + type Error = am::AutomergeError; 
+ + fn try_from(item: &AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl TryFrom<&AMitem> for u64 { + type Error = am::AutomergeError; + + fn try_from(item: &AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl TryFrom<&AMitem> for AMunknownValue { + type Error = am::AutomergeError; + + fn try_from(item: &AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl TryFrom<&AMitem> for (am::Value<'static>, am::ObjId) { + type Error = am::AutomergeError; + + fn try_from(item: &AMitem) -> Result { + item.as_ref().try_into() + } +} + +/// \ingroup enumerations +/// \enum AMvalType +/// \installed_headerfile +/// \brief The type of an item's value. +#[derive(PartialEq, Eq)] +#[repr(u32)] +pub enum AMvalType { + /// An actor identifier value. + ActorId = 1 << 1, + /// A boolean value. + Bool = 1 << 2, + /// A view onto an array of bytes value. + Bytes = 1 << 3, + /// A change value. + Change = 1 << 4, + /// A change hash value. + ChangeHash = 1 << 5, + /// A CRDT counter value. + Counter = 1 << 6, + /// The default tag, not a type signifier. + Default = 0, + /// A document value. + Doc = 1 << 7, + /// A 64-bit float value. + F64 = 1 << 8, + /// A 64-bit signed integer value. + Int = 1 << 9, + /// A null value. + Null = 1 << 10, + /// An object type value. + ObjType = 1 << 11, + /// A UTF-8 string view value. + Str = 1 << 12, + /// A synchronization have value. + SyncHave = 1 << 13, + /// A synchronization message value. + SyncMessage = 1 << 14, + /// A synchronization state value. + SyncState = 1 << 15, + /// A *nix timestamp (milliseconds) value. + Timestamp = 1 << 16, + /// A 64-bit unsigned integer value. + Uint = 1 << 17, + /// An unknown type of value. + Unknown = 1 << 18, + /// A void. 
+ Void = 1 << 0, +} + +impl Default for AMvalType { + fn default() -> Self { + Self::Default + } +} + +impl From<&am::Value<'static>> for AMvalType { + fn from(value: &am::Value<'static>) -> Self { + use am::ScalarValue::*; + use am::Value::*; + + match value { + Object(_) => Self::ObjType, + Scalar(scalar) => match scalar.as_ref() { + Boolean(_) => Self::Bool, + Bytes(_) => Self::Bytes, + Counter(_) => Self::Counter, + F64(_) => Self::F64, + Int(_) => Self::Int, + Null => Self::Null, + Str(_) => Self::Str, + Timestamp(_) => Self::Timestamp, + Uint(_) => Self::Uint, + Unknown { .. } => Self::Unknown, + }, + } + } +} + +impl From<&Value> for AMvalType { + fn from(value: &Value) -> Self { + use self::Value::*; + + match value { + ActorId(_, _) => Self::ActorId, + Change(_, _) => Self::Change, + ChangeHash(_) => Self::ChangeHash, + Doc(_) => Self::Doc, + SyncHave(_) => Self::SyncHave, + SyncMessage(_) => Self::SyncMessage, + SyncState(_) => Self::SyncState, + Value(v) => v.into(), + } + } +} + +impl From<&Item> for AMvalType { + fn from(item: &Item) -> Self { + if let Some(value) = &item.value { + return value.into(); + } + Self::Void + } +} + +/// \memberof AMitem +/// \brief Tests the equality of two items. +/// +/// \param[in] item1 A pointer to an `AMitem` struct. +/// \param[in] item2 A pointer to an `AMitem` struct. +/// \return `true` if \p item1 `==` \p item2 and `false` otherwise. 
+/// \pre \p item1 `!= NULL` +/// \pre \p item2 `!= NULL` +/// \post `!(`\p item1 `&&` \p item2 `) -> false` +/// \internal +/// +/// #Safety +/// item1 must be a valid AMitem pointer +/// item2 must be a valid AMitem pointer +#[no_mangle] +pub unsafe extern "C" fn AMitemEqual(item1: *const AMitem, item2: *const AMitem) -> bool { + match (item1.as_ref(), item2.as_ref()) { + (Some(item1), Some(item2)) => *item1 == *item2, + (None, None) | (None, Some(_)) | (Some(_), None) => false, + } +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a boolean value. +/// +/// \param[in] value A boolean. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BOOL` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +#[no_mangle] +pub unsafe extern "C" fn AMitemFromBool(value: bool) -> *mut AMresult { + AMresult::item(am::Value::from(value).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from an array of bytes value. +/// +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The count of bytes to copy from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTES` item. +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// value.src must be a byte array of length >= value.count +#[no_mangle] +pub unsafe extern "C" fn AMitemFromBytes(src: *const u8, count: usize) -> *mut AMresult { + let value = std::slice::from_raw_parts(src, count); + AMresult::item(am::Value::bytes(value.to_vec()).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a change hash value. 
+/// +/// \param[in] value A change hash as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_CHANGE_HASH` item. +/// \pre \p value.src `!= NULL` +/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// value.src must be a byte array of length >= value.count +#[no_mangle] +pub unsafe extern "C" fn AMitemFromChangeHash(value: AMbyteSpan) -> *mut AMresult { + to_result(am::ChangeHash::try_from(&value)) +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a CRDT counter value. +/// +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_COUNTER` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +#[no_mangle] +pub unsafe extern "C" fn AMitemFromCounter(value: i64) -> *mut AMresult { + AMresult::item(am::Value::counter(value).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a float value. +/// +/// \param[in] value A 64-bit float. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_F64` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +#[no_mangle] +pub unsafe extern "C" fn AMitemFromF64(value: f64) -> *mut AMresult { + AMresult::item(am::Value::f64(value).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a signed integer value. +/// +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_INT` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+#[no_mangle] +pub unsafe extern "C" fn AMitemFromInt(value: i64) -> *mut AMresult { + AMresult::item(am::Value::int(value).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a null value. +/// +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_NULL` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +#[no_mangle] +pub unsafe extern "C" fn AMitemFromNull() -> *mut AMresult { + AMresult::item(am::Value::from(()).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a UTF-8 string value. +/// +/// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_STR` item. +/// \pre \p value.src `!= NULL` +/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// value.src must be a byte array of length >= value.count +#[no_mangle] +pub unsafe extern "C" fn AMitemFromStr(value: AMbyteSpan) -> *mut AMresult { + AMresult::item(am::Value::str(to_str!(value)).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a *nix timestamp +/// (milliseconds) value. +/// +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_TIMESTAMP` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +#[no_mangle] +pub unsafe extern "C" fn AMitemFromTimestamp(value: i64) -> *mut AMresult { + AMresult::item(am::Value::timestamp(value).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from an unsigned integer value. 
+/// +/// \param[in] value A 64-bit unsigned integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_UINT` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +#[no_mangle] +pub unsafe extern "C" fn AMitemFromUint(value: u64) -> *mut AMresult { + AMresult::item(am::Value::uint(value).into()).into() +} + +/// \memberof AMitem +/// \brief Gets the type of an item's index. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \return An `AMidxType` enum tag. +/// \pre \p item `!= NULL` +/// \post `(`\p item `== NULL) -> 0` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemIdxType(item: *const AMitem) -> AMidxType { + if let Some(item) = item.as_ref() { + return item.0.as_ref().into(); + } + Default::default() +} + +/// \memberof AMitem +/// \brief Gets the object identifier of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \return A pointer to an `AMobjId` struct. +/// \pre \p item `!= NULL` +/// \post `(`\p item `== NULL) -> NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemObjId(item: *const AMitem) -> *const AMobjId { + if let Some(item) = item.as_ref() { + if let Some(obj_id) = &item.as_ref().obj_id { + return obj_id; + } + } + std::ptr::null() +} + +/// \memberof AMitem +/// \brief Gets the UTF-8 string view key index of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a UTF-8 string view as an `AMbyteSpan` struct. +/// \return `true` if `AMitemIdxType(`\p item `) == AM_IDX_TYPE_KEY` and +/// \p *value has been reassigned, `false` otherwise. 
+/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemKey(item: *const AMitem, value: *mut AMbyteSpan) -> bool { + if let Some(item) = item.as_ref() { + if let Some(index) = &item.as_ref().index { + if let Ok(key) = index.try_into() { + if !value.is_null() { + *value = key; + return true; + } + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the unsigned integer position index of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a `size_t`. +/// \return `true` if `AMitemIdxType(`\p item `) == AM_IDX_TYPE_POS` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemPos(item: *const AMitem, value: *mut usize) -> bool { + if let Some(item) = item.as_ref() { + if let Some(index) = &item.as_ref().index { + if let Ok(pos) = index.try_into() { + if !value.is_null() { + *value = pos; + return true; + } + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the reference count of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \return A 64-bit unsigned integer. +/// \pre \p item `!= NULL` +/// \post `(`\p item `== NULL) -> 0` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemRefCount(item: *const AMitem) -> usize { + if let Some(item) = item.as_ref() { + return Rc::strong_count(&item.0); + } + 0 +} + +/// \memberof AMitem +/// \brief Gets a new result for an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \return A pointer to an `AMresult` struct. 
+/// \pre \p item `!= NULL` +/// \post `(`\p item `== NULL) -> NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemResult(item: *const AMitem) -> *mut AMresult { + if let Some(item) = item.as_ref() { + return AMresult::item(item.clone()).into(); + } + std::ptr::null_mut() +} + +/// \memberof AMitem +/// \brief Gets the actor identifier value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMactorId` struct pointer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_ACTOR_ID` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToActorId( + item: *const AMitem, + value: *mut *const AMactorId, +) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(actor_id) = <&AMactorId>::try_from(item) { + if !value.is_null() { + *value = actor_id; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the boolean value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a boolean. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_BOOL` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToBool(item: *const AMitem, value: *mut bool) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(boolean) = item.try_into() { + if !value.is_null() { + *value = boolean; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the array of bytes value of an item. 
+/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMbyteSpan` struct. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_BYTES` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToBytes(item: *const AMitem, value: *mut AMbyteSpan) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(bytes) = item.as_ref().try_into_bytes() { + if !value.is_null() { + *value = bytes; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the change value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMchange` struct pointer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_CHANGE` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToChange(item: *mut AMitem, value: *mut *mut AMchange) -> bool { + if let Some(item) = item.as_mut() { + if let Ok(change) = <&mut AMchange>::try_from(item) { + if !value.is_null() { + *value = change; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the change hash value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMbyteSpan` struct. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_CHANGE_HASH` and +/// \p *value has been reassigned, `false` otherwise. 
+/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToChangeHash(item: *const AMitem, value: *mut AMbyteSpan) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(change_hash) = item.as_ref().try_into_change_hash() { + if !value.is_null() { + *value = change_hash; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the CRDT counter value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a signed 64-bit integer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_COUNTER` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToCounter(item: *const AMitem, value: *mut i64) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(counter) = item.as_ref().try_into_counter() { + if !value.is_null() { + *value = counter; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the document value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMdoc` struct pointer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_DOC` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToDoc(item: *mut AMitem, value: *mut *const AMdoc) -> bool { + if let Some(item) = item.as_mut() { + if let Ok(doc) = <&mut AMdoc>::try_from(item) { + if !value.is_null() { + *value = doc; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the float value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. 
+/// \param[out] value A pointer to a 64-bit float. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_F64` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToF64(item: *const AMitem, value: *mut f64) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(float) = item.try_into() { + if !value.is_null() { + *value = float; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the integer value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a signed 64-bit integer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_INT` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToInt(item: *const AMitem, value: *mut i64) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(int) = item.as_ref().try_into_int() { + if !value.is_null() { + *value = int; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the UTF-8 string view value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a UTF-8 string view as an `AMbyteSpan` struct. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_STR` and +/// \p *value has been reassigned, `false` otherwise. 
+/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToStr(item: *const AMitem, value: *mut AMbyteSpan) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(str) = item.as_ref().try_into_str() { + if !value.is_null() { + *value = str; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the synchronization have value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMsyncHave` struct pointer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_SYNC_HAVE` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToSyncHave( + item: *const AMitem, + value: *mut *const AMsyncHave, +) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(sync_have) = <&AMsyncHave>::try_from(item) { + if !value.is_null() { + *value = sync_have; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the synchronization message value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMsyncMessage` struct pointer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_SYNC_MESSAGE` and +/// \p *value has been reassigned, `false` otherwise. 
+/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToSyncMessage( + item: *const AMitem, + value: *mut *const AMsyncMessage, +) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(sync_message) = <&AMsyncMessage>::try_from(item) { + if !value.is_null() { + *value = sync_message; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the synchronization state value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMsyncState` struct pointer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_SYNC_STATE` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToSyncState( + item: *mut AMitem, + value: *mut *mut AMsyncState, +) -> bool { + if let Some(item) = item.as_mut() { + if let Ok(sync_state) = <&mut AMsyncState>::try_from(item) { + if !value.is_null() { + *value = sync_state; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the *nix timestamp (milliseconds) value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a signed 64-bit integer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_TIMESTAMP` and +/// \p *value has been reassigned, `false` otherwise. 
+/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToTimestamp(item: *const AMitem, value: *mut i64) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(timestamp) = item.as_ref().try_into_timestamp() { + if !value.is_null() { + *value = timestamp; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the unsigned integer value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a unsigned 64-bit integer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_UINT` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToUint(item: *const AMitem, value: *mut u64) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(uint) = item.try_into() { + if !value.is_null() { + *value = uint; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the unknown type of value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMunknownValue` struct. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_UNKNOWN` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToUnknown(item: *const AMitem, value: *mut AMunknownValue) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(unknown) = item.try_into() { + if !value.is_null() { + *value = unknown; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the type of an item's value. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \return An `AMvalType` enum tag. 
+/// \pre \p item `!= NULL` +/// \post `(`\p item `== NULL) -> 0` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemValType(item: *const AMitem) -> AMvalType { + if let Some(item) = item.as_ref() { + return item.0.as_ref().into(); + } + Default::default() +} diff --git a/rust/automerge-c/src/items.rs b/rust/automerge-c/src/items.rs new file mode 100644 index 00000000..361078b3 --- /dev/null +++ b/rust/automerge-c/src/items.rs @@ -0,0 +1,401 @@ +use automerge as am; + +use std::ffi::c_void; +use std::marker::PhantomData; +use std::mem::size_of; + +use crate::item::AMitem; +use crate::result::AMresult; + +#[repr(C)] +struct Detail { + len: usize, + offset: isize, + ptr: *const c_void, +} + +/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call +/// (https://github.com/eqrion/cbindgen/issues/252) but it will +/// propagate the name of a constant initialized from it so if the +/// constant's name is a symbolic representation of the value it can be +/// converted into a number by post-processing the header it generated. +pub const USIZE_USIZE_USIZE_: usize = size_of::(); + +impl Detail { + fn new(items: &[AMitem], offset: isize) -> Self { + Self { + len: items.len(), + offset, + ptr: items.as_ptr() as *mut c_void, + } + } + + pub fn advance(&mut self, n: isize) { + if n == 0 { + return; + } + let len = self.len as isize; + self.offset = if self.offset < 0 { + // It's reversed. + let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); + if unclipped >= 0 { + // Clip it to the forward stop. + len + } else { + std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) + } + } else { + let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); + if unclipped < 0 { + // Clip it to the reverse stop. 
+ -(len + 1) + } else { + std::cmp::max(0, std::cmp::min(unclipped, len)) + } + } + } + + pub fn get_index(&self) -> usize { + (self.offset + + if self.offset < 0 { + self.len as isize + } else { + 0 + }) as usize + } + + pub fn next(&mut self, n: isize) -> Option<&mut AMitem> { + if self.is_stopped() { + return None; + } + let slice: &mut [AMitem] = + unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut AMitem, self.len) }; + let value = &mut slice[self.get_index()]; + self.advance(n); + Some(value) + } + + pub fn is_stopped(&self) -> bool { + let len = self.len as isize; + self.offset < -len || self.offset == len + } + + pub fn prev(&mut self, n: isize) -> Option<&mut AMitem> { + self.advance(-n); + if self.is_stopped() { + return None; + } + let slice: &mut [AMitem] = + unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut AMitem, self.len) }; + Some(&mut slice[self.get_index()]) + } + + pub fn reversed(&self) -> Self { + Self { + len: self.len, + offset: -(self.offset + 1), + ptr: self.ptr, + } + } + + pub fn rewound(&self) -> Self { + Self { + len: self.len, + offset: if self.offset < 0 { -1 } else { 0 }, + ptr: self.ptr, + } + } +} + +impl From for [u8; USIZE_USIZE_USIZE_] { + fn from(detail: Detail) -> Self { + unsafe { + std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) + .try_into() + .unwrap() + } + } +} + +/// \struct AMitems +/// \installed_headerfile +/// \brief A random-access iterator over a sequence of `AMitem` structs. +#[repr(C)] +#[derive(Eq, PartialEq)] +pub struct AMitems<'a> { + /// An implementation detail that is intentionally opaque. + /// \warning Modifying \p detail will cause undefined behavior. + /// \note The actual size of \p detail will vary by platform, this is just + /// the one for the platform this documentation was built on. 
+ detail: [u8; USIZE_USIZE_USIZE_], + phantom: PhantomData<&'a mut AMresult>, +} + +impl<'a> AMitems<'a> { + pub fn new(items: &[AMitem]) -> Self { + Self { + detail: Detail::new(items, 0).into(), + phantom: PhantomData, + } + } + + pub fn advance(&mut self, n: isize) { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.advance(n); + } + + pub fn len(&self) -> usize { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + detail.len + } + + pub fn next(&mut self, n: isize) -> Option<&mut AMitem> { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.next(n) + } + + pub fn prev(&mut self, n: isize) -> Option<&mut AMitem> { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.prev(n) + } + + pub fn reversed(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.reversed().into(), + phantom: PhantomData, + } + } + + pub fn rewound(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.rewound().into(), + phantom: PhantomData, + } + } +} + +impl<'a> AsRef<[AMitem]> for AMitems<'a> { + fn as_ref(&self) -> &[AMitem] { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + unsafe { std::slice::from_raw_parts(detail.ptr as *const AMitem, detail.len) } + } +} + +impl<'a> Default for AMitems<'a> { + fn default() -> Self { + Self { + detail: [0; USIZE_USIZE_USIZE_], + phantom: PhantomData, + } + } +} + +impl TryFrom<&AMitems<'_>> for Vec { + type Error = am::AutomergeError; + + fn try_from(items: &AMitems<'_>) -> Result { + let mut changes = Vec::::with_capacity(items.len()); + for item in items.as_ref().iter() { + match <&am::Change>::try_from(item.as_ref()) { + Ok(change) => { + changes.push(change.clone()); + } + Err(e) => { + return Err(e); + } + } + } + Ok(changes) + } +} + +impl TryFrom<&AMitems<'_>> for Vec { + type Error = 
am::AutomergeError; + + fn try_from(items: &AMitems<'_>) -> Result { + let mut change_hashes = Vec::::with_capacity(items.len()); + for item in items.as_ref().iter() { + match <&am::ChangeHash>::try_from(item.as_ref()) { + Ok(change_hash) => { + change_hashes.push(*change_hash); + } + Err(e) => { + return Err(e); + } + } + } + Ok(change_hashes) + } +} + +impl TryFrom<&AMitems<'_>> for Vec { + type Error = am::AutomergeError; + + fn try_from(items: &AMitems<'_>) -> Result { + let mut scalars = Vec::::with_capacity(items.len()); + for item in items.as_ref().iter() { + match <&am::ScalarValue>::try_from(item.as_ref()) { + Ok(scalar) => { + scalars.push(scalar.clone()); + } + Err(e) => { + return Err(e); + } + } + } + Ok(scalars) + } +} + +/// \memberof AMitems +/// \brief Advances an iterator over a sequence of object items by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction. +/// +/// \param[in] items A pointer to an `AMitems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \pre \p items `!= NULL` +/// \internal +/// +/// #Safety +/// items must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsAdvance(items: *mut AMitems, n: isize) { + if let Some(items) = items.as_mut() { + items.advance(n); + }; +} + +/// \memberof AMitems +/// \brief Tests the equality of two sequences of object items underlying a +/// pair of iterators. +/// +/// \param[in] items1 A pointer to an `AMitems` struct. +/// \param[in] items2 A pointer to an `AMitems` struct. +/// \return `true` if \p items1 `==` \p items2 and `false` otherwise. 
+/// \pre \p items1 `!= NULL` +/// \pre \p items1 `!= NULL` +/// \post `!(`\p items1 `&&` \p items2 `) -> false` +/// \internal +/// +/// #Safety +/// items1 must be a valid pointer to an AMitems +/// items2 must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsEqual(items1: *const AMitems, items2: *const AMitems) -> bool { + match (items1.as_ref(), items2.as_ref()) { + (Some(items1), Some(items2)) => items1.as_ref() == items2.as_ref(), + (None, None) | (None, Some(_)) | (Some(_), None) => false, + } +} + +/// \memberof AMitems +/// \brief Gets the object item at the current position of an iterator over a +/// sequence of object items and then advances it by at most \p |n| +/// positions where the sign of \p n is relative to the iterator's +/// direction. +/// +/// \param[in] items A pointer to an `AMitems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \return A pointer to an `AMitem` struct that's `NULL` when \p items +/// was previously advanced past its forward/reverse limit. +/// \pre \p items `!= NULL` +/// \internal +/// +/// #Safety +/// items must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsNext(items: *mut AMitems, n: isize) -> *mut AMitem { + if let Some(items) = items.as_mut() { + if let Some(item) = items.next(n) { + return item; + } + } + std::ptr::null_mut() +} + +/// \memberof AMitems +/// \brief Advances an iterator over a sequence of object items by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction and then gets the object item at its new +/// position. +/// +/// \param[in] items A pointer to an `AMitems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \return A pointer to an `AMitem` struct that's `NULL` when \p items +/// is presently advanced past its forward/reverse limit. 
+/// \pre \p items `!= NULL` +/// \internal +/// +/// #Safety +/// items must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsPrev(items: *mut AMitems, n: isize) -> *mut AMitem { + if let Some(items) = items.as_mut() { + if let Some(obj_item) = items.prev(n) { + return obj_item; + } + } + std::ptr::null_mut() +} + +/// \memberof AMitems +/// \brief Gets the size of the sequence underlying an iterator. +/// +/// \param[in] items A pointer to an `AMitems` struct. +/// \return The count of items in \p items. +/// \pre \p items `!= NULL` +/// \internal +/// +/// #Safety +/// items must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsSize(items: *const AMitems) -> usize { + if let Some(items) = items.as_ref() { + return items.len(); + } + 0 +} + +/// \memberof AMitems +/// \brief Creates an iterator over the same sequence of items as the +/// given one but with the opposite position and direction. +/// +/// \param[in] items A pointer to an `AMitems` struct. +/// \return An `AMitems` struct +/// \pre \p items `!= NULL` +/// \internal +/// +/// #Safety +/// items must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsReversed(items: *const AMitems) -> AMitems { + if let Some(items) = items.as_ref() { + return items.reversed(); + } + Default::default() +} + +/// \memberof AMitems +/// \brief Creates an iterator at the starting position over the same sequence +/// of items as the given one. +/// +/// \param[in] items A pointer to an `AMitems` struct. 
+/// \return An `AMitems` struct +/// \pre \p items `!= NULL` +/// \internal +/// +/// #Safety +/// items must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsRewound(items: *const AMitems) -> AMitems { + if let Some(items) = items.as_ref() { + return items.rewound(); + } + Default::default() +} diff --git a/rust/automerge-c/src/lib.rs b/rust/automerge-c/src/lib.rs index 6418bd33..1ee1a85d 100644 --- a/rust/automerge-c/src/lib.rs +++ b/rust/automerge-c/src/lib.rs @@ -1,11 +1,12 @@ mod actor_id; mod byte_span; mod change; -mod change_hashes; -mod changes; mod doc; +mod index; +mod item; +mod items; mod obj; mod result; -mod result_stack; -mod strs; mod sync; + +// include!(concat!(env!("OUT_DIR"), "/enum_string_functions.rs")); diff --git a/rust/automerge-c/src/obj.rs b/rust/automerge-c/src/obj.rs index 46ff617b..3d52286c 100644 --- a/rust/automerge-c/src/obj.rs +++ b/rust/automerge-c/src/obj.rs @@ -1,12 +1,10 @@ use automerge as am; +use std::any::type_name; use std::cell::RefCell; use std::ops::Deref; use crate::actor_id::AMactorId; -pub mod item; -pub mod items; - macro_rules! to_obj_id { ($handle:expr) => {{ match $handle.as_ref() { @@ -19,12 +17,11 @@ macro_rules! to_obj_id { pub(crate) use to_obj_id; macro_rules! to_obj_type { - ($am_obj_type:expr) => {{ - match $am_obj_type { - AMobjType::Map => am::ObjType::Map, - AMobjType::List => am::ObjType::List, - AMobjType::Text => am::ObjType::Text, - AMobjType::Void => return AMresult::err("Invalid AMobjType value").into(), + ($c_obj_type:expr) => {{ + let result: Result = (&$c_obj_type).try_into(); + match result { + Ok(obj_type) => obj_type, + Err(e) => return AMresult::error(&e.to_string()).into(), } }}; } @@ -79,11 +76,11 @@ impl Deref for AMobjId { } /// \memberof AMobjId -/// \brief Gets the actor identifier of an object identifier. +/// \brief Gets the actor identifier component of an object identifier. /// /// \param[in] obj_id A pointer to an `AMobjId` struct. 
/// \return A pointer to an `AMactorId` struct or `NULL`. -/// \pre \p obj_id `!= NULL`. +/// \pre \p obj_id `!= NULL` /// \internal /// /// # Safety @@ -97,11 +94,11 @@ pub unsafe extern "C" fn AMobjIdActorId(obj_id: *const AMobjId) -> *const AMacto } /// \memberof AMobjId -/// \brief Gets the counter of an object identifier. +/// \brief Gets the counter component of an object identifier. /// /// \param[in] obj_id A pointer to an `AMobjId` struct. /// \return A 64-bit unsigned integer. -/// \pre \p obj_id `!= NULL`. +/// \pre \p obj_id `!= NULL` /// \internal /// /// # Safety @@ -124,8 +121,9 @@ pub unsafe extern "C" fn AMobjIdCounter(obj_id: *const AMobjId) -> u64 { /// \param[in] obj_id1 A pointer to an `AMobjId` struct. /// \param[in] obj_id2 A pointer to an `AMobjId` struct. /// \return `true` if \p obj_id1 `==` \p obj_id2 and `false` otherwise. -/// \pre \p obj_id1 `!= NULL`. -/// \pre \p obj_id2 `!= NULL`. +/// \pre \p obj_id1 `!= NULL` +/// \pre \p obj_id1 `!= NULL` +/// \post `!(`\p obj_id1 `&&` \p obj_id2 `) -> false` /// \internal /// /// #Safety @@ -135,26 +133,28 @@ pub unsafe extern "C" fn AMobjIdCounter(obj_id: *const AMobjId) -> u64 { pub unsafe extern "C" fn AMobjIdEqual(obj_id1: *const AMobjId, obj_id2: *const AMobjId) -> bool { match (obj_id1.as_ref(), obj_id2.as_ref()) { (Some(obj_id1), Some(obj_id2)) => obj_id1 == obj_id2, - (None, Some(_)) | (Some(_), None) | (None, None) => false, + (None, None) | (None, Some(_)) | (Some(_), None) => false, } } /// \memberof AMobjId -/// \brief Gets the index of an object identifier. +/// \brief Gets the index component of an object identifier. /// /// \param[in] obj_id A pointer to an `AMobjId` struct. /// \return A 64-bit unsigned integer. -/// \pre \p obj_id `!= NULL`. 
+/// \pre \p obj_id `!= NULL` /// \internal /// /// # Safety /// obj_id must be a valid pointer to an AMobjId #[no_mangle] pub unsafe extern "C" fn AMobjIdIndex(obj_id: *const AMobjId) -> usize { + use am::ObjId::*; + if let Some(obj_id) = obj_id.as_ref() { match obj_id.as_ref() { - am::ObjId::Id(_, _, index) => *index, - am::ObjId::Root => 0, + Id(_, _, index) => *index, + Root => 0, } } else { usize::MAX @@ -163,26 +163,54 @@ pub unsafe extern "C" fn AMobjIdIndex(obj_id: *const AMobjId) -> usize { /// \ingroup enumerations /// \enum AMobjType +/// \installed_headerfile /// \brief The type of an object value. +#[derive(PartialEq, Eq)] #[repr(u8)] pub enum AMobjType { - /// A void. - /// \note This tag is unalphabetized to evaluate as false. - Void = 0, + /// The default tag, not a type signifier. + Default = 0, /// A list. - List, + List = 1, /// A key-value map. Map, /// A list of Unicode graphemes. Text, } -impl From for AMobjType { - fn from(o: am::ObjType) -> Self { +impl Default for AMobjType { + fn default() -> Self { + Self::Default + } +} + +impl From<&am::ObjType> for AMobjType { + fn from(o: &am::ObjType) -> Self { + use am::ObjType::*; + match o { - am::ObjType::Map | am::ObjType::Table => AMobjType::Map, - am::ObjType::List => AMobjType::List, - am::ObjType::Text => AMobjType::Text, + List => Self::List, + Map | Table => Self::Map, + Text => Self::Text, + } + } +} + +impl TryFrom<&AMobjType> for am::ObjType { + type Error = am::AutomergeError; + + fn try_from(c_obj_type: &AMobjType) -> Result { + use am::AutomergeError::InvalidValueType; + use AMobjType::*; + + match c_obj_type { + List => Ok(Self::List), + Map => Ok(Self::Map), + Text => Ok(Self::Text), + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), } } } diff --git a/rust/automerge-c/src/obj/item.rs b/rust/automerge-c/src/obj/item.rs deleted file mode 100644 index a2e99d06..00000000 --- a/rust/automerge-c/src/obj/item.rs +++ 
/dev/null @@ -1,73 +0,0 @@ -use automerge as am; - -use crate::obj::AMobjId; -use crate::result::AMvalue; - -/// \struct AMobjItem -/// \installed_headerfile -/// \brief An item in an object. -pub struct AMobjItem { - /// The object identifier of an item in an object. - obj_id: AMobjId, - /// The value of an item in an object. - value: am::Value<'static>, -} - -impl AMobjItem { - pub fn new(value: am::Value<'static>, obj_id: am::ObjId) -> Self { - Self { - obj_id: AMobjId::new(obj_id), - value, - } - } -} - -impl PartialEq for AMobjItem { - fn eq(&self, other: &Self) -> bool { - self.obj_id == other.obj_id && self.value == other.value - } -} - -impl From<&AMobjItem> for (am::Value<'static>, am::ObjId) { - fn from(obj_item: &AMobjItem) -> Self { - (obj_item.value.clone(), obj_item.obj_id.as_ref().clone()) - } -} - -/// \memberof AMobjItem -/// \brief Gets the object identifier of an item in an object. -/// -/// \param[in] obj_item A pointer to an `AMobjItem` struct. -/// \return A pointer to an `AMobjId` struct. -/// \pre \p obj_item `!= NULL`. -/// \internal -/// -/// # Safety -/// obj_item must be a valid pointer to an AMobjItem -#[no_mangle] -pub unsafe extern "C" fn AMobjItemObjId(obj_item: *const AMobjItem) -> *const AMobjId { - if let Some(obj_item) = obj_item.as_ref() { - &obj_item.obj_id - } else { - std::ptr::null() - } -} - -/// \memberof AMobjItem -/// \brief Gets the value of an item in an object. -/// -/// \param[in] obj_item A pointer to an `AMobjItem` struct. -/// \return An `AMvalue` struct. -/// \pre \p obj_item `!= NULL`. 
-/// \internal -/// -/// # Safety -/// obj_item must be a valid pointer to an AMobjItem -#[no_mangle] -pub unsafe extern "C" fn AMobjItemValue<'a>(obj_item: *const AMobjItem) -> AMvalue<'a> { - if let Some(obj_item) = obj_item.as_ref() { - (&obj_item.value).into() - } else { - AMvalue::Void - } -} diff --git a/rust/automerge-c/src/obj/items.rs b/rust/automerge-c/src/obj/items.rs deleted file mode 100644 index d6b847cf..00000000 --- a/rust/automerge-c/src/obj/items.rs +++ /dev/null @@ -1,341 +0,0 @@ -use std::ffi::c_void; -use std::mem::size_of; - -use crate::obj::item::AMobjItem; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new(obj_items: &[AMobjItem], offset: isize) -> Self { - Self { - len: obj_items.len(), - offset, - ptr: obj_items.as_ptr() as *const c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. 
- -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<&AMobjItem> { - if self.is_stopped() { - return None; - } - let slice: &[AMobjItem] = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMobjItem, self.len) }; - let value = &slice[self.get_index()]; - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<&AMobjItem> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &[AMobjItem] = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMobjItem, self.len) }; - Some(&slice[self.get_index()]) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) - .try_into() - .unwrap() - } - } -} - -/// \struct AMobjItems -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of object items. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMobjItems { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. 
- detail: [u8; USIZE_USIZE_USIZE_], -} - -impl AMobjItems { - pub fn new(obj_items: &[AMobjItem]) -> Self { - Self { - detail: Detail::new(obj_items, 0).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<&AMobjItem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<&AMobjItem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[AMobjItem]> for AMobjItems { - fn as_ref(&self) -> &[AMobjItem] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const AMobjItem, detail.len) } - } -} - -impl Default for AMobjItems { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMobjItems -/// \brief Advances an iterator over a sequence of object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] obj_items A pointer to an `AMobjItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p obj_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// obj_items must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsAdvance(obj_items: *mut AMobjItems, n: isize) { - if let Some(obj_items) = obj_items.as_mut() { - obj_items.advance(n); - }; -} - -/// \memberof AMobjItems -/// \brief Tests the equality of two sequences of object items underlying a -/// pair of iterators. -/// -/// \param[in] obj_items1 A pointer to an `AMobjItems` struct. -/// \param[in] obj_items2 A pointer to an `AMobjItems` struct. -/// \return `true` if \p obj_items1 `==` \p obj_items2 and `false` otherwise. -/// \pre \p obj_items1 `!= NULL`. -/// \pre \p obj_items2 `!= NULL`. -/// \internal -/// -/// #Safety -/// obj_items1 must be a valid pointer to an AMobjItems -/// obj_items2 must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsEqual( - obj_items1: *const AMobjItems, - obj_items2: *const AMobjItems, -) -> bool { - match (obj_items1.as_ref(), obj_items2.as_ref()) { - (Some(obj_items1), Some(obj_items2)) => obj_items1.as_ref() == obj_items2.as_ref(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, - } -} - -/// \memberof AMobjItems -/// \brief Gets the object item at the current position of an iterator over a -/// sequence of object items and then advances it by at most \p |n| -/// positions where the sign of \p n is relative to the iterator's -/// direction. -/// -/// \param[in,out] obj_items A pointer to an `AMobjItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMobjItem` struct that's `NULL` when \p obj_items -/// was previously advanced past its forward/reverse limit. -/// \pre \p obj_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// obj_items must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsNext(obj_items: *mut AMobjItems, n: isize) -> *const AMobjItem { - if let Some(obj_items) = obj_items.as_mut() { - if let Some(obj_item) = obj_items.next(n) { - return obj_item; - } - } - std::ptr::null() -} - -/// \memberof AMobjItems -/// \brief Advances an iterator over a sequence of object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction and then gets the object item at its new -/// position. -/// -/// \param[in,out] obj_items A pointer to an `AMobjItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMobjItem` struct that's `NULL` when \p obj_items -/// is presently advanced past its forward/reverse limit. -/// \pre \p obj_items `!= NULL`. -/// \internal -/// -/// #Safety -/// obj_items must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsPrev(obj_items: *mut AMobjItems, n: isize) -> *const AMobjItem { - if let Some(obj_items) = obj_items.as_mut() { - if let Some(obj_item) = obj_items.prev(n) { - return obj_item; - } - } - std::ptr::null() -} - -/// \memberof AMobjItems -/// \brief Gets the size of the sequence of object items underlying an -/// iterator. -/// -/// \param[in] obj_items A pointer to an `AMobjItems` struct. -/// \return The count of values in \p obj_items. -/// \pre \p obj_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// obj_items must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsSize(obj_items: *const AMobjItems) -> usize { - if let Some(obj_items) = obj_items.as_ref() { - obj_items.len() - } else { - 0 - } -} - -/// \memberof AMobjItems -/// \brief Creates an iterator over the same sequence of object items as the -/// given one but with the opposite position and direction. -/// -/// \param[in] obj_items A pointer to an `AMobjItems` struct. -/// \return An `AMobjItems` struct -/// \pre \p obj_items `!= NULL`. -/// \internal -/// -/// #Safety -/// obj_items must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsReversed(obj_items: *const AMobjItems) -> AMobjItems { - if let Some(obj_items) = obj_items.as_ref() { - obj_items.reversed() - } else { - Default::default() - } -} - -/// \memberof AMobjItems -/// \brief Creates an iterator at the starting position over the same sequence -/// of object items as the given one. -/// -/// \param[in] obj_items A pointer to an `AMobjItems` struct. -/// \return An `AMobjItems` struct -/// \pre \p obj_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// obj_items must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsRewound(obj_items: *const AMobjItems) -> AMobjItems { - if let Some(obj_items) = obj_items.as_ref() { - obj_items.rewound() - } else { - Default::default() - } -} diff --git a/rust/automerge-c/src/result.rs b/rust/automerge-c/src/result.rs index 599ada96..2975f38b 100644 --- a/rust/automerge-c/src/result.rs +++ b/rust/automerge-c/src/result.rs @@ -1,513 +1,85 @@ use automerge as am; -use smol_str::SmolStr; -use std::any::type_name; -use std::collections::BTreeMap; use std::ops::{Range, RangeFrom, RangeFull, RangeTo}; -use crate::actor_id::AMactorId; use crate::byte_span::AMbyteSpan; -use crate::change::AMchange; -use crate::change_hashes::AMchangeHashes; -use crate::changes::AMchanges; -use crate::doc::list::{item::AMlistItem, items::AMlistItems}; -use crate::doc::map::{item::AMmapItem, items::AMmapItems}; -use crate::doc::AMdoc; -use crate::obj::item::AMobjItem; -use crate::obj::items::AMobjItems; -use crate::obj::AMobjId; -use crate::strs::AMstrs; -use crate::sync::{AMsyncMessage, AMsyncState}; - -/// \struct AMvalue -/// \installed_headerfile -/// \brief A discriminated union of value type variants for a result. -/// -/// \enum AMvalueVariant -/// \brief A value type discriminant. -/// -/// \var AMvalue::actor_id -/// An actor identifier as a pointer to an `AMactorId` struct. -/// -/// \var AMvalue::boolean -/// A boolean. -/// -/// \var AMvalue::bytes -/// A sequence of bytes as an `AMbyteSpan` struct. -/// -/// \var AMvalue::change_hashes -/// A sequence of change hashes as an `AMchangeHashes` struct. -/// -/// \var AMvalue::changes -/// A sequence of changes as an `AMchanges` struct. -/// -/// \var AMvalue::counter -/// A CRDT counter. -/// -/// \var AMvalue::doc -/// A document as a pointer to an `AMdoc` struct. -/// -/// \var AMvalue::f64 -/// A 64-bit float. -/// -/// \var AMvalue::int_ -/// A 64-bit signed integer. 
-/// -/// \var AMvalue::list_items -/// A sequence of list object items as an `AMlistItems` struct. -/// -/// \var AMvalue::map_items -/// A sequence of map object items as an `AMmapItems` struct. -/// -/// \var AMvalue::obj_id -/// An object identifier as a pointer to an `AMobjId` struct. -/// -/// \var AMvalue::obj_items -/// A sequence of object items as an `AMobjItems` struct. -/// -/// \var AMvalue::str -/// A UTF-8 string view as an `AMbyteSpan` struct. -/// -/// \var AMvalue::strs -/// A sequence of UTF-8 strings as an `AMstrs` struct. -/// -/// \var AMvalue::sync_message -/// A synchronization message as a pointer to an `AMsyncMessage` struct. -/// -/// \var AMvalue::sync_state -/// A synchronization state as a pointer to an `AMsyncState` struct. -/// -/// \var AMvalue::tag -/// The variant discriminator. -/// -/// \var AMvalue::timestamp -/// A *nix timestamp (milliseconds). -/// -/// \var AMvalue::uint -/// A 64-bit unsigned integer. -/// -/// \var AMvalue::unknown -/// A value of unknown type as an `AMunknownValue` struct. -#[repr(u8)] -pub enum AMvalue<'a> { - /// A void variant. - /// \note This tag is unalphabetized so that a zeroed struct will have it. - Void, - /// An actor identifier variant. - ActorId(&'a AMactorId), - /// A boolean variant. - Boolean(bool), - /// A byte array variant. - Bytes(AMbyteSpan), - /// A change hashes variant. - ChangeHashes(AMchangeHashes), - /// A changes variant. - Changes(AMchanges), - /// A CRDT counter variant. - Counter(i64), - /// A document variant. - Doc(*mut AMdoc), - /// A 64-bit float variant. - F64(f64), - /// A 64-bit signed integer variant. - Int(i64), - /// A list items variant. - ListItems(AMlistItems), - /// A map items variant. - MapItems(AMmapItems), - /// A null variant. - Null, - /// An object identifier variant. - ObjId(&'a AMobjId), - /// An object items variant. - ObjItems(AMobjItems), - /// A UTF-8 string view variant. - Str(AMbyteSpan), - /// A UTF-8 string views variant. 
- Strs(AMstrs), - /// A synchronization message variant. - SyncMessage(&'a AMsyncMessage), - /// A synchronization state variant. - SyncState(&'a mut AMsyncState), - /// A *nix timestamp (milliseconds) variant. - Timestamp(i64), - /// A 64-bit unsigned integer variant. - Uint(u64), - /// An unknown type of scalar value variant. - Unknown(AMunknownValue), -} - -impl<'a> PartialEq for AMvalue<'a> { - fn eq(&self, other: &Self) -> bool { - use AMvalue::*; - - match (self, other) { - (ActorId(lhs), ActorId(rhs)) => *lhs == *rhs, - (Boolean(lhs), Boolean(rhs)) => lhs == rhs, - (Bytes(lhs), Bytes(rhs)) => lhs == rhs, - (ChangeHashes(lhs), ChangeHashes(rhs)) => lhs == rhs, - (Changes(lhs), Changes(rhs)) => lhs == rhs, - (Counter(lhs), Counter(rhs)) => lhs == rhs, - (Doc(lhs), Doc(rhs)) => *lhs == *rhs, - (F64(lhs), F64(rhs)) => lhs == rhs, - (Int(lhs), Int(rhs)) => lhs == rhs, - (ListItems(lhs), ListItems(rhs)) => lhs == rhs, - (MapItems(lhs), MapItems(rhs)) => lhs == rhs, - (ObjId(lhs), ObjId(rhs)) => *lhs == *rhs, - (ObjItems(lhs), ObjItems(rhs)) => lhs == rhs, - (Str(lhs), Str(rhs)) => lhs == rhs, - (Strs(lhs), Strs(rhs)) => lhs == rhs, - (SyncMessage(lhs), SyncMessage(rhs)) => *lhs == *rhs, - (SyncState(lhs), SyncState(rhs)) => *lhs == *rhs, - (Timestamp(lhs), Timestamp(rhs)) => lhs == rhs, - (Uint(lhs), Uint(rhs)) => lhs == rhs, - (Unknown(lhs), Unknown(rhs)) => lhs == rhs, - (Null, Null) | (Void, Void) => true, - _ => false, - } - } -} - -impl From<&am::Value<'_>> for AMvalue<'_> { - fn from(value: &am::Value<'_>) -> Self { - match value { - am::Value::Scalar(scalar) => match scalar.as_ref() { - am::ScalarValue::Boolean(flag) => AMvalue::Boolean(*flag), - am::ScalarValue::Bytes(bytes) => AMvalue::Bytes(bytes.as_slice().into()), - am::ScalarValue::Counter(counter) => AMvalue::Counter(counter.into()), - am::ScalarValue::F64(float) => AMvalue::F64(*float), - am::ScalarValue::Int(int) => AMvalue::Int(*int), - am::ScalarValue::Null => AMvalue::Null, - 
am::ScalarValue::Str(smol_str) => AMvalue::Str(smol_str.as_bytes().into()), - am::ScalarValue::Timestamp(timestamp) => AMvalue::Timestamp(*timestamp), - am::ScalarValue::Uint(uint) => AMvalue::Uint(*uint), - am::ScalarValue::Unknown { bytes, type_code } => AMvalue::Unknown(AMunknownValue { - bytes: bytes.as_slice().into(), - type_code: *type_code, - }), - }, - // \todo Confirm that an object variant should be ignored - // when there's no object ID variant. - am::Value::Object(_) => AMvalue::Void, - } - } -} - -impl From<&AMvalue<'_>> for u8 { - fn from(value: &AMvalue) -> Self { - use AMvalue::*; - - // \warning These numbers must correspond to the order in which the - // variants of an AMvalue are declared within it. - match value { - ActorId(_) => 1, - Boolean(_) => 2, - Bytes(_) => 3, - ChangeHashes(_) => 4, - Changes(_) => 5, - Counter(_) => 6, - Doc(_) => 7, - F64(_) => 8, - Int(_) => 9, - ListItems(_) => 10, - MapItems(_) => 11, - Null => 12, - ObjId(_) => 13, - ObjItems(_) => 14, - Str(_) => 15, - Strs(_) => 16, - SyncMessage(_) => 17, - SyncState(_) => 18, - Timestamp(_) => 19, - Uint(_) => 20, - Unknown(..) 
=> 21, - Void => 0, - } - } -} - -impl TryFrom<&AMvalue<'_>> for am::ScalarValue { - type Error = am::AutomergeError; - - fn try_from(c_value: &AMvalue) -> Result { - use am::AutomergeError::InvalidValueType; - use AMvalue::*; - - let expected = type_name::().to_string(); - match c_value { - Boolean(b) => Ok(am::ScalarValue::Boolean(*b)), - Bytes(span) => { - let slice = unsafe { std::slice::from_raw_parts(span.src, span.count) }; - Ok(am::ScalarValue::Bytes(slice.to_vec())) - } - Counter(c) => Ok(am::ScalarValue::Counter(c.into())), - F64(f) => Ok(am::ScalarValue::F64(*f)), - Int(i) => Ok(am::ScalarValue::Int(*i)), - Str(span) => { - let result: Result<&str, am::AutomergeError> = span.try_into(); - match result { - Ok(str_) => Ok(am::ScalarValue::Str(SmolStr::new(str_))), - Err(e) => Err(e), - } - } - Timestamp(t) => Ok(am::ScalarValue::Timestamp(*t)), - Uint(u) => Ok(am::ScalarValue::Uint(*u)), - Null => Ok(am::ScalarValue::Null), - Unknown(AMunknownValue { bytes, type_code }) => { - let slice = unsafe { std::slice::from_raw_parts(bytes.src, bytes.count) }; - Ok(am::ScalarValue::Unknown { - bytes: slice.to_vec(), - type_code: *type_code, - }) - } - ActorId(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - ChangeHashes(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - Changes(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - Doc(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - ListItems(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - MapItems(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - ObjId(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - ObjItems(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - Strs(_) => Err(InvalidValueType { - 
expected, - unexpected: type_name::().to_string(), - }), - SyncMessage(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - SyncState(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - Void => Err(InvalidValueType { - expected, - unexpected: type_name::<()>().to_string(), - }), - } - } -} - -/// \memberof AMvalue -/// \brief Tests the equality of two values. -/// -/// \param[in] value1 A pointer to an `AMvalue` struct. -/// \param[in] value2 A pointer to an `AMvalue` struct. -/// \return `true` if \p value1 `==` \p value2 and `false` otherwise. -/// \pre \p value1 `!= NULL`. -/// \pre \p value2 `!= NULL`. -/// \internal -/// -/// #Safety -/// value1 must be a valid AMvalue pointer -/// value2 must be a valid AMvalue pointer -#[no_mangle] -pub unsafe extern "C" fn AMvalueEqual(value1: *const AMvalue, value2: *const AMvalue) -> bool { - match (value1.as_ref(), value2.as_ref()) { - (Some(value1), Some(value2)) => *value1 == *value2, - (None, Some(_)) | (Some(_), None) | (None, None) => false, - } -} +use crate::index::AMindex; +use crate::item::AMitem; +use crate::items::AMitems; /// \struct AMresult /// \installed_headerfile /// \brief A discriminated union of result variants. 
pub enum AMresult { - ActorId(am::ActorId, Option), - ChangeHashes(Vec), - Changes(Vec, Option>), - Doc(Box), + Items(Vec), Error(String), - ListItems(Vec), - MapItems(Vec), - ObjId(AMobjId), - ObjItems(Vec), - String(String), - Strings(Vec), - SyncMessage(AMsyncMessage), - SyncState(Box), - Value(am::Value<'static>), - Void, } impl AMresult { - pub(crate) fn err(s: &str) -> Self { - AMresult::Error(s.to_string()) + pub(crate) fn error(s: &str) -> Self { + Self::Error(s.to_string()) + } + + pub(crate) fn item(item: AMitem) -> Self { + Self::Items(vec![item]) + } + + pub(crate) fn items(items: Vec) -> Self { + Self::Items(items) + } +} + +impl Default for AMresult { + fn default() -> Self { + Self::Items(vec![]) } } impl From for AMresult { fn from(auto_commit: am::AutoCommit) -> Self { - AMresult::Doc(Box::new(AMdoc::new(auto_commit))) + Self::item(AMitem::exact(am::ROOT, auto_commit.into())) + } +} + +impl From for AMresult { + fn from(change: am::Change) -> Self { + Self::item(change.into()) } } impl From for AMresult { fn from(change_hash: am::ChangeHash) -> Self { - AMresult::ChangeHashes(vec![change_hash]) + Self::item(change_hash.into()) } } impl From> for AMresult { - fn from(c: Option) -> Self { - match c { - Some(c) => c.into(), - None => AMresult::Void, + fn from(maybe: Option) -> Self { + match maybe { + Some(change_hash) => change_hash.into(), + None => Self::item(Default::default()), } } } -impl From> for AMresult { - fn from(keys: am::Keys<'_, '_>) -> Self { - AMresult::Strings(keys.collect()) - } -} - -impl From> for AMresult { - fn from(keys: am::KeysAt<'_, '_>) -> Self { - AMresult::Strings(keys.collect()) - } -} - -impl From>> for AMresult { - fn from(list_range: am::ListRange<'static, Range>) -> Self { - AMresult::ListItems( - list_range - .map(|(i, v, o)| AMlistItem::new(i, v.clone(), o)) - .collect(), - ) - } -} - -impl From>> for AMresult { - fn from(list_range: am::ListRangeAt<'static, Range>) -> Self { - AMresult::ListItems( - list_range - 
.map(|(i, v, o)| AMlistItem::new(i, v.clone(), o)) - .collect(), - ) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRange<'static, Range>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRangeAt<'static, Range>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRange<'static, RangeFrom>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRangeAt<'static, RangeFrom>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From> for AMresult { - fn from(map_range: am::MapRange<'static, RangeFull>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From> for AMresult { - fn from(map_range: am::MapRangeAt<'static, RangeFull>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRange<'static, RangeTo>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From>> for AMresult { - fn 
from(map_range: am::MapRangeAt<'static, RangeTo>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(change_hash) => change_hash.into(), + Err(e) => Self::error(&e.to_string()), + } } } impl From for AMresult { fn from(state: am::sync::State) -> Self { - AMresult::SyncState(Box::new(AMsyncState::new(state))) + Self::item(state.into()) } } impl From> for AMresult { fn from(pairs: am::Values<'static>) -> Self { - AMresult::ObjItems(pairs.map(|(v, o)| AMobjItem::new(v.clone(), o)).collect()) - } -} - -impl From, am::ObjId)>, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { - match maybe { - Ok(pairs) => AMresult::ObjItems( - pairs - .into_iter() - .map(|(v, o)| AMobjItem::new(v, o)) - .collect(), - ), - Err(e) => AMresult::err(&e.to_string()), - } + Self::items(pairs.map(|(v, o)| AMitem::exact(o, v.into())).collect()) } } @@ -517,37 +89,150 @@ impl From for *mut AMresult { } } +impl From> for AMresult { + fn from(keys: am::Keys<'_, '_>) -> Self { + Self::items(keys.map(|s| s.into()).collect()) + } +} + +impl From> for AMresult { + fn from(keys: am::KeysAt<'_, '_>) -> Self { + Self::items(keys.map(|s| s.into()).collect()) + } +} + +impl From>> for AMresult { + fn from(list_range: am::ListRange<'static, Range>) -> Self { + Self::items( + list_range + .map(|(i, v, o)| AMitem::indexed(AMindex::Pos(i), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(list_range: am::ListRangeAt<'static, Range>) -> Self { + Self::items( + list_range + .map(|(i, v, o)| AMitem::indexed(AMindex::Pos(i), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRange<'static, Range>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| 
AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRangeAt<'static, Range>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRange<'static, RangeFrom>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRangeAt<'static, RangeFrom>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From> for AMresult { + fn from(map_range: am::MapRange<'static, RangeFull>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From> for AMresult { + fn from(map_range: am::MapRangeAt<'static, RangeFull>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRange<'static, RangeTo>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRangeAt<'static, RangeTo>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + impl From> for AMresult { fn from(maybe: Option<&am::Change>) -> Self { - match maybe { - Some(change) => AMresult::Changes(vec![change.clone()], None), - None => AMresult::Void, - } + Self::item(match maybe { + Some(change) => change.clone().into(), + None => Default::default(), + }) } } impl From> for AMresult { fn from(maybe: Option) -> Self { - match maybe { - Some(message) 
=> AMresult::SyncMessage(AMsyncMessage::new(message)), - None => AMresult::Void, - } + Self::item(match maybe { + Some(message) => message.into(), + None => Default::default(), + }) } } impl From> for AMresult { fn from(maybe: Result<(), am::AutomergeError>) -> Self { match maybe { - Ok(()) => AMresult::Void, - Err(e) => AMresult::err(&e.to_string()), + Ok(()) => Self::item(Default::default()), + Err(e) => Self::error(&e.to_string()), } } } + impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(actor_id) => AMresult::ActorId(actor_id, None), - Err(e) => AMresult::err(&e.to_string()), + Ok(actor_id) => Self::item(actor_id.into()), + Err(e) => Self::error(&e.to_string()), } } } @@ -555,8 +240,8 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(actor_id) => AMresult::ActorId(actor_id, None), - Err(e) => AMresult::err(&e.to_string()), + Ok(actor_id) => Self::item(actor_id.into()), + Err(e) => Self::error(&e.to_string()), } } } @@ -564,8 +249,8 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(auto_commit) => AMresult::Doc(Box::new(AMdoc::new(auto_commit))), - Err(e) => AMresult::err(&e.to_string()), + Ok(auto_commit) => Self::item(auto_commit.into()), + Err(e) => Self::error(&e.to_string()), } } } @@ -573,17 +258,17 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(change) => AMresult::Changes(vec![change], None), - Err(e) => AMresult::err(&e.to_string()), + Ok(change) => Self::item(change.into()), + Err(e) => Self::error(&e.to_string()), } } } -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(obj_id) => AMresult::ObjId(AMobjId::new(obj_id)), - Err(e) => AMresult::err(&e.to_string()), +impl From<(Result, am::ObjType)> for AMresult { + fn from(tuple: (Result, am::ObjType)) -> Self { + match tuple { + (Ok(obj_id), obj_type) => Self::item((obj_id, 
obj_type).into()), + (Err(e), _) => Self::error(&e.to_string()), } } } @@ -591,8 +276,8 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(message) => AMresult::SyncMessage(AMsyncMessage::new(message)), - Err(e) => AMresult::err(&e.to_string()), + Ok(message) => Self::item(message.into()), + Err(e) => Self::error(&e.to_string()), } } } @@ -600,8 +285,8 @@ impl From> for AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(state) => AMresult::SyncState(Box::new(AMsyncState::new(state))), - Err(e) => AMresult::err(&e.to_string()), + Ok(state) => Self::item(state.into()), + Err(e) => Self::error(&e.to_string()), } } } @@ -609,8 +294,8 @@ impl From> for AMresult { impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(value) => AMresult::Value(value), - Err(e) => AMresult::err(&e.to_string()), + Ok(value) => Self::item(value.into()), + Err(e) => Self::error(&e.to_string()), } } } @@ -618,12 +303,9 @@ impl From, am::AutomergeError>> for AMresult { impl From, am::ObjId)>, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { match maybe { - Ok(Some((value, obj_id))) => match value { - am::Value::Object(_) => AMresult::ObjId(AMobjId::new(obj_id)), - _ => AMresult::Value(value), - }, - Ok(None) => AMresult::Void, - Err(e) => AMresult::err(&e.to_string()), + Ok(Some((value, obj_id))) => Self::item(AMitem::exact(obj_id, value.into())), + Ok(None) => Self::item(Default::default()), + Err(e) => Self::error(&e.to_string()), } } } @@ -631,8 +313,8 @@ impl From, am::ObjId)>, am::AutomergeError>> f impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(string) => AMresult::String(string), - Err(e) => AMresult::err(&e.to_string()), + Ok(string) => Self::item(string.into()), + Err(e) => Self::error(&e.to_string()), } } } @@ -640,8 +322,8 @@ impl From> for 
AMresult { impl From> for AMresult { fn from(maybe: Result) -> Self { match maybe { - Ok(size) => AMresult::Value(am::Value::uint(size as u64)), - Err(e) => AMresult::err(&e.to_string()), + Ok(size) => Self::item(am::Value::uint(size as u64).into()), + Err(e) => Self::error(&e.to_string()), } } } @@ -649,17 +331,8 @@ impl From> for AMresult { impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(changes) => AMresult::Changes(changes, None), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From, am::LoadChangeError>> for AMresult { - fn from(maybe: Result, am::LoadChangeError>) -> Self { - match maybe { - Ok(changes) => AMresult::Changes(changes, None), - Err(e) => AMresult::err(&e.to_string()), + Ok(changes) => Self::items(changes.into_iter().map(|change| change.into()).collect()), + Err(e) => Self::error(&e.to_string()), } } } @@ -667,12 +340,22 @@ impl From, am::LoadChangeError>> for AMresult { impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(changes) => { - let changes: Vec = - changes.iter().map(|&change| change.clone()).collect(); - AMresult::Changes(changes, None) - } - Err(e) => AMresult::err(&e.to_string()), + Ok(changes) => Self::items( + changes + .into_iter() + .map(|change| change.clone().into()) + .collect(), + ), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From, am::LoadChangeError>> for AMresult { + fn from(maybe: Result, am::LoadChangeError>) -> Self { + match maybe { + Ok(changes) => Self::items(changes.into_iter().map(|change| change.into()).collect()), + Err(e) => Self::error(&e.to_string()), } } } @@ -680,8 +363,13 @@ impl From, am::AutomergeError>> for AMresult { impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(change_hashes) => AMresult::ChangeHashes(change_hashes), - Err(e) => AMresult::err(&e.to_string()), + 
Ok(change_hashes) => Self::items( + change_hashes + .into_iter() + .map(|change_hash| change_hash.into()) + .collect(), + ), + Err(e) => Self::error(&e.to_string()), } } } @@ -689,8 +377,27 @@ impl From, am::AutomergeError>> for AMresult { impl From, am::InvalidChangeHashSlice>> for AMresult { fn from(maybe: Result, am::InvalidChangeHashSlice>) -> Self { match maybe { - Ok(change_hashes) => AMresult::ChangeHashes(change_hashes), - Err(e) => AMresult::err(&e.to_string()), + Ok(change_hashes) => Self::items( + change_hashes + .into_iter() + .map(|change_hash| change_hash.into()) + .collect(), + ), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From, am::ObjId)>, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { + match maybe { + Ok(pairs) => Self::items( + pairs + .into_iter() + .map(|(v, o)| AMitem::exact(o, v.into())) + .collect(), + ), + Err(e) => Self::error(&e.to_string()), } } } @@ -698,28 +405,66 @@ impl From, am::InvalidChangeHashSlice>> for AMresult impl From, am::AutomergeError>> for AMresult { fn from(maybe: Result, am::AutomergeError>) -> Self { match maybe { - Ok(bytes) => AMresult::Value(am::Value::bytes(bytes)), - Err(e) => AMresult::err(&e.to_string()), + Ok(bytes) => Self::item(am::Value::bytes(bytes).into()), + Err(e) => Self::error(&e.to_string()), } } } +impl From<&[am::Change]> for AMresult { + fn from(changes: &[am::Change]) -> Self { + Self::items(changes.iter().map(|change| change.clone().into()).collect()) + } +} + impl From> for AMresult { fn from(changes: Vec<&am::Change>) -> Self { - let changes: Vec = changes.iter().map(|&change| change.clone()).collect(); - AMresult::Changes(changes, None) + Self::items( + changes + .into_iter() + .map(|change| change.clone().into()) + .collect(), + ) + } +} + +impl From<&[am::ChangeHash]> for AMresult { + fn from(change_hashes: &[am::ChangeHash]) -> Self { + Self::items( + change_hashes + .iter() + .map(|change_hash| 
(*change_hash).into()) + .collect(), + ) + } +} + +impl From<&[am::sync::Have]> for AMresult { + fn from(haves: &[am::sync::Have]) -> Self { + Self::items(haves.iter().map(|have| have.clone().into()).collect()) } } impl From> for AMresult { fn from(change_hashes: Vec) -> Self { - AMresult::ChangeHashes(change_hashes) + Self::items( + change_hashes + .into_iter() + .map(|change_hash| change_hash.into()) + .collect(), + ) + } +} + +impl From> for AMresult { + fn from(haves: Vec) -> Self { + Self::items(haves.into_iter().map(|have| have.into()).collect()) } } impl From> for AMresult { fn from(bytes: Vec) -> Self { - AMresult::Value(am::Value::bytes(bytes)) + Self::item(am::Value::bytes(bytes).into()) } } @@ -729,8 +474,9 @@ pub fn to_result>(r: R) -> *mut AMresult { /// \ingroup enumerations /// \enum AMstatus +/// \installed_headerfile /// \brief The status of an API call. -#[derive(Debug)] +#[derive(PartialEq, Eq)] #[repr(u8)] pub enum AMstatus { /// Success. @@ -742,35 +488,80 @@ pub enum AMstatus { InvalidResult, } +/// \memberof AMresult +/// \brief Concatenates the items from two results. +/// +/// \param[in] dest A pointer to an `AMresult` struct. +/// \param[in] src A pointer to an `AMresult` struct. +/// \return A pointer to an `AMresult` struct with the items from \p dest in +/// their original order followed by the items from \p src in their +/// original order. +/// \pre \p dest `!= NULL` +/// \pre \p src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// dest must be a valid pointer to an AMresult +/// src must be a valid pointer to an AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultCat(dest: *const AMresult, src: *const AMresult) -> *mut AMresult { + use AMresult::*; + + match (dest.as_ref(), src.as_ref()) { + (Some(dest), Some(src)) => match (dest, src) { + (Items(dest_items), Items(src_items)) => { + return AMresult::items( + dest_items + .iter() + .cloned() + .chain(src_items.iter().cloned()) + .collect(), + ) + .into(); + } + (Error(_), Error(_)) | (Error(_), Items(_)) | (Items(_), Error(_)) => { + AMresult::error("Invalid `AMresult`").into() + } + }, + (None, None) | (None, Some(_)) | (Some(_), None) => { + AMresult::error("Invalid `AMresult*`").into() + } + } +} + /// \memberof AMresult /// \brief Gets a result's error message string. /// /// \param[in] result A pointer to an `AMresult` struct. /// \return A UTF-8 string view as an `AMbyteSpan` struct. -/// \pre \p result `!= NULL`. +/// \pre \p result `!= NULL` /// \internal /// /// # Safety /// result must be a valid pointer to an AMresult #[no_mangle] -pub unsafe extern "C" fn AMerrorMessage(result: *const AMresult) -> AMbyteSpan { - match result.as_ref() { - Some(AMresult::Error(s)) => s.as_bytes().into(), - _ => Default::default(), +pub unsafe extern "C" fn AMresultError(result: *const AMresult) -> AMbyteSpan { + use AMresult::*; + + if let Some(Error(message)) = result.as_ref() { + return message.as_bytes().into(); } + Default::default() } /// \memberof AMresult /// \brief Deallocates the storage for a result. /// -/// \param[in,out] result A pointer to an `AMresult` struct. -/// \pre \p result `!= NULL`. +/// \param[in] result A pointer to an `AMresult` struct. 
+/// \pre \p result `!= NULL` /// \internal /// /// # Safety /// result must be a valid pointer to an AMresult #[no_mangle] -pub unsafe extern "C" fn AMfree(result: *mut AMresult) { +pub unsafe extern "C" fn AMresultFree(result: *mut AMresult) { if !result.is_null() { let result: AMresult = *Box::from_raw(result); drop(result) @@ -778,39 +569,67 @@ pub unsafe extern "C" fn AMfree(result: *mut AMresult) { } /// \memberof AMresult -/// \brief Gets the size of a result's value. +/// \brief Gets a result's first item. /// /// \param[in] result A pointer to an `AMresult` struct. -/// \return The count of values in \p result. -/// \pre \p result `!= NULL`. +/// \return A pointer to an `AMitem` struct. +/// \pre \p result `!= NULL` +/// \internal +/// +/// # Safety +/// result must be a valid pointer to an AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultItem(result: *mut AMresult) -> *mut AMitem { + use AMresult::*; + + if let Some(Items(items)) = result.as_mut() { + if !items.is_empty() { + return &mut items[0]; + } + } + std::ptr::null_mut() +} + +/// \memberof AMresult +/// \brief Gets a result's items. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \return An `AMitems` struct. +/// \pre \p result `!= NULL` +/// \internal +/// +/// # Safety +/// result must be a valid pointer to an AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultItems<'a>(result: *mut AMresult) -> AMitems<'a> { + use AMresult::*; + + if let Some(Items(items)) = result.as_mut() { + if !items.is_empty() { + return AMitems::new(items); + } + } + Default::default() +} + +/// \memberof AMresult +/// \brief Gets the size of a result. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \return The count of items within \p result. 
+/// \pre \p result `!= NULL` /// \internal /// /// # Safety /// result must be a valid pointer to an AMresult #[no_mangle] pub unsafe extern "C" fn AMresultSize(result: *const AMresult) -> usize { - if let Some(result) = result.as_ref() { - use AMresult::*; + use self::AMresult::*; - match result { - Error(_) | Void => 0, - ActorId(_, _) - | Doc(_) - | ObjId(_) - | String(_) - | SyncMessage(_) - | SyncState(_) - | Value(_) => 1, - ChangeHashes(change_hashes) => change_hashes.len(), - Changes(changes, _) => changes.len(), - ListItems(list_items) => list_items.len(), - MapItems(map_items) => map_items.len(), - ObjItems(obj_items) => obj_items.len(), - Strings(cstrings) => cstrings.len(), - } - } else { - 0 + if let Some(Items(items)) = result.as_ref() { + return items.len(); } + 0 } /// \memberof AMresult @@ -818,94 +637,24 @@ pub unsafe extern "C" fn AMresultSize(result: *const AMresult) -> usize { /// /// \param[in] result A pointer to an `AMresult` struct. /// \return An `AMstatus` enum tag. -/// \pre \p result `!= NULL`. +/// \pre \p result `!= NULL` /// \internal /// /// # Safety /// result must be a valid pointer to an AMresult #[no_mangle] pub unsafe extern "C" fn AMresultStatus(result: *const AMresult) -> AMstatus { - match result.as_ref() { - Some(AMresult::Error(_)) => AMstatus::Error, - None => AMstatus::InvalidResult, - _ => AMstatus::Ok, - } -} + use AMresult::*; -/// \memberof AMresult -/// \brief Gets a result's value. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \return An `AMvalue` struct. -/// \pre \p result `!= NULL`. 
-/// \internal -/// -/// # Safety -/// result must be a valid pointer to an AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> { - let mut content = AMvalue::Void; - if let Some(result) = result.as_mut() { + if let Some(result) = result.as_ref() { match result { - AMresult::ActorId(actor_id, c_actor_id) => match c_actor_id { - None => { - content = AMvalue::ActorId(&*c_actor_id.insert(AMactorId::new(&*actor_id))); - } - Some(c_actor_id) => { - content = AMvalue::ActorId(&*c_actor_id); - } - }, - AMresult::ChangeHashes(change_hashes) => { - content = AMvalue::ChangeHashes(AMchangeHashes::new(change_hashes)); + Error(_) => { + return AMstatus::Error; } - AMresult::Changes(changes, storage) => { - content = AMvalue::Changes(AMchanges::new( - changes, - storage.get_or_insert(BTreeMap::new()), - )); + _ => { + return AMstatus::Ok; } - AMresult::Doc(doc) => content = AMvalue::Doc(&mut **doc), - AMresult::Error(_) => {} - AMresult::ListItems(list_items) => { - content = AMvalue::ListItems(AMlistItems::new(list_items)); - } - AMresult::MapItems(map_items) => { - content = AMvalue::MapItems(AMmapItems::new(map_items)); - } - AMresult::ObjId(obj_id) => { - content = AMvalue::ObjId(obj_id); - } - AMresult::ObjItems(obj_items) => { - content = AMvalue::ObjItems(AMobjItems::new(obj_items)); - } - AMresult::String(string) => content = AMvalue::Str(string.as_bytes().into()), - AMresult::Strings(strings) => { - content = AMvalue::Strs(AMstrs::new(strings)); - } - AMresult::SyncMessage(sync_message) => { - content = AMvalue::SyncMessage(sync_message); - } - AMresult::SyncState(sync_state) => { - content = AMvalue::SyncState(&mut *sync_state); - } - AMresult::Value(value) => { - content = (&*value).into(); - } - AMresult::Void => {} } - }; - content -} - -/// \struct AMunknownValue -/// \installed_headerfile -/// \brief A value (typically for a `set` operation) whose type is unknown. 
-/// -#[derive(Eq, PartialEq)] -#[repr(C)] -pub struct AMunknownValue { - /// The value's raw bytes. - bytes: AMbyteSpan, - /// The value's encoded type identifier. - type_code: u8, + } + AMstatus::InvalidResult } diff --git a/rust/automerge-c/src/result_stack.rs b/rust/automerge-c/src/result_stack.rs deleted file mode 100644 index cfb9c7d2..00000000 --- a/rust/automerge-c/src/result_stack.rs +++ /dev/null @@ -1,156 +0,0 @@ -use crate::result::{AMfree, AMresult, AMresultStatus, AMresultValue, AMstatus, AMvalue}; - -/// \struct AMresultStack -/// \installed_headerfile -/// \brief A node in a singly-linked list of result pointers. -/// -/// \note Using this data structure is purely optional because its only purpose -/// is to make memory management tolerable for direct usage of this API -/// in C, C++ and Objective-C. -#[repr(C)] -pub struct AMresultStack { - /// A result to be deallocated. - pub result: *mut AMresult, - /// The next node in the singly-linked list or `NULL`. - pub next: *mut AMresultStack, -} - -impl AMresultStack { - pub fn new(result: *mut AMresult, next: *mut AMresultStack) -> Self { - Self { result, next } - } -} - -/// \memberof AMresultStack -/// \brief Deallocates the storage for a stack of results. -/// -/// \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. -/// \return The number of `AMresult` structs freed. -/// \pre \p stack `!= NULL`. -/// \post `*stack == NULL`. -/// \note Calling this function is purely optional because its only purpose is -/// to make memory management tolerable for direct usage of this API in -/// C, C++ and Objective-C. 
-/// \internal -/// -/// # Safety -/// stack must be a valid AMresultStack pointer pointer -#[no_mangle] -pub unsafe extern "C" fn AMfreeStack(stack: *mut *mut AMresultStack) -> usize { - if stack.is_null() { - return 0; - } - let mut count: usize = 0; - while !(*stack).is_null() { - AMfree(AMpop(stack)); - count += 1; - } - count -} - -/// \memberof AMresultStack -/// \brief Gets the topmost result from the stack after removing it. -/// -/// \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. -/// \return A pointer to an `AMresult` struct or `NULL`. -/// \pre \p stack `!= NULL`. -/// \post `*stack == NULL`. -/// \note Calling this function is purely optional because its only purpose is -/// to make memory management tolerable for direct usage of this API in -/// C, C++ and Objective-C. -/// \internal -/// -/// # Safety -/// stack must be a valid AMresultStack pointer pointer -#[no_mangle] -pub unsafe extern "C" fn AMpop(stack: *mut *mut AMresultStack) -> *mut AMresult { - if stack.is_null() || (*stack).is_null() { - return std::ptr::null_mut(); - } - let top = Box::from_raw(*stack); - *stack = top.next; - let result = top.result; - drop(top); - result -} - -/// \memberof AMresultStack -/// \brief The prototype of a function to be called when a value matching the -/// given discriminant cannot be extracted from the result at the top of -/// the given stack. -/// -/// \note Implementing this function is purely optional because its only purpose -/// is to make memory management tolerable for direct usage of this API -/// in C, C++ and Objective-C. -pub type AMpushCallback = - Option ()>; - -/// \memberof AMresultStack -/// \brief Pushes the given result onto the given stack and then either extracts -/// a value matching the given discriminant from that result or, -/// failing that, calls the given function and gets a void value instead. -/// -/// \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. 
-/// \param[in] result A pointer to an `AMresult` struct. -/// \param[in] discriminant An `AMvalue` variant's corresponding enum tag. -/// \param[in] callback A pointer to a function with the same signature as -/// `AMpushCallback()` or `NULL`. -/// \return An `AMvalue` struct. -/// \pre \p stack `!= NULL`. -/// \pre \p result `!= NULL`. -/// \warning If \p stack `== NULL` then \p result is deallocated in order to -/// prevent a memory leak. -/// \note Calling this function is purely optional because its only purpose is -/// to make memory management tolerable for direct usage of this API in -/// C, C++ and Objective-C. -/// \internal -/// -/// # Safety -/// stack must be a valid AMresultStack pointer pointer -/// result must be a valid AMresult pointer -#[no_mangle] -pub unsafe extern "C" fn AMpush<'a>( - stack: *mut *mut AMresultStack, - result: *mut AMresult, - discriminant: u8, - callback: AMpushCallback, -) -> AMvalue<'a> { - if stack.is_null() { - // There's no stack to push the result onto so it has to be freed in - // order to prevent a memory leak. - AMfree(result); - if let Some(callback) = callback { - callback(stack, discriminant); - } - return AMvalue::Void; - } else if result.is_null() { - if let Some(callback) = callback { - callback(stack, discriminant); - } - return AMvalue::Void; - } - // Always push the result onto the stack, even if it's wrong, so that the - // given callback can retrieve it. - let node = Box::new(AMresultStack::new(result, *stack)); - let top = Box::into_raw(node); - *stack = top; - // Test that the result contains a value. - match AMresultStatus(result) { - AMstatus::Ok => {} - _ => { - if let Some(callback) = callback { - callback(stack, discriminant); - } - return AMvalue::Void; - } - } - // Test that the result's value matches the given discriminant. 
- let value = AMresultValue(result); - if discriminant != u8::from(&value) { - if let Some(callback) = callback { - callback(stack, discriminant); - } - return AMvalue::Void; - } - value -} diff --git a/rust/automerge-c/src/strs.rs b/rust/automerge-c/src/strs.rs deleted file mode 100644 index a36861b7..00000000 --- a/rust/automerge-c/src/strs.rs +++ /dev/null @@ -1,359 +0,0 @@ -use std::cmp::Ordering; -use std::ffi::c_void; -use std::mem::size_of; -use std::os::raw::c_char; - -use crate::byte_span::AMbyteSpan; - -/// \brief Creates a string view from a C string. -/// -/// \param[in] c_str A UTF-8 C string. -/// \return A UTF-8 string view as an `AMbyteSpan` struct. -/// \internal -/// -/// #Safety -/// c_str must be a null-terminated array of `c_char` -#[no_mangle] -pub unsafe extern "C" fn AMstr(c_str: *const c_char) -> AMbyteSpan { - c_str.into() -} - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new(strings: &[String], offset: isize) -> Self { - Self { - len: strings.len(), - offset, - ptr: strings.as_ptr() as *const c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. 
- len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. - -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option { - if self.is_stopped() { - return None; - } - let slice: &[String] = - unsafe { std::slice::from_raw_parts(self.ptr as *const String, self.len) }; - let value = slice[self.get_index()].as_bytes().into(); - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &[String] = - unsafe { std::slice::from_raw_parts(self.ptr as *const String, self.len) }; - Some(slice[self.get_index()].as_bytes().into()) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) - .try_into() - .unwrap() - } - } -} - -/// \struct AMstrs -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of UTF-8 strings. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMstrs { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. 
- /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. - detail: [u8; USIZE_USIZE_USIZE_], -} - -impl AMstrs { - pub fn new(strings: &[String]) -> Self { - Self { - detail: Detail::new(strings, 0).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[String]> for AMstrs { - fn as_ref(&self) -> &[String] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const String, detail.len) } - } -} - -impl Default for AMstrs { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMstrs -/// \brief Advances an iterator over a sequence of UTF-8 strings by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] strs A pointer to an `AMstrs` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p strs `!= NULL`. 
-/// \internal -/// -/// #Safety -/// strs must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsAdvance(strs: *mut AMstrs, n: isize) { - if let Some(strs) = strs.as_mut() { - strs.advance(n); - }; -} - -/// \memberof AMstrs -/// \brief Compares the sequences of UTF-8 strings underlying a pair of -/// iterators. -/// -/// \param[in] strs1 A pointer to an `AMstrs` struct. -/// \param[in] strs2 A pointer to an `AMstrs` struct. -/// \return `-1` if \p strs1 `<` \p strs2, `0` if -/// \p strs1 `==` \p strs2 and `1` if -/// \p strs1 `>` \p strs2. -/// \pre \p strs1 `!= NULL`. -/// \pre \p strs2 `!= NULL`. -/// \internal -/// -/// #Safety -/// strs1 must be a valid pointer to an AMstrs -/// strs2 must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsCmp(strs1: *const AMstrs, strs2: *const AMstrs) -> isize { - match (strs1.as_ref(), strs2.as_ref()) { - (Some(strs1), Some(strs2)) => match strs1.as_ref().cmp(strs2.as_ref()) { - Ordering::Less => -1, - Ordering::Equal => 0, - Ordering::Greater => 1, - }, - (None, Some(_)) => -1, - (Some(_), None) => 1, - (None, None) => 0, - } -} - -/// \memberof AMstrs -/// \brief Gets the key at the current position of an iterator over a sequence -/// of UTF-8 strings and then advances it by at most \p |n| positions -/// where the sign of \p n is relative to the iterator's direction. -/// -/// \param[in,out] strs A pointer to an `AMstrs` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A UTF-8 string view as an `AMbyteSpan` struct that's `AMstr(NULL)` -/// when \p strs was previously advanced past its forward/reverse limit. -/// \pre \p strs `!= NULL`. 
-/// \internal -/// -/// #Safety -/// strs must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsNext(strs: *mut AMstrs, n: isize) -> AMbyteSpan { - if let Some(strs) = strs.as_mut() { - if let Some(key) = strs.next(n) { - return key; - } - } - Default::default() -} - -/// \memberof AMstrs -/// \brief Advances an iterator over a sequence of UTF-8 strings by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction and then gets the key at its new position. -/// -/// \param[in,out] strs A pointer to an `AMstrs` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A UTF-8 string view as an `AMbyteSpan` struct that's `AMstr(NULL)` -/// when \p strs is presently advanced past its forward/reverse limit. -/// \pre \p strs `!= NULL`. -/// \internal -/// -/// #Safety -/// strs must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsPrev(strs: *mut AMstrs, n: isize) -> AMbyteSpan { - if let Some(strs) = strs.as_mut() { - if let Some(key) = strs.prev(n) { - return key; - } - } - Default::default() -} - -/// \memberof AMstrs -/// \brief Gets the size of the sequence of UTF-8 strings underlying an -/// iterator. -/// -/// \param[in] strs A pointer to an `AMstrs` struct. -/// \return The count of values in \p strs. -/// \pre \p strs `!= NULL`. -/// \internal -/// -/// #Safety -/// strs must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsSize(strs: *const AMstrs) -> usize { - if let Some(strs) = strs.as_ref() { - strs.len() - } else { - 0 - } -} - -/// \memberof AMstrs -/// \brief Creates an iterator over the same sequence of UTF-8 strings as the -/// given one but with the opposite position and direction. -/// -/// \param[in] strs A pointer to an `AMstrs` struct. -/// \return An `AMstrs` struct. -/// \pre \p strs `!= NULL`. 
-/// \internal -/// -/// #Safety -/// strs must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsReversed(strs: *const AMstrs) -> AMstrs { - if let Some(strs) = strs.as_ref() { - strs.reversed() - } else { - AMstrs::default() - } -} - -/// \memberof AMstrs -/// \brief Creates an iterator at the starting position over the same sequence -/// of UTF-8 strings as the given one. -/// -/// \param[in] strs A pointer to an `AMstrs` struct. -/// \return An `AMstrs` struct -/// \pre \p strs `!= NULL`. -/// \internal -/// -/// #Safety -/// strs must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsRewound(strs: *const AMstrs) -> AMstrs { - if let Some(strs) = strs.as_ref() { - strs.rewound() - } else { - Default::default() - } -} diff --git a/rust/automerge-c/src/sync.rs b/rust/automerge-c/src/sync.rs index cfed1af5..fe0332a1 100644 --- a/rust/automerge-c/src/sync.rs +++ b/rust/automerge-c/src/sync.rs @@ -1,7 +1,7 @@ mod have; -mod haves; mod message; mod state; +pub(crate) use have::AMsyncHave; pub(crate) use message::{to_sync_message, AMsyncMessage}; pub(crate) use state::AMsyncState; diff --git a/rust/automerge-c/src/sync/have.rs b/rust/automerge-c/src/sync/have.rs index 312151e7..37d2031f 100644 --- a/rust/automerge-c/src/sync/have.rs +++ b/rust/automerge-c/src/sync/have.rs @@ -1,23 +1,23 @@ use automerge as am; -use crate::change_hashes::AMchangeHashes; +use crate::result::{to_result, AMresult}; /// \struct AMsyncHave /// \installed_headerfile /// \brief A summary of the changes that the sender of a synchronization /// message already has. 
#[derive(Clone, Eq, PartialEq)] -pub struct AMsyncHave(*const am::sync::Have); +pub struct AMsyncHave(am::sync::Have); impl AMsyncHave { - pub fn new(have: &am::sync::Have) -> Self { + pub fn new(have: am::sync::Have) -> Self { Self(have) } } impl AsRef for AMsyncHave { fn as_ref(&self) -> &am::sync::Have { - unsafe { &*self.0 } + &self.0 } } @@ -25,17 +25,18 @@ impl AsRef for AMsyncHave { /// \brief Gets the heads of the sender. /// /// \param[in] sync_have A pointer to an `AMsyncHave` struct. -/// \return An `AMchangeHashes` struct. -/// \pre \p sync_have `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_have `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// sync_have must be a valid pointer to an AMsyncHave #[no_mangle] -pub unsafe extern "C" fn AMsyncHaveLastSync(sync_have: *const AMsyncHave) -> AMchangeHashes { - if let Some(sync_have) = sync_have.as_ref() { - AMchangeHashes::new(&sync_have.as_ref().last_sync) - } else { - Default::default() - } +pub unsafe extern "C" fn AMsyncHaveLastSync(sync_have: *const AMsyncHave) -> *mut AMresult { + to_result(match sync_have.as_ref() { + Some(sync_have) => sync_have.as_ref().last_sync.as_slice(), + None => Default::default(), + }) } diff --git a/rust/automerge-c/src/sync/haves.rs b/rust/automerge-c/src/sync/haves.rs deleted file mode 100644 index c74b8e96..00000000 --- a/rust/automerge-c/src/sync/haves.rs +++ /dev/null @@ -1,378 +0,0 @@ -use automerge as am; -use std::collections::BTreeMap; -use std::ffi::c_void; -use std::mem::size_of; - -use crate::sync::have::AMsyncHave; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, - storage: *mut c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// 
propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new( - haves: &[am::sync::Have], - offset: isize, - storage: &mut BTreeMap, - ) -> Self { - let storage: *mut BTreeMap = storage; - Self { - len: haves.len(), - offset, - ptr: haves.as_ptr() as *const c_void, - storage: storage as *mut c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. 
- -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<*const AMsyncHave> { - if self.is_stopped() { - return None; - } - let slice: &[am::sync::Have] = - unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, self.len) }; - let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; - let index = self.get_index(); - let value = match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, AMsyncHave::new(&slice[index])); - storage.get_mut(&index).unwrap() - } - }; - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<*const AMsyncHave> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &[am::sync::Have] = - unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, self.len) }; - let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; - let index = self.get_index(); - Some(match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, AMsyncHave::new(&slice[index])); - storage.get_mut(&index).unwrap() - } - }) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - storage: self.storage, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - storage: self.storage, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts( - (&detail as *const Detail) as *const u8, - USIZE_USIZE_USIZE_USIZE_, - ) - .try_into() - .unwrap() - } - } -} - -/// \struct AMsyncHaves -/// 
\installed_headerfile -/// \brief A random-access iterator over a sequence of synchronization haves. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMsyncHaves { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. - detail: [u8; USIZE_USIZE_USIZE_USIZE_], -} - -impl AMsyncHaves { - pub fn new(haves: &[am::sync::Have], storage: &mut BTreeMap) -> Self { - Self { - detail: Detail::new(haves, 0, storage).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<*const AMsyncHave> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<*const AMsyncHave> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[am::sync::Have]> for AMsyncHaves { - fn as_ref(&self) -> &[am::sync::Have] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const am::sync::Have, detail.len) } - } -} - -impl Default for AMsyncHaves { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMsyncHaves -/// \brief Advances an iterator over a 
sequence of synchronization haves by at -/// most \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] sync_haves A pointer to an `AMsyncHaves` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p sync_haves `!= NULL`. -/// \internal -/// -/// #Safety -/// sync_haves must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesAdvance(sync_haves: *mut AMsyncHaves, n: isize) { - if let Some(sync_haves) = sync_haves.as_mut() { - sync_haves.advance(n); - }; -} - -/// \memberof AMsyncHaves -/// \brief Tests the equality of two sequences of synchronization haves -/// underlying a pair of iterators. -/// -/// \param[in] sync_haves1 A pointer to an `AMsyncHaves` struct. -/// \param[in] sync_haves2 A pointer to an `AMsyncHaves` struct. -/// \return `true` if \p sync_haves1 `==` \p sync_haves2 and `false` otherwise. -/// \pre \p sync_haves1 `!= NULL`. -/// \pre \p sync_haves2 `!= NULL`. -/// \internal -/// -/// #Safety -/// sync_haves1 must be a valid pointer to an AMsyncHaves -/// sync_haves2 must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesEqual( - sync_haves1: *const AMsyncHaves, - sync_haves2: *const AMsyncHaves, -) -> bool { - match (sync_haves1.as_ref(), sync_haves2.as_ref()) { - (Some(sync_haves1), Some(sync_haves2)) => sync_haves1.as_ref() == sync_haves2.as_ref(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, - } -} - -/// \memberof AMsyncHaves -/// \brief Gets the synchronization have at the current position of an iterator -/// over a sequence of synchronization haves and then advances it by at -/// most \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] sync_haves A pointer to an `AMsyncHaves` struct. 
-/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMsyncHave` struct that's `NULL` when -/// \p sync_haves was previously advanced past its forward/reverse -/// limit. -/// \pre \p sync_haves `!= NULL`. -/// \internal -/// -/// #Safety -/// sync_haves must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesNext( - sync_haves: *mut AMsyncHaves, - n: isize, -) -> *const AMsyncHave { - if let Some(sync_haves) = sync_haves.as_mut() { - if let Some(sync_have) = sync_haves.next(n) { - return sync_have; - } - } - std::ptr::null() -} - -/// \memberof AMsyncHaves -/// \brief Advances an iterator over a sequence of synchronization haves by at -/// most \p |n| positions where the sign of \p n is relative to the -/// iterator's direction and then gets the synchronization have at its -/// new position. -/// -/// \param[in,out] sync_haves A pointer to an `AMsyncHaves` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMsyncHave` struct that's `NULL` when -/// \p sync_haves is presently advanced past its forward/reverse limit. -/// \pre \p sync_haves `!= NULL`. -/// \internal -/// -/// #Safety -/// sync_haves must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesPrev( - sync_haves: *mut AMsyncHaves, - n: isize, -) -> *const AMsyncHave { - if let Some(sync_haves) = sync_haves.as_mut() { - if let Some(sync_have) = sync_haves.prev(n) { - return sync_have; - } - } - std::ptr::null() -} - -/// \memberof AMsyncHaves -/// \brief Gets the size of the sequence of synchronization haves underlying an -/// iterator. -/// -/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. -/// \return The count of values in \p sync_haves. -/// \pre \p sync_haves `!= NULL`. 
-/// \internal -/// -/// #Safety -/// sync_haves must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesSize(sync_haves: *const AMsyncHaves) -> usize { - if let Some(sync_haves) = sync_haves.as_ref() { - sync_haves.len() - } else { - 0 - } -} - -/// \memberof AMsyncHaves -/// \brief Creates an iterator over the same sequence of synchronization haves -/// as the given one but with the opposite position and direction. -/// -/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. -/// \return An `AMsyncHaves` struct -/// \pre \p sync_haves `!= NULL`. -/// \internal -/// -/// #Safety -/// sync_haves must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesReversed(sync_haves: *const AMsyncHaves) -> AMsyncHaves { - if let Some(sync_haves) = sync_haves.as_ref() { - sync_haves.reversed() - } else { - Default::default() - } -} - -/// \memberof AMsyncHaves -/// \brief Creates an iterator at the starting position over the same sequence -/// of synchronization haves as the given one. -/// -/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. -/// \return An `AMsyncHaves` struct -/// \pre \p sync_haves `!= NULL`. 
-/// \internal -/// -/// #Safety -/// sync_haves must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesRewound(sync_haves: *const AMsyncHaves) -> AMsyncHaves { - if let Some(sync_haves) = sync_haves.as_ref() { - sync_haves.rewound() - } else { - Default::default() - } -} diff --git a/rust/automerge-c/src/sync/message.rs b/rust/automerge-c/src/sync/message.rs index 46a6d29a..bdb1db34 100644 --- a/rust/automerge-c/src/sync/message.rs +++ b/rust/automerge-c/src/sync/message.rs @@ -3,18 +3,15 @@ use std::cell::RefCell; use std::collections::BTreeMap; use crate::change::AMchange; -use crate::change_hashes::AMchangeHashes; -use crate::changes::AMchanges; use crate::result::{to_result, AMresult}; use crate::sync::have::AMsyncHave; -use crate::sync::haves::AMsyncHaves; macro_rules! to_sync_message { ($handle:expr) => {{ let handle = $handle.as_ref(); match handle { Some(b) => b, - None => return AMresult::err("Invalid AMsyncMessage pointer").into(), + None => return AMresult::error("Invalid `AMsyncMessage*`").into(), } }}; } @@ -51,55 +48,52 @@ impl AsRef for AMsyncMessage { /// \brief Gets the changes for the recipient to apply. /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return An `AMchanges` struct. -/// \pre \p sync_message `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE` items. +/// \pre \p sync_message `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] -pub unsafe extern "C" fn AMsyncMessageChanges(sync_message: *const AMsyncMessage) -> AMchanges { - if let Some(sync_message) = sync_message.as_ref() { - AMchanges::new( - &sync_message.body.changes, - &mut sync_message.changes_storage.borrow_mut(), - ) - } else { - Default::default() - } +pub unsafe extern "C" fn AMsyncMessageChanges(sync_message: *const AMsyncMessage) -> *mut AMresult { + to_result(match sync_message.as_ref() { + Some(sync_message) => sync_message.body.changes.as_slice(), + None => Default::default(), + }) } /// \memberof AMsyncMessage -/// \brief Decodes a sequence of bytes into a synchronization message. +/// \brief Decodes an array of bytes into a synchronization message. /// /// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to decode. -/// \return A pointer to an `AMresult` struct containing an `AMsyncMessage` -/// struct. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] count The count of bytes to decode from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_SYNC_MESSAGE` item. +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety -/// src must be a byte array of size `>= count` +/// src must be a byte array of length `>= count` #[no_mangle] pub unsafe extern "C" fn AMsyncMessageDecode(src: *const u8, count: usize) -> *mut AMresult { - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(am::sync::Message::decode(&data)) + let data = std::slice::from_raw_parts(src, count); + to_result(am::sync::Message::decode(data)) } /// \memberof AMsyncMessage -/// \brief Encodes a synchronization message as a sequence of bytes. +/// \brief Encodes a synchronization message as an array of bytes. /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return A pointer to an `AMresult` struct containing an array of bytes as -/// an `AMbyteSpan` struct. -/// \pre \p sync_message `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTES` item. +/// \pre \p sync_message `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -114,41 +108,40 @@ pub unsafe extern "C" fn AMsyncMessageEncode(sync_message: *const AMsyncMessage) /// \brief Gets a summary of the changes that the sender already has. /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return An `AMhaves` struct. -/// \pre \p sync_message `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_SYNC_HAVE` items. +/// \pre \p sync_message `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] -pub unsafe extern "C" fn AMsyncMessageHaves(sync_message: *const AMsyncMessage) -> AMsyncHaves { - if let Some(sync_message) = sync_message.as_ref() { - AMsyncHaves::new( - &sync_message.as_ref().have, - &mut sync_message.haves_storage.borrow_mut(), - ) - } else { - Default::default() - } +pub unsafe extern "C" fn AMsyncMessageHaves(sync_message: *const AMsyncMessage) -> *mut AMresult { + to_result(match sync_message.as_ref() { + Some(sync_message) => sync_message.as_ref().have.as_slice(), + None => Default::default(), + }) } /// \memberof AMsyncMessage /// \brief Gets the heads of the sender. /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return An `AMchangeHashes` struct. -/// \pre \p sync_message `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_message `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] -pub unsafe extern "C" fn AMsyncMessageHeads(sync_message: *const AMsyncMessage) -> AMchangeHashes { - if let Some(sync_message) = sync_message.as_ref() { - AMchangeHashes::new(&sync_message.as_ref().heads) - } else { - Default::default() - } +pub unsafe extern "C" fn AMsyncMessageHeads(sync_message: *const AMsyncMessage) -> *mut AMresult { + to_result(match sync_message.as_ref() { + Some(sync_message) => sync_message.as_ref().heads.as_slice(), + None => Default::default(), + }) } /// \memberof AMsyncMessage @@ -156,17 +149,18 @@ pub unsafe extern "C" fn AMsyncMessageHeads(sync_message: *const AMsyncMessage) /// by the recipient. /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return An `AMchangeHashes` struct. -/// \pre \p sync_message `!= NULL`. 
+/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_message `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] -pub unsafe extern "C" fn AMsyncMessageNeeds(sync_message: *const AMsyncMessage) -> AMchangeHashes { - if let Some(sync_message) = sync_message.as_ref() { - AMchangeHashes::new(&sync_message.as_ref().need) - } else { - Default::default() - } +pub unsafe extern "C" fn AMsyncMessageNeeds(sync_message: *const AMsyncMessage) -> *mut AMresult { + to_result(match sync_message.as_ref() { + Some(sync_message) => sync_message.as_ref().need.as_slice(), + None => Default::default(), + }) } diff --git a/rust/automerge-c/src/sync/state.rs b/rust/automerge-c/src/sync/state.rs index 1c1d316f..1d85ed98 100644 --- a/rust/automerge-c/src/sync/state.rs +++ b/rust/automerge-c/src/sync/state.rs @@ -2,17 +2,15 @@ use automerge as am; use std::cell::RefCell; use std::collections::BTreeMap; -use crate::change_hashes::AMchangeHashes; use crate::result::{to_result, AMresult}; use crate::sync::have::AMsyncHave; -use crate::sync::haves::AMsyncHaves; macro_rules! to_sync_state { ($handle:expr) => {{ let handle = $handle.as_ref(); match handle { Some(b) => b, - None => return AMresult::err("Invalid AMsyncState pointer").into(), + None => return AMresult::error("Invalid `AMsyncState*`").into(), } }}; } @@ -56,36 +54,35 @@ impl From for *mut AMsyncState { } /// \memberof AMsyncState -/// \brief Decodes a sequence of bytes into a synchronization state. +/// \brief Decodes an array of bytes into a synchronization state. /// /// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to decode. -/// \return A pointer to an `AMresult` struct containing an `AMsyncState` -/// struct. -/// \pre \p src `!= NULL`. 
-/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] count The count of bytes to decode from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_SYNC_STATE` item. +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety -/// src must be a byte array of size `>= count` +/// src must be a byte array of length `>= count` #[no_mangle] pub unsafe extern "C" fn AMsyncStateDecode(src: *const u8, count: usize) -> *mut AMresult { - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(am::sync::State::decode(&data)) + let data = std::slice::from_raw_parts(src, count); + to_result(am::sync::State::decode(data)) } /// \memberof AMsyncState -/// \brief Encodes a synchronizaton state as a sequence of bytes. +/// \brief Encodes a synchronization state as an array of bytes. /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \return A pointer to an `AMresult` struct containing an array of bytes as -/// an `AMbyteSpan` struct. -/// \pre \p sync_state `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTE_SPAN` item. +/// \pre \p sync_state `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety @@ -102,8 +99,9 @@ pub unsafe extern "C" fn AMsyncStateEncode(sync_state: *const AMsyncState) -> *m /// \param[in] sync_state1 A pointer to an `AMsyncState` struct. 
/// \param[in] sync_state2 A pointer to an `AMsyncState` struct. /// \return `true` if \p sync_state1 `==` \p sync_state2 and `false` otherwise. -/// \pre \p sync_state1 `!= NULL`. -/// \pre \p sync_state2 `!= NULL`. +/// \pre \p sync_state1 `!= NULL` +/// \pre \p sync_state2 `!= NULL` +/// \post `!(`\p sync_state1 `&&` \p sync_state2 `) -> false` /// \internal /// /// #Safety @@ -116,18 +114,17 @@ pub unsafe extern "C" fn AMsyncStateEqual( ) -> bool { match (sync_state1.as_ref(), sync_state2.as_ref()) { (Some(sync_state1), Some(sync_state2)) => sync_state1.as_ref() == sync_state2.as_ref(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, + (None, None) | (None, Some(_)) | (Some(_), None) => false, } } /// \memberof AMsyncState -/// \brief Allocates a new synchronization state and initializes it with -/// defaults. +/// \brief Allocates a new synchronization state and initializes it from +/// default values. /// -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMsyncState` struct. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_SYNC_STATE` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. #[no_mangle] pub extern "C" fn AMsyncStateInit() -> *mut AMresult { to_result(am::sync::State::new()) @@ -137,40 +134,36 @@ pub extern "C" fn AMsyncStateInit() -> *mut AMresult { /// \brief Gets the heads that are shared by both peers. /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \return An `AMchangeHashes` struct. -/// \pre \p sync_state `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_state `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// sync_state must be a valid pointer to an AMsyncState #[no_mangle] -pub unsafe extern "C" fn AMsyncStateSharedHeads(sync_state: *const AMsyncState) -> AMchangeHashes { - if let Some(sync_state) = sync_state.as_ref() { - AMchangeHashes::new(&sync_state.as_ref().shared_heads) - } else { - Default::default() - } +pub unsafe extern "C" fn AMsyncStateSharedHeads(sync_state: *const AMsyncState) -> *mut AMresult { + let sync_state = to_sync_state!(sync_state); + to_result(sync_state.as_ref().shared_heads.as_slice()) } /// \memberof AMsyncState /// \brief Gets the heads that were last sent by this peer. /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \return An `AMchangeHashes` struct. -/// \pre \p sync_state `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_state `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// sync_state must be a valid pointer to an AMsyncState #[no_mangle] -pub unsafe extern "C" fn AMsyncStateLastSentHeads( - sync_state: *const AMsyncState, -) -> AMchangeHashes { - if let Some(sync_state) = sync_state.as_ref() { - AMchangeHashes::new(&sync_state.as_ref().last_sent_heads) - } else { - Default::default() - } +pub unsafe extern "C" fn AMsyncStateLastSentHeads(sync_state: *const AMsyncState) -> *mut AMresult { + let sync_state = to_sync_state!(sync_state); + to_result(sync_state.as_ref().last_sent_heads.as_slice()) } /// \memberof AMsyncState @@ -178,11 +171,13 @@ pub unsafe extern "C" fn AMsyncStateLastSentHeads( /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. /// \param[out] has_value A pointer to a boolean flag that is set to `true` if -/// the returned `AMhaves` struct is relevant, `false` otherwise. -/// \return An `AMhaves` struct. -/// \pre \p sync_state `!= NULL`. 
-/// \pre \p has_value `!= NULL`. -/// \internal +/// the returned `AMitems` struct is relevant, `false` otherwise. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_SYNC_HAVE` items. +/// \pre \p sync_state `!= NULL` +/// \pre \p has_value `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +//// \internal /// /// # Safety /// sync_state must be a valid pointer to an AMsyncState @@ -191,15 +186,15 @@ pub unsafe extern "C" fn AMsyncStateLastSentHeads( pub unsafe extern "C" fn AMsyncStateTheirHaves( sync_state: *const AMsyncState, has_value: *mut bool, -) -> AMsyncHaves { +) -> *mut AMresult { if let Some(sync_state) = sync_state.as_ref() { if let Some(haves) = &sync_state.as_ref().their_have { *has_value = true; - return AMsyncHaves::new(haves, &mut sync_state.their_haves_storage.borrow_mut()); - }; + return to_result(haves.as_slice()); + } }; *has_value = false; - Default::default() + to_result(Vec::::new()) } /// \memberof AMsyncState @@ -207,29 +202,31 @@ pub unsafe extern "C" fn AMsyncStateTheirHaves( /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. /// \param[out] has_value A pointer to a boolean flag that is set to `true` if -/// the returned `AMchangeHashes` struct is relevant, `false` -/// otherwise. -/// \return An `AMchangeHashes` struct. -/// \pre \p sync_state `!= NULL`. -/// \pre \p has_value `!= NULL`. +/// the returned `AMitems` struct is relevant, `false` +/// otherwise. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_state `!= NULL` +/// \pre \p has_value `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// sync_state must be a valid pointer to an AMsyncState -/// has_value must be a valid pointer to a bool. 
+/// has_value must be a valid pointer to a bool #[no_mangle] pub unsafe extern "C" fn AMsyncStateTheirHeads( sync_state: *const AMsyncState, has_value: *mut bool, -) -> AMchangeHashes { +) -> *mut AMresult { if let Some(sync_state) = sync_state.as_ref() { if let Some(change_hashes) = &sync_state.as_ref().their_heads { *has_value = true; - return AMchangeHashes::new(change_hashes); + return to_result(change_hashes.as_slice()); } }; *has_value = false; - Default::default() + to_result(Vec::::new()) } /// \memberof AMsyncState @@ -237,27 +234,29 @@ pub unsafe extern "C" fn AMsyncStateTheirHeads( /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. /// \param[out] has_value A pointer to a boolean flag that is set to `true` if -/// the returned `AMchangeHashes` struct is relevant, `false` -/// otherwise. -/// \return An `AMchangeHashes` struct. -/// \pre \p sync_state `!= NULL`. -/// \pre \p has_value `!= NULL`. +/// the returned `AMitems` struct is relevant, `false` +/// otherwise. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_state `!= NULL` +/// \pre \p has_value `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// sync_state must be a valid pointer to an AMsyncState -/// has_value must be a valid pointer to a bool. 
+/// has_value must be a valid pointer to a bool #[no_mangle] pub unsafe extern "C" fn AMsyncStateTheirNeeds( sync_state: *const AMsyncState, has_value: *mut bool, -) -> AMchangeHashes { +) -> *mut AMresult { if let Some(sync_state) = sync_state.as_ref() { if let Some(change_hashes) = &sync_state.as_ref().their_need { *has_value = true; - return AMchangeHashes::new(change_hashes); + return to_result(change_hashes.as_slice()); } }; *has_value = false; - Default::default() + to_result(Vec::::new()) } diff --git a/rust/automerge-c/src/utils/result.c b/rust/automerge-c/src/utils/result.c new file mode 100644 index 00000000..f922ca31 --- /dev/null +++ b/rust/automerge-c/src/utils/result.c @@ -0,0 +1,33 @@ +#include + +#include + +AMresult* AMresultFrom(int count, ...) { + AMresult* result = NULL; + bool is_ok = true; + va_list args; + va_start(args, count); + for (int i = 0; i != count; ++i) { + AMresult* src = va_arg(args, AMresult*); + AMresult* dest = result; + is_ok = (AMresultStatus(src) == AM_STATUS_OK); + if (is_ok) { + if (dest) { + result = AMresultCat(dest, src); + is_ok = (AMresultStatus(result) == AM_STATUS_OK); + AMresultFree(dest); + AMresultFree(src); + } else { + result = src; + } + } else { + AMresultFree(src); + } + } + va_end(args); + if (!is_ok) { + AMresultFree(result); + result = NULL; + } + return result; +} diff --git a/rust/automerge-c/src/utils/stack.c b/rust/automerge-c/src/utils/stack.c new file mode 100644 index 00000000..2cad7c5c --- /dev/null +++ b/rust/automerge-c/src/utils/stack.c @@ -0,0 +1,106 @@ +#include +#include + +#include +#include + +void AMstackFree(AMstack** stack) { + if (stack) { + while (*stack) { + AMresultFree(AMstackPop(stack, NULL)); + } + } +} + +AMresult* AMstackPop(AMstack** stack, const AMresult* result) { + if (!stack) { + return NULL; + } + AMstack** prev = stack; + if (result) { + while (*prev && ((*prev)->result != result)) { + *prev = (*prev)->prev; + } + } + if (!*prev) { + return NULL; + } + AMstack* target = 
*prev; + *prev = target->prev; + AMresult* popped = target->result; + free(target); + return popped; +} + +AMresult* AMstackResult(AMstack** stack, AMresult* result, AMstackCallback callback, void* data) { + if (!stack) { + if (callback) { + /* Create a local stack so that the callback can still examine the + * result. */ + AMstack node = {.result = result, .prev = NULL}; + AMstack* stack = &node; + callback(&stack, data); + } else { + /* \note There is no reason to call this function when both the + * stack and the callback are null. */ + fprintf(stderr, "ERROR: NULL AMstackCallback!\n"); + } + /* \note Nothing can be returned without a stack regardless of + * whether or not the callback validated the result. */ + AMresultFree(result); + return NULL; + } + /* Always push the result onto the stack, even if it's null, so that the + * callback can examine it. */ + AMstack* next = calloc(1, sizeof(AMstack)); + *next = (AMstack){.result = result, .prev = *stack}; + AMstack* top = next; + *stack = top; + if (callback) { + if (!callback(stack, data)) { + /* The result didn't pass the callback's examination. */ + return NULL; + } + } else { + /* Report an obvious error. */ + if (result) { + AMbyteSpan const err_msg = AMresultError(result); + if (err_msg.src && err_msg.count) { + /* \note The callback may be null because the result is supposed + * to be examined externally so return it despite an + * error. */ + char* const cstr = AMstrdup(err_msg, NULL); + fprintf(stderr, "WARNING: %s.\n", cstr); + free(cstr); + } + } else { + /* \note There's no reason to call this function when both the + * result and the callback are null. 
*/ + fprintf(stderr, "ERROR: NULL AMresult*!\n"); + return NULL; + } + } + return result; +} + +AMitem* AMstackItem(AMstack** stack, AMresult* result, AMstackCallback callback, void* data) { + AMitems items = AMstackItems(stack, result, callback, data); + return AMitemsNext(&items, 1); +} + +AMitems AMstackItems(AMstack** stack, AMresult* result, AMstackCallback callback, void* data) { + return (AMstackResult(stack, result, callback, data)) ? AMresultItems(result) : (AMitems){0}; +} + +size_t AMstackSize(AMstack const* const stack) { + if (!stack) { + return 0; + } + size_t count = 0; + AMstack const* prev = stack; + while (prev) { + ++count; + prev = prev->prev; + } + return count; +} \ No newline at end of file diff --git a/rust/automerge-c/src/utils/stack_callback_data.c b/rust/automerge-c/src/utils/stack_callback_data.c new file mode 100644 index 00000000..f1e988d8 --- /dev/null +++ b/rust/automerge-c/src/utils/stack_callback_data.c @@ -0,0 +1,9 @@ +#include + +#include + +AMstackCallbackData* AMstackCallbackDataInit(AMvalType const bitmask, char const* const file, int const line) { + AMstackCallbackData* data = malloc(sizeof(AMstackCallbackData)); + *data = (AMstackCallbackData){.bitmask = bitmask, .file = file, .line = line}; + return data; +} diff --git a/rust/automerge-c/src/utils/string.c b/rust/automerge-c/src/utils/string.c new file mode 100644 index 00000000..a0d1ebe3 --- /dev/null +++ b/rust/automerge-c/src/utils/string.c @@ -0,0 +1,46 @@ +#include +#include + +#include + +char* AMstrdup(AMbyteSpan const str, char const* nul) { + if (!str.src) { + return NULL; + } else if (!str.count) { + return strdup(""); + } + nul = (nul) ? 
nul : "\\0"; + size_t const nul_len = strlen(nul); + char* dup = NULL; + size_t dup_len = 0; + char const* begin = str.src; + char const* end = begin; + for (size_t i = 0; i != str.count; ++i, ++end) { + if (!*end) { + size_t const len = end - begin; + size_t const alloc_len = dup_len + len + nul_len; + if (dup) { + dup = realloc(dup, alloc_len + 1); + } else { + dup = malloc(alloc_len + 1); + } + memcpy(dup + dup_len, begin, len); + memcpy(dup + dup_len + len, nul, nul_len); + dup[alloc_len] = '\0'; + begin = end + 1; + dup_len = alloc_len; + } + } + if (begin != end) { + size_t const len = end - begin; + size_t const alloc_len = dup_len + len; + if (dup) { + dup = realloc(dup, alloc_len + 1); + } else { + dup = malloc(alloc_len + 1); + } + memcpy(dup + dup_len, begin, len); + dup[alloc_len] = '\0'; + } + return dup; +} diff --git a/rust/automerge-c/test/CMakeLists.txt b/rust/automerge-c/test/CMakeLists.txt index 704a27da..1759f140 100644 --- a/rust/automerge-c/test/CMakeLists.txt +++ b/rust/automerge-c/test/CMakeLists.txt @@ -1,53 +1,51 @@ -cmake_minimum_required(VERSION 3.18 FATAL_ERROR) - -find_package(cmocka REQUIRED) +find_package(cmocka CONFIG REQUIRED) add_executable( - test_${LIBRARY_NAME} + ${LIBRARY_NAME}_test actor_id_tests.c + base_state.c + byte_span_tests.c + cmocka_utils.c + enum_string_tests.c + doc_state.c doc_tests.c - group_state.c + item_tests.c list_tests.c macro_utils.c main.c map_tests.c - stack_utils.c str_utils.c ported_wasm/basic_tests.c ported_wasm/suite.c ported_wasm/sync_tests.c ) -set_target_properties(test_${LIBRARY_NAME} PROPERTIES LINKER_LANGUAGE C) +set_target_properties(${LIBRARY_NAME}_test PROPERTIES LINKER_LANGUAGE C) -# \note An imported library's INTERFACE_INCLUDE_DIRECTORIES property can't -# contain a non-existent path so its build-time include directory -# must be specified for all of its dependent targets instead. 
-target_include_directories( - test_${LIBRARY_NAME} - PRIVATE "$" -) +if(WIN32) + set(CMOCKA "cmocka::cmocka") +else() + set(CMOCKA "cmocka") +endif() -target_link_libraries(test_${LIBRARY_NAME} PRIVATE cmocka ${LIBRARY_NAME}) +target_link_libraries(${LIBRARY_NAME}_test PRIVATE ${CMOCKA} ${LIBRARY_NAME}) -add_dependencies(test_${LIBRARY_NAME} ${LIBRARY_NAME}_artifacts) +add_dependencies(${LIBRARY_NAME}_test ${BINDINGS_NAME}_artifacts) if(BUILD_SHARED_LIBS AND WIN32) add_custom_command( - TARGET test_${LIBRARY_NAME} + TARGET ${LIBRARY_NAME}_test POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy_if_different - ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX} - ${CMAKE_CURRENT_BINARY_DIR} - COMMENT "Copying the DLL built by Cargo into the test directory..." + COMMAND ${CMAKE_COMMAND} -E copy_if_different $ $ + COMMENT "Copying the DLL into the tests directory..." VERBATIM ) endif() -add_test(NAME test_${LIBRARY_NAME} COMMAND test_${LIBRARY_NAME}) +add_test(NAME ${LIBRARY_NAME}_test COMMAND ${LIBRARY_NAME}_test) add_custom_command( - TARGET test_${LIBRARY_NAME} + TARGET ${LIBRARY_NAME}_test POST_BUILD COMMAND ${CMAKE_CTEST_COMMAND} --config $ --output-on-failure diff --git a/rust/automerge-c/test/actor_id_tests.c b/rust/automerge-c/test/actor_id_tests.c index c98f2554..918d6213 100644 --- a/rust/automerge-c/test/actor_id_tests.c +++ b/rust/automerge-c/test/actor_id_tests.c @@ -14,99 +14,126 @@ #include "cmocka_utils.h" #include "str_utils.h" +/** + * \brief State for a group of cmocka test cases. + */ typedef struct { + /** An actor ID as an array of bytes. */ uint8_t* src; - AMbyteSpan str; + /** The count of bytes in \p src. */ size_t count; -} GroupState; + /** A stack of results. */ + AMstack* stack; + /** An actor ID as a hexadecimal string. 
*/ + AMbyteSpan str; +} DocState; static int group_setup(void** state) { - GroupState* group_state = test_calloc(1, sizeof(GroupState)); - group_state->str.src = "000102030405060708090a0b0c0d0e0f"; - group_state->str.count = strlen(group_state->str.src); - group_state->count = group_state->str.count / 2; - group_state->src = test_malloc(group_state->count); - hex_to_bytes(group_state->str.src, group_state->src, group_state->count); - *state = group_state; + DocState* doc_state = test_calloc(1, sizeof(DocState)); + doc_state->str = AMstr("000102030405060708090a0b0c0d0e0f"); + doc_state->count = doc_state->str.count / 2; + doc_state->src = test_calloc(doc_state->count, sizeof(uint8_t)); + hex_to_bytes(doc_state->str.src, doc_state->src, doc_state->count); + *state = doc_state; return 0; } static int group_teardown(void** state) { - GroupState* group_state = *state; - test_free(group_state->src); - test_free(group_state); + DocState* doc_state = *state; + test_free(doc_state->src); + AMstackFree(&doc_state->stack); + test_free(doc_state); return 0; } -static void test_AMactorIdInit() { +static void test_AMactorIdFromBytes(void** state) { + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->stack; + /* Non-empty string. */ + AMresult* result = AMstackResult(stack_ptr, AMactorIdFromBytes(doc_state->src, doc_state->count), NULL, NULL); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg_view("%s", AMresultError(result)); + } + assert_int_equal(AMresultSize(result), 1); + AMitem* const item = AMresultItem(result); + assert_int_equal(AMitemValType(item), AM_VAL_TYPE_ACTOR_ID); + AMactorId const* actor_id; + assert_true(AMitemToActorId(item, &actor_id)); + AMbyteSpan const bytes = AMactorIdBytes(actor_id); + assert_int_equal(bytes.count, doc_state->count); + assert_memory_equal(bytes.src, doc_state->src, bytes.count); + /* Empty array. */ + /** \todo Find out if this is intentionally allowed. 
*/ + result = AMstackResult(stack_ptr, AMactorIdFromBytes(doc_state->src, 0), NULL, NULL); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg_view("%s", AMresultError(result)); + } + /* NULL array. */ + result = AMstackResult(stack_ptr, AMactorIdFromBytes(NULL, doc_state->count), NULL, NULL); + if (AMresultStatus(result) == AM_STATUS_OK) { + fail_msg("AMactorId from NULL."); + } +} + +static void test_AMactorIdFromStr(void** state) { + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->stack; + AMresult* result = AMstackResult(stack_ptr, AMactorIdFromStr(doc_state->str), NULL, NULL); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg_view("%s", AMresultError(result)); + } + assert_int_equal(AMresultSize(result), 1); + AMitem* const item = AMresultItem(result); + assert_int_equal(AMitemValType(item), AM_VAL_TYPE_ACTOR_ID); + /* The hexadecimal string should've been decoded as identical bytes. */ + AMactorId const* actor_id; + assert_true(AMitemToActorId(item, &actor_id)); + AMbyteSpan const bytes = AMactorIdBytes(actor_id); + assert_int_equal(bytes.count, doc_state->count); + assert_memory_equal(bytes.src, doc_state->src, bytes.count); + /* The bytes should've been encoded as an identical hexadecimal string. 
*/ + assert_true(AMitemToActorId(item, &actor_id)); + AMbyteSpan const str = AMactorIdStr(actor_id); + assert_int_equal(str.count, doc_state->str.count); + assert_memory_equal(str.src, doc_state->str.src, str.count); +} + +static void test_AMactorIdInit(void** state) { + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->stack; AMresult* prior_result = NULL; AMbyteSpan prior_bytes = {NULL, 0}; AMbyteSpan prior_str = {NULL, 0}; - AMresult* result = NULL; for (size_t i = 0; i != 11; ++i) { - result = AMactorIdInit(); + AMresult* result = AMstackResult(stack_ptr, AMactorIdInit(), NULL, NULL); if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg_view("%s", AMerrorMessage(result)); + fail_msg_view("%s", AMresultError(result)); } assert_int_equal(AMresultSize(result), 1); - AMvalue const value = AMresultValue(result); - assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); - AMbyteSpan const bytes = AMactorIdBytes(value.actor_id); - AMbyteSpan const str = AMactorIdStr(value.actor_id); + AMitem* const item = AMresultItem(result); + assert_int_equal(AMitemValType(item), AM_VAL_TYPE_ACTOR_ID); + AMactorId const* actor_id; + assert_true(AMitemToActorId(item, &actor_id)); + AMbyteSpan const bytes = AMactorIdBytes(actor_id); + assert_true(AMitemToActorId(item, &actor_id)); + AMbyteSpan const str = AMactorIdStr(actor_id); if (prior_result) { size_t const max_byte_count = fmax(bytes.count, prior_bytes.count); assert_memory_not_equal(bytes.src, prior_bytes.src, max_byte_count); size_t const max_char_count = fmax(str.count, prior_str.count); assert_memory_not_equal(str.src, prior_str.src, max_char_count); - AMfree(prior_result); } prior_result = result; prior_bytes = bytes; prior_str = str; } - AMfree(result); -} - -static void test_AMactorIdInitBytes(void **state) { - GroupState* group_state = *state; - AMresult* const result = AMactorIdInitBytes(group_state->src, group_state->count); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg_view("%s", 
AMerrorMessage(result)); - } - assert_int_equal(AMresultSize(result), 1); - AMvalue const value = AMresultValue(result); - assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); - AMbyteSpan const bytes = AMactorIdBytes(value.actor_id); - assert_int_equal(bytes.count, group_state->count); - assert_memory_equal(bytes.src, group_state->src, bytes.count); - AMfree(result); -} - -static void test_AMactorIdInitStr(void **state) { - GroupState* group_state = *state; - AMresult* const result = AMactorIdInitStr(group_state->str); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg_view("%s", AMerrorMessage(result)); - } - assert_int_equal(AMresultSize(result), 1); - AMvalue const value = AMresultValue(result); - assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); - /* The hexadecimal string should've been decoded as identical bytes. */ - AMbyteSpan const bytes = AMactorIdBytes(value.actor_id); - assert_int_equal(bytes.count, group_state->count); - assert_memory_equal(bytes.src, group_state->src, bytes.count); - /* The bytes should've been encoded as an identical hexadecimal string. 
*/ - AMbyteSpan const str = AMactorIdStr(value.actor_id); - assert_int_equal(str.count, group_state->str.count); - assert_memory_equal(str.src, group_state->str.src, str.count); - AMfree(result); } int run_actor_id_tests(void) { const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMactorIdFromBytes), + cmocka_unit_test(test_AMactorIdFromStr), cmocka_unit_test(test_AMactorIdInit), - cmocka_unit_test(test_AMactorIdInitBytes), - cmocka_unit_test(test_AMactorIdInitStr), }; return cmocka_run_group_tests(tests, group_setup, group_teardown); diff --git a/rust/automerge-c/test/base_state.c b/rust/automerge-c/test/base_state.c new file mode 100644 index 00000000..53325a99 --- /dev/null +++ b/rust/automerge-c/test/base_state.c @@ -0,0 +1,17 @@ +#include + +/* local */ +#include "base_state.h" + +int setup_base(void** state) { + BaseState* base_state = calloc(1, sizeof(BaseState)); + *state = base_state; + return 0; +} + +int teardown_base(void** state) { + BaseState* base_state = *state; + AMstackFree(&base_state->stack); + free(base_state); + return 0; +} diff --git a/rust/automerge-c/test/base_state.h b/rust/automerge-c/test/base_state.h new file mode 100644 index 00000000..3c4ff01b --- /dev/null +++ b/rust/automerge-c/test/base_state.h @@ -0,0 +1,39 @@ +#ifndef TESTS_BASE_STATE_H +#define TESTS_BASE_STATE_H + +#include + +/* local */ +#include +#include + +/** + * \struct BaseState + * \brief The shared state for one or more cmocka test cases. + */ +typedef struct { + /** A stack of results. */ + AMstack* stack; +} BaseState; + +/** + * \memberof BaseState + * \brief Sets up the shared state for one or more cmocka test cases. + * + * \param[in,out] state A pointer to a pointer to a `BaseState` struct. + * \pre \p state `!= NULL`. + * \warning The `BaseState` struct returned through \p state must be + * passed to `teardown_base()` in order to avoid a memory leak. 
+ */ +int setup_base(void** state); + +/** + * \memberof BaseState + * \brief Tears down the shared state for one or more cmocka test cases. + * + * \param[in] state A pointer to a pointer to a `BaseState` struct. + * \pre \p state `!= NULL`. + */ +int teardown_base(void** state); + +#endif /* TESTS_BASE_STATE_H */ diff --git a/rust/automerge-c/test/byte_span_tests.c b/rust/automerge-c/test/byte_span_tests.c new file mode 100644 index 00000000..43856f3b --- /dev/null +++ b/rust/automerge-c/test/byte_span_tests.c @@ -0,0 +1,118 @@ +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include +#include + +static void test_AMbytes(void** state) { + static char const DATA[] = {0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf}; + + AMbyteSpan bytes = AMbytes(DATA, sizeof(DATA)); + assert_int_equal(bytes.count, sizeof(DATA)); + assert_memory_equal(bytes.src, DATA, bytes.count); + assert_ptr_equal(bytes.src, DATA); + /* Empty view */ + bytes = AMbytes(DATA, 0); + assert_int_equal(bytes.count, 0); + assert_ptr_equal(bytes.src, DATA); + /* Invalid array */ + bytes = AMbytes(NULL, SIZE_MAX); + assert_int_not_equal(bytes.count, SIZE_MAX); + assert_int_equal(bytes.count, 0); + assert_ptr_equal(bytes.src, NULL); +} + +static void test_AMstr(void** state) { + AMbyteSpan str = AMstr("abcdefghijkl"); + assert_int_equal(str.count, strlen("abcdefghijkl")); + assert_memory_equal(str.src, "abcdefghijkl", str.count); + /* Empty string */ + static char const* const EMPTY = ""; + + str = AMstr(EMPTY); + assert_int_equal(str.count, 0); + assert_ptr_equal(str.src, EMPTY); + /* Invalid string */ + str = AMstr(NULL); + assert_int_equal(str.count, 0); + assert_ptr_equal(str.src, NULL); +} + +static void test_AMstrCmp(void** state) { + /* Length ordering */ + assert_int_equal(AMstrCmp(AMstr("abcdef"), AMstr("abcdefghijkl")), -1); + assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr("abcdefghijkl")), 0); + 
assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr("abcdef")), 1); + /* Lexicographical ordering */ + assert_int_equal(AMstrCmp(AMstr("abcdef"), AMstr("ghijkl")), -1); + assert_int_equal(AMstrCmp(AMstr("ghijkl"), AMstr("abcdef")), 1); + /* Case ordering */ + assert_int_equal(AMstrCmp(AMstr("ABCDEFGHIJKL"), AMstr("abcdefghijkl")), -1); + assert_int_equal(AMstrCmp(AMstr("ABCDEFGHIJKL"), AMstr("ABCDEFGHIJKL")), 0); + assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr("ABCDEFGHIJKL")), 1); + assert_int_equal(AMstrCmp(AMstr("ABCDEFGHIJKL"), AMstr("abcdef")), -1); + assert_int_equal(AMstrCmp(AMstr("abcdef"), AMstr("ABCDEFGHIJKL")), 1); + assert_int_equal(AMstrCmp(AMstr("GHIJKL"), AMstr("abcdef")), -1); + assert_int_equal(AMstrCmp(AMstr("abcdef"), AMstr("GHIJKL")), 1); + /* NUL character inclusion */ + static char const SRC[] = {'a', 'b', 'c', 'd', 'e', 'f', '\0', 'g', 'h', 'i', 'j', 'k', 'l'}; + static AMbyteSpan const NUL_STR = {.src = SRC, .count = 13}; + + assert_int_equal(AMstrCmp(AMstr("abcdef"), NUL_STR), -1); + assert_int_equal(AMstrCmp(NUL_STR, NUL_STR), 0); + assert_int_equal(AMstrCmp(NUL_STR, AMstr("abcdef")), 1); + /* Empty string */ + assert_int_equal(AMstrCmp(AMstr(""), AMstr("abcdefghijkl")), -1); + assert_int_equal(AMstrCmp(AMstr(""), AMstr("")), 0); + assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr("")), 1); + /* Invalid string */ + assert_int_equal(AMstrCmp(AMstr(NULL), AMstr("abcdefghijkl")), -1); + assert_int_equal(AMstrCmp(AMstr(NULL), AMstr(NULL)), 0); + assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr(NULL)), 1); +} + +static void test_AMstrdup(void** state) { + static char const SRC[] = {'a', 'b', 'c', '\0', 'd', 'e', 'f', '\0', 'g', 'h', 'i', '\0', 'j', 'k', 'l'}; + static AMbyteSpan const NUL_STR = {.src = SRC, .count = 15}; + + /* Default substitution ("\\0") for NUL */ + char* dup = AMstrdup(NUL_STR, NULL); + assert_int_equal(strlen(dup), 18); + assert_string_equal(dup, "abc\\0def\\0ghi\\0jkl"); + free(dup); + /* Arbitrary 
substitution for NUL */ + dup = AMstrdup(NUL_STR, ":-O"); + assert_int_equal(strlen(dup), 21); + assert_string_equal(dup, "abc:-Odef:-Oghi:-Ojkl"); + free(dup); + /* Empty substitution for NUL */ + dup = AMstrdup(NUL_STR, ""); + assert_int_equal(strlen(dup), 12); + assert_string_equal(dup, "abcdefghijkl"); + free(dup); + /* Empty string */ + dup = AMstrdup(AMstr(""), NULL); + assert_int_equal(strlen(dup), 0); + assert_string_equal(dup, ""); + free(dup); + /* Invalid string */ + assert_null(AMstrdup(AMstr(NULL), NULL)); +} + +int run_byte_span_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMbytes), + cmocka_unit_test(test_AMstr), + cmocka_unit_test(test_AMstrCmp), + cmocka_unit_test(test_AMstrdup), + }; + + return cmocka_run_group_tests(tests, NULL, NULL); +} diff --git a/rust/automerge-c/test/cmocka_utils.c b/rust/automerge-c/test/cmocka_utils.c new file mode 100644 index 00000000..37c57fb1 --- /dev/null +++ b/rust/automerge-c/test/cmocka_utils.c @@ -0,0 +1,88 @@ +#include +#include +#include +#include + +/* third-party */ +#include +#include +#include +#include + +/* local */ +#include "cmocka_utils.h" + +/** + * \brief Assert that the given expression is true and report failure in terms + * of a line number within a file. + * + * \param[in] c An expression. + * \param[in] file A file's full path string. + * \param[in] line A line number. + */ +#define assert_true_where(c, file, line) _assert_true(cast_ptr_to_largest_integral_type(c), #c, file, line) + +/** + * \brief Assert that the given pointer is non-NULL and report failure in terms + * of a line number within a file. + * + * \param[in] c An expression. + * \param[in] file A file's full path string. + * \param[in] line A line number. + */ +#define assert_non_null_where(c, file, line) assert_true_where(c, file, line) + +/** + * \brief Forces the test to fail immediately and quit, printing the reason in + * terms of a line number within a file. 
+ * + * \param[in] msg A message string into which \p str is interpolated. + * \param[in] str An owned string. + * \param[in] file A file's full path string. + * \param[in] line A line number. + */ +#define fail_msg_where(msg, str, file, line) \ + do { \ + print_error("ERROR: " msg "\n", str); \ + _fail(file, line); \ + } while (0) + +/** + * \brief Forces the test to fail immediately and quit, printing the reason in + * terms of a line number within a file. + * + * \param[in] msg A message string into which \p view.src is interpolated. + * \param[in] view A UTF-8 string view as an `AMbyteSpan` struct. + * \param[in] file A file's full path string. + * \param[in] line A line number. + */ +#define fail_msg_view_where(msg, view, file, line) \ + do { \ + char* const str = AMstrdup(view, NULL); \ + print_error("ERROR: " msg "\n", str); \ + free(str); \ + _fail(file, line); \ + } while (0) + +bool cmocka_cb(AMstack** stack, void* data) { + assert_non_null(data); + AMstackCallbackData* const sc_data = (AMstackCallbackData*)data; + assert_non_null_where(stack, sc_data->file, sc_data->line); + assert_non_null_where(*stack, sc_data->file, sc_data->line); + assert_non_null_where((*stack)->result, sc_data->file, sc_data->line); + if (AMresultStatus((*stack)->result) != AM_STATUS_OK) { + fail_msg_view_where("%s", AMresultError((*stack)->result), sc_data->file, sc_data->line); + return false; + } + /* Test that the types of all item values are members of the mask. 
*/ + AMitems items = AMresultItems((*stack)->result); + AMitem* item = NULL; + while ((item = AMitemsNext(&items, 1)) != NULL) { + AMvalType const tag = AMitemValType(item); + if (!(tag & sc_data->bitmask)) { + fail_msg_where("Unexpected value type `%s`.", AMvalTypeToString(tag), sc_data->file, sc_data->line); + return false; + } + } + return true; +} diff --git a/rust/automerge-c/test/cmocka_utils.h b/rust/automerge-c/test/cmocka_utils.h index 1b488362..b6611bcc 100644 --- a/rust/automerge-c/test/cmocka_utils.h +++ b/rust/automerge-c/test/cmocka_utils.h @@ -1,22 +1,42 @@ -#ifndef CMOCKA_UTILS_H -#define CMOCKA_UTILS_H +#ifndef TESTS_CMOCKA_UTILS_H +#define TESTS_CMOCKA_UTILS_H +#include #include /* third-party */ +#include #include +/* local */ +#include "base_state.h" + /** * \brief Forces the test to fail immediately and quit, printing the reason. * - * \param[in] view A string view as an `AMbyteSpan` struct. + * \param[in] msg A message string into which \p view.src is interpolated. + * \param[in] view A UTF-8 string view as an `AMbyteSpan` struct. */ -#define fail_msg_view(msg, view) do { \ - char* const c_str = test_calloc(1, view.count + 1); \ - strncpy(c_str, view.src, view.count); \ - print_error(msg, c_str); \ - test_free(c_str); \ - fail(); \ -} while (0) +#define fail_msg_view(msg, view) \ + do { \ + char* const c_str = AMstrdup(view, NULL); \ + print_error("ERROR: " msg "\n", c_str); \ + free(c_str); \ + fail(); \ + } while (0) -#endif /* CMOCKA_UTILS_H */ +/** + * \brief Validates the top result in a stack based upon the parameters + * specified within the given data structure and reports violations + * using cmocka assertions. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] data A pointer to an owned `AMpushData` struct. + * \return `true` if the top `AMresult` struct in \p stack is valid, `false` + * otherwise. + * \pre \p stack `!= NULL`. + * \pre \p data `!= NULL`. 
+ */ +bool cmocka_cb(AMstack** stack, void* data); + +#endif /* TESTS_CMOCKA_UTILS_H */ diff --git a/rust/automerge-c/test/doc_state.c b/rust/automerge-c/test/doc_state.c new file mode 100644 index 00000000..3cbece50 --- /dev/null +++ b/rust/automerge-c/test/doc_state.c @@ -0,0 +1,27 @@ +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include +#include "cmocka_utils.h" +#include "doc_state.h" + +int setup_doc(void** state) { + DocState* doc_state = test_calloc(1, sizeof(DocState)); + setup_base((void**)&doc_state->base_state); + AMitemToDoc(AMstackItem(&doc_state->base_state->stack, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), + &doc_state->doc); + *state = doc_state; + return 0; +} + +int teardown_doc(void** state) { + DocState* doc_state = *state; + teardown_base((void**)&doc_state->base_state); + test_free(doc_state); + return 0; +} diff --git a/rust/automerge-c/test/doc_state.h b/rust/automerge-c/test/doc_state.h new file mode 100644 index 00000000..525a49fa --- /dev/null +++ b/rust/automerge-c/test/doc_state.h @@ -0,0 +1,17 @@ +#ifndef TESTS_DOC_STATE_H +#define TESTS_DOC_STATE_H + +/* local */ +#include +#include "base_state.h" + +typedef struct { + BaseState* base_state; + AMdoc* doc; +} DocState; + +int setup_doc(void** state); + +int teardown_doc(void** state); + +#endif /* TESTS_DOC_STATE_H */ diff --git a/rust/automerge-c/test/doc_tests.c b/rust/automerge-c/test/doc_tests.c index 217a4862..c1d21928 100644 --- a/rust/automerge-c/test/doc_tests.c +++ b/rust/automerge-c/test/doc_tests.c @@ -9,12 +9,14 @@ /* local */ #include -#include "group_state.h" -#include "stack_utils.h" +#include +#include "base_state.h" +#include "cmocka_utils.h" +#include "doc_state.h" #include "str_utils.h" typedef struct { - GroupState* group_state; + DocState* doc_state; AMbyteSpan actor_id_str; uint8_t* actor_id_bytes; size_t actor_id_size; @@ -22,7 +24,7 @@ typedef struct { static int setup(void** state) { TestState* test_state = 
test_calloc(1, sizeof(TestState)); - group_setup((void**)&test_state->group_state); + setup_doc((void**)&test_state->doc_state); test_state->actor_id_str.src = "000102030405060708090a0b0c0d0e0f"; test_state->actor_id_str.count = strlen(test_state->actor_id_str.src); test_state->actor_id_size = test_state->actor_id_str.count / 2; @@ -34,204 +36,195 @@ static int setup(void** state) { static int teardown(void** state) { TestState* test_state = *state; - group_teardown((void**)&test_state->group_state); + teardown_doc((void**)&test_state->doc_state); test_free(test_state->actor_id_bytes); test_free(test_state); return 0; } -static void test_AMkeys_empty() { - AMresultStack* stack = NULL; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMstrs forward = AMpush(&stack, - AMkeys(doc, AM_ROOT, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_int_equal(AMstrsSize(&forward), 0); - AMstrs reverse = AMstrsReversed(&forward); - assert_int_equal(AMstrsSize(&reverse), 0); - assert_null(AMstrsNext(&forward, 1).src); - assert_null(AMstrsPrev(&forward, 1).src); - assert_null(AMstrsNext(&reverse, 1).src); - assert_null(AMstrsPrev(&reverse, 1).src); - AMfreeStack(&stack); -} - -static void test_AMkeys_list() { - AMresultStack* stack = NULL; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMobjId const* const list = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMlistPutInt(doc, list, 0, true, 0)); - AMfree(AMlistPutInt(doc, list, 1, true, 0)); - AMfree(AMlistPutInt(doc, list, 2, true, 0)); - AMstrs forward = AMpush(&stack, - AMkeys(doc, list, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_int_equal(AMstrsSize(&forward), 3); - AMstrs reverse = AMstrsReversed(&forward); - assert_int_equal(AMstrsSize(&reverse), 3); - /* Forward iterator forward. 
*/ - AMbyteSpan str = AMstrsNext(&forward, 1); - assert_ptr_equal(strstr(str.src, "2@"), str.src); - str = AMstrsNext(&forward, 1); - assert_ptr_equal(strstr(str.src, "3@"), str.src); - str = AMstrsNext(&forward, 1); - assert_ptr_equal(strstr(str.src, "4@"), str.src); - assert_null(AMstrsNext(&forward, 1).src); - // /* Forward iterator reverse. */ - str = AMstrsPrev(&forward, 1); - assert_ptr_equal(strstr(str.src, "4@"), str.src); - str = AMstrsPrev(&forward, 1); - assert_ptr_equal(strstr(str.src, "3@"), str.src); - str = AMstrsPrev(&forward, 1); - assert_ptr_equal(strstr(str.src, "2@"), str.src); - assert_null(AMstrsPrev(&forward, 1).src); - /* Reverse iterator forward. */ - str = AMstrsNext(&reverse, 1); - assert_ptr_equal(strstr(str.src, "4@"), str.src); - str = AMstrsNext(&reverse, 1); - assert_ptr_equal(strstr(str.src, "3@"), str.src); - str = AMstrsNext(&reverse, 1); - assert_ptr_equal(strstr(str.src, "2@"), str.src); - assert_null(AMstrsNext(&reverse, 1).src); - /* Reverse iterator reverse. */ - str = AMstrsPrev(&reverse, 1); - assert_ptr_equal(strstr(str.src, "2@"), str.src); - str = AMstrsPrev(&reverse, 1); - assert_ptr_equal(strstr(str.src, "3@"), str.src); - str = AMstrsPrev(&reverse, 1); - assert_ptr_equal(strstr(str.src, "4@"), str.src); - assert_null(AMstrsPrev(&reverse, 1).src); - AMfreeStack(&stack); -} - -static void test_AMkeys_map() { - AMresultStack* stack = NULL; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("one"), 1)); - AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("two"), 2)); - AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("three"), 3)); - AMstrs forward = AMpush(&stack, - AMkeys(doc, AM_ROOT, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_int_equal(AMstrsSize(&forward), 3); - AMstrs reverse = AMstrsReversed(&forward); - assert_int_equal(AMstrsSize(&reverse), 3); - /* Forward iterator forward. 
*/ - AMbyteSpan str = AMstrsNext(&forward, 1); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "one", str.count); - str = AMstrsNext(&forward, 1); - assert_int_equal(str.count, 5); - assert_memory_equal(str.src, "three", str.count); - str = AMstrsNext(&forward, 1); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "two", str.count); - assert_null(AMstrsNext(&forward, 1).src); - /* Forward iterator reverse. */ - str = AMstrsPrev(&forward, 1); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "two", str.count); - str = AMstrsPrev(&forward, 1); - assert_int_equal(str.count, 5); - assert_memory_equal(str.src, "three", str.count); - str = AMstrsPrev(&forward, 1); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "one", str.count); - assert_null(AMstrsPrev(&forward, 1).src); - /* Reverse iterator forward. */ - str = AMstrsNext(&reverse, 1); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "two", str.count); - str = AMstrsNext(&reverse, 1); - assert_int_equal(str.count, 5); - assert_memory_equal(str.src, "three", str.count); - str = AMstrsNext(&reverse, 1); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "one", str.count); - assert_null(AMstrsNext(&reverse, 1).src); - /* Reverse iterator reverse. 
*/ - str = AMstrsPrev(&reverse, 1); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "one", str.count); - str = AMstrsPrev(&reverse, 1); - assert_int_equal(str.count, 5); - assert_memory_equal(str.src, "three", str.count); - str = AMstrsPrev(&reverse, 1); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "two", str.count); - assert_null(AMstrsPrev(&reverse, 1).src); - AMfreeStack(&stack); -} - -static void test_AMputActor_bytes(void **state) { +static void test_AMkeys_empty(void** state) { TestState* test_state = *state; - AMactorId const* actor_id = AMpush(&test_state->group_state->stack, - AMactorIdInitBytes( - test_state->actor_id_bytes, - test_state->actor_id_size), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMfree(AMsetActorId(test_state->group_state->doc, actor_id)); - actor_id = AMpush(&test_state->group_state->stack, - AMgetActorId(test_state->group_state->doc), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; + AMstack** stack_ptr = &test_state->doc_state->base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMitems forward = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + assert_int_equal(AMitemsSize(&forward), 0); + AMitems reverse = AMitemsReversed(&forward); + assert_int_equal(AMitemsSize(&reverse), 0); + assert_null(AMitemsNext(&forward, 1)); + assert_null(AMitemsPrev(&forward, 1)); + assert_null(AMitemsNext(&reverse, 1)); + assert_null(AMitemsPrev(&reverse, 1)); +} + +static void test_AMkeys_list(void** state) { + TestState* test_state = *state; + AMstack** stack_ptr = &test_state->doc_state->base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, + 
AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMlistPutInt(doc, list, 0, true, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutInt(doc, list, 1, true, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutInt(doc, list, 2, true, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMitems forward = AMstackItems(stack_ptr, AMkeys(doc, list, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&forward), 3); + AMitems reverse = AMitemsReversed(&forward); + assert_int_equal(AMitemsSize(&reverse), 3); + /* Forward iterator forward. */ + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); + assert_ptr_equal(strstr(str.src, "2@"), str.src); + assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); + assert_ptr_equal(strstr(str.src, "3@"), str.src); + assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); + assert_ptr_equal(strstr(str.src, "4@"), str.src); + assert_null(AMitemsNext(&forward, 1)); + // /* Forward iterator reverse. */ + assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); + assert_ptr_equal(strstr(str.src, "4@"), str.src); + assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); + assert_ptr_equal(strstr(str.src, "3@"), str.src); + assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); + assert_ptr_equal(strstr(str.src, "2@"), str.src); + assert_null(AMitemsPrev(&forward, 1)); + /* Reverse iterator forward. */ + assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); + assert_ptr_equal(strstr(str.src, "4@"), str.src); + assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); + assert_ptr_equal(strstr(str.src, "3@"), str.src); + assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); + assert_ptr_equal(strstr(str.src, "2@"), str.src); + assert_null(AMitemsNext(&reverse, 1)); + /* Reverse iterator reverse. 
*/ + assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); + assert_ptr_equal(strstr(str.src, "2@"), str.src); + assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); + assert_ptr_equal(strstr(str.src, "3@"), str.src); + assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); + assert_ptr_equal(strstr(str.src, "4@"), str.src); + assert_null(AMitemsPrev(&reverse, 1)); +} + +static void test_AMkeys_map(void** state) { + TestState* test_state = *state; + AMstack** stack_ptr = &test_state->doc_state->base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("one"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("two"), 2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("three"), 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMitems forward = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&forward), 3); + AMitems reverse = AMitemsReversed(&forward); + assert_int_equal(AMitemsSize(&reverse), 3); + /* Forward iterator forward. */ + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "one", str.count); + assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); + assert_int_equal(str.count, 5); + assert_memory_equal(str.src, "three", str.count); + assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "two", str.count); + assert_null(AMitemsNext(&forward, 1)); + /* Forward iterator reverse. 
*/ + assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "two", str.count); + assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); + assert_int_equal(str.count, 5); + assert_memory_equal(str.src, "three", str.count); + assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "one", str.count); + assert_null(AMitemsPrev(&forward, 1)); + /* Reverse iterator forward. */ + assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "two", str.count); + assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); + assert_int_equal(str.count, 5); + assert_memory_equal(str.src, "three", str.count); + assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "one", str.count); + assert_null(AMitemsNext(&reverse, 1)); + /* Reverse iterator reverse. 
*/ + assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "one", str.count); + assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); + assert_int_equal(str.count, 5); + assert_memory_equal(str.src, "three", str.count); + assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "two", str.count); + assert_null(AMitemsPrev(&reverse, 1)); +} + +static void test_AMputActor_bytes(void** state) { + TestState* test_state = *state; + AMstack** stack_ptr = &test_state->doc_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromBytes(test_state->actor_id_bytes, test_state->actor_id_size), cmocka_cb, + AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(test_state->doc_state->doc, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMgetActorId(test_state->doc_state->doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); AMbyteSpan const bytes = AMactorIdBytes(actor_id); assert_int_equal(bytes.count, test_state->actor_id_size); assert_memory_equal(bytes.src, test_state->actor_id_bytes, bytes.count); } -static void test_AMputActor_str(void **state) { +static void test_AMputActor_str(void** state) { TestState* test_state = *state; - AMactorId const* actor_id = AMpush(&test_state->group_state->stack, - AMactorIdInitStr(test_state->actor_id_str), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMfree(AMsetActorId(test_state->group_state->doc, actor_id)); - actor_id = AMpush(&test_state->group_state->stack, - AMgetActorId(test_state->group_state->doc), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; + AMstack** stack_ptr = &test_state->doc_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, 
AMactorIdFromStr(test_state->actor_id_str), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(test_state->doc_state->doc, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMgetActorId(test_state->doc_state->doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); AMbyteSpan const str = AMactorIdStr(actor_id); assert_int_equal(str.count, test_state->actor_id_str.count); assert_memory_equal(str.src, test_state->actor_id_str.src, str.count); } -static void test_AMspliceText() { - AMresultStack* stack = NULL; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMobjId const* const text = AMpush(&stack, - AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMspliceText(doc, text, 0, 0, AMstr("one + "))); - AMfree(AMspliceText(doc, text, 4, 2, AMstr("two = "))); - AMfree(AMspliceText(doc, text, 8, 2, AMstr("three"))); - AMbyteSpan const str = AMpush(&stack, - AMtext(doc, text, NULL), - AM_VALUE_STR, - cmocka_cb).str; - static char const* const STR_VALUE = "one two three"; - assert_int_equal(str.count, strlen(STR_VALUE)); - assert_memory_equal(str.src, STR_VALUE, str.count); - AMfreeStack(&stack); +static void test_AMspliceText(void** state) { + TestState* test_state = *state; + AMstack** stack_ptr = &test_state->doc_state->base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMobjId const* const text = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMspliceText(doc, text, 0, 0, AMstr("one + ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMspliceText(doc, text, 4, 2, AMstr("two = ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, 
AMspliceText(doc, text, 8, 2, AMstr("three")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMbyteSpan str; + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, text, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, strlen("one two three")); + assert_memory_equal(str.src, "one two three", str.count); } int run_doc_tests(void) { const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMkeys_empty), - cmocka_unit_test(test_AMkeys_list), - cmocka_unit_test(test_AMkeys_map), + cmocka_unit_test_setup_teardown(test_AMkeys_empty, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMkeys_list, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMkeys_map, setup, teardown), cmocka_unit_test_setup_teardown(test_AMputActor_bytes, setup, teardown), cmocka_unit_test_setup_teardown(test_AMputActor_str, setup, teardown), - cmocka_unit_test(test_AMspliceText), + cmocka_unit_test_setup_teardown(test_AMspliceText, setup, teardown), }; return cmocka_run_group_tests(tests, NULL, NULL); diff --git a/rust/automerge-c/test/enum_string_tests.c b/rust/automerge-c/test/enum_string_tests.c new file mode 100644 index 00000000..11131e43 --- /dev/null +++ b/rust/automerge-c/test/enum_string_tests.c @@ -0,0 +1,148 @@ +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include +#include + +#define assert_to_string(function, tag) assert_string_equal(function(tag), #tag) + +#define assert_from_string(function, type, tag) \ + do { \ + type out; \ + assert_true(function(&out, #tag)); \ + assert_int_equal(out, tag); \ + } while (0) + +static void test_AMidxTypeToString(void** state) { + assert_to_string(AMidxTypeToString, AM_IDX_TYPE_DEFAULT); + assert_to_string(AMidxTypeToString, AM_IDX_TYPE_KEY); + assert_to_string(AMidxTypeToString, AM_IDX_TYPE_POS); + /* Zero tag */ + assert_string_equal(AMidxTypeToString(0), "AM_IDX_TYPE_DEFAULT"); + /* Invalid tag */ + 
assert_string_equal(AMidxTypeToString(-1), "???"); +} + +static void test_AMidxTypeFromString(void** state) { + assert_from_string(AMidxTypeFromString, AMidxType, AM_IDX_TYPE_DEFAULT); + assert_from_string(AMidxTypeFromString, AMidxType, AM_IDX_TYPE_KEY); + assert_from_string(AMidxTypeFromString, AMidxType, AM_IDX_TYPE_POS); + /* Invalid tag */ + AMidxType out = -1; + assert_false(AMidxTypeFromString(&out, "???")); + assert_int_equal(out, (AMidxType)-1); +} + +static void test_AMobjTypeToString(void** state) { + assert_to_string(AMobjTypeToString, AM_OBJ_TYPE_DEFAULT); + assert_to_string(AMobjTypeToString, AM_OBJ_TYPE_LIST); + assert_to_string(AMobjTypeToString, AM_OBJ_TYPE_MAP); + assert_to_string(AMobjTypeToString, AM_OBJ_TYPE_TEXT); + /* Zero tag */ + assert_string_equal(AMobjTypeToString(0), "AM_OBJ_TYPE_DEFAULT"); + /* Invalid tag */ + assert_string_equal(AMobjTypeToString(-1), "???"); +} + +static void test_AMobjTypeFromString(void** state) { + assert_from_string(AMobjTypeFromString, AMobjType, AM_OBJ_TYPE_DEFAULT); + assert_from_string(AMobjTypeFromString, AMobjType, AM_OBJ_TYPE_LIST); + assert_from_string(AMobjTypeFromString, AMobjType, AM_OBJ_TYPE_MAP); + assert_from_string(AMobjTypeFromString, AMobjType, AM_OBJ_TYPE_TEXT); + /* Invalid tag */ + AMobjType out = -1; + assert_false(AMobjTypeFromString(&out, "???")); + assert_int_equal(out, (AMobjType)-1); +} + +static void test_AMstatusToString(void** state) { + assert_to_string(AMstatusToString, AM_STATUS_ERROR); + assert_to_string(AMstatusToString, AM_STATUS_INVALID_RESULT); + assert_to_string(AMstatusToString, AM_STATUS_OK); + /* Zero tag */ + assert_string_equal(AMstatusToString(0), "AM_STATUS_OK"); + /* Invalid tag */ + assert_string_equal(AMstatusToString(-1), "???"); +} + +static void test_AMstatusFromString(void** state) { + assert_from_string(AMstatusFromString, AMstatus, AM_STATUS_ERROR); + assert_from_string(AMstatusFromString, AMstatus, AM_STATUS_INVALID_RESULT); + 
assert_from_string(AMstatusFromString, AMstatus, AM_STATUS_OK); + /* Invalid tag */ + AMstatus out = -1; + assert_false(AMstatusFromString(&out, "???")); + assert_int_equal(out, (AMstatus)-1); +} + +static void test_AMvalTypeToString(void** state) { + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_ACTOR_ID); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_BOOL); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_BYTES); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_CHANGE); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_CHANGE_HASH); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_COUNTER); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_DEFAULT); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_DOC); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_F64); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_INT); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_NULL); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_OBJ_TYPE); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_STR); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_SYNC_HAVE); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_SYNC_MESSAGE); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_SYNC_STATE); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_TIMESTAMP); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_UINT); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_UNKNOWN); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_VOID); + /* Zero tag */ + assert_string_equal(AMvalTypeToString(0), "AM_VAL_TYPE_DEFAULT"); + /* Invalid tag */ + assert_string_equal(AMvalTypeToString(-1), "???"); +} + +static void test_AMvalTypeFromString(void** state) { + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_ACTOR_ID); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_BOOL); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_BYTES); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_CHANGE); + assert_from_string(AMvalTypeFromString, 
AMvalType, AM_VAL_TYPE_CHANGE_HASH); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_COUNTER); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_DEFAULT); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_DOC); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_F64); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_INT); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_NULL); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_OBJ_TYPE); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_STR); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_SYNC_HAVE); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_SYNC_MESSAGE); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_SYNC_STATE); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_TIMESTAMP); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_UINT); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_UNKNOWN); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_VOID); + /* Invalid tag */ + AMvalType out = -1; + assert_false(AMvalTypeFromString(&out, "???")); + assert_int_equal(out, (AMvalType)-1); +} + +int run_enum_string_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMidxTypeToString), cmocka_unit_test(test_AMidxTypeFromString), + cmocka_unit_test(test_AMobjTypeToString), cmocka_unit_test(test_AMobjTypeFromString), + cmocka_unit_test(test_AMstatusToString), cmocka_unit_test(test_AMstatusFromString), + cmocka_unit_test(test_AMvalTypeToString), cmocka_unit_test(test_AMvalTypeFromString), + }; + + return cmocka_run_group_tests(tests, NULL, NULL); +} diff --git a/rust/automerge-c/test/group_state.c b/rust/automerge-c/test/group_state.c deleted file mode 100644 index 0ee14317..00000000 --- a/rust/automerge-c/test/group_state.c +++ /dev/null @@ -1,27 +0,0 @@ -#include 
-#include -#include - -/* third-party */ -#include - -/* local */ -#include "group_state.h" -#include "stack_utils.h" - -int group_setup(void** state) { - GroupState* group_state = test_calloc(1, sizeof(GroupState)); - group_state->doc = AMpush(&group_state->stack, - AMcreate(NULL), - AM_VALUE_DOC, - cmocka_cb).doc; - *state = group_state; - return 0; -} - -int group_teardown(void** state) { - GroupState* group_state = *state; - AMfreeStack(&group_state->stack); - test_free(group_state); - return 0; -} diff --git a/rust/automerge-c/test/group_state.h b/rust/automerge-c/test/group_state.h deleted file mode 100644 index a71d9dc9..00000000 --- a/rust/automerge-c/test/group_state.h +++ /dev/null @@ -1,16 +0,0 @@ -#ifndef GROUP_STATE_H -#define GROUP_STATE_H - -/* local */ -#include - -typedef struct { - AMresultStack* stack; - AMdoc* doc; -} GroupState; - -int group_setup(void** state); - -int group_teardown(void** state); - -#endif /* GROUP_STATE_H */ diff --git a/rust/automerge-c/test/item_tests.c b/rust/automerge-c/test/item_tests.c new file mode 100644 index 00000000..a30b0556 --- /dev/null +++ b/rust/automerge-c/test/item_tests.c @@ -0,0 +1,94 @@ +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include +#include +#include "cmocka_utils.h" +#include "doc_state.h" + +static void test_AMitemResult(void** state) { + enum { ITEM_COUNT = 1000 }; + + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->base_state->stack; + /* Append the strings to a list so that they'll be in numerical order. 
*/ + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + for (size_t pos = 0; pos != ITEM_COUNT; ++pos) { + size_t const count = snprintf(NULL, 0, "%zu", pos); + char* const src = test_calloc(count + 1, sizeof(char)); + assert_int_equal(sprintf(src, "%zu", pos), count); + AMstackItem(NULL, AMlistPutStr(doc_state->doc, list, pos, true, AMbytes(src, count)), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + test_free(src); + } + /* Get an item iterator. */ + AMitems items = AMstackItems(stack_ptr, AMlistRange(doc_state->doc, list, 0, SIZE_MAX, NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + /* Get the item iterator's result so that it can be freed later. */ + AMresult const* const items_result = (*stack_ptr)->result; + /* Iterate over all of the items and copy their pointers into an array. */ + AMitem* item_ptrs[ITEM_COUNT] = {NULL}; + AMitem* item = NULL; + for (size_t pos = 0; (item = AMitemsNext(&items, 1)) != NULL; ++pos) { + /* The item's reference count should be 1. */ + assert_int_equal(AMitemRefCount(item), 1); + if (pos & 1) { + /* Create a redundant result for an odd item. */ + AMitem* const new_item = AMstackItem(stack_ptr, AMitemResult(item), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + /* The item's old and new pointers will never match. */ + assert_ptr_not_equal(new_item, item); + /* The item's reference count will have been incremented. */ + assert_int_equal(AMitemRefCount(item), 2); + assert_int_equal(AMitemRefCount(new_item), 2); + /* The item's old and new indices should match. */ + assert_int_equal(AMitemIdxType(item), AMitemIdxType(new_item)); + assert_int_equal(AMitemIdxType(item), AM_IDX_TYPE_POS); + size_t pos, new_pos; + assert_true(AMitemPos(item, &pos)); + assert_true(AMitemPos(new_item, &new_pos)); + assert_int_equal(pos, new_pos); + /* The item's old and new object IDs should match. 
*/ + AMobjId const* const obj_id = AMitemObjId(item); + AMobjId const* const new_obj_id = AMitemObjId(new_item); + assert_true(AMobjIdEqual(obj_id, new_obj_id)); + /* The item's old and new value types should match. */ + assert_int_equal(AMitemValType(item), AMitemValType(new_item)); + /* The item's old and new string values should match. */ + AMbyteSpan str; + assert_true(AMitemToStr(item, &str)); + AMbyteSpan new_str; + assert_true(AMitemToStr(new_item, &new_str)); + assert_int_equal(str.count, new_str.count); + assert_memory_equal(str.src, new_str.src, new_str.count); + /* The item's old and new object IDs are one and the same. */ + assert_ptr_equal(obj_id, new_obj_id); + /* The item's old and new string values are one and the same. */ + assert_ptr_equal(str.src, new_str.src); + /* Save the item's new pointer. */ + item_ptrs[pos] = new_item; + } + } + /* Free the item iterator's result. */ + AMresultFree(AMstackPop(stack_ptr, items_result)); + /* An odd item's reference count should be 1 again. 
*/ + for (size_t pos = 1; pos < ITEM_COUNT; pos += 2) { + assert_int_equal(AMitemRefCount(item_ptrs[pos]), 1); + } +} + +int run_item_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMitemResult), + }; + + return cmocka_run_group_tests(tests, setup_doc, teardown_doc); +} diff --git a/rust/automerge-c/test/list_tests.c b/rust/automerge-c/test/list_tests.c index f9bbb340..723dd038 100644 --- a/rust/automerge-c/test/list_tests.c +++ b/rust/automerge-c/test/list_tests.c @@ -11,367 +11,417 @@ /* local */ #include +#include +#include "base_state.h" #include "cmocka_utils.h" -#include "group_state.h" +#include "doc_state.h" #include "macro_utils.h" -#include "stack_utils.h" static void test_AMlistIncrement(void** state) { - GroupState* group_state = *state; - AMobjId const* const list = AMpush( - &group_state->stack, - AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMlistPutCounter(group_state->doc, list, 0, true, 0)); - assert_int_equal(AMpush(&group_state->stack, - AMlistGet(group_state->doc, list, 0, NULL), - AM_VALUE_COUNTER, - cmocka_cb).counter, 0); - AMfree(AMpop(&group_state->stack)); - AMfree(AMlistIncrement(group_state->doc, list, 0, 3)); - assert_int_equal(AMpush(&group_state->stack, - AMlistGet(group_state->doc, list, 0, NULL), - AM_VALUE_COUNTER, - cmocka_cb).counter, 3); - AMfree(AMpop(&group_state->stack)); + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->base_state->stack; + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMlistPutCounter(doc_state->doc, list, 0, true, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + int64_t counter; + assert_true(AMitemToCounter( + AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)), + 
&counter)); + assert_int_equal(counter, 0); + AMresultFree(AMstackPop(stack_ptr, NULL)); + AMstackItem(NULL, AMlistIncrement(doc_state->doc, list, 0, 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + assert_true(AMitemToCounter( + AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)), + &counter)); + assert_int_equal(counter, 3); + AMresultFree(AMstackPop(stack_ptr, NULL)); } -#define test_AMlistPut(suffix, mode) test_AMlistPut ## suffix ## _ ## mode +#define test_AMlistPut(suffix, mode) test_AMlistPut##suffix##_##mode -#define static_void_test_AMlistPut(suffix, mode, member, scalar_value) \ -static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMobjId const* const list = AMpush( \ - &group_state->stack, \ - AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\ - AM_VALUE_OBJ_ID, \ - cmocka_cb).obj_id; \ - AMfree(AMlistPut ## suffix(group_state->doc, \ - list, \ - 0, \ - !strcmp(#mode, "insert"), \ - scalar_value)); \ - assert_true(AMpush( \ - &group_state->stack, \ - AMlistGet(group_state->doc, list, 0, NULL), \ - AMvalue_discriminant(#suffix), \ - cmocka_cb).member == scalar_value); \ - AMfree(AMpop(&group_state->stack)); \ -} +#define static_void_test_AMlistPut(suffix, mode, type, scalar_value) \ + static void test_AMlistPut##suffix##_##mode(void** state) { \ + DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMobjId const* const list = AMitemObjId( \ + AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + AMstackItem(NULL, AMlistPut##suffix(doc_state->doc, list, 0, !strcmp(#mode, "insert"), scalar_value), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); \ + type value; \ + assert_true(AMitemTo##suffix(AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, \ + 
AMexpect(suffix_to_val_type(#suffix))), \ + &value)); \ + assert_true(value == scalar_value); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } -#define test_AMlistPutBytes(mode) test_AMlistPutBytes ## _ ## mode +#define test_AMlistPutBytes(mode) test_AMlistPutBytes##_##mode -#define static_void_test_AMlistPutBytes(mode, bytes_value) \ -static void test_AMlistPutBytes_ ## mode(void **state) { \ - static size_t const BYTES_SIZE = sizeof(bytes_value) / sizeof(uint8_t); \ - \ - GroupState* group_state = *state; \ - AMobjId const* const list = AMpush( \ - &group_state->stack, \ - AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\ - AM_VALUE_OBJ_ID, \ - cmocka_cb).obj_id; \ - AMfree(AMlistPutBytes(group_state->doc, \ - list, \ - 0, \ - !strcmp(#mode, "insert"), \ - AMbytes(bytes_value, BYTES_SIZE))); \ - AMbyteSpan const bytes = AMpush( \ - &group_state->stack, \ - AMlistGet(group_state->doc, list, 0, NULL), \ - AM_VALUE_BYTES, \ - cmocka_cb).bytes; \ - assert_int_equal(bytes.count, BYTES_SIZE); \ - assert_memory_equal(bytes.src, bytes_value, BYTES_SIZE); \ - AMfree(AMpop(&group_state->stack)); \ -} +#define static_void_test_AMlistPutBytes(mode, bytes_value) \ + static void test_AMlistPutBytes_##mode(void** state) { \ + static size_t const BYTES_SIZE = sizeof(bytes_value) / sizeof(uint8_t); \ + \ + DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMobjId const* const list = AMitemObjId( \ + AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + AMstackItem( \ + NULL, AMlistPutBytes(doc_state->doc, list, 0, !strcmp(#mode, "insert"), AMbytes(bytes_value, BYTES_SIZE)), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); \ + AMbyteSpan bytes; \ + assert_true(AMitemToBytes( \ + AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), \ + &bytes)); \ + 
assert_int_equal(bytes.count, BYTES_SIZE); \ + assert_memory_equal(bytes.src, bytes_value, BYTES_SIZE); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } -#define test_AMlistPutNull(mode) test_AMlistPutNull_ ## mode +#define test_AMlistPutNull(mode) test_AMlistPutNull_##mode -#define static_void_test_AMlistPutNull(mode) \ -static void test_AMlistPutNull_ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMobjId const* const list = AMpush( \ - &group_state->stack, \ - AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\ - AM_VALUE_OBJ_ID, \ - cmocka_cb).obj_id; \ - AMfree(AMlistPutNull(group_state->doc, \ - list, \ - 0, \ - !strcmp(#mode, "insert"))); \ - AMresult* const result = AMlistGet(group_state->doc, list, 0, NULL); \ - if (AMresultStatus(result) != AM_STATUS_OK) { \ - fail_msg_view("%s", AMerrorMessage(result)); \ - } \ - assert_int_equal(AMresultSize(result), 1); \ - assert_int_equal(AMresultValue(result).tag, AM_VALUE_NULL); \ - AMfree(result); \ -} +#define static_void_test_AMlistPutNull(mode) \ + static void test_AMlistPutNull_##mode(void** state) { \ + DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMobjId const* const list = AMitemObjId( \ + AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + AMstackItem(NULL, AMlistPutNull(doc_state->doc, list, 0, !strcmp(#mode, "insert")), cmocka_cb, \ + AMexpect(AM_VAL_TYPE_VOID)); \ + AMresult* result = AMstackResult(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), NULL, NULL); \ + if (AMresultStatus(result) != AM_STATUS_OK) { \ + fail_msg_view("%s", AMresultError(result)); \ + } \ + assert_int_equal(AMresultSize(result), 1); \ + assert_int_equal(AMitemValType(AMresultItem(result)), AM_VAL_TYPE_NULL); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } -#define test_AMlistPutObject(label, mode) test_AMlistPutObject_ ## label ## 
_ ## mode +#define test_AMlistPutObject(label, mode) test_AMlistPutObject_##label##_##mode -#define static_void_test_AMlistPutObject(label, mode) \ -static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMobjId const* const list = AMpush( \ - &group_state->stack, \ - AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\ - AM_VALUE_OBJ_ID, \ - cmocka_cb).obj_id; \ - AMobjType const obj_type = AMobjType_tag(#label); \ - if (obj_type != AM_OBJ_TYPE_VOID) { \ - AMobjId const* const obj_id = AMpush( \ - &group_state->stack, \ - AMlistPutObject(group_state->doc, \ - list, \ - 0, \ - !strcmp(#mode, "insert"), \ - obj_type), \ - AM_VALUE_OBJ_ID, \ - cmocka_cb).obj_id; \ - assert_non_null(obj_id); \ - assert_int_equal(AMobjObjType(group_state->doc, obj_id), obj_type); \ - assert_int_equal(AMobjSize(group_state->doc, obj_id, NULL), 0); \ - } \ - else { \ - AMpush(&group_state->stack, \ - AMlistPutObject(group_state->doc, \ - list, \ - 0, \ - !strcmp(#mode, "insert"), \ - obj_type), \ - AM_VALUE_VOID, \ - NULL); \ - assert_int_not_equal(AMresultStatus(group_state->stack->result), \ - AM_STATUS_OK); \ - } \ - AMfree(AMpop(&group_state->stack)); \ -} +#define static_void_test_AMlistPutObject(label, mode) \ + static void test_AMlistPutObject_##label##_##mode(void** state) { \ + DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMobjId const* const list = AMitemObjId( \ + AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + AMobjType const obj_type = suffix_to_obj_type(#label); \ + AMobjId const* const obj_id = AMitemObjId( \ + AMstackItem(stack_ptr, AMlistPutObject(doc_state->doc, list, 0, !strcmp(#mode, "insert"), obj_type), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + assert_non_null(obj_id); \ + assert_int_equal(AMobjObjType(doc_state->doc, obj_id), 
obj_type); \ + assert_int_equal(AMobjSize(doc_state->doc, obj_id, NULL), 0); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } -#define test_AMlistPutStr(mode) test_AMlistPutStr ## _ ## mode +#define test_AMlistPutStr(mode) test_AMlistPutStr##_##mode -#define static_void_test_AMlistPutStr(mode, str_value) \ -static void test_AMlistPutStr_ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMobjId const* const list = AMpush( \ - &group_state->stack, \ - AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\ - AM_VALUE_OBJ_ID, \ - cmocka_cb).obj_id; \ - AMfree(AMlistPutStr(group_state->doc, \ - list, \ - 0, \ - !strcmp(#mode, "insert"), \ - AMstr(str_value))); \ - AMbyteSpan const str = AMpush( \ - &group_state->stack, \ - AMlistGet(group_state->doc, list, 0, NULL), \ - AM_VALUE_STR, \ - cmocka_cb).str; \ - assert_int_equal(str.count, strlen(str_value)); \ - assert_memory_equal(str.src, str_value, str.count); \ - AMfree(AMpop(&group_state->stack)); \ -} +#define static_void_test_AMlistPutStr(mode, str_value) \ + static void test_AMlistPutStr_##mode(void** state) { \ + DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMobjId const* const list = AMitemObjId( \ + AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + AMstackItem(NULL, AMlistPutStr(doc_state->doc, list, 0, !strcmp(#mode, "insert"), AMstr(str_value)), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); \ + AMbyteSpan str; \ + assert_true(AMitemToStr( \ + AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), \ + &str)); \ + assert_int_equal(str.count, strlen(str_value)); \ + assert_memory_equal(str.src, str_value, str.count); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } -static_void_test_AMlistPut(Bool, insert, boolean, true) +static_void_test_AMlistPut(Bool, insert, bool, true); 
-static_void_test_AMlistPut(Bool, update, boolean, true) +static_void_test_AMlistPut(Bool, update, bool, true); static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; -static_void_test_AMlistPutBytes(insert, BYTES_VALUE) +static_void_test_AMlistPutBytes(insert, BYTES_VALUE); -static_void_test_AMlistPutBytes(update, BYTES_VALUE) +static_void_test_AMlistPutBytes(update, BYTES_VALUE); -static_void_test_AMlistPut(Counter, insert, counter, INT64_MAX) +static_void_test_AMlistPut(Counter, insert, int64_t, INT64_MAX); -static_void_test_AMlistPut(Counter, update, counter, INT64_MAX) +static_void_test_AMlistPut(Counter, update, int64_t, INT64_MAX); -static_void_test_AMlistPut(F64, insert, f64, DBL_MAX) +static_void_test_AMlistPut(F64, insert, double, DBL_MAX); -static_void_test_AMlistPut(F64, update, f64, DBL_MAX) +static_void_test_AMlistPut(F64, update, double, DBL_MAX); -static_void_test_AMlistPut(Int, insert, int_, INT64_MAX) +static_void_test_AMlistPut(Int, insert, int64_t, INT64_MAX); -static_void_test_AMlistPut(Int, update, int_, INT64_MAX) +static_void_test_AMlistPut(Int, update, int64_t, INT64_MAX); -static_void_test_AMlistPutNull(insert) +static_void_test_AMlistPutNull(insert); -static_void_test_AMlistPutNull(update) +static_void_test_AMlistPutNull(update); -static_void_test_AMlistPutObject(List, insert) +static_void_test_AMlistPutObject(List, insert); -static_void_test_AMlistPutObject(List, update) +static_void_test_AMlistPutObject(List, update); -static_void_test_AMlistPutObject(Map, insert) +static_void_test_AMlistPutObject(Map, insert); -static_void_test_AMlistPutObject(Map, update) +static_void_test_AMlistPutObject(Map, update); -static_void_test_AMlistPutObject(Text, insert) +static_void_test_AMlistPutObject(Text, insert); -static_void_test_AMlistPutObject(Text, update) +static_void_test_AMlistPutObject(Text, update); -static_void_test_AMlistPutObject(Void, insert) +static_void_test_AMlistPutStr(insert, + "Hello, " + "world!"); 
-static_void_test_AMlistPutObject(Void, update) +static_void_test_AMlistPutStr(update, + "Hello," + " world" + "!"); -static_void_test_AMlistPutStr(insert, "Hello, world!") +static_void_test_AMlistPut(Timestamp, insert, int64_t, INT64_MAX); -static_void_test_AMlistPutStr(update, "Hello, world!") +static_void_test_AMlistPut(Timestamp, update, int64_t, INT64_MAX); -static_void_test_AMlistPut(Timestamp, insert, timestamp, INT64_MAX) +static_void_test_AMlistPut(Uint, insert, uint64_t, UINT64_MAX); -static_void_test_AMlistPut(Timestamp, update, timestamp, INT64_MAX) +static_void_test_AMlistPut(Uint, update, uint64_t, UINT64_MAX); -static_void_test_AMlistPut(Uint, insert, uint, UINT64_MAX) - -static_void_test_AMlistPut(Uint, update, uint, UINT64_MAX) - -static void test_get_list_values(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMobjId const* const list = AMpush( - &stack, - AMmapPutObject(doc1, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; +static void test_get_range_values(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* Insert elements. 
*/ - AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("First"))); - AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Second"))); - AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Third"))); - AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Fourth"))); - AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Fifth"))); - AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Sixth"))); - AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Seventh"))); - AMfree(AMlistPutStr(doc1, list, 0, true, AMstr("Eighth"))); - AMfree(AMcommit(doc1, AMstr(NULL), NULL)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("First")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Second")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Third")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Fourth")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Fifth")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Sixth")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Seventh")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Eighth")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc1, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMchangeHashes const v1 = AMpush(&stack, - AMgetHeads(doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMdoc* const doc2 = AMpush(&stack, - AMfork(doc1, NULL), - AM_VALUE_DOC, - cmocka_cb).doc; + AMitems const v1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMdoc* doc2; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMfork(doc1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); - 
AMfree(AMlistPutStr(doc1, list, 2, false, AMstr("Third V2"))); - AMfree(AMcommit(doc1, AMstr(NULL), NULL)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 2, false, AMstr("Third V2")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc1, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMfree(AMlistPutStr(doc2, list, 2, false, AMstr("Third V3"))); - AMfree(AMcommit(doc2, AMstr(NULL), NULL)); + AMstackItem(NULL, AMlistPutStr(doc2, list, 2, false, AMstr("Third V3")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc2, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMfree(AMmerge(doc1, doc2)); + AMstackItem(NULL, AMmerge(doc1, doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMlistItems range = AMpush(&stack, - AMlistRange(doc1, list, 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_int_equal(AMlistItemsSize(&range), 8); + /* Forward vs. reverse: complete current list range. 
*/ + AMitems range = + AMstackItems(stack_ptr, AMlistRange(doc1, list, 0, SIZE_MAX, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + size_t size = AMitemsSize(&range); + assert_int_equal(size, 8); + AMitems range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + size_t pos; + assert_true(AMitemPos(AMitemsNext(&range, 1), &pos)); + assert_int_equal(pos, 0); + assert_true(AMitemPos(AMitemsNext(&range_back, 1), &pos)); + assert_int_equal(pos, 7); - AMlistItem const* list_item = NULL; - while ((list_item = AMlistItemsNext(&range, 1)) != NULL) { - AMvalue const val1 = AMlistItemValue(list_item); - AMresult* result = AMlistGet(doc1, list, AMlistItemIndex(list_item), NULL); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMlistItemObjId(list_item)); - AMfree(result); + AMitem *item1, *item_back1; + size_t count, middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + size_t pos1, pos_back1; + assert_true(AMitemPos(item1, &pos1)); + assert_true(AMitemPos(item_back1, &pos_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. */ + assert_int_equal(pos1, pos_back1); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(pos1, pos_back1); + } + AMitem* item2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos1, NULL), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos_back1, NULL), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. 
*/ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); } - range = AMpush(&stack, - AMlistRange(doc1, list, 3, 6, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMlistItems range_back = AMlistItemsReversed(&range); - assert_int_equal(AMlistItemsSize(&range), 3); - assert_int_equal(AMlistItemIndex(AMlistItemsNext(&range, 1)), 3); - assert_int_equal(AMlistItemIndex(AMlistItemsNext(&range_back, 1)), 5); + /* Forward vs. reverse: partial current list range. */ + range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 1, 6, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + size = AMitemsSize(&range); + assert_int_equal(size, 5); + range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + assert_true(AMitemPos(AMitemsNext(&range, 1), &pos)); + assert_int_equal(pos, 1); + assert_true(AMitemPos(AMitemsNext(&range_back, 1), &pos)); + assert_int_equal(pos, 5); - range = AMlistItemsRewound(&range); - while ((list_item = AMlistItemsNext(&range, 1)) != NULL) { - AMvalue const val1 = AMlistItemValue(list_item); - AMresult* result = AMlistGet(doc1, list, AMlistItemIndex(list_item), NULL); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMlistItemObjId(list_item)); - AMfree(result); + middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + size_t pos1, pos_back1; + assert_true(AMitemPos(item1, &pos1)); + assert_true(AMitemPos(item_back1, &pos_back1)); + if ((count == 
middle) && (middle & 1)) { + /* The iterators are crossing in the middle. */ + assert_int_equal(pos1, pos_back1); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(pos1, pos_back1); + } + AMitem* item2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos1, NULL), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos_back1, NULL), NULL, NULL); + /** \note An item returned from an `AMlistGet()` call doesn't include + the index used to retrieve it. */ + assert_int_equal(AMitemIdxType(item2), 0); + assert_int_equal(AMitemIdxType(item_back2), 0); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); } - range = AMpush(&stack, - AMlistRange(doc1, list, 0, SIZE_MAX, &v1), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_int_equal(AMlistItemsSize(&range), 8); - while ((list_item = AMlistItemsNext(&range, 1)) != NULL) { - AMvalue const val1 = AMlistItemValue(list_item); - AMresult* result = AMlistGet(doc1, list, AMlistItemIndex(list_item), &v1); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMlistItemObjId(list_item)); - AMfree(result); + /* Forward vs. reverse: complete historical map range. 
*/ + range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 0, SIZE_MAX, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + size = AMitemsSize(&range); + assert_int_equal(size, 8); + range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + assert_true(AMitemPos(AMitemsNext(&range, 1), &pos)); + assert_int_equal(pos, 0); + assert_true(AMitemPos(AMitemsNext(&range_back, 1), &pos)); + assert_int_equal(pos, 7); + + middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + size_t pos1, pos_back1; + assert_true(AMitemPos(item1, &pos1)); + assert_true(AMitemPos(item_back1, &pos_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. */ + assert_int_equal(pos1, pos_back1); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(pos1, pos_back1); + } + AMitem* item2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos1, &v1), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos_back1, &v1), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. 
*/ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); } - range = AMpush(&stack, - AMlistRange(doc1, list, 3, 6, &v1), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - range_back = AMlistItemsReversed(&range); - assert_int_equal(AMlistItemsSize(&range), 3); - assert_int_equal(AMlistItemIndex(AMlistItemsNext(&range, 1)), 3); - assert_int_equal(AMlistItemIndex(AMlistItemsNext(&range_back, 1)), 5); + /* Forward vs. reverse: partial historical map range. */ + range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 2, 7, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + size = AMitemsSize(&range); + assert_int_equal(size, 5); + range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + assert_true(AMitemPos(AMitemsNext(&range, 1), &pos)); + assert_int_equal(pos, 2); + assert_true(AMitemPos(AMitemsNext(&range_back, 1), &pos)); + assert_int_equal(pos, 6); - range = AMlistItemsRewound(&range); - while ((list_item = AMlistItemsNext(&range, 1)) != NULL) { - AMvalue const val1 = AMlistItemValue(list_item); - AMresult* result = AMlistGet(doc1, list, AMlistItemIndex(list_item), &v1); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMlistItemObjId(list_item)); - AMfree(result); + middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + size_t pos1, pos_back1; + assert_true(AMitemPos(item1, &pos1)); + assert_true(AMitemPos(item_back1, &pos_back1)); + if ((count == middle) && (middle 
& 1)) { + /* The iterators are crossing in the middle. */ + assert_int_equal(pos1, pos_back1); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(pos1, pos_back1); + } + AMitem* item2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos1, &v1), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos_back1, &v1), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. */ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); } - range = AMpush(&stack, - AMlistRange(doc1, list, 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMobjItems values = AMpush(&stack, - AMobjValues(doc1, list, NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - assert_int_equal(AMlistItemsSize(&range), AMobjItemsSize(&values)); - AMobjItem const* value = NULL; - while ((list_item = AMlistItemsNext(&range, 1)) != NULL && - (value = AMobjItemsNext(&values, 1)) != NULL) { - AMvalue const val1 = AMlistItemValue(list_item); - AMvalue const val2 = AMobjItemValue(value); - assert_true(AMvalueEqual(&val1, &val2)); - assert_true(AMobjIdEqual(AMlistItemObjId(list_item), AMobjItemObjId(value))); + /* List range vs. object range: complete current. 
*/ + range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 0, SIZE_MAX, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMitems obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, list, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&range), AMitemsSize(&obj_items)); + + AMitem *item, *obj_item; + for (item = NULL, obj_item = NULL; item && obj_item; + item = AMitemsNext(&range, 1), obj_item = AMitemsNext(&obj_items, 1)) { + /** \note Object iteration doesn't yield any item indices. */ + assert_true(AMitemIdxType(item)); + assert_false(AMitemIdxType(obj_item)); + assert_true(AMitemEqual(item, obj_item)); + assert_true(AMobjIdEqual(AMitemObjId(item), AMitemObjId(obj_item))); } - range = AMpush(&stack, - AMlistRange(doc1, list, 0, SIZE_MAX, &v1), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - values = AMpush(&stack, - AMobjValues(doc1, list, &v1), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - assert_int_equal(AMlistItemsSize(&range), AMobjItemsSize(&values)); - while ((list_item = AMlistItemsNext(&range, 1)) != NULL && - (value = AMobjItemsNext(&values, 1)) != NULL) { - AMvalue const val1 = AMlistItemValue(list_item); - AMvalue const val2 = AMobjItemValue(value); - assert_true(AMvalueEqual(&val1, &val2)); - assert_true(AMobjIdEqual(AMlistItemObjId(list_item), AMobjItemObjId(value))); + /* List range vs. object range: complete historical. */ + range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 0, SIZE_MAX, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, list, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&range), AMitemsSize(&obj_items)); + + for (item = NULL, obj_item = NULL; item && obj_item; + item = AMitemsNext(&range, 1), obj_item = AMitemsNext(&obj_items, 1)) { + /** \note Object iteration doesn't yield any item indices. 
*/ + assert_true(AMitemIdxType(item)); + assert_false(AMitemIdxType(obj_item)); + assert_true(AMitemEqual(item, obj_item)); + assert_true(AMobjIdEqual(AMitemObjId(item), AMitemObjId(obj_item))); } } -/** \brief A JavaScript application can introduce NUL (`\0`) characters into a - * list object's string value which will truncate it in a C application. +/** + * \brief A JavaScript application can introduce NUL (`\0`) characters into a + * list object's string value which will truncate it in a C application. */ static void test_get_NUL_string_value(void** state) { /* @@ -381,60 +431,52 @@ static void test_get_NUL_string_value(void** state) { doc[0] = 'o\0ps'; }); const bytes = Automerge.save(doc); - console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], bytes).join(", ") + "};"); + console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], + bytes).join(", ") + "};"); */ static uint8_t const OOPS_VALUE[] = {'o', '\0', 'p', 's'}; static size_t const OOPS_SIZE = sizeof(OOPS_VALUE) / sizeof(uint8_t); static uint8_t const SAVED_DOC[] = { - 133, 111, 74, 131, 224, 28, 197, 17, 0, 113, 1, 16, 246, 137, 63, 193, - 255, 181, 76, 79, 129, 213, 133, 29, 214, 158, 164, 15, 1, 207, 184, - 14, 57, 1, 194, 79, 247, 82, 160, 134, 227, 144, 5, 241, 136, 205, - 238, 250, 251, 54, 34, 250, 210, 96, 204, 132, 153, 203, 110, 109, 6, - 6, 1, 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 3, 33, 2, 35, 2, 52, - 1, 66, 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, - 127, 0, 127, 7, 127, 1, 48, 127, 0, 127, 1, 1, 127, 1, 127, 70, 111, - 0, 112, 115, 127, 0, 0}; + 133, 111, 74, 131, 224, 28, 197, 17, 0, 113, 1, 16, 246, 137, 63, 193, 255, 181, 76, 79, 129, + 213, 133, 29, 214, 158, 164, 15, 1, 207, 184, 14, 57, 1, 194, 79, 247, 82, 160, 134, 227, 144, + 5, 241, 136, 205, 238, 250, 251, 54, 34, 250, 210, 96, 204, 132, 153, 203, 110, 109, 6, 6, 1, + 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 3, 33, 2, 35, 2, 52, 1, 66, + 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 
127, 1, 127, 1, 127, 0, 127, 0, 127, 7, 127, + 1, 48, 127, 0, 127, 1, 1, 127, 1, 127, 70, 111, 0, 112, 115, 127, 0, 0}; static size_t const SAVED_DOC_SIZE = sizeof(SAVED_DOC) / sizeof(uint8_t); - AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, - AMload(SAVED_DOC, SAVED_DOC_SIZE), - AM_VALUE_DOC, - cmocka_cb).doc; - AMbyteSpan const str = AMpush(&stack, - AMlistGet(doc, AM_ROOT, 0, NULL), - AM_VALUE_STR, - cmocka_cb).str; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(SAVED_DOC, SAVED_DOC_SIZE), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, AM_ROOT, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_not_equal(str.count, strlen(OOPS_VALUE)); assert_int_equal(str.count, OOPS_SIZE); assert_memory_equal(str.src, OOPS_VALUE, str.count); } static void test_insert_at_index(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - - AMobjId const* const list = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* Insert both at the same index. 
*/ - AMfree(AMlistPutUint(doc, list, 0, true, 0)); - AMfree(AMlistPutUint(doc, list, 0, true, 1)); + AMstackItem(NULL, AMlistPutUint(doc, list, 0, true, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutUint(doc, list, 0, true, 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); assert_int_equal(AMobjSize(doc, list, NULL), 2); - AMstrs const keys = AMpush(&stack, - AMkeys(doc, list, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_int_equal(AMstrsSize(&keys), 2); - AMlistItems const range = AMpush(&stack, - AMlistRange(doc, list, 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_int_equal(AMlistItemsSize(&range), 2); + AMitems const keys = AMstackItems(stack_ptr, AMkeys(doc, list, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&keys), 2); + AMitems const range = + AMstackItems(stack_ptr, AMlistRange(doc, list, 0, SIZE_MAX, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)); + assert_int_equal(AMitemsSize(&range), 2); } int run_list_tests(void) { @@ -458,18 +500,16 @@ int run_list_tests(void) { cmocka_unit_test(test_AMlistPutObject(Map, update)), cmocka_unit_test(test_AMlistPutObject(Text, insert)), cmocka_unit_test(test_AMlistPutObject(Text, update)), - cmocka_unit_test(test_AMlistPutObject(Void, insert)), - cmocka_unit_test(test_AMlistPutObject(Void, update)), cmocka_unit_test(test_AMlistPutStr(insert)), cmocka_unit_test(test_AMlistPutStr(update)), cmocka_unit_test(test_AMlistPut(Timestamp, insert)), cmocka_unit_test(test_AMlistPut(Timestamp, update)), cmocka_unit_test(test_AMlistPut(Uint, insert)), cmocka_unit_test(test_AMlistPut(Uint, update)), - cmocka_unit_test_setup_teardown(test_get_list_values, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_get_NUL_string_value, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_insert_at_index, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_get_range_values, setup_base, teardown_base), + 
cmocka_unit_test_setup_teardown(test_get_NUL_string_value, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_insert_at_index, setup_base, teardown_base), }; - return cmocka_run_group_tests(tests, group_setup, group_teardown); + return cmocka_run_group_tests(tests, setup_doc, teardown_doc); } diff --git a/rust/automerge-c/test/macro_utils.c b/rust/automerge-c/test/macro_utils.c index 6d7578b6..3a546eb5 100644 --- a/rust/automerge-c/test/macro_utils.c +++ b/rust/automerge-c/test/macro_utils.c @@ -3,23 +3,36 @@ /* local */ #include "macro_utils.h" -AMvalueVariant AMvalue_discriminant(char const* suffix) { - if (!strcmp(suffix, "Bool")) return AM_VALUE_BOOLEAN; - else if (!strcmp(suffix, "Bytes")) return AM_VALUE_BYTES; - else if (!strcmp(suffix, "Counter")) return AM_VALUE_COUNTER; - else if (!strcmp(suffix, "F64")) return AM_VALUE_F64; - else if (!strcmp(suffix, "Int")) return AM_VALUE_INT; - else if (!strcmp(suffix, "Null")) return AM_VALUE_NULL; - else if (!strcmp(suffix, "Str")) return AM_VALUE_STR; - else if (!strcmp(suffix, "Timestamp")) return AM_VALUE_TIMESTAMP; - else if (!strcmp(suffix, "Uint")) return AM_VALUE_UINT; - else return AM_VALUE_VOID; +AMobjType suffix_to_obj_type(char const* obj_type_label) { + if (!strcmp(obj_type_label, "List")) + return AM_OBJ_TYPE_LIST; + else if (!strcmp(obj_type_label, "Map")) + return AM_OBJ_TYPE_MAP; + else if (!strcmp(obj_type_label, "Text")) + return AM_OBJ_TYPE_TEXT; + else + return AM_OBJ_TYPE_DEFAULT; } -AMobjType AMobjType_tag(char const* obj_type_label) { - if (!strcmp(obj_type_label, "List")) return AM_OBJ_TYPE_LIST; - else if (!strcmp(obj_type_label, "Map")) return AM_OBJ_TYPE_MAP; - else if (!strcmp(obj_type_label, "Text")) return AM_OBJ_TYPE_TEXT; - else if (!strcmp(obj_type_label, "Void")) return AM_OBJ_TYPE_VOID; - else return 0; +AMvalType suffix_to_val_type(char const* suffix) { + if (!strcmp(suffix, "Bool")) + return AM_VAL_TYPE_BOOL; + else if (!strcmp(suffix, "Bytes")) + return 
AM_VAL_TYPE_BYTES; + else if (!strcmp(suffix, "Counter")) + return AM_VAL_TYPE_COUNTER; + else if (!strcmp(suffix, "F64")) + return AM_VAL_TYPE_F64; + else if (!strcmp(suffix, "Int")) + return AM_VAL_TYPE_INT; + else if (!strcmp(suffix, "Null")) + return AM_VAL_TYPE_NULL; + else if (!strcmp(suffix, "Str")) + return AM_VAL_TYPE_STR; + else if (!strcmp(suffix, "Timestamp")) + return AM_VAL_TYPE_TIMESTAMP; + else if (!strcmp(suffix, "Uint")) + return AM_VAL_TYPE_UINT; + else + return AM_VAL_TYPE_DEFAULT; } diff --git a/rust/automerge-c/test/macro_utils.h b/rust/automerge-c/test/macro_utils.h index 62e262ce..e4c2c5b9 100644 --- a/rust/automerge-c/test/macro_utils.h +++ b/rust/automerge-c/test/macro_utils.h @@ -1,24 +1,23 @@ -#ifndef MACRO_UTILS_H -#define MACRO_UTILS_H +#ifndef TESTS_MACRO_UTILS_H +#define TESTS_MACRO_UTILS_H /* local */ #include /** - * \brief Gets the result value discriminant corresponding to a function name - * suffix. + * \brief Gets the object type tag corresponding to an object type suffix. * - * \param[in] suffix A string. - * \return An `AMvalue` struct discriminant. - */ -AMvalueVariant AMvalue_discriminant(char const* suffix); - -/** - * \brief Gets the object type tag corresponding to an object type label. - * - * \param[in] obj_type_label A string. + * \param[in] suffix An object type suffix string. * \return An `AMobjType` enum tag. */ -AMobjType AMobjType_tag(char const* obj_type_label); +AMobjType suffix_to_obj_type(char const* suffix); -#endif /* MACRO_UTILS_H */ +/** + * \brief Gets the value type tag corresponding to a value type suffix. + * + * \param[in] suffix A value type suffix string. + * \return An `AMvalType` enum tag. 
+ */ +AMvalType suffix_to_val_type(char const* suffix); + +#endif /* TESTS_MACRO_UTILS_H */ diff --git a/rust/automerge-c/test/main.c b/rust/automerge-c/test/main.c index 09b71bd5..2996c9b3 100644 --- a/rust/automerge-c/test/main.c +++ b/rust/automerge-c/test/main.c @@ -1,6 +1,6 @@ +#include #include #include -#include #include /* third-party */ @@ -8,8 +8,14 @@ extern int run_actor_id_tests(void); +extern int run_byte_span_tests(void); + extern int run_doc_tests(void); +extern int run_enum_string_tests(void); + +extern int run_item_tests(void); + extern int run_list_tests(void); extern int run_map_tests(void); @@ -17,11 +23,6 @@ extern int run_map_tests(void); extern int run_ported_wasm_suite(void); int main(void) { - return ( - run_actor_id_tests() + - run_doc_tests() + - run_list_tests() + - run_map_tests() + - run_ported_wasm_suite() - ); + return (run_actor_id_tests() + run_byte_span_tests() + run_doc_tests() + run_enum_string_tests() + + run_item_tests() + run_list_tests() + run_map_tests() + run_ported_wasm_suite()); } diff --git a/rust/automerge-c/test/map_tests.c b/rust/automerge-c/test/map_tests.c index 194da2e8..2ee2e69a 100644 --- a/rust/automerge-c/test/map_tests.c +++ b/rust/automerge-c/test/map_tests.c @@ -11,144 +11,133 @@ /* local */ #include +#include +#include +#include "base_state.h" #include "cmocka_utils.h" -#include "group_state.h" +#include "doc_state.h" #include "macro_utils.h" -#include "stack_utils.h" static void test_AMmapIncrement(void** state) { - GroupState* group_state = *state; - AMfree(AMmapPutCounter(group_state->doc, AM_ROOT, AMstr("Counter"), 0)); - assert_int_equal(AMpush(&group_state->stack, - AMmapGet(group_state->doc, AM_ROOT, AMstr("Counter"), NULL), - AM_VALUE_COUNTER, - cmocka_cb).counter, 0); - AMfree(AMpop(&group_state->stack)); - AMfree(AMmapIncrement(group_state->doc, AM_ROOT, AMstr("Counter"), 3)); - assert_int_equal(AMpush(&group_state->stack, - AMmapGet(group_state->doc, AM_ROOT, AMstr("Counter"), NULL), - 
AM_VALUE_COUNTER, - cmocka_cb).counter, 3); - AMfree(AMpop(&group_state->stack)); + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->base_state->stack; + AMstackItem(NULL, AMmapPutCounter(doc_state->doc, AM_ROOT, AMstr("Counter"), 0), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + int64_t counter; + assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, AMstr("Counter"), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)), + &counter)); + assert_int_equal(counter, 0); + AMresultFree(AMstackPop(stack_ptr, NULL)); + AMstackItem(NULL, AMmapIncrement(doc_state->doc, AM_ROOT, AMstr("Counter"), 3), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, AMstr("Counter"), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)), + &counter)); + assert_int_equal(counter, 3); + AMresultFree(AMstackPop(stack_ptr, NULL)); } -#define test_AMmapPut(suffix) test_AMmapPut ## suffix +#define test_AMmapPut(suffix) test_AMmapPut##suffix -#define static_void_test_AMmapPut(suffix, member, scalar_value) \ -static void test_AMmapPut ## suffix(void **state) { \ - GroupState* group_state = *state; \ - AMfree(AMmapPut ## suffix(group_state->doc, \ - AM_ROOT, \ - AMstr(#suffix), \ - scalar_value)); \ - assert_true(AMpush( \ - &group_state->stack, \ - AMmapGet(group_state->doc, AM_ROOT, AMstr(#suffix), NULL), \ - AMvalue_discriminant(#suffix), \ - cmocka_cb).member == scalar_value); \ - AMfree(AMpop(&group_state->stack)); \ -} +#define static_void_test_AMmapPut(suffix, type, scalar_value) \ + static void test_AMmapPut##suffix(void** state) { \ + DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMstackItem(NULL, AMmapPut##suffix(doc_state->doc, AM_ROOT, AMstr(#suffix), scalar_value), cmocka_cb, \ + AMexpect(AM_VAL_TYPE_VOID)); \ + type value; \ + assert_true(AMitemTo##suffix(AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, 
AMstr(#suffix), NULL), \ + cmocka_cb, AMexpect(suffix_to_val_type(#suffix))), \ + &value)); \ + assert_true(value == scalar_value); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } -static void test_AMmapPutBytes(void **state) { +static void test_AMmapPutBytes(void** state) { static AMbyteSpan const KEY = {"Bytes", 5}; static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; static size_t const BYTES_SIZE = sizeof(BYTES_VALUE) / sizeof(uint8_t); - GroupState* group_state = *state; - AMfree(AMmapPutBytes(group_state->doc, - AM_ROOT, - KEY, - AMbytes(BYTES_VALUE, BYTES_SIZE))); - AMbyteSpan const bytes = AMpush(&group_state->stack, - AMmapGet(group_state->doc, AM_ROOT, KEY, NULL), - AM_VALUE_BYTES, - cmocka_cb).bytes; + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->base_state->stack; + AMstackItem(NULL, AMmapPutBytes(doc_state->doc, AM_ROOT, KEY, AMbytes(BYTES_VALUE, BYTES_SIZE)), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + AMbyteSpan bytes; + assert_true(AMitemToBytes( + AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, KEY, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), + &bytes)); assert_int_equal(bytes.count, BYTES_SIZE); assert_memory_equal(bytes.src, BYTES_VALUE, BYTES_SIZE); - AMfree(AMpop(&group_state->stack)); + AMresultFree(AMstackPop(stack_ptr, NULL)); } -static void test_AMmapPutNull(void **state) { +static void test_AMmapPutNull(void** state) { static AMbyteSpan const KEY = {"Null", 4}; - GroupState* group_state = *state; - AMfree(AMmapPutNull(group_state->doc, AM_ROOT, KEY)); - AMresult* const result = AMmapGet(group_state->doc, AM_ROOT, KEY, NULL); + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->base_state->stack; + AMstackItem(NULL, AMmapPutNull(doc_state->doc, AM_ROOT, KEY), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMresult* result = AMstackResult(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, KEY, NULL), NULL, NULL); if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg_view("%s", 
AMerrorMessage(result)); + fail_msg_view("%s", AMresultError(result)); } assert_int_equal(AMresultSize(result), 1); - assert_int_equal(AMresultValue(result).tag, AM_VALUE_NULL); - AMfree(result); + AMitem* item = AMresultItem(result); + assert_int_equal(AMitemValType(item), AM_VAL_TYPE_NULL); } -#define test_AMmapPutObject(label) test_AMmapPutObject_ ## label +#define test_AMmapPutObject(label) test_AMmapPutObject_##label -#define static_void_test_AMmapPutObject(label) \ -static void test_AMmapPutObject_ ## label(void **state) { \ - GroupState* group_state = *state; \ - AMobjType const obj_type = AMobjType_tag(#label); \ - if (obj_type != AM_OBJ_TYPE_VOID) { \ - AMobjId const* const obj_id = AMpush( \ - &group_state->stack, \ - AMmapPutObject(group_state->doc, \ - AM_ROOT, \ - AMstr(#label), \ - obj_type), \ - AM_VALUE_OBJ_ID, \ - cmocka_cb).obj_id; \ - assert_non_null(obj_id); \ - assert_int_equal(AMobjObjType(group_state->doc, obj_id), obj_type); \ - assert_int_equal(AMobjSize(group_state->doc, obj_id, NULL), 0); \ - } \ - else { \ - AMpush(&group_state->stack, \ - AMmapPutObject(group_state->doc, \ - AM_ROOT, \ - AMstr(#label), \ - obj_type), \ - AM_VALUE_VOID, \ - NULL); \ - assert_int_not_equal(AMresultStatus(group_state->stack->result), \ - AM_STATUS_OK); \ - } \ - AMfree(AMpop(&group_state->stack)); \ -} +#define static_void_test_AMmapPutObject(label) \ + static void test_AMmapPutObject_##label(void** state) { \ + DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMobjType const obj_type = suffix_to_obj_type(#label); \ + AMobjId const* const obj_id = \ + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr(#label), obj_type), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + assert_non_null(obj_id); \ + assert_int_equal(AMobjObjType(doc_state->doc, obj_id), obj_type); \ + assert_int_equal(AMobjSize(doc_state->doc, obj_id, NULL), 0); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } 
-static void test_AMmapPutStr(void **state) { - GroupState* group_state = *state; - AMfree(AMmapPutStr(group_state->doc, AM_ROOT, AMstr("Str"), AMstr("Hello, world!"))); - AMbyteSpan const str = AMpush(&group_state->stack, - AMmapGet(group_state->doc, AM_ROOT, AMstr("Str"), NULL), - AM_VALUE_STR, - cmocka_cb).str; +static void test_AMmapPutStr(void** state) { + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->base_state->stack; + AMstackItem(NULL, AMmapPutStr(doc_state->doc, AM_ROOT, AMstr("Str"), AMstr("Hello, world!")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + AMbyteSpan str; + assert_true(AMitemToStr(AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, AMstr("Str"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)), + &str)); assert_int_equal(str.count, strlen("Hello, world!")); assert_memory_equal(str.src, "Hello, world!", str.count); - AMfree(AMpop(&group_state->stack)); + AMresultFree(AMstackPop(stack_ptr, NULL)); } -static_void_test_AMmapPut(Bool, boolean, true) +static_void_test_AMmapPut(Bool, bool, true); -static_void_test_AMmapPut(Counter, counter, INT64_MAX) +static_void_test_AMmapPut(Counter, int64_t, INT64_MAX); -static_void_test_AMmapPut(F64, f64, DBL_MAX) +static_void_test_AMmapPut(F64, double, DBL_MAX); -static_void_test_AMmapPut(Int, int_, INT64_MAX) +static_void_test_AMmapPut(Int, int64_t, INT64_MAX); -static_void_test_AMmapPutObject(List) +static_void_test_AMmapPutObject(List); -static_void_test_AMmapPutObject(Map) +static_void_test_AMmapPutObject(Map); -static_void_test_AMmapPutObject(Text) +static_void_test_AMmapPutObject(Text); -static_void_test_AMmapPutObject(Void) +static_void_test_AMmapPut(Timestamp, int64_t, INT64_MAX); -static_void_test_AMmapPut(Timestamp, timestamp, INT64_MAX) +static_void_test_AMmapPut(Uint, int64_t, UINT64_MAX); -static_void_test_AMmapPut(Uint, uint, UINT64_MAX) - -/** \brief A JavaScript application can introduce NUL (`\0`) characters into a - * map object's key which will truncate it in a C 
application. +/** + * \brief A JavaScript application can introduce NUL (`\0`) characters into + * a map object's key which will truncate it in a C application. */ static void test_get_NUL_key(void** state) { /* @@ -158,39 +147,37 @@ static void test_get_NUL_key(void** state) { doc['o\0ps'] = 'oops'; }); const bytes = Automerge.save(doc); - console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], bytes).join(", ") + "};"); + console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], + bytes).join(", ") + "};"); */ static uint8_t const OOPS_SRC[] = {'o', '\0', 'p', 's'}; static AMbyteSpan const OOPS_KEY = {.src = OOPS_SRC, .count = sizeof(OOPS_SRC) / sizeof(uint8_t)}; static uint8_t const SAVED_DOC[] = { - 133, 111, 74, 131, 233, 150, 60, 244, 0, 116, 1, 16, 223, 253, 146, - 193, 58, 122, 66, 134, 151, 225, 210, 51, 58, 86, 247, 8, 1, 49, 118, - 234, 228, 42, 116, 171, 13, 164, 99, 244, 27, 19, 150, 44, 201, 136, - 222, 219, 90, 246, 226, 123, 77, 120, 157, 155, 55, 182, 2, 178, 64, 6, - 1, 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 6, 33, 2, 35, 2, 52, 1, - 66, 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, 127, 0, - 127, 7, 127, 4, 111, 0, 112, 115, 127, 0, 127, 1, 1, 127, 1, 127, 70, - 111, 111, 112, 115, 127, 0, 0 - }; + 133, 111, 74, 131, 233, 150, 60, 244, 0, 116, 1, 16, 223, 253, 146, 193, 58, 122, 66, 134, 151, + 225, 210, 51, 58, 86, 247, 8, 1, 49, 118, 234, 228, 42, 116, 171, 13, 164, 99, 244, 27, 19, + 150, 44, 201, 136, 222, 219, 90, 246, 226, 123, 77, 120, 157, 155, 55, 182, 2, 178, 64, 6, 1, + 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 6, 33, 2, 35, 2, 52, 1, 66, + 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, 127, 0, 127, 7, 127, + 4, 111, 0, 112, 115, 127, 0, 127, 1, 1, 127, 1, 127, 70, 111, 111, 112, 115, 127, 0, 0}; static size_t const SAVED_DOC_SIZE = sizeof(SAVED_DOC) / sizeof(uint8_t); - AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, - AMload(SAVED_DOC, SAVED_DOC_SIZE), - 
AM_VALUE_DOC, - cmocka_cb).doc; - AMbyteSpan const str = AMpush(&stack, - AMmapGet(doc, AM_ROOT, OOPS_KEY, NULL), - AM_VALUE_STR, - cmocka_cb).str; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(SAVED_DOC, SAVED_DOC_SIZE), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, OOPS_KEY, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_not_equal(OOPS_KEY.count, strlen(OOPS_KEY.src)); assert_int_equal(str.count, strlen("oops")); assert_memory_equal(str.src, "oops", str.count); } -/** \brief A JavaScript application can introduce NUL (`\0`) characters into a - * map object's string value which will truncate it in a C application. +/** + * \brief A JavaScript application can introduce NUL (`\0`) characters into a + * map object's string value which will truncate it in a C application. */ static void test_get_NUL_string_value(void** state) { /* @@ -200,1209 +187,1369 @@ static void test_get_NUL_string_value(void** state) { doc.oops = 'o\0ps'; }); const bytes = Automerge.save(doc); - console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], bytes).join(", ") + "};"); + console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], + bytes).join(", ") + "};"); */ static uint8_t const OOPS_VALUE[] = {'o', '\0', 'p', 's'}; static size_t const OOPS_SIZE = sizeof(OOPS_VALUE) / sizeof(uint8_t); static uint8_t const SAVED_DOC[] = { - 133, 111, 74, 131, 63, 94, 151, 29, 0, 116, 1, 16, 156, 159, 189, 12, - 125, 55, 71, 154, 136, 104, 237, 186, 45, 224, 32, 22, 1, 36, 163, - 164, 222, 81, 42, 1, 247, 231, 156, 54, 222, 76, 6, 109, 18, 172, 75, - 36, 118, 120, 68, 73, 87, 186, 230, 127, 68, 19, 81, 149, 185, 6, 1, - 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 6, 33, 2, 35, 2, 52, 1, - 66, 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, 127, - 0, 127, 
7, 127, 4, 111, 111, 112, 115, 127, 0, 127, 1, 1, 127, 1, 127, - 70, 111, 0, 112, 115, 127, 0, 0 - }; + 133, 111, 74, 131, 63, 94, 151, 29, 0, 116, 1, 16, 156, 159, 189, 12, 125, 55, 71, 154, 136, + 104, 237, 186, 45, 224, 32, 22, 1, 36, 163, 164, 222, 81, 42, 1, 247, 231, 156, 54, 222, 76, + 6, 109, 18, 172, 75, 36, 118, 120, 68, 73, 87, 186, 230, 127, 68, 19, 81, 149, 185, 6, 1, + 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 6, 33, 2, 35, 2, 52, 1, 66, + 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, 127, 0, 127, 7, 127, + 4, 111, 111, 112, 115, 127, 0, 127, 1, 1, 127, 1, 127, 70, 111, 0, 112, 115, 127, 0, 0}; static size_t const SAVED_DOC_SIZE = sizeof(SAVED_DOC) / sizeof(uint8_t); - AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, - AMload(SAVED_DOC, SAVED_DOC_SIZE), - AM_VALUE_DOC, - cmocka_cb).doc; - AMbyteSpan const str = AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("oops"), NULL), - AM_VALUE_STR, - cmocka_cb).str; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(SAVED_DOC, SAVED_DOC_SIZE), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("oops"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), + &str)); assert_int_not_equal(str.count, strlen(OOPS_VALUE)); assert_int_equal(str.count, OOPS_SIZE); assert_memory_equal(str.src, OOPS_VALUE, str.count); } static void test_range_iter_map(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("a"), 3)); - AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("b"), 4)); - AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("c"), 5)); - AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("d"), 6)); - AMfree(AMcommit(doc, AMstr(NULL), NULL)); - AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("a"), 7)); - AMfree(AMcommit(doc, 
AMstr(NULL), NULL)); - AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("a"), 8)); - AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("d"), 9)); - AMfree(AMcommit(doc, AMstr(NULL), NULL)); - AMactorId const* const actor_id = AMpush(&stack, - AMgetActorId(doc), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMmapItems map_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - assert_int_equal(AMmapItemsSize(&map_items), 4); + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("a"), 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("b"), 4), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("c"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("d"), 6), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("a"), 7), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("a"), 8), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("d"), 9), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMactorId const* actor_id; + assert_true(AMitemToActorId(AMstackItem(stack_ptr, AMgetActorId(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMitems map_items = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + 
AMexpect(AM_VAL_TYPE_UINT)); + assert_int_equal(AMitemsSize(&map_items), 4); /* ["b"-"d") */ - AMmapItems range = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr("b"), AMstr("d"), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; + AMitems range = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr("b"), AMstr("d"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)); /* First */ - AMmapItem const* next = AMmapItemsNext(&range, 1); + AMitem* next = AMitemsNext(&range, 1); assert_non_null(next); - AMbyteSpan key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "b", key.count); - AMvalue next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 4); - AMobjId const* next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + uint64_t uint; + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 4); + AMobjId const* next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Second */ - next = AMmapItemsNext(&range, 1); + next = AMitemsNext(&range, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "c", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 5); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 5); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 3); 
assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Third */ - assert_null(AMmapItemsNext(&range, 1)); + assert_null(AMitemsNext(&range, 1)); /* ["b"-) */ - range = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr("b"), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; + range = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr("b"), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)); /* First */ - next = AMmapItemsNext(&range, 1); + next = AMitemsNext(&range, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "b", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 4); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 4); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Second */ - next = AMmapItemsNext(&range, 1); + next = AMitemsNext(&range, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "c", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 5); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 5); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 3); 
assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Third */ - next = AMmapItemsNext(&range, 1); + next = AMitemsNext(&range, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "d", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 9); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 9); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 7); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Fourth */ - assert_null(AMmapItemsNext(&range, 1)); + assert_null(AMitemsNext(&range, 1)); /* [-"d") */ - range = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr("d"), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; + range = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr("d"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)); /* First */ - next = AMmapItemsNext(&range, 1); + next = AMitemsNext(&range, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "a", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 8); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 8); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 6); 
assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Second */ - next = AMmapItemsNext(&range, 1); + next = AMitemsNext(&range, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "b", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 4); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 4); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Third */ - next = AMmapItemsNext(&range, 1); + next = AMitemsNext(&range, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "c", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 5); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 5); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Fourth */ - assert_null(AMmapItemsNext(&range, 1)); + assert_null(AMitemsNext(&range, 1)); /* ["a"-) */ - range = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr("a"), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; + range = 
AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr("a"), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)); /* First */ - next = AMmapItemsNext(&range, 1); + next = AMitemsNext(&range, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "a", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 8); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 8); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 6); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Second */ - next = AMmapItemsNext(&range, 1); + next = AMitemsNext(&range, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "b", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 4); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 4); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Third */ - next = AMmapItemsNext(&range, 1); + next = AMitemsNext(&range, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 
1); assert_memory_equal(key.src, "c", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 5); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 5); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Fourth */ - next = AMmapItemsNext(&range, 1); + next = AMitemsNext(&range, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "d", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 9); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 9); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 7); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Fifth */ - assert_null(AMmapItemsNext(&range, 1)); + assert_null(AMitemsNext(&range, 1)); } static void test_map_range_back_and_forth_single(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMactorId const* const actor_id = AMpush(&stack, - AMgetActorId(doc), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMactorId const* actor_id; + 
assert_true(AMitemToActorId(AMstackItem(stack_ptr, AMgetActorId(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); - AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("1"), AMstr("a"))); - AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("2"), AMstr("b"))); - AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("3"), AMstr("c"))); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("1"), AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("2"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("3"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* Forward, back, back. */ - AMmapItems range_all = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; + AMitems range_all = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); /* First */ - AMmapItem const* next = AMmapItemsNext(&range_all, 1); + AMitem* next = AMitemsNext(&range_all, 1); assert_non_null(next); - AMbyteSpan key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - AMvalue next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 1); - assert_memory_equal(next_value.str.src, "a", next_value.str.count); - AMobjId const* next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + AMbyteSpan str; + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + AMobjId const* next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); 
assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Third */ - AMmapItems range_back_all = AMmapItemsReversed(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); - AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); + AMitems range_back_all = AMitemsReversed(&range_all); + range_back_all = AMitemsRewound(&range_back_all); + AMitem* next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - AMvalue next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 1); - assert_memory_equal(next_back_value.str.src, "c", next_back_value.str.count); - AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + AMbyteSpan str_back; + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "c", str_back.count); + AMobjId const* next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 1); - assert_memory_equal(next_back_value.str.src, "b", 
next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "b", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); /* Forward, back, forward. */ - range_all = AMmapItemsRewound(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); + range_all = AMitemsRewound(&range_all); + range_back_all = AMitemsRewound(&range_back_all); /* First */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 1); - assert_memory_equal(next_value.str.src, "a", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); 
- next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 1); - assert_memory_equal(next_back_value.str.src, "c", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "c", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); /* Second */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 1); - assert_memory_equal(next_value.str.src, "b", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Forward, forward, forward. 
*/ - range_all = AMmapItemsRewound(&range_all); + range_all = AMitemsRewound(&range_all); /* First */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 1); - assert_memory_equal(next_value.str.src, "a", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Second */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 1); - assert_memory_equal(next_value.str.src, "b", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Third */ - next = 
AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 1); - assert_memory_equal(next_value.str.src, "c", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Forward stop */ - assert_null(AMmapItemsNext(&range_all, 1)); + assert_null(AMitemsNext(&range_all, 1)); /* Back, back, back. 
*/ - range_back_all = AMmapItemsRewound(&range_back_all); + range_back_all = AMitemsRewound(&range_back_all); /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 1); - assert_memory_equal(next_back_value.str.src, "c", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "c", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 1); - assert_memory_equal(next_back_value.str.src, "b", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + 
assert_memory_equal(str_back.src, "b", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); /* First */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 1); - assert_memory_equal(next_back_value.str.src, "a", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "a", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); /* Back stop */ - assert_null(AMmapItemsNext(&range_back_all, 1)); + assert_null(AMitemsNext(&range_back_all, 1)); } static void test_map_range_back_and_forth_double(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMactorId const* const actor_id1= AMpush(&stack, - AMactorIdInitBytes("\0", 1), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMfree(AMsetActorId(doc1, actor_id1)); + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, 
AMexpect(AM_VAL_TYPE_DOC)), &doc1)); + AMactorId const* actor_id1; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromBytes("\0", 1), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id1)); + AMstackItem(NULL, AMsetActorId(doc1, actor_id1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("1"), AMstr("a"))); - AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("2"), AMstr("b"))); - AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("3"), AMstr("c"))); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("1"), AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("2"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("3"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* The second actor should win all conflicts here. */ - AMdoc* const doc2 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMactorId const* const actor_id2 = AMpush(&stack, - AMactorIdInitBytes("\1", 1), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMfree(AMsetActorId(doc2, actor_id2)); - AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("1"), AMstr("aa"))); - AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("2"), AMstr("bb"))); - AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("3"), AMstr("cc"))); + AMdoc* doc2; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); + AMactorId const* actor_id2; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromBytes("\1", 1), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id2)); + AMstackItem(NULL, AMsetActorId(doc2, actor_id2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("1"), AMstr("aa")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("2"), AMstr("bb")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("3"), 
AMstr("cc")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMfree(AMmerge(doc1, doc2)); + AMstackItem(NULL, AMmerge(doc1, doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* Forward, back, back. */ - AMmapItems range_all = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; + AMitems range_all = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); /* First */ - AMmapItem const* next = AMmapItemsNext(&range_all, 1); + AMitem* next = AMitemsNext(&range_all, 1); assert_non_null(next); - AMbyteSpan key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - AMvalue next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 2); - assert_memory_equal(next_value.str.src, "aa", next_value.str.count); - AMobjId const* next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + AMbyteSpan str; + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "aa", str.count); + AMobjId const* next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); /* Third */ - AMmapItems range_back_all = AMmapItemsReversed(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); - AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); + AMitems range_back_all = AMitemsReversed(&range_all); + range_back_all = AMitemsRewound(&range_back_all); + AMitem* next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + 
assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - AMvalue next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 2); - assert_memory_equal(next_back_value.str.src, "cc", next_back_value.str.count); - AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + AMbyteSpan str_back; + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "cc", str_back.count); + AMobjId const* next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 2); - assert_memory_equal(next_back_value.str.src, "bb", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "bb", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); 
assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); /* Forward, back, forward. */ - range_all = AMmapItemsRewound(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); + range_all = AMitemsRewound(&range_all); + range_back_all = AMitemsRewound(&range_back_all); /* First */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 2); - assert_memory_equal(next_value.str.src, "aa", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "aa", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 2); - assert_memory_equal(next_back_value.str.src, "cc", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + 
assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "cc", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); /* Second */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 2); - assert_memory_equal(next_value.str.src, "bb", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "bb", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); /* Forward, forward, forward. 
*/ - range_all = AMmapItemsRewound(&range_all); + range_all = AMitemsRewound(&range_all); /* First */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 2); - assert_memory_equal(next_value.str.src, "aa", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "aa", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); /* Second */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 2); - assert_memory_equal(next_value.str.src, "bb", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "bb", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); /* Third */ - next = 
AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 2); - assert_memory_equal(next_value.str.src, "cc", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "cc", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); /* Forward stop */ - assert_null(AMmapItemsNext(&range_all, 1)); + assert_null(AMitemsNext(&range_all, 1)); /* Back, back, back. 
*/ - range_back_all = AMmapItemsRewound(&range_back_all); + range_back_all = AMitemsRewound(&range_back_all); /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 2); - assert_memory_equal(next_back_value.str.src, "cc", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "cc", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 2); - assert_memory_equal(next_back_value.str.src, "bb", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + 
assert_memory_equal(str_back.src, "bb", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); /* First */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 2); - assert_memory_equal(next_back_value.str.src, "aa", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "aa", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); /* Back stop */ - assert_null(AMmapItemsNext(&range_back_all, 1)); + assert_null(AMitemsNext(&range_back_all, 1)); } static void test_map_range_at_back_and_forth_single(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMactorId const* const actor_id = AMpush(&stack, - AMgetActorId(doc), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + 
AMactorId const* actor_id; + assert_true(AMitemToActorId(AMstackItem(stack_ptr, AMgetActorId(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); - AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("1"), AMstr("a"))); - AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("2"), AMstr("b"))); - AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("3"), AMstr("c"))); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("1"), AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("2"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("3"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMchangeHashes const heads = AMpush(&stack, - AMgetHeads(doc), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; + AMitems const heads = AMstackItems(stack_ptr, AMgetHeads(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* Forward, back, back. */ - AMmapItems range_all = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; + AMitems range_all = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); /* First */ - AMmapItem const* next = AMmapItemsNext(&range_all, 1); + AMitem* next = AMitemsNext(&range_all, 1); assert_non_null(next); - AMbyteSpan key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - AMvalue next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 1); - assert_memory_equal(next_value.str.src, "a", next_value.str.count); - AMobjId const* next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + AMbyteSpan str; + assert_true(AMitemToStr(next, &str)); + 
assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + AMobjId const* next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Third */ - AMmapItems range_back_all = AMmapItemsReversed(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); - AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); + AMitems range_back_all = AMitemsReversed(&range_all); + range_back_all = AMitemsRewound(&range_back_all); + AMitem* next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - AMvalue next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 1); - assert_memory_equal(next_back_value.str.src, "c", next_back_value.str.count); - AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + AMbyteSpan str_back; + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "c", str_back.count); + AMobjId const* next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); 
assert_memory_equal(key.src, "2", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 1); - assert_memory_equal(next_back_value.str.src, "b", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "b", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); /* Forward, back, forward. */ - range_all = AMmapItemsRewound(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); + range_all = AMitemsRewound(&range_all); + range_back_all = AMitemsRewound(&range_back_all); /* First */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 1); - assert_memory_equal(next_value.str.src, "a", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); + 
next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 1); - assert_memory_equal(next_back_value.str.src, "c", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "c", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); /* Second */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 1); - assert_memory_equal(next_value.str.src, "b", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Forward, forward, forward. 
*/ - range_all = AMmapItemsRewound(&range_all); + range_all = AMitemsRewound(&range_all); /* First */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 1); - assert_memory_equal(next_value.str.src, "a", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Second */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 1); - assert_memory_equal(next_value.str.src, "b", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Third */ - next = 
AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 1); - assert_memory_equal(next_value.str.src, "c", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 0); /* Forward stop */ - assert_null(AMmapItemsNext(&range_all, 1)); + assert_null(AMitemsNext(&range_all, 1)); /* Back, back, back. 
*/ - range_back_all = AMmapItemsRewound(&range_back_all); + range_back_all = AMitemsRewound(&range_back_all); /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 1); - assert_memory_equal(next_back_value.str.src, "c", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "c", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 1); - assert_memory_equal(next_back_value.str.src, "b", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + 
assert_memory_equal(str_back.src, "b", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); /* First */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 1); - assert_memory_equal(next_back_value.str.src, "a", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "a", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); /* Back stop */ - assert_null(AMmapItemsNext(&range_back_all, 1)); + assert_null(AMitemsNext(&range_back_all, 1)); } static void test_map_range_at_back_and_forth_double(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMactorId const* const actor_id1= AMpush(&stack, - AMactorIdInitBytes("\0", 1), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMfree(AMsetActorId(doc1, actor_id1)); + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), 
cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); + AMactorId const* actor_id1; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromBytes("\0", 1), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id1)); + AMstackItem(NULL, AMsetActorId(doc1, actor_id1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("1"), AMstr("a"))); - AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("2"), AMstr("b"))); - AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("3"), AMstr("c"))); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("1"), AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("2"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("3"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* The second actor should win all conflicts here. */ - AMdoc* const doc2 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMactorId const* const actor_id2= AMpush(&stack, - AMactorIdInitBytes("\1", 1), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMfree(AMsetActorId(doc2, actor_id2)); - AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("1"), AMstr("aa"))); - AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("2"), AMstr("bb"))); - AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("3"), AMstr("cc"))); + AMdoc* doc2; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); + AMactorId const* actor_id2; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromBytes("\1", 1), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id2)); + AMstackItem(NULL, AMsetActorId(doc2, actor_id2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("1"), AMstr("aa")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("2"), AMstr("bb")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, 
AMstr("3"), AMstr("cc")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMfree(AMmerge(doc1, doc2)); - AMchangeHashes const heads = AMpush(&stack, - AMgetHeads(doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; + AMstackItem(NULL, AMmerge(doc1, doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems const heads = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* Forward, back, back. */ - AMmapItems range_all = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; + AMitems range_all = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); /* First */ - AMmapItem const* next = AMmapItemsNext(&range_all, 1); + AMitem* next = AMitemsNext(&range_all, 1); assert_non_null(next); - AMbyteSpan key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - AMvalue next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 2); - assert_memory_equal(next_value.str.src, "aa", next_value.str.count); - AMobjId const* next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + AMbyteSpan str; + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "aa", str.count); + AMobjId const* next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); /* Third */ - AMmapItems range_back_all = AMmapItemsReversed(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); - AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); + 
AMitems range_back_all = AMitemsReversed(&range_all); + range_back_all = AMitemsRewound(&range_back_all); + AMitem* next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - AMvalue next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 2); - assert_memory_equal(next_back_value.str.src, "cc", next_back_value.str.count); - AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + AMbyteSpan str_back; + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "cc", str_back.count); + AMobjId const* next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 2); - assert_memory_equal(next_back_value.str.src, "bb", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + 
assert_memory_equal(str_back.src, "bb", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); /* Forward, back, forward. */ - range_all = AMmapItemsRewound(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); + range_all = AMitemsRewound(&range_all); + range_back_all = AMitemsRewound(&range_back_all); /* First */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 2); - assert_memory_equal(next_value.str.src, "aa", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "aa", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 2); - 
assert_memory_equal(next_back_value.str.src, "cc", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "cc", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); /* Second */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 2); - assert_memory_equal(next_value.str.src, "bb", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "bb", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); /* Forward, forward, forward. 
*/ - range_all = AMmapItemsRewound(&range_all); + range_all = AMitemsRewound(&range_all); /* First */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 2); - assert_memory_equal(next_value.str.src, "aa", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "aa", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); /* Second */ - next = AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 2); - assert_memory_equal(next_value.str.src, "bb", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "bb", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); /* Third */ - next = 
AMmapItemsNext(&range_all, 1); + next = AMitemsNext(&range_all, 1); assert_non_null(next); - key = AMmapItemKey(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_int_equal(next_value.str.count, 2); - assert_memory_equal(next_value.str.src, "cc", next_value.str.count); - next_obj_id = AMmapItemObjId(next); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "cc", str.count); + next_obj_id = AMitemObjId(next); assert_int_equal(AMobjIdCounter(next_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_obj_id), 1); /* Forward stop */ - assert_null(AMmapItemsNext(&range_all, 1)); + assert_null(AMitemsNext(&range_all, 1)); /* Back, back, back. 
*/ - range_back_all = AMmapItemsRewound(&range_back_all); + range_back_all = AMitemsRewound(&range_back_all); /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "3", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 2); - assert_memory_equal(next_back_value.str.src, "cc", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "cc", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "2", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 2); - assert_memory_equal(next_back_value.str.src, "bb", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + 
assert_memory_equal(str_back.src, "bb", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); /* First */ - next_back = AMmapItemsNext(&range_back_all, 1); + next_back = AMitemsNext(&range_back_all, 1); assert_non_null(next_back); - key = AMmapItemKey(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); assert_int_equal(key.count, 1); assert_memory_equal(key.src, "1", key.count); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_int_equal(next_back_value.str.count, 2); - assert_memory_equal(next_back_value.str.src, "aa", next_back_value.str.count); - next_back_obj_id = AMmapItemObjId(next_back); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "aa", str_back.count); + next_back_obj_id = AMitemObjId(next_back); assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); /* Back stop */ - assert_null(AMmapItemsNext(&range_back_all, 1)); + assert_null(AMitemsNext(&range_back_all, 1)); } static void test_get_range_values(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("aa"), AMstr("aaa"))); - AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("bb"), AMstr("bbb"))); - AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("cc"), AMstr("ccc"))); - AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("dd"), AMstr("ddd"))); - AMfree(AMcommit(doc1, AMstr(NULL), NULL)); + BaseState* base_state = *state; + 
AMstack** stack_ptr = &base_state->stack; + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("aa"), AMstr("aaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("bb"), AMstr("bbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("cc"), AMstr("ccc")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("dd"), AMstr("ddd")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc1, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMchangeHashes const v1 = AMpush(&stack, - AMgetHeads(doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMdoc* const doc2 = AMpush(&stack, AMfork(doc1, NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMitems const v1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMdoc* doc2; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMfork(doc1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); - AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("cc"), AMstr("ccc V2"))); - AMfree(AMcommit(doc1, AMstr(NULL), NULL)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("cc"), AMstr("ccc V2")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc1, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("cc"), AMstr("ccc V3"))); - AMfree(AMcommit(doc2, AMstr(NULL), NULL)); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("cc"), AMstr("ccc V3")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc2, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMfree(AMmerge(doc1, doc2)); + AMstackItem(NULL, AMmerge(doc1, doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMmapItems range = AMpush(&stack, 
- AMmapRange(doc1, AM_ROOT, AMstr("b"), AMstr("d"), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItems range_back = AMmapItemsReversed(&range); - assert_int_equal(AMmapItemsSize(&range), 2); + /* Forward vs. reverse: complete current map range. */ + AMitems range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + size_t size = AMitemsSize(&range); + assert_int_equal(size, 4); + AMitems range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + AMbyteSpan key; + assert_true(AMitemKey(AMitemsNext(&range, 1), &key)); + assert_memory_equal(key.src, "aa", key.count); + assert_true(AMitemKey(AMitemsNext(&range_back, 1), &key)); + assert_memory_equal(key.src, "dd", key.count); - AMmapItem const* map_item = NULL; - while ((map_item = AMmapItemsNext(&range, 1)) != NULL) { - AMvalue const val1 = AMmapItemValue(map_item); - AMresult* result = AMmapGet(doc1, AM_ROOT, AMmapItemKey(map_item), NULL); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMmapItemObjId(map_item)); - AMfree(result); + AMitem *item1, *item_back1; + size_t count, middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + AMbyteSpan key1, key_back1; + assert_true(AMitemKey(item1, &key1)); + assert_true(AMitemKey(item_back1, &key_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. 
*/ + assert_int_equal(AMstrCmp(key1, key_back1), 0); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(AMstrCmp(key1, key_back1), 0); + } + AMitem* item2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key1, NULL), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key_back1, NULL), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. */ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); } - assert_int_equal(AMmapItemsSize(&range_back), 2); + /* Forward vs. reverse: partial current map range. 
*/ + range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr("aa"), AMstr("dd"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + size = AMitemsSize(&range); + assert_int_equal(size, 3); + range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + assert_true(AMitemKey(AMitemsNext(&range, 1), &key)); + assert_memory_equal(key.src, "aa", key.count); + assert_true(AMitemKey(AMitemsNext(&range_back, 1), &key)); + assert_memory_equal(key.src, "cc", key.count); - while ((map_item = AMmapItemsNext(&range_back, 1)) != NULL) { - AMvalue const val1 = AMmapItemValue(map_item); - AMresult* result = AMmapGet(doc1, AM_ROOT, AMmapItemKey(map_item), NULL); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMmapItemObjId(map_item)); - AMfree(result); + middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + AMbyteSpan key1, key_back1; + assert_true(AMitemKey(item1, &key1)); + assert_true(AMitemKey(item_back1, &key_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. */ + assert_int_equal(AMstrCmp(key1, key_back1), 0); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(AMstrCmp(key1, key_back1), 0); + } + AMitem* item2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key1, NULL), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key_back1, NULL), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. 
*/ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); } - range = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, AMstr("b"), AMstr("d"), &v1), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - range_back = AMmapItemsReversed(&range); - assert_int_equal(AMmapItemsSize(&range), 2); + /* Forward vs. reverse: complete historical map range. */ + range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), &v1), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + size = AMitemsSize(&range); + assert_int_equal(size, 4); + range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + assert_true(AMitemKey(AMitemsNext(&range, 1), &key)); + assert_memory_equal(key.src, "aa", key.count); + assert_true(AMitemKey(AMitemsNext(&range_back, 1), &key)); + assert_memory_equal(key.src, "dd", key.count); - while ((map_item = AMmapItemsNext(&range, 1)) != NULL) { - AMvalue const val1 = AMmapItemValue(map_item); - AMresult* result = AMmapGet(doc1, AM_ROOT, AMmapItemKey(map_item), &v1); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMmapItemObjId(map_item)); - AMfree(result); + middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + AMbyteSpan key1, key_back1; + assert_true(AMitemKey(item1, &key1)); + assert_true(AMitemKey(item_back1, &key_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. 
*/ + assert_int_equal(AMstrCmp(key1, key_back1), 0); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(AMstrCmp(key1, key_back1), 0); + } + AMitem* item2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key1, &v1), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key_back1, &v1), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. */ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); } - assert_int_equal(AMmapItemsSize(&range_back), 2); + /* Forward vs. reverse: partial historical map range. 
*/ + range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr("bb"), AMstr(NULL), &v1), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + size = AMitemsSize(&range); + assert_int_equal(size, 3); + range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + assert_true(AMitemKey(AMitemsNext(&range, 1), &key)); + assert_memory_equal(key.src, "bb", key.count); + assert_true(AMitemKey(AMitemsNext(&range_back, 1), &key)); + assert_memory_equal(key.src, "dd", key.count); - while ((map_item = AMmapItemsNext(&range_back, 1)) != NULL) { - AMvalue const val1 = AMmapItemValue(map_item); - AMresult* result = AMmapGet(doc1, AM_ROOT, AMmapItemKey(map_item), &v1); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMmapItemObjId(map_item)); - AMfree(result); + middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + AMbyteSpan key1, key_back1; + assert_true(AMitemKey(item1, &key1)); + assert_true(AMitemKey(item_back1, &key_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. */ + assert_int_equal(AMstrCmp(key1, key_back1), 0); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(AMstrCmp(key1, key_back1), 0); + } + AMitem* item2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key1, &v1), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key_back1, &v1), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. 
*/ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); } - range = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMobjItems values = AMpush(&stack, - AMobjValues(doc1, AM_ROOT, NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - assert_int_equal(AMmapItemsSize(&range), AMobjItemsSize(&values)); - AMobjItem const* value = NULL; - while ((map_item = AMmapItemsNext(&range, 1)) != NULL && - (value = AMobjItemsNext(&values, 1)) != NULL) { - AMvalue const val1 = AMmapItemValue(map_item); - AMvalue const val2 = AMobjItemValue(value); - assert_true(AMvalueEqual(&val1, &val2)); - assert_true(AMobjIdEqual(AMmapItemObjId(map_item), AMobjItemObjId(value))); + /* Map range vs. object range: complete current. */ + range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + AMitems obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&range), AMitemsSize(&obj_items)); + + AMitem *item, *obj_item; + for (item = NULL, obj_item = NULL; item && obj_item; + item = AMitemsNext(&range, 1), obj_item = AMitemsNext(&obj_items, 1)) { + /** \note Object iteration doesn't yield any item indices. 
*/ + assert_true(AMitemIdxType(item)); + assert_false(AMitemIdxType(obj_item)); + assert_true(AMitemEqual(item, obj_item)); + assert_true(AMobjIdEqual(AMitemObjId(item), AMitemObjId(obj_item))); } - range = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), &v1), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - values = AMpush(&stack, - AMobjValues(doc1, AM_ROOT, &v1), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - assert_int_equal(AMmapItemsSize(&range), AMobjItemsSize(&values)); - while ((map_item = AMmapItemsNext(&range, 1)) != NULL && - (value = AMobjItemsNext(&values, 1)) != NULL) { - AMvalue const val1 = AMmapItemValue(map_item); - AMvalue const val2 = AMobjItemValue(value); - assert_true(AMvalueEqual(&val1, &val2)); - assert_true(AMobjIdEqual(AMmapItemObjId(map_item), AMobjItemObjId(value))); + /* Map range vs. object range: complete historical. */ + range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), &v1), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, AM_ROOT, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&range), AMitemsSize(&obj_items)); + + for (item = NULL, obj_item = NULL; item && obj_item; + item = AMitemsNext(&range, 1), obj_item = AMitemsNext(&obj_items, 1)) { + /** \note Object iteration doesn't yield any item indices. 
*/ + assert_true(AMitemIdxType(item)); + assert_false(AMitemIdxType(obj_item)); + assert_true(AMitemEqual(item, obj_item)); + assert_true(AMobjIdEqual(AMitemObjId(item), AMitemObjId(obj_item))); } } @@ -1418,19 +1565,18 @@ int run_map_tests(void) { cmocka_unit_test(test_AMmapPutObject(List)), cmocka_unit_test(test_AMmapPutObject(Map)), cmocka_unit_test(test_AMmapPutObject(Text)), - cmocka_unit_test(test_AMmapPutObject(Void)), cmocka_unit_test(test_AMmapPutStr), cmocka_unit_test(test_AMmapPut(Timestamp)), cmocka_unit_test(test_AMmapPut(Uint)), - cmocka_unit_test_setup_teardown(test_get_NUL_key, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_get_NUL_string_value, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_range_iter_map, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_single, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_double, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_map_range_at_back_and_forth_single, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_map_range_at_back_and_forth_double, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_get_range_values, setup_stack, teardown_stack), + cmocka_unit_test_setup_teardown(test_get_NUL_key, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_get_NUL_string_value, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_range_iter_map, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_single, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_double, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_map_range_at_back_and_forth_single, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_map_range_at_back_and_forth_double, setup_base, teardown_base), + 
cmocka_unit_test_setup_teardown(test_get_range_values, setup_base, teardown_base), }; - return cmocka_run_group_tests(tests, group_setup, group_teardown); + return cmocka_run_group_tests(tests, setup_doc, teardown_doc); } diff --git a/rust/automerge-c/test/ported_wasm/basic_tests.c b/rust/automerge-c/test/ported_wasm/basic_tests.c index e2659d62..b83ff132 100644 --- a/rust/automerge-c/test/ported_wasm/basic_tests.c +++ b/rust/automerge-c/test/ported_wasm/basic_tests.c @@ -11,7 +11,10 @@ /* local */ #include -#include "../stack_utils.h" +#include +#include +#include "../base_state.h" +#include "../cmocka_utils.h" /** * \brief default import init() should return a promise @@ -22,163 +25,171 @@ static void test_default_import_init_should_return_a_promise(void** state); * \brief should create, clone and free */ static void test_create_clone_and_free(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc1 = create() */ - AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); /* const doc2 = doc1.clone() */ - AMdoc* const doc2 = AMpush(&stack, AMclone(doc1), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc2; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMclone(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); } /** * \brief should be able to start and commit */ static void test_start_and_commit(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* doc.commit() */ - AMpush(&stack, AMemptyChange(doc, AMstr(NULL), NULL), 
AM_VALUE_CHANGE_HASHES, cmocka_cb); + AMstackItems(stack_ptr, AMemptyChange(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); } /** * \brief getting a nonexistent prop does not throw an error */ static void test_getting_a_nonexistent_prop_does_not_throw_an_error(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* const root = "_root" */ /* const result = doc.getWithType(root, "hello") */ /* assert.deepEqual(result, undefined) */ - AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("hello"), NULL), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("hello"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); } /** * \brief should be able to set and get a simple value */ static void test_should_be_able_to_set_and_get_a_simple_value(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc: Automerge = create("aabbcc") */ - AMdoc* const doc = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr(AMstr("aabbcc")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aabbcc")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* const root = "_root" */ /* let result */ /* */ /* doc.put(root, "hello", "world") */ - AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("hello"), AMstr("world"))); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("hello"), AMstr("world")), 
cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.put(root, "number1", 5, "uint") */ - AMfree(AMmapPutUint(doc, AM_ROOT, AMstr("number1"), 5)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("number1"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.put(root, "number2", 5) */ - AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("number2"), 5)); + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("number2"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.put(root, "number3", 5.5) */ - AMfree(AMmapPutF64(doc, AM_ROOT, AMstr("number3"), 5.5)); + AMstackItem(NULL, AMmapPutF64(doc, AM_ROOT, AMstr("number3"), 5.5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.put(root, "number4", 5.5, "f64") */ - AMfree(AMmapPutF64(doc, AM_ROOT, AMstr("number4"), 5.5)); + AMstackItem(NULL, AMmapPutF64(doc, AM_ROOT, AMstr("number4"), 5.5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.put(root, "number5", 5.5, "int") */ - AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("number5"), 5.5)); + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("number5"), 5.5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.put(root, "bool", true) */ - AMfree(AMmapPutBool(doc, AM_ROOT, AMstr("bool"), true)); + AMstackItem(NULL, AMmapPutBool(doc, AM_ROOT, AMstr("bool"), true), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.put(root, "time1", 1000, "timestamp") */ - AMfree(AMmapPutTimestamp(doc, AM_ROOT, AMstr("time1"), 1000)); + AMstackItem(NULL, AMmapPutTimestamp(doc, AM_ROOT, AMstr("time1"), 1000), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.put(root, "time2", new Date(1001)) */ - AMfree(AMmapPutTimestamp(doc, AM_ROOT, AMstr("time2"), 1001)); + AMstackItem(NULL, AMmapPutTimestamp(doc, AM_ROOT, AMstr("time2"), 1001), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.putObject(root, "list", []); */ - AMfree(AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST)); + AMstackItem(NULL, AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); /* doc.put(root, 
"null", null) */ - AMfree(AMmapPutNull(doc, AM_ROOT, AMstr("null"))); + AMstackItem(NULL, AMmapPutNull(doc, AM_ROOT, AMstr("null")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* result = doc.getWithType(root, "hello") */ /* assert.deepEqual(result, ["str", "world"]) */ /* assert.deepEqual(doc.get("/", "hello"), "world") */ - AMbyteSpan str = AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("hello"), NULL), - AM_VALUE_STR, - cmocka_cb).str; + AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("hello"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), + &str)); assert_int_equal(str.count, strlen("world")); assert_memory_equal(str.src, "world", str.count); /* assert.deepEqual(doc.get("/", "hello"), "world") */ /* */ /* result = doc.getWithType(root, "number1") */ /* assert.deepEqual(result, ["uint", 5]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("number1"), NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 5); + uint64_t uint; + assert_true(AMitemToUint( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number1"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 5); /* assert.deepEqual(doc.get("/", "number1"), 5) */ /* */ /* result = doc.getWithType(root, "number2") */ /* assert.deepEqual(result, ["int", 5]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("number2"), NULL), - AM_VALUE_INT, - cmocka_cb).int_, 5); + int64_t int_; + assert_true(AMitemToInt( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number2"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_INT)), + &int_)); + assert_int_equal(int_, 5); /* */ /* result = doc.getWithType(root, "number3") */ /* assert.deepEqual(result, ["f64", 5.5]) */ - assert_float_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("number3"), NULL), - AM_VALUE_F64, - cmocka_cb).f64, 5.5, DBL_EPSILON); + double f64; + assert_true(AMitemToF64( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number3"), NULL), 
cmocka_cb, AMexpect(AM_VAL_TYPE_F64)), + &f64)); + assert_float_equal(f64, 5.5, DBL_EPSILON); /* */ /* result = doc.getWithType(root, "number4") */ /* assert.deepEqual(result, ["f64", 5.5]) */ - assert_float_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("number4"), NULL), - AM_VALUE_F64, - cmocka_cb).f64, 5.5, DBL_EPSILON); + assert_true(AMitemToF64( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number4"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_F64)), + &f64)); + assert_float_equal(f64, 5.5, DBL_EPSILON); /* */ /* result = doc.getWithType(root, "number5") */ /* assert.deepEqual(result, ["int", 5]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("number5"), NULL), - AM_VALUE_INT, - cmocka_cb).int_, 5); + assert_true(AMitemToInt( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number5"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_INT)), + &int_)); + assert_int_equal(int_, 5); /* */ /* result = doc.getWithType(root, "bool") */ /* assert.deepEqual(result, ["boolean", true]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("bool"), NULL), - AM_VALUE_BOOLEAN, - cmocka_cb).boolean, true); + bool boolean; + assert_true(AMitemToBool( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("bool"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BOOL)), + &boolean)); + assert_true(boolean); /* */ /* doc.put(root, "bool", false, "boolean") */ - AMfree(AMmapPutBool(doc, AM_ROOT, AMstr("bool"), false)); + AMstackItem(NULL, AMmapPutBool(doc, AM_ROOT, AMstr("bool"), false), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* result = doc.getWithType(root, "bool") */ /* assert.deepEqual(result, ["boolean", false]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("bool"), NULL), - AM_VALUE_BOOLEAN, - cmocka_cb).boolean, false); + assert_true(AMitemToBool( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("bool"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BOOL)), + &boolean)); + assert_false(boolean); /* */ /* result = 
doc.getWithType(root, "time1") */ /* assert.deepEqual(result, ["timestamp", new Date(1000)]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("time1"), NULL), - AM_VALUE_TIMESTAMP, - cmocka_cb).timestamp, 1000); + int64_t timestamp; + assert_true(AMitemToTimestamp(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("time1"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_TIMESTAMP)), + ×tamp)); + assert_int_equal(timestamp, 1000); /* */ /* result = doc.getWithType(root, "time2") */ /* assert.deepEqual(result, ["timestamp", new Date(1001)]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("time2"), NULL), - AM_VALUE_TIMESTAMP, - cmocka_cb).timestamp, 1001); + assert_true(AMitemToTimestamp(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("time2"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_TIMESTAMP)), + ×tamp)); + assert_int_equal(timestamp, 1001); /* */ /* result = doc.getWithType(root, "list") */ /* assert.deepEqual(result, ["list", "10@aabbcc"]); */ - AMobjId const* const list = AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("list"), NULL), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const list = AMitemObjId( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("list"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); assert_int_equal(AMobjIdCounter(list), 10); str = AMactorIdStr(AMobjIdActorId(list)); assert_int_equal(str.count, strlen("aabbcc")); @@ -186,38 +197,39 @@ static void test_should_be_able_to_set_and_get_a_simple_value(void** state) { /* */ /* result = doc.getWithType(root, "null") */ /* assert.deepEqual(result, ["null", null]); */ - AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("null"), NULL), - AM_VALUE_NULL, - cmocka_cb); + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("null"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_NULL)); } /** * \brief should be able to use bytes */ static void test_should_be_able_to_use_bytes(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; 
+ AMstack** stack_ptr = &base_state->stack; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* doc.put("_root", "data1", new Uint8Array([10, 11, 12])); */ static uint8_t const DATA1[] = {10, 11, 12}; - AMfree(AMmapPutBytes(doc, AM_ROOT, AMstr("data1"), AMbytes(DATA1, sizeof(DATA1)))); + AMstackItem(NULL, AMmapPutBytes(doc, AM_ROOT, AMstr("data1"), AMbytes(DATA1, sizeof(DATA1))), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* doc.put("_root", "data2", new Uint8Array([13, 14, 15]), "bytes"); */ static uint8_t const DATA2[] = {13, 14, 15}; - AMfree(AMmapPutBytes(doc, AM_ROOT, AMstr("data2"), AMbytes(DATA2, sizeof(DATA2)))); + AMstackItem(NULL, AMmapPutBytes(doc, AM_ROOT, AMstr("data2"), AMbytes(DATA2, sizeof(DATA2))), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* const value1 = doc.getWithType("_root", "data1") */ - AMbyteSpan const value1 = AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("data1"), NULL), - AM_VALUE_BYTES, - cmocka_cb).bytes; + AMbyteSpan value1; + assert_true(AMitemToBytes( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("data1"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), + &value1)); /* assert.deepEqual(value1, ["bytes", new Uint8Array([10, 11, 12])]); */ assert_int_equal(value1.count, sizeof(DATA1)); assert_memory_equal(value1.src, DATA1, sizeof(DATA1)); /* const value2 = doc.getWithType("_root", "data2") */ - AMbyteSpan const value2 = AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("data2"), NULL), - AM_VALUE_BYTES, - cmocka_cb).bytes; + AMbyteSpan value2; + assert_true(AMitemToBytes( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("data2"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), + &value2)); /* assert.deepEqual(value2, ["bytes", new Uint8Array([13, 14, 15])]); */ assert_int_equal(value2.count, sizeof(DATA2)); assert_memory_equal(value2.src, 
DATA2, sizeof(DATA2)); @@ -227,103 +239,92 @@ static void test_should_be_able_to_use_bytes(void** state) { * \brief should be able to make subobjects */ static void test_should_be_able_to_make_subobjects(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* const root = "_root" */ /* let result */ /* */ /* const submap = doc.putObject(root, "submap", {}) */ - AMobjId const* const submap = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, AMstr("submap"), AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const submap = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("submap"), AM_OBJ_TYPE_MAP), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* doc.put(submap, "number", 6, "uint") */ - AMfree(AMmapPutUint(doc, submap, AMstr("number"), 6)); + AMstackItem(NULL, AMmapPutUint(doc, submap, AMstr("number"), 6), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* assert.strictEqual(doc.pendingOps(), 2) */ assert_int_equal(AMpendingOps(doc), 2); /* */ /* result = doc.getWithType(root, "submap") */ /* assert.deepEqual(result, ["map", submap]) */ - assert_true(AMobjIdEqual(AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("submap"), NULL), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id, + assert_true(AMobjIdEqual(AMitemObjId(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("submap"), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))), submap)); /* */ /* result = doc.getWithType(submap, "number") */ /* assert.deepEqual(result, ["uint", 6]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, submap, AMstr("number"), NULL), - AM_VALUE_UINT, - cmocka_cb).uint, - 6); + uint64_t uint; + assert_true(AMitemToUint( + AMstackItem(stack_ptr, 
AMmapGet(doc, submap, AMstr("number"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 6); } /** * \brief should be able to make lists */ static void test_should_be_able_to_make_lists(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* const root = "_root" */ /* */ /* const sublist = doc.putObject(root, "numbers", []) */ - AMobjId const* const sublist = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, AMstr("numbers"), AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const sublist = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("numbers"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* doc.insert(sublist, 0, "a"); */ - AMfree(AMlistPutStr(doc, sublist, 0, true, AMstr("a"))); + AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, true, AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.insert(sublist, 1, "b"); */ - AMfree(AMlistPutStr(doc, sublist, 1, true, AMstr("b"))); + AMstackItem(NULL, AMlistPutStr(doc, sublist, 1, true, AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.insert(sublist, 2, "c"); */ - AMfree(AMlistPutStr(doc, sublist, 2, true, AMstr("c"))); + AMstackItem(NULL, AMlistPutStr(doc, sublist, 2, true, AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.insert(sublist, 0, "z"); */ - AMfree(AMlistPutStr(doc, sublist, 0, true, AMstr("z"))); + AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, true, AMstr("z")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* assert.deepEqual(doc.getWithType(sublist, 0), ["str", "z"]) */ - AMbyteSpan str = AMpush(&stack, - AMlistGet(doc, sublist, 0, NULL), - AM_VALUE_STR, - 
cmocka_cb).str; + AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, sublist, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "z", str.count); /* assert.deepEqual(doc.getWithType(sublist, 1), ["str", "a"]) */ - str = AMpush(&stack, - AMlistGet(doc, sublist, 1, NULL), - AM_VALUE_STR, - cmocka_cb).str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, sublist, 1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "a", str.count); /* assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b"]) */ - str = AMpush(&stack, - AMlistGet(doc, sublist, 2, NULL), - AM_VALUE_STR, - cmocka_cb).str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, sublist, 2, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "b", str.count); /* assert.deepEqual(doc.getWithType(sublist, 3), ["str", "c"]) */ - str = AMpush(&stack, - AMlistGet(doc, sublist, 3, NULL), - AM_VALUE_STR, - cmocka_cb).str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, sublist, 3, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "c", str.count); /* assert.deepEqual(doc.length(sublist), 4) */ assert_int_equal(AMobjSize(doc, sublist, NULL), 4); /* */ /* doc.put(sublist, 2, "b v2"); */ - AMfree(AMlistPutStr(doc, sublist, 2, false, AMstr("b v2"))); + AMstackItem(NULL, AMlistPutStr(doc, sublist, 2, false, AMstr("b v2")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b v2"]) */ - str = AMpush(&stack, - AMlistGet(doc, sublist, 2, NULL), - AM_VALUE_STR, - cmocka_cb).str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, sublist, 2, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, 
4); assert_memory_equal(str.src, "b v2", str.count); /* assert.deepEqual(doc.length(sublist), 4) */ @@ -334,233 +335,217 @@ static void test_should_be_able_to_make_lists(void** state) { * \brief lists have insert, set, splice, and push ops */ static void test_lists_have_insert_set_splice_and_push_ops(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* const root = "_root" */ /* */ /* const sublist = doc.putObject(root, "letters", []) */ - AMobjId const* const sublist = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, AMstr("letters"), AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const sublist = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("letters"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* doc.insert(sublist, 0, "a"); */ - AMfree(AMlistPutStr(doc, sublist, 0, true, AMstr("a"))); + AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, true, AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.insert(sublist, 0, "b"); */ - AMfree(AMlistPutStr(doc, sublist, 0, true, AMstr("b"))); + AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, true, AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepEqual(doc.materialize(), { letters: ["b", "a"] }) */ - AMmapItems doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* doc_item = AMmapItemsNext(&doc_items, 1); - AMbyteSpan key = AMmapItemKey(doc_item); + AMitem* doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + 
assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(doc_item, &key)); assert_int_equal(key.count, strlen("letters")); assert_memory_equal(key.src, "letters", key.count); { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&list_items), 2); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "b", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "a", str.count); - assert_null(AMlistItemsNext(&list_items, 1)); + assert_null(AMitemsNext(&list_items, 1)); } /* doc.push(sublist, "c"); */ - AMfree(AMlistPutStr(doc, sublist, SIZE_MAX, true, AMstr("c"))); + AMstackItem(NULL, AMlistPutStr(doc, sublist, SIZE_MAX, true, AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const heads = doc.getHeads() */ - AMchangeHashes const heads = AMpush(&stack, - AMgetHeads(doc), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; + AMitems const heads = AMstackItems(stack_ptr, AMgetHeads(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c"] }) */ - doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - doc_item = AMmapItemsNext(&doc_items, 1); - key = AMmapItemKey(doc_item); + doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + 
AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); assert_int_equal(key.count, strlen("letters")); assert_memory_equal(key.src, "letters", key.count); { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&list_items), 3); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "b", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "a", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "c", str.count); - assert_null(AMlistItemsNext(&list_items, 1)); + assert_null(AMitemsNext(&list_items, 1)); } /* doc.push(sublist, 3, "timestamp"); */ - AMfree(AMlistPutTimestamp(doc, sublist, SIZE_MAX, true, 3)); - /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3)] } */ - doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - doc_item = AMmapItemsNext(&doc_items, 1); - key = AMmapItemKey(doc_item); + AMstackItem(NULL, AMlistPutTimestamp(doc, sublist, SIZE_MAX, true, 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new + * Date(3)] } */ + doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, 
AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); assert_int_equal(key.count, strlen("letters")); assert_memory_equal(key.src, "letters", key.count); { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR | AM_VAL_TYPE_TIMESTAMP)); + assert_int_equal(AMitemsSize(&list_items), 4); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "b", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "a", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "c", str.count); - assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).timestamp, - 3); - assert_null(AMlistItemsNext(&list_items, 1)); + int64_t timestamp; + assert_true(AMitemToTimestamp(AMitemsNext(&list_items, 1), ×tamp)); + assert_int_equal(timestamp, 3); + assert_null(AMitemsNext(&list_items, 1)); } /* doc.splice(sublist, 1, 1, ["d", "e", "f"]); */ - static AMvalue const DATA[] = {{.str_tag = AM_VALUE_STR, .str = {.src = "d", .count = 1}}, - {.str_tag = AM_VALUE_STR, .str = {.src = "e", .count = 1}}, - {.str_tag = AM_VALUE_STR, .str = {.src = "f", .count = 1}}}; - AMfree(AMsplice(doc, sublist, 1, 1, DATA, sizeof(DATA)/sizeof(AMvalue))); - /* assert.deepEqual(doc.materialize(), { 
letters: ["b", "d", "e", "f", "c", new Date(3)] } */ - doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - doc_item = AMmapItemsNext(&doc_items, 1); - key = AMmapItemKey(doc_item); + AMresult* data = AMstackResult( + stack_ptr, AMresultFrom(3, AMitemFromStr(AMstr("d")), AMitemFromStr(AMstr("e")), AMitemFromStr(AMstr("f"))), + NULL, NULL); + AMstackItem(NULL, AMsplice(doc, sublist, 1, 1, AMresultItems(data)), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", + * new Date(3)] } */ + doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); assert_int_equal(key.count, strlen("letters")); assert_memory_equal(key.src, "letters", key.count); { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR | AM_VAL_TYPE_TIMESTAMP)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "b", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "d", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "e", str.count); - str = 
AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "f", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "c", str.count); - assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).timestamp, - 3); - assert_null(AMlistItemsNext(&list_items, 1)); + int64_t timestamp; + assert_true(AMitemToTimestamp(AMitemsNext(&list_items, 1), ×tamp)); + assert_int_equal(timestamp, 3); + assert_null(AMitemsNext(&list_items, 1)); } /* doc.put(sublist, 0, "z"); */ - AMfree(AMlistPutStr(doc, sublist, 0, false, AMstr("z"))); - /* assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", new Date(3)] } */ - doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - doc_item = AMmapItemsNext(&doc_items, 1); - key = AMmapItemKey(doc_item); + AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, false, AMstr("z")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", + * new Date(3)] } */ + doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); assert_int_equal(key.count, strlen("letters")); assert_memory_equal(key.src, "letters", key.count); { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), + 
cmocka_cb, AMexpect(AM_VAL_TYPE_STR | AM_VAL_TYPE_TIMESTAMP)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "z", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "d", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "e", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "f", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "c", str.count); - assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).timestamp, - 3); - assert_null(AMlistItemsNext(&list_items, 1)); + int64_t timestamp; + assert_true(AMitemToTimestamp(AMitemsNext(&list_items, 1), ×tamp)); + assert_int_equal(timestamp, 3); + assert_null(AMitemsNext(&list_items, 1)); } - /* assert.deepEqual(doc.materialize(sublist), ["z", "d", "e", "f", "c", new Date(3)] */ - AMlistItems sublist_items = AMpush( - &stack, - AMlistRange(doc, sublist, 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str; + /* assert.deepEqual(doc.materialize(sublist), ["z", "d", "e", "f", "c", new + * Date(3)] */ + AMitems sublist_items = AMstackItems(stack_ptr, AMlistRange(doc, sublist, 0, SIZE_MAX, NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR | AM_VAL_TYPE_TIMESTAMP)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); 
assert_int_equal(str.count, 1); assert_memory_equal(str.src, "z", str.count); - str = AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "d", str.count); - str = AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "e", str.count); - str = AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "f", str.count); - str = AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "c", str.count); - assert_int_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).timestamp, - 3); - assert_null(AMlistItemsNext(&sublist_items, 1)); + int64_t timestamp; + assert_true(AMitemToTimestamp(AMitemsNext(&sublist_items, 1), ×tamp)); + assert_int_equal(timestamp, 3); + assert_null(AMitemsNext(&sublist_items, 1)); /* assert.deepEqual(doc.length(sublist), 6) */ assert_int_equal(AMobjSize(doc, sublist, NULL), 6); - /* assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c"] } */ - doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - doc_item = AMmapItemsNext(&doc_items, 1); - key = AMmapItemKey(doc_item); + /* assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c"] + * } */ + doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); assert_int_equal(key.count, 
strlen("letters")); assert_memory_equal(key.src, "letters", key.count); { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, &heads), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, &heads), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "b", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "a", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "c", str.count); - assert_null(AMlistItemsNext(&list_items, 1)); + assert_null(AMitemsNext(&list_items, 1)); } } @@ -568,67 +553,54 @@ static void test_lists_have_insert_set_splice_and_push_ops(void** state) { * \brief should be able to delete non-existent props */ static void test_should_be_able_to_delete_non_existent_props(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* */ /* doc.put("_root", "foo", "bar") */ - AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("foo"), AMstr("bar"))); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("foo"), AMstr("bar")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.put("_root", "bip", "bap") */ - AMfree(AMmapPutStr(doc, AM_ROOT, 
AMstr("bip"), AMstr("bap"))); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("bip"), AMstr("bap")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const hash1 = doc.commit() */ - AMchangeHashes const hash1 = AMpush(&stack, - AMcommit(doc, AMstr(NULL), NULL), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; + AMitems const hash1 = + AMstackItems(stack_ptr, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* assert.deepEqual(doc.keys("_root"), ["bip", "foo"]) */ - AMstrs keys = AMpush(&stack, - AMkeys(doc, AM_ROOT, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - AMbyteSpan str = AMstrsNext(&keys, 1); + AMitems keys = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); assert_int_equal(str.count, 3); assert_memory_equal(str.src, "bip", str.count); - str = AMstrsNext(&keys, 1); + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); assert_int_equal(str.count, 3); assert_memory_equal(str.src, "foo", str.count); /* */ /* doc.delete("_root", "foo") */ - AMfree(AMmapDelete(doc, AM_ROOT, AMstr("foo"))); + AMstackItem(NULL, AMmapDelete(doc, AM_ROOT, AMstr("foo")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.delete("_root", "baz") */ - AMfree(AMmapDelete(doc, AM_ROOT, AMstr("baz"))); + AMstackItem(NULL, AMmapDelete(doc, AM_ROOT, AMstr("baz")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const hash2 = doc.commit() */ - AMchangeHashes const hash2 = AMpush(&stack, - AMcommit(doc, AMstr(NULL), NULL), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; + AMitems const hash2 = + AMstackItems(stack_ptr, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* assert.deepEqual(doc.keys("_root"), ["bip"]) */ - keys = AMpush(&stack, - AMkeys(doc, AM_ROOT, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - str = AMstrsNext(&keys, 1); + keys = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, NULL), 
cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); assert_int_equal(str.count, 3); assert_memory_equal(str.src, "bip", str.count); /* assert.deepEqual(doc.keys("_root", [hash1]), ["bip", "foo"]) */ - keys = AMpush(&stack, - AMkeys(doc, AM_ROOT, &hash1), - AM_VALUE_STRS, - cmocka_cb).strs; - str = AMstrsNext(&keys, 1); + keys = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, &hash1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); assert_int_equal(str.count, 3); assert_memory_equal(str.src, "bip", str.count); - str = AMstrsNext(&keys, 1); + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); assert_int_equal(str.count, 3); assert_memory_equal(str.src, "foo", str.count); /* assert.deepEqual(doc.keys("_root", [hash2]), ["bip"]) */ - keys = AMpush(&stack, - AMkeys(doc, AM_ROOT, &hash2), - AM_VALUE_STRS, - cmocka_cb).strs; - str = AMstrsNext(&keys, 1); + keys = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, &hash2), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); assert_int_equal(str.count, 3); assert_memory_equal(str.src, "bip", str.count); } @@ -636,123 +608,114 @@ static void test_should_be_able_to_delete_non_existent_props(void** state) { /** * \brief should be able to del */ -static void test_should_be_able_to_del(void **state) { - AMresultStack* stack = *state; +static void test_should_be_able_to_del(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* const root = "_root" */ /* */ /* doc.put(root, "xxx", "xxx"); */ - AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("xxx"), AMstr("xxx"))); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("xxx"), AMstr("xxx")), 
cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepEqual(doc.getWithType(root, "xxx"), ["str", "xxx"]) */ - AMbyteSpan const str = AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("xxx"), NULL), - AM_VALUE_STR, - cmocka_cb).str; + AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("xxx"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), + &str)); assert_int_equal(str.count, 3); assert_memory_equal(str.src, "xxx", str.count); /* doc.delete(root, "xxx"); */ - AMfree(AMmapDelete(doc, AM_ROOT, AMstr("xxx"))); + AMstackItem(NULL, AMmapDelete(doc, AM_ROOT, AMstr("xxx")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepEqual(doc.getWithType(root, "xxx"), undefined) */ - AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("xxx"), NULL), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMmapGet(doc, AM_ROOT, AMstr("xxx"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); } /** * \brief should be able to use counters */ static void test_should_be_able_to_use_counters(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* const root = "_root" */ /* */ /* doc.put(root, "counter", 10, "counter"); */ - AMfree(AMmapPutCounter(doc, AM_ROOT, AMstr("counter"), 10)); + AMstackItem(NULL, AMmapPutCounter(doc, AM_ROOT, AMstr("counter"), 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 10]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), - AM_VALUE_COUNTER, - cmocka_cb).counter, 10); + int64_t counter; + assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), cmocka_cb, + 
AMexpect(AM_VAL_TYPE_COUNTER)), + &counter)); + assert_int_equal(counter, 10); /* doc.increment(root, "counter", 10); */ - AMfree(AMmapIncrement(doc, AM_ROOT, AMstr("counter"), 10)); + AMstackItem(NULL, AMmapIncrement(doc, AM_ROOT, AMstr("counter"), 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 20]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), - AM_VALUE_COUNTER, - cmocka_cb).counter, 20); + assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_COUNTER)), + &counter)); + assert_int_equal(counter, 20); /* doc.increment(root, "counter", -5); */ - AMfree(AMmapIncrement(doc, AM_ROOT, AMstr("counter"), -5)); + AMstackItem(NULL, AMmapIncrement(doc, AM_ROOT, AMstr("counter"), -5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 15]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), - AM_VALUE_COUNTER, - cmocka_cb).counter, 15); + assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_COUNTER)), + &counter)); + assert_int_equal(counter, 15); } /** * \brief should be able to splice text */ static void test_should_be_able_to_splice_text(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* const root = "_root"; */ /* */ /* const text = doc.putObject(root, "text", ""); */ - AMobjId const* const text = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), - AM_VALUE_OBJ_ID, - 
cmocka_cb).obj_id; + AMobjId const* const text = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* doc.splice(text, 0, 0, "hello ") */ - AMfree(AMspliceText(doc, text, 0, 0, AMstr("hello "))); + AMstackItem(NULL, AMspliceText(doc, text, 0, 0, AMstr("hello ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.splice(text, 6, 0, "world") */ - AMfree(AMspliceText(doc, text, 6, 0, AMstr("world"))); + AMstackItem(NULL, AMspliceText(doc, text, 6, 0, AMstr("world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.splice(text, 11, 0, "!?") */ - AMfree(AMspliceText(doc, text, 11, 0, AMstr("!?"))); + AMstackItem(NULL, AMspliceText(doc, text, 11, 0, AMstr("!?")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepEqual(doc.getWithType(text, 0), ["str", "h"]) */ - AMbyteSpan str = AMpush(&stack, - AMlistGet(doc, text, 0, NULL), - AM_VALUE_STR, - cmocka_cb).str; + AMbyteSpan str; + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMlistGet(doc, text, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "h", str.count); /* assert.deepEqual(doc.getWithType(text, 1), ["str", "e"]) */ - str = AMpush(&stack, - AMlistGet(doc, text, 1, NULL), - AM_VALUE_STR, - cmocka_cb).str; + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMlistGet(doc, text, 1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "e", str.count); /* assert.deepEqual(doc.getWithType(text, 9), ["str", "l"]) */ - str = AMpush(&stack, - AMlistGet(doc, text, 9, NULL), - AM_VALUE_STR, - cmocka_cb).str; + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMlistGet(doc, text, 9, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "l", str.count); /* assert.deepEqual(doc.getWithType(text, 10), ["str", "d"]) */ - str = AMpush(&stack, - 
AMlistGet(doc, text, 10, NULL), - AM_VALUE_STR, - cmocka_cb).str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, text, 10, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "d", str.count); /* assert.deepEqual(doc.getWithType(text, 11), ["str", "!"]) */ - str = AMpush(&stack, - AMlistGet(doc, text, 11, NULL), - AM_VALUE_STR, - cmocka_cb).str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, text, 11, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "!", str.count); /* assert.deepEqual(doc.getWithType(text, 12), ["str", "?"]) */ - str = AMpush(&stack, - AMlistGet(doc, text, 12, NULL), - AM_VALUE_STR, - cmocka_cb).str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, text, 12, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "?", str.count); } @@ -761,52 +724,45 @@ static void test_should_be_able_to_splice_text(void** state) { * \brief should be able to save all or incrementally */ static void test_should_be_able_to_save_all_or_incrementally(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* */ /* doc.put("_root", "foo", 1) */ - AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("foo"), 1)); + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("foo"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* const save1 = doc.save() */ - AMbyteSpan const save1 = AMpush(&stack, - AMsave(doc), - AM_VALUE_BYTES, - cmocka_cb).bytes; + AMbyteSpan save1; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc), cmocka_cb, 
AMexpect(AM_VAL_TYPE_BYTES)), &save1)); /* */ /* doc.put("_root", "bar", 2) */ - AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("bar"), 2)); + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("bar"), 2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* const saveMidway = doc.clone().save(); */ - AMbyteSpan const saveMidway = AMpush(&stack, - AMsave( - AMpush(&stack, - AMclone(doc), - AM_VALUE_DOC, - cmocka_cb).doc), - AM_VALUE_BYTES, - cmocka_cb).bytes; + AMdoc* doc_clone; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMclone(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc_clone)); + AMbyteSpan saveMidway; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc_clone), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &saveMidway)); /* */ /* const save2 = doc.saveIncremental(); */ - AMbyteSpan const save2 = AMpush(&stack, - AMsaveIncremental(doc), - AM_VALUE_BYTES, - cmocka_cb).bytes; + AMbyteSpan save2; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsaveIncremental(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save2)); /* */ /* doc.put("_root", "baz", 3); */ - AMfree(AMmapPutInt(doc, AM_ROOT, AMstr("baz"), 3)); + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("baz"), 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* const save3 = doc.saveIncremental(); */ - AMbyteSpan const save3 = AMpush(&stack, - AMsaveIncremental(doc), - AM_VALUE_BYTES, - cmocka_cb).bytes; + AMbyteSpan save3; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsaveIncremental(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save3)); /* */ /* const saveA = doc.save(); */ - AMbyteSpan const saveA = AMpush(&stack, - AMsave(doc), - AM_VALUE_BYTES, - cmocka_cb).bytes; + AMbyteSpan saveA; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &saveA)); /* const saveB = new Uint8Array([...save1, ...save2, ...save3]); */ size_t const saveB_count = save1.count + save2.count + save3.count; uint8_t* const saveB_src = 
test_malloc(saveB_count); @@ -818,104 +774,83 @@ static void test_should_be_able_to_save_all_or_incrementally(void** state) { assert_memory_not_equal(saveA.src, saveB_src, saveA.count); /* */ /* const docA = load(saveA); */ - AMdoc* const docA = AMpush(&stack, - AMload(saveA.src, saveA.count), - AM_VALUE_DOC, - cmocka_cb).doc; + AMdoc* docA; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(saveA.src, saveA.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &docA)); /* const docB = load(saveB); */ - AMdoc* const docB = AMpush(&stack, - AMload(saveB_src, saveB_count), - AM_VALUE_DOC, - cmocka_cb).doc; + AMdoc* docB; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(saveB_src, saveB_count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &docB)); test_free(saveB_src); /* const docC = load(saveMidway) */ - AMdoc* const docC = AMpush(&stack, - AMload(saveMidway.src, saveMidway.count), - AM_VALUE_DOC, - cmocka_cb).doc; + AMdoc* docC; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(saveMidway.src, saveMidway.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &docC)); /* docC.loadIncremental(save3) */ - AMfree(AMloadIncremental(docC, save3.src, save3.count)); + AMstackItem(NULL, AMloadIncremental(docC, save3.src, save3.count), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)); /* */ /* assert.deepEqual(docA.keys("_root"), docB.keys("_root")); */ - AMstrs const keysA = AMpush(&stack, - AMkeys(docA, AM_ROOT, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - AMstrs const keysB = AMpush(&stack, - AMkeys(docB, AM_ROOT, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_int_equal(AMstrsCmp(&keysA, &keysB), 0); + AMitems const keysA = AMstackItems(stack_ptr, AMkeys(docA, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMitems const keysB = AMstackItems(stack_ptr, AMkeys(docB, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemsEqual(&keysA, &keysB)); /* assert.deepEqual(docA.save(), docB.save()); */ - AMbyteSpan const save = AMpush(&stack, - 
AMsave(docA), - AM_VALUE_BYTES, - cmocka_cb).bytes; - assert_memory_equal(save.src, - AMpush(&stack, - AMsave(docB), - AM_VALUE_BYTES, - cmocka_cb).bytes.src, - save.count); + AMbyteSpan docA_save; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsave(docA), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &docA_save)); + AMbyteSpan docB_save; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsave(docB), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &docB_save)); + assert_int_equal(docA_save.count, docB_save.count); + assert_memory_equal(docA_save.src, docB_save.src, docA_save.count); /* assert.deepEqual(docA.save(), docC.save()); */ - assert_memory_equal(save.src, - AMpush(&stack, - AMsave(docC), - AM_VALUE_BYTES, - cmocka_cb).bytes.src, - save.count); + AMbyteSpan docC_save; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsave(docC), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &docC_save)); + assert_int_equal(docA_save.count, docC_save.count); + assert_memory_equal(docA_save.src, docC_save.src, docA_save.count); } /** * \brief should be able to splice text #2 */ static void test_should_be_able_to_splice_text_2(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* const text = doc.putObject("_root", "text", ""); */ - AMobjId const* const text = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const text = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* doc.splice(text, 0, 0, "hello world"); */ - AMfree(AMspliceText(doc, text, 0, 0, AMstr("hello world"))); + 
AMstackItem(NULL, AMspliceText(doc, text, 0, 0, AMstr("hello world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const hash1 = doc.commit(); */ - AMchangeHashes const hash1 = AMpush(&stack, - AMcommit(doc, AMstr(NULL), NULL), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; + AMitems const hash1 = + AMstackItems(stack_ptr, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* doc.splice(text, 6, 0, "big bad "); */ - AMfree(AMspliceText(doc, text, 6, 0, AMstr("big bad "))); + AMstackItem(NULL, AMspliceText(doc, text, 6, 0, AMstr("big bad ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const hash2 = doc.commit(); */ - AMchangeHashes const hash2 = AMpush(&stack, - AMcommit(doc, AMstr(NULL), NULL), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; + AMitems const hash2 = + AMstackItems(stack_ptr, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* assert.strictEqual(doc.text(text), "hello big bad world") */ - AMbyteSpan str = AMpush(&stack, - AMtext(doc, text, NULL), - AM_VALUE_STR, - cmocka_cb).str; + AMbyteSpan str; + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, text, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, strlen("hello big bad world")); assert_memory_equal(str.src, "hello big bad world", str.count); /* assert.strictEqual(doc.length(text), 19) */ assert_int_equal(AMobjSize(doc, text, NULL), 19); /* assert.strictEqual(doc.text(text, [hash1]), "hello world") */ - str = AMpush(&stack, - AMtext(doc, text, &hash1), - AM_VALUE_STR, - cmocka_cb).str; + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, text, &hash1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, strlen("hello world")); assert_memory_equal(str.src, "hello world", str.count); /* assert.strictEqual(doc.length(text, [hash1]), 11) */ assert_int_equal(AMobjSize(doc, text, &hash1), 11); /* assert.strictEqual(doc.text(text, [hash2]), "hello big 
bad world") */ - str = AMpush(&stack, - AMtext(doc, text, &hash2), - AM_VALUE_STR, - cmocka_cb).str; + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, text, &hash2), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, strlen("hello big bad world")); assert_memory_equal(str.src, "hello big bad world", str.count); /* assert.strictEqual(doc.length(text, [hash2]), 19) */ @@ -926,266 +861,234 @@ static void test_should_be_able_to_splice_text_2(void** state) { * \brief local inc increments all visible counters in a map */ static void test_local_inc_increments_all_visible_counters_in_a_map(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc1 = create("aaaa") */ - AMdoc* const doc1 = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr(AMstr("aaaa")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); /* doc1.put("_root", "hello", "world") */ - AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("hello"), AMstr("world"))); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("hello"), AMstr("world")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* const doc2 = load(doc1.save(), "bbbb"); */ - AMbyteSpan const save = AMpush(&stack, - AMsave(doc1), - AM_VALUE_BYTES, - cmocka_cb).bytes; - AMdoc* const doc2 = AMpush(&stack, - AMload(save.src, save.count), - AM_VALUE_DOC, - cmocka_cb).doc; - AMfree(AMsetActorId(doc2, AMpush(&stack, - AMactorIdInitStr(AMstr("bbbb")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMbyteSpan save; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), 
&save)); + AMdoc* doc2; + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMload(save.src, save.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("bbbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMstackItem(NULL, AMsetActorId(doc2, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const doc3 = load(doc1.save(), "cccc"); */ - AMdoc* const doc3 = AMpush(&stack, - AMload(save.src, save.count), - AM_VALUE_DOC, - cmocka_cb).doc; - AMfree(AMsetActorId(doc3, AMpush(&stack, - AMactorIdInitStr(AMstr("cccc")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMdoc* doc3; + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMload(save.src, save.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc3)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("cccc")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMstackItem(NULL, AMsetActorId(doc3, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* let heads = doc1.getHeads() */ - AMchangeHashes const heads1 = AMpush(&stack, - AMgetHeads(doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; + AMitems const heads1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* doc1.put("_root", "cnt", 20) */ - AMfree(AMmapPutInt(doc1, AM_ROOT, AMstr("cnt"), 20)); + AMstackItem(NULL, AMmapPutInt(doc1, AM_ROOT, AMstr("cnt"), 20), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc2.put("_root", "cnt", 0, "counter") */ - AMfree(AMmapPutCounter(doc2, AM_ROOT, AMstr("cnt"), 0)); + AMstackItem(NULL, AMmapPutCounter(doc2, AM_ROOT, AMstr("cnt"), 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc3.put("_root", "cnt", 10, "counter") */ - AMfree(AMmapPutCounter(doc3, AM_ROOT, AMstr("cnt"), 10)); + AMstackItem(NULL, AMmapPutCounter(doc3, AM_ROOT, AMstr("cnt"), 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc1.applyChanges(doc2.getChanges(heads)) */ - AMchanges 
const changes2 = AMpush(&stack, - AMgetChanges(doc2, &heads1), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(doc1, &changes2)); + AMitems const changes2 = + AMstackItems(stack_ptr, AMgetChanges(doc2, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + AMstackItem(NULL, AMapplyChanges(doc1, &changes2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc1.applyChanges(doc3.getChanges(heads)) */ - AMchanges const changes3 = AMpush(&stack, - AMgetChanges(doc3, &heads1), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(doc1, &changes3)); + AMitems const changes3 = + AMstackItems(stack_ptr, AMgetChanges(doc3, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + AMstackItem(NULL, AMapplyChanges(doc1, &changes3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* let result = doc1.getAll("_root", "cnt") */ - AMobjItems result = AMpush(&stack, - AMmapGetAll(doc1, AM_ROOT, AMstr("cnt"), NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; + AMitems result = AMstackItems(stack_ptr, AMmapGetAll(doc1, AM_ROOT, AMstr("cnt"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_COUNTER | AM_VAL_TYPE_INT | AM_VAL_TYPE_STR)); /* assert.deepEqual(result, [ ['int', 20, '2@aaaa'], ['counter', 0, '2@bbbb'], ['counter', 10, '2@cccc'], ]) */ - AMobjItem const* result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).int_, 20); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - AMbyteSpan str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + AMitem* result_item = AMitemsNext(&result, 1); + int64_t int_; + assert_true(AMitemToInt(result_item, &int_)); + assert_int_equal(int_, 20); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); + AMbyteSpan str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); assert_int_equal(str.count, 4); assert_memory_equal(str.src, "aaaa", str.count); - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 0); - 
assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + result_item = AMitemsNext(&result, 1); + int64_t counter; + assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 0); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); assert_int_equal(str.count, 4); assert_memory_equal(str.src, "bbbb", str.count); - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 10); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + result_item = AMitemsNext(&result, 1); + assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 10); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); assert_int_equal(str.count, 4); assert_memory_equal(str.src, "cccc", str.count); /* doc1.increment("_root", "cnt", 5) */ - AMfree(AMmapIncrement(doc1, AM_ROOT, AMstr("cnt"), 5)); + AMstackItem(NULL, AMmapIncrement(doc1, AM_ROOT, AMstr("cnt"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* result = doc1.getAll("_root", "cnt") */ - result = AMpush(&stack, - AMmapGetAll(doc1, AM_ROOT, AMstr("cnt"), NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; + result = AMstackItems(stack_ptr, AMmapGetAll(doc1, AM_ROOT, AMstr("cnt"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_COUNTER)); /* assert.deepEqual(result, [ ['counter', 5, '2@bbbb'], ['counter', 15, '2@cccc'], ]) */ - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 5); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + result_item = AMitemsNext(&result, 1); + assert_true(AMitemToCounter(result_item, &counter)); + 
assert_int_equal(counter, 5); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); assert_int_equal(str.count, 4); assert_memory_equal(str.src, "bbbb", str.count); - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 15); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + result_item = AMitemsNext(&result, 1); + assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 15); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); assert_int_equal(str.count, 4); assert_memory_equal(str.src, "cccc", str.count); /* */ /* const save1 = doc1.save() */ - AMbyteSpan const save1 = AMpush(&stack, - AMsave(doc1), - AM_VALUE_BYTES, - cmocka_cb).bytes; + AMbyteSpan save1; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save1)); /* const doc4 = load(save1) */ - AMdoc* const doc4 = AMpush(&stack, - AMload(save1.src, save1.count), - AM_VALUE_DOC, - cmocka_cb).doc; + AMdoc* doc4; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(save1.src, save1.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc4)); /* assert.deepEqual(doc4.save(), save1); */ - assert_memory_equal(AMpush(&stack, - AMsave(doc4), - AM_VALUE_BYTES, - cmocka_cb).bytes.src, - save1.src, - save1.count); + AMbyteSpan doc4_save; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc4), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &doc4_save)); + assert_int_equal(doc4_save.count, save1.count); + assert_memory_equal(doc4_save.src, save1.src, doc4_save.count); } /** * \brief local inc increments all visible counters in a sequence */ static void test_local_inc_increments_all_visible_counters_in_a_sequence(void** state) { - AMresultStack* stack = *state; 
+ BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc1 = create("aaaa") */ - AMdoc* const doc1 = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr(AMstr("aaaa")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); /* const seq = doc1.putObject("_root", "seq", []) */ - AMobjId const* const seq = AMpush( - &stack, - AMmapPutObject(doc1, AM_ROOT, AMstr("seq"), AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const seq = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("seq"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* doc1.insert(seq, 0, "hello") */ - AMfree(AMlistPutStr(doc1, seq, 0, true, AMstr("hello"))); + AMstackItem(NULL, AMlistPutStr(doc1, seq, 0, true, AMstr("hello")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const doc2 = load(doc1.save(), "bbbb"); */ - AMbyteSpan const save1 = AMpush(&stack, - AMsave(doc1), - AM_VALUE_BYTES, - cmocka_cb).bytes; - AMdoc* const doc2 = AMpush(&stack, - AMload(save1.src, save1.count), - AM_VALUE_DOC, - cmocka_cb).doc; - AMfree(AMsetActorId(doc2, AMpush(&stack, - AMactorIdInitStr(AMstr("bbbb")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMbyteSpan save1; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save1)); + AMdoc* doc2; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(save1.src, save1.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("bbbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + 
AMstackItem(NULL, AMsetActorId(doc2, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const doc3 = load(doc1.save(), "cccc"); */ - AMdoc* const doc3 = AMpush(&stack, - AMload(save1.src, save1.count), - AM_VALUE_DOC, - cmocka_cb).doc; - AMfree(AMsetActorId(doc3, AMpush(&stack, - AMactorIdInitStr(AMstr("cccc")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMdoc* doc3; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(save1.src, save1.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc3)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("cccc")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMstackItem(NULL, AMsetActorId(doc3, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* let heads = doc1.getHeads() */ - AMchangeHashes const heads1 = AMpush(&stack, - AMgetHeads(doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; + AMitems const heads1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* doc1.put(seq, 0, 20) */ - AMfree(AMlistPutInt(doc1, seq, 0, false, 20)); + AMstackItem(NULL, AMlistPutInt(doc1, seq, 0, false, 20), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc2.put(seq, 0, 0, "counter") */ - AMfree(AMlistPutCounter(doc2, seq, 0, false, 0)); + AMstackItem(NULL, AMlistPutCounter(doc2, seq, 0, false, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc3.put(seq, 0, 10, "counter") */ - AMfree(AMlistPutCounter(doc3, seq, 0, false, 10)); + AMstackItem(NULL, AMlistPutCounter(doc3, seq, 0, false, 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc1.applyChanges(doc2.getChanges(heads)) */ - AMchanges const changes2 = AMpush(&stack, - AMgetChanges(doc2, &heads1), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(doc1, &changes2)); + AMitems const changes2 = + AMstackItems(stack_ptr, AMgetChanges(doc2, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + AMstackItem(NULL, AMapplyChanges(doc1, &changes2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); 
/* doc1.applyChanges(doc3.getChanges(heads)) */ - AMchanges const changes3 = AMpush(&stack, - AMgetChanges(doc3, &heads1), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(doc1, &changes3)); + AMitems const changes3 = + AMstackItems(stack_ptr, AMgetChanges(doc3, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + AMstackItem(NULL, AMapplyChanges(doc1, &changes3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* let result = doc1.getAll(seq, 0) */ - AMobjItems result = AMpush(&stack, - AMlistGetAll(doc1, seq, 0, NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; + AMitems result = AMstackItems(stack_ptr, AMlistGetAll(doc1, seq, 0, NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_COUNTER | AM_VAL_TYPE_INT)); /* assert.deepEqual(result, [ ['int', 20, '3@aaaa'], ['counter', 0, '3@bbbb'], ['counter', 10, '3@cccc'], ]) */ - AMobjItem const* result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).int_, 20); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - AMbyteSpan str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + AMitem* result_item = AMitemsNext(&result, 1); + int64_t int_; + assert_true(AMitemToInt(result_item, &int_)); + assert_int_equal(int_, 20); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); + AMbyteSpan str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); assert_int_equal(str.count, 4); assert_memory_equal(str.src, "aaaa", str.count); - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 0); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + result_item = AMitemsNext(&result, 1); + int64_t counter; + assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 0); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); 
assert_memory_equal(str.src, "bbbb", str.count); - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 10); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + result_item = AMitemsNext(&result, 1); + assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 10); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); assert_int_equal(str.count, 4); assert_memory_equal(str.src, "cccc", str.count); /* doc1.increment(seq, 0, 5) */ - AMfree(AMlistIncrement(doc1, seq, 0, 5)); + AMstackItem(NULL, AMlistIncrement(doc1, seq, 0, 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* result = doc1.getAll(seq, 0) */ - result = AMpush(&stack, - AMlistGetAll(doc1, seq, 0, NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; + result = AMstackItems(stack_ptr, AMlistGetAll(doc1, seq, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)); /* assert.deepEqual(result, [ ['counter', 5, '3@bbbb'], ['counter', 15, '3@cccc'], ]) */ - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 5); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + result_item = AMitemsNext(&result, 1); + assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 5); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); assert_int_equal(str.count, 4); assert_memory_equal(str.src, "bbbb", str.count); - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 15); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - str = AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))); + result_item = AMitemsNext(&result, 1); 
+ assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 15); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); assert_memory_equal(str.src, "cccc", str.count); /* */ /* const save = doc1.save() */ - AMbyteSpan const save = AMpush(&stack, - AMsave(doc1), - AM_VALUE_BYTES, - cmocka_cb).bytes; + AMbyteSpan save; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save)); /* const doc4 = load(save) */ - AMdoc* const doc4 = AMpush(&stack, - AMload(save.src, save.count), - AM_VALUE_DOC, - cmocka_cb).doc; + AMdoc* doc4; + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMload(save.src, save.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc4)); /* assert.deepEqual(doc4.save(), save); */ - assert_memory_equal(AMpush(&stack, - AMsave(doc4), - AM_VALUE_BYTES, - cmocka_cb).bytes.src, - save.src, - save.count); + AMbyteSpan doc4_save; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc4), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &doc4_save)); + assert_int_equal(doc4_save.count, save.count); + assert_memory_equal(doc4_save.src, save.src, doc4_save.count); } /** @@ -1197,314 +1100,269 @@ static void test_paths_can_be_used_instead_of_objids(void** state); * \brief should be able to fetch changes by hash */ static void test_should_be_able_to_fetch_changes_by_hash(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc1 = create("aaaa") */ - AMdoc* const doc1 = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr(AMstr("aaaa")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc1; + 
assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); /* const doc2 = create("bbbb") */ - AMdoc* const doc2 = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr(AMstr("bbbb")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("bbbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc2; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); /* doc1.put("/", "a", "b") */ - AMfree(AMmapPutStr(doc1, AM_ROOT, AMstr("a"), AMstr("b"))); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("a"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc2.put("/", "b", "c") */ - AMfree(AMmapPutStr(doc2, AM_ROOT, AMstr("b"), AMstr("c"))); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("b"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const head1 = doc1.getHeads() */ - AMchangeHashes head1 = AMpush(&stack, - AMgetHeads(doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; + AMitems head1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* const head2 = doc2.getHeads() */ - AMchangeHashes head2 = AMpush(&stack, - AMgetHeads(doc2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; + AMitems head2 = AMstackItems(stack_ptr, AMgetHeads(doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* const change1 = doc1.getChangeByHash(head1[0]) - if (change1 === null) { throw new RangeError("change1 should not be null") */ - AMbyteSpan const change_hash1 = AMchangeHashesNext(&head1, 1); - AMchanges change1 = AMpush( - &stack, - AMgetChangeByHash(doc1, change_hash1.src, change_hash1.count), - AM_VALUE_CHANGES, - cmocka_cb).changes; + if (change1 === null) { throw new RangeError("change1 should not be + null") */ + AMbyteSpan change_hash1; + 
assert_true(AMitemToChangeHash(AMitemsNext(&head1, 1), &change_hash1)); + AMchange const* change1; + assert_true(AMitemToChange(AMstackItem(stack_ptr, AMgetChangeByHash(doc1, change_hash1.src, change_hash1.count), + cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)), + &change1)); /* const change2 = doc1.getChangeByHash(head2[0]) assert.deepEqual(change2, null) */ - AMbyteSpan const change_hash2 = AMchangeHashesNext(&head2, 1); - AMpush(&stack, - AMgetChangeByHash(doc1, change_hash2.src, change_hash2.count), - AM_VALUE_VOID, - cmocka_cb); + AMbyteSpan change_hash2; + assert_true(AMitemToChangeHash(AMitemsNext(&head2, 1), &change_hash2)); + AMstackItem(NULL, AMgetChangeByHash(doc1, change_hash2.src, change_hash2.count), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepEqual(decodeChange(change1).hash, head1[0]) */ - assert_memory_equal(AMchangeHash(AMchangesNext(&change1, 1)).src, - change_hash1.src, - change_hash1.count); + assert_memory_equal(AMchangeHash(change1).src, change_hash1.src, change_hash1.count); } /** * \brief recursive sets are possible */ static void test_recursive_sets_are_possible(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc = create("aaaa") */ - AMdoc* const doc = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr(AMstr("aaaa")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]] */ - AMobjId const* const l1 = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* 
const l1 = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); { - AMobjId const* const map = AMpush( - &stack, - AMlistPutObject(doc, l1, 0, true, AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMmapPutStr(doc, map, AMstr("foo"), AMstr("bar"))); - AMobjId const* const list = AMpush( - &stack, - AMlistPutObject(doc, l1, SIZE_MAX, true, AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const map = AMitemObjId(AMstackItem( + stack_ptr, AMlistPutObject(doc, l1, 0, true, AM_OBJ_TYPE_MAP), cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMmapPutStr(doc, map, AMstr("foo"), AMstr("bar")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMlistPutObject(doc, l1, SIZE_MAX, true, AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); for (int value = 1; value != 4; ++value) { - AMfree(AMlistPutInt(doc, list, SIZE_MAX, true, value)); + AMstackItem(NULL, AMlistPutInt(doc, list, SIZE_MAX, true, value), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); } } /* const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) */ - AMobjId const* const l2 = AMpush( - &stack, - AMlistPutObject(doc, l1, 0, true, AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const l2 = AMitemObjId(AMstackItem(stack_ptr, AMlistPutObject(doc, l1, 0, true, AM_OBJ_TYPE_MAP), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); { - AMobjId const* const list = AMpush( - &stack, - AMmapPutObject(doc, l2, AMstr("zip"), AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMlistPutStr(doc, list, SIZE_MAX, true, AMstr("a"))); - AMfree(AMlistPutStr(doc, list, SIZE_MAX, true, AMstr("b"))); + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, l2, AMstr("zip"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + 
AMstackItem(NULL, AMlistPutStr(doc, list, SIZE_MAX, true, AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc, list, SIZE_MAX, true, AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); } - /* const l3 = doc.putObject("_root", "info1", "hello world") // 'text' object */ - AMobjId const* const l3 = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, AMstr("info1"), AM_OBJ_TYPE_TEXT), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMspliceText(doc, l3, 0, 0, AMstr("hello world"))); + /* const l3 = doc.putObject("_root", "info1", "hello world") // 'text' + * object */ + AMobjId const* const l3 = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("info1"), AM_OBJ_TYPE_TEXT), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMspliceText(doc, l3, 0, 0, AMstr("hello world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* doc.put("_root", "info2", "hello world") // 'str' */ - AMfree(AMmapPutStr(doc, AM_ROOT, AMstr("info2"), AMstr("hello world"))); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("info2"), AMstr("hello world")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* const l4 = doc.putObject("_root", "info3", "hello world") */ - AMobjId const* const l4 = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, AMstr("info3"), AM_OBJ_TYPE_TEXT), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMspliceText(doc, l4, 0, 0, AMstr("hello world"))); + AMobjId const* const l4 = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("info3"), AM_OBJ_TYPE_TEXT), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMspliceText(doc, l4, 0, 0, AMstr("hello world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepEqual(doc.materialize(), { "list": [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]], "info1": "hello world", "info2": "hello world", "info3": "hello world", - }) */ - AMmapItems doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - 
AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* doc_item = AMmapItemsNext(&doc_items, 1); - AMbyteSpan key = AMmapItemKey(doc_item); + }) */ + AMitems doc_items = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE | AM_VAL_TYPE_STR)); + AMitem* doc_item = AMitemsNext(&doc_items, 1); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(doc_item, &key)); assert_int_equal(key.count, strlen("info1")); assert_memory_equal(key.src, "info1", key.count); - AMbyteSpan str = AMpush(&stack, - AMtext(doc, AMmapItemObjId(doc_item), NULL), - AM_VALUE_STR, - cmocka_cb).str; + AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMtext(doc, AMitemObjId(doc_item), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, strlen("hello world")); assert_memory_equal(str.src, "hello world", str.count); - doc_item = AMmapItemsNext(&doc_items, 1); - key = AMmapItemKey(doc_item); + doc_item = AMitemsNext(&doc_items, 1); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); assert_int_equal(key.count, strlen("info2")); assert_memory_equal(key.src, "info2", key.count); - str = AMmapItemValue(doc_item).str; + assert_true(AMitemToStr(doc_item, &str)); assert_int_equal(str.count, strlen("hello world")); assert_memory_equal(str.src, "hello world", str.count); - doc_item = AMmapItemsNext(&doc_items, 1); - key = AMmapItemKey(doc_item); + doc_item = AMitemsNext(&doc_items, 1); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); assert_int_equal(key.count, strlen("info3")); assert_memory_equal(key.src, "info3", key.count); - str = AMpush(&stack, - AMtext(doc, AMmapItemObjId(doc_item), NULL), - AM_VALUE_STR, - cmocka_cb).str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMtext(doc, AMitemObjId(doc_item), NULL), 
cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, strlen("hello world")); assert_memory_equal(str.src, "hello world", str.count); - doc_item = AMmapItemsNext(&doc_items, 1); - key = AMmapItemKey(doc_item); + doc_item = AMitemsNext(&doc_items, 1); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); assert_int_equal(key.count, strlen("list")); assert_memory_equal(key.src, "list", key.count); { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMlistItem const* list_item = AMlistItemsNext(&list_items, 1); + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + AMitem const* list_item = AMitemsNext(&list_items, 1); { - AMmapItems map_items = AMpush( - &stack, - AMmapRange(doc, AMlistItemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - AMbyteSpan const key = AMmapItemKey(map_item); + AMitems map_items = + AMstackItems(stack_ptr, AMmapRange(doc, AMitemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + AMitem const* map_item = AMitemsNext(&map_items, 1); + assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(map_item, &key)); assert_int_equal(key.count, strlen("zip")); assert_memory_equal(key.src, "zip", key.count); { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(map_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(map_item), 0, SIZE_MAX, NULL), + cmocka_cb, 
AMexpect(AM_VAL_TYPE_OBJ_TYPE | AM_VAL_TYPE_STR)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "a", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "b", str.count); } } - list_item = AMlistItemsNext(&list_items, 1); + list_item = AMitemsNext(&list_items, 1); { - AMmapItems map_items = AMpush( - &stack, - AMmapRange(doc, AMlistItemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - AMbyteSpan const key = AMmapItemKey(map_item); + AMitems map_items = + AMstackItems(stack_ptr, AMmapRange(doc, AMitemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE | AM_VAL_TYPE_STR)); + AMitem* map_item = AMitemsNext(&map_items, 1); + assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(map_item, &key)); assert_int_equal(key.count, strlen("foo")); assert_memory_equal(key.src, "foo", key.count); - AMbyteSpan const str = AMmapItemValue(map_item).str; + AMbyteSpan str; + assert_true(AMitemToStr(map_item, &str)); assert_int_equal(str.count, 3); assert_memory_equal(str.src, "bar", str.count); } - list_item = AMlistItemsNext(&list_items, 1); + list_item = AMitemsNext(&list_items, 1); { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMlistItemObjId(list_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_int_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).int_, - 1); - assert_int_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).int_, - 2); - assert_int_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).int_, - 3); + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, 
AMitemObjId(list_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_INT)); + int64_t int_; + assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); + assert_int_equal(int_, 1); + assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); + assert_int_equal(int_, 2); + assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); + assert_int_equal(int_, 3); } } /* assert.deepEqual(doc.materialize(l2), { zip: ["a", "b"] }) */ - AMmapItems map_items = AMpush( - &stack, - AMmapRange(doc, l2, AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - key = AMmapItemKey(map_item); + AMitems map_items = AMstackItems(stack_ptr, AMmapRange(doc, l2, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + AMitem const* map_item = AMitemsNext(&map_items, 1); + assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(map_item, &key)); assert_int_equal(key.count, strlen("zip")); assert_memory_equal(key.src, "zip", key.count); { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(map_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(map_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "a", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "b", str.count); } - /* assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]] */ - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, l1, 0, 
SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMlistItem const* list_item = AMlistItemsNext(&list_items, 1); + /* assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" + * }, [1, 2, 3]] */ + AMitems list_items = + AMstackItems(stack_ptr, AMlistRange(doc, l1, 0, SIZE_MAX, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + AMitem const* list_item = AMitemsNext(&list_items, 1); { - AMmapItems map_items = AMpush( - &stack, - AMmapRange(doc, AMlistItemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - AMbyteSpan const key = AMmapItemKey(map_item); + AMitems map_items = + AMstackItems(stack_ptr, AMmapRange(doc, AMitemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + AMitem const* map_item = AMitemsNext(&map_items, 1); + assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(map_item, &key)); assert_int_equal(key.count, strlen("zip")); assert_memory_equal(key.src, "zip", key.count); { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(map_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMbyteSpan str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(map_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "a", str.count); - str = AMlistItemValue(AMlistItemsNext(&list_items, 1)).str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "b", str.count); } } - list_item = AMlistItemsNext(&list_items, 1); + list_item = AMitemsNext(&list_items, 1); { - AMmapItems map_items = 
AMpush( - &stack, - AMmapRange(doc, AMlistItemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - AMbyteSpan const key = AMmapItemKey(map_item); + AMitems map_items = + AMstackItems(stack_ptr, AMmapRange(doc, AMitemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + AMitem* map_item = AMitemsNext(&map_items, 1); + assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(map_item, &key)); assert_int_equal(key.count, strlen("foo")); assert_memory_equal(key.src, "foo", key.count); - AMbyteSpan const str = AMmapItemValue(map_item).str; + AMbyteSpan str; + assert_true(AMitemToStr(map_item, &str)); assert_int_equal(str.count, 3); assert_memory_equal(str.src, "bar", str.count); } - list_item = AMlistItemsNext(&list_items, 1); + list_item = AMitemsNext(&list_items, 1); { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMlistItemObjId(list_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).int_, - 1); - assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).int_, - 2); - assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).int_, - 3); + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(list_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_INT)); + int64_t int_; + assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); + assert_int_equal(int_, 1); + assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); + assert_int_equal(int_, 2); + assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); + assert_int_equal(int_, 3); } /* assert.deepEqual(doc.materialize(l4), "hello world") */ - str = AMpush(&stack, AMtext(doc, l4, NULL), AM_VALUE_STR, cmocka_cb).str; + assert_true(AMitemToStr(AMstackItem(stack_ptr, 
AMtext(doc, l4, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, strlen("hello world")); assert_memory_equal(str.src, "hello world", str.count); } @@ -1513,65 +1371,41 @@ static void test_recursive_sets_are_possible(void** state) { * \brief only returns an object id when objects are created */ static void test_only_returns_an_object_id_when_objects_are_created(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc = create("aaaa") */ - AMdoc* const doc = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr(AMstr("aaaa")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); /* const r1 = doc.put("_root", "foo", "bar") assert.deepEqual(r1, null); */ - AMpush(&stack, - AMmapPutStr(doc, AM_ROOT, AMstr("foo"), AMstr("bar")), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("foo"), AMstr("bar")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const r2 = doc.putObject("_root", "list", []) */ - AMobjId const* const r2 = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const r2 = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* const r3 = doc.put("_root", "counter", 10, "counter") assert.deepEqual(r3, null); */ - AMpush(&stack, - AMmapPutCounter(doc, AM_ROOT, AMstr("counter"), 10), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMmapPutCounter(doc, AM_ROOT, AMstr("counter"), 10), cmocka_cb, 
AMexpect(AM_VAL_TYPE_VOID)); /* const r4 = doc.increment("_root", "counter", 1) assert.deepEqual(r4, null); */ - AMpush(&stack, - AMmapIncrement(doc, AM_ROOT, AMstr("counter"), 1), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMmapIncrement(doc, AM_ROOT, AMstr("counter"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const r5 = doc.delete("_root", "counter") assert.deepEqual(r5, null); */ - AMpush(&stack, - AMmapDelete(doc, AM_ROOT, AMstr("counter")), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMmapDelete(doc, AM_ROOT, AMstr("counter")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const r6 = doc.insert(r2, 0, 10); assert.deepEqual(r6, null); */ - AMpush(&stack, - AMlistPutInt(doc, r2, 0, true, 10), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMlistPutInt(doc, r2, 0, true, 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const r7 = doc.insertObject(r2, 0, {}); */ - AMobjId const* const r7 = AMpush( - &stack, - AMlistPutObject(doc, r2, 0, true, AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const r7 = AMitemObjId(AMstackItem(stack_ptr, AMlistPutObject(doc, r2, 0, true, AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* const r8 = doc.splice(r2, 1, 0, ["a", "b", "c"]); */ - AMvalue const STRS[] = {{.str_tag = AM_VALUE_STR, .str = {.src = "a", .count = 1}}, - {.str_tag = AM_VALUE_STR, .str = {.src = "b", .count = 1}}, - {.str_tag = AM_VALUE_STR, .str = {.src = "c", .count = 1}}}; - AMpush(&stack, - AMsplice(doc, r2, 1, 0, STRS, sizeof(STRS)/sizeof(AMvalue)), - AM_VALUE_VOID, - cmocka_cb); + AMresult* data = AMstackResult( + stack_ptr, AMresultFrom(3, AMitemFromStr(AMstr("a")), AMitemFromStr(AMstr("b")), AMitemFromStr(AMstr("c"))), + NULL, NULL); + AMstackItem(NULL, AMsplice(doc, r2, 1, 0, AMresultItems(data)), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepEqual(r2, "2@aaaa"); */ assert_int_equal(AMobjIdCounter(r2), 2); AMbyteSpan str = AMactorIdStr(AMobjIdActorId(r2)); @@ -1587,75 +1421,58 @@ 
static void test_only_returns_an_object_id_when_objects_are_created(void** state * \brief objects without properties are preserved */ static void test_objects_without_properties_are_preserved(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const doc1 = create("aaaa") */ - AMdoc* const doc1 = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr(AMstr("aaaa")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); /* const a = doc1.putObject("_root", "a", {}); */ - AMobjId const* const a = AMpush( - &stack, - AMmapPutObject(doc1, AM_ROOT, AMstr("a"), AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const a = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("a"), AM_OBJ_TYPE_MAP), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* const b = doc1.putObject("_root", "b", {}); */ - AMobjId const* const b = AMpush( - &stack, - AMmapPutObject(doc1, AM_ROOT, AMstr("b"), AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const b = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("b"), AM_OBJ_TYPE_MAP), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* const c = doc1.putObject("_root", "c", {}); */ - AMobjId const* const c = AMpush( - &stack, - AMmapPutObject(doc1, AM_ROOT, AMstr("c"), AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const c = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("c"), AM_OBJ_TYPE_MAP), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* const d = doc1.put(c, "d", "dd"); */ - 
AMfree(AMmapPutStr(doc1, c, AMstr("d"), AMstr("dd"))); + AMstackItem(NULL, AMmapPutStr(doc1, c, AMstr("d"), AMstr("dd")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const saved = doc1.save(); */ - AMbyteSpan const saved = AMpush(&stack, - AMsave(doc1), - AM_VALUE_BYTES, - cmocka_cb).bytes; + AMbyteSpan saved; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &saved)); /* const doc2 = load(saved); */ - AMdoc* const doc2 = AMpush(&stack, - AMload(saved.src, saved.count), - AM_VALUE_DOC, - cmocka_cb).doc; + AMdoc* doc2; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(saved.src, saved.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); /* assert.deepEqual(doc2.getWithType("_root", "a"), ["map", a]) */ - AMmapItems doc_items = AMpush(&stack, - AMmapRange(doc2, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - assert_true(AMobjIdEqual(AMmapItemObjId(AMmapItemsNext(&doc_items, 1)), a)); + AMitems doc_items = AMstackItems(stack_ptr, AMmapRange(doc2, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + assert_true(AMobjIdEqual(AMitemObjId(AMitemsNext(&doc_items, 1)), a)); /* assert.deepEqual(doc2.keys(a), []) */ - AMstrs keys = AMpush(&stack, - AMkeys(doc1, a, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_int_equal(AMstrsSize(&keys), 0); + AMitems keys = AMstackItems(stack_ptr, AMkeys(doc1, a, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&keys), 0); /* assert.deepEqual(doc2.getWithType("_root", "b"), ["map", b]) */ - assert_true(AMobjIdEqual(AMmapItemObjId(AMmapItemsNext(&doc_items, 1)), b)); + assert_true(AMobjIdEqual(AMitemObjId(AMitemsNext(&doc_items, 1)), b)); /* assert.deepEqual(doc2.keys(b), []) */ - keys = AMpush(&stack, AMkeys(doc1, b, NULL), AM_VALUE_STRS, cmocka_cb).strs; - assert_int_equal(AMstrsSize(&keys), 0); + keys = AMstackItems(stack_ptr, AMkeys(doc1, b, NULL), cmocka_cb, 
AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&keys), 0); /* assert.deepEqual(doc2.getWithType("_root", "c"), ["map", c]) */ - assert_true(AMobjIdEqual(AMmapItemObjId(AMmapItemsNext(&doc_items, 1)), c)); + assert_true(AMobjIdEqual(AMitemObjId(AMitemsNext(&doc_items, 1)), c)); /* assert.deepEqual(doc2.keys(c), ["d"]) */ - keys = AMpush(&stack, AMkeys(doc1, c, NULL), AM_VALUE_STRS, cmocka_cb).strs; - AMbyteSpan str = AMstrsNext(&keys, 1); + keys = AMstackItems(stack_ptr, AMkeys(doc1, c, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); assert_int_equal(str.count, 1); assert_memory_equal(str.src, "d", str.count); /* assert.deepEqual(doc2.getWithType(c, "d"), ["str", "dd"]) */ - AMobjItems obj_items = AMpush(&stack, - AMobjValues(doc1, c, NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - str = AMobjItemValue(AMobjItemsNext(&obj_items, 1)).str; + AMitems obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, c, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemToStr(AMitemsNext(&obj_items, 1), &str)); assert_int_equal(str.count, 2); assert_memory_equal(str.src, "dd", str.count); } @@ -1664,177 +1481,162 @@ static void test_objects_without_properties_are_preserved(void** state) { * \brief should allow you to forkAt a heads */ static void test_should_allow_you_to_forkAt_a_heads(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const A = create("aaaaaa") */ - AMdoc* const A = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr(AMstr("aaaaaa")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMdoc* A; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), 
cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &A)); /* A.put("/", "key1", "val1"); */ - AMfree(AMmapPutStr(A, AM_ROOT, AMstr("key1"), AMstr("val1"))); + AMstackItem(NULL, AMmapPutStr(A, AM_ROOT, AMstr("key1"), AMstr("val1")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* A.put("/", "key2", "val2"); */ - AMfree(AMmapPutStr(A, AM_ROOT, AMstr("key2"), AMstr("val2"))); + AMstackItem(NULL, AMmapPutStr(A, AM_ROOT, AMstr("key2"), AMstr("val2")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const heads1 = A.getHeads(); */ - AMchangeHashes const heads1 = AMpush(&stack, - AMgetHeads(A), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; + AMitems const heads1 = AMstackItems(stack_ptr, AMgetHeads(A), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* const B = A.fork("bbbbbb") */ - AMdoc* const B = AMpush(&stack, AMfork(A, NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(B, AMpush(&stack, - AMactorIdInitStr(AMstr("bbbbbb")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMdoc* B; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMfork(A, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &B)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("bbbbbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(B, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* A.put("/", "key3", "val3"); */ - AMfree(AMmapPutStr(A, AM_ROOT, AMstr("key3"), AMstr("val3"))); + AMstackItem(NULL, AMmapPutStr(A, AM_ROOT, AMstr("key3"), AMstr("val3")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* B.put("/", "key4", "val4"); */ - AMfree(AMmapPutStr(B, AM_ROOT, AMstr("key4"), AMstr("val4"))); + AMstackItem(NULL, AMmapPutStr(B, AM_ROOT, AMstr("key4"), AMstr("val4")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* A.merge(B) */ - AMfree(AMmerge(A, B)); + AMstackItem(NULL, AMmerge(A, B), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* const heads2 = A.getHeads(); */ - AMchangeHashes const heads2 = AMpush(&stack, - AMgetHeads(A), - 
AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; + AMitems const heads2 = AMstackItems(stack_ptr, AMgetHeads(A), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* A.put("/", "key5", "val5"); */ - AMfree(AMmapPutStr(A, AM_ROOT, AMstr("key5"), AMstr("val5"))); - /* assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/", heads1) */ - AMmapItems AforkAt1_items = AMpush( - &stack, - AMmapRange( - AMpush(&stack, AMfork(A, &heads1), AM_VALUE_DOC, cmocka_cb).doc, - AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItems A1_items = AMpush(&stack, - AMmapRange(A, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads1), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - assert_true(AMmapItemsEqual(&AforkAt1_items, &A1_items)); - /* assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/", heads2) */ - AMmapItems AforkAt2_items = AMpush( - &stack, - AMmapRange( - AMpush(&stack, AMfork(A, &heads2), AM_VALUE_DOC, cmocka_cb).doc, - AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItems A2_items = AMpush(&stack, - AMmapRange(A, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads2), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - assert_true(AMmapItemsEqual(&AforkAt2_items, &A2_items)); + AMstackItem(NULL, AMmapPutStr(A, AM_ROOT, AMstr("key5"), AMstr("val5")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/", + * heads1) */ + AMdoc* A_forkAt1; + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMfork(A, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &A_forkAt1)); + AMitems AforkAt1_items = AMstackItems(stack_ptr, AMmapRange(A_forkAt1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMitems A1_items = AMstackItems(stack_ptr, AMmapRange(A, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads1), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemsEqual(&AforkAt1_items, &A1_items)); + /* 
assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/", + * heads2) */ + AMdoc* A_forkAt2; + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMfork(A, &heads2), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &A_forkAt2)); + AMitems AforkAt2_items = AMstackItems(stack_ptr, AMmapRange(A_forkAt2, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMitems A2_items = AMstackItems(stack_ptr, AMmapRange(A, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads2), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemsEqual(&AforkAt2_items, &A2_items)); } /** * \brief should handle merging text conflicts then saving & loading */ static void test_should_handle_merging_text_conflicts_then_saving_and_loading(void** state) { - AMresultStack* stack = *state; + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; /* const A = create("aabbcc") */ - AMdoc* const A = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr(AMstr("aabbcc")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aabbcc")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMdoc* A; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &A)); /* const At = A.putObject('_root', 'text', "") */ - AMobjId const* const At = AMpush( - &stack, - AMmapPutObject(A, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const At = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(A, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* A.splice(At, 0, 0, 'hello') */ - AMfree(AMspliceText(A, At, 0, 0, AMstr("hello"))); + AMstackItem(NULL, AMspliceText(A, At, 0, 0, AMstr("hello")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* const B = A.fork() */ - AMdoc* const B = 
AMpush(&stack, AMfork(A, NULL), AM_VALUE_DOC, cmocka_cb).doc; + AMdoc* B; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMfork(A, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &B)); /* */ /* assert.deepEqual(B.getWithType("_root", "text"), ["text", At]) */ - AMbyteSpan str = AMpush(&stack, - AMtext(B, - AMpush(&stack, - AMmapGet(B, AM_ROOT, AMstr("text"), NULL), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id, - NULL), - AM_VALUE_STR, - cmocka_cb).str; - AMbyteSpan const str2 = AMpush(&stack, - AMtext(A, At, NULL), - AM_VALUE_STR, - cmocka_cb).str; + AMbyteSpan str; + assert_true( + AMitemToStr(AMstackItem(stack_ptr, + AMtext(B, + AMitemObjId(AMstackItem(stack_ptr, AMmapGet(B, AM_ROOT, AMstr("text"), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))), + NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), + &str)); + AMbyteSpan str2; + assert_true(AMitemToStr(AMstackItem(stack_ptr, AMtext(A, At, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str2)); assert_int_equal(str.count, str2.count); assert_memory_equal(str.src, str2.src, str.count); /* */ /* B.splice(At, 4, 1) */ - AMfree(AMspliceText(B, At, 4, 1, AMstr(NULL))); + AMstackItem(NULL, AMspliceText(B, At, 4, 1, AMstr(NULL)), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* B.splice(At, 4, 0, '!') */ - AMfree(AMspliceText(B, At, 4, 0, AMstr("!"))); + AMstackItem(NULL, AMspliceText(B, At, 4, 0, AMstr("!")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* B.splice(At, 5, 0, ' ') */ - AMfree(AMspliceText(B, At, 5, 0, AMstr(" "))); + AMstackItem(NULL, AMspliceText(B, At, 5, 0, AMstr(" ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* B.splice(At, 6, 0, 'world') */ - AMfree(AMspliceText(B, At, 6, 0, AMstr("world"))); + AMstackItem(NULL, AMspliceText(B, At, 6, 0, AMstr("world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* A.merge(B) */ - AMfree(AMmerge(A, B)); + AMstackItem(NULL, AMmerge(A, B), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* const binary = A.save() */ - AMbyteSpan const binary = AMpush(&stack, - AMsave(A), - 
AM_VALUE_BYTES, - cmocka_cb).bytes; + AMbyteSpan binary; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(A), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &binary)); /* */ /* const C = load(binary) */ - AMdoc* const C = AMpush(&stack, - AMload(binary.src, binary.count), - AM_VALUE_DOC, - cmocka_cb).doc; + AMdoc* C; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(binary.src, binary.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &C)); /* */ /* assert.deepEqual(C.getWithType('_root', 'text'), ['text', '1@aabbcc'] */ - AMobjId const* const C_text = AMpush(&stack, - AMmapGet(C, AM_ROOT, AMstr("text"), NULL), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const C_text = AMitemObjId( + AMstackItem(stack_ptr, AMmapGet(C, AM_ROOT, AMstr("text"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); assert_int_equal(AMobjIdCounter(C_text), 1); str = AMactorIdStr(AMobjIdActorId(C_text)); assert_int_equal(str.count, strlen("aabbcc")); assert_memory_equal(str.src, "aabbcc", str.count); /* assert.deepEqual(C.text(At), 'hell! world') */ - str = AMpush(&stack, AMtext(C, At, NULL), AM_VALUE_STR, cmocka_cb).str; + assert_true(AMitemToStr(AMstackItem(stack_ptr, AMtext(C, At, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); assert_int_equal(str.count, strlen("hell! world")); assert_memory_equal(str.src, "hell! 
world", str.count); } int run_ported_wasm_basic_tests(void) { const struct CMUnitTest tests[] = { - cmocka_unit_test_setup_teardown(test_create_clone_and_free, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_start_and_commit, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_getting_a_nonexistent_prop_does_not_throw_an_error, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_set_and_get_a_simple_value, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_use_bytes, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_make_subobjects, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_make_lists, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_lists_have_insert_set_splice_and_push_ops, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_delete_non_existent_props, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_del, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_use_counters, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_save_all_or_incrementally, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text_2, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_map, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_sequence, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_fetch_changes_by_hash, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_recursive_sets_are_possible, setup_stack, 
teardown_stack), - cmocka_unit_test_setup_teardown(test_only_returns_an_object_id_when_objects_are_created, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_objects_without_properties_are_preserved, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_allow_you_to_forkAt_a_heads, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_handle_merging_text_conflicts_then_saving_and_loading, setup_stack, teardown_stack) - }; + cmocka_unit_test_setup_teardown(test_create_clone_and_free, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_start_and_commit, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_getting_a_nonexistent_prop_does_not_throw_an_error, setup_base, + teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_set_and_get_a_simple_value, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_use_bytes, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_make_subobjects, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_make_lists, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_lists_have_insert_set_splice_and_push_ops, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_delete_non_existent_props, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_del, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_use_counters, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_save_all_or_incrementally, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text_2, setup_base, teardown_base), + 
cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_map, setup_base, + teardown_base), + cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_sequence, setup_base, + teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_fetch_changes_by_hash, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_recursive_sets_are_possible, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_only_returns_an_object_id_when_objects_are_created, setup_base, + teardown_base), + cmocka_unit_test_setup_teardown(test_objects_without_properties_are_preserved, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_allow_you_to_forkAt_a_heads, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_handle_merging_text_conflicts_then_saving_and_loading, setup_base, + teardown_base)}; return cmocka_run_group_tests(tests, NULL, NULL); } diff --git a/rust/automerge-c/test/ported_wasm/suite.c b/rust/automerge-c/test/ported_wasm/suite.c index fc10fadc..440ed899 100644 --- a/rust/automerge-c/test/ported_wasm/suite.c +++ b/rust/automerge-c/test/ported_wasm/suite.c @@ -1,6 +1,6 @@ +#include #include #include -#include #include /* third-party */ @@ -11,8 +11,5 @@ extern int run_ported_wasm_basic_tests(void); extern int run_ported_wasm_sync_tests(void); int run_ported_wasm_suite(void) { - return ( - run_ported_wasm_basic_tests() + - run_ported_wasm_sync_tests() - ); + return (run_ported_wasm_basic_tests() + run_ported_wasm_sync_tests()); } diff --git a/rust/automerge-c/test/ported_wasm/sync_tests.c b/rust/automerge-c/test/ported_wasm/sync_tests.c index a1ddbf3c..099f8dbf 100644 --- a/rust/automerge-c/test/ported_wasm/sync_tests.c +++ b/rust/automerge-c/test/ported_wasm/sync_tests.c @@ -9,10 +9,12 @@ /* local */ #include -#include "../stack_utils.h" +#include +#include "../base_state.h" +#include "../cmocka_utils.h" typedef struct { - AMresultStack* stack; + 
BaseState* base_state; AMdoc* n1; AMdoc* n2; AMsyncState* s1; @@ -21,43 +23,35 @@ typedef struct { static int setup(void** state) { TestState* test_state = test_calloc(1, sizeof(TestState)); - test_state->n1 = AMpush(&test_state->stack, - AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr(AMstr("01234567")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - test_state->n2 = AMpush(&test_state->stack, - AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr(AMstr("89abcdef")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - test_state->s1 = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; - test_state->s2 = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + setup_base((void**)&test_state->base_state); + AMstack** stack_ptr = &test_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("01234567")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &test_state->n1)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("89abcdef")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &test_state->n2)); + assert_true(AMitemToSyncState( + AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &test_state->s1)); + assert_true(AMitemToSyncState( + AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &test_state->s2)); *state = test_state; return 0; } static int teardown(void** state) { TestState* test_state = *state; - AMfreeStack(&test_state->stack); + teardown_base((void**)&test_state->base_state); 
test_free(test_state); return 0; } -static void sync(AMdoc* a, - AMdoc* b, - AMsyncState* a_sync_state, - AMsyncState* b_sync_state) { +static void sync(AMdoc* a, AMdoc* b, AMsyncState* a_sync_state, AMsyncState* b_sync_state) { static size_t const MAX_ITER = 10; AMsyncMessage const* a2b_msg = NULL; @@ -66,29 +60,35 @@ static void sync(AMdoc* a, do { AMresult* a2b_msg_result = AMgenerateSyncMessage(a, a_sync_state); AMresult* b2a_msg_result = AMgenerateSyncMessage(b, b_sync_state); - AMvalue value = AMresultValue(a2b_msg_result); - switch (value.tag) { - case AM_VALUE_SYNC_MESSAGE: { - a2b_msg = value.sync_message; - AMfree(AMreceiveSyncMessage(b, b_sync_state, a2b_msg)); - } - break; - case AM_VALUE_VOID: a2b_msg = NULL; break; + AMitem* item = AMresultItem(a2b_msg_result); + switch (AMitemValType(item)) { + case AM_VAL_TYPE_SYNC_MESSAGE: { + AMitemToSyncMessage(item, &a2b_msg); + AMstackResult(NULL, AMreceiveSyncMessage(b, b_sync_state, a2b_msg), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + } break; + case AM_VAL_TYPE_VOID: + a2b_msg = NULL; + break; } - value = AMresultValue(b2a_msg_result); - switch (value.tag) { - case AM_VALUE_SYNC_MESSAGE: { - b2a_msg = value.sync_message; - AMfree(AMreceiveSyncMessage(a, a_sync_state, b2a_msg)); - } - break; - case AM_VALUE_VOID: b2a_msg = NULL; break; + item = AMresultItem(b2a_msg_result); + switch (AMitemValType(item)) { + case AM_VAL_TYPE_SYNC_MESSAGE: { + AMitemToSyncMessage(item, &b2a_msg); + AMstackResult(NULL, AMreceiveSyncMessage(a, a_sync_state, b2a_msg), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + } break; + case AM_VAL_TYPE_VOID: + b2a_msg = NULL; + break; } if (++iter > MAX_ITER) { - fail_msg("Did not synchronize within %d iterations. " - "Do you have a bug causing an infinite loop?", MAX_ITER); + fail_msg( + "Did not synchronize within %d iterations. 
" + "Do you have a bug causing an infinite loop?", + MAX_ITER); } - } while(a2b_msg || b2a_msg); + } while (a2b_msg || b2a_msg); } static time_t const TIME_0 = 0; @@ -96,151 +96,135 @@ static time_t const TIME_0 = 0; /** * \brief should send a sync message implying no local data */ -static void test_should_send_a_sync_message_implying_no_local_data(void **state) { +static void test_should_send_a_sync_message_implying_no_local_data(void** state) { /* const doc = create() const s1 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* const m1 = doc.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") } const message: DecodedSyncMessage = decodeSyncMessage(m1) */ - AMsyncMessage const* const m1 = AMpush(&test_state->stack, - AMgenerateSyncMessage( - test_state->n1, - test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + AMsyncMessage const* m1; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &m1)); /* assert.deepStrictEqual(message.heads, []) */ - AMchangeHashes heads = AMsyncMessageHeads(m1); - assert_int_equal(AMchangeHashesSize(&heads), 0); + AMitems heads = AMstackItems(stack_ptr, AMsyncMessageHeads(m1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&heads), 0); /* assert.deepStrictEqual(message.need, []) */ - AMchangeHashes needs = AMsyncMessageNeeds(m1); - assert_int_equal(AMchangeHashesSize(&needs), 0); + AMitems needs = AMstackItems(stack_ptr, AMsyncMessageNeeds(m1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&needs), 0); /* assert.deepStrictEqual(message.have.length, 1) */ - AMsyncHaves haves = AMsyncMessageHaves(m1); - assert_int_equal(AMsyncHavesSize(&haves), 1); + AMitems haves = AMstackItems(stack_ptr, AMsyncMessageHaves(m1), cmocka_cb, 
AMexpect(AM_VAL_TYPE_SYNC_HAVE)); + assert_int_equal(AMitemsSize(&haves), 1); /* assert.deepStrictEqual(message.have[0].lastSync, []) */ - AMsyncHave const* have0 = AMsyncHavesNext(&haves, 1); - AMchangeHashes last_sync = AMsyncHaveLastSync(have0); - assert_int_equal(AMchangeHashesSize(&last_sync), 0); + AMsyncHave const* have0; + assert_true(AMitemToSyncHave(AMitemsNext(&haves, 1), &have0)); + AMitems last_sync = + AMstackItems(stack_ptr, AMsyncHaveLastSync(have0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&last_sync), 0); /* assert.deepStrictEqual(message.have[0].bloom.byteLength, 0) assert.deepStrictEqual(message.changes, []) */ - AMchanges changes = AMsyncMessageChanges(m1); - assert_int_equal(AMchangesSize(&changes), 0); + AMitems changes = AMstackItems(stack_ptr, AMsyncMessageChanges(m1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&changes), 0); } /** * \brief should not reply if we have no data as well */ -static void test_should_not_reply_if_we_have_no_data_as_well(void **state) { +static void test_should_not_reply_if_we_have_no_data_as_well(void** state) { /* const n1 = create(), n2 = create() const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* const m1 = n1.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") */ - AMsyncMessage const* const m1 = AMpush(&test_state->stack, - AMgenerateSyncMessage( - test_state->n1, - test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + AMsyncMessage const* m1; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &m1)); /* n2.receiveSyncMessage(s2, m1) */ - AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, m1)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, m1), 
cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const m2 = n2.generateSyncMessage(s2) assert.deepStrictEqual(m2, null) */ - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMgenerateSyncMessage(test_state->n2, test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); } /** * \brief repos with equal heads do not need a reply message */ -static void test_repos_with_equal_heads_do_not_need_a_reply_message(void **state) { +static void test_repos_with_equal_heads_do_not_need_a_reply_message(void** state) { /* const n1 = create(), n2 = create() const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* make two nodes with the same changes */ /* const list = n1.putObject("_root", "n", []) */ - AMobjId const* const list = AMpush(&test_state->stack, - AMmapPutObject(test_state->n1, - AM_ROOT, - AMstr("n"), - AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(test_state->n1, AM_ROOT, AMstr("n"), AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.insert(list, i, i) */ - AMfree(AMlistPutUint(test_state->n1, AM_ROOT, i, true, i)); + AMstackItem(NULL, AMlistPutUint(test_state->n1, list, i, true, i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* n2.applyChanges(n1.getChanges([])) */ - AMchanges const changes = AMpush(&test_state->stack, 
- AMgetChanges(test_state->n1, NULL), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(test_state->n2, &changes)); + AMitems const items = + AMstackItems(stack_ptr, AMgetChanges(test_state->n1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + AMstackItem(NULL, AMapplyChanges(test_state->n2, &items), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); /* */ /* generate a naive sync message */ /* const m1 = n1.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") */ - AMsyncMessage const* m1 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, - test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + AMsyncMessage const* m1; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &m1)); /* assert.deepStrictEqual(s1.lastSentHeads, n1.getHeads()) */ - AMchangeHashes const last_sent_heads = AMsyncStateLastSentHeads( - test_state->s1 - ); - AMchangeHashes const heads = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&last_sent_heads, &heads), 0); + AMitems const last_sent_heads = + AMstackItems(stack_ptr, AMsyncStateLastSentHeads(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems const heads = + AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&last_sent_heads, &heads)); /* */ /* heads are equal so this message should be null */ /* n2.receiveSyncMessage(s2, m1) */ - AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, m1)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, m1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const m2 = 
n2.generateSyncMessage(s2) assert.strictEqual(m2, null) */ - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); } /** * \brief n1 should offer all changes to n2 when starting from nothing */ -static void test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing(void **state) { +static void test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing(void** state) { /* const n1 = create(), n2 = create() */ TestState* test_state = *state; - + AMstack** stack_ptr = &test_state->base_state->stack; /* make changes for n1 that n2 should request */ /* const list = n1.putObject("_root", "n", []) */ - AMobjId const* const list = AMpush( - &test_state->stack, - AMmapPutObject(test_state->n1, AM_ROOT, AMstr("n"), AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(test_state->n1, AM_ROOT, AMstr("n"), AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.insert(list, i, i) */ - AMfree(AMlistPutUint(test_state->n1, AM_ROOT, i, true, i)); + AMstackItem(NULL, AMlistPutUint(test_state->n1, list, i, true, i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ @@ -254,26 +238,24 @@ static void 
test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing(vo /** * \brief should sync peers where one has commits the other does not */ -static void test_should_sync_peers_where_one_has_commits_the_other_does_not(void **state) { +static void test_should_sync_peers_where_one_has_commits_the_other_does_not(void** state) { /* const n1 = create(), n2 = create() */ TestState* test_state = *state; - + AMstack** stack_ptr = &test_state->base_state->stack; /* make changes for n1 that n2 should request */ /* const list = n1.putObject("_root", "n", []) */ - AMobjId const* const list = AMpush( - &test_state->stack, - AMmapPutObject(test_state->n1, AM_ROOT, AMstr("n"), AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(test_state->n1, AM_ROOT, AMstr("n"), AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.insert(list, i, i) */ - AMfree(AMlistPutUint(test_state->n1, AM_ROOT, i, true, i)); + AMstackItem(NULL, AMlistPutUint(test_state->n1, list, i, true, i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ @@ -287,19 +269,20 @@ static void test_should_sync_peers_where_one_has_commits_the_other_does_not(void /** * \brief should work with prior sync state */ -static void test_should_work_with_prior_sync_state(void **state) { +static void test_should_work_with_prior_sync_state(void** state) { /* create & synchronize two nodes */ /* 
const n1 = create(), n2 = create() const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); @@ -308,10 +291,10 @@ static void test_should_work_with_prior_sync_state(void **state) { /* for (let i = 5; i < 10; i++) { */ for (size_t i = 5; i != 10; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ @@ -325,326 +308,333 @@ static void test_should_work_with_prior_sync_state(void **state) { /** * \brief should not generate messages once synced */ -static void test_should_not_generate_messages_once_synced(void **state) { +static void test_should_not_generate_messages_once_synced(void** state) { /* create & synchronize two nodes */ /* const n1 = create('abc123'), n2 = create('def456') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; - AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr(AMstr("abc123")), - 
AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr(AMstr("def456")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMstack** stack_ptr = &test_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("abc123")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(test_state->n1, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("def456")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(test_state->n2, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* let message, patch for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { /* n2.put("_root", "y", i) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, AMstr("y"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("y"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n2.commit("", 0) */ - AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* n1 reports what it has */ /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") */ - AMsyncMessage const* message = 
AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, - test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (message === null) { throw new RangeError("message should not be + null") */ + AMsyncMessage const* message; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); /* */ /* n2 receives that message and sends changes along with what it has */ /* n2.receiveSyncMessage(s2, message) */ - AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, message)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, message), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be null") */ - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - AMchanges message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 5); + if (message === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); + AMitems message_changes = + AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&message_changes), 5); /* */ /* n1 receives the changes and replies with the changes it now knows that * n2 needs */ /* n1.receiveSyncMessage(s1, message) */ - AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, message)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, message), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message 
should not be null") */ - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 5); + if (message === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); + message_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&message_changes), 5); /* */ /* n2 applies the changes and sends confirmation ending the exchange */ /* n2.receiveSyncMessage(s2, message) */ - AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, message)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, message), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be null") */ - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (message === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); /* */ /* n1 receives the message and has nothing more to say */ /* n1.receiveSyncMessage(s1, message) */ - AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, message)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, message), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* message = n1.generateSyncMessage(s1) assert.deepStrictEqual(message, null) */ - AMpush(&test_state->stack, - 
AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMgenerateSyncMessage(test_state->n1, test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* //assert.deepStrictEqual(patch, null) // no changes arrived */ /* */ /* n2 also has nothing left to say */ /* message = n2.generateSyncMessage(s2) assert.deepStrictEqual(message, null) */ - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMgenerateSyncMessage(test_state->n2, test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); } /** * \brief should allow simultaneous messages during synchronization */ -static void test_should_allow_simultaneous_messages_during_synchronization(void **state) { +static void test_should_allow_simultaneous_messages_during_synchronization(void** state) { /* create & synchronize two nodes */ /* const n1 = create('abc123'), n2 = create('def456') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; - AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr(AMstr("abc123")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr(AMstr("def456")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMstack** stack_ptr = &test_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("abc123")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(test_state->n1, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("def456")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(test_state->n2, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* for (let i = 0; i < 5; i++) { */ for (size_t i = 
0; i != 5; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { /* n2.put("_root", "y", i) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, AMstr("y"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("y"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n2.commit("", 0) */ - AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* const head1 = n1.getHeads()[0], head2 = n2.getHeads()[0] */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMbyteSpan const head1 = AMchangeHashesNext(&heads1, 1); - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMbyteSpan const head2 = AMchangeHashesNext(&heads2, 1); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMbyteSpan head1; + assert_true(AMitemToChangeHash(AMitemsNext(&heads1, 1), &head1)); + AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMbyteSpan head2; + assert_true(AMitemToChangeHash(AMitemsNext(&heads2, 1), &head2)); /* */ /* both sides report what they have but have no shared peer state */ /* let msg1to2, msg2to1 msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should 
not be null") */ - AMsyncMessage const* msg1to2 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, - test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (msg1to2 === null) { throw new RangeError("message should not be + null") */ + AMsyncMessage const* msg1to2; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg1to2)); /* msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be null") */ - AMsyncMessage const* msg2to1 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, - test_state->s2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (msg2to1 === null) { throw new RangeError("message should not be + null") */ + AMsyncMessage const* msg2to1; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg2to1)); /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) */ - AMchanges msg1to2_changes = AMsyncMessageChanges(msg1to2); - assert_int_equal(AMchangesSize(&msg1to2_changes), 0); - /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0 */ - AMsyncHaves msg1to2_haves = AMsyncMessageHaves(msg1to2); - AMsyncHave const* msg1to2_have = AMsyncHavesNext(&msg1to2_haves, 1); - AMchangeHashes msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); - assert_int_equal(AMchangeHashesSize(&msg1to2_last_sync), 0); + AMitems msg1to2_changes = + AMstackItems(stack_ptr, AMsyncMessageChanges(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&msg1to2_changes), 0); + /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, + * 0 */ + AMitems msg1to2_haves = + AMstackItems(stack_ptr, AMsyncMessageHaves(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_HAVE)); + 
AMsyncHave const* msg1to2_have; + assert_true(AMitemToSyncHave(AMitemsNext(&msg1to2_haves, 1), &msg1to2_have)); + AMitems msg1to2_last_sync = + AMstackItems(stack_ptr, AMsyncHaveLastSync(msg1to2_have), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&msg1to2_last_sync), 0); /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) */ - AMchanges msg2to1_changes = AMsyncMessageChanges(msg2to1); - assert_int_equal(AMchangesSize(&msg2to1_changes), 0); - /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0 */ - AMsyncHaves msg2to1_haves = AMsyncMessageHaves(msg2to1); - AMsyncHave const* msg2to1_have = AMsyncHavesNext(&msg2to1_haves, 1); - AMchangeHashes msg2to1_last_sync = AMsyncHaveLastSync(msg2to1_have); - assert_int_equal(AMchangeHashesSize(&msg2to1_last_sync), 0); + AMitems msg2to1_changes = + AMstackItems(stack_ptr, AMsyncMessageChanges(msg2to1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&msg2to1_changes), 0); + /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, + * 0 */ + AMitems msg2to1_haves = + AMstackItems(stack_ptr, AMsyncMessageHaves(msg2to1), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_HAVE)); + AMsyncHave const* msg2to1_have; + assert_true(AMitemToSyncHave(AMitemsNext(&msg2to1_haves, 1), &msg2to1_have)); + AMitems msg2to1_last_sync = + AMstackItems(stack_ptr, AMsyncHaveLastSync(msg2to1_have), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&msg2to1_last_sync), 0); /* */ /* n1 and n2 receive that message and update sync state but make no patc */ /* n1.receiveSyncMessage(s1, msg2to1) */ - AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* n2.receiveSyncMessage(s2, msg1to2) */ - AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2)); + 
AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* */ /* now both reply with their local changes that the other lacks * (standard warning that 1% of the time this will result in a "needs" * message) */ /* msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") */ - msg1to2 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (msg1to2 === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg1to2)); /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 5) */ - msg1to2_changes = AMsyncMessageChanges(msg1to2); - assert_int_equal(AMchangesSize(&msg1to2_changes), 5); + msg1to2_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&msg1to2_changes), 5); /* msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be null") */ - msg2to1 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (msg2to1 === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg2to1)); /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) */ - msg2to1_changes = AMsyncMessageChanges(msg2to1); - assert_int_equal(AMchangesSize(&msg2to1_changes), 5); + msg2to1_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(msg2to1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + 
assert_int_equal(AMitemsSize(&msg2to1_changes), 5); /* */ /* both should now apply the changes and update the frontend */ /* n1.receiveSyncMessage(s1, msg2to1) */ - AMfree(AMreceiveSyncMessage(test_state->n1, - test_state->s1, - msg2to1)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepStrictEqual(n1.getMissingDeps(), []) */ - AMchangeHashes missing_deps = AMpush(&test_state->stack, - AMgetMissingDeps(test_state->n1, NULL), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesSize(&missing_deps), 0); + AMitems missing_deps = + AMstackItems(stack_ptr, AMgetMissingDeps(test_state->n1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&missing_deps), 0); /* //assert.notDeepStrictEqual(patch1, null) assert.deepStrictEqual(n1.materialize(), { x: 4, y: 4 }) */ - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n1, AM_ROOT, AMstr("x"), NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 4); - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n1, AM_ROOT, AMstr("y"), NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 4); + uint64_t uint; + assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n1, AM_ROOT, AMstr("x"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 4); + assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n1, AM_ROOT, AMstr("y"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 4); /* */ /* n2.receiveSyncMessage(s2, msg1to2) */ - AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepStrictEqual(n2.getMissingDeps(), []) */ - missing_deps = AMpush(&test_state->stack, - AMgetMissingDeps(test_state->n2, NULL), - 
AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesSize(&missing_deps), 0); + missing_deps = + AMstackItems(stack_ptr, AMgetMissingDeps(test_state->n2, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&missing_deps), 0); /* //assert.notDeepStrictEqual(patch2, null) assert.deepStrictEqual(n2.materialize(), { x: 4, y: 4 }) */ - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n2, AM_ROOT, AMstr("x"), NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 4); - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n2, AM_ROOT, AMstr("y"), NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 4); + assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n2, AM_ROOT, AMstr("x"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 4); + assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n2, AM_ROOT, AMstr("y"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 4); /* */ /* The response acknowledges the changes received and sends no further * changes */ /* msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") */ - msg1to2 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (msg1to2 === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg1to2)); /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) */ - msg1to2_changes = AMsyncMessageChanges(msg1to2); - assert_int_equal(AMchangesSize(&msg1to2_changes), 0); + msg1to2_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + 
assert_int_equal(AMitemsSize(&msg1to2_changes), 0); /* msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be null") */ - msg2to1 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (msg2to1 === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg2to1)); /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) */ - msg2to1_changes = AMsyncMessageChanges(msg2to1); - assert_int_equal(AMchangesSize(&msg2to1_changes), 0); + msg2to1_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(msg2to1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&msg2to1_changes), 0); /* */ /* After receiving acknowledgements, their shared heads should be equal */ /* n1.receiveSyncMessage(s1, msg2to1) */ - AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* n2.receiveSyncMessage(s2, msg1to2) */ - AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort()) */ - AMchangeHashes s1_shared_heads = AMsyncStateSharedHeads(test_state->s1); - assert_memory_equal(AMchangeHashesNext(&s1_shared_heads, 1).src, - head1.src, - head1.count); - assert_memory_equal(AMchangeHashesNext(&s1_shared_heads, 1).src, - head2.src, - head2.count); + AMitems s1_shared_heads = + AMstackItems(stack_ptr, AMsyncStateSharedHeads(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMbyteSpan 
s1_shared_change_hash; + assert_true(AMitemToChangeHash(AMitemsNext(&s1_shared_heads, 1), &s1_shared_change_hash)); + assert_memory_equal(s1_shared_change_hash.src, head1.src, head1.count); + assert_true(AMitemToChangeHash(AMitemsNext(&s1_shared_heads, 1), &s1_shared_change_hash)); + assert_memory_equal(s1_shared_change_hash.src, head2.src, head2.count); /* assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort()) */ - AMchangeHashes s2_shared_heads = AMsyncStateSharedHeads(test_state->s2); - assert_memory_equal(AMchangeHashesNext(&s2_shared_heads, 1).src, - head1.src, - head1.count); - assert_memory_equal(AMchangeHashesNext(&s2_shared_heads, 1).src, - head2.src, - head2.count); + AMitems s2_shared_heads = + AMstackItems(stack_ptr, AMsyncStateSharedHeads(test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMbyteSpan s2_shared_change_hash; + assert_true(AMitemToChangeHash(AMitemsNext(&s2_shared_heads, 1), &s2_shared_change_hash)); + assert_memory_equal(s2_shared_change_hash.src, head1.src, head1.count); + assert_true(AMitemToChangeHash(AMitemsNext(&s2_shared_heads, 1), &s2_shared_change_hash)); + assert_memory_equal(s2_shared_change_hash.src, head2.src, head2.count); /* //assert.deepStrictEqual(patch1, null) //assert.deepStrictEqual(patch2, null) */ /* */ /* We're in sync, no more messages required */ /* msg1to2 = n1.generateSyncMessage(s1) assert.deepStrictEqual(msg1to2, null) */ - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMgenerateSyncMessage(test_state->n1, test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* msg2to1 = n2.generateSyncMessage(s2) assert.deepStrictEqual(msg2to1, null) */ - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMgenerateSyncMessage(test_state->n2, test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* If we make one more 
change and start another sync then its lastSync * should be updated */ /* n1.put("_root", "x", 5) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 5)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") */ - msg1to2 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort( */ - msg1to2_haves = AMsyncMessageHaves(msg1to2); - msg1to2_have = AMsyncHavesNext(&msg1to2_haves, 1); - msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); - AMbyteSpan msg1to2_last_sync_next = AMchangeHashesNext(&msg1to2_last_sync, 1); + if (msg1to2 === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg1to2)); + /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, + * [head1, head2].sort( */ + msg1to2_haves = AMstackItems(stack_ptr, AMsyncMessageHaves(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_HAVE)); + assert_true(AMitemToSyncHave(AMitemsNext(&msg1to2_haves, 1), &msg1to2_have)); + msg1to2_last_sync = + AMstackItems(stack_ptr, AMsyncHaveLastSync(msg1to2_have), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMbyteSpan msg1to2_last_sync_next; + assert_true(AMitemToChangeHash(AMitemsNext(&msg1to2_last_sync, 1), &msg1to2_last_sync_next)); assert_int_equal(msg1to2_last_sync_next.count, head1.count); assert_memory_equal(msg1to2_last_sync_next.src, head1.src, head1.count); - msg1to2_last_sync_next = AMchangeHashesNext(&msg1to2_last_sync, 1); + assert_true(AMitemToChangeHash(AMitemsNext(&msg1to2_last_sync, 1), &msg1to2_last_sync_next)); 
assert_int_equal(msg1to2_last_sync_next.count, head2.count); assert_memory_equal(msg1to2_last_sync_next.src, head2.src, head2.count); } @@ -652,87 +642,89 @@ static void test_should_allow_simultaneous_messages_during_synchronization(void /** * \brief should assume sent changes were received until we hear otherwise */ -static void test_should_assume_sent_changes_were_received_until_we_hear_otherwise(void **state) { +static void test_should_assume_sent_changes_were_received_until_we_hear_otherwise(void** state) { /* const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* let message = null */ /* */ /* const items = n1.putObject("_root", "items", []) */ - AMobjId const* items = AMpush(&test_state->stack, - AMmapPutObject(test_state->n1, - AM_ROOT, - AMstr("items"), - AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const items = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(test_state->n1, AM_ROOT, AMstr("items"), AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* sync(n1, n2, s1, s2) */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); /* */ /* n1.push(items, "x") */ - AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("x"))); + AMstackItem(NULL, AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("x")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be 
null") */ - AMsyncMessage const* message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, - test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (message === null) { throw new RangeError("message should not be null") + */ + AMsyncMessage const* message; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); /* assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) */ - AMchanges message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 1); + AMitems message_changes = + AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&message_changes), 1); /* */ /* n1.push(items, "y") */ - AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("y"))); + AMstackItem(NULL, AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("y")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") */ - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (message === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); /* assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) */ - message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 1); + message_changes = 
AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&message_changes), 1); /* */ /* n1.push(items, "z") */ - AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("z"))); + AMstackItem(NULL, AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("z")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") */ - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (message === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); /* assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) */ - message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 1); + message_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&message_changes), 1); } /** * \brief should work regardless of who initiates the exchange */ -static void test_should_work_regardless_of_who_initiates_the_exchange(void **state) { +static void test_should_work_regardless_of_who_initiates_the_exchange(void** state) { /* create & synchronize two nodes */ /* const n1 = create(), n2 = create() const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { 
/* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* sync(n1, n2, s1, s2) */ @@ -742,10 +734,10 @@ static void test_should_work_regardless_of_who_initiates_the_exchange(void **sta /* for (let i = 5; i < 10; i++) { */ for (size_t i = 5; i != 10; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ @@ -759,24 +751,26 @@ static void test_should_work_regardless_of_who_initiates_the_exchange(void **sta /** * \brief should work without prior sync state */ -static void test_should_work_without_prior_sync_state(void **state) { - /* Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 - * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - * `-- c15 <-- c16 <-- c17 - * lastSync is undefined. */ +static void test_should_work_without_prior_sync_state(void** state) { + /* Scenario: ,-- + * c10 <-- c11 <-- c12 <-- c13 <-- c14 c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 + * <-- c6 <-- c7 <-- c8 <-- c9 <-+ + * `-- + * c15 <-- c16 <-- c17 lastSync is undefined. 
*/ /* */ /* create two peers both with divergent commits */ /* const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* sync(n1, n2) */ @@ -785,19 +779,19 @@ static void test_should_work_without_prior_sync_state(void **state) { /* for (let i = 10; i < 15; i++) { */ for (size_t i = 10; i != 15; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* for (let i = 15; i < 18; i++) { */ for (size_t i = 15; i != 18; ++i) { /* n2.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n2.commit("", 0) */ - AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ @@ -805,15 +799,9 @@ static void 
test_should_work_without_prior_sync_state(void **state) { /* sync(n1, n2) */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); } @@ -821,25 +809,27 @@ static void test_should_work_without_prior_sync_state(void **state) { /** * \brief should work with prior sync state */ -static void test_should_work_with_prior_sync_state_2(void **state) { +static void test_should_work_with_prior_sync_state_2(void** state) { /* Scenario: - * ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 - * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - * `-- c15 <-- c16 <-- c17 - * lastSync is c9. */ + * ,-- + * c10 <-- c11 <-- c12 <-- c13 <-- c14 c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 + * <-- c6 <-- c7 <-- c8 <-- c9 <-+ + * `-- + * c15 <-- c16 <-- c17 lastSync is c9. 
*/ /* */ /* create two peers both with divergent commits */ /* const n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* sync(n1, n2, s1, s2) */ @@ -848,54 +838,44 @@ static void test_should_work_with_prior_sync_state_2(void **state) { /* for (let i = 10; i < 15; i++) { */ for (size_t i = 10; i != 15; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* for (let i = 15; i < 18; i++) { */ for (size_t i = 15; i != 18; ++i) { /* n2.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n2.commit("", 0) */ - AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* s1 = decodeSyncState(encodeSyncState(s1)) */ - AMbyteSpan encoded = AMpush(&test_state->stack, - 
AMsyncStateEncode(test_state->s1), - AM_VALUE_BYTES, - cmocka_cb).bytes; - AMsyncState* s1 = AMpush(&test_state->stack, - AMsyncStateDecode(encoded.src, encoded.count), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + AMbyteSpan encoded; + assert_true(AMitemToBytes( + AMstackItem(stack_ptr, AMsyncStateEncode(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &encoded)); + AMsyncState* s1; + assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded.src, encoded.count), cmocka_cb, + AMexpect(AM_VAL_TYPE_SYNC_STATE)), + &s1)); /* s2 = decodeSyncState(encodeSyncState(s2)) */ - encoded = AMpush(&test_state->stack, - AMsyncStateEncode(test_state->s2), - AM_VALUE_BYTES, - cmocka_cb).bytes; - AMsyncState* s2 = AMpush(&test_state->stack, - AMsyncStateDecode(encoded.src, - encoded.count), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + assert_true(AMitemToBytes( + AMstackItem(stack_ptr, AMsyncStateEncode(test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &encoded)); + AMsyncState* s2; + assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded.src, encoded.count), cmocka_cb, + AMexpect(AM_VAL_TYPE_SYNC_STATE)), + &s2)); /* */ /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ assert_false(AMequal(test_state->n1, test_state->n2)); /* sync(n1, n2, s1, s2) */ sync(test_state->n1, test_state->n2, s1, s2); /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, 
AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); } @@ -903,39 +883,39 @@ static void test_should_work_with_prior_sync_state_2(void **state) { /** * \brief should ensure non-empty state after sync */ -static void test_should_ensure_non_empty_state_after_sync(void **state) { +static void test_should_ensure_non_empty_state_after_sync(void** state) { /* const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* for (let i = 0; i < 3; i++) { */ for (size_t i = 0; i != 3; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* sync(n1, n2, s1, s2) */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); /* */ /* assert.deepStrictEqual(s1.sharedHeads, n1.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes shared_heads1 = AMsyncStateSharedHeads(test_state->s1); - assert_int_equal(AMchangeHashesCmp(&shared_heads1, &heads1), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems shared_heads1 = + AMstackItems(stack_ptr, AMsyncStateSharedHeads(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&shared_heads1, &heads1)); /* assert.deepStrictEqual(s2.sharedHeads, 
n1.getHeads()) */ - AMchangeHashes shared_heads2 = AMsyncStateSharedHeads(test_state->s2); - assert_int_equal(AMchangeHashesCmp(&shared_heads2, &heads1), 0); + AMitems shared_heads2 = + AMstackItems(stack_ptr, AMsyncStateSharedHeads(test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&shared_heads2, &heads1)); } /** * \brief should re-sync after one node crashed with data loss */ -static void test_should_resync_after_one_node_crashed_with_data_loss(void **state) { +static void test_should_resync_after_one_node_crashed_with_data_loss(void** state) { /* Scenario: (r) (n2) (n1) * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 * n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync @@ -946,15 +926,16 @@ static void test_should_resync_after_one_node_crashed_with_data_loss(void **stat let s1 = initSyncState() const s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* n1 makes three changes, which we sync to n2 */ /* for (let i = 0; i < 3; i++) { */ for (size_t i = 0; i != 3; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); @@ -963,28 +944,25 @@ static void test_should_resync_after_one_node_crashed_with_data_loss(void **stat /* let r let rSyncState ;[r, rSyncState] = [n2.clone(), s2.clone()] */ - AMdoc* r = AMpush(&test_state->stack, - AMclone(test_state->n2), - AM_VALUE_DOC, - cmocka_cb).doc; - AMbyteSpan const encoded_s2 = AMpush(&test_state->stack, - AMsyncStateEncode(test_state->s2), - 
AM_VALUE_BYTES, - cmocka_cb).bytes; - AMsyncState* sync_state_r = AMpush(&test_state->stack, - AMsyncStateDecode(encoded_s2.src, - encoded_s2.count), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + AMdoc* r; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMclone(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &r)); + AMbyteSpan encoded_s2; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsyncStateEncode(test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), + &encoded_s2)); + AMsyncState* sync_state_r; + assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded_s2.src, encoded_s2.count), cmocka_cb, + AMexpect(AM_VAL_TYPE_SYNC_STATE)), + &sync_state_r)); /* */ /* sync another few commits */ /* for (let i = 3; i < 6; i++) { */ for (size_t i = 3; i != 6; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* sync(n1, n2, s1, s2) */ @@ -992,15 +970,9 @@ static void test_should_resync_after_one_node_crashed_with_data_loss(void **stat /* */ /* everyone should be on the same page here */ /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads2 = AMstackItems(stack_ptr, 
AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); /* */ @@ -1009,132 +981,106 @@ static void test_should_resync_after_one_node_crashed_with_data_loss(void **stat /* for (let i = 6; i < 9; i++) { */ for (size_t i = 6; i != 9; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* s1 = decodeSyncState(encodeSyncState(s1)) */ - AMbyteSpan const encoded_s1 = AMpush(&test_state->stack, - AMsyncStateEncode(test_state->s1), - AM_VALUE_BYTES, - cmocka_cb).bytes; - AMsyncState* const s1 = AMpush(&test_state->stack, - AMsyncStateDecode(encoded_s1.src, - encoded_s1.count), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + AMbyteSpan encoded_s1; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsyncStateEncode(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), + &encoded_s1)); + AMsyncState* s1; + assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded_s1.src, encoded_s1.count), cmocka_cb, + AMexpect(AM_VAL_TYPE_SYNC_STATE)), + &s1)); /* rSyncState = decodeSyncState(encodeSyncState(rSyncState)) */ - AMbyteSpan const encoded_r = AMpush(&test_state->stack, - AMsyncStateEncode(sync_state_r), - AM_VALUE_BYTES, - cmocka_cb).bytes; - sync_state_r = AMpush(&test_state->stack, - AMsyncStateDecode(encoded_r.src, encoded_r.count), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + AMbyteSpan encoded_r; + assert_true(AMitemToBytes( + AMstackItem(stack_ptr, AMsyncStateEncode(sync_state_r), 
cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &encoded_r)); + assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded_r.src, encoded_r.count), cmocka_cb, + AMexpect(AM_VAL_TYPE_SYNC_STATE)), + &sync_state_r)); /* */ /* assert.notDeepStrictEqual(n1.getHeads(), r.getHeads()) */ - heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads_r = AMpush(&test_state->stack, - AMgetHeads(r), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_not_equal(AMchangeHashesCmp(&heads1, &heads_r), 0); + heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads_r = AMstackItems(stack_ptr, AMgetHeads(r), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_false(AMitemsEqual(&heads1, &heads_r)); /* assert.notDeepStrictEqual(n1.materialize(), r.materialize()) */ assert_false(AMequal(test_state->n1, r)); /* assert.deepStrictEqual(n1.materialize(), { x: 8 }) */ - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n1, AM_ROOT, AMstr("x"), NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 8); + uint64_t uint; + assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n1, AM_ROOT, AMstr("x"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 8); /* assert.deepStrictEqual(r.materialize(), { x: 2 }) */ - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(r, AM_ROOT, AMstr("x"), NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 2); + assert_true(AMitemToUint( + AMstackItem(stack_ptr, AMmapGet(r, AM_ROOT, AMstr("x"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)), &uint)); + assert_int_equal(uint, 2); /* sync(n1, r, s1, rSyncState) */ sync(test_state->n1, r, test_state->s1, sync_state_r); /* assert.deepStrictEqual(n1.getHeads(), r.getHeads()) */ - heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - 
cmocka_cb).change_hashes; - heads_r = AMpush(&test_state->stack, - AMgetHeads(r), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads_r), 0); + heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + heads_r = AMstackItems(stack_ptr, AMgetHeads(r), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads_r)); /* assert.deepStrictEqual(n1.materialize(), r.materialize()) */ assert_true(AMequal(test_state->n1, r)); } /** - * \brief should re-sync after one node experiences data loss without disconnecting + * \brief should re-sync after one node experiences data loss without + * disconnecting */ -static void test_should_resync_after_one_node_experiences_data_loss_without_disconnecting(void **state) { +static void test_should_resync_after_one_node_experiences_data_loss_without_disconnecting(void** state) { /* const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* n1 makes three changes which we sync to n2 */ /* for (let i = 0; i < 3; i++) { */ for (size_t i = 0; i != 3; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* sync(n1, n2, s1, s2) */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); /* */ /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - 
cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); /* */ /* const n2AfterDataLoss = create('89abcdef') */ - AMdoc* n2_after_data_loss = AMpush(&test_state->stack, - AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr(AMstr("89abcdef")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("89abcdef")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMdoc* n2_after_data_loss; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), + &n2_after_data_loss)); /* */ /* "n2" now has no data, but n1 still thinks it does. 
Note we don't do * decodeSyncState(encodeSyncState(s1)) in order to simulate data loss * without disconnecting */ /* sync(n1, n2AfterDataLoss, s1, initSyncState()) */ - AMsyncState* s2_after_data_loss = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + AMsyncState* s2_after_data_loss; + assert_true(AMitemToSyncState( + AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &s2_after_data_loss)); sync(test_state->n1, n2_after_data_loss, test_state->s1, s2_after_data_loss); /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); } @@ -1142,33 +1088,33 @@ static void test_should_resync_after_one_node_experiences_data_loss_without_disc /** * \brief should handle changes concurrent to the last sync heads */ -static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void **state) { - /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98' */ +static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void** state) { + /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = + * create('fedcba98' */ TestState* test_state = *state; - AMdoc* n3 = AMpush(&test_state->stack, - AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr(AMstr("fedcba98")), - 
AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - /* const s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState( */ + AMstack** stack_ptr = &test_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("fedcba98")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMdoc* n3; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &n3)); + /* const s12 = initSyncState(), s21 = initSyncState(), s23 = + * initSyncState(), s32 = initSyncState( */ AMsyncState* s12 = test_state->s1; AMsyncState* s21 = test_state->s2; - AMsyncState* s23 = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; - AMsyncState* s32 = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + AMsyncState* s23; + assert_true(AMitemToSyncState( + AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &s23)); + AMsyncState* s32; + assert_true(AMitemToSyncState( + AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &s32)); /* */ /* Change 1 is known to all three nodes */ /* //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) */ /* n1.put("_root", "x", 1); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 1)); - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* sync(n1, n2, s12, s21) */ sync(test_state->n1, test_state->n2, s12, s21); @@ -1177,47 +1123,38 @@ static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void * /* */ /* Change 2 is known to n1 and n2 */ 
/* n1.put("_root", "x", 2); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 2)); - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* sync(n1, n2, s12, s21) */ sync(test_state->n1, test_state->n2, s12, s21); /* */ /* Each of the three nodes makes one change (changes 3, 4, 5) */ /* n1.put("_root", "x", 3); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 3)); - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* n2.put("_root", "x", 4); n2.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), 4)); - AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), 4), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* n3.put("_root", "x", 5); n3.commit("", 0) */ - AMfree(AMmapPutUint(n3, AM_ROOT, AMstr("x"), 5)); - AMfree(AMcommit(n3, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMmapPutUint(n3, AM_ROOT, AMstr("x"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(n3, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* Apply n3's latest change to n2. 
*/ /* let change = n3.getLastLocalChange() if (change === null) throw new RangeError("no local change") */ - AMchanges changes = AMpush(&test_state->stack, - AMgetLastLocalChange(n3), - AM_VALUE_CHANGES, - cmocka_cb).changes; + AMitems changes = AMstackItems(stack_ptr, AMgetLastLocalChange(n3), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); /* n2.applyChanges([change]) */ - AMfree(AMapplyChanges(test_state->n2, &changes)); + AMstackItem(NULL, AMapplyChanges(test_state->n2, &changes), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* Now sync n1 and n2. n3's change is concurrent to n1 and n2's last sync * heads */ /* sync(n1, n2, s12, s21) */ sync(test_state->n1, test_state->n2, s12, s21); /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); } @@ -1225,39 +1162,35 @@ static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void * /** * \brief should handle histories with lots of branching and merging */ -static void test_should_handle_histories_with_lots_of_branching_and_merging(void **state) { - /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') - const s1 = initSyncState(), s2 = initSyncState() */ +static void test_should_handle_histories_with_lots_of_branching_and_merging(void** state) { + /* 
const n1 = create('01234567'), n2 = create('89abcdef'), n3 = + create('fedcba98') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; - AMdoc* n3 = AMpush(&test_state->stack, - AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr(AMstr("fedcba98")), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + AMstack** stack_ptr = &test_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("fedcba98")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMdoc* n3; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &n3)); /* n1.put("_root", "x", 0); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 0)); - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* let change1 = n1.getLastLocalChange() if (change1 === null) throw new RangeError("no local change") */ - AMchanges change1 = AMpush(&test_state->stack, - AMgetLastLocalChange(test_state->n1), - AM_VALUE_CHANGES, - cmocka_cb).changes; + AMitems change1 = + AMstackItems(stack_ptr, AMgetLastLocalChange(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); /* n2.applyChanges([change1]) */ - AMfree(AMapplyChanges(test_state->n2, &change1)); + AMstackItem(NULL, AMapplyChanges(test_state->n2, &change1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* let change2 = n1.getLastLocalChange() if (change2 === null) throw new RangeError("no local change") */ - AMchanges change2 = AMpush(&test_state->stack, - AMgetLastLocalChange(test_state->n1), - AM_VALUE_CHANGES, - cmocka_cb).changes; + AMitems change2 = + AMstackItems(stack_ptr, AMgetLastLocalChange(test_state->n1), 
cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); /* n3.applyChanges([change2]) */ - AMfree(AMapplyChanges(n3, &change2)); + AMstackItem(NULL, AMapplyChanges(n3, &change2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n3.put("_root", "x", 1); n3.commit("", 0) */ - AMfree(AMmapPutUint(n3, AM_ROOT, AMstr("x"), 1)); - AMfree(AMcommit(n3, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMmapPutUint(n3, AM_ROOT, AMstr("x"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(n3, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 * / \/ \/ \/ @@ -1269,28 +1202,24 @@ static void test_should_handle_histories_with_lots_of_branching_and_merging(void /* for (let i = 1; i < 20; i++) { */ for (size_t i = 1; i != 20; ++i) { /* n1.put("_root", "n1", i); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, AMstr("n1"), i)); - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("n1"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* n2.put("_root", "n2", i); n2.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, AMstr("n2"), i)); - AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("n2"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* const change1 = n1.getLastLocalChange() if (change1 === null) throw new RangeError("no local change") */ - AMchanges change1 = AMpush(&test_state->stack, - AMgetLastLocalChange(test_state->n1), - AM_VALUE_CHANGES, - cmocka_cb).changes; + AMitems change1 = + AMstackItems(stack_ptr, AMgetLastLocalChange(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); /* const change2 = 
n2.getLastLocalChange() if (change2 === null) throw new RangeError("no local change") */ - AMchanges change2 = AMpush(&test_state->stack, - AMgetLastLocalChange(test_state->n2), - AM_VALUE_CHANGES, - cmocka_cb).changes; + AMitems change2 = + AMstackItems(stack_ptr, AMgetLastLocalChange(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); /* n1.applyChanges([change2]) */ - AMfree(AMapplyChanges(test_state->n1, &change2)); + AMstackItem(NULL, AMapplyChanges(test_state->n1, &change2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n2.applyChanges([change1]) */ - AMfree(AMapplyChanges(test_state->n2, &change1)); - /* { */ + AMstackItem(NULL, AMapplyChanges(test_state->n2, &change1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* { */ } /* */ /* sync(n1, n2, s1, s2) */ @@ -1300,31 +1229,24 @@ static void test_should_handle_histories_with_lots_of_branching_and_merging(void * the slower code path */ /* const change3 = n2.getLastLocalChange() if (change3 === null) throw new RangeError("no local change") */ - AMchanges change3 = AMpush(&test_state->stack, - AMgetLastLocalChange(n3), - AM_VALUE_CHANGES, - cmocka_cb).changes; + AMitems change3 = AMstackItems(stack_ptr, AMgetLastLocalChange(n3), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); /* n2.applyChanges([change3]) */ - AMfree(AMapplyChanges(test_state->n2, &change3)); + AMstackItem(NULL, AMapplyChanges(test_state->n2, &change3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.put("_root", "n1", "final"); n1.commit("", 0) */ - AMfree(AMmapPutStr(test_state->n1, AM_ROOT, AMstr("n1"), AMstr("final"))); - AMfree(AMcommit(test_state->n1, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMmapPutStr(test_state->n1, AM_ROOT, AMstr("n1"), AMstr("final")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* n2.put("_root", "n2", "final"); n2.commit("", 0) */ - AMfree(AMmapPutStr(test_state->n2, AM_ROOT, AMstr("n2"), AMstr("final"))); - 
AMfree(AMcommit(test_state->n2, AMstr(""), &TIME_0)); + AMstackItem(NULL, AMmapPutStr(test_state->n2, AM_ROOT, AMstr("n2"), AMstr("final")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* sync(n1, n2, s1, s2) */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); } @@ -1334,20 +1256,26 @@ int run_ported_wasm_sync_tests(void) { cmocka_unit_test_setup_teardown(test_should_send_a_sync_message_implying_no_local_data, setup, teardown), cmocka_unit_test_setup_teardown(test_should_not_reply_if_we_have_no_data_as_well, setup, teardown), cmocka_unit_test_setup_teardown(test_repos_with_equal_heads_do_not_need_a_reply_message, setup, teardown), - cmocka_unit_test_setup_teardown(test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_sync_peers_where_one_has_commits_the_other_does_not, setup, teardown), + cmocka_unit_test_setup_teardown(test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing, setup, + teardown), + 
cmocka_unit_test_setup_teardown(test_should_sync_peers_where_one_has_commits_the_other_does_not, setup, + teardown), cmocka_unit_test_setup_teardown(test_should_work_with_prior_sync_state, setup, teardown), cmocka_unit_test_setup_teardown(test_should_not_generate_messages_once_synced, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_allow_simultaneous_messages_during_synchronization, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_assume_sent_changes_were_received_until_we_hear_otherwise, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_allow_simultaneous_messages_during_synchronization, setup, + teardown), + cmocka_unit_test_setup_teardown(test_should_assume_sent_changes_were_received_until_we_hear_otherwise, setup, + teardown), cmocka_unit_test_setup_teardown(test_should_work_regardless_of_who_initiates_the_exchange, setup, teardown), cmocka_unit_test_setup_teardown(test_should_work_without_prior_sync_state, setup, teardown), cmocka_unit_test_setup_teardown(test_should_work_with_prior_sync_state_2, setup, teardown), cmocka_unit_test_setup_teardown(test_should_ensure_non_empty_state_after_sync, setup, teardown), cmocka_unit_test_setup_teardown(test_should_resync_after_one_node_crashed_with_data_loss, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_resync_after_one_node_experiences_data_loss_without_disconnecting, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_resync_after_one_node_experiences_data_loss_without_disconnecting, + setup, teardown), cmocka_unit_test_setup_teardown(test_should_handle_changes_concurrrent_to_the_last_sync_heads, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_handle_histories_with_lots_of_branching_and_merging, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_handle_histories_with_lots_of_branching_and_merging, setup, + teardown), }; return cmocka_run_group_tests(tests, NULL, NULL); diff --git a/rust/automerge-c/test/stack_utils.c 
b/rust/automerge-c/test/stack_utils.c deleted file mode 100644 index f65ea2e5..00000000 --- a/rust/automerge-c/test/stack_utils.c +++ /dev/null @@ -1,31 +0,0 @@ -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include "cmocka_utils.h" -#include "stack_utils.h" - -void cmocka_cb(AMresultStack** stack, uint8_t discriminant) { - assert_non_null(stack); - assert_non_null(*stack); - assert_non_null((*stack)->result); - if (AMresultStatus((*stack)->result) != AM_STATUS_OK) { - fail_msg_view("%s", AMerrorMessage((*stack)->result)); - } - assert_int_equal(AMresultValue((*stack)->result).tag, discriminant); -} - -int setup_stack(void** state) { - *state = NULL; - return 0; -} - -int teardown_stack(void** state) { - AMresultStack* stack = *state; - AMfreeStack(&stack); - return 0; -} diff --git a/rust/automerge-c/test/stack_utils.h b/rust/automerge-c/test/stack_utils.h deleted file mode 100644 index 473feebc..00000000 --- a/rust/automerge-c/test/stack_utils.h +++ /dev/null @@ -1,38 +0,0 @@ -#ifndef STACK_UTILS_H -#define STACK_UTILS_H - -#include - -/* local */ -#include - -/** - * \brief Reports an error through a cmocka assertion. - * - * \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. - * \param[in] discriminant An `AMvalueVariant` enum tag. - * \pre \p stack` != NULL`. - */ -void cmocka_cb(AMresultStack** stack, uint8_t discriminant); - -/** - * \brief Allocates a result stack for storing the results allocated during one - * or more test cases. - * - * \param[in,out] state A pointer to a pointer to an `AMresultStack` struct. - * \pre \p state` != NULL`. - * \warning The `AMresultStack` struct returned through \p state must be - * deallocated with `teardown_stack()` in order to prevent memory leaks. - */ -int setup_stack(void** state); - -/** - * \brief Deallocates a result stack after deallocating any results that were - * stored in it by one or more test cases. 
- * - * \param[in] state A pointer to a pointer to an `AMresultStack` struct. - * \pre \p state` != NULL`. - */ -int teardown_stack(void** state); - -#endif /* STACK_UTILS_H */ diff --git a/rust/automerge-c/test/str_utils.c b/rust/automerge-c/test/str_utils.c index cc923cb4..2937217a 100644 --- a/rust/automerge-c/test/str_utils.c +++ b/rust/automerge-c/test/str_utils.c @@ -1,5 +1,5 @@ -#include #include +#include /* local */ #include "str_utils.h" diff --git a/rust/automerge-c/test/str_utils.h b/rust/automerge-c/test/str_utils.h index b9985683..14a4af73 100644 --- a/rust/automerge-c/test/str_utils.h +++ b/rust/automerge-c/test/str_utils.h @@ -1,14 +1,17 @@ -#ifndef STR_UTILS_H -#define STR_UTILS_H +#ifndef TESTS_STR_UTILS_H +#define TESTS_STR_UTILS_H /** - * \brief Converts a hexadecimal string into a sequence of bytes. + * \brief Converts a hexadecimal string into an array of bytes. * - * \param[in] hex_str A string. - * \param[in] src A pointer to a contiguous sequence of bytes. - * \param[in] count The number of bytes to copy to \p src. - * \pre \p count `<=` length of \p src. + * \param[in] hex_str A hexadecimal string. + * \param[in] src A pointer to an array of bytes. + * \param[in] count The count of bytes to copy into the array pointed to by + * \p src. 
+ * \pre \p src `!= NULL` + * \pre `sizeof(`\p src `) > 0` + * \pre \p count `<= sizeof(`\p src `)` */ void hex_to_bytes(char const* hex_str, uint8_t* src, size_t const count); -#endif /* STR_UTILS_H */ +#endif /* TESTS_STR_UTILS_H */ diff --git a/rust/automerge/src/error.rs b/rust/automerge/src/error.rs index 57a87167..68b8ec65 100644 --- a/rust/automerge/src/error.rs +++ b/rust/automerge/src/error.rs @@ -1,3 +1,4 @@ +use crate::change::LoadError as LoadChangeError; use crate::storage::load::Error as LoadError; use crate::types::{ActorId, ScalarValue}; use crate::value::DataType; @@ -18,6 +19,8 @@ pub enum AutomergeError { Fail, #[error("invalid actor ID `{0}`")] InvalidActorId(String), + #[error(transparent)] + InvalidChangeHashBytes(#[from] InvalidChangeHashSlice), #[error("invalid UTF-8 character at {0}")] InvalidCharacter(usize), #[error("invalid hash {0}")] @@ -39,6 +42,8 @@ pub enum AutomergeError { }, #[error(transparent)] Load(#[from] LoadError), + #[error(transparent)] + LoadChangeError(#[from] LoadChangeError), #[error("increment operations must be against a counter value")] MissingCounter, #[error("hash {0} does not correspond to a change in this document")] diff --git a/scripts/ci/cmake-build b/scripts/ci/cmake-build index f6f9f9b1..25a69756 100755 --- a/scripts/ci/cmake-build +++ b/scripts/ci/cmake-build @@ -16,4 +16,4 @@ C_PROJECT=$THIS_SCRIPT/../../rust/automerge-c; mkdir -p $C_PROJECT/build; cd $C_PROJECT/build; cmake --log-level=ERROR -B . -S .. -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DBUILD_SHARED_LIBS=$SHARED_TOGGLE; -cmake --build . --target test_automerge; +cmake --build . 
--target automerge_test; From 44fa7ac41647fa465ee7baa0bc0ee64e811dded8 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 27 Feb 2023 13:12:09 -0700 Subject: [PATCH 725/730] Don't panic on missing deps of change chunks (#538) * Fix doubly-reported ops in load of change chunks Since c3c04128f5f1703007f650ea3104d98334334aab, observers have been called twice when calling Automerge::load() with change chunks. * Better handle change chunks with missing deps Before this change Automerge::load would panic if you passed a change chunk that was missing a dependency, or multiple change chunks not in strict dependency order. After this change these cases will error instead. --- rust/automerge/src/automerge.rs | 38 +++++++++--------- rust/automerge/src/automerge/current_state.rs | 29 ++++++++++++- rust/automerge/src/error.rs | 2 + .../fixtures/two_change_chunks.automerge | Bin 0 -> 177 bytes .../two_change_chunks_compressed.automerge | Bin 0 -> 192 bytes .../two_change_chunks_out_of_order.automerge | Bin 0 -> 177 bytes .../fuzz-crashers/missing_deps.automerge | Bin 0 -> 224 bytes .../missing_deps_compressed.automerge | Bin 0 -> 120 bytes .../missing_deps_subsequent.automerge | Bin 0 -> 180 bytes rust/automerge/tests/test.rs | 13 ++++++ 10 files changed, 62 insertions(+), 20 deletions(-) create mode 100644 rust/automerge/tests/fixtures/two_change_chunks.automerge create mode 100644 rust/automerge/tests/fixtures/two_change_chunks_compressed.automerge create mode 100644 rust/automerge/tests/fixtures/two_change_chunks_out_of_order.automerge create mode 100644 rust/automerge/tests/fuzz-crashers/missing_deps.automerge create mode 100644 rust/automerge/tests/fuzz-crashers/missing_deps_compressed.automerge create mode 100644 rust/automerge/tests/fuzz-crashers/missing_deps_subsequent.automerge diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs index 09c3cc9d..9c45ec51 100644 --- a/rust/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ 
-464,6 +464,7 @@ impl Automerge { return Err(load::Error::BadChecksum.into()); } + let mut change: Option = None; let mut am = match first_chunk { storage::Chunk::Document(d) => { tracing::trace!("first chunk is document chunk, inflating"); @@ -501,30 +502,31 @@ impl Automerge { } } storage::Chunk::Change(stored_change) => { - tracing::trace!("first chunk is change chunk, applying"); - let change = Change::new_from_unverified(stored_change.into_owned(), None) - .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?; - let mut am = Self::new(); - am.apply_change(change, &mut observer); - am + tracing::trace!("first chunk is change chunk"); + change = Some( + Change::new_from_unverified(stored_change.into_owned(), None) + .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?, + ); + Self::new() } storage::Chunk::CompressedChange(stored_change, compressed) => { - tracing::trace!("first chunk is compressed change, decompressing and applying"); - let change = Change::new_from_unverified( - stored_change.into_owned(), - Some(compressed.into_owned()), - ) - .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?; - let mut am = Self::new(); - am.apply_change(change, &mut observer); - am + tracing::trace!("first chunk is compressed change"); + change = Some( + Change::new_from_unverified( + stored_change.into_owned(), + Some(compressed.into_owned()), + ) + .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?, + ); + Self::new() } }; - tracing::trace!("first chunk loaded, loading remaining chunks"); + tracing::trace!("loading change chunks"); match load::load_changes(remaining.reset()) { load::LoadedChanges::Complete(c) => { - for change in c { - am.apply_change(change, &mut observer); + am.apply_changes(change.into_iter().chain(c))?; + if !am.queue.is_empty() { + return Err(AutomergeError::MissingDeps); } } load::LoadedChanges::Partial { error, .. 
} => { diff --git a/rust/automerge/src/automerge/current_state.rs b/rust/automerge/src/automerge/current_state.rs index 1c1bceed..3f7f4afc 100644 --- a/rust/automerge/src/automerge/current_state.rs +++ b/rust/automerge/src/automerge/current_state.rs @@ -338,9 +338,9 @@ impl<'a, I: Iterator>> Iterator for TextActions<'a, I> { #[cfg(test)] mod tests { - use std::borrow::Cow; + use std::{borrow::Cow, fs}; - use crate::{transaction::Transactable, ObjType, OpObserver, Prop, ReadDoc, Value}; + use crate::{transaction::Transactable, Automerge, ObjType, OpObserver, Prop, ReadDoc, Value}; // Observer ops often carry a "tagged value", which is a value and the OpID of the op which // created that value. For a lot of values (i.e. any scalar value) we don't care about the @@ -887,4 +887,29 @@ mod tests { ]) ); } + + #[test] + fn test_load_changes() { + fn fixture(name: &str) -> Vec { + fs::read("./tests/fixtures/".to_owned() + name).unwrap() + } + + let mut obs = ObserverStub::new(); + let _doc = Automerge::load_with( + &fixture("counter_value_is_ok.automerge"), + crate::OnPartialLoad::Error, + crate::storage::VerificationMode::Check, + Some(&mut obs), + ); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ObserverCall::Put { + obj: crate::ROOT, + prop: "a".into(), + value: ObservedValue::Untagged(crate::ScalarValue::Counter(2000.into()).into()), + conflict: false, + },]) + ); + } } diff --git a/rust/automerge/src/error.rs b/rust/automerge/src/error.rs index 68b8ec65..86dbe9f3 100644 --- a/rust/automerge/src/error.rs +++ b/rust/automerge/src/error.rs @@ -48,6 +48,8 @@ pub enum AutomergeError { MissingCounter, #[error("hash {0} does not correspond to a change in this document")] MissingHash(ChangeHash), + #[error("change's deps should already be in the document")] + MissingDeps, #[error("compressed chunk was not a change")] NonChangeCompressed, #[error("id was not an object id")] diff --git a/rust/automerge/tests/fixtures/two_change_chunks.automerge 
b/rust/automerge/tests/fixtures/two_change_chunks.automerge new file mode 100644 index 0000000000000000000000000000000000000000..1a84b363ccab6161890367b7b6fadd84091acc1a GIT binary patch literal 177 zcmZq8_iCPX___h3C4;~%u8ahNZ6`uDR*U$arwyk>-a69LX7pdFiPNh77Et z%qEOZOkqp~O!bV3jP(p4*a|da`E&KFj46yDlRhQgGJP()b>hw!qP#CRXsF%#9>DfV qvr}yCn>=m|E0~y2tuSKXU}R!qf>{&J2(*Zyo)K&rW4%~XJp%xrEC}cVUk{s_*GfwAzq7vd=R$+BrLv*EZRo)X zjiFO+wp{Gg>{;2ca-QMDjq?~;7#SHD{{L?UnzQ`5`cUCz3gfK9*9|@;-7W5 zAx_SoEv*boUq4)P)0c_q;Jzcx4-GhyGZORCQx%LDI2f6jm_(UP7@e5Hn8FzgnCcno q8S5Dnfw*2Qsh*(~XdB2HMoR_^(-;|1O*3R*g_#622V@2V2m%1g@ISTy literal 0 HcmV?d00001 diff --git a/rust/automerge/tests/fuzz-crashers/missing_deps.automerge b/rust/automerge/tests/fuzz-crashers/missing_deps.automerge new file mode 100644 index 0000000000000000000000000000000000000000..8a57a0f4c8a82541f9236c878cd22599aefbcce2 GIT binary patch literal 224 zcmZq8_i8>FcHEBfDkJ0W>J_a(?qU6^Yf=o1{~5o&kywAoaLxb!s;Hm{4n%=~4@7_f dT+w7W3mbz0J3uIvfiW3@Dq(DLXvVLM3jvQ0EVKXs literal 0 HcmV?d00001 diff --git a/rust/automerge/tests/fuzz-crashers/missing_deps_compressed.automerge b/rust/automerge/tests/fuzz-crashers/missing_deps_compressed.automerge new file mode 100644 index 0000000000000000000000000000000000000000..2c7b123b6805032546ec438597e31a03245b5a79 GIT binary patch literal 120 zcmV-;0EhpDZ%TvQ<(umQZUAHeoBsjf#K?6GIWE|lwH=|kchIwRB>mYqPdl0|$S{b; zlZl!T#tysb@0Cu7tB#1rhSmZA0s_mq^zPs=2xDkrZf9j6G5`nx0s;aR12h3b0#*W7 a0dN9;0Dl300bv1u0e==^e*ggh0RR6R126Ib literal 0 HcmV?d00001 diff --git a/rust/automerge/tests/fuzz-crashers/missing_deps_subsequent.automerge b/rust/automerge/tests/fuzz-crashers/missing_deps_subsequent.automerge new file mode 100644 index 0000000000000000000000000000000000000000..2fe439afd0c7792801f52a5325a2582478efdd1d GIT binary patch literal 180 zcmZq8_iE-b7ZG8!VGvl5pXAtm&!MU9#x>WYe^O^NGTz(X^8SGVM{-7DUV5s6F$0?@ zvk9XUQy5b?V*yh=Vc^1qZv~et#%p2bkvx1Prjyk;Lcr*+ew Date: Fri, 3 Mar 2023 17:42:40 -0500 Subject: [PATCH 726/730] Suppress clippy warning 
in parse.rs + bump toolchain (#542) * Fix rust error in parse.rs * Bump toolchain to 1.67.0 --- .github/workflows/ci.yaml | 14 +++++++------- rust/automerge/src/storage/parse.rs | 1 + 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index bfa31bd5..0263f408 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -14,7 +14,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.66.0 + toolchain: 1.67.0 default: true components: rustfmt - uses: Swatinem/rust-cache@v1 @@ -28,7 +28,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.66.0 + toolchain: 1.67.0 default: true components: clippy - uses: Swatinem/rust-cache@v1 @@ -42,7 +42,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.66.0 + toolchain: 1.67.0 default: true - uses: Swatinem/rust-cache@v1 - name: Build rust docs @@ -118,7 +118,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.66.0 + toolchain: 1.67.0 default: true - uses: Swatinem/rust-cache@v1 - name: Install CMocka @@ -136,7 +136,7 @@ jobs: strategy: matrix: toolchain: - - 1.66.0 + - 1.67.0 steps: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 @@ -155,7 +155,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.66.0 + toolchain: 1.67.0 default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test @@ -168,7 +168,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.66.0 + toolchain: 1.67.0 default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test diff --git a/rust/automerge/src/storage/parse.rs b/rust/automerge/src/storage/parse.rs index 54668da4..6751afb4 100644 --- a/rust/automerge/src/storage/parse.rs +++ b/rust/automerge/src/storage/parse.rs @@ -308,6 +308,7 @@ impl<'a> Input<'a> { } /// The bytes behind this input - including bytes which have been consumed 
+ #[allow(clippy::misnamed_getters)] pub(crate) fn bytes(&self) -> &'a [u8] { self.original } From 2c1970f6641ea3fe10976721316ae6d07765e4a1 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Sat, 4 Mar 2023 05:09:08 -0700 Subject: [PATCH 727/730] Fix panic on invalid action (#541) We make the validation on parsing operations in the encoded changes stricter to avoid a possible panic when applying changes. --- rust/automerge/src/automerge.rs | 2 +- rust/automerge/src/change.rs | 2 +- .../src/columnar/encoding/col_error.rs | 2 +- rust/automerge/src/error.rs | 2 +- .../src/storage/change/change_op_columns.rs | 20 ++++++++- rust/automerge/src/types.rs | 40 ++++++++++++------ .../fuzz-crashers/action-is-48.automerge | Bin 0 -> 58 bytes 7 files changed, 48 insertions(+), 20 deletions(-) create mode 100644 rust/automerge/tests/fuzz-crashers/action-is-48.automerge diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs index 9c45ec51..0dd82253 100644 --- a/rust/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ -723,7 +723,7 @@ impl Automerge { obj, Op { id, - action: OpType::from_index_and_value(c.action, c.val).unwrap(), + action: OpType::from_action_and_value(c.action, c.val), key, succ: Default::default(), pred, diff --git a/rust/automerge/src/change.rs b/rust/automerge/src/change.rs index b5cae7df..be467a84 100644 --- a/rust/automerge/src/change.rs +++ b/rust/automerge/src/change.rs @@ -278,7 +278,7 @@ impl From<&Change> for crate::ExpandedChange { let operations = c .iter_ops() .map(|o| crate::legacy::Op { - action: crate::types::OpType::from_index_and_value(o.action, o.val).unwrap(), + action: crate::types::OpType::from_action_and_value(o.action, o.val), insert: o.insert, key: match o.key { StoredKey::Elem(e) if e.is_head() => { diff --git a/rust/automerge/src/columnar/encoding/col_error.rs b/rust/automerge/src/columnar/encoding/col_error.rs index c8d5c5c0..089556b6 100644 --- a/rust/automerge/src/columnar/encoding/col_error.rs 
+++ b/rust/automerge/src/columnar/encoding/col_error.rs @@ -1,5 +1,5 @@ #[derive(Clone, Debug)] -pub(crate) struct DecodeColumnError { +pub struct DecodeColumnError { path: Path, error: DecodeColErrorKind, } diff --git a/rust/automerge/src/error.rs b/rust/automerge/src/error.rs index 86dbe9f3..62a7b72f 100644 --- a/rust/automerge/src/error.rs +++ b/rust/automerge/src/error.rs @@ -99,7 +99,7 @@ pub struct InvalidElementId(pub String); pub struct InvalidOpId(pub String); #[derive(Error, Debug)] -pub(crate) enum InvalidOpType { +pub enum InvalidOpType { #[error("unrecognized action index {0}")] UnknownAction(u64), #[error("non numeric argument for inc op")] diff --git a/rust/automerge/src/storage/change/change_op_columns.rs b/rust/automerge/src/storage/change/change_op_columns.rs index 7c3a65ec..cd1cb150 100644 --- a/rust/automerge/src/storage/change/change_op_columns.rs +++ b/rust/automerge/src/storage/change/change_op_columns.rs @@ -14,6 +14,7 @@ use crate::{ }, }, convert, + error::InvalidOpType, storage::{ change::AsChangeOp, columns::{ @@ -22,6 +23,7 @@ use crate::{ RawColumns, }, types::{ElemId, ObjId, OpId, ScalarValue}, + OpType, }; const OBJ_COL_ID: ColumnId = ColumnId::new(0); @@ -276,7 +278,12 @@ impl ChangeOpsColumns { #[derive(thiserror::Error, Debug)] #[error(transparent)] -pub struct ReadChangeOpError(#[from] DecodeColumnError); +pub enum ReadChangeOpError { + #[error(transparent)] + DecodeError(#[from] DecodeColumnError), + #[error(transparent)] + InvalidOpType(#[from] InvalidOpType), +} #[derive(Clone)] pub(crate) struct ChangeOpsIter<'a> { @@ -308,6 +315,11 @@ impl<'a> ChangeOpsIter<'a> { let action = self.action.next_in_col("action")?; let val = self.val.next_in_col("value")?; let pred = self.pred.next_in_col("pred")?; + + // This check is necessary to ensure that OpType::from_action_and_value + // cannot panic later in the process. 
+ OpType::validate_action_and_value(action, &val)?; + Ok(Some(ChangeOp { obj, key, @@ -458,10 +470,14 @@ mod tests { action in 0_u64..6, obj in opid(), insert in any::()) -> ChangeOp { + + let val = if action == 5 && !(value.is_int() || value.is_uint()) { + ScalarValue::Uint(0) + } else { value }; ChangeOp { obj: obj.into(), key, - val: value, + val, pred, action, insert, diff --git a/rust/automerge/src/types.rs b/rust/automerge/src/types.rs index 870569e9..2978aa97 100644 --- a/rust/automerge/src/types.rs +++ b/rust/automerge/src/types.rs @@ -216,23 +216,35 @@ impl OpType { } } - pub(crate) fn from_index_and_value( - index: u64, - value: ScalarValue, - ) -> Result { - match index { - 0 => Ok(Self::Make(ObjType::Map)), - 1 => Ok(Self::Put(value)), - 2 => Ok(Self::Make(ObjType::List)), - 3 => Ok(Self::Delete), - 4 => Ok(Self::Make(ObjType::Text)), + pub(crate) fn validate_action_and_value( + action: u64, + value: &ScalarValue, + ) -> Result<(), error::InvalidOpType> { + match action { + 0..=4 => Ok(()), 5 => match value { - ScalarValue::Int(i) => Ok(Self::Increment(i)), - ScalarValue::Uint(i) => Ok(Self::Increment(i as i64)), + ScalarValue::Int(_) | ScalarValue::Uint(_) => Ok(()), _ => Err(error::InvalidOpType::NonNumericInc), }, - 6 => Ok(Self::Make(ObjType::Table)), - other => Err(error::InvalidOpType::UnknownAction(other)), + 6 => Ok(()), + _ => Err(error::InvalidOpType::UnknownAction(action)), + } + } + + pub(crate) fn from_action_and_value(action: u64, value: ScalarValue) -> OpType { + match action { + 0 => Self::Make(ObjType::Map), + 1 => Self::Put(value), + 2 => Self::Make(ObjType::List), + 3 => Self::Delete, + 4 => Self::Make(ObjType::Text), + 5 => match value { + ScalarValue::Int(i) => Self::Increment(i), + ScalarValue::Uint(i) => Self::Increment(i as i64), + _ => unreachable!("validate_action_and_value returned NonNumericInc"), + }, + 6 => Self::Make(ObjType::Table), + _ => unreachable!("validate_action_and_value returned UnknownAction"), } } } diff --git 
a/rust/automerge/tests/fuzz-crashers/action-is-48.automerge b/rust/automerge/tests/fuzz-crashers/action-is-48.automerge new file mode 100644 index 0000000000000000000000000000000000000000..16e6f719a13dd6b1d9eff8488ee651ab7f72bfc3 GIT binary patch literal 58 vcmZq8_i8>{b9^SF0fT@6CSYJ-6J<7GbYco)N@OZvGGH_SqI$Lq{Phd~tz-

Date: Tue, 7 Mar 2023 09:49:04 -0700 Subject: [PATCH 728/730] Error instead of corrupt large op counters (#543) Since b78211ca6, OpIds have been silently truncated to 2**32. This causes corruption in the case the op id overflows. This change converts the silent error to a panic, and guards against the panic on the codepath found by the fuzzer. --- .../automerge/src/columnar/column_range/opid.rs | 6 +++--- .../src/columnar/encoding/properties.rs | 2 +- rust/automerge/src/storage/change.rs | 3 +++ .../src/storage/change/change_op_columns.rs | 2 ++ rust/automerge/src/types.rs | 6 +++--- rust/automerge/src/types/opids.rs | 2 +- .../fixtures/64bit_obj_id_change.automerge | Bin 0 -> 73 bytes .../tests/fixtures/64bit_obj_id_doc.automerge | Bin 0 -> 147 bytes rust/automerge/tests/test.rs | 16 ++++++++++++++++ 9 files changed, 29 insertions(+), 8 deletions(-) create mode 100644 rust/automerge/tests/fixtures/64bit_obj_id_change.automerge create mode 100644 rust/automerge/tests/fixtures/64bit_obj_id_doc.automerge diff --git a/rust/automerge/src/columnar/column_range/opid.rs b/rust/automerge/src/columnar/column_range/opid.rs index ae95d758..d2cdce79 100644 --- a/rust/automerge/src/columnar/column_range/opid.rs +++ b/rust/automerge/src/columnar/column_range/opid.rs @@ -104,11 +104,11 @@ impl<'a> OpIdIter<'a> { .transpose() .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; match (actor, counter) { - (Some(Some(a)), Some(Some(c))) => match c.try_into() { - Ok(c) => Ok(Some(OpId::new(c, a as usize))), + (Some(Some(a)), Some(Some(c))) => match u32::try_from(c) { + Ok(c) => Ok(Some(OpId::new(c as u64, a as usize))), Err(_) => Err(DecodeColumnError::invalid_value( "counter", - "negative value encountered", + "negative or large value encountered", )), }, (Some(None), _) => Err(DecodeColumnError::unexpected_null("actor")), diff --git a/rust/automerge/src/columnar/encoding/properties.rs b/rust/automerge/src/columnar/encoding/properties.rs index a3bf1ed0..30f1169d 100644 --- 
a/rust/automerge/src/columnar/encoding/properties.rs +++ b/rust/automerge/src/columnar/encoding/properties.rs @@ -139,7 +139,7 @@ pub(crate) fn option_splice_scenario< } pub(crate) fn opid() -> impl Strategy + Clone { - (0..(i64::MAX as usize), 0..(i64::MAX as u64)).prop_map(|(actor, ctr)| OpId::new(ctr, actor)) + (0..(u32::MAX as usize), 0..(u32::MAX as u64)).prop_map(|(actor, ctr)| OpId::new(ctr, actor)) } pub(crate) fn elemid() -> impl Strategy + Clone { diff --git a/rust/automerge/src/storage/change.rs b/rust/automerge/src/storage/change.rs index ff3cc9ab..61db0b00 100644 --- a/rust/automerge/src/storage/change.rs +++ b/rust/automerge/src/storage/change.rs @@ -177,6 +177,9 @@ impl<'a> Change<'a, Unverified> { for op in self.iter_ops() { f(op?); } + if u32::try_from(u64::from(self.start_op)).is_err() { + return Err(ReadChangeOpError::CounterTooLarge); + } Ok(Change { bytes: self.bytes, header: self.header, diff --git a/rust/automerge/src/storage/change/change_op_columns.rs b/rust/automerge/src/storage/change/change_op_columns.rs index cd1cb150..86ec59c2 100644 --- a/rust/automerge/src/storage/change/change_op_columns.rs +++ b/rust/automerge/src/storage/change/change_op_columns.rs @@ -283,6 +283,8 @@ pub enum ReadChangeOpError { DecodeError(#[from] DecodeColumnError), #[error(transparent)] InvalidOpType(#[from] InvalidOpType), + #[error("counter too large")] + CounterTooLarge, } #[derive(Clone)] diff --git a/rust/automerge/src/types.rs b/rust/automerge/src/types.rs index 2978aa97..468986ec 100644 --- a/rust/automerge/src/types.rs +++ b/rust/automerge/src/types.rs @@ -439,17 +439,17 @@ pub(crate) struct OpId(u32, u32); impl OpId { pub(crate) fn new(counter: u64, actor: usize) -> Self { - Self(counter as u32, actor as u32) + Self(counter.try_into().unwrap(), actor.try_into().unwrap()) } #[inline] pub(crate) fn counter(&self) -> u64 { - self.0 as u64 + self.0.into() } #[inline] pub(crate) fn actor(&self) -> usize { - self.1 as usize + self.1.try_into().unwrap() } 
#[inline] diff --git a/rust/automerge/src/types/opids.rs b/rust/automerge/src/types/opids.rs index eaeed471..a81ccb36 100644 --- a/rust/automerge/src/types/opids.rs +++ b/rust/automerge/src/types/opids.rs @@ -129,7 +129,7 @@ mod tests { fn gen_opid(actors: Vec) -> impl Strategy { (0..actors.len()).prop_flat_map(|actor_idx| { - (Just(actor_idx), 0..u64::MAX) + (Just(actor_idx), 0..(u32::MAX as u64)) .prop_map(|(actor_idx, counter)| OpId::new(counter, actor_idx)) }) } diff --git a/rust/automerge/tests/fixtures/64bit_obj_id_change.automerge b/rust/automerge/tests/fixtures/64bit_obj_id_change.automerge new file mode 100644 index 0000000000000000000000000000000000000000..700342a2df71772d78f0373385f44aae9eb88c7b GIT binary patch literal 73 zcmZq8_i9cmO}NHr&meG%DY?GbS?DGk_of5L_6# literal 0 HcmV?d00001 diff --git a/rust/automerge/tests/fixtures/64bit_obj_id_doc.automerge b/rust/automerge/tests/fixtures/64bit_obj_id_doc.automerge new file mode 100644 index 0000000000000000000000000000000000000000..6beb57fe9ad7d5428d5b854c0e39f8bb57dcfdf7 GIT binary patch literal 147 zcmZq8_i7GNJ@|p4gOO3-7FS4!le1?_E5p*)57*rEWlSnfxVFRCa9`J~6^(1kJz9Nc zT|0UFe#&#R(pL)KvpP?)GcqwV33Dj3n{qiYg)y; Date: Thu, 9 Mar 2023 08:09:43 -0700 Subject: [PATCH 729/730] smaller automerge c (#545) * Fix automerge-c tests on mac * Generate significantly smaller automerge-c builds This cuts the size of libautomerge_core.a from 25Mb to 1.6Mb on macOS and 53Mb to 2.7Mb on Linux. As a side-effect of setting codegen-units = 1 for all release builds the optimized wasm files are also 100kb smaller. 
--- .github/workflows/ci.yaml | 8 +++++--- README.md | 5 ++++- rust/Cargo.toml | 9 ++------- rust/automerge-c/CMakeLists.txt | 26 +++++++++++++++++++++---- rust/automerge-c/test/byte_span_tests.c | 1 + 5 files changed, 34 insertions(+), 15 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0263f408..8519ac5e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -2,10 +2,10 @@ name: CI on: push: branches: - - main + - main pull_request: branches: - - main + - main jobs: fmt: runs-on: ubuntu-latest @@ -118,7 +118,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.67.0 + toolchain: nightly-2023-01-26 default: true - uses: Swatinem/rust-cache@v1 - name: Install CMocka @@ -127,6 +127,8 @@ jobs: uses: jwlawson/actions-setup-cmake@v1.12 with: cmake-version: latest + - name: Install rust-src + run: rustup component add rust-src - name: Build and test C bindings run: ./scripts/ci/cmake-build Release Static shell: bash diff --git a/README.md b/README.md index 76d48ddd..ad174da4 100644 --- a/README.md +++ b/README.md @@ -57,7 +57,6 @@ to figure out how to use it. 
If you are looking to build rust applications which use automerge you may want to look into [autosurgeon](https://github.com/alexjg/autosurgeon) - ## Repository Organisation - `./rust` - the rust rust implementation and also the Rust components of @@ -119,6 +118,10 @@ yarn --cwd ./javascript # install rust dependencies cargo install wasm-bindgen-cli wasm-opt cargo-deny +# get nightly rust to produce optimized automerge-c builds +rustup toolchain install nightly +rustup component add rust-src --toolchain nightly + # add wasm target in addition to current architecture rustup target add wasm32-unknown-unknown diff --git a/rust/Cargo.toml b/rust/Cargo.toml index 938100cf..5d29fc9f 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -10,13 +10,8 @@ members = [ resolver = "2" [profile.release] -debug = true lto = true -opt-level = 3 +codegen-units = 1 [profile.bench] -debug = true - -[profile.release.package.automerge-wasm] -debug = false -opt-level = 3 +debug = true \ No newline at end of file diff --git a/rust/automerge-c/CMakeLists.txt b/rust/automerge-c/CMakeLists.txt index 056d111b..0c35eebd 100644 --- a/rust/automerge-c/CMakeLists.txt +++ b/rust/automerge-c/CMakeLists.txt @@ -43,19 +43,37 @@ endif() string(TOLOWER "${CMAKE_BUILD_TYPE}" BUILD_TYPE_LOWER) +# In order to build with -Z build-std, we need to pass target explicitly. 
+# https://doc.rust-lang.org/cargo/reference/unstable.html#build-std +execute_process ( + COMMAND rustc -vV + OUTPUT_VARIABLE RUSTC_VERSION + OUTPUT_STRIP_TRAILING_WHITESPACE +) +string(REGEX REPLACE ".*host: ([^ \n]*).*" "\\1" + CARGO_TARGET + ${RUSTC_VERSION} +) + if(BUILD_TYPE_LOWER STREQUAL debug) set(CARGO_BUILD_TYPE "debug") - set(CARGO_FLAG "") + set(CARGO_FLAG --target=${CARGO_TARGET}) else() set(CARGO_BUILD_TYPE "release") - set(CARGO_FLAG "--release") + if (NOT RUSTC_VERSION MATCHES "nightly") + set(RUSTUP_TOOLCHAIN nightly) + endif() + + set(RUSTFLAGS -C\ panic=abort) + + set(CARGO_FLAG -Z build-std=std,panic_abort --release --target=${CARGO_TARGET}) endif() set(CARGO_FEATURES "") -set(CARGO_BINARY_DIR "${CARGO_TARGET_DIR}/${CARGO_BUILD_TYPE}") +set(CARGO_BINARY_DIR "${CARGO_TARGET_DIR}/${CARGO_TARGET}/${CARGO_BUILD_TYPE}") set(BINDINGS_NAME "${LIBRARY_NAME}_core") @@ -90,7 +108,7 @@ add_custom_command( # configuration file has been updated. ${CMAKE_COMMAND} -DCONDITION=NOT_EXISTS -P ${CMAKE_SOURCE_DIR}/cmake/file-touch.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h ${CMAKE_SOURCE_DIR}/cbindgen.toml COMMAND - ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} CBINDGEN_TARGET_DIR=${CBINDGEN_TARGET_DIR} ${CARGO_CMD} build ${CARGO_FLAG} ${CARGO_FEATURES} + ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} CBINDGEN_TARGET_DIR=${CBINDGEN_TARGET_DIR} RUSTUP_TOOLCHAIN=${RUSTUP_TOOLCHAIN} RUSTFLAGS=${RUSTFLAGS} ${CARGO_CMD} build ${CARGO_FLAG} ${CARGO_FEATURES} COMMAND # Compensate for cbindgen's translation of consecutive uppercase letters to "ScreamingSnakeCase". 
${CMAKE_COMMAND} -DMATCH_REGEX=A_M\([^_]+\)_ -DREPLACE_EXPR=AM_\\1_ -P ${CMAKE_SOURCE_DIR}/cmake/file-regex-replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h diff --git a/rust/automerge-c/test/byte_span_tests.c b/rust/automerge-c/test/byte_span_tests.c index 43856f3b..0b1c86a1 100644 --- a/rust/automerge-c/test/byte_span_tests.c +++ b/rust/automerge-c/test/byte_span_tests.c @@ -3,6 +3,7 @@ #include #include #include +#include /* third-party */ #include From cb409b6ffe2cec15ce7724c291cf91d383b4c19b Mon Sep 17 00:00:00 2001 From: alexjg Date: Thu, 9 Mar 2023 18:10:23 +0000 Subject: [PATCH 730/730] docs: timestamp -> time in automerge.change examples (#548) --- javascript/src/stable.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/javascript/src/stable.ts b/javascript/src/stable.ts index 74410346..e83b127f 100644 --- a/javascript/src/stable.ts +++ b/javascript/src/stable.ts @@ -305,7 +305,7 @@ export function from>( * @example A change with a message and a timestamp * * ``` - * doc1 = automerge.change(doc1, {message: "add another value", timestamp: 1640995200}, d => { + * doc1 = automerge.change(doc1, {message: "add another value", time: 1640995200}, d => { * d.key2 = "value2" * }) * ``` @@ -316,7 +316,7 @@ export function from>( * let patchCallback = patch => { * patchedPath = patch.path * } - * doc1 = automerge.change(doc1, {message, "add another value", timestamp: 1640995200, patchCallback}, d => { + * doc1 = automerge.change(doc1, {message, "add another value", time: 1640995200, patchCallback}, d => { * d.key2 = "value2" * }) * assert.equal(patchedPath, ["key2"])